VirtualBox

source: vbox/trunk/src/recompiler_new/target-i386/translate.c@13440

Last change on this file since 13440 was 13440, checked in by vboxsync, 16 years ago

further MSVC stuff, almost there

  • Property svn:eol-style set to native
File size: 271.9 KB
/*
 * i386 translation
 *
 * Copyright (c) 2003 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */

/*
 * Sun LGPL Disclaimer: For the avoidance of doubt, except that if any license choice
 * other than GPL or LGPL is available it will apply instead, Sun elects to use only
 * the Lesser General Public License version 2.1 (LGPLv2) at this time for any software where
 * a choice of LGPL license versions is made available with the language indicating
 * that LGPLv2 or any later version may be used, or where a choice of which version
 * of the LGPL is applied is otherwise unspecified.
 */
#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#ifndef VBOX
#include <inttypes.h>
#include <signal.h>
#include <assert.h>
#endif /* !VBOX */

#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "helper.h"
#include "tcg-op.h"

#define PREFIX_REPZ 0x01
#define PREFIX_REPNZ 0x02
#define PREFIX_LOCK 0x04
#define PREFIX_DATA 0x08
#define PREFIX_ADR 0x10

#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#ifndef VBOX
#define X86_64_DEF(x...) x
#else
#define X86_64_DEF(x...) x
#endif
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#if 1
#define BUGGY_64(x) NULL
#endif
#else
#define X86_64_ONLY(x) NULL
#ifndef VBOX
#define X86_64_DEF(x...)
#else
#define X86_64_DEF(x)
#endif
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif

//#define MACRO_TEST 1

/* global register indexes */
static TCGv cpu_env, cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp;
/* local temps */
static TCGv cpu_T[2], cpu_T3;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
static TCGv cpu_tmp5, cpu_tmp6;

#include "gen-icount.h"

#ifdef TARGET_X86_64
static int x86_64_hregs;
#endif

#ifdef VBOX

/* Special/override code readers to hide patched code. */

uint8_t ldub_code_raw(target_ulong pc)
{
    uint8_t b;

    if (!remR3GetOpcode(cpu_single_env, pc, &b))
        b = ldub_code(pc);
    return b;
}
#define ldub_code(a) ldub_code_raw(a)

uint16_t lduw_code_raw(target_ulong pc)
{
    return (ldub_code(pc+1) << 8) | ldub_code(pc);
}
#define lduw_code(a) lduw_code_raw(a)


uint32_t ldl_code_raw(target_ulong pc)
{
    return (ldub_code(pc+3) << 24) | (ldub_code(pc+2) << 16) | (ldub_code(pc+1) << 8) | ldub_code(pc);
}
#define ldl_code(a) ldl_code_raw(a)

#endif /* VBOX */
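
/* Illustration: the 16/32-bit readers above assemble their result from
   single-byte ldub_code reads so that remR3GetOpcode can substitute the
   original byte behind any patched instruction. For the little-endian
   32-bit case:

       ldl_code(pc) == b0 | (b1 << 8) | (b2 << 16) | (b3 << 24)

   where bN = ldub_code(pc + N), each byte possibly coming from the
   patch manager rather than from guest memory. */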


typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;
    int aflag, dflag;
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b;
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
#ifdef VBOX
    int vme;    /* CR4.VME */
    int record_call; /* record calls for CSAM or not? */
#endif
    int cpl;
    int iopl;
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;
    int cpuid_ext_features;
    int cpuid_ext2_features;
    int cpuid_ext3_features;
} DisasContext;

static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);

#ifdef VBOX
static void gen_check_external_event(void);
#endif

/* i386 arith/logic operations */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};

/* i386 shift ops */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7,
};

enum {
    JCC_O,
    JCC_B,
    JCC_Z,
    JCC_BE,
    JCC_S,
    JCC_P,
    JCC_L,
    JCC_LE,
};

/* operand size */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};

enum {
    /* I386 int registers */
    OR_EAX, /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16, /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};

#ifndef VBOX
static inline void gen_op_movl_T0_0(void)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_0(void)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}

#ifndef VBOX
static inline void gen_op_movl_T0_im(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_im(int32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

#ifndef VBOX
static inline void gen_op_movl_T0_imu(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_imu(uint32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

#ifndef VBOX
static inline void gen_op_movl_T1_im(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T1_im(int32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

#ifndef VBOX
static inline void gen_op_movl_T1_imu(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T1_imu(uint32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

#ifndef VBOX
static inline void gen_op_movl_A0_im(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_A0_im(uint32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_A0, val);
}

#ifdef TARGET_X86_64
#ifndef VBOX
static inline void gen_op_movq_A0_im(int64_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movq_A0_im(int64_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif

#ifndef VBOX
static inline void gen_movtl_T0_im(target_ulong val)
#else /* VBOX */
DECLINLINE(void) gen_movtl_T0_im(target_ulong val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

#ifndef VBOX
static inline void gen_movtl_T1_im(target_ulong val)
#else /* VBOX */
DECLINLINE(void) gen_movtl_T1_im(target_ulong val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

#ifndef VBOX
static inline void gen_op_andl_T0_ffff(void)
#else /* VBOX */
DECLINLINE(void) gen_op_andl_T0_ffff(void)
#endif /* VBOX */
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}

#ifndef VBOX
static inline void gen_op_andl_T0_im(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_andl_T0_im(uint32_t val)
#endif /* VBOX */
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}

#ifndef VBOX
static inline void gen_op_movl_T0_T1(void)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_T1(void)
#endif /* VBOX */
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}

#ifndef VBOX
static inline void gen_op_andl_A0_ffff(void)
#else /* VBOX */
DECLINLINE(void) gen_op_andl_A0_ffff(void)
#endif /* VBOX */
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}

#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#endif /* !TARGET_X86_64 */

#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif
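
/* Example: with these offsets the legacy high-byte registers alias the
   low word of the register array. On a little-endian host AH is byte 1
   of regs[R_EAX], so a store to AH is (sketch):

       tcg_gen_st8_tl(t0, cpu_env,
                      offsetof(CPUState, regs[R_EAX]) + REG_H_OFFSET);

   which is the 'regs[reg - 4] + REG_H_OFFSET' case taken in
   gen_op_mov_reg_v() below for AH/CH/DH/BH when no REX prefix is
   active. */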

#ifndef VBOX
static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_v(int ot, int reg, TCGv t0)
#endif /* VBOX */
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}

#ifndef VBOX
static inline void gen_op_mov_reg_T0(int ot, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_T0(int ot, int reg)
#endif /* VBOX */
{
    gen_op_mov_reg_v(ot, reg, cpu_T[0]);
}

#ifndef VBOX
static inline void gen_op_mov_reg_T1(int ot, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_T1(int ot, int reg)
#endif /* VBOX */
{
    gen_op_mov_reg_v(ot, reg, cpu_T[1]);
}

#ifndef VBOX
static inline void gen_op_mov_reg_A0(int size, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_A0(int size, int reg)
#endif /* VBOX */
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}

#ifndef VBOX
static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_v_reg(int ot, TCGv t0, int reg)
#endif /* VBOX */
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            goto std_case;
        } else {
            tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
    std_case:
        tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}

#ifndef VBOX
static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_TN_reg(int ot, int t_index, int reg)
#endif /* VBOX */
{
    gen_op_mov_v_reg(ot, cpu_T[t_index], reg);
}

#ifndef VBOX
static inline void gen_op_movl_A0_reg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_A0_reg(int reg)
#endif /* VBOX */
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}

#ifndef VBOX
static inline void gen_op_addl_A0_im(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_A0_im(int32_t val)
#endif /* VBOX */
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
#ifndef VBOX
static inline void gen_op_addq_A0_im(int64_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_addq_A0_im(int64_t val)
#endif /* VBOX */
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif

static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}

#ifndef VBOX
static inline void gen_op_addl_T0_T1(void)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_T0_T1(void)
#endif /* VBOX */
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

#ifndef VBOX
static inline void gen_op_jmp_T0(void)
#else /* VBOX */
DECLINLINE(void) gen_op_jmp_T0(void)
#endif /* VBOX */
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}

#ifndef VBOX
static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_add_reg_im(int size, int reg, int32_t val)
#endif /* VBOX */
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}

#ifndef VBOX
static inline void gen_op_add_reg_T0(int size, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_add_reg_T0(int size, int reg)
#endif /* VBOX */
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}

#ifndef VBOX
static inline void gen_op_set_cc_op(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_set_cc_op(int32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_i32(cpu_cc_op, val);
}

#ifndef VBOX
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_A0_reg_sN(int shift, int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifndef VBOX
static inline void gen_op_movl_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_A0_seg(int reg)
#endif /* VBOX */
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}

#ifndef VBOX
static inline void gen_op_addl_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_A0_seg(int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
#ifndef VBOX
static inline void gen_op_movq_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movq_A0_seg(int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
}

#ifndef VBOX
static inline void gen_op_addq_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addq_A0_seg(int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}

#ifndef VBOX
static inline void gen_op_movq_A0_reg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movq_A0_reg(int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
}

#ifndef VBOX
static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addq_A0_reg_sN(int shift, int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
#endif

#ifndef VBOX
static inline void gen_op_lds_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_lds_T0_A0(int idx)
#endif /* VBOX */
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 2:
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

#ifndef VBOX
static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0)
#else /* VBOX */
DECLINLINE(void) gen_op_ld_v(int idx, TCGv t0, TCGv a0)
#endif /* VBOX */
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(t0, a0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(t0, a0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(t0, a0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(t0, a0, mem_index);
        break;
    }
}
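
/* Note on the 'idx' encoding used by gen_op_lds_T0_A0()/gen_op_ld_v()
   and the store helpers below: the low two bits select the access size
   (0 = 8, 1 = 16, 2 = 32, 3 = 64 bit) while idx >> 2 carries the softmmu
   memory index plus one, hence mem_index = (idx >> 2) - 1. Callers pass
   'ot + s->mem_index', e.g.

       gen_op_ld_T0_A0(OT_WORD + s->mem_index);

   loads a 16-bit value from A0 with the current privilege level's
   memory access functions. */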

/* XXX: always use ldu or lds */
#ifndef VBOX
static inline void gen_op_ld_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_ld_T0_A0(int idx)
#endif /* VBOX */
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}

#ifndef VBOX
static inline void gen_op_ldu_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_ldu_T0_A0(int idx)
#endif /* VBOX */
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}

#ifndef VBOX
static inline void gen_op_ld_T1_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_ld_T1_A0(int idx)
#endif /* VBOX */
{
    gen_op_ld_v(idx, cpu_T[1], cpu_A0);
}

#ifndef VBOX
static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0)
#else /* VBOX */
DECLINLINE(void) gen_op_st_v(int idx, TCGv t0, TCGv a0)
#endif /* VBOX */
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(t0, a0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(t0, a0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(t0, a0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(t0, a0, mem_index);
        break;
    }
}

#ifndef VBOX
static inline void gen_op_st_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_st_T0_A0(int idx)
#endif /* VBOX */
{
    gen_op_st_v(idx, cpu_T[0], cpu_A0);
}

#ifndef VBOX
static inline void gen_op_st_T1_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_st_T1_A0(int idx)
#endif /* VBOX */
{
    gen_op_st_v(idx, cpu_T[1], cpu_A0);
}

#ifndef VBOX
static inline void gen_jmp_im(target_ulong pc)
#else /* VBOX */
DECLINLINE(void) gen_jmp_im(target_ulong pc)
#endif /* VBOX */
{
#ifdef VBOX
    gen_check_external_event();
#endif /* VBOX */
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}

#ifdef VBOX
static void gen_check_external_event(void)
{
    /** @todo this code is either wrong or low-performing; rewrite the
        flags check in TCG IR */
    tcg_gen_helper_0_0(helper_check_external_event);
}

#ifndef VBOX
static inline void gen_update_eip(target_ulong pc)
#else /* VBOX */
DECLINLINE(void) gen_update_eip(target_ulong pc)
#endif /* VBOX */
{
    gen_jmp_im(pc);

}
#endif

#ifndef VBOX
static inline void gen_string_movl_A0_ESI(DisasContext *s)
#else /* VBOX */
DECLINLINE(void) gen_string_movl_A0_ESI(DisasContext *s)
#endif /* VBOX */
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 bit address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}
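
/* Example: with a 32-bit address size, "rep movsb" without a segment
   prefix reads the source from DS:ESI; an explicit ES prefix would set
   s->override = R_ES and take the override branch above. The destination
   side (gen_string_movl_A0_EDI below) always uses ES and ignores
   overrides, as the x86 string instructions require. */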

#ifndef VBOX
static inline void gen_string_movl_A0_EDI(DisasContext *s)
#else /* VBOX */
DECLINLINE(void) gen_string_movl_A0_EDI(DisasContext *s)
#endif /* VBOX */
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}

#ifndef VBOX
static inline void gen_op_movl_T0_Dshift(int ot)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_Dshift(int ot)
#endif /* VBOX */
{
    tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
}

static void gen_extu(int ot, TCGv reg)
{
    switch(ot) {
    case OT_BYTE:
        tcg_gen_ext8u_tl(reg, reg);
        break;
    case OT_WORD:
        tcg_gen_ext16u_tl(reg, reg);
        break;
    case OT_LONG:
        tcg_gen_ext32u_tl(reg, reg);
        break;
    default:
        break;
    }
}

static void gen_exts(int ot, TCGv reg)
{
    switch(ot) {
    case OT_BYTE:
        tcg_gen_ext8s_tl(reg, reg);
        break;
    case OT_WORD:
        tcg_gen_ext16s_tl(reg, reg);
        break;
    case OT_LONG:
        tcg_gen_ext32s_tl(reg, reg);
        break;
    default:
        break;
    }
}

#ifndef VBOX
static inline void gen_op_jnz_ecx(int size, int label1)
#else /* VBOX */
DECLINLINE(void) gen_op_jnz_ecx(int size, int label1)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
}

#ifndef VBOX
static inline void gen_op_jz_ecx(int size, int label1)
#else /* VBOX */
DECLINLINE(void) gen_op_jz_ecx(int size, int label1)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
}

static void *helper_in_func[3] = {
    helper_inb,
    helper_inw,
    helper_inl,
};

static void *helper_out_func[3] = {
    helper_outb,
    helper_outw,
    helper_outl,
};

static void *gen_check_io_func[3] = {
    helper_check_iob,
    helper_check_iow,
    helper_check_iol,
};

static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
                         uint32_t svm_flags)
{
    int state_saved;
    target_ulong next_eip;

    state_saved = 0;
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        state_saved = 1;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_1(gen_check_io_func[ot],
                           cpu_tmp2_i32);
    }
    if(s->flags & HF_SVMI_MASK) {
        if (!state_saved) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(cur_eip);
            state_saved = 1;
        }
        svm_flags |= (1 << (4 + ot));
        next_eip = s->pc - s->cs_base;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_3(helper_svm_check_io,
                           cpu_tmp2_i32,
                           tcg_const_i32(svm_flags),
                           tcg_const_i32(next_eip - cur_eip));
    }
}
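
/* Reading of the SVM path above: bit (4 + ot) of svm_flags encodes the
   access size reported in the IOIO intercept (bit 4 = byte, bit 5 = word,
   bit 6 = dword), and next_eip - cur_eip is the instruction length the
   helper needs so the guest can resume after a #VMEXIT. */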

#ifndef VBOX
static inline void gen_movs(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_movs(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

#ifndef VBOX
static inline void gen_update_cc_op(DisasContext *s)
#else /* VBOX */
DECLINLINE(void) gen_update_cc_op(DisasContext *s)
#endif /* VBOX */
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}

static void gen_op_update1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static void gen_op_update2_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

#ifndef VBOX
static inline void gen_op_cmpl_T0_T1_cc(void)
#else /* VBOX */
DECLINLINE(void) gen_op_cmpl_T0_T1_cc(void)
#endif /* VBOX */
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

#ifndef VBOX
static inline void gen_op_testl_T0_T1_cc(void)
#else /* VBOX */
DECLINLINE(void) gen_op_testl_T0_T1_cc(void)
#endif /* VBOX */
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

static void gen_op_update_neg_cc(void)
{
    tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

/* compute eflags.C to reg */
static void gen_compute_eflags_c(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}

/* compute all eflags to cc_src */
static void gen_compute_eflags(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}
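
/* Sketch of the dispatch above: cc_table is indexed by the dynamic cc_op
   value and each CCTable entry holds two helper pointers, so the emitted
   code computes roughly

       fn  = *(func_ptr *)((char *)cc_table + cc_op * sizeof(CCTable)
                           + offsetof(CCTable, compute_all));
       reg = fn();   // indirect, side-effect-free (TCG_CALL_PURE) call

   The shift count differs per host (3 on 32-bit, 4 on 64-bit) because
   sizeof(CCTable) is two function pointers. */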

#ifndef VBOX
static inline void gen_setcc_slow_T0(DisasContext *s, int jcc_op)
#else /* VBOX */
DECLINLINE(void) gen_setcc_slow_T0(DisasContext *s, int jcc_op)
#endif /* VBOX */
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    switch(jcc_op) {
    case JCC_O:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 11);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_B:
        gen_compute_eflags_c(cpu_T[0]);
        break;
    case JCC_Z:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 6);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_BE:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 6);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_S:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 7);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_P:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 2);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_L:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 7); /* CC_S */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    default:
    case JCC_LE:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp4, cpu_tmp0, 7); /* CC_S */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 6); /* CC_Z */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    }
}

/* return true if setcc_slow is not needed (WARNING: must be kept in
   sync with gen_jcc1) */
static int is_fast_jcc_case(DisasContext *s, int b)
{
    int jcc_op;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        if (jcc_op == JCC_O || jcc_op == JCC_P)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        if (jcc_op != JCC_Z && jcc_op != JCC_S)
            goto slow_jcc;
        break;
    default:
    slow_jcc:
        return 0;
    }
    return 1;
}

/* generate a conditional jump to label 'l1' according to jump opcode
   value 'b'. In the fast case, T0 is guaranteed not to be used. */
#ifndef VBOX
static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
#else /* VBOX */
DECLINLINE(void) gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
#endif /* VBOX */
{
    int inv, jcc_op, size, cond;
    TCGv t0;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    switch(cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:

        size = cc_op - CC_OP_SUBB;
        switch(jcc_op) {
        case JCC_Z:
        fast_jcc_z:
            switch(size) {
            case 0:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xff);
                t0 = cpu_tmp0;
                break;
            case 1:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffff);
                t0 = cpu_tmp0;
                break;
#ifdef TARGET_X86_64
            case 2:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffffffff);
                t0 = cpu_tmp0;
                break;
#endif
            default:
                t0 = cpu_cc_dst;
                break;
            }
            tcg_gen_brcondi_tl(inv ? TCG_COND_NE : TCG_COND_EQ, t0, 0, l1);
            break;
        case JCC_S:
        fast_jcc_s:
            switch(size) {
            case 0:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
            case 1:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x8000);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
#ifdef TARGET_X86_64
            case 2:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80000000);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
#endif
            default:
                tcg_gen_brcondi_tl(inv ? TCG_COND_GE : TCG_COND_LT, cpu_cc_dst,
                                   0, l1);
                break;
            }
            break;

        case JCC_B:
            cond = inv ? TCG_COND_GEU : TCG_COND_LTU;
            goto fast_jcc_b;
        case JCC_BE:
            cond = inv ? TCG_COND_GTU : TCG_COND_LEU;
        fast_jcc_b:
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
            switch(size) {
            case 0:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xff);
                break;
            case 1:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffff);
                break;
#ifdef TARGET_X86_64
            case 2:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffffffff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffffffff);
                break;
#endif
            default:
                t0 = cpu_cc_src;
                break;
            }
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
            break;

        case JCC_L:
            cond = inv ? TCG_COND_GE : TCG_COND_LT;
            goto fast_jcc_l;
        case JCC_LE:
            cond = inv ? TCG_COND_GT : TCG_COND_LE;
        fast_jcc_l:
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
            switch(size) {
            case 0:
                t0 = cpu_tmp0;
                tcg_gen_ext8s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext8s_tl(t0, cpu_cc_src);
                break;
            case 1:
                t0 = cpu_tmp0;
                tcg_gen_ext16s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext16s_tl(t0, cpu_cc_src);
                break;
#ifdef TARGET_X86_64
            case 2:
                t0 = cpu_tmp0;
                tcg_gen_ext32s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext32s_tl(t0, cpu_cc_src);
                break;
#endif
            default:
                t0 = cpu_cc_src;
                break;
            }
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
            break;

        default:
            goto slow_jcc;
        }
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_ADCB:
    case CC_OP_ADCW:
    case CC_OP_ADCL:
    case CC_OP_ADCQ:

    case CC_OP_SBBB:
    case CC_OP_SBBW:
    case CC_OP_SBBL:
    case CC_OP_SBBQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:

    case CC_OP_SARB:
    case CC_OP_SARW:
    case CC_OP_SARL:
    case CC_OP_SARQ:
        switch(jcc_op) {
        case JCC_Z:
            size = (cc_op - CC_OP_ADDB) & 3;
            goto fast_jcc_z;
        case JCC_S:
            size = (cc_op - CC_OP_ADDB) & 3;
            goto fast_jcc_s;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        gen_setcc_slow_T0(s, jcc_op);
        tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE,
                           cpu_T[0], 0, l1);
        break;
    }
}
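
/* Worked example for the fast cmp/jcc path above: after SUB/CMP,
   cc_dst = a - b and cc_src = b, so the first operand is recovered as
   a = cc_dst + cc_src (built in cpu_tmp4). JCC_B ("below") then reduces
   to the unsigned compare a < b:

       tcg_gen_brcond_tl(TCG_COND_LTU, cpu_tmp4, t0, l1);

   once both values have been masked (or sign-extended, for JCC_L/JCC_LE)
   to the operand size. */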

/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx(s->aflag, l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}
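
/* gen_jz_ecx_string() emits "if (ECX != 0) goto l1; l2: jump to next
   insn; l1:" and returns l2, so a REP expansion can branch to l2 to
   leave the loop once ECX reaches zero (see GEN_REPZ below). */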

#ifndef VBOX
static inline void gen_stos(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_stos(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

#ifndef VBOX
static inline void gen_lods(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_lods(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
}

#ifndef VBOX
static inline void gen_scas(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_scas(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

#ifndef VBOX
static inline void gen_cmps(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_cmps(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

#ifndef VBOX
static inline void gen_ins(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_ins(DisasContext *s, int ot)
#endif /* VBOX */
{
    if (use_icount)
        gen_io_start();
    gen_string_movl_A0_EDI(s);
    /* Note: we must do this dummy write first to be restartable in
       case of page fault. */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
    if (use_icount)
        gen_io_end();
}

#ifndef VBOX
static inline void gen_outs(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_outs(DisasContext *s, int ot)
#endif /* VBOX */
{
    if (use_icount)
        gen_io_start();
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);

    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
    tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);

    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    if (use_icount)
        gen_io_end();
}

/* same method as Valgrind: we generate jumps to current or next
   instruction */
#ifndef VBOX
#define GEN_REPZ(op) \
static inline void gen_repz_ ## op(DisasContext *s, int ot, \
                                   target_ulong cur_eip, target_ulong next_eip) \
{ \
    int l2; \
    gen_update_cc_op(s); \
    l2 = gen_jz_ecx_string(s, next_eip); \
    gen_ ## op(s, ot); \
    gen_op_add_reg_im(s->aflag, R_ECX, -1); \
    /* a loop would cause two single step exceptions if ECX = 1 \
       before rep string_insn */ \
    if (!s->jmp_opt) \
        gen_op_jz_ecx(s->aflag, l2); \
    gen_jmp(s, cur_eip); \
}
#else /* VBOX */
#define GEN_REPZ(op) \
DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot, \
                                 target_ulong cur_eip, target_ulong next_eip) \
{ \
    int l2; \
    gen_update_cc_op(s); \
    l2 = gen_jz_ecx_string(s, next_eip); \
    gen_ ## op(s, ot); \
    gen_op_add_reg_im(s->aflag, R_ECX, -1); \
    /* a loop would cause two single step exceptions if ECX = 1 \
       before rep string_insn */ \
    if (!s->jmp_opt) \
        gen_op_jz_ecx(s->aflag, l2); \
    gen_jmp(s, cur_eip); \
}
#endif /* VBOX */

#ifndef VBOX
#define GEN_REPZ2(op) \
static inline void gen_repz_ ## op(DisasContext *s, int ot, \
                                   target_ulong cur_eip, \
                                   target_ulong next_eip, \
                                   int nz) \
{ \
    int l2; \
    gen_update_cc_op(s); \
    l2 = gen_jz_ecx_string(s, next_eip); \
    gen_ ## op(s, ot); \
    gen_op_add_reg_im(s->aflag, R_ECX, -1); \
    gen_op_set_cc_op(CC_OP_SUBB + ot); \
    gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2); \
    if (!s->jmp_opt) \
        gen_op_jz_ecx(s->aflag, l2); \
    gen_jmp(s, cur_eip); \
}
#else /* VBOX */
#define GEN_REPZ2(op) \
DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot, \
                                 target_ulong cur_eip, \
                                 target_ulong next_eip, \
                                 int nz) \
{ \
    int l2; \
    gen_update_cc_op(s); \
    l2 = gen_jz_ecx_string(s, next_eip); \
    gen_ ## op(s, ot); \
    gen_op_add_reg_im(s->aflag, R_ECX, -1); \
    gen_op_set_cc_op(CC_OP_SUBB + ot); \
    gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2); \
    if (!s->jmp_opt) \
        gen_op_jz_ecx(s->aflag, l2); \
    gen_jmp(s, cur_eip); \
}
#endif /* VBOX */

GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)
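
/* For illustration, GEN_REPZ(movs) expands (VBOX flavour, slightly
   abridged) to:

       DECLINLINE(void) gen_repz_movs(DisasContext *s, int ot,
                                      target_ulong cur_eip,
                                      target_ulong next_eip)
       {
           int l2;
           gen_update_cc_op(s);
           l2 = gen_jz_ecx_string(s, next_eip); // exit when ECX == 0
           gen_movs(s, ot);                     // one string step
           gen_op_add_reg_im(s->aflag, R_ECX, -1);
           if (!s->jmp_opt)
               gen_op_jz_ecx(s->aflag, l2);
           gen_jmp(s, cur_eip);                 // re-enter this insn
       }

   i.e. each REP iteration is a separate pass through the translated
   block, jumping back to the instruction itself. */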

static void *helper_fp_arith_ST0_FT0[8] = {
    helper_fadd_ST0_FT0,
    helper_fmul_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fsub_ST0_FT0,
    helper_fsubr_ST0_FT0,
    helper_fdiv_ST0_FT0,
    helper_fdivr_ST0_FT0,
};

/* NOTE the exception in "r" op ordering */
static void *helper_fp_arith_STN_ST0[8] = {
    helper_fadd_STN_ST0,
    helper_fmul_STN_ST0,
    NULL,
    NULL,
    helper_fsubr_STN_ST0,
    helper_fsub_STN_ST0,
    helper_fdivr_STN_ST0,
    helper_fdiv_STN_ST0,
};

/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg(ot, 0, d);
    } else {
        gen_op_ld_T0_A0(ot + s1->mem_index);
    }
    switch(op) {
    case OP_ADCL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_SBBL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_ADDB + ot;
        break;
    case OP_SUBL:
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    default:
    case OP_ANDL:
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_ORL:
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_XORL:
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_CMPL:
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    }
}

/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    else
        gen_op_st_T0_A0(ot + s1->mem_index);
    gen_compute_eflags_c(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
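
/* Note: INC/DEC must leave CF unchanged, which is why gen_inc() saves
   the carry of the *previous* operation into cpu_cc_src (via
   gen_compute_eflags_c) before switching cc_op to CC_OP_INCB/CC_OP_DECB
   + ot; the flag helpers for those ops read CF back out of cc_src
   rather than recomputing it from the result. */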

static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
                            int is_right, int is_arith)
{
    target_ulong mask;
    int shift_label;
    TCGv t0, t1;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);

    if (is_right) {
        if (is_arith) {
            gen_exts(ot, cpu_T[0]);
            tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        } else {
            gen_extu(ot, cpu_T[0]);
            tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* XXX: inefficient */
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);

    tcg_gen_mov_tl(t0, cpu_T[0]);
    tcg_gen_mov_tl(t1, cpu_T3);

    shift_label = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_T[1], 0, shift_label);

    tcg_gen_mov_tl(cpu_cc_src, t1);
    tcg_gen_mov_tl(cpu_cc_dst, t0);
    if (is_right)
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    else
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);

    gen_set_label(shift_label);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
}

static void gen_shift_rm_im(DisasContext *s, int ot, int op1, int op2,
                            int is_right, int is_arith)
{
    int mask;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    op2 &= mask;
    if (op2 != 0) {
        if (is_right) {
            if (is_arith) {
                gen_exts(ot, cpu_T[0]);
                tcg_gen_sari_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_sari_tl(cpu_T[0], cpu_T[0], op2);
            } else {
                gen_extu(ot, cpu_T[0]);
                tcg_gen_shri_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], op2);
            }
        } else {
            tcg_gen_shli_tl(cpu_tmp4, cpu_T[0], op2 - 1);
            tcg_gen_shli_tl(cpu_T[0], cpu_T[0], op2);
        }
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (op2 != 0) {
        tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        if (is_right)
            s->cc_op = CC_OP_SARB + ot;
        else
            s->cc_op = CC_OP_SHLB + ot;
    }
}

#ifndef VBOX
static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
#else /* VBOX */
DECLINLINE(void) tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
#endif /* VBOX */
{
    if (arg2 >= 0)
        tcg_gen_shli_tl(ret, arg1, arg2);
    else
        tcg_gen_shri_tl(ret, arg1, -arg2);
}

/* XXX: add faster immediate case */
static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
                          int is_right)
{
    target_ulong mask;
    int label1, label2, data_bits;
    TCGv t0, t1, t2, a0;

    /* XXX: inefficient, but we must use local temps */
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);
    t2 = tcg_temp_local_new(TCG_TYPE_TL);
    a0 = tcg_temp_local_new(TCG_TYPE_TL);

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0) {
        tcg_gen_mov_tl(a0, cpu_A0);
        gen_op_ld_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_v_reg(ot, t0, op1);
    }

    tcg_gen_mov_tl(t1, cpu_T[1]);

    tcg_gen_andi_tl(t1, t1, mask);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label1);

    if (ot <= OT_WORD)
        tcg_gen_andi_tl(cpu_tmp0, t1, (1 << (3 + ot)) - 1);
    else
        tcg_gen_mov_tl(cpu_tmp0, t1);

    gen_extu(ot, t0);
    tcg_gen_mov_tl(t2, t0);

    data_bits = 8 << ot;
    /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
       fix TCG definition) */
    if (is_right) {
        tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shl_tl(t0, t0, cpu_tmp0);
    } else {
        tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shr_tl(t0, t0, cpu_tmp0);
    }
    tcg_gen_or_tl(t0, t0, cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0) {
        gen_op_st_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_reg_v(ot, op1, t0);
    }

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label2);

    gen_compute_eflags(cpu_cc_src);
    tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
    tcg_gen_xor_tl(cpu_tmp0, t2, t0);
    tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
    if (is_right) {
        tcg_gen_shri_tl(t0, t0, data_bits - 1);
    }
    tcg_gen_andi_tl(t0, t0, CC_C);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);

    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(a0);
}
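
/* The zero test plus two-shift sequence above implements the usual
   rotate identity while never shifting by 0 or by the full word width,
   both of which are undefined in TCG. For a right rotate by n
   (0 < n < data_bits):

       rot = (t0 >> n) | (t0 << (data_bits - n));

   and symmetrically for a left rotate; the OF/CF update afterwards only
   runs when the masked count is non-zero, matching x86 semantics. */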
2019
2020static void *helper_rotc[8] = {
2021 helper_rclb,
2022 helper_rclw,
2023 helper_rcll,
2024 X86_64_ONLY(helper_rclq),
2025 helper_rcrb,
2026 helper_rcrw,
2027 helper_rcrl,
2028 X86_64_ONLY(helper_rcrq),
2029};
2030
2031/* XXX: add faster immediate = 1 case */
2032static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
2033 int is_right)
2034{
2035 int label1;
2036
2037 if (s->cc_op != CC_OP_DYNAMIC)
2038 gen_op_set_cc_op(s->cc_op);
2039
2040 /* load */
2041 if (op1 == OR_TMP0)
2042 gen_op_ld_T0_A0(ot + s->mem_index);
2043 else
2044 gen_op_mov_TN_reg(ot, 0, op1);
2045
2046 tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
2047 cpu_T[0], cpu_T[0], cpu_T[1]);
2048 /* store */
2049 if (op1 == OR_TMP0)
2050 gen_op_st_T0_A0(ot + s->mem_index);
2051 else
2052 gen_op_mov_reg_T0(ot, op1);
2053
2054 /* update eflags */
2055 label1 = gen_new_label();
2056 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_tmp, -1, label1);
2057
2058 tcg_gen_mov_tl(cpu_cc_src, cpu_cc_tmp);
2059 tcg_gen_discard_tl(cpu_cc_dst);
2060 tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
2061
2062 gen_set_label(label1);
2063 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2064}
2065
2066/* XXX: add faster immediate case */
2067static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
2068 int is_right)
2069{
2070 int label1, label2, data_bits;
2071 target_ulong mask;
2072 TCGv t0, t1, t2, a0;
2073
2074 t0 = tcg_temp_local_new(TCG_TYPE_TL);
2075 t1 = tcg_temp_local_new(TCG_TYPE_TL);
2076 t2 = tcg_temp_local_new(TCG_TYPE_TL);
2077 a0 = tcg_temp_local_new(TCG_TYPE_TL);
2078
2079 if (ot == OT_QUAD)
2080 mask = 0x3f;
2081 else
2082 mask = 0x1f;
2083
2084 /* load */
2085 if (op1 == OR_TMP0) {
2086 tcg_gen_mov_tl(a0, cpu_A0);
2087 gen_op_ld_v(ot + s->mem_index, t0, a0);
2088 } else {
2089 gen_op_mov_v_reg(ot, t0, op1);
2090 }
2091
2092 tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);
2093
2094 tcg_gen_mov_tl(t1, cpu_T[1]);
2095 tcg_gen_mov_tl(t2, cpu_T3);
2096
2097 /* Must test zero case to avoid using undefined behaviour in TCG
2098 shifts. */
2099 label1 = gen_new_label();
2100 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
2101
2102 tcg_gen_addi_tl(cpu_tmp5, t2, -1);
2103 if (ot == OT_WORD) {
2104 /* Note: we implement the Intel behaviour for shift count > 16 */
2105 if (is_right) {
2106 tcg_gen_andi_tl(t0, t0, 0xffff);
2107 tcg_gen_shli_tl(cpu_tmp0, t1, 16);
2108 tcg_gen_or_tl(t0, t0, cpu_tmp0);
2109 tcg_gen_ext32u_tl(t0, t0);
2110
2111 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
2112
2113 /* only needed if count > 16, but a test would complicate */
2114 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
2115 tcg_gen_shl_tl(cpu_tmp0, t0, cpu_tmp5);
2116
2117 tcg_gen_shr_tl(t0, t0, t2);
2118
2119 tcg_gen_or_tl(t0, t0, cpu_tmp0);
2120 } else {
2121 /* XXX: not optimal */
2122 tcg_gen_andi_tl(t0, t0, 0xffff);
2123 tcg_gen_shli_tl(t1, t1, 16);
2124 tcg_gen_or_tl(t1, t1, t0);
2125 tcg_gen_ext32u_tl(t1, t1);
2126
2127 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
2128 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
2129 tcg_gen_shr_tl(cpu_tmp6, t1, cpu_tmp0);
2130 tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);
2131
2132 tcg_gen_shl_tl(t0, t0, t2);
2133 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
2134 tcg_gen_shr_tl(t1, t1, cpu_tmp5);
2135 tcg_gen_or_tl(t0, t0, t1);
2136 }
2137 } else {
2138 data_bits = 8 << ot;
2139 if (is_right) {
2140 if (ot == OT_LONG)
2141 tcg_gen_ext32u_tl(t0, t0);
2142
2143 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
2144
2145 tcg_gen_shr_tl(t0, t0, t2);
2146 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
2147 tcg_gen_shl_tl(t1, t1, cpu_tmp5);
2148 tcg_gen_or_tl(t0, t0, t1);
2149
2150 } else {
2151 if (ot == OT_LONG)
2152 tcg_gen_ext32u_tl(t1, t1);
2153
2154 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
2155
2156 tcg_gen_shl_tl(t0, t0, t2);
2157 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
2158 tcg_gen_shr_tl(t1, t1, cpu_tmp5);
2159 tcg_gen_or_tl(t0, t0, t1);
2160 }
2161 }
2162 tcg_gen_mov_tl(t1, cpu_tmp4);
2163
2164 gen_set_label(label1);
2165 /* store */
2166 if (op1 == OR_TMP0) {
2167 gen_op_st_v(ot + s->mem_index, t0, a0);
2168 } else {
2169 gen_op_mov_reg_v(ot, op1, t0);
2170 }
2171
2172 /* update eflags */
2173 if (s->cc_op != CC_OP_DYNAMIC)
2174 gen_op_set_cc_op(s->cc_op);
2175
2176 label2 = gen_new_label();
2177 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label2);
2178
2179 tcg_gen_mov_tl(cpu_cc_src, t1);
2180 tcg_gen_mov_tl(cpu_cc_dst, t0);
2181 if (is_right) {
2182 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
2183 } else {
2184 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
2185 }
2186 gen_set_label(label2);
2187 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2188
2189 tcg_temp_free(t0);
2190 tcg_temp_free(t1);
2191 tcg_temp_free(t2);
2192 tcg_temp_free(a0);
2193}
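/* Illustrative sketch (not compiled): the double-shift semantics that
   gen_shiftd_rm_T1_T3() emits for the 32-bit SHRD case, written as plain C.
   'dst' is the r/m operand, 'src' the second register operand and 'count'
   is assumed to be already masked to 1..31; bit 0 of dst >> (count - 1),
   i.e. the last bit shifted out, becomes CF (tracked above in cpu_tmp4). */
#if 0
static uint32_t shrd32_sketch(uint32_t dst, uint32_t src, unsigned count)
{
    /* bits vacated at the top of dst are filled from the bottom of src */
    return (dst >> count) | (src << (32 - count));
}
#endif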
2194
2195static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
2196{
2197 if (s != OR_TMP1)
2198 gen_op_mov_TN_reg(ot, 1, s);
2199 switch(op) {
2200 case OP_ROL:
2201 gen_rot_rm_T1(s1, ot, d, 0);
2202 break;
2203 case OP_ROR:
2204 gen_rot_rm_T1(s1, ot, d, 1);
2205 break;
2206 case OP_SHL:
2207 case OP_SHL1:
2208 gen_shift_rm_T1(s1, ot, d, 0, 0);
2209 break;
2210 case OP_SHR:
2211 gen_shift_rm_T1(s1, ot, d, 1, 0);
2212 break;
2213 case OP_SAR:
2214 gen_shift_rm_T1(s1, ot, d, 1, 1);
2215 break;
2216 case OP_RCL:
2217 gen_rotc_rm_T1(s1, ot, d, 0);
2218 break;
2219 case OP_RCR:
2220 gen_rotc_rm_T1(s1, ot, d, 1);
2221 break;
2222 }
2223}
2224
2225static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
2226{
2227 switch(op) {
2228 case OP_SHL:
2229 case OP_SHL1:
2230 gen_shift_rm_im(s1, ot, d, c, 0, 0);
2231 break;
2232 case OP_SHR:
2233 gen_shift_rm_im(s1, ot, d, c, 1, 0);
2234 break;
2235 case OP_SAR:
2236 gen_shift_rm_im(s1, ot, d, c, 1, 1);
2237 break;
2238 default:
2239 /* currently not optimized */
2240 gen_op_movl_T1_im(c);
2241 gen_shift(s1, op, ot, d, OR_TMP1);
2242 break;
2243 }
2244}
2245
2246static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
2247{
2248 target_long disp;
2249 int havesib;
2250 int base;
2251 int index;
2252 int scale;
2253 int opreg;
2254 int mod, rm, code, override, must_add_seg;
2255
2256 override = s->override;
2257 must_add_seg = s->addseg;
2258 if (override >= 0)
2259 must_add_seg = 1;
2260 mod = (modrm >> 6) & 3;
2261 rm = modrm & 7;
2262
2263 if (s->aflag) {
2264
2265 havesib = 0;
2266 base = rm;
2267 index = 0;
2268 scale = 0;
2269
2270 if (base == 4) {
2271 havesib = 1;
2272 code = ldub_code(s->pc++);
2273 scale = (code >> 6) & 3;
2274 index = ((code >> 3) & 7) | REX_X(s);
2275 base = (code & 7);
2276 }
2277 base |= REX_B(s);
2278
2279 switch (mod) {
2280 case 0:
2281 if ((base & 7) == 5) {
2282 base = -1;
2283 disp = (int32_t)ldl_code(s->pc);
2284 s->pc += 4;
2285 if (CODE64(s) && !havesib) {
2286 disp += s->pc + s->rip_offset;
2287 }
2288 } else {
2289 disp = 0;
2290 }
2291 break;
2292 case 1:
2293 disp = (int8_t)ldub_code(s->pc++);
2294 break;
2295 default:
2296 case 2:
2297 disp = ldl_code(s->pc);
2298 s->pc += 4;
2299 break;
2300 }
2301
2302 if (base >= 0) {
2303 /* for correct popl handling with esp */
2304 if (base == 4 && s->popl_esp_hack)
2305 disp += s->popl_esp_hack;
2306#ifdef TARGET_X86_64
2307 if (s->aflag == 2) {
2308 gen_op_movq_A0_reg(base);
2309 if (disp != 0) {
2310 gen_op_addq_A0_im(disp);
2311 }
2312 } else
2313#endif
2314 {
2315 gen_op_movl_A0_reg(base);
2316 if (disp != 0)
2317 gen_op_addl_A0_im(disp);
2318 }
2319 } else {
2320#ifdef TARGET_X86_64
2321 if (s->aflag == 2) {
2322 gen_op_movq_A0_im(disp);
2323 } else
2324#endif
2325 {
2326 gen_op_movl_A0_im(disp);
2327 }
2328 }
2329 /* XXX: index == 4 is always invalid */
2330 if (havesib && (index != 4 || scale != 0)) {
2331#ifdef TARGET_X86_64
2332 if (s->aflag == 2) {
2333 gen_op_addq_A0_reg_sN(scale, index);
2334 } else
2335#endif
2336 {
2337 gen_op_addl_A0_reg_sN(scale, index);
2338 }
2339 }
2340 if (must_add_seg) {
2341 if (override < 0) {
2342 if (base == R_EBP || base == R_ESP)
2343 override = R_SS;
2344 else
2345 override = R_DS;
2346 }
2347#ifdef TARGET_X86_64
2348 if (s->aflag == 2) {
2349 gen_op_addq_A0_seg(override);
2350 } else
2351#endif
2352 {
2353 gen_op_addl_A0_seg(override);
2354 }
2355 }
2356 } else {
2357 switch (mod) {
2358 case 0:
2359 if (rm == 6) {
2360 disp = lduw_code(s->pc);
2361 s->pc += 2;
2362 gen_op_movl_A0_im(disp);
2363 rm = 0; /* avoid SS override */
2364 goto no_rm;
2365 } else {
2366 disp = 0;
2367 }
2368 break;
2369 case 1:
2370 disp = (int8_t)ldub_code(s->pc++);
2371 break;
2372 default:
2373 case 2:
2374 disp = lduw_code(s->pc);
2375 s->pc += 2;
2376 break;
2377 }
2378 switch(rm) {
2379 case 0:
2380 gen_op_movl_A0_reg(R_EBX);
2381 gen_op_addl_A0_reg_sN(0, R_ESI);
2382 break;
2383 case 1:
2384 gen_op_movl_A0_reg(R_EBX);
2385 gen_op_addl_A0_reg_sN(0, R_EDI);
2386 break;
2387 case 2:
2388 gen_op_movl_A0_reg(R_EBP);
2389 gen_op_addl_A0_reg_sN(0, R_ESI);
2390 break;
2391 case 3:
2392 gen_op_movl_A0_reg(R_EBP);
2393 gen_op_addl_A0_reg_sN(0, R_EDI);
2394 break;
2395 case 4:
2396 gen_op_movl_A0_reg(R_ESI);
2397 break;
2398 case 5:
2399 gen_op_movl_A0_reg(R_EDI);
2400 break;
2401 case 6:
2402 gen_op_movl_A0_reg(R_EBP);
2403 break;
2404 default:
2405 case 7:
2406 gen_op_movl_A0_reg(R_EBX);
2407 break;
2408 }
2409 if (disp != 0)
2410 gen_op_addl_A0_im(disp);
2411 gen_op_andl_A0_ffff();
2412 no_rm:
2413 if (must_add_seg) {
2414 if (override < 0) {
2415 if (rm == 2 || rm == 3 || rm == 6)
2416 override = R_SS;
2417 else
2418 override = R_DS;
2419 }
2420 gen_op_addl_A0_seg(override);
2421 }
2422 }
2423
2424 opreg = OR_A0;
2425 disp = 0;
2426 *reg_ptr = opreg;
2427 *offset_ptr = disp;
2428}
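/* Illustrative sketch (not compiled): the ModRM and SIB bit fields that
   gen_lea_modrm() extracts inline above. Field names follow the Intel
   manuals; the REX extension bits (REX_X/REX_B) are left out here. */
#if 0
static void modrm_fields_sketch(uint8_t modrm, uint8_t sib)
{
    unsigned mod   = (modrm >> 6) & 3; /* 0-2: memory forms, 3: register */
    unsigned reg   = (modrm >> 3) & 7; /* register operand or opcode extension */
    unsigned rm    = modrm & 7;        /* base register; 4 means a SIB byte follows */
    unsigned scale = (sib >> 6) & 3;   /* index is scaled by 1 << scale */
    unsigned index = (sib >> 3) & 7;   /* 4 encodes "no index" */
    unsigned base  = sib & 7;          /* 5 with mod == 0 encodes disp32, no base */
    (void)mod; (void)reg; (void)rm; (void)scale; (void)index; (void)base;
}
#endif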
2429
2430static void gen_nop_modrm(DisasContext *s, int modrm)
2431{
2432 int mod, rm, base, code;
2433
2434 mod = (modrm >> 6) & 3;
2435 if (mod == 3)
2436 return;
2437 rm = modrm & 7;
2438
2439 if (s->aflag) {
2440
2441 base = rm;
2442
2443 if (base == 4) {
2444 code = ldub_code(s->pc++);
2445 base = (code & 7);
2446 }
2447
2448 switch (mod) {
2449 case 0:
2450 if (base == 5) {
2451 s->pc += 4;
2452 }
2453 break;
2454 case 1:
2455 s->pc++;
2456 break;
2457 default:
2458 case 2:
2459 s->pc += 4;
2460 break;
2461 }
2462 } else {
2463 switch (mod) {
2464 case 0:
2465 if (rm == 6) {
2466 s->pc += 2;
2467 }
2468 break;
2469 case 1:
2470 s->pc++;
2471 break;
2472 default:
2473 case 2:
2474 s->pc += 2;
2475 break;
2476 }
2477 }
2478}
2479
2480/* used for LEA and MOV AX, mem */
2481static void gen_add_A0_ds_seg(DisasContext *s)
2482{
2483 int override, must_add_seg;
2484 must_add_seg = s->addseg;
2485 override = R_DS;
2486 if (s->override >= 0) {
2487 override = s->override;
2488 must_add_seg = 1;
2489 } else {
2490 override = R_DS;
2491 }
2492 if (must_add_seg) {
2493#ifdef TARGET_X86_64
2494 if (CODE64(s)) {
2495 gen_op_addq_A0_seg(override);
2496 } else
2497#endif
2498 {
2499 gen_op_addl_A0_seg(override);
2500 }
2501 }
2502}
2503
2504 /* generate a ModRM memory load or store of 'reg'. TMP0 is used if reg ==
2505 OR_TMP0 */
2506static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
2507{
2508 int mod, rm, opreg, disp;
2509
2510 mod = (modrm >> 6) & 3;
2511 rm = (modrm & 7) | REX_B(s);
2512 if (mod == 3) {
2513 if (is_store) {
2514 if (reg != OR_TMP0)
2515 gen_op_mov_TN_reg(ot, 0, reg);
2516 gen_op_mov_reg_T0(ot, rm);
2517 } else {
2518 gen_op_mov_TN_reg(ot, 0, rm);
2519 if (reg != OR_TMP0)
2520 gen_op_mov_reg_T0(ot, reg);
2521 }
2522 } else {
2523 gen_lea_modrm(s, modrm, &opreg, &disp);
2524 if (is_store) {
2525 if (reg != OR_TMP0)
2526 gen_op_mov_TN_reg(ot, 0, reg);
2527 gen_op_st_T0_A0(ot + s->mem_index);
2528 } else {
2529 gen_op_ld_T0_A0(ot + s->mem_index);
2530 if (reg != OR_TMP0)
2531 gen_op_mov_reg_T0(ot, reg);
2532 }
2533 }
2534}
2535
2536#ifndef VBOX
2537static inline uint32_t insn_get(DisasContext *s, int ot)
2538#else /* VBOX */
2539DECLINLINE(uint32_t) insn_get(DisasContext *s, int ot)
2540#endif /* VBOX */
2541{
2542 uint32_t ret;
2543
2544 switch(ot) {
2545 case OT_BYTE:
2546 ret = ldub_code(s->pc);
2547 s->pc++;
2548 break;
2549 case OT_WORD:
2550 ret = lduw_code(s->pc);
2551 s->pc += 2;
2552 break;
2553 default:
2554 case OT_LONG:
2555 ret = ldl_code(s->pc);
2556 s->pc += 4;
2557 break;
2558 }
2559 return ret;
2560}
2561
2562#ifndef VBOX
2563static inline int insn_const_size(unsigned int ot)
2564#else /* VBOX */
2565DECLINLINE(int) insn_const_size(unsigned int ot)
2566#endif /* VBOX */
2567{
2568 if (ot <= OT_LONG)
2569 return 1 << ot;
2570 else
2571 return 4;
2572}
2573
2574#ifndef VBOX
2575static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2576#else /* VBOX */
2577DECLINLINE(void) gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2578#endif /* VBOX */
2579{
2580 TranslationBlock *tb;
2581 target_ulong pc;
2582
2583 pc = s->cs_base + eip;
2584 tb = s->tb;
2585 /* NOTE: we handle the case where the TB spans two pages here */
2586 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
2587 (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
2588 /* jump to same page: we can use a direct jump */
2589 tcg_gen_goto_tb(tb_num);
2590 gen_jmp_im(eip);
2591 tcg_gen_exit_tb((long)tb + tb_num);
2592 } else {
2593 /* jump to another page: currently not optimized */
2594 gen_jmp_im(eip);
2595 gen_eob(s);
2596 }
2597}
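/* Direct chaining via tcg_gen_goto_tb() is restricted to targets on a page
   this TB already covers (the page of its first or of its last byte): a
   cross-page target could be remapped without this TB being invalidated. */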
2598
2599#ifndef VBOX
2600static inline void gen_jcc(DisasContext *s, int b,
2601#else /* VBOX */
2602DECLINLINE(void) gen_jcc(DisasContext *s, int b,
2603#endif /* VBOX */
2604 target_ulong val, target_ulong next_eip)
2605{
2606 int l1, l2, cc_op;
2607
2608 cc_op = s->cc_op;
2609 if (s->cc_op != CC_OP_DYNAMIC) {
2610 gen_op_set_cc_op(s->cc_op);
2611 s->cc_op = CC_OP_DYNAMIC;
2612 }
2613 if (s->jmp_opt) {
2614#ifdef VBOX
2615 gen_check_external_event(s);
2616#endif /* VBOX */
2617 l1 = gen_new_label();
2618 gen_jcc1(s, cc_op, b, l1);
2619
2620 gen_goto_tb(s, 0, next_eip);
2621
2622 gen_set_label(l1);
2623 gen_goto_tb(s, 1, val);
2624 s->is_jmp = 3;
2625 } else {
2626
2627 l1 = gen_new_label();
2628 l2 = gen_new_label();
2629 gen_jcc1(s, cc_op, b, l1);
2630
2631 gen_jmp_im(next_eip);
2632 tcg_gen_br(l2);
2633
2634 gen_set_label(l1);
2635 gen_jmp_im(val);
2636 gen_set_label(l2);
2637 gen_eob(s);
2638 }
2639}
2640
2641static void gen_setcc(DisasContext *s, int b)
2642{
2643 int inv, jcc_op, l1;
2644 TCGv t0;
2645
2646 if (is_fast_jcc_case(s, b)) {
2647 /* nominal case: we use a jump */
2648 /* XXX: make it faster by adding new instructions in TCG */
2649 t0 = tcg_temp_local_new(TCG_TYPE_TL);
2650 tcg_gen_movi_tl(t0, 0);
2651 l1 = gen_new_label();
2652 gen_jcc1(s, s->cc_op, b ^ 1, l1);
2653 tcg_gen_movi_tl(t0, 1);
2654 gen_set_label(l1);
2655 tcg_gen_mov_tl(cpu_T[0], t0);
2656 tcg_temp_free(t0);
2657 } else {
2658 /* slow case: it is more efficient not to generate a jump,
2659 although it is questionable whether this optimization is
2660 worth it */
2661 inv = b & 1;
2662 jcc_op = (b >> 1) & 7;
2663 gen_setcc_slow_T0(s, jcc_op);
2664 if (inv) {
2665 tcg_gen_xori_tl(cpu_T[0], cpu_T[0], 1);
2666 }
2667 }
2668}
2669
2670#ifndef VBOX
2671static inline void gen_op_movl_T0_seg(int seg_reg)
2672#else /* VBOX */
2673DECLINLINE(void) gen_op_movl_T0_seg(int seg_reg)
2674#endif /* VBOX */
2675{
2676 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
2677 offsetof(CPUX86State,segs[seg_reg].selector));
2678}
2679
2680#ifndef VBOX
2681static inline void gen_op_movl_seg_T0_vm(int seg_reg)
2682#else /* VBOX */
2683DECLINLINE(void) gen_op_movl_seg_T0_vm(int seg_reg)
2684#endif /* VBOX */
2685{
2686 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
2687 tcg_gen_st32_tl(cpu_T[0], cpu_env,
2688 offsetof(CPUX86State,segs[seg_reg].selector));
2689 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
2690 tcg_gen_st_tl(cpu_T[0], cpu_env,
2691 offsetof(CPUX86State,segs[seg_reg].base));
2692}
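/* Illustrative sketch (not compiled): in real and VM86 mode the two stores
   above implement the classic segmentation rule base = selector << 4. */
#if 0
static uint32_t vm_seg_base_sketch(uint16_t selector)
{
    return (uint32_t)selector << 4; /* e.g. 0x1234 -> base 0x12340 */
}
#endif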
2693
2694/* move T0 to seg_reg and compute if the CPU state may change. Never
2695 call this function with seg_reg == R_CS */
2696static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
2697{
2698 if (s->pe && !s->vm86) {
2699 /* XXX: optimize by finding processor state dynamically */
2700 if (s->cc_op != CC_OP_DYNAMIC)
2701 gen_op_set_cc_op(s->cc_op);
2702 gen_jmp_im(cur_eip);
2703 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2704 tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
2705 /* abort translation because the addseg value may change or
2706 because ss32 may change. For R_SS, translation must always
2707 stop as a special handling must be done to disable hardware
2708 interrupts for the next instruction */
2709 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
2710 s->is_jmp = 3;
2711 } else {
2712 gen_op_movl_seg_T0_vm(seg_reg);
2713 if (seg_reg == R_SS)
2714 s->is_jmp = 3;
2715 }
2716}
2717
2718#ifndef VBOX
2719static inline int svm_is_rep(int prefixes)
2720#else /* VBOX */
2721DECLINLINE(int) svm_is_rep(int prefixes)
2722#endif /* VBOX */
2723{
2724 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2725}
2726
2727#ifndef VBOX
2728static inline void
2729#else /* VBOX */
2730DECLINLINE(void)
2731#endif /* VBOX */
2732gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
2733 uint32_t type, uint64_t param)
2734{
2735 /* no SVM activated; fast case */
2736 if (likely(!(s->flags & HF_SVMI_MASK)))
2737 return;
2738 if (s->cc_op != CC_OP_DYNAMIC)
2739 gen_op_set_cc_op(s->cc_op);
2740 gen_jmp_im(pc_start - s->cs_base);
2741 tcg_gen_helper_0_2(helper_svm_check_intercept_param,
2742 tcg_const_i32(type), tcg_const_i64(param));
2743}
2744
2745#ifndef VBOX
2746static inline void
2747#else /* VBOX */
2748DECLINLINE(void)
2749#endif
2750gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2751{
2752 gen_svm_check_intercept_param(s, pc_start, type, 0);
2753}
2754
2755#ifndef VBOX
2756static inline void gen_stack_update(DisasContext *s, int addend)
2757#else /* VBOX */
2758DECLINLINE(void) gen_stack_update(DisasContext *s, int addend)
2759#endif /* VBOX */
2760{
2761#ifdef TARGET_X86_64
2762 if (CODE64(s)) {
2763 gen_op_add_reg_im(2, R_ESP, addend);
2764 } else
2765#endif
2766 if (s->ss32) {
2767 gen_op_add_reg_im(1, R_ESP, addend);
2768 } else {
2769 gen_op_add_reg_im(0, R_ESP, addend);
2770 }
2771}
2772
2773/* generate a push. It depends on ss32, addseg and dflag */
2774static void gen_push_T0(DisasContext *s)
2775{
2776#ifdef TARGET_X86_64
2777 if (CODE64(s)) {
2778 gen_op_movq_A0_reg(R_ESP);
2779 if (s->dflag) {
2780 gen_op_addq_A0_im(-8);
2781 gen_op_st_T0_A0(OT_QUAD + s->mem_index);
2782 } else {
2783 gen_op_addq_A0_im(-2);
2784 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2785 }
2786 gen_op_mov_reg_A0(2, R_ESP);
2787 } else
2788#endif
2789 {
2790 gen_op_movl_A0_reg(R_ESP);
2791 if (!s->dflag)
2792 gen_op_addl_A0_im(-2);
2793 else
2794 gen_op_addl_A0_im(-4);
2795 if (s->ss32) {
2796 if (s->addseg) {
2797 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2798 gen_op_addl_A0_seg(R_SS);
2799 }
2800 } else {
2801 gen_op_andl_A0_ffff();
2802 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2803 gen_op_addl_A0_seg(R_SS);
2804 }
2805 gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
2806 if (s->ss32 && !s->addseg)
2807 gen_op_mov_reg_A0(1, R_ESP);
2808 else
2809 gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
2810 }
2811}
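/* Illustrative sketch (not compiled): the net effect of gen_push_T0() in
   the 32-bit case, ignoring the 16-bit SP wrap and the segment base (the
   memory store itself is omitted). */
#if 0
static void push32_sketch(uint32_t *esp, int dflag)
{
    *esp -= dflag ? 4 : 2; /* operand size selects the decrement */
    /* the pushed value is then stored at SS:[*esp] */
}
#endif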
2812
2813/* generate a push. It depends on ss32, addseg and dflag */
2814/* slower version for T1, only used for call Ev */
2815static void gen_push_T1(DisasContext *s)
2816{
2817#ifdef TARGET_X86_64
2818 if (CODE64(s)) {
2819 gen_op_movq_A0_reg(R_ESP);
2820 if (s->dflag) {
2821 gen_op_addq_A0_im(-8);
2822 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2823 } else {
2824 gen_op_addq_A0_im(-2);
2825 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2826 }
2827 gen_op_mov_reg_A0(2, R_ESP);
2828 } else
2829#endif
2830 {
2831 gen_op_movl_A0_reg(R_ESP);
2832 if (!s->dflag)
2833 gen_op_addl_A0_im(-2);
2834 else
2835 gen_op_addl_A0_im(-4);
2836 if (s->ss32) {
2837 if (s->addseg) {
2838 gen_op_addl_A0_seg(R_SS);
2839 }
2840 } else {
2841 gen_op_andl_A0_ffff();
2842 gen_op_addl_A0_seg(R_SS);
2843 }
2844 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2845
2846 if (s->ss32 && !s->addseg)
2847 gen_op_mov_reg_A0(1, R_ESP);
2848 else
2849 gen_stack_update(s, (-2) << s->dflag);
2850 }
2851}
2852
2853 /* two-step pop is necessary for precise exceptions: the load may fault, so ESP is only updated afterwards by gen_pop_update() */
2854static void gen_pop_T0(DisasContext *s)
2855{
2856#ifdef TARGET_X86_64
2857 if (CODE64(s)) {
2858 gen_op_movq_A0_reg(R_ESP);
2859 gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
2860 } else
2861#endif
2862 {
2863 gen_op_movl_A0_reg(R_ESP);
2864 if (s->ss32) {
2865 if (s->addseg)
2866 gen_op_addl_A0_seg(R_SS);
2867 } else {
2868 gen_op_andl_A0_ffff();
2869 gen_op_addl_A0_seg(R_SS);
2870 }
2871 gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
2872 }
2873}
2874
2875static void gen_pop_update(DisasContext *s)
2876{
2877#ifdef TARGET_X86_64
2878 if (CODE64(s) && s->dflag) {
2879 gen_stack_update(s, 8);
2880 } else
2881#endif
2882 {
2883 gen_stack_update(s, 2 << s->dflag);
2884 }
2885}
2886
2887static void gen_stack_A0(DisasContext *s)
2888{
2889 gen_op_movl_A0_reg(R_ESP);
2890 if (!s->ss32)
2891 gen_op_andl_A0_ffff();
2892 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2893 if (s->addseg)
2894 gen_op_addl_A0_seg(R_SS);
2895}
2896
2897 /* NOTE: wrap-around in 16-bit mode is not fully handled */
2898static void gen_pusha(DisasContext *s)
2899{
2900 int i;
2901 gen_op_movl_A0_reg(R_ESP);
2902 gen_op_addl_A0_im(-16 << s->dflag);
2903 if (!s->ss32)
2904 gen_op_andl_A0_ffff();
2905 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2906 if (s->addseg)
2907 gen_op_addl_A0_seg(R_SS);
2908 for(i = 0;i < 8; i++) {
2909 gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
2910 gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
2911 gen_op_addl_A0_im(2 << s->dflag);
2912 }
2913 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2914}
2915
2916 /* NOTE: wrap-around in 16-bit mode is not fully handled */
2917static void gen_popa(DisasContext *s)
2918{
2919 int i;
2920 gen_op_movl_A0_reg(R_ESP);
2921 if (!s->ss32)
2922 gen_op_andl_A0_ffff();
2923 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2924 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);
2925 if (s->addseg)
2926 gen_op_addl_A0_seg(R_SS);
2927 for(i = 0;i < 8; i++) {
2928 /* ESP is not reloaded */
2929 if (i != 3) {
2930 gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
2931 gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
2932 }
2933 gen_op_addl_A0_im(2 << s->dflag);
2934 }
2935 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2936}
2937
2938static void gen_enter(DisasContext *s, int esp_addend, int level)
2939{
2940 int ot, opsize;
2941
2942 level &= 0x1f;
2943#ifdef TARGET_X86_64
2944 if (CODE64(s)) {
2945 ot = s->dflag ? OT_QUAD : OT_WORD;
2946 opsize = 1 << ot;
2947
2948 gen_op_movl_A0_reg(R_ESP);
2949 gen_op_addq_A0_im(-opsize);
2950 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2951
2952 /* push bp */
2953 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2954 gen_op_st_T0_A0(ot + s->mem_index);
2955 if (level) {
2956 /* XXX: must save state */
2957 tcg_gen_helper_0_3(helper_enter64_level,
2958 tcg_const_i32(level),
2959 tcg_const_i32((ot == OT_QUAD)),
2960 cpu_T[1]);
2961 }
2962 gen_op_mov_reg_T1(ot, R_EBP);
2963 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
2964 gen_op_mov_reg_T1(OT_QUAD, R_ESP);
2965 } else
2966#endif
2967 {
2968 ot = s->dflag + OT_WORD;
2969 opsize = 2 << s->dflag;
2970
2971 gen_op_movl_A0_reg(R_ESP);
2972 gen_op_addl_A0_im(-opsize);
2973 if (!s->ss32)
2974 gen_op_andl_A0_ffff();
2975 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2976 if (s->addseg)
2977 gen_op_addl_A0_seg(R_SS);
2978 /* push bp */
2979 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
2980 gen_op_st_T0_A0(ot + s->mem_index);
2981 if (level) {
2982 /* XXX: must save state */
2983 tcg_gen_helper_0_3(helper_enter_level,
2984 tcg_const_i32(level),
2985 tcg_const_i32(s->dflag),
2986 cpu_T[1]);
2987 }
2988 gen_op_mov_reg_T1(ot, R_EBP);
2989 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
2990 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2991 }
2992}
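/* Illustrative sketch (not compiled): the net effect of ENTER size,level in
   the 32-bit case above, with level already masked to 0..31 and the memory
   stores omitted; the level > 0 display copies are done by
   helper_enter_level. */
#if 0
static void enter32_sketch(uint32_t *esp, uint32_t *ebp,
                           uint32_t size, unsigned level)
{
    uint32_t frame = *esp - 4;       /* slot that receives the old EBP */
    *ebp = frame;                    /* new frame pointer */
    *esp = frame - size - 4 * level; /* locals sit below the display area */
}
#endif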
2993
2994static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
2995{
2996 if (s->cc_op != CC_OP_DYNAMIC)
2997 gen_op_set_cc_op(s->cc_op);
2998 gen_jmp_im(cur_eip);
2999 tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
3000 s->is_jmp = 3;
3001}
3002
3003/* an interrupt is different from an exception because of the
3004 privilege checks */
3005static void gen_interrupt(DisasContext *s, int intno,
3006 target_ulong cur_eip, target_ulong next_eip)
3007{
3008 if (s->cc_op != CC_OP_DYNAMIC)
3009 gen_op_set_cc_op(s->cc_op);
3010 gen_jmp_im(cur_eip);
3011 tcg_gen_helper_0_2(helper_raise_interrupt,
3012 tcg_const_i32(intno),
3013 tcg_const_i32(next_eip - cur_eip));
3014 s->is_jmp = 3;
3015}
3016
3017static void gen_debug(DisasContext *s, target_ulong cur_eip)
3018{
3019 if (s->cc_op != CC_OP_DYNAMIC)
3020 gen_op_set_cc_op(s->cc_op);
3021 gen_jmp_im(cur_eip);
3022 tcg_gen_helper_0_0(helper_debug);
3023 s->is_jmp = 3;
3024}
3025
3026/* generate a generic end of block. Trace exception is also generated
3027 if needed */
3028static void gen_eob(DisasContext *s)
3029{
3030 if (s->cc_op != CC_OP_DYNAMIC)
3031 gen_op_set_cc_op(s->cc_op);
3032 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
3033 tcg_gen_helper_0_0(helper_reset_inhibit_irq);
3034 }
3035 if (s->singlestep_enabled) {
3036 tcg_gen_helper_0_0(helper_debug);
3037 } else if (s->tf) {
3038 tcg_gen_helper_0_0(helper_single_step);
3039 } else {
3040 tcg_gen_exit_tb(0);
3041 }
3042 s->is_jmp = 3;
3043}
3044
3045 /* generate a jump to eip. No segment change may happen before this, as a
3046 direct jump to the next block may occur */
3047static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
3048{
3049 if (s->jmp_opt) {
3050#ifdef VBOX
3051 gen_check_external_event(s);
3052#endif /* VBOX */
3053 if (s->cc_op != CC_OP_DYNAMIC) {
3054 gen_op_set_cc_op(s->cc_op);
3055 s->cc_op = CC_OP_DYNAMIC;
3056 }
3057 gen_goto_tb(s, tb_num, eip);
3058 s->is_jmp = 3;
3059 } else {
3060 gen_jmp_im(eip);
3061 gen_eob(s);
3062 }
3063}
3064
3065static void gen_jmp(DisasContext *s, target_ulong eip)
3066{
3067 gen_jmp_tb(s, eip, 0);
3068}
3069
3070#ifndef VBOX
3071static inline void gen_ldq_env_A0(int idx, int offset)
3072#else /* VBOX */
3073DECLINLINE(void) gen_ldq_env_A0(int idx, int offset)
3074#endif /* VBOX */
3075{
3076 int mem_index = (idx >> 2) - 1;
3077 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
3078 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
3079}
3080
3081#ifndef VBOX
3082static inline void gen_stq_env_A0(int idx, int offset)
3083#else /* VBOX */
3084DECLINLINE(void) gen_stq_env_A0(int idx, int offset)
3085#endif /* VBOX */
3086{
3087 int mem_index = (idx >> 2) - 1;
3088 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
3089 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
3090}
3091
3092#ifndef VBOX
3093static inline void gen_ldo_env_A0(int idx, int offset)
3094#else /* VBOX */
3095DECLINLINE(void) gen_ldo_env_A0(int idx, int offset)
3096#endif /* VBOX */
3097{
3098 int mem_index = (idx >> 2) - 1;
3099 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
3100 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
3101 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
3102 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
3103 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
3104}
3105
3106#ifndef VBOX
3107static inline void gen_sto_env_A0(int idx, int offset)
3108#else /* VBOX */
3109DECLINLINE(void) gen_sto_env_A0(int idx, int offset)
3110#endif /* VBOX */
3111{
3112 int mem_index = (idx >> 2) - 1;
3113 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
3114 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
3115 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
3116 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
3117 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
3118}
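/* The guest memory interface is at most 64 bits wide, so the 128-bit
   loads/stores above are split into two tcg_gen_qemu_ld64/st64 accesses,
   low quadword first. */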
3119
3120#ifndef VBOX
3121static inline void gen_op_movo(int d_offset, int s_offset)
3122#else /* VBOX */
3123DECLINLINE(void) gen_op_movo(int d_offset, int s_offset)
3124#endif /* VBOX */
3125{
3126 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
3127 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3128 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
3129 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
3130}
3131
3132#ifndef VBOX
3133static inline void gen_op_movq(int d_offset, int s_offset)
3134#else /* VBOX */
3135DECLINLINE(void) gen_op_movq(int d_offset, int s_offset)
3136#endif /* VBOX */
3137{
3138 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
3139 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3140}
3141
3142#ifndef VBOX
3143static inline void gen_op_movl(int d_offset, int s_offset)
3144#else /* VBOX */
3145DECLINLINE(void) gen_op_movl(int d_offset, int s_offset)
3146#endif /* VBOX */
3147{
3148 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
3149 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
3150}
3151
3152#ifndef VBOX
3153static inline void gen_op_movq_env_0(int d_offset)
3154#else /* VBOX */
3155DECLINLINE(void) gen_op_movq_env_0(int d_offset)
3156#endif /* VBOX */
3157{
3158 tcg_gen_movi_i64(cpu_tmp1_i64, 0);
3159 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3160}
3161
3162#define SSE_SPECIAL ((void *)1)
3163#define SSE_DUMMY ((void *)2)
3164
3165#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
3166#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
3167 helper_ ## x ## ss, helper_ ## x ## sd, }
3168
3169static void *sse_op_table1[256][4] = {
3170 /* 3DNow! extensions */
3171 [0x0e] = { SSE_DUMMY }, /* femms */
3172 [0x0f] = { SSE_DUMMY }, /* pf... */
3173 /* pure SSE operations */
3174 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
3175 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
3176 [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
3177 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
3178 [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm }, /* unpcklps, unpcklpd */
3179 [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm }, /* unpckhps, unpckhpd */
3180 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
3181 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
3182
3183 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
3184 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
3185 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
3186 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
3187 [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttss2si, cvttsd2si */
3188 [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtss2si, cvtsd2si */
3189 [0x2e] = { helper_ucomiss, helper_ucomisd },
3190 [0x2f] = { helper_comiss, helper_comisd },
3191 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
3192 [0x51] = SSE_FOP(sqrt),
3193 [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
3194 [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
3195 [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
3196 [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
3197 [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
3198 [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
3199 [0x58] = SSE_FOP(add),
3200 [0x59] = SSE_FOP(mul),
3201 [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
3202 helper_cvtss2sd, helper_cvtsd2ss },
3203 [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
3204 [0x5c] = SSE_FOP(sub),
3205 [0x5d] = SSE_FOP(min),
3206 [0x5e] = SSE_FOP(div),
3207 [0x5f] = SSE_FOP(max),
3208
3209 [0xc2] = SSE_FOP(cmpeq),
3210 [0xc6] = { helper_shufps, helper_shufpd },
3211
3212 [0x38] = { SSE_SPECIAL, SSE_SPECIAL, NULL, SSE_SPECIAL }, /* SSSE3/SSE4 */
3213 [0x3a] = { SSE_SPECIAL, SSE_SPECIAL }, /* SSSE3/SSE4 */
3214
3215 /* MMX ops and their SSE extensions */
3216 [0x60] = MMX_OP2(punpcklbw),
3217 [0x61] = MMX_OP2(punpcklwd),
3218 [0x62] = MMX_OP2(punpckldq),
3219 [0x63] = MMX_OP2(packsswb),
3220 [0x64] = MMX_OP2(pcmpgtb),
3221 [0x65] = MMX_OP2(pcmpgtw),
3222 [0x66] = MMX_OP2(pcmpgtl),
3223 [0x67] = MMX_OP2(packuswb),
3224 [0x68] = MMX_OP2(punpckhbw),
3225 [0x69] = MMX_OP2(punpckhwd),
3226 [0x6a] = MMX_OP2(punpckhdq),
3227 [0x6b] = MMX_OP2(packssdw),
3228 [0x6c] = { NULL, helper_punpcklqdq_xmm },
3229 [0x6d] = { NULL, helper_punpckhqdq_xmm },
3230 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
3231 [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
3232 [0x70] = { helper_pshufw_mmx,
3233 helper_pshufd_xmm,
3234 helper_pshufhw_xmm,
3235 helper_pshuflw_xmm },
3236 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
3237 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
3238 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
3239 [0x74] = MMX_OP2(pcmpeqb),
3240 [0x75] = MMX_OP2(pcmpeqw),
3241 [0x76] = MMX_OP2(pcmpeql),
3242 [0x77] = { SSE_DUMMY }, /* emms */
3243 [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
3244 [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
3245 [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, movq */
3246 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
3247 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
3248 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
3249 [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
3250 [0xd1] = MMX_OP2(psrlw),
3251 [0xd2] = MMX_OP2(psrld),
3252 [0xd3] = MMX_OP2(psrlq),
3253 [0xd4] = MMX_OP2(paddq),
3254 [0xd5] = MMX_OP2(pmullw),
3255 [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movq2dq, movdq2q */
3256 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
3257 [0xd8] = MMX_OP2(psubusb),
3258 [0xd9] = MMX_OP2(psubusw),
3259 [0xda] = MMX_OP2(pminub),
3260 [0xdb] = MMX_OP2(pand),
3261 [0xdc] = MMX_OP2(paddusb),
3262 [0xdd] = MMX_OP2(paddusw),
3263 [0xde] = MMX_OP2(pmaxub),
3264 [0xdf] = MMX_OP2(pandn),
3265 [0xe0] = MMX_OP2(pavgb),
3266 [0xe1] = MMX_OP2(psraw),
3267 [0xe2] = MMX_OP2(psrad),
3268 [0xe3] = MMX_OP2(pavgw),
3269 [0xe4] = MMX_OP2(pmulhuw),
3270 [0xe5] = MMX_OP2(pmulhw),
3271 [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
3272 [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntdq */
3273 [0xe8] = MMX_OP2(psubsb),
3274 [0xe9] = MMX_OP2(psubsw),
3275 [0xea] = MMX_OP2(pminsw),
3276 [0xeb] = MMX_OP2(por),
3277 [0xec] = MMX_OP2(paddsb),
3278 [0xed] = MMX_OP2(paddsw),
3279 [0xee] = MMX_OP2(pmaxsw),
3280 [0xef] = MMX_OP2(pxor),
3281 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
3282 [0xf1] = MMX_OP2(psllw),
3283 [0xf2] = MMX_OP2(pslld),
3284 [0xf3] = MMX_OP2(psllq),
3285 [0xf4] = MMX_OP2(pmuludq),
3286 [0xf5] = MMX_OP2(pmaddwd),
3287 [0xf6] = MMX_OP2(psadbw),
3288 [0xf7] = MMX_OP2(maskmov),
3289 [0xf8] = MMX_OP2(psubb),
3290 [0xf9] = MMX_OP2(psubw),
3291 [0xfa] = MMX_OP2(psubl),
3292 [0xfb] = MMX_OP2(psubq),
3293 [0xfc] = MMX_OP2(paddb),
3294 [0xfd] = MMX_OP2(paddw),
3295 [0xfe] = MMX_OP2(paddl),
3296};
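/* Column index b1 above is derived from the mandatory prefix in gen_sse():
   0 = no prefix, 1 = 0x66, 2 = 0xf3, 3 = 0xf2. */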
3297
3298static void *sse_op_table2[3 * 8][2] = {
3299 [0 + 2] = MMX_OP2(psrlw),
3300 [0 + 4] = MMX_OP2(psraw),
3301 [0 + 6] = MMX_OP2(psllw),
3302 [8 + 2] = MMX_OP2(psrld),
3303 [8 + 4] = MMX_OP2(psrad),
3304 [8 + 6] = MMX_OP2(pslld),
3305 [16 + 2] = MMX_OP2(psrlq),
3306 [16 + 3] = { NULL, helper_psrldq_xmm },
3307 [16 + 6] = MMX_OP2(psllq),
3308 [16 + 7] = { NULL, helper_pslldq_xmm },
3309};
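/* Rows of eight, one per immediate-shift opcode (0x71: word, 0x72: dword,
   0x73: qword), indexed by the /r field of the ModRM byte: 2 = srl, 4 = sra,
   6 = sll, with 3 and 7 as the 128-bit psrldq/pslldq forms. */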
3310
3311static void *sse_op_table3[4 * 3] = {
3312 helper_cvtsi2ss,
3313 helper_cvtsi2sd,
3314 X86_64_ONLY(helper_cvtsq2ss),
3315 X86_64_ONLY(helper_cvtsq2sd),
3316
3317 helper_cvttss2si,
3318 helper_cvttsd2si,
3319 X86_64_ONLY(helper_cvttss2sq),
3320 X86_64_ONLY(helper_cvttsd2sq),
3321
3322 helper_cvtss2si,
3323 helper_cvtsd2si,
3324 X86_64_ONLY(helper_cvtss2sq),
3325 X86_64_ONLY(helper_cvtsd2sq),
3326};
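/* Indexed as (REX.W ? 2 : 0) + (0xf2 prefix ? 1 : 0), plus 4 for the
   truncating cvtt* group and 8 for the rounding cvt* group; see the
   cvtsi2ss/cvttss2si handling in gen_sse(). */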
3327
3328static void *sse_op_table4[8][4] = {
3329 SSE_FOP(cmpeq),
3330 SSE_FOP(cmplt),
3331 SSE_FOP(cmple),
3332 SSE_FOP(cmpunord),
3333 SSE_FOP(cmpneq),
3334 SSE_FOP(cmpnlt),
3335 SSE_FOP(cmpnle),
3336 SSE_FOP(cmpord),
3337};
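/* The imm8 of CMPPS/CMPPD/CMPSS/CMPSD (0f c2) selects the predicate:
   0 = eq, 1 = lt, 2 = le, 3 = unord, 4 = neq, 5 = nlt, 6 = nle, 7 = ord. */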
3338
3339static void *sse_op_table5[256] = {
3340 [0x0c] = helper_pi2fw,
3341 [0x0d] = helper_pi2fd,
3342 [0x1c] = helper_pf2iw,
3343 [0x1d] = helper_pf2id,
3344 [0x8a] = helper_pfnacc,
3345 [0x8e] = helper_pfpnacc,
3346 [0x90] = helper_pfcmpge,
3347 [0x94] = helper_pfmin,
3348 [0x96] = helper_pfrcp,
3349 [0x97] = helper_pfrsqrt,
3350 [0x9a] = helper_pfsub,
3351 [0x9e] = helper_pfadd,
3352 [0xa0] = helper_pfcmpgt,
3353 [0xa4] = helper_pfmax,
3354 [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
3355 [0xa7] = helper_movq, /* pfrsqit1 */
3356 [0xaa] = helper_pfsubr,
3357 [0xae] = helper_pfacc,
3358 [0xb0] = helper_pfcmpeq,
3359 [0xb4] = helper_pfmul,
3360 [0xb6] = helper_movq, /* pfrcpit2 */
3361 [0xb7] = helper_pmulhrw_mmx,
3362 [0xbb] = helper_pswapd,
3363 [0xbf] = helper_pavgb_mmx /* pavgusb */
3364};
3365
3366struct sse_op_helper_s {
3367 void *op[2]; uint32_t ext_mask;
3368};
3369#define SSSE3_OP(x) { MMX_OP2(x), CPUID_EXT_SSSE3 }
3370#define SSE41_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE41 }
3371#define SSE42_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE42 }
3372#define SSE41_SPECIAL { { NULL, SSE_SPECIAL }, CPUID_EXT_SSE41 }
3373static struct sse_op_helper_s sse_op_table6[256] = {
3374 [0x00] = SSSE3_OP(pshufb),
3375 [0x01] = SSSE3_OP(phaddw),
3376 [0x02] = SSSE3_OP(phaddd),
3377 [0x03] = SSSE3_OP(phaddsw),
3378 [0x04] = SSSE3_OP(pmaddubsw),
3379 [0x05] = SSSE3_OP(phsubw),
3380 [0x06] = SSSE3_OP(phsubd),
3381 [0x07] = SSSE3_OP(phsubsw),
3382 [0x08] = SSSE3_OP(psignb),
3383 [0x09] = SSSE3_OP(psignw),
3384 [0x0a] = SSSE3_OP(psignd),
3385 [0x0b] = SSSE3_OP(pmulhrsw),
3386 [0x10] = SSE41_OP(pblendvb),
3387 [0x14] = SSE41_OP(blendvps),
3388 [0x15] = SSE41_OP(blendvpd),
3389 [0x17] = SSE41_OP(ptest),
3390 [0x1c] = SSSE3_OP(pabsb),
3391 [0x1d] = SSSE3_OP(pabsw),
3392 [0x1e] = SSSE3_OP(pabsd),
3393 [0x20] = SSE41_OP(pmovsxbw),
3394 [0x21] = SSE41_OP(pmovsxbd),
3395 [0x22] = SSE41_OP(pmovsxbq),
3396 [0x23] = SSE41_OP(pmovsxwd),
3397 [0x24] = SSE41_OP(pmovsxwq),
3398 [0x25] = SSE41_OP(pmovsxdq),
3399 [0x28] = SSE41_OP(pmuldq),
3400 [0x29] = SSE41_OP(pcmpeqq),
3401 [0x2a] = SSE41_SPECIAL, /* movntdqa */
3402 [0x2b] = SSE41_OP(packusdw),
3403 [0x30] = SSE41_OP(pmovzxbw),
3404 [0x31] = SSE41_OP(pmovzxbd),
3405 [0x32] = SSE41_OP(pmovzxbq),
3406 [0x33] = SSE41_OP(pmovzxwd),
3407 [0x34] = SSE41_OP(pmovzxwq),
3408 [0x35] = SSE41_OP(pmovzxdq),
3409 [0x37] = SSE42_OP(pcmpgtq),
3410 [0x38] = SSE41_OP(pminsb),
3411 [0x39] = SSE41_OP(pminsd),
3412 [0x3a] = SSE41_OP(pminuw),
3413 [0x3b] = SSE41_OP(pminud),
3414 [0x3c] = SSE41_OP(pmaxsb),
3415 [0x3d] = SSE41_OP(pmaxsd),
3416 [0x3e] = SSE41_OP(pmaxuw),
3417 [0x3f] = SSE41_OP(pmaxud),
3418 [0x40] = SSE41_OP(pmulld),
3419 [0x41] = SSE41_OP(phminposuw),
3420};
3421
3422static struct sse_op_helper_s sse_op_table7[256] = {
3423 [0x08] = SSE41_OP(roundps),
3424 [0x09] = SSE41_OP(roundpd),
3425 [0x0a] = SSE41_OP(roundss),
3426 [0x0b] = SSE41_OP(roundsd),
3427 [0x0c] = SSE41_OP(blendps),
3428 [0x0d] = SSE41_OP(blendpd),
3429 [0x0e] = SSE41_OP(pblendw),
3430 [0x0f] = SSSE3_OP(palignr),
3431 [0x14] = SSE41_SPECIAL, /* pextrb */
3432 [0x15] = SSE41_SPECIAL, /* pextrw */
3433 [0x16] = SSE41_SPECIAL, /* pextrd/pextrq */
3434 [0x17] = SSE41_SPECIAL, /* extractps */
3435 [0x20] = SSE41_SPECIAL, /* pinsrb */
3436 [0x21] = SSE41_SPECIAL, /* insertps */
3437 [0x22] = SSE41_SPECIAL, /* pinsrd/pinsrq */
3438 [0x40] = SSE41_OP(dpps),
3439 [0x41] = SSE41_OP(dppd),
3440 [0x42] = SSE41_OP(mpsadbw),
3441 [0x60] = SSE42_OP(pcmpestrm),
3442 [0x61] = SSE42_OP(pcmpestri),
3443 [0x62] = SSE42_OP(pcmpistrm),
3444 [0x63] = SSE42_OP(pcmpistri),
3445};
3446
3447static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
3448{
3449 int b1, op1_offset, op2_offset, is_xmm, val, ot;
3450 int modrm, mod, rm, reg, reg_addr, offset_addr;
3451 void *sse_op2;
3452
3453 b &= 0xff;
3454 if (s->prefix & PREFIX_DATA)
3455 b1 = 1;
3456 else if (s->prefix & PREFIX_REPZ)
3457 b1 = 2;
3458 else if (s->prefix & PREFIX_REPNZ)
3459 b1 = 3;
3460 else
3461 b1 = 0;
3462 sse_op2 = sse_op_table1[b][b1];
3463 if (!sse_op2)
3464 goto illegal_op;
3465 if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
3466 is_xmm = 1;
3467 } else {
3468 if (b1 == 0) {
3469 /* MMX case */
3470 is_xmm = 0;
3471 } else {
3472 is_xmm = 1;
3473 }
3474 }
3475 /* simple MMX/SSE operation */
3476 if (s->flags & HF_TS_MASK) {
3477 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
3478 return;
3479 }
3480 if (s->flags & HF_EM_MASK) {
3481 illegal_op:
3482 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
3483 return;
3484 }
3485 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
3486 if ((b != 0x38 && b != 0x3a) || (s->prefix & PREFIX_DATA))
3487 goto illegal_op;
3488 if (b == 0x0e) {
3489 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3490 goto illegal_op;
3491 /* femms */
3492 tcg_gen_helper_0_0(helper_emms);
3493 return;
3494 }
3495 if (b == 0x77) {
3496 /* emms */
3497 tcg_gen_helper_0_0(helper_emms);
3498 return;
3499 }
3500 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
3501 the static cpu state) */
3502 if (!is_xmm) {
3503 tcg_gen_helper_0_0(helper_enter_mmx);
3504 }
3505
3506 modrm = ldub_code(s->pc++);
3507 reg = ((modrm >> 3) & 7);
3508 if (is_xmm)
3509 reg |= rex_r;
3510 mod = (modrm >> 6) & 3;
3511 if (sse_op2 == SSE_SPECIAL) {
3512 b |= (b1 << 8);
3513 switch(b) {
3514 case 0x0e7: /* movntq */
3515 if (mod == 3)
3516 goto illegal_op;
3517 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3518 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3519 break;
3520 case 0x1e7: /* movntdq */
3521 case 0x02b: /* movntps */
3522 case 0x12b: /* movntpd */
3523 case 0x3f0: /* lddqu; XXX: lddqu is a load, not a store - should use gen_ldo_env_A0 */
3524 if (mod == 3)
3525 goto illegal_op;
3526 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3527 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3528 break;
3529 case 0x6e: /* movd mm, ea */
3530#ifdef TARGET_X86_64
3531 if (s->dflag == 2) {
3532 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3533 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
3534 } else
3535#endif
3536 {
3537 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3538 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3539 offsetof(CPUX86State,fpregs[reg].mmx));
3540 tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
3541 }
3542 break;
3543 case 0x16e: /* movd xmm, ea */
3544#ifdef TARGET_X86_64
3545 if (s->dflag == 2) {
3546 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3547 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3548 offsetof(CPUX86State,xmm_regs[reg]));
3549 tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
3550 } else
3551#endif
3552 {
3553 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3554 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3555 offsetof(CPUX86State,xmm_regs[reg]));
3556 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3557 tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
3558 }
3559 break;
3560 case 0x6f: /* movq mm, ea */
3561 if (mod != 3) {
3562 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3563 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3564 } else {
3565 rm = (modrm & 7);
3566 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
3567 offsetof(CPUX86State,fpregs[rm].mmx));
3568 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
3569 offsetof(CPUX86State,fpregs[reg].mmx));
3570 }
3571 break;
3572 case 0x010: /* movups */
3573 case 0x110: /* movupd */
3574 case 0x028: /* movaps */
3575 case 0x128: /* movapd */
3576 case 0x16f: /* movdqa xmm, ea */
3577 case 0x26f: /* movdqu xmm, ea */
3578 if (mod != 3) {
3579 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3580 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3581 } else {
3582 rm = (modrm & 7) | REX_B(s);
3583 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
3584 offsetof(CPUX86State,xmm_regs[rm]));
3585 }
3586 break;
3587 case 0x210: /* movss xmm, ea */
3588 if (mod != 3) {
3589 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3590 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3591 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3592 gen_op_movl_T0_0();
3593 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3594 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3595 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3596 } else {
3597 rm = (modrm & 7) | REX_B(s);
3598 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3599 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3600 }
3601 break;
3602 case 0x310: /* movsd xmm, ea */
3603 if (mod != 3) {
3604 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3605 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3606 gen_op_movl_T0_0();
3607 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3608 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3609 } else {
3610 rm = (modrm & 7) | REX_B(s);
3611 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3612 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3613 }
3614 break;
3615 case 0x012: /* movlps */
3616 case 0x112: /* movlpd */
3617 if (mod != 3) {
3618 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3619 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3620 } else {
3621 /* movhlps */
3622 rm = (modrm & 7) | REX_B(s);
3623 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3624 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3625 }
3626 break;
3627 case 0x212: /* movsldup */
3628 if (mod != 3) {
3629 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3630 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3631 } else {
3632 rm = (modrm & 7) | REX_B(s);
3633 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3634 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3635 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3636 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
3637 }
3638 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3639 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3640 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3641 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3642 break;
3643 case 0x312: /* movddup */
3644 if (mod != 3) {
3645 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3646 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3647 } else {
3648 rm = (modrm & 7) | REX_B(s);
3649 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3650 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3651 }
3652 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3653 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3654 break;
3655 case 0x016: /* movhps */
3656 case 0x116: /* movhpd */
3657 if (mod != 3) {
3658 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3659 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3660 } else {
3661 /* movlhps */
3662 rm = (modrm & 7) | REX_B(s);
3663 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3664 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3665 }
3666 break;
3667 case 0x216: /* movshdup */
3668 if (mod != 3) {
3669 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3670 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3671 } else {
3672 rm = (modrm & 7) | REX_B(s);
3673 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3674 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
3675 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3676 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
3677 }
3678 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3679 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3680 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3681 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3682 break;
3683 case 0x7e: /* movd ea, mm */
3684#ifdef TARGET_X86_64
3685 if (s->dflag == 2) {
3686 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3687 offsetof(CPUX86State,fpregs[reg].mmx));
3688 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3689 } else
3690#endif
3691 {
3692 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3693 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
3694 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3695 }
3696 break;
3697 case 0x17e: /* movd ea, xmm */
3698#ifdef TARGET_X86_64
3699 if (s->dflag == 2) {
3700 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3701 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3702 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3703 } else
3704#endif
3705 {
3706 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3707 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3708 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3709 }
3710 break;
3711 case 0x27e: /* movq xmm, ea */
3712 if (mod != 3) {
3713 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3714 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3715 } else {
3716 rm = (modrm & 7) | REX_B(s);
3717 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3718 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3719 }
3720 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3721 break;
3722 case 0x7f: /* movq ea, mm */
3723 if (mod != 3) {
3724 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3725 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3726 } else {
3727 rm = (modrm & 7);
3728 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3729 offsetof(CPUX86State,fpregs[reg].mmx));
3730 }
3731 break;
3732 case 0x011: /* movups */
3733 case 0x111: /* movupd */
3734 case 0x029: /* movaps */
3735 case 0x129: /* movapd */
3736 case 0x17f: /* movdqa ea, xmm */
3737 case 0x27f: /* movdqu ea, xmm */
3738 if (mod != 3) {
3739 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3740 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3741 } else {
3742 rm = (modrm & 7) | REX_B(s);
3743 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
3744 offsetof(CPUX86State,xmm_regs[reg]));
3745 }
3746 break;
3747 case 0x211: /* movss ea, xmm */
3748 if (mod != 3) {
3749 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3750 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3751 gen_op_st_T0_A0(OT_LONG + s->mem_index);
3752 } else {
3753 rm = (modrm & 7) | REX_B(s);
3754 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
3755 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3756 }
3757 break;
3758 case 0x311: /* movsd ea, xmm */
3759 if (mod != 3) {
3760 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3761 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3762 } else {
3763 rm = (modrm & 7) | REX_B(s);
3764 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3765 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3766 }
3767 break;
3768 case 0x013: /* movlps */
3769 case 0x113: /* movlpd */
3770 if (mod != 3) {
3771 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3772 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3773 } else {
3774 goto illegal_op;
3775 }
3776 break;
3777 case 0x017: /* movhps */
3778 case 0x117: /* movhpd */
3779 if (mod != 3) {
3780 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3781 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3782 } else {
3783 goto illegal_op;
3784 }
3785 break;
3786 case 0x71: /* shift mm, im */
3787 case 0x72:
3788 case 0x73:
3789 case 0x171: /* shift xmm, im */
3790 case 0x172:
3791 case 0x173:
3792 val = ldub_code(s->pc++);
3793 if (is_xmm) {
3794 gen_op_movl_T0_im(val);
3795 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3796 gen_op_movl_T0_0();
3797 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
3798 op1_offset = offsetof(CPUX86State,xmm_t0);
3799 } else {
3800 gen_op_movl_T0_im(val);
3801 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
3802 gen_op_movl_T0_0();
3803 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
3804 op1_offset = offsetof(CPUX86State,mmx_t0);
3805 }
3806 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
3807 if (!sse_op2)
3808 goto illegal_op;
3809 if (is_xmm) {
3810 rm = (modrm & 7) | REX_B(s);
3811 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3812 } else {
3813 rm = (modrm & 7);
3814 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3815 }
3816 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3817 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3818 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3819 break;
3820 case 0x050: /* movmskps */
3821 rm = (modrm & 7) | REX_B(s);
3822 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3823 offsetof(CPUX86State,xmm_regs[rm]));
3824 tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
3825 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3826 gen_op_mov_reg_T0(OT_LONG, reg);
3827 break;
3828 case 0x150: /* movmskpd */
3829 rm = (modrm & 7) | REX_B(s);
3830 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3831 offsetof(CPUX86State,xmm_regs[rm]));
3832 tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
3833 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3834 gen_op_mov_reg_T0(OT_LONG, reg);
3835 break;
3836 case 0x02a: /* cvtpi2ps */
3837 case 0x12a: /* cvtpi2pd */
3838 tcg_gen_helper_0_0(helper_enter_mmx);
3839 if (mod != 3) {
3840 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3841 op2_offset = offsetof(CPUX86State,mmx_t0);
3842 gen_ldq_env_A0(s->mem_index, op2_offset);
3843 } else {
3844 rm = (modrm & 7);
3845 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3846 }
3847 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3848 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3849 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3850 switch(b >> 8) {
3851 case 0x0:
3852 tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
3853 break;
3854 default:
3855 case 0x1:
3856 tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
3857 break;
3858 }
3859 break;
3860 case 0x22a: /* cvtsi2ss */
3861 case 0x32a: /* cvtsi2sd */
3862 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3863 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3864 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3865 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3866 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3867 if (ot == OT_LONG) {
3868 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3869 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
3870 } else {
3871 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_T[0]);
3872 }
3873 break;
3874 case 0x02c: /* cvttps2pi */
3875 case 0x12c: /* cvttpd2pi */
3876 case 0x02d: /* cvtps2pi */
3877 case 0x12d: /* cvtpd2pi */
3878 tcg_gen_helper_0_0(helper_enter_mmx);
3879 if (mod != 3) {
3880 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3881 op2_offset = offsetof(CPUX86State,xmm_t0);
3882 gen_ldo_env_A0(s->mem_index, op2_offset);
3883 } else {
3884 rm = (modrm & 7) | REX_B(s);
3885 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3886 }
3887 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3888 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3889 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3890 switch(b) {
3891 case 0x02c:
3892 tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
3893 break;
3894 case 0x12c:
3895 tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
3896 break;
3897 case 0x02d:
3898 tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
3899 break;
3900 case 0x12d:
3901 tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
3902 break;
3903 }
3904 break;
3905 case 0x22c: /* cvttss2si */
3906 case 0x32c: /* cvttsd2si */
3907 case 0x22d: /* cvtss2si */
3908 case 0x32d: /* cvtsd2si */
3909 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3910 if (mod != 3) {
3911 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3912 if ((b >> 8) & 1) {
3913 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3914 } else {
3915 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3916 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3917 }
3918 op2_offset = offsetof(CPUX86State,xmm_t0);
3919 } else {
3920 rm = (modrm & 7) | REX_B(s);
3921 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3922 }
3923 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
3924 (b & 1) * 4];
3925 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3926 if (ot == OT_LONG) {
3927 tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
3928 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3929 } else {
3930 tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
3931 }
3932 gen_op_mov_reg_T0(ot, reg);
3933 break;
3934 case 0xc4: /* pinsrw */
3935 case 0x1c4:
3936 s->rip_offset = 1;
3937 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3938 val = ldub_code(s->pc++);
3939 if (b1) {
3940 val &= 7;
3941 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3942 offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
3943 } else {
3944 val &= 3;
3945 tcg_gen_st16_tl(cpu_T[0], cpu_env,
3946 offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
3947 }
3948 break;
3949 case 0xc5: /* pextrw */
3950 case 0x1c5:
3951 if (mod != 3)
3952 goto illegal_op;
3953 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3954 val = ldub_code(s->pc++);
3955 if (b1) {
3956 val &= 7;
3957 rm = (modrm & 7) | REX_B(s);
3958 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3959 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
3960 } else {
3961 val &= 3;
3962 rm = (modrm & 7);
3963 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
3964 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
3965 }
3966 reg = ((modrm >> 3) & 7) | rex_r;
3967 gen_op_mov_reg_T0(ot, reg);
3968 break;
3969 case 0x1d6: /* movq ea, xmm */
3970 if (mod != 3) {
3971 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3972 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3973 } else {
3974 rm = (modrm & 7) | REX_B(s);
3975 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3976 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3977 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3978 }
3979 break;
3980 case 0x2d6: /* movq2dq */
3981 tcg_gen_helper_0_0(helper_enter_mmx);
3982 rm = (modrm & 7);
3983 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3984 offsetof(CPUX86State,fpregs[rm].mmx));
3985 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3986 break;
3987 case 0x3d6: /* movdq2q */
3988 tcg_gen_helper_0_0(helper_enter_mmx);
3989 rm = (modrm & 7) | REX_B(s);
3990 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
3991 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3992 break;
3993 case 0xd7: /* pmovmskb */
3994 case 0x1d7:
3995 if (mod != 3)
3996 goto illegal_op;
3997 if (b1) {
3998 rm = (modrm & 7) | REX_B(s);
3999 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
4000 tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
4001 } else {
4002 rm = (modrm & 7);
4003 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
4004 tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
4005 }
4006 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4007 reg = ((modrm >> 3) & 7) | rex_r;
4008 gen_op_mov_reg_T0(OT_LONG, reg);
4009 break;
4010 case 0x138:
4011 if (s->prefix & PREFIX_REPNZ)
4012 goto crc32;
4013 case 0x038: /* fall through */
4014 b = modrm;
4015 modrm = ldub_code(s->pc++);
4016 rm = modrm & 7;
4017 reg = ((modrm >> 3) & 7) | rex_r;
4018 mod = (modrm >> 6) & 3;
4019
4020 sse_op2 = sse_op_table6[b].op[b1];
4021 if (!sse_op2)
4022 goto illegal_op;
4023 if (!(s->cpuid_ext_features & sse_op_table6[b].ext_mask))
4024 goto illegal_op;
4025
4026 if (b1) {
4027 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4028 if (mod == 3) {
4029 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
4030 } else {
4031 op2_offset = offsetof(CPUX86State,xmm_t0);
4032 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4033 switch (b) {
4034 case 0x20: case 0x30: /* pmovsxbw, pmovzxbw */
4035 case 0x23: case 0x33: /* pmovsxwd, pmovzxwd */
4036 case 0x25: case 0x35: /* pmovsxdq, pmovzxdq */
4037 gen_ldq_env_A0(s->mem_index, op2_offset +
4038 offsetof(XMMReg, XMM_Q(0)));
4039 break;
4040 case 0x21: case 0x31: /* pmovsxbd, pmovzxbd */
4041 case 0x24: case 0x34: /* pmovsxwq, pmovzxwq */
4042 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
4043 (s->mem_index >> 2) - 1);
4044 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, op2_offset +
4045 offsetof(XMMReg, XMM_L(0)));
4046 break;
4047 case 0x22: case 0x32: /* pmovsxbq, pmovzxbq */
4048 tcg_gen_qemu_ld16u(cpu_tmp0, cpu_A0,
4049 (s->mem_index >> 2) - 1);
4050 tcg_gen_st16_tl(cpu_tmp0, cpu_env, op2_offset +
4051 offsetof(XMMReg, XMM_W(0)));
4052 break;
4053                 case 0x2a:            /* movntdqa */
4054 gen_ldo_env_A0(s->mem_index, op1_offset);
4055 return;
4056 default:
4057 gen_ldo_env_A0(s->mem_index, op2_offset);
4058 }
4059 }
4060 } else {
4061 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4062 if (mod == 3) {
4063 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4064 } else {
4065 op2_offset = offsetof(CPUX86State,mmx_t0);
4066 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4067 gen_ldq_env_A0(s->mem_index, op2_offset);
4068 }
4069 }
4070 if (sse_op2 == SSE_SPECIAL)
4071 goto illegal_op;
4072
4073 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4074 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4075 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4076
4077 if (b == 0x17)
4078 s->cc_op = CC_OP_EFLAGS;
4079 break;
4080 case 0x338: /* crc32 */
4081 crc32:
4082 b = modrm;
4083 modrm = ldub_code(s->pc++);
4084 reg = ((modrm >> 3) & 7) | rex_r;
4085
4086 if (b != 0xf0 && b != 0xf1)
4087 goto illegal_op;
4088 if (!(s->cpuid_ext_features & CPUID_EXT_SSE42))
4089 goto illegal_op;
4090
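         /* operand size (illustrative note): F2 0F 38 F0 is crc32 Gd,Eb and
            F2 0F 38 F1 is crc32 Gd,Ev, where a 66 prefix selects Ew and
            REX.W selects Eq (SSE4.2). */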
4091 if (b == 0xf0)
4092 ot = OT_BYTE;
4093 else if (b == 0xf1 && s->dflag != 2)
4094 if (s->prefix & PREFIX_DATA)
4095 ot = OT_WORD;
4096 else
4097 ot = OT_LONG;
4098 else
4099 ot = OT_QUAD;
4100
4101 gen_op_mov_TN_reg(OT_LONG, 0, reg);
4102 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4103 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4104 tcg_gen_helper_1_3(helper_crc32, cpu_T[0], cpu_tmp2_i32,
4105 cpu_T[0], tcg_const_i32(8 << ot));
4106
4107 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4108 gen_op_mov_reg_T0(ot, reg);
4109 break;
4110 case 0x03a:
4111 case 0x13a:
4112 b = modrm;
4113 modrm = ldub_code(s->pc++);
4114 rm = modrm & 7;
4115 reg = ((modrm >> 3) & 7) | rex_r;
4116 mod = (modrm >> 6) & 3;
4117
4118 sse_op2 = sse_op_table7[b].op[b1];
4119 if (!sse_op2)
4120 goto illegal_op;
4121 if (!(s->cpuid_ext_features & sse_op_table7[b].ext_mask))
4122 goto illegal_op;
4123
4124 if (sse_op2 == SSE_SPECIAL) {
4125 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4126 rm = (modrm & 7) | REX_B(s);
4127 if (mod != 3)
4128 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4129 reg = ((modrm >> 3) & 7) | rex_r;
4130 val = ldub_code(s->pc++);
4131 switch (b) {
4132 case 0x14: /* pextrb */
4133 tcg_gen_ld8u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4134 xmm_regs[reg].XMM_B(val & 15)));
4135 if (mod == 3)
4136 gen_op_mov_reg_T0(ot, rm);
4137 else
4138 tcg_gen_qemu_st8(cpu_T[0], cpu_A0,
4139 (s->mem_index >> 2) - 1);
4140 break;
4141 case 0x15: /* pextrw */
4142 tcg_gen_ld16u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4143 xmm_regs[reg].XMM_W(val & 7)));
4144 if (mod == 3)
4145 gen_op_mov_reg_T0(ot, rm);
4146 else
4147 tcg_gen_qemu_st16(cpu_T[0], cpu_A0,
4148 (s->mem_index >> 2) - 1);
4149 break;
4150 case 0x16:
4151 if (ot == OT_LONG) { /* pextrd */
4152 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
4153 offsetof(CPUX86State,
4154 xmm_regs[reg].XMM_L(val & 3)));
4155 if (mod == 3)
4156 gen_op_mov_reg_v(ot, rm, cpu_tmp2_i32);
4157 else
4158 tcg_gen_qemu_st32(cpu_tmp2_i32, cpu_A0,
4159 (s->mem_index >> 2) - 1);
4160 } else { /* pextrq */
4161 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
4162 offsetof(CPUX86State,
4163 xmm_regs[reg].XMM_Q(val & 1)));
4164 if (mod == 3)
4165 gen_op_mov_reg_v(ot, rm, cpu_tmp1_i64);
4166 else
4167 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
4168 (s->mem_index >> 2) - 1);
4169 }
4170 break;
4171 case 0x17: /* extractps */
4172 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4173 xmm_regs[reg].XMM_L(val & 3)));
4174 if (mod == 3)
4175 gen_op_mov_reg_T0(ot, rm);
4176 else
4177 tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
4178 (s->mem_index >> 2) - 1);
4179 break;
4180 case 0x20: /* pinsrb */
4181 if (mod == 3)
4182 gen_op_mov_TN_reg(OT_LONG, 0, rm);
4183 else
4184 tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0,
4185 (s->mem_index >> 2) - 1);
4186 tcg_gen_st8_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4187 xmm_regs[reg].XMM_B(val & 15)));
4188 break;
4189 case 0x21: /* insertps */
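                 /* imm8: bits [7:6] select the source lane (register form
                    only), [5:4] the destination lane, and [3:0] form a zero
                    mask applied to the result lanes below. */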
4190 if (mod == 3)
4191 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
4192 offsetof(CPUX86State,xmm_regs[rm]
4193 .XMM_L((val >> 6) & 3)));
4194 else
4195 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
4196 (s->mem_index >> 2) - 1);
4197 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
4198 offsetof(CPUX86State,xmm_regs[reg]
4199 .XMM_L((val >> 4) & 3)));
4200 if ((val >> 0) & 1)
4201 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4202 cpu_env, offsetof(CPUX86State,
4203 xmm_regs[reg].XMM_L(0)));
4204 if ((val >> 1) & 1)
4205 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4206 cpu_env, offsetof(CPUX86State,
4207 xmm_regs[reg].XMM_L(1)));
4208 if ((val >> 2) & 1)
4209 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4210 cpu_env, offsetof(CPUX86State,
4211 xmm_regs[reg].XMM_L(2)));
4212 if ((val >> 3) & 1)
4213 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4214 cpu_env, offsetof(CPUX86State,
4215 xmm_regs[reg].XMM_L(3)));
4216 break;
4217 case 0x22:
4218 if (ot == OT_LONG) { /* pinsrd */
4219 if (mod == 3)
4220 gen_op_mov_v_reg(ot, cpu_tmp2_i32, rm);
4221 else
4222 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
4223 (s->mem_index >> 2) - 1);
4224 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
4225 offsetof(CPUX86State,
4226 xmm_regs[reg].XMM_L(val & 3)));
4227 } else { /* pinsrq */
4228 if (mod == 3)
4229 gen_op_mov_v_reg(ot, cpu_tmp1_i64, rm);
4230 else
4231 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
4232 (s->mem_index >> 2) - 1);
4233 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
4234 offsetof(CPUX86State,
4235 xmm_regs[reg].XMM_Q(val & 1)));
4236 }
4237 break;
4238 }
4239 return;
4240 }
4241
4242 if (b1) {
4243 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4244 if (mod == 3) {
4245 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
4246 } else {
4247 op2_offset = offsetof(CPUX86State,xmm_t0);
4248 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4249 gen_ldo_env_A0(s->mem_index, op2_offset);
4250 }
4251 } else {
4252 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4253 if (mod == 3) {
4254 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4255 } else {
4256 op2_offset = offsetof(CPUX86State,mmx_t0);
4257 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4258 gen_ldq_env_A0(s->mem_index, op2_offset);
4259 }
4260 }
4261 val = ldub_code(s->pc++);
4262
4263 if ((b & 0xfc) == 0x60) { /* pcmpXstrX */
4264 s->cc_op = CC_OP_EFLAGS;
4265
4266 if (s->dflag == 2)
4267 /* The helper must use entire 64-bit gp registers */
4268 val |= 1 << 8;
4269 }
4270
4271 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4272 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4273 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
4274 break;
4275 default:
4276 goto illegal_op;
4277 }
4278 } else {
4279 /* generic MMX or SSE operation */
4280 switch(b) {
4281 case 0x70: /* pshufx insn */
4282         case 0xc6: /* shufps/shufpd insn */
4283 case 0xc2: /* compare insns */
4284 s->rip_offset = 1;
4285 break;
4286 default:
4287 break;
4288 }
4289 if (is_xmm) {
4290 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4291 if (mod != 3) {
4292 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4293 op2_offset = offsetof(CPUX86State,xmm_t0);
4294 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
4295 b == 0xc2)) {
4296 /* specific case for SSE single instructions */
4297 if (b1 == 2) {
4298 /* 32 bit access */
4299 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4300 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
4301 } else {
4302 /* 64 bit access */
4303 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
4304 }
4305 } else {
4306 gen_ldo_env_A0(s->mem_index, op2_offset);
4307 }
4308 } else {
4309 rm = (modrm & 7) | REX_B(s);
4310 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
4311 }
4312 } else {
4313 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4314 if (mod != 3) {
4315 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4316 op2_offset = offsetof(CPUX86State,mmx_t0);
4317 gen_ldq_env_A0(s->mem_index, op2_offset);
4318 } else {
4319 rm = (modrm & 7);
4320 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4321 }
4322 }
4323 switch(b) {
4324 case 0x0f: /* 3DNow! data insns */
4325 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
4326 goto illegal_op;
4327 val = ldub_code(s->pc++);
4328 sse_op2 = sse_op_table5[val];
4329 if (!sse_op2)
4330 goto illegal_op;
4331 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4332 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4333 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4334 break;
4335 case 0x70: /* pshufx insn */
4336         case 0xc6: /* shufps/shufpd insn */
4337 val = ldub_code(s->pc++);
4338 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4339 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4340 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
4341 break;
4342 case 0xc2:
4343 /* compare insns */
4344 val = ldub_code(s->pc++);
4345 if (val >= 8)
4346 goto illegal_op;
4347 sse_op2 = sse_op_table4[val][b1];
4348 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4349 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4350 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4351 break;
4352 case 0xf7:
4353 /* maskmov : we must prepare A0 */
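             /* maskmovq/maskmovdqu take two register operands; the implicit
                store goes through DS:rDI (width selected by s->aflag), so A0
                has to be set up by hand even though mod == 3. */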
4354 if (mod != 3)
4355 goto illegal_op;
4356#ifdef TARGET_X86_64
4357 if (s->aflag == 2) {
4358 gen_op_movq_A0_reg(R_EDI);
4359 } else
4360#endif
4361 {
4362 gen_op_movl_A0_reg(R_EDI);
4363 if (s->aflag == 0)
4364 gen_op_andl_A0_ffff();
4365 }
4366 gen_add_A0_ds_seg(s);
4367
4368 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4369 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4370 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
4371 break;
4372 default:
4373 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4374 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4375 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4376 break;
4377 }
4378 if (b == 0x2e || b == 0x2f) {
4379 s->cc_op = CC_OP_EFLAGS;
4380 }
4381 }
4382}
4383
4384#ifdef VBOX
4385 /* Checks whether this is an invalid lock sequence. Only a few instructions
4386    can be used together with the lock prefix, and of those only the forms
4387    that write a memory operand, so this is somewhat tedious to check
4388    (see the worked example after this function).
4389    The AMD manual lists the following instructions:
4390 ADC
4391 ADD
4392 AND
4393 BTC
4394 BTR
4395 BTS
4396 CMPXCHG
4397 CMPXCHG8B
4398 CMPXCHG16B
4399 DEC
4400 INC
4401 NEG
4402 NOT
4403 OR
4404 SBB
4405 SUB
4406 XADD
4407 XCHG
4408 XOR */
4409static bool is_invalid_lock_sequence(DisasContext *s, target_ulong pc_start, int b)
4410{
4411 target_ulong pc = s->pc;
4412 int modrm, mod, op;
4413
4414 /* X={8,16,32,64} Y={16,32,64} */
4415 switch (b)
4416 {
4417 /* /2: ADC reg/memX, immX */
4418 /* /0: ADD reg/memX, immX */
4419 /* /4: AND reg/memX, immX */
4420 /* /1: OR reg/memX, immX */
4421 /* /3: SBB reg/memX, immX */
4422 /* /5: SUB reg/memX, immX */
4423 /* /6: XOR reg/memX, immX */
4424 case 0x80:
4425 case 0x81:
4426 case 0x83:
4427 modrm = ldub_code(pc++);
4428 op = (modrm >> 3) & 7;
4429 if (op == 7) /* /7: CMP */
4430 break;
4431 mod = (modrm >> 6) & 3;
4432 if (mod == 3) /* register destination */
4433 break;
4434 return false;
4435
4436 case 0x10: /* /r: ADC reg/mem8, reg8 */
4437     case 0x11: /* /r: ADC reg/memY, regY */
4438     case 0x00: /* /r: ADD reg/mem8, reg8 */
4439     case 0x01: /* /r: ADD reg/memY, regY */
4440 case 0x20: /* /r: AND reg/mem8, reg8 */
4441 case 0x21: /* /r: AND reg/memY, regY */
4442 case 0x08: /* /r: OR reg/mem8, reg8 */
4443 case 0x09: /* /r: OR reg/memY, regY */
4444 case 0x18: /* /r: SBB reg/mem8, reg8 */
4445 case 0x19: /* /r: SBB reg/memY, regY */
4446 case 0x28: /* /r: SUB reg/mem8, reg8 */
4447 case 0x29: /* /r: SUB reg/memY, regY */
4448 case 0x86: /* /r: XCHG reg/mem8, reg8 or XCHG reg8, reg/mem8 */
4449 case 0x87: /* /r: XCHG reg/memY, regY or XCHG regY, reg/memY */
4450 case 0x30: /* /r: XOR reg/mem8, reg8 */
4451 case 0x31: /* /r: XOR reg/memY, regY */
4452 modrm = ldub_code(pc++);
4453 mod = (modrm >> 6) & 3;
4454 if (mod == 3) /* register destination */
4455 break;
4456 return false;
4457
4458 /* /1: DEC reg/memX */
4459 /* /0: INC reg/memX */
4460 case 0xfe:
4461 case 0xff:
4462 modrm = ldub_code(pc++);
4463 mod = (modrm >> 6) & 3;
4464 if (mod == 3) /* register destination */
4465 break;
4466 return false;
4467
4468 /* /3: NEG reg/memX */
4469 /* /2: NOT reg/memX */
4470 case 0xf6:
4471 case 0xf7:
4472 modrm = ldub_code(pc++);
4473 mod = (modrm >> 6) & 3;
4474 if (mod == 3) /* register destination */
4475 break;
4476 return false;
4477
4478 case 0x0f:
4479 b = ldub_code(pc++);
4480 switch (b)
4481 {
4482 /* /7: BTC reg/memY, imm8 */
4483 /* /6: BTR reg/memY, imm8 */
4484 /* /5: BTS reg/memY, imm8 */
4485 case 0xba:
4486 modrm = ldub_code(pc++);
4487 op = (modrm >> 3) & 7;
4488 if (op < 5)
4489 break;
4490 mod = (modrm >> 6) & 3;
4491 if (mod == 3) /* register destination */
4492 break;
4493 return false;
4494
4495 case 0xbb: /* /r: BTC reg/memY, regY */
4496 case 0xb3: /* /r: BTR reg/memY, regY */
4497 case 0xab: /* /r: BTS reg/memY, regY */
4498 case 0xb0: /* /r: CMPXCHG reg/mem8, reg8 */
4499 case 0xb1: /* /r: CMPXCHG reg/memY, regY */
4500 case 0xc0: /* /r: XADD reg/mem8, reg8 */
4501 case 0xc1: /* /r: XADD reg/memY, regY */
4502 modrm = ldub_code(pc++);
4503 mod = (modrm >> 6) & 3;
4504 if (mod == 3) /* register destination */
4505 break;
4506 return false;
4507
4508 /* /1: CMPXCHG8B mem64 or CMPXCHG16B mem128 */
4509 case 0xc7:
4510 modrm = ldub_code(pc++);
4511 op = (modrm >> 3) & 7;
4512 if (op != 1)
4513 break;
4514 return false;
4515 }
4516 break;
4517 }
4518
4519     /* Illegal sequence. s->pc is past the lock prefix and that
4520        is sufficient for the TB, I think. */
4521 Log(("illegal lock sequence %VGv (b=%#x)\n", pc_start, b));
4522 return true;
4523}
4524#endif /* VBOX */
4525
4526
4527/* convert one instruction. s->is_jmp is set if the translation must
4528 be stopped. Return the next pc value */
4529static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
4530{
4531 int b, prefixes, aflag, dflag;
4532 int shift, ot;
4533 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
4534 target_ulong next_eip, tval;
4535 int rex_w, rex_r;
4536
4537 if (unlikely(loglevel & CPU_LOG_TB_OP))
4538 tcg_gen_debug_insn_start(pc_start);
4539 s->pc = pc_start;
4540 prefixes = 0;
4541 aflag = s->code32;
4542 dflag = s->code32;
4543 s->override = -1;
4544 rex_w = -1;
4545 rex_r = 0;
4546#ifdef TARGET_X86_64
4547 s->rex_x = 0;
4548 s->rex_b = 0;
4549 x86_64_hregs = 0;
4550#endif
4551 s->rip_offset = 0; /* for relative ip address */
4552#ifdef VBOX
4553 /* Always update EIP. Otherwise one must be very careful with generated code that can raise exceptions. */
4554 gen_update_eip(pc_start - s->cs_base);
4555#endif
4556 next_byte:
4557 b = ldub_code(s->pc);
4558 s->pc++;
4559 /* check prefixes */
4560#ifdef TARGET_X86_64
4561 if (CODE64(s)) {
4562 switch (b) {
4563 case 0xf3:
4564 prefixes |= PREFIX_REPZ;
4565 goto next_byte;
4566 case 0xf2:
4567 prefixes |= PREFIX_REPNZ;
4568 goto next_byte;
4569 case 0xf0:
4570 prefixes |= PREFIX_LOCK;
4571 goto next_byte;
4572 case 0x2e:
4573 s->override = R_CS;
4574 goto next_byte;
4575 case 0x36:
4576 s->override = R_SS;
4577 goto next_byte;
4578 case 0x3e:
4579 s->override = R_DS;
4580 goto next_byte;
4581 case 0x26:
4582 s->override = R_ES;
4583 goto next_byte;
4584 case 0x64:
4585 s->override = R_FS;
4586 goto next_byte;
4587 case 0x65:
4588 s->override = R_GS;
4589 goto next_byte;
4590 case 0x66:
4591 prefixes |= PREFIX_DATA;
4592 goto next_byte;
4593 case 0x67:
4594 prefixes |= PREFIX_ADR;
4595 goto next_byte;
4596 case 0x40 ... 0x4f:
4597 /* REX prefix */
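             /* REX = 0100WRXB: W forces 64-bit operands, and R, X and B
                extend ModRM.reg, SIB.index and ModRM.rm/SIB.base by one bit;
                e.g. 0x48 is REX.W and 0x44 is REX.R. */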
4598 rex_w = (b >> 3) & 1;
4599 rex_r = (b & 0x4) << 1;
4600 s->rex_x = (b & 0x2) << 2;
4601 REX_B(s) = (b & 0x1) << 3;
4602 x86_64_hregs = 1; /* select uniform byte register addressing */
4603 goto next_byte;
4604 }
4605 if (rex_w == 1) {
4606 /* 0x66 is ignored if rex.w is set */
4607 dflag = 2;
4608 } else {
4609 if (prefixes & PREFIX_DATA)
4610 dflag ^= 1;
4611 }
4612 if (!(prefixes & PREFIX_ADR))
4613 aflag = 2;
4614 } else
4615#endif
4616 {
4617 switch (b) {
4618 case 0xf3:
4619 prefixes |= PREFIX_REPZ;
4620 goto next_byte;
4621 case 0xf2:
4622 prefixes |= PREFIX_REPNZ;
4623 goto next_byte;
4624 case 0xf0:
4625 prefixes |= PREFIX_LOCK;
4626 goto next_byte;
4627 case 0x2e:
4628 s->override = R_CS;
4629 goto next_byte;
4630 case 0x36:
4631 s->override = R_SS;
4632 goto next_byte;
4633 case 0x3e:
4634 s->override = R_DS;
4635 goto next_byte;
4636 case 0x26:
4637 s->override = R_ES;
4638 goto next_byte;
4639 case 0x64:
4640 s->override = R_FS;
4641 goto next_byte;
4642 case 0x65:
4643 s->override = R_GS;
4644 goto next_byte;
4645 case 0x66:
4646 prefixes |= PREFIX_DATA;
4647 goto next_byte;
4648 case 0x67:
4649 prefixes |= PREFIX_ADR;
4650 goto next_byte;
4651 }
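         /* e.g. in 32-bit code (dflag = aflag = 1) prefix 0x66 selects
            16-bit operands and 0x67 selects 16-bit addressing; in 16-bit
            code the same prefixes select the 32-bit forms. */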
4652 if (prefixes & PREFIX_DATA)
4653 dflag ^= 1;
4654 if (prefixes & PREFIX_ADR)
4655 aflag ^= 1;
4656 }
4657
4658 s->prefix = prefixes;
4659 s->aflag = aflag;
4660 s->dflag = dflag;
4661
4662 /* lock generation */
4663#ifndef VBOX
4664 if (prefixes & PREFIX_LOCK)
4665 tcg_gen_helper_0_0(helper_lock);
4666#else /* VBOX */
4667 if (prefixes & PREFIX_LOCK) {
4668 if (is_invalid_lock_sequence(s, pc_start, b)) {
4669 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
4670 return s->pc;
4671 }
4672 tcg_gen_helper_0_0(helper_lock);
4673 }
4674#endif /* VBOX */
4675
4676 /* now check op code */
4677 reswitch:
4678 switch(b) {
4679 case 0x0f:
4680 /**************************/
4681 /* extended op code */
4682 b = ldub_code(s->pc++) | 0x100;
4683 goto reswitch;
4684
4685 /**************************/
4686 /* arith & logic */
4687 case 0x00 ... 0x05:
4688 case 0x08 ... 0x0d:
4689 case 0x10 ... 0x15:
4690 case 0x18 ... 0x1d:
4691 case 0x20 ... 0x25:
4692 case 0x28 ... 0x2d:
4693 case 0x30 ... 0x35:
4694 case 0x38 ... 0x3d:
4695 {
4696 int op, f, val;
4697 op = (b >> 3) & 7;
4698 f = (b >> 1) & 3;
4699
4700 if ((b & 1) == 0)
4701 ot = OT_BYTE;
4702 else
4703 ot = dflag + OT_WORD;
4704
4705 switch(f) {
4706 case 0: /* OP Ev, Gv */
4707 modrm = ldub_code(s->pc++);
4708 reg = ((modrm >> 3) & 7) | rex_r;
4709 mod = (modrm >> 6) & 3;
4710 rm = (modrm & 7) | REX_B(s);
4711 if (mod != 3) {
4712 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4713 opreg = OR_TMP0;
4714 } else if (op == OP_XORL && rm == reg) {
4715 xor_zero:
4716 /* xor reg, reg optimisation */
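                     /* e.g. "xor %eax,%eax": the result and the flags are
                        constant (0, with ZF/PF set and CF/OF clear), so no
                        operand load or generic ALU op is generated. */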
4717 gen_op_movl_T0_0();
4718 s->cc_op = CC_OP_LOGICB + ot;
4719 gen_op_mov_reg_T0(ot, reg);
4720 gen_op_update1_cc();
4721 break;
4722 } else {
4723 opreg = rm;
4724 }
4725 gen_op_mov_TN_reg(ot, 1, reg);
4726 gen_op(s, op, ot, opreg);
4727 break;
4728 case 1: /* OP Gv, Ev */
4729 modrm = ldub_code(s->pc++);
4730 mod = (modrm >> 6) & 3;
4731 reg = ((modrm >> 3) & 7) | rex_r;
4732 rm = (modrm & 7) | REX_B(s);
4733 if (mod != 3) {
4734 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4735 gen_op_ld_T1_A0(ot + s->mem_index);
4736 } else if (op == OP_XORL && rm == reg) {
4737 goto xor_zero;
4738 } else {
4739 gen_op_mov_TN_reg(ot, 1, rm);
4740 }
4741 gen_op(s, op, ot, reg);
4742 break;
4743 case 2: /* OP A, Iv */
4744 val = insn_get(s, ot);
4745 gen_op_movl_T1_im(val);
4746 gen_op(s, op, ot, OR_EAX);
4747 break;
4748 }
4749 }
4750 break;
4751
4752 case 0x82:
4753 if (CODE64(s))
4754 goto illegal_op;
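         /* fall through */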
4755 case 0x80: /* GRP1 */
4756 case 0x81:
4757 case 0x83:
4758 {
4759 int val;
4760
4761 if ((b & 1) == 0)
4762 ot = OT_BYTE;
4763 else
4764 ot = dflag + OT_WORD;
4765
4766 modrm = ldub_code(s->pc++);
4767 mod = (modrm >> 6) & 3;
4768 rm = (modrm & 7) | REX_B(s);
4769 op = (modrm >> 3) & 7;
4770
4771 if (mod != 3) {
4772 if (b == 0x83)
4773 s->rip_offset = 1;
4774 else
4775 s->rip_offset = insn_const_size(ot);
4776 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4777 opreg = OR_TMP0;
4778 } else {
4779 opreg = rm;
4780 }
4781
4782 switch(b) {
4783 default:
4784 case 0x80:
4785 case 0x81:
4786 case 0x82:
4787 val = insn_get(s, ot);
4788 break;
4789 case 0x83:
4790 val = (int8_t)insn_get(s, OT_BYTE);
4791 break;
4792 }
4793 gen_op_movl_T1_im(val);
4794 gen_op(s, op, ot, opreg);
4795 }
4796 break;
4797
4798 /**************************/
4799 /* inc, dec, and other misc arith */
4800 case 0x40 ... 0x47: /* inc Gv */
4801 ot = dflag ? OT_LONG : OT_WORD;
4802 gen_inc(s, ot, OR_EAX + (b & 7), 1);
4803 break;
4804 case 0x48 ... 0x4f: /* dec Gv */
4805 ot = dflag ? OT_LONG : OT_WORD;
4806 gen_inc(s, ot, OR_EAX + (b & 7), -1);
4807 break;
4808 case 0xf6: /* GRP3 */
4809 case 0xf7:
4810 if ((b & 1) == 0)
4811 ot = OT_BYTE;
4812 else
4813 ot = dflag + OT_WORD;
4814
4815 modrm = ldub_code(s->pc++);
4816 mod = (modrm >> 6) & 3;
4817 rm = (modrm & 7) | REX_B(s);
4818 op = (modrm >> 3) & 7;
4819 if (mod != 3) {
4820 if (op == 0)
4821 s->rip_offset = insn_const_size(ot);
4822 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4823 gen_op_ld_T0_A0(ot + s->mem_index);
4824 } else {
4825 gen_op_mov_TN_reg(ot, 0, rm);
4826 }
4827
4828 switch(op) {
4829 case 0: /* test */
4830 val = insn_get(s, ot);
4831 gen_op_movl_T1_im(val);
4832 gen_op_testl_T0_T1_cc();
4833 s->cc_op = CC_OP_LOGICB + ot;
4834 break;
4835 case 2: /* not */
4836 tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
4837 if (mod != 3) {
4838 gen_op_st_T0_A0(ot + s->mem_index);
4839 } else {
4840 gen_op_mov_reg_T0(ot, rm);
4841 }
4842 break;
4843 case 3: /* neg */
4844 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
4845 if (mod != 3) {
4846 gen_op_st_T0_A0(ot + s->mem_index);
4847 } else {
4848 gen_op_mov_reg_T0(ot, rm);
4849 }
4850 gen_op_update_neg_cc();
4851 s->cc_op = CC_OP_SUBB + ot;
4852 break;
4853 case 4: /* mul */
4854 switch(ot) {
4855 case OT_BYTE:
4856 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
4857 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4858 tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
4859 /* XXX: use 32 bit mul which could be faster */
4860 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4861 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4862 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4863 tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
4864 s->cc_op = CC_OP_MULB;
4865 break;
4866 case OT_WORD:
4867 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
4868 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
4869 tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
4870 /* XXX: use 32 bit mul which could be faster */
4871 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4872 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4873 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4874 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
4875 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4876 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4877 s->cc_op = CC_OP_MULW;
4878 break;
4879 default:
4880 case OT_LONG:
4881#ifdef TARGET_X86_64
4882 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4883 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
4884 tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
4885 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4886 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4887 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4888 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
4889 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4890 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4891#else
4892 {
4893 TCGv t0, t1;
4894 t0 = tcg_temp_new(TCG_TYPE_I64);
4895 t1 = tcg_temp_new(TCG_TYPE_I64);
4896 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4897 tcg_gen_extu_i32_i64(t0, cpu_T[0]);
4898 tcg_gen_extu_i32_i64(t1, cpu_T[1]);
4899 tcg_gen_mul_i64(t0, t0, t1);
4900 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4901 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4902 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4903 tcg_gen_shri_i64(t0, t0, 32);
4904 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4905 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4906 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4907 }
4908#endif
4909 s->cc_op = CC_OP_MULL;
4910 break;
4911#ifdef TARGET_X86_64
4912 case OT_QUAD:
4913 tcg_gen_helper_0_1(helper_mulq_EAX_T0, cpu_T[0]);
4914 s->cc_op = CC_OP_MULQ;
4915 break;
4916#endif
4917 }
4918 break;
4919 case 5: /* imul */
4920 switch(ot) {
4921 case OT_BYTE:
4922 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
4923 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
4924 tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
4925 /* XXX: use 32 bit mul which could be faster */
4926 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4927 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4928 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4929 tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
4930 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4931 s->cc_op = CC_OP_MULB;
4932 break;
4933 case OT_WORD:
4934 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
4935 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
4936 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
4937 /* XXX: use 32 bit mul which could be faster */
4938 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4939 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4940 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4941 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
4942 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4943 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
4944 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4945 s->cc_op = CC_OP_MULW;
4946 break;
4947 default:
4948 case OT_LONG:
4949#ifdef TARGET_X86_64
4950 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4951 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
4952 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
4953 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4954 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4955 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4956 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
4957 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4958 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
4959 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4960#else
4961 {
4962 TCGv t0, t1;
4963 t0 = tcg_temp_new(TCG_TYPE_I64);
4964 t1 = tcg_temp_new(TCG_TYPE_I64);
4965 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4966 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
4967 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
4968 tcg_gen_mul_i64(t0, t0, t1);
4969 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4970 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4971 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4972 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
4973 tcg_gen_shri_i64(t0, t0, 32);
4974 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4975 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4976 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
4977 }
4978#endif
4979 s->cc_op = CC_OP_MULL;
4980 break;
4981#ifdef TARGET_X86_64
4982 case OT_QUAD:
4983 tcg_gen_helper_0_1(helper_imulq_EAX_T0, cpu_T[0]);
4984 s->cc_op = CC_OP_MULQ;
4985 break;
4986#endif
4987 }
4988 break;
4989 case 6: /* div */
4990 switch(ot) {
4991 case OT_BYTE:
4992 gen_jmp_im(pc_start - s->cs_base);
4993 tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
4994 break;
4995 case OT_WORD:
4996 gen_jmp_im(pc_start - s->cs_base);
4997 tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
4998 break;
4999 default:
5000 case OT_LONG:
5001 gen_jmp_im(pc_start - s->cs_base);
5002 tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
5003 break;
5004#ifdef TARGET_X86_64
5005 case OT_QUAD:
5006 gen_jmp_im(pc_start - s->cs_base);
5007 tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
5008 break;
5009#endif
5010 }
5011 break;
5012 case 7: /* idiv */
5013 switch(ot) {
5014 case OT_BYTE:
5015 gen_jmp_im(pc_start - s->cs_base);
5016 tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
5017 break;
5018 case OT_WORD:
5019 gen_jmp_im(pc_start - s->cs_base);
5020 tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
5021 break;
5022 default:
5023 case OT_LONG:
5024 gen_jmp_im(pc_start - s->cs_base);
5025 tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
5026 break;
5027#ifdef TARGET_X86_64
5028 case OT_QUAD:
5029 gen_jmp_im(pc_start - s->cs_base);
5030 tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
5031 break;
5032#endif
5033 }
5034 break;
5035 default:
5036 goto illegal_op;
5037 }
5038 break;
5039
5040 case 0xfe: /* GRP4 */
5041 case 0xff: /* GRP5 */
5042 if ((b & 1) == 0)
5043 ot = OT_BYTE;
5044 else
5045 ot = dflag + OT_WORD;
5046
5047 modrm = ldub_code(s->pc++);
5048 mod = (modrm >> 6) & 3;
5049 rm = (modrm & 7) | REX_B(s);
5050 op = (modrm >> 3) & 7;
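         /* GRP5 /r assignments: /0 inc, /1 dec, /2 call, /3 lcall, /4 jmp,
            /5 ljmp, /6 push (0xff only; GRP4 0xfe has just /0 and /1). */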
5051 if (op >= 2 && b == 0xfe) {
5052 goto illegal_op;
5053 }
5054 if (CODE64(s)) {
5055 if (op == 2 || op == 4) {
5056 /* operand size for jumps is 64 bit */
5057 ot = OT_QUAD;
5058 } else if (op == 3 || op == 5) {
5059                 /* for far calls/jumps, the operand is 16 or 32 bit, even
5060                    in long mode */
5061 ot = dflag ? OT_LONG : OT_WORD;
5062 } else if (op == 6) {
5063 /* default push size is 64 bit */
5064 ot = dflag ? OT_QUAD : OT_WORD;
5065 }
5066 }
5067 if (mod != 3) {
5068 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5069 if (op >= 2 && op != 3 && op != 5)
5070 gen_op_ld_T0_A0(ot + s->mem_index);
5071 } else {
5072 gen_op_mov_TN_reg(ot, 0, rm);
5073 }
5074
5075 switch(op) {
5076 case 0: /* inc Ev */
5077 if (mod != 3)
5078 opreg = OR_TMP0;
5079 else
5080 opreg = rm;
5081 gen_inc(s, ot, opreg, 1);
5082 break;
5083 case 1: /* dec Ev */
5084 if (mod != 3)
5085 opreg = OR_TMP0;
5086 else
5087 opreg = rm;
5088 gen_inc(s, ot, opreg, -1);
5089 break;
5090 case 2: /* call Ev */
5091 /* XXX: optimize if memory (no 'and' is necessary) */
5092#ifdef VBOX_WITH_CALL_RECORD
5093 if (s->record_call)
5094 gen_op_record_call();
5095#endif
5096 if (s->dflag == 0)
5097 gen_op_andl_T0_ffff();
5098 next_eip = s->pc - s->cs_base;
5099 gen_movtl_T1_im(next_eip);
5100 gen_push_T1(s);
5101 gen_op_jmp_T0();
5102 gen_eob(s);
5103 break;
5104 case 3: /* lcall Ev */
5105 gen_op_ld_T1_A0(ot + s->mem_index);
5106 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5107 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5108 do_lcall:
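             /* in protected mode (and outside vm86) the helper performs the
                descriptor and privilege checks; real and vm86 mode use the
                simple real-mode far-call semantics. */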
5109 if (s->pe && !s->vm86) {
5110 if (s->cc_op != CC_OP_DYNAMIC)
5111 gen_op_set_cc_op(s->cc_op);
5112 gen_jmp_im(pc_start - s->cs_base);
5113 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5114 tcg_gen_helper_0_4(helper_lcall_protected,
5115 cpu_tmp2_i32, cpu_T[1],
5116 tcg_const_i32(dflag),
5117 tcg_const_i32(s->pc - pc_start));
5118 } else {
5119 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5120 tcg_gen_helper_0_4(helper_lcall_real,
5121 cpu_tmp2_i32, cpu_T[1],
5122 tcg_const_i32(dflag),
5123 tcg_const_i32(s->pc - s->cs_base));
5124 }
5125 gen_eob(s);
5126 break;
5127 case 4: /* jmp Ev */
5128 if (s->dflag == 0)
5129 gen_op_andl_T0_ffff();
5130 gen_op_jmp_T0();
5131 gen_eob(s);
5132 break;
5133 case 5: /* ljmp Ev */
5134 gen_op_ld_T1_A0(ot + s->mem_index);
5135 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5136 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5137 do_ljmp:
5138 if (s->pe && !s->vm86) {
5139 if (s->cc_op != CC_OP_DYNAMIC)
5140 gen_op_set_cc_op(s->cc_op);
5141 gen_jmp_im(pc_start - s->cs_base);
5142 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5143 tcg_gen_helper_0_3(helper_ljmp_protected,
5144 cpu_tmp2_i32,
5145 cpu_T[1],
5146 tcg_const_i32(s->pc - pc_start));
5147 } else {
5148 gen_op_movl_seg_T0_vm(R_CS);
5149 gen_op_movl_T0_T1();
5150 gen_op_jmp_T0();
5151 }
5152 gen_eob(s);
5153 break;
5154 case 6: /* push Ev */
5155 gen_push_T0(s);
5156 break;
5157 default:
5158 goto illegal_op;
5159 }
5160 break;
5161
5162 case 0x84: /* test Ev, Gv */
5163 case 0x85:
5164 if ((b & 1) == 0)
5165 ot = OT_BYTE;
5166 else
5167 ot = dflag + OT_WORD;
5168
5169 modrm = ldub_code(s->pc++);
5170 mod = (modrm >> 6) & 3;
5171 rm = (modrm & 7) | REX_B(s);
5172 reg = ((modrm >> 3) & 7) | rex_r;
5173
5174 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5175 gen_op_mov_TN_reg(ot, 1, reg);
5176 gen_op_testl_T0_T1_cc();
5177 s->cc_op = CC_OP_LOGICB + ot;
5178 break;
5179
5180 case 0xa8: /* test eAX, Iv */
5181 case 0xa9:
5182 if ((b & 1) == 0)
5183 ot = OT_BYTE;
5184 else
5185 ot = dflag + OT_WORD;
5186 val = insn_get(s, ot);
5187
5188 gen_op_mov_TN_reg(ot, 0, OR_EAX);
5189 gen_op_movl_T1_im(val);
5190 gen_op_testl_T0_T1_cc();
5191 s->cc_op = CC_OP_LOGICB + ot;
5192 break;
5193
5194 case 0x98: /* CWDE/CBW */
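         /* sign-extends the accumulator in place: CBW AL->AX,
            CWDE AX->EAX, and with REX.W (dflag == 2) CDQE EAX->RAX. */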
5195#ifdef TARGET_X86_64
5196 if (dflag == 2) {
5197 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5198 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5199 gen_op_mov_reg_T0(OT_QUAD, R_EAX);
5200 } else
5201#endif
5202 if (dflag == 1) {
5203 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5204 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5205 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5206 } else {
5207 gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
5208 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5209 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5210 }
5211 break;
5212 case 0x99: /* CDQ/CWD */
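         /* sign-extends the accumulator into the data register:
            CWD AX->DX:AX, CDQ EAX->EDX:EAX, and with REX.W CQO
            RAX->RDX:RAX. */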
5213#ifdef TARGET_X86_64
5214 if (dflag == 2) {
5215 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5216 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
5217 gen_op_mov_reg_T0(OT_QUAD, R_EDX);
5218 } else
5219#endif
5220 if (dflag == 1) {
5221 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5222 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5223 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
5224 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5225 } else {
5226 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5227 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5228 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
5229 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5230 }
5231 break;
5232 case 0x1af: /* imul Gv, Ev */
5233 case 0x69: /* imul Gv, Ev, I */
5234 case 0x6b:
5235 ot = dflag + OT_WORD;
5236 modrm = ldub_code(s->pc++);
5237 reg = ((modrm >> 3) & 7) | rex_r;
5238 if (b == 0x69)
5239 s->rip_offset = insn_const_size(ot);
5240 else if (b == 0x6b)
5241 s->rip_offset = 1;
5242 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5243 if (b == 0x69) {
5244 val = insn_get(s, ot);
5245 gen_op_movl_T1_im(val);
5246 } else if (b == 0x6b) {
5247 val = (int8_t)insn_get(s, OT_BYTE);
5248 gen_op_movl_T1_im(val);
5249 } else {
5250 gen_op_mov_TN_reg(ot, 1, reg);
5251 }
5252
5253#ifdef TARGET_X86_64
5254 if (ot == OT_QUAD) {
5255 tcg_gen_helper_1_2(helper_imulq_T0_T1, cpu_T[0], cpu_T[0], cpu_T[1]);
5256 } else
5257#endif
5258 if (ot == OT_LONG) {
5259#ifdef TARGET_X86_64
5260 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5261 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
5262 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5263 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5264 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
5265 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5266#else
5267 {
5268 TCGv t0, t1;
5269 t0 = tcg_temp_new(TCG_TYPE_I64);
5270 t1 = tcg_temp_new(TCG_TYPE_I64);
5271 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
5272 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
5273 tcg_gen_mul_i64(t0, t0, t1);
5274 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5275 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5276 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
5277 tcg_gen_shri_i64(t0, t0, 32);
5278 tcg_gen_trunc_i64_i32(cpu_T[1], t0);
5279 tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
5280 }
5281#endif
5282 } else {
5283 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5284 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5285 /* XXX: use 32 bit mul which could be faster */
5286 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5287 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5288 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5289 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5290 }
5291 gen_op_mov_reg_T0(ot, reg);
5292 s->cc_op = CC_OP_MULB + ot;
5293 break;
5294 case 0x1c0:
5295 case 0x1c1: /* xadd Ev, Gv */
5296 if ((b & 1) == 0)
5297 ot = OT_BYTE;
5298 else
5299 ot = dflag + OT_WORD;
5300 modrm = ldub_code(s->pc++);
5301 reg = ((modrm >> 3) & 7) | rex_r;
5302 mod = (modrm >> 6) & 3;
5303 if (mod == 3) {
5304 rm = (modrm & 7) | REX_B(s);
5305 gen_op_mov_TN_reg(ot, 0, reg);
5306 gen_op_mov_TN_reg(ot, 1, rm);
5307 gen_op_addl_T0_T1();
5308 gen_op_mov_reg_T1(ot, reg);
5309 gen_op_mov_reg_T0(ot, rm);
5310 } else {
5311 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5312 gen_op_mov_TN_reg(ot, 0, reg);
5313 gen_op_ld_T1_A0(ot + s->mem_index);
5314 gen_op_addl_T0_T1();
5315 gen_op_st_T0_A0(ot + s->mem_index);
5316 gen_op_mov_reg_T1(ot, reg);
5317 }
5318 gen_op_update2_cc();
5319 s->cc_op = CC_OP_ADDB + ot;
5320 break;
5321 case 0x1b0:
5322 case 0x1b1: /* cmpxchg Ev, Gv */
5323 {
5324 int label1, label2;
5325 TCGv t0, t1, t2, a0;
5326
5327 if ((b & 1) == 0)
5328 ot = OT_BYTE;
5329 else
5330 ot = dflag + OT_WORD;
5331 modrm = ldub_code(s->pc++);
5332 reg = ((modrm >> 3) & 7) | rex_r;
5333 mod = (modrm >> 6) & 3;
5334 t0 = tcg_temp_local_new(TCG_TYPE_TL);
5335 t1 = tcg_temp_local_new(TCG_TYPE_TL);
5336 t2 = tcg_temp_local_new(TCG_TYPE_TL);
5337 a0 = tcg_temp_local_new(TCG_TYPE_TL);
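             /* cmpxchg: t0 = destination, t1 = source register,
                t2 = EAX - t0. On a match the destination receives t1,
                otherwise EAX receives t0; the memory form always performs
                the store (rewriting the old value on a mismatch), matching
                the unconditional write of real hardware. */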
5338 gen_op_mov_v_reg(ot, t1, reg);
5339 if (mod == 3) {
5340 rm = (modrm & 7) | REX_B(s);
5341 gen_op_mov_v_reg(ot, t0, rm);
5342 } else {
5343 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5344 tcg_gen_mov_tl(a0, cpu_A0);
5345 gen_op_ld_v(ot + s->mem_index, t0, a0);
5346 rm = 0; /* avoid warning */
5347 }
5348 label1 = gen_new_label();
5349 tcg_gen_ld_tl(t2, cpu_env, offsetof(CPUState, regs[R_EAX]));
5350 tcg_gen_sub_tl(t2, t2, t0);
5351 gen_extu(ot, t2);
5352 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
5353 if (mod == 3) {
5354 label2 = gen_new_label();
5355 gen_op_mov_reg_v(ot, R_EAX, t0);
5356 tcg_gen_br(label2);
5357 gen_set_label(label1);
5358 gen_op_mov_reg_v(ot, rm, t1);
5359 gen_set_label(label2);
5360 } else {
5361 tcg_gen_mov_tl(t1, t0);
5362 gen_op_mov_reg_v(ot, R_EAX, t0);
5363 gen_set_label(label1);
5364 /* always store */
5365 gen_op_st_v(ot + s->mem_index, t1, a0);
5366 }
5367 tcg_gen_mov_tl(cpu_cc_src, t0);
5368 tcg_gen_mov_tl(cpu_cc_dst, t2);
5369 s->cc_op = CC_OP_SUBB + ot;
5370 tcg_temp_free(t0);
5371 tcg_temp_free(t1);
5372 tcg_temp_free(t2);
5373 tcg_temp_free(a0);
5374 }
5375 break;
5376 case 0x1c7: /* cmpxchg8b */
5377 modrm = ldub_code(s->pc++);
5378 mod = (modrm >> 6) & 3;
5379 if ((mod == 3) || ((modrm & 0x38) != 0x8))
5380 goto illegal_op;
5381#ifdef TARGET_X86_64
5382 if (dflag == 2) {
5383 if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
5384 goto illegal_op;
5385 gen_jmp_im(pc_start - s->cs_base);
5386 if (s->cc_op != CC_OP_DYNAMIC)
5387 gen_op_set_cc_op(s->cc_op);
5388 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5389 tcg_gen_helper_0_1(helper_cmpxchg16b, cpu_A0);
5390 } else
5391#endif
5392 {
5393 if (!(s->cpuid_features & CPUID_CX8))
5394 goto illegal_op;
5395 gen_jmp_im(pc_start - s->cs_base);
5396 if (s->cc_op != CC_OP_DYNAMIC)
5397 gen_op_set_cc_op(s->cc_op);
5398 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5399 tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
5400 }
5401 s->cc_op = CC_OP_EFLAGS;
5402 break;
5403
5404 /**************************/
5405 /* push/pop */
5406 case 0x50 ... 0x57: /* push */
5407 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
5408 gen_push_T0(s);
5409 break;
5410 case 0x58 ... 0x5f: /* pop */
5411 if (CODE64(s)) {
5412 ot = dflag ? OT_QUAD : OT_WORD;
5413 } else {
5414 ot = dflag + OT_WORD;
5415 }
5416 gen_pop_T0(s);
5417 /* NOTE: order is important for pop %sp */
5418 gen_pop_update(s);
5419 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
5420 break;
5421 case 0x60: /* pusha */
5422 if (CODE64(s))
5423 goto illegal_op;
5424 gen_pusha(s);
5425 break;
5426 case 0x61: /* popa */
5427 if (CODE64(s))
5428 goto illegal_op;
5429 gen_popa(s);
5430 break;
5431 case 0x68: /* push Iv */
5432 case 0x6a:
5433 if (CODE64(s)) {
5434 ot = dflag ? OT_QUAD : OT_WORD;
5435 } else {
5436 ot = dflag + OT_WORD;
5437 }
5438 if (b == 0x68)
5439 val = insn_get(s, ot);
5440 else
5441 val = (int8_t)insn_get(s, OT_BYTE);
5442 gen_op_movl_T0_im(val);
5443 gen_push_T0(s);
5444 break;
5445 case 0x8f: /* pop Ev */
5446 if (CODE64(s)) {
5447 ot = dflag ? OT_QUAD : OT_WORD;
5448 } else {
5449 ot = dflag + OT_WORD;
5450 }
5451 modrm = ldub_code(s->pc++);
5452 mod = (modrm >> 6) & 3;
5453 gen_pop_T0(s);
5454 if (mod == 3) {
5455 /* NOTE: order is important for pop %sp */
5456 gen_pop_update(s);
5457 rm = (modrm & 7) | REX_B(s);
5458 gen_op_mov_reg_T0(ot, rm);
5459 } else {
5460 /* NOTE: order is important too for MMU exceptions */
5461 s->popl_esp_hack = 1 << ot;
5462 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5463 s->popl_esp_hack = 0;
5464 gen_pop_update(s);
5465 }
5466 break;
5467 case 0xc8: /* enter */
5468 {
5469 int level;
5470 val = lduw_code(s->pc);
5471 s->pc += 2;
5472 level = ldub_code(s->pc++);
5473 gen_enter(s, val, level);
5474 }
5475 break;
5476 case 0xc9: /* leave */
5477 /* XXX: exception not precise (ESP is updated before potential exception) */
5478 if (CODE64(s)) {
5479 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
5480 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
5481 } else if (s->ss32) {
5482 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
5483 gen_op_mov_reg_T0(OT_LONG, R_ESP);
5484 } else {
5485 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
5486 gen_op_mov_reg_T0(OT_WORD, R_ESP);
5487 }
5488 gen_pop_T0(s);
5489 if (CODE64(s)) {
5490 ot = dflag ? OT_QUAD : OT_WORD;
5491 } else {
5492 ot = dflag + OT_WORD;
5493 }
5494 gen_op_mov_reg_T0(ot, R_EBP);
5495 gen_pop_update(s);
5496 break;
5497 case 0x06: /* push es */
5498 case 0x0e: /* push cs */
5499 case 0x16: /* push ss */
5500 case 0x1e: /* push ds */
5501 if (CODE64(s))
5502 goto illegal_op;
5503 gen_op_movl_T0_seg(b >> 3);
5504 gen_push_T0(s);
5505 break;
5506 case 0x1a0: /* push fs */
5507 case 0x1a8: /* push gs */
5508 gen_op_movl_T0_seg((b >> 3) & 7);
5509 gen_push_T0(s);
5510 break;
5511 case 0x07: /* pop es */
5512 case 0x17: /* pop ss */
5513 case 0x1f: /* pop ds */
5514 if (CODE64(s))
5515 goto illegal_op;
5516 reg = b >> 3;
5517 gen_pop_T0(s);
5518 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5519 gen_pop_update(s);
5520 if (reg == R_SS) {
5521 /* if reg == SS, inhibit interrupts/trace. */
5522 /* If several instructions disable interrupts, only the
5523 _first_ does it */
5524 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5525 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5526 s->tf = 0;
5527 }
5528 if (s->is_jmp) {
5529 gen_jmp_im(s->pc - s->cs_base);
5530 gen_eob(s);
5531 }
5532 break;
5533 case 0x1a1: /* pop fs */
5534 case 0x1a9: /* pop gs */
5535 gen_pop_T0(s);
5536 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
5537 gen_pop_update(s);
5538 if (s->is_jmp) {
5539 gen_jmp_im(s->pc - s->cs_base);
5540 gen_eob(s);
5541 }
5542 break;
5543
5544 /**************************/
5545 /* mov */
5546 case 0x88:
5547 case 0x89: /* mov Gv, Ev */
5548 if ((b & 1) == 0)
5549 ot = OT_BYTE;
5550 else
5551 ot = dflag + OT_WORD;
5552 modrm = ldub_code(s->pc++);
5553 reg = ((modrm >> 3) & 7) | rex_r;
5554
5555 /* generate a generic store */
5556 gen_ldst_modrm(s, modrm, ot, reg, 1);
5557 break;
5558 case 0xc6:
5559 case 0xc7: /* mov Ev, Iv */
5560 if ((b & 1) == 0)
5561 ot = OT_BYTE;
5562 else
5563 ot = dflag + OT_WORD;
5564 modrm = ldub_code(s->pc++);
5565 mod = (modrm >> 6) & 3;
5566 if (mod != 3) {
5567 s->rip_offset = insn_const_size(ot);
5568 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5569 }
5570 val = insn_get(s, ot);
5571 gen_op_movl_T0_im(val);
5572 if (mod != 3)
5573 gen_op_st_T0_A0(ot + s->mem_index);
5574 else
5575 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
5576 break;
5577 case 0x8a:
5578 case 0x8b: /* mov Ev, Gv */
5579#ifdef VBOX /* dtrace hot fix */
5580 if (prefixes & PREFIX_LOCK)
5581 goto illegal_op;
5582#endif
5583 if ((b & 1) == 0)
5584 ot = OT_BYTE;
5585 else
5586 ot = OT_WORD + dflag;
5587 modrm = ldub_code(s->pc++);
5588 reg = ((modrm >> 3) & 7) | rex_r;
5589
5590 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5591 gen_op_mov_reg_T0(ot, reg);
5592 break;
5593 case 0x8e: /* mov seg, Gv */
5594 modrm = ldub_code(s->pc++);
5595 reg = (modrm >> 3) & 7;
5596 if (reg >= 6 || reg == R_CS)
5597 goto illegal_op;
5598 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5599 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5600 if (reg == R_SS) {
5601 /* if reg == SS, inhibit interrupts/trace */
5602 /* If several instructions disable interrupts, only the
5603 _first_ does it */
5604 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5605 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5606 s->tf = 0;
5607 }
5608 if (s->is_jmp) {
5609 gen_jmp_im(s->pc - s->cs_base);
5610 gen_eob(s);
5611 }
5612 break;
5613 case 0x8c: /* mov Gv, seg */
5614 modrm = ldub_code(s->pc++);
5615 reg = (modrm >> 3) & 7;
5616 mod = (modrm >> 6) & 3;
5617 if (reg >= 6)
5618 goto illegal_op;
5619 gen_op_movl_T0_seg(reg);
5620 if (mod == 3)
5621 ot = OT_WORD + dflag;
5622 else
5623 ot = OT_WORD;
5624 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5625 break;
5626
5627     case 0x1b6: /* movzbS Gv, Eb */
5628     case 0x1b7: /* movzwS Gv, Ew */
5629     case 0x1be: /* movsbS Gv, Eb */
5630     case 0x1bf: /* movswS Gv, Ew */
5631 {
5632 int d_ot;
5633 /* d_ot is the size of destination */
5634 d_ot = dflag + OT_WORD;
5635 /* ot is the size of source */
5636 ot = (b & 1) + OT_BYTE;
5637 modrm = ldub_code(s->pc++);
5638 reg = ((modrm >> 3) & 7) | rex_r;
5639 mod = (modrm >> 6) & 3;
5640 rm = (modrm & 7) | REX_B(s);
5641
5642 if (mod == 3) {
5643 gen_op_mov_TN_reg(ot, 0, rm);
5644 switch(ot | (b & 8)) {
5645 case OT_BYTE:
5646 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
5647 break;
5648 case OT_BYTE | 8:
5649 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5650 break;
5651 case OT_WORD:
5652 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
5653 break;
5654 default:
5655 case OT_WORD | 8:
5656 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5657 break;
5658 }
5659 gen_op_mov_reg_T0(d_ot, reg);
5660 } else {
5661 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5662 if (b & 8) {
5663 gen_op_lds_T0_A0(ot + s->mem_index);
5664 } else {
5665 gen_op_ldu_T0_A0(ot + s->mem_index);
5666 }
5667 gen_op_mov_reg_T0(d_ot, reg);
5668 }
5669 }
5670 break;
5671
5672 case 0x8d: /* lea */
5673 ot = dflag + OT_WORD;
5674 modrm = ldub_code(s->pc++);
5675 mod = (modrm >> 6) & 3;
5676 if (mod == 3)
5677 goto illegal_op;
5678 reg = ((modrm >> 3) & 7) | rex_r;
5679 /* we must ensure that no segment is added */
5680 s->override = -1;
5681 val = s->addseg;
5682 s->addseg = 0;
5683 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5684 s->addseg = val;
5685 gen_op_mov_reg_A0(ot - OT_WORD, reg);
5686 break;
5687
5688 case 0xa0: /* mov EAX, Ov */
5689 case 0xa1:
5690 case 0xa2: /* mov Ov, EAX */
5691 case 0xa3:
5692 {
5693 target_ulong offset_addr;
5694
5695 if ((b & 1) == 0)
5696 ot = OT_BYTE;
5697 else
5698 ot = dflag + OT_WORD;
5699#ifdef TARGET_X86_64
5700 if (s->aflag == 2) {
5701 offset_addr = ldq_code(s->pc);
5702 s->pc += 8;
5703 gen_op_movq_A0_im(offset_addr);
5704 } else
5705#endif
5706 {
5707 if (s->aflag) {
5708 offset_addr = insn_get(s, OT_LONG);
5709 } else {
5710 offset_addr = insn_get(s, OT_WORD);
5711 }
5712 gen_op_movl_A0_im(offset_addr);
5713 }
5714 gen_add_A0_ds_seg(s);
5715 if ((b & 2) == 0) {
5716 gen_op_ld_T0_A0(ot + s->mem_index);
5717 gen_op_mov_reg_T0(ot, R_EAX);
5718 } else {
5719 gen_op_mov_TN_reg(ot, 0, R_EAX);
5720 gen_op_st_T0_A0(ot + s->mem_index);
5721 }
5722 }
5723 break;
5724 case 0xd7: /* xlat */
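         /* xlat: AL = byte at seg:(rBX + zero-extended AL), DS by
            default (segment overrides are honoured below). */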
5725#ifdef TARGET_X86_64
5726 if (s->aflag == 2) {
5727 gen_op_movq_A0_reg(R_EBX);
5728 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5729 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5730 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5731 } else
5732#endif
5733 {
5734 gen_op_movl_A0_reg(R_EBX);
5735 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5736 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5737 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5738 if (s->aflag == 0)
5739 gen_op_andl_A0_ffff();
5740 else
5741 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
5742 }
5743 gen_add_A0_ds_seg(s);
5744 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
5745 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
5746 break;
5747 case 0xb0 ... 0xb7: /* mov R, Ib */
5748 val = insn_get(s, OT_BYTE);
5749 gen_op_movl_T0_im(val);
5750 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
5751 break;
5752 case 0xb8 ... 0xbf: /* mov R, Iv */
5753#ifdef TARGET_X86_64
5754 if (dflag == 2) {
5755 uint64_t tmp;
5756 /* 64 bit case */
5757 tmp = ldq_code(s->pc);
5758 s->pc += 8;
5759 reg = (b & 7) | REX_B(s);
5760 gen_movtl_T0_im(tmp);
5761 gen_op_mov_reg_T0(OT_QUAD, reg);
5762 } else
5763#endif
5764 {
5765 ot = dflag ? OT_LONG : OT_WORD;
5766 val = insn_get(s, ot);
5767 reg = (b & 7) | REX_B(s);
5768 gen_op_movl_T0_im(val);
5769 gen_op_mov_reg_T0(ot, reg);
5770 }
5771 break;
5772
5773 case 0x91 ... 0x97: /* xchg R, EAX */
5774 ot = dflag + OT_WORD;
5775 reg = (b & 7) | REX_B(s);
5776 rm = R_EAX;
5777 goto do_xchg_reg;
5778 case 0x86:
5779 case 0x87: /* xchg Ev, Gv */
5780 if ((b & 1) == 0)
5781 ot = OT_BYTE;
5782 else
5783 ot = dflag + OT_WORD;
5784 modrm = ldub_code(s->pc++);
5785 reg = ((modrm >> 3) & 7) | rex_r;
5786 mod = (modrm >> 6) & 3;
5787 if (mod == 3) {
5788 rm = (modrm & 7) | REX_B(s);
5789 do_xchg_reg:
5790 gen_op_mov_TN_reg(ot, 0, reg);
5791 gen_op_mov_TN_reg(ot, 1, rm);
5792 gen_op_mov_reg_T0(ot, rm);
5793 gen_op_mov_reg_T1(ot, reg);
5794 } else {
5795 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5796 gen_op_mov_TN_reg(ot, 0, reg);
5797 /* for xchg, lock is implicit */
5798 if (!(prefixes & PREFIX_LOCK))
5799 tcg_gen_helper_0_0(helper_lock);
5800 gen_op_ld_T1_A0(ot + s->mem_index);
5801 gen_op_st_T0_A0(ot + s->mem_index);
5802 if (!(prefixes & PREFIX_LOCK))
5803 tcg_gen_helper_0_0(helper_unlock);
5804 gen_op_mov_reg_T1(ot, reg);
5805 }
5806 break;
5807 case 0xc4: /* les Gv */
5808 if (CODE64(s))
5809 goto illegal_op;
5810 op = R_ES;
5811 goto do_lxx;
5812 case 0xc5: /* lds Gv */
5813 if (CODE64(s))
5814 goto illegal_op;
5815 op = R_DS;
5816 goto do_lxx;
5817 case 0x1b2: /* lss Gv */
5818 op = R_SS;
5819 goto do_lxx;
5820 case 0x1b4: /* lfs Gv */
5821 op = R_FS;
5822 goto do_lxx;
5823 case 0x1b5: /* lgs Gv */
5824 op = R_GS;
5825 do_lxx:
5826 ot = dflag ? OT_LONG : OT_WORD;
5827 modrm = ldub_code(s->pc++);
5828 reg = ((modrm >> 3) & 7) | rex_r;
5829 mod = (modrm >> 6) & 3;
5830 if (mod == 3)
5831 goto illegal_op;
5832 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5833 gen_op_ld_T1_A0(ot + s->mem_index);
5834 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5835 /* load the segment first to handle exceptions properly */
5836 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5837 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
5838 /* then put the data */
5839 gen_op_mov_reg_T1(ot, reg);
5840 if (s->is_jmp) {
5841 gen_jmp_im(s->pc - s->cs_base);
5842 gen_eob(s);
5843 }
5844 break;
5845
5846 /************************/
5847 /* shifts */
5848 case 0xc0:
5849 case 0xc1:
5850 /* shift Ev,Ib */
5851 shift = 2;
5852 grp2:
5853 {
5854 if ((b & 1) == 0)
5855 ot = OT_BYTE;
5856 else
5857 ot = dflag + OT_WORD;
5858
5859 modrm = ldub_code(s->pc++);
5860 mod = (modrm >> 6) & 3;
5861 op = (modrm >> 3) & 7;
5862
5863 if (mod != 3) {
5864 if (shift == 2) {
5865 s->rip_offset = 1;
5866 }
5867 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5868 opreg = OR_TMP0;
5869 } else {
5870 opreg = (modrm & 7) | REX_B(s);
5871 }
5872
5873             /* shift by CL (shift == 0) vs. shift by an immediate count */
5874 if (shift == 0) {
5875 gen_shift(s, op, ot, opreg, OR_ECX);
5876 } else {
5877 if (shift == 2) {
5878 shift = ldub_code(s->pc++);
5879 }
5880 gen_shifti(s, op, ot, opreg, shift);
5881 }
5882 }
5883 break;
5884 case 0xd0:
5885 case 0xd1:
5886 /* shift Ev,1 */
5887 shift = 1;
5888 goto grp2;
5889 case 0xd2:
5890 case 0xd3:
5891 /* shift Ev,cl */
5892 shift = 0;
5893 goto grp2;
5894
5895 case 0x1a4: /* shld imm */
5896 op = 0;
5897 shift = 1;
5898 goto do_shiftd;
5899 case 0x1a5: /* shld cl */
5900 op = 0;
5901 shift = 0;
5902 goto do_shiftd;
5903 case 0x1ac: /* shrd imm */
5904 op = 1;
5905 shift = 1;
5906 goto do_shiftd;
5907 case 0x1ad: /* shrd cl */
5908 op = 1;
5909 shift = 0;
5910 do_shiftd:
5911 ot = dflag + OT_WORD;
5912 modrm = ldub_code(s->pc++);
5913 mod = (modrm >> 6) & 3;
5914 rm = (modrm & 7) | REX_B(s);
5915 reg = ((modrm >> 3) & 7) | rex_r;
5916 if (mod != 3) {
5917 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5918 opreg = OR_TMP0;
5919 } else {
5920 opreg = rm;
5921 }
5922 gen_op_mov_TN_reg(ot, 1, reg);
5923
5924 if (shift) {
5925 val = ldub_code(s->pc++);
5926 tcg_gen_movi_tl(cpu_T3, val);
5927 } else {
5928 tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
5929 }
5930 gen_shiftd_rm_T1_T3(s, ot, opreg, op);
5931 break;
5932
5933 /************************/
5934 /* floats */
5935 case 0xd8 ... 0xdf:
5936 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
5937 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
5938 /* XXX: what to do if illegal op ? */
5939 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5940 break;
5941 }
5942 modrm = ldub_code(s->pc++);
5943 mod = (modrm >> 6) & 3;
5944 rm = modrm & 7;
5945 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
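         /* op packs the low opcode bits with ModRM.reg: op = (b & 7) * 8
            + /r; e.g. D8 /0 (fadd m32) -> op 0x00, D9 /5 (fldcw) -> op 0x0d. */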
5946 if (mod != 3) {
5947 /* memory op */
5948 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5949 switch(op) {
5950 case 0x00 ... 0x07: /* fxxxs */
5951 case 0x10 ... 0x17: /* fixxxl */
5952 case 0x20 ... 0x27: /* fxxxl */
5953 case 0x30 ... 0x37: /* fixxx */
5954 {
5955 int op1;
5956 op1 = op & 7;
5957
5958 switch(op >> 4) {
5959 case 0:
5960 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5961 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5962 tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
5963 break;
5964 case 1:
5965 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
5966 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5967 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
5968 break;
5969 case 2:
5970 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
5971 (s->mem_index >> 2) - 1);
5972 tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
5973 break;
5974 case 3:
5975 default:
5976 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
5977 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5978 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
5979 break;
5980 }
5981
5982 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
5983 if (op1 == 3) {
5984 /* fcomp needs pop */
5985 tcg_gen_helper_0_0(helper_fpop);
5986 }
5987 }
5988 break;
5989 case 0x08: /* flds */
5990 case 0x0a: /* fsts */
5991 case 0x0b: /* fstps */
5992 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
5993 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
5994 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
5995 switch(op & 7) {
5996 case 0:
5997 switch(op >> 4) {
5998 case 0:
5999 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6000 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6001 tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
6002 break;
6003 case 1:
6004 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6005 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6006 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
6007 break;
6008 case 2:
6009 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6010 (s->mem_index >> 2) - 1);
6011 tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
6012 break;
6013 case 3:
6014 default:
6015 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
6016 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6017 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
6018 break;
6019 }
6020 break;
6021 case 1:
6022 /* XXX: the corresponding CPUID bit must be tested ! */
6023 switch(op >> 4) {
6024 case 1:
6025 tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
6026 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6027 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6028 break;
6029 case 2:
6030 tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
6031 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6032 (s->mem_index >> 2) - 1);
6033 break;
6034 case 3:
6035 default:
6036 tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
6037 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6038 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6039 break;
6040 }
6041 tcg_gen_helper_0_0(helper_fpop);
6042 break;
6043 default:
6044 switch(op >> 4) {
6045 case 0:
6046 tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
6047 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6048 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6049 break;
6050 case 1:
6051 tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
6052 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6053 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6054 break;
6055 case 2:
6056 tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
6057 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6058 (s->mem_index >> 2) - 1);
6059 break;
6060 case 3:
6061 default:
6062 tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
6063 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6064 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6065 break;
6066 }
6067 if ((op & 7) == 3)
6068 tcg_gen_helper_0_0(helper_fpop);
6069 break;
6070 }
6071 break;
6072 case 0x0c: /* fldenv mem */
6073 if (s->cc_op != CC_OP_DYNAMIC)
6074 gen_op_set_cc_op(s->cc_op);
6075 gen_jmp_im(pc_start - s->cs_base);
6076 tcg_gen_helper_0_2(helper_fldenv,
6077 cpu_A0, tcg_const_i32(s->dflag));
6078 break;
6079 case 0x0d: /* fldcw mem */
6080 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
6081 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6082 tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
6083 break;
6084 case 0x0e: /* fnstenv mem */
6085 if (s->cc_op != CC_OP_DYNAMIC)
6086 gen_op_set_cc_op(s->cc_op);
6087 gen_jmp_im(pc_start - s->cs_base);
6088 tcg_gen_helper_0_2(helper_fstenv,
6089 cpu_A0, tcg_const_i32(s->dflag));
6090 break;
6091 case 0x0f: /* fnstcw mem */
6092 tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
6093 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6094 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6095 break;
6096 case 0x1d: /* fldt mem */
6097 if (s->cc_op != CC_OP_DYNAMIC)
6098 gen_op_set_cc_op(s->cc_op);
6099 gen_jmp_im(pc_start - s->cs_base);
6100 tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
6101 break;
6102 case 0x1f: /* fstpt mem */
6103 if (s->cc_op != CC_OP_DYNAMIC)
6104 gen_op_set_cc_op(s->cc_op);
6105 gen_jmp_im(pc_start - s->cs_base);
6106 tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
6107 tcg_gen_helper_0_0(helper_fpop);
6108 break;
6109 case 0x2c: /* frstor mem */
6110 if (s->cc_op != CC_OP_DYNAMIC)
6111 gen_op_set_cc_op(s->cc_op);
6112 gen_jmp_im(pc_start - s->cs_base);
6113 tcg_gen_helper_0_2(helper_frstor,
6114 cpu_A0, tcg_const_i32(s->dflag));
6115 break;
6116 case 0x2e: /* fnsave mem */
6117 if (s->cc_op != CC_OP_DYNAMIC)
6118 gen_op_set_cc_op(s->cc_op);
6119 gen_jmp_im(pc_start - s->cs_base);
6120 tcg_gen_helper_0_2(helper_fsave,
6121 cpu_A0, tcg_const_i32(s->dflag));
6122 break;
6123 case 0x2f: /* fnstsw mem */
6124 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
6125 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6126 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6127 break;
6128 case 0x3c: /* fbld */
6129 if (s->cc_op != CC_OP_DYNAMIC)
6130 gen_op_set_cc_op(s->cc_op);
6131 gen_jmp_im(pc_start - s->cs_base);
6132 tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
6133 break;
6134 case 0x3e: /* fbstp */
6135 if (s->cc_op != CC_OP_DYNAMIC)
6136 gen_op_set_cc_op(s->cc_op);
6137 gen_jmp_im(pc_start - s->cs_base);
6138 tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
6139 tcg_gen_helper_0_0(helper_fpop);
6140 break;
6141 case 0x3d: /* fildll */
6142 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6143 (s->mem_index >> 2) - 1);
6144 tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
6145 break;
6146 case 0x3f: /* fistpll */
6147 tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
6148 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6149 (s->mem_index >> 2) - 1);
6150 tcg_gen_helper_0_0(helper_fpop);
6151 break;
6152 default:
6153 goto illegal_op;
6154 }
6155 } else {
6156 /* register float ops */
6157 opreg = rm;
6158
6159 switch(op) {
6160 case 0x08: /* fld sti */
6161 tcg_gen_helper_0_0(helper_fpush);
6162 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
6163 break;
6164 case 0x09: /* fxchg sti */
6165 case 0x29: /* fxchg4 sti, undocumented op */
6166 case 0x39: /* fxchg7 sti, undocumented op */
6167 tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
6168 break;
6169 case 0x0a: /* grp d9/2 */
6170 switch(rm) {
6171 case 0: /* fnop */
6172 /* check exceptions (FreeBSD FPU probe) */
6173 if (s->cc_op != CC_OP_DYNAMIC)
6174 gen_op_set_cc_op(s->cc_op);
6175 gen_jmp_im(pc_start - s->cs_base);
6176 tcg_gen_helper_0_0(helper_fwait);
6177 break;
6178 default:
6179 goto illegal_op;
6180 }
6181 break;
6182 case 0x0c: /* grp d9/4 */
6183 switch(rm) {
6184 case 0: /* fchs */
6185 tcg_gen_helper_0_0(helper_fchs_ST0);
6186 break;
6187 case 1: /* fabs */
6188 tcg_gen_helper_0_0(helper_fabs_ST0);
6189 break;
6190 case 4: /* ftst */
6191 tcg_gen_helper_0_0(helper_fldz_FT0);
6192 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6193 break;
6194 case 5: /* fxam */
6195 tcg_gen_helper_0_0(helper_fxam_ST0);
6196 break;
6197 default:
6198 goto illegal_op;
6199 }
6200 break;
6201 case 0x0d: /* grp d9/5 */
6202 {
6203 switch(rm) {
6204 case 0:
6205 tcg_gen_helper_0_0(helper_fpush);
6206 tcg_gen_helper_0_0(helper_fld1_ST0);
6207 break;
6208 case 1:
6209 tcg_gen_helper_0_0(helper_fpush);
6210 tcg_gen_helper_0_0(helper_fldl2t_ST0);
6211 break;
6212 case 2:
6213 tcg_gen_helper_0_0(helper_fpush);
6214 tcg_gen_helper_0_0(helper_fldl2e_ST0);
6215 break;
6216 case 3:
6217 tcg_gen_helper_0_0(helper_fpush);
6218 tcg_gen_helper_0_0(helper_fldpi_ST0);
6219 break;
6220 case 4:
6221 tcg_gen_helper_0_0(helper_fpush);
6222 tcg_gen_helper_0_0(helper_fldlg2_ST0);
6223 break;
6224 case 5:
6225 tcg_gen_helper_0_0(helper_fpush);
6226 tcg_gen_helper_0_0(helper_fldln2_ST0);
6227 break;
6228 case 6:
6229 tcg_gen_helper_0_0(helper_fpush);
6230 tcg_gen_helper_0_0(helper_fldz_ST0);
6231 break;
6232 default:
6233 goto illegal_op;
6234 }
6235 }
6236 break;
6237 case 0x0e: /* grp d9/6 */
6238 switch(rm) {
6239 case 0: /* f2xm1 */
6240 tcg_gen_helper_0_0(helper_f2xm1);
6241 break;
6242 case 1: /* fyl2x */
6243 tcg_gen_helper_0_0(helper_fyl2x);
6244 break;
6245 case 2: /* fptan */
6246 tcg_gen_helper_0_0(helper_fptan);
6247 break;
6248 case 3: /* fpatan */
6249 tcg_gen_helper_0_0(helper_fpatan);
6250 break;
6251 case 4: /* fxtract */
6252 tcg_gen_helper_0_0(helper_fxtract);
6253 break;
6254 case 5: /* fprem1 */
6255 tcg_gen_helper_0_0(helper_fprem1);
6256 break;
6257 case 6: /* fdecstp */
6258 tcg_gen_helper_0_0(helper_fdecstp);
6259 break;
6260 default:
6261 case 7: /* fincstp */
6262 tcg_gen_helper_0_0(helper_fincstp);
6263 break;
6264 }
6265 break;
6266 case 0x0f: /* grp d9/7 */
6267 switch(rm) {
6268 case 0: /* fprem */
6269 tcg_gen_helper_0_0(helper_fprem);
6270 break;
6271 case 1: /* fyl2xp1 */
6272 tcg_gen_helper_0_0(helper_fyl2xp1);
6273 break;
6274 case 2: /* fsqrt */
6275 tcg_gen_helper_0_0(helper_fsqrt);
6276 break;
6277 case 3: /* fsincos */
6278 tcg_gen_helper_0_0(helper_fsincos);
6279 break;
6280 case 5: /* fscale */
6281 tcg_gen_helper_0_0(helper_fscale);
6282 break;
6283 case 4: /* frndint */
6284 tcg_gen_helper_0_0(helper_frndint);
6285 break;
6286 case 6: /* fsin */
6287 tcg_gen_helper_0_0(helper_fsin);
6288 break;
6289 default:
6290 case 7: /* fcos */
6291 tcg_gen_helper_0_0(helper_fcos);
6292 break;
6293 }
6294 break;
6295 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
6296 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
6297 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
6298 {
6299 int op1;
6300
6301 op1 = op & 7;
6302 if (op >= 0x20) {
6303 tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
6304 if (op >= 0x30)
6305 tcg_gen_helper_0_0(helper_fpop);
6306 } else {
6307 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6308 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
6309 }
6310 }
6311 break;
6312 case 0x02: /* fcom */
6313 case 0x22: /* fcom2, undocumented op */
6314 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6315 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6316 break;
6317 case 0x03: /* fcomp */
6318 case 0x23: /* fcomp3, undocumented op */
6319 case 0x32: /* fcomp5, undocumented op */
6320 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6321 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6322 tcg_gen_helper_0_0(helper_fpop);
6323 break;
6324 case 0x15: /* da/5 */
6325 switch(rm) {
6326 case 1: /* fucompp */
6327 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
6328 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6329 tcg_gen_helper_0_0(helper_fpop);
6330 tcg_gen_helper_0_0(helper_fpop);
6331 break;
6332 default:
6333 goto illegal_op;
6334 }
6335 break;
6336 case 0x1c:
6337 switch(rm) {
6338 case 0: /* feni (287 only, just do nop here) */
6339 break;
6340 case 1: /* fdisi (287 only, just do nop here) */
6341 break;
6342 case 2: /* fclex */
6343 tcg_gen_helper_0_0(helper_fclex);
6344 break;
6345 case 3: /* fninit */
6346 tcg_gen_helper_0_0(helper_fninit);
6347 break;
6348 case 4: /* fsetpm (287 only, just do nop here) */
6349 break;
6350 default:
6351 goto illegal_op;
6352 }
6353 break;
6354 case 0x1d: /* fucomi */
6355 if (s->cc_op != CC_OP_DYNAMIC)
6356 gen_op_set_cc_op(s->cc_op);
6357 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6358 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
6359 s->cc_op = CC_OP_EFLAGS;
6360 break;
6361 case 0x1e: /* fcomi */
6362 if (s->cc_op != CC_OP_DYNAMIC)
6363 gen_op_set_cc_op(s->cc_op);
6364 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6365 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
6366 s->cc_op = CC_OP_EFLAGS;
6367 break;
6368 case 0x28: /* ffree sti */
6369 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
6370 break;
6371 case 0x2a: /* fst sti */
6372 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
6373 break;
6374 case 0x2b: /* fstp sti */
6375 case 0x0b: /* fstp1 sti, undocumented op */
6376 case 0x3a: /* fstp8 sti, undocumented op */
6377 case 0x3b: /* fstp9 sti, undocumented op */
6378 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
6379 tcg_gen_helper_0_0(helper_fpop);
6380 break;
6381 case 0x2c: /* fucom st(i) */
6382 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6383 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6384 break;
6385 case 0x2d: /* fucomp st(i) */
6386 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6387 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6388 tcg_gen_helper_0_0(helper_fpop);
6389 break;
6390 case 0x33: /* de/3 */
6391 switch(rm) {
6392 case 1: /* fcompp */
6393 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
6394 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6395 tcg_gen_helper_0_0(helper_fpop);
6396 tcg_gen_helper_0_0(helper_fpop);
6397 break;
6398 default:
6399 goto illegal_op;
6400 }
6401 break;
6402 case 0x38: /* ffreep sti, undocumented op */
6403 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
6404 tcg_gen_helper_0_0(helper_fpop);
6405 break;
6406 case 0x3c: /* df/4 */
6407 switch(rm) {
6408 case 0:
6409 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
6410 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6411 gen_op_mov_reg_T0(OT_WORD, R_EAX);
6412 break;
6413 default:
6414 goto illegal_op;
6415 }
6416 break;
6417 case 0x3d: /* fucomip */
6418 if (s->cc_op != CC_OP_DYNAMIC)
6419 gen_op_set_cc_op(s->cc_op);
6420 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6421 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
6422 tcg_gen_helper_0_0(helper_fpop);
6423 s->cc_op = CC_OP_EFLAGS;
6424 break;
6425 case 0x3e: /* fcomip */
6426 if (s->cc_op != CC_OP_DYNAMIC)
6427 gen_op_set_cc_op(s->cc_op);
6428 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6429 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
6430 tcg_gen_helper_0_0(helper_fpop);
6431 s->cc_op = CC_OP_EFLAGS;
6432 break;
6433 case 0x10 ... 0x13: /* fcmovxx */
6434 case 0x18 ... 0x1b:
6435 {
6436 int op1, l1;
6437 static const uint8_t fcmov_cc[8] = {
6438 (JCC_B << 1),
6439 (JCC_Z << 1),
6440 (JCC_BE << 1),
6441 (JCC_P << 1),
6442 };
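                    /* gen_jcc1 branches to l1, skipping the fmov, when the
                       move condition is false: bit 0 of op1 inverts the test
                       for the fcmov (0x10) group versus the fcmovn (0x18)
                       group. */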
6443 op1 = fcmov_cc[op & 3] | (((op >> 3) & 1) ^ 1);
6444 l1 = gen_new_label();
6445 gen_jcc1(s, s->cc_op, op1, l1);
6446 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
6447 gen_set_label(l1);
6448 }
6449 break;
6450 default:
6451 goto illegal_op;
6452 }
6453 }
6454 break;
6455 /************************/
6456 /* string ops */
6457
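        /* With a REP/REPNE prefix the gen_repz_* translators emit a
           per-iteration loop that decrements ECX and, for scas/cmps, also
           tests the ZF termination condition; the unprefixed forms emit a
           single iteration. */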
6458 case 0xa4: /* movsS */
6459 case 0xa5:
6460 if ((b & 1) == 0)
6461 ot = OT_BYTE;
6462 else
6463 ot = dflag + OT_WORD;
6464
6465 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6466 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6467 } else {
6468 gen_movs(s, ot);
6469 }
6470 break;
6471
6472 case 0xaa: /* stosS */
6473 case 0xab:
6474 if ((b & 1) == 0)
6475 ot = OT_BYTE;
6476 else
6477 ot = dflag + OT_WORD;
6478
6479 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6480 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6481 } else {
6482 gen_stos(s, ot);
6483 }
6484 break;
6485 case 0xac: /* lodsS */
6486 case 0xad:
6487 if ((b & 1) == 0)
6488 ot = OT_BYTE;
6489 else
6490 ot = dflag + OT_WORD;
6491 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6492 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6493 } else {
6494 gen_lods(s, ot);
6495 }
6496 break;
6497 case 0xae: /* scasS */
6498 case 0xaf:
6499 if ((b & 1) == 0)
6500 ot = OT_BYTE;
6501 else
6502 ot = dflag + OT_WORD;
6503 if (prefixes & PREFIX_REPNZ) {
6504 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6505 } else if (prefixes & PREFIX_REPZ) {
6506 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6507 } else {
6508 gen_scas(s, ot);
6509 s->cc_op = CC_OP_SUBB + ot;
6510 }
6511 break;
6512
6513 case 0xa6: /* cmpsS */
6514 case 0xa7:
6515 if ((b & 1) == 0)
6516 ot = OT_BYTE;
6517 else
6518 ot = dflag + OT_WORD;
6519 if (prefixes & PREFIX_REPNZ) {
6520 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6521 } else if (prefixes & PREFIX_REPZ) {
6522 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6523 } else {
6524 gen_cmps(s, ot);
6525 s->cc_op = CC_OP_SUBB + ot;
6526 }
6527 break;
6528 case 0x6c: /* insS */
6529 case 0x6d:
6530 if ((b & 1) == 0)
6531 ot = OT_BYTE;
6532 else
6533 ot = dflag ? OT_LONG : OT_WORD;
6534 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6535 gen_op_andl_T0_ffff();
6536 gen_check_io(s, ot, pc_start - s->cs_base,
6537 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
6538 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6539 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6540 } else {
6541 gen_ins(s, ot);
6542 if (use_icount) {
6543 gen_jmp(s, s->pc - s->cs_base);
6544 }
6545 }
6546 break;
6547 case 0x6e: /* outsS */
6548 case 0x6f:
6549 if ((b & 1) == 0)
6550 ot = OT_BYTE;
6551 else
6552 ot = dflag ? OT_LONG : OT_WORD;
6553 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6554 gen_op_andl_T0_ffff();
6555 gen_check_io(s, ot, pc_start - s->cs_base,
6556 svm_is_rep(prefixes) | 4);
6557 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6558 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6559 } else {
6560 gen_outs(s, ot);
6561 if (use_icount) {
6562 gen_jmp(s, s->pc - s->cs_base);
6563 }
6564 }
6565 break;
6566
6567 /************************/
6568 /* port I/O */
6569
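        /* gen_check_io() checks IOPL and the TSS I/O permission bitmap (and
           the SVM I/O intercept) before the access; with icount enabled the
           helper call is bracketed by gen_io_start()/gen_io_end() and the TB
           ends right after, so the I/O lands at a deterministic instruction
           boundary. */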
6570 case 0xe4:
6571 case 0xe5:
6572 if ((b & 1) == 0)
6573 ot = OT_BYTE;
6574 else
6575 ot = dflag ? OT_LONG : OT_WORD;
6576 val = ldub_code(s->pc++);
6577 gen_op_movl_T0_im(val);
6578 gen_check_io(s, ot, pc_start - s->cs_base,
6579 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6580 if (use_icount)
6581 gen_io_start();
6582 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6583 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
6584 gen_op_mov_reg_T1(ot, R_EAX);
6585 if (use_icount) {
6586 gen_io_end();
6587 gen_jmp(s, s->pc - s->cs_base);
6588 }
6589 break;
6590 case 0xe6:
6591 case 0xe7:
6592 if ((b & 1) == 0)
6593 ot = OT_BYTE;
6594 else
6595 ot = dflag ? OT_LONG : OT_WORD;
6596 val = ldub_code(s->pc++);
6597 gen_op_movl_T0_im(val);
6598 gen_check_io(s, ot, pc_start - s->cs_base,
6599 svm_is_rep(prefixes));
6600#ifdef VBOX /* bird: linux is writing to this port for delaying I/O. */
6601 if (val == 0x80)
6602 break;
6603#endif /* VBOX */
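            /* Note: the write is discarded before any helper call is
               generated; port 0x80 is the POST diagnostic port,
               traditionally written only to insert a small I/O delay. */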
6604 gen_op_mov_TN_reg(ot, 1, R_EAX);
6605
6606 if (use_icount)
6607 gen_io_start();
6608 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6609 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6610 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6611 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6612 if (use_icount) {
6613 gen_io_end();
6614 gen_jmp(s, s->pc - s->cs_base);
6615 }
6616 break;
6617 case 0xec:
6618 case 0xed:
6619 if ((b & 1) == 0)
6620 ot = OT_BYTE;
6621 else
6622 ot = dflag ? OT_LONG : OT_WORD;
6623 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6624 gen_op_andl_T0_ffff();
6625 gen_check_io(s, ot, pc_start - s->cs_base,
6626 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6627 if (use_icount)
6628 gen_io_start();
6629 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6630 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
6631 gen_op_mov_reg_T1(ot, R_EAX);
6632 if (use_icount) {
6633 gen_io_end();
6634 gen_jmp(s, s->pc - s->cs_base);
6635 }
6636 break;
6637 case 0xee:
6638 case 0xef:
6639 if ((b & 1) == 0)
6640 ot = OT_BYTE;
6641 else
6642 ot = dflag ? OT_LONG : OT_WORD;
6643 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6644 gen_op_andl_T0_ffff();
6645 gen_check_io(s, ot, pc_start - s->cs_base,
6646 svm_is_rep(prefixes));
6647 gen_op_mov_TN_reg(ot, 1, R_EAX);
6648
6649 if (use_icount)
6650 gen_io_start();
6651 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6652 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6653 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6654 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6655 if (use_icount) {
6656 gen_io_end();
6657 gen_jmp(s, s->pc - s->cs_base);
6658 }
6659 break;
6660
6661 /************************/
6662 /* control */
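        /* Near returns pop EIP from the stack; with 16-bit operand size the
           target is truncated to 16 bits. Note: in 64-bit mode ret uses a
           64-bit operand unless a 66h prefix forces 16 bits, hence the
           dflag adjustment below. */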
6663 case 0xc2: /* ret im */
6664 val = ldsw_code(s->pc);
6665 s->pc += 2;
6666 gen_pop_T0(s);
6667 if (CODE64(s) && s->dflag)
6668 s->dflag = 2;
6669 gen_stack_update(s, val + (2 << s->dflag));
6670 if (s->dflag == 0)
6671 gen_op_andl_T0_ffff();
6672 gen_op_jmp_T0();
6673 gen_eob(s);
6674 break;
6675 case 0xc3: /* ret */
6676 gen_pop_T0(s);
6677 gen_pop_update(s);
6678 if (s->dflag == 0)
6679 gen_op_andl_T0_ffff();
6680 gen_op_jmp_T0();
6681 gen_eob(s);
6682 break;
6683 case 0xca: /* lret im */
6684 val = ldsw_code(s->pc);
6685 s->pc += 2;
6686 do_lret:
6687 if (s->pe && !s->vm86) {
6688 if (s->cc_op != CC_OP_DYNAMIC)
6689 gen_op_set_cc_op(s->cc_op);
6690 gen_jmp_im(pc_start - s->cs_base);
6691 tcg_gen_helper_0_2(helper_lret_protected,
6692 tcg_const_i32(s->dflag),
6693 tcg_const_i32(val));
6694 } else {
6695 gen_stack_A0(s);
6696 /* pop offset */
6697 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6698 if (s->dflag == 0)
6699 gen_op_andl_T0_ffff();
6700 /* NOTE: keeping EIP updated is not a problem in case of
6701 exception */
6702 gen_op_jmp_T0();
6703 /* pop selector */
6704 gen_op_addl_A0_im(2 << s->dflag);
6705 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6706 gen_op_movl_seg_T0_vm(R_CS);
6707 /* add stack offset */
6708 gen_stack_update(s, val + (4 << s->dflag));
6709 }
6710 gen_eob(s);
6711 break;
6712 case 0xcb: /* lret */
6713 val = 0;
6714 goto do_lret;
6715 case 0xcf: /* iret */
6716 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET);
6717 if (!s->pe) {
6718 /* real mode */
6719 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6720 s->cc_op = CC_OP_EFLAGS;
6721 } else if (s->vm86) {
6722#ifdef VBOX
6723 if (s->iopl != 3 && (!s->vme || s->dflag)) {
6724#else
6725 if (s->iopl != 3) {
6726#endif
6727 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6728 } else {
6729 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6730 s->cc_op = CC_OP_EFLAGS;
6731 }
6732 } else {
6733 if (s->cc_op != CC_OP_DYNAMIC)
6734 gen_op_set_cc_op(s->cc_op);
6735 gen_jmp_im(pc_start - s->cs_base);
6736 tcg_gen_helper_0_2(helper_iret_protected,
6737 tcg_const_i32(s->dflag),
6738 tcg_const_i32(s->pc - s->cs_base));
6739 s->cc_op = CC_OP_EFLAGS;
6740 }
6741 gen_eob(s);
6742 break;
6743 case 0xe8: /* call im */
6744 {
6745 if (dflag)
6746 tval = (int32_t)insn_get(s, OT_LONG);
6747 else
6748 tval = (int16_t)insn_get(s, OT_WORD);
6749 next_eip = s->pc - s->cs_base;
6750 tval += next_eip;
6751 if (s->dflag == 0)
6752 tval &= 0xffff;
6753 gen_movtl_T0_im(next_eip);
6754 gen_push_T0(s);
6755 gen_jmp(s, tval);
6756 }
6757 break;
6758 case 0x9a: /* lcall im */
6759 {
6760 unsigned int selector, offset;
6761
6762 if (CODE64(s))
6763 goto illegal_op;
6764 ot = dflag ? OT_LONG : OT_WORD;
6765 offset = insn_get(s, ot);
6766 selector = insn_get(s, OT_WORD);
6767
6768 gen_op_movl_T0_im(selector);
6769 gen_op_movl_T1_imu(offset);
6770 }
6771 goto do_lcall;
6772 case 0xe9: /* jmp im */
6773 if (dflag)
6774 tval = (int32_t)insn_get(s, OT_LONG);
6775 else
6776 tval = (int16_t)insn_get(s, OT_WORD);
6777 tval += s->pc - s->cs_base;
6778 if (s->dflag == 0)
6779 tval &= 0xffff;
6780 gen_jmp(s, tval);
6781 break;
6782 case 0xea: /* ljmp im */
6783 {
6784 unsigned int selector, offset;
6785
6786 if (CODE64(s))
6787 goto illegal_op;
6788 ot = dflag ? OT_LONG : OT_WORD;
6789 offset = insn_get(s, ot);
6790 selector = insn_get(s, OT_WORD);
6791
6792 gen_op_movl_T0_im(selector);
6793 gen_op_movl_T1_imu(offset);
6794 }
6795 goto do_ljmp;
6796 case 0xeb: /* jmp Jb */
6797 tval = (int8_t)insn_get(s, OT_BYTE);
6798 tval += s->pc - s->cs_base;
6799 if (s->dflag == 0)
6800 tval &= 0xffff;
6801 gen_jmp(s, tval);
6802 break;
6803 case 0x70 ... 0x7f: /* jcc Jb */
6804 tval = (int8_t)insn_get(s, OT_BYTE);
6805 goto do_jcc;
6806 case 0x180 ... 0x18f: /* jcc Jv */
6807 if (dflag) {
6808 tval = (int32_t)insn_get(s, OT_LONG);
6809 } else {
6810 tval = (int16_t)insn_get(s, OT_WORD);
6811 }
6812 do_jcc:
6813 next_eip = s->pc - s->cs_base;
6814 tval += next_eip;
6815 if (s->dflag == 0)
6816 tval &= 0xffff;
6817 gen_jcc(s, b, tval, next_eip);
6818 break;
6819
6820 case 0x190 ... 0x19f: /* setcc Gv */
6821 modrm = ldub_code(s->pc++);
6822 gen_setcc(s, b);
6823 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
6824 break;
6825 case 0x140 ... 0x14f: /* cmov Gv, Ev */
6826 {
6827 int l1;
6828 TCGv t0;
6829
6830 ot = dflag + OT_WORD;
6831 modrm = ldub_code(s->pc++);
6832 reg = ((modrm >> 3) & 7) | rex_r;
6833 mod = (modrm >> 6) & 3;
6834 t0 = tcg_temp_local_new(TCG_TYPE_TL);
6835 if (mod != 3) {
6836 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6837 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
6838 } else {
6839 rm = (modrm & 7) | REX_B(s);
6840 gen_op_mov_v_reg(ot, t0, rm);
6841 }
6842#ifdef TARGET_X86_64
6843 if (ot == OT_LONG) {
6844 /* XXX: specific Intel behaviour? */
6845 l1 = gen_new_label();
6846 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6847 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
6848 gen_set_label(l1);
6849 tcg_gen_movi_tl(cpu_tmp0, 0);
6850 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
6851 } else
6852#endif
6853 {
6854 l1 = gen_new_label();
6855 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6856 gen_op_mov_reg_v(ot, reg, t0);
6857 gen_set_label(l1);
6858 }
6859 tcg_temp_free(t0);
6860 }
6861 break;
6862
6863 /************************/
6864 /* flags */
6865 case 0x9c: /* pushf */
6866 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF);
6867#ifdef VBOX
6868 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
6869#else
6870 if (s->vm86 && s->iopl != 3) {
6871#endif
6872 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6873 } else {
6874 if (s->cc_op != CC_OP_DYNAMIC)
6875 gen_op_set_cc_op(s->cc_op);
6876#ifdef VBOX
6877 if (s->vm86 && s->vme && s->iopl != 3)
6878 tcg_gen_helper_1_0(helper_read_eflags_vme, cpu_T[0]);
6879 else
6880#endif
6881 tcg_gen_helper_1_0(helper_read_eflags, cpu_T[0]);
6882 gen_push_T0(s);
6883 }
6884 break;
6885 case 0x9d: /* popf */
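        /* The mask passed to helper_write_eflags encodes which bits the
           current privilege level may change: IOPL only at CPL 0, IF only
           when CPL <= IOPL; with 16-bit operand size the mask is further
           restricted to the low 16 bits. */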
6886 gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF);
6887#ifdef VBOX
6888 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
6889#else
6890 if (s->vm86 && s->iopl != 3) {
6891#endif
6892 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6893 } else {
6894 gen_pop_T0(s);
6895 if (s->cpl == 0) {
6896 if (s->dflag) {
6897 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6898 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
6899 } else {
6900 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6901 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
6902 }
6903 } else {
6904 if (s->cpl <= s->iopl) {
6905 if (s->dflag) {
6906 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6907 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
6908 } else {
6909 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6910 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
6911 }
6912 } else {
6913 if (s->dflag) {
6914 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6915 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
6916 } else {
6917#ifdef VBOX
6918 if (s->vm86 && s->vme)
6919 tcg_gen_helper_0_1(helper_write_eflags_vme, cpu_T[0]);
6920 else
6921#endif
6922 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6923 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
6924 }
6925 }
6926 }
6927 gen_pop_update(s);
6928 s->cc_op = CC_OP_EFLAGS;
6929 /* abort translation because TF flag may change */
6930 gen_jmp_im(s->pc - s->cs_base);
6931 gen_eob(s);
6932 }
6933 break;
6934 case 0x9e: /* sahf */
6935 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
6936 goto illegal_op;
6937 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
6938 if (s->cc_op != CC_OP_DYNAMIC)
6939 gen_op_set_cc_op(s->cc_op);
6940 gen_compute_eflags(cpu_cc_src);
6941 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
6942 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
6943 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
6944 s->cc_op = CC_OP_EFLAGS;
6945 break;
6946 case 0x9f: /* lahf */
6947 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
6948 goto illegal_op;
6949 if (s->cc_op != CC_OP_DYNAMIC)
6950 gen_op_set_cc_op(s->cc_op);
6951 gen_compute_eflags(cpu_T[0]);
6952 /* Note: gen_compute_eflags() only gives the condition codes; bit 1 of
6953 FLAGS is the reserved always-one bit, hence the ORed 0x02 */
6953 tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);
6954 gen_op_mov_reg_T0(OT_BYTE, R_AH);
6955 break;
6956 case 0xf5: /* cmc */
6957 if (s->cc_op != CC_OP_DYNAMIC)
6958 gen_op_set_cc_op(s->cc_op);
6959 gen_compute_eflags(cpu_cc_src);
6960 tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
6961 s->cc_op = CC_OP_EFLAGS;
6962 break;
6963 case 0xf8: /* clc */
6964 if (s->cc_op != CC_OP_DYNAMIC)
6965 gen_op_set_cc_op(s->cc_op);
6966 gen_compute_eflags(cpu_cc_src);
6967 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
6968 s->cc_op = CC_OP_EFLAGS;
6969 break;
6970 case 0xf9: /* stc */
6971 if (s->cc_op != CC_OP_DYNAMIC)
6972 gen_op_set_cc_op(s->cc_op);
6973 gen_compute_eflags(cpu_cc_src);
6974 tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
6975 s->cc_op = CC_OP_EFLAGS;
6976 break;
6977 case 0xfc: /* cld */
6978 tcg_gen_movi_i32(cpu_tmp2_i32, 1);
6979 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
6980 break;
6981 case 0xfd: /* std */
6982 tcg_gen_movi_i32(cpu_tmp2_i32, -1);
6983 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
6984 break;
6985
6986 /************************/
6987 /* bit operations */
6988 case 0x1ba: /* bt/bts/btr/btc Gv, im */
6989 ot = dflag + OT_WORD;
6990 modrm = ldub_code(s->pc++);
6991 op = (modrm >> 3) & 7;
6992 mod = (modrm >> 6) & 3;
6993 rm = (modrm & 7) | REX_B(s);
6994 if (mod != 3) {
6995 s->rip_offset = 1;
6996 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6997 gen_op_ld_T0_A0(ot + s->mem_index);
6998 } else {
6999 gen_op_mov_TN_reg(ot, 0, rm);
7000 }
7001 /* load shift */
7002 val = ldub_code(s->pc++);
7003 gen_op_movl_T1_im(val);
7004 if (op < 4)
7005 goto illegal_op;
7006 op -= 4;
7007 goto bt_op;
7008 case 0x1a3: /* bt Gv, Ev */
7009 op = 0;
7010 goto do_btx;
7011 case 0x1ab: /* bts */
7012 op = 1;
7013 goto do_btx;
7014 case 0x1b3: /* btr */
7015 op = 2;
7016 goto do_btx;
7017 case 0x1bb: /* btc */
7018 op = 3;
7019 do_btx:
7020 ot = dflag + OT_WORD;
7021 modrm = ldub_code(s->pc++);
7022 reg = ((modrm >> 3) & 7) | rex_r;
7023 mod = (modrm >> 6) & 3;
7024 rm = (modrm & 7) | REX_B(s);
7025 gen_op_mov_TN_reg(OT_LONG, 1, reg);
7026 if (mod != 3) {
7027 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7028 /* specific case: for a memory operand the bit index may address memory
7029 outside the operand, so add (bit_index >> (3 + ot)) << ot to the address */
7029 gen_exts(ot, cpu_T[1]);
7030 tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
7031 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
7032 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
7033 gen_op_ld_T0_A0(ot + s->mem_index);
7034 } else {
7035 gen_op_mov_TN_reg(ot, 0, rm);
7036 }
7037 bt_op:
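    /* Mask the bit index to the operand bit width; the selected bit ends up
       in bit 0 of cpu_cc_src, which CC_OP_SARB + ot maps to CF. */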
7038 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
7039 switch(op) {
7040 case 0:
7041 tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
7042 tcg_gen_movi_tl(cpu_cc_dst, 0);
7043 break;
7044 case 1:
7045 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7046 tcg_gen_movi_tl(cpu_tmp0, 1);
7047 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7048 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7049 break;
7050 case 2:
7051 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7052 tcg_gen_movi_tl(cpu_tmp0, 1);
7053 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7054 tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
7055 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7056 break;
7057 default:
7058 case 3:
7059 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7060 tcg_gen_movi_tl(cpu_tmp0, 1);
7061 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7062 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7063 break;
7064 }
7065 s->cc_op = CC_OP_SARB + ot;
7066 if (op != 0) {
7067 if (mod != 3)
7068 gen_op_st_T0_A0(ot + s->mem_index);
7069 else
7070 gen_op_mov_reg_T0(ot, rm);
7071 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
7072 tcg_gen_movi_tl(cpu_cc_dst, 0);
7073 }
7074 break;
7075 case 0x1bc: /* bsf */
7076 case 0x1bd: /* bsr */
7077 {
7078 int label1;
7079 TCGv t0;
7080
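            /* ZF must be set iff the source operand is zero; this is
               modelled with CC_OP_LOGICB and cpu_cc_dst = 0 (source zero,
               destination left unchanged) or 1 (bit found). */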
7081 ot = dflag + OT_WORD;
7082 modrm = ldub_code(s->pc++);
7083 reg = ((modrm >> 3) & 7) | rex_r;
7084 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
7085 gen_extu(ot, cpu_T[0]);
7086 label1 = gen_new_label();
7087 tcg_gen_movi_tl(cpu_cc_dst, 0);
7088 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7089 tcg_gen_mov_tl(t0, cpu_T[0]);
7090 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, label1);
7091 if (b & 1) {
7092 tcg_gen_helper_1_1(helper_bsr, cpu_T[0], t0);
7093 } else {
7094 tcg_gen_helper_1_1(helper_bsf, cpu_T[0], t0);
7095 }
7096 gen_op_mov_reg_T0(ot, reg);
7097 tcg_gen_movi_tl(cpu_cc_dst, 1);
7098 gen_set_label(label1);
7099 tcg_gen_discard_tl(cpu_cc_src);
7100 s->cc_op = CC_OP_LOGICB + ot;
7101 tcg_temp_free(t0);
7102 }
7103 break;
7104 /************************/
7105 /* bcd */
7106 case 0x27: /* daa */
7107 if (CODE64(s))
7108 goto illegal_op;
7109 if (s->cc_op != CC_OP_DYNAMIC)
7110 gen_op_set_cc_op(s->cc_op);
7111 tcg_gen_helper_0_0(helper_daa);
7112 s->cc_op = CC_OP_EFLAGS;
7113 break;
7114 case 0x2f: /* das */
7115 if (CODE64(s))
7116 goto illegal_op;
7117 if (s->cc_op != CC_OP_DYNAMIC)
7118 gen_op_set_cc_op(s->cc_op);
7119 tcg_gen_helper_0_0(helper_das);
7120 s->cc_op = CC_OP_EFLAGS;
7121 break;
7122 case 0x37: /* aaa */
7123 if (CODE64(s))
7124 goto illegal_op;
7125 if (s->cc_op != CC_OP_DYNAMIC)
7126 gen_op_set_cc_op(s->cc_op);
7127 tcg_gen_helper_0_0(helper_aaa);
7128 s->cc_op = CC_OP_EFLAGS;
7129 break;
7130 case 0x3f: /* aas */
7131 if (CODE64(s))
7132 goto illegal_op;
7133 if (s->cc_op != CC_OP_DYNAMIC)
7134 gen_op_set_cc_op(s->cc_op);
7135 tcg_gen_helper_0_0(helper_aas);
7136 s->cc_op = CC_OP_EFLAGS;
7137 break;
7138 case 0xd4: /* aam */
7139 if (CODE64(s))
7140 goto illegal_op;
7141 val = ldub_code(s->pc++);
7142 if (val == 0) {
7143 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
7144 } else {
7145 tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
7146 s->cc_op = CC_OP_LOGICB;
7147 }
7148 break;
7149 case 0xd5: /* aad */
7150 if (CODE64(s))
7151 goto illegal_op;
7152 val = ldub_code(s->pc++);
7153 tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
7154 s->cc_op = CC_OP_LOGICB;
7155 break;
7156 /************************/
7157 /* misc */
7158 case 0x90: /* nop */
7159 /* XXX: xchg + rex handling */
7160 /* XXX: correct lock test for all insn */
7161 if (prefixes & PREFIX_LOCK)
7162 goto illegal_op;
7163 if (prefixes & PREFIX_REPZ) {
7164 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
7165 }
7166 break;
7167 case 0x9b: /* fwait */
7168 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
7169 (HF_MP_MASK | HF_TS_MASK)) {
7170 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7171 } else {
7172 if (s->cc_op != CC_OP_DYNAMIC)
7173 gen_op_set_cc_op(s->cc_op);
7174 gen_jmp_im(pc_start - s->cs_base);
7175 tcg_gen_helper_0_0(helper_fwait);
7176 }
7177 break;
7178 case 0xcc: /* int3 */
7179#ifdef VBOX
7180 if (s->vm86 && s->iopl != 3 && !s->vme) {
7181 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7182 } else
7183#endif
7184 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
7185 break;
7186 case 0xcd: /* int N */
7187 val = ldub_code(s->pc++);
7188#ifdef VBOX
7189 if (s->vm86 && s->iopl != 3 && !s->vme) {
7190#else
7191 if (s->vm86 && s->iopl != 3) {
7192#endif
7193 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7194 } else {
7195 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
7196 }
7197 break;
7198 case 0xce: /* into */
7199 if (CODE64(s))
7200 goto illegal_op;
7201 if (s->cc_op != CC_OP_DYNAMIC)
7202 gen_op_set_cc_op(s->cc_op);
7203 gen_jmp_im(pc_start - s->cs_base);
7204 tcg_gen_helper_0_1(helper_into, tcg_const_i32(s->pc - pc_start));
7205 break;
7206 case 0xf1: /* icebp (undocumented, exits to external debugger) */
7207 gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP);
7208#if 1
7209 gen_debug(s, pc_start - s->cs_base);
7210#else
7211 /* start debug */
7212 tb_flush(cpu_single_env);
7213 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
7214#endif
7215 break;
7216 case 0xfa: /* cli */
7217 if (!s->vm86) {
7218 if (s->cpl <= s->iopl) {
7219 tcg_gen_helper_0_0(helper_cli);
7220 } else {
7221 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7222 }
7223 } else {
7224 if (s->iopl == 3) {
7225 tcg_gen_helper_0_0(helper_cli);
7226#ifdef VBOX
7227 } else if (s->iopl != 3 && s->vme) {
7228 tcg_gen_helper_0_0(helper_cli_vme);
7229#endif
7230 } else {
7231 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7232 }
7233 }
7234 break;
7235 case 0xfb: /* sti */
7236 if (!s->vm86) {
7237 if (s->cpl <= s->iopl) {
7238 gen_sti:
7239 tcg_gen_helper_0_0(helper_sti);
7240 /* interrupts stay inhibited until the instruction after sti has executed */
7241 /* if the inhibit flag is already set (e.g. by a preceding sti or
7242 pop/mov ss), do not set it again, so the inhibit window is not extended */
7243 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
7244 tcg_gen_helper_0_0(helper_set_inhibit_irq);
7245 /* give a chance to handle pending irqs */
7246 gen_jmp_im(s->pc - s->cs_base);
7247 gen_eob(s);
7248 } else {
7249 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7250 }
7251 } else {
7252 if (s->iopl == 3) {
7253 goto gen_sti;
7254#ifdef VBOX
7255 } else if (s->iopl != 3 && s->vme) {
7256 tcg_gen_helper_0_0(helper_sti_vme);
7257 /* give a chance to handle pending irqs */
7258 gen_jmp_im(s->pc - s->cs_base);
7259 gen_eob(s);
7260#endif
7261 } else {
7262 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7263 }
7264 }
7265 break;
7266 case 0x62: /* bound */
7267 if (CODE64(s))
7268 goto illegal_op;
7269 ot = dflag ? OT_LONG : OT_WORD;
7270 modrm = ldub_code(s->pc++);
7271 reg = (modrm >> 3) & 7;
7272 mod = (modrm >> 6) & 3;
7273 if (mod == 3)
7274 goto illegal_op;
7275 gen_op_mov_TN_reg(ot, 0, reg);
7276 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7277 gen_jmp_im(pc_start - s->cs_base);
7278 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7279 if (ot == OT_WORD)
7280 tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
7281 else
7282 tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
7283 break;
7284 case 0x1c8 ... 0x1cf: /* bswap reg */
7285 reg = (b & 7) | REX_B(s);
7286#ifdef TARGET_X86_64
7287 if (dflag == 2) {
7288 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
7289 tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
7290 gen_op_mov_reg_T0(OT_QUAD, reg);
7291 } else
7292 {
7293 TCGv tmp0;
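            /* 32-bit bswap on a 64-bit guest register: swap the low half and
               zero-extend, matching the usual clearing of bits 63:32 by
               32-bit operations. */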
7294 gen_op_mov_TN_reg(OT_LONG, 0, reg);
7295
7296 tmp0 = tcg_temp_new(TCG_TYPE_I32);
7297 tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
7298 tcg_gen_bswap_i32(tmp0, tmp0);
7299 tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
7300 gen_op_mov_reg_T0(OT_LONG, reg);
7301 }
7302#else
7303 {
7304 gen_op_mov_TN_reg(OT_LONG, 0, reg);
7305 tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
7306 gen_op_mov_reg_T0(OT_LONG, reg);
7307 }
7308#endif
7309 break;
7310 case 0xd6: /* salc */
7311 if (CODE64(s))
7312 goto illegal_op;
7313 if (s->cc_op != CC_OP_DYNAMIC)
7314 gen_op_set_cc_op(s->cc_op);
7315 gen_compute_eflags_c(cpu_T[0]);
7316 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
7317 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
7318 break;
7319 case 0xe0: /* loopnz */
7320 case 0xe1: /* loopz */
7321 case 0xe2: /* loop */
7322 case 0xe3: /* jecxz */
7323 {
7324 int l1, l2, l3;
7325
7326 tval = (int8_t)insn_get(s, OT_BYTE);
7327 next_eip = s->pc - s->cs_base;
7328 tval += next_eip;
7329 if (s->dflag == 0)
7330 tval &= 0xffff;
7331
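            /* l1 is the branch-taken target, l3 the fall-through for
               loopz/loopnz when ECX reaches zero, and l2 the common exit. */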
7332 l1 = gen_new_label();
7333 l2 = gen_new_label();
7334 l3 = gen_new_label();
7335 b &= 3;
7336 switch(b) {
7337 case 0: /* loopnz */
7338 case 1: /* loopz */
7339 if (s->cc_op != CC_OP_DYNAMIC)
7340 gen_op_set_cc_op(s->cc_op);
7341 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7342 gen_op_jz_ecx(s->aflag, l3);
7343 gen_compute_eflags(cpu_tmp0);
7344 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
7345 if (b == 0) {
7346 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
7347 } else {
7348 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, l1);
7349 }
7350 break;
7351 case 2: /* loop */
7352 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7353 gen_op_jnz_ecx(s->aflag, l1);
7354 break;
7355 default:
7356 case 3: /* jcxz/jecxz */
7357 gen_op_jz_ecx(s->aflag, l1);
7358 break;
7359 }
7360
7361 gen_set_label(l3);
7362 gen_jmp_im(next_eip);
7363 tcg_gen_br(l2);
7364
7365 gen_set_label(l1);
7366 gen_jmp_im(tval);
7367 gen_set_label(l2);
7368 gen_eob(s);
7369 }
7370 break;
7371 case 0x130: /* wrmsr */
7372 case 0x132: /* rdmsr */
7373 if (s->cpl != 0) {
7374 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7375 } else {
7376 if (s->cc_op != CC_OP_DYNAMIC)
7377 gen_op_set_cc_op(s->cc_op);
7378 gen_jmp_im(pc_start - s->cs_base);
7379 if (b & 2) {
7380 tcg_gen_helper_0_0(helper_rdmsr);
7381 } else {
7382 tcg_gen_helper_0_0(helper_wrmsr);
7383 }
7384 }
7385 break;
7386 case 0x131: /* rdtsc */
7387 if (s->cc_op != CC_OP_DYNAMIC)
7388 gen_op_set_cc_op(s->cc_op);
7389 gen_jmp_im(pc_start - s->cs_base);
7390 if (use_icount)
7391 gen_io_start();
7392 tcg_gen_helper_0_0(helper_rdtsc);
7393 if (use_icount) {
7394 gen_io_end();
7395 gen_jmp(s, s->pc - s->cs_base);
7396 }
7397 break;
7398 case 0x133: /* rdpmc */
7399 if (s->cc_op != CC_OP_DYNAMIC)
7400 gen_op_set_cc_op(s->cc_op);
7401 gen_jmp_im(pc_start - s->cs_base);
7402 tcg_gen_helper_0_0(helper_rdpmc);
7403 break;
7404 case 0x134: /* sysenter */
7405#ifndef VBOX
7406 /* On Intel CPUs, SYSENTER is valid in 64-bit mode */
7407 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
7408#else
7409 /** @todo: make things right */
7410 if (CODE64(s))
7411#endif
7412 goto illegal_op;
7413 if (!s->pe) {
7414 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7415 } else {
7416 if (s->cc_op != CC_OP_DYNAMIC) {
7417 gen_op_set_cc_op(s->cc_op);
7418 s->cc_op = CC_OP_DYNAMIC;
7419 }
7420 gen_jmp_im(pc_start - s->cs_base);
7421 tcg_gen_helper_0_0(helper_sysenter);
7422 gen_eob(s);
7423 }
7424 break;
7425 case 0x135: /* sysexit */
7426#ifndef VBOX
7427 /* On Intel CPUs, SYSEXIT is valid in 64-bit mode */
7428 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
7429#else
7430 /** @todo: make things right */
7431 if (CODE64(s))
7432#endif
7433 goto illegal_op;
7434 if (!s->pe) {
7435 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7436 } else {
7437 if (s->cc_op != CC_OP_DYNAMIC) {
7438 gen_op_set_cc_op(s->cc_op);
7439 s->cc_op = CC_OP_DYNAMIC;
7440 }
7441 gen_jmp_im(pc_start - s->cs_base);
7442 tcg_gen_helper_0_1(helper_sysexit, tcg_const_i32(dflag));
7443 gen_eob(s);
7444 }
7445 break;
7446#ifdef TARGET_X86_64
7447 case 0x105: /* syscall */
7448 /* XXX: is it usable in real mode? */
7449 if (s->cc_op != CC_OP_DYNAMIC) {
7450 gen_op_set_cc_op(s->cc_op);
7451 s->cc_op = CC_OP_DYNAMIC;
7452 }
7453 gen_jmp_im(pc_start - s->cs_base);
7454 tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
7455 gen_eob(s);
7456 break;
7457 case 0x107: /* sysret */
7458 if (!s->pe) {
7459 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7460 } else {
7461 if (s->cc_op != CC_OP_DYNAMIC) {
7462 gen_op_set_cc_op(s->cc_op);
7463 s->cc_op = CC_OP_DYNAMIC;
7464 }
7465 gen_jmp_im(pc_start - s->cs_base);
7466 tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
7467 /* condition codes are modified only in long mode */
7468 if (s->lma)
7469 s->cc_op = CC_OP_EFLAGS;
7470 gen_eob(s);
7471 }
7472 break;
7473#endif
7474 case 0x1a2: /* cpuid */
7475 if (s->cc_op != CC_OP_DYNAMIC)
7476 gen_op_set_cc_op(s->cc_op);
7477 gen_jmp_im(pc_start - s->cs_base);
7478 tcg_gen_helper_0_0(helper_cpuid);
7479 break;
7480 case 0xf4: /* hlt */
7481 if (s->cpl != 0) {
7482 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7483 } else {
7484 if (s->cc_op != CC_OP_DYNAMIC)
7485 gen_op_set_cc_op(s->cc_op);
7486 gen_jmp_im(pc_start - s->cs_base);
7487 tcg_gen_helper_0_1(helper_hlt, tcg_const_i32(s->pc - pc_start));
7488 s->is_jmp = 3;
7489 }
7490 break;
7491 case 0x100:
7492 modrm = ldub_code(s->pc++);
7493 mod = (modrm >> 6) & 3;
7494 op = (modrm >> 3) & 7;
7495 switch(op) {
7496 case 0: /* sldt */
7497 if (!s->pe || s->vm86)
7498 goto illegal_op;
7499 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ);
7500 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
7501 ot = OT_WORD;
7502 if (mod == 3)
7503 ot += s->dflag;
7504 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
7505 break;
7506 case 2: /* lldt */
7507 if (!s->pe || s->vm86)
7508 goto illegal_op;
7509 if (s->cpl != 0) {
7510 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7511 } else {
7512 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE);
7513 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7514 gen_jmp_im(pc_start - s->cs_base);
7515 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7516 tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
7517 }
7518 break;
7519 case 1: /* str */
7520 if (!s->pe || s->vm86)
7521 goto illegal_op;
7522 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ);
7523 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
7524 ot = OT_WORD;
7525 if (mod == 3)
7526 ot += s->dflag;
7527 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
7528 break;
7529 case 3: /* ltr */
7530 if (!s->pe || s->vm86)
7531 goto illegal_op;
7532 if (s->cpl != 0) {
7533 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7534 } else {
7535 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE);
7536 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7537 gen_jmp_im(pc_start - s->cs_base);
7538 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7539 tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
7540 }
7541 break;
7542 case 4: /* verr */
7543 case 5: /* verw */
7544 if (!s->pe || s->vm86)
7545 goto illegal_op;
7546 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7547 if (s->cc_op != CC_OP_DYNAMIC)
7548 gen_op_set_cc_op(s->cc_op);
7549 if (op == 4)
7550 tcg_gen_helper_0_1(helper_verr, cpu_T[0]);
7551 else
7552 tcg_gen_helper_0_1(helper_verw, cpu_T[0]);
7553 s->cc_op = CC_OP_EFLAGS;
7554 break;
7555 default:
7556 goto illegal_op;
7557 }
7558 break;
7559 case 0x101:
7560 modrm = ldub_code(s->pc++);
7561 mod = (modrm >> 6) & 3;
7562 op = (modrm >> 3) & 7;
7563 rm = modrm & 7;
7564 switch(op) {
7565 case 0: /* sgdt */
7566 if (mod == 3)
7567 goto illegal_op;
7568 gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ);
7569 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7570 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
7571 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7572 gen_add_A0_im(s, 2);
7573 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
7574 if (!s->dflag)
7575 gen_op_andl_T0_im(0xffffff);
7576 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7577 break;
7578 case 1:
7579 if (mod == 3) {
7580 switch (rm) {
7581 case 0: /* monitor */
7582 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7583 s->cpl != 0)
7584 goto illegal_op;
7585 if (s->cc_op != CC_OP_DYNAMIC)
7586 gen_op_set_cc_op(s->cc_op);
7587 gen_jmp_im(pc_start - s->cs_base);
7588#ifdef TARGET_X86_64
7589 if (s->aflag == 2) {
7590 gen_op_movq_A0_reg(R_EAX);
7591 } else
7592#endif
7593 {
7594 gen_op_movl_A0_reg(R_EAX);
7595 if (s->aflag == 0)
7596 gen_op_andl_A0_ffff();
7597 }
7598 gen_add_A0_ds_seg(s);
7599 tcg_gen_helper_0_1(helper_monitor, cpu_A0);
7600 break;
7601 case 1: /* mwait */
7602 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7603 s->cpl != 0)
7604 goto illegal_op;
7605 if (s->cc_op != CC_OP_DYNAMIC) {
7606 gen_op_set_cc_op(s->cc_op);
7607 s->cc_op = CC_OP_DYNAMIC;
7608 }
7609 gen_jmp_im(pc_start - s->cs_base);
7610 tcg_gen_helper_0_1(helper_mwait, tcg_const_i32(s->pc - pc_start));
7611 gen_eob(s);
7612 break;
7613 default:
7614 goto illegal_op;
7615 }
7616 } else { /* sidt */
7617 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ);
7618 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7619 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
7620 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7621 gen_add_A0_im(s, 2);
7622 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
7623 if (!s->dflag)
7624 gen_op_andl_T0_im(0xffffff);
7625 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7626 }
7627 break;
7628 case 2: /* lgdt */
7629 case 3: /* lidt */
7630 if (mod == 3) {
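            /* mod == 3 forms of 0f 01 /2 and /3 encode the SVM instructions
               (vmrun, vmmcall, vmload, vmsave, stgi, clgi, skinit,
               invlpga). */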
7631 if (s->cc_op != CC_OP_DYNAMIC)
7632 gen_op_set_cc_op(s->cc_op);
7633 gen_jmp_im(pc_start - s->cs_base);
7634 switch(rm) {
7635 case 0: /* VMRUN */
7636 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7637 goto illegal_op;
7638 if (s->cpl != 0) {
7639 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7640 break;
7641 } else {
7642 tcg_gen_helper_0_2(helper_vmrun,
7643 tcg_const_i32(s->aflag),
7644 tcg_const_i32(s->pc - pc_start));
7645 tcg_gen_exit_tb(0);
7646 s->is_jmp = 3;
7647 }
7648 break;
7649 case 1: /* VMMCALL */
7650 if (!(s->flags & HF_SVME_MASK))
7651 goto illegal_op;
7652 tcg_gen_helper_0_0(helper_vmmcall);
7653 break;
7654 case 2: /* VMLOAD */
7655 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7656 goto illegal_op;
7657 if (s->cpl != 0) {
7658 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7659 break;
7660 } else {
7661 tcg_gen_helper_0_1(helper_vmload,
7662 tcg_const_i32(s->aflag));
7663 }
7664 break;
7665 case 3: /* VMSAVE */
7666 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7667 goto illegal_op;
7668 if (s->cpl != 0) {
7669 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7670 break;
7671 } else {
7672 tcg_gen_helper_0_1(helper_vmsave,
7673 tcg_const_i32(s->aflag));
7674 }
7675 break;
7676 case 4: /* STGI */
7677 if ((!(s->flags & HF_SVME_MASK) &&
7678 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7679 !s->pe)
7680 goto illegal_op;
7681 if (s->cpl != 0) {
7682 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7683 break;
7684 } else {
7685 tcg_gen_helper_0_0(helper_stgi);
7686 }
7687 break;
7688 case 5: /* CLGI */
7689 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7690 goto illegal_op;
7691 if (s->cpl != 0) {
7692 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7693 break;
7694 } else {
7695 tcg_gen_helper_0_0(helper_clgi);
7696 }
7697 break;
7698 case 6: /* SKINIT */
7699 if ((!(s->flags & HF_SVME_MASK) &&
7700 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7701 !s->pe)
7702 goto illegal_op;
7703 tcg_gen_helper_0_0(helper_skinit);
7704 break;
7705 case 7: /* INVLPGA */
7706 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7707 goto illegal_op;
7708 if (s->cpl != 0) {
7709 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7710 break;
7711 } else {
7712 tcg_gen_helper_0_1(helper_invlpga,
7713 tcg_const_i32(s->aflag));
7714 }
7715 break;
7716 default:
7717 goto illegal_op;
7718 }
7719 } else if (s->cpl != 0) {
7720 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7721 } else {
7722 gen_svm_check_intercept(s, pc_start,
7723 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE);
7724 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7725 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
7726 gen_add_A0_im(s, 2);
7727 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7728 if (!s->dflag)
7729 gen_op_andl_T0_im(0xffffff);
7730 if (op == 2) {
7731 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
7732 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
7733 } else {
7734 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
7735 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
7736 }
7737 }
7738 break;
7739 case 4: /* smsw */
7740 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0);
7741 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
7742 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
7743 break;
7744 case 6: /* lmsw */
7745 if (s->cpl != 0) {
7746 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7747 } else {
7748 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
7749 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7750 tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
7751 gen_jmp_im(s->pc - s->cs_base);
7752 gen_eob(s);
7753 }
7754 break;
7755 case 7: /* invlpg */
7756 if (s->cpl != 0) {
7757 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7758 } else {
7759 if (mod == 3) {
7760#ifdef TARGET_X86_64
7761 if (CODE64(s) && rm == 0) {
7762 /* swapgs: exchange GS.base with the KernelGSbase MSR */
7763 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7764 tcg_gen_ld_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,kernelgsbase));
7765 tcg_gen_st_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7766 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,kernelgsbase));
7767 } else
7768#endif
7769 {
7770 goto illegal_op;
7771 }
7772 } else {
7773 if (s->cc_op != CC_OP_DYNAMIC)
7774 gen_op_set_cc_op(s->cc_op);
7775 gen_jmp_im(pc_start - s->cs_base);
7776 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7777 tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
7778 gen_jmp_im(s->pc - s->cs_base);
7779 gen_eob(s);
7780 }
7781 }
7782 break;
7783 default:
7784 goto illegal_op;
7785 }
7786 break;
7787 case 0x108: /* invd */
7788 case 0x109: /* wbinvd */
7789 if (s->cpl != 0) {
7790 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7791 } else {
7792 gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD);
7793 /* nothing to do */
7794 }
7795 break;
7796 case 0x63: /* arpl or movslS (x86_64) */
7797#ifdef TARGET_X86_64
7798 if (CODE64(s)) {
7799 int d_ot;
7800 /* d_ot is the size of destination */
7801 d_ot = dflag + OT_WORD;
7802
7803 modrm = ldub_code(s->pc++);
7804 reg = ((modrm >> 3) & 7) | rex_r;
7805 mod = (modrm >> 6) & 3;
7806 rm = (modrm & 7) | REX_B(s);
7807
7808 if (mod == 3) {
7809 gen_op_mov_TN_reg(OT_LONG, 0, rm);
7810 /* sign extend */
7811 if (d_ot == OT_QUAD)
7812 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
7813 gen_op_mov_reg_T0(d_ot, reg);
7814 } else {
7815 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7816 if (d_ot == OT_QUAD) {
7817 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
7818 } else {
7819 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
7820 }
7821 gen_op_mov_reg_T0(d_ot, reg);
7822 }
7823 } else
7824#endif
7825 {
7826 int label1;
7827 TCGv t0, t1, t2;
7828
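            /* arpl: if the destination selector's RPL is below the source's,
               raise it to the source RPL and set ZF, otherwise clear ZF; t2
               holds the CC_Z bit merged into the computed flags below. */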
7829 if (!s->pe || s->vm86)
7830 goto illegal_op;
7831 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7832 t1 = tcg_temp_local_new(TCG_TYPE_TL);
7833 t2 = tcg_temp_local_new(TCG_TYPE_TL);
7834 ot = OT_WORD;
7835 modrm = ldub_code(s->pc++);
7836 reg = (modrm >> 3) & 7;
7837 mod = (modrm >> 6) & 3;
7838 rm = modrm & 7;
7839#ifdef VBOX /* Fix for obvious bug - T1 needs to be loaded */
7840 /** @todo: how to do that right? */
7841 //gen_op_mov_TN_reg[ot][1][reg]();
7842#endif
7843 if (mod != 3) {
7844 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7845 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
7846 } else {
7847 gen_op_mov_v_reg(ot, t0, rm);
7848 }
7849 gen_op_mov_v_reg(ot, t1, reg);
7850 tcg_gen_andi_tl(cpu_tmp0, t0, 3);
7851 tcg_gen_andi_tl(t1, t1, 3);
7852 tcg_gen_movi_tl(t2, 0);
7853 label1 = gen_new_label();
7854 tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, t1, label1);
7855 tcg_gen_andi_tl(t0, t0, ~3);
7856 tcg_gen_or_tl(t0, t0, t1);
7857 tcg_gen_movi_tl(t2, CC_Z);
7858 gen_set_label(label1);
7859 if (mod != 3) {
7860 gen_op_st_v(ot + s->mem_index, t0, cpu_A0);
7861 } else {
7862 gen_op_mov_reg_v(ot, rm, t0);
7863 }
7864 if (s->cc_op != CC_OP_DYNAMIC)
7865 gen_op_set_cc_op(s->cc_op);
7866 gen_compute_eflags(cpu_cc_src);
7867 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
7868 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t2);
7869 s->cc_op = CC_OP_EFLAGS;
7870 tcg_temp_free(t0);
7871 tcg_temp_free(t1);
7872 tcg_temp_free(t2);
7873 }
7874 break;
7875 case 0x102: /* lar */
7876 case 0x103: /* lsl */
7877 {
7878 int label1;
7879 TCGv t0;
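            /* helper_lar/helper_lsl set CC_Z in cpu_cc_src when the selector
               is valid; the result is written back only in that case. */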
7880 if (!s->pe || s->vm86)
7881 goto illegal_op;
7882 ot = dflag ? OT_LONG : OT_WORD;
7883 modrm = ldub_code(s->pc++);
7884 reg = ((modrm >> 3) & 7) | rex_r;
7885 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7886 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7887 if (s->cc_op != CC_OP_DYNAMIC)
7888 gen_op_set_cc_op(s->cc_op);
7889 if (b == 0x102)
7890 tcg_gen_helper_1_1(helper_lar, t0, cpu_T[0]);
7891 else
7892 tcg_gen_helper_1_1(helper_lsl, t0, cpu_T[0]);
7893 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
7894 label1 = gen_new_label();
7895 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
7896 gen_op_mov_reg_v(ot, reg, t0);
7897 gen_set_label(label1);
7898 s->cc_op = CC_OP_EFLAGS;
7899 tcg_temp_free(t0);
7900 }
7901 break;
7902 case 0x118:
7903 modrm = ldub_code(s->pc++);
7904 mod = (modrm >> 6) & 3;
7905 op = (modrm >> 3) & 7;
7906 switch(op) {
7907 case 0: /* prefetchnta */
7908 case 1: /* prefetcht0 */
7909 case 2: /* prefetcht1 */
7910 case 3: /* prefetcht2 */
7911 if (mod == 3)
7912 goto illegal_op;
7913 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7914 /* nothing more to do */
7915 break;
7916 default: /* nop (multi byte) */
7917 gen_nop_modrm(s, modrm);
7918 break;
7919 }
7920 break;
7921 case 0x119 ... 0x11f: /* nop (multi byte) */
7922 modrm = ldub_code(s->pc++);
7923 gen_nop_modrm(s, modrm);
7924 break;
7925 case 0x120: /* mov reg, crN */
7926 case 0x122: /* mov crN, reg */
7927 if (s->cpl != 0) {
7928 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7929 } else {
7930 modrm = ldub_code(s->pc++);
7931 if ((modrm & 0xc0) != 0xc0)
7932 goto illegal_op;
7933 rm = (modrm & 7) | REX_B(s);
7934 reg = ((modrm >> 3) & 7) | rex_r;
7935 if (CODE64(s))
7936 ot = OT_QUAD;
7937 else
7938 ot = OT_LONG;
7939 switch(reg) {
7940 case 0:
7941 case 2:
7942 case 3:
7943 case 4:
7944 case 8:
7945 if (s->cc_op != CC_OP_DYNAMIC)
7946 gen_op_set_cc_op(s->cc_op);
7947 gen_jmp_im(pc_start - s->cs_base);
7948 if (b & 2) {
7949 gen_op_mov_TN_reg(ot, 0, rm);
7950 tcg_gen_helper_0_2(helper_write_crN,
7951 tcg_const_i32(reg), cpu_T[0]);
7952 gen_jmp_im(s->pc - s->cs_base);
7953 gen_eob(s);
7954 } else {
7955 tcg_gen_helper_1_1(helper_read_crN,
7956 cpu_T[0], tcg_const_i32(reg));
7957 gen_op_mov_reg_T0(ot, rm);
7958 }
7959 break;
7960 default:
7961 goto illegal_op;
7962 }
7963 }
7964 break;
7965 case 0x121: /* mov reg, drN */
7966 case 0x123: /* mov drN, reg */
7967 if (s->cpl != 0) {
7968 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7969 } else {
7970 modrm = ldub_code(s->pc++);
7971 if ((modrm & 0xc0) != 0xc0)
7972 goto illegal_op;
7973 rm = (modrm & 7) | REX_B(s);
7974 reg = ((modrm >> 3) & 7) | rex_r;
7975 if (CODE64(s))
7976 ot = OT_QUAD;
7977 else
7978 ot = OT_LONG;
7979 /* XXX: do it dynamically with CR4.DE bit */
7980 if (reg == 4 || reg == 5 || reg >= 8)
7981 goto illegal_op;
7982 if (b & 2) {
7983 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
7984 gen_op_mov_TN_reg(ot, 0, rm);
7985 tcg_gen_helper_0_2(helper_movl_drN_T0,
7986 tcg_const_i32(reg), cpu_T[0]);
7987 gen_jmp_im(s->pc - s->cs_base);
7988 gen_eob(s);
7989 } else {
7990 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
7991 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
7992 gen_op_mov_reg_T0(ot, rm);
7993 }
7994 }
7995 break;
    case 0x106: /* clts */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
            tcg_gen_helper_0_0(helper_clts);
            /* abort block because static cpu state changed */
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    /* MMX/3DNow!/SSE/SSE2/SSE3/SSSE3/SSE4 support */
    case 0x1c3: /* MOVNTI reg, mem */
        if (!(s->cpuid_features & CPUID_SSE2))
            goto illegal_op;
        ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        reg = ((modrm >> 3) & 7) | rex_r;
        /* generate a generic store */
        gen_ldst_modrm(s, modrm, ot, reg, 1);
        break;
    case 0x1ae:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        switch(op) {
        case 0: /* fxsave */
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
                (s->flags & HF_EM_MASK))
                goto illegal_op;
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_fxsave,
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
            break;
        case 1: /* fxrstor */
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
                (s->flags & HF_EM_MASK))
                goto illegal_op;
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_fxrstor,
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
            break;
        case 2: /* ldmxcsr */
        case 3: /* stmxcsr */
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
                mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (op == 2) {
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
            } else {
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
            }
            break;
        case 5: /* lfence */
        case 6: /* mfence */
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
                goto illegal_op;
            break;
        case 7: /* sfence / clflush */
            if ((modrm & 0xc7) == 0xc0) {
                /* sfence */
                /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
                if (!(s->cpuid_features & CPUID_SSE))
                    goto illegal_op;
            } else {
                /* clflush */
                if (!(s->cpuid_features & CPUID_CLFLUSH))
                    goto illegal_op;
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            }
            break;
        default:
            goto illegal_op;
        }
        break;
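    /*
     * Quick reference for the 0F AE group handled above: the reg field
     * of the ModRM byte selects the operation instead of a register:
     * /0 fxsave, /1 fxrstor, /2 ldmxcsr, /3 stmxcsr, /5 lfence,
     * /6 mfence, /7 sfence (mod == 3) or clflush (mod != 3).  For
     * example, 0F AE E8 has modrm = 0xe8, so mod = 3 and op = 5:
     * lfence.
     */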
    case 0x10d: /* 3DNow! prefetch(w) */
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        /* ignore for now */
        break;
    case 0x1aa: /* rsm */
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM);
        if (!(s->flags & HF_SMM_MASK))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_jmp_im(s->pc - s->cs_base);
        tcg_gen_helper_0_0(helper_rsm);
        gen_eob(s);
        break;
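    /*
     * A note on the RSM case above: RSM is only legal inside system
     * management mode (HF_SMM_MASK).  The lazy condition codes are
     * flushed to the CPU state first because helper_rsm reloads the
     * whole register state from the SMM save area, and the block is
     * ended since execution resumes at an arbitrary saved context.
     */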
    case 0x1b8: /* SSE4.2 popcnt */
        if ((prefixes & (PREFIX_REPZ | PREFIX_LOCK | PREFIX_REPNZ)) !=
             PREFIX_REPZ)
            goto illegal_op;
        if (!(s->cpuid_ext_features & CPUID_EXT_POPCNT))
            goto illegal_op;

        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r; /* REX.R extends the destination */

        if (s->prefix & PREFIX_DATA)
            ot = OT_WORD;
        else if (s->dflag != 2)
            ot = OT_LONG;
        else
            ot = OT_QUAD;

        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        tcg_gen_helper_1_2(helper_popcnt,
                           cpu_T[0], cpu_T[0], tcg_const_i32(ot));
        gen_op_mov_reg_T0(ot, reg);

        s->cc_op = CC_OP_EFLAGS;
        break;
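    /*
     * POPCNT encoding note: the instruction is F3 0F B8 /r, i.e. the
     * mandatory F3 (REPZ) prefix is part of the opcode, which is why
     * exactly PREFIX_REPZ is required above.  Operand size follows the
     * usual rules: 66 selects 16 bits, REX.W (dflag == 2) selects
     * 64 bits, otherwise 32 bits.  For example, F3 0F B8 C1 is
     * "popcnt eax, ecx" (modrm = 0xc1: reg = 0 -> EAX, rm = 1 -> ECX).
     */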
    case 0x10e ... 0x10f:
        /* 3DNow! instructions, ignore prefixes */
        s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
        /* fall through to the common SSE dispatcher */
    case 0x110 ... 0x117:
    case 0x128 ... 0x12f:
    case 0x138 ... 0x13a:
    case 0x150 ... 0x177:
    case 0x17c ... 0x17f:
    case 0x1c2:
    case 0x1c4 ... 0x1c6:
    case 0x1d0 ... 0x1fe:
        gen_sse(s, b, pc_start, rex_r);
        break;
    default:
        goto illegal_op;
    }
    /* lock generation */
    if (s->prefix & PREFIX_LOCK)
        tcg_gen_helper_0_0(helper_unlock);
    return s->pc;
 illegal_op:
    if (s->prefix & PREFIX_LOCK)
        tcg_gen_helper_0_0(helper_unlock);
    /* XXX: ensure that no lock was generated */
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
    return s->pc;
}

void optimize_flags_init(void)
{
#ifndef VBOX
#if TCG_TARGET_REG_BITS == 32
    assert(sizeof(CCTable) == (1 << 3));
#else
    assert(sizeof(CCTable) == (1 << 4));
#endif
#endif
    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
    cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
                                   TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
    cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
    cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
    cpu_cc_tmp = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_tmp), "cc_tmp");

    /* register helpers */

#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
#include "helper.h"
}
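/*
 * The DEF_HELPER/#include pair above is an X-macro: helper.h declares
 * each helper through DEF_HELPER, so redefining the macro and
 * re-including the header expands to one tcg_register_helper() call
 * per helper.  A minimal sketch of the same pattern (file and function
 * names here are illustrative only, not part of this code base):
 *
 *     // helpers.def:   DEF_HELPER(void, helper_foo, (void))
 *     // user:
 *     #define DEF_HELPER(ret, name, params) register_fn(name, #name);
 *     #include "helpers.def"
 */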

/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction. */
#ifndef VBOX
static inline void gen_intermediate_code_internal(CPUState *env,
#else /* VBOX */
DECLINLINE(void) gen_intermediate_code_internal(CPUState *env,
#endif /* VBOX */
                                                  TranslationBlock *tb,
                                                  int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int j, lj, cflags;
    uint64_t flags;
    target_ulong pc_start;
    target_ulong cs_base;
    int num_insns;
    int max_insns;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
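    /*
     * mem_index (set above) is later added to an OT_* size constant to
     * pick a load/store micro-op, so it moves in strides of 4 (one
     * slot per operand size): 0 selects the raw accessors, 1 * 4 the
     * kernel-mode and 2 * 4 the user-mode softmmu accessors.  (This
     * reading is inferred from uses such as
     * gen_op_ld_T0_A0(OT_LONG + s->mem_index) elsewhere in this file.)
     */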
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
    dc->cpuid_ext2_features = env->cpuid_ext2_features;
    dc->cpuid_ext3_features = env->cpuid_ext3_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    cpu_T[0] = tcg_temp_new(TCG_TYPE_TL);
    cpu_T[1] = tcg_temp_new(TCG_TYPE_TL);
    cpu_A0 = tcg_temp_new(TCG_TYPE_TL);
    cpu_T3 = tcg_temp_new(TCG_TYPE_TL);

    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
    cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
    cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
    cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
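    /*
     * tb->cflags & CF_COUNT_MASK carries a requested instruction
     * budget for this TB (used by the icount machinery); when no
     * budget was requested, the cap defaults to the largest value the
     * mask can hold.
     */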

    gen_icount_start();
    for(;;) {
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        }
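        /*
         * The side tables filled in above map each micro-op index back
         * to the guest PC and cc_op of the instruction that produced
         * it; gen_pc_load() walks them to recover the precise eip
         * after a fault.
         */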
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();

        pc_ptr = disas_insn(dc, pc_ptr);
        num_insns++;
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;
        /* in single-step mode, we generate only one instruction and
           then generate an exception */
        /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if the translation gets too long, stop generation as well */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32) ||
            num_insns >= max_insns) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    /* make sure the last values are filled in */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
    }
#endif

    if (!search_pc) {
        tb->size = pc_ptr - pc_start;
        tb->icount = num_insns;
    }
}

void gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

void gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}

void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    int cc_op;
#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP) {
        int i;
        fprintf(logfile, "RESTORE:\n");
        for(i = 0; i <= pc_pos; i++) {
            if (gen_opc_instr_start[i]) {
                fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
            }
        }
        fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
                searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
                (uint32_t)tb->cs_base);
    }
#endif
    env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
    cc_op = gen_opc_cc_op[pc_pos];
    if (cc_op != CC_OP_DYNAMIC)
        env->cc_op = cc_op;
}
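/*
 * gen_pc_load() is the consumer of the search_pc side tables built by
 * gen_intermediate_code_internal(): after a fault inside a TB, the
 * generic code (cpu_restore_state()) retranslates the block with
 * search_pc set and then calls this function with the index of the
 * faulting micro-op, so eip (and, unless it was dynamic, cc_op) is
 * restored to an exact guest instruction boundary.
 */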