VirtualBox

source: vbox/trunk/src/recompiler/target-i386/translate.c@30275

Last change on this file since 30275 was 29732, checked in by vboxsync, 15 years ago

Backed out 61853; causes more invalid state exits

  • Property svn:eol-style set to native
File size: 276.5 KB
1/*
2 * i386 translation
3 *
4 * Copyright (c) 2003 Fabrice Bellard
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
20
21/*
22 * Sun LGPL Disclaimer: For the avoidance of doubt, except that if any license choice
23 * other than GPL or LGPL is available it will apply instead, Sun elects to use only
24 * the Lesser General Public License version 2.1 (LGPLv2) at this time for any software where
25 * a choice of LGPL license versions is made available with the language indicating
26 * that LGPLv2 or any later version may be used, or where a choice of which version
27 * of the LGPL is applied is otherwise unspecified.
28 */
29#include <stdarg.h>
30#include <stdlib.h>
31#include <stdio.h>
32#include <string.h>
33#ifndef VBOX
34#include <inttypes.h>
35#include <signal.h>
36#include <assert.h>
37#endif /* !VBOX */
38
39#include "cpu.h"
40#include "exec-all.h"
41#include "disas.h"
42#include "helper.h"
43#include "tcg-op.h"
44
45#define PREFIX_REPZ 0x01
46#define PREFIX_REPNZ 0x02
47#define PREFIX_LOCK 0x04
48#define PREFIX_DATA 0x08
49#define PREFIX_ADR 0x10
50
51#ifdef TARGET_X86_64
52#define X86_64_ONLY(x) x
53#ifndef VBOX
54#define X86_64_DEF(x...) x
55#else
56#define X86_64_DEF(x...) x
57#endif
58#define CODE64(s) ((s)->code64)
59#define REX_X(s) ((s)->rex_x)
60#define REX_B(s) ((s)->rex_b)
61/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
62#if 1
63#define BUGGY_64(x) NULL
64#endif
65#else
66#define X86_64_ONLY(x) NULL
67#ifndef VBOX
68#define X86_64_DEF(x...)
69#else
70#define X86_64_DEF(x)
71#endif
72#define CODE64(s) 0
73#define REX_X(s) 0
74#define REX_B(s) 0
75#endif
76
77//#define MACRO_TEST 1
78
79/* global register indexes */
80static TCGv cpu_env, cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp;
81/* local temps */
82static TCGv cpu_T[2], cpu_T3;
83/* local register indexes (only used inside old micro ops) */
84static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
85static TCGv cpu_tmp5, cpu_tmp6;
86
87#include "gen-icount.h"
88
89#ifdef TARGET_X86_64
90static int x86_64_hregs;
91#endif
92
93#ifdef VBOX
94
95/* Special/override code readers to hide patched code. */
96
97uint8_t ldub_code_raw(target_ulong pc)
98{
99 uint8_t b;
100
101 if (!remR3GetOpcode(cpu_single_env, pc, &b))
102 b = ldub_code(pc);
103 return b;
104}
105#define ldub_code(a) ldub_code_raw(a)
106
107uint16_t lduw_code_raw(target_ulong pc)
108{
109 return (ldub_code(pc+1) << 8) | ldub_code(pc);
110}
111#define lduw_code(a) lduw_code_raw(a)
112
113
114uint32_t ldl_code_raw(target_ulong pc)
115{
116 return (ldub_code(pc+3) << 24) | (ldub_code(pc+2) << 16) | (ldub_code(pc+1) << 8) | ldub_code(pc);
117}
118#define ldl_code(a) ldl_code_raw(a)
119
120#endif /* VBOX */
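/* Note: the multi-byte readers above are deliberately composed from
   single-byte ldub_code() calls, assembled in little-endian order, so the
   remR3GetOpcode() override is consulted for every byte of a wide fetch -
   a patch covering only part of an instruction is still hidden. A 16 bit
   fetch thus effectively performs:
       uint16_t w = ldub_code_raw(pc) | (ldub_code_raw(pc + 1) << 8);
 */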
121
122
123typedef struct DisasContext {
124 /* current insn context */
125 int override; /* -1 if no override */
126 int prefix;
127 int aflag, dflag;
128 target_ulong pc; /* pc = eip + cs_base */
129 int is_jmp; /* 1 means jump (stop translation), 2 means CPU
130 static state change (stop translation) */
131 /* current block context */
132 target_ulong cs_base; /* base of CS segment */
133 int pe; /* protected mode */
134 int code32; /* 32 bit code segment */
135#ifdef TARGET_X86_64
136 int lma; /* long mode active */
137 int code64; /* 64 bit code segment */
138 int rex_x, rex_b;
139#endif
140 int ss32; /* 32 bit stack segment */
141 int cc_op; /* current CC operation */
142 int addseg; /* non zero if any of DS, ES or SS has a non zero base */
143 int f_st; /* currently unused */
144 int vm86; /* vm86 mode */
145#ifdef VBOX
146 int vme; /* CR4.VME */
147 int pvi; /* CR4.PVI */
148 int record_call; /* record calls for CSAM or not? */
149#endif
150 int cpl;
151 int iopl;
152 int tf; /* TF cpu flag */
153 int singlestep_enabled; /* "hardware" single step enabled */
154 int jmp_opt; /* use direct block chaining for direct jumps */
155 int mem_index; /* select memory access functions */
156 uint64_t flags; /* all execution flags */
157 struct TranslationBlock *tb;
158 int popl_esp_hack; /* for correct popl with esp base handling */
159 int rip_offset; /* only used in x86_64, but left for simplicity */
160 int cpuid_features;
161 int cpuid_ext_features;
162 int cpuid_ext2_features;
163 int cpuid_ext3_features;
164} DisasContext;
165
166static void gen_eob(DisasContext *s);
167static void gen_jmp(DisasContext *s, target_ulong eip);
168static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
169
170#ifdef VBOX
171static void gen_check_external_event();
172#endif
173
174/* i386 arith/logic operations */
175enum {
176 OP_ADDL,
177 OP_ORL,
178 OP_ADCL,
179 OP_SBBL,
180 OP_ANDL,
181 OP_SUBL,
182 OP_XORL,
183 OP_CMPL,
184};
185
186/* i386 shift ops */
187enum {
188 OP_ROL,
189 OP_ROR,
190 OP_RCL,
191 OP_RCR,
192 OP_SHL,
193 OP_SHR,
194 OP_SHL1, /* undocumented */
195 OP_SAR = 7,
196};
197
198enum {
199 JCC_O,
200 JCC_B,
201 JCC_Z,
202 JCC_BE,
203 JCC_S,
204 JCC_P,
205 JCC_L,
206 JCC_LE,
207};
208
209/* operand size */
210enum {
211 OT_BYTE = 0,
212 OT_WORD,
213 OT_LONG,
214 OT_QUAD,
215};
216
217enum {
218 /* I386 int registers */
219 OR_EAX, /* MUST be even numbered */
220 OR_ECX,
221 OR_EDX,
222 OR_EBX,
223 OR_ESP,
224 OR_EBP,
225 OR_ESI,
226 OR_EDI,
227
228 OR_TMP0 = 16, /* temporary operand register */
229 OR_TMP1,
230 OR_A0, /* temporary register used when doing address evaluation */
231};
232
233#ifndef VBOX
234static inline void gen_op_movl_T0_0(void)
235#else /* VBOX */
236DECLINLINE(void) gen_op_movl_T0_0(void)
237#endif /* VBOX */
238{
239 tcg_gen_movi_tl(cpu_T[0], 0);
240}
241
242#ifndef VBOX
243static inline void gen_op_movl_T0_im(int32_t val)
244#else /* VBOX */
245DECLINLINE(void) gen_op_movl_T0_im(int32_t val)
246#endif /* VBOX */
247{
248 tcg_gen_movi_tl(cpu_T[0], val);
249}
250
251#ifndef VBOX
252static inline void gen_op_movl_T0_imu(uint32_t val)
253#else /* VBOX */
254DECLINLINE(void) gen_op_movl_T0_imu(uint32_t val)
255#endif /* VBOX */
256{
257 tcg_gen_movi_tl(cpu_T[0], val);
258}
259
260#ifndef VBOX
261static inline void gen_op_movl_T1_im(int32_t val)
262#else /* VBOX */
263DECLINLINE(void) gen_op_movl_T1_im(int32_t val)
264#endif /* VBOX */
265{
266 tcg_gen_movi_tl(cpu_T[1], val);
267}
268
269#ifndef VBOX
270static inline void gen_op_movl_T1_imu(uint32_t val)
271#else /* VBOX */
272DECLINLINE(void) gen_op_movl_T1_imu(uint32_t val)
273#endif /* VBOX */
274{
275 tcg_gen_movi_tl(cpu_T[1], val);
276}
277
278#ifndef VBOX
279static inline void gen_op_movl_A0_im(uint32_t val)
280#else /* VBOX */
281DECLINLINE(void) gen_op_movl_A0_im(uint32_t val)
282#endif /* VBOX */
283{
284 tcg_gen_movi_tl(cpu_A0, val);
285}
286
287#ifdef TARGET_X86_64
288#ifndef VBOX
289static inline void gen_op_movq_A0_im(int64_t val)
290#else /* VBOX */
291DECLINLINE(void) gen_op_movq_A0_im(int64_t val)
292#endif /* VBOX */
293{
294 tcg_gen_movi_tl(cpu_A0, val);
295}
296#endif
297
298#ifndef VBOX
299static inline void gen_movtl_T0_im(target_ulong val)
300#else /* VBOX */
301DECLINLINE(void) gen_movtl_T0_im(target_ulong val)
302#endif /* VBOX */
303{
304 tcg_gen_movi_tl(cpu_T[0], val);
305}
306
307#ifndef VBOX
308static inline void gen_movtl_T1_im(target_ulong val)
309#else /* VBOX */
310DECLINLINE(void) gen_movtl_T1_im(target_ulong val)
311#endif /* VBOX */
312{
313 tcg_gen_movi_tl(cpu_T[1], val);
314}
315
316#ifndef VBOX
317static inline void gen_op_andl_T0_ffff(void)
318#else /* VBOX */
319DECLINLINE(void) gen_op_andl_T0_ffff(void)
320#endif /* VBOX */
321{
322 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
323}
324
325#ifndef VBOX
326static inline void gen_op_andl_T0_im(uint32_t val)
327#else /* VBOX */
328DECLINLINE(void) gen_op_andl_T0_im(uint32_t val)
329#endif /* VBOX */
330{
331 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
332}
333
334#ifndef VBOX
335static inline void gen_op_movl_T0_T1(void)
336#else /* VBOX */
337DECLINLINE(void) gen_op_movl_T0_T1(void)
338#endif /* VBOX */
339{
340 tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
341}
342
343#ifndef VBOX
344static inline void gen_op_andl_A0_ffff(void)
345#else /* VBOX */
346DECLINLINE(void) gen_op_andl_A0_ffff(void)
347#endif /* VBOX */
348{
349 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
350}
351
352#ifdef TARGET_X86_64
353
354#define NB_OP_SIZES 4
355
356#else /* !TARGET_X86_64 */
357
358#define NB_OP_SIZES 3
359
360#endif /* !TARGET_X86_64 */
361
362#if defined(WORDS_BIGENDIAN)
363#define REG_B_OFFSET (sizeof(target_ulong) - 1)
364#define REG_H_OFFSET (sizeof(target_ulong) - 2)
365#define REG_W_OFFSET (sizeof(target_ulong) - 2)
366#define REG_L_OFFSET (sizeof(target_ulong) - 4)
367#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
368#else
369#define REG_B_OFFSET 0
370#define REG_H_OFFSET 1
371#define REG_W_OFFSET 0
372#define REG_L_OFFSET 0
373#define REG_LH_OFFSET 4
374#endif
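/* These offsets locate the 8/16/32 bit sub-registers inside a target_ulong
   register slot. On a little-endian host the low byte is at offset 0, so
   REG_B_OFFSET selects e.g. AL and REG_H_OFFSET selects AH within the EAX
   slot; on a big-endian host the same sub-register sits at the top of the
   slot, hence the sizeof(target_ulong) - N forms above. */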
375
376#ifndef VBOX
377static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
378#else /* VBOX */
379DECLINLINE(void) gen_op_mov_reg_v(int ot, int reg, TCGv t0)
380#endif /* VBOX */
381{
382 switch(ot) {
383 case OT_BYTE:
384 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
385 tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
386 } else {
387 tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
388 }
389 break;
390 case OT_WORD:
391 tcg_gen_st16_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
392 break;
393#ifdef TARGET_X86_64
394 case OT_LONG:
395 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
396 /* high part of register set to zero */
397 tcg_gen_movi_tl(cpu_tmp0, 0);
398 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
399 break;
400 default:
401 case OT_QUAD:
402 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
403 break;
404#else
405 default:
406 case OT_LONG:
407 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
408 break;
409#endif
410 }
411}
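/* For OT_BYTE the 'reg < 4' test separates AL/CL/DL/BL from the legacy
   high-byte registers AH/CH/DH/BH (encodings 4-7), which occupy bits 8-15
   of the EAX/ECX/EDX/EBX slots - hence regs[reg - 4] + REG_H_OFFSET.
   On x86-64 the presence of any REX prefix (x86_64_hregs) or reg >= 8
   selects the uniform byte registers (SPL/BPL/SIL/DIL, R8B-R15B) instead,
   which go through the normal low-byte path. */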
412
413#ifndef VBOX
414static inline void gen_op_mov_reg_T0(int ot, int reg)
415#else /* VBOX */
416DECLINLINE(void) gen_op_mov_reg_T0(int ot, int reg)
417#endif /* VBOX */
418{
419 gen_op_mov_reg_v(ot, reg, cpu_T[0]);
420}
421
422#ifndef VBOX
423static inline void gen_op_mov_reg_T1(int ot, int reg)
424#else /* VBOX */
425DECLINLINE(void) gen_op_mov_reg_T1(int ot, int reg)
426#endif /* VBOX */
427{
428 gen_op_mov_reg_v(ot, reg, cpu_T[1]);
429}
430
431#ifndef VBOX
432static inline void gen_op_mov_reg_A0(int size, int reg)
433#else /* VBOX */
434DECLINLINE(void) gen_op_mov_reg_A0(int size, int reg)
435#endif /* VBOX */
436{
437 switch(size) {
438 case 0:
439 tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
440 break;
441#ifdef TARGET_X86_64
442 case 1:
443 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
444 /* high part of register set to zero */
445 tcg_gen_movi_tl(cpu_tmp0, 0);
446 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
447 break;
448 default:
449 case 2:
450 tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
451 break;
452#else
453 default:
454 case 1:
455 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
456 break;
457#endif
458 }
459}
460
461#ifndef VBOX
462static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg)
463#else /* VBOX */
464DECLINLINE(void) gen_op_mov_v_reg(int ot, TCGv t0, int reg)
465#endif /* VBOX */
466{
467 switch(ot) {
468 case OT_BYTE:
469 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
470#ifndef VBOX
471 goto std_case;
472#else
473 tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
474#endif
475 } else {
476 tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
477 }
478 break;
479 default:
480#ifndef VBOX
481 std_case:
482#endif
483 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
484 break;
485 }
486}
487
488#ifndef VBOX
489static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
490#else /* VBOX */
491DECLINLINE(void) gen_op_mov_TN_reg(int ot, int t_index, int reg)
492#endif /* VBOX */
493{
494 gen_op_mov_v_reg(ot, cpu_T[t_index], reg);
495}
496
497#ifndef VBOX
498static inline void gen_op_movl_A0_reg(int reg)
499#else /* VBOX */
500DECLINLINE(void) gen_op_movl_A0_reg(int reg)
501#endif /* VBOX */
502{
503 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
504}
505
506#ifndef VBOX
507static inline void gen_op_addl_A0_im(int32_t val)
508#else /* VBOX */
509DECLINLINE(void) gen_op_addl_A0_im(int32_t val)
510#endif /* VBOX */
511{
512 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
513#ifdef TARGET_X86_64
514 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
515#endif
516}
517
518#ifdef TARGET_X86_64
519#ifndef VBOX
520static inline void gen_op_addq_A0_im(int64_t val)
521#else /* VBOX */
522DECLINLINE(void) gen_op_addq_A0_im(int64_t val)
523#endif /* VBOX */
524{
525 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
526}
527#endif
528
529static void gen_add_A0_im(DisasContext *s, int val)
530{
531#ifdef TARGET_X86_64
532 if (CODE64(s))
533 gen_op_addq_A0_im(val);
534 else
535#endif
536 gen_op_addl_A0_im(val);
537}
538
539#ifndef VBOX
540static inline void gen_op_addl_T0_T1(void)
541#else /* VBOX */
542DECLINLINE(void) gen_op_addl_T0_T1(void)
543#endif /* VBOX */
544{
545 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
546}
547
548#ifndef VBOX
549static inline void gen_op_jmp_T0(void)
550#else /* VBOX */
551DECLINLINE(void) gen_op_jmp_T0(void)
552#endif /* VBOX */
553{
554 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
555}
556
557#ifndef VBOX
558static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
559#else /* VBOX */
560DECLINLINE(void) gen_op_add_reg_im(int size, int reg, int32_t val)
561#endif /* VBOX */
562{
563 switch(size) {
564 case 0:
565 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
566 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
567 tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
568 break;
569 case 1:
570 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
571 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
572#ifdef TARGET_X86_64
573 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
574#endif
575 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
576 break;
577#ifdef TARGET_X86_64
578 case 2:
579 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
580 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
581 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
582 break;
583#endif
584 }
585}
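/* In gen_op_add_reg_im() above and gen_op_add_reg_T0() below, 'size'
   encodes the address width: 0 = 16 bit (only the low word is stored
   back), 1 = 32 bit, 2 = 64 bit. On TARGET_X86_64 builds the 32 bit case
   is masked with 0xffffffff so that address arithmetic wraps at 4G exactly
   as on a real 32 bit CPU. */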
586
587#ifndef VBOX
588static inline void gen_op_add_reg_T0(int size, int reg)
589#else /* VBOX */
590DECLINLINE(void) gen_op_add_reg_T0(int size, int reg)
591#endif /* VBOX */
592{
593 switch(size) {
594 case 0:
595 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
596 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
597 tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
598 break;
599 case 1:
600 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
601 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
602#ifdef TARGET_X86_64
603 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
604#endif
605 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
606 break;
607#ifdef TARGET_X86_64
608 case 2:
609 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
610 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
611 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
612 break;
613#endif
614 }
615}
616
617#ifndef VBOX
618static inline void gen_op_set_cc_op(int32_t val)
619#else /* VBOX */
620DECLINLINE(void) gen_op_set_cc_op(int32_t val)
621#endif /* VBOX */
622{
623 tcg_gen_movi_i32(cpu_cc_op, val);
624}
625
626#ifndef VBOX
627static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
628#else /* VBOX */
629DECLINLINE(void) gen_op_addl_A0_reg_sN(int shift, int reg)
630#endif /* VBOX */
631{
632 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
633 if (shift != 0)
634 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
635 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
636#ifdef TARGET_X86_64
637 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
638#endif
639}
640#ifdef VBOX
641DECLINLINE(void) gen_op_seg_check(int reg, bool keepA0)
642{
643 /* It seems segments don't get out of sync - if they in fact do, enable the code below. */
644#ifdef FORCE_SEGMENT_SYNC
645#if 1
646 TCGv t0;
647
648 /* Considering the poor quality of the TCG optimizer, it is better to call the helper directly */
649 t0 = tcg_temp_local_new(TCG_TYPE_TL);
650 tcg_gen_movi_tl(t0, reg);
651 tcg_gen_helper_0_1(helper_sync_seg, t0);
652 tcg_temp_free(t0);
653#else
654 /* Our segments could be outdated, so check the newselector field to see whether an update is really needed */
655 int skip_label;
656 TCGv t0, a0;
657
658 /* For segments other than GS this check is a waste of time; also, TCG is unable to cope with this code
659 for data/stack segments, as it expects cpu_T[0] to be alive */
660 if (reg != R_GS)
661 return;
662
663 if (keepA0)
664 {
665 /* we need to store old cpu_A0 */
666 a0 = tcg_temp_local_new(TCG_TYPE_TL);
667 tcg_gen_mov_tl(a0, cpu_A0);
668 }
669
670 skip_label = gen_new_label();
671 t0 = tcg_temp_local_new(TCG_TYPE_TL);
672
673 tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, segs[reg].newselector) + REG_L_OFFSET);
674 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, skip_label);
675 tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, eflags) + REG_L_OFFSET);
676 tcg_gen_andi_tl(t0, t0, VM_MASK);
677 tcg_gen_brcondi_i32(TCG_COND_NE, t0, 0, skip_label);
678 tcg_gen_movi_tl(t0, reg);
679
680 tcg_gen_helper_0_1(helper_sync_seg, t0);
681
682 tcg_temp_free(t0);
683
684 gen_set_label(skip_label);
685 if (keepA0)
686 {
687 tcg_gen_mov_tl(cpu_A0, a0);
688 tcg_temp_free(a0);
689 }
690#endif /* 0 */
691#endif /* FORCE_SEGMENT_SYNC */
692}
693#endif
694
695#ifndef VBOX
696static inline void gen_op_movl_A0_seg(int reg)
697#else /* VBOX */
698DECLINLINE(void) gen_op_movl_A0_seg(int reg)
699#endif /* VBOX */
700{
701#ifdef VBOX
702 gen_op_seg_check(reg, false);
703#endif
704 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
705}
706
707#ifndef VBOX
708static inline void gen_op_addl_A0_seg(int reg)
709#else /* VBOX */
710DECLINLINE(void) gen_op_addl_A0_seg(int reg)
711#endif /* VBOX */
712{
713#ifdef VBOX
714 gen_op_seg_check(reg, true);
715#endif
716 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
717 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
718#ifdef TARGET_X86_64
719 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
720#endif
721}
722
723#ifdef TARGET_X86_64
724#ifndef VBOX
725static inline void gen_op_movq_A0_seg(int reg)
726#else /* VBOX */
727DECLINLINE(void) gen_op_movq_A0_seg(int reg)
728#endif /* VBOX */
729{
730#ifdef VBOX
731 gen_op_seg_check(reg, false);
732#endif
733 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
734}
735
736#ifndef VBOX
737static inline void gen_op_addq_A0_seg(int reg)
738#else /* VBOX */
739DECLINLINE(void) gen_op_addq_A0_seg(int reg)
740#endif /* VBOX */
741{
742#ifdef VBOX
743 gen_op_seg_check(reg, true);
744#endif
745 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
746 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
747}
748
749#ifndef VBOX
750static inline void gen_op_movq_A0_reg(int reg)
751#else /* VBOX */
752DECLINLINE(void) gen_op_movq_A0_reg(int reg)
753#endif /* VBOX */
754{
755 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
756}
757
758#ifndef VBOX
759static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
760#else /* VBOX */
761DECLINLINE(void) gen_op_addq_A0_reg_sN(int shift, int reg)
762#endif /* VBOX */
763{
764 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
765 if (shift != 0)
766 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
767 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
768}
769#endif
770
771#ifndef VBOX
772static inline void gen_op_lds_T0_A0(int idx)
773#else /* VBOX */
774DECLINLINE(void) gen_op_lds_T0_A0(int idx)
775#endif /* VBOX */
776{
777 int mem_index = (idx >> 2) - 1;
778 switch(idx & 3) {
779 case 0:
780 tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
781 break;
782 case 1:
783 tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
784 break;
785 default:
786 case 2:
787 tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
788 break;
789 }
790}
791
792#ifndef VBOX
793static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0)
794#else /* VBOX */
795DECLINLINE(void) gen_op_ld_v(int idx, TCGv t0, TCGv a0)
796#endif /* VBOX */
797{
798 int mem_index = (idx >> 2) - 1;
799 switch(idx & 3) {
800 case 0:
801 tcg_gen_qemu_ld8u(t0, a0, mem_index);
802 break;
803 case 1:
804 tcg_gen_qemu_ld16u(t0, a0, mem_index);
805 break;
806 case 2:
807 tcg_gen_qemu_ld32u(t0, a0, mem_index);
808 break;
809 default:
810 case 3:
811 tcg_gen_qemu_ld64(t0, a0, mem_index);
812 break;
813 }
814}
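/* The 'idx' parameter of these load/store helpers packs two values: the
   low two bits are the operand size (OT_BYTE..OT_QUAD) and the upper bits
   carry s->mem_index, which is kept pre-shifted by 2 - hence the
   '(idx >> 2) - 1' decode above. Callers therefore pass the sum, e.g. a
   32 bit load from [A0] is emitted as:
       gen_op_ld_T0_A0(OT_LONG + s->mem_index);
 */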
815
816/* XXX: always use ldu or lds */
817#ifndef VBOX
818static inline void gen_op_ld_T0_A0(int idx)
819#else /* VBOX */
820DECLINLINE(void) gen_op_ld_T0_A0(int idx)
821#endif /* VBOX */
822{
823 gen_op_ld_v(idx, cpu_T[0], cpu_A0);
824}
825
826#ifndef VBOX
827static inline void gen_op_ldu_T0_A0(int idx)
828#else /* VBOX */
829DECLINLINE(void) gen_op_ldu_T0_A0(int idx)
830#endif /* VBOX */
831{
832 gen_op_ld_v(idx, cpu_T[0], cpu_A0);
833}
834
835#ifndef VBOX
836static inline void gen_op_ld_T1_A0(int idx)
837#else /* VBOX */
838DECLINLINE(void) gen_op_ld_T1_A0(int idx)
839#endif /* VBOX */
840{
841 gen_op_ld_v(idx, cpu_T[1], cpu_A0);
842}
843
844#ifndef VBOX
845static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0)
846#else /* VBOX */
847DECLINLINE(void) gen_op_st_v(int idx, TCGv t0, TCGv a0)
848#endif /* VBOX */
849{
850 int mem_index = (idx >> 2) - 1;
851 switch(idx & 3) {
852 case 0:
853 tcg_gen_qemu_st8(t0, a0, mem_index);
854 break;
855 case 1:
856 tcg_gen_qemu_st16(t0, a0, mem_index);
857 break;
858 case 2:
859 tcg_gen_qemu_st32(t0, a0, mem_index);
860 break;
861 default:
862 case 3:
863 tcg_gen_qemu_st64(t0, a0, mem_index);
864 break;
865 }
866}
867
868#ifndef VBOX
869static inline void gen_op_st_T0_A0(int idx)
870#else /* VBOX */
871DECLINLINE(void) gen_op_st_T0_A0(int idx)
872#endif /* VBOX */
873{
874 gen_op_st_v(idx, cpu_T[0], cpu_A0);
875}
876
877#ifndef VBOX
878static inline void gen_op_st_T1_A0(int idx)
879#else /* VBOX */
880DECLINLINE(void) gen_op_st_T1_A0(int idx)
881#endif /* VBOX */
882{
883 gen_op_st_v(idx, cpu_T[1], cpu_A0);
884}
885
886#ifdef VBOX
887static void gen_check_external_event()
888{
889#if 1
890 /** @todo: once TCG codegen improves, we may want to use the version
891 from the #else branch below */
892 tcg_gen_helper_0_0(helper_check_external_event);
893#else
894 int skip_label;
895 TCGv t0;
896
897 skip_label = gen_new_label();
898 t0 = tcg_temp_local_new(TCG_TYPE_TL);
899 /* t0 = cpu_tmp0; */
900
901 tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, interrupt_request));
902 /* Keep in sync with helper_check_external_event() */
903 tcg_gen_andi_tl(t0, t0,
904 CPU_INTERRUPT_EXTERNAL_EXIT
905 | CPU_INTERRUPT_EXTERNAL_TIMER
906 | CPU_INTERRUPT_EXTERNAL_DMA
907 | CPU_INTERRUPT_EXTERNAL_HARD);
908 /** @todo: predict branch as taken */
909 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, skip_label);
910 tcg_temp_free(t0);
911
912 tcg_gen_helper_0_0(helper_check_external_event);
913
914 gen_set_label(skip_label);
915#endif
916}
917
918#if 0 /* unused code? */
919static void gen_check_external_event2()
920{
921 tcg_gen_helper_0_0(helper_check_external_event);
922}
923#endif
924
925#endif
926
927#ifndef VBOX
928static inline void gen_jmp_im(target_ulong pc)
929#else /* VBOX */
930DECLINLINE(void) gen_jmp_im(target_ulong pc)
931#endif /* VBOX */
932{
933 tcg_gen_movi_tl(cpu_tmp0, pc);
934 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
935}
936
937#ifdef VBOX
938DECLINLINE(void) gen_update_eip(target_ulong pc)
939{
940 gen_jmp_im(pc);
941#ifdef VBOX_DUMP_STATE
942 tcg_gen_helper_0_0(helper_dump_state);
943#endif
944}
945
946#endif
947
948#ifndef VBOX
949static inline void gen_string_movl_A0_ESI(DisasContext *s)
950#else /* VBOX */
951DECLINLINE(void) gen_string_movl_A0_ESI(DisasContext *s)
952#endif /* VBOX */
953{
954 int override;
955
956 override = s->override;
957#ifdef TARGET_X86_64
958 if (s->aflag == 2) {
959 if (override >= 0) {
960 gen_op_movq_A0_seg(override);
961 gen_op_addq_A0_reg_sN(0, R_ESI);
962 } else {
963 gen_op_movq_A0_reg(R_ESI);
964 }
965 } else
966#endif
967 if (s->aflag) {
968 /* 32 bit address */
969 if (s->addseg && override < 0)
970 override = R_DS;
971 if (override >= 0) {
972 gen_op_movl_A0_seg(override);
973 gen_op_addl_A0_reg_sN(0, R_ESI);
974 } else {
975 gen_op_movl_A0_reg(R_ESI);
976 }
977 } else {
978 /* 16 bit address, always override */
979 if (override < 0)
980 override = R_DS;
981 gen_op_movl_A0_reg(R_ESI);
982 gen_op_andl_A0_ffff();
983 gen_op_addl_A0_seg(override);
984 }
985}
986
987#ifndef VBOX
988static inline void gen_string_movl_A0_EDI(DisasContext *s)
989#else /* VBOX */
990DECLINLINE(void) gen_string_movl_A0_EDI(DisasContext *s)
991#endif /* VBOX */
992{
993#ifdef TARGET_X86_64
994 if (s->aflag == 2) {
995 gen_op_movq_A0_reg(R_EDI);
996 } else
997#endif
998 if (s->aflag) {
999 if (s->addseg) {
1000 gen_op_movl_A0_seg(R_ES);
1001 gen_op_addl_A0_reg_sN(0, R_EDI);
1002 } else {
1003 gen_op_movl_A0_reg(R_EDI);
1004 }
1005 } else {
1006 gen_op_movl_A0_reg(R_EDI);
1007 gen_op_andl_A0_ffff();
1008 gen_op_addl_A0_seg(R_ES);
1009 }
1010}
1011
1012#ifndef VBOX
1013static inline void gen_op_movl_T0_Dshift(int ot)
1014#else /* VBOX */
1015DECLINLINE(void) gen_op_movl_T0_Dshift(int ot)
1016#endif /* VBOX */
1017{
1018 tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
1019 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
1020}
1021
1022static void gen_extu(int ot, TCGv reg)
1023{
1024 switch(ot) {
1025 case OT_BYTE:
1026 tcg_gen_ext8u_tl(reg, reg);
1027 break;
1028 case OT_WORD:
1029 tcg_gen_ext16u_tl(reg, reg);
1030 break;
1031 case OT_LONG:
1032 tcg_gen_ext32u_tl(reg, reg);
1033 break;
1034 default:
1035 break;
1036 }
1037}
1038
1039static void gen_exts(int ot, TCGv reg)
1040{
1041 switch(ot) {
1042 case OT_BYTE:
1043 tcg_gen_ext8s_tl(reg, reg);
1044 break;
1045 case OT_WORD:
1046 tcg_gen_ext16s_tl(reg, reg);
1047 break;
1048 case OT_LONG:
1049 tcg_gen_ext32s_tl(reg, reg);
1050 break;
1051 default:
1052 break;
1053 }
1054}
1055
1056#ifndef VBOX
1057static inline void gen_op_jnz_ecx(int size, int label1)
1058#else /* VBOX */
1059DECLINLINE(void) gen_op_jnz_ecx(int size, int label1)
1060#endif /* VBOX */
1061{
1062 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
1063 gen_extu(size + 1, cpu_tmp0);
1064 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
1065}
1066
1067#ifndef VBOX
1068static inline void gen_op_jz_ecx(int size, int label1)
1069#else /* VBOX */
1070DECLINLINE(void) gen_op_jz_ecx(int size, int label1)
1071#endif /* VBOX */
1072{
1073 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
1074 gen_extu(size + 1, cpu_tmp0);
1075 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
1076}
1077
1078static void *helper_in_func[3] = {
1079 helper_inb,
1080 helper_inw,
1081 helper_inl,
1082};
1083
1084static void *helper_out_func[3] = {
1085 helper_outb,
1086 helper_outw,
1087 helper_outl,
1088};
1089
1090static void *gen_check_io_func[3] = {
1091 helper_check_iob,
1092 helper_check_iow,
1093 helper_check_iol,
1094};
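/* The three helper tables above are indexed by the operand size 'ot'
   (0 = byte, 1 = word, 2 = long), so the generated code always invokes the
   I/O or permission-check helper of the matching width; e.g.
   helper_in_func[OT_WORD] is helper_inw. */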
1095
1096static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
1097 uint32_t svm_flags)
1098{
1099 int state_saved;
1100 target_ulong next_eip;
1101
1102 state_saved = 0;
1103 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
1104 if (s->cc_op != CC_OP_DYNAMIC)
1105 gen_op_set_cc_op(s->cc_op);
1106 gen_jmp_im(cur_eip);
1107 state_saved = 1;
1108 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
1109 tcg_gen_helper_0_1(gen_check_io_func[ot],
1110 cpu_tmp2_i32);
1111 }
1112 if(s->flags & HF_SVMI_MASK) {
1113 if (!state_saved) {
1114 if (s->cc_op != CC_OP_DYNAMIC)
1115 gen_op_set_cc_op(s->cc_op);
1116 gen_jmp_im(cur_eip);
1117 state_saved = 1;
1118 }
1119 svm_flags |= (1 << (4 + ot));
1120 next_eip = s->pc - s->cs_base;
1121 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
1122 tcg_gen_helper_0_3(helper_svm_check_io,
1123 cpu_tmp2_i32,
1124 tcg_const_i32(svm_flags),
1125 tcg_const_i32(next_eip - cur_eip));
1126 }
1127}
1128
1129#ifndef VBOX
1130static inline void gen_movs(DisasContext *s, int ot)
1131#else /* VBOX */
1132DECLINLINE(void) gen_movs(DisasContext *s, int ot)
1133#endif /* VBOX */
1134{
1135 gen_string_movl_A0_ESI(s);
1136 gen_op_ld_T0_A0(ot + s->mem_index);
1137 gen_string_movl_A0_EDI(s);
1138 gen_op_st_T0_A0(ot + s->mem_index);
1139 gen_op_movl_T0_Dshift(ot);
1140 gen_op_add_reg_T0(s->aflag, R_ESI);
1141 gen_op_add_reg_T0(s->aflag, R_EDI);
1142}
1143
1144#ifndef VBOX
1145static inline void gen_update_cc_op(DisasContext *s)
1146#else /* VBOX */
1147DECLINLINE(void) gen_update_cc_op(DisasContext *s)
1148#endif /* VBOX */
1149{
1150 if (s->cc_op != CC_OP_DYNAMIC) {
1151 gen_op_set_cc_op(s->cc_op);
1152 s->cc_op = CC_OP_DYNAMIC;
1153 }
1154}
1155
1156static void gen_op_update1_cc(void)
1157{
1158 tcg_gen_discard_tl(cpu_cc_src);
1159 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1160}
1161
1162static void gen_op_update2_cc(void)
1163{
1164 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1165 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1166}
1167
1168#ifndef VBOX
1169static inline void gen_op_cmpl_T0_T1_cc(void)
1170#else /* VBOX */
1171DECLINLINE(void) gen_op_cmpl_T0_T1_cc(void)
1172#endif /* VBOX */
1173{
1174 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1175 tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
1176}
1177
1178#ifndef VBOX
1179static inline void gen_op_testl_T0_T1_cc(void)
1180#else /* VBOX */
1181DECLINLINE(void) gen_op_testl_T0_T1_cc(void)
1182#endif /* VBOX */
1183{
1184 tcg_gen_discard_tl(cpu_cc_src);
1185 tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
1186}
1187
1188static void gen_op_update_neg_cc(void)
1189{
1190 tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
1191 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1192}
1193
1194/* compute eflags.C to reg */
1195static void gen_compute_eflags_c(TCGv reg)
1196{
1197#if TCG_TARGET_REG_BITS == 32
1198 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
1199 tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
1200 (long)cc_table + offsetof(CCTable, compute_c));
1201 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
1202 tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
1203 1, &cpu_tmp2_i32, 0, NULL);
1204#else
1205 tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
1206 tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
1207 tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
1208 (long)cc_table + offsetof(CCTable, compute_c));
1209 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
1210 tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
1211 1, &cpu_tmp2_i32, 0, NULL);
1212#endif
1213 tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
1214}
1215
1216/* compute all eflags to reg */
1217static void gen_compute_eflags(TCGv reg)
1218{
1219#if TCG_TARGET_REG_BITS == 32
1220 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
1221 tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
1222 (long)cc_table + offsetof(CCTable, compute_all));
1223 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
1224 tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
1225 1, &cpu_tmp2_i32, 0, NULL);
1226#else
1227 tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
1228 tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
1229 tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
1230 (long)cc_table + offsetof(CCTable, compute_all));
1231 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
1232 tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
1233 1, &cpu_tmp2_i32, 0, NULL);
1234#endif
1235 tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
1236}
1237
1238#ifndef VBOX
1239static inline void gen_setcc_slow_T0(DisasContext *s, int jcc_op)
1240#else /* VBOX */
1241DECLINLINE(void) gen_setcc_slow_T0(DisasContext *s, int jcc_op)
1242#endif /* VBOX */
1243{
1244 if (s->cc_op != CC_OP_DYNAMIC)
1245 gen_op_set_cc_op(s->cc_op);
1246 switch(jcc_op) {
1247 case JCC_O:
1248 gen_compute_eflags(cpu_T[0]);
1249 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 11);
1250 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1251 break;
1252 case JCC_B:
1253 gen_compute_eflags_c(cpu_T[0]);
1254 break;
1255 case JCC_Z:
1256 gen_compute_eflags(cpu_T[0]);
1257 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 6);
1258 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1259 break;
1260 case JCC_BE:
1261 gen_compute_eflags(cpu_tmp0);
1262 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 6);
1263 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1264 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1265 break;
1266 case JCC_S:
1267 gen_compute_eflags(cpu_T[0]);
1268 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 7);
1269 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1270 break;
1271 case JCC_P:
1272 gen_compute_eflags(cpu_T[0]);
1273 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 2);
1274 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1275 break;
1276 case JCC_L:
1277 gen_compute_eflags(cpu_tmp0);
1278 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
1279 tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 7); /* CC_S */
1280 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1281 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1282 break;
1283 default:
1284 case JCC_LE:
1285 gen_compute_eflags(cpu_tmp0);
1286 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
1287 tcg_gen_shri_tl(cpu_tmp4, cpu_tmp0, 7); /* CC_S */
1288 tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 6); /* CC_Z */
1289 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1290 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1291 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1292 break;
1293 }
1294}
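/* The shift counts above correspond to the EFLAGS bit positions:
   CF = bit 0, PF = bit 2, ZF = bit 6, SF = bit 7, OF = bit 11. For JCC_L,
   e.g., the code extracts OF (>> 11) and SF (>> 7) and XORs them, which is
   exactly the x86 'less' condition SF != OF. */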
1295
1296/* return true if setcc_slow is not needed (WARNING: must be kept in
1297 sync with gen_jcc1) */
1298static int is_fast_jcc_case(DisasContext *s, int b)
1299{
1300 int jcc_op;
1301 jcc_op = (b >> 1) & 7;
1302 switch(s->cc_op) {
1303 /* we optimize the cmp/jcc case */
1304 case CC_OP_SUBB:
1305 case CC_OP_SUBW:
1306 case CC_OP_SUBL:
1307 case CC_OP_SUBQ:
1308 if (jcc_op == JCC_O || jcc_op == JCC_P)
1309 goto slow_jcc;
1310 break;
1311
1312 /* some jumps are easy to compute */
1313 case CC_OP_ADDB:
1314 case CC_OP_ADDW:
1315 case CC_OP_ADDL:
1316 case CC_OP_ADDQ:
1317
1318 case CC_OP_LOGICB:
1319 case CC_OP_LOGICW:
1320 case CC_OP_LOGICL:
1321 case CC_OP_LOGICQ:
1322
1323 case CC_OP_INCB:
1324 case CC_OP_INCW:
1325 case CC_OP_INCL:
1326 case CC_OP_INCQ:
1327
1328 case CC_OP_DECB:
1329 case CC_OP_DECW:
1330 case CC_OP_DECL:
1331 case CC_OP_DECQ:
1332
1333 case CC_OP_SHLB:
1334 case CC_OP_SHLW:
1335 case CC_OP_SHLL:
1336 case CC_OP_SHLQ:
1337 if (jcc_op != JCC_Z && jcc_op != JCC_S)
1338 goto slow_jcc;
1339 break;
1340 default:
1341 slow_jcc:
1342 return 0;
1343 }
1344 return 1;
1345}
1346
1347/* generate a conditional jump to label 'l1' according to jump opcode
1348 value 'b'. In the fast case, T0 is guaranteed not to be used. */
1349#ifndef VBOX
1350static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
1351#else /* VBOX */
1352DECLINLINE(void) gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
1353#endif /* VBOX */
1354{
1355 int inv, jcc_op, size, cond;
1356 TCGv t0;
1357
1358 inv = b & 1;
1359 jcc_op = (b >> 1) & 7;
1360
1361 switch(cc_op) {
1362 /* we optimize the cmp/jcc case */
1363 case CC_OP_SUBB:
1364 case CC_OP_SUBW:
1365 case CC_OP_SUBL:
1366 case CC_OP_SUBQ:
1367
1368 size = cc_op - CC_OP_SUBB;
1369 switch(jcc_op) {
1370 case JCC_Z:
1371 fast_jcc_z:
1372 switch(size) {
1373 case 0:
1374 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xff);
1375 t0 = cpu_tmp0;
1376 break;
1377 case 1:
1378 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffff);
1379 t0 = cpu_tmp0;
1380 break;
1381#ifdef TARGET_X86_64
1382 case 2:
1383 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffffffff);
1384 t0 = cpu_tmp0;
1385 break;
1386#endif
1387 default:
1388 t0 = cpu_cc_dst;
1389 break;
1390 }
1391 tcg_gen_brcondi_tl(inv ? TCG_COND_NE : TCG_COND_EQ, t0, 0, l1);
1392 break;
1393 case JCC_S:
1394 fast_jcc_s:
1395 switch(size) {
1396 case 0:
1397 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80);
1398 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
1399 0, l1);
1400 break;
1401 case 1:
1402 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x8000);
1403 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
1404 0, l1);
1405 break;
1406#ifdef TARGET_X86_64
1407 case 2:
1408 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80000000);
1409 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
1410 0, l1);
1411 break;
1412#endif
1413 default:
1414 tcg_gen_brcondi_tl(inv ? TCG_COND_GE : TCG_COND_LT, cpu_cc_dst,
1415 0, l1);
1416 break;
1417 }
1418 break;
1419
1420 case JCC_B:
1421 cond = inv ? TCG_COND_GEU : TCG_COND_LTU;
1422 goto fast_jcc_b;
1423 case JCC_BE:
1424 cond = inv ? TCG_COND_GTU : TCG_COND_LEU;
1425 fast_jcc_b:
1426 tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
1427 switch(size) {
1428 case 0:
1429 t0 = cpu_tmp0;
1430 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xff);
1431 tcg_gen_andi_tl(t0, cpu_cc_src, 0xff);
1432 break;
1433 case 1:
1434 t0 = cpu_tmp0;
1435 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffff);
1436 tcg_gen_andi_tl(t0, cpu_cc_src, 0xffff);
1437 break;
1438#ifdef TARGET_X86_64
1439 case 2:
1440 t0 = cpu_tmp0;
1441 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffffffff);
1442 tcg_gen_andi_tl(t0, cpu_cc_src, 0xffffffff);
1443 break;
1444#endif
1445 default:
1446 t0 = cpu_cc_src;
1447 break;
1448 }
1449 tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
1450 break;
1451
1452 case JCC_L:
1453 cond = inv ? TCG_COND_GE : TCG_COND_LT;
1454 goto fast_jcc_l;
1455 case JCC_LE:
1456 cond = inv ? TCG_COND_GT : TCG_COND_LE;
1457 fast_jcc_l:
1458 tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
1459 switch(size) {
1460 case 0:
1461 t0 = cpu_tmp0;
1462 tcg_gen_ext8s_tl(cpu_tmp4, cpu_tmp4);
1463 tcg_gen_ext8s_tl(t0, cpu_cc_src);
1464 break;
1465 case 1:
1466 t0 = cpu_tmp0;
1467 tcg_gen_ext16s_tl(cpu_tmp4, cpu_tmp4);
1468 tcg_gen_ext16s_tl(t0, cpu_cc_src);
1469 break;
1470#ifdef TARGET_X86_64
1471 case 2:
1472 t0 = cpu_tmp0;
1473 tcg_gen_ext32s_tl(cpu_tmp4, cpu_tmp4);
1474 tcg_gen_ext32s_tl(t0, cpu_cc_src);
1475 break;
1476#endif
1477 default:
1478 t0 = cpu_cc_src;
1479 break;
1480 }
1481 tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
1482 break;
1483
1484 default:
1485 goto slow_jcc;
1486 }
1487 break;
1488
1489 /* some jumps are easy to compute */
1490 case CC_OP_ADDB:
1491 case CC_OP_ADDW:
1492 case CC_OP_ADDL:
1493 case CC_OP_ADDQ:
1494
1495 case CC_OP_ADCB:
1496 case CC_OP_ADCW:
1497 case CC_OP_ADCL:
1498 case CC_OP_ADCQ:
1499
1500 case CC_OP_SBBB:
1501 case CC_OP_SBBW:
1502 case CC_OP_SBBL:
1503 case CC_OP_SBBQ:
1504
1505 case CC_OP_LOGICB:
1506 case CC_OP_LOGICW:
1507 case CC_OP_LOGICL:
1508 case CC_OP_LOGICQ:
1509
1510 case CC_OP_INCB:
1511 case CC_OP_INCW:
1512 case CC_OP_INCL:
1513 case CC_OP_INCQ:
1514
1515 case CC_OP_DECB:
1516 case CC_OP_DECW:
1517 case CC_OP_DECL:
1518 case CC_OP_DECQ:
1519
1520 case CC_OP_SHLB:
1521 case CC_OP_SHLW:
1522 case CC_OP_SHLL:
1523 case CC_OP_SHLQ:
1524
1525 case CC_OP_SARB:
1526 case CC_OP_SARW:
1527 case CC_OP_SARL:
1528 case CC_OP_SARQ:
1529 switch(jcc_op) {
1530 case JCC_Z:
1531 size = (cc_op - CC_OP_ADDB) & 3;
1532 goto fast_jcc_z;
1533 case JCC_S:
1534 size = (cc_op - CC_OP_ADDB) & 3;
1535 goto fast_jcc_s;
1536 default:
1537 goto slow_jcc;
1538 }
1539 break;
1540 default:
1541 slow_jcc:
1542 gen_setcc_slow_T0(s, jcc_op);
1543 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE,
1544 cpu_T[0], 0, l1);
1545 break;
1546 }
1547}
1548
1549/* XXX: does not work with gdbstub "ice" single step - not a
1550 serious problem */
1551static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
1552{
1553 int l1, l2;
1554
1555 l1 = gen_new_label();
1556 l2 = gen_new_label();
1557 gen_op_jnz_ecx(s->aflag, l1);
1558 gen_set_label(l2);
1559 gen_jmp_tb(s, next_eip, 1);
1560 gen_set_label(l1);
1561 return l2;
1562}
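/* The returned label l2 sits just before the jump to the next instruction,
   so the REPZ macros below can branch to it once ECX reaches zero (or, for
   scas/cmps, once the ZF test terminates the loop). */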
1563
1564#ifndef VBOX
1565static inline void gen_stos(DisasContext *s, int ot)
1566#else /* VBOX */
1567DECLINLINE(void) gen_stos(DisasContext *s, int ot)
1568#endif /* VBOX */
1569{
1570 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1571 gen_string_movl_A0_EDI(s);
1572 gen_op_st_T0_A0(ot + s->mem_index);
1573 gen_op_movl_T0_Dshift(ot);
1574 gen_op_add_reg_T0(s->aflag, R_EDI);
1575}
1576
1577#ifndef VBOX
1578static inline void gen_lods(DisasContext *s, int ot)
1579#else /* VBOX */
1580DECLINLINE(void) gen_lods(DisasContext *s, int ot)
1581#endif /* VBOX */
1582{
1583 gen_string_movl_A0_ESI(s);
1584 gen_op_ld_T0_A0(ot + s->mem_index);
1585 gen_op_mov_reg_T0(ot, R_EAX);
1586 gen_op_movl_T0_Dshift(ot);
1587 gen_op_add_reg_T0(s->aflag, R_ESI);
1588}
1589
1590#ifndef VBOX
1591static inline void gen_scas(DisasContext *s, int ot)
1592#else /* VBOX */
1593DECLINLINE(void) gen_scas(DisasContext *s, int ot)
1594#endif /* VBOX */
1595{
1596 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1597 gen_string_movl_A0_EDI(s);
1598 gen_op_ld_T1_A0(ot + s->mem_index);
1599 gen_op_cmpl_T0_T1_cc();
1600 gen_op_movl_T0_Dshift(ot);
1601 gen_op_add_reg_T0(s->aflag, R_EDI);
1602}
1603
1604#ifndef VBOX
1605static inline void gen_cmps(DisasContext *s, int ot)
1606#else /* VBOX */
1607DECLINLINE(void) gen_cmps(DisasContext *s, int ot)
1608#endif /* VBOX */
1609{
1610 gen_string_movl_A0_ESI(s);
1611 gen_op_ld_T0_A0(ot + s->mem_index);
1612 gen_string_movl_A0_EDI(s);
1613 gen_op_ld_T1_A0(ot + s->mem_index);
1614 gen_op_cmpl_T0_T1_cc();
1615 gen_op_movl_T0_Dshift(ot);
1616 gen_op_add_reg_T0(s->aflag, R_ESI);
1617 gen_op_add_reg_T0(s->aflag, R_EDI);
1618}
1619
1620#ifndef VBOX
1621static inline void gen_ins(DisasContext *s, int ot)
1622#else /* VBOX */
1623DECLINLINE(void) gen_ins(DisasContext *s, int ot)
1624#endif /* VBOX */
1625{
1626 if (use_icount)
1627 gen_io_start();
1628 gen_string_movl_A0_EDI(s);
1629 /* Note: we must do this dummy write first to be restartable in
1630 case of page fault. */
1631 gen_op_movl_T0_0();
1632 gen_op_st_T0_A0(ot + s->mem_index);
1633 gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
1634 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
1635 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
1636 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
1637 gen_op_st_T0_A0(ot + s->mem_index);
1638 gen_op_movl_T0_Dshift(ot);
1639 gen_op_add_reg_T0(s->aflag, R_EDI);
1640 if (use_icount)
1641 gen_io_end();
1642}
1643
1644#ifndef VBOX
1645static inline void gen_outs(DisasContext *s, int ot)
1646#else /* VBOX */
1647DECLINLINE(void) gen_outs(DisasContext *s, int ot)
1648#endif /* VBOX */
1649{
1650 if (use_icount)
1651 gen_io_start();
1652 gen_string_movl_A0_ESI(s);
1653 gen_op_ld_T0_A0(ot + s->mem_index);
1654
1655 gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
1656 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
1657 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
1658 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
1659 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
1660
1661 gen_op_movl_T0_Dshift(ot);
1662 gen_op_add_reg_T0(s->aflag, R_ESI);
1663 if (use_icount)
1664 gen_io_end();
1665}
1666
1667/* same method as Valgrind: we generate jumps to the current or next
1668 instruction */
1669#ifndef VBOX
1670#define GEN_REPZ(op) \
1671static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1672 target_ulong cur_eip, target_ulong next_eip) \
1673{ \
1674 int l2; \
1675 gen_update_cc_op(s); \
1676 l2 = gen_jz_ecx_string(s, next_eip); \
1677 gen_ ## op(s, ot); \
1678 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1679 /* a loop would cause two single step exceptions if ECX = 1 \
1680 before rep string_insn */ \
1681 if (!s->jmp_opt) \
1682 gen_op_jz_ecx(s->aflag, l2); \
1683 gen_jmp(s, cur_eip); \
1684}
1685#else /* VBOX */
1686#define GEN_REPZ(op) \
1687DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot, \
1688 target_ulong cur_eip, target_ulong next_eip) \
1689{ \
1690 int l2; \
1691 gen_update_cc_op(s); \
1692 l2 = gen_jz_ecx_string(s, next_eip); \
1693 gen_ ## op(s, ot); \
1694 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1695 /* a loop would cause two single step exceptions if ECX = 1 \
1696 before rep string_insn */ \
1697 if (!s->jmp_opt) \
1698 gen_op_jz_ecx(s->aflag, l2); \
1699 gen_jmp(s, cur_eip); \
1700}
1701#endif /* VBOX */
1702
1703#ifndef VBOX
1704#define GEN_REPZ2(op) \
1705static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1706 target_ulong cur_eip, \
1707 target_ulong next_eip, \
1708 int nz) \
1709{ \
1710 int l2; \
1711 gen_update_cc_op(s); \
1712 l2 = gen_jz_ecx_string(s, next_eip); \
1713 gen_ ## op(s, ot); \
1714 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1715 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1716 gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2); \
1717 if (!s->jmp_opt) \
1718 gen_op_jz_ecx(s->aflag, l2); \
1719 gen_jmp(s, cur_eip); \
1720}
1721#else /* VBOX */
1722#define GEN_REPZ2(op) \
1723DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot, \
1724 target_ulong cur_eip, \
1725 target_ulong next_eip, \
1726 int nz) \
1727{ \
1728 int l2;\
1729 gen_update_cc_op(s); \
1730 l2 = gen_jz_ecx_string(s, next_eip); \
1731 gen_ ## op(s, ot); \
1732 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1733 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1734 gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2); \
1735 if (!s->jmp_opt) \
1736 gen_op_jz_ecx(s->aflag, l2); \
1737 gen_jmp(s, cur_eip); \
1738}
1739#endif /* VBOX */
1740
1741GEN_REPZ(movs)
1742GEN_REPZ(stos)
1743GEN_REPZ(lods)
1744GEN_REPZ(ins)
1745GEN_REPZ(outs)
1746GEN_REPZ2(scas)
1747GEN_REPZ2(cmps)
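/* The instantiations above expand to gen_repz_movs(), gen_repz_stos(),
   gen_repz_lods(), gen_repz_ins(), gen_repz_outs(), gen_repz_scas() and
   gen_repz_cmps(). The GEN_REPZ2 variants take an extra 'nz' argument so
   one body serves both REPE (nz = 0) and REPNE (nz = 1) through the
   gen_jcc1() test on ZF. */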
1748
1749static void *helper_fp_arith_ST0_FT0[8] = {
1750 helper_fadd_ST0_FT0,
1751 helper_fmul_ST0_FT0,
1752 helper_fcom_ST0_FT0,
1753 helper_fcom_ST0_FT0,
1754 helper_fsub_ST0_FT0,
1755 helper_fsubr_ST0_FT0,
1756 helper_fdiv_ST0_FT0,
1757 helper_fdivr_ST0_FT0,
1758};
1759
1760/* NOTE the exception in "r" op ordering */
1761static void *helper_fp_arith_STN_ST0[8] = {
1762 helper_fadd_STN_ST0,
1763 helper_fmul_STN_ST0,
1764 NULL,
1765 NULL,
1766 helper_fsubr_STN_ST0,
1767 helper_fsub_STN_ST0,
1768 helper_fdivr_STN_ST0,
1769 helper_fdiv_STN_ST0,
1770};
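/* The "exception" noted above: in the STN,ST0 table the reversed forms
   trade places - index 4 is fsubr and index 5 is fsub, the opposite of the
   ST0,FT0 table - mirroring the x87 quirk that the sub/div encodings swap
   meaning when the destination is ST(i). Slots 2 and 3 are NULL since the
   compare operations never take the STN,ST0 path. */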
1771
1772/* if d == OR_TMP0, it means memory operand (address in A0) */
1773static void gen_op(DisasContext *s1, int op, int ot, int d)
1774{
1775 if (d != OR_TMP0) {
1776 gen_op_mov_TN_reg(ot, 0, d);
1777 } else {
1778 gen_op_ld_T0_A0(ot + s1->mem_index);
1779 }
1780 switch(op) {
1781 case OP_ADCL:
1782 if (s1->cc_op != CC_OP_DYNAMIC)
1783 gen_op_set_cc_op(s1->cc_op);
1784 gen_compute_eflags_c(cpu_tmp4);
1785 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1786 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1787 if (d != OR_TMP0)
1788 gen_op_mov_reg_T0(ot, d);
1789 else
1790 gen_op_st_T0_A0(ot + s1->mem_index);
1791 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1792 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1793 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
1794 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
1795 tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
1796 s1->cc_op = CC_OP_DYNAMIC;
1797 break;
1798 case OP_SBBL:
1799 if (s1->cc_op != CC_OP_DYNAMIC)
1800 gen_op_set_cc_op(s1->cc_op);
1801 gen_compute_eflags_c(cpu_tmp4);
1802 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1803 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1804 if (d != OR_TMP0)
1805 gen_op_mov_reg_T0(ot, d);
1806 else
1807 gen_op_st_T0_A0(ot + s1->mem_index);
1808 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1809 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1810 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
1811 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
1812 tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
1813 s1->cc_op = CC_OP_DYNAMIC;
1814 break;
1815 case OP_ADDL:
1816 gen_op_addl_T0_T1();
1817 if (d != OR_TMP0)
1818 gen_op_mov_reg_T0(ot, d);
1819 else
1820 gen_op_st_T0_A0(ot + s1->mem_index);
1821 gen_op_update2_cc();
1822 s1->cc_op = CC_OP_ADDB + ot;
1823 break;
1824 case OP_SUBL:
1825 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1826 if (d != OR_TMP0)
1827 gen_op_mov_reg_T0(ot, d);
1828 else
1829 gen_op_st_T0_A0(ot + s1->mem_index);
1830 gen_op_update2_cc();
1831 s1->cc_op = CC_OP_SUBB + ot;
1832 break;
1833 default:
1834 case OP_ANDL:
1835 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1836 if (d != OR_TMP0)
1837 gen_op_mov_reg_T0(ot, d);
1838 else
1839 gen_op_st_T0_A0(ot + s1->mem_index);
1840 gen_op_update1_cc();
1841 s1->cc_op = CC_OP_LOGICB + ot;
1842 break;
1843 case OP_ORL:
1844 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1845 if (d != OR_TMP0)
1846 gen_op_mov_reg_T0(ot, d);
1847 else
1848 gen_op_st_T0_A0(ot + s1->mem_index);
1849 gen_op_update1_cc();
1850 s1->cc_op = CC_OP_LOGICB + ot;
1851 break;
1852 case OP_XORL:
1853 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1854 if (d != OR_TMP0)
1855 gen_op_mov_reg_T0(ot, d);
1856 else
1857 gen_op_st_T0_A0(ot + s1->mem_index);
1858 gen_op_update1_cc();
1859 s1->cc_op = CC_OP_LOGICB + ot;
1860 break;
1861 case OP_CMPL:
1862 gen_op_cmpl_T0_T1_cc();
1863 s1->cc_op = CC_OP_SUBB + ot;
1864 break;
1865 }
1866}
1867
1868/* if d == OR_TMP0, it means memory operand (address in A0) */
1869static void gen_inc(DisasContext *s1, int ot, int d, int c)
1870{
1871 if (d != OR_TMP0)
1872 gen_op_mov_TN_reg(ot, 0, d);
1873 else
1874 gen_op_ld_T0_A0(ot + s1->mem_index);
1875 if (s1->cc_op != CC_OP_DYNAMIC)
1876 gen_op_set_cc_op(s1->cc_op);
1877 if (c > 0) {
1878 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
1879 s1->cc_op = CC_OP_INCB + ot;
1880 } else {
1881 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
1882 s1->cc_op = CC_OP_DECB + ot;
1883 }
1884 if (d != OR_TMP0)
1885 gen_op_mov_reg_T0(ot, d);
1886 else
1887 gen_op_st_T0_A0(ot + s1->mem_index);
1888 gen_compute_eflags_c(cpu_cc_src);
1889 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1890}
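/* INC and DEC update all arithmetic flags except CF, so the carry computed
   from the previous cc_op is stashed in cpu_cc_src here; the CC_OP_INCB+ot
   and CC_OP_DECB+ot evaluators later merge that saved CF with the flags
   they derive from cc_dst. */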
1891
1892static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
1893 int is_right, int is_arith)
1894{
1895 target_ulong mask;
1896 int shift_label;
1897 TCGv t0, t1;
1898
1899 if (ot == OT_QUAD)
1900 mask = 0x3f;
1901 else
1902 mask = 0x1f;
1903
1904 /* load */
1905 if (op1 == OR_TMP0)
1906 gen_op_ld_T0_A0(ot + s->mem_index);
1907 else
1908 gen_op_mov_TN_reg(ot, 0, op1);
1909
1910 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);
1911
1912 tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);
1913
1914 if (is_right) {
1915 if (is_arith) {
1916 gen_exts(ot, cpu_T[0]);
1917 tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1918 tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1919 } else {
1920 gen_extu(ot, cpu_T[0]);
1921 tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1922 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1923 }
1924 } else {
1925 tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1926 tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1927 }
1928
1929 /* store */
1930 if (op1 == OR_TMP0)
1931 gen_op_st_T0_A0(ot + s->mem_index);
1932 else
1933 gen_op_mov_reg_T0(ot, op1);
1934
1935 /* update eflags if non zero shift */
1936 if (s->cc_op != CC_OP_DYNAMIC)
1937 gen_op_set_cc_op(s->cc_op);
1938
1939 /* XXX: inefficient */
1940 t0 = tcg_temp_local_new(TCG_TYPE_TL);
1941 t1 = tcg_temp_local_new(TCG_TYPE_TL);
1942
1943 tcg_gen_mov_tl(t0, cpu_T[0]);
1944 tcg_gen_mov_tl(t1, cpu_T3);
1945
1946 shift_label = gen_new_label();
1947 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_T[1], 0, shift_label);
1948
1949 tcg_gen_mov_tl(cpu_cc_src, t1);
1950 tcg_gen_mov_tl(cpu_cc_dst, t0);
1951 if (is_right)
1952 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
1953 else
1954 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
1955
1956 gen_set_label(shift_label);
1957 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1958
1959 tcg_temp_free(t0);
1960 tcg_temp_free(t1);
1961}
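/* Rationale for the cpu_T3/cpu_tmp5 dance above: the value shifted by
   count-1 is kept so the flag evaluators can recover the last bit shifted
   out (the new CF). Since a shift by zero must leave the flags untouched,
   the flag update is guarded by a runtime count == 0 branch and cc_op
   becomes dynamic afterwards. */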
1962
1963static void gen_shift_rm_im(DisasContext *s, int ot, int op1, int op2,
1964 int is_right, int is_arith)
1965{
1966 int mask;
1967
1968 if (ot == OT_QUAD)
1969 mask = 0x3f;
1970 else
1971 mask = 0x1f;
1972
1973 /* load */
1974 if (op1 == OR_TMP0)
1975 gen_op_ld_T0_A0(ot + s->mem_index);
1976 else
1977 gen_op_mov_TN_reg(ot, 0, op1);
1978
1979 op2 &= mask;
1980 if (op2 != 0) {
1981 if (is_right) {
1982 if (is_arith) {
1983 gen_exts(ot, cpu_T[0]);
1984 tcg_gen_sari_tl(cpu_tmp4, cpu_T[0], op2 - 1);
1985 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], op2);
1986 } else {
1987 gen_extu(ot, cpu_T[0]);
1988 tcg_gen_shri_tl(cpu_tmp4, cpu_T[0], op2 - 1);
1989 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], op2);
1990 }
1991 } else {
1992 tcg_gen_shli_tl(cpu_tmp4, cpu_T[0], op2 - 1);
1993 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], op2);
1994 }
1995 }
1996
1997 /* store */
1998 if (op1 == OR_TMP0)
1999 gen_op_st_T0_A0(ot + s->mem_index);
2000 else
2001 gen_op_mov_reg_T0(ot, op1);
2002
2003 /* update eflags if non zero shift */
2004 if (op2 != 0) {
2005 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
2006 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
2007 if (is_right)
2008 s->cc_op = CC_OP_SARB + ot;
2009 else
2010 s->cc_op = CC_OP_SHLB + ot;
2011 }
2012}
2013
2014#ifndef VBOX
2015static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
2016#else /* VBOX */
2017DECLINLINE(void) tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
2018#endif /* VBOX */
2019{
2020 if (arg2 >= 0)
2021 tcg_gen_shli_tl(ret, arg1, arg2);
2022 else
2023 tcg_gen_shri_tl(ret, arg1, -arg2);
2024}
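/* tcg_gen_lshift() is a signed-count shift: a negative arg2 shifts right.
   gen_rot_rm_T1() below uses it to move the computed overflow bit into the
   CC_O position (bit 11) with one call regardless of operand width -
   '11 - (data_bits - 1)' is negative for 16/32/64 bit operands. */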
2025
2026/* XXX: add faster immediate case */
2027static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
2028 int is_right)
2029{
2030 target_ulong mask;
2031 int label1, label2, data_bits;
2032 TCGv t0, t1, t2, a0;
2033
2034 /* XXX: inefficient, but we must use local temps */
2035 t0 = tcg_temp_local_new(TCG_TYPE_TL);
2036 t1 = tcg_temp_local_new(TCG_TYPE_TL);
2037 t2 = tcg_temp_local_new(TCG_TYPE_TL);
2038 a0 = tcg_temp_local_new(TCG_TYPE_TL);
2039
2040 if (ot == OT_QUAD)
2041 mask = 0x3f;
2042 else
2043 mask = 0x1f;
2044
2045 /* load */
2046 if (op1 == OR_TMP0) {
2047 tcg_gen_mov_tl(a0, cpu_A0);
2048 gen_op_ld_v(ot + s->mem_index, t0, a0);
2049 } else {
2050 gen_op_mov_v_reg(ot, t0, op1);
2051 }
2052
2053 tcg_gen_mov_tl(t1, cpu_T[1]);
2054
2055 tcg_gen_andi_tl(t1, t1, mask);
2056
2057 /* Must test zero case to avoid using undefined behaviour in TCG
2058 shifts. */
2059 label1 = gen_new_label();
2060 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label1);
2061
2062 if (ot <= OT_WORD)
2063 tcg_gen_andi_tl(cpu_tmp0, t1, (1 << (3 + ot)) - 1);
2064 else
2065 tcg_gen_mov_tl(cpu_tmp0, t1);
2066
2067 gen_extu(ot, t0);
2068 tcg_gen_mov_tl(t2, t0);
2069
2070 data_bits = 8 << ot;
2071 /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
2072 fix TCG definition) */
2073 if (is_right) {
2074 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp0);
2075 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
2076 tcg_gen_shl_tl(t0, t0, cpu_tmp0);
2077 } else {
2078 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp0);
2079 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
2080 tcg_gen_shr_tl(t0, t0, cpu_tmp0);
2081 }
2082 tcg_gen_or_tl(t0, t0, cpu_tmp4);
2083
2084 gen_set_label(label1);
2085 /* store */
2086 if (op1 == OR_TMP0) {
2087 gen_op_st_v(ot + s->mem_index, t0, a0);
2088 } else {
2089 gen_op_mov_reg_v(ot, op1, t0);
2090 }
2091
2092 /* update eflags */
2093 if (s->cc_op != CC_OP_DYNAMIC)
2094 gen_op_set_cc_op(s->cc_op);
2095
2096 label2 = gen_new_label();
2097 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label2);
2098
2099 gen_compute_eflags(cpu_cc_src);
2100 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
2101 tcg_gen_xor_tl(cpu_tmp0, t2, t0);
2102 tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
2103 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
2104 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
2105 if (is_right) {
2106 tcg_gen_shri_tl(t0, t0, data_bits - 1);
2107 }
2108 tcg_gen_andi_tl(t0, t0, CC_C);
2109 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
2110
2111 tcg_gen_discard_tl(cpu_cc_dst);
2112 tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
2113
2114 gen_set_label(label2);
2115 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2116
2117 tcg_temp_free(t0);
2118 tcg_temp_free(t1);
2119 tcg_temp_free(t2);
2120 tcg_temp_free(a0);
2121}
2122
2123static void *helper_rotc[8] = {
2124 helper_rclb,
2125 helper_rclw,
2126 helper_rcll,
2127 X86_64_ONLY(helper_rclq),
2128 helper_rcrb,
2129 helper_rcrw,
2130 helper_rcrl,
2131 X86_64_ONLY(helper_rcrq),
2132};
2133
2134/* XXX: add faster immediate = 1 case */
2135static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
2136 int is_right)
2137{
2138 int label1;
2139
2140 if (s->cc_op != CC_OP_DYNAMIC)
2141 gen_op_set_cc_op(s->cc_op);
2142
2143 /* load */
2144 if (op1 == OR_TMP0)
2145 gen_op_ld_T0_A0(ot + s->mem_index);
2146 else
2147 gen_op_mov_TN_reg(ot, 0, op1);
2148
2149 tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
2150 cpu_T[0], cpu_T[0], cpu_T[1]);
2151 /* store */
2152 if (op1 == OR_TMP0)
2153 gen_op_st_T0_A0(ot + s->mem_index);
2154 else
2155 gen_op_mov_reg_T0(ot, op1);
2156
2157 /* update eflags */
2158 label1 = gen_new_label();
2159 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_tmp, -1, label1);
2160
2161 tcg_gen_mov_tl(cpu_cc_src, cpu_cc_tmp);
2162 tcg_gen_discard_tl(cpu_cc_dst);
2163 tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
2164
2165 gen_set_label(label1);
2166 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2167}
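/* Editorial note: as the -1 test above implies, the RCL/RCR helpers
 * report "count was zero, flags unchanged" by leaving -1 in cpu_cc_tmp;
 * otherwise cpu_cc_tmp carries the new EFLAGS value to commit. */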
2168
2169/* XXX: add faster immediate case */
2170static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
2171 int is_right)
2172{
2173 int label1, label2, data_bits;
2174 target_ulong mask;
2175 TCGv t0, t1, t2, a0;
2176
2177 t0 = tcg_temp_local_new(TCG_TYPE_TL);
2178 t1 = tcg_temp_local_new(TCG_TYPE_TL);
2179 t2 = tcg_temp_local_new(TCG_TYPE_TL);
2180 a0 = tcg_temp_local_new(TCG_TYPE_TL);
2181
2182 if (ot == OT_QUAD)
2183 mask = 0x3f;
2184 else
2185 mask = 0x1f;
2186
2187 /* load */
2188 if (op1 == OR_TMP0) {
2189 tcg_gen_mov_tl(a0, cpu_A0);
2190 gen_op_ld_v(ot + s->mem_index, t0, a0);
2191 } else {
2192 gen_op_mov_v_reg(ot, t0, op1);
2193 }
2194
2195 tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);
2196
2197 tcg_gen_mov_tl(t1, cpu_T[1]);
2198 tcg_gen_mov_tl(t2, cpu_T3);
2199
2200 /* Must test zero case to avoid using undefined behaviour in TCG
2201 shifts. */
2202 label1 = gen_new_label();
2203 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
2204
2205 tcg_gen_addi_tl(cpu_tmp5, t2, -1);
2206 if (ot == OT_WORD) {
2207 /* Note: we implement the Intel behaviour for shift count > 16 */
2208 if (is_right) {
2209 tcg_gen_andi_tl(t0, t0, 0xffff);
2210 tcg_gen_shli_tl(cpu_tmp0, t1, 16);
2211 tcg_gen_or_tl(t0, t0, cpu_tmp0);
2212 tcg_gen_ext32u_tl(t0, t0);
2213
2214 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
2215
2216             /* only needed if count > 16, but an explicit test would complicate the code */
2217 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
2218 tcg_gen_shl_tl(cpu_tmp0, t0, cpu_tmp5);
2219
2220 tcg_gen_shr_tl(t0, t0, t2);
2221
2222 tcg_gen_or_tl(t0, t0, cpu_tmp0);
2223 } else {
2224 /* XXX: not optimal */
2225 tcg_gen_andi_tl(t0, t0, 0xffff);
2226 tcg_gen_shli_tl(t1, t1, 16);
2227 tcg_gen_or_tl(t1, t1, t0);
2228 tcg_gen_ext32u_tl(t1, t1);
2229
2230 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
2231 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
2232 tcg_gen_shr_tl(cpu_tmp6, t1, cpu_tmp0);
2233 tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);
2234
2235 tcg_gen_shl_tl(t0, t0, t2);
2236 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
2237 tcg_gen_shr_tl(t1, t1, cpu_tmp5);
2238 tcg_gen_or_tl(t0, t0, t1);
2239 }
2240 } else {
2241 data_bits = 8 << ot;
2242 if (is_right) {
2243 if (ot == OT_LONG)
2244 tcg_gen_ext32u_tl(t0, t0);
2245
2246 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
2247
2248 tcg_gen_shr_tl(t0, t0, t2);
2249 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
2250 tcg_gen_shl_tl(t1, t1, cpu_tmp5);
2251 tcg_gen_or_tl(t0, t0, t1);
2252
2253 } else {
2254 if (ot == OT_LONG)
2255 tcg_gen_ext32u_tl(t1, t1);
2256
2257 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
2258
2259 tcg_gen_shl_tl(t0, t0, t2);
2260 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
2261 tcg_gen_shr_tl(t1, t1, cpu_tmp5);
2262 tcg_gen_or_tl(t0, t0, t1);
2263 }
2264 }
2265 tcg_gen_mov_tl(t1, cpu_tmp4);
2266
2267 gen_set_label(label1);
2268 /* store */
2269 if (op1 == OR_TMP0) {
2270 gen_op_st_v(ot + s->mem_index, t0, a0);
2271 } else {
2272 gen_op_mov_reg_v(ot, op1, t0);
2273 }
2274
2275 /* update eflags */
2276 if (s->cc_op != CC_OP_DYNAMIC)
2277 gen_op_set_cc_op(s->cc_op);
2278
2279 label2 = gen_new_label();
2280 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label2);
2281
2282 tcg_gen_mov_tl(cpu_cc_src, t1);
2283 tcg_gen_mov_tl(cpu_cc_dst, t0);
2284 if (is_right) {
2285 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
2286 } else {
2287 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
2288 }
2289 gen_set_label(label2);
2290 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2291
2292 tcg_temp_free(t0);
2293 tcg_temp_free(t1);
2294 tcg_temp_free(t2);
2295 tcg_temp_free(a0);
2296}
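/* A minimal sketch (illustrative only) of the 16-bit SHRD composite
 * used above: for the Intel count > 16 behaviour, the destination word
 * and the second operand are glued into one 32-bit value and
 * effectively rotated, which is what the extra "32 - count" shift
 * implements.  shrd16_example is a hypothetical name; flags are not
 * modelled. */
#if 0
static inline uint16_t shrd16_example(uint16_t dst, uint16_t src,
                                      unsigned count)
{
    uint32_t composite = ((uint32_t)src << 16) | dst;
    count &= 0x1f;
    if (count == 0)
        return dst;                     /* zero count: flags untouched */
    return (uint16_t)((composite >> count) |
                      (composite << (32 - count)));
}
#endif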
2297
2298static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
2299{
2300 if (s != OR_TMP1)
2301 gen_op_mov_TN_reg(ot, 1, s);
2302 switch(op) {
2303 case OP_ROL:
2304 gen_rot_rm_T1(s1, ot, d, 0);
2305 break;
2306 case OP_ROR:
2307 gen_rot_rm_T1(s1, ot, d, 1);
2308 break;
2309 case OP_SHL:
2310 case OP_SHL1:
2311 gen_shift_rm_T1(s1, ot, d, 0, 0);
2312 break;
2313 case OP_SHR:
2314 gen_shift_rm_T1(s1, ot, d, 1, 0);
2315 break;
2316 case OP_SAR:
2317 gen_shift_rm_T1(s1, ot, d, 1, 1);
2318 break;
2319 case OP_RCL:
2320 gen_rotc_rm_T1(s1, ot, d, 0);
2321 break;
2322 case OP_RCR:
2323 gen_rotc_rm_T1(s1, ot, d, 1);
2324 break;
2325 }
2326}
2327
2328static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
2329{
2330 switch(op) {
2331 case OP_SHL:
2332 case OP_SHL1:
2333 gen_shift_rm_im(s1, ot, d, c, 0, 0);
2334 break;
2335 case OP_SHR:
2336 gen_shift_rm_im(s1, ot, d, c, 1, 0);
2337 break;
2338 case OP_SAR:
2339 gen_shift_rm_im(s1, ot, d, c, 1, 1);
2340 break;
2341 default:
2342 /* currently not optimized */
2343 gen_op_movl_T1_im(c);
2344 gen_shift(s1, op, ot, d, OR_TMP1);
2345 break;
2346 }
2347}
2348
2349static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
2350{
2351 target_long disp;
2352 int havesib;
2353 int base;
2354 int index;
2355 int scale;
2356 int opreg;
2357 int mod, rm, code, override, must_add_seg;
2358
2359 override = s->override;
2360 must_add_seg = s->addseg;
2361 if (override >= 0)
2362 must_add_seg = 1;
2363 mod = (modrm >> 6) & 3;
2364 rm = modrm & 7;
2365
2366 if (s->aflag) {
2367
2368 havesib = 0;
2369 base = rm;
2370 index = 0;
2371 scale = 0;
2372
2373 if (base == 4) {
2374 havesib = 1;
2375 code = ldub_code(s->pc++);
2376 scale = (code >> 6) & 3;
2377 index = ((code >> 3) & 7) | REX_X(s);
2378 base = (code & 7);
2379 }
2380 base |= REX_B(s);
2381
2382 switch (mod) {
2383 case 0:
2384 if ((base & 7) == 5) {
2385 base = -1;
2386 disp = (int32_t)ldl_code(s->pc);
2387 s->pc += 4;
2388 if (CODE64(s) && !havesib) {
2389 disp += s->pc + s->rip_offset;
2390 }
2391 } else {
2392 disp = 0;
2393 }
2394 break;
2395 case 1:
2396 disp = (int8_t)ldub_code(s->pc++);
2397 break;
2398 default:
2399 case 2:
2400#ifdef VBOX
2401 disp = (int32_t)ldl_code(s->pc);
2402#else
2403 disp = ldl_code(s->pc);
2404#endif
2405 s->pc += 4;
2406 break;
2407 }
2408
2409 if (base >= 0) {
2410 /* for correct popl handling with esp */
2411 if (base == 4 && s->popl_esp_hack)
2412 disp += s->popl_esp_hack;
2413#ifdef TARGET_X86_64
2414 if (s->aflag == 2) {
2415 gen_op_movq_A0_reg(base);
2416 if (disp != 0) {
2417 gen_op_addq_A0_im(disp);
2418 }
2419 } else
2420#endif
2421 {
2422 gen_op_movl_A0_reg(base);
2423 if (disp != 0)
2424 gen_op_addl_A0_im(disp);
2425 }
2426 } else {
2427#ifdef TARGET_X86_64
2428 if (s->aflag == 2) {
2429 gen_op_movq_A0_im(disp);
2430 } else
2431#endif
2432 {
2433 gen_op_movl_A0_im(disp);
2434 }
2435 }
2436 /* XXX: index == 4 is always invalid */
2437 if (havesib && (index != 4 || scale != 0)) {
2438#ifdef TARGET_X86_64
2439 if (s->aflag == 2) {
2440 gen_op_addq_A0_reg_sN(scale, index);
2441 } else
2442#endif
2443 {
2444 gen_op_addl_A0_reg_sN(scale, index);
2445 }
2446 }
2447 if (must_add_seg) {
2448 if (override < 0) {
2449 if (base == R_EBP || base == R_ESP)
2450 override = R_SS;
2451 else
2452 override = R_DS;
2453 }
2454#ifdef TARGET_X86_64
2455 if (s->aflag == 2) {
2456 gen_op_addq_A0_seg(override);
2457 } else
2458#endif
2459 {
2460 gen_op_addl_A0_seg(override);
2461 }
2462 }
2463 } else {
2464 switch (mod) {
2465 case 0:
2466 if (rm == 6) {
2467 disp = lduw_code(s->pc);
2468 s->pc += 2;
2469 gen_op_movl_A0_im(disp);
2470 rm = 0; /* avoid SS override */
2471 goto no_rm;
2472 } else {
2473 disp = 0;
2474 }
2475 break;
2476 case 1:
2477 disp = (int8_t)ldub_code(s->pc++);
2478 break;
2479 default:
2480 case 2:
2481 disp = lduw_code(s->pc);
2482 s->pc += 2;
2483 break;
2484 }
2485 switch(rm) {
2486 case 0:
2487 gen_op_movl_A0_reg(R_EBX);
2488 gen_op_addl_A0_reg_sN(0, R_ESI);
2489 break;
2490 case 1:
2491 gen_op_movl_A0_reg(R_EBX);
2492 gen_op_addl_A0_reg_sN(0, R_EDI);
2493 break;
2494 case 2:
2495 gen_op_movl_A0_reg(R_EBP);
2496 gen_op_addl_A0_reg_sN(0, R_ESI);
2497 break;
2498 case 3:
2499 gen_op_movl_A0_reg(R_EBP);
2500 gen_op_addl_A0_reg_sN(0, R_EDI);
2501 break;
2502 case 4:
2503 gen_op_movl_A0_reg(R_ESI);
2504 break;
2505 case 5:
2506 gen_op_movl_A0_reg(R_EDI);
2507 break;
2508 case 6:
2509 gen_op_movl_A0_reg(R_EBP);
2510 break;
2511 default:
2512 case 7:
2513 gen_op_movl_A0_reg(R_EBX);
2514 break;
2515 }
2516 if (disp != 0)
2517 gen_op_addl_A0_im(disp);
2518 gen_op_andl_A0_ffff();
2519 no_rm:
2520 if (must_add_seg) {
2521 if (override < 0) {
2522 if (rm == 2 || rm == 3 || rm == 6)
2523 override = R_SS;
2524 else
2525 override = R_DS;
2526 }
2527 gen_op_addl_A0_seg(override);
2528 }
2529 }
2530
2531 opreg = OR_A0;
2532 disp = 0;
2533 *reg_ptr = opreg;
2534 *offset_ptr = disp;
2535}
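/* Worked decode example (illustrative only): "mov eax, [ebx+esi*4+0x10]"
 * encodes as 8B 44 B3 10, and the code above takes it apart as follows.
 * sib_decode_example is a hypothetical name. */
#if 0
static void sib_decode_example(void)
{
    uint8_t modrm = 0x44;            /* mod=01 (disp8), reg=000 (EAX), rm=100 (SIB) */
    uint8_t sib   = 0xb3;
    int scale = (sib >> 6) & 3;      /* 2: index is scaled by 4 */
    int index = (sib >> 3) & 7;      /* 6: ESI */
    int base  = sib & 7;             /* 3: EBX */
    /* gen_lea_modrm() then emits A0 = EBX + 0x10, A0 += ESI << 2, and
     * finally adds the DS base when segmentation applies. */
    (void)modrm; (void)scale; (void)index; (void)base;
}
#endif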
2536
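/* decode and skip a modrm memory operand without generating any code;
   used for hinting instructions (multi-byte NOP, prefetches) whose
   addressing bytes must still be consumed */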
2537static void gen_nop_modrm(DisasContext *s, int modrm)
2538{
2539 int mod, rm, base, code;
2540
2541 mod = (modrm >> 6) & 3;
2542 if (mod == 3)
2543 return;
2544 rm = modrm & 7;
2545
2546 if (s->aflag) {
2547
2548 base = rm;
2549
2550 if (base == 4) {
2551 code = ldub_code(s->pc++);
2552 base = (code & 7);
2553 }
2554
2555 switch (mod) {
2556 case 0:
2557 if (base == 5) {
2558 s->pc += 4;
2559 }
2560 break;
2561 case 1:
2562 s->pc++;
2563 break;
2564 default:
2565 case 2:
2566 s->pc += 4;
2567 break;
2568 }
2569 } else {
2570 switch (mod) {
2571 case 0:
2572 if (rm == 6) {
2573 s->pc += 2;
2574 }
2575 break;
2576 case 1:
2577 s->pc++;
2578 break;
2579 default:
2580 case 2:
2581 s->pc += 2;
2582 break;
2583 }
2584 }
2585}
2586
2587/* used for LEA and MOV AX, mem */
2588static void gen_add_A0_ds_seg(DisasContext *s)
2589{
2590 int override, must_add_seg;
2591 must_add_seg = s->addseg;
2593 if (s->override >= 0) {
2594 override = s->override;
2595 must_add_seg = 1;
2596 } else {
2597 override = R_DS;
2598 }
2599 if (must_add_seg) {
2600#ifdef TARGET_X86_64
2601 if (CODE64(s)) {
2602 gen_op_addq_A0_seg(override);
2603 } else
2604#endif
2605 {
2606 gen_op_addl_A0_seg(override);
2607 }
2608 }
2609}
2610
2611/* generate modrm memory load or store of 'reg'. TMP0 is used if reg ==
2612 OR_TMP0 */
2613static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
2614{
2615 int mod, rm, opreg, disp;
2616
2617 mod = (modrm >> 6) & 3;
2618 rm = (modrm & 7) | REX_B(s);
2619 if (mod == 3) {
2620 if (is_store) {
2621 if (reg != OR_TMP0)
2622 gen_op_mov_TN_reg(ot, 0, reg);
2623 gen_op_mov_reg_T0(ot, rm);
2624 } else {
2625 gen_op_mov_TN_reg(ot, 0, rm);
2626 if (reg != OR_TMP0)
2627 gen_op_mov_reg_T0(ot, reg);
2628 }
2629 } else {
2630 gen_lea_modrm(s, modrm, &opreg, &disp);
2631 if (is_store) {
2632 if (reg != OR_TMP0)
2633 gen_op_mov_TN_reg(ot, 0, reg);
2634 gen_op_st_T0_A0(ot + s->mem_index);
2635 } else {
2636 gen_op_ld_T0_A0(ot + s->mem_index);
2637 if (reg != OR_TMP0)
2638 gen_op_mov_reg_T0(ot, reg);
2639 }
2640 }
2641}
2642
2643#ifndef VBOX
2644static inline uint32_t insn_get(DisasContext *s, int ot)
2645#else /* VBOX */
2646DECLINLINE(uint32_t) insn_get(DisasContext *s, int ot)
2647#endif /* VBOX */
2648{
2649 uint32_t ret;
2650
2651 switch(ot) {
2652 case OT_BYTE:
2653 ret = ldub_code(s->pc);
2654 s->pc++;
2655 break;
2656 case OT_WORD:
2657 ret = lduw_code(s->pc);
2658 s->pc += 2;
2659 break;
2660 default:
2661 case OT_LONG:
2662 ret = ldl_code(s->pc);
2663 s->pc += 4;
2664 break;
2665 }
2666 return ret;
2667}
2668
2669#ifndef VBOX
2670static inline int insn_const_size(unsigned int ot)
2671#else /* VBOX */
2672DECLINLINE(int) insn_const_size(unsigned int ot)
2673#endif /* VBOX */
2674{
2675 if (ot <= OT_LONG)
2676 return 1 << ot;
2677 else
2678 return 4;
2679}
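/* Editorial note: OT_QUAD immediates still occupy only 4 bytes in the
 * instruction stream (x86-64 sign-extends 32-bit immediates), hence
 * the "return 4" above. */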
2680
2681#ifndef VBOX
2682static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2683#else /* VBOX */
2684DECLINLINE(void) gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2685#endif /* VBOX */
2686{
2687 TranslationBlock *tb;
2688 target_ulong pc;
2689
2690 pc = s->cs_base + eip;
2691 tb = s->tb;
2692 /* NOTE: we handle the case where the TB spans two pages here */
2693 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
2694 (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
2695#ifdef VBOX
2696 gen_check_external_event(s);
2697#endif /* VBOX */
2698 /* jump to same page: we can use a direct jump */
2699 tcg_gen_goto_tb(tb_num);
2700 gen_jmp_im(eip);
2701 tcg_gen_exit_tb((long)tb + tb_num);
2702 } else {
2703 /* jump to another page: currently not optimized */
2704 gen_jmp_im(eip);
2705 gen_eob(s);
2706 }
2707}
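/* A minimal restatement (illustrative only; can_chain_example is a
 * hypothetical name) of the same-page test above: a direct jump may
 * only be used when the destination lies on a page this TB already
 * depends on, so TB invalidation keeps working. */
#if 0
static inline int can_chain_example(TranslationBlock *tb,
                                    target_ulong insn_end_pc,
                                    target_ulong dest_pc)
{
    return (dest_pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)
        || (dest_pc & TARGET_PAGE_MASK) == ((insn_end_pc - 1) & TARGET_PAGE_MASK);
}
#endif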
2708
2709#ifndef VBOX
2710static inline void gen_jcc(DisasContext *s, int b,
2711#else /* VBOX */
2712DECLINLINE(void) gen_jcc(DisasContext *s, int b,
2713#endif /* VBOX */
2714 target_ulong val, target_ulong next_eip)
2715{
2716 int l1, l2, cc_op;
2717
2718 cc_op = s->cc_op;
2719 if (s->cc_op != CC_OP_DYNAMIC) {
2720 gen_op_set_cc_op(s->cc_op);
2721 s->cc_op = CC_OP_DYNAMIC;
2722 }
2723 if (s->jmp_opt) {
2724 l1 = gen_new_label();
2725 gen_jcc1(s, cc_op, b, l1);
2726
2727 gen_goto_tb(s, 0, next_eip);
2728
2729 gen_set_label(l1);
2730 gen_goto_tb(s, 1, val);
2731 s->is_jmp = 3;
2732 } else {
2733
2734 l1 = gen_new_label();
2735 l2 = gen_new_label();
2736 gen_jcc1(s, cc_op, b, l1);
2737
2738 gen_jmp_im(next_eip);
2739 tcg_gen_br(l2);
2740
2741 gen_set_label(l1);
2742 gen_jmp_im(val);
2743 gen_set_label(l2);
2744 gen_eob(s);
2745 }
2746}
2747
2748static void gen_setcc(DisasContext *s, int b)
2749{
2750 int inv, jcc_op, l1;
2751 TCGv t0;
2752
2753 if (is_fast_jcc_case(s, b)) {
2754 /* nominal case: we use a jump */
2755 /* XXX: make it faster by adding new instructions in TCG */
2756 t0 = tcg_temp_local_new(TCG_TYPE_TL);
2757 tcg_gen_movi_tl(t0, 0);
2758 l1 = gen_new_label();
2759 gen_jcc1(s, s->cc_op, b ^ 1, l1);
2760 tcg_gen_movi_tl(t0, 1);
2761 gen_set_label(l1);
2762 tcg_gen_mov_tl(cpu_T[0], t0);
2763 tcg_temp_free(t0);
2764 } else {
2765 /* slow case: it is more efficient not to generate a jump,
2766        although it is questionable whether this optimization is
2767        worthwhile */
2768 inv = b & 1;
2769 jcc_op = (b >> 1) & 7;
2770 gen_setcc_slow_T0(s, jcc_op);
2771 if (inv) {
2772 tcg_gen_xori_tl(cpu_T[0], cpu_T[0], 1);
2773 }
2774 }
2775}
2776
2777#ifndef VBOX
2778static inline void gen_op_movl_T0_seg(int seg_reg)
2779#else /* VBOX */
2780DECLINLINE(void) gen_op_movl_T0_seg(int seg_reg)
2781#endif /* VBOX */
2782{
2783 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
2784 offsetof(CPUX86State,segs[seg_reg].selector));
2785}
2786
2787#ifndef VBOX
2788static inline void gen_op_movl_seg_T0_vm(int seg_reg)
2789#else /* VBOX */
2790DECLINLINE(void) gen_op_movl_seg_T0_vm(int seg_reg)
2791#endif /* VBOX */
2792{
2793 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
2794 tcg_gen_st32_tl(cpu_T[0], cpu_env,
2795 offsetof(CPUX86State,segs[seg_reg].selector));
2796 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
2797 tcg_gen_st_tl(cpu_T[0], cpu_env,
2798 offsetof(CPUX86State,segs[seg_reg].base));
2799#ifdef VBOX
2800 int flags = DESC_P_MASK | DESC_S_MASK | DESC_W_MASK;
2801 if (seg_reg == R_CS)
2802 flags |= DESC_CS_MASK;
2803 gen_op_movl_T0_im(flags);
2804 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[seg_reg].flags));
2805
2806 /* Set the limit to 0xffff. */
2807 gen_op_movl_T0_im(0xffff);
2808 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[seg_reg].limit));
2809#endif
2810}
2811
2812/* move T0 to seg_reg and compute if the CPU state may change. Never
2813 call this function with seg_reg == R_CS */
2814static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
2815{
2816 if (s->pe && !s->vm86) {
2817 /* XXX: optimize by finding processor state dynamically */
2818 if (s->cc_op != CC_OP_DYNAMIC)
2819 gen_op_set_cc_op(s->cc_op);
2820 gen_jmp_im(cur_eip);
2821 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2822 tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
2823 /* abort translation because the addseg value may change or
2824 because ss32 may change. For R_SS, translation must always
2825 stop as a special handling must be done to disable hardware
2826 interrupts for the next instruction */
2827 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
2828 s->is_jmp = 3;
2829 } else {
2830 gen_op_movl_seg_T0_vm(seg_reg);
2831 if (seg_reg == R_SS)
2832 s->is_jmp = 3;
2833 }
2834}
2835
2836#ifndef VBOX
2837static inline int svm_is_rep(int prefixes)
2838#else /* VBOX */
2839DECLINLINE(int) svm_is_rep(int prefixes)
2840#endif /* VBOX */
2841{
2842 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2843}
2844
2845#ifndef VBOX
2846static inline void
2847#else /* VBOX */
2848DECLINLINE(void)
2849#endif /* VBOX */
2850gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
2851 uint32_t type, uint64_t param)
2852{
2853 /* no SVM activated; fast case */
2854 if (likely(!(s->flags & HF_SVMI_MASK)))
2855 return;
2856 if (s->cc_op != CC_OP_DYNAMIC)
2857 gen_op_set_cc_op(s->cc_op);
2858 gen_jmp_im(pc_start - s->cs_base);
2859 tcg_gen_helper_0_2(helper_svm_check_intercept_param,
2860 tcg_const_i32(type), tcg_const_i64(param));
2861}
2862
2863#ifndef VBOX
2864static inline void
2865#else /* VBOX */
2866DECLINLINE(void)
2867#endif
2868gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2869{
2870 gen_svm_check_intercept_param(s, pc_start, type, 0);
2871}
2872
2873#ifndef VBOX
2874static inline void gen_stack_update(DisasContext *s, int addend)
2875#else /* VBOX */
2876DECLINLINE(void) gen_stack_update(DisasContext *s, int addend)
2877#endif /* VBOX */
2878{
2879#ifdef TARGET_X86_64
2880 if (CODE64(s)) {
2881 gen_op_add_reg_im(2, R_ESP, addend);
2882 } else
2883#endif
2884 if (s->ss32) {
2885 gen_op_add_reg_im(1, R_ESP, addend);
2886 } else {
2887 gen_op_add_reg_im(0, R_ESP, addend);
2888 }
2889}
2890
2891/* generate a push. It depends on ss32, addseg and dflag */
2892static void gen_push_T0(DisasContext *s)
2893{
2894#ifdef TARGET_X86_64
2895 if (CODE64(s)) {
2896 gen_op_movq_A0_reg(R_ESP);
2897 if (s->dflag) {
2898 gen_op_addq_A0_im(-8);
2899 gen_op_st_T0_A0(OT_QUAD + s->mem_index);
2900 } else {
2901 gen_op_addq_A0_im(-2);
2902 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2903 }
2904 gen_op_mov_reg_A0(2, R_ESP);
2905 } else
2906#endif
2907 {
2908 gen_op_movl_A0_reg(R_ESP);
2909 if (!s->dflag)
2910 gen_op_addl_A0_im(-2);
2911 else
2912 gen_op_addl_A0_im(-4);
2913 if (s->ss32) {
2914 if (s->addseg) {
2915 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2916 gen_op_addl_A0_seg(R_SS);
2917 }
2918 } else {
2919 gen_op_andl_A0_ffff();
2920 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2921 gen_op_addl_A0_seg(R_SS);
2922 }
2923 gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
2924 if (s->ss32 && !s->addseg)
2925 gen_op_mov_reg_A0(1, R_ESP);
2926 else
2927 gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
2928 }
2929}
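/* Editorial note on the size bookkeeping above: dflag == 0 selects a
 * 16-bit push and dflag == 1 a 32-bit one, so the store uses operand
 * type dflag + 1 (OT_WORD resp. OT_LONG) and ESP moves by 2 << dflag
 * bytes, the same quantity gen_pop_update() uses below. */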
2930
2931 /* generate a push of T1 (slower version, only used for call Ev).
2932    It depends on ss32, addseg and dflag */
2933static void gen_push_T1(DisasContext *s)
2934{
2935#ifdef TARGET_X86_64
2936 if (CODE64(s)) {
2937 gen_op_movq_A0_reg(R_ESP);
2938 if (s->dflag) {
2939 gen_op_addq_A0_im(-8);
2940 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2941 } else {
2942 gen_op_addq_A0_im(-2);
2943             gen_op_st_T1_A0(OT_WORD + s->mem_index);
2944 }
2945 gen_op_mov_reg_A0(2, R_ESP);
2946 } else
2947#endif
2948 {
2949 gen_op_movl_A0_reg(R_ESP);
2950 if (!s->dflag)
2951 gen_op_addl_A0_im(-2);
2952 else
2953 gen_op_addl_A0_im(-4);
2954 if (s->ss32) {
2955 if (s->addseg) {
2956 gen_op_addl_A0_seg(R_SS);
2957 }
2958 } else {
2959 gen_op_andl_A0_ffff();
2960 gen_op_addl_A0_seg(R_SS);
2961 }
2962 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2963
2964 if (s->ss32 && !s->addseg)
2965 gen_op_mov_reg_A0(1, R_ESP);
2966 else
2967 gen_stack_update(s, (-2) << s->dflag);
2968 }
2969}
2970
2971 /* a two-step pop is necessary for precise exceptions */
2972static void gen_pop_T0(DisasContext *s)
2973{
2974#ifdef TARGET_X86_64
2975 if (CODE64(s)) {
2976 gen_op_movq_A0_reg(R_ESP);
2977 gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
2978 } else
2979#endif
2980 {
2981 gen_op_movl_A0_reg(R_ESP);
2982 if (s->ss32) {
2983 if (s->addseg)
2984 gen_op_addl_A0_seg(R_SS);
2985 } else {
2986 gen_op_andl_A0_ffff();
2987 gen_op_addl_A0_seg(R_SS);
2988 }
2989 gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
2990 }
2991}
2992
2993static void gen_pop_update(DisasContext *s)
2994{
2995#ifdef TARGET_X86_64
2996 if (CODE64(s) && s->dflag) {
2997 gen_stack_update(s, 8);
2998 } else
2999#endif
3000 {
3001 gen_stack_update(s, 2 << s->dflag);
3002 }
3003}
3004
3005static void gen_stack_A0(DisasContext *s)
3006{
3007 gen_op_movl_A0_reg(R_ESP);
3008 if (!s->ss32)
3009 gen_op_andl_A0_ffff();
3010 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3011 if (s->addseg)
3012 gen_op_addl_A0_seg(R_SS);
3013}
3014
3015 /* NOTE: wrap-around in 16-bit mode is not fully handled */
3016static void gen_pusha(DisasContext *s)
3017{
3018 int i;
3019 gen_op_movl_A0_reg(R_ESP);
3020 gen_op_addl_A0_im(-16 << s->dflag);
3021 if (!s->ss32)
3022 gen_op_andl_A0_ffff();
3023 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3024 if (s->addseg)
3025 gen_op_addl_A0_seg(R_SS);
3026     for(i = 0; i < 8; i++) {
3027 gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
3028 gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
3029 gen_op_addl_A0_im(2 << s->dflag);
3030 }
3031 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
3032}
3033
3034 /* NOTE: wrap-around in 16-bit mode is not fully handled */
3035static void gen_popa(DisasContext *s)
3036{
3037 int i;
3038 gen_op_movl_A0_reg(R_ESP);
3039 if (!s->ss32)
3040 gen_op_andl_A0_ffff();
3041 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3042 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);
3043 if (s->addseg)
3044 gen_op_addl_A0_seg(R_SS);
3045     for(i = 0; i < 8; i++) {
3046 /* ESP is not reloaded */
3047 if (i != 3) {
3048 gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
3049 gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
3050 }
3051 gen_op_addl_A0_im(2 << s->dflag);
3052 }
3053 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
3054}
3055
3056static void gen_enter(DisasContext *s, int esp_addend, int level)
3057{
3058 int ot, opsize;
3059
3060 level &= 0x1f;
3061#ifdef TARGET_X86_64
3062 if (CODE64(s)) {
3063 ot = s->dflag ? OT_QUAD : OT_WORD;
3064 opsize = 1 << ot;
3065
3066 gen_op_movl_A0_reg(R_ESP);
3067 gen_op_addq_A0_im(-opsize);
3068 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3069
3070 /* push bp */
3071 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
3072 gen_op_st_T0_A0(ot + s->mem_index);
3073 if (level) {
3074 /* XXX: must save state */
3075 tcg_gen_helper_0_3(helper_enter64_level,
3076 tcg_const_i32(level),
3077 tcg_const_i32((ot == OT_QUAD)),
3078 cpu_T[1]);
3079 }
3080 gen_op_mov_reg_T1(ot, R_EBP);
3081 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
3082 gen_op_mov_reg_T1(OT_QUAD, R_ESP);
3083 } else
3084#endif
3085 {
3086 ot = s->dflag + OT_WORD;
3087 opsize = 2 << s->dflag;
3088
3089 gen_op_movl_A0_reg(R_ESP);
3090 gen_op_addl_A0_im(-opsize);
3091 if (!s->ss32)
3092 gen_op_andl_A0_ffff();
3093 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3094 if (s->addseg)
3095 gen_op_addl_A0_seg(R_SS);
3096 /* push bp */
3097 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
3098 gen_op_st_T0_A0(ot + s->mem_index);
3099 if (level) {
3100 /* XXX: must save state */
3101 tcg_gen_helper_0_3(helper_enter_level,
3102 tcg_const_i32(level),
3103 tcg_const_i32(s->dflag),
3104 cpu_T[1]);
3105 }
3106 gen_op_mov_reg_T1(ot, R_EBP);
3107 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
3108 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
3109 }
3110}
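/* Illustrative restatement (not part of the translator) of what the
 * 32-bit path above generates for ENTER imm16, imm8:
 *     push EBP
 *     EBP  = ESP                (its value after the push)
 *     ESP -= esp_addend + opsize * level
 * with the level > 0 display copies done by helper_enter_level. */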
3111
3112static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
3113{
3114 if (s->cc_op != CC_OP_DYNAMIC)
3115 gen_op_set_cc_op(s->cc_op);
3116 gen_jmp_im(cur_eip);
3117 tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
3118 s->is_jmp = 3;
3119}
3120
3121/* an interrupt is different from an exception because of the
3122 privilege checks */
3123static void gen_interrupt(DisasContext *s, int intno,
3124 target_ulong cur_eip, target_ulong next_eip)
3125{
3126 if (s->cc_op != CC_OP_DYNAMIC)
3127 gen_op_set_cc_op(s->cc_op);
3128 gen_jmp_im(cur_eip);
3129 tcg_gen_helper_0_2(helper_raise_interrupt,
3130 tcg_const_i32(intno),
3131 tcg_const_i32(next_eip - cur_eip));
3132 s->is_jmp = 3;
3133}
3134
3135static void gen_debug(DisasContext *s, target_ulong cur_eip)
3136{
3137 if (s->cc_op != CC_OP_DYNAMIC)
3138 gen_op_set_cc_op(s->cc_op);
3139 gen_jmp_im(cur_eip);
3140 tcg_gen_helper_0_0(helper_debug);
3141 s->is_jmp = 3;
3142}
3143
3144 /* generate a generic end of block; a trace exception is also
3145    generated if needed */
3146static void gen_eob(DisasContext *s)
3147{
3148 if (s->cc_op != CC_OP_DYNAMIC)
3149 gen_op_set_cc_op(s->cc_op);
3150 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
3151 tcg_gen_helper_0_0(helper_reset_inhibit_irq);
3152 }
3153
3154#ifdef VBOX
3155 gen_check_external_event(s);
3156#endif /* VBOX */
3157
3158 if (s->singlestep_enabled) {
3159 tcg_gen_helper_0_0(helper_debug);
3160 } else if (s->tf) {
3161 tcg_gen_helper_0_0(helper_single_step);
3162 } else {
3163 tcg_gen_exit_tb(0);
3164 }
3165 s->is_jmp = 3;
3166}
3167
3168 /* generate a jump to eip. No segment change may happen before this,
3169    as a direct jump to the next block may occur */
3170static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
3171{
3172 if (s->jmp_opt) {
3173 if (s->cc_op != CC_OP_DYNAMIC) {
3174 gen_op_set_cc_op(s->cc_op);
3175 s->cc_op = CC_OP_DYNAMIC;
3176 }
3177 gen_goto_tb(s, tb_num, eip);
3178 s->is_jmp = 3;
3179 } else {
3180 gen_jmp_im(eip);
3181 gen_eob(s);
3182 }
3183}
3184
3185static void gen_jmp(DisasContext *s, target_ulong eip)
3186{
3187 gen_jmp_tb(s, eip, 0);
3188}
3189
3190#ifndef VBOX
3191static inline void gen_ldq_env_A0(int idx, int offset)
3192#else /* VBOX */
3193DECLINLINE(void) gen_ldq_env_A0(int idx, int offset)
3194#endif /* VBOX */
3195{
3196 int mem_index = (idx >> 2) - 1;
3197 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
3198 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
3199}
3200
3201#ifndef VBOX
3202static inline void gen_stq_env_A0(int idx, int offset)
3203#else /* VBOX */
3204DECLINLINE(void) gen_stq_env_A0(int idx, int offset)
3205#endif /* VBOX */
3206{
3207 int mem_index = (idx >> 2) - 1;
3208 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
3209 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
3210}
3211
3212#ifndef VBOX
3213static inline void gen_ldo_env_A0(int idx, int offset)
3214#else /* VBOX */
3215DECLINLINE(void) gen_ldo_env_A0(int idx, int offset)
3216#endif /* VBOX */
3217{
3218 int mem_index = (idx >> 2) - 1;
3219 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
3220 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
3221 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
3222 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
3223 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
3224}
3225
3226#ifndef VBOX
3227static inline void gen_sto_env_A0(int idx, int offset)
3228#else /* VBOX */
3229DECLINLINE(void) gen_sto_env_A0(int idx, int offset)
3230#endif /* VBOX */
3231{
3232 int mem_index = (idx >> 2) - 1;
3233 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
3234 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
3235 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
3236 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
3237 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
3238}
3239
3240#ifndef VBOX
3241static inline void gen_op_movo(int d_offset, int s_offset)
3242#else /* VBOX */
3243DECLINLINE(void) gen_op_movo(int d_offset, int s_offset)
3244#endif /* VBOX */
3245{
3246 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
3247 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3248 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
3249 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
3250}
3251
3252#ifndef VBOX
3253static inline void gen_op_movq(int d_offset, int s_offset)
3254#else /* VBOX */
3255DECLINLINE(void) gen_op_movq(int d_offset, int s_offset)
3256#endif /* VBOX */
3257{
3258 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
3259 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3260}
3261
3262#ifndef VBOX
3263static inline void gen_op_movl(int d_offset, int s_offset)
3264#else /* VBOX */
3265DECLINLINE(void) gen_op_movl(int d_offset, int s_offset)
3266#endif /* VBOX */
3267{
3268 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
3269 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
3270}
3271
3272#ifndef VBOX
3273static inline void gen_op_movq_env_0(int d_offset)
3274#else /* VBOX */
3275DECLINLINE(void) gen_op_movq_env_0(int d_offset)
3276#endif /* VBOX */
3277{
3278 tcg_gen_movi_i64(cpu_tmp1_i64, 0);
3279 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3280}
3281
3282#define SSE_SPECIAL ((void *)1)
3283#define SSE_DUMMY ((void *)2)
3284
3285#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
3286#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
3287 helper_ ## x ## ss, helper_ ## x ## sd, }
3288
3289static void *sse_op_table1[256][4] = {
3290 /* 3DNow! extensions */
3291 [0x0e] = { SSE_DUMMY }, /* femms */
3292 [0x0f] = { SSE_DUMMY }, /* pf... */
3293 /* pure SSE operations */
3294 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
3295 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
3296 [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
3297 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
3298 [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
3299 [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
3300 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
3301 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
3302
3303 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
3304 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
3305 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
3306 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
3307 [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
3308 [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
3309 [0x2e] = { helper_ucomiss, helper_ucomisd },
3310 [0x2f] = { helper_comiss, helper_comisd },
3311 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
3312 [0x51] = SSE_FOP(sqrt),
3313 [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
3314 [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
3315 [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
3316 [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
3317 [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
3318 [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
3319 [0x58] = SSE_FOP(add),
3320 [0x59] = SSE_FOP(mul),
3321 [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
3322 helper_cvtss2sd, helper_cvtsd2ss },
3323 [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
3324 [0x5c] = SSE_FOP(sub),
3325 [0x5d] = SSE_FOP(min),
3326 [0x5e] = SSE_FOP(div),
3327 [0x5f] = SSE_FOP(max),
3328
3329 [0xc2] = SSE_FOP(cmpeq),
3330 [0xc6] = { helper_shufps, helper_shufpd },
3331
3332 [0x38] = { SSE_SPECIAL, SSE_SPECIAL, NULL, SSE_SPECIAL }, /* SSSE3/SSE4 */
3333 [0x3a] = { SSE_SPECIAL, SSE_SPECIAL }, /* SSSE3/SSE4 */
3334
3335 /* MMX ops and their SSE extensions */
3336 [0x60] = MMX_OP2(punpcklbw),
3337 [0x61] = MMX_OP2(punpcklwd),
3338 [0x62] = MMX_OP2(punpckldq),
3339 [0x63] = MMX_OP2(packsswb),
3340 [0x64] = MMX_OP2(pcmpgtb),
3341 [0x65] = MMX_OP2(pcmpgtw),
3342 [0x66] = MMX_OP2(pcmpgtl),
3343 [0x67] = MMX_OP2(packuswb),
3344 [0x68] = MMX_OP2(punpckhbw),
3345 [0x69] = MMX_OP2(punpckhwd),
3346 [0x6a] = MMX_OP2(punpckhdq),
3347 [0x6b] = MMX_OP2(packssdw),
3348 [0x6c] = { NULL, helper_punpcklqdq_xmm },
3349 [0x6d] = { NULL, helper_punpckhqdq_xmm },
3350 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
3351     [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
3352 [0x70] = { helper_pshufw_mmx,
3353 helper_pshufd_xmm,
3354 helper_pshufhw_xmm,
3355 helper_pshuflw_xmm },
3356 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
3357 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
3358 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
3359 [0x74] = MMX_OP2(pcmpeqb),
3360 [0x75] = MMX_OP2(pcmpeqw),
3361 [0x76] = MMX_OP2(pcmpeql),
3362 [0x77] = { SSE_DUMMY }, /* emms */
3363 [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
3364 [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
3365     [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, movq */
3366 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
3367 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
3368 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
3369 [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
3370 [0xd1] = MMX_OP2(psrlw),
3371 [0xd2] = MMX_OP2(psrld),
3372 [0xd3] = MMX_OP2(psrlq),
3373 [0xd4] = MMX_OP2(paddq),
3374 [0xd5] = MMX_OP2(pmullw),
3375 [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
3376 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
3377 [0xd8] = MMX_OP2(psubusb),
3378 [0xd9] = MMX_OP2(psubusw),
3379 [0xda] = MMX_OP2(pminub),
3380 [0xdb] = MMX_OP2(pand),
3381 [0xdc] = MMX_OP2(paddusb),
3382 [0xdd] = MMX_OP2(paddusw),
3383 [0xde] = MMX_OP2(pmaxub),
3384 [0xdf] = MMX_OP2(pandn),
3385 [0xe0] = MMX_OP2(pavgb),
3386 [0xe1] = MMX_OP2(psraw),
3387 [0xe2] = MMX_OP2(psrad),
3388 [0xe3] = MMX_OP2(pavgw),
3389 [0xe4] = MMX_OP2(pmulhuw),
3390 [0xe5] = MMX_OP2(pmulhw),
3391 [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
3392     [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntdq */
3393 [0xe8] = MMX_OP2(psubsb),
3394 [0xe9] = MMX_OP2(psubsw),
3395 [0xea] = MMX_OP2(pminsw),
3396 [0xeb] = MMX_OP2(por),
3397 [0xec] = MMX_OP2(paddsb),
3398 [0xed] = MMX_OP2(paddsw),
3399 [0xee] = MMX_OP2(pmaxsw),
3400 [0xef] = MMX_OP2(pxor),
3401 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
3402 [0xf1] = MMX_OP2(psllw),
3403 [0xf2] = MMX_OP2(pslld),
3404 [0xf3] = MMX_OP2(psllq),
3405 [0xf4] = MMX_OP2(pmuludq),
3406 [0xf5] = MMX_OP2(pmaddwd),
3407 [0xf6] = MMX_OP2(psadbw),
3408 [0xf7] = MMX_OP2(maskmov),
3409 [0xf8] = MMX_OP2(psubb),
3410 [0xf9] = MMX_OP2(psubw),
3411 [0xfa] = MMX_OP2(psubl),
3412 [0xfb] = MMX_OP2(psubq),
3413 [0xfc] = MMX_OP2(paddb),
3414 [0xfd] = MMX_OP2(paddw),
3415 [0xfe] = MMX_OP2(paddl),
3416};
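/* Editorial note: the second index (b1) into sse_op_table1 is derived
 * from the instruction prefix in gen_sse() below: no prefix -> 0,
 * 66 -> 1, F3 -> 2, F2 -> 3.  For the SSE_FOP rows this selects the
 * ps, pd, ss and sd forms respectively. */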
3417
3418static void *sse_op_table2[3 * 8][2] = {
3419 [0 + 2] = MMX_OP2(psrlw),
3420 [0 + 4] = MMX_OP2(psraw),
3421 [0 + 6] = MMX_OP2(psllw),
3422 [8 + 2] = MMX_OP2(psrld),
3423 [8 + 4] = MMX_OP2(psrad),
3424 [8 + 6] = MMX_OP2(pslld),
3425 [16 + 2] = MMX_OP2(psrlq),
3426 [16 + 3] = { NULL, helper_psrldq_xmm },
3427 [16 + 6] = MMX_OP2(psllq),
3428 [16 + 7] = { NULL, helper_pslldq_xmm },
3429};
3430
3431static void *sse_op_table3[4 * 3] = {
3432 helper_cvtsi2ss,
3433 helper_cvtsi2sd,
3434 X86_64_ONLY(helper_cvtsq2ss),
3435 X86_64_ONLY(helper_cvtsq2sd),
3436
3437 helper_cvttss2si,
3438 helper_cvttsd2si,
3439 X86_64_ONLY(helper_cvttss2sq),
3440 X86_64_ONLY(helper_cvttsd2sq),
3441
3442 helper_cvtss2si,
3443 helper_cvtsd2si,
3444 X86_64_ONLY(helper_cvtss2sq),
3445 X86_64_ONLY(helper_cvtsd2sq),
3446};
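/* Editorial note: gen_sse() indexes this table as row * 4 + column:
 * row 0 holds the cvtsi2 forms, row 1 the truncating cvtt forms and
 * row 2 the rounding cvt forms (selected via "+ 4 + (b & 1) * 4");
 * within a row, (b >> 8) - 2 picks ss (F3) versus sd (F2), and
 * (s->dflag == 2) * 2 picks the 64-bit integer variants. */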
3447
3448static void *sse_op_table4[8][4] = {
3449 SSE_FOP(cmpeq),
3450 SSE_FOP(cmplt),
3451 SSE_FOP(cmple),
3452 SSE_FOP(cmpunord),
3453 SSE_FOP(cmpneq),
3454 SSE_FOP(cmpnlt),
3455 SSE_FOP(cmpnle),
3456 SSE_FOP(cmpord),
3457};
3458
3459static void *sse_op_table5[256] = {
3460 [0x0c] = helper_pi2fw,
3461 [0x0d] = helper_pi2fd,
3462 [0x1c] = helper_pf2iw,
3463 [0x1d] = helper_pf2id,
3464 [0x8a] = helper_pfnacc,
3465 [0x8e] = helper_pfpnacc,
3466 [0x90] = helper_pfcmpge,
3467 [0x94] = helper_pfmin,
3468 [0x96] = helper_pfrcp,
3469 [0x97] = helper_pfrsqrt,
3470 [0x9a] = helper_pfsub,
3471 [0x9e] = helper_pfadd,
3472 [0xa0] = helper_pfcmpgt,
3473 [0xa4] = helper_pfmax,
3474 [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
3475 [0xa7] = helper_movq, /* pfrsqit1 */
3476 [0xaa] = helper_pfsubr,
3477 [0xae] = helper_pfacc,
3478 [0xb0] = helper_pfcmpeq,
3479 [0xb4] = helper_pfmul,
3480 [0xb6] = helper_movq, /* pfrcpit2 */
3481 [0xb7] = helper_pmulhrw_mmx,
3482 [0xbb] = helper_pswapd,
3483 [0xbf] = helper_pavgb_mmx /* pavgusb */
3484};
3485
3486struct sse_op_helper_s {
3487 void *op[2]; uint32_t ext_mask;
3488};
3489#define SSSE3_OP(x) { MMX_OP2(x), CPUID_EXT_SSSE3 }
3490#define SSE41_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE41 }
3491#define SSE42_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE42 }
3492#define SSE41_SPECIAL { { NULL, SSE_SPECIAL }, CPUID_EXT_SSE41 }
3493static struct sse_op_helper_s sse_op_table6[256] = {
3494 [0x00] = SSSE3_OP(pshufb),
3495 [0x01] = SSSE3_OP(phaddw),
3496 [0x02] = SSSE3_OP(phaddd),
3497 [0x03] = SSSE3_OP(phaddsw),
3498 [0x04] = SSSE3_OP(pmaddubsw),
3499 [0x05] = SSSE3_OP(phsubw),
3500 [0x06] = SSSE3_OP(phsubd),
3501 [0x07] = SSSE3_OP(phsubsw),
3502 [0x08] = SSSE3_OP(psignb),
3503 [0x09] = SSSE3_OP(psignw),
3504 [0x0a] = SSSE3_OP(psignd),
3505 [0x0b] = SSSE3_OP(pmulhrsw),
3506 [0x10] = SSE41_OP(pblendvb),
3507 [0x14] = SSE41_OP(blendvps),
3508 [0x15] = SSE41_OP(blendvpd),
3509 [0x17] = SSE41_OP(ptest),
3510 [0x1c] = SSSE3_OP(pabsb),
3511 [0x1d] = SSSE3_OP(pabsw),
3512 [0x1e] = SSSE3_OP(pabsd),
3513 [0x20] = SSE41_OP(pmovsxbw),
3514 [0x21] = SSE41_OP(pmovsxbd),
3515 [0x22] = SSE41_OP(pmovsxbq),
3516 [0x23] = SSE41_OP(pmovsxwd),
3517 [0x24] = SSE41_OP(pmovsxwq),
3518 [0x25] = SSE41_OP(pmovsxdq),
3519 [0x28] = SSE41_OP(pmuldq),
3520 [0x29] = SSE41_OP(pcmpeqq),
3521     [0x2a] = SSE41_SPECIAL, /* movntdqa */
3522 [0x2b] = SSE41_OP(packusdw),
3523 [0x30] = SSE41_OP(pmovzxbw),
3524 [0x31] = SSE41_OP(pmovzxbd),
3525 [0x32] = SSE41_OP(pmovzxbq),
3526 [0x33] = SSE41_OP(pmovzxwd),
3527 [0x34] = SSE41_OP(pmovzxwq),
3528 [0x35] = SSE41_OP(pmovzxdq),
3529 [0x37] = SSE42_OP(pcmpgtq),
3530 [0x38] = SSE41_OP(pminsb),
3531 [0x39] = SSE41_OP(pminsd),
3532 [0x3a] = SSE41_OP(pminuw),
3533 [0x3b] = SSE41_OP(pminud),
3534 [0x3c] = SSE41_OP(pmaxsb),
3535 [0x3d] = SSE41_OP(pmaxsd),
3536 [0x3e] = SSE41_OP(pmaxuw),
3537 [0x3f] = SSE41_OP(pmaxud),
3538 [0x40] = SSE41_OP(pmulld),
3539 [0x41] = SSE41_OP(phminposuw),
3540};
3541
3542static struct sse_op_helper_s sse_op_table7[256] = {
3543 [0x08] = SSE41_OP(roundps),
3544 [0x09] = SSE41_OP(roundpd),
3545 [0x0a] = SSE41_OP(roundss),
3546 [0x0b] = SSE41_OP(roundsd),
3547 [0x0c] = SSE41_OP(blendps),
3548 [0x0d] = SSE41_OP(blendpd),
3549 [0x0e] = SSE41_OP(pblendw),
3550 [0x0f] = SSSE3_OP(palignr),
3551 [0x14] = SSE41_SPECIAL, /* pextrb */
3552 [0x15] = SSE41_SPECIAL, /* pextrw */
3553 [0x16] = SSE41_SPECIAL, /* pextrd/pextrq */
3554 [0x17] = SSE41_SPECIAL, /* extractps */
3555 [0x20] = SSE41_SPECIAL, /* pinsrb */
3556 [0x21] = SSE41_SPECIAL, /* insertps */
3557 [0x22] = SSE41_SPECIAL, /* pinsrd/pinsrq */
3558 [0x40] = SSE41_OP(dpps),
3559 [0x41] = SSE41_OP(dppd),
3560 [0x42] = SSE41_OP(mpsadbw),
3561 [0x60] = SSE42_OP(pcmpestrm),
3562 [0x61] = SSE42_OP(pcmpestri),
3563 [0x62] = SSE42_OP(pcmpistrm),
3564 [0x63] = SSE42_OP(pcmpistri),
3565};
3566
3567static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
3568{
3569 int b1, op1_offset, op2_offset, is_xmm, val, ot;
3570 int modrm, mod, rm, reg, reg_addr, offset_addr;
3571 void *sse_op2;
3572
3573 b &= 0xff;
3574 if (s->prefix & PREFIX_DATA)
3575 b1 = 1;
3576 else if (s->prefix & PREFIX_REPZ)
3577 b1 = 2;
3578 else if (s->prefix & PREFIX_REPNZ)
3579 b1 = 3;
3580 else
3581 b1 = 0;
3582 sse_op2 = sse_op_table1[b][b1];
3583 if (!sse_op2)
3584 goto illegal_op;
3585 if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
3586 is_xmm = 1;
3587 } else {
3588 if (b1 == 0) {
3589 /* MMX case */
3590 is_xmm = 0;
3591 } else {
3592 is_xmm = 1;
3593 }
3594 }
3595 /* simple MMX/SSE operation */
3596 if (s->flags & HF_TS_MASK) {
3597 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
3598 return;
3599 }
3600 if (s->flags & HF_EM_MASK) {
3601 illegal_op:
3602 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
3603 return;
3604 }
3605 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
3606 if ((b != 0x38 && b != 0x3a) || (s->prefix & PREFIX_DATA))
3607 goto illegal_op;
3608 if (b == 0x0e) {
3609 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3610 goto illegal_op;
3611 /* femms */
3612 tcg_gen_helper_0_0(helper_emms);
3613 return;
3614 }
3615 if (b == 0x77) {
3616 /* emms */
3617 tcg_gen_helper_0_0(helper_emms);
3618 return;
3619 }
3620 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
3621 the static cpu state) */
3622 if (!is_xmm) {
3623 tcg_gen_helper_0_0(helper_enter_mmx);
3624 }
3625
3626 modrm = ldub_code(s->pc++);
3627 reg = ((modrm >> 3) & 7);
3628 if (is_xmm)
3629 reg |= rex_r;
3630 mod = (modrm >> 6) & 3;
3631 if (sse_op2 == SSE_SPECIAL) {
3632 b |= (b1 << 8);
3633 switch(b) {
3634 case 0x0e7: /* movntq */
3635 if (mod == 3)
3636 goto illegal_op;
3637 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3638 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3639 break;
3640 case 0x1e7: /* movntdq */
3641 case 0x02b: /* movntps */
3642         case 0x12b: /* movntpd */
3644             if (mod == 3)
3645                 goto illegal_op;
3646             gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3647             gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3648             break;
        case 0x3f0: /* lddqu: a load, unlike the movnt stores above */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            break;
3649 case 0x6e: /* movd mm, ea */
3650#ifdef TARGET_X86_64
3651 if (s->dflag == 2) {
3652 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3653 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
3654 } else
3655#endif
3656 {
3657 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3658 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3659 offsetof(CPUX86State,fpregs[reg].mmx));
3660 tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
3661 }
3662 break;
3663 case 0x16e: /* movd xmm, ea */
3664#ifdef TARGET_X86_64
3665 if (s->dflag == 2) {
3666 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3667 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3668 offsetof(CPUX86State,xmm_regs[reg]));
3669 tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
3670 } else
3671#endif
3672 {
3673 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3674 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3675 offsetof(CPUX86State,xmm_regs[reg]));
3676 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3677 tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
3678 }
3679 break;
3680 case 0x6f: /* movq mm, ea */
3681 if (mod != 3) {
3682 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3683 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3684 } else {
3685 rm = (modrm & 7);
3686 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
3687 offsetof(CPUX86State,fpregs[rm].mmx));
3688 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
3689 offsetof(CPUX86State,fpregs[reg].mmx));
3690 }
3691 break;
3692 case 0x010: /* movups */
3693 case 0x110: /* movupd */
3694 case 0x028: /* movaps */
3695 case 0x128: /* movapd */
3696 case 0x16f: /* movdqa xmm, ea */
3697 case 0x26f: /* movdqu xmm, ea */
3698 if (mod != 3) {
3699 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3700 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3701 } else {
3702 rm = (modrm & 7) | REX_B(s);
3703 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
3704 offsetof(CPUX86State,xmm_regs[rm]));
3705 }
3706 break;
3707 case 0x210: /* movss xmm, ea */
3708 if (mod != 3) {
3709 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3710 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3711 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3712 gen_op_movl_T0_0();
3713 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3714 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3715 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3716 } else {
3717 rm = (modrm & 7) | REX_B(s);
3718 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3719 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3720 }
3721 break;
3722 case 0x310: /* movsd xmm, ea */
3723 if (mod != 3) {
3724 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3725 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3726 gen_op_movl_T0_0();
3727 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3728 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3729 } else {
3730 rm = (modrm & 7) | REX_B(s);
3731 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3732 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3733 }
3734 break;
3735 case 0x012: /* movlps */
3736 case 0x112: /* movlpd */
3737 if (mod != 3) {
3738 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3739 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3740 } else {
3741 /* movhlps */
3742 rm = (modrm & 7) | REX_B(s);
3743 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3744 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3745 }
3746 break;
3747 case 0x212: /* movsldup */
3748 if (mod != 3) {
3749 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3750 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3751 } else {
3752 rm = (modrm & 7) | REX_B(s);
3753 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3754 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3755 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3756 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
3757 }
3758 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3759 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3760 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3761 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3762 break;
3763 case 0x312: /* movddup */
3764 if (mod != 3) {
3765 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3766 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3767 } else {
3768 rm = (modrm & 7) | REX_B(s);
3769 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3770 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3771 }
3772 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3773 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3774 break;
3775 case 0x016: /* movhps */
3776 case 0x116: /* movhpd */
3777 if (mod != 3) {
3778 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3779 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3780 } else {
3781 /* movlhps */
3782 rm = (modrm & 7) | REX_B(s);
3783 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3784 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3785 }
3786 break;
3787 case 0x216: /* movshdup */
3788 if (mod != 3) {
3789 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3790 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3791 } else {
3792 rm = (modrm & 7) | REX_B(s);
3793 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3794 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
3795 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3796 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
3797 }
3798 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3799 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3800 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3801 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3802 break;
3803 case 0x7e: /* movd ea, mm */
3804#ifdef TARGET_X86_64
3805 if (s->dflag == 2) {
3806 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3807 offsetof(CPUX86State,fpregs[reg].mmx));
3808 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3809 } else
3810#endif
3811 {
3812 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3813 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
3814 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3815 }
3816 break;
3817 case 0x17e: /* movd ea, xmm */
3818#ifdef TARGET_X86_64
3819 if (s->dflag == 2) {
3820 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3821 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3822 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3823 } else
3824#endif
3825 {
3826 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3827 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3828 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3829 }
3830 break;
3831 case 0x27e: /* movq xmm, ea */
3832 if (mod != 3) {
3833 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3834 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3835 } else {
3836 rm = (modrm & 7) | REX_B(s);
3837 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3838 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3839 }
3840 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3841 break;
3842 case 0x7f: /* movq ea, mm */
3843 if (mod != 3) {
3844 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3845 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3846 } else {
3847 rm = (modrm & 7);
3848 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3849 offsetof(CPUX86State,fpregs[reg].mmx));
3850 }
3851 break;
3852 case 0x011: /* movups */
3853 case 0x111: /* movupd */
3854 case 0x029: /* movaps */
3855 case 0x129: /* movapd */
3856 case 0x17f: /* movdqa ea, xmm */
3857 case 0x27f: /* movdqu ea, xmm */
3858 if (mod != 3) {
3859 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3860 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3861 } else {
3862 rm = (modrm & 7) | REX_B(s);
3863 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
3864 offsetof(CPUX86State,xmm_regs[reg]));
3865 }
3866 break;
3867 case 0x211: /* movss ea, xmm */
3868 if (mod != 3) {
3869 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3870 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3871 gen_op_st_T0_A0(OT_LONG + s->mem_index);
3872 } else {
3873 rm = (modrm & 7) | REX_B(s);
3874 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
3875 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3876 }
3877 break;
3878 case 0x311: /* movsd ea, xmm */
3879 if (mod != 3) {
3880 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3881 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3882 } else {
3883 rm = (modrm & 7) | REX_B(s);
3884 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3885 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3886 }
3887 break;
3888 case 0x013: /* movlps */
3889 case 0x113: /* movlpd */
3890 if (mod != 3) {
3891 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3892 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3893 } else {
3894 goto illegal_op;
3895 }
3896 break;
3897 case 0x017: /* movhps */
3898 case 0x117: /* movhpd */
3899 if (mod != 3) {
3900 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3901 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3902 } else {
3903 goto illegal_op;
3904 }
3905 break;
3906 case 0x71: /* shift mm, im */
3907 case 0x72:
3908 case 0x73:
3909 case 0x171: /* shift xmm, im */
3910 case 0x172:
3911 case 0x173:
3912 val = ldub_code(s->pc++);
3913 if (is_xmm) {
3914 gen_op_movl_T0_im(val);
3915 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3916 gen_op_movl_T0_0();
3917 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
3918 op1_offset = offsetof(CPUX86State,xmm_t0);
3919 } else {
3920 gen_op_movl_T0_im(val);
3921 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
3922 gen_op_movl_T0_0();
3923 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
3924 op1_offset = offsetof(CPUX86State,mmx_t0);
3925 }
3926 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
3927 if (!sse_op2)
3928 goto illegal_op;
3929 if (is_xmm) {
3930 rm = (modrm & 7) | REX_B(s);
3931 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3932 } else {
3933 rm = (modrm & 7);
3934 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3935 }
3936 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3937 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3938 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3939 break;
3940 case 0x050: /* movmskps */
3941 rm = (modrm & 7) | REX_B(s);
3942 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3943 offsetof(CPUX86State,xmm_regs[rm]));
3944 tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
3945 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3946 gen_op_mov_reg_T0(OT_LONG, reg);
3947 break;
3948 case 0x150: /* movmskpd */
3949 rm = (modrm & 7) | REX_B(s);
3950 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3951 offsetof(CPUX86State,xmm_regs[rm]));
3952 tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
3953 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3954 gen_op_mov_reg_T0(OT_LONG, reg);
3955 break;
3956 case 0x02a: /* cvtpi2ps */
3957 case 0x12a: /* cvtpi2pd */
3958 tcg_gen_helper_0_0(helper_enter_mmx);
3959 if (mod != 3) {
3960 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3961 op2_offset = offsetof(CPUX86State,mmx_t0);
3962 gen_ldq_env_A0(s->mem_index, op2_offset);
3963 } else {
3964 rm = (modrm & 7);
3965 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3966 }
3967 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3968 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3969 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3970 switch(b >> 8) {
3971 case 0x0:
3972 tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
3973 break;
3974 default:
3975 case 0x1:
3976 tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
3977 break;
3978 }
3979 break;
3980 case 0x22a: /* cvtsi2ss */
3981 case 0x32a: /* cvtsi2sd */
3982 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3983 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3984 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3985 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3986 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3987 if (ot == OT_LONG) {
3988 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3989 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
3990 } else {
3991 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_T[0]);
3992 }
3993 break;
3994 case 0x02c: /* cvttps2pi */
3995 case 0x12c: /* cvttpd2pi */
3996 case 0x02d: /* cvtps2pi */
3997 case 0x12d: /* cvtpd2pi */
3998 tcg_gen_helper_0_0(helper_enter_mmx);
3999 if (mod != 3) {
4000 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4001 op2_offset = offsetof(CPUX86State,xmm_t0);
4002 gen_ldo_env_A0(s->mem_index, op2_offset);
4003 } else {
4004 rm = (modrm & 7) | REX_B(s);
4005 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
4006 }
4007 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
4008 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4009 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4010 switch(b) {
4011 case 0x02c:
4012 tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
4013 break;
4014 case 0x12c:
4015 tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
4016 break;
4017 case 0x02d:
4018 tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
4019 break;
4020 case 0x12d:
4021 tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
4022 break;
4023 }
4024 break;
4025 case 0x22c: /* cvttss2si */
4026 case 0x32c: /* cvttsd2si */
4027 case 0x22d: /* cvtss2si */
4028 case 0x32d: /* cvtsd2si */
4029 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4030 if (mod != 3) {
4031 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4032 if ((b >> 8) & 1) {
4033 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
4034 } else {
4035 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4036 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
4037 }
4038 op2_offset = offsetof(CPUX86State,xmm_t0);
4039 } else {
4040 rm = (modrm & 7) | REX_B(s);
4041 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
4042 }
4043 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
4044 (b & 1) * 4];
4045 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
4046 if (ot == OT_LONG) {
4047 tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
4048 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4049 } else {
4050 tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
4051 }
4052 gen_op_mov_reg_T0(ot, reg);
4053 break;
4054 case 0xc4: /* pinsrw */
4055 case 0x1c4:
4056 s->rip_offset = 1;
4057 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4058 val = ldub_code(s->pc++);
4059 if (b1) {
4060 val &= 7;
4061 tcg_gen_st16_tl(cpu_T[0], cpu_env,
4062 offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
4063 } else {
4064 val &= 3;
4065 tcg_gen_st16_tl(cpu_T[0], cpu_env,
4066 offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
4067 }
4068 break;
4069 case 0xc5: /* pextrw */
4070 case 0x1c5:
4071 if (mod != 3)
4072 goto illegal_op;
4073 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4074 val = ldub_code(s->pc++);
4075 if (b1) {
4076 val &= 7;
4077 rm = (modrm & 7) | REX_B(s);
4078 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
4079 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
4080 } else {
4081 val &= 3;
4082 rm = (modrm & 7);
4083 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
4084 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
4085 }
4086 reg = ((modrm >> 3) & 7) | rex_r;
4087 gen_op_mov_reg_T0(ot, reg);
4088 break;
4089 case 0x1d6: /* movq ea, xmm */
4090 if (mod != 3) {
4091 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4092 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
4093 } else {
4094 rm = (modrm & 7) | REX_B(s);
4095 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
4096 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
4097 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
4098 }
4099 break;
4100 case 0x2d6: /* movq2dq */
4101 tcg_gen_helper_0_0(helper_enter_mmx);
4102 rm = (modrm & 7);
4103 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
4104 offsetof(CPUX86State,fpregs[rm].mmx));
4105 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
4106 break;
4107 case 0x3d6: /* movdq2q */
4108 tcg_gen_helper_0_0(helper_enter_mmx);
4109 rm = (modrm & 7) | REX_B(s);
4110 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
4111 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
4112 break;
4113 case 0xd7: /* pmovmskb */
4114 case 0x1d7:
4115 if (mod != 3)
4116 goto illegal_op;
4117 if (b1) {
4118 rm = (modrm & 7) | REX_B(s);
4119 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
4120 tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
4121 } else {
4122 rm = (modrm & 7);
4123 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
4124 tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
4125 }
4126 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4127 reg = ((modrm >> 3) & 7) | rex_r;
4128 gen_op_mov_reg_T0(OT_LONG, reg);
4129 break;
4130 case 0x138:
4131 if (s->prefix & PREFIX_REPNZ)
4132 goto crc32;
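/* fall through to the 0F 38 xx opcode decoder when there is no REPNZ prefix */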
4133 case 0x038:
4134 b = modrm;
4135 modrm = ldub_code(s->pc++);
4136 rm = modrm & 7;
4137 reg = ((modrm >> 3) & 7) | rex_r;
4138 mod = (modrm >> 6) & 3;
4139
4140 sse_op2 = sse_op_table6[b].op[b1];
4141 if (!sse_op2)
4142 goto illegal_op;
4143 if (!(s->cpuid_ext_features & sse_op_table6[b].ext_mask))
4144 goto illegal_op;
4145
4146 if (b1) {
4147 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4148 if (mod == 3) {
4149 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
4150 } else {
4151 op2_offset = offsetof(CPUX86State,xmm_t0);
4152 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4153 switch (b) {
4154 case 0x20: case 0x30: /* pmovsxbw, pmovzxbw */
4155 case 0x23: case 0x33: /* pmovsxwd, pmovzxwd */
4156 case 0x25: case 0x35: /* pmovsxdq, pmovzxdq */
4157 gen_ldq_env_A0(s->mem_index, op2_offset +
4158 offsetof(XMMReg, XMM_Q(0)));
4159 break;
4160 case 0x21: case 0x31: /* pmovsxbd, pmovzxbd */
4161 case 0x24: case 0x34: /* pmovsxwq, pmovzxwq */
4162 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0,
4163 (s->mem_index >> 2) - 1);
4164 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0);
4165 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, op2_offset +
4166 offsetof(XMMReg, XMM_L(0)));
4167 break;
4168 case 0x22: case 0x32: /* pmovsxbq, pmovzxbq */
4169 tcg_gen_qemu_ld16u(cpu_tmp0, cpu_A0,
4170 (s->mem_index >> 2) - 1);
4171 tcg_gen_st16_tl(cpu_tmp0, cpu_env, op2_offset +
4172 offsetof(XMMReg, XMM_W(0)));
4173 break;
4174 case 0x2a: /* movntdqa */
4175 gen_ldo_env_A0(s->mem_index, op1_offset);
4176 return;
4177 default:
4178 gen_ldo_env_A0(s->mem_index, op2_offset);
4179 }
4180 }
4181 } else {
4182 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4183 if (mod == 3) {
4184 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4185 } else {
4186 op2_offset = offsetof(CPUX86State,mmx_t0);
4187 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4188 gen_ldq_env_A0(s->mem_index, op2_offset);
4189 }
4190 }
4191 if (sse_op2 == SSE_SPECIAL)
4192 goto illegal_op;
4193
4194 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4195 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4196 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4197
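/* 0x17 in the 0F 38 space is ptest, which writes ZF/CF */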
4198 if (b == 0x17)
4199 s->cc_op = CC_OP_EFLAGS;
4200 break;
4201 case 0x338: /* crc32 */
4202 crc32:
4203 b = modrm;
4204 modrm = ldub_code(s->pc++);
4205 reg = ((modrm >> 3) & 7) | rex_r;
4206
4207 if (b != 0xf0 && b != 0xf1)
4208 goto illegal_op;
4209 if (!(s->cpuid_ext_features & CPUID_EXT_SSE42))
4210 goto illegal_op;
4211
4212 if (b == 0xf0)
4213 ot = OT_BYTE;
4214 else if (b == 0xf1 && s->dflag != 2)
4215 if (s->prefix & PREFIX_DATA)
4216 ot = OT_WORD;
4217 else
4218 ot = OT_LONG;
4219 else
4220 ot = OT_QUAD;
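/* So, e.g., F2 0F 38 F1 /r defaults to OT_LONG, picks OT_WORD with a 66
   prefix and OT_QUAD with REX.W, while F2 0F 38 F0 /r is always OT_BYTE. */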
4221
4222 gen_op_mov_TN_reg(OT_LONG, 0, reg);
4223 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4224 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4225 tcg_gen_helper_1_3(helper_crc32, cpu_T[0], cpu_tmp2_i32,
4226 cpu_T[0], tcg_const_i32(8 << ot));
4227
4228 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4229 gen_op_mov_reg_T0(ot, reg);
4230 break;
4231 case 0x03a:
4232 case 0x13a:
4233 b = modrm;
4234 modrm = ldub_code(s->pc++);
4235 rm = modrm & 7;
4236 reg = ((modrm >> 3) & 7) | rex_r;
4237 mod = (modrm >> 6) & 3;
4238
4239 sse_op2 = sse_op_table7[b].op[b1];
4240 if (!sse_op2)
4241 goto illegal_op;
4242 if (!(s->cpuid_ext_features & sse_op_table7[b].ext_mask))
4243 goto illegal_op;
4244
4245 if (sse_op2 == SSE_SPECIAL) {
4246 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4247 rm = (modrm & 7) | REX_B(s);
4248 if (mod != 3)
4249 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4250 reg = ((modrm >> 3) & 7) | rex_r;
4251 val = ldub_code(s->pc++);
4252 switch (b) {
4253 case 0x14: /* pextrb */
4254 tcg_gen_ld8u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4255 xmm_regs[reg].XMM_B(val & 15)));
4256 if (mod == 3)
4257 gen_op_mov_reg_T0(ot, rm);
4258 else
4259 tcg_gen_qemu_st8(cpu_T[0], cpu_A0,
4260 (s->mem_index >> 2) - 1);
4261 break;
4262 case 0x15: /* pextrw */
4263 tcg_gen_ld16u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4264 xmm_regs[reg].XMM_W(val & 7)));
4265 if (mod == 3)
4266 gen_op_mov_reg_T0(ot, rm);
4267 else
4268 tcg_gen_qemu_st16(cpu_T[0], cpu_A0,
4269 (s->mem_index >> 2) - 1);
4270 break;
4271 case 0x16:
4272 if (ot == OT_LONG) { /* pextrd */
4273 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
4274 offsetof(CPUX86State,
4275 xmm_regs[reg].XMM_L(val & 3)));
4276 if (mod == 3)
4277 gen_op_mov_reg_v(ot, rm, cpu_tmp2_i32);
4278 else
4279 tcg_gen_qemu_st32(cpu_tmp2_i32, cpu_A0,
4280 (s->mem_index >> 2) - 1);
4281 } else { /* pextrq */
4282 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
4283 offsetof(CPUX86State,
4284 xmm_regs[reg].XMM_Q(val & 1)));
4285 if (mod == 3)
4286 gen_op_mov_reg_v(ot, rm, cpu_tmp1_i64);
4287 else
4288 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
4289 (s->mem_index >> 2) - 1);
4290 }
4291 break;
4292 case 0x17: /* extractps */
4293 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4294 xmm_regs[reg].XMM_L(val & 3)));
4295 if (mod == 3)
4296 gen_op_mov_reg_T0(ot, rm);
4297 else
4298 tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
4299 (s->mem_index >> 2) - 1);
4300 break;
4301 case 0x20: /* pinsrb */
4302 if (mod == 3)
4303 gen_op_mov_TN_reg(OT_LONG, 0, rm);
4304 else
4305 tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0,
4306 (s->mem_index >> 2) - 1);
4307 tcg_gen_st8_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4308 xmm_regs[reg].XMM_B(val & 15)));
4309 break;
4310 case 0x21: /* insertps */
4311 if (mod == 3)
4312 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
4313 offsetof(CPUX86State,xmm_regs[rm]
4314 .XMM_L((val >> 6) & 3)));
4315 else
4316 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
4317 (s->mem_index >> 2) - 1);
4318 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
4319 offsetof(CPUX86State,xmm_regs[reg]
4320 .XMM_L((val >> 4) & 3)));
4321 if ((val >> 0) & 1)
4322 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4323 cpu_env, offsetof(CPUX86State,
4324 xmm_regs[reg].XMM_L(0)));
4325 if ((val >> 1) & 1)
4326 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4327 cpu_env, offsetof(CPUX86State,
4328 xmm_regs[reg].XMM_L(1)));
4329 if ((val >> 2) & 1)
4330 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4331 cpu_env, offsetof(CPUX86State,
4332 xmm_regs[reg].XMM_L(2)));
4333 if ((val >> 3) & 1)
4334 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4335 cpu_env, offsetof(CPUX86State,
4336 xmm_regs[reg].XMM_L(3)));
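/* insertps imm8 layout: bits 7:6 select the source dword (register form
   only), bits 5:4 the destination dword, bits 3:0 a zero mask - matching
   the field extractions above. */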
4337 break;
4338 case 0x22:
4339 if (ot == OT_LONG) { /* pinsrd */
4340 if (mod == 3)
4341 gen_op_mov_v_reg(ot, cpu_tmp2_i32, rm);
4342 else
4343 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
4344 (s->mem_index >> 2) - 1);
4345 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
4346 offsetof(CPUX86State,
4347 xmm_regs[reg].XMM_L(val & 3)));
4348 } else { /* pinsrq */
4349 if (mod == 3)
4350 gen_op_mov_v_reg(ot, cpu_tmp1_i64, rm);
4351 else
4352 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
4353 (s->mem_index >> 2) - 1);
4354 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
4355 offsetof(CPUX86State,
4356 xmm_regs[reg].XMM_Q(val & 1)));
4357 }
4358 break;
4359 }
4360 return;
4361 }
4362
4363 if (b1) {
4364 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4365 if (mod == 3) {
4366 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
4367 } else {
4368 op2_offset = offsetof(CPUX86State,xmm_t0);
4369 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4370 gen_ldo_env_A0(s->mem_index, op2_offset);
4371 }
4372 } else {
4373 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4374 if (mod == 3) {
4375 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4376 } else {
4377 op2_offset = offsetof(CPUX86State,mmx_t0);
4378 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4379 gen_ldq_env_A0(s->mem_index, op2_offset);
4380 }
4381 }
4382 val = ldub_code(s->pc++);
4383
4384 if ((b & 0xfc) == 0x60) { /* pcmpXstrX */
4385 s->cc_op = CC_OP_EFLAGS;
4386
4387 if (s->dflag == 2)
4388 /* The helper must use entire 64-bit gp registers */
4389 val |= 1 << 8;
4390 }
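/* Bit 8 lies outside the architectural imm8; it is a private flag
   telling the pcmpXstrX helpers to use the full 64-bit gp registers. */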
4391
4392 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4393 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4394 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
4395 break;
4396 default:
4397 goto illegal_op;
4398 }
4399 } else {
4400 /* generic MMX or SSE operation */
4401 switch(b) {
4402 case 0x70: /* pshufx insn */
4403 case 0xc6: /* shufps/shufpd */
4404 case 0xc2: /* compare insns */
4405 s->rip_offset = 1;
4406 break;
4407 default:
4408 break;
4409 }
4410 if (is_xmm) {
4411 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4412 if (mod != 3) {
4413 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4414 op2_offset = offsetof(CPUX86State,xmm_t0);
4415 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
4416 b == 0xc2)) {
4417 /* specific case for SSE single instructions */
4418 if (b1 == 2) {
4419 /* 32 bit access */
4420 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4421 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
4422 } else {
4423 /* 64 bit access */
4424 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
4425 }
4426 } else {
4427 gen_ldo_env_A0(s->mem_index, op2_offset);
4428 }
4429 } else {
4430 rm = (modrm & 7) | REX_B(s);
4431 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
4432 }
4433 } else {
4434 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4435 if (mod != 3) {
4436 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4437 op2_offset = offsetof(CPUX86State,mmx_t0);
4438 gen_ldq_env_A0(s->mem_index, op2_offset);
4439 } else {
4440 rm = (modrm & 7);
4441 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4442 }
4443 }
4444 switch(b) {
4445 case 0x0f: /* 3DNow! data insns */
4446 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
4447 goto illegal_op;
4448 val = ldub_code(s->pc++);
4449 sse_op2 = sse_op_table5[val];
4450 if (!sse_op2)
4451 goto illegal_op;
4452 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4453 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4454 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4455 break;
4456 case 0x70: /* pshufx insn */
4457 case 0xc6: /* shufps/shufpd */
4458 val = ldub_code(s->pc++);
4459 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4460 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4461 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
4462 break;
4463 case 0xc2:
4464 /* compare insns */
4465 val = ldub_code(s->pc++);
4466 if (val >= 8)
4467 goto illegal_op;
4468 sse_op2 = sse_op_table4[val][b1];
4469 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4470 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4471 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4472 break;
4473 case 0xf7:
4474 /* maskmovq/maskmovdqu: we must build the implicit rDI address in A0 ourselves */
4475 if (mod != 3)
4476 goto illegal_op;
4477#ifdef TARGET_X86_64
4478 if (s->aflag == 2) {
4479 gen_op_movq_A0_reg(R_EDI);
4480 } else
4481#endif
4482 {
4483 gen_op_movl_A0_reg(R_EDI);
4484 if (s->aflag == 0)
4485 gen_op_andl_A0_ffff();
4486 }
4487 gen_add_A0_ds_seg(s);
4488
4489 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4490 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4491 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
4492 break;
4493 default:
4494 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4495 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4496 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4497 break;
4498 }
4499 if (b == 0x2e || b == 0x2f) {
4500 s->cc_op = CC_OP_EFLAGS;
4501 }
4502 }
4503}
4504
4505#ifdef VBOX
4506 /* Checks whether this is an invalid lock sequence. Only a few instructions
4507    can be used together with the lock prefix, and of those only the
4508    forms that write to a memory operand, so this is kind of annoying
4509    work to do... (A few worked byte-sequence examples follow the function.)
4510 The AMD manual lists the following instructions.
4511 ADC
4512 ADD
4513 AND
4514 BTC
4515 BTR
4516 BTS
4517 CMPXCHG
4518 CMPXCHG8B
4519 CMPXCHG16B
4520 DEC
4521 INC
4522 NEG
4523 NOT
4524 OR
4525 SBB
4526 SUB
4527 XADD
4528 XCHG
4529 XOR */
4530static bool is_invalid_lock_sequence(DisasContext *s, target_ulong pc_start, int b)
4531{
4532 target_ulong pc = s->pc;
4533 int modrm, mod, op;
4534
4535 /* X={8,16,32,64} Y={16,32,64} */
4536 switch (b)
4537 {
4538 /* /2: ADC reg/memX, immX */
4539 /* /0: ADD reg/memX, immX */
4540 /* /4: AND reg/memX, immX */
4541 /* /1: OR reg/memX, immX */
4542 /* /3: SBB reg/memX, immX */
4543 /* /5: SUB reg/memX, immX */
4544 /* /6: XOR reg/memX, immX */
4545 case 0x80:
4546 case 0x81:
4547 case 0x83:
4548 modrm = ldub_code(pc++);
4549 op = (modrm >> 3) & 7;
4550 if (op == 7) /* /7: CMP */
4551 break;
4552 mod = (modrm >> 6) & 3;
4553 if (mod == 3) /* register destination */
4554 break;
4555 return false;
4556
4557 case 0x10: /* /r: ADC reg/mem8, reg8 */
4558 case 0x11: /* /r: ADC reg/memY, regY */
4559 case 0x00: /* /r: ADD reg/mem8, reg8 */
4560 case 0x01: /* /r: ADD reg/memY, regY */
4561 case 0x20: /* /r: AND reg/mem8, reg8 */
4562 case 0x21: /* /r: AND reg/memY, regY */
4563 case 0x08: /* /r: OR reg/mem8, reg8 */
4564 case 0x09: /* /r: OR reg/memY, regY */
4565 case 0x18: /* /r: SBB reg/mem8, reg8 */
4566 case 0x19: /* /r: SBB reg/memY, regY */
4567 case 0x28: /* /r: SUB reg/mem8, reg8 */
4568 case 0x29: /* /r: SUB reg/memY, regY */
4569 case 0x86: /* /r: XCHG reg/mem8, reg8 or XCHG reg8, reg/mem8 */
4570 case 0x87: /* /r: XCHG reg/memY, regY or XCHG regY, reg/memY */
4571 case 0x30: /* /r: XOR reg/mem8, reg8 */
4572 case 0x31: /* /r: XOR reg/memY, regY */
4573 modrm = ldub_code(pc++);
4574 mod = (modrm >> 6) & 3;
4575 if (mod == 3) /* register destination */
4576 break;
4577 return false;
4578
4579 /* /1: DEC reg/memX */
4580 /* /0: INC reg/memX */
4581 case 0xfe:
4582 case 0xff:
4583 modrm = ldub_code(pc++);
4584 mod = (modrm >> 6) & 3;
4585 if (mod == 3) /* register destination */
4586 break;
4587 return false;
4588
4589 /* /3: NEG reg/memX */
4590 /* /2: NOT reg/memX */
4591 case 0xf6:
4592 case 0xf7:
4593 modrm = ldub_code(pc++);
4594 mod = (modrm >> 6) & 3;
4595 if (mod == 3) /* register destination */
4596 break;
4597 return false;
4598
4599 case 0x0f:
4600 b = ldub_code(pc++);
4601 switch (b)
4602 {
4603 /* /7: BTC reg/memY, imm8 */
4604 /* /6: BTR reg/memY, imm8 */
4605 /* /5: BTS reg/memY, imm8 */
4606 case 0xba:
4607 modrm = ldub_code(pc++);
4608 op = (modrm >> 3) & 7;
4609 if (op < 5)
4610 break;
4611 mod = (modrm >> 6) & 3;
4612 if (mod == 3) /* register destination */
4613 break;
4614 return false;
4615
4616 case 0xbb: /* /r: BTC reg/memY, regY */
4617 case 0xb3: /* /r: BTR reg/memY, regY */
4618 case 0xab: /* /r: BTS reg/memY, regY */
4619 case 0xb0: /* /r: CMPXCHG reg/mem8, reg8 */
4620 case 0xb1: /* /r: CMPXCHG reg/memY, regY */
4621 case 0xc0: /* /r: XADD reg/mem8, reg8 */
4622 case 0xc1: /* /r: XADD reg/memY, regY */
4623 modrm = ldub_code(pc++);
4624 mod = (modrm >> 6) & 3;
4625 if (mod == 3) /* register destination */
4626 break;
4627 return false;
4628
4629 /* /1: CMPXCHG8B mem64 or CMPXCHG16B mem128 */
4630 case 0xc7:
4631 modrm = ldub_code(pc++);
4632 op = (modrm >> 3) & 7;
4633 if (op != 1)
4634 break;
4635 return false;
4636 }
4637 break;
4638 }
4639
4640 /* Illegal sequence. s->pc is already past the lock prefix, and that
4641 is sufficient for the TB, I think. */
4642 Log(("illegal lock sequence %RGv (b=%#x)\n", pc_start, b));
4643 return true;
4644}
4645#endif /* VBOX */
4646
4647
4648 /* Convert one instruction. s->is_jmp is set if the translation must
4649 be stopped. Returns the next pc value. */
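/* A rough sketch (for orientation only) of how the translator loop in
   this file drives it:
       for (;;) {
           pc_ptr = disas_insn(dc, pc_ptr);
           if (dc->is_jmp)
               break;    (an eob()/jump was generated, the TB ends here)
           ... (it also stops on TB size and breakpoint limits)
       }
   so every path below must either advance s->pc past the whole
   instruction or raise an exception. */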
4650static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
4651{
4652 int b, prefixes, aflag, dflag;
4653 int shift, ot;
4654 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
4655 target_ulong next_eip, tval;
4656 int rex_w, rex_r;
4657
4658 if (unlikely(loglevel & CPU_LOG_TB_OP))
4659 tcg_gen_debug_insn_start(pc_start);
4660
4661 s->pc = pc_start;
4662 prefixes = 0;
4663 aflag = s->code32;
4664 dflag = s->code32;
4665 s->override = -1;
4666 rex_w = -1;
4667 rex_r = 0;
4668#ifdef TARGET_X86_64
4669 s->rex_x = 0;
4670 s->rex_b = 0;
4671 x86_64_hregs = 0;
4672#endif
4673 s->rip_offset = 0; /* for relative ip address */
4674#ifdef VBOX
4675 /* nike: seems to only slow things down */
4676# if 0
4677 /* Always update EIP. Otherwise one must be very careful with generated code that can raise exceptions. */
4678
4679 gen_update_eip(pc_start - s->cs_base);
4680# endif
4681#endif
4682
4683 next_byte:
4684 b = ldub_code(s->pc);
4685 s->pc++;
4686 /* check prefixes */
4687#ifdef TARGET_X86_64
4688 if (CODE64(s)) {
4689 switch (b) {
4690 case 0xf3:
4691 prefixes |= PREFIX_REPZ;
4692 goto next_byte;
4693 case 0xf2:
4694 prefixes |= PREFIX_REPNZ;
4695 goto next_byte;
4696 case 0xf0:
4697 prefixes |= PREFIX_LOCK;
4698 goto next_byte;
4699 case 0x2e:
4700 s->override = R_CS;
4701 goto next_byte;
4702 case 0x36:
4703 s->override = R_SS;
4704 goto next_byte;
4705 case 0x3e:
4706 s->override = R_DS;
4707 goto next_byte;
4708 case 0x26:
4709 s->override = R_ES;
4710 goto next_byte;
4711 case 0x64:
4712 s->override = R_FS;
4713 goto next_byte;
4714 case 0x65:
4715 s->override = R_GS;
4716 goto next_byte;
4717 case 0x66:
4718 prefixes |= PREFIX_DATA;
4719 goto next_byte;
4720 case 0x67:
4721 prefixes |= PREFIX_ADR;
4722 goto next_byte;
4723 case 0x40 ... 0x4f:
4724 /* REX prefix */
4725 rex_w = (b >> 3) & 1;
4726 rex_r = (b & 0x4) << 1;
4727 s->rex_x = (b & 0x2) << 2;
4728 REX_B(s) = (b & 0x1) << 3;
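/* Worked example: REX byte 0x4D (0100 1101b) yields rex_w=1, rex_r=8,
   rex_x=0, REX_B=8; the extracted bits are OR'ed in as bit 3 of the
   ModRM reg/index/base register numbers. */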
4729 x86_64_hregs = 1; /* select uniform byte register addressing */
4730 goto next_byte;
4731 }
4732 if (rex_w == 1) {
4733 /* 0x66 is ignored if rex.w is set */
4734 dflag = 2;
4735 } else {
4736 if (prefixes & PREFIX_DATA)
4737 dflag ^= 1;
4738 }
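/* E.g. 66 48 89 C8 decodes as mov rax,rcx: REX.W forces dflag=2 and the
   0x66 operand-size prefix is ignored, per the rule above. */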
4739 if (!(prefixes & PREFIX_ADR))
4740 aflag = 2;
4741 } else
4742#endif
4743 {
4744 switch (b) {
4745 case 0xf3:
4746 prefixes |= PREFIX_REPZ;
4747 goto next_byte;
4748 case 0xf2:
4749 prefixes |= PREFIX_REPNZ;
4750 goto next_byte;
4751 case 0xf0:
4752 prefixes |= PREFIX_LOCK;
4753 goto next_byte;
4754 case 0x2e:
4755 s->override = R_CS;
4756 goto next_byte;
4757 case 0x36:
4758 s->override = R_SS;
4759 goto next_byte;
4760 case 0x3e:
4761 s->override = R_DS;
4762 goto next_byte;
4763 case 0x26:
4764 s->override = R_ES;
4765 goto next_byte;
4766 case 0x64:
4767 s->override = R_FS;
4768 goto next_byte;
4769 case 0x65:
4770 s->override = R_GS;
4771 goto next_byte;
4772 case 0x66:
4773 prefixes |= PREFIX_DATA;
4774 goto next_byte;
4775 case 0x67:
4776 prefixes |= PREFIX_ADR;
4777 goto next_byte;
4778 }
4779 if (prefixes & PREFIX_DATA)
4780 dflag ^= 1;
4781 if (prefixes & PREFIX_ADR)
4782 aflag ^= 1;
4783 }
4784
4785 s->prefix = prefixes;
4786 s->aflag = aflag;
4787 s->dflag = dflag;
4788
4789 /* lock generation */
4790#ifndef VBOX
4791 if (prefixes & PREFIX_LOCK)
4792 tcg_gen_helper_0_0(helper_lock);
4793#else /* VBOX */
4794 if (prefixes & PREFIX_LOCK) {
4795 if (is_invalid_lock_sequence(s, pc_start, b)) {
4796 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
4797 return s->pc;
4798 }
4799 tcg_gen_helper_0_0(helper_lock);
4800 }
4801#endif /* VBOX */
4802
4803 /* now check op code */
4804 reswitch:
4805 switch(b) {
4806 case 0x0f:
4807 /**************************/
4808 /* extended op code */
4809 b = ldub_code(s->pc++) | 0x100;
4810 goto reswitch;
4811
4812 /**************************/
4813 /* arith & logic */
4814 case 0x00 ... 0x05:
4815 case 0x08 ... 0x0d:
4816 case 0x10 ... 0x15:
4817 case 0x18 ... 0x1d:
4818 case 0x20 ... 0x25:
4819 case 0x28 ... 0x2d:
4820 case 0x30 ... 0x35:
4821 case 0x38 ... 0x3d:
4822 {
4823 int op, f, val;
4824 op = (b >> 3) & 7;
4825 f = (b >> 1) & 3;
4826
4827 if ((b & 1) == 0)
4828 ot = OT_BYTE;
4829 else
4830 ot = dflag + OT_WORD;
4831
4832 switch(f) {
4833 case 0: /* OP Ev, Gv */
4834 modrm = ldub_code(s->pc++);
4835 reg = ((modrm >> 3) & 7) | rex_r;
4836 mod = (modrm >> 6) & 3;
4837 rm = (modrm & 7) | REX_B(s);
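/* e.g. ModRM 0xD8 = 11 011 000b: mod=3 (register form), reg=3 (rBX),
   rm=0 (rAX), before any REX bits are OR'ed in */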
4838 if (mod != 3) {
4839 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4840 opreg = OR_TMP0;
4841 } else if (op == OP_XORL && rm == reg) {
4842 xor_zero:
4843 /* xor reg, reg optimisation */
4844 gen_op_movl_T0_0();
4845 s->cc_op = CC_OP_LOGICB + ot;
4846 gen_op_mov_reg_T0(ot, reg);
4847 gen_op_update1_cc();
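/* e.g. 31 C0 (xor %eax,%eax): the old value is never read, so just
   store 0 and derive the LOGIC flags from it */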
4848 break;
4849 } else {
4850 opreg = rm;
4851 }
4852 gen_op_mov_TN_reg(ot, 1, reg);
4853 gen_op(s, op, ot, opreg);
4854 break;
4855 case 1: /* OP Gv, Ev */
4856 modrm = ldub_code(s->pc++);
4857 mod = (modrm >> 6) & 3;
4858 reg = ((modrm >> 3) & 7) | rex_r;
4859 rm = (modrm & 7) | REX_B(s);
4860 if (mod != 3) {
4861 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4862 gen_op_ld_T1_A0(ot + s->mem_index);
4863 } else if (op == OP_XORL && rm == reg) {
4864 goto xor_zero;
4865 } else {
4866 gen_op_mov_TN_reg(ot, 1, rm);
4867 }
4868 gen_op(s, op, ot, reg);
4869 break;
4870 case 2: /* OP A, Iv */
4871 val = insn_get(s, ot);
4872 gen_op_movl_T1_im(val);
4873 gen_op(s, op, ot, OR_EAX);
4874 break;
4875 }
4876 }
4877 break;
4878
4879 case 0x82:
4880 if (CODE64(s))
4881 goto illegal_op;
4882 case 0x80: /* GRP1 */
4883 case 0x81:
4884 case 0x83:
4885 {
4886 int val;
4887
4888 if ((b & 1) == 0)
4889 ot = OT_BYTE;
4890 else
4891 ot = dflag + OT_WORD;
4892
4893 modrm = ldub_code(s->pc++);
4894 mod = (modrm >> 6) & 3;
4895 rm = (modrm & 7) | REX_B(s);
4896 op = (modrm >> 3) & 7;
4897
4898 if (mod != 3) {
4899 if (b == 0x83)
4900 s->rip_offset = 1;
4901 else
4902 s->rip_offset = insn_const_size(ot);
4903 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4904 opreg = OR_TMP0;
4905 } else {
4906 opreg = rm;
4907 }
4908
4909 switch(b) {
4910 default:
4911 case 0x80:
4912 case 0x81:
4913 case 0x82:
4914 val = insn_get(s, ot);
4915 break;
4916 case 0x83:
4917 val = (int8_t)insn_get(s, OT_BYTE);
4918 break;
4919 }
4920 gen_op_movl_T1_im(val);
4921 gen_op(s, op, ot, opreg);
4922 }
4923 break;
4924
4925 /**************************/
4926 /* inc, dec, and other misc arith */
4927 case 0x40 ... 0x47: /* inc Gv */
4928 ot = dflag ? OT_LONG : OT_WORD;
4929 gen_inc(s, ot, OR_EAX + (b & 7), 1);
4930 break;
4931 case 0x48 ... 0x4f: /* dec Gv */
4932 ot = dflag ? OT_LONG : OT_WORD;
4933 gen_inc(s, ot, OR_EAX + (b & 7), -1);
4934 break;
4935 case 0xf6: /* GRP3 */
4936 case 0xf7:
4937 if ((b & 1) == 0)
4938 ot = OT_BYTE;
4939 else
4940 ot = dflag + OT_WORD;
4941
4942 modrm = ldub_code(s->pc++);
4943 mod = (modrm >> 6) & 3;
4944 rm = (modrm & 7) | REX_B(s);
4945 op = (modrm >> 3) & 7;
4946 if (mod != 3) {
4947 if (op == 0)
4948 s->rip_offset = insn_const_size(ot);
4949 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4950 gen_op_ld_T0_A0(ot + s->mem_index);
4951 } else {
4952 gen_op_mov_TN_reg(ot, 0, rm);
4953 }
4954
4955 switch(op) {
4956 case 0: /* test */
4957 val = insn_get(s, ot);
4958 gen_op_movl_T1_im(val);
4959 gen_op_testl_T0_T1_cc();
4960 s->cc_op = CC_OP_LOGICB + ot;
4961 break;
4962 case 2: /* not */
4963 tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
4964 if (mod != 3) {
4965 gen_op_st_T0_A0(ot + s->mem_index);
4966 } else {
4967 gen_op_mov_reg_T0(ot, rm);
4968 }
4969 break;
4970 case 3: /* neg */
4971 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
4972 if (mod != 3) {
4973 gen_op_st_T0_A0(ot + s->mem_index);
4974 } else {
4975 gen_op_mov_reg_T0(ot, rm);
4976 }
4977 gen_op_update_neg_cc();
4978 s->cc_op = CC_OP_SUBB + ot;
4979 break;
4980 case 4: /* mul */
4981 switch(ot) {
4982 case OT_BYTE:
4983 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
4984 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4985 tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
4986 /* XXX: use 32 bit mul which could be faster */
4987 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4988 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4989 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4990 tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
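/* CF/OF for mul mean "the high half of the product is nonzero";
   keeping the AH part in cc_src lets CC_OP_MULB test exactly that */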
4991 s->cc_op = CC_OP_MULB;
4992 break;
4993 case OT_WORD:
4994 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
4995 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
4996 tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
4997 /* XXX: use 32 bit mul which could be faster */
4998 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4999 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5000 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5001 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
5002 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5003 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
5004 s->cc_op = CC_OP_MULW;
5005 break;
5006 default:
5007 case OT_LONG:
5008#ifdef TARGET_X86_64
5009 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5010 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
5011 tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
5012 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5013 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5014 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5015 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
5016 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5017 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
5018#else
5019 {
5020 TCGv t0, t1;
5021 t0 = tcg_temp_new(TCG_TYPE_I64);
5022 t1 = tcg_temp_new(TCG_TYPE_I64);
5023 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5024 tcg_gen_extu_i32_i64(t0, cpu_T[0]);
5025 tcg_gen_extu_i32_i64(t1, cpu_T[1]);
5026 tcg_gen_mul_i64(t0, t0, t1);
5027 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5028 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5029 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5030 tcg_gen_shri_i64(t0, t0, 32);
5031 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5032 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5033 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
5034 }
5035#endif
5036 s->cc_op = CC_OP_MULL;
5037 break;
5038#ifdef TARGET_X86_64
5039 case OT_QUAD:
5040 tcg_gen_helper_0_1(helper_mulq_EAX_T0, cpu_T[0]);
5041 s->cc_op = CC_OP_MULQ;
5042 break;
5043#endif
5044 }
5045 break;
5046 case 5: /* imul */
5047 switch(ot) {
5048 case OT_BYTE:
5049 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
5050 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5051 tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
5052 /* XXX: use 32 bit mul which could be faster */
5053 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5054 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5055 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5056 tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
5057 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
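/* For imul, CF/OF are set when the product does not survive
   truncate-and-sign-extend, hence cc_src = result - sext(result) */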
5058 s->cc_op = CC_OP_MULB;
5059 break;
5060 case OT_WORD:
5061 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
5062 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5063 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5064 /* XXX: use 32 bit mul which could be faster */
5065 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5066 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5067 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5068 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5069 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5070 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
5071 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5072 s->cc_op = CC_OP_MULW;
5073 break;
5074 default:
5075 case OT_LONG:
5076#ifdef TARGET_X86_64
5077 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5078 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5079 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
5080 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5081 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5082 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5083 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
5084 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5085 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
5086 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5087#else
5088 {
5089 TCGv t0, t1;
5090 t0 = tcg_temp_new(TCG_TYPE_I64);
5091 t1 = tcg_temp_new(TCG_TYPE_I64);
5092 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5093 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
5094 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
5095 tcg_gen_mul_i64(t0, t0, t1);
5096 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5097 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5098 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5099 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
5100 tcg_gen_shri_i64(t0, t0, 32);
5101 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5102 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5103 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5104 }
5105#endif
5106 s->cc_op = CC_OP_MULL;
5107 break;
5108#ifdef TARGET_X86_64
5109 case OT_QUAD:
5110 tcg_gen_helper_0_1(helper_imulq_EAX_T0, cpu_T[0]);
5111 s->cc_op = CC_OP_MULQ;
5112 break;
5113#endif
5114 }
5115 break;
5116 case 6: /* div */
5117 switch(ot) {
5118 case OT_BYTE:
5119 gen_jmp_im(pc_start - s->cs_base);
5120 tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
5121 break;
5122 case OT_WORD:
5123 gen_jmp_im(pc_start - s->cs_base);
5124 tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
5125 break;
5126 default:
5127 case OT_LONG:
5128 gen_jmp_im(pc_start - s->cs_base);
5129 tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
5130 break;
5131#ifdef TARGET_X86_64
5132 case OT_QUAD:
5133 gen_jmp_im(pc_start - s->cs_base);
5134 tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
5135 break;
5136#endif
5137 }
5138 break;
5139 case 7: /* idiv */
5140 switch(ot) {
5141 case OT_BYTE:
5142 gen_jmp_im(pc_start - s->cs_base);
5143 tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
5144 break;
5145 case OT_WORD:
5146 gen_jmp_im(pc_start - s->cs_base);
5147 tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
5148 break;
5149 default:
5150 case OT_LONG:
5151 gen_jmp_im(pc_start - s->cs_base);
5152 tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
5153 break;
5154#ifdef TARGET_X86_64
5155 case OT_QUAD:
5156 gen_jmp_im(pc_start - s->cs_base);
5157 tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
5158 break;
5159#endif
5160 }
5161 break;
5162 default:
5163 goto illegal_op;
5164 }
5165 break;
5166
5167 case 0xfe: /* GRP4 */
5168 case 0xff: /* GRP5 */
5169 if ((b & 1) == 0)
5170 ot = OT_BYTE;
5171 else
5172 ot = dflag + OT_WORD;
5173
5174 modrm = ldub_code(s->pc++);
5175 mod = (modrm >> 6) & 3;
5176 rm = (modrm & 7) | REX_B(s);
5177 op = (modrm >> 3) & 7;
5178 if (op >= 2 && b == 0xfe) {
5179 goto illegal_op;
5180 }
5181 if (CODE64(s)) {
5182 if (op == 2 || op == 4) {
5183 /* operand size for jumps is 64 bit */
5184 ot = OT_QUAD;
5185 } else if (op == 3 || op == 5) {
5186 /* for calls, the operand size is 16 or 32 bit, even
5187 in long mode */
5188 ot = dflag ? OT_LONG : OT_WORD;
5189 } else if (op == 6) {
5190 /* default push size is 64 bit */
5191 ot = dflag ? OT_QUAD : OT_WORD;
5192 }
5193 }
5194 if (mod != 3) {
5195 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5196 if (op >= 2 && op != 3 && op != 5)
5197 gen_op_ld_T0_A0(ot + s->mem_index);
5198 } else {
5199 gen_op_mov_TN_reg(ot, 0, rm);
5200 }
5201
5202 switch(op) {
5203 case 0: /* inc Ev */
5204 if (mod != 3)
5205 opreg = OR_TMP0;
5206 else
5207 opreg = rm;
5208 gen_inc(s, ot, opreg, 1);
5209 break;
5210 case 1: /* dec Ev */
5211 if (mod != 3)
5212 opreg = OR_TMP0;
5213 else
5214 opreg = rm;
5215 gen_inc(s, ot, opreg, -1);
5216 break;
5217 case 2: /* call Ev */
5218 /* XXX: optimize if memory (no 'and' is necessary) */
5219#ifdef VBOX_WITH_CALL_RECORD
5220 if (s->record_call)
5221 gen_op_record_call();
5222#endif
5223 if (s->dflag == 0)
5224 gen_op_andl_T0_ffff();
5225 next_eip = s->pc - s->cs_base;
5226 gen_movtl_T1_im(next_eip);
5227 gen_push_T1(s);
5228 gen_op_jmp_T0();
5229 gen_eob(s);
5230 break;
5231 case 3: /* lcall Ev */
5232 gen_op_ld_T1_A0(ot + s->mem_index);
5233 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5234 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5235 do_lcall:
5236 if (s->pe && !s->vm86) {
5237 if (s->cc_op != CC_OP_DYNAMIC)
5238 gen_op_set_cc_op(s->cc_op);
5239 gen_jmp_im(pc_start - s->cs_base);
5240 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5241 tcg_gen_helper_0_4(helper_lcall_protected,
5242 cpu_tmp2_i32, cpu_T[1],
5243 tcg_const_i32(dflag),
5244 tcg_const_i32(s->pc - pc_start));
5245 } else {
5246 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5247 tcg_gen_helper_0_4(helper_lcall_real,
5248 cpu_tmp2_i32, cpu_T[1],
5249 tcg_const_i32(dflag),
5250 tcg_const_i32(s->pc - s->cs_base));
5251 }
5252 gen_eob(s);
5253 break;
5254 case 4: /* jmp Ev */
5255 if (s->dflag == 0)
5256 gen_op_andl_T0_ffff();
5257 gen_op_jmp_T0();
5258 gen_eob(s);
5259 break;
5260 case 5: /* ljmp Ev */
5261 gen_op_ld_T1_A0(ot + s->mem_index);
5262 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5263 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5264 do_ljmp:
5265 if (s->pe && !s->vm86) {
5266 if (s->cc_op != CC_OP_DYNAMIC)
5267 gen_op_set_cc_op(s->cc_op);
5268 gen_jmp_im(pc_start - s->cs_base);
5269 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5270 tcg_gen_helper_0_3(helper_ljmp_protected,
5271 cpu_tmp2_i32,
5272 cpu_T[1],
5273 tcg_const_i32(s->pc - pc_start));
5274 } else {
5275 gen_op_movl_seg_T0_vm(R_CS);
5276 gen_op_movl_T0_T1();
5277 gen_op_jmp_T0();
5278 }
5279 gen_eob(s);
5280 break;
5281 case 6: /* push Ev */
5282 gen_push_T0(s);
5283 break;
5284 default:
5285 goto illegal_op;
5286 }
5287 break;
5288
5289 case 0x84: /* test Ev, Gv */
5290 case 0x85:
5291 if ((b & 1) == 0)
5292 ot = OT_BYTE;
5293 else
5294 ot = dflag + OT_WORD;
5295
5296 modrm = ldub_code(s->pc++);
5297 mod = (modrm >> 6) & 3;
5298 rm = (modrm & 7) | REX_B(s);
5299 reg = ((modrm >> 3) & 7) | rex_r;
5300
5301 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5302 gen_op_mov_TN_reg(ot, 1, reg);
5303 gen_op_testl_T0_T1_cc();
5304 s->cc_op = CC_OP_LOGICB + ot;
5305 break;
5306
5307 case 0xa8: /* test eAX, Iv */
5308 case 0xa9:
5309 if ((b & 1) == 0)
5310 ot = OT_BYTE;
5311 else
5312 ot = dflag + OT_WORD;
5313 val = insn_get(s, ot);
5314
5315 gen_op_mov_TN_reg(ot, 0, OR_EAX);
5316 gen_op_movl_T1_im(val);
5317 gen_op_testl_T0_T1_cc();
5318 s->cc_op = CC_OP_LOGICB + ot;
5319 break;
5320
5321 case 0x98: /* CWDE/CBW */
5322#ifdef TARGET_X86_64
5323 if (dflag == 2) {
5324 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5325 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5326 gen_op_mov_reg_T0(OT_QUAD, R_EAX);
5327 } else
5328#endif
5329 if (dflag == 1) {
5330 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5331 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5332 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5333 } else {
5334 gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
5335 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5336 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5337 }
5338 break;
5339 case 0x99: /* CDQ/CWD */
5340#ifdef TARGET_X86_64
5341 if (dflag == 2) {
5342 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5343 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
5344 gen_op_mov_reg_T0(OT_QUAD, R_EDX);
5345 } else
5346#endif
5347 if (dflag == 1) {
5348 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5349 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5350 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
5351 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5352 } else {
5353 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5354 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5355 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
5356 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5357 }
5358 break;
5359 case 0x1af: /* imul Gv, Ev */
5360 case 0x69: /* imul Gv, Ev, I */
5361 case 0x6b:
5362 ot = dflag + OT_WORD;
5363 modrm = ldub_code(s->pc++);
5364 reg = ((modrm >> 3) & 7) | rex_r;
5365 if (b == 0x69)
5366 s->rip_offset = insn_const_size(ot);
5367 else if (b == 0x6b)
5368 s->rip_offset = 1;
5369 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5370 if (b == 0x69) {
5371 val = insn_get(s, ot);
5372 gen_op_movl_T1_im(val);
5373 } else if (b == 0x6b) {
5374 val = (int8_t)insn_get(s, OT_BYTE);
5375 gen_op_movl_T1_im(val);
5376 } else {
5377 gen_op_mov_TN_reg(ot, 1, reg);
5378 }
5379
5380#ifdef TARGET_X86_64
5381 if (ot == OT_QUAD) {
5382 tcg_gen_helper_1_2(helper_imulq_T0_T1, cpu_T[0], cpu_T[0], cpu_T[1]);
5383 } else
5384#endif
5385 if (ot == OT_LONG) {
5386#ifdef TARGET_X86_64
5387 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5388 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
5389 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5390 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5391 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
5392 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5393#else
5394 {
5395 TCGv t0, t1;
5396 t0 = tcg_temp_new(TCG_TYPE_I64);
5397 t1 = tcg_temp_new(TCG_TYPE_I64);
5398 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
5399 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
5400 tcg_gen_mul_i64(t0, t0, t1);
5401 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5402 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5403 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
5404 tcg_gen_shri_i64(t0, t0, 32);
5405 tcg_gen_trunc_i64_i32(cpu_T[1], t0);
5406 tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
5407 }
5408#endif
5409 } else {
5410 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5411 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5412 /* XXX: use 32 bit mul which could be faster */
5413 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5414 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5415 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5416 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5417 }
5418 gen_op_mov_reg_T0(ot, reg);
5419 s->cc_op = CC_OP_MULB + ot;
5420 break;
5421 case 0x1c0:
5422 case 0x1c1: /* xadd Ev, Gv */
5423 if ((b & 1) == 0)
5424 ot = OT_BYTE;
5425 else
5426 ot = dflag + OT_WORD;
5427 modrm = ldub_code(s->pc++);
5428 reg = ((modrm >> 3) & 7) | rex_r;
5429 mod = (modrm >> 6) & 3;
5430 if (mod == 3) {
5431 rm = (modrm & 7) | REX_B(s);
5432 gen_op_mov_TN_reg(ot, 0, reg);
5433 gen_op_mov_TN_reg(ot, 1, rm);
5434 gen_op_addl_T0_T1();
5435 gen_op_mov_reg_T1(ot, reg);
5436 gen_op_mov_reg_T0(ot, rm);
5437 } else {
5438 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5439 gen_op_mov_TN_reg(ot, 0, reg);
5440 gen_op_ld_T1_A0(ot + s->mem_index);
5441 gen_op_addl_T0_T1();
5442 gen_op_st_T0_A0(ot + s->mem_index);
5443 gen_op_mov_reg_T1(ot, reg);
5444 }
5445 gen_op_update2_cc();
5446 s->cc_op = CC_OP_ADDB + ot;
5447 break;
5448 case 0x1b0:
5449 case 0x1b1: /* cmpxchg Ev, Gv */
5450 {
5451 int label1, label2;
5452 TCGv t0, t1, t2, a0;
5453
5454 if ((b & 1) == 0)
5455 ot = OT_BYTE;
5456 else
5457 ot = dflag + OT_WORD;
5458 modrm = ldub_code(s->pc++);
5459 reg = ((modrm >> 3) & 7) | rex_r;
5460 mod = (modrm >> 6) & 3;
5461 t0 = tcg_temp_local_new(TCG_TYPE_TL);
5462 t1 = tcg_temp_local_new(TCG_TYPE_TL);
5463 t2 = tcg_temp_local_new(TCG_TYPE_TL);
5464 a0 = tcg_temp_local_new(TCG_TYPE_TL);
5465 gen_op_mov_v_reg(ot, t1, reg);
5466 if (mod == 3) {
5467 rm = (modrm & 7) | REX_B(s);
5468 gen_op_mov_v_reg(ot, t0, rm);
5469 } else {
5470 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5471 tcg_gen_mov_tl(a0, cpu_A0);
5472 gen_op_ld_v(ot + s->mem_index, t0, a0);
5473 rm = 0; /* avoid warning */
5474 }
5475 label1 = gen_new_label();
5476 tcg_gen_ld_tl(t2, cpu_env, offsetof(CPUState, regs[R_EAX]));
5477 tcg_gen_sub_tl(t2, t2, t0);
5478 gen_extu(ot, t2);
5479 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
5480 if (mod == 3) {
5481 label2 = gen_new_label();
5482 gen_op_mov_reg_v(ot, R_EAX, t0);
5483 tcg_gen_br(label2);
5484 gen_set_label(label1);
5485 gen_op_mov_reg_v(ot, rm, t1);
5486 gen_set_label(label2);
5487 } else {
5488 tcg_gen_mov_tl(t1, t0);
5489 gen_op_mov_reg_v(ot, R_EAX, t0);
5490 gen_set_label(label1);
5491 /* always store */
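/* (as on real hardware, which performs the store even when the
    comparison fails, writing the old value back) */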
5492 gen_op_st_v(ot + s->mem_index, t1, a0);
5493 }
5494 tcg_gen_mov_tl(cpu_cc_src, t0);
5495 tcg_gen_mov_tl(cpu_cc_dst, t2);
5496 s->cc_op = CC_OP_SUBB + ot;
5497 tcg_temp_free(t0);
5498 tcg_temp_free(t1);
5499 tcg_temp_free(t2);
5500 tcg_temp_free(a0);
5501 }
5502 break;
5503 case 0x1c7: /* cmpxchg8b */
5504 modrm = ldub_code(s->pc++);
5505 mod = (modrm >> 6) & 3;
5506 if ((mod == 3) || ((modrm & 0x38) != 0x8))
5507 goto illegal_op;
5508#ifdef TARGET_X86_64
5509 if (dflag == 2) {
5510 if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
5511 goto illegal_op;
5512 gen_jmp_im(pc_start - s->cs_base);
5513 if (s->cc_op != CC_OP_DYNAMIC)
5514 gen_op_set_cc_op(s->cc_op);
5515 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5516 tcg_gen_helper_0_1(helper_cmpxchg16b, cpu_A0);
5517 } else
5518#endif
5519 {
5520 if (!(s->cpuid_features & CPUID_CX8))
5521 goto illegal_op;
5522 gen_jmp_im(pc_start - s->cs_base);
5523 if (s->cc_op != CC_OP_DYNAMIC)
5524 gen_op_set_cc_op(s->cc_op);
5525 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5526 tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
5527 }
5528 s->cc_op = CC_OP_EFLAGS;
5529 break;
5530
5531 /**************************/
5532 /* push/pop */
5533 case 0x50 ... 0x57: /* push */
5534 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
5535 gen_push_T0(s);
5536 break;
5537 case 0x58 ... 0x5f: /* pop */
5538 if (CODE64(s)) {
5539 ot = dflag ? OT_QUAD : OT_WORD;
5540 } else {
5541 ot = dflag + OT_WORD;
5542 }
5543 gen_pop_T0(s);
5544 /* NOTE: order is important for pop %sp */
5545 gen_pop_update(s);
5546 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
5547 break;
5548 case 0x60: /* pusha */
5549 if (CODE64(s))
5550 goto illegal_op;
5551 gen_pusha(s);
5552 break;
5553 case 0x61: /* popa */
5554 if (CODE64(s))
5555 goto illegal_op;
5556 gen_popa(s);
5557 break;
5558 case 0x68: /* push Iv */
5559 case 0x6a:
5560 if (CODE64(s)) {
5561 ot = dflag ? OT_QUAD : OT_WORD;
5562 } else {
5563 ot = dflag + OT_WORD;
5564 }
5565 if (b == 0x68)
5566 val = insn_get(s, ot);
5567 else
5568 val = (int8_t)insn_get(s, OT_BYTE);
5569 gen_op_movl_T0_im(val);
5570 gen_push_T0(s);
5571 break;
5572 case 0x8f: /* pop Ev */
5573 if (CODE64(s)) {
5574 ot = dflag ? OT_QUAD : OT_WORD;
5575 } else {
5576 ot = dflag + OT_WORD;
5577 }
5578 modrm = ldub_code(s->pc++);
5579 mod = (modrm >> 6) & 3;
5580 gen_pop_T0(s);
5581 if (mod == 3) {
5582 /* NOTE: order is important for pop %sp */
5583 gen_pop_update(s);
5584 rm = (modrm & 7) | REX_B(s);
5585 gen_op_mov_reg_T0(ot, rm);
5586 } else {
5587 /* NOTE: order is important too for MMU exceptions */
5588 s->popl_esp_hack = 1 << ot;
5589 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5590 s->popl_esp_hack = 0;
5591 gen_pop_update(s);
5592 }
5593 break;
5594 case 0xc8: /* enter */
5595 {
5596 int level;
5597 val = lduw_code(s->pc);
5598 s->pc += 2;
5599 level = ldub_code(s->pc++);
5600 gen_enter(s, val, level);
5601 }
5602 break;
5603 case 0xc9: /* leave */
5604 /* XXX: exception not precise (ESP is updated before potential exception) */
5605 if (CODE64(s)) {
5606 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
5607 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
5608 } else if (s->ss32) {
5609 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
5610 gen_op_mov_reg_T0(OT_LONG, R_ESP);
5611 } else {
5612 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
5613 gen_op_mov_reg_T0(OT_WORD, R_ESP);
5614 }
5615 gen_pop_T0(s);
5616 if (CODE64(s)) {
5617 ot = dflag ? OT_QUAD : OT_WORD;
5618 } else {
5619 ot = dflag + OT_WORD;
5620 }
5621 gen_op_mov_reg_T0(ot, R_EBP);
5622 gen_pop_update(s);
5623 break;
5624 case 0x06: /* push es */
5625 case 0x0e: /* push cs */
5626 case 0x16: /* push ss */
5627 case 0x1e: /* push ds */
5628 if (CODE64(s))
5629 goto illegal_op;
5630 gen_op_movl_T0_seg(b >> 3);
5631 gen_push_T0(s);
5632 break;
5633 case 0x1a0: /* push fs */
5634 case 0x1a8: /* push gs */
5635 gen_op_movl_T0_seg((b >> 3) & 7);
5636 gen_push_T0(s);
5637 break;
5638 case 0x07: /* pop es */
5639 case 0x17: /* pop ss */
5640 case 0x1f: /* pop ds */
5641 if (CODE64(s))
5642 goto illegal_op;
5643 reg = b >> 3;
5644 gen_pop_T0(s);
5645 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5646 gen_pop_update(s);
5647 if (reg == R_SS) {
5648 /* if reg == SS, inhibit interrupts/trace. */
5649 /* If several instructions disable interrupts, only the
5650 _first_ does it */
5651 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5652 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5653 s->tf = 0;
5654 }
5655 if (s->is_jmp) {
5656 gen_jmp_im(s->pc - s->cs_base);
5657 gen_eob(s);
5658 }
5659 break;
5660 case 0x1a1: /* pop fs */
5661 case 0x1a9: /* pop gs */
5662 gen_pop_T0(s);
5663 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
5664 gen_pop_update(s);
5665 if (s->is_jmp) {
5666 gen_jmp_im(s->pc - s->cs_base);
5667 gen_eob(s);
5668 }
5669 break;
5670
5671 /**************************/
5672 /* mov */
5673 case 0x88:
5674 case 0x89: /* mov Ev, Gv */
5675 if ((b & 1) == 0)
5676 ot = OT_BYTE;
5677 else
5678 ot = dflag + OT_WORD;
5679 modrm = ldub_code(s->pc++);
5680 reg = ((modrm >> 3) & 7) | rex_r;
5681
5682 /* generate a generic store */
5683 gen_ldst_modrm(s, modrm, ot, reg, 1);
5684 break;
5685 case 0xc6:
5686 case 0xc7: /* mov Ev, Iv */
5687 if ((b & 1) == 0)
5688 ot = OT_BYTE;
5689 else
5690 ot = dflag + OT_WORD;
5691 modrm = ldub_code(s->pc++);
5692 mod = (modrm >> 6) & 3;
5693 if (mod != 3) {
5694 s->rip_offset = insn_const_size(ot);
5695 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5696 }
5697 val = insn_get(s, ot);
5698 gen_op_movl_T0_im(val);
5699 if (mod != 3)
5700 gen_op_st_T0_A0(ot + s->mem_index);
5701 else
5702 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
5703 break;
5704 case 0x8a:
5705 case 0x8b: /* mov Gv, Ev */
5706#ifdef VBOX /* dtrace hot fix */
5707 if (prefixes & PREFIX_LOCK)
5708 goto illegal_op;
5709#endif
5710 if ((b & 1) == 0)
5711 ot = OT_BYTE;
5712 else
5713 ot = OT_WORD + dflag;
5714 modrm = ldub_code(s->pc++);
5715 reg = ((modrm >> 3) & 7) | rex_r;
5716
5717 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5718 gen_op_mov_reg_T0(ot, reg);
5719 break;
5720 case 0x8e: /* mov seg, Ev */
5721 modrm = ldub_code(s->pc++);
5722 reg = (modrm >> 3) & 7;
5723 if (reg >= 6 || reg == R_CS)
5724 goto illegal_op;
5725 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5726 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5727 if (reg == R_SS) {
5728 /* if reg == SS, inhibit interrupts/trace */
5729 /* If several instructions disable interrupts, only the
5730 _first_ does it */
5731 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5732 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5733 s->tf = 0;
5734 }
5735 if (s->is_jmp) {
5736 gen_jmp_im(s->pc - s->cs_base);
5737 gen_eob(s);
5738 }
5739 break;
5740 case 0x8c: /* mov Ev, seg */
5741 modrm = ldub_code(s->pc++);
5742 reg = (modrm >> 3) & 7;
5743 mod = (modrm >> 6) & 3;
5744 if (reg >= 6)
5745 goto illegal_op;
5746 gen_op_movl_T0_seg(reg);
5747 if (mod == 3)
5748 ot = OT_WORD + dflag;
5749 else
5750 ot = OT_WORD;
5751 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5752 break;
5753
5754 case 0x1b6: /* movzbS Gv, Eb */
5755 case 0x1b7: /* movzwS Gv, Ew */
5756 case 0x1be: /* movsbS Gv, Eb */
5757 case 0x1bf: /* movswS Gv, Ew */
5758 {
5759 int d_ot;
5760 /* d_ot is the size of destination */
5761 d_ot = dflag + OT_WORD;
5762 /* ot is the size of source */
5763 ot = (b & 1) + OT_BYTE;
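/* opcode bit 3 picks the extension kind: 0x1b6/0x1b7 zero-extend,
   0x1be/0x1bf sign-extend, so ot | (b & 8) below indexes the four
   zero/sign x byte/word combinations */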
5764 modrm = ldub_code(s->pc++);
5765 reg = ((modrm >> 3) & 7) | rex_r;
5766 mod = (modrm >> 6) & 3;
5767 rm = (modrm & 7) | REX_B(s);
5768
5769 if (mod == 3) {
5770 gen_op_mov_TN_reg(ot, 0, rm);
5771 switch(ot | (b & 8)) {
5772 case OT_BYTE:
5773 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
5774 break;
5775 case OT_BYTE | 8:
5776 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5777 break;
5778 case OT_WORD:
5779 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
5780 break;
5781 default:
5782 case OT_WORD | 8:
5783 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5784 break;
5785 }
5786 gen_op_mov_reg_T0(d_ot, reg);
5787 } else {
5788 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5789 if (b & 8) {
5790 gen_op_lds_T0_A0(ot + s->mem_index);
5791 } else {
5792 gen_op_ldu_T0_A0(ot + s->mem_index);
5793 }
5794 gen_op_mov_reg_T0(d_ot, reg);
5795 }
5796 }
5797 break;
5798
5799 case 0x8d: /* lea */
5800 ot = dflag + OT_WORD;
5801 modrm = ldub_code(s->pc++);
5802 mod = (modrm >> 6) & 3;
5803 if (mod == 3)
5804 goto illegal_op;
5805 reg = ((modrm >> 3) & 7) | rex_r;
5806 /* we must ensure that no segment is added */
5807 s->override = -1;
5808 val = s->addseg;
5809 s->addseg = 0;
5810 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5811 s->addseg = val;
5812 gen_op_mov_reg_A0(ot - OT_WORD, reg);
5813 break;
5814
5815 case 0xa0: /* mov EAX, Ov */
5816 case 0xa1:
5817 case 0xa2: /* mov Ov, EAX */
5818 case 0xa3:
5819 {
5820 target_ulong offset_addr;
5821
5822 if ((b & 1) == 0)
5823 ot = OT_BYTE;
5824 else
5825 ot = dflag + OT_WORD;
5826#ifdef TARGET_X86_64
5827 if (s->aflag == 2) {
5828 offset_addr = ldq_code(s->pc);
5829 s->pc += 8;
5830 gen_op_movq_A0_im(offset_addr);
5831 } else
5832#endif
5833 {
5834 if (s->aflag) {
5835 offset_addr = insn_get(s, OT_LONG);
5836 } else {
5837 offset_addr = insn_get(s, OT_WORD);
5838 }
5839 gen_op_movl_A0_im(offset_addr);
5840 }
5841 gen_add_A0_ds_seg(s);
5842 if ((b & 2) == 0) {
5843 gen_op_ld_T0_A0(ot + s->mem_index);
5844 gen_op_mov_reg_T0(ot, R_EAX);
5845 } else {
5846 gen_op_mov_TN_reg(ot, 0, R_EAX);
5847 gen_op_st_T0_A0(ot + s->mem_index);
5848 }
5849 }
5850 break;
5851 case 0xd7: /* xlat */
5852#ifdef TARGET_X86_64
5853 if (s->aflag == 2) {
5854 gen_op_movq_A0_reg(R_EBX);
5855 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5856 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5857 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5858 } else
5859#endif
5860 {
5861 gen_op_movl_A0_reg(R_EBX);
5862 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5863 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5864 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5865 if (s->aflag == 0)
5866 gen_op_andl_A0_ffff();
5867 else
5868 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
5869 }
5870 gen_add_A0_ds_seg(s);
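/* A0 now holds seg_base + rBX + zero-extended AL; xlat loads the
   byte it points at back into AL */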
5871 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
5872 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
5873 break;
5874 case 0xb0 ... 0xb7: /* mov R, Ib */
5875 val = insn_get(s, OT_BYTE);
5876 gen_op_movl_T0_im(val);
5877 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
5878 break;
5879 case 0xb8 ... 0xbf: /* mov R, Iv */
5880#ifdef TARGET_X86_64
5881 if (dflag == 2) {
5882 uint64_t tmp;
5883 /* 64 bit case */
5884 tmp = ldq_code(s->pc);
5885 s->pc += 8;
5886 reg = (b & 7) | REX_B(s);
5887 gen_movtl_T0_im(tmp);
5888 gen_op_mov_reg_T0(OT_QUAD, reg);
5889 } else
5890#endif
5891 {
5892 ot = dflag ? OT_LONG : OT_WORD;
5893 val = insn_get(s, ot);
5894 reg = (b & 7) | REX_B(s);
5895 gen_op_movl_T0_im(val);
5896 gen_op_mov_reg_T0(ot, reg);
5897 }
5898 break;
5899
5900 case 0x91 ... 0x97: /* xchg R, EAX */
5901 ot = dflag + OT_WORD;
5902 reg = (b & 7) | REX_B(s);
5903 rm = R_EAX;
5904 goto do_xchg_reg;
5905 case 0x86:
5906 case 0x87: /* xchg Ev, Gv */
5907 if ((b & 1) == 0)
5908 ot = OT_BYTE;
5909 else
5910 ot = dflag + OT_WORD;
5911 modrm = ldub_code(s->pc++);
5912 reg = ((modrm >> 3) & 7) | rex_r;
5913 mod = (modrm >> 6) & 3;
5914 if (mod == 3) {
5915 rm = (modrm & 7) | REX_B(s);
5916 do_xchg_reg:
5917 gen_op_mov_TN_reg(ot, 0, reg);
5918 gen_op_mov_TN_reg(ot, 1, rm);
5919 gen_op_mov_reg_T0(ot, rm);
5920 gen_op_mov_reg_T1(ot, reg);
5921 } else {
5922 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5923 gen_op_mov_TN_reg(ot, 0, reg);
5924 /* for xchg, lock is implicit */
5925 if (!(prefixes & PREFIX_LOCK))
5926 tcg_gen_helper_0_0(helper_lock);
5927 gen_op_ld_T1_A0(ot + s->mem_index);
5928 gen_op_st_T0_A0(ot + s->mem_index);
5929 if (!(prefixes & PREFIX_LOCK))
5930 tcg_gen_helper_0_0(helper_unlock);
5931 gen_op_mov_reg_T1(ot, reg);
5932 }
5933 break;
5934 case 0xc4: /* les Gv */
5935 if (CODE64(s))
5936 goto illegal_op;
5937 op = R_ES;
5938 goto do_lxx;
5939 case 0xc5: /* lds Gv */
5940 if (CODE64(s))
5941 goto illegal_op;
5942 op = R_DS;
5943 goto do_lxx;
5944 case 0x1b2: /* lss Gv */
5945 op = R_SS;
5946 goto do_lxx;
5947 case 0x1b4: /* lfs Gv */
5948 op = R_FS;
5949 goto do_lxx;
5950 case 0x1b5: /* lgs Gv */
5951 op = R_GS;
5952 do_lxx:
5953 ot = dflag ? OT_LONG : OT_WORD;
5954 modrm = ldub_code(s->pc++);
5955 reg = ((modrm >> 3) & 7) | rex_r;
5956 mod = (modrm >> 6) & 3;
5957 if (mod == 3)
5958 goto illegal_op;
5959 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5960 gen_op_ld_T1_A0(ot + s->mem_index);
5961 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5962 /* load the segment first to handle exceptions properly */
5963 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5964 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
5965 /* then put the data */
5966 gen_op_mov_reg_T1(ot, reg);
5967 if (s->is_jmp) {
5968 gen_jmp_im(s->pc - s->cs_base);
5969 gen_eob(s);
5970 }
5971 break;
5972
5973 /************************/
5974 /* shifts */
5975 case 0xc0:
5976 case 0xc1:
5977 /* shift Ev,Ib */
5978 shift = 2;
5979 grp2:
5980 {
5981 if ((b & 1) == 0)
5982 ot = OT_BYTE;
5983 else
5984 ot = dflag + OT_WORD;
5985
5986 modrm = ldub_code(s->pc++);
5987 mod = (modrm >> 6) & 3;
5988 op = (modrm >> 3) & 7;
5989
5990 if (mod != 3) {
5991 if (shift == 2) {
5992 s->rip_offset = 1;
5993 }
5994 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5995 opreg = OR_TMP0;
5996 } else {
5997 opreg = (modrm & 7) | REX_B(s);
5998 }
5999
6000 /* simpler op */
6001 if (shift == 0) {
6002 gen_shift(s, op, ot, opreg, OR_ECX);
6003 } else {
6004 if (shift == 2) {
6005 shift = ldub_code(s->pc++);
6006 }
6007 gen_shifti(s, op, ot, opreg, shift);
6008 }
6009 }
6010 break;
6011 case 0xd0:
6012 case 0xd1:
6013 /* shift Ev,1 */
6014 shift = 1;
6015 goto grp2;
6016 case 0xd2:
6017 case 0xd3:
6018 /* shift Ev,cl */
6019 shift = 0;
6020 goto grp2;
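/* grp2 dispatch above: shift==2 means an imm8 count (C0/C1),
   shift==1 a count of 1 (D0/D1), shift==0 a count in CL (D2/D3) */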
6021
6022 case 0x1a4: /* shld imm */
6023 op = 0;
6024 shift = 1;
6025 goto do_shiftd;
6026 case 0x1a5: /* shld cl */
6027 op = 0;
6028 shift = 0;
6029 goto do_shiftd;
6030 case 0x1ac: /* shrd imm */
6031 op = 1;
6032 shift = 1;
6033 goto do_shiftd;
6034 case 0x1ad: /* shrd cl */
6035 op = 1;
6036 shift = 0;
6037 do_shiftd:
6038 ot = dflag + OT_WORD;
6039 modrm = ldub_code(s->pc++);
6040 mod = (modrm >> 6) & 3;
6041 rm = (modrm & 7) | REX_B(s);
6042 reg = ((modrm >> 3) & 7) | rex_r;
6043 if (mod != 3) {
6044 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6045 opreg = OR_TMP0;
6046 } else {
6047 opreg = rm;
6048 }
6049 gen_op_mov_TN_reg(ot, 1, reg);
6050
6051 if (shift) {
6052 val = ldub_code(s->pc++);
6053 tcg_gen_movi_tl(cpu_T3, val);
6054 } else {
6055 tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
6056 }
6057 gen_shiftd_rm_T1_T3(s, ot, opreg, op);
6058 break;
6059
6060 /************************/
6061 /* floats */
6062 case 0xd8 ... 0xdf:
6063 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
6064 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
6065 /* XXX: what to do if illegal op? */
6066 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6067 break;
6068 }
6069 modrm = ldub_code(s->pc++);
6070 mod = (modrm >> 6) & 3;
6071 rm = modrm & 7;
6072 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
6073 if (mod != 3) {
6074 /* memory op */
6075 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6076 switch(op) {
6077 case 0x00 ... 0x07: /* fxxxs */
6078 case 0x10 ... 0x17: /* fixxxl */
6079 case 0x20 ... 0x27: /* fxxxl */
6080 case 0x30 ... 0x37: /* fixxx */
6081 {
6082 int op1;
6083 op1 = op & 7;
6084
6085 switch(op >> 4) {
6086 case 0:
6087 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6088 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6089 tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
6090 break;
6091 case 1:
6092 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6093 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6094 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
6095 break;
6096 case 2:
6097 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6098 (s->mem_index >> 2) - 1);
6099 tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
6100 break;
6101 case 3:
6102 default:
6103 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
6104 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6105 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
6106 break;
6107 }
6108
6109 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
6110 if (op1 == 3) {
6111 /* fcomp needs pop */
6112 tcg_gen_helper_0_0(helper_fpop);
6113 }
6114 }
6115 break;
6116 case 0x08: /* flds */
6117 case 0x0a: /* fsts */
6118 case 0x0b: /* fstps */
6119 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
6120 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
6121 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
6122 switch(op & 7) {
6123 case 0:
6124 switch(op >> 4) {
6125 case 0:
6126 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6127 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6128 tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
6129 break;
6130 case 1:
6131 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6132 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6133 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
6134 break;
6135 case 2:
6136 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6137 (s->mem_index >> 2) - 1);
6138 tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
6139 break;
6140 case 3:
6141 default:
6142 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
6143 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6144 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
6145 break;
6146 }
6147 break;
6148 case 1:
6149 /* XXX: the corresponding CPUID bit (SSE3, for fisttp) must be tested! */
6150 switch(op >> 4) {
6151 case 1:
6152 tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
6153 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6154 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6155 break;
6156 case 2:
6157 tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
6158 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6159 (s->mem_index >> 2) - 1);
6160 break;
6161 case 3:
6162 default:
6163 tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
6164 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6165 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6166 break;
6167 }
6168 tcg_gen_helper_0_0(helper_fpop);
6169 break;
6170 default:
6171 switch(op >> 4) {
6172 case 0:
6173 tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
6174 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6175 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6176 break;
6177 case 1:
6178 tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
6179 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6180 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6181 break;
6182 case 2:
6183 tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
6184 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6185 (s->mem_index >> 2) - 1);
6186 break;
6187 case 3:
6188 default:
6189 tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
6190 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6191 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6192 break;
6193 }
6194 if ((op & 7) == 3)
6195 tcg_gen_helper_0_0(helper_fpop);
6196 break;
6197 }
6198 break;
6199 case 0x0c: /* fldenv mem */
6200 if (s->cc_op != CC_OP_DYNAMIC)
6201 gen_op_set_cc_op(s->cc_op);
6202 gen_jmp_im(pc_start - s->cs_base);
6203 tcg_gen_helper_0_2(helper_fldenv,
6204 cpu_A0, tcg_const_i32(s->dflag));
6205 break;
6206 case 0x0d: /* fldcw mem */
6207 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
6208 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6209 tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
6210 break;
6211 case 0x0e: /* fnstenv mem */
6212 if (s->cc_op != CC_OP_DYNAMIC)
6213 gen_op_set_cc_op(s->cc_op);
6214 gen_jmp_im(pc_start - s->cs_base);
6215 tcg_gen_helper_0_2(helper_fstenv,
6216 cpu_A0, tcg_const_i32(s->dflag));
6217 break;
6218 case 0x0f: /* fnstcw mem */
6219 tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
6220 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6221 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6222 break;
6223 case 0x1d: /* fldt mem */
6224 if (s->cc_op != CC_OP_DYNAMIC)
6225 gen_op_set_cc_op(s->cc_op);
6226 gen_jmp_im(pc_start - s->cs_base);
6227 tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
6228 break;
6229 case 0x1f: /* fstpt mem */
6230 if (s->cc_op != CC_OP_DYNAMIC)
6231 gen_op_set_cc_op(s->cc_op);
6232 gen_jmp_im(pc_start - s->cs_base);
6233 tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
6234 tcg_gen_helper_0_0(helper_fpop);
6235 break;
6236 case 0x2c: /* frstor mem */
6237 if (s->cc_op != CC_OP_DYNAMIC)
6238 gen_op_set_cc_op(s->cc_op);
6239 gen_jmp_im(pc_start - s->cs_base);
6240 tcg_gen_helper_0_2(helper_frstor,
6241 cpu_A0, tcg_const_i32(s->dflag));
6242 break;
6243 case 0x2e: /* fnsave mem */
6244 if (s->cc_op != CC_OP_DYNAMIC)
6245 gen_op_set_cc_op(s->cc_op);
6246 gen_jmp_im(pc_start - s->cs_base);
6247 tcg_gen_helper_0_2(helper_fsave,
6248 cpu_A0, tcg_const_i32(s->dflag));
6249 break;
6250 case 0x2f: /* fnstsw mem */
6251 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
6252 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6253 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6254 break;
6255 case 0x3c: /* fbld */
6256 if (s->cc_op != CC_OP_DYNAMIC)
6257 gen_op_set_cc_op(s->cc_op);
6258 gen_jmp_im(pc_start - s->cs_base);
6259 tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
6260 break;
6261 case 0x3e: /* fbstp */
6262 if (s->cc_op != CC_OP_DYNAMIC)
6263 gen_op_set_cc_op(s->cc_op);
6264 gen_jmp_im(pc_start - s->cs_base);
6265 tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
6266 tcg_gen_helper_0_0(helper_fpop);
6267 break;
6268 case 0x3d: /* fildll */
6269 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6270 (s->mem_index >> 2) - 1);
6271 tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
6272 break;
6273 case 0x3f: /* fistpll */
6274 tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
6275 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6276 (s->mem_index >> 2) - 1);
6277 tcg_gen_helper_0_0(helper_fpop);
6278 break;
6279 default:
6280 goto illegal_op;
6281 }
6282 } else {
6283 /* register float ops */
6284 opreg = rm;
6285
6286 switch(op) {
6287 case 0x08: /* fld sti */
6288 tcg_gen_helper_0_0(helper_fpush);
6289 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
6290 break;
6291 case 0x09: /* fxchg sti */
6292 case 0x29: /* fxchg4 sti, undocumented op */
6293 case 0x39: /* fxchg7 sti, undocumented op */
6294 tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
6295 break;
6296 case 0x0a: /* grp d9/2 */
6297 switch(rm) {
6298 case 0: /* fnop */
6299 /* check exceptions (FreeBSD FPU probe) */
6300 if (s->cc_op != CC_OP_DYNAMIC)
6301 gen_op_set_cc_op(s->cc_op);
6302 gen_jmp_im(pc_start - s->cs_base);
6303 tcg_gen_helper_0_0(helper_fwait);
6304 break;
6305 default:
6306 goto illegal_op;
6307 }
6308 break;
6309 case 0x0c: /* grp d9/4 */
6310 switch(rm) {
6311 case 0: /* fchs */
6312 tcg_gen_helper_0_0(helper_fchs_ST0);
6313 break;
6314 case 1: /* fabs */
6315 tcg_gen_helper_0_0(helper_fabs_ST0);
6316 break;
6317 case 4: /* ftst */
6318 tcg_gen_helper_0_0(helper_fldz_FT0);
6319 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6320 break;
6321 case 5: /* fxam */
6322 tcg_gen_helper_0_0(helper_fxam_ST0);
6323 break;
6324 default:
6325 goto illegal_op;
6326 }
6327 break;
6328 case 0x0d: /* grp d9/5 */
6329 {
6330 switch(rm) {
6331 case 0:
6332 tcg_gen_helper_0_0(helper_fpush);
6333 tcg_gen_helper_0_0(helper_fld1_ST0);
6334 break;
6335 case 1:
6336 tcg_gen_helper_0_0(helper_fpush);
6337 tcg_gen_helper_0_0(helper_fldl2t_ST0);
6338 break;
6339 case 2:
6340 tcg_gen_helper_0_0(helper_fpush);
6341 tcg_gen_helper_0_0(helper_fldl2e_ST0);
6342 break;
6343 case 3:
6344 tcg_gen_helper_0_0(helper_fpush);
6345 tcg_gen_helper_0_0(helper_fldpi_ST0);
6346 break;
6347 case 4:
6348 tcg_gen_helper_0_0(helper_fpush);
6349 tcg_gen_helper_0_0(helper_fldlg2_ST0);
6350 break;
6351 case 5:
6352 tcg_gen_helper_0_0(helper_fpush);
6353 tcg_gen_helper_0_0(helper_fldln2_ST0);
6354 break;
6355 case 6:
6356 tcg_gen_helper_0_0(helper_fpush);
6357 tcg_gen_helper_0_0(helper_fldz_ST0);
6358 break;
6359 default:
6360 goto illegal_op;
6361 }
6362 }
6363 break;
6364 case 0x0e: /* grp d9/6 */
6365 switch(rm) {
6366 case 0: /* f2xm1 */
6367 tcg_gen_helper_0_0(helper_f2xm1);
6368 break;
6369 case 1: /* fyl2x */
6370 tcg_gen_helper_0_0(helper_fyl2x);
6371 break;
6372 case 2: /* fptan */
6373 tcg_gen_helper_0_0(helper_fptan);
6374 break;
6375 case 3: /* fpatan */
6376 tcg_gen_helper_0_0(helper_fpatan);
6377 break;
6378 case 4: /* fxtract */
6379 tcg_gen_helper_0_0(helper_fxtract);
6380 break;
6381 case 5: /* fprem1 */
6382 tcg_gen_helper_0_0(helper_fprem1);
6383 break;
6384 case 6: /* fdecstp */
6385 tcg_gen_helper_0_0(helper_fdecstp);
6386 break;
6387 default:
6388 case 7: /* fincstp */
6389 tcg_gen_helper_0_0(helper_fincstp);
6390 break;
6391 }
6392 break;
6393 case 0x0f: /* grp d9/7 */
6394 switch(rm) {
6395 case 0: /* fprem */
6396 tcg_gen_helper_0_0(helper_fprem);
6397 break;
6398 case 1: /* fyl2xp1 */
6399 tcg_gen_helper_0_0(helper_fyl2xp1);
6400 break;
6401 case 2: /* fsqrt */
6402 tcg_gen_helper_0_0(helper_fsqrt);
6403 break;
6404 case 3: /* fsincos */
6405 tcg_gen_helper_0_0(helper_fsincos);
6406 break;
6407 case 5: /* fscale */
6408 tcg_gen_helper_0_0(helper_fscale);
6409 break;
6410 case 4: /* frndint */
6411 tcg_gen_helper_0_0(helper_frndint);
6412 break;
6413 case 6: /* fsin */
6414 tcg_gen_helper_0_0(helper_fsin);
6415 break;
6416 default:
6417 case 7: /* fcos */
6418 tcg_gen_helper_0_0(helper_fcos);
6419 break;
6420 }
6421 break;
6422 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
6423 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
6424 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
6425 {
6426 int op1;
6427
6428 op1 = op & 7;
6429 if (op >= 0x20) {
6430 tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
6431 if (op >= 0x30)
6432 tcg_gen_helper_0_0(helper_fpop);
6433 } else {
6434 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6435 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
6436 }
6437 }
6438 break;
6439 case 0x02: /* fcom */
6440 case 0x22: /* fcom2, undocumented op */
6441 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6442 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6443 break;
6444 case 0x03: /* fcomp */
6445 case 0x23: /* fcomp3, undocumented op */
6446 case 0x32: /* fcomp5, undocumented op */
6447 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6448 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6449 tcg_gen_helper_0_0(helper_fpop);
6450 break;
6451 case 0x15: /* da/5 */
6452 switch(rm) {
6453 case 1: /* fucompp */
6454 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
6455 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6456 tcg_gen_helper_0_0(helper_fpop);
6457 tcg_gen_helper_0_0(helper_fpop);
6458 break;
6459 default:
6460 goto illegal_op;
6461 }
6462 break;
6463 case 0x1c:
6464 switch(rm) {
6465 case 0: /* feni (287 only, just do nop here) */
6466 break;
6467 case 1: /* fdisi (287 only, just do nop here) */
6468 break;
6469 case 2: /* fclex */
6470 tcg_gen_helper_0_0(helper_fclex);
6471 break;
6472 case 3: /* fninit */
6473 tcg_gen_helper_0_0(helper_fninit);
6474 break;
6475 case 4: /* fsetpm (287 only, just do nop here) */
6476 break;
6477 default:
6478 goto illegal_op;
6479 }
6480 break;
6481 case 0x1d: /* fucomi */
6482 if (s->cc_op != CC_OP_DYNAMIC)
6483 gen_op_set_cc_op(s->cc_op);
6484 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6485 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
6486 s->cc_op = CC_OP_EFLAGS;
6487 break;
6488 case 0x1e: /* fcomi */
6489 if (s->cc_op != CC_OP_DYNAMIC)
6490 gen_op_set_cc_op(s->cc_op);
6491 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6492 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
6493 s->cc_op = CC_OP_EFLAGS;
6494 break;
6495 case 0x28: /* ffree sti */
6496 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
6497 break;
6498 case 0x2a: /* fst sti */
6499 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
6500 break;
6501 case 0x2b: /* fstp sti */
6502 case 0x0b: /* fstp1 sti, undocumented op */
6503 case 0x3a: /* fstp8 sti, undocumented op */
6504 case 0x3b: /* fstp9 sti, undocumented op */
6505 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
6506 tcg_gen_helper_0_0(helper_fpop);
6507 break;
6508 case 0x2c: /* fucom st(i) */
6509 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6510 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6511 break;
6512 case 0x2d: /* fucomp st(i) */
6513 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6514 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6515 tcg_gen_helper_0_0(helper_fpop);
6516 break;
6517 case 0x33: /* de/3 */
6518 switch(rm) {
6519 case 1: /* fcompp */
6520 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
6521 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6522 tcg_gen_helper_0_0(helper_fpop);
6523 tcg_gen_helper_0_0(helper_fpop);
6524 break;
6525 default:
6526 goto illegal_op;
6527 }
6528 break;
6529 case 0x38: /* ffreep sti, undocumented op */
6530 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
6531 tcg_gen_helper_0_0(helper_fpop);
6532 break;
6533 case 0x3c: /* df/4 */
6534 switch(rm) {
6535 case 0:
6536 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
6537 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6538 gen_op_mov_reg_T0(OT_WORD, R_EAX);
6539 break;
6540 default:
6541 goto illegal_op;
6542 }
6543 break;
6544 case 0x3d: /* fucomip */
6545 if (s->cc_op != CC_OP_DYNAMIC)
6546 gen_op_set_cc_op(s->cc_op);
6547 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6548 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
6549 tcg_gen_helper_0_0(helper_fpop);
6550 s->cc_op = CC_OP_EFLAGS;
6551 break;
6552 case 0x3e: /* fcomip */
6553 if (s->cc_op != CC_OP_DYNAMIC)
6554 gen_op_set_cc_op(s->cc_op);
6555 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6556 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
6557 tcg_gen_helper_0_0(helper_fpop);
6558 s->cc_op = CC_OP_EFLAGS;
6559 break;
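 /* fcmovxx: fcmov_cc maps the low two opcode bits to the B/Z/BE/P
    conditions and bit 3 of op inverts the test; the branch skips
    the fmov when the move condition does not hold. */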
6560 case 0x10 ... 0x13: /* fcmovxx */
6561 case 0x18 ... 0x1b:
6562 {
6563 int op1, l1;
6564 static const uint8_t fcmov_cc[8] = {
6565 (JCC_B << 1),
6566 (JCC_Z << 1),
6567 (JCC_BE << 1),
6568 (JCC_P << 1),
6569 };
6570 op1 = fcmov_cc[op & 3] | (((op >> 3) & 1) ^ 1);
6571 l1 = gen_new_label();
6572 gen_jcc1(s, s->cc_op, op1, l1);
6573 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
6574 gen_set_label(l1);
6575 }
6576 break;
6577 default:
6578 goto illegal_op;
6579 }
6580 }
6581 break;
6582 /************************/
6583 /* string ops */
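 /* with a rep prefix the gen_repz_* variants wrap the basic string
    op in the count/loop logic; scas and cmps distinguish repz from
    repnz because they also test ZF after each iteration. */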
6584
6585 case 0xa4: /* movsS */
6586 case 0xa5:
6587 if ((b & 1) == 0)
6588 ot = OT_BYTE;
6589 else
6590 ot = dflag + OT_WORD;
6591
6592 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6593 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6594 } else {
6595 gen_movs(s, ot);
6596 }
6597 break;
6598
6599 case 0xaa: /* stosS */
6600 case 0xab:
6601 if ((b & 1) == 0)
6602 ot = OT_BYTE;
6603 else
6604 ot = dflag + OT_WORD;
6605
6606 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6607 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6608 } else {
6609 gen_stos(s, ot);
6610 }
6611 break;
6612 case 0xac: /* lodsS */
6613 case 0xad:
6614 if ((b & 1) == 0)
6615 ot = OT_BYTE;
6616 else
6617 ot = dflag + OT_WORD;
6618 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6619 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6620 } else {
6621 gen_lods(s, ot);
6622 }
6623 break;
6624 case 0xae: /* scasS */
6625 case 0xaf:
6626 if ((b & 1) == 0)
6627 ot = OT_BYTE;
6628 else
6629 ot = dflag + OT_WORD;
6630 if (prefixes & PREFIX_REPNZ) {
6631 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6632 } else if (prefixes & PREFIX_REPZ) {
6633 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6634 } else {
6635 gen_scas(s, ot);
6636 s->cc_op = CC_OP_SUBB + ot;
6637 }
6638 break;
6639
6640 case 0xa6: /* cmpsS */
6641 case 0xa7:
6642 if ((b & 1) == 0)
6643 ot = OT_BYTE;
6644 else
6645 ot = dflag + OT_WORD;
6646 if (prefixes & PREFIX_REPNZ) {
6647 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6648 } else if (prefixes & PREFIX_REPZ) {
6649 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6650 } else {
6651 gen_cmps(s, ot);
6652 s->cc_op = CC_OP_SUBB + ot;
6653 }
6654 break;
6655 case 0x6c: /* insS */
6656 case 0x6d:
6657 if ((b & 1) == 0)
6658 ot = OT_BYTE;
6659 else
6660 ot = dflag ? OT_LONG : OT_WORD;
6661 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6662 gen_op_andl_T0_ffff();
6663 gen_check_io(s, ot, pc_start - s->cs_base,
6664 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
6665 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6666 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6667 } else {
6668 gen_ins(s, ot);
6669 if (use_icount) {
6670 gen_jmp(s, s->pc - s->cs_base);
6671 }
6672 }
6673 break;
6674 case 0x6e: /* outsS */
6675 case 0x6f:
6676 if ((b & 1) == 0)
6677 ot = OT_BYTE;
6678 else
6679 ot = dflag ? OT_LONG : OT_WORD;
6680 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6681 gen_op_andl_T0_ffff();
6682 gen_check_io(s, ot, pc_start - s->cs_base,
6683 svm_is_rep(prefixes) | 4);
6684 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6685 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6686 } else {
6687 gen_outs(s, ot);
6688 if (use_icount) {
6689 gen_jmp(s, s->pc - s->cs_base);
6690 }
6691 }
6692 break;
6693
6694 /************************/
6695 /* port I/O */
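 /* gen_check_io() validates the I/O permissions and raises the SVM
    IOIO intercept if needed; with icount active the access is
    bracketed by gen_io_start()/gen_io_end() and the TB is ended. */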
6696
6697 case 0xe4:
6698 case 0xe5:
6699 if ((b & 1) == 0)
6700 ot = OT_BYTE;
6701 else
6702 ot = dflag ? OT_LONG : OT_WORD;
6703 val = ldub_code(s->pc++);
6704 gen_op_movl_T0_im(val);
6705 gen_check_io(s, ot, pc_start - s->cs_base,
6706 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6707 if (use_icount)
6708 gen_io_start();
6709 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6710 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
6711 gen_op_mov_reg_T1(ot, R_EAX);
6712 if (use_icount) {
6713 gen_io_end();
6714 gen_jmp(s, s->pc - s->cs_base);
6715 }
6716 break;
6717 case 0xe6:
6718 case 0xe7:
6719 if ((b & 1) == 0)
6720 ot = OT_BYTE;
6721 else
6722 ot = dflag ? OT_LONG : OT_WORD;
6723 val = ldub_code(s->pc++);
6724 gen_op_movl_T0_im(val);
6725 gen_check_io(s, ot, pc_start - s->cs_base,
6726 svm_is_rep(prefixes));
6727#ifdef VBOX /* bird: Linux writes to port 0x80 to delay I/O. */
6728 if (val == 0x80)
6729 break;
6730#endif /* VBOX */
6731 gen_op_mov_TN_reg(ot, 1, R_EAX);
6732
6733 if (use_icount)
6734 gen_io_start();
6735 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6736 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6737 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6738 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6739 if (use_icount) {
6740 gen_io_end();
6741 gen_jmp(s, s->pc - s->cs_base);
6742 }
6743 break;
6744 case 0xec:
6745 case 0xed:
6746 if ((b & 1) == 0)
6747 ot = OT_BYTE;
6748 else
6749 ot = dflag ? OT_LONG : OT_WORD;
6750 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6751 gen_op_andl_T0_ffff();
6752 gen_check_io(s, ot, pc_start - s->cs_base,
6753 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6754 if (use_icount)
6755 gen_io_start();
6756 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6757 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
6758 gen_op_mov_reg_T1(ot, R_EAX);
6759 if (use_icount) {
6760 gen_io_end();
6761 gen_jmp(s, s->pc - s->cs_base);
6762 }
6763 break;
6764 case 0xee:
6765 case 0xef:
6766 if ((b & 1) == 0)
6767 ot = OT_BYTE;
6768 else
6769 ot = dflag ? OT_LONG : OT_WORD;
6770 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6771 gen_op_andl_T0_ffff();
6772 gen_check_io(s, ot, pc_start - s->cs_base,
6773 svm_is_rep(prefixes));
6774 gen_op_mov_TN_reg(ot, 1, R_EAX);
6775
6776 if (use_icount)
6777 gen_io_start();
6778 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6779 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6780 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6781 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6782 if (use_icount) {
6783 gen_io_end();
6784 gen_jmp(s, s->pc - s->cs_base);
6785 }
6786 break;
6787
6788 /************************/
6789 /* control */
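 /* near ret pops EIP inline; lret/iret in protected mode go through
    helpers since they may change privilege level, while the
    real-mode/vm86 lret path pops CS:EIP directly. */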
6790 case 0xc2: /* ret im */
6791 val = ldsw_code(s->pc);
6792 s->pc += 2;
6793 gen_pop_T0(s);
6794 if (CODE64(s) && s->dflag)
6795 s->dflag = 2;
6796 gen_stack_update(s, val + (2 << s->dflag));
6797 if (s->dflag == 0)
6798 gen_op_andl_T0_ffff();
6799 gen_op_jmp_T0();
6800 gen_eob(s);
6801 break;
6802 case 0xc3: /* ret */
6803 gen_pop_T0(s);
6804 gen_pop_update(s);
6805 if (s->dflag == 0)
6806 gen_op_andl_T0_ffff();
6807 gen_op_jmp_T0();
6808 gen_eob(s);
6809 break;
6810 case 0xca: /* lret im */
6811 val = ldsw_code(s->pc);
6812 s->pc += 2;
6813 do_lret:
6814 if (s->pe && !s->vm86) {
6815 if (s->cc_op != CC_OP_DYNAMIC)
6816 gen_op_set_cc_op(s->cc_op);
6817 gen_jmp_im(pc_start - s->cs_base);
6818 tcg_gen_helper_0_2(helper_lret_protected,
6819 tcg_const_i32(s->dflag),
6820 tcg_const_i32(val));
6821 } else {
6822 gen_stack_A0(s);
6823 /* pop offset */
6824 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6825 if (s->dflag == 0)
6826 gen_op_andl_T0_ffff();
6827 /* NOTE: keeping EIP updated is not a problem in case of
6828 exception */
6829 gen_op_jmp_T0();
6830 /* pop selector */
6831 gen_op_addl_A0_im(2 << s->dflag);
6832 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6833 gen_op_movl_seg_T0_vm(R_CS);
6834 /* add stack offset */
6835 gen_stack_update(s, val + (4 << s->dflag));
6836 }
6837 gen_eob(s);
6838 break;
6839 case 0xcb: /* lret */
6840 val = 0;
6841 goto do_lret;
6842 case 0xcf: /* iret */
6843 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET);
6844 if (!s->pe) {
6845 /* real mode */
6846 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6847 s->cc_op = CC_OP_EFLAGS;
6848 } else if (s->vm86) {
6849#ifdef VBOX
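 /* with CR4.VME set, a 16-bit iret in vm86 mode is virtualized
    even when IOPL < 3; the 32-bit form (s->dflag != 0) still
    raises #GP. */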
6850 if (s->iopl != 3 && (!s->vme || s->dflag)) {
6851#else
6852 if (s->iopl != 3) {
6853#endif
6854 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6855 } else {
6856 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6857 s->cc_op = CC_OP_EFLAGS;
6858 }
6859 } else {
6860 if (s->cc_op != CC_OP_DYNAMIC)
6861 gen_op_set_cc_op(s->cc_op);
6862 gen_jmp_im(pc_start - s->cs_base);
6863 tcg_gen_helper_0_2(helper_iret_protected,
6864 tcg_const_i32(s->dflag),
6865 tcg_const_i32(s->pc - s->cs_base));
6866 s->cc_op = CC_OP_EFLAGS;
6867 }
6868 gen_eob(s);
6869 break;
6870 case 0xe8: /* call im */
6871 {
6872 if (dflag)
6873 tval = (int32_t)insn_get(s, OT_LONG);
6874 else
6875 tval = (int16_t)insn_get(s, OT_WORD);
6876 next_eip = s->pc - s->cs_base;
6877 tval += next_eip;
6878 if (s->dflag == 0)
6879 tval &= 0xffff;
6880 gen_movtl_T0_im(next_eip);
6881 gen_push_T0(s);
6882 gen_jmp(s, tval);
6883 }
6884 break;
6885 case 0x9a: /* lcall im */
6886 {
6887 unsigned int selector, offset;
6888
6889 if (CODE64(s))
6890 goto illegal_op;
6891 ot = dflag ? OT_LONG : OT_WORD;
6892 offset = insn_get(s, ot);
6893 selector = insn_get(s, OT_WORD);
6894
6895 gen_op_movl_T0_im(selector);
6896 gen_op_movl_T1_imu(offset);
6897 }
6898 goto do_lcall;
6899 case 0xe9: /* jmp im */
6900 if (dflag)
6901 tval = (int32_t)insn_get(s, OT_LONG);
6902 else
6903 tval = (int16_t)insn_get(s, OT_WORD);
6904 tval += s->pc - s->cs_base;
6905 if (s->dflag == 0)
6906 tval &= 0xffff;
6907 else if(!CODE64(s))
6908 tval &= 0xffffffff;
6909 gen_jmp(s, tval);
6910 break;
6911 case 0xea: /* ljmp im */
6912 {
6913 unsigned int selector, offset;
6914
6915 if (CODE64(s))
6916 goto illegal_op;
6917 ot = dflag ? OT_LONG : OT_WORD;
6918 offset = insn_get(s, ot);
6919 selector = insn_get(s, OT_WORD);
6920
6921 gen_op_movl_T0_im(selector);
6922 gen_op_movl_T1_imu(offset);
6923 }
6924 goto do_ljmp;
6925 case 0xeb: /* jmp Jb */
6926 tval = (int8_t)insn_get(s, OT_BYTE);
6927 tval += s->pc - s->cs_base;
6928 if (s->dflag == 0)
6929 tval &= 0xffff;
6930 gen_jmp(s, tval);
6931 break;
6932 case 0x70 ... 0x7f: /* jcc Jb */
6933 tval = (int8_t)insn_get(s, OT_BYTE);
6934 goto do_jcc;
6935 case 0x180 ... 0x18f: /* jcc Jv */
6936 if (dflag) {
6937 tval = (int32_t)insn_get(s, OT_LONG);
6938 } else {
6939 tval = (int16_t)insn_get(s, OT_WORD);
6940 }
6941 do_jcc:
6942 next_eip = s->pc - s->cs_base;
6943 tval += next_eip;
6944 if (s->dflag == 0)
6945 tval &= 0xffff;
6946 gen_jcc(s, b, tval, next_eip);
6947 break;
6948
6949 case 0x190 ... 0x19f: /* setcc Gv */
6950 modrm = ldub_code(s->pc++);
6951 gen_setcc(s, b);
6952 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
6953 break;
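 /* cmov: the source operand is read unconditionally (memory is
    accessed even if the condition is false, as on real hardware);
    only the register write-back is conditional. */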
6954 case 0x140 ... 0x14f: /* cmov Gv, Ev */
6955 {
6956 int l1;
6957 TCGv t0;
6958
6959 ot = dflag + OT_WORD;
6960 modrm = ldub_code(s->pc++);
6961 reg = ((modrm >> 3) & 7) | rex_r;
6962 mod = (modrm >> 6) & 3;
6963 t0 = tcg_temp_local_new(TCG_TYPE_TL);
6964 if (mod != 3) {
6965 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6966 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
6967 } else {
6968 rm = (modrm & 7) | REX_B(s);
6969 gen_op_mov_v_reg(ot, t0, rm);
6970 }
6971#ifdef TARGET_X86_64
6972 if (ot == OT_LONG) {
6973 /* XXX: is zeroing the high half here Intel-specific behaviour? */
6974 l1 = gen_new_label();
6975 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6976 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
6977 gen_set_label(l1);
6978 tcg_gen_movi_tl(cpu_tmp0, 0);
6979 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
6980 } else
6981#endif
6982 {
6983 l1 = gen_new_label();
6984 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6985 gen_op_mov_reg_v(ot, reg, t0);
6986 gen_set_label(l1);
6987 }
6988 tcg_temp_free(t0);
6989 }
6990 break;
6991
6992 /************************/
6993 /* flags */
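 /* pushf/popf mask which eflags bits may change according to CPL
    and IOPL; popf ends the TB because a TF change must take effect
    on the very next instruction. */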
6994 case 0x9c: /* pushf */
6995 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF);
6996#ifdef VBOX
6997 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
6998#else
6999 if (s->vm86 && s->iopl != 3) {
7000#endif
7001 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7002 } else {
7003 if (s->cc_op != CC_OP_DYNAMIC)
7004 gen_op_set_cc_op(s->cc_op);
7005#ifdef VBOX
7006 if (s->vm86 && s->vme && s->iopl != 3)
7007 tcg_gen_helper_1_0(helper_read_eflags_vme, cpu_T[0]);
7008 else
7009#endif
7010 tcg_gen_helper_1_0(helper_read_eflags, cpu_T[0]);
7011 gen_push_T0(s);
7012 }
7013 break;
7014 case 0x9d: /* popf */
7015 gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF);
7016#ifdef VBOX
7017 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
7018#else
7019 if (s->vm86 && s->iopl != 3) {
7020#endif
7021 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7022 } else {
7023 gen_pop_T0(s);
7024 if (s->cpl == 0) {
7025 if (s->dflag) {
7026 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7027 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
7028 } else {
7029 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7030 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
7031 }
7032 } else {
7033 if (s->cpl <= s->iopl) {
7034 if (s->dflag) {
7035 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7036 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
7037 } else {
7038 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7039 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
7040 }
7041 } else {
7042 if (s->dflag) {
7043 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7044 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
7045 } else {
7046#ifdef VBOX
7047 if (s->vm86 && s->vme)
7048 tcg_gen_helper_0_1(helper_write_eflags_vme, cpu_T[0]);
7049 else
7050#endif
7051 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7052 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
7053 }
7054 }
7055 }
7056 gen_pop_update(s);
7057 s->cc_op = CC_OP_EFLAGS;
7058 /* abort translation because TF flag may change */
7059 gen_jmp_im(s->pc - s->cs_base);
7060 gen_eob(s);
7061 }
7062 break;
7063 case 0x9e: /* sahf */
7064 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
7065 goto illegal_op;
7066 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
7067 if (s->cc_op != CC_OP_DYNAMIC)
7068 gen_op_set_cc_op(s->cc_op);
7069 gen_compute_eflags(cpu_cc_src);
7070 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
7071 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
7072 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
7073 s->cc_op = CC_OP_EFLAGS;
7074 break;
7075 case 0x9f: /* lahf */
7076 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
7077 goto illegal_op;
7078 if (s->cc_op != CC_OP_DYNAMIC)
7079 gen_op_set_cc_op(s->cc_op);
7080 gen_compute_eflags(cpu_T[0]);
7081 /* Note: gen_compute_eflags() only gives the condition codes */
7082 tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);
7083 gen_op_mov_reg_T0(OT_BYTE, R_AH);
7084 break;
7085 case 0xf5: /* cmc */
7086 if (s->cc_op != CC_OP_DYNAMIC)
7087 gen_op_set_cc_op(s->cc_op);
7088 gen_compute_eflags(cpu_cc_src);
7089 tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
7090 s->cc_op = CC_OP_EFLAGS;
7091 break;
7092 case 0xf8: /* clc */
7093 if (s->cc_op != CC_OP_DYNAMIC)
7094 gen_op_set_cc_op(s->cc_op);
7095 gen_compute_eflags(cpu_cc_src);
7096 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
7097 s->cc_op = CC_OP_EFLAGS;
7098 break;
7099 case 0xf9: /* stc */
7100 if (s->cc_op != CC_OP_DYNAMIC)
7101 gen_op_set_cc_op(s->cc_op);
7102 gen_compute_eflags(cpu_cc_src);
7103 tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
7104 s->cc_op = CC_OP_EFLAGS;
7105 break;
7106 case 0xfc: /* cld */
7107 tcg_gen_movi_i32(cpu_tmp2_i32, 1);
7108 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
7109 break;
7110 case 0xfd: /* std */
7111 tcg_gen_movi_i32(cpu_tmp2_i32, -1);
7112 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
7113 break;
7114
7115 /************************/
7116 /* bit operations */
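 /* bt/bts/btr/btc: the bit offset is taken modulo the operand
    width (after the memory displacement fixup below); CC_OP_SARB +
    ot makes CF come from the shifted-out bit kept in cpu_cc_src. */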
7117 case 0x1ba: /* bt/bts/btr/btc Gv, im */
7118 ot = dflag + OT_WORD;
7119 modrm = ldub_code(s->pc++);
7120 op = (modrm >> 3) & 7;
7121 mod = (modrm >> 6) & 3;
7122 rm = (modrm & 7) | REX_B(s);
7123 if (mod != 3) {
7124 s->rip_offset = 1;
7125 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7126 gen_op_ld_T0_A0(ot + s->mem_index);
7127 } else {
7128 gen_op_mov_TN_reg(ot, 0, rm);
7129 }
7130 /* load shift */
7131 val = ldub_code(s->pc++);
7132 gen_op_movl_T1_im(val);
7133 if (op < 4)
7134 goto illegal_op;
7135 op -= 4;
7136 goto bt_op;
7137 case 0x1a3: /* bt Gv, Ev */
7138 op = 0;
7139 goto do_btx;
7140 case 0x1ab: /* bts */
7141 op = 1;
7142 goto do_btx;
7143 case 0x1b3: /* btr */
7144 op = 2;
7145 goto do_btx;
7146 case 0x1bb: /* btc */
7147 op = 3;
7148 do_btx:
7149 ot = dflag + OT_WORD;
7150 modrm = ldub_code(s->pc++);
7151 reg = ((modrm >> 3) & 7) | rex_r;
7152 mod = (modrm >> 6) & 3;
7153 rm = (modrm & 7) | REX_B(s);
7154 gen_op_mov_TN_reg(OT_LONG, 1, reg);
7155 if (mod != 3) {
7156 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7157 /* memory operand: the bit offset can address beyond the base, so add the computed displacement */
7158 gen_exts(ot, cpu_T[1]);
7159 tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
7160 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
7161 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
7162 gen_op_ld_T0_A0(ot + s->mem_index);
7163 } else {
7164 gen_op_mov_TN_reg(ot, 0, rm);
7165 }
7166 bt_op:
7167 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
7168 switch(op) {
7169 case 0:
7170 tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
7171 tcg_gen_movi_tl(cpu_cc_dst, 0);
7172 break;
7173 case 1:
7174 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7175 tcg_gen_movi_tl(cpu_tmp0, 1);
7176 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7177 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7178 break;
7179 case 2:
7180 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7181 tcg_gen_movi_tl(cpu_tmp0, 1);
7182 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7183 tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
7184 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7185 break;
7186 default:
7187 case 3:
7188 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7189 tcg_gen_movi_tl(cpu_tmp0, 1);
7190 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7191 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7192 break;
7193 }
7194 s->cc_op = CC_OP_SARB + ot;
7195 if (op != 0) {
7196 if (mod != 3)
7197 gen_op_st_T0_A0(ot + s->mem_index);
7198 else
7199 gen_op_mov_reg_T0(ot, rm);
7200 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
7201 tcg_gen_movi_tl(cpu_cc_dst, 0);
7202 }
7203 break;
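 /* bsf/bsr: a zero source sets ZF and leaves the destination
    unchanged (architecturally undefined); cc_dst doubles as a
    "not zero" flag under CC_OP_LOGICB + ot. */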
7204 case 0x1bc: /* bsf */
7205 case 0x1bd: /* bsr */
7206 {
7207 int label1;
7208 TCGv t0;
7209
7210 ot = dflag + OT_WORD;
7211 modrm = ldub_code(s->pc++);
7212 reg = ((modrm >> 3) & 7) | rex_r;
7213 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
7214 gen_extu(ot, cpu_T[0]);
7215 label1 = gen_new_label();
7216 tcg_gen_movi_tl(cpu_cc_dst, 0);
7217 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7218 tcg_gen_mov_tl(t0, cpu_T[0]);
7219 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, label1);
7220 if (b & 1) {
7221 tcg_gen_helper_1_1(helper_bsr, cpu_T[0], t0);
7222 } else {
7223 tcg_gen_helper_1_1(helper_bsf, cpu_T[0], t0);
7224 }
7225 gen_op_mov_reg_T0(ot, reg);
7226 tcg_gen_movi_tl(cpu_cc_dst, 1);
7227 gen_set_label(label1);
7228 tcg_gen_discard_tl(cpu_cc_src);
7229 s->cc_op = CC_OP_LOGICB + ot;
7230 tcg_temp_free(t0);
7231 }
7232 break;
7233 /************************/
7234 /* bcd */
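 /* the BCD adjust instructions all go through helpers and are
    invalid in 64-bit mode; aam with a zero immediate raises a
    divide error (#DE). */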
7235 case 0x27: /* daa */
7236 if (CODE64(s))
7237 goto illegal_op;
7238 if (s->cc_op != CC_OP_DYNAMIC)
7239 gen_op_set_cc_op(s->cc_op);
7240 tcg_gen_helper_0_0(helper_daa);
7241 s->cc_op = CC_OP_EFLAGS;
7242 break;
7243 case 0x2f: /* das */
7244 if (CODE64(s))
7245 goto illegal_op;
7246 if (s->cc_op != CC_OP_DYNAMIC)
7247 gen_op_set_cc_op(s->cc_op);
7248 tcg_gen_helper_0_0(helper_das);
7249 s->cc_op = CC_OP_EFLAGS;
7250 break;
7251 case 0x37: /* aaa */
7252 if (CODE64(s))
7253 goto illegal_op;
7254 if (s->cc_op != CC_OP_DYNAMIC)
7255 gen_op_set_cc_op(s->cc_op);
7256 tcg_gen_helper_0_0(helper_aaa);
7257 s->cc_op = CC_OP_EFLAGS;
7258 break;
7259 case 0x3f: /* aas */
7260 if (CODE64(s))
7261 goto illegal_op;
7262 if (s->cc_op != CC_OP_DYNAMIC)
7263 gen_op_set_cc_op(s->cc_op);
7264 tcg_gen_helper_0_0(helper_aas);
7265 s->cc_op = CC_OP_EFLAGS;
7266 break;
7267 case 0xd4: /* aam */
7268 if (CODE64(s))
7269 goto illegal_op;
7270 val = ldub_code(s->pc++);
7271 if (val == 0) {
7272 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
7273 } else {
7274 tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
7275 s->cc_op = CC_OP_LOGICB;
7276 }
7277 break;
7278 case 0xd5: /* aad */
7279 if (CODE64(s))
7280 goto illegal_op;
7281 val = ldub_code(s->pc++);
7282 tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
7283 s->cc_op = CC_OP_LOGICB;
7284 break;
7285 /************************/
7286 /* misc */
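 /* 0x90: plain nop; with a repz prefix it encodes pause, which only
    needs the SVM PAUSE intercept check here. The rex.b form (xchg
    r8, rax) is not implemented, hence the XXX note below. */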
7287 case 0x90: /* nop */
7288 /* XXX: rex.b makes this xchg r8, rax; not handled */
7289 /* XXX: correct lock test for all insns */
7290 if (prefixes & PREFIX_LOCK)
7291 goto illegal_op;
7292 if (prefixes & PREFIX_REPZ) {
7293 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
7294 }
7295 break;
7296 case 0x9b: /* fwait */
7297 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
7298 (HF_MP_MASK | HF_TS_MASK)) {
7299 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7300 } else {
7301 if (s->cc_op != CC_OP_DYNAMIC)
7302 gen_op_set_cc_op(s->cc_op);
7303 gen_jmp_im(pc_start - s->cs_base);
7304 tcg_gen_helper_0_0(helper_fwait);
7305 }
7306 break;
7307 case 0xcc: /* int3 */
7308#ifdef VBOX
7309 if (s->vm86 && s->iopl != 3 && !s->vme) {
7310 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7311 } else
7312#endif
7313 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
7314 break;
7315 case 0xcd: /* int N */
7316 val = ldub_code(s->pc++);
7317#ifdef VBOX
7318 if (s->vm86 && s->iopl != 3 && !s->vme) {
7319#else
7320 if (s->vm86 && s->iopl != 3) {
7321#endif
7322 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7323 } else {
7324 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
7325 }
7326 break;
7327 case 0xce: /* into */
7328 if (CODE64(s))
7329 goto illegal_op;
7330 if (s->cc_op != CC_OP_DYNAMIC)
7331 gen_op_set_cc_op(s->cc_op);
7332 gen_jmp_im(pc_start - s->cs_base);
7333 tcg_gen_helper_0_1(helper_into, tcg_const_i32(s->pc - pc_start));
7334 break;
7335 case 0xf1: /* icebp (undocumented, exits to external debugger) */
7336 gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP);
7337#if 1
7338 gen_debug(s, pc_start - s->cs_base);
7339#else
7340 /* start debug */
7341 tb_flush(cpu_single_env);
7342 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
7343#endif
7344 break;
7345 case 0xfa: /* cli */
7346 if (!s->vm86) {
7347 if (s->cpl <= s->iopl) {
7348 tcg_gen_helper_0_0(helper_cli);
7349 } else {
7350 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7351 }
7352 } else {
7353 if (s->iopl == 3) {
7354 tcg_gen_helper_0_0(helper_cli);
7355#ifdef VBOX
7356 } else if (s->iopl != 3 && s->vme) {
7357 tcg_gen_helper_0_0(helper_cli_vme);
7358#endif
7359 } else {
7360 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7361 }
7362 }
7363 break;
7364 case 0xfb: /* sti */
7365 if (!s->vm86) {
7366 if (s->cpl <= s->iopl) {
7367 gen_sti:
7368 tcg_gen_helper_0_0(helper_sti);
7369 /* interrupts are recognized again only after the insn following sti */
7370 /* if several consecutive insns inhibit interrupts, only the
7371 _first_ one sets the inhibit flag */
7372 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
7373 tcg_gen_helper_0_0(helper_set_inhibit_irq);
7374 /* give a chance to handle pending irqs */
7375 gen_jmp_im(s->pc - s->cs_base);
7376 gen_eob(s);
7377 } else {
7378 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7379 }
7380 } else {
7381 if (s->iopl == 3) {
7382 goto gen_sti;
7383#ifdef VBOX
7384 } else if (s->iopl != 3 && s->vme) {
7385 tcg_gen_helper_0_0(helper_sti_vme);
7386 /* give a chance to handle pending irqs */
7387 gen_jmp_im(s->pc - s->cs_base);
7388 gen_eob(s);
7389#endif
7390 } else {
7391 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7392 }
7393 }
7394 break;
7395 case 0x62: /* bound */
7396 if (CODE64(s))
7397 goto illegal_op;
7398 ot = dflag ? OT_LONG : OT_WORD;
7399 modrm = ldub_code(s->pc++);
7400 reg = (modrm >> 3) & 7;
7401 mod = (modrm >> 6) & 3;
7402 if (mod == 3)
7403 goto illegal_op;
7404 gen_op_mov_TN_reg(ot, 0, reg);
7405 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7406 gen_jmp_im(pc_start - s->cs_base);
7407 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7408 if (ot == OT_WORD)
7409 tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
7410 else
7411 tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
7412 break;
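 /* bswap: a 16-bit operand size leaves the result architecturally
    undefined; this implementation simply performs the 32-bit swap. */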
7413 case 0x1c8 ... 0x1cf: /* bswap reg */
7414 reg = (b & 7) | REX_B(s);
7415#ifdef TARGET_X86_64
7416 if (dflag == 2) {
7417 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
7418 tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
7419 gen_op_mov_reg_T0(OT_QUAD, reg);
7420 } else
7421 {
7422 TCGv tmp0;
7423 gen_op_mov_TN_reg(OT_LONG, 0, reg);
7424
7425 tmp0 = tcg_temp_new(TCG_TYPE_I32);
7426 tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
7427 tcg_gen_bswap_i32(tmp0, tmp0);
7428 tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
7429 gen_op_mov_reg_T0(OT_LONG, reg);
7430 }
7431#else
7432 {
7433 gen_op_mov_TN_reg(OT_LONG, 0, reg);
7434 tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
7435 gen_op_mov_reg_T0(OT_LONG, reg);
7436 }
7437#endif
7438 break;
7439 case 0xd6: /* salc */
7440 if (CODE64(s))
7441 goto illegal_op;
7442 if (s->cc_op != CC_OP_DYNAMIC)
7443 gen_op_set_cc_op(s->cc_op);
7444 gen_compute_eflags_c(cpu_T[0]);
7445 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
7446 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
7447 break;
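 /* loop/jecxz: l1 = branch taken, l3 = loop condition failed (fall
    through to next_eip), l2 = common exit before gen_eob(); the
    count register width follows aflag. */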
7448 case 0xe0: /* loopnz */
7449 case 0xe1: /* loopz */
7450 case 0xe2: /* loop */
7451 case 0xe3: /* jecxz */
7452 {
7453 int l1, l2, l3;
7454
7455 tval = (int8_t)insn_get(s, OT_BYTE);
7456 next_eip = s->pc - s->cs_base;
7457 tval += next_eip;
7458 if (s->dflag == 0)
7459 tval &= 0xffff;
7460
7461 l1 = gen_new_label();
7462 l2 = gen_new_label();
7463 l3 = gen_new_label();
7464 b &= 3;
7465 switch(b) {
7466 case 0: /* loopnz */
7467 case 1: /* loopz */
7468 if (s->cc_op != CC_OP_DYNAMIC)
7469 gen_op_set_cc_op(s->cc_op);
7470 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7471 gen_op_jz_ecx(s->aflag, l3);
7472 gen_compute_eflags(cpu_tmp0);
7473 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
7474 if (b == 0) {
7475 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
7476 } else {
7477 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, l1);
7478 }
7479 break;
7480 case 2: /* loop */
7481 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7482 gen_op_jnz_ecx(s->aflag, l1);
7483 break;
7484 default:
7485 case 3: /* jcxz */
7486 gen_op_jz_ecx(s->aflag, l1);
7487 break;
7488 }
7489
7490 gen_set_label(l3);
7491 gen_jmp_im(next_eip);
7492 tcg_gen_br(l2);
7493
7494 gen_set_label(l1);
7495 gen_jmp_im(tval);
7496 gen_set_label(l2);
7497 gen_eob(s);
7498 }
7499 break;
7500 case 0x130: /* wrmsr */
7501 case 0x132: /* rdmsr */
7502 if (s->cpl != 0) {
7503 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7504 } else {
7505 if (s->cc_op != CC_OP_DYNAMIC)
7506 gen_op_set_cc_op(s->cc_op);
7507 gen_jmp_im(pc_start - s->cs_base);
7508 if (b & 2) {
7509 tcg_gen_helper_0_0(helper_rdmsr);
7510 } else {
7511 tcg_gen_helper_0_0(helper_wrmsr);
7512 }
7513 }
7514 break;
7515 case 0x131: /* rdtsc */
7516 if (s->cc_op != CC_OP_DYNAMIC)
7517 gen_op_set_cc_op(s->cc_op);
7518 gen_jmp_im(pc_start - s->cs_base);
7519 if (use_icount)
7520 gen_io_start();
7521 tcg_gen_helper_0_0(helper_rdtsc);
7522 if (use_icount) {
7523 gen_io_end();
7524 gen_jmp(s, s->pc - s->cs_base);
7525 }
7526 break;
7527 case 0x133: /* rdpmc */
7528 if (s->cc_op != CC_OP_DYNAMIC)
7529 gen_op_set_cc_op(s->cc_op);
7530 gen_jmp_im(pc_start - s->cs_base);
7531 tcg_gen_helper_0_0(helper_rdpmc);
7532 break;
7533 case 0x134: /* sysenter */
7534#ifndef VBOX
7535 /* On Intel CPUs, SYSENTER is valid in 64-bit mode */
7536 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
7537#else
7538 /** @todo: make things right */
7539 if (CODE64(s))
7540#endif
7541 goto illegal_op;
7542 if (!s->pe) {
7543 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7544 } else {
7545 if (s->cc_op != CC_OP_DYNAMIC) {
7546 gen_op_set_cc_op(s->cc_op);
7547 s->cc_op = CC_OP_DYNAMIC;
7548 }
7549 gen_jmp_im(pc_start - s->cs_base);
7550 tcg_gen_helper_0_0(helper_sysenter);
7551 gen_eob(s);
7552 }
7553 break;
7554 case 0x135: /* sysexit */
7555#ifndef VBOX
7556 /* On Intel CPUs, SYSEXIT is valid in 64-bit mode */
7557 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
7558#else
7559 /** @todo: make things right */
7560 if (CODE64(s))
7561#endif
7562 goto illegal_op;
7563 if (!s->pe) {
7564 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7565 } else {
7566 if (s->cc_op != CC_OP_DYNAMIC) {
7567 gen_op_set_cc_op(s->cc_op);
7568 s->cc_op = CC_OP_DYNAMIC;
7569 }
7570 gen_jmp_im(pc_start - s->cs_base);
7571 tcg_gen_helper_0_1(helper_sysexit, tcg_const_i32(dflag));
7572 gen_eob(s);
7573 }
7574 break;
7575#ifdef TARGET_X86_64
7576 case 0x105: /* syscall */
7577 /* XXX: is it usable in real mode? */
7578 if (s->cc_op != CC_OP_DYNAMIC) {
7579 gen_op_set_cc_op(s->cc_op);
7580 s->cc_op = CC_OP_DYNAMIC;
7581 }
7582 gen_jmp_im(pc_start - s->cs_base);
7583 tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
7584 gen_eob(s);
7585 break;
7586 case 0x107: /* sysret */
7587 if (!s->pe) {
7588 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7589 } else {
7590 if (s->cc_op != CC_OP_DYNAMIC) {
7591 gen_op_set_cc_op(s->cc_op);
7592 s->cc_op = CC_OP_DYNAMIC;
7593 }
7594 gen_jmp_im(pc_start - s->cs_base);
7595 tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
7596 /* condition codes are modified only in long mode */
7597 if (s->lma)
7598 s->cc_op = CC_OP_EFLAGS;
7599 gen_eob(s);
7600 }
7601 break;
7602#endif
7603 case 0x1a2: /* cpuid */
7604 if (s->cc_op != CC_OP_DYNAMIC)
7605 gen_op_set_cc_op(s->cc_op);
7606 gen_jmp_im(pc_start - s->cs_base);
7607 tcg_gen_helper_0_0(helper_cpuid);
7608 break;
7609 case 0xf4: /* hlt */
7610 if (s->cpl != 0) {
7611 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7612 } else {
7613 if (s->cc_op != CC_OP_DYNAMIC)
7614 gen_op_set_cc_op(s->cc_op);
7615 gen_jmp_im(pc_start - s->cs_base);
7616 tcg_gen_helper_0_1(helper_hlt, tcg_const_i32(s->pc - pc_start));
7617 s->is_jmp = 3;
7618 }
7619 break;
7620 case 0x100:
7621 modrm = ldub_code(s->pc++);
7622 mod = (modrm >> 6) & 3;
7623 op = (modrm >> 3) & 7;
7624 switch(op) {
7625 case 0: /* sldt */
7626 if (!s->pe || s->vm86)
7627 goto illegal_op;
7628 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ);
7629 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
7630 ot = OT_WORD;
7631 if (mod == 3)
7632 ot += s->dflag;
7633 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
7634 break;
7635 case 2: /* lldt */
7636 if (!s->pe || s->vm86)
7637 goto illegal_op;
7638 if (s->cpl != 0) {
7639 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7640 } else {
7641 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE);
7642 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7643 gen_jmp_im(pc_start - s->cs_base);
7644 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7645 tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
7646 }
7647 break;
7648 case 1: /* str */
7649 if (!s->pe || s->vm86)
7650 goto illegal_op;
7651 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ);
7652 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
7653 ot = OT_WORD;
7654 if (mod == 3)
7655 ot += s->dflag;
7656 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
7657 break;
7658 case 3: /* ltr */
7659 if (!s->pe || s->vm86)
7660 goto illegal_op;
7661 if (s->cpl != 0) {
7662 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7663 } else {
7664 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE);
7665 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7666 gen_jmp_im(pc_start - s->cs_base);
7667 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7668 tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
7669 }
7670 break;
7671 case 4: /* verr */
7672 case 5: /* verw */
7673 if (!s->pe || s->vm86)
7674 goto illegal_op;
7675 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7676 if (s->cc_op != CC_OP_DYNAMIC)
7677 gen_op_set_cc_op(s->cc_op);
7678 if (op == 4)
7679 tcg_gen_helper_0_1(helper_verr, cpu_T[0]);
7680 else
7681 tcg_gen_helper_0_1(helper_verw, cpu_T[0]);
7682 s->cc_op = CC_OP_EFLAGS;
7683 break;
7684 default:
7685 goto illegal_op;
7686 }
7687 break;
7688 case 0x101:
7689 modrm = ldub_code(s->pc++);
7690 mod = (modrm >> 6) & 3;
7691 op = (modrm >> 3) & 7;
7692 rm = modrm & 7;
7693
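 /* 0f 01 group: the reg field selects sgdt/sidt/lgdt/lidt/smsw/
    lmsw/invlpg; the mod == 3 encodings instead carry monitor/mwait,
    the SVM instructions and swapgs. */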
7694#ifdef VBOX
7695 /* 0f 01 f9: rdtscp */
7696 if (modrm == 0xf9)
7697 {
7698 if (!(s->cpuid_ext2_features & CPUID_EXT2_RDTSCP))
7699 goto illegal_op;
7700 gen_jmp_im(pc_start - s->cs_base);
7701 tcg_gen_helper_0_0(helper_rdtscp);
7702 break;
7703 }
7704#endif
7705 switch(op) {
7706 case 0: /* sgdt */
7707 if (mod == 3)
7708 goto illegal_op;
7709 gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ);
7710 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7711 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
7712 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7713 gen_add_A0_im(s, 2);
7714 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
7715 if (!s->dflag)
7716 gen_op_andl_T0_im(0xffffff);
7717 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7718 break;
7719 case 1:
7720 if (mod == 3) {
7721 switch (rm) {
7722 case 0: /* monitor */
7723 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7724 s->cpl != 0)
7725 goto illegal_op;
7726 if (s->cc_op != CC_OP_DYNAMIC)
7727 gen_op_set_cc_op(s->cc_op);
7728 gen_jmp_im(pc_start - s->cs_base);
7729#ifdef TARGET_X86_64
7730 if (s->aflag == 2) {
7731 gen_op_movq_A0_reg(R_EAX);
7732 } else
7733#endif
7734 {
7735 gen_op_movl_A0_reg(R_EAX);
7736 if (s->aflag == 0)
7737 gen_op_andl_A0_ffff();
7738 }
7739 gen_add_A0_ds_seg(s);
7740 tcg_gen_helper_0_1(helper_monitor, cpu_A0);
7741 break;
7742 case 1: /* mwait */
7743 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7744 s->cpl != 0)
7745 goto illegal_op;
7746 if (s->cc_op != CC_OP_DYNAMIC) {
7747 gen_op_set_cc_op(s->cc_op);
7748 s->cc_op = CC_OP_DYNAMIC;
7749 }
7750 gen_jmp_im(pc_start - s->cs_base);
7751 tcg_gen_helper_0_1(helper_mwait, tcg_const_i32(s->pc - pc_start));
7752 gen_eob(s);
7753 break;
7754 default:
7755 goto illegal_op;
7756 }
7757 } else { /* sidt */
7758 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ);
7759 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7760 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
7761 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7762 gen_add_A0_im(s, 2);
7763 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
7764 if (!s->dflag)
7765 gen_op_andl_T0_im(0xffffff);
7766 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7767 }
7768 break;
7769 case 2: /* lgdt */
7770 case 3: /* lidt */
7771 if (mod == 3) {
7772 if (s->cc_op != CC_OP_DYNAMIC)
7773 gen_op_set_cc_op(s->cc_op);
7774 gen_jmp_im(pc_start - s->cs_base);
7775 switch(rm) {
7776 case 0: /* VMRUN */
7777 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7778 goto illegal_op;
7779 if (s->cpl != 0) {
7780 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7781 break;
7782 } else {
7783 tcg_gen_helper_0_2(helper_vmrun,
7784 tcg_const_i32(s->aflag),
7785 tcg_const_i32(s->pc - pc_start));
7786 tcg_gen_exit_tb(0);
7787 s->is_jmp = 3;
7788 }
7789 break;
7790 case 1: /* VMMCALL */
7791 if (!(s->flags & HF_SVME_MASK))
7792 goto illegal_op;
7793 tcg_gen_helper_0_0(helper_vmmcall);
7794 break;
7795 case 2: /* VMLOAD */
7796 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7797 goto illegal_op;
7798 if (s->cpl != 0) {
7799 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7800 break;
7801 } else {
7802 tcg_gen_helper_0_1(helper_vmload,
7803 tcg_const_i32(s->aflag));
7804 }
7805 break;
7806 case 3: /* VMSAVE */
7807 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7808 goto illegal_op;
7809 if (s->cpl != 0) {
7810 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7811 break;
7812 } else {
7813 tcg_gen_helper_0_1(helper_vmsave,
7814 tcg_const_i32(s->aflag));
7815 }
7816 break;
7817 case 4: /* STGI */
7818 if ((!(s->flags & HF_SVME_MASK) &&
7819 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7820 !s->pe)
7821 goto illegal_op;
7822 if (s->cpl != 0) {
7823 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7824 break;
7825 } else {
7826 tcg_gen_helper_0_0(helper_stgi);
7827 }
7828 break;
7829 case 5: /* CLGI */
7830 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7831 goto illegal_op;
7832 if (s->cpl != 0) {
7833 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7834 break;
7835 } else {
7836 tcg_gen_helper_0_0(helper_clgi);
7837 }
7838 break;
7839 case 6: /* SKINIT */
7840 if ((!(s->flags & HF_SVME_MASK) &&
7841 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7842 !s->pe)
7843 goto illegal_op;
7844 tcg_gen_helper_0_0(helper_skinit);
7845 break;
7846 case 7: /* INVLPGA */
7847 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7848 goto illegal_op;
7849 if (s->cpl != 0) {
7850 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7851 break;
7852 } else {
7853 tcg_gen_helper_0_1(helper_invlpga,
7854 tcg_const_i32(s->aflag));
7855 }
7856 break;
7857 default:
7858 goto illegal_op;
7859 }
7860 } else if (s->cpl != 0) {
7861 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7862 } else {
7863 gen_svm_check_intercept(s, pc_start,
7864 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE);
7865 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7866 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
7867 gen_add_A0_im(s, 2);
7868 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7869 if (!s->dflag)
7870 gen_op_andl_T0_im(0xffffff);
7871 if (op == 2) {
7872 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
7873 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
7874 } else {
7875 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
7876 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
7877 }
7878 }
7879 break;
7880 case 4: /* smsw */
7881 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0);
7882 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
7883 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
7884 break;
7885 case 6: /* lmsw */
7886 if (s->cpl != 0) {
7887 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7888 } else {
7889 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
7890 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7891 tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
7892 gen_jmp_im(s->pc - s->cs_base);
7893 gen_eob(s);
7894 }
7895 break;
7896 case 7: /* invlpg */
7897 if (s->cpl != 0) {
7898 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7899 } else {
7900 if (mod == 3) {
7901#ifdef TARGET_X86_64
7902 if (CODE64(s) && rm == 0) {
7903 /* swapgs */
7904 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7905 tcg_gen_ld_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,kernelgsbase));
7906 tcg_gen_st_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7907 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,kernelgsbase));
7908 } else
7909#endif
7910 {
7911 goto illegal_op;
7912 }
7913 } else {
7914 if (s->cc_op != CC_OP_DYNAMIC)
7915 gen_op_set_cc_op(s->cc_op);
7916 gen_jmp_im(pc_start - s->cs_base);
7917 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7918 tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
7919 gen_jmp_im(s->pc - s->cs_base);
7920 gen_eob(s);
7921 }
7922 }
7923 break;
7924 default:
7925 goto illegal_op;
7926 }
7927 break;
7928 case 0x108: /* invd */
7929 case 0x109: /* wbinvd */
7930 if (s->cpl != 0) {
7931 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7932 } else {
7933 gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD);
7934 /* nothing to do */
7935 }
7936 break;
7937 case 0x63: /* arpl or movslS (x86_64) */
7938#ifdef TARGET_X86_64
7939 if (CODE64(s)) {
7940 int d_ot;
7941 /* d_ot is the size of the destination */
7942 d_ot = dflag + OT_WORD;
7943
7944 modrm = ldub_code(s->pc++);
7945 reg = ((modrm >> 3) & 7) | rex_r;
7946 mod = (modrm >> 6) & 3;
7947 rm = (modrm & 7) | REX_B(s);
7948
7949 if (mod == 3) {
7950 gen_op_mov_TN_reg(OT_LONG, 0, rm);
7951 /* sign extend */
7952 if (d_ot == OT_QUAD)
7953 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
7954 gen_op_mov_reg_T0(d_ot, reg);
7955 } else {
7956 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7957 if (d_ot == OT_QUAD) {
7958 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
7959 } else {
7960 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
7961 }
7962 gen_op_mov_reg_T0(d_ot, reg);
7963 }
7964 } else
7965#endif
7966 {
7967 int label1;
7968 TCGv t0, t1, t2, a0;
7969
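 /* arpl: if RPL(dest) < RPL(src), ZF is set and the destination's
    RPL is raised to the source's; local temps are needed because
    ordinary temps do not survive the brcond below. */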
7970 if (!s->pe || s->vm86)
7971 goto illegal_op;
7972
7973 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7974 t1 = tcg_temp_local_new(TCG_TYPE_TL);
7975 t2 = tcg_temp_local_new(TCG_TYPE_TL);
7976#ifdef VBOX
7977 a0 = tcg_temp_local_new(TCG_TYPE_TL);
7978#endif
7979 ot = OT_WORD;
7980 modrm = ldub_code(s->pc++);
7981 reg = (modrm >> 3) & 7;
7982 mod = (modrm >> 6) & 3;
7983 rm = modrm & 7;
7984 if (mod != 3) {
7985 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7986#ifdef VBOX
7987 tcg_gen_mov_tl(a0, cpu_A0);
7988#endif
7989 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
7990 } else {
7991 gen_op_mov_v_reg(ot, t0, rm);
7992 }
7993 gen_op_mov_v_reg(ot, t1, reg);
7994 tcg_gen_andi_tl(cpu_tmp0, t0, 3);
7995 tcg_gen_andi_tl(t1, t1, 3);
7996 tcg_gen_movi_tl(t2, 0);
7997 label1 = gen_new_label();
7998 tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, t1, label1);
7999 tcg_gen_andi_tl(t0, t0, ~3);
8000 tcg_gen_or_tl(t0, t0, t1);
8001 tcg_gen_movi_tl(t2, CC_Z);
8002 gen_set_label(label1);
8003 if (mod != 3) {
8004#ifdef VBOX
8005 /* cpu_A0 is not a local temp and doesn't survive the branch */
8006 gen_op_st_v(ot + s->mem_index, t0, a0);
8007#else
8008 gen_op_st_v(ot + s->mem_index, t0, cpu_A0);
8009#endif
8010 } else {
8011 gen_op_mov_reg_v(ot, rm, t0);
8012 }
8013 if (s->cc_op != CC_OP_DYNAMIC)
8014 gen_op_set_cc_op(s->cc_op);
8015 gen_compute_eflags(cpu_cc_src);
8016 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
8017 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t2);
8018 s->cc_op = CC_OP_EFLAGS;
8019 tcg_temp_free(t0);
8020 tcg_temp_free(t1);
8021 tcg_temp_free(t2);
8022#ifdef VBOX
8023 tcg_temp_free(a0);
8024#endif
8025 }
8026 break;
8027 case 0x102: /* lar */
8028 case 0x103: /* lsl */
8029 {
8030 int label1;
8031 TCGv t0;
8032 if (!s->pe || s->vm86)
8033 goto illegal_op;
8034 ot = dflag ? OT_LONG : OT_WORD;
8035 modrm = ldub_code(s->pc++);
8036 reg = ((modrm >> 3) & 7) | rex_r;
8037 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
8038 t0 = tcg_temp_local_new(TCG_TYPE_TL);
8039 if (s->cc_op != CC_OP_DYNAMIC)
8040 gen_op_set_cc_op(s->cc_op);
8041 if (b == 0x102)
8042 tcg_gen_helper_1_1(helper_lar, t0, cpu_T[0]);
8043 else
8044 tcg_gen_helper_1_1(helper_lsl, t0, cpu_T[0]);
8045 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
8046 label1 = gen_new_label();
8047 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
8048 gen_op_mov_reg_v(ot, reg, t0);
8049 gen_set_label(label1);
8050 s->cc_op = CC_OP_EFLAGS;
8051 tcg_temp_free(t0);
8052 }
8053 break;
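/* LAR/LSL validate the selector in a helper and report success through
   ZF; the conditional branch above skips the register writeback when ZF
   is clear. A minimal sketch of that gating (names hypothetical,
   reference only): */
#if 0
#include <stdint.h>

static uint32_t lsl_writeback_ref(uint32_t old_dst, int zf,
                                  uint32_t helper_result)
{
    /* the destination is updated only for a valid selector (ZF set) */
    return zf ? helper_result : old_dst;
}
#endif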
8054 case 0x118:
8055 modrm = ldub_code(s->pc++);
8056 mod = (modrm >> 6) & 3;
8057 op = (modrm >> 3) & 7;
8058 switch(op) {
8059 case 0: /* prefetchnta */
8060        case 1: /* prefetcht0 */
8061        case 2: /* prefetcht1 */
8062        case 3: /* prefetcht2 */
8063 if (mod == 3)
8064 goto illegal_op;
8065 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8066 /* nothing more to do */
8067 break;
8068 default: /* nop (multi byte) */
8069 gen_nop_modrm(s, modrm);
8070 break;
8071 }
8072 break;
8073 case 0x119 ... 0x11f: /* nop (multi byte) */
8074 modrm = ldub_code(s->pc++);
8075 gen_nop_modrm(s, modrm);
8076 break;
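/* Illustrative sketch of the ModRM decode pattern used throughout this
   switch (reference only; rex_r/rex_b are taken as single REX bits here,
   whereas the translator keeps them pre-shifted). mod selects register
   vs. memory form, reg carries the register operand or opcode extension,
   and rm names the other operand. */
#if 0
#include <stdint.h>

struct modrm_fields { int mod, reg, rm; };

static struct modrm_fields decode_modrm_ref(uint8_t modrm,
                                            int rex_r, int rex_b)
{
    struct modrm_fields f;
    f.mod = (modrm >> 6) & 3;                 /* 3 = register direct */
    f.reg = ((modrm >> 3) & 7) | (rex_r << 3);
    f.rm  = (modrm & 7)        | (rex_b << 3);
    return f;
}
#endif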
8077 case 0x120: /* mov reg, crN */
8078 case 0x122: /* mov crN, reg */
8079 if (s->cpl != 0) {
8080 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8081 } else {
8082 modrm = ldub_code(s->pc++);
8083#ifndef VBOX /* mod bits are always understood to be 11 (0xc0) regardless of actual content; see AMD manuals */
8084 if ((modrm & 0xc0) != 0xc0)
8085 goto illegal_op;
8086#endif
8087 rm = (modrm & 7) | REX_B(s);
8088 reg = ((modrm >> 3) & 7) | rex_r;
8089 if (CODE64(s))
8090 ot = OT_QUAD;
8091 else
8092 ot = OT_LONG;
8093 switch(reg) {
8094 case 0:
8095 case 2:
8096 case 3:
8097 case 4:
8098 case 8:
8099 if (s->cc_op != CC_OP_DYNAMIC)
8100 gen_op_set_cc_op(s->cc_op);
8101 gen_jmp_im(pc_start - s->cs_base);
8102 if (b & 2) {
8103 gen_op_mov_TN_reg(ot, 0, rm);
8104 tcg_gen_helper_0_2(helper_write_crN,
8105 tcg_const_i32(reg), cpu_T[0]);
8106 gen_jmp_im(s->pc - s->cs_base);
8107 gen_eob(s);
8108 } else {
8109 tcg_gen_helper_1_1(helper_read_crN,
8110 cpu_T[0], tcg_const_i32(reg));
8111 gen_op_mov_reg_T0(ot, rm);
8112 }
8113 break;
8114 default:
8115 goto illegal_op;
8116 }
8117 }
8118 break;
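/* A CR write can flip mode bits that are baked into the cached
   translation flags, which is why the generated code ends the block
   right after helper_write_crN. A sketch of the kind of bits involved
   (the CR0_*_MASK names are from cpu.h; the grouping macro itself is
   hypothetical): */
#if 0
#define CR0_TB_SENSITIVE_BITS \
    (CR0_PE_MASK | CR0_PG_MASK | CR0_TS_MASK | CR0_EM_MASK | CR0_MP_MASK)
#endif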
8119 case 0x121: /* mov reg, drN */
8120 case 0x123: /* mov drN, reg */
8121 if (s->cpl != 0) {
8122 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8123 } else {
8124 modrm = ldub_code(s->pc++);
8125#ifndef VBOX /* mod bits are always understood to be 11 (0xc0) regardless of actual content; see AMD manuals */
8126 if ((modrm & 0xc0) != 0xc0)
8127 goto illegal_op;
8128#endif
8129 rm = (modrm & 7) | REX_B(s);
8130 reg = ((modrm >> 3) & 7) | rex_r;
8131 if (CODE64(s))
8132 ot = OT_QUAD;
8133 else
8134 ot = OT_LONG;
8135 /* XXX: do it dynamically with CR4.DE bit */
8136 if (reg == 4 || reg == 5 || reg >= 8)
8137 goto illegal_op;
8138 if (b & 2) {
8139 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
8140 gen_op_mov_TN_reg(ot, 0, rm);
8141 tcg_gen_helper_0_2(helper_movl_drN_T0,
8142 tcg_const_i32(reg), cpu_T[0]);
8143 gen_jmp_im(s->pc - s->cs_base);
8144 gen_eob(s);
8145 } else {
8146 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
8147 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
8148 gen_op_mov_reg_T0(ot, rm);
8149 }
8150 }
8151 break;
8152 case 0x106: /* clts */
8153 if (s->cpl != 0) {
8154 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8155 } else {
8156 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
8157 tcg_gen_helper_0_0(helper_clts);
8158 /* abort block because static cpu state changed */
8159 gen_jmp_im(s->pc - s->cs_base);
8160 gen_eob(s);
8161 }
8162 break;
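/* A minimal sketch (reference only, not helper_clts itself) of what the
   clts helper does: clear CR0.TS so FPU/SSE instructions stop raising
   #NM. Since TS is mirrored in the cached translation flags, the block
   must end afterwards (gen_eob above). */
#if 0
static void clts_ref(unsigned long *cr0)
{
    *cr0 &= ~(1UL << 3); /* CR0.TS is bit 3 */
}
#endif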
8163 /* MMX/3DNow!/SSE/SSE2/SSE3/SSSE3/SSE4 support */
8164 case 0x1c3: /* MOVNTI reg, mem */
8165 if (!(s->cpuid_features & CPUID_SSE2))
8166 goto illegal_op;
8167 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
8168 modrm = ldub_code(s->pc++);
8169 mod = (modrm >> 6) & 3;
8170 if (mod == 3)
8171 goto illegal_op;
8172 reg = ((modrm >> 3) & 7) | rex_r;
8173 /* generate a generic store */
8174 gen_ldst_modrm(s, modrm, ot, reg, 1);
8175 break;
8176 case 0x1ae:
8177 modrm = ldub_code(s->pc++);
8178 mod = (modrm >> 6) & 3;
8179 op = (modrm >> 3) & 7;
8180 switch(op) {
8181 case 0: /* fxsave */
8182 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
8183 (s->flags & HF_EM_MASK))
8184 goto illegal_op;
8185 if (s->flags & HF_TS_MASK) {
8186 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8187 break;
8188 }
8189 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8190 if (s->cc_op != CC_OP_DYNAMIC)
8191 gen_op_set_cc_op(s->cc_op);
8192 gen_jmp_im(pc_start - s->cs_base);
8193 tcg_gen_helper_0_2(helper_fxsave,
8194 cpu_A0, tcg_const_i32((s->dflag == 2)));
8195 break;
8196 case 1: /* fxrstor */
8197 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
8198 (s->flags & HF_EM_MASK))
8199 goto illegal_op;
8200 if (s->flags & HF_TS_MASK) {
8201 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8202 break;
8203 }
8204 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8205 if (s->cc_op != CC_OP_DYNAMIC)
8206 gen_op_set_cc_op(s->cc_op);
8207 gen_jmp_im(pc_start - s->cs_base);
8208 tcg_gen_helper_0_2(helper_fxrstor,
8209 cpu_A0, tcg_const_i32((s->dflag == 2)));
8210 break;
8211 case 2: /* ldmxcsr */
8212 case 3: /* stmxcsr */
8213 if (s->flags & HF_TS_MASK) {
8214 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8215 break;
8216 }
8217 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
8218 mod == 3)
8219 goto illegal_op;
8220 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8221 if (op == 2) {
8222 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
8223 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
8224 } else {
8225 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
8226 gen_op_st_T0_A0(OT_LONG + s->mem_index);
8227 }
8228 break;
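/* Illustrative sketch only: LDMXCSR/STMXCSR move a 32-bit value between
   memory and MXCSR, which is exactly what the two load/store paths above
   generate. Real hardware additionally raises #GP when reserved MXCSR
   bits are set; that is not modelled here. Names are hypothetical. */
#if 0
#include <stdint.h>

static void ldmxcsr_ref(uint32_t *mxcsr, const uint32_t *mem) { *mxcsr = *mem; }
static void stmxcsr_ref(const uint32_t *mxcsr, uint32_t *mem) { *mem = *mxcsr; }
#endif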
8229 case 5: /* lfence */
8230 case 6: /* mfence */
8231 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
8232 goto illegal_op;
8233 break;
8234 case 7: /* sfence / clflush */
8235 if ((modrm & 0xc7) == 0xc0) {
8236 /* sfence */
8237 /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
8238 if (!(s->cpuid_features & CPUID_SSE))
8239 goto illegal_op;
8240 } else {
8241 /* clflush */
8242 if (!(s->cpuid_features & CPUID_CLFLUSH))
8243 goto illegal_op;
8244 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8245 }
8246 break;
8247 default:
8248 goto illegal_op;
8249 }
8250 break;
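/* lfence/mfence/sfence deliberately generate no code here: the
   recompiler performs guest memory accesses in program order. A sketch
   of what a reordering backend might emit instead (GCC builtin,
   assumption only): */
#if 0
static void mfence_ref(void) { __sync_synchronize(); }
#endif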
8251 case 0x10d: /* 3DNow! prefetch(w) */
8252 modrm = ldub_code(s->pc++);
8253 mod = (modrm >> 6) & 3;
8254 if (mod == 3)
8255 goto illegal_op;
8256 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8257 /* ignore for now */
8258 break;
8259 case 0x1aa: /* rsm */
8260 gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM);
8261 if (!(s->flags & HF_SMM_MASK))
8262 goto illegal_op;
8263 if (s->cc_op != CC_OP_DYNAMIC) {
8264 gen_op_set_cc_op(s->cc_op);
8265 s->cc_op = CC_OP_DYNAMIC;
8266 }
8267 gen_jmp_im(s->pc - s->cs_base);
8268 tcg_gen_helper_0_0(helper_rsm);
8269 gen_eob(s);
8270 break;
8271 case 0x1b8: /* SSE4.2 popcnt */
8272 if ((prefixes & (PREFIX_REPZ | PREFIX_LOCK | PREFIX_REPNZ)) !=
8273 PREFIX_REPZ)
8274 goto illegal_op;
8275 if (!(s->cpuid_ext_features & CPUID_EXT_POPCNT))
8276 goto illegal_op;
8277
8278 modrm = ldub_code(s->pc++);
8279        reg = ((modrm >> 3) & 7) | rex_r; /* REX.R extends the destination register */
8280
8281 if (s->prefix & PREFIX_DATA)
8282 ot = OT_WORD;
8283 else if (s->dflag != 2)
8284 ot = OT_LONG;
8285 else
8286 ot = OT_QUAD;
8287
8288 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
8289 tcg_gen_helper_1_2(helper_popcnt,
8290 cpu_T[0], cpu_T[0], tcg_const_i32(ot));
8291 gen_op_mov_reg_T0(ot, reg);
8292
8293 s->cc_op = CC_OP_EFLAGS;
8294 break;
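/* Illustrative reference model (a sketch, not helper_popcnt itself) of
   the computation: count the set bits of the operand truncated to the
   operand size; the flags are then recomputed, hence CC_OP_EFLAGS above.
   The name and the width-in-bits parameter are hypothetical. */
#if 0
#include <stdint.h>

static int popcnt_ref(uint64_t val, int width_bits)
{
    int count = 0;
    if (width_bits < 64)
        val &= (1ULL << width_bits) - 1;  /* truncate to operand size */
    while (val) {
        val &= val - 1;                   /* clear the lowest set bit */
        count++;
    }
    return count;
}
#endif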
8295 case 0x10e ... 0x10f:
8296 /* 3DNow! instructions, ignore prefixes */
8297        s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA); /* fall through */
8298 case 0x110 ... 0x117:
8299 case 0x128 ... 0x12f:
8300 case 0x138 ... 0x13a:
8301 case 0x150 ... 0x177:
8302 case 0x17c ... 0x17f:
8303 case 0x1c2:
8304 case 0x1c4 ... 0x1c6:
8305 case 0x1d0 ... 0x1fe:
8306 gen_sse(s, b, pc_start, rex_r);
8307 break;
8308 default:
8309 goto illegal_op;
8310 }
8311 /* lock generation */
8312 if (s->prefix & PREFIX_LOCK)
8313 tcg_gen_helper_0_0(helper_unlock);
8314 return s->pc;
8315 illegal_op:
8316 if (s->prefix & PREFIX_LOCK)
8317 tcg_gen_helper_0_0(helper_unlock);
8318 /* XXX: ensure that no lock was generated */
8319 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
8320 return s->pc;
8321}
8322
8323void optimize_flags_init(void)
8324{
8325#if TCG_TARGET_REG_BITS == 32
8326 assert(sizeof(CCTable) == (1 << 3));
8327#else
8328 assert(sizeof(CCTable) == (1 << 4));
8329#endif
8330 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
8331 cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
8332 TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
8333 cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
8334 TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
8335 cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
8336 TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
8337 cpu_cc_tmp = tcg_global_mem_new(TCG_TYPE_TL,
8338 TCG_AREG0, offsetof(CPUState, cc_tmp), "cc_tmp");
8339
8340 /* register helpers */
8341
8342#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
8343#include "helper.h"
8344}
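/* The DEF_HELPER redefinition above is an X-macro: helper.h lists every
   helper once, and each includer redefines DEF_HELPER to expand that
   list differently. A self-contained miniature of the pattern (all names
   hypothetical; a sketch, not the actual helper list): */
#if 0
#include <stdio.h>

#define HELPERS \
    DEF_HELPER(int,  helper_add1, (int x)) \
    DEF_HELPER(void, helper_nop,  (void))

#define DEF_HELPER(ret, name, params) static ret name params;
HELPERS  /* first expansion: prototypes */
#undef DEF_HELPER

static int  helper_add1(int x) { return x + 1; }
static void helper_nop(void)   { }

static void register_all(void)
{
#define DEF_HELPER(ret, name, params) printf("registering %s\n", #name);
    HELPERS  /* second expansion: one registration per helper */
#undef DEF_HELPER
}
#endif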
8345
8346/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
8347 basic block 'tb'. If search_pc is TRUE, also generate PC
8348 information for each intermediate instruction. */
8349#ifndef VBOX
8350static inline void gen_intermediate_code_internal(CPUState *env,
8351#else /* VBOX */
8352DECLINLINE(void) gen_intermediate_code_internal(CPUState *env,
8353#endif /* VBOX */
8354 TranslationBlock *tb,
8355 int search_pc)
8356{
8357 DisasContext dc1, *dc = &dc1;
8358 target_ulong pc_ptr;
8359 uint16_t *gen_opc_end;
8360 int j, lj, cflags;
8361 uint64_t flags;
8362 target_ulong pc_start;
8363 target_ulong cs_base;
8364 int num_insns;
8365 int max_insns;
8366
8367 /* generate intermediate code */
8368 pc_start = tb->pc;
8369 cs_base = tb->cs_base;
8370 flags = tb->flags;
8371 cflags = tb->cflags;
8372
8373 dc->pe = (flags >> HF_PE_SHIFT) & 1;
8374 dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
8375 dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
8376 dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
8377 dc->f_st = 0;
8378 dc->vm86 = (flags >> VM_SHIFT) & 1;
8379#ifdef VBOX
8380 dc->vme = !!(env->cr[4] & CR4_VME_MASK);
8381 dc->pvi = !!(env->cr[4] & CR4_PVI_MASK);
8382#ifdef VBOX_WITH_CALL_RECORD
8383 if ( !(env->state & CPU_RAW_RING0)
8384 && (env->cr[0] & CR0_PG_MASK)
8385 && !(env->eflags & X86_EFL_IF)
8386 && dc->code32)
8387 dc->record_call = 1;
8388 else
8389 dc->record_call = 0;
8390#endif
8391#endif
8392 dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
8393 dc->iopl = (flags >> IOPL_SHIFT) & 3;
8394 dc->tf = (flags >> TF_SHIFT) & 1;
8395 dc->singlestep_enabled = env->singlestep_enabled;
8396 dc->cc_op = CC_OP_DYNAMIC;
8397 dc->cs_base = cs_base;
8398 dc->tb = tb;
8399 dc->popl_esp_hack = 0;
8400 /* select memory access functions */
8401 dc->mem_index = 0;
8402 if (flags & HF_SOFTMMU_MASK) {
8403 if (dc->cpl == 3)
8404 dc->mem_index = 2 * 4;
8405 else
8406 dc->mem_index = 1 * 4;
8407 }
8408 dc->cpuid_features = env->cpuid_features;
8409 dc->cpuid_ext_features = env->cpuid_ext_features;
8410 dc->cpuid_ext2_features = env->cpuid_ext2_features;
8411 dc->cpuid_ext3_features = env->cpuid_ext3_features;
8412#ifdef TARGET_X86_64
8413 dc->lma = (flags >> HF_LMA_SHIFT) & 1;
8414 dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
8415#endif
8416 dc->flags = flags;
8417 dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
8418 (flags & HF_INHIBIT_IRQ_MASK)
8419#ifndef CONFIG_SOFTMMU
8420 || (flags & HF_SOFTMMU_MASK)
8421#endif
8422 );
8423#if 0
8424 /* check addseg logic */
8425 if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
8426 printf("ERROR addseg\n");
8427#endif
8428
8429 cpu_T[0] = tcg_temp_new(TCG_TYPE_TL);
8430 cpu_T[1] = tcg_temp_new(TCG_TYPE_TL);
8431 cpu_A0 = tcg_temp_new(TCG_TYPE_TL);
8432 cpu_T3 = tcg_temp_new(TCG_TYPE_TL);
8433
8434 cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
8435 cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
8436 cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
8437 cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
8438 cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
8439 cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
8440 cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
8441 cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
8442 cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);
8443
8444 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
8445
8446 dc->is_jmp = DISAS_NEXT;
8447 pc_ptr = pc_start;
8448 lj = -1;
8449 num_insns = 0;
8450 max_insns = tb->cflags & CF_COUNT_MASK;
8451 if (max_insns == 0)
8452 max_insns = CF_COUNT_MASK;
8453
8454 gen_icount_start();
8455 for(;;) {
8456 if (env->nb_breakpoints > 0) {
8457 for(j = 0; j < env->nb_breakpoints; j++) {
8458 if (env->breakpoints[j] == pc_ptr) {
8459 gen_debug(dc, pc_ptr - dc->cs_base);
8460 break;
8461 }
8462 }
8463 }
8464 if (search_pc) {
8465 j = gen_opc_ptr - gen_opc_buf;
8466 if (lj < j) {
8467 lj++;
8468 while (lj < j)
8469 gen_opc_instr_start[lj++] = 0;
8470 }
8471 gen_opc_pc[lj] = pc_ptr;
8472 gen_opc_cc_op[lj] = dc->cc_op;
8473 gen_opc_instr_start[lj] = 1;
8474 gen_opc_icount[lj] = num_insns;
8475 }
8476 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
8477 gen_io_start();
8478
8479 pc_ptr = disas_insn(dc, pc_ptr);
8480 num_insns++;
8481 /* stop translation if indicated */
8482 if (dc->is_jmp)
8483 break;
8484#ifdef VBOX
8485#ifdef DEBUG
8486/*
8487 if(cpu_check_code_raw(env, pc_ptr, env->hflags | (env->eflags & (IOPL_MASK | TF_MASK | VM_MASK))) == ERROR_SUCCESS)
8488 {
8489 //should never happen as the jump to the patch code terminates the translation block
8490 dprintf(("QEmu is about to execute instructions in our patch block at %08X!!\n", pc_ptr));
8491 }
8492*/
8493#endif
8494 if (env->state & CPU_EMULATE_SINGLE_INSTR)
8495 {
8496 env->state &= ~CPU_EMULATE_SINGLE_INSTR;
8497 gen_jmp_im(pc_ptr - dc->cs_base);
8498 gen_eob(dc);
8499 break;
8500 }
8501#endif /* VBOX */
8502
8503        /* in single-step mode, we generate only one instruction and
8504           raise an exception */
8505        /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
8506           the flag and abort the translation to give the irqs a
8507           chance to happen */
8508 if (dc->tf || dc->singlestep_enabled ||
8509 (flags & HF_INHIBIT_IRQ_MASK)) {
8510 gen_jmp_im(pc_ptr - dc->cs_base);
8511 gen_eob(dc);
8512 break;
8513 }
8514 /* if too long translation, stop generation too */
8515 if (gen_opc_ptr >= gen_opc_end ||
8516 (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32) ||
8517 num_insns >= max_insns) {
8518 gen_jmp_im(pc_ptr - dc->cs_base);
8519 gen_eob(dc);
8520 break;
8521 }
8522 }
8523 if (tb->cflags & CF_LAST_IO)
8524 gen_io_end();
8525 gen_icount_end(tb, num_insns);
8526 *gen_opc_ptr = INDEX_op_end;
8527    /* make sure the last values are filled in */
8528 if (search_pc) {
8529 j = gen_opc_ptr - gen_opc_buf;
8530 lj++;
8531 while (lj <= j)
8532 gen_opc_instr_start[lj++] = 0;
8533 }
8534
8535#ifdef DEBUG_DISAS
8536 if (loglevel & CPU_LOG_TB_CPU) {
8537 cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
8538 }
8539 if (loglevel & CPU_LOG_TB_IN_ASM) {
8540 int disas_flags;
8541 fprintf(logfile, "----------------\n");
8542 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
8543#ifdef TARGET_X86_64
8544 if (dc->code64)
8545 disas_flags = 2;
8546 else
8547#endif
8548 disas_flags = !dc->code32;
8549 target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
8550 fprintf(logfile, "\n");
8551 }
8552#endif
8553
8554 if (!search_pc) {
8555 tb->size = pc_ptr - pc_start;
8556 tb->icount = num_insns;
8557 }
8558}
8559
8560void gen_intermediate_code(CPUState *env, TranslationBlock *tb)
8561{
8562 gen_intermediate_code_internal(env, tb, 0);
8563}
8564
8565void gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
8566{
8567 gen_intermediate_code_internal(env, tb, 1);
8568}
8569
8570void gen_pc_load(CPUState *env, TranslationBlock *tb,
8571 unsigned long searched_pc, int pc_pos, void *puc)
8572{
8573 int cc_op;
8574#ifdef DEBUG_DISAS
8575 if (loglevel & CPU_LOG_TB_OP) {
8576 int i;
8577 fprintf(logfile, "RESTORE:\n");
8578 for(i = 0;i <= pc_pos; i++) {
8579 if (gen_opc_instr_start[i]) {
8580 fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
8581 }
8582 }
8583 fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
8584 searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
8585 (uint32_t)tb->cs_base);
8586 }
8587#endif
8588 env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
8589 cc_op = gen_opc_cc_op[pc_pos];
8590 if (cc_op != CC_OP_DYNAMIC)
8591 env->cc_op = cc_op;
8592}