VirtualBox

source: vbox/trunk/src/recompiler/target-i386/translate.c@ 32050

Last change on this file since 32050 was 30413, checked in by vboxsync, 15 years ago

REM: incorporated git b16f827b from upstream: target-i386: fix SIB decoding with index = 4

  • Property svn:eol-style set to native
File size: 276.8 KB
Line 
1/*
2 * i386 translation
3 *
4 * Copyright (c) 2003 Fabrice Bellard
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
20
21/*
22 * Sun LGPL Disclaimer: For the avoidance of doubt, except that if any license choice
23 * other than GPL or LGPL is available it will apply instead, Sun elects to use only
24 * the Lesser General Public License version 2.1 (LGPLv2) at this time for any software where
25 * a choice of LGPL license versions is made available with the language indicating
26 * that LGPLv2 or any later version may be used, or where a choice of which version
27 * of the LGPL is applied is otherwise unspecified.
28 */
29#include <stdarg.h>
30#include <stdlib.h>
31#include <stdio.h>
32#include <string.h>
33#ifndef VBOX
34#include <inttypes.h>
35#include <signal.h>
36#include <assert.h>
37#endif /* !VBOX */
38
39#include "cpu.h"
40#include "exec-all.h"
41#include "disas.h"
42#include "helper.h"
43#include "tcg-op.h"
44
45#define PREFIX_REPZ 0x01
46#define PREFIX_REPNZ 0x02
47#define PREFIX_LOCK 0x04
48#define PREFIX_DATA 0x08
49#define PREFIX_ADR 0x10
50
51#ifdef TARGET_X86_64
52#define X86_64_ONLY(x) x
53#ifndef VBOX
54#define X86_64_DEF(x...) x
55#else
56#define X86_64_DEF(x...) x
57#endif
58#define CODE64(s) ((s)->code64)
59#define REX_X(s) ((s)->rex_x)
60#define REX_B(s) ((s)->rex_b)
61/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
62#if 1
63#define BUGGY_64(x) NULL
64#endif
65#else
66#define X86_64_ONLY(x) NULL
67#ifndef VBOX
68#define X86_64_DEF(x...)
69#else
70#define X86_64_DEF(x)
71#endif
72#define CODE64(s) 0
73#define REX_X(s) 0
74#define REX_B(s) 0
75#endif
76
77//#define MACRO_TEST 1
78
79/* global register indexes */
80static TCGv cpu_env, cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp;
81/* local temps */
82static TCGv cpu_T[2], cpu_T3;
83/* local register indexes (only used inside old micro ops) */
84static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
85static TCGv cpu_tmp5, cpu_tmp6;
86
87#include "gen-icount.h"
88
89#ifdef TARGET_X86_64
90static int x86_64_hregs;
91#endif
92
93#ifdef VBOX
94
95/* Special/override code readers to hide patched code. */
96
/* Read one code byte at 'pc'.  REM gets first shot via remR3GetOpcode() so
   that guest code bytes patched by the VMM are hidden from the translator;
   only if it declines do we fall back to the normal ldub_code() (the macro
   below is not yet defined at this point, so this calls the real reader). */
uint8_t ldub_code_raw(target_ulong pc)
{
    uint8_t b;

    if (!remR3GetOpcode(cpu_single_env, pc, &b))
        b = ldub_code(pc);
    return b;
}
#define ldub_code(a) ldub_code_raw(a)

/* 16-bit code read, assembled little-endian from single-byte reads so the
   patch-hiding logic above applies to every byte. */
uint16_t lduw_code_raw(target_ulong pc)
{
    return (ldub_code(pc+1) << 8) | ldub_code(pc);
}
#define lduw_code(a) lduw_code_raw(a)


/* 32-bit code read, same byte-wise little-endian scheme as above. */
uint32_t ldl_code_raw(target_ulong pc)
{
    return (ldub_code(pc+3) << 24) | (ldub_code(pc+2) << 16) | (ldub_code(pc+1) << 8) | ldub_code(pc);
}
#define ldl_code(a) ldl_code_raw(a)
119
120#endif /* VBOX */
121
122
/* Per-instruction / per-translation-block state of the x86 front end. */
typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;   /* PREFIX_* bits seen on the current insn */
    int aflag, dflag; /* address/operand size attribute: 0=16, 1=32, 2=64 bit
                         (see the aflag tests in the gen_string_* helpers) */
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b; /* REX prefix index/base register extension bits */
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation (lazy flags state) */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
#ifdef VBOX
    int vme;    /* CR4.VME */
    int pvi;    /* CR4.PVI */
    int record_call;    /* record calls for CSAM or not? */
#endif
    int cpl;    /* current privilege level */
    int iopl;   /* I/O privilege level (compared against cpl in gen_check_io) */
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;      /* CPUID feature bits gating insn decode */
    int cpuid_ext_features;
    int cpuid_ext2_features;
    int cpuid_ext3_features;
} DisasContext;
165
166static void gen_eob(DisasContext *s);
167static void gen_jmp(DisasContext *s, target_ulong eip);
168static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
169
#ifdef VBOX
/* Forward declaration (definition below).  '(void)' makes this a proper
   prototype; the old empty parentheses declared an unspecified parameter
   list (obsolescent in C90/C99). */
static void gen_check_external_event(void);
#endif
173
/* i386 arith/logic operations (group-1 ALU ops, in /reg encoding order) */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};

/* i386 shift ops (group-2, in /reg encoding order) */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7,
};

/* conditional-jump condition kinds (see gen_setcc_slow_T0) */
enum {
    JCC_O,
    JCC_B,
    JCC_Z,
    JCC_BE,
    JCC_S,
    JCC_P,
    JCC_L,
    JCC_LE,
};

/* operand size */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};

enum {
    /* I386 int registers */
    OR_EAX, /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16, /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};
232
/* ---- Trivial immediate/move/mask micro-op helpers ----
   Every helper below comes in two spellings: plain "static inline" for the
   upstream QEMU build and DECLINLINE() for the VBox build. */

#ifndef VBOX
static inline void gen_op_movl_T0_0(void)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_0(void)
#endif /* VBOX */
{
    /* T0 = 0 */
    tcg_gen_movi_tl(cpu_T[0], 0);
}

#ifndef VBOX
static inline void gen_op_movl_T0_im(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_im(int32_t val)
#endif /* VBOX */
{
    /* T0 = val (sign-extended 32-bit immediate) */
    tcg_gen_movi_tl(cpu_T[0], val);
}

#ifndef VBOX
static inline void gen_op_movl_T0_imu(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_imu(uint32_t val)
#endif /* VBOX */
{
    /* T0 = val (unsigned 32-bit immediate) */
    tcg_gen_movi_tl(cpu_T[0], val);
}

#ifndef VBOX
static inline void gen_op_movl_T1_im(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T1_im(int32_t val)
#endif /* VBOX */
{
    /* T1 = val (signed) */
    tcg_gen_movi_tl(cpu_T[1], val);
}

#ifndef VBOX
static inline void gen_op_movl_T1_imu(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T1_imu(uint32_t val)
#endif /* VBOX */
{
    /* T1 = val (unsigned) */
    tcg_gen_movi_tl(cpu_T[1], val);
}

#ifndef VBOX
static inline void gen_op_movl_A0_im(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_A0_im(uint32_t val)
#endif /* VBOX */
{
    /* A0 = val (32-bit address immediate) */
    tcg_gen_movi_tl(cpu_A0, val);
}

#ifdef TARGET_X86_64
#ifndef VBOX
static inline void gen_op_movq_A0_im(int64_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movq_A0_im(int64_t val)
#endif /* VBOX */
{
    /* A0 = val (64-bit address immediate) */
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif

#ifndef VBOX
static inline void gen_movtl_T0_im(target_ulong val)
#else /* VBOX */
DECLINLINE(void) gen_movtl_T0_im(target_ulong val)
#endif /* VBOX */
{
    /* T0 = val (full target-width immediate) */
    tcg_gen_movi_tl(cpu_T[0], val);
}

#ifndef VBOX
static inline void gen_movtl_T1_im(target_ulong val)
#else /* VBOX */
DECLINLINE(void) gen_movtl_T1_im(target_ulong val)
#endif /* VBOX */
{
    /* T1 = val (full target-width immediate) */
    tcg_gen_movi_tl(cpu_T[1], val);
}

#ifndef VBOX
static inline void gen_op_andl_T0_ffff(void)
#else /* VBOX */
DECLINLINE(void) gen_op_andl_T0_ffff(void)
#endif /* VBOX */
{
    /* T0 &= 0xffff (truncate to 16 bits) */
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}

#ifndef VBOX
static inline void gen_op_andl_T0_im(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_andl_T0_im(uint32_t val)
#endif /* VBOX */
{
    /* T0 &= val */
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}

#ifndef VBOX
static inline void gen_op_movl_T0_T1(void)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_T1(void)
#endif /* VBOX */
{
    /* T0 = T1 */
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}

#ifndef VBOX
static inline void gen_op_andl_A0_ffff(void)
#else /* VBOX */
DECLINLINE(void) gen_op_andl_A0_ffff(void)
#endif /* VBOX */
{
    /* A0 &= 0xffff (16-bit address wrap) */
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}
351
#ifdef TARGET_X86_64

#define NB_OP_SIZES 4   /* byte/word/long/quad */

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3   /* byte/word/long */

#endif /* !TARGET_X86_64 */

/* Byte offsets into a target_ulong register slot (CPUState.regs[]) used to
   access the low byte (B), legacy high byte AH..BH (H), word (W), low
   dword (L) and high dword (LH) sub-registers; host-endian dependent. */
#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif
375
/* Store TCG value 't0' into guest register 'reg' with operand size 'ot'.
   Partial-width writes preserve the untouched register bytes, except that
   32-bit writes on x86_64 zero the high half (architectural behaviour). */
#ifndef VBOX
static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_v(int ot, int reg, TCGv t0)
#endif /* VBOX */
{
    switch(ot) {
    case OT_BYTE:
        /* regs 0..3 (or any reg with a REX prefix / reg >= 8) use the low
           byte; otherwise 4..7 map to the legacy high bytes AH..BH. */
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
412
/* Store T0 into guest register 'reg' (size 'ot'). */
#ifndef VBOX
static inline void gen_op_mov_reg_T0(int ot, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_T0(int ot, int reg)
#endif /* VBOX */
{
    gen_op_mov_reg_v(ot, reg, cpu_T[0]);
}

/* Store T1 into guest register 'reg' (size 'ot'). */
#ifndef VBOX
static inline void gen_op_mov_reg_T1(int ot, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_T1(int ot, int reg)
#endif /* VBOX */
{
    gen_op_mov_reg_v(ot, reg, cpu_T[1]);
}

/* Store A0 into guest register 'reg'; 'size' is an address-size code
   (0=16-bit, 1=32-bit, 2=64-bit), not an OT_* value.  As with data writes,
   32-bit stores on x86_64 zero the high half of the register. */
#ifndef VBOX
static inline void gen_op_mov_reg_A0(int size, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_A0(int size, int reg)
#endif /* VBOX */
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
460
/* Load guest register 'reg' (size 'ot') into TCG value 't0'.  Byte reads
   use the same low-byte vs legacy-high-byte (AH..BH) rule as the stores;
   all other sizes read the full target_ulong slot.  The VBox build expands
   the default case inline instead of jumping to the shared label. */
#ifndef VBOX
static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_v_reg(int ot, TCGv t0, int reg)
#endif /* VBOX */
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
#ifndef VBOX
            goto std_case;
#else
            tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
#endif
        } else {
            tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
#ifndef VBOX
    std_case:
#endif
        tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}

/* Load guest register 'reg' into T0 or T1 (selected by 't_index'). */
#ifndef VBOX
static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_TN_reg(int ot, int t_index, int reg)
#endif /* VBOX */
{
    gen_op_mov_v_reg(ot, cpu_T[t_index], reg);
}
496
/* A0 = low 32 bits of register 'reg' (zero-extended). */
#ifndef VBOX
static inline void gen_op_movl_A0_reg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_A0_reg(int reg)
#endif /* VBOX */
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}

/* 32-bit address add: A0 += val, truncated to 32 bits on 64-bit targets. */
#ifndef VBOX
static inline void gen_op_addl_A0_im(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_A0_im(int32_t val)
#endif /* VBOX */
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
/* 64-bit address add: A0 += val, no truncation. */
#ifndef VBOX
static inline void gen_op_addq_A0_im(int64_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_addq_A0_im(int64_t val)
#endif /* VBOX */
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif

/* A0 += val using the address width implied by the code segment. */
static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}
538
/* T0 += T1 */
#ifndef VBOX
static inline void gen_op_addl_T0_T1(void)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_T0_T1(void)
#endif /* VBOX */
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

/* env->eip = T0 (indirect jump target already computed in T0). */
#ifndef VBOX
static inline void gen_op_jmp_T0(void)
#else /* VBOX */
DECLINLINE(void) gen_op_jmp_T0(void)
#endif /* VBOX */
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}
556
/* regs[reg] += val with address-size code 'size' (0=16, 1=32, 2=64 bit).
   16-bit adds write back only the word part (upper bits preserved);
   32-bit adds on 64-bit targets truncate the result to 32 bits. */
#ifndef VBOX
static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_add_reg_im(int size, int reg, int32_t val)
#endif /* VBOX */
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}

/* regs[reg] += T0, same size semantics as gen_op_add_reg_im. */
#ifndef VBOX
static inline void gen_op_add_reg_T0(int size, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_add_reg_T0(int size, int reg)
#endif /* VBOX */
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}
616
/* Record the current lazy-flags operation: cc_op = val. */
#ifndef VBOX
static inline void gen_op_set_cc_op(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_set_cc_op(int32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_i32(cpu_cc_op, val);
}

/* A0 += regs[reg] << shift (SIB-style scaled index), truncated to
   32 bits on 64-bit targets. */
#ifndef VBOX
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_A0_reg_sN(int shift, int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
#ifdef VBOX
/* VBox-only hook called before segment-base address computations: when
   FORCE_SEGMENT_SYNC is defined, emits a call to helper_sync_seg to pull in
   possibly stale segment state.  'keepA0' preserves cpu_A0 across the call
   in the disabled fine-grained variant.  Normally compiles to nothing. */
DECLINLINE(void) gen_op_seg_check(int reg, bool keepA0)
{
    /* It seems segments don't get out of sync - if they in fact do, enable the code below. */
#ifdef FORCE_SEGMENT_SYNC
#if 1
    TCGv t0;

    /* Considering poor quality of TCG optimizer - better call directly */
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    tcg_gen_movi_tl(t0, reg);
    tcg_gen_helper_0_1(helper_sync_seg, t0);
    tcg_temp_free(t0);
#else
    /* Our segments could be outdated, thus check for newselector field to see if update really needed */
    int skip_label;
    TCGv t0, a0;

    /* For other segments this check is waste of time, and also TCG is unable to cope with this code,
       for data/stack segments, as expects alive cpu_T[0] */
    if (reg != R_GS)
        return;

    if (keepA0)
    {
        /* we need to store old cpu_A0 */
        a0 = tcg_temp_local_new(TCG_TYPE_TL);
        tcg_gen_mov_tl(a0, cpu_A0);
    }

    skip_label = gen_new_label();
    t0 = tcg_temp_local_new(TCG_TYPE_TL);

    tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, segs[reg].newselector) + REG_L_OFFSET);
    tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, skip_label);
    tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, eflags) + REG_L_OFFSET);
    tcg_gen_andi_tl(t0, t0, VM_MASK);
    tcg_gen_brcondi_i32(TCG_COND_NE, t0, 0, skip_label);
    tcg_gen_movi_tl(t0, reg);

    tcg_gen_helper_0_1(helper_sync_seg, t0);

    tcg_temp_free(t0);

    gen_set_label(skip_label);
    if (keepA0)
    {
        tcg_gen_mov_tl(cpu_A0, a0);
        tcg_temp_free(a0);
    }
#endif /* 0 */
#endif /* FORCE_SEGMENT_SYNC */
}
#endif
694
/* A0 = 32-bit segment base of 'reg'. */
#ifndef VBOX
static inline void gen_op_movl_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_A0_seg(int reg)
#endif /* VBOX */
{
#ifdef VBOX
    gen_op_seg_check(reg, false);
#endif
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}

/* A0 += segment base of 'reg', result truncated to 32 bits on 64-bit targets. */
#ifndef VBOX
static inline void gen_op_addl_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_A0_seg(int reg)
#endif /* VBOX */
{
#ifdef VBOX
    gen_op_seg_check(reg, true);
#endif
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
/* A0 = full 64-bit segment base of 'reg' (FS/GS in long mode). */
#ifndef VBOX
static inline void gen_op_movq_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movq_A0_seg(int reg)
#endif /* VBOX */
{
#ifdef VBOX
    gen_op_seg_check(reg, false);
#endif
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
}

/* A0 += full 64-bit segment base of 'reg', no truncation. */
#ifndef VBOX
static inline void gen_op_addq_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addq_A0_seg(int reg)
#endif /* VBOX */
{
#ifdef VBOX
    gen_op_seg_check(reg, true);
#endif
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}

/* A0 = full 64-bit value of register 'reg'. */
#ifndef VBOX
static inline void gen_op_movq_A0_reg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movq_A0_reg(int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
}

/* A0 += regs[reg] << shift (64-bit scaled index, no truncation). */
#ifndef VBOX
static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addq_A0_reg_sN(int shift, int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
#endif
770
/* Sign-extending load T0 = *(A0).  'idx' encodes both the operand size
   (idx & 3: 0=8, 1=16, 2+=32 bit) and the memory index ((idx >> 2) - 1). */
#ifndef VBOX
static inline void gen_op_lds_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_lds_T0_A0(int idx)
#endif /* VBOX */
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 2:
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

/* Zero-extending load t0 = *(a0); same idx encoding, with idx&3 == 3
   selecting a 64-bit load. */
#ifndef VBOX
static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0)
#else /* VBOX */
DECLINLINE(void) gen_op_ld_v(int idx, TCGv t0, TCGv a0)
#endif /* VBOX */
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(t0, a0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(t0, a0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(t0, a0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(t0, a0, mem_index);
        break;
    }
}
815
/* XXX: always use ldu or lds */
/* T0 = *(A0), zero-extended (see gen_op_ld_v for the idx encoding). */
#ifndef VBOX
static inline void gen_op_ld_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_ld_T0_A0(int idx)
#endif /* VBOX */
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}

/* Identical to gen_op_ld_T0_A0 at present (both go through the unsigned
   gen_op_ld_v) - kept separate per the XXX note above. */
#ifndef VBOX
static inline void gen_op_ldu_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_ldu_T0_A0(int idx)
#endif /* VBOX */
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}

/* T1 = *(A0), zero-extended. */
#ifndef VBOX
static inline void gen_op_ld_T1_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_ld_T1_A0(int idx)
#endif /* VBOX */
{
    gen_op_ld_v(idx, cpu_T[1], cpu_A0);
}

/* Store *(a0) = t0; idx encodes size and memory index as for the loads. */
#ifndef VBOX
static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0)
#else /* VBOX */
DECLINLINE(void) gen_op_st_v(int idx, TCGv t0, TCGv a0)
#endif /* VBOX */
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(t0, a0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(t0, a0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(t0, a0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(t0, a0, mem_index);
        break;
    }
}

/* *(A0) = T0 */
#ifndef VBOX
static inline void gen_op_st_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_st_T0_A0(int idx)
#endif /* VBOX */
{
    gen_op_st_v(idx, cpu_T[0], cpu_A0);
}

/* *(A0) = T1 */
#ifndef VBOX
static inline void gen_op_st_T1_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_st_T1_A0(int idx)
#endif /* VBOX */
{
    gen_op_st_v(idx, cpu_T[1], cpu_A0);
}
885
#ifdef VBOX
/* Emit a call to helper_check_external_event() so pending external events
   (hard interrupts, timer, DMA, exit requests - see the mask in the
   disabled branch) get serviced.  Definitions now use '(void)' prototypes
   instead of the obsolescent empty-parentheses parameter list. */
static void gen_check_external_event(void)
{
#if 1
    /** @todo: once TCG codegen improves, we may want to use version
        from else version */
    tcg_gen_helper_0_0(helper_check_external_event);
#else
    /* Disabled fast path: test env->interrupt_request inline and only call
       the helper when one of the external-event bits is set. */
    int skip_label;
    TCGv t0;

    skip_label = gen_new_label();
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    /* t0 = cpu_tmp0; */

    tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, interrupt_request));
    /* Keep in sync with helper_check_external_event() */
    tcg_gen_andi_tl(t0, t0,
                    CPU_INTERRUPT_EXTERNAL_EXIT
                    | CPU_INTERRUPT_EXTERNAL_TIMER
                    | CPU_INTERRUPT_EXTERNAL_DMA
                    | CPU_INTERRUPT_EXTERNAL_HARD);
    /** @todo: predict branch as taken */
    tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, skip_label);
    tcg_temp_free(t0);

    tcg_gen_helper_0_0(helper_check_external_event);

    gen_set_label(skip_label);
#endif
}

#if 0 /* unused code? */
static void gen_check_external_event2(void)
{
    tcg_gen_helper_0_0(helper_check_external_event);
}
#endif

#endif
926
/* env->eip = pc (an eip value, i.e. already relative to cs_base). */
#ifndef VBOX
static inline void gen_jmp_im(target_ulong pc)
#else /* VBOX */
DECLINLINE(void) gen_jmp_im(target_ulong pc)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}

#ifdef VBOX
/* VBox wrapper around gen_jmp_im that can additionally dump CPU state
   when VBOX_DUMP_STATE is defined (debugging aid). */
DECLINLINE(void) gen_update_eip(target_ulong pc)
{
    gen_jmp_im(pc);
#ifdef VBOX_DUMP_STATE
    tcg_gen_helper_0_0(helper_dump_state);
#endif
}

#endif
947
/* A0 = source address for string instructions: segment base + (R/E)SI.
   The default DS segment may be replaced by an override prefix; in 16-bit
   mode SI wraps at 64K and a segment base is always applied. */
#ifndef VBOX
static inline void gen_string_movl_A0_ESI(DisasContext *s)
#else /* VBOX */
DECLINLINE(void) gen_string_movl_A0_ESI(DisasContext *s)
#endif /* VBOX */
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64-bit address size: base applied only on explicit override. */
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}
986
/* A0 = destination address for string instructions: ES base + (R/E)DI.
   ES cannot be overridden for the destination operand. */
#ifndef VBOX
static inline void gen_string_movl_A0_EDI(DisasContext *s)
#else /* VBOX */
DECLINLINE(void) gen_string_movl_A0_EDI(DisasContext *s)
#endif /* VBOX */
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64-bit: flat addressing, no ES base. */
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        /* 16-bit: DI wraps at 64K, ES base always added. */
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}
1011
1012#ifndef VBOX
1013static inline void gen_op_movl_T0_Dshift(int ot)
1014#else /* VBOX */
1015DECLINLINE(void) gen_op_movl_T0_Dshift(int ot)
1016#endif /* VBOX */
1017{
1018 tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
1019 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
1020};
1021
1022static void gen_extu(int ot, TCGv reg)
1023{
1024 switch(ot) {
1025 case OT_BYTE:
1026 tcg_gen_ext8u_tl(reg, reg);
1027 break;
1028 case OT_WORD:
1029 tcg_gen_ext16u_tl(reg, reg);
1030 break;
1031 case OT_LONG:
1032 tcg_gen_ext32u_tl(reg, reg);
1033 break;
1034 default:
1035 break;
1036 }
1037}
1038
1039static void gen_exts(int ot, TCGv reg)
1040{
1041 switch(ot) {
1042 case OT_BYTE:
1043 tcg_gen_ext8s_tl(reg, reg);
1044 break;
1045 case OT_WORD:
1046 tcg_gen_ext16s_tl(reg, reg);
1047 break;
1048 case OT_LONG:
1049 tcg_gen_ext32s_tl(reg, reg);
1050 break;
1051 default:
1052 break;
1053 }
1054}
1055
/* Branch to 'label1' if (C/EC/RC)X != 0; 'size' is the address-size code,
   so size+1 picks the OT_* width used to mask the counter before the test. */
#ifndef VBOX
static inline void gen_op_jnz_ecx(int size, int label1)
#else /* VBOX */
DECLINLINE(void) gen_op_jnz_ecx(int size, int label1)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
}

/* Branch to 'label1' if (C/EC/RC)X == 0 (REP termination test). */
#ifndef VBOX
static inline void gen_op_jz_ecx(int size, int label1)
#else /* VBOX */
DECLINLINE(void) gen_op_jz_ecx(int size, int label1)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
}
1077
/* Helper dispatch tables for port I/O, indexed by operand size
   (0=byte, 1=word, 2=long).  NOTE(review): function pointers are stored as
   void* - not strictly conforming C, but relied on by tcg_gen_helper_*. */
static void *helper_in_func[3] = {
    helper_inb,
    helper_inw,
    helper_inl,
};

static void *helper_out_func[3] = {
    helper_outb,
    helper_outw,
    helper_outl,
};

/* Permission-check helpers for IN/OUT/INS/OUTS, same size indexing. */
static void *gen_check_io_func[3] = {
    helper_check_iob,
    helper_check_iow,
    helper_check_iol,
};
1095
/* Emit the protection checks preceding an I/O instruction.
   - In protected mode with CPL > IOPL (or in vm86 mode), call the size-
     specific check helper with the port number from T0 (presumably the TSS
     I/O-permission-bitmap check - it can fault, so flags/EIP are flushed
     first).
   - Under SVM (HF_SVMI_MASK), additionally emit the I/O intercept check
     with the size encoded into svm_flags and the insn length delta. */
static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
                         uint32_t svm_flags)
{
    int state_saved;
    target_ulong next_eip;

    state_saved = 0;
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        state_saved = 1;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_1(gen_check_io_func[ot],
                           cpu_tmp2_i32);
    }
    if(s->flags & HF_SVMI_MASK) {
        if (!state_saved) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(cur_eip);
            state_saved = 1;
        }
        svm_flags |= (1 << (4 + ot));
        next_eip = s->pc - s->cs_base;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_3(helper_svm_check_io,
                           cpu_tmp2_i32,
                           tcg_const_i32(svm_flags),
                           tcg_const_i32(next_eip - cur_eip));
    }
}
1128
/* Emit one MOVS iteration: load from DS:(R/E)SI, store to ES:(R/E)DI, then
   advance both index registers by the DF-scaled stride. */
#ifndef VBOX
static inline void gen_movs(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_movs(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

/* Flush the lazy-flags state to env->cc_op and mark it dynamic so later
   code re-reads it at runtime. */
#ifndef VBOX
static inline void gen_update_cc_op(DisasContext *s)
#else /* VBOX */
DECLINLINE(void) gen_update_cc_op(DisasContext *s)
#endif /* VBOX */
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}
1155
/* Lazy-flags update after a one-operand op: cc_dst = result (T0),
   cc_src becomes dead. */
static void gen_op_update1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

/* Lazy-flags update after a two-operand op: cc_src = T1, cc_dst = T0. */
static void gen_op_update2_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

/* CMP: cc_src = T1, cc_dst = T0 - T1 (T0/T1 themselves unchanged). */
#ifndef VBOX
static inline void gen_op_cmpl_T0_T1_cc(void)
#else /* VBOX */
DECLINLINE(void) gen_op_cmpl_T0_T1_cc(void)
#endif /* VBOX */
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

/* TEST: cc_dst = T0 & T1, cc_src dead. */
#ifndef VBOX
static inline void gen_op_testl_T0_T1_cc(void)
#else /* VBOX */
DECLINLINE(void) gen_op_testl_T0_T1_cc(void)
#endif /* VBOX */
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

/* NEG: cc_src = -T0 (the original operand negated), cc_dst = result T0. */
static void gen_op_update_neg_cc(void)
{
    tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
1193
/* compute eflags.C to reg */
/* Emits an indirect call through cc_table[cc_op].compute_c: the table index
   is cc_op scaled by sizeof(CCTable) - shift 3 (x8) on 32-bit hosts, shift 4
   (x16) on 64-bit hosts, matching two function pointers per entry. */
static void gen_compute_eflags_c(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}

/* compute all eflags to cc_src */
/* Same indirect-call scheme as above but through the compute_all slot,
   materializing the full EFLAGS value into 'reg'. */
static void gen_compute_eflags(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}
1237
/* Slow/generic setcc: compute condition 'jcc_op' into T0 as 0/1 by
   materializing EFLAGS first.  Bit positions extracted below follow
   the x86 EFLAGS layout: PF=bit 2, ZF=bit 6, SF=bit 7, OF=bit 11;
   CF comes straight from gen_compute_eflags_c. */
#ifndef VBOX
static inline void gen_setcc_slow_T0(DisasContext *s, int jcc_op)
#else /* VBOX */
DECLINLINE(void) gen_setcc_slow_T0(DisasContext *s, int jcc_op)
#endif /* VBOX */
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    switch(jcc_op) {
    case JCC_O:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 11); /* OF */
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_B:
        gen_compute_eflags_c(cpu_T[0]); /* CF */
        break;
    case JCC_Z:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 6); /* ZF */
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_BE:
        /* BE == CF | ZF; CF is bit 0 so the unshifted flags word
           supplies it directly in the OR below */
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 6);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_S:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 7); /* SF */
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_P:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 2); /* PF */
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_L:
        /* L == SF ^ OF */
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 7); /* CC_S */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    default:
    case JCC_LE:
        /* LE == (SF ^ OF) | ZF */
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp4, cpu_tmp0, 7); /* CC_S */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 6); /* CC_Z */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    }
}
1295
1296/* return true if setcc_slow is not needed (WARNING: must be kept in
1297 sync with gen_jcc1) */
1298static int is_fast_jcc_case(DisasContext *s, int b)
1299{
1300 int jcc_op;
1301 jcc_op = (b >> 1) & 7;
1302 switch(s->cc_op) {
1303 /* we optimize the cmp/jcc case */
1304 case CC_OP_SUBB:
1305 case CC_OP_SUBW:
1306 case CC_OP_SUBL:
1307 case CC_OP_SUBQ:
1308 if (jcc_op == JCC_O || jcc_op == JCC_P)
1309 goto slow_jcc;
1310 break;
1311
1312 /* some jumps are easy to compute */
1313 case CC_OP_ADDB:
1314 case CC_OP_ADDW:
1315 case CC_OP_ADDL:
1316 case CC_OP_ADDQ:
1317
1318 case CC_OP_LOGICB:
1319 case CC_OP_LOGICW:
1320 case CC_OP_LOGICL:
1321 case CC_OP_LOGICQ:
1322
1323 case CC_OP_INCB:
1324 case CC_OP_INCW:
1325 case CC_OP_INCL:
1326 case CC_OP_INCQ:
1327
1328 case CC_OP_DECB:
1329 case CC_OP_DECW:
1330 case CC_OP_DECL:
1331 case CC_OP_DECQ:
1332
1333 case CC_OP_SHLB:
1334 case CC_OP_SHLW:
1335 case CC_OP_SHLL:
1336 case CC_OP_SHLQ:
1337 if (jcc_op != JCC_Z && jcc_op != JCC_S)
1338 goto slow_jcc;
1339 break;
1340 default:
1341 slow_jcc:
1342 return 0;
1343 }
1344 return 1;
1345}
1346
/* generate a conditional jump to label 'l1' according to jump opcode
   value 'b' (bit 0 inverts the condition, bits 1-3 select the test).
   In the fast case, T0 is guaranteed not to be used.  For the
   CC_OP_SUB* fast paths the original first operand is recovered as
   cc_dst + cc_src (gen_op_cmpl_T0_T1_cc stored cc_src = src2 and
   cc_dst = src1 - src2), letting compares branch directly without
   materializing EFLAGS. */
#ifndef VBOX
static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
#else /* VBOX */
DECLINLINE(void) gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
#endif /* VBOX */
{
    int inv, jcc_op, size, cond;
    TCGv t0;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    switch(cc_op) {
    /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:

        size = cc_op - CC_OP_SUBB;
        switch(jcc_op) {
        case JCC_Z:
            /* ZF test: mask the result down to the operand size and
               compare against zero */
        fast_jcc_z:
            switch(size) {
            case 0:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xff);
                t0 = cpu_tmp0;
                break;
            case 1:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffff);
                t0 = cpu_tmp0;
                break;
#ifdef TARGET_X86_64
            case 2:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffffffff);
                t0 = cpu_tmp0;
                break;
#endif
            default:
                t0 = cpu_cc_dst;
                break;
            }
            tcg_gen_brcondi_tl(inv ? TCG_COND_NE : TCG_COND_EQ, t0, 0, l1);
            break;
        case JCC_S:
            /* SF test: isolate the sign bit of the operand size */
        fast_jcc_s:
            switch(size) {
            case 0:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
            case 1:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x8000);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
#ifdef TARGET_X86_64
            case 2:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80000000);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
#endif
            default:
                tcg_gen_brcondi_tl(inv ? TCG_COND_GE : TCG_COND_LT, cpu_cc_dst,
                                   0, l1);
                break;
            }
            break;

        case JCC_B:
            cond = inv ? TCG_COND_GEU : TCG_COND_LTU;
            goto fast_jcc_b;
        case JCC_BE:
            cond = inv ? TCG_COND_GTU : TCG_COND_LEU;
        fast_jcc_b:
            /* unsigned compare: tmp4 = cc_dst + cc_src == original
               first operand; compare it against cc_src (src2) after
               truncating both to the operand size */
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
            switch(size) {
            case 0:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xff);
                break;
            case 1:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffff);
                break;
#ifdef TARGET_X86_64
            case 2:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffffffff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffffffff);
                break;
#endif
            default:
                t0 = cpu_cc_src;
                break;
            }
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
            break;

        case JCC_L:
            cond = inv ? TCG_COND_GE : TCG_COND_LT;
            goto fast_jcc_l;
        case JCC_LE:
            cond = inv ? TCG_COND_GT : TCG_COND_LE;
        fast_jcc_l:
            /* signed compare: same operand recovery, but sign-extend
               both sides to the operand size before comparing */
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
            switch(size) {
            case 0:
                t0 = cpu_tmp0;
                tcg_gen_ext8s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext8s_tl(t0, cpu_cc_src);
                break;
            case 1:
                t0 = cpu_tmp0;
                tcg_gen_ext16s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext16s_tl(t0, cpu_cc_src);
                break;
#ifdef TARGET_X86_64
            case 2:
                t0 = cpu_tmp0;
                tcg_gen_ext32s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext32s_tl(t0, cpu_cc_src);
                break;
#endif
            default:
                t0 = cpu_cc_src;
                break;
            }
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
            break;

        default:
            goto slow_jcc;
        }
        break;

    /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_ADCB:
    case CC_OP_ADCW:
    case CC_OP_ADCL:
    case CC_OP_ADCQ:

    case CC_OP_SBBB:
    case CC_OP_SBBW:
    case CC_OP_SBBL:
    case CC_OP_SBBQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:

    case CC_OP_SARB:
    case CC_OP_SARW:
    case CC_OP_SARL:
    case CC_OP_SARQ:
        /* for these ops only Z and S come straight from cc_dst */
        switch(jcc_op) {
        case JCC_Z:
            size = (cc_op - CC_OP_ADDB) & 3;
            goto fast_jcc_z;
        case JCC_S:
            size = (cc_op - CC_OP_ADDB) & 3;
            goto fast_jcc_s;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        /* generic path: compute the condition into T0, branch on it */
        gen_setcc_slow_T0(s, jcc_op);
        tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE,
                           cpu_T[0], 0, l1);
        break;
    }
}
1548
/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
/* Emit the ECX == 0 termination test for a REP string op: when ECX
   is non-zero, fall through past label l1 to the string iteration;
   when zero, fall into label l2 which jumps to 'next_eip'.  Returns
   l2 so the caller can branch back to the termination path. */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx(s->aflag, l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}
1563
/* Emit one STOS iteration: store EAX (truncated to size 'ot') at
   ES:[EDI], then advance EDI by the direction-flag-scaled element
   size (Dshift). */
#ifndef VBOX
static inline void gen_stos(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_stos(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
1576
/* Emit one LODS iteration: load a size-'ot' element from [ESI] into
   EAX, then advance ESI by the direction-flag-scaled element size. */
#ifndef VBOX
static inline void gen_lods(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_lods(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
}
1589
/* Emit one SCAS iteration: compare EAX against the element at
   ES:[EDI] (setting the lazy flags via the CMP helper), then advance
   EDI.  The REPZ/REPNZ wrapper tests ZF afterwards. */
#ifndef VBOX
static inline void gen_scas(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_scas(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
1603
/* Emit one CMPS iteration: compare the element at [ESI] against the
   one at ES:[EDI] (lazy flags via the CMP helper), then advance both
   index registers. */
#ifndef VBOX
static inline void gen_cmps(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_cmps(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
1619
/* Emit one INS iteration: read from the I/O port in DX (masked to
   16 bits) into the element at ES:[EDI], then advance EDI.  The
   sequence is bracketed with gen_io_start/gen_io_end when icount is
   in use so I/O stays deterministic. */
#ifndef VBOX
static inline void gen_ins(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_ins(DisasContext *s, int ot)
#endif /* VBOX */
{
    if (use_icount)
        gen_io_start();
    gen_string_movl_A0_EDI(s);
    /* Note: we must do this dummy write first to be restartable in
       case of page fault. */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
    if (use_icount)
        gen_io_end();
}
1643
/* Emit one OUTS iteration: write the element at [ESI] to the I/O
   port in DX (masked to 16 bits), then advance ESI.  Bracketed with
   gen_io_start/gen_io_end under icount, like gen_ins. */
#ifndef VBOX
static inline void gen_outs(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_outs(DisasContext *s, int ot)
#endif /* VBOX */
{
    if (use_icount)
        gen_io_start();
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);

    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
    tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);

    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    if (use_icount)
        gen_io_end();
}
1666
/* same method as Valgrind : we generate jumps to current or next
   instruction */
/* GEN_REPZ(op) defines gen_repz_<op>(): emit one <op> iteration
   guarded by the ECX == 0 test from gen_jz_ecx_string, decrement ECX,
   and jump back to cur_eip to repeat.  When single stepping
   (jmp_opt clear) ECX is also re-tested after the iteration so a REP
   with ECX == 1 does not raise two single-step exceptions. */
#ifndef VBOX
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    /* a loop would cause two single step exceptions if ECX = 1       \
       before rep string_insn */                                      \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
#else /* VBOX */
#define GEN_REPZ(op)                                                          \
DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot,                     \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    /* a loop would cause two single step exceptions if ECX = 1       \
       before rep string_insn */                                      \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
#endif /* VBOX */
1702
/* GEN_REPZ2(op) is like GEN_REPZ but for SCAS/CMPS, which also
   terminate on the ZF condition: after each iteration it flushes
   cc_op and branches to the exit label when ZF != nz, implementing
   REPZ (nz == 0) and REPNZ (nz == 1). */
#ifndef VBOX
#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2);                \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
#else /* VBOX */
#define GEN_REPZ2(op)                                                         \
DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot,                     \
                                 target_ulong cur_eip,                        \
                                 target_ulong next_eip,                       \
                                 int nz)                                      \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2);                \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
#endif /* VBOX */
1740
/* Instantiate the REP wrappers: gen_repz_movs(), gen_repz_stos(),
   etc.  SCAS and CMPS use the GEN_REPZ2 form, which takes the extra
   'nz' argument for the REPZ/REPNZ termination test. */
GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)
1748
/* FPU ST0 <op>= FT0 helpers, indexed by the 3-bit arithmetic op field
   of the FP opcode.  Slots 2 and 3 both point to fcom -- presumably
   the pop variant (fcomp) is handled by the decoder; verify against
   the FP opcode dispatch. */
static void *helper_fp_arith_ST0_FT0[8] = {
    helper_fadd_ST0_FT0,
    helper_fmul_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fsub_ST0_FT0,
    helper_fsubr_ST0_FT0,
    helper_fdiv_ST0_FT0,
    helper_fdivr_ST0_FT0,
};
1759
/* NOTE the exception in "r" op ordering */
/* FPU ST(i) <op>= ST0 helpers, same 3-bit index as above.  The
   sub/subr and div/divr entries are deliberately swapped relative to
   the ST0/FT0 table (the x86 "r" forms reverse operands for this
   encoding); the compare slots have no STN variant, hence NULL. */
static void *helper_fp_arith_STN_ST0[8] = {
    helper_fadd_STN_ST0,
    helper_fmul_STN_ST0,
    NULL,
    NULL,
    helper_fsubr_STN_ST0,
    helper_fsub_STN_ST0,
    helper_fdivr_STN_ST0,
    helper_fdiv_STN_ST0,
};
1771
/* if d == OR_TMP0, it means memory operand (address in A0) */
/* Emit an ALU operation 'op' of size 'ot' on destination 'd' with the
   second operand in T1, updating the lazy flags state.  Most ops set
   a static cc_op on the DisasContext; ADC/SBB depend on the incoming
   carry, so they compute cpu_cc_op at runtime and leave
   s1->cc_op == CC_OP_DYNAMIC. */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg(ot, 0, d);
    } else {
        gen_op_ld_T0_A0(ot + s1->mem_index);
    }
    switch(op) {
    case OP_ADCL:
        /* dst = dst + src + CF; CF must be read under the *previous*
           cc_op, hence the flush before gen_compute_eflags_c */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        /* cpu_cc_op = CC_OP_ADDB + ot + 4 * carry -- presumably
           selecting the ADC variant of the cc_op when CF was set */
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_SBBL:
        /* dst = dst - src - CF; same runtime cc_op selection as ADC */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_ADDB + ot;
        break;
    case OP_SUBL:
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    default:
    case OP_ANDL:
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_ORL:
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_XORL:
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_CMPL:
        /* CMP writes no destination, only the lazy flags */
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    }
}
1867
/* if d == OR_TMP0, it means memory operand (address in A0) */
/* Emit INC (c > 0) or DEC (c <= 0) of destination 'd'.  x86 INC/DEC
   leave CF unchanged: the carry is computed from the *previous* cc_op
   (flushed into cpu_cc_op before s1->cc_op is switched to INC/DEC)
   and stashed in cc_src so the INC/DEC flag helpers can re-inject it. */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    else
        gen_op_st_T0_A0(ot + s1->mem_index);
    gen_compute_eflags_c(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
1891
/* Emit SHL/SHR/SAR of operand 'op1' (OR_TMP0 = memory operand,
   address in A0) by the variable count in T1.  The count is masked to
   5 bits (6 for 64-bit ops) per the x86 rules; the value shifted by
   count-1 is kept so CF can be derived.  Since a zero count must
   leave the flags untouched, the flag update is guarded by a runtime
   branch and cc_op becomes CC_OP_DYNAMIC. */
static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
                            int is_right, int is_arith)
{
    target_ulong mask;
    int shift_label;
    TCGv t0, t1;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    /* tmp5 = count - 1: used to compute the last bit shifted out */
    tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);

    if (is_right) {
        if (is_arith) {
            gen_exts(ot, cpu_T[0]);
            tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        } else {
            gen_extu(ot, cpu_T[0]);
            tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* XXX: inefficient */
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);

    tcg_gen_mov_tl(t0, cpu_T[0]);
    tcg_gen_mov_tl(t1, cpu_T3);

    shift_label = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_T[1], 0, shift_label);

    /* count != 0: cc_src = value shifted by count-1 (carries CF),
       cc_dst = result */
    tcg_gen_mov_tl(cpu_cc_src, t1);
    tcg_gen_mov_tl(cpu_cc_dst, t0);
    if (is_right)
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    else
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);

    gen_set_label(shift_label);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
1962
/* Emit SHL/SHR/SAR of operand 'op1' by the immediate count 'op2'.
   The count is masked at translation time, so whether the flags are
   updated is known statically: a zero count skips both the shift and
   the flag update, otherwise cc_src holds the value shifted by
   count-1 (for CF) and a static cc_op is recorded. */
static void gen_shift_rm_im(DisasContext *s, int ot, int op1, int op2,
                            int is_right, int is_arith)
{
    int mask;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    op2 &= mask;
    if (op2 != 0) {
        if (is_right) {
            if (is_arith) {
                gen_exts(ot, cpu_T[0]);
                tcg_gen_sari_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_sari_tl(cpu_T[0], cpu_T[0], op2);
            } else {
                gen_extu(ot, cpu_T[0]);
                tcg_gen_shri_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], op2);
            }
        } else {
            tcg_gen_shli_tl(cpu_tmp4, cpu_T[0], op2 - 1);
            tcg_gen_shli_tl(cpu_T[0], cpu_T[0], op2);
        }
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (op2 != 0) {
        tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        if (is_right)
            s->cc_op = CC_OP_SARB + ot;
        else
            s->cc_op = CC_OP_SHLB + ot;
    }
}
2013
2014#ifndef VBOX
2015static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
2016#else /* VBOX */
2017DECLINLINE(void) tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
2018#endif /* VBOX */
2019{
2020 if (arg2 >= 0)
2021 tcg_gen_shli_tl(ret, arg1, arg2);
2022 else
2023 tcg_gen_shri_tl(ret, arg1, -arg2);
2024}
2025
/* XXX: add faster immediate case */
/* Emit ROL/ROR of operand 'op1' (OR_TMP0 = memory operand, address
   in A0) by the variable count in T1.  A rotate by 0 must leave both
   the value and the flags untouched, so the rotate and the flag
   update are each guarded by a runtime count != 0 test; cc_op ends up
   CC_OP_DYNAMIC.  Only CF and OF are recomputed (per x86 rotate
   semantics as implemented below), the other flags are preserved by
   masking them out of the materialized EFLAGS. */
static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
                          int is_right)
{
    target_ulong mask;
    int label1, label2, data_bits;
    TCGv t0, t1, t2, a0;

    /* XXX: inefficient, but we must use local temps */
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);
    t2 = tcg_temp_local_new(TCG_TYPE_TL);
    a0 = tcg_temp_local_new(TCG_TYPE_TL);

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0) {
        tcg_gen_mov_tl(a0, cpu_A0);
        gen_op_ld_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_v_reg(ot, t0, op1);
    }

    tcg_gen_mov_tl(t1, cpu_T[1]);

    tcg_gen_andi_tl(t1, t1, mask);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label1);

    /* reduce the count modulo the data width for sub-word sizes */
    if (ot <= OT_WORD)
        tcg_gen_andi_tl(cpu_tmp0, t1, (1 << (3 + ot)) - 1);
    else
        tcg_gen_mov_tl(cpu_tmp0, t1);

    gen_extu(ot, t0);
    tcg_gen_mov_tl(t2, t0); /* keep pre-rotate value for the OF xor */

    data_bits = 8 << ot;
    /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
       fix TCG definition) */
    /* rotate = (x >> n) | (x << (width - n)), or the mirror for ROL */
    if (is_right) {
        tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shl_tl(t0, t0, cpu_tmp0);
    } else {
        tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shr_tl(t0, t0, cpu_tmp0);
    }
    tcg_gen_or_tl(t0, t0, cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0) {
        gen_op_st_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_reg_v(ot, op1, t0);
    }

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label2);

    /* OF = bit flipped between old and new MSB; CF = low (ROR: high)
       bit of the result; all other flags kept from computed EFLAGS */
    gen_compute_eflags(cpu_cc_src);
    tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
    tcg_gen_xor_tl(cpu_tmp0, t2, t0);
    tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
    if (is_right) {
        tcg_gen_shri_tl(t0, t0, data_bits - 1);
    }
    tcg_gen_andi_tl(t0, t0, CC_C);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);

    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(a0);
}
2122
/* Rotate-through-carry helpers, indexed as [ot + is_right * 4]:
   entries 0-3 are RCL for byte/word/long/quad, entries 4-7 RCR.
   The quad variants exist only on 64-bit targets (NULL otherwise,
   via X86_64_ONLY). */
static void *helper_rotc[8] = {
    helper_rclb,
    helper_rclw,
    helper_rcll,
    X86_64_ONLY(helper_rclq),
    helper_rcrb,
    helper_rcrw,
    helper_rcrl,
    X86_64_ONLY(helper_rcrq),
};
2133
/* XXX: add faster immediate = 1 case */
/* Emit RCL/RCR of operand 'op1' by the count in T1, via the
   helper_rotc table.  The flag update is conditional on
   cpu_cc_tmp != -1 -- presumably the helpers set -1 to signal that
   the effective count was zero and the flags must stay untouched;
   verify against the rcl/rcr helper implementations. */
static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
                           int is_right)
{
    int label1;

    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
                       cpu_T[0], cpu_T[0], cpu_T[1]);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_tmp, -1, label1);

    tcg_gen_mov_tl(cpu_cc_src, cpu_cc_tmp);
    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label1);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
2168
/* XXX: add faster immediate case */
/* Emit SHLD/SHRD (double-width shift) of operand 'op1' with the
   second source in T1 and the count in T3 (masked to 5 bits, 6 for
   64-bit ops).  A zero count leaves value and flags untouched, hence
   the runtime guards; otherwise cc_src gets the value shifted by
   count-1 (for CF) and the cc_op is set at runtime, leaving
   s->cc_op == CC_OP_DYNAMIC. */
static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
                                int is_right)
{
    int label1, label2, data_bits;
    target_ulong mask;
    TCGv t0, t1, t2, a0;

    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);
    t2 = tcg_temp_local_new(TCG_TYPE_TL);
    a0 = tcg_temp_local_new(TCG_TYPE_TL);

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0) {
        tcg_gen_mov_tl(a0, cpu_A0);
        gen_op_ld_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_v_reg(ot, t0, op1);
    }

    tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);

    tcg_gen_mov_tl(t1, cpu_T[1]);
    tcg_gen_mov_tl(t2, cpu_T3);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);

    tcg_gen_addi_tl(cpu_tmp5, t2, -1); /* count - 1, for the CF bit */
    if (ot == OT_WORD) {
        /* Note: we implement the Intel behaviour for shift count > 16 */
        /* the 16-bit operand is widened by replicating/concatenating
           the two sources into 32 bits before shifting */
        if (is_right) {
            tcg_gen_andi_tl(t0, t0, 0xffff);
            tcg_gen_shli_tl(cpu_tmp0, t1, 16);
            tcg_gen_or_tl(t0, t0, cpu_tmp0);
            tcg_gen_ext32u_tl(t0, t0);

            tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);

            /* only needed if count > 16, but a test would complicate */
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
            tcg_gen_shl_tl(cpu_tmp0, t0, cpu_tmp5);

            tcg_gen_shr_tl(t0, t0, t2);

            tcg_gen_or_tl(t0, t0, cpu_tmp0);
        } else {
            /* XXX: not optimal */
            tcg_gen_andi_tl(t0, t0, 0xffff);
            tcg_gen_shli_tl(t1, t1, 16);
            tcg_gen_or_tl(t1, t1, t0);
            tcg_gen_ext32u_tl(t1, t1);

            tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
            tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
            tcg_gen_shr_tl(cpu_tmp6, t1, cpu_tmp0);
            tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);

            tcg_gen_shl_tl(t0, t0, t2);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
            tcg_gen_shr_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);
        }
    } else {
        data_bits = 8 << ot;
        if (is_right) {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(t0, t0);

            tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);

            /* result = (t0 >> count) | (t1 << (width - count)) */
            tcg_gen_shr_tl(t0, t0, t2);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
            tcg_gen_shl_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);

        } else {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(t1, t1);

            tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);

            /* result = (t0 << count) | (t1 >> (width - count)) */
            tcg_gen_shl_tl(t0, t0, t2);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
            tcg_gen_shr_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);
        }
    }
    tcg_gen_mov_tl(t1, cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0) {
        gen_op_st_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_reg_v(ot, op1, t0);
    }

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label2);

    tcg_gen_mov_tl(cpu_cc_src, t1);
    tcg_gen_mov_tl(cpu_cc_dst, t0);
    if (is_right) {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    } else {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
    }
    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(a0);
}
2297
2298static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
2299{
2300 if (s != OR_TMP1)
2301 gen_op_mov_TN_reg(ot, 1, s);
2302 switch(op) {
2303 case OP_ROL:
2304 gen_rot_rm_T1(s1, ot, d, 0);
2305 break;
2306 case OP_ROR:
2307 gen_rot_rm_T1(s1, ot, d, 1);
2308 break;
2309 case OP_SHL:
2310 case OP_SHL1:
2311 gen_shift_rm_T1(s1, ot, d, 0, 0);
2312 break;
2313 case OP_SHR:
2314 gen_shift_rm_T1(s1, ot, d, 1, 0);
2315 break;
2316 case OP_SAR:
2317 gen_shift_rm_T1(s1, ot, d, 1, 1);
2318 break;
2319 case OP_RCL:
2320 gen_rotc_rm_T1(s1, ot, d, 0);
2321 break;
2322 case OP_RCR:
2323 gen_rotc_rm_T1(s1, ot, d, 1);
2324 break;
2325 }
2326}
2327
2328static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
2329{
2330 switch(op) {
2331 case OP_SHL:
2332 case OP_SHL1:
2333 gen_shift_rm_im(s1, ot, d, c, 0, 0);
2334 break;
2335 case OP_SHR:
2336 gen_shift_rm_im(s1, ot, d, c, 1, 0);
2337 break;
2338 case OP_SAR:
2339 gen_shift_rm_im(s1, ot, d, c, 1, 1);
2340 break;
2341 default:
2342 /* currently not optimized */
2343 gen_op_movl_T1_im(c);
2344 gen_shift(s1, op, ot, d, OR_TMP1);
2345 break;
2346 }
2347}
2348
/* Decode the memory operand described by 'modrm' (plus any SIB byte and
   displacement at s->pc) and emit TCG ops leaving the effective address
   in A0.  Handles 16/32/64-bit addressing (s->aflag), REX prefixes,
   RIP-relative addressing in long mode, and segment-base addition when
   s->addseg is set or an explicit override prefix is present.
   *reg_ptr/*offset_ptr always receive OR_A0/0 (legacy interface). */
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;   /* an explicit segment prefix always applies */
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64-bit addressing */
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            /* rm == 4 selects a SIB byte */
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                /* no base register: disp32 (RIP-relative in 64-bit
                   mode when there is no SIB byte) */
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
#ifdef VBOX
            disp = (int32_t)ldl_code(s->pc);
#else
            disp = ldl_code(s->pc);
#endif
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(base);
                if (disp != 0) {
                    gen_op_addq_A0_im(disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg(base);
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
            /* no base register: A0 = displacement only */
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* index == 4 means no index */
        if (havesib && (index != 4)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN(scale, index);
            } else
#endif
            {
                gen_op_addl_A0_reg_sN(scale, index);
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                /* default segment: SS for EBP/ESP-based addressing,
                   DS otherwise */
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(override);
            } else
#endif
            {
                gen_op_addl_A0_seg(override);
            }
        }
    } else {
        /* 16-bit addressing */
        switch (mod) {
        case 0:
            if (rm == 6) {
                /* disp16 absolute form */
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        /* the eight 16-bit base/index register combinations */
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 1:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 2:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 3:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 4:
            gen_op_movl_A0_reg(R_ESI);
            break;
        case 5:
            gen_op_movl_A0_reg(R_EDI);
            break;
        case 6:
            gen_op_movl_A0_reg(R_EBP);
            break;
        default:
        case 7:
            gen_op_movl_A0_reg(R_EBX);
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        gen_op_andl_A0_ffff();   /* 16-bit effective addresses wrap at 64K */
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                /* BP-based forms default to SS */
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(override);
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
2536
2537static void gen_nop_modrm(DisasContext *s, int modrm)
2538{
2539 int mod, rm, base, code;
2540
2541 mod = (modrm >> 6) & 3;
2542 if (mod == 3)
2543 return;
2544 rm = modrm & 7;
2545
2546 if (s->aflag) {
2547
2548 base = rm;
2549
2550 if (base == 4) {
2551 code = ldub_code(s->pc++);
2552 base = (code & 7);
2553 }
2554
2555 switch (mod) {
2556 case 0:
2557 if (base == 5) {
2558 s->pc += 4;
2559 }
2560 break;
2561 case 1:
2562 s->pc++;
2563 break;
2564 default:
2565 case 2:
2566 s->pc += 4;
2567 break;
2568 }
2569 } else {
2570 switch (mod) {
2571 case 0:
2572 if (rm == 6) {
2573 s->pc += 2;
2574 }
2575 break;
2576 case 1:
2577 s->pc++;
2578 break;
2579 default:
2580 case 2:
2581 s->pc += 2;
2582 break;
2583 }
2584 }
2585}
2586
2587/* used for LEA and MOV AX, mem */
2588static void gen_add_A0_ds_seg(DisasContext *s)
2589{
2590 int override, must_add_seg;
2591 must_add_seg = s->addseg;
2592 override = R_DS;
2593 if (s->override >= 0) {
2594 override = s->override;
2595 must_add_seg = 1;
2596 } else {
2597 override = R_DS;
2598 }
2599 if (must_add_seg) {
2600#ifdef TARGET_X86_64
2601 if (CODE64(s)) {
2602 gen_op_addq_A0_seg(override);
2603 } else
2604#endif
2605 {
2606 gen_op_addl_A0_seg(override);
2607 }
2608 }
2609}
2610
2611/* generate modrm memory load or store of 'reg'. TMP0 is used if reg ==
2612 OR_TMP0 */
2613static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
2614{
2615 int mod, rm, opreg, disp;
2616
2617 mod = (modrm >> 6) & 3;
2618 rm = (modrm & 7) | REX_B(s);
2619 if (mod == 3) {
2620 if (is_store) {
2621 if (reg != OR_TMP0)
2622 gen_op_mov_TN_reg(ot, 0, reg);
2623 gen_op_mov_reg_T0(ot, rm);
2624 } else {
2625 gen_op_mov_TN_reg(ot, 0, rm);
2626 if (reg != OR_TMP0)
2627 gen_op_mov_reg_T0(ot, reg);
2628 }
2629 } else {
2630 gen_lea_modrm(s, modrm, &opreg, &disp);
2631 if (is_store) {
2632 if (reg != OR_TMP0)
2633 gen_op_mov_TN_reg(ot, 0, reg);
2634 gen_op_st_T0_A0(ot + s->mem_index);
2635 } else {
2636 gen_op_ld_T0_A0(ot + s->mem_index);
2637 if (reg != OR_TMP0)
2638 gen_op_mov_reg_T0(ot, reg);
2639 }
2640 }
2641}
2642
2643#ifndef VBOX
2644static inline uint32_t insn_get(DisasContext *s, int ot)
2645#else /* VBOX */
2646DECLINLINE(uint32_t) insn_get(DisasContext *s, int ot)
2647#endif /* VBOX */
2648{
2649 uint32_t ret;
2650
2651 switch(ot) {
2652 case OT_BYTE:
2653 ret = ldub_code(s->pc);
2654 s->pc++;
2655 break;
2656 case OT_WORD:
2657 ret = lduw_code(s->pc);
2658 s->pc += 2;
2659 break;
2660 default:
2661 case OT_LONG:
2662 ret = ldl_code(s->pc);
2663 s->pc += 4;
2664 break;
2665 }
2666 return ret;
2667}
2668
2669#ifndef VBOX
2670static inline int insn_const_size(unsigned int ot)
2671#else /* VBOX */
2672DECLINLINE(int) insn_const_size(unsigned int ot)
2673#endif /* VBOX */
2674{
2675 if (ot <= OT_LONG)
2676 return 1 << ot;
2677 else
2678 return 4;
2679}
2680
#ifndef VBOX
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
#else /* VBOX */
DECLINLINE(void) gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
#endif /* VBOX */
{
    /* Jump to 'eip', chaining TBs directly through chain slot 'tb_num'
       when the target is on the same page as the current TB; otherwise
       fall back to a full end-of-block. */
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
#ifdef VBOX
        gen_check_external_event(s);
#endif /* VBOX */
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(eip);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
2708
#ifndef VBOX
static inline void gen_jcc(DisasContext *s, int b,
#else /* VBOX */
DECLINLINE(void) gen_jcc(DisasContext *s, int b,
#endif /* VBOX */
                           target_ulong val, target_ulong next_eip)
{
    /* Conditional jump on condition code 'b': taken -> 'val', not
       taken -> 'next_eip'.  Uses direct TB chaining when jump
       optimization is enabled; otherwise ends the block. */
    int l1, l2, cc_op;

    cc_op = s->cc_op;
    if (s->cc_op != CC_OP_DYNAMIC) {
        /* materialize the flags before branching */
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
    if (s->jmp_opt) {
        l1 = gen_new_label();
        gen_jcc1(s, cc_op, b, l1);

        gen_goto_tb(s, 0, next_eip);   /* fall-through path */

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);        /* taken path */
        s->is_jmp = 3;
    } else {
        /* unchained variant: set EIP explicitly on both paths */
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_jcc1(s, cc_op, b, l1);

        gen_jmp_im(next_eip);
        tcg_gen_br(l2);

        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
2747
/* SETcc: set T0 to 0 or 1 according to condition code 'b'.  Uses a
   branch for the conditions that map onto a fast jcc, otherwise
   computes the flag value branchlessly. */
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op, l1;
    TCGv t0;

    if (is_fast_jcc_case(s, b)) {
        /* nominal case: we use a jump */
        /* XXX: make it faster by adding new instructions in TCG */
        t0 = tcg_temp_local_new(TCG_TYPE_TL);
        tcg_gen_movi_tl(t0, 0);
        l1 = gen_new_label();
        gen_jcc1(s, s->cc_op, b ^ 1, l1);   /* branch on inverted condition */
        tcg_gen_movi_tl(t0, 1);
        gen_set_label(l1);
        tcg_gen_mov_tl(cpu_T[0], t0);
        tcg_temp_free(t0);
    } else {
        /* slow case: it is more efficient not to generate a jump,
           although it is questionnable whether this optimization is
           worth to */
        inv = b & 1;
        jcc_op = (b >> 1) & 7;
        gen_setcc_slow_T0(s, jcc_op);
        if (inv) {
            tcg_gen_xori_tl(cpu_T[0], cpu_T[0], 1);
        }
    }
}
2776
#ifndef VBOX
static inline void gen_op_movl_T0_seg(int seg_reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_seg(int seg_reg)
#endif /* VBOX */
{
    /* Load the visible selector of segment 'seg_reg' into T0. */
    tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                     offsetof(CPUX86State,segs[seg_reg].selector));
}
2786
2787#ifndef VBOX
2788static inline void gen_op_movl_seg_T0_vm(int seg_reg)
2789#else /* VBOX */
2790DECLINLINE(void) gen_op_movl_seg_T0_vm(int seg_reg)
2791#endif /* VBOX */
2792{
2793 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
2794 tcg_gen_st32_tl(cpu_T[0], cpu_env,
2795 offsetof(CPUX86State,segs[seg_reg].selector));
2796 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
2797 tcg_gen_st_tl(cpu_T[0], cpu_env,
2798 offsetof(CPUX86State,segs[seg_reg].base));
2799#ifdef VBOX
2800 int flags = DESC_P_MASK | DESC_S_MASK | DESC_W_MASK;
2801 if (seg_reg == R_CS)
2802 flags |= DESC_CS_MASK;
2803 gen_op_movl_T0_im(flags);
2804 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[seg_reg].flags));
2805
2806 /* Set the limit to 0xffff. */
2807 gen_op_movl_T0_im(0xffff);
2808 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[seg_reg].limit));
2809#endif
2810}
2811
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* protected mode: the helper does the full descriptor load and
           may fault, so EIP must be synced first */
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        /* real/VM86 mode: plain selector load */
        gen_op_movl_seg_T0_vm(seg_reg);
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
2835
2836#ifndef VBOX
2837static inline int svm_is_rep(int prefixes)
2838#else /* VBOX */
2839DECLINLINE(int) svm_is_rep(int prefixes)
2840#endif /* VBOX */
2841{
2842 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2843}
2844
#ifndef VBOX
static inline void
#else /* VBOX */
DECLINLINE(void)
#endif /* VBOX */
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
                              uint32_t type, uint64_t param)
{
    /* Emit an SVM intercept check of 'type' with exit-info 'param';
       the helper handles the #VMEXIT if the intercept is active. */
    /* no SVM activated; fast case */
    if (likely(!(s->flags & HF_SVMI_MASK)))
        return;
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(pc_start - s->cs_base);  /* EIP must be exact on exit */
    tcg_gen_helper_0_2(helper_svm_check_intercept_param,
                       tcg_const_i32(type), tcg_const_i64(param));
}
2862
#ifndef VBOX
static inline void
#else /* VBOX */
DECLINLINE(void)
#endif
gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
{
    /* SVM intercept check without additional exit information. */
    gen_svm_check_intercept_param(s, pc_start, type, 0);
}
2872
2873#ifndef VBOX
2874static inline void gen_stack_update(DisasContext *s, int addend)
2875#else /* VBOX */
2876DECLINLINE(void) gen_stack_update(DisasContext *s, int addend)
2877#endif /* VBOX */
2878{
2879#ifdef TARGET_X86_64
2880 if (CODE64(s)) {
2881 gen_op_add_reg_im(2, R_ESP, addend);
2882 } else
2883#endif
2884 if (s->ss32) {
2885 gen_op_add_reg_im(1, R_ESP, addend);
2886 } else {
2887 gen_op_add_reg_im(0, R_ESP, addend);
2888 }
2889}
2890
/* generate a push. It depends on ss32, addseg and dflag */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        /* long mode: 8 bytes, or 2 with an operand-size prefix; the SS
           base plays no role */
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            gen_op_addq_A0_im(-8);
            gen_op_st_T0_A0(OT_QUAD + s->mem_index);
        } else {
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                /* keep the unbased ESP value in T1 for the final update */
                tcg_gen_mov_tl(cpu_T[1], cpu_A0);
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            gen_op_andl_A0_ffff();   /* 16-bit stack wraps at 64K */
            tcg_gen_mov_tl(cpu_T[1], cpu_A0);
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);   /* A0 still holds the new ESP */
        else
            gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
    }
}
2930
2931/* generate a push. It depends on ss32, addseg and dflag */
2932/* slower version for T1, only used for call Ev */
2933static void gen_push_T1(DisasContext *s)
2934{
2935#ifdef TARGET_X86_64
2936 if (CODE64(s)) {
2937 gen_op_movq_A0_reg(R_ESP);
2938 if (s->dflag) {
2939 gen_op_addq_A0_im(-8);
2940 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2941 } else {
2942 gen_op_addq_A0_im(-2);
2943 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2944 }
2945 gen_op_mov_reg_A0(2, R_ESP);
2946 } else
2947#endif
2948 {
2949 gen_op_movl_A0_reg(R_ESP);
2950 if (!s->dflag)
2951 gen_op_addl_A0_im(-2);
2952 else
2953 gen_op_addl_A0_im(-4);
2954 if (s->ss32) {
2955 if (s->addseg) {
2956 gen_op_addl_A0_seg(R_SS);
2957 }
2958 } else {
2959 gen_op_andl_A0_ffff();
2960 gen_op_addl_A0_seg(R_SS);
2961 }
2962 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2963
2964 if (s->ss32 && !s->addseg)
2965 gen_op_mov_reg_A0(1, R_ESP);
2966 else
2967 gen_stack_update(s, (-2) << s->dflag);
2968 }
2969}
2970
/* two step pop is necessary for precise exceptions */
static void gen_pop_T0(DisasContext *s)
{
    /* Step 1: load the stack top into T0.  ESP is only advanced later
       (gen_pop_update), so a fault here leaves the state untouched. */
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_seg(R_SS);
        } else {
            gen_op_andl_A0_ffff();   /* 16-bit stack wraps at 64K */
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
    }
}
2992
2993static void gen_pop_update(DisasContext *s)
2994{
2995#ifdef TARGET_X86_64
2996 if (CODE64(s) && s->dflag) {
2997 gen_stack_update(s, 8);
2998 } else
2999#endif
3000 {
3001 gen_stack_update(s, 2 << s->dflag);
3002 }
3003}
3004
/* Compute the current stack address into A0, keeping the unbased
   offset in T1; honours ss32 and addseg. */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
}
3014
/* NOTE: wrap around in 16 bit not fully handled */
static void gen_pusha(DisasContext *s)
{
    /* PUSHA/PUSHAD: store EAX..EDI below ESP, then commit the new
       stack pointer kept in T1. */
    int i;
    gen_op_movl_A0_reg(R_ESP);
    gen_op_addl_A0_im(-16 << s->dflag);   /* 8 regs * 2 or 4 bytes */
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);   /* EDI first, EAX last */
        gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
3033
/* NOTE: wrap around in 16 bit not fully handled */
static void gen_popa(DisasContext *s)
{
    /* POPA/POPAD: reload EDI..EAX from the stack (skipping the saved
       ESP slot), then commit the incremented stack pointer from T1. */
    int i;
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
            gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
        }
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
3055
/* ENTER: build a stack frame of 'esp_addend' bytes with 'level'
   nesting levels; level > 0 is delegated to a helper. */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;   /* hardware masks the nesting level to 5 bits */
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addq_A0_im(-opsize);
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);

        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter64_level,
                               tcg_const_i32(level),
                               tcg_const_i32((ot == OT_QUAD)),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);   /* new frame pointer */
        tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
        gen_op_mov_reg_T1(OT_QUAD, R_ESP);
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);
        if (s->addseg)
            gen_op_addl_A0_seg(R_SS);
        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter_level,
                               tcg_const_i32(level),
                               tcg_const_i32(s->dflag),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);   /* new frame pointer */
        tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
        gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
    }
}
3111
/* Raise exception 'trapno' at guest EIP 'cur_eip': flags are
   materialized, EIP is synced, and the block is terminated. */
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
    s->is_jmp = 3;   /* translation stops here */
}
3120
/* an interrupt is different from an exception because of the
   privilege checks */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    /* the helper gets the instruction length to compute the return EIP */
    tcg_gen_helper_0_2(helper_raise_interrupt,
                       tcg_const_i32(intno),
                       tcg_const_i32(next_eip - cur_eip));
    s->is_jmp = 3;   /* translation stops here */
}
3134
/* Sync state and invoke the debug helper (breakpoint handling),
   terminating the block. */
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_0(helper_debug);
    s->is_jmp = 3;   /* translation stops here */
}
3143
/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        /* the MOV SS/STI interrupt-inhibit window ends after this insn */
        tcg_gen_helper_0_0(helper_reset_inhibit_irq);
    }

#ifdef VBOX
    gen_check_external_event(s);
#endif /* VBOX */

    if (s->singlestep_enabled) {
        tcg_gen_helper_0_0(helper_debug);
    } else if (s->tf) {
        /* TF set: raise the single-step trace exception */
        tcg_gen_helper_0_0(helper_single_step);
    } else {
        tcg_gen_exit_tb(0);
    }
    s->is_jmp = 3;
}
3167
/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
        /* chained jump: flags must be materialized first */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
3184
/* Unconditional jump to 'eip' using TB chain slot 0. */
static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
3189
#ifndef VBOX
static inline void gen_ldq_env_A0(int idx, int offset)
#else /* VBOX */
DECLINLINE(void) gen_ldq_env_A0(int idx, int offset)
#endif /* VBOX */
{
    /* Load 64 bits from guest address A0 into env field 'offset';
       'idx' carries the mem_index in the usual ot + mem_index encoding. */
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
}
3200
#ifndef VBOX
static inline void gen_stq_env_A0(int idx, int offset)
#else /* VBOX */
DECLINLINE(void) gen_stq_env_A0(int idx, int offset)
#endif /* VBOX */
{
    /* Store 64 bits from env field 'offset' to guest address A0. */
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
}
3211
#ifndef VBOX
static inline void gen_ldo_env_A0(int idx, int offset)
#else /* VBOX */
DECLINLINE(void) gen_ldo_env_A0(int idx, int offset)
#endif /* VBOX */
{
    /* Load a 128-bit XMM value from guest address A0 into the env
       XMMReg at 'offset', as two 64-bit accesses. */
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
}
3225
#ifndef VBOX
static inline void gen_sto_env_A0(int idx, int offset)
#else /* VBOX */
DECLINLINE(void) gen_sto_env_A0(int idx, int offset)
#endif /* VBOX */
{
    /* Store the 128-bit XMM value at env offset 'offset' to guest
       address A0, as two 64-bit accesses. */
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
}
3239
#ifndef VBOX
static inline void gen_op_movo(int d_offset, int s_offset)
#else /* VBOX */
DECLINLINE(void) gen_op_movo(int d_offset, int s_offset)
#endif /* VBOX */
{
    /* Copy a 128-bit value between env offsets (two 64-bit moves). */
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
}
3251
#ifndef VBOX
static inline void gen_op_movq(int d_offset, int s_offset)
#else /* VBOX */
DECLINLINE(void) gen_op_movq(int d_offset, int s_offset)
#endif /* VBOX */
{
    /* Copy a 64-bit value between env offsets. */
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}
3261
#ifndef VBOX
static inline void gen_op_movl(int d_offset, int s_offset)
#else /* VBOX */
DECLINLINE(void) gen_op_movl(int d_offset, int s_offset)
#endif /* VBOX */
{
    /* Copy a 32-bit value between env offsets. */
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
    tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
}
3271
#ifndef VBOX
static inline void gen_op_movq_env_0(int d_offset)
#else /* VBOX */
DECLINLINE(void) gen_op_movq_env_0(int d_offset)
#endif /* VBOX */
{
    /* Zero a 64-bit env field. */
    tcg_gen_movi_i64(cpu_tmp1_i64, 0);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}
3281
/* Markers for the SSE dispatch tables below: SSE_SPECIAL entries are
   decoded by hand in gen_sse(); SSE_DUMMY entries only make the opcode
   valid (femms / 3DNow! prefix / emms). */
#define SSE_SPECIAL ((void *)1)
#define SSE_DUMMY ((void *)2)

/* helper pair (MMX, XMM) and helper quad (ps, pd, ss, sd) builders */
#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
                     helper_ ## x ## ss, helper_ ## x ## sd, }
3288
/* Dispatch table for two-byte 0x0f SIMD opcodes, indexed by the second
   opcode byte and the mandatory prefix (0: none, 1: 66, 2: F3, 3: F2).
   NULL entries are illegal; SSE_SPECIAL/SSE_DUMMY are handled
   explicitly in gen_sse(). */
static void *sse_op_table1[256][4] = {
    /* 3DNow! extensions */
    [0x0e] = { SSE_DUMMY }, /* femms */
    [0x0f] = { SSE_DUMMY }, /* pf... */
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
    [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
    [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
    [0x2e] = { helper_ucomiss, helper_ucomisd },
    [0x2f] = { helper_comiss, helper_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
    [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
    [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
    [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
    [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
               helper_cvtss2sd, helper_cvtsd2ss },
    [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq),
    [0xc6] = { helper_shufps, helper_shufpd },

    [0x38] = { SSE_SPECIAL, SSE_SPECIAL, NULL, SSE_SPECIAL }, /* SSSE3/SSE4 */
    [0x3a] = { SSE_SPECIAL, SSE_SPECIAL }, /* SSSE3/SSE4 */

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, helper_punpcklqdq_xmm },
    [0x6d] = { NULL, helper_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movdqu */
    [0x70] = { helper_pshufw_mmx,
               helper_pshufd_xmm,
               helper_pshufhw_xmm,
               helper_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_DUMMY }, /* emms */
    [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
    [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL }, /* movntq, movntdq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
3417
/* Immediate shift group (opcodes 0x71-0x73): indexed by
   8 * (opcode - 0x71) + modrm reg field; column selects MMX vs XMM. */
static void *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, helper_psrldq_xmm },  /* 128-bit only */
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, helper_pslldq_xmm },  /* 128-bit only */
};
3430
/* Scalar int<->float conversion helpers, in groups of four
   (ss, sd, and their 64-bit source/destination variants, which only
   exist on x86_64 targets). */
static void *sse_op_table3[4 * 3] = {
    helper_cvtsi2ss,
    helper_cvtsi2sd,
    X86_64_ONLY(helper_cvtsq2ss),
    X86_64_ONLY(helper_cvtsq2sd),

    helper_cvttss2si,
    helper_cvttsd2si,
    X86_64_ONLY(helper_cvttss2sq),
    X86_64_ONLY(helper_cvttsd2sq),

    helper_cvtss2si,
    helper_cvtsd2si,
    X86_64_ONLY(helper_cvtss2sq),
    X86_64_ONLY(helper_cvtsd2sq),
};
3447
/* CMPPS/CMPPD/CMPSS/CMPSD helpers indexed by the 3-bit compare
   predicate immediate (0..7). */
static void *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
3458
/* 3DNow! operations, indexed by the opcode suffix byte. */
static void *sse_op_table5[256] = {
    [0x0c] = helper_pi2fw,
    [0x0d] = helper_pi2fd,
    [0x1c] = helper_pf2iw,
    [0x1d] = helper_pf2id,
    [0x8a] = helper_pfnacc,
    [0x8e] = helper_pfpnacc,
    [0x90] = helper_pfcmpge,
    [0x94] = helper_pfmin,
    [0x96] = helper_pfrcp,
    [0x97] = helper_pfrsqrt,
    [0x9a] = helper_pfsub,
    [0x9e] = helper_pfadd,
    [0xa0] = helper_pfcmpgt,
    [0xa4] = helper_pfmax,
    [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
    [0xa7] = helper_movq, /* pfrsqit1 */
    [0xaa] = helper_pfsubr,
    [0xae] = helper_pfacc,
    [0xb0] = helper_pfcmpeq,
    [0xb4] = helper_pfmul,
    [0xb6] = helper_movq, /* pfrcpit2 */
    [0xb7] = helper_pmulhrw_mmx,
    [0xbb] = helper_pswapd,
    [0xbf] = helper_pavgb_mmx /* pavgusb */
};
3485
/* Entry for the SSSE3/SSE4 tables: (MMX, XMM) helper pair plus the
   CPUID extension bit required for the opcode to be legal. */
struct sse_op_helper_s {
    void *op[2]; uint32_t ext_mask;
};
#define SSSE3_OP(x) { MMX_OP2(x), CPUID_EXT_SSSE3 }
#define SSE41_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE41 }
#define SSE42_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE42 }
#define SSE41_SPECIAL { { NULL, SSE_SPECIAL }, CPUID_EXT_SSE41 }
/* Three-byte opcodes 0F 38 xx (SSSE3 / SSE4.1 / SSE4.2), indexed by
   the third opcode byte. */
static struct sse_op_helper_s sse_op_table6[256] = {
    [0x00] = SSSE3_OP(pshufb),
    [0x01] = SSSE3_OP(phaddw),
    [0x02] = SSSE3_OP(phaddd),
    [0x03] = SSSE3_OP(phaddsw),
    [0x04] = SSSE3_OP(pmaddubsw),
    [0x05] = SSSE3_OP(phsubw),
    [0x06] = SSSE3_OP(phsubd),
    [0x07] = SSSE3_OP(phsubsw),
    [0x08] = SSSE3_OP(psignb),
    [0x09] = SSSE3_OP(psignw),
    [0x0a] = SSSE3_OP(psignd),
    [0x0b] = SSSE3_OP(pmulhrsw),
    [0x10] = SSE41_OP(pblendvb),
    [0x14] = SSE41_OP(blendvps),
    [0x15] = SSE41_OP(blendvpd),
    [0x17] = SSE41_OP(ptest),
    [0x1c] = SSSE3_OP(pabsb),
    [0x1d] = SSSE3_OP(pabsw),
    [0x1e] = SSSE3_OP(pabsd),
    [0x20] = SSE41_OP(pmovsxbw),
    [0x21] = SSE41_OP(pmovsxbd),
    [0x22] = SSE41_OP(pmovsxbq),
    [0x23] = SSE41_OP(pmovsxwd),
    [0x24] = SSE41_OP(pmovsxwq),
    [0x25] = SSE41_OP(pmovsxdq),
    [0x28] = SSE41_OP(pmuldq),
    [0x29] = SSE41_OP(pcmpeqq),
    [0x2a] = SSE41_SPECIAL, /* movntqda */
    [0x2b] = SSE41_OP(packusdw),
    [0x30] = SSE41_OP(pmovzxbw),
    [0x31] = SSE41_OP(pmovzxbd),
    [0x32] = SSE41_OP(pmovzxbq),
    [0x33] = SSE41_OP(pmovzxwd),
    [0x34] = SSE41_OP(pmovzxwq),
    [0x35] = SSE41_OP(pmovzxdq),
    [0x37] = SSE42_OP(pcmpgtq),
    [0x38] = SSE41_OP(pminsb),
    [0x39] = SSE41_OP(pminsd),
    [0x3a] = SSE41_OP(pminuw),
    [0x3b] = SSE41_OP(pminud),
    [0x3c] = SSE41_OP(pmaxsb),
    [0x3d] = SSE41_OP(pmaxsd),
    [0x3e] = SSE41_OP(pmaxuw),
    [0x3f] = SSE41_OP(pmaxud),
    [0x40] = SSE41_OP(pmulld),
    [0x41] = SSE41_OP(phminposuw),
};
3541
/* 0F 3A xx three-byte opcodes (all take an immediate byte), indexed by
   the third opcode byte: SSE4.1 round/blend/dp ops, SSSE3 palignr, the
   insert/extract group (SSE41_SPECIAL, decoded by hand in gen_sse), and
   the SSE4.2 string-compare family.  Unlisted entries decode as illegal. */
static struct sse_op_helper_s sse_op_table7[256] = {
    [0x08] = SSE41_OP(roundps),
    [0x09] = SSE41_OP(roundpd),
    [0x0a] = SSE41_OP(roundss),
    [0x0b] = SSE41_OP(roundsd),
    [0x0c] = SSE41_OP(blendps),
    [0x0d] = SSE41_OP(blendpd),
    [0x0e] = SSE41_OP(pblendw),
    [0x0f] = SSSE3_OP(palignr),
    [0x14] = SSE41_SPECIAL, /* pextrb */
    [0x15] = SSE41_SPECIAL, /* pextrw */
    [0x16] = SSE41_SPECIAL, /* pextrd/pextrq */
    [0x17] = SSE41_SPECIAL, /* extractps */
    [0x20] = SSE41_SPECIAL, /* pinsrb */
    [0x21] = SSE41_SPECIAL, /* insertps */
    [0x22] = SSE41_SPECIAL, /* pinsrd/pinsrq */
    [0x40] = SSE41_OP(dpps),
    [0x41] = SSE41_OP(dppd),
    [0x42] = SSE41_OP(mpsadbw),
    [0x60] = SSE42_OP(pcmpestrm),
    [0x61] = SSE42_OP(pcmpestri),
    [0x62] = SSE42_OP(pcmpistrm),
    [0x63] = SSE42_OP(pcmpistri),
};
3566
/* Translate one MMX/SSE/SSE2/SSE3/SSSE3/SSE4/3DNow! instruction.
 *
 * @s        translation context (prefixes, flags, pc, cpuid features)
 * @b        second opcode byte (after the 0F escape); for 0F 38/0F 3A
 *           groups the third opcode byte is fetched here from the
 *           instruction stream
 * @pc_start address of the instruction, used for exception reporting
 * @rex_r    REX.R contribution (already shifted) for the modrm reg field
 *
 * Decoding is table driven: sse_op_table1[b][b1] yields either a helper
 * to call for the "generic" reg,reg/mem forms, or the SSE_SPECIAL marker
 * for instructions that need hand-written decode (moves, converts,
 * insert/extract, three-byte opcode groups, ...).  Raises #NM when CR0.TS
 * is set and #UD for illegal encodings / missing CPUID features.
 */
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
{
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
    int modrm, mod, rm, reg, reg_addr, offset_addr;
    void *sse_op2;

    b &= 0xff;
    /* Map the mandatory prefix to the table column:
       0 = none, 1 = 0x66, 2 = 0xF3, 3 = 0xF2. */
    if (s->prefix & PREFIX_DATA)
        b1 = 1;
    else if (s->prefix & PREFIX_REPZ)
        b1 = 2;
    else if (s->prefix & PREFIX_REPNZ)
        b1 = 3;
    else
        b1 = 0;
    sse_op2 = sse_op_table1[b][b1];
    if (!sse_op2)
        goto illegal_op;
    /* Opcodes 0x10..0x5f, 0xc2 and 0xc6 operate on XMM state even with
       no prefix; elsewhere only a prefixed form is an XMM operation. */
    if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
        is_xmm = 1;
    } else {
        if (b1 == 0) {
            /* MMX case */
            is_xmm = 0;
        } else {
            is_xmm = 1;
        }
    }
    /* simple MMX/SSE operation */
    if (s->flags & HF_TS_MASK) {
        /* CR0.TS set: device-not-available (#NM) before any decode. */
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        return;
    }
    if (s->flags & HF_EM_MASK) {
    illegal_op:
        /* Shared #UD exit: all "goto illegal_op" paths land here. */
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
        return;
    }
    /* XMM ops need CR4.OSFXSR, except the 0F 38/0F 3A groups without a
       0x66 prefix (their MMX forms). */
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
        if ((b != 0x38 && b != 0x3a) || (s->prefix & PREFIX_DATA))
            goto illegal_op;
    if (b == 0x0e) {
        if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
            goto illegal_op;
        /* femms */
        tcg_gen_helper_0_0(helper_emms);
        return;
    }
    if (b == 0x77) {
        /* emms */
        tcg_gen_helper_0_0(helper_emms);
        return;
    }
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
       the static cpu state) */
    if (!is_xmm) {
        tcg_gen_helper_0_0(helper_enter_mmx);
    }

    modrm = ldub_code(s->pc++);
    reg = ((modrm >> 3) & 7);
    if (is_xmm)
        reg |= rex_r;
    mod = (modrm >> 6) & 3;
    if (sse_op2 == SSE_SPECIAL) {
        /* Fold the prefix into the opcode so one switch can distinguish
           e.g. 0x0e7 (movntq) from 0x1e7 (movntdq). */
        b |= (b1 << 8);
        switch(b) {
        case 0x0e7: /* movntq */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            break;
        case 0x1e7: /* movntdq */
        case 0x02b: /* movntps */
        case 0x12b: /* movntpd */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            break;
        case 0x3f0: /* lddqu */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            break;
        case 0x6e: /* movd mm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                /* REX.W form: movq mm, r/m64 */
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,fpregs[reg].mmx));
                tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
            }
            break;
        case 0x16e: /* movd xmm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg]));
                tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg]));
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
            }
            break;
        case 0x6f: /* movq mm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                /* register source: copy mm register to mm register */
                rm = (modrm & 7);
                tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
                               offsetof(CPUX86State,fpregs[rm].mmx));
                tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
                               offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x010: /* movups */
        case 0x110: /* movupd */
        case 0x028: /* movaps */
        case 0x128: /* movapd */
        case 0x16f: /* movdqa xmm, ea */
        case 0x26f: /* movdqu xmm, ea */
            /* full 128-bit register loads/moves; NOTE(review): the
               aligned forms do not check alignment here */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
                            offsetof(CPUX86State,xmm_regs[rm]));
            }
            break;
        case 0x210: /* movss xmm, ea */
            if (mod != 3) {
                /* memory form zeroes the upper 96 bits */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
            }
            break;
        case 0x310: /* movsd xmm, ea */
            if (mod != 3) {
                /* memory form zeroes the upper 64 bits */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x012: /* movlps */
        case 0x112: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                /* movhlps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x212: /* movsldup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
            }
            /* duplicate even lanes into the odd lanes */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
            break;
        case 0x312: /* movddup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            break;
        case 0x016: /* movhps */
        case 0x116: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                /* movlhps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x216: /* movshdup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
            }
            /* duplicate odd lanes into the even lanes */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            break;
        case 0x7e: /* movd ea, mm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                tcg_gen_ld_i64(cpu_T[0], cpu_env,
                               offsetof(CPUX86State,fpregs[reg].mmx));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x17e: /* movd ea, xmm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                tcg_gen_ld_i64(cpu_T[0], cpu_env,
                               offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x27e: /* movq xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            /* high half is zeroed */
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x7f: /* movq ea, mm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
                            offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x011: /* movups */
        case 0x111: /* movupd */
        case 0x029: /* movaps */
        case 0x129: /* movapd */
        case 0x17f: /* movdqa ea, xmm */
        case 0x27f: /* movdqu ea, xmm */
            /* 128-bit stores (register -> reg/mem) */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
                            offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x211: /* movss ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            }
            break;
        case 0x311: /* movsd ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            }
            break;
        case 0x013: /* movlps */
        case 0x113: /* movlpd */
            /* store forms are memory-only */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x017: /* movhps */
        case 0x117: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x71: /* shift mm, im */
        case 0x72:
        case 0x73:
        case 0x171: /* shift xmm, im */
        case 0x172:
        case 0x173:
            /* immediate shifts: build the count in xmm_t0/mmx_t0 and
               dispatch through sse_op_table2 on (opcode group, /reg). */
            val = ldub_code(s->pc++);
            if (is_xmm) {
                gen_op_movl_T0_im(val);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
                op1_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                gen_op_movl_T0_im(val);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
                op1_offset = offsetof(CPUX86State,mmx_t0);
            }
            /* row = opcode 0x71..0x73 -> 0..2, column = modrm /reg field */
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
            if (!sse_op2)
                goto illegal_op;
            if (is_xmm) {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0x050: /* movmskps */
            rm = (modrm & 7) | REX_B(s);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                             offsetof(CPUX86State,xmm_regs[rm]));
            tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x150: /* movmskpd */
            rm = (modrm & 7) | REX_B(s);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                             offsetof(CPUX86State,xmm_regs[rm]));
            tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x02a: /* cvtpi2ps */
        case 0x12a: /* cvtpi2pd */
            /* MMX source: make sure the FPU/MMX state is entered first */
            tcg_gen_helper_0_0(helper_enter_mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            switch(b >> 8) {
            case 0x0:
                tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
                break;
            default:
            case 0x1:
                tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
                break;
            }
            break;
        case 0x22a: /* cvtsi2ss */
        case 0x32a: /* cvtsi2sd */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            /* table3 rows: [q?][ss/sd] for the int->float converters */
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
            if (ot == OT_LONG) {
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
            } else {
                tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_T[0]);
            }
            break;
        case 0x02c: /* cvttps2pi */
        case 0x12c: /* cvttpd2pi */
        case 0x02d: /* cvtps2pi */
        case 0x12d: /* cvtpd2pi */
            tcg_gen_helper_0_0(helper_enter_mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                gen_ldo_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            switch(b) {
            case 0x02c:
                tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
                break;
            case 0x12c:
                tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
                break;
            case 0x02d:
                tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
                break;
            case 0x12d:
                tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
                break;
            }
            break;
        case 0x22c: /* cvttss2si */
        case 0x32c: /* cvttsd2si */
        case 0x22d: /* cvtss2si */
        case 0x32d: /* cvtsd2si */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if ((b >> 8) & 1) {
                    /* sd form: 64-bit source */
                    gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
                } else {
                    /* ss form: 32-bit source */
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                    tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                }
                op2_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            /* table3 rows 4..11: [truncating?][q?][ss/sd] */
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
                                    (b & 1) * 4];
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
            if (ot == OT_LONG) {
                tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            } else {
                tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
            }
            gen_op_mov_reg_T0(ot, reg);
            break;
        case 0xc4: /* pinsrw */
        case 0x1c4:
            /* rip_offset: an immediate byte follows the modrm bytes, so
               RIP-relative addressing must account for it */
            s->rip_offset = 1;
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
                                offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
            } else {
                val &= 3;
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
                                offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
            }
            break;
        case 0xc5: /* pextrw */
        case 0x1c5:
            if (mod != 3)
                goto illegal_op;
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                rm = (modrm & 7) | REX_B(s);
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
            } else {
                val &= 3;
                rm = (modrm & 7);
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
            }
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0(ot, reg);
            break;
        case 0x1d6: /* movq ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x2d6: /* movq2dq */
            tcg_gen_helper_0_0(helper_enter_mmx);
            rm = (modrm & 7);
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                        offsetof(CPUX86State,fpregs[rm].mmx));
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x3d6: /* movdq2q */
            tcg_gen_helper_0_0(helper_enter_mmx);
            rm = (modrm & 7) | REX_B(s);
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            break;
        case 0xd7: /* pmovmskb */
        case 0x1d7:
            if (mod != 3)
                goto illegal_op;
            if (b1) {
                rm = (modrm & 7) | REX_B(s);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
                tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
            } else {
                rm = (modrm & 7);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
                tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
            }
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x138:
            /* F2-prefixed 0F 38 is crc32; otherwise fall through to the
               common 0F 38 three-byte decode */
            if (s->prefix & PREFIX_REPNZ)
                goto crc32;
            /* fallthrough */
        case 0x038:
            b = modrm;
            /* the byte just consumed as "modrm" was really the third
               opcode byte; fetch the real modrm now */
            modrm = ldub_code(s->pc++);
            rm = modrm & 7;
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;

            sse_op2 = sse_op_table6[b].op[b1];
            if (!sse_op2)
                goto illegal_op;
            if (!(s->cpuid_ext_features & sse_op_table6[b].ext_mask))
                goto illegal_op;

            if (b1) {
                op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
                if (mod == 3) {
                    op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
                } else {
                    op2_offset = offsetof(CPUX86State,xmm_t0);
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    /* pmov*x only read 64/32/16 bits of memory, not a
                       full 128-bit operand */
                    switch (b) {
                    case 0x20: case 0x30: /* pmovsxbw, pmovzxbw */
                    case 0x23: case 0x33: /* pmovsxwd, pmovzxwd */
                    case 0x25: case 0x35: /* pmovsxdq, pmovzxdq */
                        gen_ldq_env_A0(s->mem_index, op2_offset +
                                        offsetof(XMMReg, XMM_Q(0)));
                        break;
                    case 0x21: case 0x31: /* pmovsxbd, pmovzxbd */
                    case 0x24: case 0x34: /* pmovsxwq, pmovzxwq */
                        tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0,
                                           (s->mem_index >> 2) - 1);
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0);
                        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, op2_offset +
                                        offsetof(XMMReg, XMM_L(0)));
                        break;
                    case 0x22: case 0x32: /* pmovsxbq, pmovzxbq */
                        tcg_gen_qemu_ld16u(cpu_tmp0, cpu_A0,
                                           (s->mem_index >> 2) - 1);
                        tcg_gen_st16_tl(cpu_tmp0, cpu_env, op2_offset +
                                        offsetof(XMMReg, XMM_W(0)));
                        break;
                    case 0x2a: /* movntqda */
                        /* plain load into the destination; no helper call */
                        gen_ldo_env_A0(s->mem_index, op1_offset);
                        return;
                    default:
                        gen_ldo_env_A0(s->mem_index, op2_offset);
                    }
                }
            } else {
                op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
                if (mod == 3) {
                    op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
                } else {
                    op2_offset = offsetof(CPUX86State,mmx_t0);
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    gen_ldq_env_A0(s->mem_index, op2_offset);
                }
            }
            if (sse_op2 == SSE_SPECIAL)
                goto illegal_op;

            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);

            /* ptest writes EFLAGS */
            if (b == 0x17)
                s->cc_op = CC_OP_EFLAGS;
            break;
        case 0x338: /* crc32 */
        crc32:
            b = modrm;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;

            if (b != 0xf0 && b != 0xf1)
                goto illegal_op;
            if (!(s->cpuid_ext_features & CPUID_EXT_SSE42))
                goto illegal_op;

            /* operand size of the source: byte for F0, else 16/32/64 */
            if (b == 0xf0)
                ot = OT_BYTE;
            else if (b == 0xf1 && s->dflag != 2)
                if (s->prefix & PREFIX_DATA)
                    ot = OT_WORD;
                else
                    ot = OT_LONG;
            else
                ot = OT_QUAD;

            gen_op_mov_TN_reg(OT_LONG, 0, reg);
            tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            tcg_gen_helper_1_3(helper_crc32, cpu_T[0], cpu_tmp2_i32,
                               cpu_T[0], tcg_const_i32(8 << ot));

            /* destination register is always 32-bit (64-bit with REX.W) */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            gen_op_mov_reg_T0(ot, reg);
            break;
        case 0x03a:
        case 0x13a:
            /* 0F 3A group: third opcode byte + modrm + immediate */
            b = modrm;
            modrm = ldub_code(s->pc++);
            rm = modrm & 7;
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;

            sse_op2 = sse_op_table7[b].op[b1];
            if (!sse_op2)
                goto illegal_op;
            if (!(s->cpuid_ext_features & sse_op_table7[b].ext_mask))
                goto illegal_op;

            if (sse_op2 == SSE_SPECIAL) {
                /* insert/extract group, decoded by hand */
                ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
                rm = (modrm & 7) | REX_B(s);
                if (mod != 3)
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                reg = ((modrm >> 3) & 7) | rex_r;
                val = ldub_code(s->pc++);
                switch (b) {
                case 0x14: /* pextrb */
                    tcg_gen_ld8u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
                                            xmm_regs[reg].XMM_B(val & 15)));
                    if (mod == 3)
                        gen_op_mov_reg_T0(ot, rm);
                    else
                        tcg_gen_qemu_st8(cpu_T[0], cpu_A0,
                                        (s->mem_index >> 2) - 1);
                    break;
                case 0x15: /* pextrw */
                    tcg_gen_ld16u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
                                            xmm_regs[reg].XMM_W(val & 7)));
                    if (mod == 3)
                        gen_op_mov_reg_T0(ot, rm);
                    else
                        tcg_gen_qemu_st16(cpu_T[0], cpu_A0,
                                        (s->mem_index >> 2) - 1);
                    break;
                case 0x16:
                    if (ot == OT_LONG) { /* pextrd */
                        tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
                                        offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_L(val & 3)));
                        if (mod == 3)
                            gen_op_mov_reg_v(ot, rm, cpu_tmp2_i32);
                        else
                            tcg_gen_qemu_st32(cpu_tmp2_i32, cpu_A0,
                                            (s->mem_index >> 2) - 1);
                    } else { /* pextrq */
                        tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
                                        offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_Q(val & 1)));
                        if (mod == 3)
                            gen_op_mov_reg_v(ot, rm, cpu_tmp1_i64);
                        else
                            tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
                                            (s->mem_index >> 2) - 1);
                    }
                    break;
                case 0x17: /* extractps */
                    tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
                                            xmm_regs[reg].XMM_L(val & 3)));
                    if (mod == 3)
                        gen_op_mov_reg_T0(ot, rm);
                    else
                        tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
                                        (s->mem_index >> 2) - 1);
                    break;
                case 0x20: /* pinsrb */
                    if (mod == 3)
                        gen_op_mov_TN_reg(OT_LONG, 0, rm);
                    else
                        tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0,
                                        (s->mem_index >> 2) - 1);
                    tcg_gen_st8_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
                                            xmm_regs[reg].XMM_B(val & 15)));
                    break;
                case 0x21: /* insertps */
                    /* imm8: bits 7:6 select source lane, 5:4 destination
                       lane, 3:0 a zero-mask applied afterwards */
                    if (mod == 3)
                        tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
                                        offsetof(CPUX86State,xmm_regs[rm]
                                                .XMM_L((val >> 6) & 3)));
                    else
                        tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
                                        (s->mem_index >> 2) - 1);
                    tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
                                    offsetof(CPUX86State,xmm_regs[reg]
                                            .XMM_L((val >> 4) & 3)));
                    if ((val >> 0) & 1)
                        tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
                                        cpu_env, offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_L(0)));
                    if ((val >> 1) & 1)
                        tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
                                        cpu_env, offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_L(1)));
                    if ((val >> 2) & 1)
                        tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
                                        cpu_env, offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_L(2)));
                    if ((val >> 3) & 1)
                        tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
                                        cpu_env, offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_L(3)));
                    break;
                case 0x22:
                    if (ot == OT_LONG) { /* pinsrd */
                        if (mod == 3)
                            gen_op_mov_v_reg(ot, cpu_tmp2_i32, rm);
                        else
                            tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
                                            (s->mem_index >> 2) - 1);
                        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
                                        offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_L(val & 3)));
                    } else { /* pinsrq */
                        if (mod == 3)
                            gen_op_mov_v_reg(ot, cpu_tmp1_i64, rm);
                        else
                            tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
                                            (s->mem_index >> 2) - 1);
                        tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
                                        offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_Q(val & 1)));
                    }
                    break;
                }
                return;
            }

            if (b1) {
                op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
                if (mod == 3) {
                    op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
                } else {
                    op2_offset = offsetof(CPUX86State,xmm_t0);
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    gen_ldo_env_A0(s->mem_index, op2_offset);
                }
            } else {
                op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
                if (mod == 3) {
                    op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
                } else {
                    op2_offset = offsetof(CPUX86State,mmx_t0);
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    gen_ldq_env_A0(s->mem_index, op2_offset);
                }
            }
            val = ldub_code(s->pc++);

            if ((b & 0xfc) == 0x60) { /* pcmpXstrX */
                s->cc_op = CC_OP_EFLAGS;

                if (s->dflag == 2)
                    /* The helper must use entire 64-bit gp registers */
                    val |= 1 << 8;
            }

            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
            break;
        default:
            goto illegal_op;
        }
    } else {
        /* generic MMX or SSE operation */
        switch(b) {
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
        case 0xc2: /* compare insns */
            /* these take an immediate byte after the modrm bytes */
            s->rip_offset = 1;
            break;
        default:
            break;
        }
        if (is_xmm) {
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
                                b == 0xc2)) {
                    /* specific case for SSE single instructions */
                    if (b1 == 2) {
                        /* 32 bit access */
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                    } else {
                        /* 64 bit access */
                        gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
                    }
                } else {
                    gen_ldo_env_A0(s->mem_index, op2_offset);
                }
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
        } else {
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
        }
        switch(b) {
        case 0x0f: /* 3DNow! data insns */
            if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
                goto illegal_op;
            /* 3DNow! encodes the operation in a trailing imm8 */
            val = ldub_code(s->pc++);
            sse_op2 = sse_op_table5[val];
            if (!sse_op2)
                goto illegal_op;
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
            val = ldub_code(s->pc++);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
            break;
        case 0xc2:
            /* compare insns */
            val = ldub_code(s->pc++);
            if (val >= 8)
                goto illegal_op;
            sse_op2 = sse_op_table4[val][b1];
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0xf7:
            /* maskmov : we must prepare A0 */
            if (mod != 3)
                goto illegal_op;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(R_EDI);
            } else
#endif
            {
                gen_op_movl_A0_reg(R_EDI);
                if (s->aflag == 0)
                    gen_op_andl_A0_ffff();
            }
            gen_add_A0_ds_seg(s);

            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
            break;
        default:
            /* common (dest, src) helper call */
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        }
        /* ucomis/comis update EFLAGS */
        if (b == 0x2e || b == 0x2f) {
            s->cc_op = CC_OP_EFLAGS;
        }
    }
}
4509
4510#ifdef VBOX
/* Checks whether this is an invalid LOCK prefix sequence, i.e. one that
   must raise #UD. Only a few instructions can be used together with the
   LOCK prefix, and of those only the forms that write a memory operand,
   so decoding them all is somewhat tedious work.
4515 The AMD manual lists the following instructions.
4516 ADC
4517 ADD
4518 AND
4519 BTC
4520 BTR
4521 BTS
4522 CMPXCHG
4523 CMPXCHG8B
4524 CMPXCHG16B
4525 DEC
4526 INC
4527 NEG
4528 NOT
4529 OR
4530 SBB
4531 SUB
4532 XADD
4533 XCHG
4534 XOR */
4535static bool is_invalid_lock_sequence(DisasContext *s, target_ulong pc_start, int b)
4536{
4537 target_ulong pc = s->pc;
4538 int modrm, mod, op;
4539
4540 /* X={8,16,32,64} Y={16,32,64} */
4541 switch (b)
4542 {
4543 /* /2: ADC reg/memX, immX */
4544 /* /0: ADD reg/memX, immX */
4545 /* /4: AND reg/memX, immX */
4546 /* /1: OR reg/memX, immX */
4547 /* /3: SBB reg/memX, immX */
4548 /* /5: SUB reg/memX, immX */
4549 /* /6: XOR reg/memX, immX */
4550 case 0x80:
4551 case 0x81:
4552 case 0x83:
4553 modrm = ldub_code(pc++);
4554 op = (modrm >> 3) & 7;
4555 if (op == 7) /* /7: CMP */
4556 break;
4557 mod = (modrm >> 6) & 3;
4558 if (mod == 3) /* register destination */
4559 break;
4560 return false;
4561
4562 case 0x10: /* /r: ADC reg/mem8, reg8 */
4563 case 0x11: /* /r: ADC reg/memX, regY */
4564 case 0x00: /* /r: ADD reg/mem8, reg8 */
4565 case 0x01: /* /r: ADD reg/memX, regY */
4566 case 0x20: /* /r: AND reg/mem8, reg8 */
4567 case 0x21: /* /r: AND reg/memY, regY */
4568 case 0x08: /* /r: OR reg/mem8, reg8 */
4569 case 0x09: /* /r: OR reg/memY, regY */
4570 case 0x18: /* /r: SBB reg/mem8, reg8 */
4571 case 0x19: /* /r: SBB reg/memY, regY */
4572 case 0x28: /* /r: SUB reg/mem8, reg8 */
4573 case 0x29: /* /r: SUB reg/memY, regY */
4574 case 0x86: /* /r: XCHG reg/mem8, reg8 or XCHG reg8, reg/mem8 */
4575 case 0x87: /* /r: XCHG reg/memY, regY or XCHG regY, reg/memY */
4576 case 0x30: /* /r: XOR reg/mem8, reg8 */
4577 case 0x31: /* /r: XOR reg/memY, regY */
4578 modrm = ldub_code(pc++);
4579 mod = (modrm >> 6) & 3;
4580 if (mod == 3) /* register destination */
4581 break;
4582 return false;
4583
4584 /* /1: DEC reg/memX */
4585 /* /0: INC reg/memX */
4586 case 0xfe:
4587 case 0xff:
4588 modrm = ldub_code(pc++);
4589 mod = (modrm >> 6) & 3;
4590 if (mod == 3) /* register destination */
4591 break;
4592 return false;
4593
4594 /* /3: NEG reg/memX */
4595 /* /2: NOT reg/memX */
4596 case 0xf6:
4597 case 0xf7:
4598 modrm = ldub_code(pc++);
4599 mod = (modrm >> 6) & 3;
4600 if (mod == 3) /* register destination */
4601 break;
4602 return false;
4603
4604 case 0x0f:
4605 b = ldub_code(pc++);
4606 switch (b)
4607 {
4608 /* /7: BTC reg/memY, imm8 */
4609 /* /6: BTR reg/memY, imm8 */
4610 /* /5: BTS reg/memY, imm8 */
4611 case 0xba:
4612 modrm = ldub_code(pc++);
4613 op = (modrm >> 3) & 7;
4614 if (op < 5)
4615 break;
4616 mod = (modrm >> 6) & 3;
4617 if (mod == 3) /* register destination */
4618 break;
4619 return false;
4620
4621 case 0xbb: /* /r: BTC reg/memY, regY */
4622 case 0xb3: /* /r: BTR reg/memY, regY */
4623 case 0xab: /* /r: BTS reg/memY, regY */
4624 case 0xb0: /* /r: CMPXCHG reg/mem8, reg8 */
4625 case 0xb1: /* /r: CMPXCHG reg/memY, regY */
4626 case 0xc0: /* /r: XADD reg/mem8, reg8 */
4627 case 0xc1: /* /r: XADD reg/memY, regY */
4628 modrm = ldub_code(pc++);
4629 mod = (modrm >> 6) & 3;
4630 if (mod == 3) /* register destination */
4631 break;
4632 return false;
4633
4634 /* /1: CMPXCHG8B mem64 or CMPXCHG16B mem128 */
4635 case 0xc7:
4636 modrm = ldub_code(pc++);
4637 op = (modrm >> 3) & 7;
4638 if (op != 1)
4639 break;
4640 return false;
4641 }
4642 break;
4643 }
4644
4645 /* illegal sequence. The s->pc is past the lock prefix and that
4646 is sufficient for the TB, I think. */
4647 Log(("illegal lock sequence %RGv (b=%#x)\n", pc_start, b));
4648 return true;
4649}
4650#endif /* VBOX */
4651
4652
4653/* convert one instruction. s->is_jmp is set if the translation must
4654 be stopped. Return the next pc value */
4655static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
4656{
4657 int b, prefixes, aflag, dflag;
4658 int shift, ot;
4659 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
4660 target_ulong next_eip, tval;
4661 int rex_w, rex_r;
4662
4663 if (unlikely(loglevel & CPU_LOG_TB_OP))
4664 tcg_gen_debug_insn_start(pc_start);
4665
4666 s->pc = pc_start;
4667 prefixes = 0;
4668 aflag = s->code32;
4669 dflag = s->code32;
4670 s->override = -1;
4671 rex_w = -1;
4672 rex_r = 0;
4673#ifdef TARGET_X86_64
4674 s->rex_x = 0;
4675 s->rex_b = 0;
4676 x86_64_hregs = 0;
4677#endif
4678 s->rip_offset = 0; /* for relative ip address */
4679#ifdef VBOX
4680 /* nike: seems only slow down things */
4681# if 0
4682 /* Always update EIP. Otherwise one must be very careful with generated code that can raise exceptions. */
4683
4684 gen_update_eip(pc_start - s->cs_base);
4685# endif
4686#endif
4687
4688 next_byte:
4689 b = ldub_code(s->pc);
4690 s->pc++;
4691 /* check prefixes */
4692#ifdef TARGET_X86_64
4693 if (CODE64(s)) {
4694 switch (b) {
4695 case 0xf3:
4696 prefixes |= PREFIX_REPZ;
4697 goto next_byte;
4698 case 0xf2:
4699 prefixes |= PREFIX_REPNZ;
4700 goto next_byte;
4701 case 0xf0:
4702 prefixes |= PREFIX_LOCK;
4703 goto next_byte;
4704 case 0x2e:
4705 s->override = R_CS;
4706 goto next_byte;
4707 case 0x36:
4708 s->override = R_SS;
4709 goto next_byte;
4710 case 0x3e:
4711 s->override = R_DS;
4712 goto next_byte;
4713 case 0x26:
4714 s->override = R_ES;
4715 goto next_byte;
4716 case 0x64:
4717 s->override = R_FS;
4718 goto next_byte;
4719 case 0x65:
4720 s->override = R_GS;
4721 goto next_byte;
4722 case 0x66:
4723 prefixes |= PREFIX_DATA;
4724 goto next_byte;
4725 case 0x67:
4726 prefixes |= PREFIX_ADR;
4727 goto next_byte;
4728 case 0x40 ... 0x4f:
4729 /* REX prefix */
4730 rex_w = (b >> 3) & 1;
4731 rex_r = (b & 0x4) << 1;
4732 s->rex_x = (b & 0x2) << 2;
4733 REX_B(s) = (b & 0x1) << 3;
4734 x86_64_hregs = 1; /* select uniform byte register addressing */
4735 goto next_byte;
4736 }
4737 if (rex_w == 1) {
4738 /* 0x66 is ignored if rex.w is set */
4739 dflag = 2;
4740 } else {
4741 if (prefixes & PREFIX_DATA)
4742 dflag ^= 1;
4743 }
4744 if (!(prefixes & PREFIX_ADR))
4745 aflag = 2;
4746 } else
4747#endif
4748 {
4749 switch (b) {
4750 case 0xf3:
4751 prefixes |= PREFIX_REPZ;
4752 goto next_byte;
4753 case 0xf2:
4754 prefixes |= PREFIX_REPNZ;
4755 goto next_byte;
4756 case 0xf0:
4757 prefixes |= PREFIX_LOCK;
4758 goto next_byte;
4759 case 0x2e:
4760 s->override = R_CS;
4761 goto next_byte;
4762 case 0x36:
4763 s->override = R_SS;
4764 goto next_byte;
4765 case 0x3e:
4766 s->override = R_DS;
4767 goto next_byte;
4768 case 0x26:
4769 s->override = R_ES;
4770 goto next_byte;
4771 case 0x64:
4772 s->override = R_FS;
4773 goto next_byte;
4774 case 0x65:
4775 s->override = R_GS;
4776 goto next_byte;
4777 case 0x66:
4778 prefixes |= PREFIX_DATA;
4779 goto next_byte;
4780 case 0x67:
4781 prefixes |= PREFIX_ADR;
4782 goto next_byte;
4783 }
4784 if (prefixes & PREFIX_DATA)
4785 dflag ^= 1;
4786 if (prefixes & PREFIX_ADR)
4787 aflag ^= 1;
4788 }
4789
4790 s->prefix = prefixes;
4791 s->aflag = aflag;
4792 s->dflag = dflag;
4793
4794 /* lock generation */
4795#ifndef VBOX
4796 if (prefixes & PREFIX_LOCK)
4797 tcg_gen_helper_0_0(helper_lock);
4798#else /* VBOX */
4799 if (prefixes & PREFIX_LOCK) {
4800 if (is_invalid_lock_sequence(s, pc_start, b)) {
4801 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
4802 return s->pc;
4803 }
4804 tcg_gen_helper_0_0(helper_lock);
4805 }
4806#endif /* VBOX */
4807
4808 /* now check op code */
4809 reswitch:
4810 switch(b) {
4811 case 0x0f:
4812 /**************************/
4813 /* extended op code */
4814 b = ldub_code(s->pc++) | 0x100;
4815 goto reswitch;
4816
4817 /**************************/
4818 /* arith & logic */
4819 case 0x00 ... 0x05:
4820 case 0x08 ... 0x0d:
4821 case 0x10 ... 0x15:
4822 case 0x18 ... 0x1d:
4823 case 0x20 ... 0x25:
4824 case 0x28 ... 0x2d:
4825 case 0x30 ... 0x35:
4826 case 0x38 ... 0x3d:
4827 {
4828 int op, f, val;
4829 op = (b >> 3) & 7;
4830 f = (b >> 1) & 3;
4831
4832 if ((b & 1) == 0)
4833 ot = OT_BYTE;
4834 else
4835 ot = dflag + OT_WORD;
4836
4837 switch(f) {
4838 case 0: /* OP Ev, Gv */
4839 modrm = ldub_code(s->pc++);
4840 reg = ((modrm >> 3) & 7) | rex_r;
4841 mod = (modrm >> 6) & 3;
4842 rm = (modrm & 7) | REX_B(s);
4843 if (mod != 3) {
4844 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4845 opreg = OR_TMP0;
4846 } else if (op == OP_XORL && rm == reg) {
4847 xor_zero:
4848 /* xor reg, reg optimisation */
4849 gen_op_movl_T0_0();
4850 s->cc_op = CC_OP_LOGICB + ot;
4851 gen_op_mov_reg_T0(ot, reg);
4852 gen_op_update1_cc();
4853 break;
4854 } else {
4855 opreg = rm;
4856 }
4857 gen_op_mov_TN_reg(ot, 1, reg);
4858 gen_op(s, op, ot, opreg);
4859 break;
4860 case 1: /* OP Gv, Ev */
4861 modrm = ldub_code(s->pc++);
4862 mod = (modrm >> 6) & 3;
4863 reg = ((modrm >> 3) & 7) | rex_r;
4864 rm = (modrm & 7) | REX_B(s);
4865 if (mod != 3) {
4866 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4867 gen_op_ld_T1_A0(ot + s->mem_index);
4868 } else if (op == OP_XORL && rm == reg) {
4869 goto xor_zero;
4870 } else {
4871 gen_op_mov_TN_reg(ot, 1, rm);
4872 }
4873 gen_op(s, op, ot, reg);
4874 break;
4875 case 2: /* OP A, Iv */
4876 val = insn_get(s, ot);
4877 gen_op_movl_T1_im(val);
4878 gen_op(s, op, ot, OR_EAX);
4879 break;
4880 }
4881 }
4882 break;
4883
4884 case 0x82:
4885 if (CODE64(s))
4886 goto illegal_op;
4887 case 0x80: /* GRP1 */
4888 case 0x81:
4889 case 0x83:
4890 {
4891 int val;
4892
4893 if ((b & 1) == 0)
4894 ot = OT_BYTE;
4895 else
4896 ot = dflag + OT_WORD;
4897
4898 modrm = ldub_code(s->pc++);
4899 mod = (modrm >> 6) & 3;
4900 rm = (modrm & 7) | REX_B(s);
4901 op = (modrm >> 3) & 7;
4902
4903 if (mod != 3) {
4904 if (b == 0x83)
4905 s->rip_offset = 1;
4906 else
4907 s->rip_offset = insn_const_size(ot);
4908 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4909 opreg = OR_TMP0;
4910 } else {
4911 opreg = rm;
4912 }
4913
4914 switch(b) {
4915 default:
4916 case 0x80:
4917 case 0x81:
4918 case 0x82:
4919 val = insn_get(s, ot);
4920 break;
4921 case 0x83:
4922 val = (int8_t)insn_get(s, OT_BYTE);
4923 break;
4924 }
4925 gen_op_movl_T1_im(val);
4926 gen_op(s, op, ot, opreg);
4927 }
4928 break;
4929
4930 /**************************/
4931 /* inc, dec, and other misc arith */
4932 case 0x40 ... 0x47: /* inc Gv */
4933 ot = dflag ? OT_LONG : OT_WORD;
4934 gen_inc(s, ot, OR_EAX + (b & 7), 1);
4935 break;
4936 case 0x48 ... 0x4f: /* dec Gv */
4937 ot = dflag ? OT_LONG : OT_WORD;
4938 gen_inc(s, ot, OR_EAX + (b & 7), -1);
4939 break;
4940 case 0xf6: /* GRP3 */
4941 case 0xf7:
4942 if ((b & 1) == 0)
4943 ot = OT_BYTE;
4944 else
4945 ot = dflag + OT_WORD;
4946
4947 modrm = ldub_code(s->pc++);
4948 mod = (modrm >> 6) & 3;
4949 rm = (modrm & 7) | REX_B(s);
4950 op = (modrm >> 3) & 7;
4951 if (mod != 3) {
4952 if (op == 0)
4953 s->rip_offset = insn_const_size(ot);
4954 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4955 gen_op_ld_T0_A0(ot + s->mem_index);
4956 } else {
4957 gen_op_mov_TN_reg(ot, 0, rm);
4958 }
4959
4960 switch(op) {
4961 case 0: /* test */
4962 val = insn_get(s, ot);
4963 gen_op_movl_T1_im(val);
4964 gen_op_testl_T0_T1_cc();
4965 s->cc_op = CC_OP_LOGICB + ot;
4966 break;
4967 case 2: /* not */
4968 tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
4969 if (mod != 3) {
4970 gen_op_st_T0_A0(ot + s->mem_index);
4971 } else {
4972 gen_op_mov_reg_T0(ot, rm);
4973 }
4974 break;
4975 case 3: /* neg */
4976 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
4977 if (mod != 3) {
4978 gen_op_st_T0_A0(ot + s->mem_index);
4979 } else {
4980 gen_op_mov_reg_T0(ot, rm);
4981 }
4982 gen_op_update_neg_cc();
4983 s->cc_op = CC_OP_SUBB + ot;
4984 break;
4985 case 4: /* mul */
4986 switch(ot) {
4987 case OT_BYTE:
4988 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
4989 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4990 tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
4991 /* XXX: use 32 bit mul which could be faster */
4992 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4993 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4994 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4995 tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
4996 s->cc_op = CC_OP_MULB;
4997 break;
4998 case OT_WORD:
4999 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
5000 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
5001 tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
5002 /* XXX: use 32 bit mul which could be faster */
5003 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5004 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5005 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5006 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
5007 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5008 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
5009 s->cc_op = CC_OP_MULW;
5010 break;
5011 default:
5012 case OT_LONG:
5013#ifdef TARGET_X86_64
5014 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5015 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
5016 tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
5017 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5018 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5019 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5020 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
5021 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5022 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
5023#else
5024 {
5025 TCGv t0, t1;
5026 t0 = tcg_temp_new(TCG_TYPE_I64);
5027 t1 = tcg_temp_new(TCG_TYPE_I64);
5028 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5029 tcg_gen_extu_i32_i64(t0, cpu_T[0]);
5030 tcg_gen_extu_i32_i64(t1, cpu_T[1]);
5031 tcg_gen_mul_i64(t0, t0, t1);
5032 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5033 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5034 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5035 tcg_gen_shri_i64(t0, t0, 32);
5036 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5037 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5038 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
5039 }
5040#endif
5041 s->cc_op = CC_OP_MULL;
5042 break;
5043#ifdef TARGET_X86_64
5044 case OT_QUAD:
5045 tcg_gen_helper_0_1(helper_mulq_EAX_T0, cpu_T[0]);
5046 s->cc_op = CC_OP_MULQ;
5047 break;
5048#endif
5049 }
5050 break;
5051 case 5: /* imul */
5052 switch(ot) {
5053 case OT_BYTE:
5054 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
5055 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5056 tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
5057 /* XXX: use 32 bit mul which could be faster */
5058 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5059 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5060 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5061 tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
5062 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5063 s->cc_op = CC_OP_MULB;
5064 break;
5065 case OT_WORD:
5066 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
5067 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5068 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5069 /* XXX: use 32 bit mul which could be faster */
5070 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5071 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5072 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5073 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5074 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5075 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
5076 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5077 s->cc_op = CC_OP_MULW;
5078 break;
5079 default:
5080 case OT_LONG:
5081#ifdef TARGET_X86_64
5082 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5083 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5084 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
5085 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5086 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5087 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5088 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
5089 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5090 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
5091 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5092#else
5093 {
5094 TCGv t0, t1;
5095 t0 = tcg_temp_new(TCG_TYPE_I64);
5096 t1 = tcg_temp_new(TCG_TYPE_I64);
5097 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5098 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
5099 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
5100 tcg_gen_mul_i64(t0, t0, t1);
5101 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5102 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5103 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5104 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
5105 tcg_gen_shri_i64(t0, t0, 32);
5106 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5107 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5108 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5109 }
5110#endif
5111 s->cc_op = CC_OP_MULL;
5112 break;
5113#ifdef TARGET_X86_64
5114 case OT_QUAD:
5115 tcg_gen_helper_0_1(helper_imulq_EAX_T0, cpu_T[0]);
5116 s->cc_op = CC_OP_MULQ;
5117 break;
5118#endif
5119 }
5120 break;
5121 case 6: /* div */
5122 switch(ot) {
5123 case OT_BYTE:
5124 gen_jmp_im(pc_start - s->cs_base);
5125 tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
5126 break;
5127 case OT_WORD:
5128 gen_jmp_im(pc_start - s->cs_base);
5129 tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
5130 break;
5131 default:
5132 case OT_LONG:
5133 gen_jmp_im(pc_start - s->cs_base);
5134 tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
5135 break;
5136#ifdef TARGET_X86_64
5137 case OT_QUAD:
5138 gen_jmp_im(pc_start - s->cs_base);
5139 tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
5140 break;
5141#endif
5142 }
5143 break;
5144 case 7: /* idiv */
5145 switch(ot) {
5146 case OT_BYTE:
5147 gen_jmp_im(pc_start - s->cs_base);
5148 tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
5149 break;
5150 case OT_WORD:
5151 gen_jmp_im(pc_start - s->cs_base);
5152 tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
5153 break;
5154 default:
5155 case OT_LONG:
5156 gen_jmp_im(pc_start - s->cs_base);
5157 tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
5158 break;
5159#ifdef TARGET_X86_64
5160 case OT_QUAD:
5161 gen_jmp_im(pc_start - s->cs_base);
5162 tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
5163 break;
5164#endif
5165 }
5166 break;
5167 default:
5168 goto illegal_op;
5169 }
5170 break;
5171
5172 case 0xfe: /* GRP4 */
5173 case 0xff: /* GRP5 */
5174 if ((b & 1) == 0)
5175 ot = OT_BYTE;
5176 else
5177 ot = dflag + OT_WORD;
5178
5179 modrm = ldub_code(s->pc++);
5180 mod = (modrm >> 6) & 3;
5181 rm = (modrm & 7) | REX_B(s);
5182 op = (modrm >> 3) & 7;
5183 if (op >= 2 && b == 0xfe) {
5184 goto illegal_op;
5185 }
5186 if (CODE64(s)) {
5187 if (op == 2 || op == 4) {
5188 /* operand size for jumps is 64 bit */
5189 ot = OT_QUAD;
5190 } else if (op == 3 || op == 5) {
5191 /* for call calls, the operand is 16 or 32 bit, even
5192 in long mode */
5193 ot = dflag ? OT_LONG : OT_WORD;
5194 } else if (op == 6) {
5195 /* default push size is 64 bit */
5196 ot = dflag ? OT_QUAD : OT_WORD;
5197 }
5198 }
5199 if (mod != 3) {
5200 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5201 if (op >= 2 && op != 3 && op != 5)
5202 gen_op_ld_T0_A0(ot + s->mem_index);
5203 } else {
5204 gen_op_mov_TN_reg(ot, 0, rm);
5205 }
5206
5207 switch(op) {
5208 case 0: /* inc Ev */
5209 if (mod != 3)
5210 opreg = OR_TMP0;
5211 else
5212 opreg = rm;
5213 gen_inc(s, ot, opreg, 1);
5214 break;
5215 case 1: /* dec Ev */
5216 if (mod != 3)
5217 opreg = OR_TMP0;
5218 else
5219 opreg = rm;
5220 gen_inc(s, ot, opreg, -1);
5221 break;
5222 case 2: /* call Ev */
5223 /* XXX: optimize if memory (no 'and' is necessary) */
5224#ifdef VBOX_WITH_CALL_RECORD
5225 if (s->record_call)
5226 gen_op_record_call();
5227#endif
5228 if (s->dflag == 0)
5229 gen_op_andl_T0_ffff();
5230 next_eip = s->pc - s->cs_base;
5231 gen_movtl_T1_im(next_eip);
5232 gen_push_T1(s);
5233 gen_op_jmp_T0();
5234 gen_eob(s);
5235 break;
5236 case 3: /* lcall Ev */
5237 gen_op_ld_T1_A0(ot + s->mem_index);
5238 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5239 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5240 do_lcall:
5241 if (s->pe && !s->vm86) {
5242 if (s->cc_op != CC_OP_DYNAMIC)
5243 gen_op_set_cc_op(s->cc_op);
5244 gen_jmp_im(pc_start - s->cs_base);
5245 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5246 tcg_gen_helper_0_4(helper_lcall_protected,
5247 cpu_tmp2_i32, cpu_T[1],
5248 tcg_const_i32(dflag),
5249 tcg_const_i32(s->pc - pc_start));
5250 } else {
5251 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5252 tcg_gen_helper_0_4(helper_lcall_real,
5253 cpu_tmp2_i32, cpu_T[1],
5254 tcg_const_i32(dflag),
5255 tcg_const_i32(s->pc - s->cs_base));
5256 }
5257 gen_eob(s);
5258 break;
5259 case 4: /* jmp Ev */
5260 if (s->dflag == 0)
5261 gen_op_andl_T0_ffff();
5262 gen_op_jmp_T0();
5263 gen_eob(s);
5264 break;
5265 case 5: /* ljmp Ev */
5266 gen_op_ld_T1_A0(ot + s->mem_index);
5267 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5268 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5269 do_ljmp:
5270 if (s->pe && !s->vm86) {
5271 if (s->cc_op != CC_OP_DYNAMIC)
5272 gen_op_set_cc_op(s->cc_op);
5273 gen_jmp_im(pc_start - s->cs_base);
5274 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5275 tcg_gen_helper_0_3(helper_ljmp_protected,
5276 cpu_tmp2_i32,
5277 cpu_T[1],
5278 tcg_const_i32(s->pc - pc_start));
5279 } else {
5280 gen_op_movl_seg_T0_vm(R_CS);
5281 gen_op_movl_T0_T1();
5282 gen_op_jmp_T0();
5283 }
5284 gen_eob(s);
5285 break;
5286 case 6: /* push Ev */
5287 gen_push_T0(s);
5288 break;
5289 default:
5290 goto illegal_op;
5291 }
5292 break;
5293
5294 case 0x84: /* test Ev, Gv */
5295 case 0x85:
5296 if ((b & 1) == 0)
5297 ot = OT_BYTE;
5298 else
5299 ot = dflag + OT_WORD;
5300
5301 modrm = ldub_code(s->pc++);
5302 mod = (modrm >> 6) & 3;
5303 rm = (modrm & 7) | REX_B(s);
5304 reg = ((modrm >> 3) & 7) | rex_r;
5305
5306 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5307 gen_op_mov_TN_reg(ot, 1, reg);
5308 gen_op_testl_T0_T1_cc();
5309 s->cc_op = CC_OP_LOGICB + ot;
5310 break;
5311
5312 case 0xa8: /* test eAX, Iv */
5313 case 0xa9:
5314 if ((b & 1) == 0)
5315 ot = OT_BYTE;
5316 else
5317 ot = dflag + OT_WORD;
5318 val = insn_get(s, ot);
5319
5320 gen_op_mov_TN_reg(ot, 0, OR_EAX);
5321 gen_op_movl_T1_im(val);
5322 gen_op_testl_T0_T1_cc();
5323 s->cc_op = CC_OP_LOGICB + ot;
5324 break;
5325
5326 case 0x98: /* CWDE/CBW */
5327#ifdef TARGET_X86_64
5328 if (dflag == 2) {
5329 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5330 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5331 gen_op_mov_reg_T0(OT_QUAD, R_EAX);
5332 } else
5333#endif
5334 if (dflag == 1) {
5335 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5336 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5337 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5338 } else {
5339 gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
5340 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5341 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5342 }
5343 break;
5344 case 0x99: /* CDQ/CWD */
5345#ifdef TARGET_X86_64
5346 if (dflag == 2) {
5347 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5348 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
5349 gen_op_mov_reg_T0(OT_QUAD, R_EDX);
5350 } else
5351#endif
5352 if (dflag == 1) {
5353 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5354 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5355 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
5356 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5357 } else {
5358 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5359 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5360 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
5361 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5362 }
5363 break;
5364 case 0x1af: /* imul Gv, Ev */
5365 case 0x69: /* imul Gv, Ev, I */
5366 case 0x6b:
5367 ot = dflag + OT_WORD;
5368 modrm = ldub_code(s->pc++);
5369 reg = ((modrm >> 3) & 7) | rex_r;
5370 if (b == 0x69)
5371 s->rip_offset = insn_const_size(ot);
5372 else if (b == 0x6b)
5373 s->rip_offset = 1;
5374 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5375 if (b == 0x69) {
5376 val = insn_get(s, ot);
5377 gen_op_movl_T1_im(val);
5378 } else if (b == 0x6b) {
5379 val = (int8_t)insn_get(s, OT_BYTE);
5380 gen_op_movl_T1_im(val);
5381 } else {
5382 gen_op_mov_TN_reg(ot, 1, reg);
5383 }
5384
5385#ifdef TARGET_X86_64
5386 if (ot == OT_QUAD) {
5387 tcg_gen_helper_1_2(helper_imulq_T0_T1, cpu_T[0], cpu_T[0], cpu_T[1]);
5388 } else
5389#endif
5390 if (ot == OT_LONG) {
5391#ifdef TARGET_X86_64
5392 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5393 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
5394 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5395 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5396 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
5397 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5398#else
5399 {
5400 TCGv t0, t1;
5401 t0 = tcg_temp_new(TCG_TYPE_I64);
5402 t1 = tcg_temp_new(TCG_TYPE_I64);
5403 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
5404 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
5405 tcg_gen_mul_i64(t0, t0, t1);
5406 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5407 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5408 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
5409 tcg_gen_shri_i64(t0, t0, 32);
5410 tcg_gen_trunc_i64_i32(cpu_T[1], t0);
5411 tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
5412 }
5413#endif
5414 } else {
5415 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5416 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5417 /* XXX: use 32 bit mul which could be faster */
5418 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5419 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5420 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5421 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5422 }
5423 gen_op_mov_reg_T0(ot, reg);
5424 s->cc_op = CC_OP_MULB + ot;
5425 break;
5426 case 0x1c0:
5427 case 0x1c1: /* xadd Ev, Gv */
5428 if ((b & 1) == 0)
5429 ot = OT_BYTE;
5430 else
5431 ot = dflag + OT_WORD;
5432 modrm = ldub_code(s->pc++);
5433 reg = ((modrm >> 3) & 7) | rex_r;
5434 mod = (modrm >> 6) & 3;
5435 if (mod == 3) {
5436 rm = (modrm & 7) | REX_B(s);
5437 gen_op_mov_TN_reg(ot, 0, reg);
5438 gen_op_mov_TN_reg(ot, 1, rm);
5439 gen_op_addl_T0_T1();
5440 gen_op_mov_reg_T1(ot, reg);
5441 gen_op_mov_reg_T0(ot, rm);
5442 } else {
5443 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5444 gen_op_mov_TN_reg(ot, 0, reg);
5445 gen_op_ld_T1_A0(ot + s->mem_index);
5446 gen_op_addl_T0_T1();
5447 gen_op_st_T0_A0(ot + s->mem_index);
5448 gen_op_mov_reg_T1(ot, reg);
5449 }
5450 gen_op_update2_cc();
5451 s->cc_op = CC_OP_ADDB + ot;
5452 break;
5453 case 0x1b0:
5454 case 0x1b1: /* cmpxchg Ev, Gv */
5455 {
5456 int label1, label2;
5457 TCGv t0, t1, t2, a0;
5458
5459 if ((b & 1) == 0)
5460 ot = OT_BYTE;
5461 else
5462 ot = dflag + OT_WORD;
5463 modrm = ldub_code(s->pc++);
5464 reg = ((modrm >> 3) & 7) | rex_r;
5465 mod = (modrm >> 6) & 3;
5466 t0 = tcg_temp_local_new(TCG_TYPE_TL);
5467 t1 = tcg_temp_local_new(TCG_TYPE_TL);
5468 t2 = tcg_temp_local_new(TCG_TYPE_TL);
5469 a0 = tcg_temp_local_new(TCG_TYPE_TL);
5470 gen_op_mov_v_reg(ot, t1, reg);
5471 if (mod == 3) {
5472 rm = (modrm & 7) | REX_B(s);
5473 gen_op_mov_v_reg(ot, t0, rm);
5474 } else {
5475 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5476 tcg_gen_mov_tl(a0, cpu_A0);
5477 gen_op_ld_v(ot + s->mem_index, t0, a0);
5478 rm = 0; /* avoid warning */
5479 }
5480 label1 = gen_new_label();
5481 tcg_gen_ld_tl(t2, cpu_env, offsetof(CPUState, regs[R_EAX]));
5482 tcg_gen_sub_tl(t2, t2, t0);
5483 gen_extu(ot, t2);
5484 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
5485 if (mod == 3) {
5486 label2 = gen_new_label();
5487 gen_op_mov_reg_v(ot, R_EAX, t0);
5488 tcg_gen_br(label2);
5489 gen_set_label(label1);
5490 gen_op_mov_reg_v(ot, rm, t1);
5491 gen_set_label(label2);
5492 } else {
5493 tcg_gen_mov_tl(t1, t0);
5494 gen_op_mov_reg_v(ot, R_EAX, t0);
5495 gen_set_label(label1);
5496 /* always store */
5497 gen_op_st_v(ot + s->mem_index, t1, a0);
5498 }
5499 tcg_gen_mov_tl(cpu_cc_src, t0);
5500 tcg_gen_mov_tl(cpu_cc_dst, t2);
5501 s->cc_op = CC_OP_SUBB + ot;
5502 tcg_temp_free(t0);
5503 tcg_temp_free(t1);
5504 tcg_temp_free(t2);
5505 tcg_temp_free(a0);
5506 }
5507 break;
5508 case 0x1c7: /* cmpxchg8b */
5509 modrm = ldub_code(s->pc++);
5510 mod = (modrm >> 6) & 3;
5511 if ((mod == 3) || ((modrm & 0x38) != 0x8))
5512 goto illegal_op;
5513#ifdef TARGET_X86_64
5514 if (dflag == 2) {
5515 if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
5516 goto illegal_op;
5517 gen_jmp_im(pc_start - s->cs_base);
5518 if (s->cc_op != CC_OP_DYNAMIC)
5519 gen_op_set_cc_op(s->cc_op);
5520 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5521 tcg_gen_helper_0_1(helper_cmpxchg16b, cpu_A0);
5522 } else
5523#endif
5524 {
5525 if (!(s->cpuid_features & CPUID_CX8))
5526 goto illegal_op;
5527 gen_jmp_im(pc_start - s->cs_base);
5528 if (s->cc_op != CC_OP_DYNAMIC)
5529 gen_op_set_cc_op(s->cc_op);
5530 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5531 tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
5532 }
5533 s->cc_op = CC_OP_EFLAGS;
5534 break;
5535
5536 /**************************/
5537 /* push/pop */
5538 case 0x50 ... 0x57: /* push */
5539 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
5540 gen_push_T0(s);
5541 break;
5542 case 0x58 ... 0x5f: /* pop */
5543 if (CODE64(s)) {
5544 ot = dflag ? OT_QUAD : OT_WORD;
5545 } else {
5546 ot = dflag + OT_WORD;
5547 }
5548 gen_pop_T0(s);
5549 /* NOTE: order is important for pop %sp */
5550 gen_pop_update(s);
5551 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
5552 break;
5553 case 0x60: /* pusha */
5554 if (CODE64(s))
5555 goto illegal_op;
5556 gen_pusha(s);
5557 break;
5558 case 0x61: /* popa */
5559 if (CODE64(s))
5560 goto illegal_op;
5561 gen_popa(s);
5562 break;
5563 case 0x68: /* push Iv */
5564 case 0x6a:
5565 if (CODE64(s)) {
5566 ot = dflag ? OT_QUAD : OT_WORD;
5567 } else {
5568 ot = dflag + OT_WORD;
5569 }
5570 if (b == 0x68)
5571 val = insn_get(s, ot);
5572 else
5573 val = (int8_t)insn_get(s, OT_BYTE);
5574 gen_op_movl_T0_im(val);
5575 gen_push_T0(s);
5576 break;
5577 case 0x8f: /* pop Ev */
5578 if (CODE64(s)) {
5579 ot = dflag ? OT_QUAD : OT_WORD;
5580 } else {
5581 ot = dflag + OT_WORD;
5582 }
5583 modrm = ldub_code(s->pc++);
5584 mod = (modrm >> 6) & 3;
5585 gen_pop_T0(s);
5586 if (mod == 3) {
5587 /* NOTE: order is important for pop %sp */
5588 gen_pop_update(s);
5589 rm = (modrm & 7) | REX_B(s);
5590 gen_op_mov_reg_T0(ot, rm);
5591 } else {
5592 /* NOTE: order is important too for MMU exceptions */
5593 s->popl_esp_hack = 1 << ot;
5594 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5595 s->popl_esp_hack = 0;
5596 gen_pop_update(s);
5597 }
5598 break;
5599 case 0xc8: /* enter */
5600 {
5601 int level;
5602 val = lduw_code(s->pc);
5603 s->pc += 2;
5604 level = ldub_code(s->pc++);
5605 gen_enter(s, val, level);
5606 }
5607 break;
5608 case 0xc9: /* leave */
5609 /* XXX: exception not precise (ESP is updated before potential exception) */
5610 if (CODE64(s)) {
5611 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
5612 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
5613 } else if (s->ss32) {
5614 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
5615 gen_op_mov_reg_T0(OT_LONG, R_ESP);
5616 } else {
5617 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
5618 gen_op_mov_reg_T0(OT_WORD, R_ESP);
5619 }
5620 gen_pop_T0(s);
5621 if (CODE64(s)) {
5622 ot = dflag ? OT_QUAD : OT_WORD;
5623 } else {
5624 ot = dflag + OT_WORD;
5625 }
5626 gen_op_mov_reg_T0(ot, R_EBP);
5627 gen_pop_update(s);
5628 break;
5629 case 0x06: /* push es */
5630 case 0x0e: /* push cs */
5631 case 0x16: /* push ss */
5632 case 0x1e: /* push ds */
5633 if (CODE64(s))
5634 goto illegal_op;
5635 gen_op_movl_T0_seg(b >> 3);
5636 gen_push_T0(s);
5637 break;
5638 case 0x1a0: /* push fs */
5639 case 0x1a8: /* push gs */
5640 gen_op_movl_T0_seg((b >> 3) & 7);
5641 gen_push_T0(s);
5642 break;
5643 case 0x07: /* pop es */
5644 case 0x17: /* pop ss */
5645 case 0x1f: /* pop ds */
5646 if (CODE64(s))
5647 goto illegal_op;
5648 reg = b >> 3;
5649 gen_pop_T0(s);
5650 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5651 gen_pop_update(s);
5652 if (reg == R_SS) {
5653 /* if reg == SS, inhibit interrupts/trace. */
5654 /* If several instructions disable interrupts, only the
5655 _first_ does it */
5656 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5657 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5658 s->tf = 0;
5659 }
5660 if (s->is_jmp) {
5661 gen_jmp_im(s->pc - s->cs_base);
5662 gen_eob(s);
5663 }
5664 break;
5665 case 0x1a1: /* pop fs */
5666 case 0x1a9: /* pop gs */
5667 gen_pop_T0(s);
5668 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
5669 gen_pop_update(s);
5670 if (s->is_jmp) {
5671 gen_jmp_im(s->pc - s->cs_base);
5672 gen_eob(s);
5673 }
5674 break;
5675
    /**************************/
    /* mov */
    case 0x88:
    case 0x89: /* mov Gv, Ev */
        if ((b & 1) == 0)
            ot = OT_BYTE; /* even opcode = byte form, odd = word/long/quad */
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;

        /* generate a generic store */
        gen_ldst_modrm(s, modrm, ot, reg, 1);
        break;
    case 0xc6:
    case 0xc7: /* mov Ev, Iv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod != 3) {
            /* the immediate follows the modrm/displacement bytes; record
               its size so RIP-relative addressing sees the true insn end */
            s->rip_offset = insn_const_size(ot);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        }
        val = insn_get(s, ot);
        gen_op_movl_T0_im(val);
        if (mod != 3)
            gen_op_st_T0_A0(ot + s->mem_index);
        else
            gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
        break;
    case 0x8a:
    case 0x8b: /* mov Ev, Gv */
#ifdef VBOX /* dtrace hot fix */
        /* reject a LOCK-prefixed load (dtrace probe pattern) as #UD */
        if (prefixes & PREFIX_LOCK)
            goto illegal_op;
#endif
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = OT_WORD + dflag;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;

        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        gen_op_mov_reg_T0(ot, reg);
        break;
    case 0x8e: /* mov seg, Gv */
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        /* CS cannot be written, and reg fields 6/7 name no segment */
        if (reg >= 6 || reg == R_CS)
            goto illegal_op;
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
        if (reg == R_SS) {
            /* if reg == SS, inhibit interrupts/trace */
            /* If several instructions disable interrupts, only the
               _first_ does it */
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
                tcg_gen_helper_0_0(helper_set_inhibit_irq);
            s->tf = 0;
        }
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    case 0x8c: /* mov Gv, seg */
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        mod = (modrm >> 6) & 3;
        if (reg >= 6)
            goto illegal_op;
        gen_op_movl_T0_seg(reg);
        if (mod == 3)
            ot = OT_WORD + dflag;
        else
            ot = OT_WORD; /* a memory destination is always 16 bits */
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
        break;
5758
    case 0x1b6: /* movzbS Gv, Eb */
    case 0x1b7: /* movzwS Gv, Eb */
    case 0x1be: /* movsbS Gv, Eb */
    case 0x1bf: /* movswS Gv, Eb */
        {
            int d_ot;
            /* d_ot is the size of destination */
            d_ot = dflag + OT_WORD;
            /* ot is the size of source */
            ot = (b & 1) + OT_BYTE;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            rm = (modrm & 7) | REX_B(s);

            if (mod == 3) {
                gen_op_mov_TN_reg(ot, 0, rm);
                /* opcode bit 3 distinguishes movsx (set) from movzx */
                switch(ot | (b & 8)) {
                case OT_BYTE:
                    tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
                    break;
                case OT_BYTE | 8:
                    tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
                    break;
                case OT_WORD:
                    tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
                    break;
                default:
                case OT_WORD | 8:
                    tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
                    break;
                }
                gen_op_mov_reg_T0(d_ot, reg);
            } else {
                /* memory source: the load itself performs the extension */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if (b & 8) {
                    gen_op_lds_T0_A0(ot + s->mem_index);
                } else {
                    gen_op_ldu_T0_A0(ot + s->mem_index);
                }
                gen_op_mov_reg_T0(d_ot, reg);
            }
        }
        break;
5803
    case 0x8d: /* lea */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op; /* lea requires a memory-form operand */
        reg = ((modrm >> 3) & 7) | rex_r;
        /* we must ensure that no segment is added */
        s->override = -1;
        val = s->addseg;
        s->addseg = 0;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        s->addseg = val; /* restore the saved addseg setting */
        gen_op_mov_reg_A0(ot - OT_WORD, reg);
        break;
5819
    case 0xa0: /* mov EAX, Ov */
    case 0xa1:
    case 0xa2: /* mov Ov, EAX */
    case 0xa3:
        /* moffs forms: absolute offset encoded directly after the opcode */
        {
            target_ulong offset_addr;

            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                /* 64-bit address size: full 8-byte absolute offset */
                offset_addr = ldq_code(s->pc);
                s->pc += 8;
                gen_op_movq_A0_im(offset_addr);
            } else
#endif
            {
                if (s->aflag) {
                    offset_addr = insn_get(s, OT_LONG);
                } else {
                    offset_addr = insn_get(s, OT_WORD);
                }
                gen_op_movl_A0_im(offset_addr);
            }
            gen_add_A0_ds_seg(s);
            /* opcode bit 1 selects direction: 0 = load EAX, 1 = store EAX */
            if ((b & 2) == 0) {
                gen_op_ld_T0_A0(ot + s->mem_index);
                gen_op_mov_reg_T0(ot, R_EAX);
            } else {
                gen_op_mov_TN_reg(ot, 0, R_EAX);
                gen_op_st_T0_A0(ot + s->mem_index);
            }
        }
        break;
    case 0xd7: /* xlat */
        /* AL = [(R/E)BX + zero-extended AL], DS-relative */
#ifdef TARGET_X86_64
        if (s->aflag == 2) {
            gen_op_movq_A0_reg(R_EBX);
            gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
        } else
#endif
        {
            gen_op_movl_A0_reg(R_EBX);
            gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
            tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
            /* truncate the address to the effective address size */
            if (s->aflag == 0)
                gen_op_andl_A0_ffff();
            else
                tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
        }
        gen_add_A0_ds_seg(s);
        gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
        gen_op_mov_reg_T0(OT_BYTE, R_EAX);
        break;
    case 0xb0 ... 0xb7: /* mov R, Ib */
        /* register index is in the low 3 opcode bits (plus REX.B) */
        val = insn_get(s, OT_BYTE);
        gen_op_movl_T0_im(val);
        gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
        break;
    case 0xb8 ... 0xbf: /* mov R, Iv */
#ifdef TARGET_X86_64
        if (dflag == 2) {
            uint64_t tmp;
            /* 64 bit case */
            tmp = ldq_code(s->pc);
            s->pc += 8;
            reg = (b & 7) | REX_B(s);
            gen_movtl_T0_im(tmp);
            gen_op_mov_reg_T0(OT_QUAD, reg);
        } else
#endif
        {
            ot = dflag ? OT_LONG : OT_WORD;
            val = insn_get(s, ot);
            reg = (b & 7) | REX_B(s);
            gen_op_movl_T0_im(val);
            gen_op_mov_reg_T0(ot, reg);
        }
        break;
5904
    case 0x91 ... 0x97: /* xchg R, EAX */
        ot = dflag + OT_WORD;
        reg = (b & 7) | REX_B(s);
        rm = R_EAX;
        goto do_xchg_reg;
    case 0x86:
    case 0x87: /* xchg Ev, Gv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        if (mod == 3) {
            rm = (modrm & 7) | REX_B(s);
        do_xchg_reg:
            /* register-register swap via T0/T1 */
            gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_TN_reg(ot, 1, rm);
            gen_op_mov_reg_T0(ot, rm);
            gen_op_mov_reg_T1(ot, reg);
        } else {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_mov_TN_reg(ot, 0, reg);
            /* for xchg, lock is implicit */
            if (!(prefixes & PREFIX_LOCK))
                tcg_gen_helper_0_0(helper_lock);
            gen_op_ld_T1_A0(ot + s->mem_index);
            gen_op_st_T0_A0(ot + s->mem_index);
            if (!(prefixes & PREFIX_LOCK))
                tcg_gen_helper_0_0(helper_unlock);
            gen_op_mov_reg_T1(ot, reg);
        }
        break;
    case 0xc4: /* les Gv */
        if (CODE64(s))
            goto illegal_op; /* les/lds opcodes are reused by VEX in 64-bit mode */
        op = R_ES;
        goto do_lxx;
    case 0xc5: /* lds Gv */
        if (CODE64(s))
            goto illegal_op;
        op = R_DS;
        goto do_lxx;
    case 0x1b2: /* lss Gv */
        op = R_SS;
        goto do_lxx;
    case 0x1b4: /* lfs Gv */
        op = R_FS;
        goto do_lxx;
    case 0x1b5: /* lgs Gv */
        op = R_GS;
    do_lxx:
        /* load a far pointer: offset then 16-bit selector from memory */
        ot = dflag ? OT_LONG : OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_ld_T1_A0(ot + s->mem_index);
        gen_add_A0_im(s, 1 << (ot - OT_WORD + 1)); /* skip past the offset */
        /* load the segment first to handle exceptions properly */
        gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
        gen_movl_seg_T0(s, op, pc_start - s->cs_base);
        /* then put the data */
        gen_op_mov_reg_T1(ot, reg);
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
5977
    /************************/
    /* shifts */
    /* local convention: shift == 0 -> count in CL, 1 -> count is 1,
       2 -> count in an imm8 that follows the modrm bytes */
    case 0xc0:
    case 0xc1:
        /* shift Ev,Ib */
        shift = 2;
    grp2:
        {
            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;

            modrm = ldub_code(s->pc++);
            mod = (modrm >> 6) & 3;
            op = (modrm >> 3) & 7; /* rol/ror/rcl/rcr/shl/shr/sal/sar selector */

            if (mod != 3) {
                if (shift == 2) {
                    /* imm8 trails the addressing bytes */
                    s->rip_offset = 1;
                }
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                opreg = OR_TMP0;
            } else {
                opreg = (modrm & 7) | REX_B(s);
            }

            /* simpler op */
            if (shift == 0) {
                gen_shift(s, op, ot, opreg, OR_ECX);
            } else {
                if (shift == 2) {
                    shift = ldub_code(s->pc++);
                }
                gen_shifti(s, op, ot, opreg, shift);
            }
        }
        break;
    case 0xd0:
    case 0xd1:
        /* shift Ev,1 */
        shift = 1;
        goto grp2;
    case 0xd2:
    case 0xd3:
        /* shift Ev,cl */
        shift = 0;
        goto grp2;

    case 0x1a4: /* shld imm */
        op = 0;
        shift = 1;
        goto do_shiftd;
    case 0x1a5: /* shld cl */
        op = 0;
        shift = 0;
        goto do_shiftd;
    case 0x1ac: /* shrd imm */
        op = 1;
        shift = 1;
        goto do_shiftd;
    case 0x1ad: /* shrd cl */
        op = 1;
        shift = 0;
    do_shiftd:
        /* double-precision shifts: op 0 = shld, 1 = shrd;
           shift != 0 means the count is an imm8, otherwise it is CL */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        reg = ((modrm >> 3) & 7) | rex_r;
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            opreg = OR_TMP0;
        } else {
            opreg = rm;
        }
        gen_op_mov_TN_reg(ot, 1, reg);

        if (shift) {
            val = ldub_code(s->pc++);
            tcg_gen_movi_tl(cpu_T3, val);
        } else {
            tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
        }
        gen_shiftd_rm_T1_T3(s, ot, opreg, op);
        break;
6064
    /************************/
    /* floats */
    case 0xd8 ... 0xdf:
        if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
            /* if CR0.EM or CR0.TS are set, generate an FPU exception */
            /* XXX: what to do if illegal op ? */
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
            break;
        }
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = modrm & 7;
        /* op = (low 3 bits of escape byte) << 3 | modrm reg field,
           a 6-bit index that identifies the x87 operation */
        op = ((b & 7) << 3) | ((modrm >> 3) & 7);
        if (mod != 3) {
            /* memory op */
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            switch(op) {
            case 0x00 ... 0x07: /* fxxxs */
            case 0x10 ... 0x17: /* fixxxl */
            case 0x20 ... 0x27: /* fxxxl */
            case 0x30 ... 0x37: /* fixxx */
                {
                    int op1;
                    op1 = op & 7;

                    /* op >> 4 selects the memory operand format:
                       0 = float32, 1 = int32, 2 = float64, 3 = int16 */
                    switch(op >> 4) {
                    case 0:
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                        tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
                        break;
                    case 1:
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                        tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
                        break;
                    case 2:
                        tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
                                          (s->mem_index >> 2) - 1);
                        tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
                        break;
                    case 3:
                    default:
                        gen_op_lds_T0_A0(OT_WORD + s->mem_index);
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                        tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
                        break;
                    }

                    tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
                    if (op1 == 3) {
                        /* fcomp needs pop */
                        tcg_gen_helper_0_0(helper_fpop);
                    }
                }
                break;
            case 0x08: /* flds */
            case 0x0a: /* fsts */
            case 0x0b: /* fstps */
            case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
            case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
            case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
                switch(op & 7) {
                case 0:
                    /* loads into a fresh ST0 */
                    switch(op >> 4) {
                    case 0:
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                        tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
                        break;
                    case 1:
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                        tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
                        break;
                    case 2:
                        tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
                                          (s->mem_index >> 2) - 1);
                        tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
                        break;
                    case 3:
                    default:
                        gen_op_lds_T0_A0(OT_WORD + s->mem_index);
                        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                        tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
                        break;
                    }
                    break;
                case 1:
                    /* fisttp family (SSE3 truncating store-and-pop) */
                    /* XXX: the corresponding CPUID bit must be tested ! */
                    switch(op >> 4) {
                    case 1:
                        tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
                        break;
                    case 2:
                        tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
                        tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
                                          (s->mem_index >> 2) - 1);
                        break;
                    case 3:
                    default:
                        tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                        gen_op_st_T0_A0(OT_WORD + s->mem_index);
                        break;
                    }
                    tcg_gen_helper_0_0(helper_fpop);
                    break;
                default:
                    /* fst/fstp variants: store ST0, popping when op&7 == 3 */
                    switch(op >> 4) {
                    case 0:
                        tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
                        break;
                    case 1:
                        tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                        gen_op_st_T0_A0(OT_LONG + s->mem_index);
                        break;
                    case 2:
                        tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
                        tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
                                          (s->mem_index >> 2) - 1);
                        break;
                    case 3:
                    default:
                        tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
                        tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                        gen_op_st_T0_A0(OT_WORD + s->mem_index);
                        break;
                    }
                    if ((op & 7) == 3)
                        tcg_gen_helper_0_0(helper_fpop);
                    break;
                }
                break;
            case 0x0c: /* fldenv mem */
                /* env-affecting helpers need cc/eip synced for exceptions */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_2(helper_fldenv,
                                   cpu_A0, tcg_const_i32(s->dflag));
                break;
            case 0x0d: /* fldcw mem */
                gen_op_ld_T0_A0(OT_WORD + s->mem_index);
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
                break;
            case 0x0e: /* fnstenv mem */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_2(helper_fstenv,
                                   cpu_A0, tcg_const_i32(s->dflag));
                break;
            case 0x0f: /* fnstcw mem */
                tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
                break;
            case 0x1d: /* fldt mem */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
                break;
            case 0x1f: /* fstpt mem */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
                tcg_gen_helper_0_0(helper_fpop);
                break;
            case 0x2c: /* frstor mem */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_2(helper_frstor,
                                   cpu_A0, tcg_const_i32(s->dflag));
                break;
            case 0x2e: /* fnsave mem */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_2(helper_fsave,
                                   cpu_A0, tcg_const_i32(s->dflag));
                break;
            case 0x2f: /* fnstsw mem */
                tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
                break;
            case 0x3c: /* fbld */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
                break;
            case 0x3e: /* fbstp */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
                tcg_gen_helper_0_0(helper_fpop);
                break;
            case 0x3d: /* fildll */
                tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
                                  (s->mem_index >> 2) - 1);
                tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
                break;
            case 0x3f: /* fistpll */
                tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
                tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
                                  (s->mem_index >> 2) - 1);
                tcg_gen_helper_0_0(helper_fpop);
                break;
            default:
                goto illegal_op;
            }
        } else {
            /* register float ops: modrm mod == 3, rm names ST(i) */
            opreg = rm;

            switch(op) {
            case 0x08: /* fld sti */
                tcg_gen_helper_0_0(helper_fpush);
                /* after the push, the source register index shifts by one */
                tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
                break;
            case 0x09: /* fxchg sti */
            case 0x29: /* fxchg4 sti, undocumented op */
            case 0x39: /* fxchg7 sti, undocumented op */
                tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
                break;
            case 0x0a: /* grp d9/2 */
                switch(rm) {
                case 0: /* fnop */
                    /* check exceptions (FreeBSD FPU probe) */
                    if (s->cc_op != CC_OP_DYNAMIC)
                        gen_op_set_cc_op(s->cc_op);
                    gen_jmp_im(pc_start - s->cs_base);
                    tcg_gen_helper_0_0(helper_fwait);
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x0c: /* grp d9/4 */
                switch(rm) {
                case 0: /* fchs */
                    tcg_gen_helper_0_0(helper_fchs_ST0);
                    break;
                case 1: /* fabs */
                    tcg_gen_helper_0_0(helper_fabs_ST0);
                    break;
                case 4: /* ftst */
                    tcg_gen_helper_0_0(helper_fldz_FT0);
                    tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
                    break;
                case 5: /* fxam */
                    tcg_gen_helper_0_0(helper_fxam_ST0);
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x0d: /* grp d9/5 */
                /* constant loads: fld1/fldl2t/fldl2e/fldpi/fldlg2/fldln2/fldz */
                {
                    switch(rm) {
                    case 0:
                        tcg_gen_helper_0_0(helper_fpush);
                        tcg_gen_helper_0_0(helper_fld1_ST0);
                        break;
                    case 1:
                        tcg_gen_helper_0_0(helper_fpush);
                        tcg_gen_helper_0_0(helper_fldl2t_ST0);
                        break;
                    case 2:
                        tcg_gen_helper_0_0(helper_fpush);
                        tcg_gen_helper_0_0(helper_fldl2e_ST0);
                        break;
                    case 3:
                        tcg_gen_helper_0_0(helper_fpush);
                        tcg_gen_helper_0_0(helper_fldpi_ST0);
                        break;
                    case 4:
                        tcg_gen_helper_0_0(helper_fpush);
                        tcg_gen_helper_0_0(helper_fldlg2_ST0);
                        break;
                    case 5:
                        tcg_gen_helper_0_0(helper_fpush);
                        tcg_gen_helper_0_0(helper_fldln2_ST0);
                        break;
                    case 6:
                        tcg_gen_helper_0_0(helper_fpush);
                        tcg_gen_helper_0_0(helper_fldz_ST0);
                        break;
                    default:
                        goto illegal_op;
                    }
                }
                break;
            case 0x0e: /* grp d9/6 */
                switch(rm) {
                case 0: /* f2xm1 */
                    tcg_gen_helper_0_0(helper_f2xm1);
                    break;
                case 1: /* fyl2x */
                    tcg_gen_helper_0_0(helper_fyl2x);
                    break;
                case 2: /* fptan */
                    tcg_gen_helper_0_0(helper_fptan);
                    break;
                case 3: /* fpatan */
                    tcg_gen_helper_0_0(helper_fpatan);
                    break;
                case 4: /* fxtract */
                    tcg_gen_helper_0_0(helper_fxtract);
                    break;
                case 5: /* fprem1 */
                    tcg_gen_helper_0_0(helper_fprem1);
                    break;
                case 6: /* fdecstp */
                    tcg_gen_helper_0_0(helper_fdecstp);
                    break;
                default:
                case 7: /* fincstp */
                    tcg_gen_helper_0_0(helper_fincstp);
                    break;
                }
                break;
            case 0x0f: /* grp d9/7 */
                switch(rm) {
                case 0: /* fprem */
                    tcg_gen_helper_0_0(helper_fprem);
                    break;
                case 1: /* fyl2xp1 */
                    tcg_gen_helper_0_0(helper_fyl2xp1);
                    break;
                case 2: /* fsqrt */
                    tcg_gen_helper_0_0(helper_fsqrt);
                    break;
                case 3: /* fsincos */
                    tcg_gen_helper_0_0(helper_fsincos);
                    break;
                case 5: /* fscale */
                    tcg_gen_helper_0_0(helper_fscale);
                    break;
                case 4: /* frndint */
                    tcg_gen_helper_0_0(helper_frndint);
                    break;
                case 6: /* fsin */
                    tcg_gen_helper_0_0(helper_fsin);
                    break;
                default:
                case 7: /* fcos */
                    tcg_gen_helper_0_0(helper_fcos);
                    break;
                }
                break;
            case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
            case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
            case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
                {
                    int op1;

                    op1 = op & 7; /* arithmetic operation selector */
                    if (op >= 0x20) {
                        /* destination is ST(i); 0x30.. variants also pop */
                        tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
                        if (op >= 0x30)
                            tcg_gen_helper_0_0(helper_fpop);
                    } else {
                        tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                        tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
                    }
                }
                break;
            case 0x02: /* fcom */
            case 0x22: /* fcom2, undocumented op */
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
                break;
            case 0x03: /* fcomp */
            case 0x23: /* fcomp3, undocumented op */
            case 0x32: /* fcomp5, undocumented op */
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
                tcg_gen_helper_0_0(helper_fpop);
                break;
            case 0x15: /* da/5 */
                switch(rm) {
                case 1: /* fucompp */
                    tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
                    tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
                    tcg_gen_helper_0_0(helper_fpop);
                    tcg_gen_helper_0_0(helper_fpop);
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x1c:
                switch(rm) {
                case 0: /* feni (287 only, just do nop here) */
                    break;
                case 1: /* fdisi (287 only, just do nop here) */
                    break;
                case 2: /* fclex */
                    tcg_gen_helper_0_0(helper_fclex);
                    break;
                case 3: /* fninit */
                    tcg_gen_helper_0_0(helper_fninit);
                    break;
                case 4: /* fsetpm (287 only, just do nop here) */
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x1d: /* fucomi */
                /* writes EFLAGS, so the lazy cc state must be materialized */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x1e: /* fcomi */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x28: /* ffree sti */
                tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
                break;
            case 0x2a: /* fst sti */
                tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
                break;
            case 0x2b: /* fstp sti */
            case 0x0b: /* fstp1 sti, undocumented op */
            case 0x3a: /* fstp8 sti, undocumented op */
            case 0x3b: /* fstp9 sti, undocumented op */
                tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fpop);
                break;
            case 0x2c: /* fucom st(i) */
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
                break;
            case 0x2d: /* fucomp st(i) */
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
                tcg_gen_helper_0_0(helper_fpop);
                break;
            case 0x33: /* de/3 */
                switch(rm) {
                case 1: /* fcompp */
                    tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
                    tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
                    tcg_gen_helper_0_0(helper_fpop);
                    tcg_gen_helper_0_0(helper_fpop);
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x38: /* ffreep sti, undocumented op */
                tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fpop);
                break;
            case 0x3c: /* df/4 */
                switch(rm) {
                case 0: /* fnstsw ax */
                    tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
                    tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
                    gen_op_mov_reg_T0(OT_WORD, R_EAX);
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x3d: /* fucomip */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
                tcg_gen_helper_0_0(helper_fpop);
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x3e: /* fcomip */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
                tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
                tcg_gen_helper_0_0(helper_fpop);
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x10 ... 0x13: /* fcmovxx */
            case 0x18 ... 0x1b:
                {
                    int op1, l1;
                    static const uint8_t fcmov_cc[8] = {
                        (JCC_B << 1),
                        (JCC_Z << 1),
                        (JCC_BE << 1),
                        (JCC_P << 1),
                    };
                    /* low bit of op1 inverts the condition (fcmovnXX) */
                    op1 = fcmov_cc[op & 3] | (((op >> 3) & 1) ^ 1);
                    l1 = gen_new_label();
                    /* skip the move when the condition does not hold */
                    gen_jcc1(s, s->cc_op, op1, l1);
                    tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
                    gen_set_label(l1);
                }
                break;
            default:
                goto illegal_op;
            }
        }
        break;
    /************************/
    /* string ops */

    case 0xa4: /* movsS */
    case 0xa5:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_movs(s, ot);
        }
        break;

    case 0xaa: /* stosS */
    case 0xab:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_stos(s, ot);
        }
        break;
    case 0xac: /* lodsS */
    case 0xad:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_lods(s, ot);
        }
        break;
    case 0xae: /* scasS */
    case 0xaf:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        /* repne and repe terminate on opposite ZF values (last arg) */
        if (prefixes & PREFIX_REPNZ) {
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
        } else if (prefixes & PREFIX_REPZ) {
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
        } else {
            gen_scas(s, ot);
            s->cc_op = CC_OP_SUBB + ot; /* flags as from a sub of this width */
        }
        break;

    case 0xa6: /* cmpsS */
    case 0xa7:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        if (prefixes & PREFIX_REPNZ) {
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
        } else if (prefixes & PREFIX_REPZ) {
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
        } else {
            gen_cmps(s, ot);
            s->cc_op = CC_OP_SUBB + ot;
        }
        break;
    case 0x6c: /* insS */
    case 0x6d:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        /* port number comes from DX, masked to 16 bits */
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, pc_start - s->cs_base,
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_ins(s, ot);
            if (use_icount) {
                /* I/O may change the icount deadline; end the TB */
                gen_jmp(s, s->pc - s->cs_base);
            }
        }
        break;
    case 0x6e: /* outsS */
    case 0x6f:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, pc_start - s->cs_base,
                     svm_is_rep(prefixes) | 4);
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_outs(s, ot);
            if (use_icount) {
                gen_jmp(s, s->pc - s->cs_base);
            }
        }
        break;
6698
    /************************/
    /* port I/O */

    case 0xe4: /* in AL/eAX, imm8 */
    case 0xe5:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        val = ldub_code(s->pc++);
        gen_op_movl_T0_im(val);
        gen_check_io(s, ot, pc_start - s->cs_base,
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
        if (use_icount)
            gen_io_start();
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
        gen_op_mov_reg_T1(ot, R_EAX);
        if (use_icount) {
            gen_io_end();
            gen_jmp(s, s->pc - s->cs_base);
        }
        break;
    case 0xe6: /* out imm8, AL/eAX */
    case 0xe7:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        val = ldub_code(s->pc++);
        gen_op_movl_T0_im(val);
        gen_check_io(s, ot, pc_start - s->cs_base,
                     svm_is_rep(prefixes));
#ifdef VBOX /* bird: linux is writing to this port for delaying I/O. */
        /* silently discard writes to the 0x80 POST/delay port */
        if (val == 0x80)
            break;
#endif /* VBOX */
        gen_op_mov_TN_reg(ot, 1, R_EAX);

        if (use_icount)
            gen_io_start();
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
        tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
        if (use_icount) {
            gen_io_end();
            gen_jmp(s, s->pc - s->cs_base);
        }
        break;
    case 0xec: /* in AL/eAX, DX */
    case 0xed:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, pc_start - s->cs_base,
                     SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
        if (use_icount)
            gen_io_start();
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
        gen_op_mov_reg_T1(ot, R_EAX);
        if (use_icount) {
            gen_io_end();
            gen_jmp(s, s->pc - s->cs_base);
        }
        break;
    case 0xee: /* out DX, AL/eAX */
    case 0xef:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, pc_start - s->cs_base,
                     svm_is_rep(prefixes));
        gen_op_mov_TN_reg(ot, 1, R_EAX);

        if (use_icount)
            gen_io_start();
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
        tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
        tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
        if (use_icount) {
            gen_io_end();
            gen_jmp(s, s->pc - s->cs_base);
        }
        break;
6792
    /************************/
    /* control */
    case 0xc2: /* ret im */
        val = ldsw_code(s->pc);
        s->pc += 2;
        gen_pop_T0(s);
        if (CODE64(s) && s->dflag)
            s->dflag = 2; /* near ret defaults to 64-bit operand in long mode */
        gen_stack_update(s, val + (2 << s->dflag));
        if (s->dflag == 0)
            gen_op_andl_T0_ffff();
        gen_op_jmp_T0();
        gen_eob(s);
        break;
    case 0xc3: /* ret */
        gen_pop_T0(s);
        gen_pop_update(s);
        if (s->dflag == 0)
            gen_op_andl_T0_ffff();
        gen_op_jmp_T0();
        gen_eob(s);
        break;
    case 0xca: /* lret im */
        val = ldsw_code(s->pc);
        s->pc += 2;
    do_lret:
        if (s->pe && !s->vm86) {
            /* protected mode: the helper does all privilege checks */
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_lret_protected,
                               tcg_const_i32(s->dflag),
                               tcg_const_i32(val));
        } else {
            gen_stack_A0(s);
            /* pop offset */
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
            if (s->dflag == 0)
                gen_op_andl_T0_ffff();
            /* NOTE: keeping EIP updated is not a problem in case of
               exception */
            gen_op_jmp_T0();
            /* pop selector */
            gen_op_addl_A0_im(2 << s->dflag);
            gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
            gen_op_movl_seg_T0_vm(R_CS);
            /* add stack offset */
            gen_stack_update(s, val + (4 << s->dflag));
        }
        gen_eob(s);
        break;
    case 0xcb: /* lret */
        val = 0; /* no extra stack adjustment */
        goto do_lret;
6847 case 0xcf: /* iret */
6848 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET);
6849 if (!s->pe) {
6850 /* real mode */
6851 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6852 s->cc_op = CC_OP_EFLAGS;
6853 } else if (s->vm86) {
6854#ifdef VBOX
6855 if (s->iopl != 3 && (!s->vme || s->dflag)) {
6856#else
6857 if (s->iopl != 3) {
6858#endif
6859 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6860 } else {
6861 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6862 s->cc_op = CC_OP_EFLAGS;
6863 }
6864 } else {
6865 if (s->cc_op != CC_OP_DYNAMIC)
6866 gen_op_set_cc_op(s->cc_op);
6867 gen_jmp_im(pc_start - s->cs_base);
6868 tcg_gen_helper_0_2(helper_iret_protected,
6869 tcg_const_i32(s->dflag),
6870 tcg_const_i32(s->pc - s->cs_base));
6871 s->cc_op = CC_OP_EFLAGS;
6872 }
6873 gen_eob(s);
6874 break;
    /* direct (immediate) calls and jumps: target = next_eip + signed
       displacement, truncated to 16 or 32 bits outside 64-bit mode. */
6875    case 0xe8: /* call im */
6876        {
6877            if (dflag)
6878                tval = (int32_t)insn_get(s, OT_LONG);
6879            else
6880                tval = (int16_t)insn_get(s, OT_WORD);
6881            next_eip = s->pc - s->cs_base;
6882            tval += next_eip;
6883            if (s->dflag == 0)
6884                tval &= 0xffff;
6885            else if (!CODE64(s))
6886                tval &= 0xffffffff;
6887            gen_movtl_T0_im(next_eip);
6888            gen_push_T0(s);
6889            gen_jmp(s, tval);
6890        }
6891        break;
6892    case 0x9a: /* lcall im */
6893        {
6894            unsigned int selector, offset;
6895
                /* far call with immediate selector:offset -- invalid in
                   64-bit mode */
6896            if (CODE64(s))
6897                goto illegal_op;
6898            ot = dflag ? OT_LONG : OT_WORD;
6899            offset = insn_get(s, ot);
6900            selector = insn_get(s, OT_WORD);
6901
6902            gen_op_movl_T0_im(selector);
6903            gen_op_movl_T1_imu(offset);
6904        }
6905        goto do_lcall;
6906    case 0xe9: /* jmp im */
6907        if (dflag)
6908            tval = (int32_t)insn_get(s, OT_LONG);
6909        else
6910            tval = (int16_t)insn_get(s, OT_WORD);
6911        tval += s->pc - s->cs_base;
6912        if (s->dflag == 0)
6913            tval &= 0xffff;
6914        else if(!CODE64(s))
6915            tval &= 0xffffffff;
6916        gen_jmp(s, tval);
6917        break;
6918    case 0xea: /* ljmp im */
6919        {
6920            unsigned int selector, offset;
6921
6922            if (CODE64(s))
6923                goto illegal_op;
6924            ot = dflag ? OT_LONG : OT_WORD;
6925            offset = insn_get(s, ot);
6926            selector = insn_get(s, OT_WORD);
6927
6928            gen_op_movl_T0_im(selector);
6929            gen_op_movl_T1_imu(offset);
6930        }
6931        goto do_ljmp;
6932    case 0xeb: /* jmp Jb */
6933        tval = (int8_t)insn_get(s, OT_BYTE);
6934        tval += s->pc - s->cs_base;
6935        if (s->dflag == 0)
6936            tval &= 0xffff;
6937        gen_jmp(s, tval);
6938        break;
6939    case 0x70 ... 0x7f: /* jcc Jb */
6940        tval = (int8_t)insn_get(s, OT_BYTE);
6941        goto do_jcc;
6942    case 0x180 ... 0x18f: /* jcc Jv */
6943        if (dflag) {
6944            tval = (int32_t)insn_get(s, OT_LONG);
6945        } else {
6946            tval = (int16_t)insn_get(s, OT_WORD);
6947        }
6948    do_jcc:
6949        next_eip = s->pc - s->cs_base;
6950        tval += next_eip;
6951        if (s->dflag == 0)
6952            tval &= 0xffff;
            /* low 4 bits of b select the condition code */
6953        gen_jcc(s, b, tval, next_eip);
6954        break;
6955
6956    case 0x190 ... 0x19f: /* setcc Gv */
6957        modrm = ldub_code(s->pc++);
6958        gen_setcc(s, b);
6959        gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
6960        break;
6961    case 0x140 ... 0x14f: /* cmov Gv, Ev */
6962        {
6963            int l1;
6964            TCGv t0;
6965
6966            ot = dflag + OT_WORD;
6967            modrm = ldub_code(s->pc++);
6968            reg = ((modrm >> 3) & 7) | rex_r;
6969            mod = (modrm >> 6) & 3;
6970            t0 = tcg_temp_local_new(TCG_TYPE_TL);
                /* the source operand is always read (so a memory source can
                   fault) before the condition is tested */
6971            if (mod != 3) {
6972                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6973                gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
6974            } else {
6975                rm = (modrm & 7) | REX_B(s);
6976                gen_op_mov_v_reg(ot, t0, rm);
6977            }
6978#ifdef TARGET_X86_64
6979            if (ot == OT_LONG) {
6980                /* XXX: specific Intel behaviour ? */
                    /* 32-bit cmov in 64-bit mode: upper half of the 64-bit
                       destination is zeroed even when the move is not taken */
6981                l1 = gen_new_label();
6982                gen_jcc1(s, s->cc_op, b ^ 1, l1);
6983                tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
6984                gen_set_label(l1);
6985                tcg_gen_movi_tl(cpu_tmp0, 0);
6986                tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
6987            } else
6988#endif
6989            {
                    /* branch over the store when the inverted condition holds */
6990                l1 = gen_new_label();
6991                gen_jcc1(s, s->cc_op, b ^ 1, l1);
6992                gen_op_mov_reg_v(ot, reg, t0);
6993                gen_set_label(l1);
6994            }
6995            tcg_temp_free(t0);
6996        }
6997        break;
6998
6999 /************************/
7000 /* flags */
    /* EFLAGS push/pop and direct flag manipulation.  The POPF write mask
       depends on CPL/IOPL; the VBOX build adds CR4.VME fast paths for
       vm86 mode with IOPL < 3 (16-bit operand size only). */
7001    case 0x9c: /* pushf */
7002        gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF);
7003#ifdef VBOX
7004        if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
7005#else
7006        if (s->vm86 && s->iopl != 3) {
7007#endif
7008            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7009        } else {
7010            if (s->cc_op != CC_OP_DYNAMIC)
7011                gen_op_set_cc_op(s->cc_op);
7012#ifdef VBOX
7013            if (s->vm86 && s->vme && s->iopl != 3)
7014                tcg_gen_helper_1_0(helper_read_eflags_vme, cpu_T[0]);
7015            else
7016#endif
7017                tcg_gen_helper_1_0(helper_read_eflags, cpu_T[0]);
7018            gen_push_T0(s);
7019        }
7020        break;
7021    case 0x9d: /* popf */
7022        gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF);
7023#ifdef VBOX
7024        if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
7025#else
7026        if (s->vm86 && s->iopl != 3) {
7027#endif
7028            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7029        } else {
7030            gen_pop_T0(s);
                /* the writable-bits mask narrows as privilege drops:
                   CPL 0 may change IOPL; CPL <= IOPL may change IF;
                   otherwise neither */
7031            if (s->cpl == 0) {
7032                if (s->dflag) {
7033                    tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7034                                       tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
7035                } else {
7036                    tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7037                                       tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
7038                }
7039            } else {
7040                if (s->cpl <= s->iopl) {
7041                    if (s->dflag) {
7042                        tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7043                                           tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
7044                    } else {
7045                        tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7046                                           tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
7047                    }
7048                } else {
7049                    if (s->dflag) {
7050                        tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7051                                           tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
7052                    } else {
7053#ifdef VBOX
7054                        if (s->vm86 && s->vme)
7055                            tcg_gen_helper_0_1(helper_write_eflags_vme, cpu_T[0]);
7056                        else
7057#endif
7058                            tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7059                                               tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
7060                    }
7061                }
7062            }
7063            gen_pop_update(s);
7064            s->cc_op = CC_OP_EFLAGS;
7065            /* abort translation because TF flag may change */
7066            gen_jmp_im(s->pc - s->cs_base);
7067            gen_eob(s);
7068        }
7069        break;
7070    case 0x9e: /* sahf */
            /* valid in 64-bit mode only with the LAHF_LM CPUID bit */
7071        if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
7072            goto illegal_op;
7073        gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
7074        if (s->cc_op != CC_OP_DYNAMIC)
7075            gen_op_set_cc_op(s->cc_op);
7076        gen_compute_eflags(cpu_cc_src);
            /* keep OF from the old flags, take S/Z/A/P/C from AH */
7077        tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
7078        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
7079        tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
7080        s->cc_op = CC_OP_EFLAGS;
7081        break;
7082    case 0x9f: /* lahf */
7083        if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
7084            goto illegal_op;
7085        if (s->cc_op != CC_OP_DYNAMIC)
7086            gen_op_set_cc_op(s->cc_op);
7087        gen_compute_eflags(cpu_T[0]);
7088        /* Note: gen_compute_eflags() only gives the condition codes */
            /* bit 1 of EFLAGS always reads as 1 */
7089        tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);
7090        gen_op_mov_reg_T0(OT_BYTE, R_AH);
7091        break;
7092    case 0xf5: /* cmc */
7093        if (s->cc_op != CC_OP_DYNAMIC)
7094            gen_op_set_cc_op(s->cc_op);
7095        gen_compute_eflags(cpu_cc_src);
7096        tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
7097        s->cc_op = CC_OP_EFLAGS;
7098        break;
7099    case 0xf8: /* clc */
7100        if (s->cc_op != CC_OP_DYNAMIC)
7101            gen_op_set_cc_op(s->cc_op);
7102        gen_compute_eflags(cpu_cc_src);
7103        tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
7104        s->cc_op = CC_OP_EFLAGS;
7105        break;
7106    case 0xf9: /* stc */
7107        if (s->cc_op != CC_OP_DYNAMIC)
7108            gen_op_set_cc_op(s->cc_op);
7109        gen_compute_eflags(cpu_cc_src);
7110        tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
7111        s->cc_op = CC_OP_EFLAGS;
7112        break;
7113    case 0xfc: /* cld */
            /* env->df holds +1/-1, the string-op increment direction */
7114        tcg_gen_movi_i32(cpu_tmp2_i32, 1);
7115        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
7116        break;
7117    case 0xfd: /* std */
7118        tcg_gen_movi_i32(cpu_tmp2_i32, -1);
7119        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
7120        break;
7121
7122 /************************/
7123 /* bit operations */
    /* bit-test family.  Register forms take the bit offset modulo the
       operand width; memory forms first add the signed word/dword index
       of the containing operand ((offset >> (3+ot)) << ot bytes) to the
       effective address, then use the offset modulo the width. */
7124    case 0x1ba: /* bt/bts/btr/btc Gv, im */
7125        ot = dflag + OT_WORD;
7126        modrm = ldub_code(s->pc++);
7127        op = (modrm >> 3) & 7;
7128        mod = (modrm >> 6) & 3;
7129        rm = (modrm & 7) | REX_B(s);
7130        if (mod != 3) {
                /* one immediate byte follows the modrm/displacement */
7131            s->rip_offset = 1;
7132            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7133            gen_op_ld_T0_A0(ot + s->mem_index);
7134        } else {
7135            gen_op_mov_TN_reg(ot, 0, rm);
7136        }
7137        /* load shift */
7138        val = ldub_code(s->pc++);
7139        gen_op_movl_T1_im(val);
            /* /0../3 of 0F BA are undefined; only /4../7 (bt..btc) exist */
7140        if (op < 4)
7141            goto illegal_op;
7142        op -= 4;
7143        goto bt_op;
7144    case 0x1a3: /* bt Gv, Ev */
7145        op = 0;
7146        goto do_btx;
7147    case 0x1ab: /* bts */
7148        op = 1;
7149        goto do_btx;
7150    case 0x1b3: /* btr */
7151        op = 2;
7152        goto do_btx;
7153    case 0x1bb: /* btc */
7154        op = 3;
7155    do_btx:
7156        ot = dflag + OT_WORD;
7157        modrm = ldub_code(s->pc++);
7158        reg = ((modrm >> 3) & 7) | rex_r;
7159        mod = (modrm >> 6) & 3;
7160        rm = (modrm & 7) | REX_B(s);
7161        gen_op_mov_TN_reg(OT_LONG, 1, reg);
7162        if (mod != 3) {
7163            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7164            /* specific case: we need to add a displacement */
7165            gen_exts(ot, cpu_T[1]);
7166            tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
7167            tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
7168            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
7169            gen_op_ld_T0_A0(ot + s->mem_index);
7170        } else {
7171            gen_op_mov_TN_reg(ot, 0, rm);
7172        }
7173    bt_op:
            /* reduce the bit offset modulo the operand width */
7174        tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
7175        switch(op) {
7176        case 0:
                /* bt: tested bit lands in bit 0 of cc_src (-> CF) */
7177            tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
7178            tcg_gen_movi_tl(cpu_cc_dst, 0);
7179            break;
7180        case 1:
                /* bts: remember old value in cpu_tmp4, then set the bit */
7181            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7182            tcg_gen_movi_tl(cpu_tmp0, 1);
7183            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7184            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7185            break;
7186        case 2:
                /* btr: clear the bit */
7187            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7188            tcg_gen_movi_tl(cpu_tmp0, 1);
7189            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7190            tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
7191            tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7192            break;
7193        default:
7194        case 3:
                /* btc: complement the bit */
7195            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7196            tcg_gen_movi_tl(cpu_tmp0, 1);
7197            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7198            tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7199            break;
7200        }
7201        s->cc_op = CC_OP_SARB + ot;
7202        if (op != 0) {
                /* write back the modified operand and expose the original
                   bit (saved in cpu_tmp4) as CF */
7203            if (mod != 3)
7204                gen_op_st_T0_A0(ot + s->mem_index);
7205            else
7206                gen_op_mov_reg_T0(ot, rm);
7207            tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
7208            tcg_gen_movi_tl(cpu_cc_dst, 0);
7209        }
7210        break;
7211    case 0x1bc: /* bsf */
7212    case 0x1bd: /* bsr */
7213        {
7214            int label1;
7215            TCGv t0;
7216
7217            ot = dflag + OT_WORD;
7218            modrm = ldub_code(s->pc++);
7219            reg = ((modrm >> 3) & 7) | rex_r;
7220            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
7221            gen_extu(ot, cpu_T[0]);
7222            label1 = gen_new_label();
                /* zero source: skip the helper, ZF path via cc_dst = 0
                   (destination register is left unchanged) */
7223            tcg_gen_movi_tl(cpu_cc_dst, 0);
7224            t0 = tcg_temp_local_new(TCG_TYPE_TL);
7225            tcg_gen_mov_tl(t0, cpu_T[0]);
7226            tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, label1);
7227            if (b & 1) {
7228                tcg_gen_helper_1_1(helper_bsr, cpu_T[0], t0);
7229            } else {
7230                tcg_gen_helper_1_1(helper_bsf, cpu_T[0], t0);
7231            }
7232            gen_op_mov_reg_T0(ot, reg);
7233            tcg_gen_movi_tl(cpu_cc_dst, 1);
7234            gen_set_label(label1);
7235            tcg_gen_discard_tl(cpu_cc_src);
7236            s->cc_op = CC_OP_LOGICB + ot;
7237            tcg_temp_free(t0);
7238        }
7239        break;
7240 /************************/
7241 /* bcd */
    /* BCD adjust instructions; all invalid in 64-bit mode and delegated
       to helpers that also update EFLAGS. */
7242    case 0x27: /* daa */
7243        if (CODE64(s))
7244            goto illegal_op;
7245        if (s->cc_op != CC_OP_DYNAMIC)
7246            gen_op_set_cc_op(s->cc_op);
7247        tcg_gen_helper_0_0(helper_daa);
7248        s->cc_op = CC_OP_EFLAGS;
7249        break;
7250    case 0x2f: /* das */
7251        if (CODE64(s))
7252            goto illegal_op;
7253        if (s->cc_op != CC_OP_DYNAMIC)
7254            gen_op_set_cc_op(s->cc_op);
7255        tcg_gen_helper_0_0(helper_das);
7256        s->cc_op = CC_OP_EFLAGS;
7257        break;
7258    case 0x37: /* aaa */
7259        if (CODE64(s))
7260            goto illegal_op;
7261        if (s->cc_op != CC_OP_DYNAMIC)
7262            gen_op_set_cc_op(s->cc_op);
7263        tcg_gen_helper_0_0(helper_aaa);
7264        s->cc_op = CC_OP_EFLAGS;
7265        break;
7266    case 0x3f: /* aas */
7267        if (CODE64(s))
7268            goto illegal_op;
7269        if (s->cc_op != CC_OP_DYNAMIC)
7270            gen_op_set_cc_op(s->cc_op);
7271        tcg_gen_helper_0_0(helper_aas);
7272        s->cc_op = CC_OP_EFLAGS;
7273        break;
7274    case 0xd4: /* aam */
7275        if (CODE64(s))
7276            goto illegal_op;
7277        val = ldub_code(s->pc++);
            /* AAM divides by the immediate: imm8 == 0 raises #DE */
7278        if (val == 0) {
7279            gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
7280        } else {
7281            tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
7282            s->cc_op = CC_OP_LOGICB;
7283        }
7284        break;
7285    case 0xd5: /* aad */
7286        if (CODE64(s))
7287            goto illegal_op;
7288        val = ldub_code(s->pc++);
7289        tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
7290        s->cc_op = CC_OP_LOGICB;
7291        break;
7292 /************************/
7293 /* misc */
    /* miscellaneous: NOP/PAUSE, FWAIT, software interrupts and interrupt
       flag control.  The VBOX build relaxes the vm86 #GP checks when
       CR4.VME is set and routes CLI/STI through VIF-aware helpers. */
7294    case 0x90: /* nop */
7295        /* XXX: xchg + rex handling */
7296        /* XXX: correct lock test for all insn */
7297        if (prefixes & PREFIX_LOCK)
7298            goto illegal_op;
7299        if (prefixes & PREFIX_REPZ) {
                /* F3 90 == PAUSE: only the SVM intercept matters here */
7300            gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
7301        }
7302        break;
7303    case 0x9b: /* fwait */
            /* #NM if both CR0.MP and CR0.TS are set */
7304        if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
7305            (HF_MP_MASK | HF_TS_MASK)) {
7306            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7307        } else {
7308            if (s->cc_op != CC_OP_DYNAMIC)
7309                gen_op_set_cc_op(s->cc_op);
7310            gen_jmp_im(pc_start - s->cs_base);
7311            tcg_gen_helper_0_0(helper_fwait);
7312        }
7313        break;
7314    case 0xcc: /* int3 */
7315#ifdef VBOX
7316        if (s->vm86 && s->iopl != 3 && !s->vme) {
7317            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7318        } else
7319#endif
7320            gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
7321        break;
7322    case 0xcd: /* int N */
7323        val = ldub_code(s->pc++);
7324#ifdef VBOX
7325        if (s->vm86 && s->iopl != 3 && !s->vme) {
7326#else
7327        if (s->vm86 && s->iopl != 3) {
7328#endif
7329            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7330        } else {
7331            gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
7332        }
7333        break;
7334    case 0xce: /* into */
7335        if (CODE64(s))
7336            goto illegal_op;
7337        if (s->cc_op != CC_OP_DYNAMIC)
7338            gen_op_set_cc_op(s->cc_op);
7339        gen_jmp_im(pc_start - s->cs_base);
7340        tcg_gen_helper_0_1(helper_into, tcg_const_i32(s->pc - pc_start));
7341        break;
7342    case 0xf1: /* icebp (undocumented, exits to external debugger) */
7343        gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP);
7344#if 1
7345        gen_debug(s, pc_start - s->cs_base);
7346#else
7347        /* start debug */
7348        tb_flush(cpu_single_env);
7349        cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
7350#endif
7351        break;
7352    case 0xfa: /* cli */
7353        if (!s->vm86) {
7354            if (s->cpl <= s->iopl) {
7355                tcg_gen_helper_0_0(helper_cli);
7356            } else {
7357                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7358            }
7359        } else {
7360            if (s->iopl == 3) {
7361                tcg_gen_helper_0_0(helper_cli);
7362#ifdef VBOX
                /* VME: clear the virtual interrupt flag instead of #GP */
7363            } else if (s->iopl != 3 && s->vme) {
7364                tcg_gen_helper_0_0(helper_cli_vme);
7365#endif
7366            } else {
7367                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7368            }
7369        }
7370        break;
7371    case 0xfb: /* sti */
7372        if (!s->vm86) {
7373            if (s->cpl <= s->iopl) {
7374            gen_sti:
7375                tcg_gen_helper_0_0(helper_sti);
7376                /* interruptions are enabled only the first insn after sti */
7377                /* If several instructions disable interrupts, only the
7378                   _first_ does it */
7379                if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
7380                    tcg_gen_helper_0_0(helper_set_inhibit_irq);
7381                /* give a chance to handle pending irqs */
7382                gen_jmp_im(s->pc - s->cs_base);
7383                gen_eob(s);
7384            } else {
7385                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7386            }
7387        } else {
7388            if (s->iopl == 3) {
7389                goto gen_sti;
7390#ifdef VBOX
7391            } else if (s->iopl != 3 && s->vme) {
7392                tcg_gen_helper_0_0(helper_sti_vme);
7393                /* give a chance to handle pending irqs */
7394                gen_jmp_im(s->pc - s->cs_base);
7395                gen_eob(s);
7396#endif
7397            } else {
7398                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7399            }
7400        }
7401        break;
7402    case 0x62: /* bound */
            /* range check: #BR (via helper) if the index register lies
               outside the two bounds at the memory operand */
7403        if (CODE64(s))
7404            goto illegal_op;
7405        ot = dflag ? OT_LONG : OT_WORD;
7406        modrm = ldub_code(s->pc++);
7407        reg = (modrm >> 3) & 7;
7408        mod = (modrm >> 6) & 3;
7409        if (mod == 3)
7410            goto illegal_op;
7411        gen_op_mov_TN_reg(ot, 0, reg);
7412        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7413        gen_jmp_im(pc_start - s->cs_base);
7414        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7415        if (ot == OT_WORD)
7416            tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
7417        else
7418            tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
7419        break;
7420    case 0x1c8 ... 0x1cf: /* bswap reg */
            /* NOTE(review): the 16-bit operand-size form is not
               special-cased here -- this always swaps 32 (or 64) bits;
               BSWAP r16 is architecturally undefined, so this is a
               legitimate choice. */
7421        reg = (b & 7) | REX_B(s);
7422#ifdef TARGET_X86_64
7423        if (dflag == 2) {
7424            gen_op_mov_TN_reg(OT_QUAD, 0, reg);
7425            tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
7426            gen_op_mov_reg_T0(OT_QUAD, reg);
7427        } else
7428        {
7429            TCGv tmp0;
7430            gen_op_mov_TN_reg(OT_LONG, 0, reg);
7431
7432            tmp0 = tcg_temp_new(TCG_TYPE_I32);
7433            tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
7434            tcg_gen_bswap_i32(tmp0, tmp0);
7435            tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
7436            gen_op_mov_reg_T0(OT_LONG, reg);
7437        }
7438#else
7439        {
7440            gen_op_mov_TN_reg(OT_LONG, 0, reg);
7441            tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
7442            gen_op_mov_reg_T0(OT_LONG, reg);
7443        }
7444#endif
7445        break;
7446    case 0xd6: /* salc */
            /* undocumented: AL = CF ? 0xff : 0x00 (neg of the 0/1 carry) */
7447        if (CODE64(s))
7448            goto illegal_op;
7449        if (s->cc_op != CC_OP_DYNAMIC)
7450            gen_op_set_cc_op(s->cc_op);
7451        gen_compute_eflags_c(cpu_T[0]);
7452        tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
7453        gen_op_mov_reg_T0(OT_BYTE, R_EAX);
7454        break;
    /* LOOPcc/JCXZ.  Label layout: l1 = branch taken (jump to tval),
       l3 = loop(n)z fallthrough when (E)CX hit zero, l2 = common exit. */
7455    case 0xe0: /* loopnz */
7456    case 0xe1: /* loopz */
7457    case 0xe2: /* loop */
7458    case 0xe3: /* jecxz */
7459        {
7460            int l1, l2, l3;
7461
7462            tval = (int8_t)insn_get(s, OT_BYTE);
7463            next_eip = s->pc - s->cs_base;
7464            tval += next_eip;
7465            if (s->dflag == 0)
7466                tval &= 0xffff;
7467
7468            l1 = gen_new_label();
7469            l2 = gen_new_label();
7470            l3 = gen_new_label();
7471            b &= 3;
7472            switch(b) {
7473            case 0: /* loopnz */
7474            case 1: /* loopz */
7475                if (s->cc_op != CC_OP_DYNAMIC)
7476                    gen_op_set_cc_op(s->cc_op);
7477                gen_op_add_reg_im(s->aflag, R_ECX, -1);
7478                gen_op_jz_ecx(s->aflag, l3);
7479                gen_compute_eflags(cpu_tmp0);
7480                tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
                    /* loopnz takes the branch when ZF==0, loopz when ZF==1 */
7481                if (b == 0) {
7482                    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
7483                } else {
7484                    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, l1);
7485                }
7486                break;
7487            case 2: /* loop */
7488                gen_op_add_reg_im(s->aflag, R_ECX, -1);
7489                gen_op_jnz_ecx(s->aflag, l1);
7490                break;
7491            default:
7492            case 3: /* jcxz */
                    /* jcxz tests (E)CX without decrementing it */
7493                gen_op_jz_ecx(s->aflag, l1);
7494                break;
7495            }
7496
7497            gen_set_label(l3);
7498            gen_jmp_im(next_eip);
7499            tcg_gen_br(l2);
7500
7501            gen_set_label(l1);
7502            gen_jmp_im(tval);
7503            gen_set_label(l2);
7504            gen_eob(s);
7505        }
7506        break;
    /* system instructions: MSR access, time-stamp/perf counters, fast
       system calls, CPUID and HLT.  All state changes happen in helpers;
       the translator only emits privilege checks and EIP updates. */
7507    case 0x130: /* wrmsr */
7508    case 0x132: /* rdmsr */
7509        if (s->cpl != 0) {
7510            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7511        } else {
7512            if (s->cc_op != CC_OP_DYNAMIC)
7513                gen_op_set_cc_op(s->cc_op);
7514            gen_jmp_im(pc_start - s->cs_base);
                /* bit 1 of the opcode distinguishes rdmsr from wrmsr */
7515            if (b & 2) {
7516                tcg_gen_helper_0_0(helper_rdmsr);
7517            } else {
7518                tcg_gen_helper_0_0(helper_wrmsr);
7519            }
7520        }
7521        break;
7522    case 0x131: /* rdtsc */
7523        if (s->cc_op != CC_OP_DYNAMIC)
7524            gen_op_set_cc_op(s->cc_op);
7525        gen_jmp_im(pc_start - s->cs_base);
            /* icount treats rdtsc like an I/O access: end the TB after it */
7526        if (use_icount)
7527            gen_io_start();
7528        tcg_gen_helper_0_0(helper_rdtsc);
7529        if (use_icount) {
7530            gen_io_end();
7531            gen_jmp(s, s->pc - s->cs_base);
7532        }
7533        break;
7534    case 0x133: /* rdpmc */
7535        if (s->cc_op != CC_OP_DYNAMIC)
7536            gen_op_set_cc_op(s->cc_op);
7537        gen_jmp_im(pc_start - s->cs_base);
7538        tcg_gen_helper_0_0(helper_rdpmc);
7539        break;
7540    case 0x134: /* sysenter */
7541#ifndef VBOX
7542        /* For Intel SYSENTER is valid on 64-bit */
7543        if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
7544#else
7545        /** @todo: make things right */
7546        if (CODE64(s))
7547#endif
7548            goto illegal_op;
7549        if (!s->pe) {
7550            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7551        } else {
7552            if (s->cc_op != CC_OP_DYNAMIC) {
7553                gen_op_set_cc_op(s->cc_op);
7554                s->cc_op = CC_OP_DYNAMIC;
7555            }
7556            gen_jmp_im(pc_start - s->cs_base);
7557            tcg_gen_helper_0_0(helper_sysenter);
7558            gen_eob(s);
7559        }
7560        break;
7561    case 0x135: /* sysexit */
7562#ifndef VBOX
7563        /* For Intel SYSEXIT is valid on 64-bit */
7564        if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
7565#else
7566        /** @todo: make things right */
7567        if (CODE64(s))
7568#endif
7569            goto illegal_op;
7570        if (!s->pe) {
7571            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7572        } else {
7573            if (s->cc_op != CC_OP_DYNAMIC) {
7574                gen_op_set_cc_op(s->cc_op);
7575                s->cc_op = CC_OP_DYNAMIC;
7576            }
7577            gen_jmp_im(pc_start - s->cs_base);
7578            tcg_gen_helper_0_1(helper_sysexit, tcg_const_i32(dflag));
7579            gen_eob(s);
7580        }
7581        break;
7582#ifdef TARGET_X86_64
7583    case 0x105: /* syscall */
7584        /* XXX: is it usable in real mode ? */
7585        if (s->cc_op != CC_OP_DYNAMIC) {
7586            gen_op_set_cc_op(s->cc_op);
7587            s->cc_op = CC_OP_DYNAMIC;
7588        }
7589        gen_jmp_im(pc_start - s->cs_base);
7590        tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
7591        gen_eob(s);
7592        break;
7593    case 0x107: /* sysret */
7594        if (!s->pe) {
7595            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7596        } else {
7597            if (s->cc_op != CC_OP_DYNAMIC) {
7598                gen_op_set_cc_op(s->cc_op);
7599                s->cc_op = CC_OP_DYNAMIC;
7600            }
7601            gen_jmp_im(pc_start - s->cs_base);
7602            tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
7603            /* condition codes are modified only in long mode */
7604            if (s->lma)
7605                s->cc_op = CC_OP_EFLAGS;
7606            gen_eob(s);
7607        }
7608        break;
7609#endif
7610    case 0x1a2: /* cpuid */
7611        if (s->cc_op != CC_OP_DYNAMIC)
7612            gen_op_set_cc_op(s->cc_op);
7613        gen_jmp_im(pc_start - s->cs_base);
7614        tcg_gen_helper_0_0(helper_cpuid);
7615        break;
7616    case 0xf4: /* hlt */
7617        if (s->cpl != 0) {
7618            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7619        } else {
7620            if (s->cc_op != CC_OP_DYNAMIC)
7621                gen_op_set_cc_op(s->cc_op);
7622            gen_jmp_im(pc_start - s->cs_base);
7623            tcg_gen_helper_0_1(helper_hlt, tcg_const_i32(s->pc - pc_start));
                /* is_jmp = 3: stop translation, helper does not return
                   normally */
7624            s->is_jmp = 3;
7625        }
7626        break;
    /* group 0F 00: LDTR/TR access and segment verification -- all require
       protected mode outside vm86; loads additionally require CPL 0. */
7627    case 0x100:
7628        modrm = ldub_code(s->pc++);
7629        mod = (modrm >> 6) & 3;
7630        op = (modrm >> 3) & 7;
7631        switch(op) {
7632        case 0: /* sldt */
7633            if (!s->pe || s->vm86)
7634                goto illegal_op;
7635            gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ);
7636            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
7637            ot = OT_WORD;
                /* register destination may be widened by the operand size;
                   memory destination is always 16 bits */
7638            if (mod == 3)
7639                ot += s->dflag;
7640            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
7641            break;
7642        case 2: /* lldt */
7643            if (!s->pe || s->vm86)
7644                goto illegal_op;
7645            if (s->cpl != 0) {
7646                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7647            } else {
7648                gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE);
7649                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7650                gen_jmp_im(pc_start - s->cs_base);
7651                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7652                tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
7653            }
7654            break;
7655        case 1: /* str */
7656            if (!s->pe || s->vm86)
7657                goto illegal_op;
7658            gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ);
7659            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
7660            ot = OT_WORD;
7661            if (mod == 3)
7662                ot += s->dflag;
7663            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
7664            break;
7665        case 3: /* ltr */
7666            if (!s->pe || s->vm86)
7667                goto illegal_op;
7668            if (s->cpl != 0) {
7669                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7670            } else {
7671                gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE);
7672                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7673                gen_jmp_im(pc_start - s->cs_base);
7674                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7675                tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
7676            }
7677            break;
7678        case 4: /* verr */
7679        case 5: /* verw */
7680            if (!s->pe || s->vm86)
7681                goto illegal_op;
7682            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7683            if (s->cc_op != CC_OP_DYNAMIC)
7684                gen_op_set_cc_op(s->cc_op);
                /* helpers report the result through ZF */
7685            if (op == 4)
7686                tcg_gen_helper_0_1(helper_verr, cpu_T[0]);
7687            else
7688                tcg_gen_helper_0_1(helper_verw, cpu_T[0]);
7689            s->cc_op = CC_OP_EFLAGS;
7690            break;
7691        default:
7692            goto illegal_op;
7693        }
7694        break;
    /* group 0F 01: descriptor-table and control-register housekeeping,
       MONITOR/MWAIT, the AMD SVM instruction set (register forms of
       lgdt/lidt encodings) and SWAPGS/INVLPG.  The VBOX build also decodes
       RDTSCP (0F 01 F9) here before the regular /op dispatch. */
7695    case 0x101:
7696        modrm = ldub_code(s->pc++);
7697        mod = (modrm >> 6) & 3;
7698        op = (modrm >> 3) & 7;
7699        rm = modrm & 7;
7700
7701#ifdef VBOX
7702        /* 0f 01 f9 */
7703        if (modrm == 0xf9)
7704        {
7705            if (!(s->cpuid_ext2_features & CPUID_EXT2_RDTSCP))
7706                goto illegal_op;
7707            gen_jmp_im(pc_start - s->cs_base);
7708            tcg_gen_helper_0_0(helper_rdtscp);
7709            break;
7710        }
7711#endif
7712        switch(op) {
7713        case 0: /* sgdt */
7714            if (mod == 3)
7715                goto illegal_op;
7716            gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ);
7717            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                /* store limit (16 bit), then base; with a 16-bit operand
                   size only 24 base bits are stored */
7718            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
7719            gen_op_st_T0_A0(OT_WORD + s->mem_index);
7720            gen_add_A0_im(s, 2);
7721            tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
7722            if (!s->dflag)
7723                gen_op_andl_T0_im(0xffffff);
7724            gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7725            break;
7726        case 1:
7727            if (mod == 3) {
7728                switch (rm) {
7729                case 0: /* monitor */
7730                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7731                        s->cpl != 0)
7732                        goto illegal_op;
7733                    if (s->cc_op != CC_OP_DYNAMIC)
7734                        gen_op_set_cc_op(s->cc_op);
7735                    gen_jmp_im(pc_start - s->cs_base);
                        /* monitored address comes from (R|E)AX + DS base */
7736#ifdef TARGET_X86_64
7737                    if (s->aflag == 2) {
7738                        gen_op_movq_A0_reg(R_EAX);
7739                    } else
7740#endif
7741                    {
7742                        gen_op_movl_A0_reg(R_EAX);
7743                        if (s->aflag == 0)
7744                            gen_op_andl_A0_ffff();
7745                    }
7746                    gen_add_A0_ds_seg(s);
7747                    tcg_gen_helper_0_1(helper_monitor, cpu_A0);
7748                    break;
7749                case 1: /* mwait */
7750                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7751                        s->cpl != 0)
7752                        goto illegal_op;
7753                    if (s->cc_op != CC_OP_DYNAMIC) {
7754                        gen_op_set_cc_op(s->cc_op);
7755                        s->cc_op = CC_OP_DYNAMIC;
7756                    }
7757                    gen_jmp_im(pc_start - s->cs_base);
7758                    tcg_gen_helper_0_1(helper_mwait, tcg_const_i32(s->pc - pc_start));
7759                    gen_eob(s);
7760                    break;
7761                default:
7762                    goto illegal_op;
7763                }
7764            } else { /* sidt */
7765                gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ);
7766                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7767                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
7768                gen_op_st_T0_A0(OT_WORD + s->mem_index);
7769                gen_add_A0_im(s, 2);
7770                tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
7771                if (!s->dflag)
7772                    gen_op_andl_T0_im(0xffffff);
7773                gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7774            }
7775            break;
7776        case 2: /* lgdt */
7777        case 3: /* lidt */
7778            if (mod == 3) {
                    /* register forms encode the SVM instructions */
7779                if (s->cc_op != CC_OP_DYNAMIC)
7780                    gen_op_set_cc_op(s->cc_op);
7781                gen_jmp_im(pc_start - s->cs_base);
7782                switch(rm) {
7783                case 0: /* VMRUN */
7784                    if (!(s->flags & HF_SVME_MASK) || !s->pe)
7785                        goto illegal_op;
7786                    if (s->cpl != 0) {
7787                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7788                        break;
7789                    } else {
7790                        tcg_gen_helper_0_2(helper_vmrun,
7791                                           tcg_const_i32(s->aflag),
7792                                           tcg_const_i32(s->pc - pc_start));
7793                        tcg_gen_exit_tb(0);
7794                        s->is_jmp = 3;
7795                    }
7796                    break;
7797                case 1: /* VMMCALL */
7798                    if (!(s->flags & HF_SVME_MASK))
7799                        goto illegal_op;
7800                    tcg_gen_helper_0_0(helper_vmmcall);
7801                    break;
7802                case 2: /* VMLOAD */
7803                    if (!(s->flags & HF_SVME_MASK) || !s->pe)
7804                        goto illegal_op;
7805                    if (s->cpl != 0) {
7806                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7807                        break;
7808                    } else {
7809                        tcg_gen_helper_0_1(helper_vmload,
7810                                           tcg_const_i32(s->aflag));
7811                    }
7812                    break;
7813                case 3: /* VMSAVE */
7814                    if (!(s->flags & HF_SVME_MASK) || !s->pe)
7815                        goto illegal_op;
7816                    if (s->cpl != 0) {
7817                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7818                        break;
7819                    } else {
7820                        tcg_gen_helper_0_1(helper_vmsave,
7821                                           tcg_const_i32(s->aflag));
7822                    }
7823                    break;
7824                case 4: /* STGI */
7825                    if ((!(s->flags & HF_SVME_MASK) &&
7826                         !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7827                        !s->pe)
7828                        goto illegal_op;
7829                    if (s->cpl != 0) {
7830                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7831                        break;
7832                    } else {
7833                        tcg_gen_helper_0_0(helper_stgi);
7834                    }
7835                    break;
7836                case 5: /* CLGI */
7837                    if (!(s->flags & HF_SVME_MASK) || !s->pe)
7838                        goto illegal_op;
7839                    if (s->cpl != 0) {
7840                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7841                        break;
7842                    } else {
7843                        tcg_gen_helper_0_0(helper_clgi);
7844                    }
7845                    break;
7846                case 6: /* SKINIT */
7847                    if ((!(s->flags & HF_SVME_MASK) &&
7848                         !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7849                        !s->pe)
7850                        goto illegal_op;
7851                    tcg_gen_helper_0_0(helper_skinit);
7852                    break;
7853                case 7: /* INVLPGA */
7854                    if (!(s->flags & HF_SVME_MASK) || !s->pe)
7855                        goto illegal_op;
7856                    if (s->cpl != 0) {
7857                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7858                        break;
7859                    } else {
7860                        tcg_gen_helper_0_1(helper_invlpga,
7861                                           tcg_const_i32(s->aflag));
7862                    }
7863                    break;
7864                default:
7865                    goto illegal_op;
7866                }
7867            } else if (s->cpl != 0) {
7868                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7869            } else {
                    /* memory forms: real lgdt/lidt -- load limit then base */
7870                gen_svm_check_intercept(s, pc_start,
7871                                        op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE);
7872                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7873                gen_op_ld_T1_A0(OT_WORD + s->mem_index);
7874                gen_add_A0_im(s, 2);
7875                gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7876                if (!s->dflag)
7877                    gen_op_andl_T0_im(0xffffff);
7878                if (op == 2) {
7879                    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
7880                    tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
7881                } else {
7882                    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
7883                    tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
7884                }
7885            }
7886            break;
7887        case 4: /* smsw */
7888            gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0);
7889            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
7890            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
7891            break;
7892        case 6: /* lmsw */
7893            if (s->cpl != 0) {
7894                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7895            } else {
7896                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
7897                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7898                tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
                    /* CR0 change may affect translation: end the TB */
7899                gen_jmp_im(s->pc - s->cs_base);
7900                gen_eob(s);
7901            }
7902            break;
7903        case 7: /* invlpg */
7904            if (s->cpl != 0) {
7905                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7906            } else {
7907                if (mod == 3) {
7908#ifdef TARGET_X86_64
7909                    if (CODE64(s) && rm == 0) {
7910                        /* swapgs */
                            /* exchange GS.base with MSR_KERNEL_GS_BASE */
7911                        tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7912                        tcg_gen_ld_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,kernelgsbase));
7913                        tcg_gen_st_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7914                        tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,kernelgsbase));
7915                    } else
7916#endif
7917                    {
7918                        goto illegal_op;
7919                    }
7920                } else {
7921                    if (s->cc_op != CC_OP_DYNAMIC)
7922                        gen_op_set_cc_op(s->cc_op);
7923                    gen_jmp_im(pc_start - s->cs_base);
7924                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7925                    tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
7926                    gen_jmp_im(s->pc - s->cs_base);
7927                    gen_eob(s);
7928                }
7929            }
7930            break;
7931        default:
7932            goto illegal_op;
7933        }
7934        break;
7935 case 0x108: /* invd */
7936 case 0x109: /* wbinvd */
7937 if (s->cpl != 0) {
7938 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7939 } else {
7940 gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD);
7941 /* nothing to do */
7942 }
7943 break;
7944 case 0x63: /* arpl or movslS (x86_64) */
7945#ifdef TARGET_X86_64
7946 if (CODE64(s)) {
7947 int d_ot;
7948 /* d_ot is the size of destination */
7949 d_ot = dflag + OT_WORD;
7950
7951 modrm = ldub_code(s->pc++);
7952 reg = ((modrm >> 3) & 7) | rex_r;
7953 mod = (modrm >> 6) & 3;
7954 rm = (modrm & 7) | REX_B(s);
7955
7956 if (mod == 3) {
7957 gen_op_mov_TN_reg(OT_LONG, 0, rm);
7958 /* sign extend */
7959 if (d_ot == OT_QUAD)
7960 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
7961 gen_op_mov_reg_T0(d_ot, reg);
7962 } else {
7963 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7964 if (d_ot == OT_QUAD) {
7965 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
7966 } else {
7967 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
7968 }
7969 gen_op_mov_reg_T0(d_ot, reg);
7970 }
7971 } else
7972#endif
7973 {
7974 int label1;
7975 TCGv t0, t1, t2, a0;
7976
7977 if (!s->pe || s->vm86)
7978 goto illegal_op;
7979
7980 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7981 t1 = tcg_temp_local_new(TCG_TYPE_TL);
7982 t2 = tcg_temp_local_new(TCG_TYPE_TL);
7983#ifdef VBOX
7984 a0 = tcg_temp_local_new(TCG_TYPE_TL);
7985#endif
7986 ot = OT_WORD;
7987 modrm = ldub_code(s->pc++);
7988 reg = (modrm >> 3) & 7;
7989 mod = (modrm >> 6) & 3;
7990 rm = modrm & 7;
7991 if (mod != 3) {
7992 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7993#ifdef VBOX
7994 tcg_gen_mov_tl(a0, cpu_A0);
7995#endif
7996 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
7997 } else {
7998 gen_op_mov_v_reg(ot, t0, rm);
7999 }
8000 gen_op_mov_v_reg(ot, t1, reg);
8001 tcg_gen_andi_tl(cpu_tmp0, t0, 3);
8002 tcg_gen_andi_tl(t1, t1, 3);
8003 tcg_gen_movi_tl(t2, 0);
8004 label1 = gen_new_label();
8005 tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, t1, label1);
8006 tcg_gen_andi_tl(t0, t0, ~3);
8007 tcg_gen_or_tl(t0, t0, t1);
8008 tcg_gen_movi_tl(t2, CC_Z);
8009 gen_set_label(label1);
8010 if (mod != 3) {
8011#ifdef VBOX
8012 /* cpu_A0 doesn't survive branch */
8013 gen_op_st_v(ot + s->mem_index, t0, a0);
8014#else
8015 gen_op_st_v(ot + s->mem_index, t0, cpu_A0);
8016#endif
8017 } else {
8018 gen_op_mov_reg_v(ot, rm, t0);
8019 }
8020 if (s->cc_op != CC_OP_DYNAMIC)
8021 gen_op_set_cc_op(s->cc_op);
8022 gen_compute_eflags(cpu_cc_src);
8023 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
8024 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t2);
8025 s->cc_op = CC_OP_EFLAGS;
8026 tcg_temp_free(t0);
8027 tcg_temp_free(t1);
8028 tcg_temp_free(t2);
8029#ifdef VBOX
8030 tcg_temp_free(a0);
8031#endif
8032 }
8033 break;
8034 case 0x102: /* lar */
8035 case 0x103: /* lsl */
8036 {
8037 int label1;
8038 TCGv t0;
8039 if (!s->pe || s->vm86)
8040 goto illegal_op;
8041 ot = dflag ? OT_LONG : OT_WORD;
8042 modrm = ldub_code(s->pc++);
8043 reg = ((modrm >> 3) & 7) | rex_r;
8044 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
8045 t0 = tcg_temp_local_new(TCG_TYPE_TL);
8046 if (s->cc_op != CC_OP_DYNAMIC)
8047 gen_op_set_cc_op(s->cc_op);
8048 if (b == 0x102)
8049 tcg_gen_helper_1_1(helper_lar, t0, cpu_T[0]);
8050 else
8051 tcg_gen_helper_1_1(helper_lsl, t0, cpu_T[0]);
8052 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
8053 label1 = gen_new_label();
8054 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
8055 gen_op_mov_reg_v(ot, reg, t0);
8056 gen_set_label(label1);
8057 s->cc_op = CC_OP_EFLAGS;
8058 tcg_temp_free(t0);
8059 }
8060 break;
8061 case 0x118:
8062 modrm = ldub_code(s->pc++);
8063 mod = (modrm >> 6) & 3;
8064 op = (modrm >> 3) & 7;
8065 switch(op) {
8066 case 0: /* prefetchnta */
8067 case 1: /* prefetchnt0 */
8068 case 2: /* prefetchnt0 */
8069 case 3: /* prefetchnt0 */
8070 if (mod == 3)
8071 goto illegal_op;
8072 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8073 /* nothing more to do */
8074 break;
8075 default: /* nop (multi byte) */
8076 gen_nop_modrm(s, modrm);
8077 break;
8078 }
8079 break;
8080 case 0x119 ... 0x11f: /* nop (multi byte) */
8081 modrm = ldub_code(s->pc++);
8082 gen_nop_modrm(s, modrm);
8083 break;
8084 case 0x120: /* mov reg, crN */
8085 case 0x122: /* mov crN, reg */
8086 if (s->cpl != 0) {
8087 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8088 } else {
8089 modrm = ldub_code(s->pc++);
8090#ifndef VBOX /* mod bits are always understood to be 11 (0xc0) regardless of actual content; see AMD manuals */
8091 if ((modrm & 0xc0) != 0xc0)
8092 goto illegal_op;
8093#endif
8094 rm = (modrm & 7) | REX_B(s);
8095 reg = ((modrm >> 3) & 7) | rex_r;
8096 if (CODE64(s))
8097 ot = OT_QUAD;
8098 else
8099 ot = OT_LONG;
8100 switch(reg) {
8101 case 0:
8102 case 2:
8103 case 3:
8104 case 4:
8105 case 8:
8106 if (s->cc_op != CC_OP_DYNAMIC)
8107 gen_op_set_cc_op(s->cc_op);
8108 gen_jmp_im(pc_start - s->cs_base);
8109 if (b & 2) {
8110 gen_op_mov_TN_reg(ot, 0, rm);
8111 tcg_gen_helper_0_2(helper_write_crN,
8112 tcg_const_i32(reg), cpu_T[0]);
8113 gen_jmp_im(s->pc - s->cs_base);
8114 gen_eob(s);
8115 } else {
8116 tcg_gen_helper_1_1(helper_read_crN,
8117 cpu_T[0], tcg_const_i32(reg));
8118 gen_op_mov_reg_T0(ot, rm);
8119 }
8120 break;
8121 default:
8122 goto illegal_op;
8123 }
8124 }
8125 break;
8126 case 0x121: /* mov reg, drN */
8127 case 0x123: /* mov drN, reg */
8128 if (s->cpl != 0) {
8129 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8130 } else {
8131 modrm = ldub_code(s->pc++);
8132#ifndef VBOX /* mod bits are always understood to be 11 (0xc0) regardless of actual content; see AMD manuals */
8133 if ((modrm & 0xc0) != 0xc0)
8134 goto illegal_op;
8135#endif
8136 rm = (modrm & 7) | REX_B(s);
8137 reg = ((modrm >> 3) & 7) | rex_r;
8138 if (CODE64(s))
8139 ot = OT_QUAD;
8140 else
8141 ot = OT_LONG;
8142 /* XXX: do it dynamically with CR4.DE bit */
8143 if (reg == 4 || reg == 5 || reg >= 8)
8144 goto illegal_op;
8145 if (b & 2) {
8146 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
8147 gen_op_mov_TN_reg(ot, 0, rm);
8148 tcg_gen_helper_0_2(helper_movl_drN_T0,
8149 tcg_const_i32(reg), cpu_T[0]);
8150 gen_jmp_im(s->pc - s->cs_base);
8151 gen_eob(s);
8152 } else {
8153 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
8154 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
8155 gen_op_mov_reg_T0(ot, rm);
8156 }
8157 }
8158 break;
8159 case 0x106: /* clts */
8160 if (s->cpl != 0) {
8161 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8162 } else {
8163 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
8164 tcg_gen_helper_0_0(helper_clts);
8165 /* abort block because static cpu state changed */
8166 gen_jmp_im(s->pc - s->cs_base);
8167 gen_eob(s);
8168 }
8169 break;
8170 /* MMX/3DNow!/SSE/SSE2/SSE3/SSSE3/SSE4 support */
8171 case 0x1c3: /* MOVNTI reg, mem */
8172 if (!(s->cpuid_features & CPUID_SSE2))
8173 goto illegal_op;
8174 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
8175 modrm = ldub_code(s->pc++);
8176 mod = (modrm >> 6) & 3;
8177 if (mod == 3)
8178 goto illegal_op;
8179 reg = ((modrm >> 3) & 7) | rex_r;
8180 /* generate a generic store */
8181 gen_ldst_modrm(s, modrm, ot, reg, 1);
8182 break;
8183 case 0x1ae:
8184 modrm = ldub_code(s->pc++);
8185 mod = (modrm >> 6) & 3;
8186 op = (modrm >> 3) & 7;
8187 switch(op) {
8188 case 0: /* fxsave */
8189 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
8190 (s->flags & HF_EM_MASK))
8191 goto illegal_op;
8192 if (s->flags & HF_TS_MASK) {
8193 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8194 break;
8195 }
8196 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8197 if (s->cc_op != CC_OP_DYNAMIC)
8198 gen_op_set_cc_op(s->cc_op);
8199 gen_jmp_im(pc_start - s->cs_base);
8200 tcg_gen_helper_0_2(helper_fxsave,
8201 cpu_A0, tcg_const_i32((s->dflag == 2)));
8202 break;
8203 case 1: /* fxrstor */
8204 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
8205 (s->flags & HF_EM_MASK))
8206 goto illegal_op;
8207 if (s->flags & HF_TS_MASK) {
8208 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8209 break;
8210 }
8211 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8212 if (s->cc_op != CC_OP_DYNAMIC)
8213 gen_op_set_cc_op(s->cc_op);
8214 gen_jmp_im(pc_start - s->cs_base);
8215 tcg_gen_helper_0_2(helper_fxrstor,
8216 cpu_A0, tcg_const_i32((s->dflag == 2)));
8217 break;
8218 case 2: /* ldmxcsr */
8219 case 3: /* stmxcsr */
8220 if (s->flags & HF_TS_MASK) {
8221 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8222 break;
8223 }
8224 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
8225 mod == 3)
8226 goto illegal_op;
8227 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8228 if (op == 2) {
8229 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
8230 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
8231 } else {
8232 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
8233 gen_op_st_T0_A0(OT_LONG + s->mem_index);
8234 }
8235 break;
8236 case 5: /* lfence */
8237 case 6: /* mfence */
8238 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
8239 goto illegal_op;
8240 break;
8241 case 7: /* sfence / clflush */
8242 if ((modrm & 0xc7) == 0xc0) {
8243 /* sfence */
8244 /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
8245 if (!(s->cpuid_features & CPUID_SSE))
8246 goto illegal_op;
8247 } else {
8248 /* clflush */
8249 if (!(s->cpuid_features & CPUID_CLFLUSH))
8250 goto illegal_op;
8251 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8252 }
8253 break;
8254 default:
8255 goto illegal_op;
8256 }
8257 break;
8258 case 0x10d: /* 3DNow! prefetch(w) */
8259 modrm = ldub_code(s->pc++);
8260 mod = (modrm >> 6) & 3;
8261 if (mod == 3)
8262 goto illegal_op;
8263 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8264 /* ignore for now */
8265 break;
8266 case 0x1aa: /* rsm */
8267 gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM);
8268 if (!(s->flags & HF_SMM_MASK))
8269 goto illegal_op;
8270 if (s->cc_op != CC_OP_DYNAMIC) {
8271 gen_op_set_cc_op(s->cc_op);
8272 s->cc_op = CC_OP_DYNAMIC;
8273 }
8274 gen_jmp_im(s->pc - s->cs_base);
8275 tcg_gen_helper_0_0(helper_rsm);
8276 gen_eob(s);
8277 break;
8278 case 0x1b8: /* SSE4.2 popcnt */
8279 if ((prefixes & (PREFIX_REPZ | PREFIX_LOCK | PREFIX_REPNZ)) !=
8280 PREFIX_REPZ)
8281 goto illegal_op;
8282 if (!(s->cpuid_ext_features & CPUID_EXT_POPCNT))
8283 goto illegal_op;
8284
8285 modrm = ldub_code(s->pc++);
8286 reg = ((modrm >> 3) & 7);
8287
8288 if (s->prefix & PREFIX_DATA)
8289 ot = OT_WORD;
8290 else if (s->dflag != 2)
8291 ot = OT_LONG;
8292 else
8293 ot = OT_QUAD;
8294
8295 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
8296 tcg_gen_helper_1_2(helper_popcnt,
8297 cpu_T[0], cpu_T[0], tcg_const_i32(ot));
8298 gen_op_mov_reg_T0(ot, reg);
8299
8300 s->cc_op = CC_OP_EFLAGS;
8301 break;
8302 case 0x10e ... 0x10f:
8303 /* 3DNow! instructions, ignore prefixes */
8304 s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
8305 case 0x110 ... 0x117:
8306 case 0x128 ... 0x12f:
8307 case 0x138 ... 0x13a:
8308 case 0x150 ... 0x177:
8309 case 0x17c ... 0x17f:
8310 case 0x1c2:
8311 case 0x1c4 ... 0x1c6:
8312 case 0x1d0 ... 0x1fe:
8313 gen_sse(s, b, pc_start, rex_r);
8314 break;
8315 default:
8316 goto illegal_op;
8317 }
8318 /* lock generation */
8319 if (s->prefix & PREFIX_LOCK)
8320 tcg_gen_helper_0_0(helper_unlock);
8321 return s->pc;
8322 illegal_op:
8323 if (s->prefix & PREFIX_LOCK)
8324 tcg_gen_helper_0_0(helper_unlock);
8325 /* XXX: ensure that no lock was generated */
8326 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
8327 return s->pc;
8328}
8329
/* One-time initialization of the TCG globals used by the x86 translator:
   the env base pointer and the lazy condition-code fields of CPUState,
   plus registration of all translator helper functions. Must run before
   any call to gen_intermediate_code*(). */
void optimize_flags_init(void)
{
    /* Pin sizeof(CCTable) to a power of two that depends on the host
       register width (8 bytes on 32-bit hosts, 16 on 64-bit hosts) --
       presumably so cc_table entries can be indexed by shift; verify
       against the cc_table users if this ever fires. */
#if TCG_TARGET_REG_BITS == 32
    assert(sizeof(CCTable) == (1 << 3));
#else
    assert(sizeof(CCTable) == (1 << 4));
#endif
    /* Bind the fixed host register AREG0 to the CPU environment pointer,
       and create TCG globals backed by the cc_* fields of CPUState. */
    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
    cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
                                   TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
    cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
    cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
    cpu_cc_tmp = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_tmp), "cc_tmp");

    /* register helpers */
    /* Re-expanding helper.h with DEF_HELPER redefined registers every
       helper function with TCG under its own name. */
#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
#include "helper.h"
}
8352
8353/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
8354 basic block 'tb'. If search_pc is TRUE, also generate PC
8355 information for each intermediate instruction. */
8356#ifndef VBOX
8357static inline void gen_intermediate_code_internal(CPUState *env,
8358#else /* VBOX */
8359DECLINLINE(void) gen_intermediate_code_internal(CPUState *env,
8360#endif /* VBOX */
8361 TranslationBlock *tb,
8362 int search_pc)
8363{
8364 DisasContext dc1, *dc = &dc1;
8365 target_ulong pc_ptr;
8366 uint16_t *gen_opc_end;
8367 int j, lj, cflags;
8368 uint64_t flags;
8369 target_ulong pc_start;
8370 target_ulong cs_base;
8371 int num_insns;
8372 int max_insns;
8373
8374 /* generate intermediate code */
8375 pc_start = tb->pc;
8376 cs_base = tb->cs_base;
8377 flags = tb->flags;
8378 cflags = tb->cflags;
8379
8380 dc->pe = (flags >> HF_PE_SHIFT) & 1;
8381 dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
8382 dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
8383 dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
8384 dc->f_st = 0;
8385 dc->vm86 = (flags >> VM_SHIFT) & 1;
8386#ifdef VBOX
8387 dc->vme = !!(env->cr[4] & CR4_VME_MASK);
8388 dc->pvi = !!(env->cr[4] & CR4_PVI_MASK);
8389#ifdef VBOX_WITH_CALL_RECORD
8390 if ( !(env->state & CPU_RAW_RING0)
8391 && (env->cr[0] & CR0_PG_MASK)
8392 && !(env->eflags & X86_EFL_IF)
8393 && dc->code32)
8394 dc->record_call = 1;
8395 else
8396 dc->record_call = 0;
8397#endif
8398#endif
8399 dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
8400 dc->iopl = (flags >> IOPL_SHIFT) & 3;
8401 dc->tf = (flags >> TF_SHIFT) & 1;
8402 dc->singlestep_enabled = env->singlestep_enabled;
8403 dc->cc_op = CC_OP_DYNAMIC;
8404 dc->cs_base = cs_base;
8405 dc->tb = tb;
8406 dc->popl_esp_hack = 0;
8407 /* select memory access functions */
8408 dc->mem_index = 0;
8409 if (flags & HF_SOFTMMU_MASK) {
8410 if (dc->cpl == 3)
8411 dc->mem_index = 2 * 4;
8412 else
8413 dc->mem_index = 1 * 4;
8414 }
8415 dc->cpuid_features = env->cpuid_features;
8416 dc->cpuid_ext_features = env->cpuid_ext_features;
8417 dc->cpuid_ext2_features = env->cpuid_ext2_features;
8418 dc->cpuid_ext3_features = env->cpuid_ext3_features;
8419#ifdef TARGET_X86_64
8420 dc->lma = (flags >> HF_LMA_SHIFT) & 1;
8421 dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
8422#endif
8423 dc->flags = flags;
8424 dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
8425 (flags & HF_INHIBIT_IRQ_MASK)
8426#ifndef CONFIG_SOFTMMU
8427 || (flags & HF_SOFTMMU_MASK)
8428#endif
8429 );
8430#if 0
8431 /* check addseg logic */
8432 if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
8433 printf("ERROR addseg\n");
8434#endif
8435
8436 cpu_T[0] = tcg_temp_new(TCG_TYPE_TL);
8437 cpu_T[1] = tcg_temp_new(TCG_TYPE_TL);
8438 cpu_A0 = tcg_temp_new(TCG_TYPE_TL);
8439 cpu_T3 = tcg_temp_new(TCG_TYPE_TL);
8440
8441 cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
8442 cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
8443 cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
8444 cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
8445 cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
8446 cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
8447 cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
8448 cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
8449 cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);
8450
8451 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
8452
8453 dc->is_jmp = DISAS_NEXT;
8454 pc_ptr = pc_start;
8455 lj = -1;
8456 num_insns = 0;
8457 max_insns = tb->cflags & CF_COUNT_MASK;
8458 if (max_insns == 0)
8459 max_insns = CF_COUNT_MASK;
8460
8461 gen_icount_start();
8462 for(;;) {
8463 if (env->nb_breakpoints > 0) {
8464 for(j = 0; j < env->nb_breakpoints; j++) {
8465 if (env->breakpoints[j] == pc_ptr) {
8466 gen_debug(dc, pc_ptr - dc->cs_base);
8467 break;
8468 }
8469 }
8470 }
8471 if (search_pc) {
8472 j = gen_opc_ptr - gen_opc_buf;
8473 if (lj < j) {
8474 lj++;
8475 while (lj < j)
8476 gen_opc_instr_start[lj++] = 0;
8477 }
8478 gen_opc_pc[lj] = pc_ptr;
8479 gen_opc_cc_op[lj] = dc->cc_op;
8480 gen_opc_instr_start[lj] = 1;
8481 gen_opc_icount[lj] = num_insns;
8482 }
8483 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
8484 gen_io_start();
8485
8486 pc_ptr = disas_insn(dc, pc_ptr);
8487 num_insns++;
8488 /* stop translation if indicated */
8489 if (dc->is_jmp)
8490 break;
8491#ifdef VBOX
8492#ifdef DEBUG
8493/*
8494 if(cpu_check_code_raw(env, pc_ptr, env->hflags | (env->eflags & (IOPL_MASK | TF_MASK | VM_MASK))) == ERROR_SUCCESS)
8495 {
8496 //should never happen as the jump to the patch code terminates the translation block
8497 dprintf(("QEmu is about to execute instructions in our patch block at %08X!!\n", pc_ptr));
8498 }
8499*/
8500#endif
8501 if (env->state & CPU_EMULATE_SINGLE_INSTR)
8502 {
8503 env->state &= ~CPU_EMULATE_SINGLE_INSTR;
8504 gen_jmp_im(pc_ptr - dc->cs_base);
8505 gen_eob(dc);
8506 break;
8507 }
8508#endif /* VBOX */
8509
8510 /* if single step mode, we generate only one instruction and
8511 generate an exception */
8512 /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
8513 the flag and abort the translation to give the irqs a
8514 change to be happen */
8515 if (dc->tf || dc->singlestep_enabled ||
8516 (flags & HF_INHIBIT_IRQ_MASK)) {
8517 gen_jmp_im(pc_ptr - dc->cs_base);
8518 gen_eob(dc);
8519 break;
8520 }
8521 /* if too long translation, stop generation too */
8522 if (gen_opc_ptr >= gen_opc_end ||
8523 (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32) ||
8524 num_insns >= max_insns) {
8525 gen_jmp_im(pc_ptr - dc->cs_base);
8526 gen_eob(dc);
8527 break;
8528 }
8529 }
8530 if (tb->cflags & CF_LAST_IO)
8531 gen_io_end();
8532 gen_icount_end(tb, num_insns);
8533 *gen_opc_ptr = INDEX_op_end;
8534 /* we don't forget to fill the last values */
8535 if (search_pc) {
8536 j = gen_opc_ptr - gen_opc_buf;
8537 lj++;
8538 while (lj <= j)
8539 gen_opc_instr_start[lj++] = 0;
8540 }
8541
8542#ifdef DEBUG_DISAS
8543 if (loglevel & CPU_LOG_TB_CPU) {
8544 cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
8545 }
8546 if (loglevel & CPU_LOG_TB_IN_ASM) {
8547 int disas_flags;
8548 fprintf(logfile, "----------------\n");
8549 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
8550#ifdef TARGET_X86_64
8551 if (dc->code64)
8552 disas_flags = 2;
8553 else
8554#endif
8555 disas_flags = !dc->code32;
8556 target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
8557 fprintf(logfile, "\n");
8558 }
8559#endif
8560
8561 if (!search_pc) {
8562 tb->size = pc_ptr - pc_start;
8563 tb->icount = num_insns;
8564 }
8565}
8566
/* Translate basic block 'tb' into TCG ops without recording per-op PC
   mapping information (search_pc = 0). */
void gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}
8571
/* Same as gen_intermediate_code(), but additionally records the guest
   PC / cc_op of each translated instruction (search_pc = 1) so that
   gen_pc_load() can map an op index back to guest state. */
void gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}
8576
8577void gen_pc_load(CPUState *env, TranslationBlock *tb,
8578 unsigned long searched_pc, int pc_pos, void *puc)
8579{
8580 int cc_op;
8581#ifdef DEBUG_DISAS
8582 if (loglevel & CPU_LOG_TB_OP) {
8583 int i;
8584 fprintf(logfile, "RESTORE:\n");
8585 for(i = 0;i <= pc_pos; i++) {
8586 if (gen_opc_instr_start[i]) {
8587 fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
8588 }
8589 }
8590 fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
8591 searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
8592 (uint32_t)tb->cs_base);
8593 }
8594#endif
8595 env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
8596 cc_op = gen_opc_cc_op[pc_pos];
8597 if (cc_op != CC_OP_DYNAMIC)
8598 env->cc_op = cc_op;
8599}
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette