VirtualBox

source: vbox/trunk/src/recompiler_new/target-i386/translate.c@16254

Last change on this file since 16254 was 16066, checked in by vboxsync, 16 years ago:

SSE truncation from QEMU trunk

  • Property svn:eol-style set to native
File size: 275.8 KB
/*
 * i386 translation
 *
 * Copyright (c) 2003 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */

/*
 * Sun LGPL Disclaimer: For the avoidance of doubt, except that if any license choice
 * other than GPL or LGPL is available it will apply instead, Sun elects to use only
 * the Lesser General Public License version 2.1 (LGPLv2) at this time for any software where
 * a choice of LGPL license versions is made available with the language indicating
 * that LGPLv2 or any later version may be used, or where a choice of which version
 * of the LGPL is applied is otherwise unspecified.
 */
#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#ifndef VBOX
#include <inttypes.h>
#include <signal.h>
#include <assert.h>
#endif /* !VBOX */

#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "helper.h"
#include "tcg-op.h"

#define PREFIX_REPZ   0x01
#define PREFIX_REPNZ  0x02
#define PREFIX_LOCK   0x04
#define PREFIX_DATA   0x08
#define PREFIX_ADR    0x10

#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#ifndef VBOX
#define X86_64_DEF(x...) x
#else
#define X86_64_DEF(x...) x
#endif
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#if 1
#define BUGGY_64(x) NULL
#endif
#else
#define X86_64_ONLY(x) NULL
#ifndef VBOX
#define X86_64_DEF(x...)
#else
#define X86_64_DEF(x)
#endif
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif

//#define MACRO_TEST   1

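/* Condition codes are evaluated lazily: arithmetic ops only record their
   operands/result in cpu_cc_src/cpu_cc_dst together with an operation tag
   in cpu_cc_op; the actual EFLAGS bits are computed on demand by
   gen_compute_eflags()/gen_compute_eflags_c() further down. */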
/* global register indexes */
static TCGv cpu_env, cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp;
/* local temps */
static TCGv cpu_T[2], cpu_T3;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
static TCGv cpu_tmp5, cpu_tmp6;

#include "gen-icount.h"

#ifdef TARGET_X86_64
static int x86_64_hregs;
#endif

#ifdef VBOX

/* Special/override code readers to hide patched code. */

uint8_t ldub_code_raw(target_ulong pc)
{
    uint8_t b;

    if (!remR3GetOpcode(cpu_single_env, pc, &b))
        b = ldub_code(pc);
    return b;
}
#define ldub_code(a) ldub_code_raw(a)

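/* The wider readers below are composed of byte reads so that the
   patch-aware byte reader above is consulted for every byte of a
   multi-byte opcode or immediate. */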
uint16_t lduw_code_raw(target_ulong pc)
{
    return (ldub_code(pc+1) << 8) | ldub_code(pc);
}
#define lduw_code(a) lduw_code_raw(a)


uint32_t ldl_code_raw(target_ulong pc)
{
    return (ldub_code(pc+3) << 24) | (ldub_code(pc+2) << 16) | (ldub_code(pc+1) << 8) | ldub_code(pc);
}
#define ldl_code(a) ldl_code_raw(a)

#endif /* VBOX */


typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;
    int aflag, dflag;
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b;
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non-zero if either of DS/ES/SS has a non-zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
#ifdef VBOX
    int vme;    /* CR4.VME */
    int pvi;    /* CR4.PVI */
    int record_call;    /* record calls for CSAM or not? */
#endif
    int cpl;
    int iopl;
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;
    int cpuid_ext_features;
    int cpuid_ext2_features;
    int cpuid_ext3_features;
} DisasContext;

static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);

#ifdef VBOX
static void gen_check_external_event(void);
#endif

/* i386 arith/logic operations */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};

/* i386 shift ops */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7,
};

enum {
    JCC_O,
    JCC_B,
    JCC_Z,
    JCC_BE,
    JCC_S,
    JCC_P,
    JCC_L,
    JCC_LE,
};

/* operand size */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};

enum {
    /* I386 int registers */
    OR_EAX,   /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16,    /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};

#ifndef VBOX
static inline void gen_op_movl_T0_0(void)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_0(void)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}

#ifndef VBOX
static inline void gen_op_movl_T0_im(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_im(int32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

#ifndef VBOX
static inline void gen_op_movl_T0_imu(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_imu(uint32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

#ifndef VBOX
static inline void gen_op_movl_T1_im(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T1_im(int32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

#ifndef VBOX
static inline void gen_op_movl_T1_imu(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T1_imu(uint32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

#ifndef VBOX
static inline void gen_op_movl_A0_im(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_A0_im(uint32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_A0, val);
}

#ifdef TARGET_X86_64
#ifndef VBOX
static inline void gen_op_movq_A0_im(int64_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movq_A0_im(int64_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif

#ifndef VBOX
static inline void gen_movtl_T0_im(target_ulong val)
#else /* VBOX */
DECLINLINE(void) gen_movtl_T0_im(target_ulong val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

#ifndef VBOX
static inline void gen_movtl_T1_im(target_ulong val)
#else /* VBOX */
DECLINLINE(void) gen_movtl_T1_im(target_ulong val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

#ifndef VBOX
static inline void gen_op_andl_T0_ffff(void)
#else /* VBOX */
DECLINLINE(void) gen_op_andl_T0_ffff(void)
#endif /* VBOX */
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}

#ifndef VBOX
static inline void gen_op_andl_T0_im(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_andl_T0_im(uint32_t val)
#endif /* VBOX */
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}

#ifndef VBOX
static inline void gen_op_movl_T0_T1(void)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_T1(void)
#endif /* VBOX */
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}

#ifndef VBOX
static inline void gen_op_andl_A0_ffff(void)
#else /* VBOX */
DECLINLINE(void) gen_op_andl_A0_ffff(void)
#endif /* VBOX */
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}

#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#endif /* !TARGET_X86_64 */

#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif

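/* The REG_*_OFFSET values above map the x86 sub-register views (AL, AH, AX,
   EAX) onto byte offsets within the host-endian target_ulong register slot.
   For byte operands, registers 4-7 normally mean AH/CH/DH/BH (high byte of
   regs 0-3); when a REX prefix is in effect (x86_64_hregs) they instead
   address the low byte of ESP/EBP/ESI/EDI (SPL/BPL/SIL/DIL). */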
#ifndef VBOX
static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_v(int ot, int reg, TCGv t0)
#endif /* VBOX */
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}

#ifndef VBOX
static inline void gen_op_mov_reg_T0(int ot, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_T0(int ot, int reg)
#endif /* VBOX */
{
    gen_op_mov_reg_v(ot, reg, cpu_T[0]);
}

#ifndef VBOX
static inline void gen_op_mov_reg_T1(int ot, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_T1(int ot, int reg)
#endif /* VBOX */
{
    gen_op_mov_reg_v(ot, reg, cpu_T[1]);
}

#ifndef VBOX
static inline void gen_op_mov_reg_A0(int size, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_A0(int size, int reg)
#endif /* VBOX */
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}

#ifndef VBOX
static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_v_reg(int ot, TCGv t0, int reg)
#endif /* VBOX */
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
#ifndef VBOX
            goto std_case;
#else
            tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
#endif
        } else {
            tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
    std_case:
        tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}

#ifndef VBOX
static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_TN_reg(int ot, int t_index, int reg)
#endif /* VBOX */
{
    gen_op_mov_v_reg(ot, cpu_T[t_index], reg);
}

#ifndef VBOX
static inline void gen_op_movl_A0_reg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_A0_reg(int reg)
#endif /* VBOX */
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}

#ifndef VBOX
static inline void gen_op_addl_A0_im(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_A0_im(int32_t val)
#endif /* VBOX */
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
#ifndef VBOX
static inline void gen_op_addq_A0_im(int64_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_addq_A0_im(int64_t val)
#endif /* VBOX */
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif

static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}

#ifndef VBOX
static inline void gen_op_addl_T0_T1(void)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_T0_T1(void)
#endif /* VBOX */
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

#ifndef VBOX
static inline void gen_op_jmp_T0(void)
#else /* VBOX */
DECLINLINE(void) gen_op_jmp_T0(void)
#endif /* VBOX */
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}

#ifndef VBOX
static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_add_reg_im(int size, int reg, int32_t val)
#endif /* VBOX */
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}

#ifndef VBOX
static inline void gen_op_add_reg_T0(int size, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_add_reg_T0(int size, int reg)
#endif /* VBOX */
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}

#ifndef VBOX
static inline void gen_op_set_cc_op(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_set_cc_op(int32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_i32(cpu_cc_op, val);
}

#ifndef VBOX
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_A0_reg_sN(int shift, int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
#ifdef VBOX
DECLINLINE(void) gen_op_seg_check(int reg, bool keepA0)
{
    /* Segments don't seem to get out of sync - if they in fact do, enable the code below. */
#if 0
    /* Our segments could be outdated, so check the newselector field to see whether an update is really needed. */
    int skip_label;
    TCGv t0, a0;

    /* For segments other than GS this check is a waste of time, and TCG is unable to cope
       with this code for data/stack segments, as it expects cpu_T[0] to be live. */
    if (reg != R_GS)
        return;

    if (keepA0)
    {
        /* we need to store old cpu_A0 */
        a0 = tcg_temp_local_new(TCG_TYPE_TL);
        tcg_gen_mov_tl(a0, cpu_A0);
    }

    skip_label = gen_new_label();
    t0 = tcg_temp_local_new(TCG_TYPE_TL);

    tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, segs[reg].newselector) + REG_L_OFFSET);
    tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, skip_label);
    tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, eflags) + REG_L_OFFSET);
    tcg_gen_andi_tl(t0, t0, VM_MASK);
    tcg_gen_brcondi_i32(TCG_COND_NE, t0, 0, skip_label);
    tcg_gen_movi_tl(t0, reg);

    tcg_gen_helper_0_1(helper_sync_seg, t0);

    tcg_temp_free(t0);

    gen_set_label(skip_label);
    if (keepA0)
    {
        tcg_gen_mov_tl(cpu_A0, a0);
        tcg_temp_free(a0);
    }
#endif /* 0 */
}
#endif

#ifndef VBOX
static inline void gen_op_movl_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_A0_seg(int reg)
#endif /* VBOX */
{
#ifdef VBOX
    gen_op_seg_check(reg, false);
#endif
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}

#ifndef VBOX
static inline void gen_op_addl_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_A0_seg(int reg)
#endif /* VBOX */
{
#ifdef VBOX
    gen_op_seg_check(reg, true);
#endif
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
#ifndef VBOX
static inline void gen_op_movq_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movq_A0_seg(int reg)
#endif /* VBOX */
{
#ifdef VBOX
    gen_op_seg_check(reg, false);
#endif
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
}

#ifndef VBOX
static inline void gen_op_addq_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addq_A0_seg(int reg)
#endif /* VBOX */
{
#ifdef VBOX
    gen_op_seg_check(reg, true);
#endif
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}

#ifndef VBOX
static inline void gen_op_movq_A0_reg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movq_A0_reg(int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
}

#ifndef VBOX
static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addq_A0_reg_sN(int shift, int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
#endif

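/* In the load/store helpers below, the idx argument packs the operand size
   in its low two bits (OT_*) and the softmmu memory index, biased by one,
   in the remaining bits; callers pass "ot + s->mem_index". */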
#ifndef VBOX
static inline void gen_op_lds_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_lds_T0_A0(int idx)
#endif /* VBOX */
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 2:
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

#ifndef VBOX
static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0)
#else /* VBOX */
DECLINLINE(void) gen_op_ld_v(int idx, TCGv t0, TCGv a0)
#endif /* VBOX */
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(t0, a0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(t0, a0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(t0, a0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(t0, a0, mem_index);
        break;
    }
}

/* XXX: always use ldu or lds */
#ifndef VBOX
static inline void gen_op_ld_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_ld_T0_A0(int idx)
#endif /* VBOX */
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}

#ifndef VBOX
static inline void gen_op_ldu_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_ldu_T0_A0(int idx)
#endif /* VBOX */
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}

#ifndef VBOX
static inline void gen_op_ld_T1_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_ld_T1_A0(int idx)
#endif /* VBOX */
{
    gen_op_ld_v(idx, cpu_T[1], cpu_A0);
}

#ifndef VBOX
static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0)
#else /* VBOX */
DECLINLINE(void) gen_op_st_v(int idx, TCGv t0, TCGv a0)
#endif /* VBOX */
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(t0, a0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(t0, a0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(t0, a0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(t0, a0, mem_index);
        break;
    }
}

#ifndef VBOX
static inline void gen_op_st_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_st_T0_A0(int idx)
#endif /* VBOX */
{
    gen_op_st_v(idx, cpu_T[0], cpu_A0);
}

#ifndef VBOX
static inline void gen_op_st_T1_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_st_T1_A0(int idx)
#endif /* VBOX */
{
    gen_op_st_v(idx, cpu_T[1], cpu_A0);
}

#ifdef VBOX
static void gen_check_external_event(void)
{
    int skip_label;
    TCGv t0;

    skip_label = gen_new_label();
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    /* t0 = cpu_tmp0; */

    tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, interrupt_request));
    /* Keep in sync with helper_check_external_event() */
    tcg_gen_andi_tl(t0, t0,
                    CPU_INTERRUPT_EXTERNAL_EXIT
                    | CPU_INTERRUPT_EXTERNAL_TIMER
                    | CPU_INTERRUPT_EXTERNAL_DMA
                    | CPU_INTERRUPT_EXTERNAL_HARD);
    /** @todo: predict branch as taken */
    tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, skip_label);
    tcg_temp_free(t0);

    tcg_gen_helper_0_0(helper_check_external_event);

    gen_set_label(skip_label);
}

static void gen_check_external_event2(void)
{
    tcg_gen_helper_0_0(helper_check_external_event);
}

#endif

#ifndef VBOX
static inline void gen_jmp_im(target_ulong pc)
#else /* VBOX */
DECLINLINE(void) gen_jmp_im(target_ulong pc)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}

#ifdef VBOX
DECLINLINE(void) gen_update_eip(target_ulong pc)
{
    gen_jmp_im(pc);
#ifdef VBOX_DUMP_STATE
    tcg_gen_helper_0_0(helper_dump_state);
#endif
}

#endif

#ifndef VBOX
static inline void gen_string_movl_A0_ESI(DisasContext *s)
#else /* VBOX */
DECLINLINE(void) gen_string_movl_A0_ESI(DisasContext *s)
#endif /* VBOX */
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 bit address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}

#ifndef VBOX
static inline void gen_string_movl_A0_EDI(DisasContext *s)
#else /* VBOX */
DECLINLINE(void) gen_string_movl_A0_EDI(DisasContext *s)
#endif /* VBOX */
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}

#ifndef VBOX
static inline void gen_op_movl_T0_Dshift(int ot)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_Dshift(int ot)
#endif /* VBOX */
{
    tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
}

static void gen_extu(int ot, TCGv reg)
{
    switch(ot) {
    case OT_BYTE:
        tcg_gen_ext8u_tl(reg, reg);
        break;
    case OT_WORD:
        tcg_gen_ext16u_tl(reg, reg);
        break;
    case OT_LONG:
        tcg_gen_ext32u_tl(reg, reg);
        break;
    default:
        break;
    }
}

static void gen_exts(int ot, TCGv reg)
{
    switch(ot) {
    case OT_BYTE:
        tcg_gen_ext8s_tl(reg, reg);
        break;
    case OT_WORD:
        tcg_gen_ext16s_tl(reg, reg);
        break;
    case OT_LONG:
        tcg_gen_ext32s_tl(reg, reg);
        break;
    default:
        break;
    }
}

#ifndef VBOX
static inline void gen_op_jnz_ecx(int size, int label1)
#else /* VBOX */
DECLINLINE(void) gen_op_jnz_ecx(int size, int label1)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
}

#ifndef VBOX
static inline void gen_op_jz_ecx(int size, int label1)
#else /* VBOX */
DECLINLINE(void) gen_op_jz_ecx(int size, int label1)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
}

static void *helper_in_func[3] = {
    helper_inb,
    helper_inw,
    helper_inl,
};

static void *helper_out_func[3] = {
    helper_outb,
    helper_outw,
    helper_outl,
};

static void *gen_check_io_func[3] = {
    helper_check_iob,
    helper_check_iow,
    helper_check_iol,
};

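/* gen_check_io() performs up to two checks before an I/O instruction: in
   protected mode with CPL > IOPL (or in vm86 mode) the I/O permission check,
   and, when running under SVM, the I/O intercept check.  Both call helpers
   that may raise an exception, so cc_op and eip are flushed first. */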
static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
                         uint32_t svm_flags)
{
    int state_saved;
    target_ulong next_eip;

    state_saved = 0;
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        state_saved = 1;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_1(gen_check_io_func[ot],
                           cpu_tmp2_i32);
    }
    if (s->flags & HF_SVMI_MASK) {
        if (!state_saved) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(cur_eip);
            state_saved = 1;
        }
        svm_flags |= (1 << (4 + ot));
        next_eip = s->pc - s->cs_base;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_3(helper_svm_check_io,
                           cpu_tmp2_i32,
                           tcg_const_i32(svm_flags),
                           tcg_const_i32(next_eip - cur_eip));
    }
}

#ifndef VBOX
static inline void gen_movs(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_movs(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

#ifndef VBOX
static inline void gen_update_cc_op(DisasContext *s)
#else /* VBOX */
DECLINLINE(void) gen_update_cc_op(DisasContext *s)
#endif /* VBOX */
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}

static void gen_op_update1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static void gen_op_update2_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

#ifndef VBOX
static inline void gen_op_cmpl_T0_T1_cc(void)
#else /* VBOX */
DECLINLINE(void) gen_op_cmpl_T0_T1_cc(void)
#endif /* VBOX */
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

#ifndef VBOX
static inline void gen_op_testl_T0_T1_cc(void)
#else /* VBOX */
DECLINLINE(void) gen_op_testl_T0_T1_cc(void)
#endif /* VBOX */
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

static void gen_op_update_neg_cc(void)
{
    tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

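/* EFLAGS are materialized by calling through cc_table: cpu_cc_op indexes an
   array of { compute_all, compute_c } helper pairs, so the index scaled by
   sizeof(CCTable) (8 on 32-bit hosts, 16 on 64-bit hosts, hence the shifts
   by 3 and 4 below) yields the helper address, which is then invoked as an
   indirect TCG call. */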
/* compute eflags.C to reg */
static void gen_compute_eflags_c(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}

/* compute all eflags to cc_src */
static void gen_compute_eflags(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}

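/* The slow setcc path below extracts individual flags from the computed
   EFLAGS value; the bit positions used are CF=0, PF=2, ZF=6, SF=7, OF=11. */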
#ifndef VBOX
static inline void gen_setcc_slow_T0(DisasContext *s, int jcc_op)
#else /* VBOX */
DECLINLINE(void) gen_setcc_slow_T0(DisasContext *s, int jcc_op)
#endif /* VBOX */
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    switch(jcc_op) {
    case JCC_O:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 11);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_B:
        gen_compute_eflags_c(cpu_T[0]);
        break;
    case JCC_Z:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 6);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_BE:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 6);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_S:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 7);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_P:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 2);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_L:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 7); /* CC_S */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    default:
    case JCC_LE:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp4, cpu_tmp0, 7); /* CC_S */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 6); /* CC_Z */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    }
}

/* return true if setcc_slow is not needed (WARNING: must be kept in
   sync with gen_jcc1) */
static int is_fast_jcc_case(DisasContext *s, int b)
{
    int jcc_op;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        if (jcc_op == JCC_O || jcc_op == JCC_P)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        if (jcc_op != JCC_Z && jcc_op != JCC_S)
            goto slow_jcc;
        break;
    default:
    slow_jcc:
        return 0;
    }
    return 1;
}

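/* For the SUB/CMP fast paths below, the original left operand is
   reconstructed as cpu_cc_dst + cpu_cc_src (result + subtrahend), which
   lets signed and unsigned compares be redone directly with a TCG brcond
   instead of materializing EFLAGS. */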
/* generate a conditional jump to label 'l1' according to jump opcode
   value 'b'. In the fast case, T0 is guaranteed not to be used. */
#ifndef VBOX
static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
#else /* VBOX */
DECLINLINE(void) gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
#endif /* VBOX */
{
    int inv, jcc_op, size, cond;
    TCGv t0;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    switch(cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:

        size = cc_op - CC_OP_SUBB;
        switch(jcc_op) {
        case JCC_Z:
        fast_jcc_z:
            switch(size) {
            case 0:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xff);
                t0 = cpu_tmp0;
                break;
            case 1:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffff);
                t0 = cpu_tmp0;
                break;
#ifdef TARGET_X86_64
            case 2:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffffffff);
                t0 = cpu_tmp0;
                break;
#endif
            default:
                t0 = cpu_cc_dst;
                break;
            }
            tcg_gen_brcondi_tl(inv ? TCG_COND_NE : TCG_COND_EQ, t0, 0, l1);
            break;
        case JCC_S:
        fast_jcc_s:
            switch(size) {
            case 0:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
            case 1:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x8000);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
#ifdef TARGET_X86_64
            case 2:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80000000);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
#endif
            default:
                tcg_gen_brcondi_tl(inv ? TCG_COND_GE : TCG_COND_LT, cpu_cc_dst,
                                   0, l1);
                break;
            }
            break;

        case JCC_B:
            cond = inv ? TCG_COND_GEU : TCG_COND_LTU;
            goto fast_jcc_b;
        case JCC_BE:
            cond = inv ? TCG_COND_GTU : TCG_COND_LEU;
        fast_jcc_b:
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
            switch(size) {
            case 0:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xff);
                break;
            case 1:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffff);
                break;
#ifdef TARGET_X86_64
            case 2:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffffffff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffffffff);
                break;
#endif
            default:
                t0 = cpu_cc_src;
                break;
            }
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
            break;

        case JCC_L:
            cond = inv ? TCG_COND_GE : TCG_COND_LT;
            goto fast_jcc_l;
        case JCC_LE:
            cond = inv ? TCG_COND_GT : TCG_COND_LE;
        fast_jcc_l:
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
            switch(size) {
            case 0:
                t0 = cpu_tmp0;
                tcg_gen_ext8s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext8s_tl(t0, cpu_cc_src);
                break;
            case 1:
                t0 = cpu_tmp0;
                tcg_gen_ext16s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext16s_tl(t0, cpu_cc_src);
                break;
#ifdef TARGET_X86_64
            case 2:
                t0 = cpu_tmp0;
                tcg_gen_ext32s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext32s_tl(t0, cpu_cc_src);
                break;
#endif
            default:
                t0 = cpu_cc_src;
                break;
            }
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
            break;

        default:
            goto slow_jcc;
        }
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_ADCB:
    case CC_OP_ADCW:
    case CC_OP_ADCL:
    case CC_OP_ADCQ:

    case CC_OP_SBBB:
    case CC_OP_SBBW:
    case CC_OP_SBBL:
    case CC_OP_SBBQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:

    case CC_OP_SARB:
    case CC_OP_SARW:
    case CC_OP_SARL:
    case CC_OP_SARQ:
        switch(jcc_op) {
        case JCC_Z:
            size = (cc_op - CC_OP_ADDB) & 3;
            goto fast_jcc_z;
        case JCC_S:
            size = (cc_op - CC_OP_ADDB) & 3;
            goto fast_jcc_s;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        gen_setcc_slow_T0(s, jcc_op);
        tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE,
                           cpu_T[0], 0, l1);
        break;
    }
}

/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx(s->aflag, l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}

#ifndef VBOX
static inline void gen_stos(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_stos(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

#ifndef VBOX
static inline void gen_lods(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_lods(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
}

#ifndef VBOX
static inline void gen_scas(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_scas(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

#ifndef VBOX
static inline void gen_cmps(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_cmps(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

#ifndef VBOX
static inline void gen_ins(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_ins(DisasContext *s, int ot)
#endif /* VBOX */
{
    if (use_icount)
        gen_io_start();
    gen_string_movl_A0_EDI(s);
    /* Note: we must do this dummy write first to be restartable in
       case of page fault. */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
    if (use_icount)
        gen_io_end();
}

#ifndef VBOX
static inline void gen_outs(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_outs(DisasContext *s, int ot)
#endif /* VBOX */
{
    if (use_icount)
        gen_io_start();
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);

    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
    tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);

    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    if (use_icount)
        gen_io_end();
}

/* same method as Valgrind: we generate jumps to current or next
   instruction */
#ifndef VBOX
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
#else /* VBOX */
#define GEN_REPZ(op)                                                          \
DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot,                     \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
#endif /* VBOX */

#ifndef VBOX
#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2);                \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
#else /* VBOX */
#define GEN_REPZ2(op)                                                         \
DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot,                     \
                                 target_ulong cur_eip,                        \
                                 target_ulong next_eip,                       \
                                 int nz)                                      \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2);                \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
#endif /* VBOX */

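/* The generated gen_repz_* helpers implement REP/REPZ/REPNZ loops: skip the
   body if ECX is zero, execute one iteration, decrement ECX and (for the Z
   variants) test ZF; looping is done by jumping back to the current eip, so
   pending interrupts can be serviced between iterations. */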
GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)

static void *helper_fp_arith_ST0_FT0[8] = {
    helper_fadd_ST0_FT0,
    helper_fmul_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fsub_ST0_FT0,
    helper_fsubr_ST0_FT0,
    helper_fdiv_ST0_FT0,
    helper_fdivr_ST0_FT0,
};

/* NOTE the exception in "r" op ordering */
static void *helper_fp_arith_STN_ST0[8] = {
    helper_fadd_STN_ST0,
    helper_fmul_STN_ST0,
    NULL,
    NULL,
    helper_fsubr_STN_ST0,
    helper_fsub_STN_ST0,
    helper_fdivr_STN_ST0,
    helper_fdiv_STN_ST0,
};

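/* For ADC/SBB the carry-in makes the resulting flags state depend on a
   run-time value, so gen_op() computes cc_op dynamically: the carry (0/1)
   shifted left by 2 turns CC_OP_ADDB+ot into CC_OP_ADCB+ot (and likewise
   SUB into SBB), the ADC/SBB groups sitting four entries after the ADD/SUB
   groups in the CC_OP enum. */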
/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg(ot, 0, d);
    } else {
        gen_op_ld_T0_A0(ot + s1->mem_index);
    }
    switch(op) {
    case OP_ADCL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_SBBL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_ADDB + ot;
        break;
    case OP_SUBL:
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    default:
    case OP_ANDL:
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_ORL:
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_XORL:
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_CMPL:
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    }
}

/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    else
        gen_op_st_T0_A0(ot + s1->mem_index);
    gen_compute_eflags_c(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

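/* x86 leaves the flags unchanged when the masked shift count is zero, so
   the shift helpers below only update cc_src/cc_dst behind a run-time test
   of the count and must leave cc_op as CC_OP_DYNAMIC afterwards. */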
static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
                            int is_right, int is_arith)
{
    target_ulong mask;
    int shift_label;
    TCGv t0, t1;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);

    if (is_right) {
        if (is_arith) {
            gen_exts(ot, cpu_T[0]);
            tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        } else {
            gen_extu(ot, cpu_T[0]);
            tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* XXX: inefficient */
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);

    tcg_gen_mov_tl(t0, cpu_T[0]);
    tcg_gen_mov_tl(t1, cpu_T3);

    shift_label = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_T[1], 0, shift_label);

    tcg_gen_mov_tl(cpu_cc_src, t1);
    tcg_gen_mov_tl(cpu_cc_dst, t0);
    if (is_right)
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    else
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);

    gen_set_label(shift_label);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
}

static void gen_shift_rm_im(DisasContext *s, int ot, int op1, int op2,
                            int is_right, int is_arith)
{
    int mask;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    op2 &= mask;
    if (op2 != 0) {
        if (is_right) {
            if (is_arith) {
                gen_exts(ot, cpu_T[0]);
                tcg_gen_sari_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_sari_tl(cpu_T[0], cpu_T[0], op2);
            } else {
                gen_extu(ot, cpu_T[0]);
                tcg_gen_shri_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], op2);
            }
        } else {
            tcg_gen_shli_tl(cpu_tmp4, cpu_T[0], op2 - 1);
            tcg_gen_shli_tl(cpu_T[0], cpu_T[0], op2);
        }
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (op2 != 0) {
        tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        if (is_right)
            s->cc_op = CC_OP_SARB + ot;
        else
            s->cc_op = CC_OP_SHLB + ot;
    }
}

#ifndef VBOX
static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
#else /* VBOX */
DECLINLINE(void) tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
#endif /* VBOX */
{
    if (arg2 >= 0)
        tcg_gen_shli_tl(ret, arg1, arg2);
    else
        tcg_gen_shri_tl(ret, arg1, -arg2);
}

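/* The rotate below is composed from two shifts, x rot n = (x >> n) |
   (x << (bits - n)) for right rotates and the mirror image for left ones;
   afterwards CF is taken from the bit that wrapped around and OF from the
   XOR of the pre- and post-rotate values' top bit. */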
/* XXX: add faster immediate case */
static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
                          int is_right)
{
    target_ulong mask;
    int label1, label2, data_bits;
    TCGv t0, t1, t2, a0;

    /* XXX: inefficient, but we must use local temps */
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);
    t2 = tcg_temp_local_new(TCG_TYPE_TL);
    a0 = tcg_temp_local_new(TCG_TYPE_TL);

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0) {
        tcg_gen_mov_tl(a0, cpu_A0);
        gen_op_ld_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_v_reg(ot, t0, op1);
    }

    tcg_gen_mov_tl(t1, cpu_T[1]);

    tcg_gen_andi_tl(t1, t1, mask);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label1);

    if (ot <= OT_WORD)
        tcg_gen_andi_tl(cpu_tmp0, t1, (1 << (3 + ot)) - 1);
    else
        tcg_gen_mov_tl(cpu_tmp0, t1);

    gen_extu(ot, t0);
    tcg_gen_mov_tl(t2, t0);

    data_bits = 8 << ot;
    /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
       fix TCG definition) */
    if (is_right) {
        tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shl_tl(t0, t0, cpu_tmp0);
    } else {
        tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shr_tl(t0, t0, cpu_tmp0);
    }
    tcg_gen_or_tl(t0, t0, cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0) {
        gen_op_st_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_reg_v(ot, op1, t0);
    }

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label2);

    gen_compute_eflags(cpu_cc_src);
    tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
    tcg_gen_xor_tl(cpu_tmp0, t2, t0);
    tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
    if (is_right) {
        tcg_gen_shri_tl(t0, t0, data_bits - 1);
    }
    tcg_gen_andi_tl(t0, t0, CC_C);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);

    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(a0);
}

static void *helper_rotc[8] = {
    helper_rclb,
    helper_rclw,
    helper_rcll,
    X86_64_ONLY(helper_rclq),
    helper_rcrb,
    helper_rcrw,
    helper_rcrl,
    X86_64_ONLY(helper_rcrq),
};

/* XXX: add faster immediate = 1 case */
static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
                           int is_right)
{
    int label1;

    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
                       cpu_T[0], cpu_T[0], cpu_T[1]);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_tmp, -1, label1);

    tcg_gen_mov_tl(cpu_cc_src, cpu_cc_tmp);
    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label1);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}

2149/* XXX: add faster immediate case */
2150static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
2151 int is_right)
2152{
2153 int label1, label2, data_bits;
2154 target_ulong mask;
2155 TCGv t0, t1, t2, a0;
2156
2157 t0 = tcg_temp_local_new(TCG_TYPE_TL);
2158 t1 = tcg_temp_local_new(TCG_TYPE_TL);
2159 t2 = tcg_temp_local_new(TCG_TYPE_TL);
2160 a0 = tcg_temp_local_new(TCG_TYPE_TL);
2161
2162 if (ot == OT_QUAD)
2163 mask = 0x3f;
2164 else
2165 mask = 0x1f;
2166
2167 /* load */
2168 if (op1 == OR_TMP0) {
2169 tcg_gen_mov_tl(a0, cpu_A0);
2170 gen_op_ld_v(ot + s->mem_index, t0, a0);
2171 } else {
2172 gen_op_mov_v_reg(ot, t0, op1);
2173 }
2174
2175 tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);
2176
2177 tcg_gen_mov_tl(t1, cpu_T[1]);
2178 tcg_gen_mov_tl(t2, cpu_T3);
2179
2180 /* Must test zero case to avoid using undefined behaviour in TCG
2181 shifts. */
2182 label1 = gen_new_label();
2183 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
2184
2185 tcg_gen_addi_tl(cpu_tmp5, t2, -1);
2186 if (ot == OT_WORD) {
2187 /* Note: we implement the Intel behaviour for shift count > 16 */
2188 if (is_right) {
2189 tcg_gen_andi_tl(t0, t0, 0xffff);
2190 tcg_gen_shli_tl(cpu_tmp0, t1, 16);
2191 tcg_gen_or_tl(t0, t0, cpu_tmp0);
2192 tcg_gen_ext32u_tl(t0, t0);
2193
2194 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
2195
2196 /* only needed if count > 16, but a test would complicate the code */
2197 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
2198 tcg_gen_shl_tl(cpu_tmp0, t0, cpu_tmp5);
2199
2200 tcg_gen_shr_tl(t0, t0, t2);
2201
2202 tcg_gen_or_tl(t0, t0, cpu_tmp0);
2203 } else {
2204 /* XXX: not optimal */
2205 tcg_gen_andi_tl(t0, t0, 0xffff);
2206 tcg_gen_shli_tl(t1, t1, 16);
2207 tcg_gen_or_tl(t1, t1, t0);
2208 tcg_gen_ext32u_tl(t1, t1);
2209
2210 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
2211 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
2212 tcg_gen_shr_tl(cpu_tmp6, t1, cpu_tmp0);
2213 tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);
2214
2215 tcg_gen_shl_tl(t0, t0, t2);
2216 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
2217 tcg_gen_shr_tl(t1, t1, cpu_tmp5);
2218 tcg_gen_or_tl(t0, t0, t1);
2219 }
2220 } else {
2221 data_bits = 8 << ot;
2222 if (is_right) {
2223 if (ot == OT_LONG)
2224 tcg_gen_ext32u_tl(t0, t0);
2225
2226 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
2227
2228 tcg_gen_shr_tl(t0, t0, t2);
2229 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
2230 tcg_gen_shl_tl(t1, t1, cpu_tmp5);
2231 tcg_gen_or_tl(t0, t0, t1);
2232
2233 } else {
2234 if (ot == OT_LONG)
2235 tcg_gen_ext32u_tl(t1, t1);
2236
2237 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
2238
2239 tcg_gen_shl_tl(t0, t0, t2);
2240 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
2241 tcg_gen_shr_tl(t1, t1, cpu_tmp5);
2242 tcg_gen_or_tl(t0, t0, t1);
2243 }
2244 }
2245 tcg_gen_mov_tl(t1, cpu_tmp4);
2246
2247 gen_set_label(label1);
2248 /* store */
2249 if (op1 == OR_TMP0) {
2250 gen_op_st_v(ot + s->mem_index, t0, a0);
2251 } else {
2252 gen_op_mov_reg_v(ot, op1, t0);
2253 }
2254
2255 /* update eflags */
2256 if (s->cc_op != CC_OP_DYNAMIC)
2257 gen_op_set_cc_op(s->cc_op);
2258
2259 label2 = gen_new_label();
2260 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label2);
2261
2262 tcg_gen_mov_tl(cpu_cc_src, t1);
2263 tcg_gen_mov_tl(cpu_cc_dst, t0);
2264 if (is_right) {
2265 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
2266 } else {
2267 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
2268 }
2269 gen_set_label(label2);
2270 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2271
2272 tcg_temp_free(t0);
2273 tcg_temp_free(t1);
2274 tcg_temp_free(t2);
2275 tcg_temp_free(a0);
2276}
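/*
 * Worked example of the double-width shift implemented above (illustrative
 * values only): with EAX = 0x12345678 and EBX = 0xAABBCCDD,
 *
 *   shld eax, ebx, 8   =>   EAX = (EAX << 8) | (EBX >> 24) = 0x345678AA
 *
 * i.e. t1 supplies the bits shifted in, while cpu_tmp4 (computed with a
 * shift by count - 1) captures the last bit shifted out for CF.
 */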
2277
2278static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
2279{
2280 if (s != OR_TMP1)
2281 gen_op_mov_TN_reg(ot, 1, s);
2282 switch(op) {
2283 case OP_ROL:
2284 gen_rot_rm_T1(s1, ot, d, 0);
2285 break;
2286 case OP_ROR:
2287 gen_rot_rm_T1(s1, ot, d, 1);
2288 break;
2289 case OP_SHL:
2290 case OP_SHL1:
2291 gen_shift_rm_T1(s1, ot, d, 0, 0);
2292 break;
2293 case OP_SHR:
2294 gen_shift_rm_T1(s1, ot, d, 1, 0);
2295 break;
2296 case OP_SAR:
2297 gen_shift_rm_T1(s1, ot, d, 1, 1);
2298 break;
2299 case OP_RCL:
2300 gen_rotc_rm_T1(s1, ot, d, 0);
2301 break;
2302 case OP_RCR:
2303 gen_rotc_rm_T1(s1, ot, d, 1);
2304 break;
2305 }
2306}
2307
2308static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
2309{
2310 switch(op) {
2311 case OP_SHL:
2312 case OP_SHL1:
2313 gen_shift_rm_im(s1, ot, d, c, 0, 0);
2314 break;
2315 case OP_SHR:
2316 gen_shift_rm_im(s1, ot, d, c, 1, 0);
2317 break;
2318 case OP_SAR:
2319 gen_shift_rm_im(s1, ot, d, c, 1, 1);
2320 break;
2321 default:
2322 /* currently not optimized */
2323 gen_op_movl_T1_im(c);
2324 gen_shift(s1, op, ot, d, OR_TMP1);
2325 break;
2326 }
2327}
2328
2329static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
2330{
2331 target_long disp;
2332 int havesib;
2333 int base;
2334 int index;
2335 int scale;
2336 int opreg;
2337 int mod, rm, code, override, must_add_seg;
2338
2339 override = s->override;
2340 must_add_seg = s->addseg;
2341 if (override >= 0)
2342 must_add_seg = 1;
2343 mod = (modrm >> 6) & 3;
2344 rm = modrm & 7;
2345
2346 if (s->aflag) {
2347
2348 havesib = 0;
2349 base = rm;
2350 index = 0;
2351 scale = 0;
2352
2353 if (base == 4) {
2354 havesib = 1;
2355 code = ldub_code(s->pc++);
2356 scale = (code >> 6) & 3;
2357 index = ((code >> 3) & 7) | REX_X(s);
2358 base = (code & 7);
2359 }
2360 base |= REX_B(s);
2361
2362 switch (mod) {
2363 case 0:
2364 if ((base & 7) == 5) {
2365 base = -1;
2366 disp = (int32_t)ldl_code(s->pc);
2367 s->pc += 4;
2368 if (CODE64(s) && !havesib) {
2369 disp += s->pc + s->rip_offset;
2370 }
2371 } else {
2372 disp = 0;
2373 }
2374 break;
2375 case 1:
2376 disp = (int8_t)ldub_code(s->pc++);
2377 break;
2378 default:
2379 case 2:
2380#ifdef VBOX
2381 disp = (int32_t)ldl_code(s->pc);
2382#else
2383 disp = ldl_code(s->pc);
2384#endif
2385 s->pc += 4;
2386 break;
2387 }
2388
2389 if (base >= 0) {
2390 /* for correct popl handling with esp */
2391 if (base == 4 && s->popl_esp_hack)
2392 disp += s->popl_esp_hack;
2393#ifdef TARGET_X86_64
2394 if (s->aflag == 2) {
2395 gen_op_movq_A0_reg(base);
2396 if (disp != 0) {
2397 gen_op_addq_A0_im(disp);
2398 }
2399 } else
2400#endif
2401 {
2402 gen_op_movl_A0_reg(base);
2403 if (disp != 0)
2404 gen_op_addl_A0_im(disp);
2405 }
2406 } else {
2407#ifdef TARGET_X86_64
2408 if (s->aflag == 2) {
2409 gen_op_movq_A0_im(disp);
2410 } else
2411#endif
2412 {
2413 gen_op_movl_A0_im(disp);
2414 }
2415 }
2416 /* XXX: index == 4 is always invalid */
2417 if (havesib && (index != 4 || scale != 0)) {
2418#ifdef TARGET_X86_64
2419 if (s->aflag == 2) {
2420 gen_op_addq_A0_reg_sN(scale, index);
2421 } else
2422#endif
2423 {
2424 gen_op_addl_A0_reg_sN(scale, index);
2425 }
2426 }
2427 if (must_add_seg) {
2428 if (override < 0) {
2429 if (base == R_EBP || base == R_ESP)
2430 override = R_SS;
2431 else
2432 override = R_DS;
2433 }
2434#ifdef TARGET_X86_64
2435 if (s->aflag == 2) {
2436 gen_op_addq_A0_seg(override);
2437 } else
2438#endif
2439 {
2440 gen_op_addl_A0_seg(override);
2441 }
2442 }
2443 } else {
2444 switch (mod) {
2445 case 0:
2446 if (rm == 6) {
2447 disp = lduw_code(s->pc);
2448 s->pc += 2;
2449 gen_op_movl_A0_im(disp);
2450 rm = 0; /* avoid SS override */
2451 goto no_rm;
2452 } else {
2453 disp = 0;
2454 }
2455 break;
2456 case 1:
2457 disp = (int8_t)ldub_code(s->pc++);
2458 break;
2459 default:
2460 case 2:
2461 disp = lduw_code(s->pc);
2462 s->pc += 2;
2463 break;
2464 }
2465 switch(rm) {
2466 case 0:
2467 gen_op_movl_A0_reg(R_EBX);
2468 gen_op_addl_A0_reg_sN(0, R_ESI);
2469 break;
2470 case 1:
2471 gen_op_movl_A0_reg(R_EBX);
2472 gen_op_addl_A0_reg_sN(0, R_EDI);
2473 break;
2474 case 2:
2475 gen_op_movl_A0_reg(R_EBP);
2476 gen_op_addl_A0_reg_sN(0, R_ESI);
2477 break;
2478 case 3:
2479 gen_op_movl_A0_reg(R_EBP);
2480 gen_op_addl_A0_reg_sN(0, R_EDI);
2481 break;
2482 case 4:
2483 gen_op_movl_A0_reg(R_ESI);
2484 break;
2485 case 5:
2486 gen_op_movl_A0_reg(R_EDI);
2487 break;
2488 case 6:
2489 gen_op_movl_A0_reg(R_EBP);
2490 break;
2491 default:
2492 case 7:
2493 gen_op_movl_A0_reg(R_EBX);
2494 break;
2495 }
2496 if (disp != 0)
2497 gen_op_addl_A0_im(disp);
2498 gen_op_andl_A0_ffff();
2499 no_rm:
2500 if (must_add_seg) {
2501 if (override < 0) {
2502 if (rm == 2 || rm == 3 || rm == 6)
2503 override = R_SS;
2504 else
2505 override = R_DS;
2506 }
2507 gen_op_addl_A0_seg(override);
2508 }
2509 }
2510
2511 opreg = OR_A0;
2512 disp = 0;
2513 *reg_ptr = opreg;
2514 *offset_ptr = disp;
2515}
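/*
 * Worked decode example (illustrative): for "mov eax, [ebx + esi*4 + 0x10]"
 * the instruction bytes are 8B 44 B3 10:
 *
 *   modrm = 0x44: mod = 01 (disp8), reg = 000 (EAX), rm = 100 (SIB follows)
 *   sib   = 0xB3: scale = 10 (x4),  index = 110 (ESI), base = 011 (EBX)
 *   disp8 = 0x10
 *
 * so A0 is computed as EBX + 0x10, ESI << 2 is then added via
 * gen_op_addl_A0_reg_sN(), and finally the DS base if must_add_seg is set.
 */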
2516
2517static void gen_nop_modrm(DisasContext *s, int modrm)
2518{
2519 int mod, rm, base, code;
2520
2521 mod = (modrm >> 6) & 3;
2522 if (mod == 3)
2523 return;
2524 rm = modrm & 7;
2525
2526 if (s->aflag) {
2527
2528 base = rm;
2529
2530 if (base == 4) {
2531 code = ldub_code(s->pc++);
2532 base = (code & 7);
2533 }
2534
2535 switch (mod) {
2536 case 0:
2537 if (base == 5) {
2538 s->pc += 4;
2539 }
2540 break;
2541 case 1:
2542 s->pc++;
2543 break;
2544 default:
2545 case 2:
2546 s->pc += 4;
2547 break;
2548 }
2549 } else {
2550 switch (mod) {
2551 case 0:
2552 if (rm == 6) {
2553 s->pc += 2;
2554 }
2555 break;
2556 case 1:
2557 s->pc++;
2558 break;
2559 default:
2560 case 2:
2561 s->pc += 2;
2562 break;
2563 }
2564 }
2565}
2566
2567/* used for LEA and MOV AX, mem */
2568static void gen_add_A0_ds_seg(DisasContext *s)
2569{
2570 int override, must_add_seg;
2571 must_add_seg = s->addseg;
2572 override = R_DS;
2573 if (s->override >= 0) {
2574 override = s->override;
2575 must_add_seg = 1;
2576 }
2579 if (must_add_seg) {
2580#ifdef TARGET_X86_64
2581 if (CODE64(s)) {
2582 gen_op_addq_A0_seg(override);
2583 } else
2584#endif
2585 {
2586 gen_op_addl_A0_seg(override);
2587 }
2588 }
2589}
2590
2591/* generate modrm memory load or store of 'reg'. TMP0 is used if reg ==
2592 OR_TMP0 */
2593static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
2594{
2595 int mod, rm, opreg, disp;
2596
2597 mod = (modrm >> 6) & 3;
2598 rm = (modrm & 7) | REX_B(s);
2599 if (mod == 3) {
2600 if (is_store) {
2601 if (reg != OR_TMP0)
2602 gen_op_mov_TN_reg(ot, 0, reg);
2603 gen_op_mov_reg_T0(ot, rm);
2604 } else {
2605 gen_op_mov_TN_reg(ot, 0, rm);
2606 if (reg != OR_TMP0)
2607 gen_op_mov_reg_T0(ot, reg);
2608 }
2609 } else {
2610 gen_lea_modrm(s, modrm, &opreg, &disp);
2611 if (is_store) {
2612 if (reg != OR_TMP0)
2613 gen_op_mov_TN_reg(ot, 0, reg);
2614 gen_op_st_T0_A0(ot + s->mem_index);
2615 } else {
2616 gen_op_ld_T0_A0(ot + s->mem_index);
2617 if (reg != OR_TMP0)
2618 gen_op_mov_reg_T0(ot, reg);
2619 }
2620 }
2621}
2622
2623#ifndef VBOX
2624static inline uint32_t insn_get(DisasContext *s, int ot)
2625#else /* VBOX */
2626DECLINLINE(uint32_t) insn_get(DisasContext *s, int ot)
2627#endif /* VBOX */
2628{
2629 uint32_t ret;
2630
2631 switch(ot) {
2632 case OT_BYTE:
2633 ret = ldub_code(s->pc);
2634 s->pc++;
2635 break;
2636 case OT_WORD:
2637 ret = lduw_code(s->pc);
2638 s->pc += 2;
2639 break;
2640 default:
2641 case OT_LONG:
2642 ret = ldl_code(s->pc);
2643 s->pc += 4;
2644 break;
2645 }
2646 return ret;
2647}
2648
2649#ifndef VBOX
2650static inline int insn_const_size(unsigned int ot)
2651#else /* VBOX */
2652DECLINLINE(int) insn_const_size(unsigned int ot)
2653#endif /* VBOX */
2654{
2655 if (ot <= OT_LONG)
2656 return 1 << ot;
2657 else
2658 return 4;
2659}
2660
2661#ifndef VBOX
2662static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2663#else /* VBOX */
2664DECLINLINE(void) gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2665#endif /* VBOX */
2666{
2667 TranslationBlock *tb;
2668 target_ulong pc;
2669
2670 pc = s->cs_base + eip;
2671 tb = s->tb;
2672 /* NOTE: we handle the case where the TB spans two pages here */
2673 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
2674 (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
2675#ifdef VBOX
2676 gen_check_external_event(s);
2677#endif /* VBOX */
2678 /* jump to same page: we can use a direct jump */
2679 tcg_gen_goto_tb(tb_num);
2680 gen_jmp_im(eip);
2681 tcg_gen_exit_tb((long)tb + tb_num);
2682 } else {
2683 /* jump to another page: currently not optimized */
2684 gen_jmp_im(eip);
2685 gen_eob(s);
2686 }
2687}
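/*
 * A small illustration of the page check above: direct TB chaining is only
 * used when the target stays on a page this TB already covers, e.g.
 *
 *   same_page = (pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK);
 *
 * otherwise the code falls back to gen_eob() so that a fresh TB lookup
 * (and any intervening page protection change) takes effect.
 */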
2688
2689#ifndef VBOX
2690static inline void gen_jcc(DisasContext *s, int b,
2691#else /* VBOX */
2692DECLINLINE(void) gen_jcc(DisasContext *s, int b,
2693#endif /* VBOX */
2694 target_ulong val, target_ulong next_eip)
2695{
2696 int l1, l2, cc_op;
2697
2698 cc_op = s->cc_op;
2699 if (s->cc_op != CC_OP_DYNAMIC) {
2700 gen_op_set_cc_op(s->cc_op);
2701 s->cc_op = CC_OP_DYNAMIC;
2702 }
2703 if (s->jmp_opt) {
2704 l1 = gen_new_label();
2705 gen_jcc1(s, cc_op, b, l1);
2706
2707 gen_goto_tb(s, 0, next_eip);
2708
2709 gen_set_label(l1);
2710 gen_goto_tb(s, 1, val);
2711 s->is_jmp = 3;
2712 } else {
2713
2714 l1 = gen_new_label();
2715 l2 = gen_new_label();
2716 gen_jcc1(s, cc_op, b, l1);
2717
2718 gen_jmp_im(next_eip);
2719 tcg_gen_br(l2);
2720
2721 gen_set_label(l1);
2722 gen_jmp_im(val);
2723 gen_set_label(l2);
2724 gen_eob(s);
2725 }
2726}
2727
2728static void gen_setcc(DisasContext *s, int b)
2729{
2730 int inv, jcc_op, l1;
2731 TCGv t0;
2732
2733 if (is_fast_jcc_case(s, b)) {
2734 /* nominal case: we use a jump */
2735 /* XXX: make it faster by adding new instructions in TCG */
2736 t0 = tcg_temp_local_new(TCG_TYPE_TL);
2737 tcg_gen_movi_tl(t0, 0);
2738 l1 = gen_new_label();
2739 gen_jcc1(s, s->cc_op, b ^ 1, l1);
2740 tcg_gen_movi_tl(t0, 1);
2741 gen_set_label(l1);
2742 tcg_gen_mov_tl(cpu_T[0], t0);
2743 tcg_temp_free(t0);
2744 } else {
2745 /* slow case: it is more efficient not to generate a jump,
2746 although it is questionable whether this optimization is
2747 worth it */
2748 inv = b & 1;
2749 jcc_op = (b >> 1) & 7;
2750 gen_setcc_slow_T0(s, jcc_op);
2751 if (inv) {
2752 tcg_gen_xori_tl(cpu_T[0], cpu_T[0], 1);
2753 }
2754 }
2755}
2756
2757#ifndef VBOX
2758static inline void gen_op_movl_T0_seg(int seg_reg)
2759#else /* VBOX */
2760DECLINLINE(void) gen_op_movl_T0_seg(int seg_reg)
2761#endif /* VBOX */
2762{
2763 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
2764 offsetof(CPUX86State,segs[seg_reg].selector));
2765}
2766
2767#ifndef VBOX
2768static inline void gen_op_movl_seg_T0_vm(int seg_reg)
2769#else /* VBOX */
2770DECLINLINE(void) gen_op_movl_seg_T0_vm(int seg_reg)
2771#endif /* VBOX */
2772{
2773 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
2774 tcg_gen_st32_tl(cpu_T[0], cpu_env,
2775 offsetof(CPUX86State,segs[seg_reg].selector));
2776 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
2777 tcg_gen_st_tl(cpu_T[0], cpu_env,
2778 offsetof(CPUX86State,segs[seg_reg].base));
2779#ifdef VBOX
2780 int flags = DESC_P_MASK | DESC_S_MASK | DESC_W_MASK;
2781 if (seg_reg == R_CS)
2782 flags |= DESC_CS_MASK;
2783 gen_op_movl_T0_im(flags);
2784 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[seg_reg].flags));
2785
2786 /* Set the limit to 0xffff. */
2787 gen_op_movl_T0_im(0xffff);
2788 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[seg_reg].limit));
2789#endif
2790}
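/*
 * Real-mode/VM86 semantics illustrated: the base is simply selector << 4,
 * so loading DS with 0x1234 yields base 0x12340, and DS:0x0010 then
 * addresses linear 0x12350. The VBox block above additionally refreshes
 * the cached flags and limit so the hidden descriptor state stays sane.
 */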
2791
2792/* move T0 to seg_reg and compute if the CPU state may change. Never
2793 call this function with seg_reg == R_CS */
2794static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
2795{
2796 if (s->pe && !s->vm86) {
2797 /* XXX: optimize by finding processor state dynamically */
2798 if (s->cc_op != CC_OP_DYNAMIC)
2799 gen_op_set_cc_op(s->cc_op);
2800 gen_jmp_im(cur_eip);
2801 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2802 tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
2803 /* abort translation because the addseg value may change or
2804 because ss32 may change. For R_SS, translation must always
2805 stop, since special handling is needed to disable hardware
2806 interrupts for the next instruction */
2807 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
2808 s->is_jmp = 3;
2809 } else {
2810 gen_op_movl_seg_T0_vm(seg_reg);
2811 if (seg_reg == R_SS)
2812 s->is_jmp = 3;
2813 }
2814}
2815
2816#ifndef VBOX
2817static inline int svm_is_rep(int prefixes)
2818#else /* VBOX */
2819DECLINLINE(int) svm_is_rep(int prefixes)
2820#endif /* VBOX */
2821{
2822 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2823}
2824
2825#ifndef VBOX
2826static inline void
2827#else /* VBOX */
2828DECLINLINE(void)
2829#endif /* VBOX */
2830gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
2831 uint32_t type, uint64_t param)
2832{
2833 /* no SVM activated; fast case */
2834 if (likely(!(s->flags & HF_SVMI_MASK)))
2835 return;
2836 if (s->cc_op != CC_OP_DYNAMIC)
2837 gen_op_set_cc_op(s->cc_op);
2838 gen_jmp_im(pc_start - s->cs_base);
2839 tcg_gen_helper_0_2(helper_svm_check_intercept_param,
2840 tcg_const_i32(type), tcg_const_i64(param));
2841}
2842
2843#ifndef VBOX
2844static inline void
2845#else /* VBOX */
2846DECLINLINE(void)
2847#endif
2848gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2849{
2850 gen_svm_check_intercept_param(s, pc_start, type, 0);
2851}
2852
2853#ifndef VBOX
2854static inline void gen_stack_update(DisasContext *s, int addend)
2855#else /* VBOX */
2856DECLINLINE(void) gen_stack_update(DisasContext *s, int addend)
2857#endif /* VBOX */
2858{
2859#ifdef TARGET_X86_64
2860 if (CODE64(s)) {
2861 gen_op_add_reg_im(2, R_ESP, addend);
2862 } else
2863#endif
2864 if (s->ss32) {
2865 gen_op_add_reg_im(1, R_ESP, addend);
2866 } else {
2867 gen_op_add_reg_im(0, R_ESP, addend);
2868 }
2869}
2870
2871/* generate a push. It depends on ss32, addseg and dflag */
2872static void gen_push_T0(DisasContext *s)
2873{
2874#ifdef TARGET_X86_64
2875 if (CODE64(s)) {
2876 gen_op_movq_A0_reg(R_ESP);
2877 if (s->dflag) {
2878 gen_op_addq_A0_im(-8);
2879 gen_op_st_T0_A0(OT_QUAD + s->mem_index);
2880 } else {
2881 gen_op_addq_A0_im(-2);
2882 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2883 }
2884 gen_op_mov_reg_A0(2, R_ESP);
2885 } else
2886#endif
2887 {
2888 gen_op_movl_A0_reg(R_ESP);
2889 if (!s->dflag)
2890 gen_op_addl_A0_im(-2);
2891 else
2892 gen_op_addl_A0_im(-4);
2893 if (s->ss32) {
2894 if (s->addseg) {
2895 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2896 gen_op_addl_A0_seg(R_SS);
2897 }
2898 } else {
2899 gen_op_andl_A0_ffff();
2900 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2901 gen_op_addl_A0_seg(R_SS);
2902 }
2903 gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
2904 if (s->ss32 && !s->addseg)
2905 gen_op_mov_reg_A0(1, R_ESP);
2906 else
2907 gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
2908 }
2909}
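/*
 * Note on the width encoding above: gen_op_st_T0_A0(s->dflag + 1 + ...)
 * maps dflag 0 -> OT_WORD and dflag 1 -> OT_LONG, matching the ESP
 * adjustment of -2 or -4. E.g. a 32-bit "push eax" with ESP = 0x1000
 * stores 4 bytes at SS:0x0FFC and leaves ESP = 0x0FFC.
 */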
2910
2911/* generate a push. It depends on ss32, addseg and dflag */
2912/* slower version for T1, only used for call Ev */
2913static void gen_push_T1(DisasContext *s)
2914{
2915#ifdef TARGET_X86_64
2916 if (CODE64(s)) {
2917 gen_op_movq_A0_reg(R_ESP);
2918 if (s->dflag) {
2919 gen_op_addq_A0_im(-8);
2920 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2921 } else {
2922 gen_op_addq_A0_im(-2);
2923 gen_op_st_T1_A0(OT_WORD + s->mem_index);
2924 }
2925 gen_op_mov_reg_A0(2, R_ESP);
2926 } else
2927#endif
2928 {
2929 gen_op_movl_A0_reg(R_ESP);
2930 if (!s->dflag)
2931 gen_op_addl_A0_im(-2);
2932 else
2933 gen_op_addl_A0_im(-4);
2934 if (s->ss32) {
2935 if (s->addseg) {
2936 gen_op_addl_A0_seg(R_SS);
2937 }
2938 } else {
2939 gen_op_andl_A0_ffff();
2940 gen_op_addl_A0_seg(R_SS);
2941 }
2942 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2943
2944 if (s->ss32 && !s->addseg)
2945 gen_op_mov_reg_A0(1, R_ESP);
2946 else
2947 gen_stack_update(s, (-2) << s->dflag);
2948 }
2949}
2950
2951/* two step pop is necessary for precise exceptions */
2952static void gen_pop_T0(DisasContext *s)
2953{
2954#ifdef TARGET_X86_64
2955 if (CODE64(s)) {
2956 gen_op_movq_A0_reg(R_ESP);
2957 gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
2958 } else
2959#endif
2960 {
2961 gen_op_movl_A0_reg(R_ESP);
2962 if (s->ss32) {
2963 if (s->addseg)
2964 gen_op_addl_A0_seg(R_SS);
2965 } else {
2966 gen_op_andl_A0_ffff();
2967 gen_op_addl_A0_seg(R_SS);
2968 }
2969 gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
2970 }
2971}
2972
2973static void gen_pop_update(DisasContext *s)
2974{
2975#ifdef TARGET_X86_64
2976 if (CODE64(s) && s->dflag) {
2977 gen_stack_update(s, 8);
2978 } else
2979#endif
2980 {
2981 gen_stack_update(s, 2 << s->dflag);
2982 }
2983}
2984
2985static void gen_stack_A0(DisasContext *s)
2986{
2987 gen_op_movl_A0_reg(R_ESP);
2988 if (!s->ss32)
2989 gen_op_andl_A0_ffff();
2990 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2991 if (s->addseg)
2992 gen_op_addl_A0_seg(R_SS);
2993}
2994
2995 /* NOTE: wrap-around in 16-bit mode is not fully handled */
2996static void gen_pusha(DisasContext *s)
2997{
2998 int i;
2999 gen_op_movl_A0_reg(R_ESP);
3000 gen_op_addl_A0_im(-16 << s->dflag);
3001 if (!s->ss32)
3002 gen_op_andl_A0_ffff();
3003 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3004 if (s->addseg)
3005 gen_op_addl_A0_seg(R_SS);
3006 for(i = 0;i < 8; i++) {
3007 gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
3008 gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
3009 gen_op_addl_A0_im(2 << s->dflag);
3010 }
3011 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
3012}
3013
3014 /* NOTE: wrap-around in 16-bit mode is not fully handled */
3015static void gen_popa(DisasContext *s)
3016{
3017 int i;
3018 gen_op_movl_A0_reg(R_ESP);
3019 if (!s->ss32)
3020 gen_op_andl_A0_ffff();
3021 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3022 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);
3023 if (s->addseg)
3024 gen_op_addl_A0_seg(R_SS);
3025 for(i = 0;i < 8; i++) {
3026 /* ESP is not reloaded */
3027 if (i != 3) {
3028 gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
3029 gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
3030 }
3031 gen_op_addl_A0_im(2 << s->dflag);
3032 }
3033 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
3034}
3035
3036static void gen_enter(DisasContext *s, int esp_addend, int level)
3037{
3038 int ot, opsize;
3039
3040 level &= 0x1f;
3041#ifdef TARGET_X86_64
3042 if (CODE64(s)) {
3043 ot = s->dflag ? OT_QUAD : OT_WORD;
3044 opsize = 1 << ot;
3045
3046 gen_op_movl_A0_reg(R_ESP);
3047 gen_op_addq_A0_im(-opsize);
3048 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3049
3050 /* push bp */
3051 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
3052 gen_op_st_T0_A0(ot + s->mem_index);
3053 if (level) {
3054 /* XXX: must save state */
3055 tcg_gen_helper_0_3(helper_enter64_level,
3056 tcg_const_i32(level),
3057 tcg_const_i32((ot == OT_QUAD)),
3058 cpu_T[1]);
3059 }
3060 gen_op_mov_reg_T1(ot, R_EBP);
3061 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
3062 gen_op_mov_reg_T1(OT_QUAD, R_ESP);
3063 } else
3064#endif
3065 {
3066 ot = s->dflag + OT_WORD;
3067 opsize = 2 << s->dflag;
3068
3069 gen_op_movl_A0_reg(R_ESP);
3070 gen_op_addl_A0_im(-opsize);
3071 if (!s->ss32)
3072 gen_op_andl_A0_ffff();
3073 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3074 if (s->addseg)
3075 gen_op_addl_A0_seg(R_SS);
3076 /* push bp */
3077 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
3078 gen_op_st_T0_A0(ot + s->mem_index);
3079 if (level) {
3080 /* XXX: must save state */
3081 tcg_gen_helper_0_3(helper_enter_level,
3082 tcg_const_i32(level),
3083 tcg_const_i32(s->dflag),
3084 cpu_T[1]);
3085 }
3086 gen_op_mov_reg_T1(ot, R_EBP);
3087 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
3088 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
3089 }
3090}
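/*
 * Rough equivalent of the level == 0 path above (illustrative pseudo-asm):
 *
 *   enter 16, 0   ~   push ebp
 *                     mov  ebp, esp   ; EBP := address of the saved EBP
 *                     sub  esp, 16    ; esp_addend
 *
 * With level > 0, helper_enter_level()/helper_enter64_level() additionally
 * copies the chain of previous frame pointers before the new EBP is set.
 */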
3091
3092static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
3093{
3094 if (s->cc_op != CC_OP_DYNAMIC)
3095 gen_op_set_cc_op(s->cc_op);
3096 gen_jmp_im(cur_eip);
3097 tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
3098 s->is_jmp = 3;
3099}
3100
3101/* an interrupt is different from an exception because of the
3102 privilege checks */
3103static void gen_interrupt(DisasContext *s, int intno,
3104 target_ulong cur_eip, target_ulong next_eip)
3105{
3106 if (s->cc_op != CC_OP_DYNAMIC)
3107 gen_op_set_cc_op(s->cc_op);
3108 gen_jmp_im(cur_eip);
3109 tcg_gen_helper_0_2(helper_raise_interrupt,
3110 tcg_const_i32(intno),
3111 tcg_const_i32(next_eip - cur_eip));
3112 s->is_jmp = 3;
3113}
3114
3115static void gen_debug(DisasContext *s, target_ulong cur_eip)
3116{
3117 if (s->cc_op != CC_OP_DYNAMIC)
3118 gen_op_set_cc_op(s->cc_op);
3119 gen_jmp_im(cur_eip);
3120 tcg_gen_helper_0_0(helper_debug);
3121 s->is_jmp = 3;
3122}
3123
3124 /* generate a generic end of block. A trace exception is also generated
3125 if needed */
3126static void gen_eob(DisasContext *s)
3127{
3128#ifdef VBOX
3129 gen_check_external_event(s);
3130#endif /* VBOX */
3131 if (s->cc_op != CC_OP_DYNAMIC)
3132 gen_op_set_cc_op(s->cc_op);
3133 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
3134 tcg_gen_helper_0_0(helper_reset_inhibit_irq);
3135 }
3136 if (s->singlestep_enabled) {
3137 tcg_gen_helper_0_0(helper_debug);
3138 } else if (s->tf) {
3139 tcg_gen_helper_0_0(helper_single_step);
3140 } else {
3141 tcg_gen_exit_tb(0);
3142 }
3143 s->is_jmp = 3;
3144}
3145
3146 /* generate a jump to eip. No segment change must happen before this, as a
3147 direct call to the next block may occur */
3148static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
3149{
3150 if (s->jmp_opt) {
3151 if (s->cc_op != CC_OP_DYNAMIC) {
3152 gen_op_set_cc_op(s->cc_op);
3153 s->cc_op = CC_OP_DYNAMIC;
3154 }
3155 gen_goto_tb(s, tb_num, eip);
3156 s->is_jmp = 3;
3157 } else {
3158 gen_jmp_im(eip);
3159 gen_eob(s);
3160 }
3161}
3162
3163static void gen_jmp(DisasContext *s, target_ulong eip)
3164{
3165 gen_jmp_tb(s, eip, 0);
3166}
3167
3168#ifndef VBOX
3169static inline void gen_ldq_env_A0(int idx, int offset)
3170#else /* VBOX */
3171DECLINLINE(void) gen_ldq_env_A0(int idx, int offset)
3172#endif /* VBOX */
3173{
3174 int mem_index = (idx >> 2) - 1;
3175 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
3176 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
3177}
3178
3179#ifndef VBOX
3180static inline void gen_stq_env_A0(int idx, int offset)
3181#else /* VBOX */
3182DECLINLINE(void) gen_stq_env_A0(int idx, int offset)
3183#endif /* VBOX */
3184{
3185 int mem_index = (idx >> 2) - 1;
3186 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
3187 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
3188}
3189
3190#ifndef VBOX
3191static inline void gen_ldo_env_A0(int idx, int offset)
3192#else /* VBOX */
3193DECLINLINE(void) gen_ldo_env_A0(int idx, int offset)
3194#endif /* VBOX */
3195{
3196 int mem_index = (idx >> 2) - 1;
3197 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
3198 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
3199 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
3200 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
3201 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
3202}
3203
3204#ifndef VBOX
3205static inline void gen_sto_env_A0(int idx, int offset)
3206#else /* VBOX */
3207DECLINLINE(void) gen_sto_env_A0(int idx, int offset)
3208#endif /* VBOX */
3209{
3210 int mem_index = (idx >> 2) - 1;
3211 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
3212 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
3213 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
3214 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
3215 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
3216}
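/*
 * The four helpers above move MMX/SSE operands between guest memory and
 * CPUX86State: a full XMM register is transferred as two 64-bit halves
 * (XMM_Q(0) at A0, XMM_Q(1) at A0 + 8), and the softmmu memory index is
 * decoded from s->mem_index's encoding with (idx >> 2) - 1.
 */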
3217
3218#ifndef VBOX
3219static inline void gen_op_movo(int d_offset, int s_offset)
3220#else /* VBOX */
3221DECLINLINE(void) gen_op_movo(int d_offset, int s_offset)
3222#endif /* VBOX */
3223{
3224 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
3225 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3226 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
3227 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
3228}
3229
3230#ifndef VBOX
3231static inline void gen_op_movq(int d_offset, int s_offset)
3232#else /* VBOX */
3233DECLINLINE(void) gen_op_movq(int d_offset, int s_offset)
3234#endif /* VBOX */
3235{
3236 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
3237 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3238}
3239
3240#ifndef VBOX
3241static inline void gen_op_movl(int d_offset, int s_offset)
3242#else /* VBOX */
3243DECLINLINE(void) gen_op_movl(int d_offset, int s_offset)
3244#endif /* VBOX */
3245{
3246 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
3247 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
3248}
3249
3250#ifndef VBOX
3251static inline void gen_op_movq_env_0(int d_offset)
3252#else /* VBOX */
3253DECLINLINE(void) gen_op_movq_env_0(int d_offset)
3254#endif /* VBOX */
3255{
3256 tcg_gen_movi_i64(cpu_tmp1_i64, 0);
3257 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3258}
3259
3260#define SSE_SPECIAL ((void *)1)
3261#define SSE_DUMMY ((void *)2)
3262
3263#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
3264#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
3265 helper_ ## x ## ss, helper_ ## x ## sd, }
3266
3267static void *sse_op_table1[256][4] = {
3268 /* 3DNow! extensions */
3269 [0x0e] = { SSE_DUMMY }, /* femms */
3270 [0x0f] = { SSE_DUMMY }, /* pf... */
3271 /* pure SSE operations */
3272 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
3273 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
3274 [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
3275 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
3276 [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
3277 [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
3278 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
3279 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
3280
3281 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
3282 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
3283 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
3284 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
3285 [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
3286 [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
3287 [0x2e] = { helper_ucomiss, helper_ucomisd },
3288 [0x2f] = { helper_comiss, helper_comisd },
3289 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
3290 [0x51] = SSE_FOP(sqrt),
3291 [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
3292 [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
3293 [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
3294 [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
3295 [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
3296 [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
3297 [0x58] = SSE_FOP(add),
3298 [0x59] = SSE_FOP(mul),
3299 [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
3300 helper_cvtss2sd, helper_cvtsd2ss },
3301 [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
3302 [0x5c] = SSE_FOP(sub),
3303 [0x5d] = SSE_FOP(min),
3304 [0x5e] = SSE_FOP(div),
3305 [0x5f] = SSE_FOP(max),
3306
3307 [0xc2] = SSE_FOP(cmpeq),
3308 [0xc6] = { helper_shufps, helper_shufpd },
3309
3310 [0x38] = { SSE_SPECIAL, SSE_SPECIAL, NULL, SSE_SPECIAL }, /* SSSE3/SSE4 */
3311 [0x3a] = { SSE_SPECIAL, SSE_SPECIAL }, /* SSSE3/SSE4 */
3312
3313 /* MMX ops and their SSE extensions */
3314 [0x60] = MMX_OP2(punpcklbw),
3315 [0x61] = MMX_OP2(punpcklwd),
3316 [0x62] = MMX_OP2(punpckldq),
3317 [0x63] = MMX_OP2(packsswb),
3318 [0x64] = MMX_OP2(pcmpgtb),
3319 [0x65] = MMX_OP2(pcmpgtw),
3320 [0x66] = MMX_OP2(pcmpgtl),
3321 [0x67] = MMX_OP2(packuswb),
3322 [0x68] = MMX_OP2(punpckhbw),
3323 [0x69] = MMX_OP2(punpckhwd),
3324 [0x6a] = MMX_OP2(punpckhdq),
3325 [0x6b] = MMX_OP2(packssdw),
3326 [0x6c] = { NULL, helper_punpcklqdq_xmm },
3327 [0x6d] = { NULL, helper_punpckhqdq_xmm },
3328 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
3329 [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
3330 [0x70] = { helper_pshufw_mmx,
3331 helper_pshufd_xmm,
3332 helper_pshufhw_xmm,
3333 helper_pshuflw_xmm },
3334 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
3335 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
3336 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
3337 [0x74] = MMX_OP2(pcmpeqb),
3338 [0x75] = MMX_OP2(pcmpeqw),
3339 [0x76] = MMX_OP2(pcmpeql),
3340 [0x77] = { SSE_DUMMY }, /* emms */
3341 [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
3342 [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
3343 [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, movq */
3344 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
3345 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
3346 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
3347 [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
3348 [0xd1] = MMX_OP2(psrlw),
3349 [0xd2] = MMX_OP2(psrld),
3350 [0xd3] = MMX_OP2(psrlq),
3351 [0xd4] = MMX_OP2(paddq),
3352 [0xd5] = MMX_OP2(pmullw),
3353 [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movq2dq, movdq2q */
3354 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
3355 [0xd8] = MMX_OP2(psubusb),
3356 [0xd9] = MMX_OP2(psubusw),
3357 [0xda] = MMX_OP2(pminub),
3358 [0xdb] = MMX_OP2(pand),
3359 [0xdc] = MMX_OP2(paddusb),
3360 [0xdd] = MMX_OP2(paddusw),
3361 [0xde] = MMX_OP2(pmaxub),
3362 [0xdf] = MMX_OP2(pandn),
3363 [0xe0] = MMX_OP2(pavgb),
3364 [0xe1] = MMX_OP2(psraw),
3365 [0xe2] = MMX_OP2(psrad),
3366 [0xe3] = MMX_OP2(pavgw),
3367 [0xe4] = MMX_OP2(pmulhuw),
3368 [0xe5] = MMX_OP2(pmulhw),
3369 [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
3370 [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntdq */
3371 [0xe8] = MMX_OP2(psubsb),
3372 [0xe9] = MMX_OP2(psubsw),
3373 [0xea] = MMX_OP2(pminsw),
3374 [0xeb] = MMX_OP2(por),
3375 [0xec] = MMX_OP2(paddsb),
3376 [0xed] = MMX_OP2(paddsw),
3377 [0xee] = MMX_OP2(pmaxsw),
3378 [0xef] = MMX_OP2(pxor),
3379 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
3380 [0xf1] = MMX_OP2(psllw),
3381 [0xf2] = MMX_OP2(pslld),
3382 [0xf3] = MMX_OP2(psllq),
3383 [0xf4] = MMX_OP2(pmuludq),
3384 [0xf5] = MMX_OP2(pmaddwd),
3385 [0xf6] = MMX_OP2(psadbw),
3386 [0xf7] = MMX_OP2(maskmov),
3387 [0xf8] = MMX_OP2(psubb),
3388 [0xf9] = MMX_OP2(psubw),
3389 [0xfa] = MMX_OP2(psubl),
3390 [0xfb] = MMX_OP2(psubq),
3391 [0xfc] = MMX_OP2(paddb),
3392 [0xfd] = MMX_OP2(paddw),
3393 [0xfe] = MMX_OP2(paddl),
3394};
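/*
 * sse_op_table1 is indexed by [opcode byte][b1], where b1 encodes the
 * mandatory prefix: 0 = none, 1 = 0x66, 2 = 0xF3, 3 = 0xF2 (see gen_sse()
 * below). For example, opcode 0x58 resolves via SSE_FOP(add) to addps,
 * addpd, addss or addsd depending on the prefix. SSE_SPECIAL entries are
 * decoded by hand, and SSE_DUMMY marks ops handled before the dispatch.
 */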
3395
3396static void *sse_op_table2[3 * 8][2] = {
3397 [0 + 2] = MMX_OP2(psrlw),
3398 [0 + 4] = MMX_OP2(psraw),
3399 [0 + 6] = MMX_OP2(psllw),
3400 [8 + 2] = MMX_OP2(psrld),
3401 [8 + 4] = MMX_OP2(psrad),
3402 [8 + 6] = MMX_OP2(pslld),
3403 [16 + 2] = MMX_OP2(psrlq),
3404 [16 + 3] = { NULL, helper_psrldq_xmm },
3405 [16 + 6] = MMX_OP2(psllq),
3406 [16 + 7] = { NULL, helper_pslldq_xmm },
3407};
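/*
 * sse_op_table2 rows are the immediate-form shift groups 0x71/0x72/0x73
 * (word/dword/qword), selected with ((b - 1) & 3) * 8; the column is the
 * ModRM reg field: /2 = srl, /4 = sra, /6 = sll, with /3 (psrldq) and
 * /7 (pslldq) valid only for the XMM (0x66-prefixed) forms.
 */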
3408
3409static void *sse_op_table3[4 * 3] = {
3410 helper_cvtsi2ss,
3411 helper_cvtsi2sd,
3412 X86_64_ONLY(helper_cvtsq2ss),
3413 X86_64_ONLY(helper_cvtsq2sd),
3414
3415 helper_cvttss2si,
3416 helper_cvttsd2si,
3417 X86_64_ONLY(helper_cvttss2sq),
3418 X86_64_ONLY(helper_cvttsd2sq),
3419
3420 helper_cvtss2si,
3421 helper_cvtsd2si,
3422 X86_64_ONLY(helper_cvtss2sq),
3423 X86_64_ONLY(helper_cvtsd2sq),
3424};
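/*
 * sse_op_table3 holds the scalar int<->float converters in three groups of
 * four (cvtsi2*, cvtt*2si, cvt*2si), each ordered ss/sd/sq-ss/sq-sd. The
 * base index is (s->dflag == 2) * 2 + ((b >> 8) - 2), plus 4 for the
 * truncating and another 4 for the rounding forms; e.g. F2 0F 2D
 * (cvtsd2si, 32-bit operands) selects index 1 + 4 + 4 = 9, helper_cvtsd2si.
 */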
3425
3426static void *sse_op_table4[8][4] = {
3427 SSE_FOP(cmpeq),
3428 SSE_FOP(cmplt),
3429 SSE_FOP(cmple),
3430 SSE_FOP(cmpunord),
3431 SSE_FOP(cmpneq),
3432 SSE_FOP(cmpnlt),
3433 SSE_FOP(cmpnle),
3434 SSE_FOP(cmpord),
3435};
3436
3437static void *sse_op_table5[256] = {
3438 [0x0c] = helper_pi2fw,
3439 [0x0d] = helper_pi2fd,
3440 [0x1c] = helper_pf2iw,
3441 [0x1d] = helper_pf2id,
3442 [0x8a] = helper_pfnacc,
3443 [0x8e] = helper_pfpnacc,
3444 [0x90] = helper_pfcmpge,
3445 [0x94] = helper_pfmin,
3446 [0x96] = helper_pfrcp,
3447 [0x97] = helper_pfrsqrt,
3448 [0x9a] = helper_pfsub,
3449 [0x9e] = helper_pfadd,
3450 [0xa0] = helper_pfcmpgt,
3451 [0xa4] = helper_pfmax,
3452 [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
3453 [0xa7] = helper_movq, /* pfrsqit1 */
3454 [0xaa] = helper_pfsubr,
3455 [0xae] = helper_pfacc,
3456 [0xb0] = helper_pfcmpeq,
3457 [0xb4] = helper_pfmul,
3458 [0xb6] = helper_movq, /* pfrcpit2 */
3459 [0xb7] = helper_pmulhrw_mmx,
3460 [0xbb] = helper_pswapd,
3461 [0xbf] = helper_pavgb_mmx /* pavgusb */
3462};
3463
3464struct sse_op_helper_s {
3465 void *op[2]; uint32_t ext_mask;
3466};
3467#define SSSE3_OP(x) { MMX_OP2(x), CPUID_EXT_SSSE3 }
3468#define SSE41_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE41 }
3469#define SSE42_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE42 }
3470#define SSE41_SPECIAL { { NULL, SSE_SPECIAL }, CPUID_EXT_SSE41 }
3471static struct sse_op_helper_s sse_op_table6[256] = {
3472 [0x00] = SSSE3_OP(pshufb),
3473 [0x01] = SSSE3_OP(phaddw),
3474 [0x02] = SSSE3_OP(phaddd),
3475 [0x03] = SSSE3_OP(phaddsw),
3476 [0x04] = SSSE3_OP(pmaddubsw),
3477 [0x05] = SSSE3_OP(phsubw),
3478 [0x06] = SSSE3_OP(phsubd),
3479 [0x07] = SSSE3_OP(phsubsw),
3480 [0x08] = SSSE3_OP(psignb),
3481 [0x09] = SSSE3_OP(psignw),
3482 [0x0a] = SSSE3_OP(psignd),
3483 [0x0b] = SSSE3_OP(pmulhrsw),
3484 [0x10] = SSE41_OP(pblendvb),
3485 [0x14] = SSE41_OP(blendvps),
3486 [0x15] = SSE41_OP(blendvpd),
3487 [0x17] = SSE41_OP(ptest),
3488 [0x1c] = SSSE3_OP(pabsb),
3489 [0x1d] = SSSE3_OP(pabsw),
3490 [0x1e] = SSSE3_OP(pabsd),
3491 [0x20] = SSE41_OP(pmovsxbw),
3492 [0x21] = SSE41_OP(pmovsxbd),
3493 [0x22] = SSE41_OP(pmovsxbq),
3494 [0x23] = SSE41_OP(pmovsxwd),
3495 [0x24] = SSE41_OP(pmovsxwq),
3496 [0x25] = SSE41_OP(pmovsxdq),
3497 [0x28] = SSE41_OP(pmuldq),
3498 [0x29] = SSE41_OP(pcmpeqq),
3499 [0x2a] = SSE41_SPECIAL, /* movntdqa */
3500 [0x2b] = SSE41_OP(packusdw),
3501 [0x30] = SSE41_OP(pmovzxbw),
3502 [0x31] = SSE41_OP(pmovzxbd),
3503 [0x32] = SSE41_OP(pmovzxbq),
3504 [0x33] = SSE41_OP(pmovzxwd),
3505 [0x34] = SSE41_OP(pmovzxwq),
3506 [0x35] = SSE41_OP(pmovzxdq),
3507 [0x37] = SSE42_OP(pcmpgtq),
3508 [0x38] = SSE41_OP(pminsb),
3509 [0x39] = SSE41_OP(pminsd),
3510 [0x3a] = SSE41_OP(pminuw),
3511 [0x3b] = SSE41_OP(pminud),
3512 [0x3c] = SSE41_OP(pmaxsb),
3513 [0x3d] = SSE41_OP(pmaxsd),
3514 [0x3e] = SSE41_OP(pmaxuw),
3515 [0x3f] = SSE41_OP(pmaxud),
3516 [0x40] = SSE41_OP(pmulld),
3517 [0x41] = SSE41_OP(phminposuw),
3518};
3519
3520static struct sse_op_helper_s sse_op_table7[256] = {
3521 [0x08] = SSE41_OP(roundps),
3522 [0x09] = SSE41_OP(roundpd),
3523 [0x0a] = SSE41_OP(roundss),
3524 [0x0b] = SSE41_OP(roundsd),
3525 [0x0c] = SSE41_OP(blendps),
3526 [0x0d] = SSE41_OP(blendpd),
3527 [0x0e] = SSE41_OP(pblendw),
3528 [0x0f] = SSSE3_OP(palignr),
3529 [0x14] = SSE41_SPECIAL, /* pextrb */
3530 [0x15] = SSE41_SPECIAL, /* pextrw */
3531 [0x16] = SSE41_SPECIAL, /* pextrd/pextrq */
3532 [0x17] = SSE41_SPECIAL, /* extractps */
3533 [0x20] = SSE41_SPECIAL, /* pinsrb */
3534 [0x21] = SSE41_SPECIAL, /* insertps */
3535 [0x22] = SSE41_SPECIAL, /* pinsrd/pinsrq */
3536 [0x40] = SSE41_OP(dpps),
3537 [0x41] = SSE41_OP(dppd),
3538 [0x42] = SSE41_OP(mpsadbw),
3539 [0x60] = SSE42_OP(pcmpestrm),
3540 [0x61] = SSE42_OP(pcmpestri),
3541 [0x62] = SSE42_OP(pcmpistrm),
3542 [0x63] = SSE42_OP(pcmpistri),
3543};
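/*
 * For tables 6 and 7 the dispatcher also checks ext_mask against
 * s->cpuid_ext_features, so e.g. an SSE4.1 entry such as roundps raises
 * #UD (illegal_op) when CPUID_EXT_SSE41 is not advertised to the guest.
 */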
3544
3545static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
3546{
3547 int b1, op1_offset, op2_offset, is_xmm, val, ot;
3548 int modrm, mod, rm, reg, reg_addr, offset_addr;
3549 void *sse_op2;
3550
3551 b &= 0xff;
3552 if (s->prefix & PREFIX_DATA)
3553 b1 = 1;
3554 else if (s->prefix & PREFIX_REPZ)
3555 b1 = 2;
3556 else if (s->prefix & PREFIX_REPNZ)
3557 b1 = 3;
3558 else
3559 b1 = 0;
3560 sse_op2 = sse_op_table1[b][b1];
3561 if (!sse_op2)
3562 goto illegal_op;
3563 if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
3564 is_xmm = 1;
3565 } else {
3566 if (b1 == 0) {
3567 /* MMX case */
3568 is_xmm = 0;
3569 } else {
3570 is_xmm = 1;
3571 }
3572 }
3573 /* simple MMX/SSE operation */
3574 if (s->flags & HF_TS_MASK) {
3575 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
3576 return;
3577 }
3578 if (s->flags & HF_EM_MASK) {
3579 illegal_op:
3580 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
3581 return;
3582 }
3583 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
3584 if ((b != 0x38 && b != 0x3a) || (s->prefix & PREFIX_DATA))
3585 goto illegal_op;
3586 if (b == 0x0e) {
3587 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3588 goto illegal_op;
3589 /* femms */
3590 tcg_gen_helper_0_0(helper_emms);
3591 return;
3592 }
3593 if (b == 0x77) {
3594 /* emms */
3595 tcg_gen_helper_0_0(helper_emms);
3596 return;
3597 }
3598 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
3599 the static cpu state) */
3600 if (!is_xmm) {
3601 tcg_gen_helper_0_0(helper_enter_mmx);
3602 }
3603
3604 modrm = ldub_code(s->pc++);
3605 reg = ((modrm >> 3) & 7);
3606 if (is_xmm)
3607 reg |= rex_r;
3608 mod = (modrm >> 6) & 3;
3609 if (sse_op2 == SSE_SPECIAL) {
3610 b |= (b1 << 8);
3611 switch(b) {
3612 case 0x0e7: /* movntq */
3613 if (mod == 3)
3614 goto illegal_op;
3615 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3616 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3617 break;
3618 case 0x1e7: /* movntdq */
3619 case 0x02b: /* movntps */
3620 case 0x12b: /* movntpd */
3621 case 0x3f0: /* lddqu */
3622 if (mod == 3)
3623 goto illegal_op;
3624 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3625 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3626 break;
3627 case 0x6e: /* movd mm, ea */
3628#ifdef TARGET_X86_64
3629 if (s->dflag == 2) {
3630 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3631 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
3632 } else
3633#endif
3634 {
3635 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3636 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3637 offsetof(CPUX86State,fpregs[reg].mmx));
3638 tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
3639 }
3640 break;
3641 case 0x16e: /* movd xmm, ea */
3642#ifdef TARGET_X86_64
3643 if (s->dflag == 2) {
3644 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3645 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3646 offsetof(CPUX86State,xmm_regs[reg]));
3647 tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
3648 } else
3649#endif
3650 {
3651 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3652 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3653 offsetof(CPUX86State,xmm_regs[reg]));
3654 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3655 tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
3656 }
3657 break;
3658 case 0x6f: /* movq mm, ea */
3659 if (mod != 3) {
3660 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3661 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3662 } else {
3663 rm = (modrm & 7);
3664 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
3665 offsetof(CPUX86State,fpregs[rm].mmx));
3666 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
3667 offsetof(CPUX86State,fpregs[reg].mmx));
3668 }
3669 break;
3670 case 0x010: /* movups */
3671 case 0x110: /* movupd */
3672 case 0x028: /* movaps */
3673 case 0x128: /* movapd */
3674 case 0x16f: /* movdqa xmm, ea */
3675 case 0x26f: /* movdqu xmm, ea */
3676 if (mod != 3) {
3677 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3678 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3679 } else {
3680 rm = (modrm & 7) | REX_B(s);
3681 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
3682 offsetof(CPUX86State,xmm_regs[rm]));
3683 }
3684 break;
3685 case 0x210: /* movss xmm, ea */
3686 if (mod != 3) {
3687 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3688 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3689 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3690 gen_op_movl_T0_0();
3691 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3692 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3693 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3694 } else {
3695 rm = (modrm & 7) | REX_B(s);
3696 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3697 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3698 }
3699 break;
3700 case 0x310: /* movsd xmm, ea */
3701 if (mod != 3) {
3702 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3703 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3704 gen_op_movl_T0_0();
3705 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3706 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3707 } else {
3708 rm = (modrm & 7) | REX_B(s);
3709 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3710 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3711 }
3712 break;
3713 case 0x012: /* movlps */
3714 case 0x112: /* movlpd */
3715 if (mod != 3) {
3716 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3717 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3718 } else {
3719 /* movhlps */
3720 rm = (modrm & 7) | REX_B(s);
3721 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3722 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3723 }
3724 break;
3725 case 0x212: /* movsldup */
3726 if (mod != 3) {
3727 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3728 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3729 } else {
3730 rm = (modrm & 7) | REX_B(s);
3731 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3732 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3733 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3734 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
3735 }
3736 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3737 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3738 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3739 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3740 break;
3741 case 0x312: /* movddup */
3742 if (mod != 3) {
3743 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3744 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3745 } else {
3746 rm = (modrm & 7) | REX_B(s);
3747 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3748 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3749 }
3750 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3751 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3752 break;
3753 case 0x016: /* movhps */
3754 case 0x116: /* movhpd */
3755 if (mod != 3) {
3756 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3757 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3758 } else {
3759 /* movlhps */
3760 rm = (modrm & 7) | REX_B(s);
3761 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3762 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3763 }
3764 break;
3765 case 0x216: /* movshdup */
3766 if (mod != 3) {
3767 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3768 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3769 } else {
3770 rm = (modrm & 7) | REX_B(s);
3771 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3772 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
3773 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3774 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
3775 }
3776 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3777 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3778 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3779 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3780 break;
3781 case 0x7e: /* movd ea, mm */
3782#ifdef TARGET_X86_64
3783 if (s->dflag == 2) {
3784 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3785 offsetof(CPUX86State,fpregs[reg].mmx));
3786 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3787 } else
3788#endif
3789 {
3790 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3791 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
3792 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3793 }
3794 break;
3795 case 0x17e: /* movd ea, xmm */
3796#ifdef TARGET_X86_64
3797 if (s->dflag == 2) {
3798 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3799 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3800 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3801 } else
3802#endif
3803 {
3804 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3805 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3806 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3807 }
3808 break;
3809 case 0x27e: /* movq xmm, ea */
3810 if (mod != 3) {
3811 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3812 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3813 } else {
3814 rm = (modrm & 7) | REX_B(s);
3815 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3816 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3817 }
3818 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3819 break;
3820 case 0x7f: /* movq ea, mm */
3821 if (mod != 3) {
3822 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3823 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3824 } else {
3825 rm = (modrm & 7);
3826 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3827 offsetof(CPUX86State,fpregs[reg].mmx));
3828 }
3829 break;
3830 case 0x011: /* movups */
3831 case 0x111: /* movupd */
3832 case 0x029: /* movaps */
3833 case 0x129: /* movapd */
3834 case 0x17f: /* movdqa ea, xmm */
3835 case 0x27f: /* movdqu ea, xmm */
3836 if (mod != 3) {
3837 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3838 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3839 } else {
3840 rm = (modrm & 7) | REX_B(s);
3841 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
3842 offsetof(CPUX86State,xmm_regs[reg]));
3843 }
3844 break;
3845 case 0x211: /* movss ea, xmm */
3846 if (mod != 3) {
3847 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3848 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3849 gen_op_st_T0_A0(OT_LONG + s->mem_index);
3850 } else {
3851 rm = (modrm & 7) | REX_B(s);
3852 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
3853 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3854 }
3855 break;
3856 case 0x311: /* movsd ea, xmm */
3857 if (mod != 3) {
3858 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3859 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3860 } else {
3861 rm = (modrm & 7) | REX_B(s);
3862 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3863 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3864 }
3865 break;
3866 case 0x013: /* movlps */
3867 case 0x113: /* movlpd */
3868 if (mod != 3) {
3869 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3870 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3871 } else {
3872 goto illegal_op;
3873 }
3874 break;
3875 case 0x017: /* movhps */
3876 case 0x117: /* movhpd */
3877 if (mod != 3) {
3878 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3879 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3880 } else {
3881 goto illegal_op;
3882 }
3883 break;
3884 case 0x71: /* shift mm, im */
3885 case 0x72:
3886 case 0x73:
3887 case 0x171: /* shift xmm, im */
3888 case 0x172:
3889 case 0x173:
3890 val = ldub_code(s->pc++);
3891 if (is_xmm) {
3892 gen_op_movl_T0_im(val);
3893 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3894 gen_op_movl_T0_0();
3895 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
3896 op1_offset = offsetof(CPUX86State,xmm_t0);
3897 } else {
3898 gen_op_movl_T0_im(val);
3899 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
3900 gen_op_movl_T0_0();
3901 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
3902 op1_offset = offsetof(CPUX86State,mmx_t0);
3903 }
3904 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + ((modrm >> 3) & 7)][b1];
3905 if (!sse_op2)
3906 goto illegal_op;
3907 if (is_xmm) {
3908 rm = (modrm & 7) | REX_B(s);
3909 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3910 } else {
3911 rm = (modrm & 7);
3912 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3913 }
3914 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3915 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3916 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3917 break;
3918 case 0x050: /* movmskps */
3919 rm = (modrm & 7) | REX_B(s);
3920 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3921 offsetof(CPUX86State,xmm_regs[rm]));
3922 tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
3923 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3924 gen_op_mov_reg_T0(OT_LONG, reg);
3925 break;
3926 case 0x150: /* movmskpd */
3927 rm = (modrm & 7) | REX_B(s);
3928 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3929 offsetof(CPUX86State,xmm_regs[rm]));
3930 tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
3931 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3932 gen_op_mov_reg_T0(OT_LONG, reg);
3933 break;
3934 case 0x02a: /* cvtpi2ps */
3935 case 0x12a: /* cvtpi2pd */
3936 tcg_gen_helper_0_0(helper_enter_mmx);
3937 if (mod != 3) {
3938 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3939 op2_offset = offsetof(CPUX86State,mmx_t0);
3940 gen_ldq_env_A0(s->mem_index, op2_offset);
3941 } else {
3942 rm = (modrm & 7);
3943 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3944 }
3945 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3946 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3947 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3948 switch(b >> 8) {
3949 case 0x0:
3950 tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
3951 break;
3952 default:
3953 case 0x1:
3954 tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
3955 break;
3956 }
3957 break;
3958 case 0x22a: /* cvtsi2ss */
3959 case 0x32a: /* cvtsi2sd */
3960 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3961 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3962 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3963 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3964 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3965 if (ot == OT_LONG) {
3966 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3967 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
3968 } else {
3969 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_T[0]);
3970 }
3971 break;
3972 case 0x02c: /* cvttps2pi */
3973 case 0x12c: /* cvttpd2pi */
3974 case 0x02d: /* cvtps2pi */
3975 case 0x12d: /* cvtpd2pi */
3976 tcg_gen_helper_0_0(helper_enter_mmx);
3977 if (mod != 3) {
3978 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3979 op2_offset = offsetof(CPUX86State,xmm_t0);
3980 gen_ldo_env_A0(s->mem_index, op2_offset);
3981 } else {
3982 rm = (modrm & 7) | REX_B(s);
3983 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3984 }
3985 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3986 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3987 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3988 switch(b) {
3989 case 0x02c:
3990 tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
3991 break;
3992 case 0x12c:
3993 tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
3994 break;
3995 case 0x02d:
3996 tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
3997 break;
3998 case 0x12d:
3999 tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
4000 break;
4001 }
4002 break;
4003 case 0x22c: /* cvttss2si */
4004 case 0x32c: /* cvttsd2si */
4005 case 0x22d: /* cvtss2si */
4006 case 0x32d: /* cvtsd2si */
4007 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4008 if (mod != 3) {
4009 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4010 if ((b >> 8) & 1) {
4011 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
4012 } else {
4013 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4014 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
4015 }
4016 op2_offset = offsetof(CPUX86State,xmm_t0);
4017 } else {
4018 rm = (modrm & 7) | REX_B(s);
4019 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
4020 }
4021 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
4022 (b & 1) * 4];
4023 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
4024 if (ot == OT_LONG) {
4025 tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
4026 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4027 } else {
4028 tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
4029 }
4030 gen_op_mov_reg_T0(ot, reg);
4031 break;
4032 case 0xc4: /* pinsrw */
4033 case 0x1c4:
4034 s->rip_offset = 1;
4035 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4036 val = ldub_code(s->pc++);
4037 if (b1) {
4038 val &= 7;
4039 tcg_gen_st16_tl(cpu_T[0], cpu_env,
4040 offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
4041 } else {
4042 val &= 3;
4043 tcg_gen_st16_tl(cpu_T[0], cpu_env,
4044 offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
4045 }
4046 break;
4047 case 0xc5: /* pextrw */
4048 case 0x1c5:
4049 if (mod != 3)
4050 goto illegal_op;
4051 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4052 val = ldub_code(s->pc++);
4053 if (b1) {
4054 val &= 7;
4055 rm = (modrm & 7) | REX_B(s);
4056 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
4057 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
4058 } else {
4059 val &= 3;
4060 rm = (modrm & 7);
4061 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
4062 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
4063 }
4064 reg = ((modrm >> 3) & 7) | rex_r;
4065 gen_op_mov_reg_T0(ot, reg);
4066 break;
4067 case 0x1d6: /* movq ea, xmm */
4068 if (mod != 3) {
4069 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4070 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
4071 } else {
4072 rm = (modrm & 7) | REX_B(s);
4073 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
4074 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
4075 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
4076 }
4077 break;
4078 case 0x2d6: /* movq2dq */
4079 tcg_gen_helper_0_0(helper_enter_mmx);
4080 rm = (modrm & 7);
4081 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
4082 offsetof(CPUX86State,fpregs[rm].mmx));
4083 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
4084 break;
4085 case 0x3d6: /* movdq2q */
4086 tcg_gen_helper_0_0(helper_enter_mmx);
4087 rm = (modrm & 7) | REX_B(s);
4088 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
4089 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
4090 break;
4091 case 0xd7: /* pmovmskb */
4092 case 0x1d7:
4093 if (mod != 3)
4094 goto illegal_op;
4095 if (b1) {
4096 rm = (modrm & 7) | REX_B(s);
4097 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
4098 tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
4099 } else {
4100 rm = (modrm & 7);
4101 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
4102 tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
4103 }
4104 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4105 reg = ((modrm >> 3) & 7) | rex_r;
4106 gen_op_mov_reg_T0(OT_LONG, reg);
4107 break;
4108 case 0x138:
4109 if (s->prefix & PREFIX_REPNZ)
4110 goto crc32;
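 /* no repnz prefix: fall through and decode as a 0x0f 0x38 insn */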
4111 case 0x038:
4112 b = modrm;
4113 modrm = ldub_code(s->pc++);
4114 rm = modrm & 7;
4115 reg = ((modrm >> 3) & 7) | rex_r;
4116 mod = (modrm >> 6) & 3;
4117
4118 sse_op2 = sse_op_table6[b].op[b1];
4119 if (!sse_op2)
4120 goto illegal_op;
4121 if (!(s->cpuid_ext_features & sse_op_table6[b].ext_mask))
4122 goto illegal_op;
4123
4124 if (b1) {
4125 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4126 if (mod == 3) {
4127 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
4128 } else {
4129 op2_offset = offsetof(CPUX86State,xmm_t0);
4130 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4131 switch (b) {
4132 case 0x20: case 0x30: /* pmovsxbw, pmovzxbw */
4133 case 0x23: case 0x33: /* pmovsxwd, pmovzxwd */
4134 case 0x25: case 0x35: /* pmovsxdq, pmovzxdq */
4135 gen_ldq_env_A0(s->mem_index, op2_offset +
4136 offsetof(XMMReg, XMM_Q(0)));
4137 break;
4138 case 0x21: case 0x31: /* pmovsxbd, pmovzxbd */
4139 case 0x24: case 0x34: /* pmovsxwq, pmovzxwq */
4140 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0,
4141 (s->mem_index >> 2) - 1);
4142 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0);
4143 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, op2_offset +
4144 offsetof(XMMReg, XMM_L(0)));
4145 break;
4146 case 0x22: case 0x32: /* pmovsxbq, pmovzxbq */
4147 tcg_gen_qemu_ld16u(cpu_tmp0, cpu_A0,
4148 (s->mem_index >> 2) - 1);
4149 tcg_gen_st16_tl(cpu_tmp0, cpu_env, op2_offset +
4150 offsetof(XMMReg, XMM_W(0)));
4151 break;
4152 case 0x2a: /* movntdqa */
4153 gen_ldo_env_A0(s->mem_index, op1_offset);
4154 return;
4155 default:
4156 gen_ldo_env_A0(s->mem_index, op2_offset);
4157 }
4158 }
4159 } else {
4160 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4161 if (mod == 3) {
4162 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4163 } else {
4164 op2_offset = offsetof(CPUX86State,mmx_t0);
4165 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4166 gen_ldq_env_A0(s->mem_index, op2_offset);
4167 }
4168 }
4169 if (sse_op2 == SSE_SPECIAL)
4170 goto illegal_op;
4171
4172 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4173 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4174 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4175
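 /* 0f 38 17 /r is ptest: its helper computes ZF/CF, so flags now live in EFLAGS */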
4176 if (b == 0x17)
4177 s->cc_op = CC_OP_EFLAGS;
4178 break;
4179 case 0x338: /* crc32 */
4180 crc32:
4181 b = modrm;
4182 modrm = ldub_code(s->pc++);
4183 reg = ((modrm >> 3) & 7) | rex_r;
4184
4185 if (b != 0xf0 && b != 0xf1)
4186 goto illegal_op;
4187 if (!(s->cpuid_ext_features & CPUID_EXT_SSE42))
4188 goto illegal_op;
4189
4190 if (b == 0xf0)
4191 ot = OT_BYTE;
4192 else if (b == 0xf1 && s->dflag != 2)
4193 if (s->prefix & PREFIX_DATA)
4194 ot = OT_WORD;
4195 else
4196 ot = OT_LONG;
4197 else
4198 ot = OT_QUAD;
4199
4200 gen_op_mov_TN_reg(OT_LONG, 0, reg);
4201 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4202 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
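 /* the last helper argument is the source operand width in bits (8 << ot) */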
4203 tcg_gen_helper_1_3(helper_crc32, cpu_T[0], cpu_tmp2_i32,
4204 cpu_T[0], tcg_const_i32(8 << ot));
4205
4206 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4207 gen_op_mov_reg_T0(ot, reg);
4208 break;
4209 case 0x03a:
4210 case 0x13a:
4211 b = modrm;
4212 modrm = ldub_code(s->pc++);
4213 rm = modrm & 7;
4214 reg = ((modrm >> 3) & 7) | rex_r;
4215 mod = (modrm >> 6) & 3;
4216
4217 sse_op2 = sse_op_table7[b].op[b1];
4218 if (!sse_op2)
4219 goto illegal_op;
4220 if (!(s->cpuid_ext_features & sse_op_table7[b].ext_mask))
4221 goto illegal_op;
4222
4223 if (sse_op2 == SSE_SPECIAL) {
4224 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4225 rm = (modrm & 7) | REX_B(s);
4226 if (mod != 3)
4227 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4228 reg = ((modrm >> 3) & 7) | rex_r;
4229 val = ldub_code(s->pc++);
4230 switch (b) {
4231 case 0x14: /* pextrb */
4232 tcg_gen_ld8u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4233 xmm_regs[reg].XMM_B(val & 15)));
4234 if (mod == 3)
4235 gen_op_mov_reg_T0(ot, rm);
4236 else
4237 tcg_gen_qemu_st8(cpu_T[0], cpu_A0,
4238 (s->mem_index >> 2) - 1);
4239 break;
4240 case 0x15: /* pextrw */
4241 tcg_gen_ld16u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4242 xmm_regs[reg].XMM_W(val & 7)));
4243 if (mod == 3)
4244 gen_op_mov_reg_T0(ot, rm);
4245 else
4246 tcg_gen_qemu_st16(cpu_T[0], cpu_A0,
4247 (s->mem_index >> 2) - 1);
4248 break;
4249 case 0x16:
4250 if (ot == OT_LONG) { /* pextrd */
4251 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
4252 offsetof(CPUX86State,
4253 xmm_regs[reg].XMM_L(val & 3)));
4254 if (mod == 3)
4255 gen_op_mov_reg_v(ot, rm, cpu_tmp2_i32);
4256 else
4257 tcg_gen_qemu_st32(cpu_tmp2_i32, cpu_A0,
4258 (s->mem_index >> 2) - 1);
4259 } else { /* pextrq */
4260 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
4261 offsetof(CPUX86State,
4262 xmm_regs[reg].XMM_Q(val & 1)));
4263 if (mod == 3)
4264 gen_op_mov_reg_v(ot, rm, cpu_tmp1_i64);
4265 else
4266 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
4267 (s->mem_index >> 2) - 1);
4268 }
4269 break;
4270 case 0x17: /* extractps */
4271 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4272 xmm_regs[reg].XMM_L(val & 3)));
4273 if (mod == 3)
4274 gen_op_mov_reg_T0(ot, rm);
4275 else
4276 tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
4277 (s->mem_index >> 2) - 1);
4278 break;
4279 case 0x20: /* pinsrb */
4280 if (mod == 3)
4281 gen_op_mov_TN_reg(OT_LONG, 0, rm);
4282 else
4283 tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0,
4284 (s->mem_index >> 2) - 1);
4285 tcg_gen_st8_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4286 xmm_regs[reg].XMM_B(val & 15)));
4287 break;
4288 case 0x21: /* insertps */
4289 if (mod == 3)
4290 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
4291 offsetof(CPUX86State,xmm_regs[rm]
4292 .XMM_L((val >> 6) & 3)));
4293 else
4294 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
4295 (s->mem_index >> 2) - 1);
4296 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
4297 offsetof(CPUX86State,xmm_regs[reg]
4298 .XMM_L((val >> 4) & 3)));
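 /* imm8 bits 0..3 form a zero mask: each set bit clears the corresponding dword of the destination */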
4299 if ((val >> 0) & 1)
4300 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4301 cpu_env, offsetof(CPUX86State,
4302 xmm_regs[reg].XMM_L(0)));
4303 if ((val >> 1) & 1)
4304 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4305 cpu_env, offsetof(CPUX86State,
4306 xmm_regs[reg].XMM_L(1)));
4307 if ((val >> 2) & 1)
4308 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4309 cpu_env, offsetof(CPUX86State,
4310 xmm_regs[reg].XMM_L(2)));
4311 if ((val >> 3) & 1)
4312 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4313 cpu_env, offsetof(CPUX86State,
4314 xmm_regs[reg].XMM_L(3)));
4315 break;
4316 case 0x22:
4317 if (ot == OT_LONG) { /* pinsrd */
4318 if (mod == 3)
4319 gen_op_mov_v_reg(ot, cpu_tmp2_i32, rm);
4320 else
4321 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
4322 (s->mem_index >> 2) - 1);
4323 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
4324 offsetof(CPUX86State,
4325 xmm_regs[reg].XMM_L(val & 3)));
4326 } else { /* pinsrq */
4327 if (mod == 3)
4328 gen_op_mov_v_reg(ot, cpu_tmp1_i64, rm);
4329 else
4330 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
4331 (s->mem_index >> 2) - 1);
4332 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
4333 offsetof(CPUX86State,
4334 xmm_regs[reg].XMM_Q(val & 1)));
4335 }
4336 break;
4337 }
4338 return;
4339 }
4340
4341 if (b1) {
4342 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4343 if (mod == 3) {
4344 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
4345 } else {
4346 op2_offset = offsetof(CPUX86State,xmm_t0);
4347 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4348 gen_ldo_env_A0(s->mem_index, op2_offset);
4349 }
4350 } else {
4351 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4352 if (mod == 3) {
4353 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4354 } else {
4355 op2_offset = offsetof(CPUX86State,mmx_t0);
4356 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4357 gen_ldq_env_A0(s->mem_index, op2_offset);
4358 }
4359 }
4360 val = ldub_code(s->pc++);
4361
4362 if ((b & 0xfc) == 0x60) { /* pcmpXstrX */
4363 s->cc_op = CC_OP_EFLAGS;
4364
4365 if (s->dflag == 2)
4366 /* The helper must use entire 64-bit gp registers */
4367 val |= 1 << 8;
4368 }
4369
4370 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4371 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4372 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
4373 break;
4374 default:
4375 goto illegal_op;
4376 }
4377 } else {
4378 /* generic MMX or SSE operation */
4379 switch(b) {
4380 case 0x70: /* pshufx insn */
4381 case 0xc6: /* shufps/shufpd insn */
4382 case 0xc2: /* compare insns */
4383 s->rip_offset = 1;
4384 break;
4385 default:
4386 break;
4387 }
4388 if (is_xmm) {
4389 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4390 if (mod != 3) {
4391 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4392 op2_offset = offsetof(CPUX86State,xmm_t0);
4393 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
4394 b == 0xc2)) {
4395 /* special case for scalar SSE (ss/sd) insns: load only 32/64 bits */
4396 if (b1 == 2) {
4397 /* 32 bit access */
4398 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4399 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
4400 } else {
4401 /* 64 bit access */
4402 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
4403 }
4404 } else {
4405 gen_ldo_env_A0(s->mem_index, op2_offset);
4406 }
4407 } else {
4408 rm = (modrm & 7) | REX_B(s);
4409 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
4410 }
4411 } else {
4412 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4413 if (mod != 3) {
4414 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4415 op2_offset = offsetof(CPUX86State,mmx_t0);
4416 gen_ldq_env_A0(s->mem_index, op2_offset);
4417 } else {
4418 rm = (modrm & 7);
4419 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4420 }
4421 }
4422 switch(b) {
4423 case 0x0f: /* 3DNow! data insns */
4424 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
4425 goto illegal_op;
4426 val = ldub_code(s->pc++);
4427 sse_op2 = sse_op_table5[val];
4428 if (!sse_op2)
4429 goto illegal_op;
4430 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4431 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4432 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4433 break;
4434 case 0x70: /* pshufx insn */
4435 case 0xc6: /* shufps/shufpd insn */
4436 val = ldub_code(s->pc++);
4437 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4438 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4439 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
4440 break;
4441 case 0xc2:
4442 /* compare insns */
4443 val = ldub_code(s->pc++);
4444 if (val >= 8)
4445 goto illegal_op;
4446 sse_op2 = sse_op_table4[val][b1];
4447 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4448 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4449 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4450 break;
4451 case 0xf7:
4452 /* maskmov : we must prepare A0 */
4453 if (mod != 3)
4454 goto illegal_op;
4455#ifdef TARGET_X86_64
4456 if (s->aflag == 2) {
4457 gen_op_movq_A0_reg(R_EDI);
4458 } else
4459#endif
4460 {
4461 gen_op_movl_A0_reg(R_EDI);
4462 if (s->aflag == 0)
4463 gen_op_andl_A0_ffff();
4464 }
4465 gen_add_A0_ds_seg(s);
4466
4467 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4468 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4469 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
4470 break;
4471 default:
4472 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4473 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4474 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4475 break;
4476 }
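 /* comiss/comisd and ucomiss/ucomisd update EFLAGS in the helper */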
4477 if (b == 0x2e || b == 0x2f) {
4478 s->cc_op = CC_OP_EFLAGS;
4479 }
4480 }
4481}
4482
4483#ifdef VBOX
4484/* Checks whether this is an invalid lock sequence. Only a few
4485 instructions can be used together with the lock prefix, and of those
4486 only the forms that write a memory operand are valid. So this is
4487 kind of annoying work to do...
4488 The AMD manual lists the following instructions.
4489 ADC
4490 ADD
4491 AND
4492 BTC
4493 BTR
4494 BTS
4495 CMPXCHG
4496 CMPXCHG8B
4497 CMPXCHG16B
4498 DEC
4499 INC
4500 NEG
4501 NOT
4502 OR
4503 SBB
4504 SUB
4505 XADD
4506 XCHG
4507 XOR */
4508static bool is_invalid_lock_sequence(DisasContext *s, target_ulong pc_start, int b)
4509{
4510 target_ulong pc = s->pc;
4511 int modrm, mod, op;
4512
4513 /* X={8,16,32,64} Y={16,32,64} */
4514 switch (b)
4515 {
4516 /* /2: ADC reg/memX, immX */
4517 /* /0: ADD reg/memX, immX */
4518 /* /4: AND reg/memX, immX */
4519 /* /1: OR reg/memX, immX */
4520 /* /3: SBB reg/memX, immX */
4521 /* /5: SUB reg/memX, immX */
4522 /* /6: XOR reg/memX, immX */
4523 case 0x80:
4524 case 0x81:
4525 case 0x83:
4526 modrm = ldub_code(pc++);
4527 op = (modrm >> 3) & 7;
4528 if (op == 7) /* /7: CMP */
4529 break;
4530 mod = (modrm >> 6) & 3;
4531 if (mod == 3) /* register destination */
4532 break;
4533 return false;
4534
4535 case 0x10: /* /r: ADC reg/mem8, reg8 */
4536 case 0x11: /* /r: ADC reg/memX, regY */
4537 case 0x00: /* /r: ADD reg/mem8, reg8 */
4538 case 0x01: /* /r: ADD reg/memX, regY */
4539 case 0x20: /* /r: AND reg/mem8, reg8 */
4540 case 0x21: /* /r: AND reg/memY, regY */
4541 case 0x08: /* /r: OR reg/mem8, reg8 */
4542 case 0x09: /* /r: OR reg/memY, regY */
4543 case 0x18: /* /r: SBB reg/mem8, reg8 */
4544 case 0x19: /* /r: SBB reg/memY, regY */
4545 case 0x28: /* /r: SUB reg/mem8, reg8 */
4546 case 0x29: /* /r: SUB reg/memY, regY */
4547 case 0x86: /* /r: XCHG reg/mem8, reg8 or XCHG reg8, reg/mem8 */
4548 case 0x87: /* /r: XCHG reg/memY, regY or XCHG regY, reg/memY */
4549 case 0x30: /* /r: XOR reg/mem8, reg8 */
4550 case 0x31: /* /r: XOR reg/memY, regY */
4551 modrm = ldub_code(pc++);
4552 mod = (modrm >> 6) & 3;
4553 if (mod == 3) /* register destination */
4554 break;
4555 return false;
4556
4557 /* /1: DEC reg/memX */
4558 /* /0: INC reg/memX */
4559 case 0xfe:
4560 case 0xff:
4561 modrm = ldub_code(pc++);
4562 mod = (modrm >> 6) & 3;
4563 if (mod == 3) /* register destination */
4564 break;
4565 return false;
4566
4567 /* /3: NEG reg/memX */
4568 /* /2: NOT reg/memX */
4569 case 0xf6:
4570 case 0xf7:
4571 modrm = ldub_code(pc++);
4572 mod = (modrm >> 6) & 3;
4573 if (mod == 3) /* register destination */
4574 break;
4575 return false;
4576
4577 case 0x0f:
4578 b = ldub_code(pc++);
4579 switch (b)
4580 {
4581 /* /7: BTC reg/memY, imm8 */
4582 /* /6: BTR reg/memY, imm8 */
4583 /* /5: BTS reg/memY, imm8 */
4584 case 0xba:
4585 modrm = ldub_code(pc++);
4586 op = (modrm >> 3) & 7;
4587 if (op < 5)
4588 break;
4589 mod = (modrm >> 6) & 3;
4590 if (mod == 3) /* register destination */
4591 break;
4592 return false;
4593
4594 case 0xbb: /* /r: BTC reg/memY, regY */
4595 case 0xb3: /* /r: BTR reg/memY, regY */
4596 case 0xab: /* /r: BTS reg/memY, regY */
4597 case 0xb0: /* /r: CMPXCHG reg/mem8, reg8 */
4598 case 0xb1: /* /r: CMPXCHG reg/memY, regY */
4599 case 0xc0: /* /r: XADD reg/mem8, reg8 */
4600 case 0xc1: /* /r: XADD reg/memY, regY */
4601 modrm = ldub_code(pc++);
4602 mod = (modrm >> 6) & 3;
4603 if (mod == 3) /* register destination */
4604 break;
4605 return false;
4606
4607 /* /1: CMPXCHG8B mem64 or CMPXCHG16B mem128 */
4608 case 0xc7:
4609 modrm = ldub_code(pc++);
4610 op = (modrm >> 3) & 7;
4611 if (op != 1)
4612 break;
4613 return false;
4614 }
4615 break;
4616 }
4617
4618 /* illegal sequence. The s->pc is past the lock prefix and that
4619 is sufficient for the TB, I think. */
4620 Log(("illegal lock sequence %RGv (b=%#x)\n", pc_start, b));
4621 return true;
4622}
4623#endif /* VBOX */
4624
4625
4626/* convert one instruction. s->is_jmp is set if the translation must
4627 be stopped. Return the next pc value */
4628static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
4629{
4630 int b, prefixes, aflag, dflag;
4631 int shift, ot;
4632 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
4633 target_ulong next_eip, tval;
4634 int rex_w, rex_r;
4635
4636 if (unlikely(loglevel & CPU_LOG_TB_OP))
4637 tcg_gen_debug_insn_start(pc_start);
4638
4639 s->pc = pc_start;
4640 prefixes = 0;
4641 aflag = s->code32;
4642 dflag = s->code32;
4643 s->override = -1;
4644 rex_w = -1;
4645 rex_r = 0;
4646#ifdef TARGET_X86_64
4647 s->rex_x = 0;
4648 s->rex_b = 0;
4649 x86_64_hregs = 0;
4650#endif
4651 s->rip_offset = 0; /* for relative ip address */
4652#ifdef VBOX
4653 /* nike: seems to only slow things down */
4654# if 0
4655 /* Always update EIP. Otherwise one must be very careful with generated code that can raise exceptions. */
4656
4657 gen_update_eip(pc_start - s->cs_base);
4658# endif
4659#endif
4660
4661 next_byte:
4662 b = ldub_code(s->pc);
4663 s->pc++;
4664 /* check prefixes */
4665#ifdef TARGET_X86_64
4666 if (CODE64(s)) {
4667 switch (b) {
4668 case 0xf3:
4669 prefixes |= PREFIX_REPZ;
4670 goto next_byte;
4671 case 0xf2:
4672 prefixes |= PREFIX_REPNZ;
4673 goto next_byte;
4674 case 0xf0:
4675 prefixes |= PREFIX_LOCK;
4676 goto next_byte;
4677 case 0x2e:
4678 s->override = R_CS;
4679 goto next_byte;
4680 case 0x36:
4681 s->override = R_SS;
4682 goto next_byte;
4683 case 0x3e:
4684 s->override = R_DS;
4685 goto next_byte;
4686 case 0x26:
4687 s->override = R_ES;
4688 goto next_byte;
4689 case 0x64:
4690 s->override = R_FS;
4691 goto next_byte;
4692 case 0x65:
4693 s->override = R_GS;
4694 goto next_byte;
4695 case 0x66:
4696 prefixes |= PREFIX_DATA;
4697 goto next_byte;
4698 case 0x67:
4699 prefixes |= PREFIX_ADR;
4700 goto next_byte;
4701 case 0x40 ... 0x4f:
4702 /* REX prefix */
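 /* the REX bits are pre-shifted to bit 3 so they can be OR'ed straight into the 3-bit ModRM reg/index/base fields */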
4703 rex_w = (b >> 3) & 1;
4704 rex_r = (b & 0x4) << 1;
4705 s->rex_x = (b & 0x2) << 2;
4706 REX_B(s) = (b & 0x1) << 3;
4707 x86_64_hregs = 1; /* select uniform byte register addressing */
4708 goto next_byte;
4709 }
4710 if (rex_w == 1) {
4711 /* 0x66 is ignored if rex.w is set */
4712 dflag = 2;
4713 } else {
4714 if (prefixes & PREFIX_DATA)
4715 dflag ^= 1;
4716 }
4717 if (!(prefixes & PREFIX_ADR))
4718 aflag = 2;
4719 } else
4720#endif
4721 {
4722 switch (b) {
4723 case 0xf3:
4724 prefixes |= PREFIX_REPZ;
4725 goto next_byte;
4726 case 0xf2:
4727 prefixes |= PREFIX_REPNZ;
4728 goto next_byte;
4729 case 0xf0:
4730 prefixes |= PREFIX_LOCK;
4731 goto next_byte;
4732 case 0x2e:
4733 s->override = R_CS;
4734 goto next_byte;
4735 case 0x36:
4736 s->override = R_SS;
4737 goto next_byte;
4738 case 0x3e:
4739 s->override = R_DS;
4740 goto next_byte;
4741 case 0x26:
4742 s->override = R_ES;
4743 goto next_byte;
4744 case 0x64:
4745 s->override = R_FS;
4746 goto next_byte;
4747 case 0x65:
4748 s->override = R_GS;
4749 goto next_byte;
4750 case 0x66:
4751 prefixes |= PREFIX_DATA;
4752 goto next_byte;
4753 case 0x67:
4754 prefixes |= PREFIX_ADR;
4755 goto next_byte;
4756 }
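 /* outside long mode, 0x66/0x67 simply toggle between the 16-bit and 32-bit default operand/address sizes */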
4757 if (prefixes & PREFIX_DATA)
4758 dflag ^= 1;
4759 if (prefixes & PREFIX_ADR)
4760 aflag ^= 1;
4761 }
4762
4763 s->prefix = prefixes;
4764 s->aflag = aflag;
4765 s->dflag = dflag;
4766
4767 /* lock generation */
4768#ifndef VBOX
4769 if (prefixes & PREFIX_LOCK)
4770 tcg_gen_helper_0_0(helper_lock);
4771#else /* VBOX */
4772 if (prefixes & PREFIX_LOCK) {
4773 if (is_invalid_lock_sequence(s, pc_start, b)) {
4774 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
4775 return s->pc;
4776 }
4777 tcg_gen_helper_0_0(helper_lock);
4778 }
4779#endif /* VBOX */
4780
4781 /* now check op code */
4782 reswitch:
4783 switch(b) {
4784 case 0x0f:
4785 /**************************/
4786 /* extended op code */
4787 b = ldub_code(s->pc++) | 0x100;
4788 goto reswitch;
4789
4790 /**************************/
4791 /* arith & logic */
4792 case 0x00 ... 0x05:
4793 case 0x08 ... 0x0d:
4794 case 0x10 ... 0x15:
4795 case 0x18 ... 0x1d:
4796 case 0x20 ... 0x25:
4797 case 0x28 ... 0x2d:
4798 case 0x30 ... 0x35:
4799 case 0x38 ... 0x3d:
4800 {
4801 int op, f, val;
4802 op = (b >> 3) & 7;
4803 f = (b >> 1) & 3;
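 /* op selects add/or/adc/sbb/and/sub/xor/cmp; f selects the form: Ev,Gv / Gv,Ev / A,Iv */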
4804
4805 if ((b & 1) == 0)
4806 ot = OT_BYTE;
4807 else
4808 ot = dflag + OT_WORD;
4809
4810 switch(f) {
4811 case 0: /* OP Ev, Gv */
4812 modrm = ldub_code(s->pc++);
4813 reg = ((modrm >> 3) & 7) | rex_r;
4814 mod = (modrm >> 6) & 3;
4815 rm = (modrm & 7) | REX_B(s);
4816 if (mod != 3) {
4817 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4818 opreg = OR_TMP0;
4819 } else if (op == OP_XORL && rm == reg) {
4820 xor_zero:
4821 /* xor reg, reg optimisation */
4822 gen_op_movl_T0_0();
4823 s->cc_op = CC_OP_LOGICB + ot;
4824 gen_op_mov_reg_T0(ot, reg);
4825 gen_op_update1_cc();
4826 break;
4827 } else {
4828 opreg = rm;
4829 }
4830 gen_op_mov_TN_reg(ot, 1, reg);
4831 gen_op(s, op, ot, opreg);
4832 break;
4833 case 1: /* OP Gv, Ev */
4834 modrm = ldub_code(s->pc++);
4835 mod = (modrm >> 6) & 3;
4836 reg = ((modrm >> 3) & 7) | rex_r;
4837 rm = (modrm & 7) | REX_B(s);
4838 if (mod != 3) {
4839 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4840 gen_op_ld_T1_A0(ot + s->mem_index);
4841 } else if (op == OP_XORL && rm == reg) {
4842 goto xor_zero;
4843 } else {
4844 gen_op_mov_TN_reg(ot, 1, rm);
4845 }
4846 gen_op(s, op, ot, reg);
4847 break;
4848 case 2: /* OP A, Iv */
4849 val = insn_get(s, ot);
4850 gen_op_movl_T1_im(val);
4851 gen_op(s, op, ot, OR_EAX);
4852 break;
4853 }
4854 }
4855 break;
4856
4857 case 0x82:
4858 if (CODE64(s))
4859 goto illegal_op;
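 /* fall through: outside 64-bit mode, 0x82 is an alias of 0x80 */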
4860 case 0x80: /* GRP1 */
4861 case 0x81:
4862 case 0x83:
4863 {
4864 int val;
4865
4866 if ((b & 1) == 0)
4867 ot = OT_BYTE;
4868 else
4869 ot = dflag + OT_WORD;
4870
4871 modrm = ldub_code(s->pc++);
4872 mod = (modrm >> 6) & 3;
4873 rm = (modrm & 7) | REX_B(s);
4874 op = (modrm >> 3) & 7;
4875
4876 if (mod != 3) {
4877 if (b == 0x83)
4878 s->rip_offset = 1;
4879 else
4880 s->rip_offset = insn_const_size(ot);
4881 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4882 opreg = OR_TMP0;
4883 } else {
4884 opreg = rm;
4885 }
4886
4887 switch(b) {
4888 default:
4889 case 0x80:
4890 case 0x81:
4891 case 0x82:
4892 val = insn_get(s, ot);
4893 break;
4894 case 0x83:
4895 val = (int8_t)insn_get(s, OT_BYTE);
4896 break;
4897 }
4898 gen_op_movl_T1_im(val);
4899 gen_op(s, op, ot, opreg);
4900 }
4901 break;
4902
4903 /**************************/
4904 /* inc, dec, and other misc arith */
4905 case 0x40 ... 0x47: /* inc Gv */
4906 ot = dflag ? OT_LONG : OT_WORD;
4907 gen_inc(s, ot, OR_EAX + (b & 7), 1);
4908 break;
4909 case 0x48 ... 0x4f: /* dec Gv */
4910 ot = dflag ? OT_LONG : OT_WORD;
4911 gen_inc(s, ot, OR_EAX + (b & 7), -1);
4912 break;
4913 case 0xf6: /* GRP3 */
4914 case 0xf7:
4915 if ((b & 1) == 0)
4916 ot = OT_BYTE;
4917 else
4918 ot = dflag + OT_WORD;
4919
4920 modrm = ldub_code(s->pc++);
4921 mod = (modrm >> 6) & 3;
4922 rm = (modrm & 7) | REX_B(s);
4923 op = (modrm >> 3) & 7;
4924 if (mod != 3) {
4925 if (op == 0)
4926 s->rip_offset = insn_const_size(ot);
4927 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4928 gen_op_ld_T0_A0(ot + s->mem_index);
4929 } else {
4930 gen_op_mov_TN_reg(ot, 0, rm);
4931 }
4932
4933 switch(op) {
4934 case 0: /* test */
4935 val = insn_get(s, ot);
4936 gen_op_movl_T1_im(val);
4937 gen_op_testl_T0_T1_cc();
4938 s->cc_op = CC_OP_LOGICB + ot;
4939 break;
4940 case 2: /* not */
4941 tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
4942 if (mod != 3) {
4943 gen_op_st_T0_A0(ot + s->mem_index);
4944 } else {
4945 gen_op_mov_reg_T0(ot, rm);
4946 }
4947 break;
4948 case 3: /* neg */
4949 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
4950 if (mod != 3) {
4951 gen_op_st_T0_A0(ot + s->mem_index);
4952 } else {
4953 gen_op_mov_reg_T0(ot, rm);
4954 }
4955 gen_op_update_neg_cc();
4956 s->cc_op = CC_OP_SUBB + ot;
4957 break;
4958 case 4: /* mul */
4959 switch(ot) {
4960 case OT_BYTE:
4961 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
4962 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4963 tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
4964 /* XXX: use 32 bit mul which could be faster */
4965 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4966 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4967 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
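 /* CC_SRC holds the high byte of the product; non-zero means CF/OF get set */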
4968 tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
4969 s->cc_op = CC_OP_MULB;
4970 break;
4971 case OT_WORD:
4972 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
4973 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
4974 tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
4975 /* XXX: use 32 bit mul which could be faster */
4976 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4977 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4978 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4979 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
4980 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4981 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4982 s->cc_op = CC_OP_MULW;
4983 break;
4984 default:
4985 case OT_LONG:
4986#ifdef TARGET_X86_64
4987 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4988 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
4989 tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
4990 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4991 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4992 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4993 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
4994 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4995 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4996#else
4997 {
4998 TCGv t0, t1;
4999 t0 = tcg_temp_new(TCG_TYPE_I64);
5000 t1 = tcg_temp_new(TCG_TYPE_I64);
5001 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5002 tcg_gen_extu_i32_i64(t0, cpu_T[0]);
5003 tcg_gen_extu_i32_i64(t1, cpu_T[1]);
5004 tcg_gen_mul_i64(t0, t0, t1);
5005 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5006 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5007 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5008 tcg_gen_shri_i64(t0, t0, 32);
5009 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5010 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5011 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
5012 }
5013#endif
5014 s->cc_op = CC_OP_MULL;
5015 break;
5016#ifdef TARGET_X86_64
5017 case OT_QUAD:
5018 tcg_gen_helper_0_1(helper_mulq_EAX_T0, cpu_T[0]);
5019 s->cc_op = CC_OP_MULQ;
5020 break;
5021#endif
5022 }
5023 break;
5024 case 5: /* imul */
5025 switch(ot) {
5026 case OT_BYTE:
5027 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
5028 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5029 tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
5030 /* XXX: use 32 bit mul which could be faster */
5031 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5032 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5033 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
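 /* CC_SRC = result - sext8(result): non-zero iff the signed product overflowed AL, which sets CF/OF */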
5034 tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
5035 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5036 s->cc_op = CC_OP_MULB;
5037 break;
5038 case OT_WORD:
5039 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
5040 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5041 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5042 /* XXX: use 32 bit mul which could be faster */
5043 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5044 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5045 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5046 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5047 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5048 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
5049 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5050 s->cc_op = CC_OP_MULW;
5051 break;
5052 default:
5053 case OT_LONG:
5054#ifdef TARGET_X86_64
5055 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5056 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5057 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
5058 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5059 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5060 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5061 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
5062 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5063 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
5064 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5065#else
5066 {
5067 TCGv t0, t1;
5068 t0 = tcg_temp_new(TCG_TYPE_I64);
5069 t1 = tcg_temp_new(TCG_TYPE_I64);
5070 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5071 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
5072 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
5073 tcg_gen_mul_i64(t0, t0, t1);
5074 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5075 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5076 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5077 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
5078 tcg_gen_shri_i64(t0, t0, 32);
5079 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5080 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5081 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5082 }
5083#endif
5084 s->cc_op = CC_OP_MULL;
5085 break;
5086#ifdef TARGET_X86_64
5087 case OT_QUAD:
5088 tcg_gen_helper_0_1(helper_imulq_EAX_T0, cpu_T[0]);
5089 s->cc_op = CC_OP_MULQ;
5090 break;
5091#endif
5092 }
5093 break;
5094 case 6: /* div */
5095 switch(ot) {
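 /* EIP is synced before each helper call since division may raise #DE */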
5096 case OT_BYTE:
5097 gen_jmp_im(pc_start - s->cs_base);
5098 tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
5099 break;
5100 case OT_WORD:
5101 gen_jmp_im(pc_start - s->cs_base);
5102 tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
5103 break;
5104 default:
5105 case OT_LONG:
5106 gen_jmp_im(pc_start - s->cs_base);
5107 tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
5108 break;
5109#ifdef TARGET_X86_64
5110 case OT_QUAD:
5111 gen_jmp_im(pc_start - s->cs_base);
5112 tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
5113 break;
5114#endif
5115 }
5116 break;
5117 case 7: /* idiv */
5118 switch(ot) {
5119 case OT_BYTE:
5120 gen_jmp_im(pc_start - s->cs_base);
5121 tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
5122 break;
5123 case OT_WORD:
5124 gen_jmp_im(pc_start - s->cs_base);
5125 tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
5126 break;
5127 default:
5128 case OT_LONG:
5129 gen_jmp_im(pc_start - s->cs_base);
5130 tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
5131 break;
5132#ifdef TARGET_X86_64
5133 case OT_QUAD:
5134 gen_jmp_im(pc_start - s->cs_base);
5135 tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
5136 break;
5137#endif
5138 }
5139 break;
5140 default:
5141 goto illegal_op;
5142 }
5143 break;
5144
5145 case 0xfe: /* GRP4 */
5146 case 0xff: /* GRP5 */
5147 if ((b & 1) == 0)
5148 ot = OT_BYTE;
5149 else
5150 ot = dflag + OT_WORD;
5151
5152 modrm = ldub_code(s->pc++);
5153 mod = (modrm >> 6) & 3;
5154 rm = (modrm & 7) | REX_B(s);
5155 op = (modrm >> 3) & 7;
5156 if (op >= 2 && b == 0xfe) {
5157 goto illegal_op;
5158 }
5159 if (CODE64(s)) {
5160 if (op == 2 || op == 4) {
5161 /* operand size for jumps is 64 bit */
5162 ot = OT_QUAD;
5163 } else if (op == 3 || op == 5) {
5164 /* for far calls/jumps (lcall/ljmp), the operand is 16 or 32 bit,
5165 even in long mode */
5166 ot = dflag ? OT_LONG : OT_WORD;
5167 } else if (op == 6) {
5168 /* default push size is 64 bit */
5169 ot = dflag ? OT_QUAD : OT_WORD;
5170 }
5171 }
5172 if (mod != 3) {
5173 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5174 if (op >= 2 && op != 3 && op != 5)
5175 gen_op_ld_T0_A0(ot + s->mem_index);
5176 } else {
5177 gen_op_mov_TN_reg(ot, 0, rm);
5178 }
5179
5180 switch(op) {
5181 case 0: /* inc Ev */
5182 if (mod != 3)
5183 opreg = OR_TMP0;
5184 else
5185 opreg = rm;
5186 gen_inc(s, ot, opreg, 1);
5187 break;
5188 case 1: /* dec Ev */
5189 if (mod != 3)
5190 opreg = OR_TMP0;
5191 else
5192 opreg = rm;
5193 gen_inc(s, ot, opreg, -1);
5194 break;
5195 case 2: /* call Ev */
5196 /* XXX: optimize if memory (no 'and' is necessary) */
5197#ifdef VBOX_WITH_CALL_RECORD
5198 if (s->record_call)
5199 gen_op_record_call();
5200#endif
5201 if (s->dflag == 0)
5202 gen_op_andl_T0_ffff();
5203 next_eip = s->pc - s->cs_base;
5204 gen_movtl_T1_im(next_eip);
5205 gen_push_T1(s);
5206 gen_op_jmp_T0();
5207 gen_eob(s);
5208 break;
5209 case 3: /* lcall Ev */
5210 gen_op_ld_T1_A0(ot + s->mem_index);
5211 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5212 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5213 do_lcall:
5214 if (s->pe && !s->vm86) {
5215 if (s->cc_op != CC_OP_DYNAMIC)
5216 gen_op_set_cc_op(s->cc_op);
5217 gen_jmp_im(pc_start - s->cs_base);
5218 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5219 tcg_gen_helper_0_4(helper_lcall_protected,
5220 cpu_tmp2_i32, cpu_T[1],
5221 tcg_const_i32(dflag),
5222 tcg_const_i32(s->pc - pc_start));
5223 } else {
5224 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5225 tcg_gen_helper_0_4(helper_lcall_real,
5226 cpu_tmp2_i32, cpu_T[1],
5227 tcg_const_i32(dflag),
5228 tcg_const_i32(s->pc - s->cs_base));
5229 }
5230 gen_eob(s);
5231 break;
5232 case 4: /* jmp Ev */
5233 if (s->dflag == 0)
5234 gen_op_andl_T0_ffff();
5235 gen_op_jmp_T0();
5236 gen_eob(s);
5237 break;
5238 case 5: /* ljmp Ev */
5239 gen_op_ld_T1_A0(ot + s->mem_index);
5240 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5241 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5242 do_ljmp:
5243 if (s->pe && !s->vm86) {
5244 if (s->cc_op != CC_OP_DYNAMIC)
5245 gen_op_set_cc_op(s->cc_op);
5246 gen_jmp_im(pc_start - s->cs_base);
5247 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5248 tcg_gen_helper_0_3(helper_ljmp_protected,
5249 cpu_tmp2_i32,
5250 cpu_T[1],
5251 tcg_const_i32(s->pc - pc_start));
5252 } else {
5253 gen_op_movl_seg_T0_vm(R_CS);
5254 gen_op_movl_T0_T1();
5255 gen_op_jmp_T0();
5256 }
5257 gen_eob(s);
5258 break;
5259 case 6: /* push Ev */
5260 gen_push_T0(s);
5261 break;
5262 default:
5263 goto illegal_op;
5264 }
5265 break;
5266
5267 case 0x84: /* test Ev, Gv */
5268 case 0x85:
5269 if ((b & 1) == 0)
5270 ot = OT_BYTE;
5271 else
5272 ot = dflag + OT_WORD;
5273
5274 modrm = ldub_code(s->pc++);
5275 mod = (modrm >> 6) & 3;
5276 rm = (modrm & 7) | REX_B(s);
5277 reg = ((modrm >> 3) & 7) | rex_r;
5278
5279 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5280 gen_op_mov_TN_reg(ot, 1, reg);
5281 gen_op_testl_T0_T1_cc();
5282 s->cc_op = CC_OP_LOGICB + ot;
5283 break;
5284
5285 case 0xa8: /* test eAX, Iv */
5286 case 0xa9:
5287 if ((b & 1) == 0)
5288 ot = OT_BYTE;
5289 else
5290 ot = dflag + OT_WORD;
5291 val = insn_get(s, ot);
5292
5293 gen_op_mov_TN_reg(ot, 0, OR_EAX);
5294 gen_op_movl_T1_im(val);
5295 gen_op_testl_T0_T1_cc();
5296 s->cc_op = CC_OP_LOGICB + ot;
5297 break;
5298
5299 case 0x98: /* CWDE/CBW */
5300#ifdef TARGET_X86_64
5301 if (dflag == 2) {
5302 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5303 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5304 gen_op_mov_reg_T0(OT_QUAD, R_EAX);
5305 } else
5306#endif
5307 if (dflag == 1) {
5308 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5309 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5310 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5311 } else {
5312 gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
5313 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5314 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5315 }
5316 break;
5317 case 0x99: /* CDQ/CWD */
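 /* replicate the sign bit of rAX across rDX */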
5318#ifdef TARGET_X86_64
5319 if (dflag == 2) {
5320 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5321 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
5322 gen_op_mov_reg_T0(OT_QUAD, R_EDX);
5323 } else
5324#endif
5325 if (dflag == 1) {
5326 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5327 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5328 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
5329 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5330 } else {
5331 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5332 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5333 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
5334 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5335 }
5336 break;
5337 case 0x1af: /* imul Gv, Ev */
5338 case 0x69: /* imul Gv, Ev, I */
5339 case 0x6b:
5340 ot = dflag + OT_WORD;
5341 modrm = ldub_code(s->pc++);
5342 reg = ((modrm >> 3) & 7) | rex_r;
5343 if (b == 0x69)
5344 s->rip_offset = insn_const_size(ot);
5345 else if (b == 0x6b)
5346 s->rip_offset = 1;
5347 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5348 if (b == 0x69) {
5349 val = insn_get(s, ot);
5350 gen_op_movl_T1_im(val);
5351 } else if (b == 0x6b) {
5352 val = (int8_t)insn_get(s, OT_BYTE);
5353 gen_op_movl_T1_im(val);
5354 } else {
5355 gen_op_mov_TN_reg(ot, 1, reg);
5356 }
5357
5358#ifdef TARGET_X86_64
5359 if (ot == OT_QUAD) {
5360 tcg_gen_helper_1_2(helper_imulq_T0_T1, cpu_T[0], cpu_T[0], cpu_T[1]);
5361 } else
5362#endif
5363 if (ot == OT_LONG) {
5364#ifdef TARGET_X86_64
5365 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5366 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
5367 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5368 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5369 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
5370 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5371#else
5372 {
5373 TCGv t0, t1;
5374 t0 = tcg_temp_new(TCG_TYPE_I64);
5375 t1 = tcg_temp_new(TCG_TYPE_I64);
5376 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
5377 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
5378 tcg_gen_mul_i64(t0, t0, t1);
5379 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5380 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5381 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
5382 tcg_gen_shri_i64(t0, t0, 32);
5383 tcg_gen_trunc_i64_i32(cpu_T[1], t0);
5384 tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
5385 }
5386#endif
5387 } else {
5388 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5389 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5390 /* XXX: use 32 bit mul which could be faster */
5391 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5392 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5393 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5394 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5395 }
5396 gen_op_mov_reg_T0(ot, reg);
5397 s->cc_op = CC_OP_MULB + ot;
5398 break;
5399 case 0x1c0:
5400 case 0x1c1: /* xadd Ev, Gv */
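 /* xadd: the destination receives dest+src and the source register receives the old destination */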
5401 if ((b & 1) == 0)
5402 ot = OT_BYTE;
5403 else
5404 ot = dflag + OT_WORD;
5405 modrm = ldub_code(s->pc++);
5406 reg = ((modrm >> 3) & 7) | rex_r;
5407 mod = (modrm >> 6) & 3;
5408 if (mod == 3) {
5409 rm = (modrm & 7) | REX_B(s);
5410 gen_op_mov_TN_reg(ot, 0, reg);
5411 gen_op_mov_TN_reg(ot, 1, rm);
5412 gen_op_addl_T0_T1();
5413 gen_op_mov_reg_T1(ot, reg);
5414 gen_op_mov_reg_T0(ot, rm);
5415 } else {
5416 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5417 gen_op_mov_TN_reg(ot, 0, reg);
5418 gen_op_ld_T1_A0(ot + s->mem_index);
5419 gen_op_addl_T0_T1();
5420 gen_op_st_T0_A0(ot + s->mem_index);
5421 gen_op_mov_reg_T1(ot, reg);
5422 }
5423 gen_op_update2_cc();
5424 s->cc_op = CC_OP_ADDB + ot;
5425 break;
5426 case 0x1b0:
5427 case 0x1b1: /* cmpxchg Ev, Gv */
5428 {
5429 int label1, label2;
5430 TCGv t0, t1, t2, a0;
5431
5432 if ((b & 1) == 0)
5433 ot = OT_BYTE;
5434 else
5435 ot = dflag + OT_WORD;
5436 modrm = ldub_code(s->pc++);
5437 reg = ((modrm >> 3) & 7) | rex_r;
5438 mod = (modrm >> 6) & 3;
5439 t0 = tcg_temp_local_new(TCG_TYPE_TL);
5440 t1 = tcg_temp_local_new(TCG_TYPE_TL);
5441 t2 = tcg_temp_local_new(TCG_TYPE_TL);
5442 a0 = tcg_temp_local_new(TCG_TYPE_TL);
5443 gen_op_mov_v_reg(ot, t1, reg);
5444 if (mod == 3) {
5445 rm = (modrm & 7) | REX_B(s);
5446 gen_op_mov_v_reg(ot, t0, rm);
5447 } else {
5448 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5449 tcg_gen_mov_tl(a0, cpu_A0);
5450 gen_op_ld_v(ot + s->mem_index, t0, a0);
5451 rm = 0; /* avoid warning */
5452 }
5453 label1 = gen_new_label();
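 /* t2 = EAX - operand, zero-extended to the operand size; branch if they compare equal */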
5454 tcg_gen_ld_tl(t2, cpu_env, offsetof(CPUState, regs[R_EAX]));
5455 tcg_gen_sub_tl(t2, t2, t0);
5456 gen_extu(ot, t2);
5457 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
5458 if (mod == 3) {
5459 label2 = gen_new_label();
5460 gen_op_mov_reg_v(ot, R_EAX, t0);
5461 tcg_gen_br(label2);
5462 gen_set_label(label1);
5463 gen_op_mov_reg_v(ot, rm, t1);
5464 gen_set_label(label2);
5465 } else {
5466 tcg_gen_mov_tl(t1, t0);
5467 gen_op_mov_reg_v(ot, R_EAX, t0);
5468 gen_set_label(label1);
5469 /* always store */
5470 gen_op_st_v(ot + s->mem_index, t1, a0);
5471 }
5472 tcg_gen_mov_tl(cpu_cc_src, t0);
5473 tcg_gen_mov_tl(cpu_cc_dst, t2);
5474 s->cc_op = CC_OP_SUBB + ot;
5475 tcg_temp_free(t0);
5476 tcg_temp_free(t1);
5477 tcg_temp_free(t2);
5478 tcg_temp_free(a0);
5479 }
5480 break;
5481 case 0x1c7: /* cmpxchg8b */
5482 modrm = ldub_code(s->pc++);
5483 mod = (modrm >> 6) & 3;
5484 if ((mod == 3) || ((modrm & 0x38) != 0x8))
5485 goto illegal_op;
5486#ifdef TARGET_X86_64
5487 if (dflag == 2) {
5488 if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
5489 goto illegal_op;
5490 gen_jmp_im(pc_start - s->cs_base);
5491 if (s->cc_op != CC_OP_DYNAMIC)
5492 gen_op_set_cc_op(s->cc_op);
5493 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5494 tcg_gen_helper_0_1(helper_cmpxchg16b, cpu_A0);
5495 } else
5496#endif
5497 {
5498 if (!(s->cpuid_features & CPUID_CX8))
5499 goto illegal_op;
5500 gen_jmp_im(pc_start - s->cs_base);
5501 if (s->cc_op != CC_OP_DYNAMIC)
5502 gen_op_set_cc_op(s->cc_op);
5503 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5504 tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
5505 }
5506 s->cc_op = CC_OP_EFLAGS;
5507 break;
5508
5509 /**************************/
5510 /* push/pop */
5511 case 0x50 ... 0x57: /* push */
5512 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
5513 gen_push_T0(s);
5514 break;
5515 case 0x58 ... 0x5f: /* pop */
5516 if (CODE64(s)) {
5517 ot = dflag ? OT_QUAD : OT_WORD;
5518 } else {
5519 ot = dflag + OT_WORD;
5520 }
5521 gen_pop_T0(s);
5522 /* NOTE: order is important for pop %sp */
5523 gen_pop_update(s);
5524 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
5525 break;
5526 case 0x60: /* pusha */
5527 if (CODE64(s))
5528 goto illegal_op;
5529 gen_pusha(s);
5530 break;
5531 case 0x61: /* popa */
5532 if (CODE64(s))
5533 goto illegal_op;
5534 gen_popa(s);
5535 break;
5536 case 0x68: /* push Iv */
5537 case 0x6a:
5538 if (CODE64(s)) {
5539 ot = dflag ? OT_QUAD : OT_WORD;
5540 } else {
5541 ot = dflag + OT_WORD;
5542 }
5543 if (b == 0x68)
5544 val = insn_get(s, ot);
5545 else
5546 val = (int8_t)insn_get(s, OT_BYTE);
5547 gen_op_movl_T0_im(val);
5548 gen_push_T0(s);
5549 break;
5550 case 0x8f: /* pop Ev */
5551 if (CODE64(s)) {
5552 ot = dflag ? OT_QUAD : OT_WORD;
5553 } else {
5554 ot = dflag + OT_WORD;
5555 }
5556 modrm = ldub_code(s->pc++);
5557 mod = (modrm >> 6) & 3;
5558 gen_pop_T0(s);
5559 if (mod == 3) {
5560 /* NOTE: order is important for pop %sp */
5561 gen_pop_update(s);
5562 rm = (modrm & 7) | REX_B(s);
5563 gen_op_mov_reg_T0(ot, rm);
5564 } else {
5565 /* NOTE: order is important too for MMU exceptions */
5566 s->popl_esp_hack = 1 << ot;
5567 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5568 s->popl_esp_hack = 0;
5569 gen_pop_update(s);
5570 }
5571 break;
5572 case 0xc8: /* enter */
5573 {
5574 int level;
5575 val = lduw_code(s->pc);
5576 s->pc += 2;
5577 level = ldub_code(s->pc++);
5578 gen_enter(s, val, level);
5579 }
5580 break;
5581 case 0xc9: /* leave */
5582 /* XXX: exception not precise (ESP is updated before potential exception) */
5583 if (CODE64(s)) {
5584 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
5585 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
5586 } else if (s->ss32) {
5587 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
5588 gen_op_mov_reg_T0(OT_LONG, R_ESP);
5589 } else {
5590 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
5591 gen_op_mov_reg_T0(OT_WORD, R_ESP);
5592 }
5593 gen_pop_T0(s);
5594 if (CODE64(s)) {
5595 ot = dflag ? OT_QUAD : OT_WORD;
5596 } else {
5597 ot = dflag + OT_WORD;
5598 }
5599 gen_op_mov_reg_T0(ot, R_EBP);
5600 gen_pop_update(s);
5601 break;
5602 case 0x06: /* push es */
5603 case 0x0e: /* push cs */
5604 case 0x16: /* push ss */
5605 case 0x1e: /* push ds */
5606 if (CODE64(s))
5607 goto illegal_op;
5608 gen_op_movl_T0_seg(b >> 3);
5609 gen_push_T0(s);
5610 break;
5611 case 0x1a0: /* push fs */
5612 case 0x1a8: /* push gs */
5613 gen_op_movl_T0_seg((b >> 3) & 7);
5614 gen_push_T0(s);
5615 break;
5616 case 0x07: /* pop es */
5617 case 0x17: /* pop ss */
5618 case 0x1f: /* pop ds */
5619 if (CODE64(s))
5620 goto illegal_op;
5621 reg = b >> 3;
5622 gen_pop_T0(s);
5623 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5624 gen_pop_update(s);
5625 if (reg == R_SS) {
5626 /* if reg == SS, inhibit interrupts/trace. */
5627 /* If several instructions disable interrupts, only the
5628 _first_ does it */
5629 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5630 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5631 s->tf = 0;
5632 }
5633 if (s->is_jmp) {
5634 gen_jmp_im(s->pc - s->cs_base);
5635 gen_eob(s);
5636 }
5637 break;
5638 case 0x1a1: /* pop fs */
5639 case 0x1a9: /* pop gs */
5640 gen_pop_T0(s);
5641 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
5642 gen_pop_update(s);
5643 if (s->is_jmp) {
5644 gen_jmp_im(s->pc - s->cs_base);
5645 gen_eob(s);
5646 }
5647 break;
5648
5649 /**************************/
5650 /* mov */
5651 case 0x88:
5652 case 0x89: /* mov Gv, Ev */
5653 if ((b & 1) == 0)
5654 ot = OT_BYTE;
5655 else
5656 ot = dflag + OT_WORD;
5657 modrm = ldub_code(s->pc++);
5658 reg = ((modrm >> 3) & 7) | rex_r;
5659
5660 /* generate a generic store */
5661 gen_ldst_modrm(s, modrm, ot, reg, 1);
5662 break;
5663 case 0xc6:
5664 case 0xc7: /* mov Ev, Iv */
5665 if ((b & 1) == 0)
5666 ot = OT_BYTE;
5667 else
5668 ot = dflag + OT_WORD;
5669 modrm = ldub_code(s->pc++);
5670 mod = (modrm >> 6) & 3;
5671 if (mod != 3) {
5672 s->rip_offset = insn_const_size(ot);
5673 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5674 }
5675 val = insn_get(s, ot);
5676 gen_op_movl_T0_im(val);
5677 if (mod != 3)
5678 gen_op_st_T0_A0(ot + s->mem_index);
5679 else
5680 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
5681 break;
5682 case 0x8a:
5683 case 0x8b: /* mov Ev, Gv */
5684#ifdef VBOX /* dtrace hot fix */
5685 if (prefixes & PREFIX_LOCK)
5686 goto illegal_op;
5687#endif
5688 if ((b & 1) == 0)
5689 ot = OT_BYTE;
5690 else
5691 ot = OT_WORD + dflag;
5692 modrm = ldub_code(s->pc++);
5693 reg = ((modrm >> 3) & 7) | rex_r;
5694
5695 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5696 gen_op_mov_reg_T0(ot, reg);
5697 break;
5698 case 0x8e: /* mov seg, Gv */
5699 modrm = ldub_code(s->pc++);
5700 reg = (modrm >> 3) & 7;
5701 if (reg >= 6 || reg == R_CS)
5702 goto illegal_op;
5703 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5704 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5705 if (reg == R_SS) {
5706 /* if reg == SS, inhibit interrupts/trace */
5707 /* If several instructions disable interrupts, only the
5708 _first_ does it */
5709 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5710 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5711 s->tf = 0;
5712 }
5713 if (s->is_jmp) {
5714 gen_jmp_im(s->pc - s->cs_base);
5715 gen_eob(s);
5716 }
5717 break;
5718 case 0x8c: /* mov Gv, seg */
5719 modrm = ldub_code(s->pc++);
5720 reg = (modrm >> 3) & 7;
5721 mod = (modrm >> 6) & 3;
5722 if (reg >= 6)
5723 goto illegal_op;
5724 gen_op_movl_T0_seg(reg);
5725 if (mod == 3)
5726 ot = OT_WORD + dflag;
5727 else
5728 ot = OT_WORD;
5729 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5730 break;
5731
5732 case 0x1b6: /* movzbS Gv, Eb */
5733 case 0x1b7: /* movzwS Gv, Ew */
5734 case 0x1be: /* movsbS Gv, Eb */
5735 case 0x1bf: /* movswS Gv, Ew */
5736 {
5737 int d_ot;
5738 /* d_ot is the size of the destination */
5739 d_ot = dflag + OT_WORD;
5740 /* ot is the size of the source */
5741 ot = (b & 1) + OT_BYTE;
5742 modrm = ldub_code(s->pc++);
5743 reg = ((modrm >> 3) & 7) | rex_r;
5744 mod = (modrm >> 6) & 3;
5745 rm = (modrm & 7) | REX_B(s);
5746
5747 if (mod == 3) {
5748 gen_op_mov_TN_reg(ot, 0, rm);
5749 switch(ot | (b & 8)) {
5750 case OT_BYTE:
5751 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
5752 break;
5753 case OT_BYTE | 8:
5754 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5755 break;
5756 case OT_WORD:
5757 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
5758 break;
5759 default:
5760 case OT_WORD | 8:
5761 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5762 break;
5763 }
5764 gen_op_mov_reg_T0(d_ot, reg);
5765 } else {
5766 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5767 if (b & 8) {
5768 gen_op_lds_T0_A0(ot + s->mem_index);
5769 } else {
5770 gen_op_ldu_T0_A0(ot + s->mem_index);
5771 }
5772 gen_op_mov_reg_T0(d_ot, reg);
5773 }
5774 }
5775 break;
5776
5777 case 0x8d: /* lea */
5778 ot = dflag + OT_WORD;
5779 modrm = ldub_code(s->pc++);
5780 mod = (modrm >> 6) & 3;
5781 if (mod == 3)
5782 goto illegal_op;
5783 reg = ((modrm >> 3) & 7) | rex_r;
5784 /* we must ensure that no segment is added */
5785 s->override = -1;
5786 val = s->addseg;
5787 s->addseg = 0;
5788 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5789 s->addseg = val;
5790 gen_op_mov_reg_A0(ot - OT_WORD, reg);
5791 break;
5792
5793 case 0xa0: /* mov EAX, Ov */
5794 case 0xa1:
5795 case 0xa2: /* mov Ov, EAX */
5796 case 0xa3:
5797 {
5798 target_ulong offset_addr;
5799
5800 if ((b & 1) == 0)
5801 ot = OT_BYTE;
5802 else
5803 ot = dflag + OT_WORD;
5804#ifdef TARGET_X86_64
5805 if (s->aflag == 2) {
5806 offset_addr = ldq_code(s->pc);
5807 s->pc += 8;
5808 gen_op_movq_A0_im(offset_addr);
5809 } else
5810#endif
5811 {
5812 if (s->aflag) {
5813 offset_addr = insn_get(s, OT_LONG);
5814 } else {
5815 offset_addr = insn_get(s, OT_WORD);
5816 }
5817 gen_op_movl_A0_im(offset_addr);
5818 }
5819 gen_add_A0_ds_seg(s);
5820 if ((b & 2) == 0) {
5821 gen_op_ld_T0_A0(ot + s->mem_index);
5822 gen_op_mov_reg_T0(ot, R_EAX);
5823 } else {
5824 gen_op_mov_TN_reg(ot, 0, R_EAX);
5825 gen_op_st_T0_A0(ot + s->mem_index);
5826 }
5827 }
5828 break;
5829 case 0xd7: /* xlat */
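 /* AL = mem[seg_base + rBX + zero_extend(AL)] */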
5830#ifdef TARGET_X86_64
5831 if (s->aflag == 2) {
5832 gen_op_movq_A0_reg(R_EBX);
5833 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5834 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5835 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5836 } else
5837#endif
5838 {
5839 gen_op_movl_A0_reg(R_EBX);
5840 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5841 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5842 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5843 if (s->aflag == 0)
5844 gen_op_andl_A0_ffff();
5845 else
5846 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
5847 }
5848 gen_add_A0_ds_seg(s);
5849 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
5850 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
5851 break;
5852 case 0xb0 ... 0xb7: /* mov R, Ib */
5853 val = insn_get(s, OT_BYTE);
5854 gen_op_movl_T0_im(val);
5855 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
5856 break;
5857 case 0xb8 ... 0xbf: /* mov R, Iv */
5858#ifdef TARGET_X86_64
5859 if (dflag == 2) {
5860 uint64_t tmp;
5861 /* 64 bit case */
5862 tmp = ldq_code(s->pc);
5863 s->pc += 8;
5864 reg = (b & 7) | REX_B(s);
5865 gen_movtl_T0_im(tmp);
5866 gen_op_mov_reg_T0(OT_QUAD, reg);
5867 } else
5868#endif
5869 {
5870 ot = dflag ? OT_LONG : OT_WORD;
5871 val = insn_get(s, ot);
5872 reg = (b & 7) | REX_B(s);
5873 gen_op_movl_T0_im(val);
5874 gen_op_mov_reg_T0(ot, reg);
5875 }
5876 break;
5877
5878 case 0x91 ... 0x97: /* xchg R, EAX */
5879 ot = dflag + OT_WORD;
5880 reg = (b & 7) | REX_B(s);
5881 rm = R_EAX;
5882 goto do_xchg_reg;
5883 case 0x86:
5884 case 0x87: /* xchg Ev, Gv */
5885 if ((b & 1) == 0)
5886 ot = OT_BYTE;
5887 else
5888 ot = dflag + OT_WORD;
5889 modrm = ldub_code(s->pc++);
5890 reg = ((modrm >> 3) & 7) | rex_r;
5891 mod = (modrm >> 6) & 3;
5892 if (mod == 3) {
5893 rm = (modrm & 7) | REX_B(s);
5894 do_xchg_reg:
5895 gen_op_mov_TN_reg(ot, 0, reg);
5896 gen_op_mov_TN_reg(ot, 1, rm);
5897 gen_op_mov_reg_T0(ot, rm);
5898 gen_op_mov_reg_T1(ot, reg);
5899 } else {
5900 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5901 gen_op_mov_TN_reg(ot, 0, reg);
5902 /* for xchg, lock is implicit */
5903 if (!(prefixes & PREFIX_LOCK))
5904 tcg_gen_helper_0_0(helper_lock);
5905 gen_op_ld_T1_A0(ot + s->mem_index);
5906 gen_op_st_T0_A0(ot + s->mem_index);
5907 if (!(prefixes & PREFIX_LOCK))
5908 tcg_gen_helper_0_0(helper_unlock);
5909 gen_op_mov_reg_T1(ot, reg);
5910 }
5911 break;
5912 case 0xc4: /* les Gv */
5913 if (CODE64(s))
5914 goto illegal_op;
5915 op = R_ES;
5916 goto do_lxx;
5917 case 0xc5: /* lds Gv */
5918 if (CODE64(s))
5919 goto illegal_op;
5920 op = R_DS;
5921 goto do_lxx;
5922 case 0x1b2: /* lss Gv */
5923 op = R_SS;
5924 goto do_lxx;
5925 case 0x1b4: /* lfs Gv */
5926 op = R_FS;
5927 goto do_lxx;
5928 case 0x1b5: /* lgs Gv */
5929 op = R_GS;
5930 do_lxx:
5931 ot = dflag ? OT_LONG : OT_WORD;
5932 modrm = ldub_code(s->pc++);
5933 reg = ((modrm >> 3) & 7) | rex_r;
5934 mod = (modrm >> 6) & 3;
5935 if (mod == 3)
5936 goto illegal_op;
5937 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5938 gen_op_ld_T1_A0(ot + s->mem_index);
5939 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5940 /* load the segment first to handle exceptions properly */
5941 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5942 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
5943 /* then put the data */
5944 gen_op_mov_reg_T1(ot, reg);
5945 if (s->is_jmp) {
5946 gen_jmp_im(s->pc - s->cs_base);
5947 gen_eob(s);
5948 }
5949 break;
5950
5951 /************************/
5952 /* shifts */
5953 case 0xc0:
5954 case 0xc1:
5955 /* shift Ev,Ib */
5956 shift = 2;
5957 grp2:
5958 {
5959 if ((b & 1) == 0)
5960 ot = OT_BYTE;
5961 else
5962 ot = dflag + OT_WORD;
5963
5964 modrm = ldub_code(s->pc++);
5965 mod = (modrm >> 6) & 3;
5966 op = (modrm >> 3) & 7;
5967
5968 if (mod != 3) {
5969 if (shift == 2) {
5970 s->rip_offset = 1;
5971 }
5972 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5973 opreg = OR_TMP0;
5974 } else {
5975 opreg = (modrm & 7) | REX_B(s);
5976 }
5977
5978 /* shift == 0: shift by CL, the simpler op */
5979 if (shift == 0) {
5980 gen_shift(s, op, ot, opreg, OR_ECX);
5981 } else {
5982 if (shift == 2) {
5983 shift = ldub_code(s->pc++);
5984 }
5985 gen_shifti(s, op, ot, opreg, shift);
5986 }
5987 }
5988 break;
5989 case 0xd0:
5990 case 0xd1:
5991 /* shift Ev,1 */
5992 shift = 1;
5993 goto grp2;
5994 case 0xd2:
5995 case 0xd3:
5996 /* shift Ev,cl */
5997 shift = 0;
5998 goto grp2;
5999
6000 case 0x1a4: /* shld imm */
6001 op = 0;
6002 shift = 1;
6003 goto do_shiftd;
6004 case 0x1a5: /* shld cl */
6005 op = 0;
6006 shift = 0;
6007 goto do_shiftd;
6008 case 0x1ac: /* shrd imm */
6009 op = 1;
6010 shift = 1;
6011 goto do_shiftd;
6012 case 0x1ad: /* shrd cl */
6013 op = 1;
6014 shift = 0;
6015 do_shiftd:
6016 ot = dflag + OT_WORD;
6017 modrm = ldub_code(s->pc++);
6018 mod = (modrm >> 6) & 3;
6019 rm = (modrm & 7) | REX_B(s);
6020 reg = ((modrm >> 3) & 7) | rex_r;
6021 if (mod != 3) {
6022 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6023 opreg = OR_TMP0;
6024 } else {
6025 opreg = rm;
6026 }
6027 gen_op_mov_TN_reg(ot, 1, reg);
6028
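 /* the shift count for shld/shrd goes in T3: an immediate byte or CL */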
6029 if (shift) {
6030 val = ldub_code(s->pc++);
6031 tcg_gen_movi_tl(cpu_T3, val);
6032 } else {
6033 tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
6034 }
6035 gen_shiftd_rm_T1_T3(s, ot, opreg, op);
6036 break;
6037
6038 /************************/
6039 /* floats */
6040 case 0xd8 ... 0xdf:
6041 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
6042 /* if CR0.EM or CR0.TS is set, generate an FPU exception */
6043 /* XXX: what to do if illegal op? */
6044 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6045 break;
6046 }
6047 modrm = ldub_code(s->pc++);
6048 mod = (modrm >> 6) & 3;
6049 rm = modrm & 7;
6050 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
6051 if (mod != 3) {
6052 /* memory op */
6053 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6054 switch(op) {
6055 case 0x00 ... 0x07: /* fxxxs */
6056 case 0x10 ... 0x17: /* fixxxl */
6057 case 0x20 ... 0x27: /* fxxxl */
6058 case 0x30 ... 0x37: /* fixxx */
6059 {
6060 int op1;
6061 op1 = op & 7;
6062
6063 switch(op >> 4) {
6064 case 0:
6065 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6066 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6067 tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
6068 break;
6069 case 1:
6070 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6071 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6072 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
6073 break;
6074 case 2:
6075 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6076 (s->mem_index >> 2) - 1);
6077 tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
6078 break;
6079 case 3:
6080 default:
6081 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
6082 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6083 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
6084 break;
6085 }
6086
6087 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
6088 if (op1 == 3) {
6089 /* fcomp needs pop */
6090 tcg_gen_helper_0_0(helper_fpop);
6091 }
6092 }
6093 break;
6094 case 0x08: /* flds */
6095 case 0x0a: /* fsts */
6096 case 0x0b: /* fstps */
6097 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
6098 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
6099 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
6100 switch(op & 7) {
6101 case 0:
6102 switch(op >> 4) {
6103 case 0:
6104 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6105 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6106 tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
6107 break;
6108 case 1:
6109 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6110 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6111 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
6112 break;
6113 case 2:
6114 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6115 (s->mem_index >> 2) - 1);
6116 tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
6117 break;
6118 case 3:
6119 default:
6120 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
6121 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6122 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
6123 break;
6124 }
6125 break;
6126 case 1:
6127 /* XXX: the corresponding CPUID bit (SSE3 for FISTTP) must be tested! */
6128 switch(op >> 4) {
6129 case 1:
6130 tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
6131 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6132 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6133 break;
6134 case 2:
6135 tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
6136 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6137 (s->mem_index >> 2) - 1);
6138 break;
6139 case 3:
6140 default:
6141 tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
6142 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6143 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6144 break;
6145 }
6146 tcg_gen_helper_0_0(helper_fpop);
6147 break;
6148 default:
6149 switch(op >> 4) {
6150 case 0:
6151 tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
6152 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6153 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6154 break;
6155 case 1:
6156 tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
6157 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6158 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6159 break;
6160 case 2:
6161 tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
6162 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6163 (s->mem_index >> 2) - 1);
6164 break;
6165 case 3:
6166 default:
6167 tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
6168 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6169 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6170 break;
6171 }
6172 if ((op & 7) == 3)
6173 tcg_gen_helper_0_0(helper_fpop);
6174 break;
6175 }
6176 break;
6177 case 0x0c: /* fldenv mem */
6178 if (s->cc_op != CC_OP_DYNAMIC)
6179 gen_op_set_cc_op(s->cc_op);
6180 gen_jmp_im(pc_start - s->cs_base);
6181 tcg_gen_helper_0_2(helper_fldenv,
6182 cpu_A0, tcg_const_i32(s->dflag));
6183 break;
6184 case 0x0d: /* fldcw mem */
6185 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
6186 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6187 tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
6188 break;
6189 case 0x0e: /* fnstenv mem */
6190 if (s->cc_op != CC_OP_DYNAMIC)
6191 gen_op_set_cc_op(s->cc_op);
6192 gen_jmp_im(pc_start - s->cs_base);
6193 tcg_gen_helper_0_2(helper_fstenv,
6194 cpu_A0, tcg_const_i32(s->dflag));
6195 break;
6196 case 0x0f: /* fnstcw mem */
6197 tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
6198 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6199 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6200 break;
6201 case 0x1d: /* fldt mem */
6202 if (s->cc_op != CC_OP_DYNAMIC)
6203 gen_op_set_cc_op(s->cc_op);
6204 gen_jmp_im(pc_start - s->cs_base);
6205 tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
6206 break;
6207 case 0x1f: /* fstpt mem */
6208 if (s->cc_op != CC_OP_DYNAMIC)
6209 gen_op_set_cc_op(s->cc_op);
6210 gen_jmp_im(pc_start - s->cs_base);
6211 tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
6212 tcg_gen_helper_0_0(helper_fpop);
6213 break;
6214 case 0x2c: /* frstor mem */
6215 if (s->cc_op != CC_OP_DYNAMIC)
6216 gen_op_set_cc_op(s->cc_op);
6217 gen_jmp_im(pc_start - s->cs_base);
6218 tcg_gen_helper_0_2(helper_frstor,
6219 cpu_A0, tcg_const_i32(s->dflag));
6220 break;
6221 case 0x2e: /* fnsave mem */
6222 if (s->cc_op != CC_OP_DYNAMIC)
6223 gen_op_set_cc_op(s->cc_op);
6224 gen_jmp_im(pc_start - s->cs_base);
6225 tcg_gen_helper_0_2(helper_fsave,
6226 cpu_A0, tcg_const_i32(s->dflag));
6227 break;
6228 case 0x2f: /* fnstsw mem */
6229 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
6230 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6231 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6232 break;
6233 case 0x3c: /* fbld */
6234 if (s->cc_op != CC_OP_DYNAMIC)
6235 gen_op_set_cc_op(s->cc_op);
6236 gen_jmp_im(pc_start - s->cs_base);
6237 tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
6238 break;
6239 case 0x3e: /* fbstp */
6240 if (s->cc_op != CC_OP_DYNAMIC)
6241 gen_op_set_cc_op(s->cc_op);
6242 gen_jmp_im(pc_start - s->cs_base);
6243 tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
6244 tcg_gen_helper_0_0(helper_fpop);
6245 break;
6246 case 0x3d: /* fildll */
6247 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6248 (s->mem_index >> 2) - 1);
6249 tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
6250 break;
6251 case 0x3f: /* fistpll */
6252 tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
6253 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6254 (s->mem_index >> 2) - 1);
6255 tcg_gen_helper_0_0(helper_fpop);
6256 break;
6257 default:
6258 goto illegal_op;
6259 }
6260 } else {
6261 /* register float ops */
6262 opreg = rm;
6263
6264 switch(op) {
6265 case 0x08: /* fld sti */
6266 tcg_gen_helper_0_0(helper_fpush);
6267 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
6268 break;
6269 case 0x09: /* fxchg sti */
6270 case 0x29: /* fxchg4 sti, undocumented op */
6271 case 0x39: /* fxchg7 sti, undocumented op */
6272 tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
6273 break;
6274 case 0x0a: /* grp d9/2 */
6275 switch(rm) {
6276 case 0: /* fnop */
6277 /* check exceptions (FreeBSD FPU probe) */
6278 if (s->cc_op != CC_OP_DYNAMIC)
6279 gen_op_set_cc_op(s->cc_op);
6280 gen_jmp_im(pc_start - s->cs_base);
6281 tcg_gen_helper_0_0(helper_fwait);
6282 break;
6283 default:
6284 goto illegal_op;
6285 }
6286 break;
6287 case 0x0c: /* grp d9/4 */
6288 switch(rm) {
6289 case 0: /* fchs */
6290 tcg_gen_helper_0_0(helper_fchs_ST0);
6291 break;
6292 case 1: /* fabs */
6293 tcg_gen_helper_0_0(helper_fabs_ST0);
6294 break;
6295 case 4: /* ftst */
6296 tcg_gen_helper_0_0(helper_fldz_FT0);
6297 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6298 break;
6299 case 5: /* fxam */
6300 tcg_gen_helper_0_0(helper_fxam_ST0);
6301 break;
6302 default:
6303 goto illegal_op;
6304 }
6305 break;
6306 case 0x0d: /* grp d9/5 */
6307 {
6308 switch(rm) {
6309 case 0:
6310 tcg_gen_helper_0_0(helper_fpush);
6311 tcg_gen_helper_0_0(helper_fld1_ST0);
6312 break;
6313 case 1:
6314 tcg_gen_helper_0_0(helper_fpush);
6315 tcg_gen_helper_0_0(helper_fldl2t_ST0);
6316 break;
6317 case 2:
6318 tcg_gen_helper_0_0(helper_fpush);
6319 tcg_gen_helper_0_0(helper_fldl2e_ST0);
6320 break;
6321 case 3:
6322 tcg_gen_helper_0_0(helper_fpush);
6323 tcg_gen_helper_0_0(helper_fldpi_ST0);
6324 break;
6325 case 4:
6326 tcg_gen_helper_0_0(helper_fpush);
6327 tcg_gen_helper_0_0(helper_fldlg2_ST0);
6328 break;
6329 case 5:
6330 tcg_gen_helper_0_0(helper_fpush);
6331 tcg_gen_helper_0_0(helper_fldln2_ST0);
6332 break;
6333 case 6:
6334 tcg_gen_helper_0_0(helper_fpush);
6335 tcg_gen_helper_0_0(helper_fldz_ST0);
6336 break;
6337 default:
6338 goto illegal_op;
6339 }
6340 }
6341 break;
6342 case 0x0e: /* grp d9/6 */
6343 switch(rm) {
6344 case 0: /* f2xm1 */
6345 tcg_gen_helper_0_0(helper_f2xm1);
6346 break;
6347 case 1: /* fyl2x */
6348 tcg_gen_helper_0_0(helper_fyl2x);
6349 break;
6350 case 2: /* fptan */
6351 tcg_gen_helper_0_0(helper_fptan);
6352 break;
6353 case 3: /* fpatan */
6354 tcg_gen_helper_0_0(helper_fpatan);
6355 break;
6356 case 4: /* fxtract */
6357 tcg_gen_helper_0_0(helper_fxtract);
6358 break;
6359 case 5: /* fprem1 */
6360 tcg_gen_helper_0_0(helper_fprem1);
6361 break;
6362 case 6: /* fdecstp */
6363 tcg_gen_helper_0_0(helper_fdecstp);
6364 break;
6365 default:
6366 case 7: /* fincstp */
6367 tcg_gen_helper_0_0(helper_fincstp);
6368 break;
6369 }
6370 break;
6371 case 0x0f: /* grp d9/7 */
6372 switch(rm) {
6373 case 0: /* fprem */
6374 tcg_gen_helper_0_0(helper_fprem);
6375 break;
6376 case 1: /* fyl2xp1 */
6377 tcg_gen_helper_0_0(helper_fyl2xp1);
6378 break;
6379 case 2: /* fsqrt */
6380 tcg_gen_helper_0_0(helper_fsqrt);
6381 break;
6382 case 3: /* fsincos */
6383 tcg_gen_helper_0_0(helper_fsincos);
6384 break;
6385 case 5: /* fscale */
6386 tcg_gen_helper_0_0(helper_fscale);
6387 break;
6388 case 4: /* frndint */
6389 tcg_gen_helper_0_0(helper_frndint);
6390 break;
6391 case 6: /* fsin */
6392 tcg_gen_helper_0_0(helper_fsin);
6393 break;
6394 default:
6395 case 7: /* fcos */
6396 tcg_gen_helper_0_0(helper_fcos);
6397 break;
6398 }
6399 break;
6400 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
6401 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
6402 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
6403 {
6404 int op1;
6405
6406 op1 = op & 7;
6407 if (op >= 0x20) {
6408 tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
6409 if (op >= 0x30)
6410 tcg_gen_helper_0_0(helper_fpop);
6411 } else {
6412 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6413 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
6414 }
6415 }
6416 break;
6417 case 0x02: /* fcom */
6418 case 0x22: /* fcom2, undocumented op */
6419 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6420 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6421 break;
6422 case 0x03: /* fcomp */
6423 case 0x23: /* fcomp3, undocumented op */
6424 case 0x32: /* fcomp5, undocumented op */
6425 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6426 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6427 tcg_gen_helper_0_0(helper_fpop);
6428 break;
6429 case 0x15: /* da/5 */
6430 switch(rm) {
6431 case 1: /* fucompp */
6432 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
6433 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6434 tcg_gen_helper_0_0(helper_fpop);
6435 tcg_gen_helper_0_0(helper_fpop);
6436 break;
6437 default:
6438 goto illegal_op;
6439 }
6440 break;
6441 case 0x1c:
6442 switch(rm) {
6443 case 0: /* feni (287 only, just do nop here) */
6444 break;
6445 case 1: /* fdisi (287 only, just do nop here) */
6446 break;
6447 case 2: /* fclex */
6448 tcg_gen_helper_0_0(helper_fclex);
6449 break;
6450 case 3: /* fninit */
6451 tcg_gen_helper_0_0(helper_fninit);
6452 break;
6453 case 4: /* fsetpm (287 only, just do nop here) */
6454 break;
6455 default:
6456 goto illegal_op;
6457 }
6458 break;
6459 case 0x1d: /* fucomi */
6460 if (s->cc_op != CC_OP_DYNAMIC)
6461 gen_op_set_cc_op(s->cc_op);
6462 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6463 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
6464 s->cc_op = CC_OP_EFLAGS;
6465 break;
6466 case 0x1e: /* fcomi */
6467 if (s->cc_op != CC_OP_DYNAMIC)
6468 gen_op_set_cc_op(s->cc_op);
6469 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6470 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
6471 s->cc_op = CC_OP_EFLAGS;
6472 break;
6473 case 0x28: /* ffree sti */
6474 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
6475 break;
6476 case 0x2a: /* fst sti */
6477 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
6478 break;
6479 case 0x2b: /* fstp sti */
6480 case 0x0b: /* fstp1 sti, undocumented op */
6481 case 0x3a: /* fstp8 sti, undocumented op */
6482 case 0x3b: /* fstp9 sti, undocumented op */
6483 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
6484 tcg_gen_helper_0_0(helper_fpop);
6485 break;
6486 case 0x2c: /* fucom st(i) */
6487 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6488 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6489 break;
6490 case 0x2d: /* fucomp st(i) */
6491 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6492 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6493 tcg_gen_helper_0_0(helper_fpop);
6494 break;
6495 case 0x33: /* de/3 */
6496 switch(rm) {
6497 case 1: /* fcompp */
6498 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
6499 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6500 tcg_gen_helper_0_0(helper_fpop);
6501 tcg_gen_helper_0_0(helper_fpop);
6502 break;
6503 default:
6504 goto illegal_op;
6505 }
6506 break;
6507 case 0x38: /* ffreep sti, undocumented op */
6508 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
6509 tcg_gen_helper_0_0(helper_fpop);
6510 break;
6511 case 0x3c: /* df/4 */
6512 switch(rm) {
6513 case 0:
6514 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
6515 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6516 gen_op_mov_reg_T0(OT_WORD, R_EAX);
6517 break;
6518 default:
6519 goto illegal_op;
6520 }
6521 break;
6522 case 0x3d: /* fucomip */
6523 if (s->cc_op != CC_OP_DYNAMIC)
6524 gen_op_set_cc_op(s->cc_op);
6525 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6526 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
6527 tcg_gen_helper_0_0(helper_fpop);
6528 s->cc_op = CC_OP_EFLAGS;
6529 break;
6530 case 0x3e: /* fcomip */
6531 if (s->cc_op != CC_OP_DYNAMIC)
6532 gen_op_set_cc_op(s->cc_op);
6533 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6534 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
6535 tcg_gen_helper_0_0(helper_fpop);
6536 s->cc_op = CC_OP_EFLAGS;
6537 break;
6538 case 0x10 ... 0x13: /* fcmovxx */
6539 case 0x18 ... 0x1b:
6540 {
6541 int op1, l1;
6542 static const uint8_t fcmov_cc[4] = {
6543 (JCC_B << 1),
6544 (JCC_Z << 1),
6545 (JCC_BE << 1),
6546 (JCC_P << 1),
6547 };
6548 op1 = fcmov_cc[op & 3] | (((op >> 3) & 1) ^ 1);
6549 l1 = gen_new_label();
6550 gen_jcc1(s, s->cc_op, op1, l1);
6551 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
6552 gen_set_label(l1);
6553 }
6554 break;
6555 default:
6556 goto illegal_op;
6557 }
6558 }
6559 break;
6560 /************************/
6561 /* string ops */
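     /* Under a REP/REPZ/REPNZ prefix the gen_repz_* generators emit the
        whole ECX-counted loop; otherwise a single iteration is emitted
        inline. */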
6562
6563 case 0xa4: /* movsS */
6564 case 0xa5:
6565 if ((b & 1) == 0)
6566 ot = OT_BYTE;
6567 else
6568 ot = dflag + OT_WORD;
6569
6570 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6571 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6572 } else {
6573 gen_movs(s, ot);
6574 }
6575 break;
6576
6577 case 0xaa: /* stosS */
6578 case 0xab:
6579 if ((b & 1) == 0)
6580 ot = OT_BYTE;
6581 else
6582 ot = dflag + OT_WORD;
6583
6584 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6585 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6586 } else {
6587 gen_stos(s, ot);
6588 }
6589 break;
6590 case 0xac: /* lodsS */
6591 case 0xad:
6592 if ((b & 1) == 0)
6593 ot = OT_BYTE;
6594 else
6595 ot = dflag + OT_WORD;
6596 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6597 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6598 } else {
6599 gen_lods(s, ot);
6600 }
6601 break;
6602 case 0xae: /* scasS */
6603 case 0xaf:
6604 if ((b & 1) == 0)
6605 ot = OT_BYTE;
6606 else
6607 ot = dflag + OT_WORD;
6608 if (prefixes & PREFIX_REPNZ) {
6609 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6610 } else if (prefixes & PREFIX_REPZ) {
6611 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6612 } else {
6613 gen_scas(s, ot);
6614 s->cc_op = CC_OP_SUBB + ot;
6615 }
6616 break;
6617
6618 case 0xa6: /* cmpsS */
6619 case 0xa7:
6620 if ((b & 1) == 0)
6621 ot = OT_BYTE;
6622 else
6623 ot = dflag + OT_WORD;
6624 if (prefixes & PREFIX_REPNZ) {
6625 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6626 } else if (prefixes & PREFIX_REPZ) {
6627 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6628 } else {
6629 gen_cmps(s, ot);
6630 s->cc_op = CC_OP_SUBB + ot;
6631 }
6632 break;
6633 case 0x6c: /* insS */
6634 case 0x6d:
6635 if ((b & 1) == 0)
6636 ot = OT_BYTE;
6637 else
6638 ot = dflag ? OT_LONG : OT_WORD;
6639 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6640 gen_op_andl_T0_ffff();
6641 gen_check_io(s, ot, pc_start - s->cs_base,
6642 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
6643 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6644 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6645 } else {
6646 gen_ins(s, ot);
6647 if (use_icount) {
6648 gen_jmp(s, s->pc - s->cs_base);
6649 }
6650 }
6651 break;
6652 case 0x6e: /* outsS */
6653 case 0x6f:
6654 if ((b & 1) == 0)
6655 ot = OT_BYTE;
6656 else
6657 ot = dflag ? OT_LONG : OT_WORD;
6658 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6659 gen_op_andl_T0_ffff();
6660 gen_check_io(s, ot, pc_start - s->cs_base,
6661 svm_is_rep(prefixes) | 4);
6662 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6663 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6664 } else {
6665 gen_outs(s, ot);
6666 if (use_icount) {
6667 gen_jmp(s, s->pc - s->cs_base);
6668 }
6669 }
6670 break;
6671
6672 /************************/
6673 /* port I/O */
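     /* IN/OUT with an imm8 port or DX: gen_check_io emits the IOPL/TSS
        permission check, and with icount enabled the I/O helper is
        bracketed by gen_io_start()/gen_io_end() with the TB ended right
        after. */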
6674
6675 case 0xe4:
6676 case 0xe5:
6677 if ((b & 1) == 0)
6678 ot = OT_BYTE;
6679 else
6680 ot = dflag ? OT_LONG : OT_WORD;
6681 val = ldub_code(s->pc++);
6682 gen_op_movl_T0_im(val);
6683 gen_check_io(s, ot, pc_start - s->cs_base,
6684 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6685 if (use_icount)
6686 gen_io_start();
6687 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6688 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
6689 gen_op_mov_reg_T1(ot, R_EAX);
6690 if (use_icount) {
6691 gen_io_end();
6692 gen_jmp(s, s->pc - s->cs_base);
6693 }
6694 break;
6695 case 0xe6:
6696 case 0xe7:
6697 if ((b & 1) == 0)
6698 ot = OT_BYTE;
6699 else
6700 ot = dflag ? OT_LONG : OT_WORD;
6701 val = ldub_code(s->pc++);
6702 gen_op_movl_T0_im(val);
6703 gen_check_io(s, ot, pc_start - s->cs_base,
6704 svm_is_rep(prefixes));
6705#ifdef VBOX /* bird: linux is writing to this port for delaying I/O. */
6706 if (val == 0x80)
6707 break;
6708#endif /* VBOX */
6709 gen_op_mov_TN_reg(ot, 1, R_EAX);
6710
6711 if (use_icount)
6712 gen_io_start();
6713 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6714 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6715 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6716 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6717 if (use_icount) {
6718 gen_io_end();
6719 gen_jmp(s, s->pc - s->cs_base);
6720 }
6721 break;
6722 case 0xec:
6723 case 0xed:
6724 if ((b & 1) == 0)
6725 ot = OT_BYTE;
6726 else
6727 ot = dflag ? OT_LONG : OT_WORD;
6728 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6729 gen_op_andl_T0_ffff();
6730 gen_check_io(s, ot, pc_start - s->cs_base,
6731 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6732 if (use_icount)
6733 gen_io_start();
6734 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6735 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
6736 gen_op_mov_reg_T1(ot, R_EAX);
6737 if (use_icount) {
6738 gen_io_end();
6739 gen_jmp(s, s->pc - s->cs_base);
6740 }
6741 break;
6742 case 0xee:
6743 case 0xef:
6744 if ((b & 1) == 0)
6745 ot = OT_BYTE;
6746 else
6747 ot = dflag ? OT_LONG : OT_WORD;
6748 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6749 gen_op_andl_T0_ffff();
6750 gen_check_io(s, ot, pc_start - s->cs_base,
6751 svm_is_rep(prefixes));
6752 gen_op_mov_TN_reg(ot, 1, R_EAX);
6753
6754 if (use_icount)
6755 gen_io_start();
6756 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6757 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6758 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6759 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6760 if (use_icount) {
6761 gen_io_end();
6762 gen_jmp(s, s->pc - s->cs_base);
6763 }
6764 break;
6765
6766 /************************/
6767 /* control */
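     /* Returns: a stack slot is (2 << dflag) bytes, so RET imm releases
        the return address plus the immediate, and LRET pops two slots
        (offset and selector); the protected-mode LRET/IRET paths go
        through helpers that revalidate the selectors. */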
6768 case 0xc2: /* ret im */
6769 val = ldsw_code(s->pc);
6770 s->pc += 2;
6771 gen_pop_T0(s);
6772 if (CODE64(s) && s->dflag)
6773 s->dflag = 2;
6774 gen_stack_update(s, val + (2 << s->dflag));
6775 if (s->dflag == 0)
6776 gen_op_andl_T0_ffff();
6777 gen_op_jmp_T0();
6778 gen_eob(s);
6779 break;
6780 case 0xc3: /* ret */
6781 gen_pop_T0(s);
6782 gen_pop_update(s);
6783 if (s->dflag == 0)
6784 gen_op_andl_T0_ffff();
6785 gen_op_jmp_T0();
6786 gen_eob(s);
6787 break;
6788 case 0xca: /* lret im */
6789 val = ldsw_code(s->pc);
6790 s->pc += 2;
6791 do_lret:
6792 if (s->pe && !s->vm86) {
6793 if (s->cc_op != CC_OP_DYNAMIC)
6794 gen_op_set_cc_op(s->cc_op);
6795 gen_jmp_im(pc_start - s->cs_base);
6796 tcg_gen_helper_0_2(helper_lret_protected,
6797 tcg_const_i32(s->dflag),
6798 tcg_const_i32(val));
6799 } else {
6800 gen_stack_A0(s);
6801 /* pop offset */
6802 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6803 if (s->dflag == 0)
6804 gen_op_andl_T0_ffff();
6805 /* NOTE: keeping EIP updated is not a problem in case of
6806 exception */
6807 gen_op_jmp_T0();
6808 /* pop selector */
6809 gen_op_addl_A0_im(2 << s->dflag);
6810 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6811 gen_op_movl_seg_T0_vm(R_CS);
6812 /* add stack offset */
6813 gen_stack_update(s, val + (4 << s->dflag));
6814 }
6815 gen_eob(s);
6816 break;
6817 case 0xcb: /* lret */
6818 val = 0;
6819 goto do_lret;
6820 case 0xcf: /* iret */
6821 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET);
6822 if (!s->pe) {
6823 /* real mode */
6824 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6825 s->cc_op = CC_OP_EFLAGS;
6826 } else if (s->vm86) {
6827#ifdef VBOX
6828 if (s->iopl != 3 && (!s->vme || s->dflag)) {
6829#else
6830 if (s->iopl != 3) {
6831#endif
6832 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6833 } else {
6834 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6835 s->cc_op = CC_OP_EFLAGS;
6836 }
6837 } else {
6838 if (s->cc_op != CC_OP_DYNAMIC)
6839 gen_op_set_cc_op(s->cc_op);
6840 gen_jmp_im(pc_start - s->cs_base);
6841 tcg_gen_helper_0_2(helper_iret_protected,
6842 tcg_const_i32(s->dflag),
6843 tcg_const_i32(s->pc - s->cs_base));
6844 s->cc_op = CC_OP_EFLAGS;
6845 }
6846 gen_eob(s);
6847 break;
6848 case 0xe8: /* call im */
6849 {
6850 if (dflag)
6851 tval = (int32_t)insn_get(s, OT_LONG);
6852 else
6853 tval = (int16_t)insn_get(s, OT_WORD);
6854 next_eip = s->pc - s->cs_base;
6855 tval += next_eip;
6856 if (s->dflag == 0)
6857 tval &= 0xffff;
6858 gen_movtl_T0_im(next_eip);
6859 gen_push_T0(s);
6860 gen_jmp(s, tval);
6861 }
6862 break;
6863 case 0x9a: /* lcall im */
6864 {
6865 unsigned int selector, offset;
6866
6867 if (CODE64(s))
6868 goto illegal_op;
6869 ot = dflag ? OT_LONG : OT_WORD;
6870 offset = insn_get(s, ot);
6871 selector = insn_get(s, OT_WORD);
6872
6873 gen_op_movl_T0_im(selector);
6874 gen_op_movl_T1_imu(offset);
6875 }
6876 goto do_lcall;
6877 case 0xe9: /* jmp im */
6878 if (dflag)
6879 tval = (int32_t)insn_get(s, OT_LONG);
6880 else
6881 tval = (int16_t)insn_get(s, OT_WORD);
6882 tval += s->pc - s->cs_base;
6883 if (s->dflag == 0)
6884 tval &= 0xffff;
6885 else if(!CODE64(s))
6886 tval &= 0xffffffff;
6887 gen_jmp(s, tval);
6888 break;
6889 case 0xea: /* ljmp im */
6890 {
6891 unsigned int selector, offset;
6892
6893 if (CODE64(s))
6894 goto illegal_op;
6895 ot = dflag ? OT_LONG : OT_WORD;
6896 offset = insn_get(s, ot);
6897 selector = insn_get(s, OT_WORD);
6898
6899 gen_op_movl_T0_im(selector);
6900 gen_op_movl_T1_imu(offset);
6901 }
6902 goto do_ljmp;
6903 case 0xeb: /* jmp Jb */
6904 tval = (int8_t)insn_get(s, OT_BYTE);
6905 tval += s->pc - s->cs_base;
6906 if (s->dflag == 0)
6907 tval &= 0xffff;
6908 gen_jmp(s, tval);
6909 break;
6910 case 0x70 ... 0x7f: /* jcc Jb */
6911 tval = (int8_t)insn_get(s, OT_BYTE);
6912 goto do_jcc;
6913 case 0x180 ... 0x18f: /* jcc Jv */
6914 if (dflag) {
6915 tval = (int32_t)insn_get(s, OT_LONG);
6916 } else {
6917 tval = (int16_t)insn_get(s, OT_WORD);
6918 }
6919 do_jcc:
6920 next_eip = s->pc - s->cs_base;
6921 tval += next_eip;
6922 if (s->dflag == 0)
6923 tval &= 0xffff;
6924 gen_jcc(s, b, tval, next_eip);
6925 break;
6926
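     /* SETcc stores the decoded condition as a byte; CMOVcc branches
        around the move with gen_jcc1(b ^ 1) and, for 32-bit operands in
        64-bit mode, zero-extends the upper half explicitly. */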
6927 case 0x190 ... 0x19f: /* setcc Gv */
6928 modrm = ldub_code(s->pc++);
6929 gen_setcc(s, b);
6930 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
6931 break;
6932 case 0x140 ... 0x14f: /* cmov Gv, Ev */
6933 {
6934 int l1;
6935 TCGv t0;
6936
6937 ot = dflag + OT_WORD;
6938 modrm = ldub_code(s->pc++);
6939 reg = ((modrm >> 3) & 7) | rex_r;
6940 mod = (modrm >> 6) & 3;
6941 t0 = tcg_temp_local_new(TCG_TYPE_TL);
6942 if (mod != 3) {
6943 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6944 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
6945 } else {
6946 rm = (modrm & 7) | REX_B(s);
6947 gen_op_mov_v_reg(ot, t0, rm);
6948 }
6949#ifdef TARGET_X86_64
6950 if (ot == OT_LONG) {
6951 /* XXX: specific Intel behaviour ? */
6952 l1 = gen_new_label();
6953 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6954 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
6955 gen_set_label(l1);
6956 tcg_gen_movi_tl(cpu_tmp0, 0);
6957 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
6958 } else
6959#endif
6960 {
6961 l1 = gen_new_label();
6962 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6963 gen_op_mov_reg_v(ot, reg, t0);
6964 gen_set_label(l1);
6965 }
6966 tcg_temp_free(t0);
6967 }
6968 break;
6969
6970 /************************/
6971 /* flags */
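     /* Flag instructions flush the lazy cc state first, materialize
        EFLAGS (helper_read_eflags or gen_compute_eflags) and leave cc_op
        as CC_OP_EFLAGS; CLD/STD just store DF directly. */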
6972 case 0x9c: /* pushf */
6973 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF);
6974#ifdef VBOX
6975 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
6976#else
6977 if (s->vm86 && s->iopl != 3) {
6978#endif
6979 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6980 } else {
6981 if (s->cc_op != CC_OP_DYNAMIC)
6982 gen_op_set_cc_op(s->cc_op);
6983#ifdef VBOX
6984 if (s->vm86 && s->vme && s->iopl != 3)
6985 tcg_gen_helper_1_0(helper_read_eflags_vme, cpu_T[0]);
6986 else
6987#endif
6988 tcg_gen_helper_1_0(helper_read_eflags, cpu_T[0]);
6989 gen_push_T0(s);
6990 }
6991 break;
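     /* POPF: the writable EFLAGS mask depends on privilege: CPL 0 may
        also change IOPL, CPL <= IOPL may change IF, anything else only
        gets TF/AC/ID/NT (plus the VME path under VBOX). */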
6992 case 0x9d: /* popf */
6993 gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF);
6994#ifdef VBOX
6995 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
6996#else
6997 if (s->vm86 && s->iopl != 3) {
6998#endif
6999 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7000 } else {
7001 gen_pop_T0(s);
7002 if (s->cpl == 0) {
7003 if (s->dflag) {
7004 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7005 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
7006 } else {
7007 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7008 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
7009 }
7010 } else {
7011 if (s->cpl <= s->iopl) {
7012 if (s->dflag) {
7013 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7014 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
7015 } else {
7016 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7017 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
7018 }
7019 } else {
7020 if (s->dflag) {
7021 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7022 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
7023 } else {
7024#ifdef VBOX
7025 if (s->vm86 && s->vme)
7026 tcg_gen_helper_0_1(helper_write_eflags_vme, cpu_T[0]);
7027 else
7028#endif
7029 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7030 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
7031 }
7032 }
7033 }
7034 gen_pop_update(s);
7035 s->cc_op = CC_OP_EFLAGS;
7036 /* abort translation because TF flag may change */
7037 gen_jmp_im(s->pc - s->cs_base);
7038 gen_eob(s);
7039 }
7040 break;
7041 case 0x9e: /* sahf */
7042 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
7043 goto illegal_op;
7044 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
7045 if (s->cc_op != CC_OP_DYNAMIC)
7046 gen_op_set_cc_op(s->cc_op);
7047 gen_compute_eflags(cpu_cc_src);
7048 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
7049 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
7050 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
7051 s->cc_op = CC_OP_EFLAGS;
7052 break;
7053 case 0x9f: /* lahf */
7054 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
7055 goto illegal_op;
7056 if (s->cc_op != CC_OP_DYNAMIC)
7057 gen_op_set_cc_op(s->cc_op);
7058 gen_compute_eflags(cpu_T[0]);
7059 /* Note: gen_compute_eflags() only gives the condition codes */
7060 tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);
7061 gen_op_mov_reg_T0(OT_BYTE, R_AH);
7062 break;
7063 case 0xf5: /* cmc */
7064 if (s->cc_op != CC_OP_DYNAMIC)
7065 gen_op_set_cc_op(s->cc_op);
7066 gen_compute_eflags(cpu_cc_src);
7067 tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
7068 s->cc_op = CC_OP_EFLAGS;
7069 break;
7070 case 0xf8: /* clc */
7071 if (s->cc_op != CC_OP_DYNAMIC)
7072 gen_op_set_cc_op(s->cc_op);
7073 gen_compute_eflags(cpu_cc_src);
7074 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
7075 s->cc_op = CC_OP_EFLAGS;
7076 break;
7077 case 0xf9: /* stc */
7078 if (s->cc_op != CC_OP_DYNAMIC)
7079 gen_op_set_cc_op(s->cc_op);
7080 gen_compute_eflags(cpu_cc_src);
7081 tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
7082 s->cc_op = CC_OP_EFLAGS;
7083 break;
7084 case 0xfc: /* cld */
7085 tcg_gen_movi_i32(cpu_tmp2_i32, 1);
7086 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
7087 break;
7088 case 0xfd: /* std */
7089 tcg_gen_movi_i32(cpu_tmp2_i32, -1);
7090 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
7091 break;
7092
7093 /************************/
7094 /* bit operations */
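     /* BT/BTS/BTR/BTC: for memory operands the bit index is split into a
        signed element displacement (added to A0) and a low bit offset;
        CF is derived from the shifted-out bit left in cpu_cc_src. */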
7095 case 0x1ba: /* bt/bts/btr/btc Gv, im */
7096 ot = dflag + OT_WORD;
7097 modrm = ldub_code(s->pc++);
7098 op = (modrm >> 3) & 7;
7099 mod = (modrm >> 6) & 3;
7100 rm = (modrm & 7) | REX_B(s);
7101 if (mod != 3) {
7102 s->rip_offset = 1;
7103 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7104 gen_op_ld_T0_A0(ot + s->mem_index);
7105 } else {
7106 gen_op_mov_TN_reg(ot, 0, rm);
7107 }
7108 /* load bit index */
7109 val = ldub_code(s->pc++);
7110 gen_op_movl_T1_im(val);
7111 if (op < 4)
7112 goto illegal_op;
7113 op -= 4;
7114 goto bt_op;
7115 case 0x1a3: /* bt Gv, Ev */
7116 op = 0;
7117 goto do_btx;
7118 case 0x1ab: /* bts */
7119 op = 1;
7120 goto do_btx;
7121 case 0x1b3: /* btr */
7122 op = 2;
7123 goto do_btx;
7124 case 0x1bb: /* btc */
7125 op = 3;
7126 do_btx:
7127 ot = dflag + OT_WORD;
7128 modrm = ldub_code(s->pc++);
7129 reg = ((modrm >> 3) & 7) | rex_r;
7130 mod = (modrm >> 6) & 3;
7131 rm = (modrm & 7) | REX_B(s);
7132 gen_op_mov_TN_reg(OT_LONG, 1, reg);
7133 if (mod != 3) {
7134 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7135 /* specific case: fold the upper bits of the bit index into the address as a displacement */
7136 gen_exts(ot, cpu_T[1]);
7137 tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
7138 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
7139 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
7140 gen_op_ld_T0_A0(ot + s->mem_index);
7141 } else {
7142 gen_op_mov_TN_reg(ot, 0, rm);
7143 }
7144 bt_op:
7145 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
7146 switch(op) {
7147 case 0:
7148 tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
7149 tcg_gen_movi_tl(cpu_cc_dst, 0);
7150 break;
7151 case 1:
7152 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7153 tcg_gen_movi_tl(cpu_tmp0, 1);
7154 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7155 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7156 break;
7157 case 2:
7158 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7159 tcg_gen_movi_tl(cpu_tmp0, 1);
7160 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7161 tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
7162 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7163 break;
7164 default:
7165 case 3:
7166 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7167 tcg_gen_movi_tl(cpu_tmp0, 1);
7168 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7169 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7170 break;
7171 }
7172 s->cc_op = CC_OP_SARB + ot;
7173 if (op != 0) {
7174 if (mod != 3)
7175 gen_op_st_T0_A0(ot + s->mem_index);
7176 else
7177 gen_op_mov_reg_T0(ot, rm);
7178 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
7179 tcg_gen_movi_tl(cpu_cc_dst, 0);
7180 }
7181 break;
7182 case 0x1bc: /* bsf */
7183 case 0x1bd: /* bsr */
7184 {
7185 int label1;
7186 TCGv t0;
7187
7188 ot = dflag + OT_WORD;
7189 modrm = ldub_code(s->pc++);
7190 reg = ((modrm >> 3) & 7) | rex_r;
7191 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
7192 gen_extu(ot, cpu_T[0]);
7193 label1 = gen_new_label();
7194 tcg_gen_movi_tl(cpu_cc_dst, 0);
7195 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7196 tcg_gen_mov_tl(t0, cpu_T[0]);
7197 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, label1);
7198 if (b & 1) {
7199 tcg_gen_helper_1_1(helper_bsr, cpu_T[0], t0);
7200 } else {
7201 tcg_gen_helper_1_1(helper_bsf, cpu_T[0], t0);
7202 }
7203 gen_op_mov_reg_T0(ot, reg);
7204 tcg_gen_movi_tl(cpu_cc_dst, 1);
7205 gen_set_label(label1);
7206 tcg_gen_discard_tl(cpu_cc_src);
7207 s->cc_op = CC_OP_LOGICB + ot;
7208 tcg_temp_free(t0);
7209 }
7210 break;
7211 /************************/
7212 /* bcd */
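     /* The BCD adjust instructions are invalid in 64-bit mode and are
        implemented entirely in helpers operating on AL/AH and the lazy
        flags state. */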
7213 case 0x27: /* daa */
7214 if (CODE64(s))
7215 goto illegal_op;
7216 if (s->cc_op != CC_OP_DYNAMIC)
7217 gen_op_set_cc_op(s->cc_op);
7218 tcg_gen_helper_0_0(helper_daa);
7219 s->cc_op = CC_OP_EFLAGS;
7220 break;
7221 case 0x2f: /* das */
7222 if (CODE64(s))
7223 goto illegal_op;
7224 if (s->cc_op != CC_OP_DYNAMIC)
7225 gen_op_set_cc_op(s->cc_op);
7226 tcg_gen_helper_0_0(helper_das);
7227 s->cc_op = CC_OP_EFLAGS;
7228 break;
7229 case 0x37: /* aaa */
7230 if (CODE64(s))
7231 goto illegal_op;
7232 if (s->cc_op != CC_OP_DYNAMIC)
7233 gen_op_set_cc_op(s->cc_op);
7234 tcg_gen_helper_0_0(helper_aaa);
7235 s->cc_op = CC_OP_EFLAGS;
7236 break;
7237 case 0x3f: /* aas */
7238 if (CODE64(s))
7239 goto illegal_op;
7240 if (s->cc_op != CC_OP_DYNAMIC)
7241 gen_op_set_cc_op(s->cc_op);
7242 tcg_gen_helper_0_0(helper_aas);
7243 s->cc_op = CC_OP_EFLAGS;
7244 break;
7245 case 0xd4: /* aam */
7246 if (CODE64(s))
7247 goto illegal_op;
7248 val = ldub_code(s->pc++);
7249 if (val == 0) {
7250 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
7251 } else {
7252 tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
7253 s->cc_op = CC_OP_LOGICB;
7254 }
7255 break;
7256 case 0xd5: /* aad */
7257 if (CODE64(s))
7258 goto illegal_op;
7259 val = ldub_code(s->pc++);
7260 tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
7261 s->cc_op = CC_OP_LOGICB;
7262 break;
7263 /************************/
7264 /* misc */
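     /* 0x90: NOP, or PAUSE when carrying a REP prefix (only the SVM
        PAUSE intercept is checked; see the XXXs below for the remaining
        gaps). */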
7265 case 0x90: /* nop */
7266 /* XXX: xchg with a REX prefix (xchg r8, rax) is not handled */
7267 /* XXX: the LOCK-prefix test should be made correct for all insns */
7268 if (prefixes & PREFIX_LOCK)
7269 goto illegal_op;
7270 if (prefixes & PREFIX_REPZ) {
7271 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
7272 }
7273 break;
7274 case 0x9b: /* fwait */
7275 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
7276 (HF_MP_MASK | HF_TS_MASK)) {
7277 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7278 } else {
7279 if (s->cc_op != CC_OP_DYNAMIC)
7280 gen_op_set_cc_op(s->cc_op);
7281 gen_jmp_im(pc_start - s->cs_base);
7282 tcg_gen_helper_0_0(helper_fwait);
7283 }
7284 break;
7285 case 0xcc: /* int3 */
7286#ifdef VBOX
7287 if (s->vm86 && s->iopl != 3 && !s->vme) {
7288 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7289 } else
7290#endif
7291 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
7292 break;
7293 case 0xcd: /* int N */
7294 val = ldub_code(s->pc++);
7295#ifdef VBOX
7296 if (s->vm86 && s->iopl != 3 && !s->vme) {
7297#else
7298 if (s->vm86 && s->iopl != 3) {
7299#endif
7300 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7301 } else {
7302 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
7303 }
7304 break;
7305 case 0xce: /* into */
7306 if (CODE64(s))
7307 goto illegal_op;
7308 if (s->cc_op != CC_OP_DYNAMIC)
7309 gen_op_set_cc_op(s->cc_op);
7310 gen_jmp_im(pc_start - s->cs_base);
7311 tcg_gen_helper_0_1(helper_into, tcg_const_i32(s->pc - pc_start));
7312 break;
7313 case 0xf1: /* icebp (undocumented, exits to external debugger) */
7314 gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP);
7315#if 1
7316 gen_debug(s, pc_start - s->cs_base);
7317#else
7318 /* start debug */
7319 tb_flush(cpu_single_env);
7320 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
7321#endif
7322 break;
7323 case 0xfa: /* cli */
7324 if (!s->vm86) {
7325 if (s->cpl <= s->iopl) {
7326 tcg_gen_helper_0_0(helper_cli);
7327 } else {
7328 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7329 }
7330 } else {
7331 if (s->iopl == 3) {
7332 tcg_gen_helper_0_0(helper_cli);
7333#ifdef VBOX
7334 } else if (s->iopl != 3 && s->vme) {
7335 tcg_gen_helper_0_0(helper_cli_vme);
7336#endif
7337 } else {
7338 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7339 }
7340 }
7341 break;
7342 case 0xfb: /* sti */
7343 if (!s->vm86) {
7344 if (s->cpl <= s->iopl) {
7345 gen_sti:
7346 tcg_gen_helper_0_0(helper_sti);
7347 /* Interrupts are enabled only after the instruction following STI.
7348 If several STIs occur in a row, only the first one opens the
7349 inhibit window. */
7350 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
7351 tcg_gen_helper_0_0(helper_set_inhibit_irq);
7352 /* give a chance to handle pending irqs */
7353 gen_jmp_im(s->pc - s->cs_base);
7354 gen_eob(s);
7355 } else {
7356 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7357 }
7358 } else {
7359 if (s->iopl == 3) {
7360 goto gen_sti;
7361#ifdef VBOX
7362 } else if (s->iopl != 3 && s->vme) {
7363 tcg_gen_helper_0_0(helper_sti_vme);
7364 /* give a chance to handle pending irqs */
7365 gen_jmp_im(s->pc - s->cs_base);
7366 gen_eob(s);
7367#endif
7368 } else {
7369 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7370 }
7371 }
7372 break;
7373 case 0x62: /* bound */
7374 if (CODE64(s))
7375 goto illegal_op;
7376 ot = dflag ? OT_LONG : OT_WORD;
7377 modrm = ldub_code(s->pc++);
7378 reg = (modrm >> 3) & 7;
7379 mod = (modrm >> 6) & 3;
7380 if (mod == 3)
7381 goto illegal_op;
7382 gen_op_mov_TN_reg(ot, 0, reg);
7383 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7384 gen_jmp_im(pc_start - s->cs_base);
7385 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7386 if (ot == OT_WORD)
7387 tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
7388 else
7389 tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
7390 break;
7391 case 0x1c8 ... 0x1cf: /* bswap reg */
7392 reg = (b & 7) | REX_B(s);
7393#ifdef TARGET_X86_64
7394 if (dflag == 2) {
7395 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
7396 tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
7397 gen_op_mov_reg_T0(OT_QUAD, reg);
7398 } else
7399 {
7400 TCGv tmp0;
7401 gen_op_mov_TN_reg(OT_LONG, 0, reg);
7402
7403 tmp0 = tcg_temp_new(TCG_TYPE_I32);
7404 tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
7405 tcg_gen_bswap_i32(tmp0, tmp0);
7406 tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
7407 gen_op_mov_reg_T0(OT_LONG, reg);
7408 }
7409#else
7410 {
7411 gen_op_mov_TN_reg(OT_LONG, 0, reg);
7412 tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
7413 gen_op_mov_reg_T0(OT_LONG, reg);
7414 }
7415#endif
7416 break;
7417 case 0xd6: /* salc */
7418 if (CODE64(s))
7419 goto illegal_op;
7420 if (s->cc_op != CC_OP_DYNAMIC)
7421 gen_op_set_cc_op(s->cc_op);
7422 gen_compute_eflags_c(cpu_T[0]);
7423 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
7424 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
7425 break;
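     /* LOOP/LOOPZ/LOOPNZ/JECXZ use three labels: l1 = branch taken,
        l3 = loop condition failed (fall through to next_eip), l2 = the
        common exit after the EIP update. */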
7426 case 0xe0: /* loopnz */
7427 case 0xe1: /* loopz */
7428 case 0xe2: /* loop */
7429 case 0xe3: /* jecxz */
7430 {
7431 int l1, l2, l3;
7432
7433 tval = (int8_t)insn_get(s, OT_BYTE);
7434 next_eip = s->pc - s->cs_base;
7435 tval += next_eip;
7436 if (s->dflag == 0)
7437 tval &= 0xffff;
7438
7439 l1 = gen_new_label();
7440 l2 = gen_new_label();
7441 l3 = gen_new_label();
7442 b &= 3;
7443 switch(b) {
7444 case 0: /* loopnz */
7445 case 1: /* loopz */
7446 if (s->cc_op != CC_OP_DYNAMIC)
7447 gen_op_set_cc_op(s->cc_op);
7448 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7449 gen_op_jz_ecx(s->aflag, l3);
7450 gen_compute_eflags(cpu_tmp0);
7451 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
7452 if (b == 0) {
7453 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
7454 } else {
7455 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, l1);
7456 }
7457 break;
7458 case 2: /* loop */
7459 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7460 gen_op_jnz_ecx(s->aflag, l1);
7461 break;
7462 default:
7463 case 3: /* jcxz */
7464 gen_op_jz_ecx(s->aflag, l1);
7465 break;
7466 }
7467
7468 gen_set_label(l3);
7469 gen_jmp_im(next_eip);
7470 tcg_gen_br(l2);
7471
7472 gen_set_label(l1);
7473 gen_jmp_im(tval);
7474 gen_set_label(l2);
7475 gen_eob(s);
7476 }
7477 break;
7478 case 0x130: /* wrmsr */
7479 case 0x132: /* rdmsr */
7480 if (s->cpl != 0) {
7481 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7482 } else {
7483 if (s->cc_op != CC_OP_DYNAMIC)
7484 gen_op_set_cc_op(s->cc_op);
7485 gen_jmp_im(pc_start - s->cs_base);
7486 if (b & 2) {
7487 tcg_gen_helper_0_0(helper_rdmsr);
7488 } else {
7489 tcg_gen_helper_0_0(helper_wrmsr);
7490 }
7491 }
7492 break;
7493 case 0x131: /* rdtsc */
7494 if (s->cc_op != CC_OP_DYNAMIC)
7495 gen_op_set_cc_op(s->cc_op);
7496 gen_jmp_im(pc_start - s->cs_base);
7497 if (use_icount)
7498 gen_io_start();
7499 tcg_gen_helper_0_0(helper_rdtsc);
7500 if (use_icount) {
7501 gen_io_end();
7502 gen_jmp(s, s->pc - s->cs_base);
7503 }
7504 break;
7505 case 0x133: /* rdpmc */
7506 if (s->cc_op != CC_OP_DYNAMIC)
7507 gen_op_set_cc_op(s->cc_op);
7508 gen_jmp_im(pc_start - s->cs_base);
7509 tcg_gen_helper_0_0(helper_rdpmc);
7510 break;
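     /* SYSENTER/SYSEXIT force cc_op back to CC_OP_DYNAMIC after flushing
        it since the helpers switch privilege state; the VBOX build
        rejects both in 64-bit code regardless of vendor (see @todo). */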
7511 case 0x134: /* sysenter */
7512#ifndef VBOX
7513 /* On Intel CPUs SYSENTER is valid in 64-bit mode */
7514 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
7515#else
7516 /** @todo: make things right */
7517 if (CODE64(s))
7518#endif
7519 goto illegal_op;
7520 if (!s->pe) {
7521 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7522 } else {
7523 if (s->cc_op != CC_OP_DYNAMIC) {
7524 gen_op_set_cc_op(s->cc_op);
7525 s->cc_op = CC_OP_DYNAMIC;
7526 }
7527 gen_jmp_im(pc_start - s->cs_base);
7528 tcg_gen_helper_0_0(helper_sysenter);
7529 gen_eob(s);
7530 }
7531 break;
7532 case 0x135: /* sysexit */
7533#ifndef VBOX
7534 /* On Intel CPUs SYSEXIT is valid in 64-bit mode */
7535 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
7536#else
7537 /** @todo: make things right */
7538 if (CODE64(s))
7539#endif
7540 goto illegal_op;
7541 if (!s->pe) {
7542 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7543 } else {
7544 if (s->cc_op != CC_OP_DYNAMIC) {
7545 gen_op_set_cc_op(s->cc_op);
7546 s->cc_op = CC_OP_DYNAMIC;
7547 }
7548 gen_jmp_im(pc_start - s->cs_base);
7549 tcg_gen_helper_0_1(helper_sysexit, tcg_const_i32(dflag));
7550 gen_eob(s);
7551 }
7552 break;
7553#ifdef TARGET_X86_64
7554 case 0x105: /* syscall */
7555 /* XXX: is it usable in real mode ? */
7556 if (s->cc_op != CC_OP_DYNAMIC) {
7557 gen_op_set_cc_op(s->cc_op);
7558 s->cc_op = CC_OP_DYNAMIC;
7559 }
7560 gen_jmp_im(pc_start - s->cs_base);
7561 tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
7562 gen_eob(s);
7563 break;
7564 case 0x107: /* sysret */
7565 if (!s->pe) {
7566 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7567 } else {
7568 if (s->cc_op != CC_OP_DYNAMIC) {
7569 gen_op_set_cc_op(s->cc_op);
7570 s->cc_op = CC_OP_DYNAMIC;
7571 }
7572 gen_jmp_im(pc_start - s->cs_base);
7573 tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
7574 /* condition codes are modified only in long mode */
7575 if (s->lma)
7576 s->cc_op = CC_OP_EFLAGS;
7577 gen_eob(s);
7578 }
7579 break;
7580#endif
7581 case 0x1a2: /* cpuid */
7582 if (s->cc_op != CC_OP_DYNAMIC)
7583 gen_op_set_cc_op(s->cc_op);
7584 gen_jmp_im(pc_start - s->cs_base);
7585 tcg_gen_helper_0_0(helper_cpuid);
7586 break;
7587 case 0xf4: /* hlt */
7588 if (s->cpl != 0) {
7589 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7590 } else {
7591 if (s->cc_op != CC_OP_DYNAMIC)
7592 gen_op_set_cc_op(s->cc_op);
7593 gen_jmp_im(pc_start - s->cs_base);
7594 tcg_gen_helper_0_1(helper_hlt, tcg_const_i32(s->pc - pc_start));
7595 s->is_jmp = 3;
7596 }
7597 break;
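     /* Group 0f 00: SLDT/STR read the selector straight out of env,
        LLDT/LTR are CPL-0 only and go through helpers, and VERR/VERW
        report their result in ZF via the lazy flags state. */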
7598 case 0x100:
7599 modrm = ldub_code(s->pc++);
7600 mod = (modrm >> 6) & 3;
7601 op = (modrm >> 3) & 7;
7602 switch(op) {
7603 case 0: /* sldt */
7604 if (!s->pe || s->vm86)
7605 goto illegal_op;
7606 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ);
7607 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
7608 ot = OT_WORD;
7609 if (mod == 3)
7610 ot += s->dflag;
7611 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
7612 break;
7613 case 2: /* lldt */
7614 if (!s->pe || s->vm86)
7615 goto illegal_op;
7616 if (s->cpl != 0) {
7617 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7618 } else {
7619 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE);
7620 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7621 gen_jmp_im(pc_start - s->cs_base);
7622 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7623 tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
7624 }
7625 break;
7626 case 1: /* str */
7627 if (!s->pe || s->vm86)
7628 goto illegal_op;
7629 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ);
7630 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
7631 ot = OT_WORD;
7632 if (mod == 3)
7633 ot += s->dflag;
7634 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
7635 break;
7636 case 3: /* ltr */
7637 if (!s->pe || s->vm86)
7638 goto illegal_op;
7639 if (s->cpl != 0) {
7640 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7641 } else {
7642 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE);
7643 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7644 gen_jmp_im(pc_start - s->cs_base);
7645 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7646 tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
7647 }
7648 break;
7649 case 4: /* verr */
7650 case 5: /* verw */
7651 if (!s->pe || s->vm86)
7652 goto illegal_op;
7653 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7654 if (s->cc_op != CC_OP_DYNAMIC)
7655 gen_op_set_cc_op(s->cc_op);
7656 if (op == 4)
7657 tcg_gen_helper_0_1(helper_verr, cpu_T[0]);
7658 else
7659 tcg_gen_helper_0_1(helper_verw, cpu_T[0]);
7660 s->cc_op = CC_OP_EFLAGS;
7661 break;
7662 default:
7663 goto illegal_op;
7664 }
7665 break;
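     /* Group 0f 01: SGDT/SIDT/LGDT/LIDT, MONITOR/MWAIT, the SVM
        instructions, SMSW/LMSW, INVLPG and SWAPGS, plus a VBOX-only
        RDTSCP path, all told apart by the modrm reg/rm fields. */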
7666 case 0x101:
7667 modrm = ldub_code(s->pc++);
7668 mod = (modrm >> 6) & 3;
7669 op = (modrm >> 3) & 7;
7670 rm = modrm & 7;
7671
7672#ifdef VBOX
7673 /* 0f 01 f9: rdtscp */
7674 if (modrm == 0xf9)
7675 {
7676 if (!(s->cpuid_ext2_features & CPUID_EXT2_RDTSCP))
7677 goto illegal_op;
7678 gen_jmp_im(pc_start - s->cs_base);
7679 tcg_gen_helper_0_0(helper_rdtscp);
7680 break;
7681 }
7682#endif
7683 switch(op) {
7684 case 0: /* sgdt */
7685 if (mod == 3)
7686 goto illegal_op;
7687 gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ);
7688 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7689 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
7690 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7691 gen_add_A0_im(s, 2);
7692 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
7693 if (!s->dflag)
7694 gen_op_andl_T0_im(0xffffff);
7695 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7696 break;
7697 case 1:
7698 if (mod == 3) {
7699 switch (rm) {
7700 case 0: /* monitor */
7701 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7702 s->cpl != 0)
7703 goto illegal_op;
7704 if (s->cc_op != CC_OP_DYNAMIC)
7705 gen_op_set_cc_op(s->cc_op);
7706 gen_jmp_im(pc_start - s->cs_base);
7707#ifdef TARGET_X86_64
7708 if (s->aflag == 2) {
7709 gen_op_movq_A0_reg(R_EAX);
7710 } else
7711#endif
7712 {
7713 gen_op_movl_A0_reg(R_EAX);
7714 if (s->aflag == 0)
7715 gen_op_andl_A0_ffff();
7716 }
7717 gen_add_A0_ds_seg(s);
7718 tcg_gen_helper_0_1(helper_monitor, cpu_A0);
7719 break;
7720 case 1: /* mwait */
7721 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7722 s->cpl != 0)
7723 goto illegal_op;
7724 if (s->cc_op != CC_OP_DYNAMIC) {
7725 gen_op_set_cc_op(s->cc_op);
7726 s->cc_op = CC_OP_DYNAMIC;
7727 }
7728 gen_jmp_im(pc_start - s->cs_base);
7729 tcg_gen_helper_0_1(helper_mwait, tcg_const_i32(s->pc - pc_start));
7730 gen_eob(s);
7731 break;
7732 default:
7733 goto illegal_op;
7734 }
7735 } else { /* sidt */
7736 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ);
7737 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7738 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
7739 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7740 gen_add_A0_im(s, 2);
7741 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
7742 if (!s->dflag)
7743 gen_op_andl_T0_im(0xffffff);
7744 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7745 }
7746 break;
7747 case 2: /* lgdt */
7748 case 3: /* lidt */
7749 if (mod == 3) {
7750 if (s->cc_op != CC_OP_DYNAMIC)
7751 gen_op_set_cc_op(s->cc_op);
7752 gen_jmp_im(pc_start - s->cs_base);
7753 switch(rm) {
7754 case 0: /* VMRUN */
7755 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7756 goto illegal_op;
7757 if (s->cpl != 0) {
7758 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7759 break;
7760 } else {
7761 tcg_gen_helper_0_2(helper_vmrun,
7762 tcg_const_i32(s->aflag),
7763 tcg_const_i32(s->pc - pc_start));
7764 tcg_gen_exit_tb(0);
7765 s->is_jmp = 3;
7766 }
7767 break;
7768 case 1: /* VMMCALL */
7769 if (!(s->flags & HF_SVME_MASK))
7770 goto illegal_op;
7771 tcg_gen_helper_0_0(helper_vmmcall);
7772 break;
7773 case 2: /* VMLOAD */
7774 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7775 goto illegal_op;
7776 if (s->cpl != 0) {
7777 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7778 break;
7779 } else {
7780 tcg_gen_helper_0_1(helper_vmload,
7781 tcg_const_i32(s->aflag));
7782 }
7783 break;
7784 case 3: /* VMSAVE */
7785 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7786 goto illegal_op;
7787 if (s->cpl != 0) {
7788 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7789 break;
7790 } else {
7791 tcg_gen_helper_0_1(helper_vmsave,
7792 tcg_const_i32(s->aflag));
7793 }
7794 break;
7795 case 4: /* STGI */
7796 if ((!(s->flags & HF_SVME_MASK) &&
7797 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7798 !s->pe)
7799 goto illegal_op;
7800 if (s->cpl != 0) {
7801 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7802 break;
7803 } else {
7804 tcg_gen_helper_0_0(helper_stgi);
7805 }
7806 break;
7807 case 5: /* CLGI */
7808 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7809 goto illegal_op;
7810 if (s->cpl != 0) {
7811 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7812 break;
7813 } else {
7814 tcg_gen_helper_0_0(helper_clgi);
7815 }
7816 break;
7817 case 6: /* SKINIT */
7818 if ((!(s->flags & HF_SVME_MASK) &&
7819 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7820 !s->pe)
7821 goto illegal_op;
7822 tcg_gen_helper_0_0(helper_skinit);
7823 break;
7824 case 7: /* INVLPGA */
7825 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7826 goto illegal_op;
7827 if (s->cpl != 0) {
7828 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7829 break;
7830 } else {
7831 tcg_gen_helper_0_1(helper_invlpga,
7832 tcg_const_i32(s->aflag));
7833 }
7834 break;
7835 default:
7836 goto illegal_op;
7837 }
7838 } else if (s->cpl != 0) {
7839 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7840 } else {
7841 gen_svm_check_intercept(s, pc_start,
7842 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE);
7843 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7844 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
7845 gen_add_A0_im(s, 2);
7846 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7847 if (!s->dflag)
7848 gen_op_andl_T0_im(0xffffff);
7849 if (op == 2) {
7850 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
7851 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
7852 } else {
7853 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
7854 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
7855 }
7856 }
7857 break;
7858 case 4: /* smsw */
7859 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0);
7860 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
7861 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
7862 break;
7863 case 6: /* lmsw */
7864 if (s->cpl != 0) {
7865 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7866 } else {
7867 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
7868 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7869 tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
7870 gen_jmp_im(s->pc - s->cs_base);
7871 gen_eob(s);
7872 }
7873 break;
7874 case 7: /* invlpg */
7875 if (s->cpl != 0) {
7876 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7877 } else {
7878 if (mod == 3) {
7879#ifdef TARGET_X86_64
7880 if (CODE64(s) && rm == 0) {
7881 /* swapgs */
7882 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7883 tcg_gen_ld_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,kernelgsbase));
7884 tcg_gen_st_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7885 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,kernelgsbase));
7886 } else
7887#endif
7888 {
7889 goto illegal_op;
7890 }
7891 } else {
7892 if (s->cc_op != CC_OP_DYNAMIC)
7893 gen_op_set_cc_op(s->cc_op);
7894 gen_jmp_im(pc_start - s->cs_base);
7895 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7896 tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
7897 gen_jmp_im(s->pc - s->cs_base);
7898 gen_eob(s);
7899 }
7900 }
7901 break;
7902 default:
7903 goto illegal_op;
7904 }
7905 break;
7906 case 0x108: /* invd */
7907 case 0x109: /* wbinvd */
7908 if (s->cpl != 0) {
7909 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7910 } else {
7911 gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD);
7912 /* nothing to do */
7913 }
7914 break;
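     /* In 64-bit mode 0x63 is MOVSXD (sign-extend a 32-bit source);
        otherwise it is ARPL, which raises the destination selector's RPL
        to the source's RPL and reports in ZF whether it adjusted it. */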
7915 case 0x63: /* arpl or movslS (x86_64) */
7916#ifdef TARGET_X86_64
7917 if (CODE64(s)) {
7918 int d_ot;
7919 /* d_ot is the size of the destination */
7920 d_ot = dflag + OT_WORD;
7921
7922 modrm = ldub_code(s->pc++);
7923 reg = ((modrm >> 3) & 7) | rex_r;
7924 mod = (modrm >> 6) & 3;
7925 rm = (modrm & 7) | REX_B(s);
7926
7927 if (mod == 3) {
7928 gen_op_mov_TN_reg(OT_LONG, 0, rm);
7929 /* sign extend */
7930 if (d_ot == OT_QUAD)
7931 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
7932 gen_op_mov_reg_T0(d_ot, reg);
7933 } else {
7934 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7935 if (d_ot == OT_QUAD) {
7936 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
7937 } else {
7938 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
7939 }
7940 gen_op_mov_reg_T0(d_ot, reg);
7941 }
7942 } else
7943#endif
7944 {
7945 int label1;
7946 TCGv t0, t1, t2, a0;
7947
7948 if (!s->pe || s->vm86)
7949 goto illegal_op;
7950
7951 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7952 t1 = tcg_temp_local_new(TCG_TYPE_TL);
7953 t2 = tcg_temp_local_new(TCG_TYPE_TL);
7954#ifdef VBOX
7955 a0 = tcg_temp_local_new(TCG_TYPE_TL);
7956#endif
7957 ot = OT_WORD;
7958 modrm = ldub_code(s->pc++);
7959 reg = (modrm >> 3) & 7;
7960 mod = (modrm >> 6) & 3;
7961 rm = modrm & 7;
7962 if (mod != 3) {
7963 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7964#ifdef VBOX
7965 tcg_gen_mov_tl(a0, cpu_A0);
7966#endif
7967 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
7968 } else {
7969 gen_op_mov_v_reg(ot, t0, rm);
7970 }
7971 gen_op_mov_v_reg(ot, t1, reg);
7972 tcg_gen_andi_tl(cpu_tmp0, t0, 3);
7973 tcg_gen_andi_tl(t1, t1, 3);
7974 tcg_gen_movi_tl(t2, 0);
7975 label1 = gen_new_label();
7976 tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, t1, label1);
7977 tcg_gen_andi_tl(t0, t0, ~3);
7978 tcg_gen_or_tl(t0, t0, t1);
7979 tcg_gen_movi_tl(t2, CC_Z);
7980 gen_set_label(label1);
7981 if (mod != 3) {
7982#ifdef VBOX
7983 /* cpu_A0 doesn't survive the branch, so store through the saved copy */
7984 gen_op_st_v(ot + s->mem_index, t0, a0);
7985#else
7986 gen_op_st_v(ot + s->mem_index, t0, cpu_A0);
7987#endif
7988 } else {
7989 gen_op_mov_reg_v(ot, rm, t0);
7990 }
7991 if (s->cc_op != CC_OP_DYNAMIC)
7992 gen_op_set_cc_op(s->cc_op);
7993 gen_compute_eflags(cpu_cc_src);
7994 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
7995 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t2);
7996 s->cc_op = CC_OP_EFLAGS;
7997 tcg_temp_free(t0);
7998 tcg_temp_free(t1);
7999 tcg_temp_free(t2);
8000#ifdef VBOX
8001 tcg_temp_free(a0);
8002#endif
8003 }
8004 break;
8005 case 0x102: /* lar */
8006 case 0x103: /* lsl */
8007 {
8008 int label1;
8009 TCGv t0;
8010 if (!s->pe || s->vm86)
8011 goto illegal_op;
8012 ot = dflag ? OT_LONG : OT_WORD;
8013 modrm = ldub_code(s->pc++);
8014 reg = ((modrm >> 3) & 7) | rex_r;
8015 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
8016 t0 = tcg_temp_local_new(TCG_TYPE_TL);
8017 if (s->cc_op != CC_OP_DYNAMIC)
8018 gen_op_set_cc_op(s->cc_op);
8019 if (b == 0x102)
8020 tcg_gen_helper_1_1(helper_lar, t0, cpu_T[0]);
8021 else
8022 tcg_gen_helper_1_1(helper_lsl, t0, cpu_T[0]);
8023 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
8024 label1 = gen_new_label();
8025 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
8026 gen_op_mov_reg_v(ot, reg, t0);
8027 gen_set_label(label1);
8028 s->cc_op = CC_OP_EFLAGS;
8029 tcg_temp_free(t0);
8030 }
8031 break;
8032 case 0x118:
8033 modrm = ldub_code(s->pc++);
8034 mod = (modrm >> 6) & 3;
8035 op = (modrm >> 3) & 7;
8036 switch(op) {
8037 case 0: /* prefetchnta */
8038 case 1: /* prefetcht0 */
8039 case 2: /* prefetcht1 */
8040 case 3: /* prefetcht2 */
8041 if (mod == 3)
8042 goto illegal_op;
8043 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8044 /* nothing more to do */
8045 break;
8046 default: /* nop (multi byte) */
8047 gen_nop_modrm(s, modrm);
8048 break;
8049 }
8050 break;
8051 case 0x119 ... 0x11f: /* nop (multi byte) */
8052 modrm = ldub_code(s->pc++);
8053 gen_nop_modrm(s, modrm);
8054 break;
8055 case 0x120: /* mov reg, crN */
8056 case 0x122: /* mov crN, reg */
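/* Only CR0/CR2/CR3/CR4 (and CR8, the TPR) can be moved, and only at CPL 0.
 * A CR write may change paging or protection state, so the translation
 * block is terminated right after it (gen_jmp_im + gen_eob below). */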
8057 if (s->cpl != 0) {
8058 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8059 } else {
8060 modrm = ldub_code(s->pc++);
8061 if ((modrm & 0xc0) != 0xc0)
8062 goto illegal_op;
8063 rm = (modrm & 7) | REX_B(s);
8064 reg = ((modrm >> 3) & 7) | rex_r;
8065 if (CODE64(s))
8066 ot = OT_QUAD;
8067 else
8068 ot = OT_LONG;
8069 switch(reg) {
8070 case 0:
8071 case 2:
8072 case 3:
8073 case 4:
8074 case 8:
8075 if (s->cc_op != CC_OP_DYNAMIC)
8076 gen_op_set_cc_op(s->cc_op);
8077 gen_jmp_im(pc_start - s->cs_base);
8078 if (b & 2) {
8079 gen_op_mov_TN_reg(ot, 0, rm);
8080 tcg_gen_helper_0_2(helper_write_crN,
8081 tcg_const_i32(reg), cpu_T[0]);
8082 gen_jmp_im(s->pc - s->cs_base);
8083 gen_eob(s);
8084 } else {
8085 tcg_gen_helper_1_1(helper_read_crN,
8086 cpu_T[0], tcg_const_i32(reg));
8087 gen_op_mov_reg_T0(ot, rm);
8088 }
8089 break;
8090 default:
8091 goto illegal_op;
8092 }
8093 }
8094 break;
8095 case 0x121: /* mov reg, drN */
8096 case 0x123: /* mov drN, reg */
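/* Debug-register moves. DR4/DR5 architecturally alias DR6/DR7 while
 * CR4.DE is clear; that case is not modelled yet, so accesses to them are
 * always rejected (see the XXX below). */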
8097 if (s->cpl != 0) {
8098 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8099 } else {
8100 modrm = ldub_code(s->pc++);
8101 if ((modrm & 0xc0) != 0xc0)
8102 goto illegal_op;
8103 rm = (modrm & 7) | REX_B(s);
8104 reg = ((modrm >> 3) & 7) | rex_r;
8105 if (CODE64(s))
8106 ot = OT_QUAD;
8107 else
8108 ot = OT_LONG;
8109 /* XXX: do it dynamically with CR4.DE bit */
8110 if (reg == 4 || reg == 5 || reg >= 8)
8111 goto illegal_op;
8112 if (b & 2) {
8113 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
8114 gen_op_mov_TN_reg(ot, 0, rm);
8115 tcg_gen_helper_0_2(helper_movl_drN_T0,
8116 tcg_const_i32(reg), cpu_T[0]);
8117 gen_jmp_im(s->pc - s->cs_base);
8118 gen_eob(s);
8119 } else {
8120 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
8121 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
8122 gen_op_mov_reg_T0(ot, rm);
8123 }
8124 }
8125 break;
8126 case 0x106: /* clts */
8127 if (s->cpl != 0) {
8128 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8129 } else {
8130 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
8131 tcg_gen_helper_0_0(helper_clts);
8132 /* abort block because static cpu state changed */
8133 gen_jmp_im(s->pc - s->cs_base);
8134 gen_eob(s);
8135 }
8136 break;
8137 /* MMX/3DNow!/SSE/SSE2/SSE3/SSSE3/SSE4 support */
8138 case 0x1c3: /* MOVNTI reg, mem */
8139 if (!(s->cpuid_features & CPUID_SSE2))
8140 goto illegal_op;
8141 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
8142 modrm = ldub_code(s->pc++);
8143 mod = (modrm >> 6) & 3;
8144 if (mod == 3)
8145 goto illegal_op;
8146 reg = ((modrm >> 3) & 7) | rex_r;
8147 /* generate a generic store; the non-temporal hint only affects caching, so ignoring it is architecturally equivalent */
8148 gen_ldst_modrm(s, modrm, ot, reg, 1);
8149 break;
8150 case 0x1ae:
8151 modrm = ldub_code(s->pc++);
8152 mod = (modrm >> 6) & 3;
8153 op = (modrm >> 3) & 7;
8154 switch(op) {
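/* fxsave/fxrstor need FXSR support and a memory operand; CR0.EM yields
 * #UD and CR0.TS #NM (EXCP07), matching real hardware. The dflag==2
 * argument tells the helper to use the REX.W 64-bit image layout. */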
8155 case 0: /* fxsave */
8156 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
8157 (s->flags & HF_EM_MASK))
8158 goto illegal_op;
8159 if (s->flags & HF_TS_MASK) {
8160 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8161 break;
8162 }
8163 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8164 if (s->cc_op != CC_OP_DYNAMIC)
8165 gen_op_set_cc_op(s->cc_op);
8166 gen_jmp_im(pc_start - s->cs_base);
8167 tcg_gen_helper_0_2(helper_fxsave,
8168 cpu_A0, tcg_const_i32((s->dflag == 2)));
8169 break;
8170 case 1: /* fxrstor */
8171 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
8172 (s->flags & HF_EM_MASK))
8173 goto illegal_op;
8174 if (s->flags & HF_TS_MASK) {
8175 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8176 break;
8177 }
8178 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8179 if (s->cc_op != CC_OP_DYNAMIC)
8180 gen_op_set_cc_op(s->cc_op);
8181 gen_jmp_im(pc_start - s->cs_base);
8182 tcg_gen_helper_0_2(helper_fxrstor,
8183 cpu_A0, tcg_const_i32((s->dflag == 2)));
8184 break;
8185 case 2: /* ldmxcsr */
8186 case 3: /* stmxcsr */
8187 if (s->flags & HF_TS_MASK) {
8188 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8189 break;
8190 }
8191 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
8192 mod == 3)
8193 goto illegal_op;
8194 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8195 if (op == 2) {
8196 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
8197 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
8198 } else {
8199 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
8200 gen_op_st_T0_A0(OT_LONG + s->mem_index);
8201 }
8202 break;
8203 case 5: /* lfence */
8204 case 6: /* mfence */
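/* Fences only order memory accesses; the sequential emulator needs no
 * generated code for them, so only the encoding is validated. */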
8205 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
8206 goto illegal_op;
8207 break;
8208 case 7: /* sfence / clflush */
8209 if ((modrm & 0xc7) == 0xc0) {
8210 /* sfence */
8211 /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
8212 if (!(s->cpuid_features & CPUID_SSE))
8213 goto illegal_op;
8214 } else {
8215 /* clflush */
8216 if (!(s->cpuid_features & CPUID_CLFLUSH))
8217 goto illegal_op;
8218 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8219 }
8220 break;
8221 default:
8222 goto illegal_op;
8223 }
8224 break;
8225 case 0x10d: /* 3DNow! prefetch(w) */
8226 modrm = ldub_code(s->pc++);
8227 mod = (modrm >> 6) & 3;
8228 if (mod == 3)
8229 goto illegal_op;
8230 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8231 /* ignore for now */
8232 break;
8233 case 0x1aa: /* rsm */
8234 gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM);
8235 if (!(s->flags & HF_SMM_MASK))
8236 goto illegal_op;
8237 if (s->cc_op != CC_OP_DYNAMIC) {
8238 gen_op_set_cc_op(s->cc_op);
8239 s->cc_op = CC_OP_DYNAMIC;
8240 }
8241 gen_jmp_im(s->pc - s->cs_base);
8242 tcg_gen_helper_0_0(helper_rsm);
8243 gen_eob(s);
8244 break;
8245 case 0x1b8: /* SSE4.2 popcnt */
8246 if ((prefixes & (PREFIX_REPZ | PREFIX_LOCK | PREFIX_REPNZ)) !=
8247 PREFIX_REPZ)
8248 goto illegal_op;
8249 if (!(s->cpuid_ext_features & CPUID_EXT_POPCNT))
8250 goto illegal_op;
8251
8252 modrm = ldub_code(s->pc++);
8253 reg = ((modrm >> 3) & 7);
8254
8255 if (s->prefix & PREFIX_DATA)
8256 ot = OT_WORD;
8257 else if (s->dflag != 2)
8258 ot = OT_LONG;
8259 else
8260 ot = OT_QUAD;
8261
8262 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
8263 tcg_gen_helper_1_2(helper_popcnt,
8264 cpu_T[0], cpu_T[0], tcg_const_i32(ot));
8265 gen_op_mov_reg_T0(ot, reg);
8266
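/* POPCNT clears OF/SF/AF/CF/PF and sets ZF iff the source is zero; the
 * helper presumably leaves the computed flags in cc_src, which is why
 * cc_op switches to CC_OP_EFLAGS here. */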
8267 s->cc_op = CC_OP_EFLAGS;
8268 break;
8269 case 0x10e ... 0x10f:
8270 /* 3DNow! instructions, ignore prefixes */
8271 s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
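/* fall through to the shared SSE/3DNow! dispatcher */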
8272 case 0x110 ... 0x117:
8273 case 0x128 ... 0x12f:
8274 case 0x138 ... 0x13a:
8275 case 0x150 ... 0x177:
8276 case 0x17c ... 0x17f:
8277 case 0x1c2:
8278 case 0x1c4 ... 0x1c6:
8279 case 0x1d0 ... 0x1fe:
8280 gen_sse(s, b, pc_start, rex_r);
8281 break;
8282 default:
8283 goto illegal_op;
8284 }
8285 /* release the bus lock taken for a LOCK-prefixed instruction */
8286 if (s->prefix & PREFIX_LOCK)
8287 tcg_gen_helper_0_0(helper_unlock);
8288 return s->pc;
8289 illegal_op:
8290 if (s->prefix & PREFIX_LOCK)
8291 tcg_gen_helper_0_0(helper_unlock);
8292 /* XXX: ensure that no lock was generated */
8293 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
8294 return s->pc;
8295}
8296
8297void optimize_flags_init(void)
8298{
8299#if TCG_TARGET_REG_BITS == 32
8300 assert(sizeof(CCTable) == (1 << 3));
8301#else
8302 assert(sizeof(CCTable) == (1 << 4));
8303#endif
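/* create the TCG globals through which all generated code reaches the
 * lazy condition-code state (cc_op/cc_src/cc_dst/cc_tmp) in CPUState */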
8304 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
8305 cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
8306 TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
8307 cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
8308 TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
8309 cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
8310 TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
8311 cpu_cc_tmp = tcg_global_mem_new(TCG_TYPE_TL,
8312 TCG_AREG0, offsetof(CPUState, cc_tmp), "cc_tmp");
8313
8314 /* register helpers: helper.h is included once more with DEF_HELPER redefined so each helper's entry point is registered with TCG by name */
8315
8316#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
8317#include "helper.h"
8318}
8319
8320/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
8321 basic block 'tb'. If search_pc is TRUE, also generate PC
8322 information for each intermediate instruction. */
8323#ifndef VBOX
8324static inline void gen_intermediate_code_internal(CPUState *env,
8325#else /* VBOX */
8326DECLINLINE(void) gen_intermediate_code_internal(CPUState *env,
8327#endif /* VBOX */
8328 TranslationBlock *tb,
8329 int search_pc)
8330{
8331 DisasContext dc1, *dc = &dc1;
8332 target_ulong pc_ptr;
8333 uint16_t *gen_opc_end;
8334 int j, lj, cflags;
8335 uint64_t flags;
8336 target_ulong pc_start;
8337 target_ulong cs_base;
8338 int num_insns;
8339 int max_insns;
8340
8341 /* generate intermediate code */
8342 pc_start = tb->pc;
8343 cs_base = tb->cs_base;
8344 flags = tb->flags;
8345 cflags = tb->cflags;
8346
8347 dc->pe = (flags >> HF_PE_SHIFT) & 1;
8348 dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
8349 dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
8350 dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
8351 dc->f_st = 0;
8352 dc->vm86 = (flags >> VM_SHIFT) & 1;
8353#ifdef VBOX
8354 dc->vme = !!(env->cr[4] & CR4_VME_MASK);
8355 dc->pvi = !!(env->cr[4] & CR4_PVI_MASK);
8356#ifdef VBOX_WITH_CALL_RECORD
8357 if ( !(env->state & CPU_RAW_RING0)
8358 && (env->cr[0] & CR0_PG_MASK)
8359 && !(env->eflags & X86_EFL_IF)
8360 && dc->code32)
8361 dc->record_call = 1;
8362 else
8363 dc->record_call = 0;
8364#endif
8365#endif
8366 dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
8367 dc->iopl = (flags >> IOPL_SHIFT) & 3;
8368 dc->tf = (flags >> TF_SHIFT) & 1;
8369 dc->singlestep_enabled = env->singlestep_enabled;
8370 dc->cc_op = CC_OP_DYNAMIC;
8371 dc->cs_base = cs_base;
8372 dc->tb = tb;
8373 dc->popl_esp_hack = 0;
8374 /* select memory access functions */
8375 dc->mem_index = 0;
8376 if (flags & HF_SOFTMMU_MASK) {
8377 if (dc->cpl == 3)
8378 dc->mem_index = 2 * 4;
8379 else
8380 dc->mem_index = 1 * 4;
8381 }
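/* mem_index is apparently the MMU index scaled by 4 so that an operand-size
 * constant (OT_BYTE..OT_QUAD) can simply be added to pick the access op:
 * 0 = raw (no softmmu), 4 = kernel mode, 8 = user mode. */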
8382 dc->cpuid_features = env->cpuid_features;
8383 dc->cpuid_ext_features = env->cpuid_ext_features;
8384 dc->cpuid_ext2_features = env->cpuid_ext2_features;
8385 dc->cpuid_ext3_features = env->cpuid_ext3_features;
8386#ifdef TARGET_X86_64
8387 dc->lma = (flags >> HF_LMA_SHIFT) & 1;
8388 dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
8389#endif
8390 dc->flags = flags;
8391 dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
8392 (flags & HF_INHIBIT_IRQ_MASK)
8393#ifndef CONFIG_SOFTMMU
8394 || (flags & HF_SOFTMMU_MASK)
8395#endif
8396 );
8397#if 0
8398 /* check addseg logic */
8399 if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
8400 printf("ERROR addseg\n");
8401#endif
8402
8403 cpu_T[0] = tcg_temp_new(TCG_TYPE_TL);
8404 cpu_T[1] = tcg_temp_new(TCG_TYPE_TL);
8405 cpu_A0 = tcg_temp_new(TCG_TYPE_TL);
8406 cpu_T3 = tcg_temp_new(TCG_TYPE_TL);
8407
8408 cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
8409 cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
8410 cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
8411 cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
8412 cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
8413 cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
8414 cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
8415 cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
8416 cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);
8417
8418 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
8419
8420 dc->is_jmp = DISAS_NEXT;
8421 pc_ptr = pc_start;
8422 lj = -1;
8423 num_insns = 0;
8424 max_insns = tb->cflags & CF_COUNT_MASK;
8425 if (max_insns == 0)
8426 max_insns = CF_COUNT_MASK;
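/* with icount active, CF_COUNT_MASK caps the number of guest insns per TB;
 * otherwise translate until one of the stop conditions below is hit */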
8427
8428 gen_icount_start();
8429 for(;;) {
8430 if (env->nb_breakpoints > 0) {
8431 for(j = 0; j < env->nb_breakpoints; j++) {
8432 if (env->breakpoints[j] == pc_ptr) {
8433 gen_debug(dc, pc_ptr - dc->cs_base);
8434 break;
8435 }
8436 }
8437 }
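/* when retranslating for cpu_restore_state, record the guest PC, cc_op and
 * insn index for every generated op so the faulting insn can be located */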
8438 if (search_pc) {
8439 j = gen_opc_ptr - gen_opc_buf;
8440 if (lj < j) {
8441 lj++;
8442 while (lj < j)
8443 gen_opc_instr_start[lj++] = 0;
8444 }
8445 gen_opc_pc[lj] = pc_ptr;
8446 gen_opc_cc_op[lj] = dc->cc_op;
8447 gen_opc_instr_start[lj] = 1;
8448 gen_opc_icount[lj] = num_insns;
8449 }
8450 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
8451 gen_io_start();
8452
8453 pc_ptr = disas_insn(dc, pc_ptr);
8454 num_insns++;
8455 /* stop translation if indicated */
8456 if (dc->is_jmp)
8457 break;
8458#ifdef VBOX
8459#ifdef DEBUG
8460/*
8461 if(cpu_check_code_raw(env, pc_ptr, env->hflags | (env->eflags & (IOPL_MASK | TF_MASK | VM_MASK))) == ERROR_SUCCESS)
8462 {
8463 //should never happen as the jump to the patch code terminates the translation block
8464 dprintf(("QEmu is about to execute instructions in our patch block at %08X!!\n", pc_ptr));
8465 }
8466*/
8467#endif
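/* VBox can request emulation of exactly one instruction; in that case
 * clear the request and end the TB right after the first insn */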
8468 if (env->state & CPU_EMULATE_SINGLE_INSTR)
8469 {
8470 env->state &= ~CPU_EMULATE_SINGLE_INSTR;
8471 gen_jmp_im(pc_ptr - dc->cs_base);
8472 gen_eob(dc);
8473 break;
8474 }
8475#endif /* VBOX */
8476
8477 /* in single-step mode, generate only one instruction and
8478 raise an exception */
8479 /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, clear
8480 the flag and abort the translation to give the irqs a
8481 chance to happen */
8482 if (dc->tf || dc->singlestep_enabled ||
8483 (flags & HF_INHIBIT_IRQ_MASK)) {
8484 gen_jmp_im(pc_ptr - dc->cs_base);
8485 gen_eob(dc);
8486 break;
8487 }
8488 /* stop generation if the translation gets too long */
8489 if (gen_opc_ptr >= gen_opc_end ||
8490 (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32) ||
8491 num_insns >= max_insns) {
8492 gen_jmp_im(pc_ptr - dc->cs_base);
8493 gen_eob(dc);
8494 break;
8495 }
8496 }
8497 if (tb->cflags & CF_LAST_IO)
8498 gen_io_end();
8499 gen_icount_end(tb, num_insns);
8500 *gen_opc_ptr = INDEX_op_end;
8501 /* make sure the remaining slots are filled with the final values */
8502 if (search_pc) {
8503 j = gen_opc_ptr - gen_opc_buf;
8504 lj++;
8505 while (lj <= j)
8506 gen_opc_instr_start[lj++] = 0;
8507 }
8508
8509#ifdef DEBUG_DISAS
8510 if (loglevel & CPU_LOG_TB_CPU) {
8511 cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
8512 }
8513 if (loglevel & CPU_LOG_TB_IN_ASM) {
8514 int disas_flags;
8515 fprintf(logfile, "----------------\n");
8516 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
8517#ifdef TARGET_X86_64
8518 if (dc->code64)
8519 disas_flags = 2;
8520 else
8521#endif
8522 disas_flags = !dc->code32;
8523 target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
8524 fprintf(logfile, "\n");
8525 }
8526#endif
8527
8528 if (!search_pc) {
8529 tb->size = pc_ptr - pc_start;
8530 tb->icount = num_insns;
8531 }
8532}
8533
8534void gen_intermediate_code(CPUState *env, TranslationBlock *tb)
8535{
8536 gen_intermediate_code_internal(env, tb, 0);
8537}
8538
8539void gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
8540{
8541 gen_intermediate_code_internal(env, tb, 1);
8542}
8543
8544void gen_pc_load(CPUState *env, TranslationBlock *tb,
8545 unsigned long searched_pc, int pc_pos, void *puc)
8546{
8547 int cc_op;
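/* called after a search_pc retranslation triggered by a fault: pc_pos
 * indexes the gen_opc_* tables, from which eip and (if static) cc_op
 * are restored */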
8548#ifdef DEBUG_DISAS
8549 if (loglevel & CPU_LOG_TB_OP) {
8550 int i;
8551 fprintf(logfile, "RESTORE:\n");
8552 for(i = 0;i <= pc_pos; i++) {
8553 if (gen_opc_instr_start[i]) {
8554 fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
8555 }
8556 }
8557 fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
8558 searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
8559 (uint32_t)tb->cs_base);
8560 }
8561#endif
8562 env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
8563 cc_op = gen_opc_cc_op[pc_pos];
8564 if (cc_op != CC_OP_DYNAMIC)
8565 env->cc_op = cc_op;
8566}