VirtualBox

source: vbox/trunk/src/recompiler_new/target-i386/translate.c @ 14522

Last change on this file since 14522 was 14425, checked in by vboxsync, 16 years ago

ported r39642 - Sander, please tell me to port such changes

  • Property svn:eol-style set to native
File size: 275.2 KB
1/*
2 * i386 translation
3 *
4 * Copyright (c) 2003 Fabrice Bellard
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
20
21/*
22 * Sun LGPL Disclaimer: For the avoidance of doubt, except that if any license choice
23 * other than GPL or LGPL is available it will apply instead, Sun elects to use only
24 * the Lesser General Public License version 2.1 (LGPLv2) at this time for any software where
25 * a choice of LGPL license versions is made available with the language indicating
26 * that LGPLv2 or any later version may be used, or where a choice of which version
27 * of the LGPL is applied is otherwise unspecified.
28 */
29#include <stdarg.h>
30#include <stdlib.h>
31#include <stdio.h>
32#include <string.h>
33#ifndef VBOX
34#include <inttypes.h>
35#include <signal.h>
36#include <assert.h>
37#endif /* !VBOX */
38
39#include "cpu.h"
40#include "exec-all.h"
41#include "disas.h"
42#include "helper.h"
43#include "tcg-op.h"
44
45#define PREFIX_REPZ 0x01
46#define PREFIX_REPNZ 0x02
47#define PREFIX_LOCK 0x04
48#define PREFIX_DATA 0x08
49#define PREFIX_ADR 0x10
50
51#ifdef TARGET_X86_64
52#define X86_64_ONLY(x) x
53#ifndef VBOX
54#define X86_64_DEF(x...) x
55#else
56#define X86_64_DEF(x...) x
57#endif
58#define CODE64(s) ((s)->code64)
59#define REX_X(s) ((s)->rex_x)
60#define REX_B(s) ((s)->rex_b)
61/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
62#if 1
63#define BUGGY_64(x) NULL
64#endif
65#else
66#define X86_64_ONLY(x) NULL
67#ifndef VBOX
68#define X86_64_DEF(x...)
69#else
70#define X86_64_DEF(x)
71#endif
72#define CODE64(s) 0
73#define REX_X(s) 0
74#define REX_B(s) 0
75#endif
76
77//#define MACRO_TEST 1
78
79/* global register indexes */
80static TCGv cpu_env, cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp;
81/* local temps */
82static TCGv cpu_T[2], cpu_T3;
83/* local register indexes (only used inside old micro ops) */
84static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
85static TCGv cpu_tmp5, cpu_tmp6;
86
87#include "gen-icount.h"
88
89#ifdef TARGET_X86_64
90static int x86_64_hregs;
91#endif
92
93#ifdef VBOX
94
95/* Special/override code readers to hide patched code. */
96
97uint8_t ldub_code_raw(target_ulong pc)
98{
99 uint8_t b;
100
101 if (!remR3GetOpcode(cpu_single_env, pc, &b))
102 b = ldub_code(pc);
103 return b;
104}
105#define ldub_code(a) ldub_code_raw(a)
106
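/* The 16-bit and 32-bit readers below are deliberately composed from single
   ldub_code() calls so that every instruction byte passes through the
   patch-aware reader above; the bytes are assembled in x86 little-endian
   order. */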
107uint16_t lduw_code_raw(target_ulong pc)
108{
109 return (ldub_code(pc+1) << 8) | ldub_code(pc);
110}
111#define lduw_code(a) lduw_code_raw(a)
112
113
114uint32_t ldl_code_raw(target_ulong pc)
115{
116 return (ldub_code(pc+3) << 24) | (ldub_code(pc+2) << 16) | (ldub_code(pc+1) << 8) | ldub_code(pc);
117}
118#define ldl_code(a) ldl_code_raw(a)
119
120#endif /* VBOX */
121
122
123typedef struct DisasContext {
124 /* current insn context */
125 int override; /* -1 if no override */
126 int prefix;
127 int aflag, dflag;
128 target_ulong pc; /* pc = eip + cs_base */
129 int is_jmp; /* 1 means jump (stop translation), 2 means CPU
130 static state change (stop translation) */
131 /* current block context */
132 target_ulong cs_base; /* base of CS segment */
133 int pe; /* protected mode */
134 int code32; /* 32 bit code segment */
135#ifdef TARGET_X86_64
136 int lma; /* long mode active */
137 int code64; /* 64 bit code segment */
138 int rex_x, rex_b;
139#endif
140 int ss32; /* 32 bit stack segment */
141 int cc_op; /* current CC operation */
142 int addseg; /* non-zero if any of DS/ES/SS has a non-zero base */
143 int f_st; /* currently unused */
144 int vm86; /* vm86 mode */
145#ifdef VBOX
146 int vme; /* CR4.VME */
147 int record_call; /* record calls for CSAM or not? */
148#endif
149 int cpl;
150 int iopl;
151 int tf; /* TF cpu flag */
152 int singlestep_enabled; /* "hardware" single step enabled */
153 int jmp_opt; /* use direct block chaining for direct jumps */
154 int mem_index; /* select memory access functions */
155 uint64_t flags; /* all execution flags */
156 struct TranslationBlock *tb;
157 int popl_esp_hack; /* for correct popl with esp base handling */
158 int rip_offset; /* only used in x86_64, but left for simplicity */
159 int cpuid_features;
160 int cpuid_ext_features;
161 int cpuid_ext2_features;
162 int cpuid_ext3_features;
163} DisasContext;
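/* One DisasContext carries all decoder state for a single translation
   block: the per-instruction fields are refreshed for every insn, while
   the block-level fields are fixed when translation of the TB starts. */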
164
165static void gen_eob(DisasContext *s);
166static void gen_jmp(DisasContext *s, target_ulong eip);
167static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
168
169#ifdef VBOX
170static void gen_check_external_event(void);
171#endif
172
173/* i386 arith/logic operations */
174enum {
175 OP_ADDL,
176 OP_ORL,
177 OP_ADCL,
178 OP_SBBL,
179 OP_ANDL,
180 OP_SUBL,
181 OP_XORL,
182 OP_CMPL,
183};
184
185/* i386 shift ops */
186enum {
187 OP_ROL,
188 OP_ROR,
189 OP_RCL,
190 OP_RCR,
191 OP_SHL,
192 OP_SHR,
193 OP_SHL1, /* undocumented */
194 OP_SAR = 7,
195};
196
197enum {
198 JCC_O,
199 JCC_B,
200 JCC_Z,
201 JCC_BE,
202 JCC_S,
203 JCC_P,
204 JCC_L,
205 JCC_LE,
206};
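/* Condition codes in x86 encoding order: for a Jcc/SETcc opcode byte 'b',
   the condition is (b >> 1) & 7 and bit 0 inverts it (see gen_jcc1 and
   gen_setcc_slow_T0). */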
207
208/* operand size */
209enum {
210 OT_BYTE = 0,
211 OT_WORD,
212 OT_LONG,
213 OT_QUAD,
214};
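/* Operand sizes double at each step, so the data width is simply 8 << ot
   (see data_bits in the shift/rotate generators) and 'ot' can directly
   index the per-size helper tables. */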
215
216enum {
217 /* I386 int registers */
218 OR_EAX, /* MUST be even numbered */
219 OR_ECX,
220 OR_EDX,
221 OR_EBX,
222 OR_ESP,
223 OR_EBP,
224 OR_ESI,
225 OR_EDI,
226
227 OR_TMP0 = 16, /* temporary operand register */
228 OR_TMP1,
229 OR_A0, /* temporary register used when doing address evaluation */
230};
231
232#ifndef VBOX
233static inline void gen_op_movl_T0_0(void)
234#else /* VBOX */
235DECLINLINE(void) gen_op_movl_T0_0(void)
236#endif /* VBOX */
237{
238 tcg_gen_movi_tl(cpu_T[0], 0);
239}
240
241#ifndef VBOX
242static inline void gen_op_movl_T0_im(int32_t val)
243#else /* VBOX */
244DECLINLINE(void) gen_op_movl_T0_im(int32_t val)
245#endif /* VBOX */
246{
247 tcg_gen_movi_tl(cpu_T[0], val);
248}
249
250#ifndef VBOX
251static inline void gen_op_movl_T0_imu(uint32_t val)
252#else /* VBOX */
253DECLINLINE(void) gen_op_movl_T0_imu(uint32_t val)
254#endif /* VBOX */
255{
256 tcg_gen_movi_tl(cpu_T[0], val);
257}
258
259#ifndef VBOX
260static inline void gen_op_movl_T1_im(int32_t val)
261#else /* VBOX */
262DECLINLINE(void) gen_op_movl_T1_im(int32_t val)
263#endif /* VBOX */
264{
265 tcg_gen_movi_tl(cpu_T[1], val);
266}
267
268#ifndef VBOX
269static inline void gen_op_movl_T1_imu(uint32_t val)
270#else /* VBOX */
271DECLINLINE(void) gen_op_movl_T1_imu(uint32_t val)
272#endif /* VBOX */
273{
274 tcg_gen_movi_tl(cpu_T[1], val);
275}
276
277#ifndef VBOX
278static inline void gen_op_movl_A0_im(uint32_t val)
279#else /* VBOX */
280DECLINLINE(void) gen_op_movl_A0_im(uint32_t val)
281#endif /* VBOX */
282{
283 tcg_gen_movi_tl(cpu_A0, val);
284}
285
286#ifdef TARGET_X86_64
287#ifndef VBOX
288static inline void gen_op_movq_A0_im(int64_t val)
289#else /* VBOX */
290DECLINLINE(void) gen_op_movq_A0_im(int64_t val)
291#endif /* VBOX */
292{
293 tcg_gen_movi_tl(cpu_A0, val);
294}
295#endif
296
297#ifndef VBOX
298static inline void gen_movtl_T0_im(target_ulong val)
299#else /* VBOX */
300DECLINLINE(void) gen_movtl_T0_im(target_ulong val)
301#endif /* VBOX */
302{
303 tcg_gen_movi_tl(cpu_T[0], val);
304}
305
306#ifndef VBOX
307static inline void gen_movtl_T1_im(target_ulong val)
308#else /* VBOX */
309DECLINLINE(void) gen_movtl_T1_im(target_ulong val)
310#endif /* VBOX */
311{
312 tcg_gen_movi_tl(cpu_T[1], val);
313}
314
315#ifndef VBOX
316static inline void gen_op_andl_T0_ffff(void)
317#else /* VBOX */
318DECLINLINE(void) gen_op_andl_T0_ffff(void)
319#endif /* VBOX */
320{
321 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
322}
323
324#ifndef VBOX
325static inline void gen_op_andl_T0_im(uint32_t val)
326#else /* VBOX */
327DECLINLINE(void) gen_op_andl_T0_im(uint32_t val)
328#endif /* VBOX */
329{
330 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
331}
332
333#ifndef VBOX
334static inline void gen_op_movl_T0_T1(void)
335#else /* VBOX */
336DECLINLINE(void) gen_op_movl_T0_T1(void)
337#endif /* VBOX */
338{
339 tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
340}
341
342#ifndef VBOX
343static inline void gen_op_andl_A0_ffff(void)
344#else /* VBOX */
345DECLINLINE(void) gen_op_andl_A0_ffff(void)
346#endif /* VBOX */
347{
348 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
349}
350
351#ifdef TARGET_X86_64
352
353#define NB_OP_SIZES 4
354
355#else /* !TARGET_X86_64 */
356
357#define NB_OP_SIZES 3
358
359#endif /* !TARGET_X86_64 */
360
361#if defined(WORDS_BIGENDIAN)
362#define REG_B_OFFSET (sizeof(target_ulong) - 1)
363#define REG_H_OFFSET (sizeof(target_ulong) - 2)
364#define REG_W_OFFSET (sizeof(target_ulong) - 2)
365#define REG_L_OFFSET (sizeof(target_ulong) - 4)
366#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
367#else
368#define REG_B_OFFSET 0
369#define REG_H_OFFSET 1
370#define REG_W_OFFSET 0
371#define REG_L_OFFSET 0
372#define REG_LH_OFFSET 4
373#endif
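/* The REG_*_OFFSET constants locate the byte/word/dword lanes of a guest
   register within its target_ulong slot in CPUState: AL is read at
   REG_B_OFFSET, AH at REG_H_OFFSET, and so on.  On big-endian hosts these
   lanes sit at the high end of the word, hence the two variants. */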
374
375#ifndef VBOX
376static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
377#else /* VBOX */
378DECLINLINE(void) gen_op_mov_reg_v(int ot, int reg, TCGv t0)
379#endif /* VBOX */
380{
381 switch(ot) {
382 case OT_BYTE:
383 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
384 tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
385 } else {
386 tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
387 }
388 break;
389 case OT_WORD:
390 tcg_gen_st16_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
391 break;
392#ifdef TARGET_X86_64
393 case OT_LONG:
394 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
395 /* high part of register set to zero */
396 tcg_gen_movi_tl(cpu_tmp0, 0);
397 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
398 break;
399 default:
400 case OT_QUAD:
401 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
402 break;
403#else
404 default:
405 case OT_LONG:
406 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
407 break;
408#endif
409 }
410}
411
412#ifndef VBOX
413static inline void gen_op_mov_reg_T0(int ot, int reg)
414#else /* VBOX */
415DECLINLINE(void) gen_op_mov_reg_T0(int ot, int reg)
416#endif /* VBOX */
417{
418 gen_op_mov_reg_v(ot, reg, cpu_T[0]);
419}
420
421#ifndef VBOX
422static inline void gen_op_mov_reg_T1(int ot, int reg)
423#else /* VBOX */
424DECLINLINE(void) gen_op_mov_reg_T1(int ot, int reg)
425#endif /* VBOX */
426{
427 gen_op_mov_reg_v(ot, reg, cpu_T[1]);
428}
429
430#ifndef VBOX
431static inline void gen_op_mov_reg_A0(int size, int reg)
432#else /* VBOX */
433DECLINLINE(void) gen_op_mov_reg_A0(int size, int reg)
434#endif /* VBOX */
435{
436 switch(size) {
437 case 0:
438 tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
439 break;
440#ifdef TARGET_X86_64
441 case 1:
442 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
443 /* high part of register set to zero */
444 tcg_gen_movi_tl(cpu_tmp0, 0);
445 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
446 break;
447 default:
448 case 2:
449 tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
450 break;
451#else
452 default:
453 case 1:
454 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
455 break;
456#endif
457 }
458}
459
460#ifndef VBOX
461static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg)
462#else /* VBOX */
463DECLINLINE(void) gen_op_mov_v_reg(int ot, TCGv t0, int reg)
464#endif /* VBOX */
465{
466 switch(ot) {
467 case OT_BYTE:
468 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
469 goto std_case;
470 } else {
471 tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
472 }
473 break;
474 default:
475 std_case:
476 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
477 break;
478 }
479}
480
481#ifndef VBOX
482static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
483#else /* VBOX */
484DECLINLINE(void) gen_op_mov_TN_reg(int ot, int t_index, int reg)
485#endif /* VBOX */
486{
487 gen_op_mov_v_reg(ot, cpu_T[t_index], reg);
488}
489
490#ifndef VBOX
491static inline void gen_op_movl_A0_reg(int reg)
492#else /* VBOX */
493DECLINLINE(void) gen_op_movl_A0_reg(int reg)
494#endif /* VBOX */
495{
496 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
497}
498
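/* Add an immediate to the A0 address register; in TARGET_X86_64 builds the
   result is masked to 32 bits, since 32-bit address arithmetic must wrap
   modulo 2^32 (the 64-bit variant below adds without masking). */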
499#ifndef VBOX
500static inline void gen_op_addl_A0_im(int32_t val)
501#else /* VBOX */
502DECLINLINE(void) gen_op_addl_A0_im(int32_t val)
503#endif /* VBOX */
504{
505 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
506#ifdef TARGET_X86_64
507 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
508#endif
509}
510
511#ifdef TARGET_X86_64
512#ifndef VBOX
513static inline void gen_op_addq_A0_im(int64_t val)
514#else /* VBOX */
515DECLINLINE(void) gen_op_addq_A0_im(int64_t val)
516#endif /* VBOX */
517{
518 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
519}
520#endif
521
522static void gen_add_A0_im(DisasContext *s, int val)
523{
524#ifdef TARGET_X86_64
525 if (CODE64(s))
526 gen_op_addq_A0_im(val);
527 else
528#endif
529 gen_op_addl_A0_im(val);
530}
531
532#ifndef VBOX
533static inline void gen_op_addl_T0_T1(void)
534#else /* VBOX */
535DECLINLINE(void) gen_op_addl_T0_T1(void)
536#endif /* VBOX */
537{
538 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
539}
540
541#ifndef VBOX
542static inline void gen_op_jmp_T0(void)
543#else /* VBOX */
544DECLINLINE(void) gen_op_jmp_T0(void)
545#endif /* VBOX */
546{
547 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
548}
549
550#ifndef VBOX
551static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
552#else /* VBOX */
553DECLINLINE(void) gen_op_add_reg_im(int size, int reg, int32_t val)
554#endif /* VBOX */
555{
556 switch(size) {
557 case 0:
558 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
559 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
560 tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
561 break;
562 case 1:
563 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
564 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
565#ifdef TARGET_X86_64
566 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
567#endif
568 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
569 break;
570#ifdef TARGET_X86_64
571 case 2:
572 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
573 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
574 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
575 break;
576#endif
577 }
578}
579
580#ifndef VBOX
581static inline void gen_op_add_reg_T0(int size, int reg)
582#else /* VBOX */
583DECLINLINE(void) gen_op_add_reg_T0(int size, int reg)
584#endif /* VBOX */
585{
586 switch(size) {
587 case 0:
588 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
589 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
590 tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
591 break;
592 case 1:
593 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
594 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
595#ifdef TARGET_X86_64
596 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
597#endif
598 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
599 break;
600#ifdef TARGET_X86_64
601 case 2:
602 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
603 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
604 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
605 break;
606#endif
607 }
608}
609
610#ifndef VBOX
611static inline void gen_op_set_cc_op(int32_t val)
612#else /* VBOX */
613DECLINLINE(void) gen_op_set_cc_op(int32_t val)
614#endif /* VBOX */
615{
616 tcg_gen_movi_i32(cpu_cc_op, val);
617}
618
619#ifndef VBOX
620static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
621#else /* VBOX */
622DECLINLINE(void) gen_op_addl_A0_reg_sN(int shift, int reg)
623#endif /* VBOX */
624{
625 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
626 if (shift != 0)
627 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
628 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
629#ifdef TARGET_X86_64
630 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
631#endif
632}
633#ifdef VBOX
634DECLINLINE(void) gen_op_seg_check(int reg, bool keepA0)
635{
636 /* It seems segments don't get out of sync - if they in fact do, enable the code below. */
637#if 0
638 /* Our segments could be outdated; check the newselector field to see if an update is really needed */
639 int skip_label;
640 TCGv t0, a0;
641
642 /* For segments other than GS this check is a waste of time, and TCG cannot cope with this code
643 for data/stack segments, as it expects cpu_T[0] to stay alive */
644 if (reg != R_GS)
645 return;
646
647 if (keepA0)
648 {
649 /* we need to store old cpu_A0 */
650 a0 = tcg_temp_local_new(TCG_TYPE_TL);
651 tcg_gen_mov_tl(a0, cpu_A0);
652 }
653
654 skip_label = gen_new_label();
655 t0 = tcg_temp_local_new(TCG_TYPE_TL);
656
657 tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, segs[reg].newselector) + REG_L_OFFSET);
658 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, skip_label);
659 tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, eflags) + REG_L_OFFSET);
660 tcg_gen_andi_tl(t0, t0, VM_MASK);
661 tcg_gen_brcondi_i32(TCG_COND_NE, t0, 0, skip_label);
662 tcg_gen_movi_tl(t0, reg);
663
664 tcg_gen_helper_0_1(helper_sync_seg, t0);
665
666 tcg_temp_free(t0);
667
668 gen_set_label(skip_label);
669 if (keepA0)
670 {
671 tcg_gen_mov_tl(cpu_A0, a0);
672 tcg_temp_free(a0);
673 }
674#endif /* 0 */
675}
676#endif
677
678#ifndef VBOX
679static inline void gen_op_movl_A0_seg(int reg)
680#else /* VBOX */
681DECLINLINE(void) gen_op_movl_A0_seg(int reg)
682#endif /* VBOX */
683{
684#ifdef VBOX
685 gen_op_seg_check(reg, false);
686#endif
687 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
688}
689
690#ifndef VBOX
691static inline void gen_op_addl_A0_seg(int reg)
692#else /* VBOX */
693DECLINLINE(void) gen_op_addl_A0_seg(int reg)
694#endif /* VBOX */
695{
696#ifdef VBOX
697 gen_op_seg_check(reg, true);
698#endif
699 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
700 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
701#ifdef TARGET_X86_64
702 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
703#endif
704}
705
706#ifdef TARGET_X86_64
707#ifndef VBOX
708static inline void gen_op_movq_A0_seg(int reg)
709#else /* VBOX */
710DECLINLINE(void) gen_op_movq_A0_seg(int reg)
711#endif /* VBOX */
712{
713#ifdef VBOX
714 gen_op_seg_check(reg, false);
715#endif
716 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
717}
718
719#ifndef VBOX
720static inline void gen_op_addq_A0_seg(int reg)
721#else /* VBOX */
722DECLINLINE(void) gen_op_addq_A0_seg(int reg)
723#endif /* VBOX */
724{
725#ifdef VBOX
726 gen_op_seg_check(reg, true);
727#endif
728 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
729 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
730}
731
732#ifndef VBOX
733static inline void gen_op_movq_A0_reg(int reg)
734#else /* VBOX */
735DECLINLINE(void) gen_op_movq_A0_reg(int reg)
736#endif /* VBOX */
737{
738 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
739}
740
741#ifndef VBOX
742static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
743#else /* VBOX */
744DECLINLINE(void) gen_op_addq_A0_reg_sN(int shift, int reg)
745#endif /* VBOX */
746{
747 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
748 if (shift != 0)
749 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
750 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
751}
752#endif
753
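/* For the load/store generators below, 'idx' packs two values: the low two
   bits are the operand size (OT_BYTE..OT_QUAD) and the remaining bits are
   mem_index + 1, selecting the softmmu access functions; callers therefore
   pass 'ot + s->mem_index'. */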
754#ifndef VBOX
755static inline void gen_op_lds_T0_A0(int idx)
756#else /* VBOX */
757DECLINLINE(void) gen_op_lds_T0_A0(int idx)
758#endif /* VBOX */
759{
760 int mem_index = (idx >> 2) - 1;
761 switch(idx & 3) {
762 case 0:
763 tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
764 break;
765 case 1:
766 tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
767 break;
768 default:
769 case 2:
770 tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
771 break;
772 }
773}
774
775#ifndef VBOX
776static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0)
777#else /* VBOX */
778DECLINLINE(void) gen_op_ld_v(int idx, TCGv t0, TCGv a0)
779#endif /* VBOX */
780{
781 int mem_index = (idx >> 2) - 1;
782 switch(idx & 3) {
783 case 0:
784 tcg_gen_qemu_ld8u(t0, a0, mem_index);
785 break;
786 case 1:
787 tcg_gen_qemu_ld16u(t0, a0, mem_index);
788 break;
789 case 2:
790 tcg_gen_qemu_ld32u(t0, a0, mem_index);
791 break;
792 default:
793 case 3:
794 tcg_gen_qemu_ld64(t0, a0, mem_index);
795 break;
796 }
797}
798
799/* XXX: always use ldu or lds */
800#ifndef VBOX
801static inline void gen_op_ld_T0_A0(int idx)
802#else /* VBOX */
803DECLINLINE(void) gen_op_ld_T0_A0(int idx)
804#endif /* VBOX */
805{
806 gen_op_ld_v(idx, cpu_T[0], cpu_A0);
807}
808
809#ifndef VBOX
810static inline void gen_op_ldu_T0_A0(int idx)
811#else /* VBOX */
812DECLINLINE(void) gen_op_ldu_T0_A0(int idx)
813#endif /* VBOX */
814{
815 gen_op_ld_v(idx, cpu_T[0], cpu_A0);
816}
817
818#ifndef VBOX
819static inline void gen_op_ld_T1_A0(int idx)
820#else /* VBOX */
821DECLINLINE(void) gen_op_ld_T1_A0(int idx)
822#endif /* VBOX */
823{
824 gen_op_ld_v(idx, cpu_T[1], cpu_A0);
825}
826
827#ifndef VBOX
828static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0)
829#else /* VBOX */
830DECLINLINE(void) gen_op_st_v(int idx, TCGv t0, TCGv a0)
831#endif /* VBOX */
832{
833 int mem_index = (idx >> 2) - 1;
834 switch(idx & 3) {
835 case 0:
836 tcg_gen_qemu_st8(t0, a0, mem_index);
837 break;
838 case 1:
839 tcg_gen_qemu_st16(t0, a0, mem_index);
840 break;
841 case 2:
842 tcg_gen_qemu_st32(t0, a0, mem_index);
843 break;
844 default:
845 case 3:
846 tcg_gen_qemu_st64(t0, a0, mem_index);
847 break;
848 }
849}
850
851#ifndef VBOX
852static inline void gen_op_st_T0_A0(int idx)
853#else /* VBOX */
854DECLINLINE(void) gen_op_st_T0_A0(int idx)
855#endif /* VBOX */
856{
857 gen_op_st_v(idx, cpu_T[0], cpu_A0);
858}
859
860#ifndef VBOX
861static inline void gen_op_st_T1_A0(int idx)
862#else /* VBOX */
863DECLINLINE(void) gen_op_st_T1_A0(int idx)
864#endif /* VBOX */
865{
866 gen_op_st_v(idx, cpu_T[1], cpu_A0);
867}
868
869#ifndef VBOX
870static inline void gen_jmp_im(target_ulong pc)
871#else /* VBOX */
872DECLINLINE(void) gen_jmp_im(target_ulong pc)
873#endif /* VBOX */
874{
875 tcg_gen_movi_tl(cpu_tmp0, pc);
876 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
877}
878
879#ifdef VBOX
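/* Emit an inline test of env->interrupt_request against the external-event
   bits so that the (comparatively expensive) helper_check_external_event()
   call is only made when one of those bits is actually pending. */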
880static void gen_check_external_event(void)
881{
882 int skip_label;
883 TCGv t0;
884
885 skip_label = gen_new_label();
886 t0 = tcg_temp_local_new(TCG_TYPE_TL);
887 /* t0 = cpu_tmp0; */
888
889 tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, interrupt_request));
890 /* Keep in sync with helper_check_external_event() */
891 tcg_gen_andi_tl(t0, t0,
892 CPU_INTERRUPT_EXTERNAL_EXIT
893 | CPU_INTERRUPT_EXTERNAL_TIMER
894 | CPU_INTERRUPT_EXTERNAL_DMA
895 | CPU_INTERRUPT_EXTERNAL_HARD);
896 /** @todo: predict branch as taken */
897 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, skip_label);
898 tcg_temp_free(t0);
899
900 tcg_gen_helper_0_0(helper_check_external_event);
901
902 gen_set_label(skip_label);
903}
904
905#ifndef VBOX
906static inline void gen_update_eip(target_ulong pc)
907#else /* VBOX */
908DECLINLINE(void) gen_update_eip(target_ulong pc)
909#endif /* VBOX */
910{
911 gen_jmp_im(pc);
912#if defined (VBOX) && defined(VBOX_DUMP_STATE)
913 tcg_gen_helper_0_0(helper_dump_state);
914#endif
915}
916#endif
917
918#ifndef VBOX
919static inline void gen_string_movl_A0_ESI(DisasContext *s)
920#else /* VBOX */
921DECLINLINE(void) gen_string_movl_A0_ESI(DisasContext *s)
922#endif /* VBOX */
923{
924 int override;
925
926 override = s->override;
927#ifdef TARGET_X86_64
928 if (s->aflag == 2) {
929 if (override >= 0) {
930 gen_op_movq_A0_seg(override);
931 gen_op_addq_A0_reg_sN(0, R_ESI);
932 } else {
933 gen_op_movq_A0_reg(R_ESI);
934 }
935 } else
936#endif
937 if (s->aflag) {
938 /* 32 bit address */
939 if (s->addseg && override < 0)
940 override = R_DS;
941 if (override >= 0) {
942 gen_op_movl_A0_seg(override);
943 gen_op_addl_A0_reg_sN(0, R_ESI);
944 } else {
945 gen_op_movl_A0_reg(R_ESI);
946 }
947 } else {
948 /* 16 bit address, always override */
949 if (override < 0)
950 override = R_DS;
951 gen_op_movl_A0_reg(R_ESI);
952 gen_op_andl_A0_ffff();
953 gen_op_addl_A0_seg(override);
954 }
955}
956
957#ifndef VBOX
958static inline void gen_string_movl_A0_EDI(DisasContext *s)
959#else /* VBOX */
960DECLINLINE(void) gen_string_movl_A0_EDI(DisasContext *s)
961#endif /* VBOX */
962{
963#ifdef TARGET_X86_64
964 if (s->aflag == 2) {
965 gen_op_movq_A0_reg(R_EDI);
966 } else
967#endif
968 if (s->aflag) {
969 if (s->addseg) {
970 gen_op_movl_A0_seg(R_ES);
971 gen_op_addl_A0_reg_sN(0, R_EDI);
972 } else {
973 gen_op_movl_A0_reg(R_EDI);
974 }
975 } else {
976 gen_op_movl_A0_reg(R_EDI);
977 gen_op_andl_A0_ffff();
978 gen_op_addl_A0_seg(R_ES);
979 }
980}
981
982#ifndef VBOX
983static inline void gen_op_movl_T0_Dshift(int ot)
984#else /* VBOX */
985DECLINLINE(void) gen_op_movl_T0_Dshift(int ot)
986#endif /* VBOX */
987{
988 tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
989 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
990}
991
992static void gen_extu(int ot, TCGv reg)
993{
994 switch(ot) {
995 case OT_BYTE:
996 tcg_gen_ext8u_tl(reg, reg);
997 break;
998 case OT_WORD:
999 tcg_gen_ext16u_tl(reg, reg);
1000 break;
1001 case OT_LONG:
1002 tcg_gen_ext32u_tl(reg, reg);
1003 break;
1004 default:
1005 break;
1006 }
1007}
1008
1009static void gen_exts(int ot, TCGv reg)
1010{
1011 switch(ot) {
1012 case OT_BYTE:
1013 tcg_gen_ext8s_tl(reg, reg);
1014 break;
1015 case OT_WORD:
1016 tcg_gen_ext16s_tl(reg, reg);
1017 break;
1018 case OT_LONG:
1019 tcg_gen_ext32s_tl(reg, reg);
1020 break;
1021 default:
1022 break;
1023 }
1024}
1025
1026#ifndef VBOX
1027static inline void gen_op_jnz_ecx(int size, int label1)
1028#else /* VBOX */
1029DECLINLINE(void) gen_op_jnz_ecx(int size, int label1)
1030#endif /* VBOX */
1031{
1032 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
1033 gen_extu(size + 1, cpu_tmp0);
1034 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
1035}
1036
1037#ifndef VBOX
1038static inline void gen_op_jz_ecx(int size, int label1)
1039#else /* VBOX */
1040DECLINLINE(void) gen_op_jz_ecx(int size, int label1)
1041#endif /* VBOX */
1042{
1043 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
1044 gen_extu(size + 1, cpu_tmp0);
1045 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
1046}
1047
1048static void *helper_in_func[3] = {
1049 helper_inb,
1050 helper_inw,
1051 helper_inl,
1052};
1053
1054static void *helper_out_func[3] = {
1055 helper_outb,
1056 helper_outw,
1057 helper_outl,
1058};
1059
1060static void *gen_check_io_func[3] = {
1061 helper_check_iob,
1062 helper_check_iow,
1063 helper_check_iol,
1064};
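/* All three tables above are indexed by operand size (OT_BYTE, OT_WORD,
   OT_LONG): gen_check_io() picks the matching I/O permission-check helper
   the same way gen_ins()/gen_outs() pick the port access helper. */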
1065
1066static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
1067 uint32_t svm_flags)
1068{
1069 int state_saved;
1070 target_ulong next_eip;
1071
1072 state_saved = 0;
1073 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
1074 if (s->cc_op != CC_OP_DYNAMIC)
1075 gen_op_set_cc_op(s->cc_op);
1076 gen_jmp_im(cur_eip);
1077 state_saved = 1;
1078 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
1079 tcg_gen_helper_0_1(gen_check_io_func[ot],
1080 cpu_tmp2_i32);
1081 }
1082 if(s->flags & HF_SVMI_MASK) {
1083 if (!state_saved) {
1084 if (s->cc_op != CC_OP_DYNAMIC)
1085 gen_op_set_cc_op(s->cc_op);
1086 gen_jmp_im(cur_eip);
1087 state_saved = 1;
1088 }
1089 svm_flags |= (1 << (4 + ot));
1090 next_eip = s->pc - s->cs_base;
1091 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
1092 tcg_gen_helper_0_3(helper_svm_check_io,
1093 cpu_tmp2_i32,
1094 tcg_const_i32(svm_flags),
1095 tcg_const_i32(next_eip - cur_eip));
1096 }
1097}
1098
1099#ifndef VBOX
1100static inline void gen_movs(DisasContext *s, int ot)
1101#else /* VBOX */
1102DECLINLINE(void) gen_movs(DisasContext *s, int ot)
1103#endif /* VBOX */
1104{
1105 gen_string_movl_A0_ESI(s);
1106 gen_op_ld_T0_A0(ot + s->mem_index);
1107 gen_string_movl_A0_EDI(s);
1108 gen_op_st_T0_A0(ot + s->mem_index);
1109 gen_op_movl_T0_Dshift(ot);
1110 gen_op_add_reg_T0(s->aflag, R_ESI);
1111 gen_op_add_reg_T0(s->aflag, R_EDI);
1112}
1113
1114#ifndef VBOX
1115static inline void gen_update_cc_op(DisasContext *s)
1116#else /* VBOX */
1117DECLINLINE(void) gen_update_cc_op(DisasContext *s)
1118#endif /* VBOX */
1119{
1120 if (s->cc_op != CC_OP_DYNAMIC) {
1121 gen_op_set_cc_op(s->cc_op);
1122 s->cc_op = CC_OP_DYNAMIC;
1123 }
1124}
1125
1126static void gen_op_update1_cc(void)
1127{
1128 tcg_gen_discard_tl(cpu_cc_src);
1129 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1130}
1131
1132static void gen_op_update2_cc(void)
1133{
1134 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1135 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1136}
1137
1138#ifndef VBOX
1139static inline void gen_op_cmpl_T0_T1_cc(void)
1140#else /* VBOX */
1141DECLINLINE(void) gen_op_cmpl_T0_T1_cc(void)
1142#endif /* VBOX */
1143{
1144 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1145 tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
1146}
1147
1148#ifndef VBOX
1149static inline void gen_op_testl_T0_T1_cc(void)
1150#else /* VBOX */
1151DECLINLINE(void) gen_op_testl_T0_T1_cc(void)
1152#endif /* VBOX */
1153{
1154 tcg_gen_discard_tl(cpu_cc_src);
1155 tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
1156}
1157
1158static void gen_op_update_neg_cc(void)
1159{
1160 tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
1161 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1162}
1163
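/* Lazy condition codes: cpu_cc_op records the kind of the last flag-setting
   operation and indexes cc_table[], whose entries hold two helper pointers
   (compute_all and compute_c) that reconstruct EFLAGS from cc_src/cc_dst.
   The shift by 3 (32-bit host) resp. 4 (64-bit host) scales the index by
   sizeof(CCTable) before the indirect call. */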
1164/* compute eflags.C to reg */
1165static void gen_compute_eflags_c(TCGv reg)
1166{
1167#if TCG_TARGET_REG_BITS == 32
1168 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
1169 tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
1170 (long)cc_table + offsetof(CCTable, compute_c));
1171 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
1172 tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
1173 1, &cpu_tmp2_i32, 0, NULL);
1174#else
1175 tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
1176 tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
1177 tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
1178 (long)cc_table + offsetof(CCTable, compute_c));
1179 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
1180 tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
1181 1, &cpu_tmp2_i32, 0, NULL);
1182#endif
1183 tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
1184}
1185
1186/* compute all eflags to cc_src */
1187static void gen_compute_eflags(TCGv reg)
1188{
1189#if TCG_TARGET_REG_BITS == 32
1190 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
1191 tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
1192 (long)cc_table + offsetof(CCTable, compute_all));
1193 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
1194 tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
1195 1, &cpu_tmp2_i32, 0, NULL);
1196#else
1197 tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
1198 tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
1199 tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
1200 (long)cc_table + offsetof(CCTable, compute_all));
1201 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
1202 tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
1203 1, &cpu_tmp2_i32, 0, NULL);
1204#endif
1205 tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
1206}
1207
1208#ifndef VBOX
1209static inline void gen_setcc_slow_T0(DisasContext *s, int jcc_op)
1210#else /* VBOX */
1211DECLINLINE(void) gen_setcc_slow_T0(DisasContext *s, int jcc_op)
1212#endif /* VBOX */
1213{
1214 if (s->cc_op != CC_OP_DYNAMIC)
1215 gen_op_set_cc_op(s->cc_op);
1216 switch(jcc_op) {
1217 case JCC_O:
1218 gen_compute_eflags(cpu_T[0]);
1219 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 11);
1220 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1221 break;
1222 case JCC_B:
1223 gen_compute_eflags_c(cpu_T[0]);
1224 break;
1225 case JCC_Z:
1226 gen_compute_eflags(cpu_T[0]);
1227 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 6);
1228 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1229 break;
1230 case JCC_BE:
1231 gen_compute_eflags(cpu_tmp0);
1232 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 6);
1233 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1234 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1235 break;
1236 case JCC_S:
1237 gen_compute_eflags(cpu_T[0]);
1238 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 7);
1239 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1240 break;
1241 case JCC_P:
1242 gen_compute_eflags(cpu_T[0]);
1243 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 2);
1244 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1245 break;
1246 case JCC_L:
1247 gen_compute_eflags(cpu_tmp0);
1248 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
1249 tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 7); /* CC_S */
1250 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1251 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1252 break;
1253 default:
1254 case JCC_LE:
1255 gen_compute_eflags(cpu_tmp0);
1256 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
1257 tcg_gen_shri_tl(cpu_tmp4, cpu_tmp0, 7); /* CC_S */
1258 tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 6); /* CC_Z */
1259 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1260 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1261 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1262 break;
1263 }
1264}
1265
1266/* return true if setcc_slow is not needed (WARNING: must be kept in
1267 sync with gen_jcc1) */
1268static int is_fast_jcc_case(DisasContext *s, int b)
1269{
1270 int jcc_op;
1271 jcc_op = (b >> 1) & 7;
1272 switch(s->cc_op) {
1273 /* we optimize the cmp/jcc case */
1274 case CC_OP_SUBB:
1275 case CC_OP_SUBW:
1276 case CC_OP_SUBL:
1277 case CC_OP_SUBQ:
1278 if (jcc_op == JCC_O || jcc_op == JCC_P)
1279 goto slow_jcc;
1280 break;
1281
1282 /* some jumps are easy to compute */
1283 case CC_OP_ADDB:
1284 case CC_OP_ADDW:
1285 case CC_OP_ADDL:
1286 case CC_OP_ADDQ:
1287
1288 case CC_OP_LOGICB:
1289 case CC_OP_LOGICW:
1290 case CC_OP_LOGICL:
1291 case CC_OP_LOGICQ:
1292
1293 case CC_OP_INCB:
1294 case CC_OP_INCW:
1295 case CC_OP_INCL:
1296 case CC_OP_INCQ:
1297
1298 case CC_OP_DECB:
1299 case CC_OP_DECW:
1300 case CC_OP_DECL:
1301 case CC_OP_DECQ:
1302
1303 case CC_OP_SHLB:
1304 case CC_OP_SHLW:
1305 case CC_OP_SHLL:
1306 case CC_OP_SHLQ:
1307 if (jcc_op != JCC_Z && jcc_op != JCC_S)
1308 goto slow_jcc;
1309 break;
1310 default:
1311 slow_jcc:
1312 return 0;
1313 }
1314 return 1;
1315}
1316
1317/* generate a conditional jump to label 'l1' according to jump opcode
1318 value 'b'. In the fast case, T0 is guaranteed not to be used. */
1319#ifndef VBOX
1320static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
1321#else /* VBOX */
1322DECLINLINE(void) gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
1323#endif /* VBOX */
1324{
1325 int inv, jcc_op, size, cond;
1326 TCGv t0;
1327
1328 inv = b & 1;
1329 jcc_op = (b >> 1) & 7;
1330
1331 switch(cc_op) {
1332 /* we optimize the cmp/jcc case */
1333 case CC_OP_SUBB:
1334 case CC_OP_SUBW:
1335 case CC_OP_SUBL:
1336 case CC_OP_SUBQ:
1337
1338 size = cc_op - CC_OP_SUBB;
1339 switch(jcc_op) {
1340 case JCC_Z:
1341 fast_jcc_z:
1342 switch(size) {
1343 case 0:
1344 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xff);
1345 t0 = cpu_tmp0;
1346 break;
1347 case 1:
1348 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffff);
1349 t0 = cpu_tmp0;
1350 break;
1351#ifdef TARGET_X86_64
1352 case 2:
1353 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffffffff);
1354 t0 = cpu_tmp0;
1355 break;
1356#endif
1357 default:
1358 t0 = cpu_cc_dst;
1359 break;
1360 }
1361 tcg_gen_brcondi_tl(inv ? TCG_COND_NE : TCG_COND_EQ, t0, 0, l1);
1362 break;
1363 case JCC_S:
1364 fast_jcc_s:
1365 switch(size) {
1366 case 0:
1367 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80);
1368 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
1369 0, l1);
1370 break;
1371 case 1:
1372 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x8000);
1373 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
1374 0, l1);
1375 break;
1376#ifdef TARGET_X86_64
1377 case 2:
1378 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80000000);
1379 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
1380 0, l1);
1381 break;
1382#endif
1383 default:
1384 tcg_gen_brcondi_tl(inv ? TCG_COND_GE : TCG_COND_LT, cpu_cc_dst,
1385 0, l1);
1386 break;
1387 }
1388 break;
1389
1390 case JCC_B:
1391 cond = inv ? TCG_COND_GEU : TCG_COND_LTU;
1392 goto fast_jcc_b;
1393 case JCC_BE:
1394 cond = inv ? TCG_COND_GTU : TCG_COND_LEU;
1395 fast_jcc_b:
1396 tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
1397 switch(size) {
1398 case 0:
1399 t0 = cpu_tmp0;
1400 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xff);
1401 tcg_gen_andi_tl(t0, cpu_cc_src, 0xff);
1402 break;
1403 case 1:
1404 t0 = cpu_tmp0;
1405 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffff);
1406 tcg_gen_andi_tl(t0, cpu_cc_src, 0xffff);
1407 break;
1408#ifdef TARGET_X86_64
1409 case 2:
1410 t0 = cpu_tmp0;
1411 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffffffff);
1412 tcg_gen_andi_tl(t0, cpu_cc_src, 0xffffffff);
1413 break;
1414#endif
1415 default:
1416 t0 = cpu_cc_src;
1417 break;
1418 }
1419 tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
1420 break;
1421
1422 case JCC_L:
1423 cond = inv ? TCG_COND_GE : TCG_COND_LT;
1424 goto fast_jcc_l;
1425 case JCC_LE:
1426 cond = inv ? TCG_COND_GT : TCG_COND_LE;
1427 fast_jcc_l:
1428 tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
1429 switch(size) {
1430 case 0:
1431 t0 = cpu_tmp0;
1432 tcg_gen_ext8s_tl(cpu_tmp4, cpu_tmp4);
1433 tcg_gen_ext8s_tl(t0, cpu_cc_src);
1434 break;
1435 case 1:
1436 t0 = cpu_tmp0;
1437 tcg_gen_ext16s_tl(cpu_tmp4, cpu_tmp4);
1438 tcg_gen_ext16s_tl(t0, cpu_cc_src);
1439 break;
1440#ifdef TARGET_X86_64
1441 case 2:
1442 t0 = cpu_tmp0;
1443 tcg_gen_ext32s_tl(cpu_tmp4, cpu_tmp4);
1444 tcg_gen_ext32s_tl(t0, cpu_cc_src);
1445 break;
1446#endif
1447 default:
1448 t0 = cpu_cc_src;
1449 break;
1450 }
1451 tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
1452 break;
1453
1454 default:
1455 goto slow_jcc;
1456 }
1457 break;
1458
1459 /* some jumps are easy to compute */
1460 case CC_OP_ADDB:
1461 case CC_OP_ADDW:
1462 case CC_OP_ADDL:
1463 case CC_OP_ADDQ:
1464
1465 case CC_OP_ADCB:
1466 case CC_OP_ADCW:
1467 case CC_OP_ADCL:
1468 case CC_OP_ADCQ:
1469
1470 case CC_OP_SBBB:
1471 case CC_OP_SBBW:
1472 case CC_OP_SBBL:
1473 case CC_OP_SBBQ:
1474
1475 case CC_OP_LOGICB:
1476 case CC_OP_LOGICW:
1477 case CC_OP_LOGICL:
1478 case CC_OP_LOGICQ:
1479
1480 case CC_OP_INCB:
1481 case CC_OP_INCW:
1482 case CC_OP_INCL:
1483 case CC_OP_INCQ:
1484
1485 case CC_OP_DECB:
1486 case CC_OP_DECW:
1487 case CC_OP_DECL:
1488 case CC_OP_DECQ:
1489
1490 case CC_OP_SHLB:
1491 case CC_OP_SHLW:
1492 case CC_OP_SHLL:
1493 case CC_OP_SHLQ:
1494
1495 case CC_OP_SARB:
1496 case CC_OP_SARW:
1497 case CC_OP_SARL:
1498 case CC_OP_SARQ:
1499 switch(jcc_op) {
1500 case JCC_Z:
1501 size = (cc_op - CC_OP_ADDB) & 3;
1502 goto fast_jcc_z;
1503 case JCC_S:
1504 size = (cc_op - CC_OP_ADDB) & 3;
1505 goto fast_jcc_s;
1506 default:
1507 goto slow_jcc;
1508 }
1509 break;
1510 default:
1511 slow_jcc:
1512 gen_setcc_slow_T0(s, jcc_op);
1513 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE,
1514 cpu_T[0], 0, l1);
1515 break;
1516 }
1517}
1518
1519/* XXX: does not work with gdbstub "ice" single step - not a
1520 serious problem */
1521static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
1522{
1523 int l1, l2;
1524
1525 l1 = gen_new_label();
1526 l2 = gen_new_label();
1527 gen_op_jnz_ecx(s->aflag, l1);
1528 gen_set_label(l2);
1529 gen_jmp_tb(s, next_eip, 1);
1530 gen_set_label(l1);
1531 return l2;
1532}
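/* The label returned above is the loop-exit target: the REPZ/REPNZ
   expanders branch to it once ECX reaches zero (and, for scas/cmps, when
   the Z flag terminates the repeat). */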
1533
1534#ifndef VBOX
1535static inline void gen_stos(DisasContext *s, int ot)
1536#else /* VBOX */
1537DECLINLINE(void) gen_stos(DisasContext *s, int ot)
1538#endif /* VBOX */
1539{
1540 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1541 gen_string_movl_A0_EDI(s);
1542 gen_op_st_T0_A0(ot + s->mem_index);
1543 gen_op_movl_T0_Dshift(ot);
1544 gen_op_add_reg_T0(s->aflag, R_EDI);
1545}
1546
1547#ifndef VBOX
1548static inline void gen_lods(DisasContext *s, int ot)
1549#else /* VBOX */
1550DECLINLINE(void) gen_lods(DisasContext *s, int ot)
1551#endif /* VBOX */
1552{
1553 gen_string_movl_A0_ESI(s);
1554 gen_op_ld_T0_A0(ot + s->mem_index);
1555 gen_op_mov_reg_T0(ot, R_EAX);
1556 gen_op_movl_T0_Dshift(ot);
1557 gen_op_add_reg_T0(s->aflag, R_ESI);
1558}
1559
1560#ifndef VBOX
1561static inline void gen_scas(DisasContext *s, int ot)
1562#else /* VBOX */
1563DECLINLINE(void) gen_scas(DisasContext *s, int ot)
1564#endif /* VBOX */
1565{
1566 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1567 gen_string_movl_A0_EDI(s);
1568 gen_op_ld_T1_A0(ot + s->mem_index);
1569 gen_op_cmpl_T0_T1_cc();
1570 gen_op_movl_T0_Dshift(ot);
1571 gen_op_add_reg_T0(s->aflag, R_EDI);
1572}
1573
1574#ifndef VBOX
1575static inline void gen_cmps(DisasContext *s, int ot)
1576#else /* VBOX */
1577DECLINLINE(void) gen_cmps(DisasContext *s, int ot)
1578#endif /* VBOX */
1579{
1580 gen_string_movl_A0_ESI(s);
1581 gen_op_ld_T0_A0(ot + s->mem_index);
1582 gen_string_movl_A0_EDI(s);
1583 gen_op_ld_T1_A0(ot + s->mem_index);
1584 gen_op_cmpl_T0_T1_cc();
1585 gen_op_movl_T0_Dshift(ot);
1586 gen_op_add_reg_T0(s->aflag, R_ESI);
1587 gen_op_add_reg_T0(s->aflag, R_EDI);
1588}
1589
1590#ifndef VBOX
1591static inline void gen_ins(DisasContext *s, int ot)
1592#else /* VBOX */
1593DECLINLINE(void) gen_ins(DisasContext *s, int ot)
1594#endif /* VBOX */
1595{
1596 if (use_icount)
1597 gen_io_start();
1598 gen_string_movl_A0_EDI(s);
1599 /* Note: we must do this dummy write first to be restartable in
1600 case of page fault. */
1601 gen_op_movl_T0_0();
1602 gen_op_st_T0_A0(ot + s->mem_index);
1603 gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
1604 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
1605 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
1606 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
1607 gen_op_st_T0_A0(ot + s->mem_index);
1608 gen_op_movl_T0_Dshift(ot);
1609 gen_op_add_reg_T0(s->aflag, R_EDI);
1610 if (use_icount)
1611 gen_io_end();
1612}
1613
1614#ifndef VBOX
1615static inline void gen_outs(DisasContext *s, int ot)
1616#else /* VBOX */
1617DECLINLINE(void) gen_outs(DisasContext *s, int ot)
1618#endif /* VBOX */
1619{
1620 if (use_icount)
1621 gen_io_start();
1622 gen_string_movl_A0_ESI(s);
1623 gen_op_ld_T0_A0(ot + s->mem_index);
1624
1625 gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
1626 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
1627 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
1628 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
1629 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
1630
1631 gen_op_movl_T0_Dshift(ot);
1632 gen_op_add_reg_T0(s->aflag, R_ESI);
1633 if (use_icount)
1634 gen_io_end();
1635}
1636
1637/* same method as Valgrind: we generate jumps to the current or next
1638 instruction */
1639#ifndef VBOX
1640#define GEN_REPZ(op) \
1641static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1642 target_ulong cur_eip, target_ulong next_eip) \
1643{ \
1644 int l2; \
1645 gen_update_cc_op(s); \
1646 l2 = gen_jz_ecx_string(s, next_eip); \
1647 gen_ ## op(s, ot); \
1648 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1649 /* a loop would cause two single step exceptions if ECX = 1 \
1650 before rep string_insn */ \
1651 if (!s->jmp_opt) \
1652 gen_op_jz_ecx(s->aflag, l2); \
1653 gen_jmp(s, cur_eip); \
1654}
1655#else /* VBOX */
1656#define GEN_REPZ(op) \
1657DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot, \
1658 target_ulong cur_eip, target_ulong next_eip) \
1659{ \
1660 int l2; \
1661 gen_update_cc_op(s); \
1662 l2 = gen_jz_ecx_string(s, next_eip); \
1663 gen_ ## op(s, ot); \
1664 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1665 /* a loop would cause two single step exceptions if ECX = 1 \
1666 before rep string_insn */ \
1667 if (!s->jmp_opt) \
1668 gen_op_jz_ecx(s->aflag, l2); \
1669 gen_jmp(s, cur_eip); \
1670}
1671#endif /* VBOX */
1672
1673#ifndef VBOX
1674#define GEN_REPZ2(op) \
1675static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1676 target_ulong cur_eip, \
1677 target_ulong next_eip, \
1678 int nz) \
1679{ \
1680 int l2; \
1681 gen_update_cc_op(s); \
1682 l2 = gen_jz_ecx_string(s, next_eip); \
1683 gen_ ## op(s, ot); \
1684 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1685 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1686 gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2); \
1687 if (!s->jmp_opt) \
1688 gen_op_jz_ecx(s->aflag, l2); \
1689 gen_jmp(s, cur_eip); \
1690}
1691#else /* VBOX */
1692#define GEN_REPZ2(op) \
1693DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot, \
1694 target_ulong cur_eip, \
1695 target_ulong next_eip, \
1696 int nz) \
1697{ \
1698 int l2;\
1699 gen_update_cc_op(s); \
1700 l2 = gen_jz_ecx_string(s, next_eip); \
1701 gen_ ## op(s, ot); \
1702 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1703 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1704 gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2); \
1705 if (!s->jmp_opt) \
1706 gen_op_jz_ecx(s->aflag, l2); \
1707 gen_jmp(s, cur_eip); \
1708}
1709#endif /* VBOX */
1710
1711GEN_REPZ(movs)
1712GEN_REPZ(stos)
1713GEN_REPZ(lods)
1714GEN_REPZ(ins)
1715GEN_REPZ(outs)
1716GEN_REPZ2(scas)
1717GEN_REPZ2(cmps)
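/* The expansions above define gen_repz_movs() .. gen_repz_outs() plus the
   flag-testing gen_repz_scas()/gen_repz_cmps().  In the latter pair,
   (JCC_Z << 1) | (nz ^ 1) makes gen_jcc1() branch to the exit label when
   ZF is clear for REPZ (nz == 0) or set for REPNZ (nz == 1). */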
1718
1719static void *helper_fp_arith_ST0_FT0[8] = {
1720 helper_fadd_ST0_FT0,
1721 helper_fmul_ST0_FT0,
1722 helper_fcom_ST0_FT0,
1723 helper_fcom_ST0_FT0,
1724 helper_fsub_ST0_FT0,
1725 helper_fsubr_ST0_FT0,
1726 helper_fdiv_ST0_FT0,
1727 helper_fdivr_ST0_FT0,
1728};
1729
1730/* NOTE the exception in "r" op ordering */
1731static void *helper_fp_arith_STN_ST0[8] = {
1732 helper_fadd_STN_ST0,
1733 helper_fmul_STN_ST0,
1734 NULL,
1735 NULL,
1736 helper_fsubr_STN_ST0,
1737 helper_fsub_STN_ST0,
1738 helper_fdivr_STN_ST0,
1739 helper_fdiv_STN_ST0,
1740};
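/* Entries 4/5 and 6/7 are swapped relative to the ST0_FT0 table above: for
   the STN,ST0 encodings the CPU performs the reversed subtraction or
   division, so fsubr/fsub and fdivr/fdiv trade places. */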
1741
1742/* if d == OR_TMP0, it means memory operand (address in A0) */
1743static void gen_op(DisasContext *s1, int op, int ot, int d)
1744{
1745 if (d != OR_TMP0) {
1746 gen_op_mov_TN_reg(ot, 0, d);
1747 } else {
1748 gen_op_ld_T0_A0(ot + s1->mem_index);
1749 }
1750 switch(op) {
1751 case OP_ADCL:
1752 if (s1->cc_op != CC_OP_DYNAMIC)
1753 gen_op_set_cc_op(s1->cc_op);
1754 gen_compute_eflags_c(cpu_tmp4);
1755 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1756 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1757 if (d != OR_TMP0)
1758 gen_op_mov_reg_T0(ot, d);
1759 else
1760 gen_op_st_T0_A0(ot + s1->mem_index);
1761 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1762 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1763 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
1764 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
1765 tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
1766 s1->cc_op = CC_OP_DYNAMIC;
1767 break;
1768 case OP_SBBL:
1769 if (s1->cc_op != CC_OP_DYNAMIC)
1770 gen_op_set_cc_op(s1->cc_op);
1771 gen_compute_eflags_c(cpu_tmp4);
1772 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1773 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1774 if (d != OR_TMP0)
1775 gen_op_mov_reg_T0(ot, d);
1776 else
1777 gen_op_st_T0_A0(ot + s1->mem_index);
1778 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1779 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1780 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
1781 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
1782 tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
1783 s1->cc_op = CC_OP_DYNAMIC;
1784 break;
1785 case OP_ADDL:
1786 gen_op_addl_T0_T1();
1787 if (d != OR_TMP0)
1788 gen_op_mov_reg_T0(ot, d);
1789 else
1790 gen_op_st_T0_A0(ot + s1->mem_index);
1791 gen_op_update2_cc();
1792 s1->cc_op = CC_OP_ADDB + ot;
1793 break;
1794 case OP_SUBL:
1795 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1796 if (d != OR_TMP0)
1797 gen_op_mov_reg_T0(ot, d);
1798 else
1799 gen_op_st_T0_A0(ot + s1->mem_index);
1800 gen_op_update2_cc();
1801 s1->cc_op = CC_OP_SUBB + ot;
1802 break;
1803 default:
1804 case OP_ANDL:
1805 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1806 if (d != OR_TMP0)
1807 gen_op_mov_reg_T0(ot, d);
1808 else
1809 gen_op_st_T0_A0(ot + s1->mem_index);
1810 gen_op_update1_cc();
1811 s1->cc_op = CC_OP_LOGICB + ot;
1812 break;
1813 case OP_ORL:
1814 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1815 if (d != OR_TMP0)
1816 gen_op_mov_reg_T0(ot, d);
1817 else
1818 gen_op_st_T0_A0(ot + s1->mem_index);
1819 gen_op_update1_cc();
1820 s1->cc_op = CC_OP_LOGICB + ot;
1821 break;
1822 case OP_XORL:
1823 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1824 if (d != OR_TMP0)
1825 gen_op_mov_reg_T0(ot, d);
1826 else
1827 gen_op_st_T0_A0(ot + s1->mem_index);
1828 gen_op_update1_cc();
1829 s1->cc_op = CC_OP_LOGICB + ot;
1830 break;
1831 case OP_CMPL:
1832 gen_op_cmpl_T0_T1_cc();
1833 s1->cc_op = CC_OP_SUBB + ot;
1834 break;
1835 }
1836}
1837
1838/* if d == OR_TMP0, it means memory operand (address in A0) */
1839static void gen_inc(DisasContext *s1, int ot, int d, int c)
1840{
1841 if (d != OR_TMP0)
1842 gen_op_mov_TN_reg(ot, 0, d);
1843 else
1844 gen_op_ld_T0_A0(ot + s1->mem_index);
1845 if (s1->cc_op != CC_OP_DYNAMIC)
1846 gen_op_set_cc_op(s1->cc_op);
1847 if (c > 0) {
1848 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
1849 s1->cc_op = CC_OP_INCB + ot;
1850 } else {
1851 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
1852 s1->cc_op = CC_OP_DECB + ot;
1853 }
1854 if (d != OR_TMP0)
1855 gen_op_mov_reg_T0(ot, d);
1856 else
1857 gen_op_st_T0_A0(ot + s1->mem_index);
1858 gen_compute_eflags_c(cpu_cc_src);
1859 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1860}
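/* INC and DEC leave CF untouched, so the carry of the *previous* operation
   is materialized into cpu_cc_src above before cc_op switches to the
   CC_OP_INCB/CC_OP_DECB family, whose flag helpers take CF from there. */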
1861
1862static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
1863 int is_right, int is_arith)
1864{
1865 target_ulong mask;
1866 int shift_label;
1867 TCGv t0, t1;
1868
1869 if (ot == OT_QUAD)
1870 mask = 0x3f;
1871 else
1872 mask = 0x1f;
1873
1874 /* load */
1875 if (op1 == OR_TMP0)
1876 gen_op_ld_T0_A0(ot + s->mem_index);
1877 else
1878 gen_op_mov_TN_reg(ot, 0, op1);
1879
1880 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);
1881
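 /* Also compute the value shifted by (count - 1) into cpu_T3: the bit it
    leaves in the CF position is what the lazy flag code extracts once
    cc_op is set to CC_OP_SHLB/CC_OP_SARB + ot below. */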
1882 tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);
1883
1884 if (is_right) {
1885 if (is_arith) {
1886 gen_exts(ot, cpu_T[0]);
1887 tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1888 tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1889 } else {
1890 gen_extu(ot, cpu_T[0]);
1891 tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1892 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1893 }
1894 } else {
1895 tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1896 tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1897 }
1898
1899 /* store */
1900 if (op1 == OR_TMP0)
1901 gen_op_st_T0_A0(ot + s->mem_index);
1902 else
1903 gen_op_mov_reg_T0(ot, op1);
1904
1905 /* update eflags if non-zero shift */
1906 if (s->cc_op != CC_OP_DYNAMIC)
1907 gen_op_set_cc_op(s->cc_op);
1908
1909 /* XXX: inefficient */
1910 t0 = tcg_temp_local_new(TCG_TYPE_TL);
1911 t1 = tcg_temp_local_new(TCG_TYPE_TL);
1912
1913 tcg_gen_mov_tl(t0, cpu_T[0]);
1914 tcg_gen_mov_tl(t1, cpu_T3);
1915
1916 shift_label = gen_new_label();
1917 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_T[1], 0, shift_label);
1918
1919 tcg_gen_mov_tl(cpu_cc_src, t1);
1920 tcg_gen_mov_tl(cpu_cc_dst, t0);
1921 if (is_right)
1922 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
1923 else
1924 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
1925
1926 gen_set_label(shift_label);
1927 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1928
1929 tcg_temp_free(t0);
1930 tcg_temp_free(t1);
1931}
1932
1933static void gen_shift_rm_im(DisasContext *s, int ot, int op1, int op2,
1934 int is_right, int is_arith)
1935{
1936 int mask;
1937
1938 if (ot == OT_QUAD)
1939 mask = 0x3f;
1940 else
1941 mask = 0x1f;
1942
1943 /* load */
1944 if (op1 == OR_TMP0)
1945 gen_op_ld_T0_A0(ot + s->mem_index);
1946 else
1947 gen_op_mov_TN_reg(ot, 0, op1);
1948
1949 op2 &= mask;
1950 if (op2 != 0) {
1951 if (is_right) {
1952 if (is_arith) {
1953 gen_exts(ot, cpu_T[0]);
1954 tcg_gen_sari_tl(cpu_tmp4, cpu_T[0], op2 - 1);
1955 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], op2);
1956 } else {
1957 gen_extu(ot, cpu_T[0]);
1958 tcg_gen_shri_tl(cpu_tmp4, cpu_T[0], op2 - 1);
1959 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], op2);
1960 }
1961 } else {
1962 tcg_gen_shli_tl(cpu_tmp4, cpu_T[0], op2 - 1);
1963 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], op2);
1964 }
1965 }
1966
1967 /* store */
1968 if (op1 == OR_TMP0)
1969 gen_op_st_T0_A0(ot + s->mem_index);
1970 else
1971 gen_op_mov_reg_T0(ot, op1);
1972
1973 /* update eflags if non-zero shift */
1974 if (op2 != 0) {
1975 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
1976 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1977 if (is_right)
1978 s->cc_op = CC_OP_SARB + ot;
1979 else
1980 s->cc_op = CC_OP_SHLB + ot;
1981 }
1982}
1983
1984#ifndef VBOX
1985static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
1986#else /* VBOX */
1987DECLINLINE(void) tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
1988#endif /* VBOX */
1989{
1990 if (arg2 >= 0)
1991 tcg_gen_shli_tl(ret, arg1, arg2);
1992 else
1993 tcg_gen_shri_tl(ret, arg1, -arg2);
1994}
1995
1996/* XXX: add faster immediate case */
1997static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
1998 int is_right)
1999{
2000 target_ulong mask;
2001 int label1, label2, data_bits;
2002 TCGv t0, t1, t2, a0;
2003
2004 /* XXX: inefficient, but we must use local temps */
2005 t0 = tcg_temp_local_new(TCG_TYPE_TL);
2006 t1 = tcg_temp_local_new(TCG_TYPE_TL);
2007 t2 = tcg_temp_local_new(TCG_TYPE_TL);
2008 a0 = tcg_temp_local_new(TCG_TYPE_TL);
2009
2010 if (ot == OT_QUAD)
2011 mask = 0x3f;
2012 else
2013 mask = 0x1f;
2014
2015 /* load */
2016 if (op1 == OR_TMP0) {
2017 tcg_gen_mov_tl(a0, cpu_A0);
2018 gen_op_ld_v(ot + s->mem_index, t0, a0);
2019 } else {
2020 gen_op_mov_v_reg(ot, t0, op1);
2021 }
2022
2023 tcg_gen_mov_tl(t1, cpu_T[1]);
2024
2025 tcg_gen_andi_tl(t1, t1, mask);
2026
2027 /* Must test zero case to avoid using undefined behaviour in TCG
2028 shifts. */
2029 label1 = gen_new_label();
2030 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label1);
2031
2032 if (ot <= OT_WORD)
2033 tcg_gen_andi_tl(cpu_tmp0, t1, (1 << (3 + ot)) - 1);
2034 else
2035 tcg_gen_mov_tl(cpu_tmp0, t1);
2036
2037 gen_extu(ot, t0);
2038 tcg_gen_mov_tl(t2, t0);
2039
2040 data_bits = 8 << ot;
2041 /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
2042 fix TCG definition) */
2043 if (is_right) {
2044 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp0);
2045 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
2046 tcg_gen_shl_tl(t0, t0, cpu_tmp0);
2047 } else {
2048 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp0);
2049 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
2050 tcg_gen_shr_tl(t0, t0, cpu_tmp0);
2051 }
2052 tcg_gen_or_tl(t0, t0, cpu_tmp4);
2053
2054 gen_set_label(label1);
2055 /* store */
2056 if (op1 == OR_TMP0) {
2057 gen_op_st_v(ot + s->mem_index, t0, a0);
2058 } else {
2059 gen_op_mov_reg_v(ot, op1, t0);
2060 }
2061
2062 /* update eflags */
2063 if (s->cc_op != CC_OP_DYNAMIC)
2064 gen_op_set_cc_op(s->cc_op);
2065
2066 label2 = gen_new_label();
2067 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label2);
2068
2069 gen_compute_eflags(cpu_cc_src);
2070 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
2071 tcg_gen_xor_tl(cpu_tmp0, t2, t0);
2072 tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
2073 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
2074 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
2075 if (is_right) {
2076 tcg_gen_shri_tl(t0, t0, data_bits - 1);
2077 }
2078 tcg_gen_andi_tl(t0, t0, CC_C);
2079 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
2080
2081 tcg_gen_discard_tl(cpu_cc_dst);
2082 tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
2083
2084 gen_set_label(label2);
2085 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2086
2087 tcg_temp_free(t0);
2088 tcg_temp_free(t1);
2089 tcg_temp_free(t2);
2090 tcg_temp_free(a0);
2091}
2092
2093static void *helper_rotc[8] = {
2094 helper_rclb,
2095 helper_rclw,
2096 helper_rcll,
2097 X86_64_ONLY(helper_rclq),
2098 helper_rcrb,
2099 helper_rcrw,
2100 helper_rcrl,
2101 X86_64_ONLY(helper_rcrq),
2102};
2103
2104/* XXX: add faster immediate = 1 case */
2105static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
2106 int is_right)
2107{
2108 int label1;
2109
2110 if (s->cc_op != CC_OP_DYNAMIC)
2111 gen_op_set_cc_op(s->cc_op);
2112
2113 /* load */
2114 if (op1 == OR_TMP0)
2115 gen_op_ld_T0_A0(ot + s->mem_index);
2116 else
2117 gen_op_mov_TN_reg(ot, 0, op1);
2118
2119 tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
2120 cpu_T[0], cpu_T[0], cpu_T[1]);
2121 /* store */
2122 if (op1 == OR_TMP0)
2123 gen_op_st_T0_A0(ot + s->mem_index);
2124 else
2125 gen_op_mov_reg_T0(ot, op1);
2126
2127 /* update eflags */
2128 label1 = gen_new_label();
2129 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_tmp, -1, label1);
2130
2131 tcg_gen_mov_tl(cpu_cc_src, cpu_cc_tmp);
2132 tcg_gen_discard_tl(cpu_cc_dst);
2133 tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
2134
2135 gen_set_label(label1);
2136 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2137}
2138
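/* Illustrative sketch, not part of the translator: RCL/RCR rotate
   through the carry flag, so an 8-bit RCL is effectively a 9-bit
   rotate of {CF, operand}. A hypothetical bit-at-a-time model: */
static inline uint8_t example_rcl8(uint8_t x, unsigned count, unsigned *cf)
{
    unsigned i, new_cf;
    count = (count & 0x1f) % 9;        /* count is masked, then mod 9 */
    for (i = 0; i < count; i++) {
        new_cf = (x >> 7) & 1;         /* bit rotated out becomes CF */
        x = (uint8_t)((x << 1) | *cf); /* old CF enters at bit 0 */
        *cf = new_cf;
    }
    return x;
}
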
2139/* XXX: add faster immediate case */
2140static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
2141 int is_right)
2142{
2143 int label1, label2, data_bits;
2144 target_ulong mask;
2145 TCGv t0, t1, t2, a0;
2146
2147 t0 = tcg_temp_local_new(TCG_TYPE_TL);
2148 t1 = tcg_temp_local_new(TCG_TYPE_TL);
2149 t2 = tcg_temp_local_new(TCG_TYPE_TL);
2150 a0 = tcg_temp_local_new(TCG_TYPE_TL);
2151
2152 if (ot == OT_QUAD)
2153 mask = 0x3f;
2154 else
2155 mask = 0x1f;
2156
2157 /* load */
2158 if (op1 == OR_TMP0) {
2159 tcg_gen_mov_tl(a0, cpu_A0);
2160 gen_op_ld_v(ot + s->mem_index, t0, a0);
2161 } else {
2162 gen_op_mov_v_reg(ot, t0, op1);
2163 }
2164
2165 tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);
2166
2167 tcg_gen_mov_tl(t1, cpu_T[1]);
2168 tcg_gen_mov_tl(t2, cpu_T3);
2169
2170 /* Must test zero case to avoid using undefined behaviour in TCG
2171 shifts. */
2172 label1 = gen_new_label();
2173 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
2174
2175 tcg_gen_addi_tl(cpu_tmp5, t2, -1);
2176 if (ot == OT_WORD) {
2177 /* Note: we implement the Intel behaviour for shift count > 16 */
2178 if (is_right) {
2179 tcg_gen_andi_tl(t0, t0, 0xffff);
2180 tcg_gen_shli_tl(cpu_tmp0, t1, 16);
2181 tcg_gen_or_tl(t0, t0, cpu_tmp0);
2182 tcg_gen_ext32u_tl(t0, t0);
2183
2184 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
2185
2186 /* only needed if count > 16, but a runtime test would complicate the generated code */
2187 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
2188 tcg_gen_shl_tl(cpu_tmp0, t0, cpu_tmp5);
2189
2190 tcg_gen_shr_tl(t0, t0, t2);
2191
2192 tcg_gen_or_tl(t0, t0, cpu_tmp0);
2193 } else {
2194 /* XXX: not optimal */
2195 tcg_gen_andi_tl(t0, t0, 0xffff);
2196 tcg_gen_shli_tl(t1, t1, 16);
2197 tcg_gen_or_tl(t1, t1, t0);
2198 tcg_gen_ext32u_tl(t1, t1);
2199
2200 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
2201 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
2202 tcg_gen_shr_tl(cpu_tmp6, t1, cpu_tmp0);
2203 tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);
2204
2205 tcg_gen_shl_tl(t0, t0, t2);
2206 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
2207 tcg_gen_shr_tl(t1, t1, cpu_tmp5);
2208 tcg_gen_or_tl(t0, t0, t1);
2209 }
2210 } else {
2211 data_bits = 8 << ot;
2212 if (is_right) {
2213 if (ot == OT_LONG)
2214 tcg_gen_ext32u_tl(t0, t0);
2215
2216 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
2217
2218 tcg_gen_shr_tl(t0, t0, t2);
2219 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
2220 tcg_gen_shl_tl(t1, t1, cpu_tmp5);
2221 tcg_gen_or_tl(t0, t0, t1);
2222
2223 } else {
2224 if (ot == OT_LONG)
2225 tcg_gen_ext32u_tl(t1, t1);
2226
2227 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
2228
2229 tcg_gen_shl_tl(t0, t0, t2);
2230 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
2231 tcg_gen_shr_tl(t1, t1, cpu_tmp5);
2232 tcg_gen_or_tl(t0, t0, t1);
2233 }
2234 }
2235 tcg_gen_mov_tl(t1, cpu_tmp4);
2236
2237 gen_set_label(label1);
2238 /* store */
2239 if (op1 == OR_TMP0) {
2240 gen_op_st_v(ot + s->mem_index, t0, a0);
2241 } else {
2242 gen_op_mov_reg_v(ot, op1, t0);
2243 }
2244
2245 /* update eflags */
2246 if (s->cc_op != CC_OP_DYNAMIC)
2247 gen_op_set_cc_op(s->cc_op);
2248
2249 label2 = gen_new_label();
2250 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label2);
2251
2252 tcg_gen_mov_tl(cpu_cc_src, t1);
2253 tcg_gen_mov_tl(cpu_cc_dst, t0);
2254 if (is_right) {
2255 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
2256 } else {
2257 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
2258 }
2259 gen_set_label(label2);
2260 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2261
2262 tcg_temp_free(t0);
2263 tcg_temp_free(t1);
2264 tcg_temp_free(t2);
2265 tcg_temp_free(a0);
2266}
2267
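/* Illustrative sketch, not part of the translator: SHLD/SHRD shift the
   destination while filling the vacated bits from a second register,
   as if both formed one double-width operand. A hypothetical 32-bit
   SHLD model (the code above also handles the 16-bit Intel quirks): */
static inline uint32_t example_shld32(uint32_t dst, uint32_t src,
                                      unsigned count)
{
    count &= 0x1f;                 /* count is masked to 5 bits */
    if (count == 0)
        return dst;                /* avoid the undefined shift by 32 */
    return (dst << count) | (src >> (32 - count));
}
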
2268static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
2269{
2270 if (s != OR_TMP1)
2271 gen_op_mov_TN_reg(ot, 1, s);
2272 switch(op) {
2273 case OP_ROL:
2274 gen_rot_rm_T1(s1, ot, d, 0);
2275 break;
2276 case OP_ROR:
2277 gen_rot_rm_T1(s1, ot, d, 1);
2278 break;
2279 case OP_SHL:
2280 case OP_SHL1:
2281 gen_shift_rm_T1(s1, ot, d, 0, 0);
2282 break;
2283 case OP_SHR:
2284 gen_shift_rm_T1(s1, ot, d, 1, 0);
2285 break;
2286 case OP_SAR:
2287 gen_shift_rm_T1(s1, ot, d, 1, 1);
2288 break;
2289 case OP_RCL:
2290 gen_rotc_rm_T1(s1, ot, d, 0);
2291 break;
2292 case OP_RCR:
2293 gen_rotc_rm_T1(s1, ot, d, 1);
2294 break;
2295 }
2296}
2297
2298static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
2299{
2300 switch(op) {
2301 case OP_SHL:
2302 case OP_SHL1:
2303 gen_shift_rm_im(s1, ot, d, c, 0, 0);
2304 break;
2305 case OP_SHR:
2306 gen_shift_rm_im(s1, ot, d, c, 1, 0);
2307 break;
2308 case OP_SAR:
2309 gen_shift_rm_im(s1, ot, d, c, 1, 1);
2310 break;
2311 default:
2312 /* currently not optimized */
2313 gen_op_movl_T1_im(c);
2314 gen_shift(s1, op, ot, d, OR_TMP1);
2315 break;
2316 }
2317}
2318
2319static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
2320{
2321 target_long disp;
2322 int havesib;
2323 int base;
2324 int index;
2325 int scale;
2326 int opreg;
2327 int mod, rm, code, override, must_add_seg;
2328
2329 override = s->override;
2330 must_add_seg = s->addseg;
2331 if (override >= 0)
2332 must_add_seg = 1;
2333 mod = (modrm >> 6) & 3;
2334 rm = modrm & 7;
2335
2336 if (s->aflag) {
2337
2338 havesib = 0;
2339 base = rm;
2340 index = 0;
2341 scale = 0;
2342
2343 if (base == 4) {
2344 havesib = 1;
2345 code = ldub_code(s->pc++);
2346 scale = (code >> 6) & 3;
2347 index = ((code >> 3) & 7) | REX_X(s);
2348 base = (code & 7);
2349 }
2350 base |= REX_B(s);
2351
2352 switch (mod) {
2353 case 0:
2354 if ((base & 7) == 5) {
2355 base = -1;
2356 disp = (int32_t)ldl_code(s->pc);
2357 s->pc += 4;
2358 if (CODE64(s) && !havesib) {
2359 disp += s->pc + s->rip_offset;
2360 }
2361 } else {
2362 disp = 0;
2363 }
2364 break;
2365 case 1:
2366 disp = (int8_t)ldub_code(s->pc++);
2367 break;
2368 default:
2369 case 2:
2370 disp = ldl_code(s->pc);
2371 s->pc += 4;
2372 break;
2373 }
2374
2375 if (base >= 0) {
2376 /* for correct popl handling with esp */
2377 if (base == 4 && s->popl_esp_hack)
2378 disp += s->popl_esp_hack;
2379#ifdef TARGET_X86_64
2380 if (s->aflag == 2) {
2381 gen_op_movq_A0_reg(base);
2382 if (disp != 0) {
2383 gen_op_addq_A0_im(disp);
2384 }
2385 } else
2386#endif
2387 {
2388 gen_op_movl_A0_reg(base);
2389 if (disp != 0)
2390 gen_op_addl_A0_im(disp);
2391 }
2392 } else {
2393#ifdef TARGET_X86_64
2394 if (s->aflag == 2) {
2395 gen_op_movq_A0_im(disp);
2396 } else
2397#endif
2398 {
2399 gen_op_movl_A0_im(disp);
2400 }
2401 }
2402 /* XXX: index == 4 is always invalid */
2403 if (havesib && (index != 4 || scale != 0)) {
2404#ifdef TARGET_X86_64
2405 if (s->aflag == 2) {
2406 gen_op_addq_A0_reg_sN(scale, index);
2407 } else
2408#endif
2409 {
2410 gen_op_addl_A0_reg_sN(scale, index);
2411 }
2412 }
2413 if (must_add_seg) {
2414 if (override < 0) {
2415 if (base == R_EBP || base == R_ESP)
2416 override = R_SS;
2417 else
2418 override = R_DS;
2419 }
2420#ifdef TARGET_X86_64
2421 if (s->aflag == 2) {
2422 gen_op_addq_A0_seg(override);
2423 } else
2424#endif
2425 {
2426 gen_op_addl_A0_seg(override);
2427 }
2428 }
2429 } else {
2430 switch (mod) {
2431 case 0:
2432 if (rm == 6) {
2433 disp = lduw_code(s->pc);
2434 s->pc += 2;
2435 gen_op_movl_A0_im(disp);
2436 rm = 0; /* avoid SS override */
2437 goto no_rm;
2438 } else {
2439 disp = 0;
2440 }
2441 break;
2442 case 1:
2443 disp = (int8_t)ldub_code(s->pc++);
2444 break;
2445 default:
2446 case 2:
2447 disp = lduw_code(s->pc);
2448 s->pc += 2;
2449 break;
2450 }
2451 switch(rm) {
2452 case 0:
2453 gen_op_movl_A0_reg(R_EBX);
2454 gen_op_addl_A0_reg_sN(0, R_ESI);
2455 break;
2456 case 1:
2457 gen_op_movl_A0_reg(R_EBX);
2458 gen_op_addl_A0_reg_sN(0, R_EDI);
2459 break;
2460 case 2:
2461 gen_op_movl_A0_reg(R_EBP);
2462 gen_op_addl_A0_reg_sN(0, R_ESI);
2463 break;
2464 case 3:
2465 gen_op_movl_A0_reg(R_EBP);
2466 gen_op_addl_A0_reg_sN(0, R_EDI);
2467 break;
2468 case 4:
2469 gen_op_movl_A0_reg(R_ESI);
2470 break;
2471 case 5:
2472 gen_op_movl_A0_reg(R_EDI);
2473 break;
2474 case 6:
2475 gen_op_movl_A0_reg(R_EBP);
2476 break;
2477 default:
2478 case 7:
2479 gen_op_movl_A0_reg(R_EBX);
2480 break;
2481 }
2482 if (disp != 0)
2483 gen_op_addl_A0_im(disp);
2484 gen_op_andl_A0_ffff();
2485 no_rm:
2486 if (must_add_seg) {
2487 if (override < 0) {
2488 if (rm == 2 || rm == 3 || rm == 6)
2489 override = R_SS;
2490 else
2491 override = R_DS;
2492 }
2493 gen_op_addl_A0_seg(override);
2494 }
2495 }
2496
2497 opreg = OR_A0;
2498 disp = 0;
2499 *reg_ptr = opreg;
2500 *offset_ptr = disp;
2501}
2502
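/* Illustrative sketch, not part of the translator: the decoder above
   follows the standard x86 ModRM/SIB layout. For the 32-bit SIB form
   the effective address is base + index * (1 << scale) + disp. A
   hypothetical decomposition of the two bytes: */
static inline void example_decode_modrm_sib(uint8_t modrm, uint8_t sib,
                                            int *mod, int *reg, int *rm,
                                            int *scale, int *index,
                                            int *base)
{
    *mod   = (modrm >> 6) & 3;     /* 0-2: memory forms, 3: register */
    *reg   = (modrm >> 3) & 7;     /* register, or opcode extension */
    *rm    = modrm & 7;            /* 4 in a memory form: SIB follows */
    *scale = (sib >> 6) & 3;       /* index scaled by 1, 2, 4 or 8 */
    *index = (sib >> 3) & 7;       /* 4 means "no index" */
    *base  = sib & 7;              /* 5 with mod 0 means disp32 only */
}
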
2503static void gen_nop_modrm(DisasContext *s, int modrm)
2504{
2505 int mod, rm, base, code;
2506
2507 mod = (modrm >> 6) & 3;
2508 if (mod == 3)
2509 return;
2510 rm = modrm & 7;
2511
2512 if (s->aflag) {
2513
2514 base = rm;
2515
2516 if (base == 4) {
2517 code = ldub_code(s->pc++);
2518 base = (code & 7);
2519 }
2520
2521 switch (mod) {
2522 case 0:
2523 if (base == 5) {
2524 s->pc += 4;
2525 }
2526 break;
2527 case 1:
2528 s->pc++;
2529 break;
2530 default:
2531 case 2:
2532 s->pc += 4;
2533 break;
2534 }
2535 } else {
2536 switch (mod) {
2537 case 0:
2538 if (rm == 6) {
2539 s->pc += 2;
2540 }
2541 break;
2542 case 1:
2543 s->pc++;
2544 break;
2545 default:
2546 case 2:
2547 s->pc += 2;
2548 break;
2549 }
2550 }
2551}
2552
2553/* used for LEA and MOV AX, mem */
2554static void gen_add_A0_ds_seg(DisasContext *s)
2555{
2556 int override, must_add_seg;
2557 must_add_seg = s->addseg;
2558 override = R_DS;
2559 if (s->override >= 0) {
2560 override = s->override;
2561 must_add_seg = 1;
2562 }
2565 if (must_add_seg) {
2566#ifdef TARGET_X86_64
2567 if (CODE64(s)) {
2568 gen_op_addq_A0_seg(override);
2569 } else
2570#endif
2571 {
2572 gen_op_addl_A0_seg(override);
2573 }
2574 }
2575}
2576
2577/* generate modrm memory load or store of 'reg'. TMP0 is used if reg ==
2578 OR_TMP0 */
2579static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
2580{
2581 int mod, rm, opreg, disp;
2582
2583 mod = (modrm >> 6) & 3;
2584 rm = (modrm & 7) | REX_B(s);
2585 if (mod == 3) {
2586 if (is_store) {
2587 if (reg != OR_TMP0)
2588 gen_op_mov_TN_reg(ot, 0, reg);
2589 gen_op_mov_reg_T0(ot, rm);
2590 } else {
2591 gen_op_mov_TN_reg(ot, 0, rm);
2592 if (reg != OR_TMP0)
2593 gen_op_mov_reg_T0(ot, reg);
2594 }
2595 } else {
2596 gen_lea_modrm(s, modrm, &opreg, &disp);
2597 if (is_store) {
2598 if (reg != OR_TMP0)
2599 gen_op_mov_TN_reg(ot, 0, reg);
2600 gen_op_st_T0_A0(ot + s->mem_index);
2601 } else {
2602 gen_op_ld_T0_A0(ot + s->mem_index);
2603 if (reg != OR_TMP0)
2604 gen_op_mov_reg_T0(ot, reg);
2605 }
2606 }
2607}
2608
2609#ifndef VBOX
2610static inline uint32_t insn_get(DisasContext *s, int ot)
2611#else /* VBOX */
2612DECLINLINE(uint32_t) insn_get(DisasContext *s, int ot)
2613#endif /* VBOX */
2614{
2615 uint32_t ret;
2616
2617 switch(ot) {
2618 case OT_BYTE:
2619 ret = ldub_code(s->pc);
2620 s->pc++;
2621 break;
2622 case OT_WORD:
2623 ret = lduw_code(s->pc);
2624 s->pc += 2;
2625 break;
2626 default:
2627 case OT_LONG:
2628 ret = ldl_code(s->pc);
2629 s->pc += 4;
2630 break;
2631 }
2632 return ret;
2633}
2634
2635#ifndef VBOX
2636static inline int insn_const_size(unsigned int ot)
2637#else /* VBOX */
2638DECLINLINE(int) insn_const_size(unsigned int ot)
2639#endif /* VBOX */
2640{
2641 if (ot <= OT_LONG)
2642 return 1 << ot;
2643 else
2644 return 4;
2645}
2646
2647#ifndef VBOX
2648static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2649#else /* VBOX */
2650DECLINLINE(void) gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2651#endif /* VBOX */
2652{
2653 TranslationBlock *tb;
2654 target_ulong pc;
2655
2656 pc = s->cs_base + eip;
2657 tb = s->tb;
2658 /* NOTE: we handle the case where the TB spans two pages here */
2659 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
2660 (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
2661 /* jump to same page: we can use a direct jump */
2662 tcg_gen_goto_tb(tb_num);
2663 gen_jmp_im(eip);
2664 tcg_gen_exit_tb((long)tb + tb_num);
2665 } else {
2666 /* jump to another page: currently not optimized */
2667 gen_jmp_im(eip);
2668 gen_eob(s);
2669 }
2670}
2671
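/* Illustrative sketch, not part of the translator: gen_goto_tb only
   chains directly when the target lies on one of the pages the TB was
   translated from, so the jump stays covered by the same page
   protection checks. The test above is essentially: */
static inline int example_same_page(target_ulong a, target_ulong b)
{
    return (a & TARGET_PAGE_MASK) == (b & TARGET_PAGE_MASK);
}
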
2672#ifndef VBOX
2673static inline void gen_jcc(DisasContext *s, int b,
2674#else /* VBOX */
2675DECLINLINE(void) gen_jcc(DisasContext *s, int b,
2676#endif /* VBOX */
2677 target_ulong val, target_ulong next_eip)
2678{
2679 int l1, l2, cc_op;
2680
2681 cc_op = s->cc_op;
2682 if (s->cc_op != CC_OP_DYNAMIC) {
2683 gen_op_set_cc_op(s->cc_op);
2684 s->cc_op = CC_OP_DYNAMIC;
2685 }
2686 if (s->jmp_opt) {
2687#ifdef VBOX
2688 gen_check_external_event(s);
2689#endif /* VBOX */
2690 l1 = gen_new_label();
2691 gen_jcc1(s, cc_op, b, l1);
2692
2693 gen_goto_tb(s, 0, next_eip);
2694
2695 gen_set_label(l1);
2696 gen_goto_tb(s, 1, val);
2697 s->is_jmp = 3;
2698 } else {
2699
2700 l1 = gen_new_label();
2701 l2 = gen_new_label();
2702 gen_jcc1(s, cc_op, b, l1);
2703
2704 gen_jmp_im(next_eip);
2705 tcg_gen_br(l2);
2706
2707 gen_set_label(l1);
2708 gen_jmp_im(val);
2709 gen_set_label(l2);
2710 gen_eob(s);
2711 }
2712}
2713
2714static void gen_setcc(DisasContext *s, int b)
2715{
2716 int inv, jcc_op, l1;
2717 TCGv t0;
2718
2719 if (is_fast_jcc_case(s, b)) {
2720 /* nominal case: we use a jump */
2721 /* XXX: make it faster by adding new instructions in TCG */
2722 t0 = tcg_temp_local_new(TCG_TYPE_TL);
2723 tcg_gen_movi_tl(t0, 0);
2724 l1 = gen_new_label();
2725 gen_jcc1(s, s->cc_op, b ^ 1, l1);
2726 tcg_gen_movi_tl(t0, 1);
2727 gen_set_label(l1);
2728 tcg_gen_mov_tl(cpu_T[0], t0);
2729 tcg_temp_free(t0);
2730 } else {
2731 /* slow case: it is more efficient not to generate a jump,
2732 although it is questionable whether this optimization is
2733 worth it */
2734 inv = b & 1;
2735 jcc_op = (b >> 1) & 7;
2736 gen_setcc_slow_T0(s, jcc_op);
2737 if (inv) {
2738 tcg_gen_xori_tl(cpu_T[0], cpu_T[0], 1);
2739 }
2740 }
2741}
2742
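/* Illustrative sketch, not part of the translator: x86 condition codes
   come in complementary pairs, with bit 0 of the encoding selecting
   the negated form; the inv/jcc_op split above relies on this.
   Hypothetically: */
static inline void example_split_cond(int b, int *inv, int *jcc_op)
{
    *inv    = b & 1;               /* 1 tests the inverse (JNE vs JE) */
    *jcc_op = (b >> 1) & 7;        /* O, B, Z, BE, S, P, L or LE */
}
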
2743#ifndef VBOX
2744static inline void gen_op_movl_T0_seg(int seg_reg)
2745#else /* VBOX */
2746DECLINLINE(void) gen_op_movl_T0_seg(int seg_reg)
2747#endif /* VBOX */
2748{
2749 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
2750 offsetof(CPUX86State,segs[seg_reg].selector));
2751}
2752
2753#ifndef VBOX
2754static inline void gen_op_movl_seg_T0_vm(int seg_reg)
2755#else /* VBOX */
2756DECLINLINE(void) gen_op_movl_seg_T0_vm(int seg_reg)
2757#endif /* VBOX */
2758{
2759 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
2760 tcg_gen_st32_tl(cpu_T[0], cpu_env,
2761 offsetof(CPUX86State,segs[seg_reg].selector));
2762 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
2763 tcg_gen_st_tl(cpu_T[0], cpu_env,
2764 offsetof(CPUX86State,segs[seg_reg].base));
2765#ifdef VBOX
2766 int flags = DESC_P_MASK | DESC_S_MASK | DESC_W_MASK;
2767 if (seg_reg == R_CS)
2768 flags |= DESC_CS_MASK;
2769 gen_op_movl_T0_im(flags);
2770 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[seg_reg].flags));
2771#endif
2772}
2773
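/* Illustrative sketch, not part of the translator: in real and VM86
   mode a segment base is just the selector shifted left by four, which
   is what the store sequence above writes back. The resulting linear
   address, as a hypothetical helper: */
static inline uint32_t example_vm86_linear(uint16_t sel, uint16_t off)
{
    return ((uint32_t)sel << 4) + off;  /* classic segment*16 + offset */
}
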
2774/* move T0 to seg_reg and check whether the CPU state may change. Never
2775 call this function with seg_reg == R_CS */
2776static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
2777{
2778 if (s->pe && !s->vm86) {
2779 /* XXX: optimize by finding processor state dynamically */
2780 if (s->cc_op != CC_OP_DYNAMIC)
2781 gen_op_set_cc_op(s->cc_op);
2782 gen_jmp_im(cur_eip);
2783 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2784 tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
2785 /* abort translation because the addseg value may change or
2786 because ss32 may change. For R_SS, translation must always
2787 stop, as special handling is needed to disable hardware
2788 interrupts for the next instruction */
2789 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
2790 s->is_jmp = 3;
2791 } else {
2792 gen_op_movl_seg_T0_vm(seg_reg);
2793 if (seg_reg == R_SS)
2794 s->is_jmp = 3;
2795 }
2796}
2797
2798#ifndef VBOX
2799static inline int svm_is_rep(int prefixes)
2800#else /* VBOX */
2801DECLINLINE(int) svm_is_rep(int prefixes)
2802#endif /* VBOX */
2803{
2804 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2805}
2806
2807#ifndef VBOX
2808static inline void
2809#else /* VBOX */
2810DECLINLINE(void)
2811#endif /* VBOX */
2812gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
2813 uint32_t type, uint64_t param)
2814{
2815 /* SVM not active; fast case */
2816 if (likely(!(s->flags & HF_SVMI_MASK)))
2817 return;
2818 if (s->cc_op != CC_OP_DYNAMIC)
2819 gen_op_set_cc_op(s->cc_op);
2820 gen_jmp_im(pc_start - s->cs_base);
2821 tcg_gen_helper_0_2(helper_svm_check_intercept_param,
2822 tcg_const_i32(type), tcg_const_i64(param));
2823}
2824
2825#ifndef VBOX
2826static inline void
2827#else /* VBOX */
2828DECLINLINE(void)
2829#endif
2830gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2831{
2832 gen_svm_check_intercept_param(s, pc_start, type, 0);
2833}
2834
2835#ifndef VBOX
2836static inline void gen_stack_update(DisasContext *s, int addend)
2837#else /* VBOX */
2838DECLINLINE(void) gen_stack_update(DisasContext *s, int addend)
2839#endif /* VBOX */
2840{
2841#ifdef TARGET_X86_64
2842 if (CODE64(s)) {
2843 gen_op_add_reg_im(2, R_ESP, addend);
2844 } else
2845#endif
2846 if (s->ss32) {
2847 gen_op_add_reg_im(1, R_ESP, addend);
2848 } else {
2849 gen_op_add_reg_im(0, R_ESP, addend);
2850 }
2851}
2852
2853/* generate a push. It depends on ss32, addseg and dflag */
2854static void gen_push_T0(DisasContext *s)
2855{
2856#ifdef TARGET_X86_64
2857 if (CODE64(s)) {
2858 gen_op_movq_A0_reg(R_ESP);
2859 if (s->dflag) {
2860 gen_op_addq_A0_im(-8);
2861 gen_op_st_T0_A0(OT_QUAD + s->mem_index);
2862 } else {
2863 gen_op_addq_A0_im(-2);
2864 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2865 }
2866 gen_op_mov_reg_A0(2, R_ESP);
2867 } else
2868#endif
2869 {
2870 gen_op_movl_A0_reg(R_ESP);
2871 if (!s->dflag)
2872 gen_op_addl_A0_im(-2);
2873 else
2874 gen_op_addl_A0_im(-4);
2875 if (s->ss32) {
2876 if (s->addseg) {
2877 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2878 gen_op_addl_A0_seg(R_SS);
2879 }
2880 } else {
2881 gen_op_andl_A0_ffff();
2882 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2883 gen_op_addl_A0_seg(R_SS);
2884 }
2885 gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
2886 if (s->ss32 && !s->addseg)
2887 gen_op_mov_reg_A0(1, R_ESP);
2888 else
2889 gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
2890 }
2891}
2892
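/* Illustrative sketch, not part of the translator: the push above
   computes the new top of stack and stores through it before ESP is
   updated, so a faulting store leaves ESP intact. A hypothetical
   flat-memory model of a 32-bit push: */
static inline void example_push32(uint32_t *esp, uint8_t *mem, uint32_t val)
{
    uint32_t new_esp = *esp - 4;        /* compute new top of stack */
    memcpy(mem + new_esp, &val, 4);     /* the store may fault here */
    *esp = new_esp;                     /* commit ESP only afterwards */
}
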
2893/* generate a push. It depends on ss32, addseg and dflag */
2894/* slower version for T1, only used for call Ev */
2895static void gen_push_T1(DisasContext *s)
2896{
2897#ifdef TARGET_X86_64
2898 if (CODE64(s)) {
2899 gen_op_movq_A0_reg(R_ESP);
2900 if (s->dflag) {
2901 gen_op_addq_A0_im(-8);
2902 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2903 } else {
2904 gen_op_addq_A0_im(-2);
2905 gen_op_st_T1_A0(OT_WORD + s->mem_index);
2906 }
2907 gen_op_mov_reg_A0(2, R_ESP);
2908 } else
2909#endif
2910 {
2911 gen_op_movl_A0_reg(R_ESP);
2912 if (!s->dflag)
2913 gen_op_addl_A0_im(-2);
2914 else
2915 gen_op_addl_A0_im(-4);
2916 if (s->ss32) {
2917 if (s->addseg) {
2918 gen_op_addl_A0_seg(R_SS);
2919 }
2920 } else {
2921 gen_op_andl_A0_ffff();
2922 gen_op_addl_A0_seg(R_SS);
2923 }
2924 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2925
2926 if (s->ss32 && !s->addseg)
2927 gen_op_mov_reg_A0(1, R_ESP);
2928 else
2929 gen_stack_update(s, (-2) << s->dflag);
2930 }
2931}
2932
2933/* a two-step pop is necessary for precise exceptions */
2934static void gen_pop_T0(DisasContext *s)
2935{
2936#ifdef TARGET_X86_64
2937 if (CODE64(s)) {
2938 gen_op_movq_A0_reg(R_ESP);
2939 gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
2940 } else
2941#endif
2942 {
2943 gen_op_movl_A0_reg(R_ESP);
2944 if (s->ss32) {
2945 if (s->addseg)
2946 gen_op_addl_A0_seg(R_SS);
2947 } else {
2948 gen_op_andl_A0_ffff();
2949 gen_op_addl_A0_seg(R_SS);
2950 }
2951 gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
2952 }
2953}
2954
2955static void gen_pop_update(DisasContext *s)
2956{
2957#ifdef TARGET_X86_64
2958 if (CODE64(s) && s->dflag) {
2959 gen_stack_update(s, 8);
2960 } else
2961#endif
2962 {
2963 gen_stack_update(s, 2 << s->dflag);
2964 }
2965}
2966
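/* Illustrative sketch, not part of the translator: the pop is split
   into a load (gen_pop_T0) and a separate ESP update (gen_pop_update)
   so that a faulting load leaves ESP untouched and the instruction
   can be restarted precisely. A hypothetical flat-memory model: */
static inline uint32_t example_pop32(uint32_t *esp, const uint8_t *mem)
{
    uint32_t val;
    memcpy(&val, mem + *esp, 4);        /* step 1: load; may fault */
    *esp += 4;                          /* step 2: only then adjust ESP */
    return val;
}
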
2967static void gen_stack_A0(DisasContext *s)
2968{
2969 gen_op_movl_A0_reg(R_ESP);
2970 if (!s->ss32)
2971 gen_op_andl_A0_ffff();
2972 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2973 if (s->addseg)
2974 gen_op_addl_A0_seg(R_SS);
2975}
2976
2977/* NOTE: wrap-around in 16-bit mode is not fully handled */
2978static void gen_pusha(DisasContext *s)
2979{
2980 int i;
2981 gen_op_movl_A0_reg(R_ESP);
2982 gen_op_addl_A0_im(-16 << s->dflag);
2983 if (!s->ss32)
2984 gen_op_andl_A0_ffff();
2985 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2986 if (s->addseg)
2987 gen_op_addl_A0_seg(R_SS);
2988 for (i = 0; i < 8; i++) {
2989 gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
2990 gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
2991 gen_op_addl_A0_im(2 << s->dflag);
2992 }
2993 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
2994}
2995
2996/* NOTE: wrap-around in 16-bit mode is not fully handled */
2997static void gen_popa(DisasContext *s)
2998{
2999 int i;
3000 gen_op_movl_A0_reg(R_ESP);
3001 if (!s->ss32)
3002 gen_op_andl_A0_ffff();
3003 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3004 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);
3005 if (s->addseg)
3006 gen_op_addl_A0_seg(R_SS);
3007 for (i = 0; i < 8; i++) {
3008 /* ESP is not reloaded */
3009 if (i != 3) {
3010 gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
3011 gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
3012 }
3013 gen_op_addl_A0_im(2 << s->dflag);
3014 }
3015 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
3016}
3017
3018static void gen_enter(DisasContext *s, int esp_addend, int level)
3019{
3020 int ot, opsize;
3021
3022 level &= 0x1f;
3023#ifdef TARGET_X86_64
3024 if (CODE64(s)) {
3025 ot = s->dflag ? OT_QUAD : OT_WORD;
3026 opsize = 1 << ot;
3027
3028 gen_op_movl_A0_reg(R_ESP);
3029 gen_op_addq_A0_im(-opsize);
3030 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3031
3032 /* push bp */
3033 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
3034 gen_op_st_T0_A0(ot + s->mem_index);
3035 if (level) {
3036 /* XXX: must save state */
3037 tcg_gen_helper_0_3(helper_enter64_level,
3038 tcg_const_i32(level),
3039 tcg_const_i32((ot == OT_QUAD)),
3040 cpu_T[1]);
3041 }
3042 gen_op_mov_reg_T1(ot, R_EBP);
3043 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
3044 gen_op_mov_reg_T1(OT_QUAD, R_ESP);
3045 } else
3046#endif
3047 {
3048 ot = s->dflag + OT_WORD;
3049 opsize = 2 << s->dflag;
3050
3051 gen_op_movl_A0_reg(R_ESP);
3052 gen_op_addl_A0_im(-opsize);
3053 if (!s->ss32)
3054 gen_op_andl_A0_ffff();
3055 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3056 if (s->addseg)
3057 gen_op_addl_A0_seg(R_SS);
3058 /* push bp */
3059 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
3060 gen_op_st_T0_A0(ot + s->mem_index);
3061 if (level) {
3062 /* XXX: must save state */
3063 tcg_gen_helper_0_3(helper_enter_level,
3064 tcg_const_i32(level),
3065 tcg_const_i32(s->dflag),
3066 cpu_T[1]);
3067 }
3068 gen_op_mov_reg_T1(ot, R_EBP);
3069 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
3070 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
3071 }
3072}
3073
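/* Illustrative sketch, not part of the translator: architecturally,
   ENTER pushes the old frame pointer, re-pushes 'level - 1' enclosing
   frame pointers plus the new one, then allocates the locals. A
   hypothetical flat-memory model of the 32-bit case: */
static inline void example_enter32(uint32_t *esp, uint32_t *ebp,
                                   uint8_t *mem, unsigned level,
                                   unsigned alloc_size)
{
    uint32_t frame;
    unsigned i;
    level &= 0x1f;                      /* level is taken mod 32 */
    *esp -= 4;
    memcpy(mem + *esp, ebp, 4);         /* push old EBP */
    frame = *esp;
    for (i = 1; i < level; i++) {       /* re-push enclosing frames */
        *ebp -= 4;
        *esp -= 4;
        memcpy(mem + *esp, mem + *ebp, 4);
    }
    if (level > 0) {
        *esp -= 4;
        memcpy(mem + *esp, &frame, 4);  /* push the new frame pointer */
    }
    *ebp = frame;
    *esp -= alloc_size;                 /* allocate the locals */
}
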
3074static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
3075{
3076 if (s->cc_op != CC_OP_DYNAMIC)
3077 gen_op_set_cc_op(s->cc_op);
3078 gen_jmp_im(cur_eip);
3079 tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
3080 s->is_jmp = 3;
3081}
3082
3083/* an interrupt is different from an exception because of the
3084 privilege checks */
3085static void gen_interrupt(DisasContext *s, int intno,
3086 target_ulong cur_eip, target_ulong next_eip)
3087{
3088 if (s->cc_op != CC_OP_DYNAMIC)
3089 gen_op_set_cc_op(s->cc_op);
3090 gen_jmp_im(cur_eip);
3091 tcg_gen_helper_0_2(helper_raise_interrupt,
3092 tcg_const_i32(intno),
3093 tcg_const_i32(next_eip - cur_eip));
3094 s->is_jmp = 3;
3095}
3096
3097static void gen_debug(DisasContext *s, target_ulong cur_eip)
3098{
3099 if (s->cc_op != CC_OP_DYNAMIC)
3100 gen_op_set_cc_op(s->cc_op);
3101 gen_jmp_im(cur_eip);
3102 tcg_gen_helper_0_0(helper_debug);
3103 s->is_jmp = 3;
3104}
3105
3106/* generate a generic end of block. A trace exception is also generated
3107 if needed */
3108static void gen_eob(DisasContext *s)
3109{
3110 if (s->cc_op != CC_OP_DYNAMIC)
3111 gen_op_set_cc_op(s->cc_op);
3112 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
3113 tcg_gen_helper_0_0(helper_reset_inhibit_irq);
3114 }
3115 if (s->singlestep_enabled) {
3116 tcg_gen_helper_0_0(helper_debug);
3117 } else if (s->tf) {
3118 tcg_gen_helper_0_0(helper_single_step);
3119 } else {
3120 tcg_gen_exit_tb(0);
3121 }
3122 s->is_jmp = 3;
3123}
3124
3125/* generate a jump to eip. No segment change must happen before this, as a
3126 direct call to the next block may occur */
3127static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
3128{
3129 if (s->jmp_opt) {
3130#ifdef VBOX
3131 gen_check_external_event(s);
3132#endif /* VBOX */
3133 if (s->cc_op != CC_OP_DYNAMIC) {
3134 gen_op_set_cc_op(s->cc_op);
3135 s->cc_op = CC_OP_DYNAMIC;
3136 }
3137 gen_goto_tb(s, tb_num, eip);
3138 s->is_jmp = 3;
3139 } else {
3140 gen_jmp_im(eip);
3141 gen_eob(s);
3142 }
3143}
3144
3145static void gen_jmp(DisasContext *s, target_ulong eip)
3146{
3147 gen_jmp_tb(s, eip, 0);
3148}
3149
3150#ifndef VBOX
3151static inline void gen_ldq_env_A0(int idx, int offset)
3152#else /* VBOX */
3153DECLINLINE(void) gen_ldq_env_A0(int idx, int offset)
3154#endif /* VBOX */
3155{
3156 int mem_index = (idx >> 2) - 1;
3157 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
3158 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
3159}
3160
3161#ifndef VBOX
3162static inline void gen_stq_env_A0(int idx, int offset)
3163#else /* VBOX */
3164DECLINLINE(void) gen_stq_env_A0(int idx, int offset)
3165#endif /* VBOX */
3166{
3167 int mem_index = (idx >> 2) - 1;
3168 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
3169 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
3170}
3171
3172#ifndef VBOX
3173static inline void gen_ldo_env_A0(int idx, int offset)
3174#else /* VBOX */
3175DECLINLINE(void) gen_ldo_env_A0(int idx, int offset)
3176#endif /* VBOX */
3177{
3178 int mem_index = (idx >> 2) - 1;
3179 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
3180 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
3181 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
3182 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
3183 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
3184}
3185
3186#ifndef VBOX
3187static inline void gen_sto_env_A0(int idx, int offset)
3188#else /* VBOX */
3189DECLINLINE(void) gen_sto_env_A0(int idx, int offset)
3190#endif /* VBOX */
3191{
3192 int mem_index = (idx >> 2) - 1;
3193 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
3194 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
3195 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
3196 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
3197 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
3198}
3199
3200#ifndef VBOX
3201static inline void gen_op_movo(int d_offset, int s_offset)
3202#else /* VBOX */
3203DECLINLINE(void) gen_op_movo(int d_offset, int s_offset)
3204#endif /* VBOX */
3205{
3206 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
3207 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3208 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
3209 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
3210}
3211
3212#ifndef VBOX
3213static inline void gen_op_movq(int d_offset, int s_offset)
3214#else /* VBOX */
3215DECLINLINE(void) gen_op_movq(int d_offset, int s_offset)
3216#endif /* VBOX */
3217{
3218 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
3219 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3220}
3221
3222#ifndef VBOX
3223static inline void gen_op_movl(int d_offset, int s_offset)
3224#else /* VBOX */
3225DECLINLINE(void) gen_op_movl(int d_offset, int s_offset)
3226#endif /* VBOX */
3227{
3228 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
3229 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
3230}
3231
3232#ifndef VBOX
3233static inline void gen_op_movq_env_0(int d_offset)
3234#else /* VBOX */
3235DECLINLINE(void) gen_op_movq_env_0(int d_offset)
3236#endif /* VBOX */
3237{
3238 tcg_gen_movi_i64(cpu_tmp1_i64, 0);
3239 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3240}
3241
3242#define SSE_SPECIAL ((void *)1)
3243#define SSE_DUMMY ((void *)2)
3244
3245#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
3246#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
3247 helper_ ## x ## ss, helper_ ## x ## sd, }
3248
3249static void *sse_op_table1[256][4] = {
3250 /* 3DNow! extensions */
3251 [0x0e] = { SSE_DUMMY }, /* femms */
3252 [0x0f] = { SSE_DUMMY }, /* pf... */
3253 /* pure SSE operations */
3254 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
3255 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
3256 [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
3257 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
3258 [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
3259 [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
3260 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
3261 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
3262
3263 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
3264 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
3265 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
3266 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
3267 [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
3268 [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
3269 [0x2e] = { helper_ucomiss, helper_ucomisd },
3270 [0x2f] = { helper_comiss, helper_comisd },
3271 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
3272 [0x51] = SSE_FOP(sqrt),
3273 [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
3274 [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
3275 [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
3276 [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
3277 [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
3278 [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
3279 [0x58] = SSE_FOP(add),
3280 [0x59] = SSE_FOP(mul),
3281 [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
3282 helper_cvtss2sd, helper_cvtsd2ss },
3283 [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
3284 [0x5c] = SSE_FOP(sub),
3285 [0x5d] = SSE_FOP(min),
3286 [0x5e] = SSE_FOP(div),
3287 [0x5f] = SSE_FOP(max),
3288
3289 [0xc2] = SSE_FOP(cmpeq),
3290 [0xc6] = { helper_shufps, helper_shufpd },
3291
3292 [0x38] = { SSE_SPECIAL, SSE_SPECIAL, NULL, SSE_SPECIAL }, /* SSSE3/SSE4 */
3293 [0x3a] = { SSE_SPECIAL, SSE_SPECIAL }, /* SSSE3/SSE4 */
3294
3295 /* MMX ops and their SSE extensions */
3296 [0x60] = MMX_OP2(punpcklbw),
3297 [0x61] = MMX_OP2(punpcklwd),
3298 [0x62] = MMX_OP2(punpckldq),
3299 [0x63] = MMX_OP2(packsswb),
3300 [0x64] = MMX_OP2(pcmpgtb),
3301 [0x65] = MMX_OP2(pcmpgtw),
3302 [0x66] = MMX_OP2(pcmpgtl),
3303 [0x67] = MMX_OP2(packuswb),
3304 [0x68] = MMX_OP2(punpckhbw),
3305 [0x69] = MMX_OP2(punpckhwd),
3306 [0x6a] = MMX_OP2(punpckhdq),
3307 [0x6b] = MMX_OP2(packssdw),
3308 [0x6c] = { NULL, helper_punpcklqdq_xmm },
3309 [0x6d] = { NULL, helper_punpckhqdq_xmm },
3310 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
3311 [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
3312 [0x70] = { helper_pshufw_mmx,
3313 helper_pshufd_xmm,
3314 helper_pshufhw_xmm,
3315 helper_pshuflw_xmm },
3316 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
3317 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
3318 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
3319 [0x74] = MMX_OP2(pcmpeqb),
3320 [0x75] = MMX_OP2(pcmpeqw),
3321 [0x76] = MMX_OP2(pcmpeql),
3322 [0x77] = { SSE_DUMMY }, /* emms */
3323 [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
3324 [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
3325 [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, movq */
3326 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
3327 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
3328 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
3329 [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
3330 [0xd1] = MMX_OP2(psrlw),
3331 [0xd2] = MMX_OP2(psrld),
3332 [0xd3] = MMX_OP2(psrlq),
3333 [0xd4] = MMX_OP2(paddq),
3334 [0xd5] = MMX_OP2(pmullw),
3335 [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movq2dq, movdq2q */
3336 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
3337 [0xd8] = MMX_OP2(psubusb),
3338 [0xd9] = MMX_OP2(psubusw),
3339 [0xda] = MMX_OP2(pminub),
3340 [0xdb] = MMX_OP2(pand),
3341 [0xdc] = MMX_OP2(paddusb),
3342 [0xdd] = MMX_OP2(paddusw),
3343 [0xde] = MMX_OP2(pmaxub),
3344 [0xdf] = MMX_OP2(pandn),
3345 [0xe0] = MMX_OP2(pavgb),
3346 [0xe1] = MMX_OP2(psraw),
3347 [0xe2] = MMX_OP2(psrad),
3348 [0xe3] = MMX_OP2(pavgw),
3349 [0xe4] = MMX_OP2(pmulhuw),
3350 [0xe5] = MMX_OP2(pmulhw),
3351 [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
3352 [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntdq */
3353 [0xe8] = MMX_OP2(psubsb),
3354 [0xe9] = MMX_OP2(psubsw),
3355 [0xea] = MMX_OP2(pminsw),
3356 [0xeb] = MMX_OP2(por),
3357 [0xec] = MMX_OP2(paddsb),
3358 [0xed] = MMX_OP2(paddsw),
3359 [0xee] = MMX_OP2(pmaxsw),
3360 [0xef] = MMX_OP2(pxor),
3361 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
3362 [0xf1] = MMX_OP2(psllw),
3363 [0xf2] = MMX_OP2(pslld),
3364 [0xf3] = MMX_OP2(psllq),
3365 [0xf4] = MMX_OP2(pmuludq),
3366 [0xf5] = MMX_OP2(pmaddwd),
3367 [0xf6] = MMX_OP2(psadbw),
3368 [0xf7] = MMX_OP2(maskmov),
3369 [0xf8] = MMX_OP2(psubb),
3370 [0xf9] = MMX_OP2(psubw),
3371 [0xfa] = MMX_OP2(psubl),
3372 [0xfb] = MMX_OP2(psubq),
3373 [0xfc] = MMX_OP2(paddb),
3374 [0xfd] = MMX_OP2(paddw),
3375 [0xfe] = MMX_OP2(paddl),
3376};
3377
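/* Illustrative sketch, not part of the translator: the second index
   into sse_op_table1 is derived from the mandatory prefix, which is
   how a single opcode byte fans out into up to four instructions
   (e.g. 0x10: movups/movupd/movss/movsd). The mapping gen_sse uses: */
static inline int example_sse_column(int prefix)
{
    if (prefix & PREFIX_DATA)           /* 66: packed double / integer */
        return 1;
    if (prefix & PREFIX_REPZ)           /* F3: scalar single */
        return 2;
    if (prefix & PREFIX_REPNZ)          /* F2: scalar double */
        return 3;
    return 0;                           /* none: packed single / MMX */
}
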
3378static void *sse_op_table2[3 * 8][2] = {
3379 [0 + 2] = MMX_OP2(psrlw),
3380 [0 + 4] = MMX_OP2(psraw),
3381 [0 + 6] = MMX_OP2(psllw),
3382 [8 + 2] = MMX_OP2(psrld),
3383 [8 + 4] = MMX_OP2(psrad),
3384 [8 + 6] = MMX_OP2(pslld),
3385 [16 + 2] = MMX_OP2(psrlq),
3386 [16 + 3] = { NULL, helper_psrldq_xmm },
3387 [16 + 6] = MMX_OP2(psllq),
3388 [16 + 7] = { NULL, helper_pslldq_xmm },
3389};
3390
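/* Illustrative sketch, not part of the translator: sse_op_table2 is
   indexed by the shift group (opcode 0x71/0x72/0x73) and the reg field
   of the ModRM byte, which gen_sse later computes as
   ((b - 1) & 3) * 8 + reg. Hypothetically: */
static inline int example_shift_table_index(int b, int modrm)
{
    int group = (b - 1) & 3;            /* 0x71 -> 0, 0x72 -> 1, 0x73 -> 2 */
    int reg = (modrm >> 3) & 7;         /* selects psrl/psra/psll etc. */
    return group * 8 + reg;
}
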
3391static void *sse_op_table3[4 * 3] = {
3392 helper_cvtsi2ss,
3393 helper_cvtsi2sd,
3394 X86_64_ONLY(helper_cvtsq2ss),
3395 X86_64_ONLY(helper_cvtsq2sd),
3396
3397 helper_cvttss2si,
3398 helper_cvttsd2si,
3399 X86_64_ONLY(helper_cvttss2sq),
3400 X86_64_ONLY(helper_cvttsd2sq),
3401
3402 helper_cvtss2si,
3403 helper_cvtsd2si,
3404 X86_64_ONLY(helper_cvtss2sq),
3405 X86_64_ONLY(helper_cvtsd2sq),
3406};
3407
3408static void *sse_op_table4[8][4] = {
3409 SSE_FOP(cmpeq),
3410 SSE_FOP(cmplt),
3411 SSE_FOP(cmple),
3412 SSE_FOP(cmpunord),
3413 SSE_FOP(cmpneq),
3414 SSE_FOP(cmpnlt),
3415 SSE_FOP(cmpnle),
3416 SSE_FOP(cmpord),
3417};
3418
3419static void *sse_op_table5[256] = {
3420 [0x0c] = helper_pi2fw,
3421 [0x0d] = helper_pi2fd,
3422 [0x1c] = helper_pf2iw,
3423 [0x1d] = helper_pf2id,
3424 [0x8a] = helper_pfnacc,
3425 [0x8e] = helper_pfpnacc,
3426 [0x90] = helper_pfcmpge,
3427 [0x94] = helper_pfmin,
3428 [0x96] = helper_pfrcp,
3429 [0x97] = helper_pfrsqrt,
3430 [0x9a] = helper_pfsub,
3431 [0x9e] = helper_pfadd,
3432 [0xa0] = helper_pfcmpgt,
3433 [0xa4] = helper_pfmax,
3434 [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
3435 [0xa7] = helper_movq, /* pfrsqit1 */
3436 [0xaa] = helper_pfsubr,
3437 [0xae] = helper_pfacc,
3438 [0xb0] = helper_pfcmpeq,
3439 [0xb4] = helper_pfmul,
3440 [0xb6] = helper_movq, /* pfrcpit2 */
3441 [0xb7] = helper_pmulhrw_mmx,
3442 [0xbb] = helper_pswapd,
3443 [0xbf] = helper_pavgb_mmx /* pavgusb */
3444};
3445
3446struct sse_op_helper_s {
3447 void *op[2]; uint32_t ext_mask;
3448};
3449#define SSSE3_OP(x) { MMX_OP2(x), CPUID_EXT_SSSE3 }
3450#define SSE41_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE41 }
3451#define SSE42_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE42 }
3452#define SSE41_SPECIAL { { NULL, SSE_SPECIAL }, CPUID_EXT_SSE41 }
3453static struct sse_op_helper_s sse_op_table6[256] = {
3454 [0x00] = SSSE3_OP(pshufb),
3455 [0x01] = SSSE3_OP(phaddw),
3456 [0x02] = SSSE3_OP(phaddd),
3457 [0x03] = SSSE3_OP(phaddsw),
3458 [0x04] = SSSE3_OP(pmaddubsw),
3459 [0x05] = SSSE3_OP(phsubw),
3460 [0x06] = SSSE3_OP(phsubd),
3461 [0x07] = SSSE3_OP(phsubsw),
3462 [0x08] = SSSE3_OP(psignb),
3463 [0x09] = SSSE3_OP(psignw),
3464 [0x0a] = SSSE3_OP(psignd),
3465 [0x0b] = SSSE3_OP(pmulhrsw),
3466 [0x10] = SSE41_OP(pblendvb),
3467 [0x14] = SSE41_OP(blendvps),
3468 [0x15] = SSE41_OP(blendvpd),
3469 [0x17] = SSE41_OP(ptest),
3470 [0x1c] = SSSE3_OP(pabsb),
3471 [0x1d] = SSSE3_OP(pabsw),
3472 [0x1e] = SSSE3_OP(pabsd),
3473 [0x20] = SSE41_OP(pmovsxbw),
3474 [0x21] = SSE41_OP(pmovsxbd),
3475 [0x22] = SSE41_OP(pmovsxbq),
3476 [0x23] = SSE41_OP(pmovsxwd),
3477 [0x24] = SSE41_OP(pmovsxwq),
3478 [0x25] = SSE41_OP(pmovsxdq),
3479 [0x28] = SSE41_OP(pmuldq),
3480 [0x29] = SSE41_OP(pcmpeqq),
3481 [0x2a] = SSE41_SPECIAL, /* movntdqa */
3482 [0x2b] = SSE41_OP(packusdw),
3483 [0x30] = SSE41_OP(pmovzxbw),
3484 [0x31] = SSE41_OP(pmovzxbd),
3485 [0x32] = SSE41_OP(pmovzxbq),
3486 [0x33] = SSE41_OP(pmovzxwd),
3487 [0x34] = SSE41_OP(pmovzxwq),
3488 [0x35] = SSE41_OP(pmovzxdq),
3489 [0x37] = SSE42_OP(pcmpgtq),
3490 [0x38] = SSE41_OP(pminsb),
3491 [0x39] = SSE41_OP(pminsd),
3492 [0x3a] = SSE41_OP(pminuw),
3493 [0x3b] = SSE41_OP(pminud),
3494 [0x3c] = SSE41_OP(pmaxsb),
3495 [0x3d] = SSE41_OP(pmaxsd),
3496 [0x3e] = SSE41_OP(pmaxuw),
3497 [0x3f] = SSE41_OP(pmaxud),
3498 [0x40] = SSE41_OP(pmulld),
3499 [0x41] = SSE41_OP(phminposuw),
3500};
3501
3502static struct sse_op_helper_s sse_op_table7[256] = {
3503 [0x08] = SSE41_OP(roundps),
3504 [0x09] = SSE41_OP(roundpd),
3505 [0x0a] = SSE41_OP(roundss),
3506 [0x0b] = SSE41_OP(roundsd),
3507 [0x0c] = SSE41_OP(blendps),
3508 [0x0d] = SSE41_OP(blendpd),
3509 [0x0e] = SSE41_OP(pblendw),
3510 [0x0f] = SSSE3_OP(palignr),
3511 [0x14] = SSE41_SPECIAL, /* pextrb */
3512 [0x15] = SSE41_SPECIAL, /* pextrw */
3513 [0x16] = SSE41_SPECIAL, /* pextrd/pextrq */
3514 [0x17] = SSE41_SPECIAL, /* extractps */
3515 [0x20] = SSE41_SPECIAL, /* pinsrb */
3516 [0x21] = SSE41_SPECIAL, /* insertps */
3517 [0x22] = SSE41_SPECIAL, /* pinsrd/pinsrq */
3518 [0x40] = SSE41_OP(dpps),
3519 [0x41] = SSE41_OP(dppd),
3520 [0x42] = SSE41_OP(mpsadbw),
3521 [0x60] = SSE42_OP(pcmpestrm),
3522 [0x61] = SSE42_OP(pcmpestri),
3523 [0x62] = SSE42_OP(pcmpistrm),
3524 [0x63] = SSE42_OP(pcmpistri),
3525};
3526
3527static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
3528{
3529 int b1, op1_offset, op2_offset, is_xmm, val, ot;
3530 int modrm, mod, rm, reg, reg_addr, offset_addr;
3531 void *sse_op2;
3532
3533 b &= 0xff;
3534 if (s->prefix & PREFIX_DATA)
3535 b1 = 1;
3536 else if (s->prefix & PREFIX_REPZ)
3537 b1 = 2;
3538 else if (s->prefix & PREFIX_REPNZ)
3539 b1 = 3;
3540 else
3541 b1 = 0;
3542 sse_op2 = sse_op_table1[b][b1];
3543 if (!sse_op2)
3544 goto illegal_op;
3545 if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
3546 is_xmm = 1;
3547 } else {
3548 if (b1 == 0) {
3549 /* MMX case */
3550 is_xmm = 0;
3551 } else {
3552 is_xmm = 1;
3553 }
3554 }
3555 /* simple MMX/SSE operation */
3556 if (s->flags & HF_TS_MASK) {
3557 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
3558 return;
3559 }
3560 if (s->flags & HF_EM_MASK) {
3561 illegal_op:
3562 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
3563 return;
3564 }
3565 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
3566 if ((b != 0x38 && b != 0x3a) || (s->prefix & PREFIX_DATA))
3567 goto illegal_op;
3568 if (b == 0x0e) {
3569 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3570 goto illegal_op;
3571 /* femms */
3572 tcg_gen_helper_0_0(helper_emms);
3573 return;
3574 }
3575 if (b == 0x77) {
3576 /* emms */
3577 tcg_gen_helper_0_0(helper_emms);
3578 return;
3579 }
3580 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
3581 the static cpu state) */
3582 if (!is_xmm) {
3583 tcg_gen_helper_0_0(helper_enter_mmx);
3584 }
3585
3586 modrm = ldub_code(s->pc++);
3587 reg = ((modrm >> 3) & 7);
3588 if (is_xmm)
3589 reg |= rex_r;
3590 mod = (modrm >> 6) & 3;
3591 if (sse_op2 == SSE_SPECIAL) {
3592 b |= (b1 << 8);
3593 switch(b) {
3594 case 0x0e7: /* movntq */
3595 if (mod == 3)
3596 goto illegal_op;
3597 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3598 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3599 break;
3600 case 0x1e7: /* movntdq */
3601 case 0x02b: /* movntps */
3602 case 0x12b: /* movntpd */
3603 case 0x3f0: /* lddqu */
3604 if (mod == 3)
3605 goto illegal_op;
3606 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3607 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3608 break;
3609 case 0x6e: /* movd mm, ea */
3610#ifdef TARGET_X86_64
3611 if (s->dflag == 2) {
3612 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3613 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
3614 } else
3615#endif
3616 {
3617 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3618 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3619 offsetof(CPUX86State,fpregs[reg].mmx));
3620 tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
3621 }
3622 break;
3623 case 0x16e: /* movd xmm, ea */
3624#ifdef TARGET_X86_64
3625 if (s->dflag == 2) {
3626 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3627 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3628 offsetof(CPUX86State,xmm_regs[reg]));
3629 tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
3630 } else
3631#endif
3632 {
3633 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3634 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3635 offsetof(CPUX86State,xmm_regs[reg]));
3636 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3637 tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
3638 }
3639 break;
3640 case 0x6f: /* movq mm, ea */
3641 if (mod != 3) {
3642 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3643 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3644 } else {
3645 rm = (modrm & 7);
3646 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
3647 offsetof(CPUX86State,fpregs[rm].mmx));
3648 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
3649 offsetof(CPUX86State,fpregs[reg].mmx));
3650 }
3651 break;
3652 case 0x010: /* movups */
3653 case 0x110: /* movupd */
3654 case 0x028: /* movaps */
3655 case 0x128: /* movapd */
3656 case 0x16f: /* movdqa xmm, ea */
3657 case 0x26f: /* movdqu xmm, ea */
3658 if (mod != 3) {
3659 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3660 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3661 } else {
3662 rm = (modrm & 7) | REX_B(s);
3663 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
3664 offsetof(CPUX86State,xmm_regs[rm]));
3665 }
3666 break;
3667 case 0x210: /* movss xmm, ea */
3668 if (mod != 3) {
3669 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3670 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3671 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3672 gen_op_movl_T0_0();
3673 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3674 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3675 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3676 } else {
3677 rm = (modrm & 7) | REX_B(s);
3678 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3679 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3680 }
3681 break;
3682 case 0x310: /* movsd xmm, ea */
3683 if (mod != 3) {
3684 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3685 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3686 gen_op_movl_T0_0();
3687 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3688 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3689 } else {
3690 rm = (modrm & 7) | REX_B(s);
3691 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3692 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3693 }
3694 break;
3695 case 0x012: /* movlps */
3696 case 0x112: /* movlpd */
3697 if (mod != 3) {
3698 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3699 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3700 } else {
3701 /* movhlps */
3702 rm = (modrm & 7) | REX_B(s);
3703 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3704 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3705 }
3706 break;
3707 case 0x212: /* movsldup */
3708 if (mod != 3) {
3709 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3710 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3711 } else {
3712 rm = (modrm & 7) | REX_B(s);
3713 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3714 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3715 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3716 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
3717 }
3718 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3719 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3720 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3721 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3722 break;
3723 case 0x312: /* movddup */
3724 if (mod != 3) {
3725 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3726 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3727 } else {
3728 rm = (modrm & 7) | REX_B(s);
3729 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3730 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3731 }
3732 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3733 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3734 break;
3735 case 0x016: /* movhps */
3736 case 0x116: /* movhpd */
3737 if (mod != 3) {
3738 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3739 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3740 } else {
3741 /* movlhps */
3742 rm = (modrm & 7) | REX_B(s);
3743 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3744 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3745 }
3746 break;
3747 case 0x216: /* movshdup */
3748 if (mod != 3) {
3749 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3750 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3751 } else {
3752 rm = (modrm & 7) | REX_B(s);
3753 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3754 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
3755 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3756 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
3757 }
3758 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3759 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3760 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3761 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3762 break;
3763 case 0x7e: /* movd ea, mm */
3764#ifdef TARGET_X86_64
3765 if (s->dflag == 2) {
3766 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3767 offsetof(CPUX86State,fpregs[reg].mmx));
3768 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3769 } else
3770#endif
3771 {
3772 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3773 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
3774 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3775 }
3776 break;
3777 case 0x17e: /* movd ea, xmm */
3778#ifdef TARGET_X86_64
3779 if (s->dflag == 2) {
3780 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3781 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3782 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3783 } else
3784#endif
3785 {
3786 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3787 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3788 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3789 }
3790 break;
3791 case 0x27e: /* movq xmm, ea */
3792 if (mod != 3) {
3793 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3794 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3795 } else {
3796 rm = (modrm & 7) | REX_B(s);
3797 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3798 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3799 }
3800 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3801 break;
3802 case 0x7f: /* movq ea, mm */
3803 if (mod != 3) {
3804 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3805 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3806 } else {
3807 rm = (modrm & 7);
3808 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3809 offsetof(CPUX86State,fpregs[reg].mmx));
3810 }
3811 break;
3812 case 0x011: /* movups */
3813 case 0x111: /* movupd */
3814 case 0x029: /* movaps */
3815 case 0x129: /* movapd */
3816 case 0x17f: /* movdqa ea, xmm */
3817 case 0x27f: /* movdqu ea, xmm */
3818 if (mod != 3) {
3819 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3820 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3821 } else {
3822 rm = (modrm & 7) | REX_B(s);
3823 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
3824 offsetof(CPUX86State,xmm_regs[reg]));
3825 }
3826 break;
3827 case 0x211: /* movss ea, xmm */
3828 if (mod != 3) {
3829 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3830 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3831 gen_op_st_T0_A0(OT_LONG + s->mem_index);
3832 } else {
3833 rm = (modrm & 7) | REX_B(s);
3834 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
3835 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3836 }
3837 break;
3838 case 0x311: /* movsd ea, xmm */
3839 if (mod != 3) {
3840 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3841 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3842 } else {
3843 rm = (modrm & 7) | REX_B(s);
3844 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3845 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3846 }
3847 break;
3848 case 0x013: /* movlps */
3849 case 0x113: /* movlpd */
3850 if (mod != 3) {
3851 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3852 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3853 } else {
3854 goto illegal_op;
3855 }
3856 break;
3857 case 0x017: /* movhps */
3858 case 0x117: /* movhpd */
3859 if (mod != 3) {
3860 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3861 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3862 } else {
3863 goto illegal_op;
3864 }
3865 break;
3866 case 0x71: /* shift mm, im */
3867 case 0x72:
3868 case 0x73:
3869 case 0x171: /* shift xmm, im */
3870 case 0x172:
3871 case 0x173:
3872 val = ldub_code(s->pc++);
3873 if (is_xmm) {
3874 gen_op_movl_T0_im(val);
3875 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3876 gen_op_movl_T0_0();
3877 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
3878 op1_offset = offsetof(CPUX86State,xmm_t0);
3879 } else {
3880 gen_op_movl_T0_im(val);
3881 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
3882 gen_op_movl_T0_0();
3883 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
3884 op1_offset = offsetof(CPUX86State,mmx_t0);
3885 }
3886 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + ((modrm >> 3) & 7)][b1];
3887 if (!sse_op2)
3888 goto illegal_op;
3889 if (is_xmm) {
3890 rm = (modrm & 7) | REX_B(s);
3891 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3892 } else {
3893 rm = (modrm & 7);
3894 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3895 }
3896 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3897 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3898 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3899 break;
3900 case 0x050: /* movmskps */
3901 rm = (modrm & 7) | REX_B(s);
3902 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3903 offsetof(CPUX86State,xmm_regs[rm]));
3904 tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
3905 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3906 gen_op_mov_reg_T0(OT_LONG, reg);
3907 break;
3908 case 0x150: /* movmskpd */
3909 rm = (modrm & 7) | REX_B(s);
3910 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3911 offsetof(CPUX86State,xmm_regs[rm]));
3912 tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
3913 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3914 gen_op_mov_reg_T0(OT_LONG, reg);
3915 break;
3916 case 0x02a: /* cvtpi2ps */
3917 case 0x12a: /* cvtpi2pd */
3918 tcg_gen_helper_0_0(helper_enter_mmx);
3919 if (mod != 3) {
3920 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3921 op2_offset = offsetof(CPUX86State,mmx_t0);
3922 gen_ldq_env_A0(s->mem_index, op2_offset);
3923 } else {
3924 rm = (modrm & 7);
3925 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3926 }
3927 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3928 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3929 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3930 switch(b >> 8) {
3931 case 0x0:
3932 tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
3933 break;
3934 default:
3935 case 0x1:
3936 tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
3937 break;
3938 }
3939 break;
3940 case 0x22a: /* cvtsi2ss */
3941 case 0x32a: /* cvtsi2sd */
3942 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3943 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3944 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3945 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3946 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3947 if (ot == OT_LONG) {
3948 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3949 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
3950 } else {
3951 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_T[0]);
3952 }
3953 break;
3954 case 0x02c: /* cvttps2pi */
3955 case 0x12c: /* cvttpd2pi */
3956 case 0x02d: /* cvtps2pi */
3957 case 0x12d: /* cvtpd2pi */
3958 tcg_gen_helper_0_0(helper_enter_mmx);
3959 if (mod != 3) {
3960 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3961 op2_offset = offsetof(CPUX86State,xmm_t0);
3962 gen_ldo_env_A0(s->mem_index, op2_offset);
3963 } else {
3964 rm = (modrm & 7) | REX_B(s);
3965 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3966 }
3967 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3968 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3969 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3970 switch(b) {
3971 case 0x02c:
3972 tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
3973 break;
3974 case 0x12c:
3975 tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
3976 break;
3977 case 0x02d:
3978 tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
3979 break;
3980 case 0x12d:
3981 tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
3982 break;
3983 }
3984 break;
3985 case 0x22c: /* cvttss2si */
3986 case 0x32c: /* cvttsd2si */
3987 case 0x22d: /* cvtss2si */
3988 case 0x32d: /* cvtsd2si */
3989 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3990 if (mod != 3) {
3991 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3992 if ((b >> 8) & 1) {
3993 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
3994 } else {
3995 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3996 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3997 }
3998 op2_offset = offsetof(CPUX86State,xmm_t0);
3999 } else {
4000 rm = (modrm & 7) | REX_B(s);
4001 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
4002 }
4003 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
4004 (b & 1) * 4];
4005 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
4006 if (ot == OT_LONG) {
4007 tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
4008 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4009 } else {
4010 tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
4011 }
4012 gen_op_mov_reg_T0(ot, reg);
4013 break;
4014 case 0xc4: /* pinsrw */
4015 case 0x1c4:
4016 s->rip_offset = 1;
4017 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4018 val = ldub_code(s->pc++);
4019 if (b1) {
4020 val &= 7;
4021 tcg_gen_st16_tl(cpu_T[0], cpu_env,
4022 offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
4023 } else {
4024 val &= 3;
4025 tcg_gen_st16_tl(cpu_T[0], cpu_env,
4026 offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
4027 }
4028 break;
4029 case 0xc5: /* pextrw */
4030 case 0x1c5:
4031 if (mod != 3)
4032 goto illegal_op;
4033 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4034 val = ldub_code(s->pc++);
4035 if (b1) {
4036 val &= 7;
4037 rm = (modrm & 7) | REX_B(s);
4038 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
4039 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
4040 } else {
4041 val &= 3;
4042 rm = (modrm & 7);
4043 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
4044 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
4045 }
4046 reg = ((modrm >> 3) & 7) | rex_r;
4047 gen_op_mov_reg_T0(ot, reg);
4048 break;
4049 case 0x1d6: /* movq ea, xmm */
4050 if (mod != 3) {
4051 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4052 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
4053 } else {
4054 rm = (modrm & 7) | REX_B(s);
4055 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
4056 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
4057 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
4058 }
4059 break;
4060 case 0x2d6: /* movq2dq */
4061 tcg_gen_helper_0_0(helper_enter_mmx);
4062 rm = (modrm & 7);
4063 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
4064 offsetof(CPUX86State,fpregs[rm].mmx));
4065 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
4066 break;
4067 case 0x3d6: /* movdq2q */
4068 tcg_gen_helper_0_0(helper_enter_mmx);
4069 rm = (modrm & 7) | REX_B(s);
4070 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
4071 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
4072 break;
4073 case 0xd7: /* pmovmskb */
4074 case 0x1d7:
4075 if (mod != 3)
4076 goto illegal_op;
4077 if (b1) {
4078 rm = (modrm & 7) | REX_B(s);
4079 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
4080 tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
4081 } else {
4082 rm = (modrm & 7);
4083 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
4084 tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
4085 }
4086 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4087 reg = ((modrm >> 3) & 7) | rex_r;
4088 gen_op_mov_reg_T0(OT_LONG, reg);
4089 break;
4090 case 0x138:
4091 if (s->prefix & PREFIX_REPNZ)
4092 goto crc32;
4093 case 0x038:
4094 b = modrm;
4095 modrm = ldub_code(s->pc++);
4096 rm = modrm & 7;
4097 reg = ((modrm >> 3) & 7) | rex_r;
4098 mod = (modrm >> 6) & 3;
4099
4100 sse_op2 = sse_op_table6[b].op[b1];
4101 if (!sse_op2)
4102 goto illegal_op;
4103 if (!(s->cpuid_ext_features & sse_op_table6[b].ext_mask))
4104 goto illegal_op;
4105
4106 if (b1) {
4107 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4108 if (mod == 3) {
4109 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
4110 } else {
4111 op2_offset = offsetof(CPUX86State,xmm_t0);
4112 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4113 switch (b) {
4114 case 0x20: case 0x30: /* pmovsxbw, pmovzxbw */
4115 case 0x23: case 0x33: /* pmovsxwd, pmovzxwd */
4116 case 0x25: case 0x35: /* pmovsxdq, pmovzxdq */
4117 gen_ldq_env_A0(s->mem_index, op2_offset +
4118 offsetof(XMMReg, XMM_Q(0)));
4119 break;
4120 case 0x21: case 0x31: /* pmovsxbd, pmovzxbd */
4121 case 0x24: case 0x34: /* pmovsxwq, pmovzxwq */
4122 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
4123 (s->mem_index >> 2) - 1);
4124 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, op2_offset +
4125 offsetof(XMMReg, XMM_L(0)));
4126 break;
4127 case 0x22: case 0x32: /* pmovsxbq, pmovzxbq */
4128 tcg_gen_qemu_ld16u(cpu_tmp0, cpu_A0,
4129 (s->mem_index >> 2) - 1);
4130 tcg_gen_st16_tl(cpu_tmp0, cpu_env, op2_offset +
4131 offsetof(XMMReg, XMM_W(0)));
4132 break;
4133 case 0x2a: /* movntdqa */
4134 gen_ldo_env_A0(s->mem_index, op1_offset);
4135 return;
4136 default:
4137 gen_ldo_env_A0(s->mem_index, op2_offset);
4138 }
4139 }
4140 } else {
4141 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4142 if (mod == 3) {
4143 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4144 } else {
4145 op2_offset = offsetof(CPUX86State,mmx_t0);
4146 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4147 gen_ldq_env_A0(s->mem_index, op2_offset);
4148 }
4149 }
4150 if (sse_op2 == SSE_SPECIAL)
4151 goto illegal_op;
4152
4153 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4154 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4155 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4156
4157 if (b == 0x17)
4158 s->cc_op = CC_OP_EFLAGS;
4159 break;
4160 case 0x338: /* crc32 */
4161 crc32:
4162 b = modrm;
4163 modrm = ldub_code(s->pc++);
4164 reg = ((modrm >> 3) & 7) | rex_r;
4165
4166 if (b != 0xf0 && b != 0xf1)
4167 goto illegal_op;
4168 if (!(s->cpuid_ext_features & CPUID_EXT_SSE42))
4169 goto illegal_op;
4170
4171 if (b == 0xf0)
4172 ot = OT_BYTE;
4173 else if (b == 0xf1 && s->dflag != 2)
4174 if (s->prefix & PREFIX_DATA)
4175 ot = OT_WORD;
4176 else
4177 ot = OT_LONG;
4178 else
4179 ot = OT_QUAD;
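 /* 0xf0 is the 8-bit form; for 0xf1 the source width follows the
    operand-size prefixes (16/32 bit, or 64 bit with REX.W). The
    width is passed to the helper below as a bit count (8 << ot). */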
4180
4181 gen_op_mov_TN_reg(OT_LONG, 0, reg);
4182 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4183 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4184 tcg_gen_helper_1_3(helper_crc32, cpu_T[0], cpu_tmp2_i32,
4185 cpu_T[0], tcg_const_i32(8 << ot));
4186
4187 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4188 gen_op_mov_reg_T0(ot, reg);
4189 break;
4190 case 0x03a:
4191 case 0x13a:
4192 b = modrm;
4193 modrm = ldub_code(s->pc++);
4194 rm = modrm & 7;
4195 reg = ((modrm >> 3) & 7) | rex_r;
4196 mod = (modrm >> 6) & 3;
4197
4198 sse_op2 = sse_op_table7[b].op[b1];
4199 if (!sse_op2)
4200 goto illegal_op;
4201 if (!(s->cpuid_ext_features & sse_op_table7[b].ext_mask))
4202 goto illegal_op;
4203
4204 if (sse_op2 == SSE_SPECIAL) {
4205 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4206 rm = (modrm & 7) | REX_B(s);
4207 if (mod != 3)
4208 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4209 reg = ((modrm >> 3) & 7) | rex_r;
4210 val = ldub_code(s->pc++);
4211 switch (b) {
4212 case 0x14: /* pextrb */
4213 tcg_gen_ld8u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4214 xmm_regs[reg].XMM_B(val & 15)));
4215 if (mod == 3)
4216 gen_op_mov_reg_T0(ot, rm);
4217 else
4218 tcg_gen_qemu_st8(cpu_T[0], cpu_A0,
4219 (s->mem_index >> 2) - 1);
4220 break;
4221 case 0x15: /* pextrw */
4222 tcg_gen_ld16u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4223 xmm_regs[reg].XMM_W(val & 7)));
4224 if (mod == 3)
4225 gen_op_mov_reg_T0(ot, rm);
4226 else
4227 tcg_gen_qemu_st16(cpu_T[0], cpu_A0,
4228 (s->mem_index >> 2) - 1);
4229 break;
4230 case 0x16:
4231 if (ot == OT_LONG) { /* pextrd */
4232 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
4233 offsetof(CPUX86State,
4234 xmm_regs[reg].XMM_L(val & 3)));
4235 if (mod == 3)
4236 gen_op_mov_reg_v(ot, rm, cpu_tmp2_i32);
4237 else
4238 tcg_gen_qemu_st32(cpu_tmp2_i32, cpu_A0,
4239 (s->mem_index >> 2) - 1);
4240 } else { /* pextrq */
4241 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
4242 offsetof(CPUX86State,
4243 xmm_regs[reg].XMM_Q(val & 1)));
4244 if (mod == 3)
4245 gen_op_mov_reg_v(ot, rm, cpu_tmp1_i64);
4246 else
4247 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
4248 (s->mem_index >> 2) - 1);
4249 }
4250 break;
4251 case 0x17: /* extractps */
4252 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4253 xmm_regs[reg].XMM_L(val & 3)));
4254 if (mod == 3)
4255 gen_op_mov_reg_T0(ot, rm);
4256 else
4257 tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
4258 (s->mem_index >> 2) - 1);
4259 break;
4260 case 0x20: /* pinsrb */
4261 if (mod == 3)
4262 gen_op_mov_TN_reg(OT_LONG, 0, rm);
4263 else
4264 tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0,
4265 (s->mem_index >> 2) - 1);
4266 tcg_gen_st8_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4267 xmm_regs[reg].XMM_B(val & 15)));
4268 break;
4269 case 0x21: /* insertps */
4270 if (mod == 3)
4271 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
4272 offsetof(CPUX86State,xmm_regs[rm]
4273 .XMM_L((val >> 6) & 3)));
4274 else
4275 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
4276 (s->mem_index >> 2) - 1);
4277 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
4278 offsetof(CPUX86State,xmm_regs[reg]
4279 .XMM_L((val >> 4) & 3)));
4280 if ((val >> 0) & 1)
4281 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4282 cpu_env, offsetof(CPUX86State,
4283 xmm_regs[reg].XMM_L(0)));
4284 if ((val >> 1) & 1)
4285 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4286 cpu_env, offsetof(CPUX86State,
4287 xmm_regs[reg].XMM_L(1)));
4288 if ((val >> 2) & 1)
4289 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4290 cpu_env, offsetof(CPUX86State,
4291 xmm_regs[reg].XMM_L(2)));
4292 if ((val >> 3) & 1)
4293 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4294 cpu_env, offsetof(CPUX86State,
4295 xmm_regs[reg].XMM_L(3)));
4296 break;
4297 case 0x22:
4298 if (ot == OT_LONG) { /* pinsrd */
4299 if (mod == 3)
4300 gen_op_mov_v_reg(ot, cpu_tmp2_i32, rm);
4301 else
4302 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
4303 (s->mem_index >> 2) - 1);
4304 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
4305 offsetof(CPUX86State,
4306 xmm_regs[reg].XMM_L(val & 3)));
4307 } else { /* pinsrq */
4308 if (mod == 3)
4309 gen_op_mov_v_reg(ot, cpu_tmp1_i64, rm);
4310 else
4311 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
4312 (s->mem_index >> 2) - 1);
4313 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
4314 offsetof(CPUX86State,
4315 xmm_regs[reg].XMM_Q(val & 1)));
4316 }
4317 break;
4318 }
4319 return;
4320 }
4321
4322 if (b1) {
4323 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4324 if (mod == 3) {
4325 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
4326 } else {
4327 op2_offset = offsetof(CPUX86State,xmm_t0);
4328 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4329 gen_ldo_env_A0(s->mem_index, op2_offset);
4330 }
4331 } else {
4332 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4333 if (mod == 3) {
4334 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4335 } else {
4336 op2_offset = offsetof(CPUX86State,mmx_t0);
4337 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4338 gen_ldq_env_A0(s->mem_index, op2_offset);
4339 }
4340 }
4341 val = ldub_code(s->pc++);
4342
4343 if ((b & 0xfc) == 0x60) { /* pcmpXstrX */
4344 s->cc_op = CC_OP_EFLAGS;
4345
4346 if (s->dflag == 2)
4347 /* The helper must use the full 64-bit GP registers */
4348 val |= 1 << 8;
4349 }
4350
4351 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4352 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4353 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
4354 break;
4355 default:
4356 goto illegal_op;
4357 }
4358 } else {
4359 /* generic MMX or SSE operation */
4360 switch(b) {
4361 case 0x70: /* pshufx insn */
4362 case 0xc6: /* shufps/shufpd */
4363 case 0xc2: /* compare insns */
4364 s->rip_offset = 1;
4365 break;
4366 default:
4367 break;
4368 }
4369 if (is_xmm) {
4370 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4371 if (mod != 3) {
4372 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4373 op2_offset = offsetof(CPUX86State,xmm_t0);
4374 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
4375 b == 0xc2)) {
4376 /* specific case for SSE scalar instructions */
4377 if (b1 == 2) {
4378 /* 32 bit access */
4379 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4380 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
4381 } else {
4382 /* 64 bit access */
4383 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
4384 }
4385 } else {
4386 gen_ldo_env_A0(s->mem_index, op2_offset);
4387 }
4388 } else {
4389 rm = (modrm & 7) | REX_B(s);
4390 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
4391 }
4392 } else {
4393 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4394 if (mod != 3) {
4395 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4396 op2_offset = offsetof(CPUX86State,mmx_t0);
4397 gen_ldq_env_A0(s->mem_index, op2_offset);
4398 } else {
4399 rm = (modrm & 7);
4400 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4401 }
4402 }
4403 switch(b) {
4404 case 0x0f: /* 3DNow! data insns */
4405 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
4406 goto illegal_op;
4407 val = ldub_code(s->pc++);
4408 sse_op2 = sse_op_table5[val];
4409 if (!sse_op2)
4410 goto illegal_op;
4411 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4412 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4413 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4414 break;
4415 case 0x70: /* pshufx insn */
4416 case 0xc6: /* shufps/shufpd */
4417 val = ldub_code(s->pc++);
4418 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4419 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4420 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
4421 break;
4422 case 0xc2:
4423 /* compare insns */
4424 val = ldub_code(s->pc++);
4425 if (val >= 8)
4426 goto illegal_op;
4427 sse_op2 = sse_op_table4[val][b1];
4428 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4429 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4430 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4431 break;
4432 case 0xf7:
4433 /* maskmov: we must prepare A0 */
4434 if (mod != 3)
4435 goto illegal_op;
4436#ifdef TARGET_X86_64
4437 if (s->aflag == 2) {
4438 gen_op_movq_A0_reg(R_EDI);
4439 } else
4440#endif
4441 {
4442 gen_op_movl_A0_reg(R_EDI);
4443 if (s->aflag == 0)
4444 gen_op_andl_A0_ffff();
4445 }
4446 gen_add_A0_ds_seg(s);
4447
4448 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4449 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4450 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
4451 break;
4452 default:
4453 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4454 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4455 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4456 break;
4457 }
4458 if (b == 0x2e || b == 0x2f) {
4459 s->cc_op = CC_OP_EFLAGS;
4460 }
4461 }
4462}
4463
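#if 0
/* Illustrative sketch only (hypothetical helper name, kept out of the
   build): the dispatch pattern gen_sse() uses above. SSE/MMX operands
   are byte offsets into CPUX86State; the generated code materializes
   them as host pointers and passes both to a helper that does the
   actual arithmetic at run time. */
static void example_sse_binop(void *sse_helper, int reg, int rm)
{
    int op1_offset = offsetof(CPUX86State, xmm_regs[reg]);
    int op2_offset = offsetof(CPUX86State, xmm_regs[rm]);
    tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);    /* &env->xmm_regs[reg] */
    tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);    /* &env->xmm_regs[rm]  */
    tcg_gen_helper_0_2(sse_helper, cpu_ptr0, cpu_ptr1); /* helper(dst, src) */
}
#endif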
4464#ifdef VBOX
4465/* Checks if it's an invalid lock sequence. Only a few instructions
4466 can be used together with the lock prefix, and of those only the
4467 forms that write a memory operand. So, this is kind of annoying
4468 work to do...
4469 The AMD manual lists the following instructions.
4470 ADC
4471 ADD
4472 AND
4473 BTC
4474 BTR
4475 BTS
4476 CMPXCHG
4477 CMPXCHG8B
4478 CMPXCHG16B
4479 DEC
4480 INC
4481 NEG
4482 NOT
4483 OR
4484 SBB
4485 SUB
4486 XADD
4487 XCHG
4488 XOR */
4489static bool is_invalid_lock_sequence(DisasContext *s, target_ulong pc_start, int b)
4490{
4491 target_ulong pc = s->pc;
4492 int modrm, mod, op;
4493
4494 /* X={8,16,32,64} Y={16,32,64} */
4495 switch (b)
4496 {
4497 /* /2: ADC reg/memX, immX */
4498 /* /0: ADD reg/memX, immX */
4499 /* /4: AND reg/memX, immX */
4500 /* /1: OR reg/memX, immX */
4501 /* /3: SBB reg/memX, immX */
4502 /* /5: SUB reg/memX, immX */
4503 /* /6: XOR reg/memX, immX */
4504 case 0x80:
4505 case 0x81:
4506 case 0x83:
4507 modrm = ldub_code(pc++);
4508 op = (modrm >> 3) & 7;
4509 if (op == 7) /* /7: CMP */
4510 break;
4511 mod = (modrm >> 6) & 3;
4512 if (mod == 3) /* register destination */
4513 break;
4514 return false;
4515
4516 case 0x10: /* /r: ADC reg/mem8, reg8 */
4517 case 0x11: /* /r: ADC reg/memX, regY */
4518 case 0x00: /* /r: ADD reg/mem8, reg8 */
4519 case 0x01: /* /r: ADD reg/memX, regY */
4520 case 0x20: /* /r: AND reg/mem8, reg8 */
4521 case 0x21: /* /r: AND reg/memY, regY */
4522 case 0x08: /* /r: OR reg/mem8, reg8 */
4523 case 0x09: /* /r: OR reg/memY, regY */
4524 case 0x18: /* /r: SBB reg/mem8, reg8 */
4525 case 0x19: /* /r: SBB reg/memY, regY */
4526 case 0x28: /* /r: SUB reg/mem8, reg8 */
4527 case 0x29: /* /r: SUB reg/memY, regY */
4528 case 0x86: /* /r: XCHG reg/mem8, reg8 or XCHG reg8, reg/mem8 */
4529 case 0x87: /* /r: XCHG reg/memY, regY or XCHG regY, reg/memY */
4530 case 0x30: /* /r: XOR reg/mem8, reg8 */
4531 case 0x31: /* /r: XOR reg/memY, regY */
4532 modrm = ldub_code(pc++);
4533 mod = (modrm >> 6) & 3;
4534 if (mod == 3) /* register destination */
4535 break;
4536 return false;
4537
4538 /* /1: DEC reg/memX */
4539 /* /0: INC reg/memX */
4540 case 0xfe:
4541 case 0xff:
4542 modrm = ldub_code(pc++);
4543 mod = (modrm >> 6) & 3;
4544 if (mod == 3) /* register destination */
4545 break;
4546 return false;
4547
4548 /* /3: NEG reg/memX */
4549 /* /2: NOT reg/memX */
4550 case 0xf6:
4551 case 0xf7:
4552 modrm = ldub_code(pc++);
4553 mod = (modrm >> 6) & 3;
4554 if (mod == 3) /* register destination */
4555 break;
4556 return false;
4557
4558 case 0x0f:
4559 b = ldub_code(pc++);
4560 switch (b)
4561 {
4562 /* /7: BTC reg/memY, imm8 */
4563 /* /6: BTR reg/memY, imm8 */
4564 /* /5: BTS reg/memY, imm8 */
4565 case 0xba:
4566 modrm = ldub_code(pc++);
4567 op = (modrm >> 3) & 7;
4568 if (op < 5)
4569 break;
4570 mod = (modrm >> 6) & 3;
4571 if (mod == 3) /* register destination */
4572 break;
4573 return false;
4574
4575 case 0xbb: /* /r: BTC reg/memY, regY */
4576 case 0xb3: /* /r: BTR reg/memY, regY */
4577 case 0xab: /* /r: BTS reg/memY, regY */
4578 case 0xb0: /* /r: CMPXCHG reg/mem8, reg8 */
4579 case 0xb1: /* /r: CMPXCHG reg/memY, regY */
4580 case 0xc0: /* /r: XADD reg/mem8, reg8 */
4581 case 0xc1: /* /r: XADD reg/memY, regY */
4582 modrm = ldub_code(pc++);
4583 mod = (modrm >> 6) & 3;
4584 if (mod == 3) /* register destination */
4585 break;
4586 return false;
4587
4588 /* /1: CMPXCHG8B mem64 or CMPXCHG16B mem128 */
4589 case 0xc7:
4590 modrm = ldub_code(pc++);
4591 op = (modrm >> 3) & 7;
4592 if (op != 1)
4593 break;
4594 return false;
4595 }
4596 break;
4597 }
4598
4599 /* illegal sequence. The s->pc is past the lock prefix and that
4600 is sufficient for the TB, I think. */
4601 Log(("illegal lock sequence %RGv (b=%#x)\n", pc_start, b));
4602 return true;
4603}
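#if 0
/* Illustrative sketch only (hypothetical helpers, kept out of the
   build): how the mod/op/rm fields tested above unpack from a ModRM
   byte. mod == 3 selects the register form, which is never lockable;
   the reg field doubles as the /digit opcode extension. */
static inline int example_modrm_mod(uint8_t modrm) { return (modrm >> 6) & 3; }
static inline int example_modrm_op(uint8_t modrm)  { return (modrm >> 3) & 7; }
static inline int example_modrm_rm(uint8_t modrm)  { return modrm & 7; }
#endif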
4604#endif /* VBOX */
4605
4606
4607/* convert one instruction. s->is_jmp is set if the translation must
4608 be stopped. Return the next pc value */
4609static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
4610{
4611 int b, prefixes, aflag, dflag;
4612 int shift, ot;
4613 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
4614 target_ulong next_eip, tval;
4615 int rex_w, rex_r;
4616
4617 if (unlikely(loglevel & CPU_LOG_TB_OP))
4618 tcg_gen_debug_insn_start(pc_start);
4619 s->pc = pc_start;
4620 prefixes = 0;
4621 aflag = s->code32;
4622 dflag = s->code32;
4623 s->override = -1;
4624 rex_w = -1;
4625 rex_r = 0;
4626#ifdef TARGET_X86_64
4627 s->rex_x = 0;
4628 s->rex_b = 0;
4629 x86_64_hregs = 0;
4630#endif
4631 s->rip_offset = 0; /* for relative ip address */
4632#ifdef VBOX
4633 /* Always update EIP. Otherwise one must be very careful with generated code that can raise exceptions. */
4634 gen_update_eip(pc_start - s->cs_base);
4635#endif
4636 next_byte:
4637 b = ldub_code(s->pc);
4638 s->pc++;
4639 /* check prefixes */
4640#ifdef TARGET_X86_64
4641 if (CODE64(s)) {
4642 switch (b) {
4643 case 0xf3:
4644 prefixes |= PREFIX_REPZ;
4645 goto next_byte;
4646 case 0xf2:
4647 prefixes |= PREFIX_REPNZ;
4648 goto next_byte;
4649 case 0xf0:
4650 prefixes |= PREFIX_LOCK;
4651 goto next_byte;
4652 case 0x2e:
4653 s->override = R_CS;
4654 goto next_byte;
4655 case 0x36:
4656 s->override = R_SS;
4657 goto next_byte;
4658 case 0x3e:
4659 s->override = R_DS;
4660 goto next_byte;
4661 case 0x26:
4662 s->override = R_ES;
4663 goto next_byte;
4664 case 0x64:
4665 s->override = R_FS;
4666 goto next_byte;
4667 case 0x65:
4668 s->override = R_GS;
4669 goto next_byte;
4670 case 0x66:
4671 prefixes |= PREFIX_DATA;
4672 goto next_byte;
4673 case 0x67:
4674 prefixes |= PREFIX_ADR;
4675 goto next_byte;
4676 case 0x40 ... 0x4f:
4677 /* REX prefix */
4678 rex_w = (b >> 3) & 1;
4679 rex_r = (b & 0x4) << 1;
4680 s->rex_x = (b & 0x2) << 2;
4681 REX_B(s) = (b & 0x1) << 3;
4682 x86_64_hregs = 1; /* select uniform byte register addressing */
4683 goto next_byte;
4684 }
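 /* A REX byte is 0100WRXB: W selects 64-bit operand size, and R, X
    and B are pre-shifted above so they can be OR'ed in as bit 3 of
    the ModRM reg field, the SIB index and the base/rm field
    (e.g. 0x48 = REX.W, 0x44 = REX.R). */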
4685 if (rex_w == 1) {
4686 /* 0x66 is ignored if rex.w is set */
4687 dflag = 2;
4688 } else {
4689 if (prefixes & PREFIX_DATA)
4690 dflag ^= 1;
4691 }
4692 if (!(prefixes & PREFIX_ADR))
4693 aflag = 2;
4694 } else
4695#endif
4696 {
4697 switch (b) {
4698 case 0xf3:
4699 prefixes |= PREFIX_REPZ;
4700 goto next_byte;
4701 case 0xf2:
4702 prefixes |= PREFIX_REPNZ;
4703 goto next_byte;
4704 case 0xf0:
4705 prefixes |= PREFIX_LOCK;
4706 goto next_byte;
4707 case 0x2e:
4708 s->override = R_CS;
4709 goto next_byte;
4710 case 0x36:
4711 s->override = R_SS;
4712 goto next_byte;
4713 case 0x3e:
4714 s->override = R_DS;
4715 goto next_byte;
4716 case 0x26:
4717 s->override = R_ES;
4718 goto next_byte;
4719 case 0x64:
4720 s->override = R_FS;
4721 goto next_byte;
4722 case 0x65:
4723 s->override = R_GS;
4724 goto next_byte;
4725 case 0x66:
4726 prefixes |= PREFIX_DATA;
4727 goto next_byte;
4728 case 0x67:
4729 prefixes |= PREFIX_ADR;
4730 goto next_byte;
4731 }
4732 if (prefixes & PREFIX_DATA)
4733 dflag ^= 1;
4734 if (prefixes & PREFIX_ADR)
4735 aflag ^= 1;
4736 }
4737
4738 s->prefix = prefixes;
4739 s->aflag = aflag;
4740 s->dflag = dflag;
4741
4742 /* lock generation */
4743#ifndef VBOX
4744 if (prefixes & PREFIX_LOCK)
4745 tcg_gen_helper_0_0(helper_lock);
4746#else /* VBOX */
4747 if (prefixes & PREFIX_LOCK) {
4748 if (is_invalid_lock_sequence(s, pc_start, b)) {
4749 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
4750 return s->pc;
4751 }
4752 tcg_gen_helper_0_0(helper_lock);
4753 }
4754#endif /* VBOX */
4755
4756 /* now check op code */
4757 reswitch:
4758 switch(b) {
4759 case 0x0f:
4760 /**************************/
4761 /* extended op code */
4762 b = ldub_code(s->pc++) | 0x100;
4763 goto reswitch;
4764
4765 /**************************/
4766 /* arith & logic */
4767 case 0x00 ... 0x05:
4768 case 0x08 ... 0x0d:
4769 case 0x10 ... 0x15:
4770 case 0x18 ... 0x1d:
4771 case 0x20 ... 0x25:
4772 case 0x28 ... 0x2d:
4773 case 0x30 ... 0x35:
4774 case 0x38 ... 0x3d:
4775 {
4776 int op, f, val;
4777 op = (b >> 3) & 7;
4778 f = (b >> 1) & 3;
4779
4780 if ((b & 1) == 0)
4781 ot = OT_BYTE;
4782 else
4783 ot = dflag + OT_WORD;
4784
4785 switch(f) {
4786 case 0: /* OP Ev, Gv */
4787 modrm = ldub_code(s->pc++);
4788 reg = ((modrm >> 3) & 7) | rex_r;
4789 mod = (modrm >> 6) & 3;
4790 rm = (modrm & 7) | REX_B(s);
4791 if (mod != 3) {
4792 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4793 opreg = OR_TMP0;
4794 } else if (op == OP_XORL && rm == reg) {
4795 xor_zero:
4796 /* xor reg, reg optimisation */
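 /* e.g. 31 C0 (xor %eax,%eax): the result is always zero, so store
    a zero directly and compute the flags from it instead of emitting
    the xor itself. */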
4797 gen_op_movl_T0_0();
4798 s->cc_op = CC_OP_LOGICB + ot;
4799 gen_op_mov_reg_T0(ot, reg);
4800 gen_op_update1_cc();
4801 break;
4802 } else {
4803 opreg = rm;
4804 }
4805 gen_op_mov_TN_reg(ot, 1, reg);
4806 gen_op(s, op, ot, opreg);
4807 break;
4808 case 1: /* OP Gv, Ev */
4809 modrm = ldub_code(s->pc++);
4810 mod = (modrm >> 6) & 3;
4811 reg = ((modrm >> 3) & 7) | rex_r;
4812 rm = (modrm & 7) | REX_B(s);
4813 if (mod != 3) {
4814 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4815 gen_op_ld_T1_A0(ot + s->mem_index);
4816 } else if (op == OP_XORL && rm == reg) {
4817 goto xor_zero;
4818 } else {
4819 gen_op_mov_TN_reg(ot, 1, rm);
4820 }
4821 gen_op(s, op, ot, reg);
4822 break;
4823 case 2: /* OP A, Iv */
4824 val = insn_get(s, ot);
4825 gen_op_movl_T1_im(val);
4826 gen_op(s, op, ot, OR_EAX);
4827 break;
4828 }
4829 }
4830 break;
4831
4832 case 0x82:
4833 if (CODE64(s))
4834 goto illegal_op;
4835 case 0x80: /* GRP1 */
4836 case 0x81:
4837 case 0x83:
4838 {
4839 int val;
4840
4841 if ((b & 1) == 0)
4842 ot = OT_BYTE;
4843 else
4844 ot = dflag + OT_WORD;
4845
4846 modrm = ldub_code(s->pc++);
4847 mod = (modrm >> 6) & 3;
4848 rm = (modrm & 7) | REX_B(s);
4849 op = (modrm >> 3) & 7;
4850
4851 if (mod != 3) {
4852 if (b == 0x83)
4853 s->rip_offset = 1;
4854 else
4855 s->rip_offset = insn_const_size(ot);
4856 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4857 opreg = OR_TMP0;
4858 } else {
4859 opreg = rm;
4860 }
4861
4862 switch(b) {
4863 default:
4864 case 0x80:
4865 case 0x81:
4866 case 0x82:
4867 val = insn_get(s, ot);
4868 break;
4869 case 0x83:
4870 val = (int8_t)insn_get(s, OT_BYTE);
4871 break;
4872 }
4873 gen_op_movl_T1_im(val);
4874 gen_op(s, op, ot, opreg);
4875 }
4876 break;
4877
4878 /**************************/
4879 /* inc, dec, and other misc arith */
4880 case 0x40 ... 0x47: /* inc Gv */
4881 ot = dflag ? OT_LONG : OT_WORD;
4882 gen_inc(s, ot, OR_EAX + (b & 7), 1);
4883 break;
4884 case 0x48 ... 0x4f: /* dec Gv */
4885 ot = dflag ? OT_LONG : OT_WORD;
4886 gen_inc(s, ot, OR_EAX + (b & 7), -1);
4887 break;
4888 case 0xf6: /* GRP3 */
4889 case 0xf7:
4890 if ((b & 1) == 0)
4891 ot = OT_BYTE;
4892 else
4893 ot = dflag + OT_WORD;
4894
4895 modrm = ldub_code(s->pc++);
4896 mod = (modrm >> 6) & 3;
4897 rm = (modrm & 7) | REX_B(s);
4898 op = (modrm >> 3) & 7;
4899 if (mod != 3) {
4900 if (op == 0)
4901 s->rip_offset = insn_const_size(ot);
4902 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4903 gen_op_ld_T0_A0(ot + s->mem_index);
4904 } else {
4905 gen_op_mov_TN_reg(ot, 0, rm);
4906 }
4907
4908 switch(op) {
4909 case 0: /* test */
4910 val = insn_get(s, ot);
4911 gen_op_movl_T1_im(val);
4912 gen_op_testl_T0_T1_cc();
4913 s->cc_op = CC_OP_LOGICB + ot;
4914 break;
4915 case 2: /* not */
4916 tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
4917 if (mod != 3) {
4918 gen_op_st_T0_A0(ot + s->mem_index);
4919 } else {
4920 gen_op_mov_reg_T0(ot, rm);
4921 }
4922 break;
4923 case 3: /* neg */
4924 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
4925 if (mod != 3) {
4926 gen_op_st_T0_A0(ot + s->mem_index);
4927 } else {
4928 gen_op_mov_reg_T0(ot, rm);
4929 }
4930 gen_op_update_neg_cc();
4931 s->cc_op = CC_OP_SUBB + ot;
4932 break;
4933 case 4: /* mul */
4934 switch(ot) {
4935 case OT_BYTE:
4936 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
4937 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4938 tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
4939 /* XXX: use 32 bit mul which could be faster */
4940 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4941 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4942 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4943 tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
4944 s->cc_op = CC_OP_MULB;
4945 break;
4946 case OT_WORD:
4947 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
4948 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
4949 tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
4950 /* XXX: use 32 bit mul which could be faster */
4951 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4952 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4953 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4954 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
4955 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4956 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4957 s->cc_op = CC_OP_MULW;
4958 break;
4959 default:
4960 case OT_LONG:
4961#ifdef TARGET_X86_64
4962 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4963 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
4964 tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
4965 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4966 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4967 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4968 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
4969 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4970 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4971#else
4972 {
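 /* Without a 64-bit target register, widen both 32-bit operands to
    i64, multiply once, and split the product: the low half goes to
    EAX, the high half (after a 32-bit right shift) to EDX. */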
4973 TCGv t0, t1;
4974 t0 = tcg_temp_new(TCG_TYPE_I64);
4975 t1 = tcg_temp_new(TCG_TYPE_I64);
4976 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4977 tcg_gen_extu_i32_i64(t0, cpu_T[0]);
4978 tcg_gen_extu_i32_i64(t1, cpu_T[1]);
4979 tcg_gen_mul_i64(t0, t0, t1);
4980 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4981 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4982 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4983 tcg_gen_shri_i64(t0, t0, 32);
4984 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4985 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4986 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4987 }
4988#endif
4989 s->cc_op = CC_OP_MULL;
4990 break;
4991#ifdef TARGET_X86_64
4992 case OT_QUAD:
4993 tcg_gen_helper_0_1(helper_mulq_EAX_T0, cpu_T[0]);
4994 s->cc_op = CC_OP_MULQ;
4995 break;
4996#endif
4997 }
4998 break;
4999 case 5: /* imul */
5000 switch(ot) {
5001 case OT_BYTE:
5002 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
5003 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5004 tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
5005 /* XXX: use 32 bit mul which could be faster */
5006 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5007 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5008 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5009 tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
5010 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5011 s->cc_op = CC_OP_MULB;
5012 break;
5013 case OT_WORD:
5014 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
5015 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5016 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5017 /* XXX: use 32 bit mul which could be faster */
5018 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5019 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5020 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5021 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5022 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5023 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
5024 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5025 s->cc_op = CC_OP_MULW;
5026 break;
5027 default:
5028 case OT_LONG:
5029#ifdef TARGET_X86_64
5030 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5031 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5032 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
5033 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5034 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5035 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5036 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
5037 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5038 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
5039 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5040#else
5041 {
5042 TCGv t0, t1;
5043 t0 = tcg_temp_new(TCG_TYPE_I64);
5044 t1 = tcg_temp_new(TCG_TYPE_I64);
5045 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5046 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
5047 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
5048 tcg_gen_mul_i64(t0, t0, t1);
5049 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5050 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5051 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5052 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
5053 tcg_gen_shri_i64(t0, t0, 32);
5054 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5055 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5056 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5057 }
5058#endif
5059 s->cc_op = CC_OP_MULL;
5060 break;
5061#ifdef TARGET_X86_64
5062 case OT_QUAD:
5063 tcg_gen_helper_0_1(helper_imulq_EAX_T0, cpu_T[0]);
5064 s->cc_op = CC_OP_MULQ;
5065 break;
5066#endif
5067 }
5068 break;
5069 case 6: /* div */
5070 switch(ot) {
5071 case OT_BYTE:
5072 gen_jmp_im(pc_start - s->cs_base);
5073 tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
5074 break;
5075 case OT_WORD:
5076 gen_jmp_im(pc_start - s->cs_base);
5077 tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
5078 break;
5079 default:
5080 case OT_LONG:
5081 gen_jmp_im(pc_start - s->cs_base);
5082 tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
5083 break;
5084#ifdef TARGET_X86_64
5085 case OT_QUAD:
5086 gen_jmp_im(pc_start - s->cs_base);
5087 tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
5088 break;
5089#endif
5090 }
5091 break;
5092 case 7: /* idiv */
5093 switch(ot) {
5094 case OT_BYTE:
5095 gen_jmp_im(pc_start - s->cs_base);
5096 tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
5097 break;
5098 case OT_WORD:
5099 gen_jmp_im(pc_start - s->cs_base);
5100 tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
5101 break;
5102 default:
5103 case OT_LONG:
5104 gen_jmp_im(pc_start - s->cs_base);
5105 tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
5106 break;
5107#ifdef TARGET_X86_64
5108 case OT_QUAD:
5109 gen_jmp_im(pc_start - s->cs_base);
5110 tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
5111 break;
5112#endif
5113 }
5114 break;
5115 default:
5116 goto illegal_op;
5117 }
5118 break;
5119
5120 case 0xfe: /* GRP4 */
5121 case 0xff: /* GRP5 */
5122 if ((b & 1) == 0)
5123 ot = OT_BYTE;
5124 else
5125 ot = dflag + OT_WORD;
5126
5127 modrm = ldub_code(s->pc++);
5128 mod = (modrm >> 6) & 3;
5129 rm = (modrm & 7) | REX_B(s);
5130 op = (modrm >> 3) & 7;
5131 if (op >= 2 && b == 0xfe) {
5132 goto illegal_op;
5133 }
5134 if (CODE64(s)) {
5135 if (op == 2 || op == 4) {
5136 /* operand size for jumps is 64 bit */
5137 ot = OT_QUAD;
5138 } else if (op == 3 || op == 5) {
5139 /* for far calls and jumps, the operand is 16 or 32 bit, even
5140 in long mode */
5141 ot = dflag ? OT_LONG : OT_WORD;
5142 } else if (op == 6) {
5143 /* default push size is 64 bit */
5144 ot = dflag ? OT_QUAD : OT_WORD;
5145 }
5146 }
5147 if (mod != 3) {
5148 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5149 if (op >= 2 && op != 3 && op != 5)
5150 gen_op_ld_T0_A0(ot + s->mem_index);
5151 } else {
5152 gen_op_mov_TN_reg(ot, 0, rm);
5153 }
5154
5155 switch(op) {
5156 case 0: /* inc Ev */
5157 if (mod != 3)
5158 opreg = OR_TMP0;
5159 else
5160 opreg = rm;
5161 gen_inc(s, ot, opreg, 1);
5162 break;
5163 case 1: /* dec Ev */
5164 if (mod != 3)
5165 opreg = OR_TMP0;
5166 else
5167 opreg = rm;
5168 gen_inc(s, ot, opreg, -1);
5169 break;
5170 case 2: /* call Ev */
5171 /* XXX: optimize if memory (no 'and' is necessary) */
5172#ifdef VBOX_WITH_CALL_RECORD
5173 if (s->record_call)
5174 gen_op_record_call();
5175#endif
5176 if (s->dflag == 0)
5177 gen_op_andl_T0_ffff();
5178 next_eip = s->pc - s->cs_base;
5179 gen_movtl_T1_im(next_eip);
5180 gen_push_T1(s);
5181 gen_op_jmp_T0();
5182 gen_eob(s);
5183 break;
5184 case 3: /* lcall Ev */
5185 gen_op_ld_T1_A0(ot + s->mem_index);
5186 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5187 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5188 do_lcall:
5189 if (s->pe && !s->vm86) {
5190 if (s->cc_op != CC_OP_DYNAMIC)
5191 gen_op_set_cc_op(s->cc_op);
5192 gen_jmp_im(pc_start - s->cs_base);
5193 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5194 tcg_gen_helper_0_4(helper_lcall_protected,
5195 cpu_tmp2_i32, cpu_T[1],
5196 tcg_const_i32(dflag),
5197 tcg_const_i32(s->pc - pc_start));
5198 } else {
5199 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5200 tcg_gen_helper_0_4(helper_lcall_real,
5201 cpu_tmp2_i32, cpu_T[1],
5202 tcg_const_i32(dflag),
5203 tcg_const_i32(s->pc - s->cs_base));
5204 }
5205 gen_eob(s);
5206 break;
5207 case 4: /* jmp Ev */
5208 if (s->dflag == 0)
5209 gen_op_andl_T0_ffff();
5210 gen_op_jmp_T0();
5211 gen_eob(s);
5212 break;
5213 case 5: /* ljmp Ev */
5214 gen_op_ld_T1_A0(ot + s->mem_index);
5215 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5216 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5217 do_ljmp:
5218 if (s->pe && !s->vm86) {
5219 if (s->cc_op != CC_OP_DYNAMIC)
5220 gen_op_set_cc_op(s->cc_op);
5221 gen_jmp_im(pc_start - s->cs_base);
5222 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5223 tcg_gen_helper_0_3(helper_ljmp_protected,
5224 cpu_tmp2_i32,
5225 cpu_T[1],
5226 tcg_const_i32(s->pc - pc_start));
5227 } else {
5228 gen_op_movl_seg_T0_vm(R_CS);
5229 gen_op_movl_T0_T1();
5230 gen_op_jmp_T0();
5231 }
5232 gen_eob(s);
5233 break;
5234 case 6: /* push Ev */
5235 gen_push_T0(s);
5236 break;
5237 default:
5238 goto illegal_op;
5239 }
5240 break;
5241
5242 case 0x84: /* test Ev, Gv */
5243 case 0x85:
5244 if ((b & 1) == 0)
5245 ot = OT_BYTE;
5246 else
5247 ot = dflag + OT_WORD;
5248
5249 modrm = ldub_code(s->pc++);
5250 mod = (modrm >> 6) & 3;
5251 rm = (modrm & 7) | REX_B(s);
5252 reg = ((modrm >> 3) & 7) | rex_r;
5253
5254 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5255 gen_op_mov_TN_reg(ot, 1, reg);
5256 gen_op_testl_T0_T1_cc();
5257 s->cc_op = CC_OP_LOGICB + ot;
5258 break;
5259
5260 case 0xa8: /* test eAX, Iv */
5261 case 0xa9:
5262 if ((b & 1) == 0)
5263 ot = OT_BYTE;
5264 else
5265 ot = dflag + OT_WORD;
5266 val = insn_get(s, ot);
5267
5268 gen_op_mov_TN_reg(ot, 0, OR_EAX);
5269 gen_op_movl_T1_im(val);
5270 gen_op_testl_T0_T1_cc();
5271 s->cc_op = CC_OP_LOGICB + ot;
5272 break;
5273
5274 case 0x98: /* CWDE/CBW */
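 /* Sign extension within the accumulator: CBW AX = sext(AL),
    CWDE EAX = sext(AX), and with REX.W (dflag == 2) CDQE
    RAX = sext(EAX). */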
5275#ifdef TARGET_X86_64
5276 if (dflag == 2) {
5277 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5278 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5279 gen_op_mov_reg_T0(OT_QUAD, R_EAX);
5280 } else
5281#endif
5282 if (dflag == 1) {
5283 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5284 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5285 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5286 } else {
5287 gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
5288 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5289 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5290 }
5291 break;
5292 case 0x99: /* CDQ/CWD */
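 /* Sign extension of the accumulator into EDX: CWD DX:AX = sext(AX),
    CDQ EDX:EAX = sext(EAX), and with REX.W CQO RDX:RAX = sext(RAX). */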
5293#ifdef TARGET_X86_64
5294 if (dflag == 2) {
5295 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5296 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
5297 gen_op_mov_reg_T0(OT_QUAD, R_EDX);
5298 } else
5299#endif
5300 if (dflag == 1) {
5301 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5302 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5303 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
5304 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5305 } else {
5306 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5307 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5308 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
5309 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5310 }
5311 break;
5312 case 0x1af: /* imul Gv, Ev */
5313 case 0x69: /* imul Gv, Ev, I */
5314 case 0x6b:
5315 ot = dflag + OT_WORD;
5316 modrm = ldub_code(s->pc++);
5317 reg = ((modrm >> 3) & 7) | rex_r;
5318 if (b == 0x69)
5319 s->rip_offset = insn_const_size(ot);
5320 else if (b == 0x6b)
5321 s->rip_offset = 1;
5322 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5323 if (b == 0x69) {
5324 val = insn_get(s, ot);
5325 gen_op_movl_T1_im(val);
5326 } else if (b == 0x6b) {
5327 val = (int8_t)insn_get(s, OT_BYTE);
5328 gen_op_movl_T1_im(val);
5329 } else {
5330 gen_op_mov_TN_reg(ot, 1, reg);
5331 }
5332
5333#ifdef TARGET_X86_64
5334 if (ot == OT_QUAD) {
5335 tcg_gen_helper_1_2(helper_imulq_T0_T1, cpu_T[0], cpu_T[0], cpu_T[1]);
5336 } else
5337#endif
5338 if (ot == OT_LONG) {
5339#ifdef TARGET_X86_64
5340 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5341 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
5342 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5343 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5344 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
5345 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5346#else
5347 {
5348 TCGv t0, t1;
5349 t0 = tcg_temp_new(TCG_TYPE_I64);
5350 t1 = tcg_temp_new(TCG_TYPE_I64);
5351 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
5352 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
5353 tcg_gen_mul_i64(t0, t0, t1);
5354 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5355 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5356 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
5357 tcg_gen_shri_i64(t0, t0, 32);
5358 tcg_gen_trunc_i64_i32(cpu_T[1], t0);
5359 tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
5360 }
5361#endif
5362 } else {
5363 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5364 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5365 /* XXX: use 32 bit mul which could be faster */
5366 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5367 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5368 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5369 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5370 }
5371 gen_op_mov_reg_T0(ot, reg);
5372 s->cc_op = CC_OP_MULB + ot;
5373 break;
5374 case 0x1c0:
5375 case 0x1c1: /* xadd Ev, Gv */
5376 if ((b & 1) == 0)
5377 ot = OT_BYTE;
5378 else
5379 ot = dflag + OT_WORD;
5380 modrm = ldub_code(s->pc++);
5381 reg = ((modrm >> 3) & 7) | rex_r;
5382 mod = (modrm >> 6) & 3;
5383 if (mod == 3) {
5384 rm = (modrm & 7) | REX_B(s);
5385 gen_op_mov_TN_reg(ot, 0, reg);
5386 gen_op_mov_TN_reg(ot, 1, rm);
5387 gen_op_addl_T0_T1();
5388 gen_op_mov_reg_T1(ot, reg);
5389 gen_op_mov_reg_T0(ot, rm);
5390 } else {
5391 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5392 gen_op_mov_TN_reg(ot, 0, reg);
5393 gen_op_ld_T1_A0(ot + s->mem_index);
5394 gen_op_addl_T0_T1();
5395 gen_op_st_T0_A0(ot + s->mem_index);
5396 gen_op_mov_reg_T1(ot, reg);
5397 }
5398 gen_op_update2_cc();
5399 s->cc_op = CC_OP_ADDB + ot;
5400 break;
5401 case 0x1b0:
5402 case 0x1b1: /* cmpxchg Ev, Gv */
5403 {
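 /* CMPXCHG: compare the accumulator with the destination; if they are
    equal the source (t1) is stored into the destination, otherwise the
    old destination value is loaded into the accumulator. The memory
    form below stores unconditionally (writing back the old value on
    mismatch), matching the hardware's always-write behaviour. */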
5404 int label1, label2;
5405 TCGv t0, t1, t2, a0;
5406
5407 if ((b & 1) == 0)
5408 ot = OT_BYTE;
5409 else
5410 ot = dflag + OT_WORD;
5411 modrm = ldub_code(s->pc++);
5412 reg = ((modrm >> 3) & 7) | rex_r;
5413 mod = (modrm >> 6) & 3;
5414 t0 = tcg_temp_local_new(TCG_TYPE_TL);
5415 t1 = tcg_temp_local_new(TCG_TYPE_TL);
5416 t2 = tcg_temp_local_new(TCG_TYPE_TL);
5417 a0 = tcg_temp_local_new(TCG_TYPE_TL);
5418 gen_op_mov_v_reg(ot, t1, reg);
5419 if (mod == 3) {
5420 rm = (modrm & 7) | REX_B(s);
5421 gen_op_mov_v_reg(ot, t0, rm);
5422 } else {
5423 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5424 tcg_gen_mov_tl(a0, cpu_A0);
5425 gen_op_ld_v(ot + s->mem_index, t0, a0);
5426 rm = 0; /* avoid warning */
5427 }
5428 label1 = gen_new_label();
5429 tcg_gen_ld_tl(t2, cpu_env, offsetof(CPUState, regs[R_EAX]));
5430 tcg_gen_sub_tl(t2, t2, t0);
5431 gen_extu(ot, t2);
5432 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
5433 if (mod == 3) {
5434 label2 = gen_new_label();
5435 gen_op_mov_reg_v(ot, R_EAX, t0);
5436 tcg_gen_br(label2);
5437 gen_set_label(label1);
5438 gen_op_mov_reg_v(ot, rm, t1);
5439 gen_set_label(label2);
5440 } else {
5441 tcg_gen_mov_tl(t1, t0);
5442 gen_op_mov_reg_v(ot, R_EAX, t0);
5443 gen_set_label(label1);
5444 /* always store */
5445 gen_op_st_v(ot + s->mem_index, t1, a0);
5446 }
5447 tcg_gen_mov_tl(cpu_cc_src, t0);
5448 tcg_gen_mov_tl(cpu_cc_dst, t2);
5449 s->cc_op = CC_OP_SUBB + ot;
5450 tcg_temp_free(t0);
5451 tcg_temp_free(t1);
5452 tcg_temp_free(t2);
5453 tcg_temp_free(a0);
5454 }
5455 break;
5456 case 0x1c7: /* cmpxchg8b */
5457 modrm = ldub_code(s->pc++);
5458 mod = (modrm >> 6) & 3;
5459 if ((mod == 3) || ((modrm & 0x38) != 0x8))
5460 goto illegal_op;
5461#ifdef TARGET_X86_64
5462 if (dflag == 2) {
5463 if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
5464 goto illegal_op;
5465 gen_jmp_im(pc_start - s->cs_base);
5466 if (s->cc_op != CC_OP_DYNAMIC)
5467 gen_op_set_cc_op(s->cc_op);
5468 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5469 tcg_gen_helper_0_1(helper_cmpxchg16b, cpu_A0);
5470 } else
5471#endif
5472 {
5473 if (!(s->cpuid_features & CPUID_CX8))
5474 goto illegal_op;
5475 gen_jmp_im(pc_start - s->cs_base);
5476 if (s->cc_op != CC_OP_DYNAMIC)
5477 gen_op_set_cc_op(s->cc_op);
5478 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5479 tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
5480 }
5481 s->cc_op = CC_OP_EFLAGS;
5482 break;
5483
5484 /**************************/
5485 /* push/pop */
5486 case 0x50 ... 0x57: /* push */
5487 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
5488 gen_push_T0(s);
5489 break;
5490 case 0x58 ... 0x5f: /* pop */
5491 if (CODE64(s)) {
5492 ot = dflag ? OT_QUAD : OT_WORD;
5493 } else {
5494 ot = dflag + OT_WORD;
5495 }
5496 gen_pop_T0(s);
5497 /* NOTE: order is important for pop %sp */
5498 gen_pop_update(s);
5499 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
5500 break;
5501 case 0x60: /* pusha */
5502 if (CODE64(s))
5503 goto illegal_op;
5504 gen_pusha(s);
5505 break;
5506 case 0x61: /* popa */
5507 if (CODE64(s))
5508 goto illegal_op;
5509 gen_popa(s);
5510 break;
5511 case 0x68: /* push Iv */
5512 case 0x6a:
5513 if (CODE64(s)) {
5514 ot = dflag ? OT_QUAD : OT_WORD;
5515 } else {
5516 ot = dflag + OT_WORD;
5517 }
5518 if (b == 0x68)
5519 val = insn_get(s, ot);
5520 else
5521 val = (int8_t)insn_get(s, OT_BYTE);
5522 gen_op_movl_T0_im(val);
5523 gen_push_T0(s);
5524 break;
5525 case 0x8f: /* pop Ev */
5526 if (CODE64(s)) {
5527 ot = dflag ? OT_QUAD : OT_WORD;
5528 } else {
5529 ot = dflag + OT_WORD;
5530 }
5531 modrm = ldub_code(s->pc++);
5532 mod = (modrm >> 6) & 3;
5533 gen_pop_T0(s);
5534 if (mod == 3) {
5535 /* NOTE: order is important for pop %sp */
5536 gen_pop_update(s);
5537 rm = (modrm & 7) | REX_B(s);
5538 gen_op_mov_reg_T0(ot, rm);
5539 } else {
5540 /* NOTE: order is important too for MMU exceptions */
5541 s->popl_esp_hack = 1 << ot;
5542 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5543 s->popl_esp_hack = 0;
5544 gen_pop_update(s);
5545 }
5546 break;
5547 case 0xc8: /* enter */
5548 {
5549 int level;
5550 val = lduw_code(s->pc);
5551 s->pc += 2;
5552 level = ldub_code(s->pc++);
5553 gen_enter(s, val, level);
5554 }
5555 break;
5556 case 0xc9: /* leave */
5557 /* XXX: exception not precise (ESP is updated before potential exception) */
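 /* LEAVE is mov (e/r)sp,(e/r)bp followed by pop (e/r)bp; the
    stack-size flags (CODE64/ss32) pick the register width used below. */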
5558 if (CODE64(s)) {
5559 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
5560 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
5561 } else if (s->ss32) {
5562 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
5563 gen_op_mov_reg_T0(OT_LONG, R_ESP);
5564 } else {
5565 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
5566 gen_op_mov_reg_T0(OT_WORD, R_ESP);
5567 }
5568 gen_pop_T0(s);
5569 if (CODE64(s)) {
5570 ot = dflag ? OT_QUAD : OT_WORD;
5571 } else {
5572 ot = dflag + OT_WORD;
5573 }
5574 gen_op_mov_reg_T0(ot, R_EBP);
5575 gen_pop_update(s);
5576 break;
5577 case 0x06: /* push es */
5578 case 0x0e: /* push cs */
5579 case 0x16: /* push ss */
5580 case 0x1e: /* push ds */
5581 if (CODE64(s))
5582 goto illegal_op;
5583 gen_op_movl_T0_seg(b >> 3);
5584 gen_push_T0(s);
5585 break;
5586 case 0x1a0: /* push fs */
5587 case 0x1a8: /* push gs */
5588 gen_op_movl_T0_seg((b >> 3) & 7);
5589 gen_push_T0(s);
5590 break;
5591 case 0x07: /* pop es */
5592 case 0x17: /* pop ss */
5593 case 0x1f: /* pop ds */
5594 if (CODE64(s))
5595 goto illegal_op;
5596 reg = b >> 3;
5597 gen_pop_T0(s);
5598 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5599 gen_pop_update(s);
5600 if (reg == R_SS) {
5601 /* if reg == SS, inhibit interrupts/trace. */
5602 /* If several instructions disable interrupts, only the
5603 _first_ does it */
5604 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5605 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5606 s->tf = 0;
5607 }
5608 if (s->is_jmp) {
5609 gen_jmp_im(s->pc - s->cs_base);
5610 gen_eob(s);
5611 }
5612 break;
5613 case 0x1a1: /* pop fs */
5614 case 0x1a9: /* pop gs */
5615 gen_pop_T0(s);
5616 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
5617 gen_pop_update(s);
5618 if (s->is_jmp) {
5619 gen_jmp_im(s->pc - s->cs_base);
5620 gen_eob(s);
5621 }
5622 break;
5623
5624 /**************************/
5625 /* mov */
5626 case 0x88:
5627 case 0x89: /* mov Gv, Ev */
5628 if ((b & 1) == 0)
5629 ot = OT_BYTE;
5630 else
5631 ot = dflag + OT_WORD;
5632 modrm = ldub_code(s->pc++);
5633 reg = ((modrm >> 3) & 7) | rex_r;
5634
5635 /* generate a generic store */
5636 gen_ldst_modrm(s, modrm, ot, reg, 1);
5637 break;
5638 case 0xc6:
5639 case 0xc7: /* mov Ev, Iv */
5640 if ((b & 1) == 0)
5641 ot = OT_BYTE;
5642 else
5643 ot = dflag + OT_WORD;
5644 modrm = ldub_code(s->pc++);
5645 mod = (modrm >> 6) & 3;
5646 if (mod != 3) {
5647 s->rip_offset = insn_const_size(ot);
5648 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5649 }
5650 val = insn_get(s, ot);
5651 gen_op_movl_T0_im(val);
5652 if (mod != 3)
5653 gen_op_st_T0_A0(ot + s->mem_index);
5654 else
5655 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
5656 break;
5657 case 0x8a:
5658 case 0x8b: /* mov Ev, Gv */
5659#ifdef VBOX /* dtrace hot fix */
5660 if (prefixes & PREFIX_LOCK)
5661 goto illegal_op;
5662#endif
5663 if ((b & 1) == 0)
5664 ot = OT_BYTE;
5665 else
5666 ot = OT_WORD + dflag;
5667 modrm = ldub_code(s->pc++);
5668 reg = ((modrm >> 3) & 7) | rex_r;
5669
5670 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5671 gen_op_mov_reg_T0(ot, reg);
5672 break;
5673 case 0x8e: /* mov seg, Gv */
5674 modrm = ldub_code(s->pc++);
5675 reg = (modrm >> 3) & 7;
5676 if (reg >= 6 || reg == R_CS)
5677 goto illegal_op;
5678 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5679 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5680 if (reg == R_SS) {
5681 /* if reg == SS, inhibit interrupts/trace */
5682 /* If several instructions disable interrupts, only the
5683 _first_ does it */
5684 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5685 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5686 s->tf = 0;
5687 }
5688 if (s->is_jmp) {
5689 gen_jmp_im(s->pc - s->cs_base);
5690 gen_eob(s);
5691 }
5692 break;
5693 case 0x8c: /* mov Gv, seg */
5694 modrm = ldub_code(s->pc++);
5695 reg = (modrm >> 3) & 7;
5696 mod = (modrm >> 6) & 3;
5697 if (reg >= 6)
5698 goto illegal_op;
5699 gen_op_movl_T0_seg(reg);
5700 if (mod == 3)
5701 ot = OT_WORD + dflag;
5702 else
5703 ot = OT_WORD;
5704 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5705 break;
5706
5707 case 0x1b6: /* movzbS Gv, Eb */
5708 case 0x1b7: /* movzwS Gv, Eb */
5709 case 0x1be: /* movsbS Gv, Eb */
5710 case 0x1bf: /* movswS Gv, Eb */
5711 {
5712 int d_ot;
5713 /* d_ot is the size of destination */
5714 d_ot = dflag + OT_WORD;
5715 /* ot is the size of source */
5716 ot = (b & 1) + OT_BYTE;
5717 modrm = ldub_code(s->pc++);
5718 reg = ((modrm >> 3) & 7) | rex_r;
5719 mod = (modrm >> 6) & 3;
5720 rm = (modrm & 7) | REX_B(s);
5721
5722 if (mod == 3) {
5723 gen_op_mov_TN_reg(ot, 0, rm);
5724 switch(ot | (b & 8)) {
5725 case OT_BYTE:
5726 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
5727 break;
5728 case OT_BYTE | 8:
5729 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5730 break;
5731 case OT_WORD:
5732 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
5733 break;
5734 default:
5735 case OT_WORD | 8:
5736 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5737 break;
5738 }
5739 gen_op_mov_reg_T0(d_ot, reg);
5740 } else {
5741 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5742 if (b & 8) {
5743 gen_op_lds_T0_A0(ot + s->mem_index);
5744 } else {
5745 gen_op_ldu_T0_A0(ot + s->mem_index);
5746 }
5747 gen_op_mov_reg_T0(d_ot, reg);
5748 }
5749 }
5750 break;
5751
5752 case 0x8d: /* lea */
5753 ot = dflag + OT_WORD;
5754 modrm = ldub_code(s->pc++);
5755 mod = (modrm >> 6) & 3;
5756 if (mod == 3)
5757 goto illegal_op;
5758 reg = ((modrm >> 3) & 7) | rex_r;
5759 /* we must ensure that no segment is added */
5760 s->override = -1;
5761 val = s->addseg;
5762 s->addseg = 0;
5763 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5764 s->addseg = val;
5765 gen_op_mov_reg_A0(ot - OT_WORD, reg);
5766 break;
5767
5768 case 0xa0: /* mov EAX, Ov */
5769 case 0xa1:
5770 case 0xa2: /* mov Ov, EAX */
5771 case 0xa3:
5772 {
5773 target_ulong offset_addr;
5774
5775 if ((b & 1) == 0)
5776 ot = OT_BYTE;
5777 else
5778 ot = dflag + OT_WORD;
5779#ifdef TARGET_X86_64
5780 if (s->aflag == 2) {
5781 offset_addr = ldq_code(s->pc);
5782 s->pc += 8;
5783 gen_op_movq_A0_im(offset_addr);
5784 } else
5785#endif
5786 {
5787 if (s->aflag) {
5788 offset_addr = insn_get(s, OT_LONG);
5789 } else {
5790 offset_addr = insn_get(s, OT_WORD);
5791 }
5792 gen_op_movl_A0_im(offset_addr);
5793 }
5794 gen_add_A0_ds_seg(s);
5795 if ((b & 2) == 0) {
5796 gen_op_ld_T0_A0(ot + s->mem_index);
5797 gen_op_mov_reg_T0(ot, R_EAX);
5798 } else {
5799 gen_op_mov_TN_reg(ot, 0, R_EAX);
5800 gen_op_st_T0_A0(ot + s->mem_index);
5801 }
5802 }
5803 break;
5804 case 0xd7: /* xlat */
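 /* XLAT loads AL from [seg:(E/R)BX + unsigned AL]; the 0xff mask
    below zero-extends AL and the address-size masking truncates the
    address for 16/32-bit modes. */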
5805#ifdef TARGET_X86_64
5806 if (s->aflag == 2) {
5807 gen_op_movq_A0_reg(R_EBX);
5808 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5809 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5810 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5811 } else
5812#endif
5813 {
5814 gen_op_movl_A0_reg(R_EBX);
5815 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5816 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5817 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5818 if (s->aflag == 0)
5819 gen_op_andl_A0_ffff();
5820 else
5821 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
5822 }
5823 gen_add_A0_ds_seg(s);
5824 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
5825 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
5826 break;
5827 case 0xb0 ... 0xb7: /* mov R, Ib */
5828 val = insn_get(s, OT_BYTE);
5829 gen_op_movl_T0_im(val);
5830 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
5831 break;
5832 case 0xb8 ... 0xbf: /* mov R, Iv */
5833#ifdef TARGET_X86_64
5834 if (dflag == 2) {
5835 uint64_t tmp;
5836 /* 64 bit case */
5837 tmp = ldq_code(s->pc);
5838 s->pc += 8;
5839 reg = (b & 7) | REX_B(s);
5840 gen_movtl_T0_im(tmp);
5841 gen_op_mov_reg_T0(OT_QUAD, reg);
5842 } else
5843#endif
5844 {
5845 ot = dflag ? OT_LONG : OT_WORD;
5846 val = insn_get(s, ot);
5847 reg = (b & 7) | REX_B(s);
5848 gen_op_movl_T0_im(val);
5849 gen_op_mov_reg_T0(ot, reg);
5850 }
5851 break;
5852
5853 case 0x91 ... 0x97: /* xchg R, EAX */
5854 ot = dflag + OT_WORD;
5855 reg = (b & 7) | REX_B(s);
5856 rm = R_EAX;
5857 goto do_xchg_reg;
5858 case 0x86:
5859 case 0x87: /* xchg Ev, Gv */
5860 if ((b & 1) == 0)
5861 ot = OT_BYTE;
5862 else
5863 ot = dflag + OT_WORD;
5864 modrm = ldub_code(s->pc++);
5865 reg = ((modrm >> 3) & 7) | rex_r;
5866 mod = (modrm >> 6) & 3;
5867 if (mod == 3) {
5868 rm = (modrm & 7) | REX_B(s);
5869 do_xchg_reg:
5870 gen_op_mov_TN_reg(ot, 0, reg);
5871 gen_op_mov_TN_reg(ot, 1, rm);
5872 gen_op_mov_reg_T0(ot, rm);
5873 gen_op_mov_reg_T1(ot, reg);
5874 } else {
5875 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5876 gen_op_mov_TN_reg(ot, 0, reg);
5877 /* for xchg, lock is implicit */
5878 if (!(prefixes & PREFIX_LOCK))
5879 tcg_gen_helper_0_0(helper_lock);
5880 gen_op_ld_T1_A0(ot + s->mem_index);
5881 gen_op_st_T0_A0(ot + s->mem_index);
5882 if (!(prefixes & PREFIX_LOCK))
5883 tcg_gen_helper_0_0(helper_unlock);
5884 gen_op_mov_reg_T1(ot, reg);
5885 }
5886 break;
5887 case 0xc4: /* les Gv */
5888 if (CODE64(s))
5889 goto illegal_op;
5890 op = R_ES;
5891 goto do_lxx;
5892 case 0xc5: /* lds Gv */
5893 if (CODE64(s))
5894 goto illegal_op;
5895 op = R_DS;
5896 goto do_lxx;
5897 case 0x1b2: /* lss Gv */
5898 op = R_SS;
5899 goto do_lxx;
5900 case 0x1b4: /* lfs Gv */
5901 op = R_FS;
5902 goto do_lxx;
5903 case 0x1b5: /* lgs Gv */
5904 op = R_GS;
5905 do_lxx:
5906 ot = dflag ? OT_LONG : OT_WORD;
5907 modrm = ldub_code(s->pc++);
5908 reg = ((modrm >> 3) & 7) | rex_r;
5909 mod = (modrm >> 6) & 3;
5910 if (mod == 3)
5911 goto illegal_op;
5912 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5913 gen_op_ld_T1_A0(ot + s->mem_index);
5914 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5915 /* load the segment first to handle exceptions properly */
5916 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5917 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
5918 /* then put the data */
5919 gen_op_mov_reg_T1(ot, reg);
5920 if (s->is_jmp) {
5921 gen_jmp_im(s->pc - s->cs_base);
5922 gen_eob(s);
5923 }
5924 break;
5925
5926 /************************/
5927 /* shifts */
5928 case 0xc0:
5929 case 0xc1:
5930 /* shift Ev,Ib */
5931 shift = 2;
5932 grp2:
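 /* GRP2: the ModRM reg field selects the operation (/0 rol, /1 ror,
    /2 rcl, /3 rcr, /4 shl/sal, /5 shr, /7 sar); shift selects the
    count source: 2 = imm8, 1 = the constant 1, 0 = the CL register. */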
5933 {
5934 if ((b & 1) == 0)
5935 ot = OT_BYTE;
5936 else
5937 ot = dflag + OT_WORD;
5938
5939 modrm = ldub_code(s->pc++);
5940 mod = (modrm >> 6) & 3;
5941 op = (modrm >> 3) & 7;
5942
5943 if (mod != 3) {
5944 if (shift == 2) {
5945 s->rip_offset = 1;
5946 }
5947 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5948 opreg = OR_TMP0;
5949 } else {
5950 opreg = (modrm & 7) | REX_B(s);
5951 }
5952
5953 /* simpler op */
5954 if (shift == 0) {
5955 gen_shift(s, op, ot, opreg, OR_ECX);
5956 } else {
5957 if (shift == 2) {
5958 shift = ldub_code(s->pc++);
5959 }
5960 gen_shifti(s, op, ot, opreg, shift);
5961 }
5962 }
5963 break;
5964 case 0xd0:
5965 case 0xd1:
5966 /* shift Ev,1 */
5967 shift = 1;
5968 goto grp2;
5969 case 0xd2:
5970 case 0xd3:
5971 /* shift Ev,cl */
5972 shift = 0;
5973 goto grp2;
5974
5975 case 0x1a4: /* shld imm */
5976 op = 0;
5977 shift = 1;
5978 goto do_shiftd;
5979 case 0x1a5: /* shld cl */
5980 op = 0;
5981 shift = 0;
5982 goto do_shiftd;
5983 case 0x1ac: /* shrd imm */
5984 op = 1;
5985 shift = 1;
5986 goto do_shiftd;
5987 case 0x1ad: /* shrd cl */
5988 op = 1;
5989 shift = 0;
5990 do_shiftd:
5991 ot = dflag + OT_WORD;
5992 modrm = ldub_code(s->pc++);
5993 mod = (modrm >> 6) & 3;
5994 rm = (modrm & 7) | REX_B(s);
5995 reg = ((modrm >> 3) & 7) | rex_r;
5996 if (mod != 3) {
5997 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5998 opreg = OR_TMP0;
5999 } else {
6000 opreg = rm;
6001 }
6002 gen_op_mov_TN_reg(ot, 1, reg);
6003
6004 if (shift) {
6005 val = ldub_code(s->pc++);
6006 tcg_gen_movi_tl(cpu_T3, val);
6007 } else {
6008 tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
6009 }
6010 gen_shiftd_rm_T1_T3(s, ot, opreg, op);
6011 break;
6012
6013 /************************/
6014 /* floats */
6015 case 0xd8 ... 0xdf:
6016 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
6017 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
6018 /* XXX: what to do if illegal op? */
6019 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6020 break;
6021 }
6022 modrm = ldub_code(s->pc++);
6023 mod = (modrm >> 6) & 3;
6024 rm = modrm & 7;
6025 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
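 /* op packs the low 3 bits of the escape byte (0xd8..0xdf) with the
    ModRM reg field into a 6-bit index so the memory-form cases below
    can switch on a single value. */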
6026 if (mod != 3) {
6027 /* memory op */
6028 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6029 switch(op) {
6030 case 0x00 ... 0x07: /* fxxxs */
6031 case 0x10 ... 0x17: /* fixxxl */
6032 case 0x20 ... 0x27: /* fxxxl */
6033 case 0x30 ... 0x37: /* fixxx */
6034 {
6035 int op1;
6036 op1 = op & 7;
6037
6038 switch(op >> 4) {
6039 case 0:
6040 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6041 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6042 tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
6043 break;
6044 case 1:
6045 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6046 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6047 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
6048 break;
6049 case 2:
6050 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6051 (s->mem_index >> 2) - 1);
6052 tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
6053 break;
6054 case 3:
6055 default:
6056 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
6057 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6058 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
6059 break;
6060 }
6061
6062 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
6063 if (op1 == 3) {
6064 /* fcomp needs pop */
6065 tcg_gen_helper_0_0(helper_fpop);
6066 }
6067 }
6068 break;
6069 case 0x08: /* flds */
6070 case 0x0a: /* fsts */
6071 case 0x0b: /* fstps */
6072 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
6073 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
6074 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
6075 switch(op & 7) {
6076 case 0:
6077 switch(op >> 4) {
6078 case 0:
6079 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6080 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6081 tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
6082 break;
6083 case 1:
6084 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6085 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6086 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
6087 break;
6088 case 2:
6089 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6090 (s->mem_index >> 2) - 1);
6091 tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
6092 break;
6093 case 3:
6094 default:
6095 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
6096 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6097 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
6098 break;
6099 }
6100 break;
6101 case 1:
6102 /* XXX: the corresponding CPUID bit (SSE3, for fisttp) should be tested! */
6103 switch(op >> 4) {
6104 case 1:
6105 tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
6106 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6107 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6108 break;
6109 case 2:
6110 tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
6111 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6112 (s->mem_index >> 2) - 1);
6113 break;
6114 case 3:
6115 default:
6116 tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
6117 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6118 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6119 break;
6120 }
6121 tcg_gen_helper_0_0(helper_fpop);
6122 break;
6123 default:
6124 switch(op >> 4) {
6125 case 0:
6126 tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
6127 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6128 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6129 break;
6130 case 1:
6131 tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
6132 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6133 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6134 break;
6135 case 2:
6136 tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
6137 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6138 (s->mem_index >> 2) - 1);
6139 break;
6140 case 3:
6141 default:
6142 tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
6143 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6144 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6145 break;
6146 }
6147 if ((op & 7) == 3)
6148 tcg_gen_helper_0_0(helper_fpop);
6149 break;
6150 }
6151 break;
6152 case 0x0c: /* fldenv mem */
6153 if (s->cc_op != CC_OP_DYNAMIC)
6154 gen_op_set_cc_op(s->cc_op);
6155 gen_jmp_im(pc_start - s->cs_base);
6156 tcg_gen_helper_0_2(helper_fldenv,
6157 cpu_A0, tcg_const_i32(s->dflag));
6158 break;
6159 case 0x0d: /* fldcw mem */
6160 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
6161 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6162 tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
6163 break;
6164 case 0x0e: /* fnstenv mem */
6165 if (s->cc_op != CC_OP_DYNAMIC)
6166 gen_op_set_cc_op(s->cc_op);
6167 gen_jmp_im(pc_start - s->cs_base);
6168 tcg_gen_helper_0_2(helper_fstenv,
6169 cpu_A0, tcg_const_i32(s->dflag));
6170 break;
6171 case 0x0f: /* fnstcw mem */
6172 tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
6173 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6174 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6175 break;
6176 case 0x1d: /* fldt mem */
6177 if (s->cc_op != CC_OP_DYNAMIC)
6178 gen_op_set_cc_op(s->cc_op);
6179 gen_jmp_im(pc_start - s->cs_base);
6180 tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
6181 break;
6182 case 0x1f: /* fstpt mem */
6183 if (s->cc_op != CC_OP_DYNAMIC)
6184 gen_op_set_cc_op(s->cc_op);
6185 gen_jmp_im(pc_start - s->cs_base);
6186 tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
6187 tcg_gen_helper_0_0(helper_fpop);
6188 break;
6189 case 0x2c: /* frstor mem */
6190 if (s->cc_op != CC_OP_DYNAMIC)
6191 gen_op_set_cc_op(s->cc_op);
6192 gen_jmp_im(pc_start - s->cs_base);
6193 tcg_gen_helper_0_2(helper_frstor,
6194 cpu_A0, tcg_const_i32(s->dflag));
6195 break;
6196 case 0x2e: /* fnsave mem */
6197 if (s->cc_op != CC_OP_DYNAMIC)
6198 gen_op_set_cc_op(s->cc_op);
6199 gen_jmp_im(pc_start - s->cs_base);
6200 tcg_gen_helper_0_2(helper_fsave,
6201 cpu_A0, tcg_const_i32(s->dflag));
6202 break;
6203 case 0x2f: /* fnstsw mem */
6204 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
6205 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6206 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6207 break;
6208 case 0x3c: /* fbld */
6209 if (s->cc_op != CC_OP_DYNAMIC)
6210 gen_op_set_cc_op(s->cc_op);
6211 gen_jmp_im(pc_start - s->cs_base);
6212 tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
6213 break;
6214 case 0x3e: /* fbstp */
6215 if (s->cc_op != CC_OP_DYNAMIC)
6216 gen_op_set_cc_op(s->cc_op);
6217 gen_jmp_im(pc_start - s->cs_base);
6218 tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
6219 tcg_gen_helper_0_0(helper_fpop);
6220 break;
6221 case 0x3d: /* fildll */
6222 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6223 (s->mem_index >> 2) - 1);
6224 tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
6225 break;
6226 case 0x3f: /* fistpll */
6227 tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
6228 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6229 (s->mem_index >> 2) - 1);
6230 tcg_gen_helper_0_0(helper_fpop);
6231 break;
6232 default:
6233 goto illegal_op;
6234 }
6235 } else {
6236 /* register float ops */
6237 opreg = rm;
6238
6239 switch(op) {
6240 case 0x08: /* fld sti */
6241 tcg_gen_helper_0_0(helper_fpush);
6242 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
6243 break;
6244 case 0x09: /* fxchg sti */
6245 case 0x29: /* fxchg4 sti, undocumented op */
6246 case 0x39: /* fxchg7 sti, undocumented op */
6247 tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
6248 break;
6249 case 0x0a: /* grp d9/2 */
6250 switch(rm) {
6251 case 0: /* fnop */
6252 /* check exceptions (FreeBSD FPU probe) */
6253 if (s->cc_op != CC_OP_DYNAMIC)
6254 gen_op_set_cc_op(s->cc_op);
6255 gen_jmp_im(pc_start - s->cs_base);
6256 tcg_gen_helper_0_0(helper_fwait);
6257 break;
6258 default:
6259 goto illegal_op;
6260 }
6261 break;
6262 case 0x0c: /* grp d9/4 */
6263 switch(rm) {
6264 case 0: /* fchs */
6265 tcg_gen_helper_0_0(helper_fchs_ST0);
6266 break;
6267 case 1: /* fabs */
6268 tcg_gen_helper_0_0(helper_fabs_ST0);
6269 break;
6270 case 4: /* ftst */
6271 tcg_gen_helper_0_0(helper_fldz_FT0);
6272 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6273 break;
6274 case 5: /* fxam */
6275 tcg_gen_helper_0_0(helper_fxam_ST0);
6276 break;
6277 default:
6278 goto illegal_op;
6279 }
6280 break;
6281 case 0x0d: /* grp d9/5 */
6282 {
6283 switch(rm) {
6284 case 0:
6285 tcg_gen_helper_0_0(helper_fpush);
6286 tcg_gen_helper_0_0(helper_fld1_ST0);
6287 break;
6288 case 1:
6289 tcg_gen_helper_0_0(helper_fpush);
6290 tcg_gen_helper_0_0(helper_fldl2t_ST0);
6291 break;
6292 case 2:
6293 tcg_gen_helper_0_0(helper_fpush);
6294 tcg_gen_helper_0_0(helper_fldl2e_ST0);
6295 break;
6296 case 3:
6297 tcg_gen_helper_0_0(helper_fpush);
6298 tcg_gen_helper_0_0(helper_fldpi_ST0);
6299 break;
6300 case 4:
6301 tcg_gen_helper_0_0(helper_fpush);
6302 tcg_gen_helper_0_0(helper_fldlg2_ST0);
6303 break;
6304 case 5:
6305 tcg_gen_helper_0_0(helper_fpush);
6306 tcg_gen_helper_0_0(helper_fldln2_ST0);
6307 break;
6308 case 6:
6309 tcg_gen_helper_0_0(helper_fpush);
6310 tcg_gen_helper_0_0(helper_fldz_ST0);
6311 break;
6312 default:
6313 goto illegal_op;
6314 }
6315 }
6316 break;
6317 case 0x0e: /* grp d9/6 */
6318 switch(rm) {
6319 case 0: /* f2xm1 */
6320 tcg_gen_helper_0_0(helper_f2xm1);
6321 break;
6322 case 1: /* fyl2x */
6323 tcg_gen_helper_0_0(helper_fyl2x);
6324 break;
6325 case 2: /* fptan */
6326 tcg_gen_helper_0_0(helper_fptan);
6327 break;
6328 case 3: /* fpatan */
6329 tcg_gen_helper_0_0(helper_fpatan);
6330 break;
6331 case 4: /* fxtract */
6332 tcg_gen_helper_0_0(helper_fxtract);
6333 break;
6334 case 5: /* fprem1 */
6335 tcg_gen_helper_0_0(helper_fprem1);
6336 break;
6337 case 6: /* fdecstp */
6338 tcg_gen_helper_0_0(helper_fdecstp);
6339 break;
6340 default:
6341 case 7: /* fincstp */
6342 tcg_gen_helper_0_0(helper_fincstp);
6343 break;
6344 }
6345 break;
6346 case 0x0f: /* grp d9/7 */
6347 switch(rm) {
6348 case 0: /* fprem */
6349 tcg_gen_helper_0_0(helper_fprem);
6350 break;
6351 case 1: /* fyl2xp1 */
6352 tcg_gen_helper_0_0(helper_fyl2xp1);
6353 break;
6354 case 2: /* fsqrt */
6355 tcg_gen_helper_0_0(helper_fsqrt);
6356 break;
6357 case 3: /* fsincos */
6358 tcg_gen_helper_0_0(helper_fsincos);
6359 break;
6360 case 5: /* fscale */
6361 tcg_gen_helper_0_0(helper_fscale);
6362 break;
6363 case 4: /* frndint */
6364 tcg_gen_helper_0_0(helper_frndint);
6365 break;
6366 case 6: /* fsin */
6367 tcg_gen_helper_0_0(helper_fsin);
6368 break;
6369 default:
6370 case 7: /* fcos */
6371 tcg_gen_helper_0_0(helper_fcos);
6372 break;
6373 }
6374 break;
6375 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
6376 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
6377 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
6378 {
6379 int op1;
6380
6381 op1 = op & 7;
6382 if (op >= 0x20) {
6383 tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
6384 if (op >= 0x30)
6385 tcg_gen_helper_0_0(helper_fpop);
6386 } else {
6387 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6388 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
6389 }
6390 }
6391 break;
6392 case 0x02: /* fcom */
6393 case 0x22: /* fcom2, undocumented op */
6394 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6395 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6396 break;
6397 case 0x03: /* fcomp */
6398 case 0x23: /* fcomp3, undocumented op */
6399 case 0x32: /* fcomp5, undocumented op */
6400 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6401 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6402 tcg_gen_helper_0_0(helper_fpop);
6403 break;
6404 case 0x15: /* da/5 */
6405 switch(rm) {
6406 case 1: /* fucompp */
6407 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
6408 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6409 tcg_gen_helper_0_0(helper_fpop);
6410 tcg_gen_helper_0_0(helper_fpop);
6411 break;
6412 default:
6413 goto illegal_op;
6414 }
6415 break;
6416 case 0x1c:
6417 switch(rm) {
6418 case 0: /* feni (287 only, just do nop here) */
6419 break;
6420 case 1: /* fdisi (287 only, just do nop here) */
6421 break;
6422 case 2: /* fclex */
6423 tcg_gen_helper_0_0(helper_fclex);
6424 break;
6425 case 3: /* fninit */
6426 tcg_gen_helper_0_0(helper_fninit);
6427 break;
6428 case 4: /* fsetpm (287 only, just do nop here) */
6429 break;
6430 default:
6431 goto illegal_op;
6432 }
6433 break;
6434 case 0x1d: /* fucomi */
6435 if (s->cc_op != CC_OP_DYNAMIC)
6436 gen_op_set_cc_op(s->cc_op);
6437 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6438 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
6439 s->cc_op = CC_OP_EFLAGS;
6440 break;
6441 case 0x1e: /* fcomi */
6442 if (s->cc_op != CC_OP_DYNAMIC)
6443 gen_op_set_cc_op(s->cc_op);
6444 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6445 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
6446 s->cc_op = CC_OP_EFLAGS;
6447 break;
6448 case 0x28: /* ffree sti */
6449 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
6450 break;
6451 case 0x2a: /* fst sti */
6452 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
6453 break;
6454 case 0x2b: /* fstp sti */
6455 case 0x0b: /* fstp1 sti, undocumented op */
6456 case 0x3a: /* fstp8 sti, undocumented op */
6457 case 0x3b: /* fstp9 sti, undocumented op */
6458 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
6459 tcg_gen_helper_0_0(helper_fpop);
6460 break;
6461 case 0x2c: /* fucom st(i) */
6462 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6463 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6464 break;
6465 case 0x2d: /* fucomp st(i) */
6466 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6467 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6468 tcg_gen_helper_0_0(helper_fpop);
6469 break;
6470 case 0x33: /* de/3 */
6471 switch(rm) {
6472 case 1: /* fcompp */
6473 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
6474 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6475 tcg_gen_helper_0_0(helper_fpop);
6476 tcg_gen_helper_0_0(helper_fpop);
6477 break;
6478 default:
6479 goto illegal_op;
6480 }
6481 break;
6482 case 0x38: /* ffreep sti, undocumented op */
6483 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
6484 tcg_gen_helper_0_0(helper_fpop);
6485 break;
6486 case 0x3c: /* df/4 */
6487 switch(rm) {
6488 case 0:
6489 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
6490 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6491 gen_op_mov_reg_T0(OT_WORD, R_EAX);
6492 break;
6493 default:
6494 goto illegal_op;
6495 }
6496 break;
6497 case 0x3d: /* fucomip */
6498 if (s->cc_op != CC_OP_DYNAMIC)
6499 gen_op_set_cc_op(s->cc_op);
6500 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6501 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
6502 tcg_gen_helper_0_0(helper_fpop);
6503 s->cc_op = CC_OP_EFLAGS;
6504 break;
6505 case 0x3e: /* fcomip */
6506 if (s->cc_op != CC_OP_DYNAMIC)
6507 gen_op_set_cc_op(s->cc_op);
6508 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6509 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
6510 tcg_gen_helper_0_0(helper_fpop);
6511 s->cc_op = CC_OP_EFLAGS;
6512 break;
6513 case 0x10 ... 0x13: /* fcmovxx */
6514 case 0x18 ... 0x1b:
6515 {
6516 int op1, l1;
6517 static const uint8_t fcmov_cc[8] = {
6518 (JCC_B << 1),
6519 (JCC_Z << 1),
6520 (JCC_BE << 1),
6521 (JCC_P << 1),
6522 };
6523 op1 = fcmov_cc[op & 3] | (((op >> 3) & 1) ^ 1);
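/* gen_jcc1 branches to l1 when op1 holds; bit 0 of op1 negates the base
   condition, so the branch skips the fmov exactly when the fcmov
   condition is false */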
6524 l1 = gen_new_label();
6525 gen_jcc1(s, s->cc_op, op1, l1);
6526 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
6527 gen_set_label(l1);
6528 }
6529 break;
6530 default:
6531 goto illegal_op;
6532 }
6533 }
6534 break;
6535 /************************/
6536 /* string ops */
6537
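/* for the string instructions the F2 (repnz) and F3 (repz) prefixes both
   act as a plain REP, except for scas/cmps where ZF is also tested */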
6538 case 0xa4: /* movsS */
6539 case 0xa5:
6540 if ((b & 1) == 0)
6541 ot = OT_BYTE;
6542 else
6543 ot = dflag + OT_WORD;
6544
6545 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6546 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6547 } else {
6548 gen_movs(s, ot);
6549 }
6550 break;
6551
6552 case 0xaa: /* stosS */
6553 case 0xab:
6554 if ((b & 1) == 0)
6555 ot = OT_BYTE;
6556 else
6557 ot = dflag + OT_WORD;
6558
6559 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6560 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6561 } else {
6562 gen_stos(s, ot);
6563 }
6564 break;
6565 case 0xac: /* lodsS */
6566 case 0xad:
6567 if ((b & 1) == 0)
6568 ot = OT_BYTE;
6569 else
6570 ot = dflag + OT_WORD;
6571 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6572 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6573 } else {
6574 gen_lods(s, ot);
6575 }
6576 break;
6577 case 0xae: /* scasS */
6578 case 0xaf:
6579 if ((b & 1) == 0)
6580 ot = OT_BYTE;
6581 else
6582 ot = dflag + OT_WORD;
6583 if (prefixes & PREFIX_REPNZ) {
6584 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6585 } else if (prefixes & PREFIX_REPZ) {
6586 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6587 } else {
6588 gen_scas(s, ot);
6589 s->cc_op = CC_OP_SUBB + ot;
6590 }
6591 break;
6592
6593 case 0xa6: /* cmpsS */
6594 case 0xa7:
6595 if ((b & 1) == 0)
6596 ot = OT_BYTE;
6597 else
6598 ot = dflag + OT_WORD;
6599 if (prefixes & PREFIX_REPNZ) {
6600 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6601 } else if (prefixes & PREFIX_REPZ) {
6602 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6603 } else {
6604 gen_cmps(s, ot);
6605 s->cc_op = CC_OP_SUBB + ot;
6606 }
6607 break;
6608 case 0x6c: /* insS */
6609 case 0x6d:
6610 if ((b & 1) == 0)
6611 ot = OT_BYTE;
6612 else
6613 ot = dflag ? OT_LONG : OT_WORD;
6614 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6615 gen_op_andl_T0_ffff();
6616 gen_check_io(s, ot, pc_start - s->cs_base,
6617 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
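/* SVM IOIO exit info: the type bit flags an IN access, bit 2 (the 4 here)
   flags a string instruction, and svm_is_rep() contributes the REP bit */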
6618 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6619 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6620 } else {
6621 gen_ins(s, ot);
6622 if (use_icount) {
6623 gen_jmp(s, s->pc - s->cs_base);
6624 }
6625 }
6626 break;
6627 case 0x6e: /* outsS */
6628 case 0x6f:
6629 if ((b & 1) == 0)
6630 ot = OT_BYTE;
6631 else
6632 ot = dflag ? OT_LONG : OT_WORD;
6633 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6634 gen_op_andl_T0_ffff();
6635 gen_check_io(s, ot, pc_start - s->cs_base,
6636 svm_is_rep(prefixes) | 4);
6637 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6638 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6639 } else {
6640 gen_outs(s, ot);
6641 if (use_icount) {
6642 gen_jmp(s, s->pc - s->cs_base);
6643 }
6644 }
6645 break;
6646
6647 /************************/
6648 /* port I/O */
6649
6650 case 0xe4:
6651 case 0xe5:
6652 if ((b & 1) == 0)
6653 ot = OT_BYTE;
6654 else
6655 ot = dflag ? OT_LONG : OT_WORD;
6656 val = ldub_code(s->pc++);
6657 gen_op_movl_T0_im(val);
6658 gen_check_io(s, ot, pc_start - s->cs_base,
6659 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
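/* with icount enabled the I/O access must be the last operation in the
   translation block: bracket it with gen_io_start/gen_io_end and end the
   block immediately afterwards */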
6660 if (use_icount)
6661 gen_io_start();
6662 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6663 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
6664 gen_op_mov_reg_T1(ot, R_EAX);
6665 if (use_icount) {
6666 gen_io_end();
6667 gen_jmp(s, s->pc - s->cs_base);
6668 }
6669 break;
6670 case 0xe6:
6671 case 0xe7:
6672 if ((b & 1) == 0)
6673 ot = OT_BYTE;
6674 else
6675 ot = dflag ? OT_LONG : OT_WORD;
6676 val = ldub_code(s->pc++);
6677 gen_op_movl_T0_im(val);
6678 gen_check_io(s, ot, pc_start - s->cs_base,
6679 svm_is_rep(prefixes));
6680#ifdef VBOX /* bird: Linux writes to this port to delay I/O. */
6681 if (val == 0x80)
6682 break;
6683#endif /* VBOX */
6684 gen_op_mov_TN_reg(ot, 1, R_EAX);
6685
6686 if (use_icount)
6687 gen_io_start();
6688 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6689 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6690 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6691 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6692 if (use_icount) {
6693 gen_io_end();
6694 gen_jmp(s, s->pc - s->cs_base);
6695 }
6696 break;
6697 case 0xec:
6698 case 0xed:
6699 if ((b & 1) == 0)
6700 ot = OT_BYTE;
6701 else
6702 ot = dflag ? OT_LONG : OT_WORD;
6703 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6704 gen_op_andl_T0_ffff();
6705 gen_check_io(s, ot, pc_start - s->cs_base,
6706 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6707 if (use_icount)
6708 gen_io_start();
6709 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6710 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
6711 gen_op_mov_reg_T1(ot, R_EAX);
6712 if (use_icount) {
6713 gen_io_end();
6714 gen_jmp(s, s->pc - s->cs_base);
6715 }
6716 break;
6717 case 0xee:
6718 case 0xef:
6719 if ((b & 1) == 0)
6720 ot = OT_BYTE;
6721 else
6722 ot = dflag ? OT_LONG : OT_WORD;
6723 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6724 gen_op_andl_T0_ffff();
6725 gen_check_io(s, ot, pc_start - s->cs_base,
6726 svm_is_rep(prefixes));
6727 gen_op_mov_TN_reg(ot, 1, R_EAX);
6728
6729 if (use_icount)
6730 gen_io_start();
6731 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6732 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6733 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6734 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6735 if (use_icount) {
6736 gen_io_end();
6737 gen_jmp(s, s->pc - s->cs_base);
6738 }
6739 break;
6740
6741 /************************/
6742 /* control */
6743 case 0xc2: /* ret im */
6744 val = ldsw_code(s->pc);
6745 s->pc += 2;
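/* in 64-bit mode the near RET operand size defaults to 64 bits (only a
   0x66 prefix keeps it at 16), so promote dflag; the stack update below
   releases the return address (2 << dflag bytes) plus the imm16 count */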
6746 gen_pop_T0(s);
6747 if (CODE64(s) && s->dflag)
6748 s->dflag = 2;
6749 gen_stack_update(s, val + (2 << s->dflag));
6750 if (s->dflag == 0)
6751 gen_op_andl_T0_ffff();
6752 gen_op_jmp_T0();
6753 gen_eob(s);
6754 break;
6755 case 0xc3: /* ret */
6756 gen_pop_T0(s);
6757 gen_pop_update(s);
6758 if (s->dflag == 0)
6759 gen_op_andl_T0_ffff();
6760 gen_op_jmp_T0();
6761 gen_eob(s);
6762 break;
6763 case 0xca: /* lret im */
6764 val = ldsw_code(s->pc);
6765 s->pc += 2;
6766 do_lret:
6767 if (s->pe && !s->vm86) {
6768 if (s->cc_op != CC_OP_DYNAMIC)
6769 gen_op_set_cc_op(s->cc_op);
6770 gen_jmp_im(pc_start - s->cs_base);
6771 tcg_gen_helper_0_2(helper_lret_protected,
6772 tcg_const_i32(s->dflag),
6773 tcg_const_i32(val));
6774 } else {
6775 gen_stack_A0(s);
6776 /* pop offset */
6777 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6778 if (s->dflag == 0)
6779 gen_op_andl_T0_ffff();
6780 /* NOTE: keeping EIP updated is not a problem in case of
6781 exception */
6782 gen_op_jmp_T0();
6783 /* pop selector */
6784 gen_op_addl_A0_im(2 << s->dflag);
6785 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6786 gen_op_movl_seg_T0_vm(R_CS);
6787 /* add stack offset */
6788 gen_stack_update(s, val + (4 << s->dflag));
6789 }
6790 gen_eob(s);
6791 break;
6792 case 0xcb: /* lret */
6793 val = 0;
6794 goto do_lret;
6795 case 0xcf: /* iret */
6796 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET);
6797 if (!s->pe) {
6798 /* real mode */
6799 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6800 s->cc_op = CC_OP_EFLAGS;
6801 } else if (s->vm86) {
6802#ifdef VBOX
6803 if (s->iopl != 3 && (!s->vme || s->dflag)) {
6804#else
6805 if (s->iopl != 3) {
6806#endif
6807 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6808 } else {
6809 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6810 s->cc_op = CC_OP_EFLAGS;
6811 }
6812 } else {
6813 if (s->cc_op != CC_OP_DYNAMIC)
6814 gen_op_set_cc_op(s->cc_op);
6815 gen_jmp_im(pc_start - s->cs_base);
6816 tcg_gen_helper_0_2(helper_iret_protected,
6817 tcg_const_i32(s->dflag),
6818 tcg_const_i32(s->pc - s->cs_base));
6819 s->cc_op = CC_OP_EFLAGS;
6820 }
6821 gen_eob(s);
6822 break;
6823 case 0xe8: /* call im */
6824 {
6825 if (dflag)
6826 tval = (int32_t)insn_get(s, OT_LONG);
6827 else
6828 tval = (int16_t)insn_get(s, OT_WORD);
6829 next_eip = s->pc - s->cs_base;
6830 tval += next_eip;
6831 if (s->dflag == 0)
6832 tval &= 0xffff;
6833 gen_movtl_T0_im(next_eip);
6834 gen_push_T0(s);
6835 gen_jmp(s, tval);
6836 }
6837 break;
6838 case 0x9a: /* lcall im */
6839 {
6840 unsigned int selector, offset;
6841
6842 if (CODE64(s))
6843 goto illegal_op;
6844 ot = dflag ? OT_LONG : OT_WORD;
6845 offset = insn_get(s, ot);
6846 selector = insn_get(s, OT_WORD);
6847
6848 gen_op_movl_T0_im(selector);
6849 gen_op_movl_T1_imu(offset);
6850 }
6851 goto do_lcall;
6852 case 0xe9: /* jmp im */
6853 if (dflag)
6854 tval = (int32_t)insn_get(s, OT_LONG);
6855 else
6856 tval = (int16_t)insn_get(s, OT_WORD);
6857 tval += s->pc - s->cs_base;
6858 if (s->dflag == 0)
6859 tval &= 0xffff;
6860 gen_jmp(s, tval);
6861 break;
6862 case 0xea: /* ljmp im */
6863 {
6864 unsigned int selector, offset;
6865
6866 if (CODE64(s))
6867 goto illegal_op;
6868 ot = dflag ? OT_LONG : OT_WORD;
6869 offset = insn_get(s, ot);
6870 selector = insn_get(s, OT_WORD);
6871
6872 gen_op_movl_T0_im(selector);
6873 gen_op_movl_T1_imu(offset);
6874 }
6875 goto do_ljmp;
6876 case 0xeb: /* jmp Jb */
6877 tval = (int8_t)insn_get(s, OT_BYTE);
6878 tval += s->pc - s->cs_base;
6879 if (s->dflag == 0)
6880 tval &= 0xffff;
6881 gen_jmp(s, tval);
6882 break;
6883 case 0x70 ... 0x7f: /* jcc Jb */
6884 tval = (int8_t)insn_get(s, OT_BYTE);
6885 goto do_jcc;
6886 case 0x180 ... 0x18f: /* jcc Jv */
6887 if (dflag) {
6888 tval = (int32_t)insn_get(s, OT_LONG);
6889 } else {
6890 tval = (int16_t)insn_get(s, OT_WORD);
6891 }
6892 do_jcc:
6893 next_eip = s->pc - s->cs_base;
6894 tval += next_eip;
6895 if (s->dflag == 0)
6896 tval &= 0xffff;
6897 gen_jcc(s, b, tval, next_eip);
6898 break;
6899
6900 case 0x190 ... 0x19f: /* setcc Gv */
6901 modrm = ldub_code(s->pc++);
6902 gen_setcc(s, b);
6903 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
6904 break;
6905 case 0x140 ... 0x14f: /* cmov Gv, Ev */
6906 {
6907 int l1;
6908 TCGv t0;
6909
6910 ot = dflag + OT_WORD;
6911 modrm = ldub_code(s->pc++);
6912 reg = ((modrm >> 3) & 7) | rex_r;
6913 mod = (modrm >> 6) & 3;
6914 t0 = tcg_temp_local_new(TCG_TYPE_TL);
6915 if (mod != 3) {
6916 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6917 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
6918 } else {
6919 rm = (modrm & 7) | REX_B(s);
6920 gen_op_mov_v_reg(ot, t0, rm);
6921 }
6922#ifdef TARGET_X86_64
6923 if (ot == OT_LONG) {
6924 /* XXX: specific Intel behaviour ? */
6925 l1 = gen_new_label();
6926 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6927 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
6928 gen_set_label(l1);
6929 tcg_gen_movi_tl(cpu_tmp0, 0);
6930 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
6931 } else
6932#endif
6933 {
6934 l1 = gen_new_label();
6935 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6936 gen_op_mov_reg_v(ot, reg, t0);
6937 gen_set_label(l1);
6938 }
6939 tcg_temp_free(t0);
6940 }
6941 break;
6942
6943 /************************/
6944 /* flags */
6945 case 0x9c: /* pushf */
6946 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF);
6947#ifdef VBOX
6948 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
6949#else
6950 if (s->vm86 && s->iopl != 3) {
6951#endif
6952 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6953 } else {
6954 if (s->cc_op != CC_OP_DYNAMIC)
6955 gen_op_set_cc_op(s->cc_op);
6956#ifdef VBOX
6957 if (s->vm86 && s->vme && s->iopl != 3)
6958 tcg_gen_helper_1_0(helper_read_eflags_vme, cpu_T[0]);
6959 else
6960#endif
6961 tcg_gen_helper_1_0(helper_read_eflags, cpu_T[0]);
6962 gen_push_T0(s);
6963 }
6964 break;
6965 case 0x9d: /* popf */
6966 gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF);
6967#ifdef VBOX
6968 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
6969#else
6970 if (s->vm86 && s->iopl != 3) {
6971#endif
6972 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6973 } else {
6974 gen_pop_T0(s);
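/* the EFLAGS bits writable by POPF depend on privilege: CPL 0 may also
   change IOPL, CPL <= IOPL may change IF, otherwise neither is modified */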
6975 if (s->cpl == 0) {
6976 if (s->dflag) {
6977 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6978 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
6979 } else {
6980 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6981 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
6982 }
6983 } else {
6984 if (s->cpl <= s->iopl) {
6985 if (s->dflag) {
6986 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6987 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
6988 } else {
6989 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6990 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
6991 }
6992 } else {
6993 if (s->dflag) {
6994 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6995 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
6996 } else {
6997#ifdef VBOX
6998 if (s->vm86 && s->vme)
6999 tcg_gen_helper_0_1(helper_write_eflags_vme, cpu_T[0]);
7000 else
7001#endif
7002 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7003 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
7004 }
7005 }
7006 }
7007 gen_pop_update(s);
7008 s->cc_op = CC_OP_EFLAGS;
7009 /* abort translation because TF flag may change */
7010 gen_jmp_im(s->pc - s->cs_base);
7011 gen_eob(s);
7012 }
7013 break;
7014 case 0x9e: /* sahf */
7015 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
7016 goto illegal_op;
7017 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
7018 if (s->cc_op != CC_OP_DYNAMIC)
7019 gen_op_set_cc_op(s->cc_op);
7020 gen_compute_eflags(cpu_cc_src);
7021 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
7022 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
7023 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
7024 s->cc_op = CC_OP_EFLAGS;
7025 break;
7026 case 0x9f: /* lahf */
7027 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
7028 goto illegal_op;
7029 if (s->cc_op != CC_OP_DYNAMIC)
7030 gen_op_set_cc_op(s->cc_op);
7031 gen_compute_eflags(cpu_T[0]);
7032 /* Note: gen_compute_eflags() only gives the condition codes */
7033 tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);
7034 gen_op_mov_reg_T0(OT_BYTE, R_AH);
7035 break;
7036 case 0xf5: /* cmc */
7037 if (s->cc_op != CC_OP_DYNAMIC)
7038 gen_op_set_cc_op(s->cc_op);
7039 gen_compute_eflags(cpu_cc_src);
7040 tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
7041 s->cc_op = CC_OP_EFLAGS;
7042 break;
7043 case 0xf8: /* clc */
7044 if (s->cc_op != CC_OP_DYNAMIC)
7045 gen_op_set_cc_op(s->cc_op);
7046 gen_compute_eflags(cpu_cc_src);
7047 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
7048 s->cc_op = CC_OP_EFLAGS;
7049 break;
7050 case 0xf9: /* stc */
7051 if (s->cc_op != CC_OP_DYNAMIC)
7052 gen_op_set_cc_op(s->cc_op);
7053 gen_compute_eflags(cpu_cc_src);
7054 tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
7055 s->cc_op = CC_OP_EFLAGS;
7056 break;
7057 case 0xfc: /* cld */
7058 tcg_gen_movi_i32(cpu_tmp2_i32, 1);
7059 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
7060 break;
7061 case 0xfd: /* std */
7062 tcg_gen_movi_i32(cpu_tmp2_i32, -1);
7063 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
7064 break;
7065
7066 /************************/
7067 /* bit operations */
7068 case 0x1ba: /* bt/bts/btr/btc Ev, ib */
7069 ot = dflag + OT_WORD;
7070 modrm = ldub_code(s->pc++);
7071 op = (modrm >> 3) & 7;
7072 mod = (modrm >> 6) & 3;
7073 rm = (modrm & 7) | REX_B(s);
7074 if (mod != 3) {
7075 s->rip_offset = 1;
7076 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7077 gen_op_ld_T0_A0(ot + s->mem_index);
7078 } else {
7079 gen_op_mov_TN_reg(ot, 0, rm);
7080 }
7081 /* load shift */
7082 val = ldub_code(s->pc++);
7083 gen_op_movl_T1_im(val);
7084 if (op < 4)
7085 goto illegal_op;
7086 op -= 4;
7087 goto bt_op;
7088 case 0x1a3: /* bt Ev, Gv */
7089 op = 0;
7090 goto do_btx;
7091 case 0x1ab: /* bts */
7092 op = 1;
7093 goto do_btx;
7094 case 0x1b3: /* btr */
7095 op = 2;
7096 goto do_btx;
7097 case 0x1bb: /* btc */
7098 op = 3;
7099 do_btx:
7100 ot = dflag + OT_WORD;
7101 modrm = ldub_code(s->pc++);
7102 reg = ((modrm >> 3) & 7) | rex_r;
7103 mod = (modrm >> 6) & 3;
7104 rm = (modrm & 7) | REX_B(s);
7105 gen_op_mov_TN_reg(OT_LONG, 1, reg);
7106 if (mod != 3) {
7107 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7108 /* specific case: we need to add a displacement */
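/* add (bit index / operand width) words to the address: the arithmetic
   shift by 3 + ot yields the signed word index, shifting left by ot
   converts it back to a byte displacement */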
7109 gen_exts(ot, cpu_T[1]);
7110 tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
7111 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
7112 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
7113 gen_op_ld_T0_A0(ot + s->mem_index);
7114 } else {
7115 gen_op_mov_TN_reg(ot, 0, rm);
7116 }
7117 bt_op:
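/* the bit index is taken modulo the operand width (15, 31 or 63) */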
7118 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
7119 switch(op) {
7120 case 0:
7121 tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
7122 tcg_gen_movi_tl(cpu_cc_dst, 0);
7123 break;
7124 case 1:
7125 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7126 tcg_gen_movi_tl(cpu_tmp0, 1);
7127 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7128 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7129 break;
7130 case 2:
7131 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7132 tcg_gen_movi_tl(cpu_tmp0, 1);
7133 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7134 tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
7135 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7136 break;
7137 default:
7138 case 3:
7139 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7140 tcg_gen_movi_tl(cpu_tmp0, 1);
7141 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7142 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7143 break;
7144 }
7145 s->cc_op = CC_OP_SARB + ot;
7146 if (op != 0) {
7147 if (mod != 3)
7148 gen_op_st_T0_A0(ot + s->mem_index);
7149 else
7150 gen_op_mov_reg_T0(ot, rm);
7151 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
7152 tcg_gen_movi_tl(cpu_cc_dst, 0);
7153 }
7154 break;
7155 case 0x1bc: /* bsf */
7156 case 0x1bd: /* bsr */
7157 {
7158 int label1;
7159 TCGv t0;
7160
7161 ot = dflag + OT_WORD;
7162 modrm = ldub_code(s->pc++);
7163 reg = ((modrm >> 3) & 7) | rex_r;
7164 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
7165 gen_extu(ot, cpu_T[0]);
7166 label1 = gen_new_label();
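/* under CC_OP_LOGIC*, ZF = (cc_dst == 0): leave cc_dst at 0 (ZF set)
   when the source is zero, set it to 1 (ZF clear) otherwise */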
7167 tcg_gen_movi_tl(cpu_cc_dst, 0);
7168 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7169 tcg_gen_mov_tl(t0, cpu_T[0]);
7170 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, label1);
7171 if (b & 1) {
7172 tcg_gen_helper_1_1(helper_bsr, cpu_T[0], t0);
7173 } else {
7174 tcg_gen_helper_1_1(helper_bsf, cpu_T[0], t0);
7175 }
7176 gen_op_mov_reg_T0(ot, reg);
7177 tcg_gen_movi_tl(cpu_cc_dst, 1);
7178 gen_set_label(label1);
7179 tcg_gen_discard_tl(cpu_cc_src);
7180 s->cc_op = CC_OP_LOGICB + ot;
7181 tcg_temp_free(t0);
7182 }
7183 break;
7184 /************************/
7185 /* bcd */
7186 case 0x27: /* daa */
7187 if (CODE64(s))
7188 goto illegal_op;
7189 if (s->cc_op != CC_OP_DYNAMIC)
7190 gen_op_set_cc_op(s->cc_op);
7191 tcg_gen_helper_0_0(helper_daa);
7192 s->cc_op = CC_OP_EFLAGS;
7193 break;
7194 case 0x2f: /* das */
7195 if (CODE64(s))
7196 goto illegal_op;
7197 if (s->cc_op != CC_OP_DYNAMIC)
7198 gen_op_set_cc_op(s->cc_op);
7199 tcg_gen_helper_0_0(helper_das);
7200 s->cc_op = CC_OP_EFLAGS;
7201 break;
7202 case 0x37: /* aaa */
7203 if (CODE64(s))
7204 goto illegal_op;
7205 if (s->cc_op != CC_OP_DYNAMIC)
7206 gen_op_set_cc_op(s->cc_op);
7207 tcg_gen_helper_0_0(helper_aaa);
7208 s->cc_op = CC_OP_EFLAGS;
7209 break;
7210 case 0x3f: /* aas */
7211 if (CODE64(s))
7212 goto illegal_op;
7213 if (s->cc_op != CC_OP_DYNAMIC)
7214 gen_op_set_cc_op(s->cc_op);
7215 tcg_gen_helper_0_0(helper_aas);
7216 s->cc_op = CC_OP_EFLAGS;
7217 break;
7218 case 0xd4: /* aam */
7219 if (CODE64(s))
7220 goto illegal_op;
7221 val = ldub_code(s->pc++);
7222 if (val == 0) {
7223 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
7224 } else {
7225 tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
7226 s->cc_op = CC_OP_LOGICB;
7227 }
7228 break;
7229 case 0xd5: /* aad */
7230 if (CODE64(s))
7231 goto illegal_op;
7232 val = ldub_code(s->pc++);
7233 tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
7234 s->cc_op = CC_OP_LOGICB;
7235 break;
7236 /************************/
7237 /* misc */
7238 case 0x90: /* nop */
7239 /* XXX: xchg + rex handling */
7240 /* XXX: correct lock test for all insn */
7241 if (prefixes & PREFIX_LOCK)
7242 goto illegal_op;
7243 if (prefixes & PREFIX_REPZ) {
7244 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
7245 }
7246 break;
7247 case 0x9b: /* fwait */
7248 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
7249 (HF_MP_MASK | HF_TS_MASK)) {
7250 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7251 } else {
7252 if (s->cc_op != CC_OP_DYNAMIC)
7253 gen_op_set_cc_op(s->cc_op);
7254 gen_jmp_im(pc_start - s->cs_base);
7255 tcg_gen_helper_0_0(helper_fwait);
7256 }
7257 break;
7258 case 0xcc: /* int3 */
7259#ifdef VBOX
7260 if (s->vm86 && s->iopl != 3 && !s->vme) {
7261 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7262 } else
7263#endif
7264 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
7265 break;
7266 case 0xcd: /* int N */
7267 val = ldub_code(s->pc++);
7268#ifdef VBOX
7269 if (s->vm86 && s->iopl != 3 && !s->vme) {
7270#else
7271 if (s->vm86 && s->iopl != 3) {
7272#endif
7273 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7274 } else {
7275 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
7276 }
7277 break;
7278 case 0xce: /* into */
7279 if (CODE64(s))
7280 goto illegal_op;
7281 if (s->cc_op != CC_OP_DYNAMIC)
7282 gen_op_set_cc_op(s->cc_op);
7283 gen_jmp_im(pc_start - s->cs_base);
7284 tcg_gen_helper_0_1(helper_into, tcg_const_i32(s->pc - pc_start));
7285 break;
7286 case 0xf1: /* icebp (undocumented, exits to external debugger) */
7287 gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP);
7288#if 1
7289 gen_debug(s, pc_start - s->cs_base);
7290#else
7291 /* start debug */
7292 tb_flush(cpu_single_env);
7293 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
7294#endif
7295 break;
7296 case 0xfa: /* cli */
7297 if (!s->vm86) {
7298 if (s->cpl <= s->iopl) {
7299 tcg_gen_helper_0_0(helper_cli);
7300 } else {
7301 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7302 }
7303 } else {
7304 if (s->iopl == 3) {
7305 tcg_gen_helper_0_0(helper_cli);
7306#ifdef VBOX
7307 } else if (s->iopl != 3 && s->vme) {
7308 tcg_gen_helper_0_0(helper_cli_vme);
7309#endif
7310 } else {
7311 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7312 }
7313 }
7314 break;
7315 case 0xfb: /* sti */
7316 if (!s->vm86) {
7317 if (s->cpl <= s->iopl) {
7318 gen_sti:
7319 tcg_gen_helper_0_0(helper_sti);
7320 /* interrupts are enabled only after the insn following sti */
7321 /* if several consecutive instructions inhibit interrupts, only the
7322 _first_ sets the inhibit flag */
7323 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
7324 tcg_gen_helper_0_0(helper_set_inhibit_irq);
7325 /* give a chance to handle pending irqs */
7326 gen_jmp_im(s->pc - s->cs_base);
7327 gen_eob(s);
7328 } else {
7329 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7330 }
7331 } else {
7332 if (s->iopl == 3) {
7333 goto gen_sti;
7334#ifdef VBOX
7335 } else if (s->iopl != 3 && s->vme) {
7336 tcg_gen_helper_0_0(helper_sti_vme);
7337 /* give a chance to handle pending irqs */
7338 gen_jmp_im(s->pc - s->cs_base);
7339 gen_eob(s);
7340#endif
7341 } else {
7342 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7343 }
7344 }
7345 break;
7346 case 0x62: /* bound */
7347 if (CODE64(s))
7348 goto illegal_op;
7349 ot = dflag ? OT_LONG : OT_WORD;
7350 modrm = ldub_code(s->pc++);
7351 reg = (modrm >> 3) & 7;
7352 mod = (modrm >> 6) & 3;
7353 if (mod == 3)
7354 goto illegal_op;
7355 gen_op_mov_TN_reg(ot, 0, reg);
7356 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7357 gen_jmp_im(pc_start - s->cs_base);
7358 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7359 if (ot == OT_WORD)
7360 tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
7361 else
7362 tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
7363 break;
7364 case 0x1c8 ... 0x1cf: /* bswap reg */
7365 reg = (b & 7) | REX_B(s);
7366#ifdef TARGET_X86_64
7367 if (dflag == 2) {
7368 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
7369 tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
7370 gen_op_mov_reg_T0(OT_QUAD, reg);
7371 } else
7372 {
7373 TCGv tmp0;
7374 gen_op_mov_TN_reg(OT_LONG, 0, reg);
7375
7376 tmp0 = tcg_temp_new(TCG_TYPE_I32);
7377 tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
7378 tcg_gen_bswap_i32(tmp0, tmp0);
7379 tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
7380 gen_op_mov_reg_T0(OT_LONG, reg);
7381 }
7382#else
7383 {
7384 gen_op_mov_TN_reg(OT_LONG, 0, reg);
7385 tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
7386 gen_op_mov_reg_T0(OT_LONG, reg);
7387 }
7388#endif
7389 break;
7390 case 0xd6: /* salc */
7391 if (CODE64(s))
7392 goto illegal_op;
7393 if (s->cc_op != CC_OP_DYNAMIC)
7394 gen_op_set_cc_op(s->cc_op);
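/* undocumented SALC: AL = CF ? 0xff : 0x00; negating the 0/1 carry
   value produces the byte mask */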
7395 gen_compute_eflags_c(cpu_T[0]);
7396 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
7397 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
7398 break;
7399 case 0xe0: /* loopnz */
7400 case 0xe1: /* loopz */
7401 case 0xe2: /* loop */
7402 case 0xe3: /* jecxz */
7403 {
7404 int l1, l2, l3;
7405
7406 tval = (int8_t)insn_get(s, OT_BYTE);
7407 next_eip = s->pc - s->cs_base;
7408 tval += next_eip;
7409 if (s->dflag == 0)
7410 tval &= 0xffff;
7411
7412 l1 = gen_new_label();
7413 l2 = gen_new_label();
7414 l3 = gen_new_label();
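/* l1 = branch taken (jump to tval), l3 = not-taken path (fall through
   to next_eip), l2 = common block exit */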
7415 b &= 3;
7416 switch(b) {
7417 case 0: /* loopnz */
7418 case 1: /* loopz */
7419 if (s->cc_op != CC_OP_DYNAMIC)
7420 gen_op_set_cc_op(s->cc_op);
7421 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7422 gen_op_jz_ecx(s->aflag, l3);
7423 gen_compute_eflags(cpu_tmp0);
7424 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
7425 if (b == 0) {
7426 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
7427 } else {
7428 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, l1);
7429 }
7430 break;
7431 case 2: /* loop */
7432 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7433 gen_op_jnz_ecx(s->aflag, l1);
7434 break;
7435 default:
7436 case 3: /* jcxz */
7437 gen_op_jz_ecx(s->aflag, l1);
7438 break;
7439 }
7440
7441 gen_set_label(l3);
7442 gen_jmp_im(next_eip);
7443 tcg_gen_br(l2);
7444
7445 gen_set_label(l1);
7446 gen_jmp_im(tval);
7447 gen_set_label(l2);
7448 gen_eob(s);
7449 }
7450 break;
7451 case 0x130: /* wrmsr */
7452 case 0x132: /* rdmsr */
7453 if (s->cpl != 0) {
7454 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7455 } else {
7456 if (s->cc_op != CC_OP_DYNAMIC)
7457 gen_op_set_cc_op(s->cc_op);
7458 gen_jmp_im(pc_start - s->cs_base);
7459 if (b & 2) {
7460 tcg_gen_helper_0_0(helper_rdmsr);
7461 } else {
7462 tcg_gen_helper_0_0(helper_wrmsr);
7463 }
7464 }
7465 break;
7466 case 0x131: /* rdtsc */
7467 if (s->cc_op != CC_OP_DYNAMIC)
7468 gen_op_set_cc_op(s->cc_op);
7469 gen_jmp_im(pc_start - s->cs_base);
7470 if (use_icount)
7471 gen_io_start();
7472 tcg_gen_helper_0_0(helper_rdtsc);
7473 if (use_icount) {
7474 gen_io_end();
7475 gen_jmp(s, s->pc - s->cs_base);
7476 }
7477 break;
7478 case 0x133: /* rdpmc */
7479 if (s->cc_op != CC_OP_DYNAMIC)
7480 gen_op_set_cc_op(s->cc_op);
7481 gen_jmp_im(pc_start - s->cs_base);
7482 tcg_gen_helper_0_0(helper_rdpmc);
7483 break;
7484 case 0x134: /* sysenter */
7485#ifndef VBOX
7486 /* on Intel CPUs, SYSENTER is valid in 64-bit mode */
7487 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
7488#else
7489 /** @todo: make things right */
7490 if (CODE64(s))
7491#endif
7492 goto illegal_op;
7493 if (!s->pe) {
7494 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7495 } else {
7496 if (s->cc_op != CC_OP_DYNAMIC) {
7497 gen_op_set_cc_op(s->cc_op);
7498 s->cc_op = CC_OP_DYNAMIC;
7499 }
7500 gen_jmp_im(pc_start - s->cs_base);
7501 tcg_gen_helper_0_0(helper_sysenter);
7502 gen_eob(s);
7503 }
7504 break;
7505 case 0x135: /* sysexit */
7506#ifndef VBOX
7507 /* on Intel CPUs, SYSEXIT is valid in 64-bit mode */
7508 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
7509#else
7510 /** @todo: make things right */
7511 if (CODE64(s))
7512#endif
7513 goto illegal_op;
7514 if (!s->pe) {
7515 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7516 } else {
7517 if (s->cc_op != CC_OP_DYNAMIC) {
7518 gen_op_set_cc_op(s->cc_op);
7519 s->cc_op = CC_OP_DYNAMIC;
7520 }
7521 gen_jmp_im(pc_start - s->cs_base);
7522 tcg_gen_helper_0_1(helper_sysexit, tcg_const_i32(dflag));
7523 gen_eob(s);
7524 }
7525 break;
7526#ifdef TARGET_X86_64
7527 case 0x105: /* syscall */
7528 /* XXX: is it usable in real mode ? */
7529 if (s->cc_op != CC_OP_DYNAMIC) {
7530 gen_op_set_cc_op(s->cc_op);
7531 s->cc_op = CC_OP_DYNAMIC;
7532 }
7533 gen_jmp_im(pc_start - s->cs_base);
7534 tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
7535 gen_eob(s);
7536 break;
7537 case 0x107: /* sysret */
7538 if (!s->pe) {
7539 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7540 } else {
7541 if (s->cc_op != CC_OP_DYNAMIC) {
7542 gen_op_set_cc_op(s->cc_op);
7543 s->cc_op = CC_OP_DYNAMIC;
7544 }
7545 gen_jmp_im(pc_start - s->cs_base);
7546 tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
7547 /* condition codes are modified only in long mode */
7548 if (s->lma)
7549 s->cc_op = CC_OP_EFLAGS;
7550 gen_eob(s);
7551 }
7552 break;
7553#endif
7554 case 0x1a2: /* cpuid */
7555 if (s->cc_op != CC_OP_DYNAMIC)
7556 gen_op_set_cc_op(s->cc_op);
7557 gen_jmp_im(pc_start - s->cs_base);
7558 tcg_gen_helper_0_0(helper_cpuid);
7559 break;
7560 case 0xf4: /* hlt */
7561 if (s->cpl != 0) {
7562 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7563 } else {
7564 if (s->cc_op != CC_OP_DYNAMIC)
7565 gen_op_set_cc_op(s->cc_op);
7566 gen_jmp_im(pc_start - s->cs_base);
7567 tcg_gen_helper_0_1(helper_hlt, tcg_const_i32(s->pc - pc_start));
7568 s->is_jmp = 3;
7569 }
7570 break;
7571 case 0x100:
7572 modrm = ldub_code(s->pc++);
7573 mod = (modrm >> 6) & 3;
7574 op = (modrm >> 3) & 7;
7575 switch(op) {
7576 case 0: /* sldt */
7577 if (!s->pe || s->vm86)
7578 goto illegal_op;
7579 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ);
7580 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
7581 ot = OT_WORD;
7582 if (mod == 3)
7583 ot += s->dflag;
7584 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
7585 break;
7586 case 2: /* lldt */
7587 if (!s->pe || s->vm86)
7588 goto illegal_op;
7589 if (s->cpl != 0) {
7590 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7591 } else {
7592 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE);
7593 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7594 gen_jmp_im(pc_start - s->cs_base);
7595 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7596 tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
7597 }
7598 break;
7599 case 1: /* str */
7600 if (!s->pe || s->vm86)
7601 goto illegal_op;
7602 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ);
7603 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
7604 ot = OT_WORD;
7605 if (mod == 3)
7606 ot += s->dflag;
7607 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
7608 break;
7609 case 3: /* ltr */
7610 if (!s->pe || s->vm86)
7611 goto illegal_op;
7612 if (s->cpl != 0) {
7613 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7614 } else {
7615 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE);
7616 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7617 gen_jmp_im(pc_start - s->cs_base);
7618 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7619 tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
7620 }
7621 break;
7622 case 4: /* verr */
7623 case 5: /* verw */
7624 if (!s->pe || s->vm86)
7625 goto illegal_op;
7626 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7627 if (s->cc_op != CC_OP_DYNAMIC)
7628 gen_op_set_cc_op(s->cc_op);
7629 if (op == 4)
7630 tcg_gen_helper_0_1(helper_verr, cpu_T[0]);
7631 else
7632 tcg_gen_helper_0_1(helper_verw, cpu_T[0]);
7633 s->cc_op = CC_OP_EFLAGS;
7634 break;
7635 default:
7636 goto illegal_op;
7637 }
7638 break;
7639 case 0x101:
7640 modrm = ldub_code(s->pc++);
7641 mod = (modrm >> 6) & 3;
7642 op = (modrm >> 3) & 7;
7643 rm = modrm & 7;
7644
7645#ifdef VBOX
7646 /* 0f 01 f9 */
7647 if (modrm == 0xf9)
7648 {
7649 if (!(s->cpuid_ext2_features & CPUID_EXT2_RDTSCP))
7650 goto illegal_op;
7651 gen_jmp_im(pc_start - s->cs_base);
7652 tcg_gen_helper_0_0(helper_rdtscp);
7653 break;
7654 }
7655#endif
7656 switch(op) {
7657 case 0: /* sgdt */
7658 if (mod == 3)
7659 goto illegal_op;
7660 gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ);
7661 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7662 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
7663 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7664 gen_add_A0_im(s, 2);
7665 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
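/* with a 16-bit operand size only the low 24 bits of the descriptor
   table base are stored */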
7666 if (!s->dflag)
7667 gen_op_andl_T0_im(0xffffff);
7668 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7669 break;
7670 case 1:
7671 if (mod == 3) {
7672 switch (rm) {
7673 case 0: /* monitor */
7674 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7675 s->cpl != 0)
7676 goto illegal_op;
7677 if (s->cc_op != CC_OP_DYNAMIC)
7678 gen_op_set_cc_op(s->cc_op);
7679 gen_jmp_im(pc_start - s->cs_base);
7680#ifdef TARGET_X86_64
7681 if (s->aflag == 2) {
7682 gen_op_movq_A0_reg(R_EAX);
7683 } else
7684#endif
7685 {
7686 gen_op_movl_A0_reg(R_EAX);
7687 if (s->aflag == 0)
7688 gen_op_andl_A0_ffff();
7689 }
7690 gen_add_A0_ds_seg(s);
7691 tcg_gen_helper_0_1(helper_monitor, cpu_A0);
7692 break;
7693 case 1: /* mwait */
7694 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7695 s->cpl != 0)
7696 goto illegal_op;
7697 if (s->cc_op != CC_OP_DYNAMIC) {
7698 gen_op_set_cc_op(s->cc_op);
7699 s->cc_op = CC_OP_DYNAMIC;
7700 }
7701 gen_jmp_im(pc_start - s->cs_base);
7702 tcg_gen_helper_0_1(helper_mwait, tcg_const_i32(s->pc - pc_start));
7703 gen_eob(s);
7704 break;
7705 default:
7706 goto illegal_op;
7707 }
7708 } else { /* sidt */
7709 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ);
7710 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7711 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
7712 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7713 gen_add_A0_im(s, 2);
7714 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
7715 if (!s->dflag)
7716 gen_op_andl_T0_im(0xffffff);
7717 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7718 }
7719 break;
7720 case 2: /* lgdt */
7721 case 3: /* lidt */
7722 if (mod == 3) {
7723 if (s->cc_op != CC_OP_DYNAMIC)
7724 gen_op_set_cc_op(s->cc_op);
7725 gen_jmp_im(pc_start - s->cs_base);
7726 switch(rm) {
7727 case 0: /* VMRUN */
7728 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7729 goto illegal_op;
7730 if (s->cpl != 0) {
7731 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7732 break;
7733 } else {
7734 tcg_gen_helper_0_2(helper_vmrun,
7735 tcg_const_i32(s->aflag),
7736 tcg_const_i32(s->pc - pc_start));
7737 tcg_gen_exit_tb(0);
7738 s->is_jmp = 3;
7739 }
7740 break;
7741 case 1: /* VMMCALL */
7742 if (!(s->flags & HF_SVME_MASK))
7743 goto illegal_op;
7744 tcg_gen_helper_0_0(helper_vmmcall);
7745 break;
7746 case 2: /* VMLOAD */
7747 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7748 goto illegal_op;
7749 if (s->cpl != 0) {
7750 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7751 break;
7752 } else {
7753 tcg_gen_helper_0_1(helper_vmload,
7754 tcg_const_i32(s->aflag));
7755 }
7756 break;
7757 case 3: /* VMSAVE */
7758 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7759 goto illegal_op;
7760 if (s->cpl != 0) {
7761 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7762 break;
7763 } else {
7764 tcg_gen_helper_0_1(helper_vmsave,
7765 tcg_const_i32(s->aflag));
7766 }
7767 break;
7768 case 4: /* STGI */
7769 if ((!(s->flags & HF_SVME_MASK) &&
7770 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7771 !s->pe)
7772 goto illegal_op;
7773 if (s->cpl != 0) {
7774 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7775 break;
7776 } else {
7777 tcg_gen_helper_0_0(helper_stgi);
7778 }
7779 break;
7780 case 5: /* CLGI */
7781 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7782 goto illegal_op;
7783 if (s->cpl != 0) {
7784 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7785 break;
7786 } else {
7787 tcg_gen_helper_0_0(helper_clgi);
7788 }
7789 break;
7790 case 6: /* SKINIT */
7791 if ((!(s->flags & HF_SVME_MASK) &&
7792 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7793 !s->pe)
7794 goto illegal_op;
7795 tcg_gen_helper_0_0(helper_skinit);
7796 break;
7797 case 7: /* INVLPGA */
7798 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7799 goto illegal_op;
7800 if (s->cpl != 0) {
7801 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7802 break;
7803 } else {
7804 tcg_gen_helper_0_1(helper_invlpga,
7805 tcg_const_i32(s->aflag));
7806 }
7807 break;
7808 default:
7809 goto illegal_op;
7810 }
7811 } else if (s->cpl != 0) {
7812 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7813 } else {
7814 gen_svm_check_intercept(s, pc_start,
7815 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE);
7816 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7817 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
7818 gen_add_A0_im(s, 2);
7819 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7820 if (!s->dflag)
7821 gen_op_andl_T0_im(0xffffff);
7822 if (op == 2) {
7823 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
7824 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
7825 } else {
7826 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
7827 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
7828 }
7829 }
7830 break;
7831 case 4: /* smsw */
7832 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0);
7833 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
7834 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
7835 break;
7836 case 6: /* lmsw */
7837 if (s->cpl != 0) {
7838 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7839 } else {
7840 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
7841 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7842 tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
7843 gen_jmp_im(s->pc - s->cs_base);
7844 gen_eob(s);
7845 }
7846 break;
7847 case 7: /* invlpg */
7848 if (s->cpl != 0) {
7849 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7850 } else {
7851 if (mod == 3) {
7852#ifdef TARGET_X86_64
7853 if (CODE64(s) && rm == 0) {
7854 /* swapgs */
7855 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7856 tcg_gen_ld_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,kernelgsbase));
7857 tcg_gen_st_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7858 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,kernelgsbase));
7859 } else
7860#endif
7861 {
7862 goto illegal_op;
7863 }
7864 } else {
7865 if (s->cc_op != CC_OP_DYNAMIC)
7866 gen_op_set_cc_op(s->cc_op);
7867 gen_jmp_im(pc_start - s->cs_base);
7868 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7869 tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
7870 gen_jmp_im(s->pc - s->cs_base);
7871 gen_eob(s);
7872 }
7873 }
7874 break;
7875 default:
7876 goto illegal_op;
7877 }
7878 break;
7879 case 0x108: /* invd */
7880 case 0x109: /* wbinvd */
7881 if (s->cpl != 0) {
7882 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7883 } else {
7884 gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD);
7885 /* nothing to do */
7886 }
7887 break;
7888 case 0x63: /* arpl or movslS (x86_64) */
7889#ifdef TARGET_X86_64
7890 if (CODE64(s)) {
7891 int d_ot;
7892 /* d_ot is the size of destination */
7893 d_ot = dflag + OT_WORD;
7894
7895 modrm = ldub_code(s->pc++);
7896 reg = ((modrm >> 3) & 7) | rex_r;
7897 mod = (modrm >> 6) & 3;
7898 rm = (modrm & 7) | REX_B(s);
7899
7900 if (mod == 3) {
7901 gen_op_mov_TN_reg(OT_LONG, 0, rm);
7902 /* sign extend */
7903 if (d_ot == OT_QUAD)
7904 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
7905 gen_op_mov_reg_T0(d_ot, reg);
7906 } else {
7907 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7908 if (d_ot == OT_QUAD) {
7909 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
7910 } else {
7911 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
7912 }
7913 gen_op_mov_reg_T0(d_ot, reg);
7914 }
7915 } else
7916#endif
7917 {
7918 int label1;
7919 TCGv t0, t1, t2, a0;
7920
7921 if (!s->pe || s->vm86)
7922 goto illegal_op;
7923
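/* arpl: t0 holds the destination selector, t1 the source selector's RPL;
   t2 accumulates the new ZF value (CC_Z when the RPL is raised, else 0) */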
7924 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7925 t1 = tcg_temp_local_new(TCG_TYPE_TL);
7926 t2 = tcg_temp_local_new(TCG_TYPE_TL);
7927#ifdef VBOX
7928 a0 = tcg_temp_local_new(TCG_TYPE_TL);
7929#endif
7930 ot = OT_WORD;
7931 modrm = ldub_code(s->pc++);
7932 reg = (modrm >> 3) & 7;
7933 mod = (modrm >> 6) & 3;
7934 rm = modrm & 7;
7935 if (mod != 3) {
7936 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7937#ifdef VBOX
7938 tcg_gen_mov_tl(a0, cpu_A0);
7939#endif
7940 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
7941 } else {
7942 gen_op_mov_v_reg(ot, t0, rm);
7943 }
7944 gen_op_mov_v_reg(ot, t1, reg);
7945 tcg_gen_andi_tl(cpu_tmp0, t0, 3);
7946 tcg_gen_andi_tl(t1, t1, 3);
7947 tcg_gen_movi_tl(t2, 0);
7948 label1 = gen_new_label();
7949 tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, t1, label1);
7950 tcg_gen_andi_tl(t0, t0, ~3);
7951 tcg_gen_or_tl(t0, t0, t1);
7952 tcg_gen_movi_tl(t2, CC_Z);
7953 gen_set_label(label1);
7954 if (mod != 3) {
7955#ifdef VBOX
7956 /* cpu_A0 is a global temp and does not survive the branch, so use the saved copy */
7957 gen_op_st_v(ot + s->mem_index, t0, a0);
7958#else
7959 gen_op_st_v(ot + s->mem_index, t0, cpu_A0);
7960#endif
7961 } else {
7962 gen_op_mov_reg_v(ot, rm, t0);
7963 }
7964 if (s->cc_op != CC_OP_DYNAMIC)
7965 gen_op_set_cc_op(s->cc_op);
7966 gen_compute_eflags(cpu_cc_src);
7967 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
7968 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t2);
7969 s->cc_op = CC_OP_EFLAGS;
7970 tcg_temp_free(t0);
7971 tcg_temp_free(t1);
7972 tcg_temp_free(t2);
7973#ifdef VBOX
7974 tcg_temp_free(a0);
7975#endif
7976 }
7977 break;
7978 case 0x102: /* lar */
7979 case 0x103: /* lsl */
7980 {
7981 int label1;
7982 TCGv t0;
7983 if (!s->pe || s->vm86)
7984 goto illegal_op;
7985 ot = dflag ? OT_LONG : OT_WORD;
7986 modrm = ldub_code(s->pc++);
7987 reg = ((modrm >> 3) & 7) | rex_r;
7988 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7989 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7990 if (s->cc_op != CC_OP_DYNAMIC)
7991 gen_op_set_cc_op(s->cc_op);
7992 if (b == 0x102)
7993 tcg_gen_helper_1_1(helper_lar, t0, cpu_T[0]);
7994 else
7995 tcg_gen_helper_1_1(helper_lsl, t0, cpu_T[0]);
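/* the helper leaves ZF in cpu_cc_src on success; the result is written
   back only when ZF is set */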
7996 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
7997 label1 = gen_new_label();
7998 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
7999 gen_op_mov_reg_v(ot, reg, t0);
8000 gen_set_label(label1);
8001 s->cc_op = CC_OP_EFLAGS;
8002 tcg_temp_free(t0);
8003 }
8004 break;
8005 case 0x118:
8006 modrm = ldub_code(s->pc++);
8007 mod = (modrm >> 6) & 3;
8008 op = (modrm >> 3) & 7;
8009 switch(op) {
8010 case 0: /* prefetchnta */
8011 case 1: /* prefetcht0 */
8012 case 2: /* prefetcht1 */
8013 case 3: /* prefetcht2 */
8014 if (mod == 3)
8015 goto illegal_op;
8016 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8017 /* nothing more to do */
8018 break;
8019 default: /* nop (multi byte) */
8020 gen_nop_modrm(s, modrm);
8021 break;
8022 }
8023 break;
8024 case 0x119 ... 0x11f: /* nop (multi byte) */
8025 modrm = ldub_code(s->pc++);
8026 gen_nop_modrm(s, modrm);
8027 break;
8028 case 0x120: /* mov reg, crN */
8029 case 0x122: /* mov crN, reg */
8030 if (s->cpl != 0) {
8031 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8032 } else {
8033 modrm = ldub_code(s->pc++);
8034 if ((modrm & 0xc0) != 0xc0)
8035 goto illegal_op;
8036 rm = (modrm & 7) | REX_B(s);
8037 reg = ((modrm >> 3) & 7) | rex_r;
8038 if (CODE64(s))
8039 ot = OT_QUAD;
8040 else
8041 ot = OT_LONG;
8042 switch(reg) {
8043 case 0:
8044 case 2:
8045 case 3:
8046 case 4:
8047 case 8:
8048 if (s->cc_op != CC_OP_DYNAMIC)
8049 gen_op_set_cc_op(s->cc_op);
8050 gen_jmp_im(pc_start - s->cs_base);
8051 if (b & 2) {
8052 gen_op_mov_TN_reg(ot, 0, rm);
8053 tcg_gen_helper_0_2(helper_write_crN,
8054 tcg_const_i32(reg), cpu_T[0]);
8055 gen_jmp_im(s->pc - s->cs_base);
8056 gen_eob(s);
8057 } else {
8058 tcg_gen_helper_1_1(helper_read_crN,
8059 cpu_T[0], tcg_const_i32(reg));
8060 gen_op_mov_reg_T0(ot, rm);
8061 }
8062 break;
8063 default:
8064 goto illegal_op;
8065 }
8066 }
8067 break;
8068 case 0x121: /* mov reg, drN */
8069 case 0x123: /* mov drN, reg */
8070 if (s->cpl != 0) {
8071 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8072 } else {
8073 modrm = ldub_code(s->pc++);
8074 if ((modrm & 0xc0) != 0xc0)
8075 goto illegal_op;
8076 rm = (modrm & 7) | REX_B(s);
8077 reg = ((modrm >> 3) & 7) | rex_r;
8078 if (CODE64(s))
8079 ot = OT_QUAD;
8080 else
8081 ot = OT_LONG;
8082 /* XXX: do it dynamically with CR4.DE bit */
8083 if (reg == 4 || reg == 5 || reg >= 8)
8084 goto illegal_op;
8085 if (b & 2) {
8086 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
8087 gen_op_mov_TN_reg(ot, 0, rm);
8088 tcg_gen_helper_0_2(helper_movl_drN_T0,
8089 tcg_const_i32(reg), cpu_T[0]);
8090 gen_jmp_im(s->pc - s->cs_base);
8091 gen_eob(s);
8092 } else {
8093 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
8094 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
8095 gen_op_mov_reg_T0(ot, rm);
8096 }
8097 }
8098 break;
8099 case 0x106: /* clts */
8100 if (s->cpl != 0) {
8101 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8102 } else {
8103 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
8104 tcg_gen_helper_0_0(helper_clts);
8105 /* abort block because static cpu state changed */
8106 gen_jmp_im(s->pc - s->cs_base);
8107 gen_eob(s);
8108 }
8109 break;
8110 /* MMX/3DNow!/SSE/SSE2/SSE3/SSSE3/SSE4 support */
8111 case 0x1c3: /* MOVNTI reg, mem */
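 /* The non-temporal hint is irrelevant to the recompiler, so MOVNTI is
    implemented as an ordinary register-to-memory store. */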
8112 if (!(s->cpuid_features & CPUID_SSE2))
8113 goto illegal_op;
8114 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
8115 modrm = ldub_code(s->pc++);
8116 mod = (modrm >> 6) & 3;
8117 if (mod == 3)
8118 goto illegal_op;
8119 reg = ((modrm >> 3) & 7) | rex_r;
8120 /* generate a generic store */
8121 gen_ldst_modrm(s, modrm, ot, reg, 1);
8122 break;
8123 case 0x1ae:
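 /* 0F AE: group 15.  The modrm reg field selects between fxsave (0),
    fxrstor (1), ldmxcsr (2), stmxcsr (3), lfence (5), mfence (6) and
    sfence/clflush (7), handled below. */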
8124 modrm = ldub_code(s->pc++);
8125 mod = (modrm >> 6) & 3;
8126 op = (modrm >> 3) & 7;
8127 switch(op) {
8128 case 0: /* fxsave */
8129 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
8130 (s->flags & HF_EM_MASK))
8131 goto illegal_op;
8132 if (s->flags & HF_TS_MASK) {
8133 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8134 break;
8135 }
8136 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8137 if (s->cc_op != CC_OP_DYNAMIC)
8138 gen_op_set_cc_op(s->cc_op);
8139 gen_jmp_im(pc_start - s->cs_base);
8140 tcg_gen_helper_0_2(helper_fxsave,
8141 cpu_A0, tcg_const_i32((s->dflag == 2)));
8142 break;
8143 case 1: /* fxrstor */
8144 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
8145 (s->flags & HF_EM_MASK))
8146 goto illegal_op;
8147 if (s->flags & HF_TS_MASK) {
8148 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8149 break;
8150 }
8151 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8152 if (s->cc_op != CC_OP_DYNAMIC)
8153 gen_op_set_cc_op(s->cc_op);
8154 gen_jmp_im(pc_start - s->cs_base);
8155 tcg_gen_helper_0_2(helper_fxrstor,
8156 cpu_A0, tcg_const_i32((s->dflag == 2)));
8157 break;
8158 case 2: /* ldmxcsr */
8159 case 3: /* stmxcsr */
8160 if (s->flags & HF_TS_MASK) {
8161 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8162 break;
8163 }
8164 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
8165 mod == 3)
8166 goto illegal_op;
8167 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8168 if (op == 2) {
8169 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
8170 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
8171 } else {
8172 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
8173 gen_op_st_T0_A0(OT_LONG + s->mem_index);
8174 }
8175 break;
8176 case 5: /* lfence */
8177 case 6: /* mfence */
8178 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
8179 goto illegal_op;
8180 break;
8181 case 7: /* sfence / clflush */
8182 if ((modrm & 0xc7) == 0xc0) {
8183 /* sfence */
8184 /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
8185 if (!(s->cpuid_features & CPUID_SSE))
8186 goto illegal_op;
8187 } else {
8188 /* clflush */
8189 if (!(s->cpuid_features & CPUID_CLFLUSH))
8190 goto illegal_op;
8191 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8192 }
8193 break;
8194 default:
8195 goto illegal_op;
8196 }
8197 break;
8198 case 0x10d: /* 3DNow! prefetch(w) */
8199 modrm = ldub_code(s->pc++);
8200 mod = (modrm >> 6) & 3;
8201 if (mod == 3)
8202 goto illegal_op;
8203 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8204 /* ignore for now */
8205 break;
8206 case 0x1aa: /* rsm */
8207 gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM);
8208 if (!(s->flags & HF_SMM_MASK))
8209 goto illegal_op;
8210 if (s->cc_op != CC_OP_DYNAMIC) {
8211 gen_op_set_cc_op(s->cc_op);
8212 s->cc_op = CC_OP_DYNAMIC;
8213 }
8214 gen_jmp_im(s->pc - s->cs_base);
8215 tcg_gen_helper_0_0(helper_rsm);
8216 gen_eob(s);
8217 break;
8218 case 0x1b8: /* SSE4.2 popcnt */
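 /* F3 0F B8: POPCNT counts the set bits of the source operand; the
    helper also recomputes EFLAGS (hence CC_OP_EFLAGS below).  An
    illustrative bit-count equivalent, not the helper itself:

        static inline int popcnt32(uint32_t x)
        {
            int n = 0;
            while (x) {
                x &= x - 1;    (clears the lowest set bit)
                n++;
            }
            return n;
        }
 */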
8219 if ((prefixes & (PREFIX_REPZ | PREFIX_LOCK | PREFIX_REPNZ)) !=
8220 PREFIX_REPZ)
8221 goto illegal_op;
8222 if (!(s->cpuid_ext_features & CPUID_EXT_POPCNT))
8223 goto illegal_op;
8224
8225 modrm = ldub_code(s->pc++);
8226 reg = ((modrm >> 3) & 7);
8227
8228 if (s->prefix & PREFIX_DATA)
8229 ot = OT_WORD;
8230 else if (s->dflag != 2)
8231 ot = OT_LONG;
8232 else
8233 ot = OT_QUAD;
8234
8235 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
8236 tcg_gen_helper_1_2(helper_popcnt,
8237 cpu_T[0], cpu_T[0], tcg_const_i32(ot));
8238 gen_op_mov_reg_T0(ot, reg);
8239
8240 s->cc_op = CC_OP_EFLAGS;
8241 break;
8242 case 0x10e ... 0x10f:
8243 /* 3DNow! instructions, ignore prefixes */
8244 s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
8245 case 0x110 ... 0x117:
8246 case 0x128 ... 0x12f:
8247 case 0x138 ... 0x13a:
8248 case 0x150 ... 0x177:
8249 case 0x17c ... 0x17f:
8250 case 0x1c2:
8251 case 0x1c4 ... 0x1c6:
8252 case 0x1d0 ... 0x1fe:
8253 gen_sse(s, b, pc_start, rex_r);
8254 break;
8255 default:
8256 goto illegal_op;
8257 }
8258 /* lock generation */
8259 if (s->prefix & PREFIX_LOCK)
8260 tcg_gen_helper_0_0(helper_unlock);
8261 return s->pc;
8262 illegal_op:
8263 if (s->prefix & PREFIX_LOCK)
8264 tcg_gen_helper_0_0(helper_unlock);
8265 /* XXX: ensure that no lock was generated */
8266 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
8267 return s->pc;
8268}
8269
8270void optimize_flags_init(void)
8271{
8272#if TCG_TARGET_REG_BITS == 32
8273 assert(sizeof(CCTable) == (1 << 3));
8274#else
8275 assert(sizeof(CCTable) == (1 << 4));
8276#endif
8277 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
8278 cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
8279 TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
8280 cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
8281 TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
8282 cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
8283 TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
8284 cpu_cc_tmp = tcg_global_mem_new(TCG_TYPE_TL,
8285 TCG_AREG0, offsetof(CPUState, cc_tmp), "cc_tmp");
8286
8287 /* register helpers */
8288
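 /* redefining DEF_HELPER as a tcg_register_helper() call and
    re-including helper.h registers every helper with TCG under its
    C symbol name. */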
8289#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
8290#include "helper.h"
8291}
8292
8293/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
8294 basic block 'tb'. If search_pc is TRUE, also generate PC
8295 information for each intermediate instruction. */
8296#ifndef VBOX
8297static inline void gen_intermediate_code_internal(CPUState *env,
8298#else /* VBOX */
8299DECLINLINE(void) gen_intermediate_code_internal(CPUState *env,
8300#endif /* VBOX */
8301 TranslationBlock *tb,
8302 int search_pc)
8303{
8304 DisasContext dc1, *dc = &dc1;
8305 target_ulong pc_ptr;
8306 uint16_t *gen_opc_end;
8307 int j, lj, cflags;
8308 uint64_t flags;
8309 target_ulong pc_start;
8310 target_ulong cs_base;
8311 int num_insns;
8312 int max_insns;
8313
8314 /* generate intermediate code */
8315 pc_start = tb->pc;
8316 cs_base = tb->cs_base;
8317 flags = tb->flags;
8318 cflags = tb->cflags;
8319
8320 dc->pe = (flags >> HF_PE_SHIFT) & 1;
8321 dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
8322 dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
8323 dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
8324 dc->f_st = 0;
8325 dc->vm86 = (flags >> VM_SHIFT) & 1;
8326#ifdef VBOX_WITH_CALL_RECORD
8327 dc->vme = !!(env->cr[4] & CR4_VME_MASK);
8328 if ( !(env->state & CPU_RAW_RING0)
8329 && (env->cr[0] & CR0_PG_MASK)
8330 && !(env->eflags & X86_EFL_IF)
8331 && dc->code32)
8332 dc->record_call = 1;
8333 else
8334 dc->record_call = 0;
8335#endif
8336 dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
8337 dc->iopl = (flags >> IOPL_SHIFT) & 3;
8338 dc->tf = (flags >> TF_SHIFT) & 1;
8339 dc->singlestep_enabled = env->singlestep_enabled;
8340 dc->cc_op = CC_OP_DYNAMIC;
8341 dc->cs_base = cs_base;
8342 dc->tb = tb;
8343 dc->popl_esp_hack = 0;
8344 /* select memory access functions */
8345 dc->mem_index = 0;
8346 if (flags & HF_SOFTMMU_MASK) {
8347 if (dc->cpl == 3)
8348 dc->mem_index = 2 * 4;
8349 else
8350 dc->mem_index = 1 * 4;
8351 }
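 /* the access size fits in the low two bits of the ld/st micro-op
    index, so mem_index encodes the privilege level in strides of 4:
    0 = no softmmu, 4 = kernel mode, 8 = CPL 3. */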
8352 dc->cpuid_features = env->cpuid_features;
8353 dc->cpuid_ext_features = env->cpuid_ext_features;
8354 dc->cpuid_ext2_features = env->cpuid_ext2_features;
8355 dc->cpuid_ext3_features = env->cpuid_ext3_features;
8356#ifdef TARGET_X86_64
8357 dc->lma = (flags >> HF_LMA_SHIFT) & 1;
8358 dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
8359#endif
8360 dc->flags = flags;
8361 dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
8362 (flags & HF_INHIBIT_IRQ_MASK)
8363#ifndef CONFIG_SOFTMMU
8364 || (flags & HF_SOFTMMU_MASK)
8365#endif
8366 );
8367#if 0
8368 /* check addseg logic */
8369 if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
8370 printf("ERROR addseg\n");
8371#endif
8372
8373 cpu_T[0] = tcg_temp_new(TCG_TYPE_TL);
8374 cpu_T[1] = tcg_temp_new(TCG_TYPE_TL);
8375 cpu_A0 = tcg_temp_new(TCG_TYPE_TL);
8376 cpu_T3 = tcg_temp_new(TCG_TYPE_TL);
8377
8378 cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
8379 cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
8380 cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
8381 cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
8382 cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
8383 cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
8384 cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
8385 cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
8386 cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);
8387
8388 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
8389
8390 dc->is_jmp = DISAS_NEXT;
8391 pc_ptr = pc_start;
8392 lj = -1;
8393 num_insns = 0;
8394 max_insns = tb->cflags & CF_COUNT_MASK;
8395 if (max_insns == 0)
8396 max_insns = CF_COUNT_MASK;
8397
8398 gen_icount_start();
8399 for(;;) {
8400 if (env->nb_breakpoints > 0) {
8401 for(j = 0; j < env->nb_breakpoints; j++) {
8402 if (env->breakpoints[j] == pc_ptr) {
8403 gen_debug(dc, pc_ptr - dc->cs_base);
8404 break;
8405 }
8406 }
8407 }
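 /* when translating for a PC search, record per generated micro-op the
    guest PC, the lazy cc_op state and the instruction count, so that
    gen_pc_load() can map a retranslation position back to guest
    state. */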
8408 if (search_pc) {
8409 j = gen_opc_ptr - gen_opc_buf;
8410 if (lj < j) {
8411 lj++;
8412 while (lj < j)
8413 gen_opc_instr_start[lj++] = 0;
8414 }
8415 gen_opc_pc[lj] = pc_ptr;
8416 gen_opc_cc_op[lj] = dc->cc_op;
8417 gen_opc_instr_start[lj] = 1;
8418 gen_opc_icount[lj] = num_insns;
8419 }
8420 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
8421 gen_io_start();
8422
8423 pc_ptr = disas_insn(dc, pc_ptr);
8424 num_insns++;
8425 /* stop translation if indicated */
8426 if (dc->is_jmp)
8427 break;
8428#ifdef VBOX
8429#ifdef DEBUG
8430/*
8431 if(cpu_check_code_raw(env, pc_ptr, env->hflags | (env->eflags & (IOPL_MASK | TF_MASK | VM_MASK))) == ERROR_SUCCESS)
8432 {
8433 //should never happen as the jump to the patch code terminates the translation block
8434 dprintf(("QEmu is about to execute instructions in our patch block at %08X!!\n", pc_ptr));
8435 }
8436*/
8437#endif
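 /* VBox asked us to emulate a single instruction: clear the request
    and terminate the block after this one insn. */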
8438 if (env->state & CPU_EMULATE_SINGLE_INSTR)
8439 {
8440 env->state &= ~CPU_EMULATE_SINGLE_INSTR;
8441 gen_jmp_im(pc_ptr - dc->cs_base);
8442 gen_eob(dc);
8443 break;
8444 }
8445#endif /* VBOX */
8446
 8447 /* in single-step mode, we generate only one instruction and
 8448 then generate an exception */
 8449 /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
 8450 the flag and abort the translation to give the irqs a
 8451 chance to happen */
8452 if (dc->tf || dc->singlestep_enabled ||
8453 (flags & HF_INHIBIT_IRQ_MASK)) {
8454 gen_jmp_im(pc_ptr - dc->cs_base);
8455 gen_eob(dc);
8456 break;
8457 }
8458 /* if too long translation, stop generation too */
8459 if (gen_opc_ptr >= gen_opc_end ||
8460 (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32) ||
8461 num_insns >= max_insns) {
8462 gen_jmp_im(pc_ptr - dc->cs_base);
8463 gen_eob(dc);
8464 break;
8465 }
8466 }
8467 if (tb->cflags & CF_LAST_IO)
8468 gen_io_end();
8469 gen_icount_end(tb, num_insns);
8470 *gen_opc_ptr = INDEX_op_end;
 8471 /* zero-fill the remaining instr_start slots */
8472 if (search_pc) {
8473 j = gen_opc_ptr - gen_opc_buf;
8474 lj++;
8475 while (lj <= j)
8476 gen_opc_instr_start[lj++] = 0;
8477 }
8478
8479#ifdef DEBUG_DISAS
8480 if (loglevel & CPU_LOG_TB_CPU) {
8481 cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
8482 }
8483 if (loglevel & CPU_LOG_TB_IN_ASM) {
8484 int disas_flags;
8485 fprintf(logfile, "----------------\n");
8486 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
8487#ifdef TARGET_X86_64
8488 if (dc->code64)
8489 disas_flags = 2;
8490 else
8491#endif
8492 disas_flags = !dc->code32;
8493 target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
8494 fprintf(logfile, "\n");
8495 }
8496#endif
8497
8498 if (!search_pc) {
8499 tb->size = pc_ptr - pc_start;
8500 tb->icount = num_insns;
8501 }
8502}
8503
8504void gen_intermediate_code(CPUState *env, TranslationBlock *tb)
8505{
8506 gen_intermediate_code_internal(env, tb, 0);
8507}
8508
8509void gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
8510{
8511 gen_intermediate_code_internal(env, tb, 1);
8512}
8513
8514void gen_pc_load(CPUState *env, TranslationBlock *tb,
8515 unsigned long searched_pc, int pc_pos, void *puc)
8516{
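 /* called after a retranslation with search_pc set: restore EIP and
    the lazy flags state recorded for micro-op pc_pos. */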
8517 int cc_op;
8518#ifdef DEBUG_DISAS
8519 if (loglevel & CPU_LOG_TB_OP) {
8520 int i;
8521 fprintf(logfile, "RESTORE:\n");
8522 for(i = 0;i <= pc_pos; i++) {
8523 if (gen_opc_instr_start[i]) {
8524 fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
8525 }
8526 }
8527 fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
8528 searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
8529 (uint32_t)tb->cs_base);
8530 }
8531#endif
8532 env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
8533 cc_op = gen_opc_cc_op[pc_pos];
8534 if (cc_op != CC_OP_DYNAMIC)
8535 env->cc_op = cc_op;
8536}