VirtualBox

source: vbox/trunk/src/recompiler_new/target-i386/translate.c@ 15562

Last change on this file since 15562 was 15107, checked in by vboxsync, 16 years ago

Sign extend 32 bits displacements to 64 bits

  • Property svn:eol-style set to native
File size: 275.4 KB
Line 
1/*
2 * i386 translation
3 *
4 * Copyright (c) 2003 Fabrice Bellard
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
20
21/*
22 * Sun LGPL Disclaimer: For the avoidance of doubt, except that if any license choice
23 * other than GPL or LGPL is available it will apply instead, Sun elects to use only
24 * the Lesser General Public License version 2.1 (LGPLv2) at this time for any software where
25 * a choice of LGPL license versions is made available with the language indicating
26 * that LGPLv2 or any later version may be used, or where a choice of which version
27 * of the LGPL is applied is otherwise unspecified.
28 */
29#include <stdarg.h>
30#include <stdlib.h>
31#include <stdio.h>
32#include <string.h>
33#ifndef VBOX
34#include <inttypes.h>
35#include <signal.h>
36#include <assert.h>
37#endif /* !VBOX */
38
39#include "cpu.h"
40#include "exec-all.h"
41#include "disas.h"
42#include "helper.h"
43#include "tcg-op.h"
44
/* Instruction prefix flags accumulated by the decoder (DisasContext.prefix). */
#define PREFIX_REPZ 0x01
#define PREFIX_REPNZ 0x02
#define PREFIX_LOCK 0x04
#define PREFIX_DATA 0x08
#define PREFIX_ADR 0x10

/* Helpers that compile away (or become constants) on 32-bit-only targets,
   so 64-bit-specific decode paths can be written unconditionally. */
#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#ifndef VBOX
#define X86_64_DEF(x...) x
#else
#define X86_64_DEF(x...) x
#endif
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#if 1
#define BUGGY_64(x) NULL
#endif
#else
#define X86_64_ONLY(x) NULL
#ifndef VBOX
#define X86_64_DEF(x...)
#else
/* NOTE(review): VBOX build avoids variadic macro args here; callers must pass
   the whole argument list as a single parenthesizable token. */
#define X86_64_DEF(x)
#endif
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif
76
77//#define MACRO_TEST 1
78
/* global register indexes (allocated once in translate_init, live across TBs) */
static TCGv cpu_env, cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp;
/* local temps */
static TCGv cpu_T[2], cpu_T3;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
static TCGv cpu_tmp5, cpu_tmp6;

#include "gen-icount.h"

#ifdef TARGET_X86_64
/* Non-zero when a REX prefix is active, changing how byte registers 4..7
   are addressed in gen_op_mov_reg_v()/gen_op_mov_v_reg() (low byte of the
   register instead of AH..BH). */
static int x86_64_hregs;
#endif
92
93#ifdef VBOX
94
95/* Special/override code readers to hide patched code. */
96
/* Read one guest code byte at pc, letting REM substitute the original byte
   for locations VBox has patched; falls back to a normal code fetch. */
uint8_t ldub_code_raw(target_ulong pc)
{
    uint8_t b;

    if (!remR3GetOpcode(cpu_single_env, pc, &b))
        b = ldub_code(pc);
    return b;
}
/* From here on, all code fetches go through the patch-aware reader. */
#define ldub_code(a) ldub_code_raw(a)
106
107uint16_t lduw_code_raw(target_ulong pc)
108{
109 return (ldub_code(pc+1) << 8) | ldub_code(pc);
110}
111#define lduw_code(a) lduw_code_raw(a)
112
113
114uint32_t ldl_code_raw(target_ulong pc)
115{
116 return (ldub_code(pc+3) << 24) | (ldub_code(pc+2) << 16) | (ldub_code(pc+1) << 8) | ldub_code(pc);
117}
118#define ldl_code(a) ldl_code_raw(a)
119
120#endif /* VBOX */
121
122
/* Per-translation-block decoder state: everything the instruction decoder
   needs to know about the current CPU mode plus the current position in
   the guest code stream. One instance lives for the duration of
   gen_intermediate_code() for a single TB. */
typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;   /* PREFIX_* flag mask for the current instruction */
    int aflag, dflag; /* address/operand size: 0=16, 1=32 (2=64 for aflag) */
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b; /* REX.X / REX.B bits of the current prefix */
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
#ifdef VBOX
    int vme;    /* CR4.VME */
    int record_call;    /* record calls for CSAM or not? */
#endif
    int cpl;
    int iopl;
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;
    int cpuid_ext_features;
    int cpuid_ext2_features;
    int cpuid_ext3_features;
} DisasContext;
164
165static void gen_eob(DisasContext *s);
166static void gen_jmp(DisasContext *s, target_ulong eip);
167static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
168
169#ifdef VBOX
170static void gen_check_external_event();
171#endif
172
/* i386 arith/logic operations (values are used as op indexes by the decoder) */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};

/* i386 shift ops */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7,
};

/* condition codes for Jcc/SETcc/CMOVcc (low bit = negation in the opcode) */
enum {
    JCC_O,
    JCC_B,
    JCC_Z,
    JCC_BE,
    JCC_S,
    JCC_P,
    JCC_L,
    JCC_LE,
};

/* operand size */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};

enum {
    /* I386 int registers */
    OR_EAX,   /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16,    /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};
231
#ifndef VBOX
static inline void gen_op_movl_T0_0(void)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_0(void)
#endif /* VBOX */
{
    /* T0 = 0 */
    tcg_gen_movi_tl(cpu_T[0], 0);
}
240
#ifndef VBOX
static inline void gen_op_movl_T0_im(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_im(int32_t val)
#endif /* VBOX */
{
    /* T0 = val (int32_t, so sign-extended when target_long is 64 bits) */
    tcg_gen_movi_tl(cpu_T[0], val);
}
249
#ifndef VBOX
static inline void gen_op_movl_T0_imu(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_imu(uint32_t val)
#endif /* VBOX */
{
    /* T0 = val (uint32_t, so zero-extended when target_long is 64 bits) */
    tcg_gen_movi_tl(cpu_T[0], val);
}
258
#ifndef VBOX
static inline void gen_op_movl_T1_im(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T1_im(int32_t val)
#endif /* VBOX */
{
    /* T1 = val (sign-extended immediate) */
    tcg_gen_movi_tl(cpu_T[1], val);
}
267
#ifndef VBOX
static inline void gen_op_movl_T1_imu(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T1_imu(uint32_t val)
#endif /* VBOX */
{
    /* T1 = val (zero-extended immediate) */
    tcg_gen_movi_tl(cpu_T[1], val);
}
276
#ifndef VBOX
static inline void gen_op_movl_A0_im(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_A0_im(uint32_t val)
#endif /* VBOX */
{
    /* A0 (address temp) = val, 32-bit immediate */
    tcg_gen_movi_tl(cpu_A0, val);
}
285
#ifdef TARGET_X86_64
#ifndef VBOX
static inline void gen_op_movq_A0_im(int64_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movq_A0_im(int64_t val)
#endif /* VBOX */
{
    /* A0 = val, full 64-bit immediate (long mode addressing) */
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif
296
#ifndef VBOX
static inline void gen_movtl_T0_im(target_ulong val)
#else /* VBOX */
DECLINLINE(void) gen_movtl_T0_im(target_ulong val)
#endif /* VBOX */
{
    /* T0 = val, full target-width immediate */
    tcg_gen_movi_tl(cpu_T[0], val);
}
305
#ifndef VBOX
static inline void gen_movtl_T1_im(target_ulong val)
#else /* VBOX */
DECLINLINE(void) gen_movtl_T1_im(target_ulong val)
#endif /* VBOX */
{
    /* T1 = val, full target-width immediate */
    tcg_gen_movi_tl(cpu_T[1], val);
}
314
#ifndef VBOX
static inline void gen_op_andl_T0_ffff(void)
#else /* VBOX */
DECLINLINE(void) gen_op_andl_T0_ffff(void)
#endif /* VBOX */
{
    /* T0 &= 0xffff (truncate to 16 bits) */
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}
323
#ifndef VBOX
static inline void gen_op_andl_T0_im(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_andl_T0_im(uint32_t val)
#endif /* VBOX */
{
    /* T0 &= val */
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}
332
#ifndef VBOX
static inline void gen_op_movl_T0_T1(void)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_T1(void)
#endif /* VBOX */
{
    /* T0 = T1 */
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}
341
#ifndef VBOX
static inline void gen_op_andl_A0_ffff(void)
#else /* VBOX */
DECLINLINE(void) gen_op_andl_A0_ffff(void)
#endif /* VBOX */
{
    /* A0 &= 0xffff (16-bit addressing wrap) */
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}
350
#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#endif /* !TARGET_X86_64 */

/* Byte offsets of the sub-register views inside a target_ulong register
   slot in CPUState.regs[]: B = low byte, H = second byte (AH..BH),
   W = low word, L = low dword, LH = high dword of a 64-bit register.
   On big-endian hosts the sub-fields live at the end of the slot. */
#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif
374
#ifndef VBOX
static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_v(int ot, int reg, TCGv t0)
#endif /* VBOX */
{
    /* Store t0 into guest register 'reg' with operand size 'ot'.
       OT_BYTE: regs 0-3 are the low bytes (AL..BL); regs 4-7 address the
       high byte (AH..BH) of regs 0-3 unless a REX prefix is active
       (x86_64_hregs) or reg >= 8, in which case the low byte is used. */
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        /* 32-bit writes zero the upper half, per x86-64 semantics */
        tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
411
#ifndef VBOX
static inline void gen_op_mov_reg_T0(int ot, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_T0(int ot, int reg)
#endif /* VBOX */
{
    /* reg = T0, size 'ot' */
    gen_op_mov_reg_v(ot, reg, cpu_T[0]);
}
420
#ifndef VBOX
static inline void gen_op_mov_reg_T1(int ot, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_T1(int ot, int reg)
#endif /* VBOX */
{
    /* reg = T1, size 'ot' */
    gen_op_mov_reg_v(ot, reg, cpu_T[1]);
}
429
#ifndef VBOX
static inline void gen_op_mov_reg_A0(int size, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_A0(int size, int reg)
#endif /* VBOX */
{
    /* Store the A0 address temp into guest register 'reg'.
       NB: 'size' here is 0=16-bit, 1=32-bit, 2=64-bit (ot - 1),
       unlike the OT_* codes used by gen_op_mov_reg_v(). */
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
459
#ifndef VBOX
static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_v_reg(int ot, TCGv t0, int reg)
#endif /* VBOX */
{
    /* Load guest register 'reg' into t0; only OT_BYTE needs special
       treatment (AH..BH addressing, see gen_op_mov_reg_v), every other
       size reads the whole target_ulong slot. */
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
#ifndef VBOX
            goto std_case;
#else
            /* NOTE(review): VBOX does an explicit zero-extended byte load
               instead of the full-width std_case load, leaving the
               std_case label unused in this configuration. */
            tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
#endif
        } else {
            tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
    std_case:
        tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}
484
#ifndef VBOX
static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_TN_reg(int ot, int t_index, int reg)
#endif /* VBOX */
{
    /* T[t_index] = reg, size 'ot' */
    gen_op_mov_v_reg(ot, cpu_T[t_index], reg);
}
493
#ifndef VBOX
static inline void gen_op_movl_A0_reg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_A0_reg(int reg)
#endif /* VBOX */
{
    /* A0 = low 32 bits of reg, zero-extended */
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}
502
#ifndef VBOX
static inline void gen_op_addl_A0_im(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_A0_im(int32_t val)
#endif /* VBOX */
{
    /* A0 += val, then wrap to 32 bits when target_long is 64 bits */
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
514
#ifdef TARGET_X86_64
#ifndef VBOX
static inline void gen_op_addq_A0_im(int64_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_addq_A0_im(int64_t val)
#endif /* VBOX */
{
    /* A0 += val, full 64-bit, no wrap (long mode addressing) */
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif
525
526static void gen_add_A0_im(DisasContext *s, int val)
527{
528#ifdef TARGET_X86_64
529 if (CODE64(s))
530 gen_op_addq_A0_im(val);
531 else
532#endif
533 gen_op_addl_A0_im(val);
534}
535
#ifndef VBOX
static inline void gen_op_addl_T0_T1(void)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_T0_T1(void)
#endif /* VBOX */
{
    /* T0 += T1 */
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}
544
#ifndef VBOX
static inline void gen_op_jmp_T0(void)
#else /* VBOX */
DECLINLINE(void) gen_op_jmp_T0(void)
#endif /* VBOX */
{
    /* env->eip = T0 (indirect jump target) */
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}
553
#ifndef VBOX
static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_add_reg_im(int size, int reg, int32_t val)
#endif /* VBOX */
{
    /* reg += val with size 0=16-bit (writes only the low word),
       1=32-bit (wraps; full slot written), 2=64-bit. */
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}
583
#ifndef VBOX
static inline void gen_op_add_reg_T0(int size, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_add_reg_T0(int size, int reg)
#endif /* VBOX */
{
    /* reg += T0; same size encoding and wrapping rules as
       gen_op_add_reg_im() above. */
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}
613
#ifndef VBOX
static inline void gen_op_set_cc_op(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_set_cc_op(int32_t val)
#endif /* VBOX */
{
    /* cc_op = val (records which operation last set the flags) */
    tcg_gen_movi_i32(cpu_cc_op, val);
}
622
#ifndef VBOX
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_A0_reg_sN(int shift, int reg)
#endif /* VBOX */
{
    /* A0 += reg << shift (SIB-style scaled index), then wrap to 32 bits
       when target_long is 64 bits. */
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
#ifdef VBOX
/* Hook called before every segment-based address computation; currently a
   no-op because segment registers have not been observed to get out of
   sync. The disabled code below would re-sync a stale (newselector != 0)
   segment via helper_sync_seg before it is used. */
DECLINLINE(void) gen_op_seg_check(int reg, bool keepA0)
{
    /* It seems segments doesn't get out of sync - if they do in fact - enable below code. */
#if 0
    /* Our segments could be outdated, thus check for newselector field to see if update really needed */
    int skip_label;
    TCGv t0, a0;

    /* For other segments this check is waste of time, and also TCG is unable to cope with this code,
       for data/stack segments, as expects alive cpu_T[0] */
    if (reg != R_GS)
        return;

    if (keepA0)
    {
        /* we need to store old cpu_A0 */
        a0 = tcg_temp_local_new(TCG_TYPE_TL);
        tcg_gen_mov_tl(a0, cpu_A0);
    }

    skip_label = gen_new_label();
    t0 = tcg_temp_local_new(TCG_TYPE_TL);

    tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, segs[reg].newselector) + REG_L_OFFSET);
    tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, skip_label);
    tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, eflags) + REG_L_OFFSET);
    tcg_gen_andi_tl(t0, t0, VM_MASK);
    tcg_gen_brcondi_i32(TCG_COND_NE, t0, 0, skip_label);
    tcg_gen_movi_tl(t0, reg);

    tcg_gen_helper_0_1(helper_sync_seg, t0);

    tcg_temp_free(t0);

    gen_set_label(skip_label);
    if (keepA0)
    {
        tcg_gen_mov_tl(cpu_A0, a0);
        tcg_temp_free(a0);
    }
#endif /* 0 */
}
#endif
681
#ifndef VBOX
static inline void gen_op_movl_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_A0_seg(int reg)
#endif /* VBOX */
{
#ifdef VBOX
    gen_op_seg_check(reg, false);
#endif
    /* A0 = low 32 bits of the segment base */
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}
693
#ifndef VBOX
static inline void gen_op_addl_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_A0_seg(int reg)
#endif /* VBOX */
{
#ifdef VBOX
    gen_op_seg_check(reg, true);
#endif
    /* A0 += segment base, wrapped to 32 bits on 64-bit targets */
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
709
#ifdef TARGET_X86_64
#ifndef VBOX
static inline void gen_op_movq_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movq_A0_seg(int reg)
#endif /* VBOX */
{
#ifdef VBOX
    gen_op_seg_check(reg, false);
#endif
    /* A0 = full 64-bit segment base (FS/GS in long mode) */
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
}
722
#ifndef VBOX
static inline void gen_op_addq_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addq_A0_seg(int reg)
#endif /* VBOX */
{
#ifdef VBOX
    gen_op_seg_check(reg, true);
#endif
    /* A0 += full 64-bit segment base, no wrap */
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
735
#ifndef VBOX
static inline void gen_op_movq_A0_reg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movq_A0_reg(int reg)
#endif /* VBOX */
{
    /* A0 = reg, full 64 bits */
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
}
744
#ifndef VBOX
static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addq_A0_reg_sN(int shift, int reg)
#endif /* VBOX */
{
    /* A0 += reg << shift (64-bit scaled index, no wrap) */
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
#endif
757
#ifndef VBOX
static inline void gen_op_lds_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_lds_T0_A0(int idx)
#endif /* VBOX */
{
    /* Sign-extending load T0 <- *A0. 'idx' packs the operand size in its
       low 2 bits and (mem_index + 1) in the remaining bits. */
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 2:
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}
778
#ifndef VBOX
static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0)
#else /* VBOX */
DECLINLINE(void) gen_op_ld_v(int idx, TCGv t0, TCGv a0)
#endif /* VBOX */
{
    /* Zero-extending load t0 <- *a0; idx encoding as in gen_op_lds_T0_A0
       (low 2 bits = size 8/16/32/64, high bits = mem_index + 1). */
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(t0, a0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(t0, a0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(t0, a0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(t0, a0, mem_index);
        break;
    }
}
802
/* XXX: always use ldu or lds */
#ifndef VBOX
static inline void gen_op_ld_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_ld_T0_A0(int idx)
#endif /* VBOX */
{
    /* T0 <- *A0 (zero-extending) */
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}
812
#ifndef VBOX
static inline void gen_op_ldu_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_ldu_T0_A0(int idx)
#endif /* VBOX */
{
    /* T0 <- *A0; identical to gen_op_ld_T0_A0 (loads are unsigned) */
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}
821
#ifndef VBOX
static inline void gen_op_ld_T1_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_ld_T1_A0(int idx)
#endif /* VBOX */
{
    /* T1 <- *A0 (zero-extending) */
    gen_op_ld_v(idx, cpu_T[1], cpu_A0);
}
830
#ifndef VBOX
static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0)
#else /* VBOX */
DECLINLINE(void) gen_op_st_v(int idx, TCGv t0, TCGv a0)
#endif /* VBOX */
{
    /* Store *a0 <- t0; idx encoding as in gen_op_ld_v. */
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(t0, a0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(t0, a0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(t0, a0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(t0, a0, mem_index);
        break;
    }
}
854
#ifndef VBOX
static inline void gen_op_st_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_st_T0_A0(int idx)
#endif /* VBOX */
{
    /* *A0 <- T0 */
    gen_op_st_v(idx, cpu_T[0], cpu_A0);
}
863
#ifndef VBOX
static inline void gen_op_st_T1_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_st_T1_A0(int idx)
#endif /* VBOX */
{
    /* *A0 <- T1 */
    gen_op_st_v(idx, cpu_T[1], cpu_A0);
}
872
#ifdef VBOX
/* Emit code that tests env->interrupt_request for pending external events
 * (forced exits, timers, DMA, hard interrupts) and calls
 * helper_check_external_event() only when at least one bit is set.
 * Fix: use a proper (void) prototype instead of the old-style empty
 * parameter list, which declares an unspecified-parameter function. */
static void gen_check_external_event(void)
{
    int skip_label;
    TCGv t0;

    skip_label = gen_new_label();
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    /* t0 = cpu_tmp0; */

    tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, interrupt_request));
    /* Keep in sync with helper_check_external_event() */
    tcg_gen_andi_tl(t0, t0,
                    CPU_INTERRUPT_EXTERNAL_EXIT
                    | CPU_INTERRUPT_EXTERNAL_TIMER
                    | CPU_INTERRUPT_EXTERNAL_DMA
                    | CPU_INTERRUPT_EXTERNAL_HARD);
    /** @todo: predict branch as taken */
    tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, skip_label);
    tcg_temp_free(t0);

    tcg_gen_helper_0_0(helper_check_external_event);

    gen_set_label(skip_label);
}

/* Unconditional variant: always emits the helper call, skipping the
 * inline interrupt_request pre-check above. */
static void gen_check_external_event2(void)
{
    tcg_gen_helper_0_0(helper_check_external_event);
}

#endif
905
#ifndef VBOX
static inline void gen_jmp_im(target_ulong pc)
#else /* VBOX */
DECLINLINE(void) gen_jmp_im(target_ulong pc)
#endif /* VBOX */
{
    /* env->eip = pc (sync the architectural eip with the translator pc) */
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}
915
#ifdef VBOX
/* VBox wrapper around gen_jmp_im(); with VBOX_DUMP_STATE it also emits a
   state-dump helper call after updating eip (debug aid). */
DECLINLINE(void) gen_update_eip(target_ulong pc)
{
    gen_jmp_im(pc);
#ifdef VBOX_DUMP_STATE
    tcg_gen_helper_0_0(helper_dump_state);
#endif
}

#endif
926
#ifndef VBOX
static inline void gen_string_movl_A0_ESI(DisasContext *s)
#else /* VBOX */
DECLINLINE(void) gen_string_movl_A0_ESI(DisasContext *s)
#endif /* VBOX */
{
    int override;

    /* Compute A0 = string source address: segment base + (R/E)SI,
       honouring segment override prefixes (default segment is DS). */
    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64-bit address size: only add a base when explicitly overridden */
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}
965
#ifndef VBOX
static inline void gen_string_movl_A0_EDI(DisasContext *s)
#else /* VBOX */
DECLINLINE(void) gen_string_movl_A0_EDI(DisasContext *s)
#endif /* VBOX */
{
    /* Compute A0 = string destination address: ES base + (R/E)DI.
       The destination segment of string ops is always ES and cannot
       be overridden. */
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}
990
991#ifndef VBOX
992static inline void gen_op_movl_T0_Dshift(int ot)
993#else /* VBOX */
994DECLINLINE(void) gen_op_movl_T0_Dshift(int ot)
995#endif /* VBOX */
996{
997 tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
998 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
999};
1000
1001static void gen_extu(int ot, TCGv reg)
1002{
1003 switch(ot) {
1004 case OT_BYTE:
1005 tcg_gen_ext8u_tl(reg, reg);
1006 break;
1007 case OT_WORD:
1008 tcg_gen_ext16u_tl(reg, reg);
1009 break;
1010 case OT_LONG:
1011 tcg_gen_ext32u_tl(reg, reg);
1012 break;
1013 default:
1014 break;
1015 }
1016}
1017
1018static void gen_exts(int ot, TCGv reg)
1019{
1020 switch(ot) {
1021 case OT_BYTE:
1022 tcg_gen_ext8s_tl(reg, reg);
1023 break;
1024 case OT_WORD:
1025 tcg_gen_ext16s_tl(reg, reg);
1026 break;
1027 case OT_LONG:
1028 tcg_gen_ext32s_tl(reg, reg);
1029 break;
1030 default:
1031 break;
1032 }
1033}
1034
#ifndef VBOX
static inline void gen_op_jnz_ecx(int size, int label1)
#else /* VBOX */
DECLINLINE(void) gen_op_jnz_ecx(int size, int label1)
#endif /* VBOX */
{
    /* Branch to label1 if (R/E)CX != 0, masked to the address size
       ('size' uses the 0=16/1=32/2=64 encoding, hence size + 1 = OT). */
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
}
1045
#ifndef VBOX
static inline void gen_op_jz_ecx(int size, int label1)
#else /* VBOX */
DECLINLINE(void) gen_op_jz_ecx(int size, int label1)
#endif /* VBOX */
{
    /* Branch to label1 if (R/E)CX == 0, masked to the address size */
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
}
1056
/* Helper dispatch tables indexed by operand size (0=byte, 1=word, 2=long).
   NOTE(review): storing function pointers in void* is not strictly
   conforming ISO C, but is relied upon throughout this QEMU-derived code. */
static void *helper_in_func[3] = {
    helper_inb,
    helper_inw,
    helper_inl,
};

static void *helper_out_func[3] = {
    helper_outb,
    helper_outw,
    helper_outl,
};

/* TSS I/O permission bitmap checks, per access size */
static void *gen_check_io_func[3] = {
    helper_check_iob,
    helper_check_iow,
    helper_check_iol,
};
1074
/* Emit the permission checks required before an I/O instruction:
   - the TSS I/O bitmap check when in protected mode with CPL > IOPL,
     or in vm86 mode (may raise #GP from the helper);
   - the SVM I/O intercept check when running under SVM (HF_SVMI_MASK).
   cc_op and eip are synced to env at most once before any helper call,
   since the helpers can raise exceptions. 'cur_eip' is the eip of the
   I/O instruction itself; T[0] holds the port number. */
static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
                         uint32_t svm_flags)
{
    int state_saved;
    target_ulong next_eip;

    state_saved = 0;
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        state_saved = 1;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_1(gen_check_io_func[ot],
                           cpu_tmp2_i32);
    }
    if(s->flags & HF_SVMI_MASK) {
        if (!state_saved) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(cur_eip);
            state_saved = 1;
        }
        /* encode the access size into the SVM exit info flags */
        svm_flags |= (1 << (4 + ot));
        next_eip = s->pc - s->cs_base;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_3(helper_svm_check_io,
                           cpu_tmp2_i32,
                           tcg_const_i32(svm_flags),
                           tcg_const_i32(next_eip - cur_eip));
    }
}
1107
#ifndef VBOX
static inline void gen_movs(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_movs(DisasContext *s, int ot)
#endif /* VBOX */
{
    /* One MOVS iteration: load from seg:(R/E)SI, store to ES:(R/E)DI,
       then advance both index registers by +/-(1 << ot) per DF. */
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
1122
#ifndef VBOX
static inline void gen_update_cc_op(DisasContext *s)
#else /* VBOX */
DECLINLINE(void) gen_update_cc_op(DisasContext *s)
#endif /* VBOX */
{
    /* Flush the statically-known cc_op into the env and mark it dynamic,
       so subsequently emitted code computes flags through cc_table. */
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}
1134
/* Record a one-operand flag result: cc_dst = T0, cc_src is unused */
static void gen_op_update1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
1140
/* Record a two-operand flag result: cc_src = T1, cc_dst = T0 */
static void gen_op_update2_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
1146
#ifndef VBOX
static inline void gen_op_cmpl_T0_T1_cc(void)
#else /* VBOX */
DECLINLINE(void) gen_op_cmpl_T0_T1_cc(void)
#endif /* VBOX */
{
    /* CMP flags: cc_src = T1 (subtrahend), cc_dst = T0 - T1 */
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}
1156
#ifndef VBOX
static inline void gen_op_testl_T0_T1_cc(void)
#else /* VBOX */
DECLINLINE(void) gen_op_testl_T0_T1_cc(void)
#endif /* VBOX */
{
    /* TEST flags: cc_dst = T0 & T1, cc_src unused */
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}
1166
/* NEG flags: cc_src = -T0 (the original operand), cc_dst = T0 (result) */
static void gen_op_update_neg_cc(void)
{
    tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
1172
/* compute eflags.C to reg */
static void gen_compute_eflags_c(TCGv reg)
{
    /* Indirect TCG call through cc_table[cc_op].compute_c. The cc_op index
       is scaled by the table stride (shift 3 on 32-bit hosts, 4 on 64-bit —
       presumably sizeof(CCTable) differs with pointer size; confirm against
       CCTable's definition) and offset to the compute_c member, then the
       fetched function pointer is called with no arguments, its 32-bit
       result landing in cpu_tmp2_i32. */
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}
1194
/* compute all eflags into reg (via cc_table[cc_op].compute_all) */
static void gen_compute_eflags(TCGv reg)
{
    /* Same indirect-call scheme as gen_compute_eflags_c() above, but
       through the compute_all member, which yields the full eflags set. */
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}
1216
/* Slow path for setcc/jcc: compute condition 'jcc_op' into T0 (0 or 1) by
   materializing the full EFLAGS value and extracting bits.  Bit positions
   used below: CF = bit 0, PF = bit 2, ZF = bit 6, SF = bit 7, OF = bit 11. */
#ifndef VBOX
static inline void gen_setcc_slow_T0(DisasContext *s, int jcc_op)
#else /* VBOX */
DECLINLINE(void) gen_setcc_slow_T0(DisasContext *s, int jcc_op)
#endif /* VBOX */
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    switch(jcc_op) {
    case JCC_O:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 11);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_B:
        gen_compute_eflags_c(cpu_T[0]);
        break;
    case JCC_Z:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 6);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_BE:
        /* CF | ZF: shift ZF down to bit 0 and OR with the unshifted CF */
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 6);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_S:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 7);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_P:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 2);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_L:
        /* SF ^ OF */
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 7); /* CC_S */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    default:
    case JCC_LE:
        /* (SF ^ OF) | ZF */
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp4, cpu_tmp0, 7); /* CC_S */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 6); /* CC_Z */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    }
}
1274
1275/* return true if setcc_slow is not needed (WARNING: must be kept in
1276 sync with gen_jcc1) */
1277static int is_fast_jcc_case(DisasContext *s, int b)
1278{
1279 int jcc_op;
1280 jcc_op = (b >> 1) & 7;
1281 switch(s->cc_op) {
1282 /* we optimize the cmp/jcc case */
1283 case CC_OP_SUBB:
1284 case CC_OP_SUBW:
1285 case CC_OP_SUBL:
1286 case CC_OP_SUBQ:
1287 if (jcc_op == JCC_O || jcc_op == JCC_P)
1288 goto slow_jcc;
1289 break;
1290
1291 /* some jumps are easy to compute */
1292 case CC_OP_ADDB:
1293 case CC_OP_ADDW:
1294 case CC_OP_ADDL:
1295 case CC_OP_ADDQ:
1296
1297 case CC_OP_LOGICB:
1298 case CC_OP_LOGICW:
1299 case CC_OP_LOGICL:
1300 case CC_OP_LOGICQ:
1301
1302 case CC_OP_INCB:
1303 case CC_OP_INCW:
1304 case CC_OP_INCL:
1305 case CC_OP_INCQ:
1306
1307 case CC_OP_DECB:
1308 case CC_OP_DECW:
1309 case CC_OP_DECL:
1310 case CC_OP_DECQ:
1311
1312 case CC_OP_SHLB:
1313 case CC_OP_SHLW:
1314 case CC_OP_SHLL:
1315 case CC_OP_SHLQ:
1316 if (jcc_op != JCC_Z && jcc_op != JCC_S)
1317 goto slow_jcc;
1318 break;
1319 default:
1320 slow_jcc:
1321 return 0;
1322 }
1323 return 1;
1324}
1325
/* generate a conditional jump to label 'l1' according to jump opcode
   value 'b'. In the fast case, T0 is guaranteed not to be used.
   'inv' (bit 0 of b) inverts the condition; 'size' selects the operand
   width used to mask/extend CC_DST and CC_SRC. */
#ifndef VBOX
static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
#else /* VBOX */
DECLINLINE(void) gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
#endif /* VBOX */
{
    int inv, jcc_op, size, cond;
    TCGv t0;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    switch(cc_op) {
    /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:

        size = cc_op - CC_OP_SUBB;
        switch(jcc_op) {
        case JCC_Z:
        fast_jcc_z:
            /* ZF test: result (CC_DST), masked to operand width, == 0 */
            switch(size) {
            case 0:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xff);
                t0 = cpu_tmp0;
                break;
            case 1:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffff);
                t0 = cpu_tmp0;
                break;
#ifdef TARGET_X86_64
            case 2:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffffffff);
                t0 = cpu_tmp0;
                break;
#endif
            default:
                t0 = cpu_cc_dst;
                break;
            }
            tcg_gen_brcondi_tl(inv ? TCG_COND_NE : TCG_COND_EQ, t0, 0, l1);
            break;
        case JCC_S:
        fast_jcc_s:
            /* SF test: sign bit of the result at the operand width */
            switch(size) {
            case 0:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
            case 1:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x8000);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
#ifdef TARGET_X86_64
            case 2:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80000000);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
#endif
            default:
                tcg_gen_brcondi_tl(inv ? TCG_COND_GE : TCG_COND_LT, cpu_cc_dst,
                                   0, l1);
                break;
            }
            break;

        case JCC_B:
            cond = inv ? TCG_COND_GEU : TCG_COND_LTU;
            goto fast_jcc_b;
        case JCC_BE:
            cond = inv ? TCG_COND_GTU : TCG_COND_LEU;
        fast_jcc_b:
            /* unsigned compare: reconstruct the SUB's first operand as
               CC_DST + CC_SRC, then compare it against CC_SRC at the
               operand width */
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
            switch(size) {
            case 0:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xff);
                break;
            case 1:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffff);
                break;
#ifdef TARGET_X86_64
            case 2:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffffffff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffffffff);
                break;
#endif
            default:
                t0 = cpu_cc_src;
                break;
            }
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
            break;

        case JCC_L:
            cond = inv ? TCG_COND_GE : TCG_COND_LT;
            goto fast_jcc_l;
        case JCC_LE:
            cond = inv ? TCG_COND_GT : TCG_COND_LE;
        fast_jcc_l:
            /* signed compare: same reconstruction, but sign-extend both
               sides to the operand width */
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
            switch(size) {
            case 0:
                t0 = cpu_tmp0;
                tcg_gen_ext8s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext8s_tl(t0, cpu_cc_src);
                break;
            case 1:
                t0 = cpu_tmp0;
                tcg_gen_ext16s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext16s_tl(t0, cpu_cc_src);
                break;
#ifdef TARGET_X86_64
            case 2:
                t0 = cpu_tmp0;
                tcg_gen_ext32s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext32s_tl(t0, cpu_cc_src);
                break;
#endif
            default:
                t0 = cpu_cc_src;
                break;
            }
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
            break;

        default:
            goto slow_jcc;
        }
        break;

    /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_ADCB:
    case CC_OP_ADCW:
    case CC_OP_ADCL:
    case CC_OP_ADCQ:

    case CC_OP_SBBB:
    case CC_OP_SBBW:
    case CC_OP_SBBL:
    case CC_OP_SBBQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:

    case CC_OP_SARB:
    case CC_OP_SARW:
    case CC_OP_SARL:
    case CC_OP_SARQ:
        /* only ZF/SF are taken straight from CC_DST for these modes */
        switch(jcc_op) {
        case JCC_Z:
            size = (cc_op - CC_OP_ADDB) & 3;
            goto fast_jcc_z;
        case JCC_S:
            size = (cc_op - CC_OP_ADDB) & 3;
            goto fast_jcc_s;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        /* generic path: compute the condition into T0 and branch on it */
        gen_setcc_slow_T0(s, jcc_op);
        tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE,
                           cpu_T[0], 0, l1);
        break;
    }
}
1527
/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
/* Emit "if ECX == 0 jump past the string op to next_eip".  Returns the
   label (l2) placed just before the jump to next_eip, so callers can
   branch there to terminate a rep loop. */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    /* ECX != 0: skip over the exit jump */
    gen_op_jnz_ecx(s->aflag, l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}
1542
/* STOS: store AL/AX/EAX/RAX at ES:[EDI], then advance EDI by the
   direction-flag-dependent shift for operand size 'ot'. */
#ifndef VBOX
static inline void gen_stos(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_stos(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
1555
/* LODS: load from DS:[ESI] into AL/AX/EAX/RAX, then advance ESI. */
#ifndef VBOX
static inline void gen_lods(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_lods(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
}
1568
/* SCAS: compare accumulator against ES:[EDI] (sets flags via the CMP
   helper), then advance EDI. */
#ifndef VBOX
static inline void gen_scas(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_scas(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
1582
/* CMPS: compare DS:[ESI] against ES:[EDI] (sets flags via the CMP
   helper), then advance both ESI and EDI. */
#ifndef VBOX
static inline void gen_cmps(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_cmps(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
1598
/* INS: read from I/O port DX into ES:[EDI], then advance EDI.  Wrapped in
   gen_io_start/gen_io_end when instruction counting is enabled. */
#ifndef VBOX
static inline void gen_ins(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_ins(DisasContext *s, int ot)
#endif /* VBOX */
{
    if (use_icount)
        gen_io_start();
    gen_string_movl_A0_EDI(s);
    /* Note: we must do this dummy write first to be restartable in
       case of page fault. */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    /* port number = DX & 0xffff */
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
    if (use_icount)
        gen_io_end();
}
1622
/* OUTS: write DS:[ESI] to I/O port DX, then advance ESI.  Wrapped in
   gen_io_start/gen_io_end when instruction counting is enabled. */
#ifndef VBOX
static inline void gen_outs(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_outs(DisasContext *s, int ot)
#endif /* VBOX */
{
    if (use_icount)
        gen_io_start();
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);

    /* port number = DX & 0xffff */
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
    tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);

    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    if (use_icount)
        gen_io_end();
}
1645
/* same method as Valgrind : we generate jumps to current or next
   instruction */
/* GEN_REPZ(op) defines gen_repz_<op>: emit one iteration of a REP-prefixed
   string instruction -- exit if ECX == 0, run the op, decrement ECX, and
   jump back to cur_eip to re-execute the rep instruction (or directly loop
   to the l2 exit label when single-stepping concerns don't apply). */
#ifndef VBOX
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
#else /* VBOX */
#define GEN_REPZ(op)                                                          \
DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot,                     \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
#endif /* VBOX */
1681
/* GEN_REPZ2(op) is GEN_REPZ for the flag-testing string ops (SCAS/CMPS):
   after the op it also exits the loop through l2 when the ZF condition
   selected by 'nz' (REPZ vs REPNZ) no longer holds. */
#ifndef VBOX
#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2);                \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
#else /* VBOX */
#define GEN_REPZ2(op)                                                         \
DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot,                     \
                                 target_ulong cur_eip,                        \
                                 target_ulong next_eip,                       \
                                 int nz)                                      \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2);                \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
#endif /* VBOX */
1719
/* Instantiate the gen_repz_<op> wrappers; SCAS/CMPS use the flag-testing
   variant with the extra 'nz' parameter. */
GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)
1727
/* FPU arithmetic helpers (ST0 <- ST0 op FT0), indexed by the 3-bit FP
   opcode field: add, mul, com, comp, sub, subr, div, divr. */
static void *helper_fp_arith_ST0_FT0[8] = {
    helper_fadd_ST0_FT0,
    helper_fmul_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fsub_ST0_FT0,
    helper_fsubr_ST0_FT0,
    helper_fdiv_ST0_FT0,
    helper_fdivr_ST0_FT0,
};
1738
/* NOTE the exception in "r" op ordering */
/* FPU arithmetic helpers (STN <- STN op ST0); the sub/div slots hold the
   reversed helpers because the direction flips for this operand form.
   Slots 2 and 3 (com/comp) have no STN form. */
static void *helper_fp_arith_STN_ST0[8] = {
    helper_fadd_STN_ST0,
    helper_fmul_STN_ST0,
    NULL,
    NULL,
    helper_fsubr_STN_ST0,
    helper_fsub_STN_ST0,
    helper_fdivr_STN_ST0,
    helper_fdiv_STN_ST0,
};
1750
/* Emit one two-operand ALU op 'op' of size 'ot' with destination 'd'
   (register index, or OR_TMP0 meaning memory at A0) and source T1,
   updating s1->cc_op for the lazily-evaluated flags.
   if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    /* load the destination operand into T0 */
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg(ot, 0, d);
    } else {
        gen_op_ld_T0_A0(ot + s1->mem_index);
    }
    switch(op) {
    case OP_ADCL:
        /* add with carry: materialize the incoming CF into tmp4 first */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        /* cc_op = CC_OP_ADDB + ot + (carry << 2): presumably selects the
           ADC variant when CF was set (relies on the CC_OP enum layout --
           TODO confirm CC_OP_ADCB == CC_OP_ADDB + 4) */
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_SBBL:
        /* subtract with borrow: same CF-dependent scheme as ADC */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_ADDB + ot;
        break;
    case OP_SUBL:
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    default:
    case OP_ANDL:
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_ORL:
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_XORL:
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_CMPL:
        /* CMP only sets flags, no result is written back */
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    }
}
1846
/* Emit INC (c > 0) or DEC (c <= 0) of size 'ot' on destination 'd'.
   The previous CF is computed into CC_SRC beforehand so the INC/DEC
   cc_op modes can preserve it.
   if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    else
        gen_op_st_T0_A0(ot + s1->mem_index);
    /* save the pre-existing CF in CC_SRC, result in CC_DST */
    gen_compute_eflags_c(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
1870
/* Emit a variable-count shift (SHL/SHR/SAR selected by is_right/is_arith)
   of size 'ot' on operand 'op1' (register, or OR_TMP0 for memory at A0)
   by the count in T1.  Flags are only updated when the masked count is
   non-zero, so a runtime branch around the cc update is generated. */
static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
                            int is_right, int is_arith)
{
    target_ulong mask;
    int shift_label;
    TCGv t0, t1;

    /* hardware masks the count to 6 bits for 64-bit ops, 5 otherwise */
    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);

    /* T3 = value shifted by (count - 1): its low/high bit is the CF
       candidate consumed by the SHIFT cc_op modes */
    if (is_right) {
        if (is_arith) {
            gen_exts(ot, cpu_T[0]);
            tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        } else {
            gen_extu(ot, cpu_T[0]);
            tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* XXX: inefficient */
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);

    tcg_gen_mov_tl(t0, cpu_T[0]);
    tcg_gen_mov_tl(t1, cpu_T3);

    shift_label = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_T[1], 0, shift_label);

    tcg_gen_mov_tl(cpu_cc_src, t1);
    tcg_gen_mov_tl(cpu_cc_dst, t0);
    if (is_right)
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    else
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);

    gen_set_label(shift_label);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
1941
/* Emit an immediate-count shift (SHL/SHR/SAR selected by is_right/is_arith)
   of size 'ot' on operand 'op1' by constant 'op2'.  Since the count is
   known at translation time, the zero-count case needs no runtime branch:
   flags and cc_op are simply left untouched when (op2 & mask) == 0. */
static void gen_shift_rm_im(DisasContext *s, int ot, int op1, int op2,
                            int is_right, int is_arith)
{
    int mask;

    /* hardware masks the count to 6 bits for 64-bit ops, 5 otherwise */
    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    op2 &= mask;
    if (op2 != 0) {
        /* tmp4 = value shifted by (count - 1): CF candidate for the
           SHIFT cc_op modes */
        if (is_right) {
            if (is_arith) {
                gen_exts(ot, cpu_T[0]);
                tcg_gen_sari_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_sari_tl(cpu_T[0], cpu_T[0], op2);
            } else {
                gen_extu(ot, cpu_T[0]);
                tcg_gen_shri_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], op2);
            }
        } else {
            tcg_gen_shli_tl(cpu_tmp4, cpu_T[0], op2 - 1);
            tcg_gen_shli_tl(cpu_T[0], cpu_T[0], op2);
        }
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (op2 != 0) {
        tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        if (is_right)
            s->cc_op = CC_OP_SARB + ot;
        else
            s->cc_op = CC_OP_SHLB + ot;
    }
}
1992
1993#ifndef VBOX
1994static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
1995#else /* VBOX */
1996DECLINLINE(void) tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
1997#endif /* VBOX */
1998{
1999 if (arg2 >= 0)
2000 tcg_gen_shli_tl(ret, arg1, arg2);
2001 else
2002 tcg_gen_shri_tl(ret, arg1, -arg2);
2003}
2004
/* XXX: add faster immediate case */
/* Emit a variable-count rotate (ROL/ROR selected by is_right) of size 'ot'
   on operand 'op1' (register, or OR_TMP0 for memory at A0) by the count
   in T1.  Implemented as two opposing shifts OR-ed together; both the
   rotate itself and the flag update are skipped at runtime when the masked
   count is zero (rotates leave flags unchanged in that case). */
static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
                          int is_right)
{
    target_ulong mask;
    int label1, label2, data_bits;
    TCGv t0, t1, t2, a0;

    /* XXX: inefficient, but we must use local temps */
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);
    t2 = tcg_temp_local_new(TCG_TYPE_TL);
    a0 = tcg_temp_local_new(TCG_TYPE_TL);

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0) {
        tcg_gen_mov_tl(a0, cpu_A0);
        gen_op_ld_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_v_reg(ot, t0, op1);
    }

    tcg_gen_mov_tl(t1, cpu_T[1]);

    tcg_gen_andi_tl(t1, t1, mask);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label1);

    /* effective rotate count: reduce modulo the data width for sub-long
       operands */
    if (ot <= OT_WORD)
        tcg_gen_andi_tl(cpu_tmp0, t1, (1 << (3 + ot)) - 1);
    else
        tcg_gen_mov_tl(cpu_tmp0, t1);

    gen_extu(ot, t0);
    tcg_gen_mov_tl(t2, t0);   /* t2 = old value, for the OF computation */

    data_bits = 8 << ot;
    /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
       fix TCG definition) */
    if (is_right) {
        tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shl_tl(t0, t0, cpu_tmp0);
    } else {
        tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shr_tl(t0, t0, cpu_tmp0);
    }
    tcg_gen_or_tl(t0, t0, cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0) {
        gen_op_st_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_reg_v(ot, op1, t0);
    }

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label2);

    /* CF = result bit 0 (ROR) or MSB (ROL); OF = old-top XOR new-top,
       obtained by shifting the XOR of old and new values into bit 11 */
    gen_compute_eflags(cpu_cc_src);
    tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
    tcg_gen_xor_tl(cpu_tmp0, t2, t0);
    tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
    if (is_right) {
        tcg_gen_shri_tl(t0, t0, data_bits - 1);
    }
    tcg_gen_andi_tl(t0, t0, CC_C);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);

    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(a0);
}
2101
/* Rotate-through-carry helpers, indexed by ot + 4*is_right:
   slots 0-3 are RCL (b/w/l/q), slots 4-7 are RCR.  The 64-bit entries
   are NULL on 32-bit targets (X86_64_ONLY). */
static void *helper_rotc[8] = {
    helper_rclb,
    helper_rclw,
    helper_rcll,
    X86_64_ONLY(helper_rclq),
    helper_rcrb,
    helper_rcrw,
    helper_rcrl,
    X86_64_ONLY(helper_rcrq),
};
2112
/* XXX: add faster immediate = 1 case */
/* Emit RCL/RCR (rotate through carry) of size 'ot' on operand 'op1' with
   the count in T1, via the helper_rotc table.  The helper signals an
   unchanged-flags (zero count) case through cpu_cc_tmp == -1, which the
   generated branch uses to skip the flag update -- presumably the helper
   stores the new flags in cpu_cc_tmp otherwise (TODO confirm in helper). */
static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
                           int is_right)
{
    int label1;

    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
                       cpu_T[0], cpu_T[0], cpu_T[1]);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_tmp, -1, label1);

    tcg_gen_mov_tl(cpu_cc_src, cpu_cc_tmp);
    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label1);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
2147
/* XXX: add faster immediate case */
/* Emit SHLD/SHRD (double-precision shift, selected by is_right) of size
   'ot' on operand 'op1': the destination is shifted by the count in T3,
   with bits shifted in from T1.  Flags are updated only when the masked
   count is non-zero (runtime branch). */
static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
                                int is_right)
{
    int label1, label2, data_bits;
    target_ulong mask;
    TCGv t0, t1, t2, a0;

    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);
    t2 = tcg_temp_local_new(TCG_TYPE_TL);
    a0 = tcg_temp_local_new(TCG_TYPE_TL);

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0) {
        tcg_gen_mov_tl(a0, cpu_A0);
        gen_op_ld_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_v_reg(ot, t0, op1);
    }

    tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);

    tcg_gen_mov_tl(t1, cpu_T[1]);
    tcg_gen_mov_tl(t2, cpu_T3);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);

    /* tmp5 = count - 1, used to extract the CF candidate below */
    tcg_gen_addi_tl(cpu_tmp5, t2, -1);
    if (ot == OT_WORD) {
        /* Note: we implement the Intel behaviour for shift count > 16 */
        if (is_right) {
            /* build a 32-bit value (T1:T0) and shift it as a unit */
            tcg_gen_andi_tl(t0, t0, 0xffff);
            tcg_gen_shli_tl(cpu_tmp0, t1, 16);
            tcg_gen_or_tl(t0, t0, cpu_tmp0);
            tcg_gen_ext32u_tl(t0, t0);

            tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);

            /* only needed if count > 16, but a test would complicate */
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
            tcg_gen_shl_tl(cpu_tmp0, t0, cpu_tmp5);

            tcg_gen_shr_tl(t0, t0, t2);

            tcg_gen_or_tl(t0, t0, cpu_tmp0);
        } else {
            /* XXX: not optimal */
            tcg_gen_andi_tl(t0, t0, 0xffff);
            tcg_gen_shli_tl(t1, t1, 16);
            tcg_gen_or_tl(t1, t1, t0);
            tcg_gen_ext32u_tl(t1, t1);

            tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
            tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
            tcg_gen_shr_tl(cpu_tmp6, t1, cpu_tmp0);
            tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);

            tcg_gen_shl_tl(t0, t0, t2);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
            tcg_gen_shr_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);
        }
    } else {
        data_bits = 8 << ot;
        if (is_right) {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(t0, t0);

            tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);

            tcg_gen_shr_tl(t0, t0, t2);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
            tcg_gen_shl_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);

        } else {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(t1, t1);

            tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);

            tcg_gen_shl_tl(t0, t0, t2);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
            tcg_gen_shr_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);
        }
    }
    /* t1 = CF candidate (value shifted by count - 1) */
    tcg_gen_mov_tl(t1, cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0) {
        gen_op_st_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_reg_v(ot, op1, t0);
    }

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label2);

    tcg_gen_mov_tl(cpu_cc_src, t1);
    tcg_gen_mov_tl(cpu_cc_dst, t0);
    if (is_right) {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    } else {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
    }
    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(a0);
}
2276
2277static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
2278{
2279 if (s != OR_TMP1)
2280 gen_op_mov_TN_reg(ot, 1, s);
2281 switch(op) {
2282 case OP_ROL:
2283 gen_rot_rm_T1(s1, ot, d, 0);
2284 break;
2285 case OP_ROR:
2286 gen_rot_rm_T1(s1, ot, d, 1);
2287 break;
2288 case OP_SHL:
2289 case OP_SHL1:
2290 gen_shift_rm_T1(s1, ot, d, 0, 0);
2291 break;
2292 case OP_SHR:
2293 gen_shift_rm_T1(s1, ot, d, 1, 0);
2294 break;
2295 case OP_SAR:
2296 gen_shift_rm_T1(s1, ot, d, 1, 1);
2297 break;
2298 case OP_RCL:
2299 gen_rotc_rm_T1(s1, ot, d, 0);
2300 break;
2301 case OP_RCR:
2302 gen_rotc_rm_T1(s1, ot, d, 1);
2303 break;
2304 }
2305}
2306
2307static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
2308{
2309 switch(op) {
2310 case OP_SHL:
2311 case OP_SHL1:
2312 gen_shift_rm_im(s1, ot, d, c, 0, 0);
2313 break;
2314 case OP_SHR:
2315 gen_shift_rm_im(s1, ot, d, c, 1, 0);
2316 break;
2317 case OP_SAR:
2318 gen_shift_rm_im(s1, ot, d, c, 1, 1);
2319 break;
2320 default:
2321 /* currently not optimized */
2322 gen_op_movl_T1_im(c);
2323 gen_shift(s1, op, ot, d, OR_TMP1);
2324 break;
2325 }
2326}
2327
/* Decode the memory operand of a ModR/M byte and emit code leaving the
   effective address in A0.  Handles 16-bit and 32/64-bit addressing forms,
   SIB bytes, all displacement sizes, segment-override/default-segment
   addition, and RIP-relative addressing in 64-bit mode.  Always returns
   *reg_ptr = OR_A0 and *offset_ptr = 0 (the displacement is folded into
   the generated address). */
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    override = s->override;
    must_add_seg = s->addseg;
    /* an explicit segment prefix always forces the segment base addition */
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64-bit addressing */
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        /* base == 4 signals a SIB byte */
        if (base == 4) {
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                /* no base register: disp32 only */
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    /* 64-bit mode without SIB: RIP-relative addressing */
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
#ifdef VBOX
            /* sign extend 32-bit displacements to 64 bits */
            disp = (int32_t)ldl_code(s->pc);
#else
            disp = ldl_code(s->pc);
#endif
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(base);
                if (disp != 0) {
                    gen_op_addq_A0_im(disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg(base);
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN(scale, index);
            } else
#endif
            {
                gen_op_addl_A0_reg_sN(scale, index);
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                /* default segment: SS for EBP/ESP-based addresses, else DS */
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(override);
            } else
#endif
            {
                gen_op_addl_A0_seg(override);
            }
        }
    } else {
        /* 16-bit addressing */
        switch (mod) {
        case 0:
            if (rm == 6) {
                /* disp16 only, no base register */
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        /* the 8 fixed 16-bit base/index register combinations */
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 1:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 2:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 3:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 4:
            gen_op_movl_A0_reg(R_ESI);
            break;
        case 5:
            gen_op_movl_A0_reg(R_EDI);
            break;
        case 6:
            gen_op_movl_A0_reg(R_EBP);
            break;
        default:
        case 7:
            gen_op_movl_A0_reg(R_EBX);
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        /* wrap the 16-bit effective address */
        gen_op_andl_A0_ffff();
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                /* default segment: SS for BP-based forms, else DS */
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(override);
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
2515
2516static void gen_nop_modrm(DisasContext *s, int modrm)
2517{
2518 int mod, rm, base, code;
2519
2520 mod = (modrm >> 6) & 3;
2521 if (mod == 3)
2522 return;
2523 rm = modrm & 7;
2524
2525 if (s->aflag) {
2526
2527 base = rm;
2528
2529 if (base == 4) {
2530 code = ldub_code(s->pc++);
2531 base = (code & 7);
2532 }
2533
2534 switch (mod) {
2535 case 0:
2536 if (base == 5) {
2537 s->pc += 4;
2538 }
2539 break;
2540 case 1:
2541 s->pc++;
2542 break;
2543 default:
2544 case 2:
2545 s->pc += 4;
2546 break;
2547 }
2548 } else {
2549 switch (mod) {
2550 case 0:
2551 if (rm == 6) {
2552 s->pc += 2;
2553 }
2554 break;
2555 case 1:
2556 s->pc++;
2557 break;
2558 default:
2559 case 2:
2560 s->pc += 2;
2561 break;
2562 }
2563 }
2564}
2565
2566/* used for LEA and MOV AX, mem */
2567static void gen_add_A0_ds_seg(DisasContext *s)
2568{
2569 int override, must_add_seg;
2570 must_add_seg = s->addseg;
2571 override = R_DS;
2572 if (s->override >= 0) {
2573 override = s->override;
2574 must_add_seg = 1;
2575 } else {
2576 override = R_DS;
2577 }
2578 if (must_add_seg) {
2579#ifdef TARGET_X86_64
2580 if (CODE64(s)) {
2581 gen_op_addq_A0_seg(override);
2582 } else
2583#endif
2584 {
2585 gen_op_addl_A0_seg(override);
2586 }
2587 }
2588}
2589
2590/* generate modrm memory load or store of 'reg'. TMP0 is used if reg ==
2591 OR_TMP0 */
2592static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
2593{
2594 int mod, rm, opreg, disp;
2595
2596 mod = (modrm >> 6) & 3;
2597 rm = (modrm & 7) | REX_B(s);
2598 if (mod == 3) {
2599 if (is_store) {
2600 if (reg != OR_TMP0)
2601 gen_op_mov_TN_reg(ot, 0, reg);
2602 gen_op_mov_reg_T0(ot, rm);
2603 } else {
2604 gen_op_mov_TN_reg(ot, 0, rm);
2605 if (reg != OR_TMP0)
2606 gen_op_mov_reg_T0(ot, reg);
2607 }
2608 } else {
2609 gen_lea_modrm(s, modrm, &opreg, &disp);
2610 if (is_store) {
2611 if (reg != OR_TMP0)
2612 gen_op_mov_TN_reg(ot, 0, reg);
2613 gen_op_st_T0_A0(ot + s->mem_index);
2614 } else {
2615 gen_op_ld_T0_A0(ot + s->mem_index);
2616 if (reg != OR_TMP0)
2617 gen_op_mov_reg_T0(ot, reg);
2618 }
2619 }
2620}
2621
2622#ifndef VBOX
2623static inline uint32_t insn_get(DisasContext *s, int ot)
2624#else /* VBOX */
2625DECLINLINE(uint32_t) insn_get(DisasContext *s, int ot)
2626#endif /* VBOX */
2627{
2628 uint32_t ret;
2629
2630 switch(ot) {
2631 case OT_BYTE:
2632 ret = ldub_code(s->pc);
2633 s->pc++;
2634 break;
2635 case OT_WORD:
2636 ret = lduw_code(s->pc);
2637 s->pc += 2;
2638 break;
2639 default:
2640 case OT_LONG:
2641 ret = ldl_code(s->pc);
2642 s->pc += 4;
2643 break;
2644 }
2645 return ret;
2646}
2647
2648#ifndef VBOX
2649static inline int insn_const_size(unsigned int ot)
2650#else /* VBOX */
2651DECLINLINE(int) insn_const_size(unsigned int ot)
2652#endif /* VBOX */
2653{
2654 if (ot <= OT_LONG)
2655 return 1 << ot;
2656 else
2657 return 4;
2658}
2659
#ifndef VBOX
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
#else /* VBOX */
DECLINLINE(void) gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
#endif /* VBOX */
{
    /* Emit a jump to cs_base+eip, using direct TB chaining (slot tb_num)
       when the target lies on the same page as the current TB. */
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
#ifdef VBOX
        gen_check_external_event(s);
#endif /* VBOX */
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(eip);
        /* encode the chaining slot in the low bits of the TB pointer */
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
2687
#ifndef VBOX
static inline void gen_jcc(DisasContext *s, int b,
#else /* VBOX */
DECLINLINE(void) gen_jcc(DisasContext *s, int b,
#endif /* VBOX */
                          target_ulong val, target_ulong next_eip)
{
    /* Emit a conditional jump for condition code 'b': taken -> val,
       not taken -> next_eip.  Uses TB chaining when jmp_opt allows it,
       otherwise falls back to explicit EIP updates and gen_eob. */
    int l1, l2, cc_op;

    /* materialize the lazy condition codes before branching */
    cc_op = s->cc_op;
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
    if (s->jmp_opt) {
        /* fast path: both edges become chained TB exits */
        l1 = gen_new_label();
        gen_jcc1(s, cc_op, b, l1);

        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);
        s->is_jmp = 3;
    } else {
        /* slow path: set EIP explicitly and end the block */
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_jcc1(s, cc_op, b, l1);

        gen_jmp_im(next_eip);
        tcg_gen_br(l2);

        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
2726
/* Emit code leaving 1 in T[0] if condition 'b' holds, 0 otherwise
   (SETcc).  Chooses between a branch-based sequence and the slow
   arithmetic computation of the condition. */
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op, l1;
    TCGv t0;

    if (is_fast_jcc_case(s, b)) {
        /* nominal case: we use a jump */
        /* XXX: make it faster by adding new instructions in TCG */
        t0 = tcg_temp_local_new(TCG_TYPE_TL);
        tcg_gen_movi_tl(t0, 0);
        l1 = gen_new_label();
        /* branch over the '1' store when the inverted condition holds */
        gen_jcc1(s, s->cc_op, b ^ 1, l1);
        tcg_gen_movi_tl(t0, 1);
        gen_set_label(l1);
        tcg_gen_mov_tl(cpu_T[0], t0);
        tcg_temp_free(t0);
    } else {
        /* slow case: it is more efficient not to generate a jump,
           although it is questionable whether this optimization is
           worth it */
        inv = b & 1;
        jcc_op = (b >> 1) & 7;
        gen_setcc_slow_T0(s, jcc_op);
        if (inv) {
            /* odd condition codes are the negation of the even ones */
            tcg_gen_xori_tl(cpu_T[0], cpu_T[0], 1);
        }
    }
}
2755
#ifndef VBOX
static inline void gen_op_movl_T0_seg(int seg_reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_seg(int seg_reg)
#endif /* VBOX */
{
    /* load the 16-bit selector of seg_reg into T[0] (zero-extended) */
    tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                     offsetof(CPUX86State,segs[seg_reg].selector));
}
2765
2766#ifndef VBOX
2767static inline void gen_op_movl_seg_T0_vm(int seg_reg)
2768#else /* VBOX */
2769DECLINLINE(void) gen_op_movl_seg_T0_vm(int seg_reg)
2770#endif /* VBOX */
2771{
2772 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
2773 tcg_gen_st32_tl(cpu_T[0], cpu_env,
2774 offsetof(CPUX86State,segs[seg_reg].selector));
2775 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
2776 tcg_gen_st_tl(cpu_T[0], cpu_env,
2777 offsetof(CPUX86State,segs[seg_reg].base));
2778#ifdef VBOX
2779 int flags = DESC_P_MASK | DESC_S_MASK | DESC_W_MASK;
2780 if (seg_reg == R_CS)
2781 flags |= DESC_CS_MASK;
2782 gen_op_movl_T0_im(flags);
2783 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[seg_reg].flags));
2784#endif
2785}
2786
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* protected mode: the full descriptor load can fault, so run it
           through the helper with a precise EIP */
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        /* real/VM86 mode: simple base = selector << 4 load */
        gen_op_movl_seg_T0_vm(seg_reg);
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
2810
2811#ifndef VBOX
2812static inline int svm_is_rep(int prefixes)
2813#else /* VBOX */
2814DECLINLINE(int) svm_is_rep(int prefixes)
2815#endif /* VBOX */
2816{
2817 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2818}
2819
#ifndef VBOX
static inline void
#else /* VBOX */
DECLINLINE(void)
#endif /* VBOX */
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
                              uint32_t type, uint64_t param)
{
    /* Emit an SVM intercept check of the given type with an extra exit
       parameter; a no-op unless the guest is running under SVM. */
    /* no SVM activated; fast case */
    if (likely(!(s->flags & HF_SVMI_MASK)))
        return;
    /* the helper may trigger a #VMEXIT, so state must be in sync */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(pc_start - s->cs_base);
    tcg_gen_helper_0_2(helper_svm_check_intercept_param,
                       tcg_const_i32(type), tcg_const_i64(param));
}
2837
#ifndef VBOX
static inline void
#else /* VBOX */
DECLINLINE(void)
#endif
gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
{
    /* convenience wrapper: SVM intercept check with no exit parameter */
    gen_svm_check_intercept_param(s, pc_start, type, 0);
}
2847
#ifndef VBOX
static inline void gen_stack_update(DisasContext *s, int addend)
#else /* VBOX */
DECLINLINE(void) gen_stack_update(DisasContext *s, int addend)
#endif /* VBOX */
{
    /* Add 'addend' to ESP with the width implied by the current mode
       (size code: 0 = 16-bit, 1 = 32-bit, 2 = 64-bit). */
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_add_reg_im(2, R_ESP, addend);
    } else
#endif
    if (s->ss32) {
        gen_op_add_reg_im(1, R_ESP, addend);
    } else {
        gen_op_add_reg_im(0, R_ESP, addend);
    }
}
2865
/* generate a push. It depends on ss32, addseg and dflag */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        /* 64-bit mode: push 8 bytes, or 2 with a 16-bit operand size */
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            gen_op_addq_A0_im(-8);
            gen_op_st_T0_A0(OT_QUAD + s->mem_index);
        } else {
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                /* keep the pre-segment ESP in T1 for the final update */
                tcg_gen_mov_tl(cpu_T[1], cpu_A0);
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            /* 16-bit stack: wrap at 64K before adding the SS base */
            gen_op_andl_A0_ffff();
            tcg_gen_mov_tl(cpu_T[1], cpu_A0);
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
        /* ESP is only updated after the store succeeded (precise faults) */
        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
    }
}
2905
2906/* generate a push. It depends on ss32, addseg and dflag */
2907/* slower version for T1, only used for call Ev */
2908static void gen_push_T1(DisasContext *s)
2909{
2910#ifdef TARGET_X86_64
2911 if (CODE64(s)) {
2912 gen_op_movq_A0_reg(R_ESP);
2913 if (s->dflag) {
2914 gen_op_addq_A0_im(-8);
2915 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2916 } else {
2917 gen_op_addq_A0_im(-2);
2918 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2919 }
2920 gen_op_mov_reg_A0(2, R_ESP);
2921 } else
2922#endif
2923 {
2924 gen_op_movl_A0_reg(R_ESP);
2925 if (!s->dflag)
2926 gen_op_addl_A0_im(-2);
2927 else
2928 gen_op_addl_A0_im(-4);
2929 if (s->ss32) {
2930 if (s->addseg) {
2931 gen_op_addl_A0_seg(R_SS);
2932 }
2933 } else {
2934 gen_op_andl_A0_ffff();
2935 gen_op_addl_A0_seg(R_SS);
2936 }
2937 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2938
2939 if (s->ss32 && !s->addseg)
2940 gen_op_mov_reg_A0(1, R_ESP);
2941 else
2942 gen_stack_update(s, (-2) << s->dflag);
2943 }
2944}
2945
/* two step pop is necessary for precise exceptions */
/* Load the top of stack into T[0] without modifying ESP; the caller
   adjusts ESP afterwards with gen_pop_update once no fault can occur. */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_seg(R_SS);
        } else {
            /* 16-bit stack: wrap at 64K before adding the SS base */
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
    }
}
2967
2968static void gen_pop_update(DisasContext *s)
2969{
2970#ifdef TARGET_X86_64
2971 if (CODE64(s) && s->dflag) {
2972 gen_stack_update(s, 8);
2973 } else
2974#endif
2975 {
2976 gen_stack_update(s, 2 << s->dflag);
2977 }
2978}
2979
/* Compute the current stack address into A0 (with SS base if needed);
   the raw, pre-segment ESP value is left in T1 for later ESP updates. */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
}
2989
/* NOTE: wrap around in 16 bit not fully handled */
/* PUSHA/PUSHAD: push EAX..EDI (stored highest register first so EAX
   ends up at the lowest address); final ESP is kept in T1. */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    /* 8 registers of 2 or 4 bytes each */
    gen_op_addl_A0_im(-16 << s->dflag);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
        gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
3008
/* NOTE: wrap around in 16 bit not fully handled */
/* POPA/POPAD: pop EDI..EAX; the value for ESP (slot i == 3) is read but
   discarded per the architecture.  T1 accumulates the final ESP. */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    /* ESP after popping all 8 slots */
    tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
            gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
        }
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
3030
/* ENTER: allocate a stack frame of esp_addend bytes with 'level' nested
   frame pointers.  Pushes EBP, optionally copies previous frame pointers
   via the enter_level helpers, sets EBP to the new frame and lowers ESP. */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    /* the nesting level is architecturally taken modulo 32 */
    level &= 0x1f;
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        /* NOTE(review): loads ESP with the 'movl' op even in 64-bit
           mode, mirroring the push/pop helpers — confirm the op copies
           the full target_ulong width */
        gen_op_movl_A0_reg(R_ESP);
        gen_op_addq_A0_im(-opsize);
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);

        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter64_level,
                               tcg_const_i32(level),
                               tcg_const_i32((ot == OT_QUAD)),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        /* reserve the local variable area below the saved pointers */
        tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
        gen_op_mov_reg_T1(OT_QUAD, R_ESP);
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);
        if (s->addseg)
            gen_op_addl_A0_seg(R_SS);
        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter_level,
                               tcg_const_i32(level),
                               tcg_const_i32(s->dflag),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        /* reserve the local variable area below the saved pointers */
        tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
        gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
    }
}
3086
/* Raise CPU exception 'trapno' at cur_eip: sync flags and EIP so the
   guest sees a precise state, then call the raise helper and terminate
   the translation block. */
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
    s->is_jmp = 3;
}
3095
/* an interrupt is different from an exception because of the
   privilege checks */
/* next_eip - cur_eip is passed so the helper knows the INT instruction
   length for the return address. */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_2(helper_raise_interrupt,
                       tcg_const_i32(intno),
                       tcg_const_i32(next_eip - cur_eip));
    s->is_jmp = 3;
}
3109
/* Drop into the debugger at cur_eip: sync flags/EIP, invoke the debug
   helper and end the translation block. */
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_0(helper_debug);
    s->is_jmp = 3;
}
3118
/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
#ifdef VBOX
    gen_check_external_event(s);
#endif /* VBOX */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        /* the instruction after STI/MOV SS has executed; re-enable IRQs */
        tcg_gen_helper_0_0(helper_reset_inhibit_irq);
    }
    if (s->singlestep_enabled) {
        tcg_gen_helper_0_0(helper_debug);
    } else if (s->tf) {
        /* trap flag: raise #DB after the instruction */
        tcg_gen_helper_0_0(helper_single_step);
    } else {
        /* exit without chaining (argument 0 = no next TB) */
        tcg_gen_exit_tb(0);
    }
    s->is_jmp = 3;
}
3140
/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
        /* fast path: chained TB exit in slot tb_num */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        /* slow path: set EIP and end the block */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
3157
/* unconditional jump to eip, using chaining slot 0 */
static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
3162
#ifndef VBOX
static inline void gen_ldq_env_A0(int idx, int offset)
#else /* VBOX */
DECLINLINE(void) gen_ldq_env_A0(int idx, int offset)
#endif /* VBOX */
{
    /* load 64 bits from guest address A0 into env field at 'offset';
       idx is an OT_* + mem_index value, recover the memory index */
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
}
3173
#ifndef VBOX
static inline void gen_stq_env_A0(int idx, int offset)
#else /* VBOX */
DECLINLINE(void) gen_stq_env_A0(int idx, int offset)
#endif /* VBOX */
{
    /* store the 64-bit env field at 'offset' to guest address A0 */
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
}
3184
#ifndef VBOX
static inline void gen_ldo_env_A0(int idx, int offset)
#else /* VBOX */
DECLINLINE(void) gen_ldo_env_A0(int idx, int offset)
#endif /* VBOX */
{
    /* load a 128-bit XMM value from guest address A0 as two 64-bit halves */
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
}
3198
#ifndef VBOX
static inline void gen_sto_env_A0(int idx, int offset)
#else /* VBOX */
DECLINLINE(void) gen_sto_env_A0(int idx, int offset)
#endif /* VBOX */
{
    /* store a 128-bit XMM value to guest address A0 as two 64-bit halves */
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
}
3212
#ifndef VBOX
static inline void gen_op_movo(int d_offset, int s_offset)
#else /* VBOX */
DECLINLINE(void) gen_op_movo(int d_offset, int s_offset)
#endif /* VBOX */
{
    /* copy a 128-bit (octword) env field via two 64-bit moves */
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
}
3224
#ifndef VBOX
static inline void gen_op_movq(int d_offset, int s_offset)
#else /* VBOX */
DECLINLINE(void) gen_op_movq(int d_offset, int s_offset)
#endif /* VBOX */
{
    /* copy a 64-bit env field */
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}
3234
#ifndef VBOX
static inline void gen_op_movl(int d_offset, int s_offset)
#else /* VBOX */
DECLINLINE(void) gen_op_movl(int d_offset, int s_offset)
#endif /* VBOX */
{
    /* copy a 32-bit env field */
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
    tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
}
3244
#ifndef VBOX
static inline void gen_op_movq_env_0(int d_offset)
#else /* VBOX */
DECLINLINE(void) gen_op_movq_env_0(int d_offset)
#endif /* VBOX */
{
    /* zero a 64-bit env field */
    tcg_gen_movi_i64(cpu_tmp1_i64, 0);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}
3254
/* Markers used in the SSE dispatch tables below instead of real helper
   pointers: SSE_SPECIAL entries are decoded by hand in gen_sse,
   SSE_DUMMY entries only need the generic MMX prologue (femms/emms). */
#define SSE_SPECIAL ((void *)1)
#define SSE_DUMMY ((void *)2)

/* helper pair for an op that exists in both MMX and XMM form */
#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
/* ps/pd/ss/sd helper quadruple for a packed/scalar float op */
#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
                     helper_ ## x ## ss, helper_ ## x ## sd, }
3261
/* Main 0x0F-prefixed SSE/MMX dispatch table, indexed by [opcode][b1]
   where b1 selects the mandatory prefix: 0 = none, 1 = 0x66,
   2 = 0xF3, 3 = 0xF2.  NULL entries are illegal opcodes. */
static void *sse_op_table1[256][4] = {
    /* 3DNow! extensions */
    [0x0e] = { SSE_DUMMY }, /* femms */
    [0x0f] = { SSE_DUMMY }, /* pf... */
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
    [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
    [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
    [0x2e] = { helper_ucomiss, helper_ucomisd },
    [0x2f] = { helper_comiss, helper_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
    [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
    [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
    [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
    [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
               helper_cvtss2sd, helper_cvtsd2ss },
    [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq),
    [0xc6] = { helper_shufps, helper_shufpd },

    [0x38] = { SSE_SPECIAL, SSE_SPECIAL, NULL, SSE_SPECIAL }, /* SSSE3/SSE4 */
    [0x3a] = { SSE_SPECIAL, SSE_SPECIAL }, /* SSSE3/SSE4 */

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, helper_punpcklqdq_xmm },
    [0x6d] = { NULL, helper_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0x70] = { helper_pshufw_mmx,
               helper_pshufd_xmm,
               helper_pshufhw_xmm,
               helper_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_DUMMY }, /* emms */
    [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
    [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL },  /* movntq, movntq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
3390
/* Immediate-form MMX/SSE shifts (opcodes 0x71/0x72/0x73), indexed by
   8 * (opcode - 0x71) + ModRM reg field; column selects MMX vs XMM. */
static void *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, helper_psrldq_xmm }, /* byte shifts are XMM-only */
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, helper_pslldq_xmm },
};
3403
/* Scalar int <-> float conversion helpers, three groups of four:
   cvtsi2*, cvtt*2si (truncating), cvt*2si.  Within a group the entries
   alternate ss/sd and 32/64-bit integer; the 64-bit variants are NULL
   on 32-bit-only builds (X86_64_ONLY). */
static void *sse_op_table3[4 * 3] = {
    helper_cvtsi2ss,
    helper_cvtsi2sd,
    X86_64_ONLY(helper_cvtsq2ss),
    X86_64_ONLY(helper_cvtsq2sd),

    helper_cvttss2si,
    helper_cvttsd2si,
    X86_64_ONLY(helper_cvttss2sq),
    X86_64_ONLY(helper_cvttsd2sq),

    helper_cvtss2si,
    helper_cvtsd2si,
    X86_64_ONLY(helper_cvtss2sq),
    X86_64_ONLY(helper_cvtsd2sq),
};
3420
/* CMPPS/CMPPD/CMPSS/CMPSD predicates, indexed by the 3-bit immediate */
static void *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
3431
/* 3DNow! operations, indexed by the instruction's trailing suffix byte */
static void *sse_op_table5[256] = {
    [0x0c] = helper_pi2fw,
    [0x0d] = helper_pi2fd,
    [0x1c] = helper_pf2iw,
    [0x1d] = helper_pf2id,
    [0x8a] = helper_pfnacc,
    [0x8e] = helper_pfpnacc,
    [0x90] = helper_pfcmpge,
    [0x94] = helper_pfmin,
    [0x96] = helper_pfrcp,
    [0x97] = helper_pfrsqrt,
    [0x9a] = helper_pfsub,
    [0x9e] = helper_pfadd,
    [0xa0] = helper_pfcmpgt,
    [0xa4] = helper_pfmax,
    [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
    [0xa7] = helper_movq, /* pfrsqit1 */
    [0xaa] = helper_pfsubr,
    [0xae] = helper_pfacc,
    [0xb0] = helper_pfcmpeq,
    [0xb4] = helper_pfmul,
    [0xb6] = helper_movq, /* pfrcpit2 */
    [0xb7] = helper_pmulhrw_mmx,
    [0xbb] = helper_pswapd,
    [0xbf] = helper_pavgb_mmx /* pavgusb */
};
3458
/* Entry for the SSSE3/SSE4 tables: an MMX/XMM helper pair plus the
   CPUID extended-feature bit the instruction requires. */
struct sse_op_helper_s {
    void *op[2]; uint32_t ext_mask;
};
#define SSSE3_OP(x) { MMX_OP2(x), CPUID_EXT_SSSE3 }
#define SSE41_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE41 }
#define SSE42_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE42 }
#define SSE41_SPECIAL { { NULL, SSE_SPECIAL }, CPUID_EXT_SSE41 }
/* Three-byte opcode map 0F 38 (SSSE3/SSE4.1/SSE4.2), indexed by the
   third opcode byte. */
static struct sse_op_helper_s sse_op_table6[256] = {
    [0x00] = SSSE3_OP(pshufb),
    [0x01] = SSSE3_OP(phaddw),
    [0x02] = SSSE3_OP(phaddd),
    [0x03] = SSSE3_OP(phaddsw),
    [0x04] = SSSE3_OP(pmaddubsw),
    [0x05] = SSSE3_OP(phsubw),
    [0x06] = SSSE3_OP(phsubd),
    [0x07] = SSSE3_OP(phsubsw),
    [0x08] = SSSE3_OP(psignb),
    [0x09] = SSSE3_OP(psignw),
    [0x0a] = SSSE3_OP(psignd),
    [0x0b] = SSSE3_OP(pmulhrsw),
    [0x10] = SSE41_OP(pblendvb),
    [0x14] = SSE41_OP(blendvps),
    [0x15] = SSE41_OP(blendvpd),
    [0x17] = SSE41_OP(ptest),
    [0x1c] = SSSE3_OP(pabsb),
    [0x1d] = SSSE3_OP(pabsw),
    [0x1e] = SSSE3_OP(pabsd),
    [0x20] = SSE41_OP(pmovsxbw),
    [0x21] = SSE41_OP(pmovsxbd),
    [0x22] = SSE41_OP(pmovsxbq),
    [0x23] = SSE41_OP(pmovsxwd),
    [0x24] = SSE41_OP(pmovsxwq),
    [0x25] = SSE41_OP(pmovsxdq),
    [0x28] = SSE41_OP(pmuldq),
    [0x29] = SSE41_OP(pcmpeqq),
    [0x2a] = SSE41_SPECIAL, /* movntqda */
    [0x2b] = SSE41_OP(packusdw),
    [0x30] = SSE41_OP(pmovzxbw),
    [0x31] = SSE41_OP(pmovzxbd),
    [0x32] = SSE41_OP(pmovzxbq),
    [0x33] = SSE41_OP(pmovzxwd),
    [0x34] = SSE41_OP(pmovzxwq),
    [0x35] = SSE41_OP(pmovzxdq),
    [0x37] = SSE42_OP(pcmpgtq),
    [0x38] = SSE41_OP(pminsb),
    [0x39] = SSE41_OP(pminsd),
    [0x3a] = SSE41_OP(pminuw),
    [0x3b] = SSE41_OP(pminud),
    [0x3c] = SSE41_OP(pmaxsb),
    [0x3d] = SSE41_OP(pmaxsd),
    [0x3e] = SSE41_OP(pmaxuw),
    [0x3f] = SSE41_OP(pmaxud),
    [0x40] = SSE41_OP(pmulld),
    [0x41] = SSE41_OP(phminposuw),
};
3514
/* Three-byte opcode map 0F 3A (immediate-byte forms), indexed by the
   third opcode byte. */
static struct sse_op_helper_s sse_op_table7[256] = {
    [0x08] = SSE41_OP(roundps),
    [0x09] = SSE41_OP(roundpd),
    [0x0a] = SSE41_OP(roundss),
    [0x0b] = SSE41_OP(roundsd),
    [0x0c] = SSE41_OP(blendps),
    [0x0d] = SSE41_OP(blendpd),
    [0x0e] = SSE41_OP(pblendw),
    [0x0f] = SSSE3_OP(palignr),
    [0x14] = SSE41_SPECIAL, /* pextrb */
    [0x15] = SSE41_SPECIAL, /* pextrw */
    [0x16] = SSE41_SPECIAL, /* pextrd/pextrq */
    [0x17] = SSE41_SPECIAL, /* extractps */
    [0x20] = SSE41_SPECIAL, /* pinsrb */
    [0x21] = SSE41_SPECIAL, /* insertps */
    [0x22] = SSE41_SPECIAL, /* pinsrd/pinsrq */
    [0x40] = SSE41_OP(dpps),
    [0x41] = SSE41_OP(dppd),
    [0x42] = SSE41_OP(mpsadbw),
    [0x60] = SSE42_OP(pcmpestrm),
    [0x61] = SSE42_OP(pcmpestri),
    [0x62] = SSE42_OP(pcmpistrm),
    [0x63] = SSE42_OP(pcmpistri),
};
3539
/* Translate one SSE/MMX instruction (second opcode byte b, 0x0f already
 * consumed) into TCG ops.
 *
 *  s        - disassembly context for the current translation block
 *  pc_start - guest address of the instruction, used when raising exceptions
 *  rex_r    - REX.R contribution to the modrm 'reg' field (x86_64)
 *
 * The mandatory prefix (none/0x66/0xF3/0xF2) selects column b1 of
 * sse_op_table1; SSE_SPECIAL entries are decoded case by case in the big
 * switch below, everything else goes through the generic two-operand
 * helper path at the end of the function.
 */
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
{
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
    int modrm, mod, rm, reg, reg_addr, offset_addr;
    void *sse_op2;

    b &= 0xff;
    /* map the mandatory prefix to the table column:
       0 = none, 1 = 0x66, 2 = 0xF3 (REPZ), 3 = 0xF2 (REPNZ) */
    if (s->prefix & PREFIX_DATA)
        b1 = 1;
    else if (s->prefix & PREFIX_REPZ)
        b1 = 2;
    else if (s->prefix & PREFIX_REPNZ)
        b1 = 3;
    else
        b1 = 0;
    sse_op2 = sse_op_table1[b][b1];
    if (!sse_op2)
        goto illegal_op;
    /* 0x10..0x5f, 0xc2 and 0xc6 are always XMM; the rest are MMX unless a
       mandatory prefix is present */
    if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
        is_xmm = 1;
    } else {
        if (b1 == 0) {
            /* MMX case */
            is_xmm = 0;
        } else {
            is_xmm = 1;
        }
    }
    /* simple MMX/SSE operation */
    if (s->flags & HF_TS_MASK) {
        /* CR0.TS set: raise #NM so the OS can lazily restore FPU state */
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        return;
    }
    if (s->flags & HF_EM_MASK) {
    illegal_op:
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
        return;
    }
    /* XMM instructions require CR4.OSFXSR, except the 0x38/0x3a escapes
       without a 0x66 prefix */
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
        if ((b != 0x38 && b != 0x3a) || (s->prefix & PREFIX_DATA))
            goto illegal_op;
    if (b == 0x0e) {
        if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
            goto illegal_op;
        /* femms */
        tcg_gen_helper_0_0(helper_emms);
        return;
    }
    if (b == 0x77) {
        /* emms */
        tcg_gen_helper_0_0(helper_emms);
        return;
    }
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
       the static cpu state) */
    if (!is_xmm) {
        tcg_gen_helper_0_0(helper_enter_mmx);
    }

    modrm = ldub_code(s->pc++);
    reg = ((modrm >> 3) & 7);
    if (is_xmm)
        reg |= rex_r;
    mod = (modrm >> 6) & 3;
    if (sse_op2 == SSE_SPECIAL) {
        b |= (b1 << 8);  /* fold the prefix column into the opcode for the switch */
        switch(b) {
        case 0x0e7: /* movntq */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            break;
        case 0x1e7: /* movntdq */
        case 0x02b: /* movntps */
        case 0x12b: /* movntpd */
        case 0x3f0: /* lddqu */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            break;
        case 0x6e: /* movd mm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                /* with REX.W this is movq mm, r/m64 */
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,fpregs[reg].mmx));
                tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
            }
            break;
        case 0x16e: /* movd xmm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg]));
                tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg]));
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
            }
            break;
        case 0x6f: /* movq mm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                /* register to register: 64-bit copy via a temp */
                rm = (modrm & 7);
                tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
                               offsetof(CPUX86State,fpregs[rm].mmx));
                tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
                               offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x010: /* movups */
        case 0x110: /* movupd */
        case 0x028: /* movaps */
        case 0x128: /* movapd */
        case 0x16f: /* movdqa xmm, ea */
        case 0x26f: /* movdqu xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
                            offsetof(CPUX86State,xmm_regs[rm]));
            }
            break;
        case 0x210: /* movss xmm, ea */
            if (mod != 3) {
                /* from memory: load low dword, zero the upper three */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                /* register form merges only the low dword */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
            }
            break;
        case 0x310: /* movsd xmm, ea */
            if (mod != 3) {
                /* from memory: load low qword, zero the high qword */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x012: /* movlps */
        case 0x112: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                /* movhlps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x212: /* movsldup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
            }
            /* duplicate the even dwords into the odd slots */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
            break;
        case 0x312: /* movddup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            break;
        case 0x016: /* movhps */
        case 0x116: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                /* movlhps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x216: /* movshdup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
            }
            /* duplicate the odd dwords into the even slots */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            break;
        case 0x7e: /* movd ea, mm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                tcg_gen_ld_i64(cpu_T[0], cpu_env,
                               offsetof(CPUX86State,fpregs[reg].mmx));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x17e: /* movd ea, xmm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                tcg_gen_ld_i64(cpu_T[0], cpu_env,
                               offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x27e: /* movq xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            /* high qword is zeroed in both forms */
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x7f: /* movq ea, mm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
                            offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x011: /* movups */
        case 0x111: /* movupd */
        case 0x029: /* movaps */
        case 0x129: /* movapd */
        case 0x17f: /* movdqa ea, xmm */
        case 0x27f: /* movdqu ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
                            offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x211: /* movss ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            }
            break;
        case 0x311: /* movsd ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            }
            break;
        case 0x013: /* movlps */
        case 0x113: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                /* store form has no register encoding */
                goto illegal_op;
            }
            break;
        case 0x017: /* movhps */
        case 0x117: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x71: /* shift mm, im */
        case 0x72:
        case 0x73:
        case 0x171: /* shift xmm, im */
        case 0x172:
        case 0x173:
            /* immediate shift count is materialized in a temp register
               (xmm_t0/mmx_t0) so the generic shift helper can be used */
            val = ldub_code(s->pc++);
            if (is_xmm) {
                gen_op_movl_T0_im(val);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
                op1_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                gen_op_movl_T0_im(val);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
                op1_offset = offsetof(CPUX86State,mmx_t0);
            }
            /* table2 is indexed by (opcode low bits, /reg field) */
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
            if (!sse_op2)
                goto illegal_op;
            if (is_xmm) {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0x050: /* movmskps */
            rm = (modrm & 7) | REX_B(s);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                             offsetof(CPUX86State,xmm_regs[rm]));
            tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x150: /* movmskpd */
            rm = (modrm & 7) | REX_B(s);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                             offsetof(CPUX86State,xmm_regs[rm]));
            tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x02a: /* cvtpi2ps */
        case 0x12a: /* cvtpi2pd */
            tcg_gen_helper_0_0(helper_enter_mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            switch(b >> 8) {
            case 0x0:
                tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
                break;
            default:
            case 0x1:
                tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
                break;
            }
            break;
        case 0x22a: /* cvtsi2ss */
        case 0x32a: /* cvtsi2sd */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
            if (ot == OT_LONG) {
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
            } else {
                tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_T[0]);
            }
            break;
        case 0x02c: /* cvttps2pi */
        case 0x12c: /* cvttpd2pi */
        case 0x02d: /* cvtps2pi */
        case 0x12d: /* cvtpd2pi */
            tcg_gen_helper_0_0(helper_enter_mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                gen_ldo_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            switch(b) {
            case 0x02c:
                tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
                break;
            case 0x12c:
                tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
                break;
            case 0x02d:
                tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
                break;
            case 0x12d:
                tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
                break;
            }
            break;
        case 0x22c: /* cvttss2si */
        case 0x32c: /* cvttsd2si */
        case 0x22d: /* cvtss2si */
        case 0x32d: /* cvtsd2si */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if ((b >> 8) & 1) {
                    /* F2-prefixed (sd) source is 64 bit */
                    gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
                } else {
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                    tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                }
                op2_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
                                    (b & 1) * 4];
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
            if (ot == OT_LONG) {
                tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            } else {
                tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
            }
            gen_op_mov_reg_T0(ot, reg);
            break;
        case 0xc4: /* pinsrw */
        case 0x1c4:
            s->rip_offset = 1;   /* immediate byte follows the modrm operand */
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
                                offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
            } else {
                val &= 3;
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
                                offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
            }
            break;
        case 0xc5: /* pextrw */
        case 0x1c5:
            if (mod != 3)
                goto illegal_op;
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                rm = (modrm & 7) | REX_B(s);
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
            } else {
                val &= 3;
                rm = (modrm & 7);
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
            }
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0(ot, reg);
            break;
        case 0x1d6: /* movq ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x2d6: /* movq2dq */
            tcg_gen_helper_0_0(helper_enter_mmx);
            rm = (modrm & 7);
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                        offsetof(CPUX86State,fpregs[rm].mmx));
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x3d6: /* movdq2q */
            tcg_gen_helper_0_0(helper_enter_mmx);
            rm = (modrm & 7) | REX_B(s);
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            break;
        case 0xd7: /* pmovmskb */
        case 0x1d7:
            if (mod != 3)
                goto illegal_op;
            if (b1) {
                rm = (modrm & 7) | REX_B(s);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
                tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
            } else {
                rm = (modrm & 7);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
                tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
            }
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x138:
            /* 66 0F 38 escape; F2-prefixed variant is crc32 */
            if (s->prefix & PREFIX_REPNZ)
                goto crc32;
            /* fallthrough */
        case 0x038:
            b = modrm;   /* the third opcode byte was read as 'modrm' above */
            modrm = ldub_code(s->pc++);
            rm = modrm & 7;
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;

            sse_op2 = sse_op_table6[b].op[b1];
            if (!sse_op2)
                goto illegal_op;
            if (!(s->cpuid_ext_features & sse_op_table6[b].ext_mask))
                goto illegal_op;

            if (b1) {
                op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
                if (mod == 3) {
                    op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
                } else {
                    op2_offset = offsetof(CPUX86State,xmm_t0);
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    /* the pmovsx/pmovzx family reads only as many source
                       bytes as it widens, so load just that much */
                    switch (b) {
                    case 0x20: case 0x30: /* pmovsxbw, pmovzxbw */
                    case 0x23: case 0x33: /* pmovsxwd, pmovzxwd */
                    case 0x25: case 0x35: /* pmovsxdq, pmovzxdq */
                        gen_ldq_env_A0(s->mem_index, op2_offset +
                                        offsetof(XMMReg, XMM_Q(0)));
                        break;
                    case 0x21: case 0x31: /* pmovsxbd, pmovzxbd */
                    case 0x24: case 0x34: /* pmovsxwq, pmovzxwq */
                        tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
                                          (s->mem_index >> 2) - 1);
                        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, op2_offset +
                                        offsetof(XMMReg, XMM_L(0)));
                        break;
                    case 0x22: case 0x32: /* pmovsxbq, pmovzxbq */
                        tcg_gen_qemu_ld16u(cpu_tmp0, cpu_A0,
                                          (s->mem_index >> 2) - 1);
                        tcg_gen_st16_tl(cpu_tmp0, cpu_env, op2_offset +
                                        offsetof(XMMReg, XMM_W(0)));
                        break;
                    case 0x2a:            /* movntqda */
                        /* pure load into the destination register; done */
                        gen_ldo_env_A0(s->mem_index, op1_offset);
                        return;
                    default:
                        gen_ldo_env_A0(s->mem_index, op2_offset);
                    }
                }
            } else {
                op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
                if (mod == 3) {
                    op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
                } else {
                    op2_offset = offsetof(CPUX86State,mmx_t0);
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    gen_ldq_env_A0(s->mem_index, op2_offset);
                }
            }
            if (sse_op2 == SSE_SPECIAL)
                goto illegal_op;

            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);

            /* ptest sets eflags */
            if (b == 0x17)
                s->cc_op = CC_OP_EFLAGS;
            break;
        case 0x338: /* crc32 */
        crc32:
            b = modrm;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;

            if (b != 0xf0 && b != 0xf1)
                goto illegal_op;
            if (!(s->cpuid_ext_features & CPUID_EXT_SSE42))
                goto illegal_op;

            /* source operand size: f0 is byte, f1 is word/long/quad
               depending on prefixes */
            if (b == 0xf0)
                ot = OT_BYTE;
            else if (b == 0xf1 && s->dflag != 2)
                if (s->prefix & PREFIX_DATA)
                    ot = OT_WORD;
                else
                    ot = OT_LONG;
            else
                ot = OT_QUAD;

            gen_op_mov_TN_reg(OT_LONG, 0, reg);
            tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            tcg_gen_helper_1_3(helper_crc32, cpu_T[0], cpu_tmp2_i32,
                               cpu_T[0], tcg_const_i32(8 << ot));

            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            gen_op_mov_reg_T0(ot, reg);
            break;
        case 0x03a:
        case 0x13a:
            b = modrm;   /* third opcode byte */
            modrm = ldub_code(s->pc++);
            rm = modrm & 7;
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;

            sse_op2 = sse_op_table7[b].op[b1];
            if (!sse_op2)
                goto illegal_op;
            if (!(s->cpuid_ext_features & sse_op_table7[b].ext_mask))
                goto illegal_op;

            if (sse_op2 == SSE_SPECIAL) {
                /* insert/extract instructions, decoded by hand */
                ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
                rm = (modrm & 7) | REX_B(s);
                if (mod != 3)
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                reg = ((modrm >> 3) & 7) | rex_r;
                val = ldub_code(s->pc++);
                switch (b) {
                case 0x14: /* pextrb */
                    tcg_gen_ld8u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
                                            xmm_regs[reg].XMM_B(val & 15)));
                    if (mod == 3)
                        gen_op_mov_reg_T0(ot, rm);
                    else
                        tcg_gen_qemu_st8(cpu_T[0], cpu_A0,
                                        (s->mem_index >> 2) - 1);
                    break;
                case 0x15: /* pextrw */
                    tcg_gen_ld16u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
                                            xmm_regs[reg].XMM_W(val & 7)));
                    if (mod == 3)
                        gen_op_mov_reg_T0(ot, rm);
                    else
                        tcg_gen_qemu_st16(cpu_T[0], cpu_A0,
                                        (s->mem_index >> 2) - 1);
                    break;
                case 0x16:
                    if (ot == OT_LONG) { /* pextrd */
                        tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
                                        offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_L(val & 3)));
                        if (mod == 3)
                            gen_op_mov_reg_v(ot, rm, cpu_tmp2_i32);
                        else
                            tcg_gen_qemu_st32(cpu_tmp2_i32, cpu_A0,
                                            (s->mem_index >> 2) - 1);
                    } else { /* pextrq */
                        tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
                                        offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_Q(val & 1)));
                        if (mod == 3)
                            gen_op_mov_reg_v(ot, rm, cpu_tmp1_i64);
                        else
                            tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
                                            (s->mem_index >> 2) - 1);
                    }
                    break;
                case 0x17: /* extractps */
                    tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
                                            xmm_regs[reg].XMM_L(val & 3)));
                    if (mod == 3)
                        gen_op_mov_reg_T0(ot, rm);
                    else
                        tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
                                        (s->mem_index >> 2) - 1);
                    break;
                case 0x20: /* pinsrb */
                    if (mod == 3)
                        gen_op_mov_TN_reg(OT_LONG, 0, rm);
                    else
                        tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0,
                                        (s->mem_index >> 2) - 1);
                    tcg_gen_st8_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
                                            xmm_regs[reg].XMM_B(val & 15)));
                    break;
                case 0x21: /* insertps */
                    /* imm8: bits 7:6 select the source dword, 5:4 the
                       destination dword, 3:0 zero individual dwords */
                    if (mod == 3)
                        tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
                                        offsetof(CPUX86State,xmm_regs[rm]
                                                .XMM_L((val >> 6) & 3)));
                    else
                        tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
                                        (s->mem_index >> 2) - 1);
                    tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
                                    offsetof(CPUX86State,xmm_regs[reg]
                                            .XMM_L((val >> 4) & 3)));
                    if ((val >> 0) & 1)
                        tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
                                        cpu_env, offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_L(0)));
                    if ((val >> 1) & 1)
                        tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
                                        cpu_env, offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_L(1)));
                    if ((val >> 2) & 1)
                        tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
                                        cpu_env, offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_L(2)));
                    if ((val >> 3) & 1)
                        tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
                                        cpu_env, offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_L(3)));
                    break;
                case 0x22:
                    if (ot == OT_LONG) { /* pinsrd */
                        if (mod == 3)
                            gen_op_mov_v_reg(ot, cpu_tmp2_i32, rm);
                        else
                            tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
                                            (s->mem_index >> 2) - 1);
                        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
                                        offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_L(val & 3)));
                    } else { /* pinsrq */
                        if (mod == 3)
                            gen_op_mov_v_reg(ot, cpu_tmp1_i64, rm);
                        else
                            tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
                                            (s->mem_index >> 2) - 1);
                        tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
                                        offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_Q(val & 1)));
                    }
                    break;
                }
                return;
            }

            if (b1) {
                op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
                if (mod == 3) {
                    op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
                } else {
                    op2_offset = offsetof(CPUX86State,xmm_t0);
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    gen_ldo_env_A0(s->mem_index, op2_offset);
                }
            } else {
                op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
                if (mod == 3) {
                    op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
                } else {
                    op2_offset = offsetof(CPUX86State,mmx_t0);
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    gen_ldq_env_A0(s->mem_index, op2_offset);
                }
            }
            val = ldub_code(s->pc++);

            if ((b & 0xfc) == 0x60) { /* pcmpXstrX */
                s->cc_op = CC_OP_EFLAGS;

                if (s->dflag == 2)
                    /* The helper must use entire 64-bit gp registers */
                    val |= 1 << 8;
            }

            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
            break;
        default:
            goto illegal_op;
        }
    } else {
        /* generic MMX or SSE operation */
        switch(b) {
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
        case 0xc2: /* compare insns */
            s->rip_offset = 1;   /* an immediate byte follows the operand */
            break;
        default:
            break;
        }
        if (is_xmm) {
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
                                b == 0xc2)) {
                    /* specific case for SSE single instructions */
                    if (b1 == 2) {
                        /* 32 bit access */
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                    } else {
                        /* 64 bit access */
                        gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
                    }
                } else {
                    gen_ldo_env_A0(s->mem_index, op2_offset);
                }
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
        } else {
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
        }
        switch(b) {
        case 0x0f: /* 3DNow! data insns */
            if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
                goto illegal_op;
            /* the 3DNow! opcode is the trailing immediate byte */
            val = ldub_code(s->pc++);
            sse_op2 = sse_op_table5[val];
            if (!sse_op2)
                goto illegal_op;
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
            val = ldub_code(s->pc++);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
            break;
        case 0xc2:
            /* compare insns */
            val = ldub_code(s->pc++);
            if (val >= 8)
                goto illegal_op;
            sse_op2 = sse_op_table4[val][b1];
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0xf7:
            /* maskmov : we must prepare A0 */
            if (mod != 3)
                goto illegal_op;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(R_EDI);
            } else
#endif
            {
                gen_op_movl_A0_reg(R_EDI);
                if (s->aflag == 0)
                    gen_op_andl_A0_ffff();
            }
            gen_add_A0_ds_seg(s);

            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
            break;
        default:
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        }
        /* ucomis/comis set eflags */
        if (b == 0x2e || b == 0x2f) {
            s->cc_op = CC_OP_EFLAGS;
        }
    }
}
4476
4477#ifdef VBOX
/* Checks whether this is an invalid LOCK-prefixed sequence. Only a few
   instructions may be used together with the LOCK prefix, and of those
   only the forms that write to a memory operand are valid. So this is
   somewhat tedious to check...
4482 The AMD manual lists the following instructions.
4483 ADC
4484 ADD
4485 AND
4486 BTC
4487 BTR
4488 BTS
4489 CMPXCHG
4490 CMPXCHG8B
4491 CMPXCHG16B
4492 DEC
4493 INC
4494 NEG
4495 NOT
4496 OR
4497 SBB
4498 SUB
4499 XADD
4500 XCHG
4501 XOR */
4502static bool is_invalid_lock_sequence(DisasContext *s, target_ulong pc_start, int b)
4503{
4504 target_ulong pc = s->pc;
4505 int modrm, mod, op;
4506
4507 /* X={8,16,32,64} Y={16,32,64} */
4508 switch (b)
4509 {
4510 /* /2: ADC reg/memX, immX */
4511 /* /0: ADD reg/memX, immX */
4512 /* /4: AND reg/memX, immX */
4513 /* /1: OR reg/memX, immX */
4514 /* /3: SBB reg/memX, immX */
4515 /* /5: SUB reg/memX, immX */
4516 /* /6: XOR reg/memX, immX */
4517 case 0x80:
4518 case 0x81:
4519 case 0x83:
4520 modrm = ldub_code(pc++);
4521 op = (modrm >> 3) & 7;
4522 if (op == 7) /* /7: CMP */
4523 break;
4524 mod = (modrm >> 6) & 3;
4525 if (mod == 3) /* register destination */
4526 break;
4527 return false;
4528
4529 case 0x10: /* /r: ADC reg/mem8, reg8 */
4530 case 0x11: /* /r: ADC reg/memX, regY */
4531 case 0x00: /* /r: ADD reg/mem8, reg8 */
4532 case 0x01: /* /r: ADD reg/memX, regY */
4533 case 0x20: /* /r: AND reg/mem8, reg8 */
4534 case 0x21: /* /r: AND reg/memY, regY */
4535 case 0x08: /* /r: OR reg/mem8, reg8 */
4536 case 0x09: /* /r: OR reg/memY, regY */
4537 case 0x18: /* /r: SBB reg/mem8, reg8 */
4538 case 0x19: /* /r: SBB reg/memY, regY */
4539 case 0x28: /* /r: SUB reg/mem8, reg8 */
4540 case 0x29: /* /r: SUB reg/memY, regY */
4541 case 0x86: /* /r: XCHG reg/mem8, reg8 or XCHG reg8, reg/mem8 */
4542 case 0x87: /* /r: XCHG reg/memY, regY or XCHG regY, reg/memY */
4543 case 0x30: /* /r: XOR reg/mem8, reg8 */
4544 case 0x31: /* /r: XOR reg/memY, regY */
4545 modrm = ldub_code(pc++);
4546 mod = (modrm >> 6) & 3;
4547 if (mod == 3) /* register destination */
4548 break;
4549 return false;
4550
4551 /* /1: DEC reg/memX */
4552 /* /0: INC reg/memX */
4553 case 0xfe:
4554 case 0xff:
4555 modrm = ldub_code(pc++);
4556 mod = (modrm >> 6) & 3;
4557 if (mod == 3) /* register destination */
4558 break;
4559 return false;
4560
4561 /* /3: NEG reg/memX */
4562 /* /2: NOT reg/memX */
4563 case 0xf6:
4564 case 0xf7:
4565 modrm = ldub_code(pc++);
4566 mod = (modrm >> 6) & 3;
4567 if (mod == 3) /* register destination */
4568 break;
4569 return false;
4570
4571 case 0x0f:
4572 b = ldub_code(pc++);
4573 switch (b)
4574 {
4575 /* /7: BTC reg/memY, imm8 */
4576 /* /6: BTR reg/memY, imm8 */
4577 /* /5: BTS reg/memY, imm8 */
4578 case 0xba:
4579 modrm = ldub_code(pc++);
4580 op = (modrm >> 3) & 7;
4581 if (op < 5)
4582 break;
4583 mod = (modrm >> 6) & 3;
4584 if (mod == 3) /* register destination */
4585 break;
4586 return false;
4587
4588 case 0xbb: /* /r: BTC reg/memY, regY */
4589 case 0xb3: /* /r: BTR reg/memY, regY */
4590 case 0xab: /* /r: BTS reg/memY, regY */
4591 case 0xb0: /* /r: CMPXCHG reg/mem8, reg8 */
4592 case 0xb1: /* /r: CMPXCHG reg/memY, regY */
4593 case 0xc0: /* /r: XADD reg/mem8, reg8 */
4594 case 0xc1: /* /r: XADD reg/memY, regY */
4595 modrm = ldub_code(pc++);
4596 mod = (modrm >> 6) & 3;
4597 if (mod == 3) /* register destination */
4598 break;
4599 return false;
4600
4601 /* /1: CMPXCHG8B mem64 or CMPXCHG16B mem128 */
4602 case 0xc7:
4603 modrm = ldub_code(pc++);
4604 op = (modrm >> 3) & 7;
4605 if (op != 1)
4606 break;
4607 return false;
4608 }
4609 break;
4610 }
4611
4612 /* illegal sequence. The s->pc is past the lock prefix and that
4613 is sufficient for the TB, I think. */
4614 Log(("illegal lock sequence %RGv (b=%#x)\n", pc_start, b));
4615 return true;
4616}
4617#endif /* VBOX */
4618
4619
4620/* convert one instruction. s->is_jmp is set if the translation must
4621 be stopped. Return the next pc value */
4622static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
4623{
4624 int b, prefixes, aflag, dflag;
4625 int shift, ot;
4626 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
4627 target_ulong next_eip, tval;
4628 int rex_w, rex_r;
4629
4630 if (unlikely(loglevel & CPU_LOG_TB_OP))
4631 tcg_gen_debug_insn_start(pc_start);
4632 s->pc = pc_start;
4633 prefixes = 0;
4634 aflag = s->code32;
4635 dflag = s->code32;
4636 s->override = -1;
4637 rex_w = -1;
4638 rex_r = 0;
4639#ifdef TARGET_X86_64
4640 s->rex_x = 0;
4641 s->rex_b = 0;
4642 x86_64_hregs = 0;
4643#endif
4644 s->rip_offset = 0; /* for relative ip address */
4645#ifdef VBOX
4646 /* nike: seems only slow down things */
4647# if 0
4648 /* Always update EIP. Otherwise one must be very careful with generated code that can raise exceptions. */
4649
4650 gen_update_eip(pc_start - s->cs_base);
4651# endif
4652#endif
4653
4654 next_byte:
4655 b = ldub_code(s->pc);
4656 s->pc++;
4657 /* check prefixes */
4658#ifdef TARGET_X86_64
4659 if (CODE64(s)) {
4660 switch (b) {
4661 case 0xf3:
4662 prefixes |= PREFIX_REPZ;
4663 goto next_byte;
4664 case 0xf2:
4665 prefixes |= PREFIX_REPNZ;
4666 goto next_byte;
4667 case 0xf0:
4668 prefixes |= PREFIX_LOCK;
4669 goto next_byte;
4670 case 0x2e:
4671 s->override = R_CS;
4672 goto next_byte;
4673 case 0x36:
4674 s->override = R_SS;
4675 goto next_byte;
4676 case 0x3e:
4677 s->override = R_DS;
4678 goto next_byte;
4679 case 0x26:
4680 s->override = R_ES;
4681 goto next_byte;
4682 case 0x64:
4683 s->override = R_FS;
4684 goto next_byte;
4685 case 0x65:
4686 s->override = R_GS;
4687 goto next_byte;
4688 case 0x66:
4689 prefixes |= PREFIX_DATA;
4690 goto next_byte;
4691 case 0x67:
4692 prefixes |= PREFIX_ADR;
4693 goto next_byte;
4694 case 0x40 ... 0x4f:
4695 /* REX prefix */
4696 rex_w = (b >> 3) & 1;
4697 rex_r = (b & 0x4) << 1;
4698 s->rex_x = (b & 0x2) << 2;
4699 REX_B(s) = (b & 0x1) << 3;
4700 x86_64_hregs = 1; /* select uniform byte register addressing */
4701 goto next_byte;
4702 }
4703 if (rex_w == 1) {
4704 /* 0x66 is ignored if rex.w is set */
4705 dflag = 2;
4706 } else {
4707 if (prefixes & PREFIX_DATA)
4708 dflag ^= 1;
4709 }
4710 if (!(prefixes & PREFIX_ADR))
4711 aflag = 2;
4712 } else
4713#endif
4714 {
4715 switch (b) {
4716 case 0xf3:
4717 prefixes |= PREFIX_REPZ;
4718 goto next_byte;
4719 case 0xf2:
4720 prefixes |= PREFIX_REPNZ;
4721 goto next_byte;
4722 case 0xf0:
4723 prefixes |= PREFIX_LOCK;
4724 goto next_byte;
4725 case 0x2e:
4726 s->override = R_CS;
4727 goto next_byte;
4728 case 0x36:
4729 s->override = R_SS;
4730 goto next_byte;
4731 case 0x3e:
4732 s->override = R_DS;
4733 goto next_byte;
4734 case 0x26:
4735 s->override = R_ES;
4736 goto next_byte;
4737 case 0x64:
4738 s->override = R_FS;
4739 goto next_byte;
4740 case 0x65:
4741 s->override = R_GS;
4742 goto next_byte;
4743 case 0x66:
4744 prefixes |= PREFIX_DATA;
4745 goto next_byte;
4746 case 0x67:
4747 prefixes |= PREFIX_ADR;
4748 goto next_byte;
4749 }
4750 if (prefixes & PREFIX_DATA)
4751 dflag ^= 1;
4752 if (prefixes & PREFIX_ADR)
4753 aflag ^= 1;
4754 }
4755
4756 s->prefix = prefixes;
4757 s->aflag = aflag;
4758 s->dflag = dflag;
4759
4760 /* lock generation */
4761#ifndef VBOX
4762 if (prefixes & PREFIX_LOCK)
4763 tcg_gen_helper_0_0(helper_lock);
4764#else /* VBOX */
4765 if (prefixes & PREFIX_LOCK) {
4766 if (is_invalid_lock_sequence(s, pc_start, b)) {
4767 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
4768 return s->pc;
4769 }
4770 tcg_gen_helper_0_0(helper_lock);
4771 }
4772#endif /* VBOX */
4773
4774 /* now check op code */
4775 reswitch:
4776 switch(b) {
4777 case 0x0f:
4778 /**************************/
4779 /* extended op code */
4780 b = ldub_code(s->pc++) | 0x100;
4781 goto reswitch;
4782
4783 /**************************/
4784 /* arith & logic */
4785 case 0x00 ... 0x05:
4786 case 0x08 ... 0x0d:
4787 case 0x10 ... 0x15:
4788 case 0x18 ... 0x1d:
4789 case 0x20 ... 0x25:
4790 case 0x28 ... 0x2d:
4791 case 0x30 ... 0x35:
4792 case 0x38 ... 0x3d:
4793 {
4794 int op, f, val;
4795 op = (b >> 3) & 7;
4796 f = (b >> 1) & 3;
4797
4798 if ((b & 1) == 0)
4799 ot = OT_BYTE;
4800 else
4801 ot = dflag + OT_WORD;
4802
4803 switch(f) {
4804 case 0: /* OP Ev, Gv */
4805 modrm = ldub_code(s->pc++);
4806 reg = ((modrm >> 3) & 7) | rex_r;
4807 mod = (modrm >> 6) & 3;
4808 rm = (modrm & 7) | REX_B(s);
4809 if (mod != 3) {
4810 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4811 opreg = OR_TMP0;
4812 } else if (op == OP_XORL && rm == reg) {
4813 xor_zero:
4814 /* xor reg, reg optimisation */
4815 gen_op_movl_T0_0();
4816 s->cc_op = CC_OP_LOGICB + ot;
4817 gen_op_mov_reg_T0(ot, reg);
4818 gen_op_update1_cc();
4819 break;
4820 } else {
4821 opreg = rm;
4822 }
4823 gen_op_mov_TN_reg(ot, 1, reg);
4824 gen_op(s, op, ot, opreg);
4825 break;
4826 case 1: /* OP Gv, Ev */
4827 modrm = ldub_code(s->pc++);
4828 mod = (modrm >> 6) & 3;
4829 reg = ((modrm >> 3) & 7) | rex_r;
4830 rm = (modrm & 7) | REX_B(s);
4831 if (mod != 3) {
4832 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4833 gen_op_ld_T1_A0(ot + s->mem_index);
4834 } else if (op == OP_XORL && rm == reg) {
4835 goto xor_zero;
4836 } else {
4837 gen_op_mov_TN_reg(ot, 1, rm);
4838 }
4839 gen_op(s, op, ot, reg);
4840 break;
4841 case 2: /* OP A, Iv */
4842 val = insn_get(s, ot);
4843 gen_op_movl_T1_im(val);
4844 gen_op(s, op, ot, OR_EAX);
4845 break;
4846 }
4847 }
4848 break;
4849
4850 case 0x82:
4851 if (CODE64(s))
4852 goto illegal_op;
4853 case 0x80: /* GRP1 */
4854 case 0x81:
4855 case 0x83:
4856 {
4857 int val;
4858
4859 if ((b & 1) == 0)
4860 ot = OT_BYTE;
4861 else
4862 ot = dflag + OT_WORD;
4863
4864 modrm = ldub_code(s->pc++);
4865 mod = (modrm >> 6) & 3;
4866 rm = (modrm & 7) | REX_B(s);
4867 op = (modrm >> 3) & 7;
4868
4869 if (mod != 3) {
4870 if (b == 0x83)
4871 s->rip_offset = 1;
4872 else
4873 s->rip_offset = insn_const_size(ot);
4874 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4875 opreg = OR_TMP0;
4876 } else {
4877 opreg = rm;
4878 }
4879
4880 switch(b) {
4881 default:
4882 case 0x80:
4883 case 0x81:
4884 case 0x82:
4885 val = insn_get(s, ot);
4886 break;
4887 case 0x83:
4888 val = (int8_t)insn_get(s, OT_BYTE);
4889 break;
4890 }
4891 gen_op_movl_T1_im(val);
4892 gen_op(s, op, ot, opreg);
4893 }
4894 break;
4895
4896 /**************************/
4897 /* inc, dec, and other misc arith */
4898 case 0x40 ... 0x47: /* inc Gv */
4899 ot = dflag ? OT_LONG : OT_WORD;
4900 gen_inc(s, ot, OR_EAX + (b & 7), 1);
4901 break;
4902 case 0x48 ... 0x4f: /* dec Gv */
4903 ot = dflag ? OT_LONG : OT_WORD;
4904 gen_inc(s, ot, OR_EAX + (b & 7), -1);
4905 break;
4906 case 0xf6: /* GRP3 */
4907 case 0xf7:
4908 if ((b & 1) == 0)
4909 ot = OT_BYTE;
4910 else
4911 ot = dflag + OT_WORD;
4912
4913 modrm = ldub_code(s->pc++);
4914 mod = (modrm >> 6) & 3;
4915 rm = (modrm & 7) | REX_B(s);
4916 op = (modrm >> 3) & 7;
4917 if (mod != 3) {
4918 if (op == 0)
4919 s->rip_offset = insn_const_size(ot);
4920 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4921 gen_op_ld_T0_A0(ot + s->mem_index);
4922 } else {
4923 gen_op_mov_TN_reg(ot, 0, rm);
4924 }
4925
4926 switch(op) {
4927 case 0: /* test */
4928 val = insn_get(s, ot);
4929 gen_op_movl_T1_im(val);
4930 gen_op_testl_T0_T1_cc();
4931 s->cc_op = CC_OP_LOGICB + ot;
4932 break;
4933 case 2: /* not */
4934 tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
4935 if (mod != 3) {
4936 gen_op_st_T0_A0(ot + s->mem_index);
4937 } else {
4938 gen_op_mov_reg_T0(ot, rm);
4939 }
4940 break;
4941 case 3: /* neg */
4942 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
4943 if (mod != 3) {
4944 gen_op_st_T0_A0(ot + s->mem_index);
4945 } else {
4946 gen_op_mov_reg_T0(ot, rm);
4947 }
4948 gen_op_update_neg_cc();
4949 s->cc_op = CC_OP_SUBB + ot;
4950 break;
4951 case 4: /* mul */
4952 switch(ot) {
4953 case OT_BYTE:
4954 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
4955 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4956 tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
4957 /* XXX: use 32 bit mul which could be faster */
4958 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4959 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4960 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4961 tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
4962 s->cc_op = CC_OP_MULB;
4963 break;
4964 case OT_WORD:
4965 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
4966 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
4967 tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
4968 /* XXX: use 32 bit mul which could be faster */
4969 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4970 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4971 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4972 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
4973 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4974 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4975 s->cc_op = CC_OP_MULW;
4976 break;
4977 default:
4978 case OT_LONG:
4979#ifdef TARGET_X86_64
4980 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4981 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
4982 tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
4983 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4984 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4985 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4986 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
4987 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4988 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4989#else
4990 {
4991 TCGv t0, t1;
4992 t0 = tcg_temp_new(TCG_TYPE_I64);
4993 t1 = tcg_temp_new(TCG_TYPE_I64);
4994 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4995 tcg_gen_extu_i32_i64(t0, cpu_T[0]);
4996 tcg_gen_extu_i32_i64(t1, cpu_T[1]);
4997 tcg_gen_mul_i64(t0, t0, t1);
4998 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4999 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5000 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5001 tcg_gen_shri_i64(t0, t0, 32);
5002 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5003 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5004 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
5005 }
5006#endif
5007 s->cc_op = CC_OP_MULL;
5008 break;
5009#ifdef TARGET_X86_64
5010 case OT_QUAD:
5011 tcg_gen_helper_0_1(helper_mulq_EAX_T0, cpu_T[0]);
5012 s->cc_op = CC_OP_MULQ;
5013 break;
5014#endif
5015 }
5016 break;
5017 case 5: /* imul */
5018 switch(ot) {
5019 case OT_BYTE:
5020 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
5021 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5022 tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
5023 /* XXX: use 32 bit mul which could be faster */
5024 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5025 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5026 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5027 tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
5028 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5029 s->cc_op = CC_OP_MULB;
5030 break;
5031 case OT_WORD:
5032 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
5033 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5034 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5035 /* XXX: use 32 bit mul which could be faster */
5036 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5037 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5038 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5039 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5040 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5041 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
5042 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5043 s->cc_op = CC_OP_MULW;
5044 break;
5045 default:
5046 case OT_LONG:
5047#ifdef TARGET_X86_64
5048 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5049 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5050 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
5051 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5052 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5053 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5054 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
5055 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5056 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
5057 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5058#else
5059 {
5060 TCGv t0, t1;
5061 t0 = tcg_temp_new(TCG_TYPE_I64);
5062 t1 = tcg_temp_new(TCG_TYPE_I64);
5063 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5064 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
5065 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
5066 tcg_gen_mul_i64(t0, t0, t1);
5067 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5068 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5069 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5070 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
5071 tcg_gen_shri_i64(t0, t0, 32);
5072 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5073 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5074 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5075 }
5076#endif
5077 s->cc_op = CC_OP_MULL;
5078 break;
5079#ifdef TARGET_X86_64
5080 case OT_QUAD:
5081 tcg_gen_helper_0_1(helper_imulq_EAX_T0, cpu_T[0]);
5082 s->cc_op = CC_OP_MULQ;
5083 break;
5084#endif
5085 }
5086 break;
5087 case 6: /* div */
5088 switch(ot) {
5089 case OT_BYTE:
5090 gen_jmp_im(pc_start - s->cs_base);
5091 tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
5092 break;
5093 case OT_WORD:
5094 gen_jmp_im(pc_start - s->cs_base);
5095 tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
5096 break;
5097 default:
5098 case OT_LONG:
5099 gen_jmp_im(pc_start - s->cs_base);
5100 tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
5101 break;
5102#ifdef TARGET_X86_64
5103 case OT_QUAD:
5104 gen_jmp_im(pc_start - s->cs_base);
5105 tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
5106 break;
5107#endif
5108 }
5109 break;
5110 case 7: /* idiv */
5111 switch(ot) {
5112 case OT_BYTE:
5113 gen_jmp_im(pc_start - s->cs_base);
5114 tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
5115 break;
5116 case OT_WORD:
5117 gen_jmp_im(pc_start - s->cs_base);
5118 tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
5119 break;
5120 default:
5121 case OT_LONG:
5122 gen_jmp_im(pc_start - s->cs_base);
5123 tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
5124 break;
5125#ifdef TARGET_X86_64
5126 case OT_QUAD:
5127 gen_jmp_im(pc_start - s->cs_base);
5128 tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
5129 break;
5130#endif
5131 }
5132 break;
5133 default:
5134 goto illegal_op;
5135 }
5136 break;
5137
5138 case 0xfe: /* GRP4 */
5139 case 0xff: /* GRP5 */
5140 if ((b & 1) == 0)
5141 ot = OT_BYTE;
5142 else
5143 ot = dflag + OT_WORD;
5144
5145 modrm = ldub_code(s->pc++);
5146 mod = (modrm >> 6) & 3;
5147 rm = (modrm & 7) | REX_B(s);
5148 op = (modrm >> 3) & 7;
5149 if (op >= 2 && b == 0xfe) {
5150 goto illegal_op;
5151 }
5152 if (CODE64(s)) {
5153 if (op == 2 || op == 4) {
5154 /* operand size for jumps is 64 bit */
5155 ot = OT_QUAD;
5156 } else if (op == 3 || op == 5) {
5157 /* for call calls, the operand is 16 or 32 bit, even
5158 in long mode */
5159 ot = dflag ? OT_LONG : OT_WORD;
5160 } else if (op == 6) {
5161 /* default push size is 64 bit */
5162 ot = dflag ? OT_QUAD : OT_WORD;
5163 }
5164 }
5165 if (mod != 3) {
5166 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5167 if (op >= 2 && op != 3 && op != 5)
5168 gen_op_ld_T0_A0(ot + s->mem_index);
5169 } else {
5170 gen_op_mov_TN_reg(ot, 0, rm);
5171 }
5172
5173 switch(op) {
5174 case 0: /* inc Ev */
5175 if (mod != 3)
5176 opreg = OR_TMP0;
5177 else
5178 opreg = rm;
5179 gen_inc(s, ot, opreg, 1);
5180 break;
5181 case 1: /* dec Ev */
5182 if (mod != 3)
5183 opreg = OR_TMP0;
5184 else
5185 opreg = rm;
5186 gen_inc(s, ot, opreg, -1);
5187 break;
5188 case 2: /* call Ev */
5189 /* XXX: optimize if memory (no 'and' is necessary) */
5190#ifdef VBOX_WITH_CALL_RECORD
5191 if (s->record_call)
5192 gen_op_record_call();
5193#endif
5194 if (s->dflag == 0)
5195 gen_op_andl_T0_ffff();
5196 next_eip = s->pc - s->cs_base;
5197 gen_movtl_T1_im(next_eip);
5198 gen_push_T1(s);
5199 gen_op_jmp_T0();
5200 gen_eob(s);
5201 break;
5202 case 3: /* lcall Ev */
5203 gen_op_ld_T1_A0(ot + s->mem_index);
5204 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5205 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5206 do_lcall:
5207 if (s->pe && !s->vm86) {
5208 if (s->cc_op != CC_OP_DYNAMIC)
5209 gen_op_set_cc_op(s->cc_op);
5210 gen_jmp_im(pc_start - s->cs_base);
5211 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5212 tcg_gen_helper_0_4(helper_lcall_protected,
5213 cpu_tmp2_i32, cpu_T[1],
5214 tcg_const_i32(dflag),
5215 tcg_const_i32(s->pc - pc_start));
5216 } else {
5217 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5218 tcg_gen_helper_0_4(helper_lcall_real,
5219 cpu_tmp2_i32, cpu_T[1],
5220 tcg_const_i32(dflag),
5221 tcg_const_i32(s->pc - s->cs_base));
5222 }
5223 gen_eob(s);
5224 break;
5225 case 4: /* jmp Ev */
5226 if (s->dflag == 0)
5227 gen_op_andl_T0_ffff();
5228 gen_op_jmp_T0();
5229 gen_eob(s);
5230 break;
5231 case 5: /* ljmp Ev */
5232 gen_op_ld_T1_A0(ot + s->mem_index);
5233 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5234 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5235 do_ljmp:
5236 if (s->pe && !s->vm86) {
5237 if (s->cc_op != CC_OP_DYNAMIC)
5238 gen_op_set_cc_op(s->cc_op);
5239 gen_jmp_im(pc_start - s->cs_base);
5240 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5241 tcg_gen_helper_0_3(helper_ljmp_protected,
5242 cpu_tmp2_i32,
5243 cpu_T[1],
5244 tcg_const_i32(s->pc - pc_start));
5245 } else {
5246 gen_op_movl_seg_T0_vm(R_CS);
5247 gen_op_movl_T0_T1();
5248 gen_op_jmp_T0();
5249 }
5250 gen_eob(s);
5251 break;
5252 case 6: /* push Ev */
5253 gen_push_T0(s);
5254 break;
5255 default:
5256 goto illegal_op;
5257 }
5258 break;
5259
5260 case 0x84: /* test Ev, Gv */
5261 case 0x85:
5262 if ((b & 1) == 0)
5263 ot = OT_BYTE;
5264 else
5265 ot = dflag + OT_WORD;
5266
5267 modrm = ldub_code(s->pc++);
5268 mod = (modrm >> 6) & 3;
5269 rm = (modrm & 7) | REX_B(s);
5270 reg = ((modrm >> 3) & 7) | rex_r;
5271
5272 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5273 gen_op_mov_TN_reg(ot, 1, reg);
5274 gen_op_testl_T0_T1_cc();
5275 s->cc_op = CC_OP_LOGICB + ot;
5276 break;
5277
5278 case 0xa8: /* test eAX, Iv */
5279 case 0xa9:
5280 if ((b & 1) == 0)
5281 ot = OT_BYTE;
5282 else
5283 ot = dflag + OT_WORD;
5284 val = insn_get(s, ot);
5285
5286 gen_op_mov_TN_reg(ot, 0, OR_EAX);
5287 gen_op_movl_T1_im(val);
5288 gen_op_testl_T0_T1_cc();
5289 s->cc_op = CC_OP_LOGICB + ot;
5290 break;
5291
5292 case 0x98: /* CWDE/CBW */
5293#ifdef TARGET_X86_64
5294 if (dflag == 2) {
5295 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5296 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5297 gen_op_mov_reg_T0(OT_QUAD, R_EAX);
5298 } else
5299#endif
5300 if (dflag == 1) {
5301 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5302 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5303 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5304 } else {
5305 gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
5306 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5307 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5308 }
5309 break;
5310 case 0x99: /* CDQ/CWD */
5311#ifdef TARGET_X86_64
5312 if (dflag == 2) {
5313 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5314 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
5315 gen_op_mov_reg_T0(OT_QUAD, R_EDX);
5316 } else
5317#endif
5318 if (dflag == 1) {
5319 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5320 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5321 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
5322 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5323 } else {
5324 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5325 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5326 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
5327 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5328 }
5329 break;
5330 case 0x1af: /* imul Gv, Ev */
5331 case 0x69: /* imul Gv, Ev, I */
5332 case 0x6b:
5333 ot = dflag + OT_WORD;
5334 modrm = ldub_code(s->pc++);
5335 reg = ((modrm >> 3) & 7) | rex_r;
5336 if (b == 0x69)
5337 s->rip_offset = insn_const_size(ot);
5338 else if (b == 0x6b)
5339 s->rip_offset = 1;
5340 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5341 if (b == 0x69) {
5342 val = insn_get(s, ot);
5343 gen_op_movl_T1_im(val);
5344 } else if (b == 0x6b) {
5345 val = (int8_t)insn_get(s, OT_BYTE);
5346 gen_op_movl_T1_im(val);
5347 } else {
5348 gen_op_mov_TN_reg(ot, 1, reg);
5349 }
5350
5351#ifdef TARGET_X86_64
5352 if (ot == OT_QUAD) {
5353 tcg_gen_helper_1_2(helper_imulq_T0_T1, cpu_T[0], cpu_T[0], cpu_T[1]);
5354 } else
5355#endif
5356 if (ot == OT_LONG) {
5357#ifdef TARGET_X86_64
5358 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5359 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
5360 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5361 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5362 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
5363 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5364#else
5365 {
5366 TCGv t0, t1;
5367 t0 = tcg_temp_new(TCG_TYPE_I64);
5368 t1 = tcg_temp_new(TCG_TYPE_I64);
5369 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
5370 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
5371 tcg_gen_mul_i64(t0, t0, t1);
5372 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5373 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5374 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
5375 tcg_gen_shri_i64(t0, t0, 32);
5376 tcg_gen_trunc_i64_i32(cpu_T[1], t0);
5377 tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
5378 }
5379#endif
5380 } else {
5381 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5382 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5383 /* XXX: use 32 bit mul which could be faster */
5384 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5385 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5386 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5387 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5388 }
5389 gen_op_mov_reg_T0(ot, reg);
5390 s->cc_op = CC_OP_MULB + ot;
5391 break;
5392 case 0x1c0:
5393 case 0x1c1: /* xadd Ev, Gv */
5394 if ((b & 1) == 0)
5395 ot = OT_BYTE;
5396 else
5397 ot = dflag + OT_WORD;
5398 modrm = ldub_code(s->pc++);
5399 reg = ((modrm >> 3) & 7) | rex_r;
5400 mod = (modrm >> 6) & 3;
5401 if (mod == 3) {
5402 rm = (modrm & 7) | REX_B(s);
5403 gen_op_mov_TN_reg(ot, 0, reg);
5404 gen_op_mov_TN_reg(ot, 1, rm);
5405 gen_op_addl_T0_T1();
5406 gen_op_mov_reg_T1(ot, reg);
5407 gen_op_mov_reg_T0(ot, rm);
5408 } else {
5409 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5410 gen_op_mov_TN_reg(ot, 0, reg);
5411 gen_op_ld_T1_A0(ot + s->mem_index);
5412 gen_op_addl_T0_T1();
5413 gen_op_st_T0_A0(ot + s->mem_index);
5414 gen_op_mov_reg_T1(ot, reg);
5415 }
5416 gen_op_update2_cc();
5417 s->cc_op = CC_OP_ADDB + ot;
5418 break;
5419 case 0x1b0:
5420 case 0x1b1: /* cmpxchg Ev, Gv */
5421 {
5422 int label1, label2;
5423 TCGv t0, t1, t2, a0;
5424
5425 if ((b & 1) == 0)
5426 ot = OT_BYTE;
5427 else
5428 ot = dflag + OT_WORD;
5429 modrm = ldub_code(s->pc++);
5430 reg = ((modrm >> 3) & 7) | rex_r;
5431 mod = (modrm >> 6) & 3;
5432 t0 = tcg_temp_local_new(TCG_TYPE_TL);
5433 t1 = tcg_temp_local_new(TCG_TYPE_TL);
5434 t2 = tcg_temp_local_new(TCG_TYPE_TL);
5435 a0 = tcg_temp_local_new(TCG_TYPE_TL);
5436 gen_op_mov_v_reg(ot, t1, reg);
5437 if (mod == 3) {
5438 rm = (modrm & 7) | REX_B(s);
5439 gen_op_mov_v_reg(ot, t0, rm);
5440 } else {
5441 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5442 tcg_gen_mov_tl(a0, cpu_A0);
5443 gen_op_ld_v(ot + s->mem_index, t0, a0);
5444 rm = 0; /* avoid warning */
5445 }
5446 label1 = gen_new_label();
5447 tcg_gen_ld_tl(t2, cpu_env, offsetof(CPUState, regs[R_EAX]));
5448 tcg_gen_sub_tl(t2, t2, t0);
5449 gen_extu(ot, t2);
5450 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
5451 if (mod == 3) {
5452 label2 = gen_new_label();
5453 gen_op_mov_reg_v(ot, R_EAX, t0);
5454 tcg_gen_br(label2);
5455 gen_set_label(label1);
5456 gen_op_mov_reg_v(ot, rm, t1);
5457 gen_set_label(label2);
5458 } else {
5459 tcg_gen_mov_tl(t1, t0);
5460 gen_op_mov_reg_v(ot, R_EAX, t0);
5461 gen_set_label(label1);
5462 /* always store */
5463 gen_op_st_v(ot + s->mem_index, t1, a0);
5464 }
5465 tcg_gen_mov_tl(cpu_cc_src, t0);
5466 tcg_gen_mov_tl(cpu_cc_dst, t2);
5467 s->cc_op = CC_OP_SUBB + ot;
5468 tcg_temp_free(t0);
5469 tcg_temp_free(t1);
5470 tcg_temp_free(t2);
5471 tcg_temp_free(a0);
5472 }
5473 break;
5474 case 0x1c7: /* cmpxchg8b */
5475 modrm = ldub_code(s->pc++);
5476 mod = (modrm >> 6) & 3;
5477 if ((mod == 3) || ((modrm & 0x38) != 0x8))
5478 goto illegal_op;
5479#ifdef TARGET_X86_64
5480 if (dflag == 2) {
5481 if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
5482 goto illegal_op;
5483 gen_jmp_im(pc_start - s->cs_base);
5484 if (s->cc_op != CC_OP_DYNAMIC)
5485 gen_op_set_cc_op(s->cc_op);
5486 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5487 tcg_gen_helper_0_1(helper_cmpxchg16b, cpu_A0);
5488 } else
5489#endif
5490 {
5491 if (!(s->cpuid_features & CPUID_CX8))
5492 goto illegal_op;
5493 gen_jmp_im(pc_start - s->cs_base);
5494 if (s->cc_op != CC_OP_DYNAMIC)
5495 gen_op_set_cc_op(s->cc_op);
5496 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5497 tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
5498 }
5499 s->cc_op = CC_OP_EFLAGS;
5500 break;
5501
5502 /**************************/
5503 /* push/pop */
5504 case 0x50 ... 0x57: /* push */
5505 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
5506 gen_push_T0(s);
5507 break;
5508 case 0x58 ... 0x5f: /* pop */
5509 if (CODE64(s)) {
5510 ot = dflag ? OT_QUAD : OT_WORD;
5511 } else {
5512 ot = dflag + OT_WORD;
5513 }
5514 gen_pop_T0(s);
5515 /* NOTE: order is important for pop %sp */
5516 gen_pop_update(s);
5517 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
5518 break;
5519 case 0x60: /* pusha */
5520 if (CODE64(s))
5521 goto illegal_op;
5522 gen_pusha(s);
5523 break;
5524 case 0x61: /* popa */
5525 if (CODE64(s))
5526 goto illegal_op;
5527 gen_popa(s);
5528 break;
5529 case 0x68: /* push Iv */
5530 case 0x6a:
5531 if (CODE64(s)) {
5532 ot = dflag ? OT_QUAD : OT_WORD;
5533 } else {
5534 ot = dflag + OT_WORD;
5535 }
5536 if (b == 0x68)
5537 val = insn_get(s, ot);
5538 else
5539 val = (int8_t)insn_get(s, OT_BYTE);
5540 gen_op_movl_T0_im(val);
5541 gen_push_T0(s);
5542 break;
5543 case 0x8f: /* pop Ev */
5544 if (CODE64(s)) {
5545 ot = dflag ? OT_QUAD : OT_WORD;
5546 } else {
5547 ot = dflag + OT_WORD;
5548 }
5549 modrm = ldub_code(s->pc++);
5550 mod = (modrm >> 6) & 3;
5551 gen_pop_T0(s);
5552 if (mod == 3) {
5553 /* NOTE: order is important for pop %sp */
5554 gen_pop_update(s);
5555 rm = (modrm & 7) | REX_B(s);
5556 gen_op_mov_reg_T0(ot, rm);
5557 } else {
5558 /* NOTE: order is important too for MMU exceptions */
5559 s->popl_esp_hack = 1 << ot;
5560 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5561 s->popl_esp_hack = 0;
5562 gen_pop_update(s);
5563 }
5564 break;
5565 case 0xc8: /* enter */
5566 {
5567 int level;
5568 val = lduw_code(s->pc);
5569 s->pc += 2;
5570 level = ldub_code(s->pc++);
5571 gen_enter(s, val, level);
5572 }
5573 break;
5574 case 0xc9: /* leave */
5575 /* XXX: exception not precise (ESP is updated before potential exception) */
5576 if (CODE64(s)) {
5577 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
5578 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
5579 } else if (s->ss32) {
5580 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
5581 gen_op_mov_reg_T0(OT_LONG, R_ESP);
5582 } else {
5583 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
5584 gen_op_mov_reg_T0(OT_WORD, R_ESP);
5585 }
5586 gen_pop_T0(s);
5587 if (CODE64(s)) {
5588 ot = dflag ? OT_QUAD : OT_WORD;
5589 } else {
5590 ot = dflag + OT_WORD;
5591 }
5592 gen_op_mov_reg_T0(ot, R_EBP);
5593 gen_pop_update(s);
5594 break;
5595 case 0x06: /* push es */
5596 case 0x0e: /* push cs */
5597 case 0x16: /* push ss */
5598 case 0x1e: /* push ds */
5599 if (CODE64(s))
5600 goto illegal_op;
5601 gen_op_movl_T0_seg(b >> 3);
5602 gen_push_T0(s);
5603 break;
5604 case 0x1a0: /* push fs */
5605 case 0x1a8: /* push gs */
5606 gen_op_movl_T0_seg((b >> 3) & 7);
5607 gen_push_T0(s);
5608 break;
5609 case 0x07: /* pop es */
5610 case 0x17: /* pop ss */
5611 case 0x1f: /* pop ds */
5612 if (CODE64(s))
5613 goto illegal_op;
5614 reg = b >> 3;
5615 gen_pop_T0(s);
5616 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5617 gen_pop_update(s);
5618 if (reg == R_SS) {
5619 /* if reg == SS, inhibit interrupts/trace. */
5620 /* If several instructions disable interrupts, only the
5621 _first_ does it */
5622 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5623 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5624 s->tf = 0;
5625 }
5626 if (s->is_jmp) {
5627 gen_jmp_im(s->pc - s->cs_base);
5628 gen_eob(s);
5629 }
5630 break;
5631 case 0x1a1: /* pop fs */
5632 case 0x1a9: /* pop gs */
5633 gen_pop_T0(s);
5634 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
5635 gen_pop_update(s);
5636 if (s->is_jmp) {
5637 gen_jmp_im(s->pc - s->cs_base);
5638 gen_eob(s);
5639 }
5640 break;
5641
5642 /**************************/
5643 /* mov */
5644 case 0x88:
5645 case 0x89: /* mov Gv, Ev */
5646 if ((b & 1) == 0)
5647 ot = OT_BYTE;
5648 else
5649 ot = dflag + OT_WORD;
5650 modrm = ldub_code(s->pc++);
5651 reg = ((modrm >> 3) & 7) | rex_r;
5652
5653 /* generate a generic store */
5654 gen_ldst_modrm(s, modrm, ot, reg, 1);
5655 break;
5656 case 0xc6:
5657 case 0xc7: /* mov Ev, Iv */
5658 if ((b & 1) == 0)
5659 ot = OT_BYTE;
5660 else
5661 ot = dflag + OT_WORD;
5662 modrm = ldub_code(s->pc++);
5663 mod = (modrm >> 6) & 3;
5664 if (mod != 3) {
5665 s->rip_offset = insn_const_size(ot);
5666 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5667 }
5668 val = insn_get(s, ot);
5669 gen_op_movl_T0_im(val);
5670 if (mod != 3)
5671 gen_op_st_T0_A0(ot + s->mem_index);
5672 else
5673 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
5674 break;
5675 case 0x8a:
5676 case 0x8b: /* mov Ev, Gv */
5677#ifdef VBOX /* dtrace hot fix */
5678 if (prefixes & PREFIX_LOCK)
5679 goto illegal_op;
5680#endif
5681 if ((b & 1) == 0)
5682 ot = OT_BYTE;
5683 else
5684 ot = OT_WORD + dflag;
5685 modrm = ldub_code(s->pc++);
5686 reg = ((modrm >> 3) & 7) | rex_r;
5687
5688 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5689 gen_op_mov_reg_T0(ot, reg);
5690 break;
5691 case 0x8e: /* mov seg, Gv */
5692 modrm = ldub_code(s->pc++);
5693 reg = (modrm >> 3) & 7;
5694 if (reg >= 6 || reg == R_CS)
5695 goto illegal_op;
5696 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5697 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5698 if (reg == R_SS) {
5699 /* if reg == SS, inhibit interrupts/trace */
5700 /* If several instructions disable interrupts, only the
5701 _first_ does it */
5702 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5703 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5704 s->tf = 0;
5705 }
5706 if (s->is_jmp) {
5707 gen_jmp_im(s->pc - s->cs_base);
5708 gen_eob(s);
5709 }
5710 break;
5711 case 0x8c: /* mov Gv, seg */
5712 modrm = ldub_code(s->pc++);
5713 reg = (modrm >> 3) & 7;
5714 mod = (modrm >> 6) & 3;
5715 if (reg >= 6)
5716 goto illegal_op;
5717 gen_op_movl_T0_seg(reg);
5718 if (mod == 3)
5719 ot = OT_WORD + dflag;
5720 else
5721 ot = OT_WORD;
5722 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5723 break;
5724
5725 case 0x1b6: /* movzbS Gv, Eb */
5726 case 0x1b7: /* movzwS Gv, Eb */
5727 case 0x1be: /* movsbS Gv, Eb */
5728 case 0x1bf: /* movswS Gv, Eb */
5729 {
5730 int d_ot;
5731 /* d_ot is the size of destination */
5732 d_ot = dflag + OT_WORD;
5733 /* ot is the size of source */
5734 ot = (b & 1) + OT_BYTE;
5735 modrm = ldub_code(s->pc++);
5736 reg = ((modrm >> 3) & 7) | rex_r;
5737 mod = (modrm >> 6) & 3;
5738 rm = (modrm & 7) | REX_B(s);
5739
5740 if (mod == 3) {
5741 gen_op_mov_TN_reg(ot, 0, rm);
5742 switch(ot | (b & 8)) {
5743 case OT_BYTE:
5744 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
5745 break;
5746 case OT_BYTE | 8:
5747 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5748 break;
5749 case OT_WORD:
5750 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
5751 break;
5752 default:
5753 case OT_WORD | 8:
5754 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5755 break;
5756 }
5757 gen_op_mov_reg_T0(d_ot, reg);
5758 } else {
5759 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5760 if (b & 8) {
5761 gen_op_lds_T0_A0(ot + s->mem_index);
5762 } else {
5763 gen_op_ldu_T0_A0(ot + s->mem_index);
5764 }
5765 gen_op_mov_reg_T0(d_ot, reg);
5766 }
5767 }
5768 break;
5769
5770 case 0x8d: /* lea */
5771 ot = dflag + OT_WORD;
5772 modrm = ldub_code(s->pc++);
5773 mod = (modrm >> 6) & 3;
5774 if (mod == 3)
5775 goto illegal_op;
5776 reg = ((modrm >> 3) & 7) | rex_r;
5777 /* we must ensure that no segment is added */
5778 s->override = -1;
5779 val = s->addseg;
5780 s->addseg = 0;
5781 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5782 s->addseg = val;
5783 gen_op_mov_reg_A0(ot - OT_WORD, reg);
5784 break;
5785
5786 case 0xa0: /* mov EAX, Ov */
5787 case 0xa1:
5788 case 0xa2: /* mov Ov, EAX */
5789 case 0xa3:
5790 {
5791 target_ulong offset_addr;
5792
5793 if ((b & 1) == 0)
5794 ot = OT_BYTE;
5795 else
5796 ot = dflag + OT_WORD;
5797#ifdef TARGET_X86_64
5798 if (s->aflag == 2) {
5799 offset_addr = ldq_code(s->pc);
5800 s->pc += 8;
5801 gen_op_movq_A0_im(offset_addr);
5802 } else
5803#endif
5804 {
5805 if (s->aflag) {
5806 offset_addr = insn_get(s, OT_LONG);
5807 } else {
5808 offset_addr = insn_get(s, OT_WORD);
5809 }
5810 gen_op_movl_A0_im(offset_addr);
5811 }
5812 gen_add_A0_ds_seg(s);
5813 if ((b & 2) == 0) {
5814 gen_op_ld_T0_A0(ot + s->mem_index);
5815 gen_op_mov_reg_T0(ot, R_EAX);
5816 } else {
5817 gen_op_mov_TN_reg(ot, 0, R_EAX);
5818 gen_op_st_T0_A0(ot + s->mem_index);
5819 }
5820 }
5821 break;
5822 case 0xd7: /* xlat */
5823#ifdef TARGET_X86_64
5824 if (s->aflag == 2) {
5825 gen_op_movq_A0_reg(R_EBX);
5826 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5827 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5828 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5829 } else
5830#endif
5831 {
5832 gen_op_movl_A0_reg(R_EBX);
5833 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5834 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5835 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5836 if (s->aflag == 0)
5837 gen_op_andl_A0_ffff();
5838 else
5839 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
5840 }
5841 gen_add_A0_ds_seg(s);
5842 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
5843 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
5844 break;
5845 case 0xb0 ... 0xb7: /* mov R, Ib */
5846 val = insn_get(s, OT_BYTE);
5847 gen_op_movl_T0_im(val);
5848 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
5849 break;
5850 case 0xb8 ... 0xbf: /* mov R, Iv */
5851#ifdef TARGET_X86_64
5852 if (dflag == 2) {
5853 uint64_t tmp;
5854 /* 64 bit case */
5855 tmp = ldq_code(s->pc);
5856 s->pc += 8;
5857 reg = (b & 7) | REX_B(s);
5858 gen_movtl_T0_im(tmp);
5859 gen_op_mov_reg_T0(OT_QUAD, reg);
5860 } else
5861#endif
5862 {
5863 ot = dflag ? OT_LONG : OT_WORD;
5864 val = insn_get(s, ot);
5865 reg = (b & 7) | REX_B(s);
5866 gen_op_movl_T0_im(val);
5867 gen_op_mov_reg_T0(ot, reg);
5868 }
5869 break;
5870
5871 case 0x91 ... 0x97: /* xchg R, EAX */
5872 ot = dflag + OT_WORD;
5873 reg = (b & 7) | REX_B(s);
5874 rm = R_EAX;
5875 goto do_xchg_reg;
5876 case 0x86:
5877 case 0x87: /* xchg Ev, Gv */
5878 if ((b & 1) == 0)
5879 ot = OT_BYTE;
5880 else
5881 ot = dflag + OT_WORD;
5882 modrm = ldub_code(s->pc++);
5883 reg = ((modrm >> 3) & 7) | rex_r;
5884 mod = (modrm >> 6) & 3;
5885 if (mod == 3) {
5886 rm = (modrm & 7) | REX_B(s);
5887 do_xchg_reg:
5888 gen_op_mov_TN_reg(ot, 0, reg);
5889 gen_op_mov_TN_reg(ot, 1, rm);
5890 gen_op_mov_reg_T0(ot, rm);
5891 gen_op_mov_reg_T1(ot, reg);
5892 } else {
5893 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5894 gen_op_mov_TN_reg(ot, 0, reg);
5895 /* for xchg, lock is implicit */
5896 if (!(prefixes & PREFIX_LOCK))
5897 tcg_gen_helper_0_0(helper_lock);
5898 gen_op_ld_T1_A0(ot + s->mem_index);
5899 gen_op_st_T0_A0(ot + s->mem_index);
5900 if (!(prefixes & PREFIX_LOCK))
5901 tcg_gen_helper_0_0(helper_unlock);
5902 gen_op_mov_reg_T1(ot, reg);
5903 }
5904 break;
5905 case 0xc4: /* les Gv */
5906 if (CODE64(s))
5907 goto illegal_op;
5908 op = R_ES;
5909 goto do_lxx;
5910 case 0xc5: /* lds Gv */
5911 if (CODE64(s))
5912 goto illegal_op;
5913 op = R_DS;
5914 goto do_lxx;
5915 case 0x1b2: /* lss Gv */
5916 op = R_SS;
5917 goto do_lxx;
5918 case 0x1b4: /* lfs Gv */
5919 op = R_FS;
5920 goto do_lxx;
5921 case 0x1b5: /* lgs Gv */
5922 op = R_GS;
5923 do_lxx:
5924 ot = dflag ? OT_LONG : OT_WORD;
5925 modrm = ldub_code(s->pc++);
5926 reg = ((modrm >> 3) & 7) | rex_r;
5927 mod = (modrm >> 6) & 3;
5928 if (mod == 3)
5929 goto illegal_op;
5930 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5931 gen_op_ld_T1_A0(ot + s->mem_index);
5932 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5933 /* load the segment first to handle exceptions properly */
5934 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5935 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
5936 /* then put the data */
5937 gen_op_mov_reg_T1(ot, reg);
5938 if (s->is_jmp) {
5939 gen_jmp_im(s->pc - s->cs_base);
5940 gen_eob(s);
5941 }
5942 break;
5943
5944 /************************/
5945 /* shifts */
5946 case 0xc0:
5947 case 0xc1:
5948 /* shift Ev,Ib */
5949 shift = 2;
5950 grp2:
5951 {
5952 if ((b & 1) == 0)
5953 ot = OT_BYTE;
5954 else
5955 ot = dflag + OT_WORD;
5956
5957 modrm = ldub_code(s->pc++);
5958 mod = (modrm >> 6) & 3;
5959 op = (modrm >> 3) & 7;
5960
5961 if (mod != 3) {
5962 if (shift == 2) {
5963 s->rip_offset = 1;
5964 }
5965 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5966 opreg = OR_TMP0;
5967 } else {
5968 opreg = (modrm & 7) | REX_B(s);
5969 }
5970
5971 /* simpler op */
5972 if (shift == 0) {
5973 gen_shift(s, op, ot, opreg, OR_ECX);
5974 } else {
5975 if (shift == 2) {
5976 shift = ldub_code(s->pc++);
5977 }
5978 gen_shifti(s, op, ot, opreg, shift);
5979 }
5980 }
5981 break;
5982 case 0xd0:
5983 case 0xd1:
5984 /* shift Ev,1 */
5985 shift = 1;
5986 goto grp2;
5987 case 0xd2:
5988 case 0xd3:
5989 /* shift Ev,cl */
5990 shift = 0;
5991 goto grp2;
5992
5993 case 0x1a4: /* shld imm */
5994 op = 0;
5995 shift = 1;
5996 goto do_shiftd;
5997 case 0x1a5: /* shld cl */
5998 op = 0;
5999 shift = 0;
6000 goto do_shiftd;
6001 case 0x1ac: /* shrd imm */
6002 op = 1;
6003 shift = 1;
6004 goto do_shiftd;
6005 case 0x1ad: /* shrd cl */
6006 op = 1;
6007 shift = 0;
6008 do_shiftd:
6009 ot = dflag + OT_WORD;
6010 modrm = ldub_code(s->pc++);
6011 mod = (modrm >> 6) & 3;
6012 rm = (modrm & 7) | REX_B(s);
6013 reg = ((modrm >> 3) & 7) | rex_r;
6014 if (mod != 3) {
6015 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6016 opreg = OR_TMP0;
6017 } else {
6018 opreg = rm;
6019 }
6020 gen_op_mov_TN_reg(ot, 1, reg);
6021
6022 if (shift) {
6023 val = ldub_code(s->pc++);
6024 tcg_gen_movi_tl(cpu_T3, val);
6025 } else {
6026 tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
6027 }
6028 gen_shiftd_rm_T1_T3(s, ot, opreg, op);
6029 break;
6030
6031 /************************/
6032 /* floats */
6033 case 0xd8 ... 0xdf:
6034 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
6035 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
6036 /* XXX: what to do if illegal op ? */
6037 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6038 break;
6039 }
6040 modrm = ldub_code(s->pc++);
6041 mod = (modrm >> 6) & 3;
6042 rm = modrm & 7;
6043 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
6044 if (mod != 3) {
6045 /* memory op */
6046 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6047 switch(op) {
6048 case 0x00 ... 0x07: /* fxxxs */
6049 case 0x10 ... 0x17: /* fixxxl */
6050 case 0x20 ... 0x27: /* fxxxl */
6051 case 0x30 ... 0x37: /* fixxx */
6052 {
6053 int op1;
6054 op1 = op & 7;
6055
6056 switch(op >> 4) {
6057 case 0:
6058 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6059 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6060 tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
6061 break;
6062 case 1:
6063 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6064 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6065 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
6066 break;
6067 case 2:
6068 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6069 (s->mem_index >> 2) - 1);
6070 tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
6071 break;
6072 case 3:
6073 default:
6074 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
6075 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6076 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
6077 break;
6078 }
6079
6080 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
6081 if (op1 == 3) {
6082 /* fcomp needs pop */
6083 tcg_gen_helper_0_0(helper_fpop);
6084 }
6085 }
6086 break;
6087 case 0x08: /* flds */
6088 case 0x0a: /* fsts */
6089 case 0x0b: /* fstps */
6090 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
6091 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
6092 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
6093 switch(op & 7) {
6094 case 0:
6095 switch(op >> 4) {
6096 case 0:
6097 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6098 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6099 tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
6100 break;
6101 case 1:
6102 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6103 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6104 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
6105 break;
6106 case 2:
6107 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6108 (s->mem_index >> 2) - 1);
6109 tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
6110 break;
6111 case 3:
6112 default:
6113 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
6114 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6115 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
6116 break;
6117 }
6118 break;
6119 case 1:
6120 /* XXX: the corresponding CPUID bit must be tested ! */
6121 switch(op >> 4) {
6122 case 1:
6123 tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
6124 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6125 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6126 break;
6127 case 2:
6128 tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
6129 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6130 (s->mem_index >> 2) - 1);
6131 break;
6132 case 3:
6133 default:
6134 tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
6135 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6136 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6137 break;
6138 }
6139 tcg_gen_helper_0_0(helper_fpop);
6140 break;
6141 default:
6142 switch(op >> 4) {
6143 case 0:
6144 tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
6145 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6146 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6147 break;
6148 case 1:
6149 tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
6150 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6151 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6152 break;
6153 case 2:
6154 tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
6155 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6156 (s->mem_index >> 2) - 1);
6157 break;
6158 case 3:
6159 default:
6160 tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
6161 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6162 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6163 break;
6164 }
6165 if ((op & 7) == 3)
6166 tcg_gen_helper_0_0(helper_fpop);
6167 break;
6168 }
6169 break;
6170 case 0x0c: /* fldenv mem */
6171 if (s->cc_op != CC_OP_DYNAMIC)
6172 gen_op_set_cc_op(s->cc_op);
6173 gen_jmp_im(pc_start - s->cs_base);
6174 tcg_gen_helper_0_2(helper_fldenv,
6175 cpu_A0, tcg_const_i32(s->dflag));
6176 break;
6177 case 0x0d: /* fldcw mem */
6178 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
6179 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6180 tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
6181 break;
6182 case 0x0e: /* fnstenv mem */
6183 if (s->cc_op != CC_OP_DYNAMIC)
6184 gen_op_set_cc_op(s->cc_op);
6185 gen_jmp_im(pc_start - s->cs_base);
6186 tcg_gen_helper_0_2(helper_fstenv,
6187 cpu_A0, tcg_const_i32(s->dflag));
6188 break;
6189 case 0x0f: /* fnstcw mem */
6190 tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
6191 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6192 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6193 break;
6194 case 0x1d: /* fldt mem */
6195 if (s->cc_op != CC_OP_DYNAMIC)
6196 gen_op_set_cc_op(s->cc_op);
6197 gen_jmp_im(pc_start - s->cs_base);
6198 tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
6199 break;
6200 case 0x1f: /* fstpt mem */
6201 if (s->cc_op != CC_OP_DYNAMIC)
6202 gen_op_set_cc_op(s->cc_op);
6203 gen_jmp_im(pc_start - s->cs_base);
6204 tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
6205 tcg_gen_helper_0_0(helper_fpop);
6206 break;
6207 case 0x2c: /* frstor mem */
6208 if (s->cc_op != CC_OP_DYNAMIC)
6209 gen_op_set_cc_op(s->cc_op);
6210 gen_jmp_im(pc_start - s->cs_base);
6211 tcg_gen_helper_0_2(helper_frstor,
6212 cpu_A0, tcg_const_i32(s->dflag));
6213 break;
6214 case 0x2e: /* fnsave mem */
6215 if (s->cc_op != CC_OP_DYNAMIC)
6216 gen_op_set_cc_op(s->cc_op);
6217 gen_jmp_im(pc_start - s->cs_base);
6218 tcg_gen_helper_0_2(helper_fsave,
6219 cpu_A0, tcg_const_i32(s->dflag));
6220 break;
6221 case 0x2f: /* fnstsw mem */
6222 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
6223 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6224 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6225 break;
6226 case 0x3c: /* fbld */
6227 if (s->cc_op != CC_OP_DYNAMIC)
6228 gen_op_set_cc_op(s->cc_op);
6229 gen_jmp_im(pc_start - s->cs_base);
6230 tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
6231 break;
6232 case 0x3e: /* fbstp */
6233 if (s->cc_op != CC_OP_DYNAMIC)
6234 gen_op_set_cc_op(s->cc_op);
6235 gen_jmp_im(pc_start - s->cs_base);
6236 tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
6237 tcg_gen_helper_0_0(helper_fpop);
6238 break;
6239 case 0x3d: /* fildll */
6240 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6241 (s->mem_index >> 2) - 1);
6242 tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
6243 break;
6244 case 0x3f: /* fistpll */
6245 tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
6246 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6247 (s->mem_index >> 2) - 1);
6248 tcg_gen_helper_0_0(helper_fpop);
6249 break;
6250 default:
6251 goto illegal_op;
6252 }
6253 } else {
6254 /* register float ops */
6255 opreg = rm;
6256
6257 switch(op) {
6258 case 0x08: /* fld sti */
6259 tcg_gen_helper_0_0(helper_fpush);
6260 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
6261 break;
6262 case 0x09: /* fxchg sti */
6263 case 0x29: /* fxchg4 sti, undocumented op */
6264 case 0x39: /* fxchg7 sti, undocumented op */
6265 tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
6266 break;
6267 case 0x0a: /* grp d9/2 */
6268 switch(rm) {
6269 case 0: /* fnop */
6270 /* check exceptions (FreeBSD FPU probe) */
6271 if (s->cc_op != CC_OP_DYNAMIC)
6272 gen_op_set_cc_op(s->cc_op);
6273 gen_jmp_im(pc_start - s->cs_base);
6274 tcg_gen_helper_0_0(helper_fwait);
6275 break;
6276 default:
6277 goto illegal_op;
6278 }
6279 break;
6280 case 0x0c: /* grp d9/4 */
6281 switch(rm) {
6282 case 0: /* fchs */
6283 tcg_gen_helper_0_0(helper_fchs_ST0);
6284 break;
6285 case 1: /* fabs */
6286 tcg_gen_helper_0_0(helper_fabs_ST0);
6287 break;
6288 case 4: /* ftst */
6289 tcg_gen_helper_0_0(helper_fldz_FT0);
6290 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6291 break;
6292 case 5: /* fxam */
6293 tcg_gen_helper_0_0(helper_fxam_ST0);
6294 break;
6295 default:
6296 goto illegal_op;
6297 }
6298 break;
6299 case 0x0d: /* grp d9/5 */
6300 {
6301 switch(rm) {
6302 case 0:
6303 tcg_gen_helper_0_0(helper_fpush);
6304 tcg_gen_helper_0_0(helper_fld1_ST0);
6305 break;
6306 case 1:
6307 tcg_gen_helper_0_0(helper_fpush);
6308 tcg_gen_helper_0_0(helper_fldl2t_ST0);
6309 break;
6310 case 2:
6311 tcg_gen_helper_0_0(helper_fpush);
6312 tcg_gen_helper_0_0(helper_fldl2e_ST0);
6313 break;
6314 case 3:
6315 tcg_gen_helper_0_0(helper_fpush);
6316 tcg_gen_helper_0_0(helper_fldpi_ST0);
6317 break;
6318 case 4:
6319 tcg_gen_helper_0_0(helper_fpush);
6320 tcg_gen_helper_0_0(helper_fldlg2_ST0);
6321 break;
6322 case 5:
6323 tcg_gen_helper_0_0(helper_fpush);
6324 tcg_gen_helper_0_0(helper_fldln2_ST0);
6325 break;
6326 case 6:
6327 tcg_gen_helper_0_0(helper_fpush);
6328 tcg_gen_helper_0_0(helper_fldz_ST0);
6329 break;
6330 default:
6331 goto illegal_op;
6332 }
6333 }
6334 break;
6335 case 0x0e: /* grp d9/6 */
6336 switch(rm) {
6337 case 0: /* f2xm1 */
6338 tcg_gen_helper_0_0(helper_f2xm1);
6339 break;
6340 case 1: /* fyl2x */
6341 tcg_gen_helper_0_0(helper_fyl2x);
6342 break;
6343 case 2: /* fptan */
6344 tcg_gen_helper_0_0(helper_fptan);
6345 break;
6346 case 3: /* fpatan */
6347 tcg_gen_helper_0_0(helper_fpatan);
6348 break;
6349 case 4: /* fxtract */
6350 tcg_gen_helper_0_0(helper_fxtract);
6351 break;
6352 case 5: /* fprem1 */
6353 tcg_gen_helper_0_0(helper_fprem1);
6354 break;
6355 case 6: /* fdecstp */
6356 tcg_gen_helper_0_0(helper_fdecstp);
6357 break;
6358 default:
6359 case 7: /* fincstp */
6360 tcg_gen_helper_0_0(helper_fincstp);
6361 break;
6362 }
6363 break;
6364 case 0x0f: /* grp d9/7 */
6365 switch(rm) {
6366 case 0: /* fprem */
6367 tcg_gen_helper_0_0(helper_fprem);
6368 break;
6369 case 1: /* fyl2xp1 */
6370 tcg_gen_helper_0_0(helper_fyl2xp1);
6371 break;
6372 case 2: /* fsqrt */
6373 tcg_gen_helper_0_0(helper_fsqrt);
6374 break;
6375 case 3: /* fsincos */
6376 tcg_gen_helper_0_0(helper_fsincos);
6377 break;
6378 case 5: /* fscale */
6379 tcg_gen_helper_0_0(helper_fscale);
6380 break;
6381 case 4: /* frndint */
6382 tcg_gen_helper_0_0(helper_frndint);
6383 break;
6384 case 6: /* fsin */
6385 tcg_gen_helper_0_0(helper_fsin);
6386 break;
6387 default:
6388 case 7: /* fcos */
6389 tcg_gen_helper_0_0(helper_fcos);
6390 break;
6391 }
6392 break;
6393 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
6394 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
6395 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
6396 {
6397 int op1;
6398
6399 op1 = op & 7;
6400 if (op >= 0x20) {
6401 tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
6402 if (op >= 0x30)
6403 tcg_gen_helper_0_0(helper_fpop);
6404 } else {
6405 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6406 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
6407 }
6408 }
6409 break;
6410 case 0x02: /* fcom */
6411 case 0x22: /* fcom2, undocumented op */
6412 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6413 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6414 break;
6415 case 0x03: /* fcomp */
6416 case 0x23: /* fcomp3, undocumented op */
6417 case 0x32: /* fcomp5, undocumented op */
6418 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6419 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6420 tcg_gen_helper_0_0(helper_fpop);
6421 break;
6422 case 0x15: /* da/5 */
6423 switch(rm) {
6424 case 1: /* fucompp */
6425 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
6426 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6427 tcg_gen_helper_0_0(helper_fpop);
6428 tcg_gen_helper_0_0(helper_fpop);
6429 break;
6430 default:
6431 goto illegal_op;
6432 }
6433 break;
6434 case 0x1c:
6435 switch(rm) {
6436 case 0: /* feni (287 only, just do nop here) */
6437 break;
6438 case 1: /* fdisi (287 only, just do nop here) */
6439 break;
6440 case 2: /* fclex */
6441 tcg_gen_helper_0_0(helper_fclex);
6442 break;
6443 case 3: /* fninit */
6444 tcg_gen_helper_0_0(helper_fninit);
6445 break;
6446 case 4: /* fsetpm (287 only, just do nop here) */
6447 break;
6448 default:
6449 goto illegal_op;
6450 }
6451 break;
6452 case 0x1d: /* fucomi */
6453 if (s->cc_op != CC_OP_DYNAMIC)
6454 gen_op_set_cc_op(s->cc_op);
6455 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6456 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
6457 s->cc_op = CC_OP_EFLAGS;
6458 break;
6459 case 0x1e: /* fcomi */
6460 if (s->cc_op != CC_OP_DYNAMIC)
6461 gen_op_set_cc_op(s->cc_op);
6462 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6463 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
6464 s->cc_op = CC_OP_EFLAGS;
6465 break;
6466 case 0x28: /* ffree sti */
6467 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
6468 break;
6469 case 0x2a: /* fst sti */
6470 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
6471 break;
6472 case 0x2b: /* fstp sti */
6473 case 0x0b: /* fstp1 sti, undocumented op */
6474 case 0x3a: /* fstp8 sti, undocumented op */
6475 case 0x3b: /* fstp9 sti, undocumented op */
6476 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
6477 tcg_gen_helper_0_0(helper_fpop);
6478 break;
6479 case 0x2c: /* fucom st(i) */
6480 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6481 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6482 break;
6483 case 0x2d: /* fucomp st(i) */
6484 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6485 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6486 tcg_gen_helper_0_0(helper_fpop);
6487 break;
6488 case 0x33: /* de/3 */
6489 switch(rm) {
6490 case 1: /* fcompp */
6491 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
6492 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6493 tcg_gen_helper_0_0(helper_fpop);
6494 tcg_gen_helper_0_0(helper_fpop);
6495 break;
6496 default:
6497 goto illegal_op;
6498 }
6499 break;
6500 case 0x38: /* ffreep sti, undocumented op */
6501 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
6502 tcg_gen_helper_0_0(helper_fpop);
6503 break;
6504 case 0x3c: /* df/4 */
6505 switch(rm) {
6506 case 0:
6507 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
6508 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6509 gen_op_mov_reg_T0(OT_WORD, R_EAX);
6510 break;
6511 default:
6512 goto illegal_op;
6513 }
6514 break;
6515 case 0x3d: /* fucomip */
6516 if (s->cc_op != CC_OP_DYNAMIC)
6517 gen_op_set_cc_op(s->cc_op);
6518 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6519 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
6520 tcg_gen_helper_0_0(helper_fpop);
6521 s->cc_op = CC_OP_EFLAGS;
6522 break;
6523 case 0x3e: /* fcomip */
6524 if (s->cc_op != CC_OP_DYNAMIC)
6525 gen_op_set_cc_op(s->cc_op);
6526 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6527 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
6528 tcg_gen_helper_0_0(helper_fpop);
6529 s->cc_op = CC_OP_EFLAGS;
6530 break;
6531 case 0x10 ... 0x13: /* fcmovxx */
6532 case 0x18 ... 0x1b:
6533 {
6534 int op1, l1;
6535 static const uint8_t fcmov_cc[8] = {
6536 (JCC_B << 1),
6537 (JCC_Z << 1),
6538 (JCC_BE << 1),
6539 (JCC_P << 1),
6540 };
6541 op1 = fcmov_cc[op & 3] | (((op >> 3) & 1) ^ 1);
6542 l1 = gen_new_label();
6543 gen_jcc1(s, s->cc_op, op1, l1);
6544 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
6545 gen_set_label(l1);
6546 }
6547 break;
6548 default:
6549 goto illegal_op;
6550 }
6551 }
6552 break;
6553 /************************/
6554 /* string ops */
6555
6556 case 0xa4: /* movsS */
6557 case 0xa5:
6558 if ((b & 1) == 0)
6559 ot = OT_BYTE;
6560 else
6561 ot = dflag + OT_WORD;
6562
6563 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6564 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6565 } else {
6566 gen_movs(s, ot);
6567 }
6568 break;
6569
6570 case 0xaa: /* stosS */
6571 case 0xab:
6572 if ((b & 1) == 0)
6573 ot = OT_BYTE;
6574 else
6575 ot = dflag + OT_WORD;
6576
6577 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6578 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6579 } else {
6580 gen_stos(s, ot);
6581 }
6582 break;
6583 case 0xac: /* lodsS */
6584 case 0xad:
6585 if ((b & 1) == 0)
6586 ot = OT_BYTE;
6587 else
6588 ot = dflag + OT_WORD;
6589 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6590 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6591 } else {
6592 gen_lods(s, ot);
6593 }
6594 break;
6595 case 0xae: /* scasS */
6596 case 0xaf:
6597 if ((b & 1) == 0)
6598 ot = OT_BYTE;
6599 else
6600 ot = dflag + OT_WORD;
6601 if (prefixes & PREFIX_REPNZ) {
6602 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6603 } else if (prefixes & PREFIX_REPZ) {
6604 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6605 } else {
6606 gen_scas(s, ot);
6607 s->cc_op = CC_OP_SUBB + ot;
6608 }
6609 break;
6610
6611 case 0xa6: /* cmpsS */
6612 case 0xa7:
6613 if ((b & 1) == 0)
6614 ot = OT_BYTE;
6615 else
6616 ot = dflag + OT_WORD;
6617 if (prefixes & PREFIX_REPNZ) {
6618 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6619 } else if (prefixes & PREFIX_REPZ) {
6620 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6621 } else {
6622 gen_cmps(s, ot);
6623 s->cc_op = CC_OP_SUBB + ot;
6624 }
6625 break;
6626 case 0x6c: /* insS */
6627 case 0x6d:
6628 if ((b & 1) == 0)
6629 ot = OT_BYTE;
6630 else
6631 ot = dflag ? OT_LONG : OT_WORD;
6632 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6633 gen_op_andl_T0_ffff();
6634 gen_check_io(s, ot, pc_start - s->cs_base,
6635 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
6636 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6637 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6638 } else {
6639 gen_ins(s, ot);
6640 if (use_icount) {
6641 gen_jmp(s, s->pc - s->cs_base);
6642 }
6643 }
6644 break;
6645 case 0x6e: /* outsS */
6646 case 0x6f:
6647 if ((b & 1) == 0)
6648 ot = OT_BYTE;
6649 else
6650 ot = dflag ? OT_LONG : OT_WORD;
6651 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6652 gen_op_andl_T0_ffff();
6653 gen_check_io(s, ot, pc_start - s->cs_base,
6654 svm_is_rep(prefixes) | 4);
6655 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6656 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6657 } else {
6658 gen_outs(s, ot);
6659 if (use_icount) {
6660 gen_jmp(s, s->pc - s->cs_base);
6661 }
6662 }
6663 break;
6664
6665 /************************/
6666 /* port I/O */
6667
6668 case 0xe4:
6669 case 0xe5:
6670 if ((b & 1) == 0)
6671 ot = OT_BYTE;
6672 else
6673 ot = dflag ? OT_LONG : OT_WORD;
6674 val = ldub_code(s->pc++);
6675 gen_op_movl_T0_im(val);
6676 gen_check_io(s, ot, pc_start - s->cs_base,
6677 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6678 if (use_icount)
6679 gen_io_start();
6680 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6681 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
6682 gen_op_mov_reg_T1(ot, R_EAX);
6683 if (use_icount) {
6684 gen_io_end();
6685 gen_jmp(s, s->pc - s->cs_base);
6686 }
6687 break;
6688 case 0xe6:
6689 case 0xe7:
6690 if ((b & 1) == 0)
6691 ot = OT_BYTE;
6692 else
6693 ot = dflag ? OT_LONG : OT_WORD;
6694 val = ldub_code(s->pc++);
6695 gen_op_movl_T0_im(val);
6696 gen_check_io(s, ot, pc_start - s->cs_base,
6697 svm_is_rep(prefixes));
6698#ifdef VBOX /* bird: linux is writing to this port for delaying I/O. */
6699 if (val == 0x80)
6700 break;
6701#endif /* VBOX */
6702 gen_op_mov_TN_reg(ot, 1, R_EAX);
6703
6704 if (use_icount)
6705 gen_io_start();
6706 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6707 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6708 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6709 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6710 if (use_icount) {
6711 gen_io_end();
6712 gen_jmp(s, s->pc - s->cs_base);
6713 }
6714 break;
6715 case 0xec:
6716 case 0xed:
6717 if ((b & 1) == 0)
6718 ot = OT_BYTE;
6719 else
6720 ot = dflag ? OT_LONG : OT_WORD;
6721 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6722 gen_op_andl_T0_ffff();
6723 gen_check_io(s, ot, pc_start - s->cs_base,
6724 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6725 if (use_icount)
6726 gen_io_start();
6727 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6728 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
6729 gen_op_mov_reg_T1(ot, R_EAX);
6730 if (use_icount) {
6731 gen_io_end();
6732 gen_jmp(s, s->pc - s->cs_base);
6733 }
6734 break;
6735 case 0xee:
6736 case 0xef:
6737 if ((b & 1) == 0)
6738 ot = OT_BYTE;
6739 else
6740 ot = dflag ? OT_LONG : OT_WORD;
6741 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6742 gen_op_andl_T0_ffff();
6743 gen_check_io(s, ot, pc_start - s->cs_base,
6744 svm_is_rep(prefixes));
6745 gen_op_mov_TN_reg(ot, 1, R_EAX);
6746
6747 if (use_icount)
6748 gen_io_start();
6749 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6750 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6751 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6752 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6753 if (use_icount) {
6754 gen_io_end();
6755 gen_jmp(s, s->pc - s->cs_base);
6756 }
6757 break;
6758
/* ---- Interior of the disas_insn() opcode switch: control-transfer returns.
 * Near RET (0xc2/0xc3), far RET (0xca/0xcb) and IRET (0xcf).  `s` is the
 * per-instruction DisasContext; this code only *emits* TCG ops into the
 * current translation block, it does not execute the guest instruction. */
6759 /************************/
6760 /* control */
6761 case 0xc2: /* ret im */
/* NOTE(review): ldsw_code() sign-extends the 16-bit stack-adjust immediate;
 * the architectural RET imm16 adds an *unsigned* immediate to (R/E)SP —
 * confirm behaviour for imm >= 0x8000 against hardware/upstream. */
6762 val = ldsw_code(s->pc);
6763 s->pc += 2;
6764 gen_pop_T0(s);
/* In 64-bit mode a near RET has a 64-bit default operand size (there is no
 * 32-bit form), so any non-16-bit dflag is promoted to quad (2). */
6765 if (CODE64(s) && s->dflag)
6766 s->dflag = 2;
6767 gen_stack_update(s, val + (2 << s->dflag));
6768 if (s->dflag == 0)
6769 gen_op_andl_T0_ffff();
6770 gen_op_jmp_T0();
6771 gen_eob(s);
6772 break;
6773 case 0xc3: /* ret */
6774 gen_pop_T0(s);
6775 gen_pop_update(s);
6776 if (s->dflag == 0)
6777 gen_op_andl_T0_ffff();
6778 gen_op_jmp_T0();
6779 gen_eob(s);
6780 break;
6781 case 0xca: /* lret im */
6782 val = ldsw_code(s->pc);
6783 s->pc += 2;
/* Shared tail: case 0xcb (plain lret) jumps here with val == 0. */
6784 do_lret:
6785 if (s->pe && !s->vm86) {
/* Protected mode: all selector/privilege checking happens at run time in
 * helper_lret_protected; cc state and EIP must be synced beforehand. */
6786 if (s->cc_op != CC_OP_DYNAMIC)
6787 gen_op_set_cc_op(s->cc_op);
6788 gen_jmp_im(pc_start - s->cs_base);
6789 tcg_gen_helper_0_2(helper_lret_protected,
6790 tcg_const_i32(s->dflag),
6791 tcg_const_i32(val));
6792 } else {
/* Real or v86 mode: pop EIP and CS straight off the stack. */
6793 gen_stack_A0(s);
6794 /* pop offset */
6795 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6796 if (s->dflag == 0)
6797 gen_op_andl_T0_ffff();
6798 /* NOTE: keeping EIP updated is not a problem in case of
6799 exception */
6800 gen_op_jmp_T0();
6801 /* pop selector */
6802 gen_op_addl_A0_im(2 << s->dflag);
6803 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6804 gen_op_movl_seg_T0_vm(R_CS);
6805 /* add stack offset */
6806 gen_stack_update(s, val + (4 << s->dflag));
6807 }
6808 gen_eob(s);
6809 break;
6810 case 0xcb: /* lret */
6811 val = 0;
6812 goto do_lret;
6813 case 0xcf: /* iret */
6814 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET);
6815 if (!s->pe) {
6816 /* real mode */
6817 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6818 s->cc_op = CC_OP_EFLAGS;
6819 } else if (s->vm86) {
/* VBOX: with VME enabled a 16-bit IRET (dflag == 0) in v86 mode is allowed
 * even when IOPL < 3; a 32-bit IRET still #GPs.  Presumably the virtual
 * interrupt flag handling lives in helper_iret_real — verify. */
6820#ifdef VBOX
6821 if (s->iopl != 3 && (!s->vme || s->dflag)) {
6822#else
6823 if (s->iopl != 3) {
6824#endif
6825 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6826 } else {
6827 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6828 s->cc_op = CC_OP_EFLAGS;
6829 }
6830 } else {
6831 if (s->cc_op != CC_OP_DYNAMIC)
6832 gen_op_set_cc_op(s->cc_op);
6833 gen_jmp_im(pc_start - s->cs_base);
6834 tcg_gen_helper_0_2(helper_iret_protected,
6835 tcg_const_i32(s->dflag),
6836 tcg_const_i32(s->pc - s->cs_base));
6837 s->cc_op = CC_OP_EFLAGS;
6838 }
6839 gen_eob(s);
6840 break;
/* ---- Near/far CALL and JMP with immediate targets, plus conditional jumps.
 * `tval` holds the computed target EIP; a 16-bit operand size truncates it
 * with & 0xffff.  The far forms stash selector in T0 / offset in T1 and
 * jump to shared tails (do_lcall / do_ljmp) that live outside this excerpt. */
6841 case 0xe8: /* call im */
6842 {
6843 if (dflag)
6844 tval = (int32_t)insn_get(s, OT_LONG);
6845 else
6846 tval = (int16_t)insn_get(s, OT_WORD);
/* Return address = EIP of the next instruction; pushed before jumping. */
6847 next_eip = s->pc - s->cs_base;
6848 tval += next_eip;
6849 if (s->dflag == 0)
6850 tval &= 0xffff;
6851 gen_movtl_T0_im(next_eip);
6852 gen_push_T0(s);
6853 gen_jmp(s, tval);
6854 }
6855 break;
6856 case 0x9a: /* lcall im */
6857 {
6858 unsigned int selector, offset;
6859
/* Direct far call is invalid in 64-bit mode. */
6860 if (CODE64(s))
6861 goto illegal_op;
6862 ot = dflag ? OT_LONG : OT_WORD;
6863 offset = insn_get(s, ot);
6864 selector = insn_get(s, OT_WORD);
6865
6866 gen_op_movl_T0_im(selector);
6867 gen_op_movl_T1_imu(offset);
6868 }
6869 goto do_lcall;
6870 case 0xe9: /* jmp im */
6871 if (dflag)
6872 tval = (int32_t)insn_get(s, OT_LONG);
6873 else
6874 tval = (int16_t)insn_get(s, OT_WORD);
6875 tval += s->pc - s->cs_base;
6876 if (s->dflag == 0)
6877 tval &= 0xffff;
6878 gen_jmp(s, tval);
6879 break;
6880 case 0xea: /* ljmp im */
6881 {
6882 unsigned int selector, offset;
6883
6884 if (CODE64(s))
6885 goto illegal_op;
6886 ot = dflag ? OT_LONG : OT_WORD;
6887 offset = insn_get(s, ot);
6888 selector = insn_get(s, OT_WORD);
6889
6890 gen_op_movl_T0_im(selector);
6891 gen_op_movl_T1_imu(offset);
6892 }
6893 goto do_ljmp;
6894 case 0xeb: /* jmp Jb */
6895 tval = (int8_t)insn_get(s, OT_BYTE);
6896 tval += s->pc - s->cs_base;
6897 if (s->dflag == 0)
6898 tval &= 0xffff;
6899 gen_jmp(s, tval);
6900 break;
6901 case 0x70 ... 0x7f: /* jcc Jb */
6902 tval = (int8_t)insn_get(s, OT_BYTE);
6903 goto do_jcc;
6904 case 0x180 ... 0x18f: /* jcc Jv */
6905 if (dflag) {
6906 tval = (int32_t)insn_get(s, OT_LONG);
6907 } else {
6908 tval = (int16_t)insn_get(s, OT_WORD);
6909 }
/* Shared tail for both short and near forms; the condition code is the low
 * nibble of opcode `b`, decoded inside gen_jcc(). */
6910 do_jcc:
6911 next_eip = s->pc - s->cs_base;
6912 tval += next_eip;
6913 if (s->dflag == 0)
6914 tval &= 0xffff;
6915 gen_jcc(s, b, tval, next_eip);
6916 break;
6917
/* ---- SETcc and CMOVcc.  CMOV always loads the source operand (so a bad
 * memory operand faults even when the condition is false) and branches
 * around the register write-back when the condition does not hold. */
6918 case 0x190 ... 0x19f: /* setcc Gv */
6919 modrm = ldub_code(s->pc++);
6920 gen_setcc(s, b);
6921 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
6922 break;
6923 case 0x140 ... 0x14f: /* cmov Gv, Ev */
6924 {
6925 int l1;
6926 TCGv t0;
6927
6928 ot = dflag + OT_WORD;
6929 modrm = ldub_code(s->pc++);
6930 reg = ((modrm >> 3) & 7) | rex_r;
6931 mod = (modrm >> 6) & 3;
/* Local temp so the value survives the conditional branch below. */
6932 t0 = tcg_temp_local_new(TCG_TYPE_TL);
6933 if (mod != 3) {
6934 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6935 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
6936 } else {
6937 rm = (modrm & 7) | REX_B(s);
6938 gen_op_mov_v_reg(ot, t0, rm);
6939 }
6940#ifdef TARGET_X86_64
6941 if (ot == OT_LONG) {
6942 /* XXX: specific Intel behaviour ? */
6943 l1 = gen_new_label();
/* Branch over the store when the condition (b ^ 1 inverts it) is false ... */
6944 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6945 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
6946 gen_set_label(l1);
/* ... but the high 32 bits of the destination are zeroed unconditionally. */
6947 tcg_gen_movi_tl(cpu_tmp0, 0);
6948 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
6949 } else
6950#endif
6951 {
6952 l1 = gen_new_label();
6953 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6954 gen_op_mov_reg_v(ot, reg, t0);
6955 gen_set_label(l1);
6956 }
6957 tcg_temp_free(t0);
6958 }
6959 break;
6960
/* ---- EFLAGS manipulation.  POPF selects the writable flag mask by CPL/IOPL
 * (ring 0 may touch IOPL, CPL <= IOPL may touch IF, otherwise neither) and
 * ends the TB because TF may have changed.  The VBOX variants route v86-mode
 * accesses with VME enabled through dedicated *_vme helpers. */
6961 /************************/
6962 /* flags */
6963 case 0x9c: /* pushf */
6964 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF);
/* VBOX: 16-bit pushf is allowed under VME even with IOPL < 3. */
6965#ifdef VBOX
6966 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
6967#else
6968 if (s->vm86 && s->iopl != 3) {
6969#endif
6970 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6971 } else {
6972 if (s->cc_op != CC_OP_DYNAMIC)
6973 gen_op_set_cc_op(s->cc_op);
6974#ifdef VBOX
6975 if (s->vm86 && s->vme && s->iopl != 3)
6976 tcg_gen_helper_1_0(helper_read_eflags_vme, cpu_T[0]);
6977 else
6978#endif
6979 tcg_gen_helper_1_0(helper_read_eflags, cpu_T[0]);
6980 gen_push_T0(s);
6981 }
6982 break;
6983 case 0x9d: /* popf */
6984 gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF);
6985#ifdef VBOX
6986 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
6987#else
6988 if (s->vm86 && s->iopl != 3) {
6989#endif
6990 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6991 } else {
6992 gen_pop_T0(s);
/* Ring 0: full mask including IOPL. */
6993 if (s->cpl == 0) {
6994 if (s->dflag) {
6995 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6996 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
6997 } else {
6998 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6999 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
7000 }
7001 } else {
/* CPL <= IOPL: may change IF but not IOPL. */
7002 if (s->cpl <= s->iopl) {
7003 if (s->dflag) {
7004 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7005 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
7006 } else {
7007 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7008 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
7009 }
7010 } else {
/* Otherwise neither IF nor IOPL is writable. */
7011 if (s->dflag) {
7012 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7013 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
7014 } else {
7015#ifdef VBOX
7016 if (s->vm86 && s->vme)
7017 tcg_gen_helper_0_1(helper_write_eflags_vme, cpu_T[0]);
7018 else
7019#endif
7020 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7021 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
7022 }
7023 }
7024 }
7025 gen_pop_update(s);
7026 s->cc_op = CC_OP_EFLAGS;
7027 /* abort translation because TF flag may change */
7028 gen_jmp_im(s->pc - s->cs_base);
7029 gen_eob(s);
7030 }
7031 break;
7032 case 0x9e: /* sahf */
/* In 64-bit mode SAHF/LAHF require the CPUID LAHF_LM bit. */
7033 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
7034 goto illegal_op;
7035 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
7036 if (s->cc_op != CC_OP_DYNAMIC)
7037 gen_op_set_cc_op(s->cc_op);
/* Keep only O from the computed flags, then merge S/Z/A/P/C from AH. */
7038 gen_compute_eflags(cpu_cc_src);
7039 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
7040 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
7041 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
7042 s->cc_op = CC_OP_EFLAGS;
7043 break;
7044 case 0x9f: /* lahf */
7045 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
7046 goto illegal_op;
7047 if (s->cc_op != CC_OP_DYNAMIC)
7048 gen_op_set_cc_op(s->cc_op);
7049 gen_compute_eflags(cpu_T[0]);
7050 /* Note: gen_compute_eflags() only gives the condition codes */
/* Bit 1 of EFLAGS always reads as 1. */
7051 tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);
7052 gen_op_mov_reg_T0(OT_BYTE, R_AH);
7053 break;
7054 case 0xf5: /* cmc */
7055 if (s->cc_op != CC_OP_DYNAMIC)
7056 gen_op_set_cc_op(s->cc_op);
7057 gen_compute_eflags(cpu_cc_src);
7058 tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
7059 s->cc_op = CC_OP_EFLAGS;
7060 break;
7061 case 0xf8: /* clc */
7062 if (s->cc_op != CC_OP_DYNAMIC)
7063 gen_op_set_cc_op(s->cc_op);
7064 gen_compute_eflags(cpu_cc_src);
7065 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
7066 s->cc_op = CC_OP_EFLAGS;
7067 break;
7068 case 0xf9: /* stc */
7069 if (s->cc_op != CC_OP_DYNAMIC)
7070 gen_op_set_cc_op(s->cc_op);
7071 gen_compute_eflags(cpu_cc_src);
7072 tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
7073 s->cc_op = CC_OP_EFLAGS;
7074 break;
/* The direction flag is stored as a stride (+1 / -1) in env->df. */
7075 case 0xfc: /* cld */
7076 tcg_gen_movi_i32(cpu_tmp2_i32, 1);
7077 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
7078 break;
7079 case 0xfd: /* std */
7080 tcg_gen_movi_i32(cpu_tmp2_i32, -1);
7081 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
7082 break;
7083
/* ---- Bit-test family.  0x1ba is the immediate form (bt/bts/btr/btc Ev,ib);
 * 0x1a3/0x1ab/0x1b3/0x1bb take a register bit index (`op` selects which)
 * and converge on the shared tail `bt_op`.  For memory operands the bit
 * index may address beyond the operand, so the word offset is folded into
 * cpu_A0 before the load. */
7084 /************************/
7085 /* bit operations */
7086 case 0x1ba: /* bt/bts/btr/btc Gv, im */
7087 ot = dflag + OT_WORD;
7088 modrm = ldub_code(s->pc++);
7089 op = (modrm >> 3) & 7;
7090 mod = (modrm >> 6) & 3;
7091 rm = (modrm & 7) | REX_B(s);
7092 if (mod != 3) {
/* The trailing imm8 comes after the modrm bytes: tell the LEA decoder. */
7093 s->rip_offset = 1;
7094 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7095 gen_op_ld_T0_A0(ot + s->mem_index);
7096 } else {
7097 gen_op_mov_TN_reg(ot, 0, rm);
7098 }
7099 /* load shift */
7100 val = ldub_code(s->pc++);
7101 gen_op_movl_T1_im(val);
/* /4../7 encode bt/bts/btr/btc; /0../3 are undefined. */
7102 if (op < 4)
7103 goto illegal_op;
7104 op -= 4;
7105 goto bt_op;
7106 case 0x1a3: /* bt Gv, Ev */
7107 op = 0;
7108 goto do_btx;
7109 case 0x1ab: /* bts */
7110 op = 1;
7111 goto do_btx;
7112 case 0x1b3: /* btr */
7113 op = 2;
7114 goto do_btx;
7115 case 0x1bb: /* btc */
7116 op = 3;
7117 do_btx:
7118 ot = dflag + OT_WORD;
7119 modrm = ldub_code(s->pc++);
7120 reg = ((modrm >> 3) & 7) | rex_r;
7121 mod = (modrm >> 6) & 3;
7122 rm = (modrm & 7) | REX_B(s);
7123 gen_op_mov_TN_reg(OT_LONG, 1, reg);
7124 if (mod != 3) {
7125 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7126 /* specific case: we need to add a displacement */
/* Word offset = (signed bit index >> log2(bits)) * operand size. */
7127 gen_exts(ot, cpu_T[1]);
7128 tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
7129 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
7130 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
7131 gen_op_ld_T0_A0(ot + s->mem_index);
7132 } else {
7133 gen_op_mov_TN_reg(ot, 0, rm);
7134 }
7135 bt_op:
/* Reduce the bit index modulo the operand width. */
7136 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
7137 switch(op) {
7138 case 0:
/* bt: only CF is produced (tested bit shifted into bit 0 of cc_src). */
7139 tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
7140 tcg_gen_movi_tl(cpu_cc_dst, 0);
7141 break;
7142 case 1:
/* bts: remember the old bit in cpu_tmp4, then set it. */
7143 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7144 tcg_gen_movi_tl(cpu_tmp0, 1);
7145 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7146 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7147 break;
7148 case 2:
/* btr: clear the bit. */
7149 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7150 tcg_gen_movi_tl(cpu_tmp0, 1);
7151 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7152 tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
7153 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7154 break;
7155 default:
7156 case 3:
/* btc: toggle the bit. */
7157 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7158 tcg_gen_movi_tl(cpu_tmp0, 1);
7159 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7160 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7161 break;
7162 }
7163 s->cc_op = CC_OP_SARB + ot;
/* Modifying forms write the result back and then publish the saved old
 * bit as the carry source. */
7164 if (op != 0) {
7165 if (mod != 3)
7166 gen_op_st_T0_A0(ot + s->mem_index);
7167 else
7168 gen_op_mov_reg_T0(ot, rm);
7169 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
7170 tcg_gen_movi_tl(cpu_cc_dst, 0);
7171 }
7172 break;
7173 case 0x1bc: /* bsf */
7174 case 0x1bd: /* bsr */
7175 {
7176 int label1;
7177 TCGv t0;
7178
7179 ot = dflag + OT_WORD;
7180 modrm = ldub_code(s->pc++);
7181 reg = ((modrm >> 3) & 7) | rex_r;
7182 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
7183 gen_extu(ot, cpu_T[0]);
7184 label1 = gen_new_label();
/* ZF source: 0 when the operand is zero (destination untouched then). */
7185 tcg_gen_movi_tl(cpu_cc_dst, 0);
7186 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7187 tcg_gen_mov_tl(t0, cpu_T[0]);
7188 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, label1);
7189 if (b & 1) {
7190 tcg_gen_helper_1_1(helper_bsr, cpu_T[0], t0);
7191 } else {
7192 tcg_gen_helper_1_1(helper_bsf, cpu_T[0], t0);
7193 }
7194 gen_op_mov_reg_T0(ot, reg);
7195 tcg_gen_movi_tl(cpu_cc_dst, 1);
7196 gen_set_label(label1);
7197 tcg_gen_discard_tl(cpu_cc_src);
7198 s->cc_op = CC_OP_LOGICB + ot;
7199 tcg_temp_free(t0);
7200 }
7201 break;
/* ---- BCD adjustments (invalid in 64-bit mode; implemented entirely in
 * run-time helpers) followed by assorted one-off opcodes.  AAM with a zero
 * immediate raises #DE, matching the architectural divide-by-zero case. */
7202 /************************/
7203 /* bcd */
7204 case 0x27: /* daa */
7205 if (CODE64(s))
7206 goto illegal_op;
7207 if (s->cc_op != CC_OP_DYNAMIC)
7208 gen_op_set_cc_op(s->cc_op);
7209 tcg_gen_helper_0_0(helper_daa);
7210 s->cc_op = CC_OP_EFLAGS;
7211 break;
7212 case 0x2f: /* das */
7213 if (CODE64(s))
7214 goto illegal_op;
7215 if (s->cc_op != CC_OP_DYNAMIC)
7216 gen_op_set_cc_op(s->cc_op);
7217 tcg_gen_helper_0_0(helper_das);
7218 s->cc_op = CC_OP_EFLAGS;
7219 break;
7220 case 0x37: /* aaa */
7221 if (CODE64(s))
7222 goto illegal_op;
7223 if (s->cc_op != CC_OP_DYNAMIC)
7224 gen_op_set_cc_op(s->cc_op);
7225 tcg_gen_helper_0_0(helper_aaa);
7226 s->cc_op = CC_OP_EFLAGS;
7227 break;
7228 case 0x3f: /* aas */
7229 if (CODE64(s))
7230 goto illegal_op;
7231 if (s->cc_op != CC_OP_DYNAMIC)
7232 gen_op_set_cc_op(s->cc_op);
7233 tcg_gen_helper_0_0(helper_aas);
7234 s->cc_op = CC_OP_EFLAGS;
7235 break;
7236 case 0xd4: /* aam */
7237 if (CODE64(s))
7238 goto illegal_op;
7239 val = ldub_code(s->pc++);
7240 if (val == 0) {
7241 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
7242 } else {
7243 tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
7244 s->cc_op = CC_OP_LOGICB;
7245 }
7246 break;
7247 case 0xd5: /* aad */
7248 if (CODE64(s))
7249 goto illegal_op;
7250 val = ldub_code(s->pc++);
7251 tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
7252 s->cc_op = CC_OP_LOGICB;
7253 break;
7254 /************************/
7255 /* misc */
7256 case 0x90: /* nop */
7257 /* XXX: xchg + rex handling */
7258 /* XXX: correct lock test for all insn */
7259 if (prefixes & PREFIX_LOCK)
7260 goto illegal_op;
/* rep nop == PAUSE: only relevant for the SVM intercept check. */
7261 if (prefixes & PREFIX_REPZ) {
7262 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
7263 }
7264 break;
7265 case 0x9b: /* fwait */
/* With CR0.MP and CR0.TS both set, WAIT raises #NM (device not available
 * pre-exception, EXCP07). */
7266 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
7267 (HF_MP_MASK | HF_TS_MASK)) {
7268 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7269 } else {
7270 if (s->cc_op != CC_OP_DYNAMIC)
7271 gen_op_set_cc_op(s->cc_op);
7272 gen_jmp_im(pc_start - s->cs_base);
7273 tcg_gen_helper_0_0(helper_fwait);
7274 }
7275 break;
7276 case 0xcc: /* int3 */
/* VBOX: in v86 mode without VME and IOPL < 3, software interrupts #GP. */
7277#ifdef VBOX
7278 if (s->vm86 && s->iopl != 3 && !s->vme) {
7279 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7280 } else
7281#endif
7282 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
7283 break;
7284 case 0xcd: /* int N */
7285 val = ldub_code(s->pc++);
7286#ifdef VBOX
7287 if (s->vm86 && s->iopl != 3 && !s->vme) {
7288#else
7289 if (s->vm86 && s->iopl != 3) {
7290#endif
7291 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7292 } else {
7293 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
7294 }
7295 break;
7296 case 0xce: /* into */
7297 if (CODE64(s))
7298 goto illegal_op;
7299 if (s->cc_op != CC_OP_DYNAMIC)
7300 gen_op_set_cc_op(s->cc_op);
7301 gen_jmp_im(pc_start - s->cs_base);
/* Helper receives the instruction length to compute the return EIP. */
7302 tcg_gen_helper_0_1(helper_into, tcg_const_i32(s->pc - pc_start));
7303 break;
7304 case 0xf1: /* icebp (undocumented, exits to external debugger) */
7305 gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP);
7306#if 1
7307 gen_debug(s, pc_start - s->cs_base);
7308#else
7309 /* start debug */
7310 tb_flush(cpu_single_env);
7311 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
7312#endif
7313 break;
/* ---- Interrupt-flag control, BOUND, BSWAP and the undocumented SALC.
 * STI must open a one-instruction interrupt shadow, hence the inhibit-IRQ
 * bookkeeping and forced end of the translation block.  VBOX adds VME
 * variants that maintain the virtual interrupt flag via helpers. */
7314 case 0xfa: /* cli */
7315 if (!s->vm86) {
7316 if (s->cpl <= s->iopl) {
7317 tcg_gen_helper_0_0(helper_cli);
7318 } else {
7319 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7320 }
7321 } else {
7322 if (s->iopl == 3) {
7323 tcg_gen_helper_0_0(helper_cli);
7324#ifdef VBOX
7325 } else if (s->iopl != 3 && s->vme) {
7326 tcg_gen_helper_0_0(helper_cli_vme);
7327#endif
7328 } else {
7329 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7330 }
7331 }
7332 break;
7333 case 0xfb: /* sti */
7334 if (!s->vm86) {
7335 if (s->cpl <= s->iopl) {
/* Shared tail: also reached from the v86 IOPL == 3 path below. */
7336 gen_sti:
7337 tcg_gen_helper_0_0(helper_sti);
7338 /* interruptions are enabled only the first insn after sti */
7339 /* If several instructions disable interrupts, only the
7340 _first_ does it */
7341 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
7342 tcg_gen_helper_0_0(helper_set_inhibit_irq);
7343 /* give a chance to handle pending irqs */
7344 gen_jmp_im(s->pc - s->cs_base);
7345 gen_eob(s);
7346 } else {
7347 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7348 }
7349 } else {
7350 if (s->iopl == 3) {
7351 goto gen_sti;
7352#ifdef VBOX
7353 } else if (s->iopl != 3 && s->vme) {
7354 tcg_gen_helper_0_0(helper_sti_vme);
7355 /* give a chance to handle pending irqs */
7356 gen_jmp_im(s->pc - s->cs_base);
7357 gen_eob(s);
7358#endif
7359 } else {
7360 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7361 }
7362 }
7363 break;
7364 case 0x62: /* bound */
7365 if (CODE64(s))
7366 goto illegal_op;
7367 ot = dflag ? OT_LONG : OT_WORD;
7368 modrm = ldub_code(s->pc++);
7369 reg = (modrm >> 3) & 7;
7370 mod = (modrm >> 6) & 3;
/* BOUND requires a memory operand (the bounds pair). */
7371 if (mod == 3)
7372 goto illegal_op;
7373 gen_op_mov_TN_reg(ot, 0, reg);
7374 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7375 gen_jmp_im(pc_start - s->cs_base);
7376 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7377 if (ot == OT_WORD)
7378 tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
7379 else
7380 tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
7381 break;
7382 case 0x1c8 ... 0x1cf: /* bswap reg */
7383 reg = (b & 7) | REX_B(s);
7384#ifdef TARGET_X86_64
7385 if (dflag == 2) {
7386 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
7387 tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
7388 gen_op_mov_reg_T0(OT_QUAD, reg);
7389 } else
7390 {
/* 32-bit bswap on a 64-bit build: swap in an i32 temp, zero-extending the
 * result back into the 64-bit T0. */
7391 TCGv tmp0;
7392 gen_op_mov_TN_reg(OT_LONG, 0, reg);
7393
7394 tmp0 = tcg_temp_new(TCG_TYPE_I32);
7395 tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
7396 tcg_gen_bswap_i32(tmp0, tmp0);
7397 tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
7398 gen_op_mov_reg_T0(OT_LONG, reg);
7399 }
7400#else
7401 {
7402 gen_op_mov_TN_reg(OT_LONG, 0, reg);
7403 tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
7404 gen_op_mov_reg_T0(OT_LONG, reg);
7405 }
7406#endif
7407 break;
7408 case 0xd6: /* salc */
7409 if (CODE64(s))
7410 goto illegal_op;
7411 if (s->cc_op != CC_OP_DYNAMIC)
7412 gen_op_set_cc_op(s->cc_op);
/* AL = CF ? 0xff : 0x00, via negating the 0/1 carry value. */
7413 gen_compute_eflags_c(cpu_T[0]);
7414 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
7415 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
7416 break;
/* ---- LOOPcc/JECXZ (built from three TCG labels: l1 = branch taken,
 * l2 = common exit, l3 = loopnz/loopz not-taken path) and the MSR/TSC
 * opcodes, which are all delegated to run-time helpers. */
7417 case 0xe0: /* loopnz */
7418 case 0xe1: /* loopz */
7419 case 0xe2: /* loop */
7420 case 0xe3: /* jecxz */
7421 {
7422 int l1, l2, l3;
7423
7424 tval = (int8_t)insn_get(s, OT_BYTE);
7425 next_eip = s->pc - s->cs_base;
7426 tval += next_eip;
7427 if (s->dflag == 0)
7428 tval &= 0xffff;
7429
7430 l1 = gen_new_label();
7431 l2 = gen_new_label();
7432 l3 = gen_new_label();
7433 b &= 3;
7434 switch(b) {
7435 case 0: /* loopnz */
7436 case 1: /* loopz */
7437 if (s->cc_op != CC_OP_DYNAMIC)
7438 gen_op_set_cc_op(s->cc_op);
/* Decrement (E)CX; if it hit zero, fall through (not taken). */
7439 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7440 gen_op_jz_ecx(s->aflag, l3);
/* Then test ZF: loopnz takes when ZF == 0, loopz when ZF == 1. */
7441 gen_compute_eflags(cpu_tmp0);
7442 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
7443 if (b == 0) {
7444 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
7445 } else {
7446 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, l1);
7447 }
7448 break;
7449 case 2: /* loop */
7450 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7451 gen_op_jnz_ecx(s->aflag, l1);
7452 break;
7453 default:
7454 case 3: /* jcxz */
7455 gen_op_jz_ecx(s->aflag, l1);
7456 break;
7457 }
7458
7459 gen_set_label(l3);
7460 gen_jmp_im(next_eip);
7461 tcg_gen_br(l2);
7462
7463 gen_set_label(l1);
7464 gen_jmp_im(tval);
7465 gen_set_label(l2);
7466 gen_eob(s);
7467 }
7468 break;
7469 case 0x130: /* wrmsr */
7470 case 0x132: /* rdmsr */
/* MSR access is ring-0 only. */
7471 if (s->cpl != 0) {
7472 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7473 } else {
7474 if (s->cc_op != CC_OP_DYNAMIC)
7475 gen_op_set_cc_op(s->cc_op);
7476 gen_jmp_im(pc_start - s->cs_base);
7477 if (b & 2) {
7478 tcg_gen_helper_0_0(helper_rdmsr);
7479 } else {
7480 tcg_gen_helper_0_0(helper_wrmsr);
7481 }
7482 }
7483 break;
7484 case 0x131: /* rdtsc */
7485 if (s->cc_op != CC_OP_DYNAMIC)
7486 gen_op_set_cc_op(s->cc_op);
7487 gen_jmp_im(pc_start - s->cs_base);
/* Under icount, bracket the helper so the virtual TSC is deterministic,
 * and end the TB right after the I/O window. */
7488 if (use_icount)
7489 gen_io_start();
7490 tcg_gen_helper_0_0(helper_rdtsc);
7491 if (use_icount) {
7492 gen_io_end();
7493 gen_jmp(s, s->pc - s->cs_base);
7494 }
7495 break;
7496 case 0x133: /* rdpmc */
7497 if (s->cc_op != CC_OP_DYNAMIC)
7498 gen_op_set_cc_op(s->cc_op);
7499 gen_jmp_im(pc_start - s->cs_base);
7500 tcg_gen_helper_0_0(helper_rdpmc);
7501 break;
/* ---- Fast system-call instructions plus CPUID and HLT.  SYSENTER/SYSEXIT
 * are refused in 64-bit mode on the VBOX build (upstream allows them on
 * Intel CPUs — see the @todo).  All of them end the TB since they change
 * privilege/control flow in the helper. */
7502 case 0x134: /* sysenter */
7503#ifndef VBOX
7504 /* For Intel SYSENTER is valid on 64-bit */
7505 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
7506#else
7507 /** @todo: make things right */
7508 if (CODE64(s))
7509#endif
7510 goto illegal_op;
7511 if (!s->pe) {
7512 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7513 } else {
/* cc state becomes unknown across the helper, hence CC_OP_DYNAMIC. */
7514 if (s->cc_op != CC_OP_DYNAMIC) {
7515 gen_op_set_cc_op(s->cc_op);
7516 s->cc_op = CC_OP_DYNAMIC;
7517 }
7518 gen_jmp_im(pc_start - s->cs_base);
7519 tcg_gen_helper_0_0(helper_sysenter);
7520 gen_eob(s);
7521 }
7522 break;
7523 case 0x135: /* sysexit */
7524#ifndef VBOX
7525 /* For Intel SYSEXIT is valid on 64-bit */
7526 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
7527#else
7528 /** @todo: make things right */
7529 if (CODE64(s))
7530#endif
7531 goto illegal_op;
7532 if (!s->pe) {
7533 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7534 } else {
7535 if (s->cc_op != CC_OP_DYNAMIC) {
7536 gen_op_set_cc_op(s->cc_op);
7537 s->cc_op = CC_OP_DYNAMIC;
7538 }
7539 gen_jmp_im(pc_start - s->cs_base);
7540 tcg_gen_helper_0_1(helper_sysexit, tcg_const_i32(dflag));
7541 gen_eob(s);
7542 }
7543 break;
7544#ifdef TARGET_X86_64
7545 case 0x105: /* syscall */
7546 /* XXX: is it usable in real mode ? */
7547 if (s->cc_op != CC_OP_DYNAMIC) {
7548 gen_op_set_cc_op(s->cc_op);
7549 s->cc_op = CC_OP_DYNAMIC;
7550 }
7551 gen_jmp_im(pc_start - s->cs_base);
/* Helper gets the instruction length so it can store the return RIP. */
7552 tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
7553 gen_eob(s);
7554 break;
7555 case 0x107: /* sysret */
7556 if (!s->pe) {
7557 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7558 } else {
7559 if (s->cc_op != CC_OP_DYNAMIC) {
7560 gen_op_set_cc_op(s->cc_op);
7561 s->cc_op = CC_OP_DYNAMIC;
7562 }
7563 gen_jmp_im(pc_start - s->cs_base);
7564 tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
7565 /* condition codes are modified only in long mode */
7566 if (s->lma)
7567 s->cc_op = CC_OP_EFLAGS;
7568 gen_eob(s);
7569 }
7570 break;
7571#endif
7572 case 0x1a2: /* cpuid */
7573 if (s->cc_op != CC_OP_DYNAMIC)
7574 gen_op_set_cc_op(s->cc_op);
7575 gen_jmp_im(pc_start - s->cs_base);
7576 tcg_gen_helper_0_0(helper_cpuid);
7577 break;
7578 case 0xf4: /* hlt */
7579 if (s->cpl != 0) {
7580 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7581 } else {
7582 if (s->cc_op != CC_OP_DYNAMIC)
7583 gen_op_set_cc_op(s->cc_op);
7584 gen_jmp_im(pc_start - s->cs_base);
7585 tcg_gen_helper_0_1(helper_hlt, tcg_const_i32(s->pc - pc_start));
/* is_jmp == 3: stop translation without emitting a goto_tb. */
7586 s->is_jmp = 3;
7587 }
7588 break;
/* ---- Opcode 0x0f 0x00 group: LDT/TR selector access and descriptor
 * validation.  Reads (sldt/str) just load the selector field from the env;
 * writes (lldt/ltr) are ring-0 only and go through helpers.  All forms
 * require protected mode outside v86. */
7589 case 0x100:
7590 modrm = ldub_code(s->pc++);
7591 mod = (modrm >> 6) & 3;
7592 op = (modrm >> 3) & 7;
7593 switch(op) {
7594 case 0: /* sldt */
7595 if (!s->pe || s->vm86)
7596 goto illegal_op;
7597 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ);
7598 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
7599 ot = OT_WORD;
/* Register destination may take the full operand size; memory always word. */
7600 if (mod == 3)
7601 ot += s->dflag;
7602 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
7603 break;
7604 case 2: /* lldt */
7605 if (!s->pe || s->vm86)
7606 goto illegal_op;
7607 if (s->cpl != 0) {
7608 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7609 } else {
7610 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE);
7611 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7612 gen_jmp_im(pc_start - s->cs_base);
7613 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7614 tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
7615 }
7616 break;
7617 case 1: /* str */
7618 if (!s->pe || s->vm86)
7619 goto illegal_op;
7620 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ);
7621 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
7622 ot = OT_WORD;
7623 if (mod == 3)
7624 ot += s->dflag;
7625 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
7626 break;
7627 case 3: /* ltr */
7628 if (!s->pe || s->vm86)
7629 goto illegal_op;
7630 if (s->cpl != 0) {
7631 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7632 } else {
7633 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE);
7634 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7635 gen_jmp_im(pc_start - s->cs_base);
7636 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7637 tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
7638 }
7639 break;
7640 case 4: /* verr */
7641 case 5: /* verw */
7642 if (!s->pe || s->vm86)
7643 goto illegal_op;
7644 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7645 if (s->cc_op != CC_OP_DYNAMIC)
7646 gen_op_set_cc_op(s->cc_op);
/* The helpers report the result through ZF, hence CC_OP_EFLAGS. */
7647 if (op == 4)
7648 tcg_gen_helper_0_1(helper_verr, cpu_T[0]);
7649 else
7650 tcg_gen_helper_0_1(helper_verw, cpu_T[0]);
7651 s->cc_op = CC_OP_EFLAGS;
7652 break;
7653 default:
7654 goto illegal_op;
7655 }
7656 break;
/* ---- Opcode 0x0f 0x01 group: descriptor-table load/store, MONITOR/MWAIT,
 * the AMD SVM instruction family (encoded as mod == 3 forms of /2 and /3),
 * SMSW/LMSW and INVLPG/SWAPGS.  The VBOX build also special-cases the full
 * modrm byte 0xf9 as RDTSCP before the normal /op dispatch. */
7657 case 0x101:
7658 modrm = ldub_code(s->pc++);
7659 mod = (modrm >> 6) & 3;
7660 op = (modrm >> 3) & 7;
7661 rm = modrm & 7;
7662
7663#ifdef VBOX
7664 /* 0f 01 f9 */
7665 if (modrm == 0xf9)
7666 {
7667 if (!(s->cpuid_ext2_features & CPUID_EXT2_RDTSCP))
7668 goto illegal_op;
7669 gen_jmp_im(pc_start - s->cs_base);
7670 tcg_gen_helper_0_0(helper_rdtscp);
7671 break;
7672 }
7673#endif
7674 switch(op) {
7675 case 0: /* sgdt */
7676 if (mod == 3)
7677 goto illegal_op;
7678 gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ);
7679 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
/* Store 16-bit limit, then the base; a 16-bit operand size stores only
 * the low 24 bits of the base (hence the 0xffffff mask). */
7680 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
7681 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7682 gen_add_A0_im(s, 2);
7683 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
7684 if (!s->dflag)
7685 gen_op_andl_T0_im(0xffffff)
;
7686 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7687 break;
7688 case 1:
7689 if (mod == 3) {
7690 switch (rm) {
7691 case 0: /* monitor */
7692 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7693 s->cpl != 0)
7694 goto illegal_op;
7695 if (s->cc_op != CC_OP_DYNAMIC)
7696 gen_op_set_cc_op(s->cc_op);
7697 gen_jmp_im(pc_start - s->cs_base);
/* The monitored address comes from (R/E)AX plus the DS segment base. */
7698#ifdef TARGET_X86_64
7699 if (s->aflag == 2) {
7700 gen_op_movq_A0_reg(R_EAX);
7701 } else
7702#endif
7703 {
7704 gen_op_movl_A0_reg(R_EAX);
7705 if (s->aflag == 0)
7706 gen_op_andl_A0_ffff();
7707 }
7708 gen_add_A0_ds_seg(s);
7709 tcg_gen_helper_0_1(helper_monitor, cpu_A0);
7710 break;
7711 case 1: /* mwait */
7712 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7713 s->cpl != 0)
7714 goto illegal_op;
7715 if (s->cc_op != CC_OP_DYNAMIC) {
7716 gen_op_set_cc_op(s->cc_op);
7717 s->cc_op = CC_OP_DYNAMIC;
7718 }
7719 gen_jmp_im(pc_start - s->cs_base);
7720 tcg_gen_helper_0_1(helper_mwait, tcg_const_i32(s->pc - pc_start));
7721 gen_eob(s);
7722 break;
7723 default:
7724 goto illegal_op;
7725 }
7726 } else { /* sidt */
7727 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ);
7728 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7729 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
7730 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7731 gen_add_A0_im(s, 2);
7732 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
7733 if (!s->dflag)
7734 gen_op_andl_T0_im(0xffffff);
7735 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7736 }
7737 break;
7738 case 2: /* lgdt */
7739 case 3: /* lidt */
/* mod == 3 encodings of /2 and /3 are the SVM instructions. */
7740 if (mod == 3) {
7741 if (s->cc_op != CC_OP_DYNAMIC)
7742 gen_op_set_cc_op(s->cc_op);
7743 gen_jmp_im(pc_start - s->cs_base);
7744 switch(rm) {
7745 case 0: /* VMRUN */
7746 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7747 goto illegal_op;
7748 if (s->cpl != 0) {
7749 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7750 break;
7751 } else {
7752 tcg_gen_helper_0_2(helper_vmrun,
7753 tcg_const_i32(s->aflag),
7754 tcg_const_i32(s->pc - pc_start));
7755 tcg_gen_exit_tb(0);
7756 s->is_jmp = 3;
7757 }
7758 break;
7759 case 1: /* VMMCALL */
7760 if (!(s->flags & HF_SVME_MASK))
7761 goto illegal_op;
7762 tcg_gen_helper_0_0(helper_vmmcall);
7763 break;
7764 case 2: /* VMLOAD */
7765 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7766 goto illegal_op;
7767 if (s->cpl != 0) {
7768 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7769 break;
7770 } else {
7771 tcg_gen_helper_0_1(helper_vmload,
7772 tcg_const_i32(s->aflag));
7773 }
7774 break;
7775 case 3: /* VMSAVE */
7776 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7777 goto illegal_op;
7778 if (s->cpl != 0) {
7779 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7780 break;
7781 } else {
7782 tcg_gen_helper_0_1(helper_vmsave,
7783 tcg_const_i32(s->aflag));
7784 }
7785 break;
7786 case 4: /* STGI */
/* STGI/SKINIT are also legal with just the SKINIT CPUID bit. */
7787 if ((!(s->flags & HF_SVME_MASK) &&
7788 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7789 !s->pe)
7790 goto illegal_op;
7791 if (s->cpl != 0) {
7792 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7793 break;
7794 } else {
7795 tcg_gen_helper_0_0(helper_stgi);
7796 }
7797 break;
7798 case 5: /* CLGI */
7799 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7800 goto illegal_op;
7801 if (s->cpl != 0) {
7802 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7803 break;
7804 } else {
7805 tcg_gen_helper_0_0(helper_clgi);
7806 }
7807 break;
7808 case 6: /* SKINIT */
7809 if ((!(s->flags & HF_SVME_MASK) &&
7810 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7811 !s->pe)
7812 goto illegal_op;
7813 tcg_gen_helper_0_0(helper_skinit);
7814 break;
7815 case 7: /* INVLPGA */
7816 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7817 goto illegal_op;
7818 if (s->cpl != 0) {
7819 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7820 break;
7821 } else {
7822 tcg_gen_helper_0_1(helper_invlpga,
7823 tcg_const_i32(s->aflag));
7824 }
7825 break;
7826 default:
7827 goto illegal_op;
7828 }
7829 } else if (s->cpl != 0) {
7830 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7831 } else {
/* Memory form: load limit (word) and base; 16-bit operand size keeps
 * only 24 base bits, mirroring sgdt/sidt above. */
7832 gen_svm_check_intercept(s, pc_start,
7833 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE);
7834 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7835 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
7836 gen_add_A0_im(s, 2);
7837 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7838 if (!s->dflag)
7839 gen_op_andl_T0_im(0xffffff);
7840 if (op == 2) {
7841 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
7842 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
7843 } else {
7844 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
7845 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
7846 }
7847 }
7848 break;
7849 case 4: /* smsw */
7850 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0);
7851 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
7852 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
7853 break;
7854 case 6: /* lmsw */
7855 if (s->cpl != 0) {
7856 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7857 } else {
7858 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
7859 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7860 tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
/* CR0 changes can affect translation flags — end the TB. */
7861 gen_jmp_im(s->pc - s->cs_base);
7862 gen_eob(s);
7863 }
7864 break;
7865 case 7: /* invlpg */
7866 if (s->cpl != 0) {
7867 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7868 } else {
7869 if (mod == 3) {
7870#ifdef TARGET_X86_64
7871 if (CODE64(s) && rm == 0) {
7872 /* swapgs */
/* Exchange GS.base with the kernelgsbase MSR value. */
7873 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7874 tcg_gen_ld_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,kernelgsbase));
7875 tcg_gen_st_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7876 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,kernelgsbase));
7877 } else
7878#endif
7879 {
7880 goto illegal_op;
7881 }
7882 } else {
7883 if (s->cc_op != CC_OP_DYNAMIC)
7884 gen_op_set_cc_op(s->cc_op);
7885 gen_jmp_im(pc_start - s->cs_base);
7886 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7887 tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
7888 gen_jmp_im(s->pc - s->cs_base);
7889 gen_eob(s);
7890 }
7891 }
7892 break;
7893 default:
7894 goto illegal_op;
7895 }
7896 break;
/* ---- INVD/WBINVD (ring-0 no-ops besides the SVM intercept) and opcode
 * 0x63, which is MOVSXD in 64-bit mode but ARPL elsewhere.  The VBOX build
 * copies cpu_A0 into a local temp because cpu_A0 is not a local TCG temp
 * and would not survive the conditional branch (see inline comment). */
7897 case 0x108: /* invd */
7898 case 0x109: /* wbinvd */
7899 if (s->cpl != 0) {
7900 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7901 } else {
7902 gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD);
7903 /* nothing to do */
7904 }
7905 break;
7906 case 0x63: /* arpl or movslS (x86_64) */
7907#ifdef TARGET_X86_64
7908 if (CODE64(s)) {
7909 int d_ot;
7910 /* d_ot is the size of destination */
7911 d_ot = dflag + OT_WORD;
7912
7913 modrm = ldub_code(s->pc++);
7914 reg = ((modrm >> 3) & 7) | rex_r;
7915 mod = (modrm >> 6) & 3;
7916 rm = (modrm & 7) | REX_B(s);
7917
7918 if (mod == 3) {
7919 gen_op_mov_TN_reg(OT_LONG, 0, rm);
7920 /* sign extend */
7921 if (d_ot == OT_QUAD)
7922 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
7923 gen_op_mov_reg_T0(d_ot, reg);
7924 } else {
7925 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7926 if (d_ot == OT_QUAD) {
7927 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
7928 } else {
7929 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
7930 }
7931 gen_op_mov_reg_T0(d_ot, reg);
7932 }
7933 } else
7934#endif
7935 {
/* ARPL: if dest RPL < src RPL, raise dest RPL to src RPL and set ZF;
 * otherwise clear ZF.  t2 carries the resulting Z bit into cc_src. */
7936 int label1;
7937 TCGv t0, t1, t2, a0;
7938
7939 if (!s->pe || s->vm86)
7940 goto illegal_op;
7941
7942 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7943 t1 = tcg_temp_local_new(TCG_TYPE_TL);
7944 t2 = tcg_temp_local_new(TCG_TYPE_TL);
7945#ifdef VBOX
7946 a0 = tcg_temp_local_new(TCG_TYPE_TL);
7947#endif
7948 ot = OT_WORD;
7949 modrm = ldub_code(s->pc++);
7950 reg = (modrm >> 3) & 7;
7951 mod = (modrm >> 6) & 3;
7952 rm = modrm & 7;
7953 if (mod != 3) {
7954 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7955#ifdef VBOX
7956 tcg_gen_mov_tl(a0, cpu_A0);
7957#endif
7958 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
7959 } else {
7960 gen_op_mov_v_reg(ot, t0, rm);
7961 }
7962 gen_op_mov_v_reg(ot, t1, reg);
/* Compare the two RPL fields (low 2 bits of each selector). */
7963 tcg_gen_andi_tl(cpu_tmp0, t0, 3);
7964 tcg_gen_andi_tl(t1, t1, 3);
7965 tcg_gen_movi_tl(t2, 0);
7966 label1 = gen_new_label();
7967 tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, t1, label1);
7968 tcg_gen_andi_tl(t0, t0, ~3);
7969 tcg_gen_or_tl(t0, t0, t1);
7970 tcg_gen_movi_tl(t2, CC_Z);
7971 gen_set_label(label1);
7972 if (mod != 3) {
7973#ifdef VBOX
7974 /* cpu_A0 doesn't survive branch */
7975 gen_op_st_v(ot + s->mem_index, t0, a0);
7976#else
7977 gen_op_st_v(ot + s->mem_index, t0, cpu_A0);
7978#endif
7979 } else {
7980 gen_op_mov_reg_v(ot, rm, t0);
7981 }
/* Merge the computed Z bit into the flags. */
7982 if (s->cc_op != CC_OP_DYNAMIC)
7983 gen_op_set_cc_op(s->cc_op);
7984 gen_compute_eflags(cpu_cc_src);
7985 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
7986 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t2);
7987 s->cc_op = CC_OP_EFLAGS;
7988 tcg_temp_free(t0);
7989 tcg_temp_free(t1);
7990 tcg_temp_free(t2);
7991#ifdef VBOX
7992 tcg_temp_free(a0);
7993#endif
7994 }
7995 break;
7996 case 0x102: /* lar */
7997 case 0x103: /* lsl */
7998 {
7999 int label1;
8000 TCGv t0;
8001 if (!s->pe || s->vm86)
8002 goto illegal_op;
8003 ot = dflag ? OT_LONG : OT_WORD;
8004 modrm = ldub_code(s->pc++);
8005 reg = ((modrm >> 3) & 7) | rex_r;
8006 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
8007 t0 = tcg_temp_local_new(TCG_TYPE_TL);
8008 if (s->cc_op != CC_OP_DYNAMIC)
8009 gen_op_set_cc_op(s->cc_op);
8010 if (b == 0x102)
8011 tcg_gen_helper_1_1(helper_lar, t0, cpu_T[0]);
8012 else
8013 tcg_gen_helper_1_1(helper_lsl, t0, cpu_T[0]);
8014 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
8015 label1 = gen_new_label();
8016 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
8017 gen_op_mov_reg_v(ot, reg, t0);
8018 gen_set_label(label1);
8019 s->cc_op = CC_OP_EFLAGS;
8020 tcg_temp_free(t0);
8021 }
8022 break;
8023 case 0x118:
8024 modrm = ldub_code(s->pc++);
8025 mod = (modrm >> 6) & 3;
8026 op = (modrm >> 3) & 7;
8027 switch(op) {
8028 case 0: /* prefetchnta */
8029 case 1: /* prefetchnt0 */
8030 case 2: /* prefetchnt0 */
8031 case 3: /* prefetchnt0 */
8032 if (mod == 3)
8033 goto illegal_op;
8034 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8035 /* nothing more to do */
8036 break;
8037 default: /* nop (multi byte) */
8038 gen_nop_modrm(s, modrm);
8039 break;
8040 }
8041 break;
8042 case 0x119 ... 0x11f: /* nop (multi byte) */
8043 modrm = ldub_code(s->pc++);
8044 gen_nop_modrm(s, modrm);
8045 break;
8046 case 0x120: /* mov reg, crN */
8047 case 0x122: /* mov crN, reg */
8048 if (s->cpl != 0) {
8049 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8050 } else {
8051 modrm = ldub_code(s->pc++);
8052 if ((modrm & 0xc0) != 0xc0)
8053 goto illegal_op;
8054 rm = (modrm & 7) | REX_B(s);
8055 reg = ((modrm >> 3) & 7) | rex_r;
8056 if (CODE64(s))
8057 ot = OT_QUAD;
8058 else
8059 ot = OT_LONG;
8060 switch(reg) {
8061 case 0:
8062 case 2:
8063 case 3:
8064 case 4:
8065 case 8:
8066 if (s->cc_op != CC_OP_DYNAMIC)
8067 gen_op_set_cc_op(s->cc_op);
8068 gen_jmp_im(pc_start - s->cs_base);
8069 if (b & 2) {
8070 gen_op_mov_TN_reg(ot, 0, rm);
8071 tcg_gen_helper_0_2(helper_write_crN,
8072 tcg_const_i32(reg), cpu_T[0]);
8073 gen_jmp_im(s->pc - s->cs_base);
8074 gen_eob(s);
8075 } else {
8076 tcg_gen_helper_1_1(helper_read_crN,
8077 cpu_T[0], tcg_const_i32(reg));
8078 gen_op_mov_reg_T0(ot, rm);
8079 }
8080 break;
8081 default:
8082 goto illegal_op;
8083 }
8084 }
8085 break;
8086 case 0x121: /* mov reg, drN */
8087 case 0x123: /* mov drN, reg */
8088 if (s->cpl != 0) {
8089 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8090 } else {
8091 modrm = ldub_code(s->pc++);
8092 if ((modrm & 0xc0) != 0xc0)
8093 goto illegal_op;
8094 rm = (modrm & 7) | REX_B(s);
8095 reg = ((modrm >> 3) & 7) | rex_r;
8096 if (CODE64(s))
8097 ot = OT_QUAD;
8098 else
8099 ot = OT_LONG;
8100 /* XXX: do it dynamically with CR4.DE bit */
8101 if (reg == 4 || reg == 5 || reg >= 8)
8102 goto illegal_op;
8103 if (b & 2) {
8104 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
8105 gen_op_mov_TN_reg(ot, 0, rm);
8106 tcg_gen_helper_0_2(helper_movl_drN_T0,
8107 tcg_const_i32(reg), cpu_T[0]);
8108 gen_jmp_im(s->pc - s->cs_base);
8109 gen_eob(s);
8110 } else {
8111 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
8112 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
8113 gen_op_mov_reg_T0(ot, rm);
8114 }
8115 }
8116 break;
8117 case 0x106: /* clts */
8118 if (s->cpl != 0) {
8119 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8120 } else {
8121 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
8122 tcg_gen_helper_0_0(helper_clts);
8123 /* abort block because static cpu state changed */
8124 gen_jmp_im(s->pc - s->cs_base);
8125 gen_eob(s);
8126 }
8127 break;
8128 /* MMX/3DNow!/SSE/SSE2/SSE3/SSSE3/SSE4 support */
8129 case 0x1c3: /* MOVNTI reg, mem */
8130 if (!(s->cpuid_features & CPUID_SSE2))
8131 goto illegal_op;
8132 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
8133 modrm = ldub_code(s->pc++);
8134 mod = (modrm >> 6) & 3;
8135 if (mod == 3)
8136 goto illegal_op;
8137 reg = ((modrm >> 3) & 7) | rex_r;
8138 /* generate a generic store */
8139 gen_ldst_modrm(s, modrm, ot, reg, 1);
8140 break;
8141 case 0x1ae:
8142 modrm = ldub_code(s->pc++);
8143 mod = (modrm >> 6) & 3;
8144 op = (modrm >> 3) & 7;
8145 switch(op) {
8146 case 0: /* fxsave */
8147 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
8148 (s->flags & HF_EM_MASK))
8149 goto illegal_op;
8150 if (s->flags & HF_TS_MASK) {
8151 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8152 break;
8153 }
8154 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8155 if (s->cc_op != CC_OP_DYNAMIC)
8156 gen_op_set_cc_op(s->cc_op);
8157 gen_jmp_im(pc_start - s->cs_base);
8158 tcg_gen_helper_0_2(helper_fxsave,
8159 cpu_A0, tcg_const_i32((s->dflag == 2)));
8160 break;
8161 case 1: /* fxrstor */
8162 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
8163 (s->flags & HF_EM_MASK))
8164 goto illegal_op;
8165 if (s->flags & HF_TS_MASK) {
8166 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8167 break;
8168 }
8169 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8170 if (s->cc_op != CC_OP_DYNAMIC)
8171 gen_op_set_cc_op(s->cc_op);
8172 gen_jmp_im(pc_start - s->cs_base);
8173 tcg_gen_helper_0_2(helper_fxrstor,
8174 cpu_A0, tcg_const_i32((s->dflag == 2)));
8175 break;
8176 case 2: /* ldmxcsr */
8177 case 3: /* stmxcsr */
8178 if (s->flags & HF_TS_MASK) {
8179 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8180 break;
8181 }
8182 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
8183 mod == 3)
8184 goto illegal_op;
8185 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8186 if (op == 2) {
8187 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
8188 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
8189 } else {
8190 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
8191 gen_op_st_T0_A0(OT_LONG + s->mem_index);
8192 }
8193 break;
8194 case 5: /* lfence */
8195 case 6: /* mfence */
8196 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
8197 goto illegal_op;
8198 break;
8199 case 7: /* sfence / clflush */
8200 if ((modrm & 0xc7) == 0xc0) {
8201 /* sfence */
8202 /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
8203 if (!(s->cpuid_features & CPUID_SSE))
8204 goto illegal_op;
8205 } else {
8206 /* clflush */
8207 if (!(s->cpuid_features & CPUID_CLFLUSH))
8208 goto illegal_op;
8209 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8210 }
8211 break;
8212 default:
8213 goto illegal_op;
8214 }
8215 break;
8216 case 0x10d: /* 3DNow! prefetch(w) */
8217 modrm = ldub_code(s->pc++);
8218 mod = (modrm >> 6) & 3;
8219 if (mod == 3)
8220 goto illegal_op;
8221 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8222 /* ignore for now */
8223 break;
8224 case 0x1aa: /* rsm */
8225 gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM);
8226 if (!(s->flags & HF_SMM_MASK))
8227 goto illegal_op;
8228 if (s->cc_op != CC_OP_DYNAMIC) {
8229 gen_op_set_cc_op(s->cc_op);
8230 s->cc_op = CC_OP_DYNAMIC;
8231 }
8232 gen_jmp_im(s->pc - s->cs_base);
8233 tcg_gen_helper_0_0(helper_rsm);
8234 gen_eob(s);
8235 break;
8236 case 0x1b8: /* SSE4.2 popcnt */
8237 if ((prefixes & (PREFIX_REPZ | PREFIX_LOCK | PREFIX_REPNZ)) !=
8238 PREFIX_REPZ)
8239 goto illegal_op;
8240 if (!(s->cpuid_ext_features & CPUID_EXT_POPCNT))
8241 goto illegal_op;
8242
8243 modrm = ldub_code(s->pc++);
8244 reg = ((modrm >> 3) & 7);
8245
8246 if (s->prefix & PREFIX_DATA)
8247 ot = OT_WORD;
8248 else if (s->dflag != 2)
8249 ot = OT_LONG;
8250 else
8251 ot = OT_QUAD;
8252
8253 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
8254 tcg_gen_helper_1_2(helper_popcnt,
8255 cpu_T[0], cpu_T[0], tcg_const_i32(ot));
8256 gen_op_mov_reg_T0(ot, reg);
8257
8258 s->cc_op = CC_OP_EFLAGS;
8259 break;
8260 case 0x10e ... 0x10f:
8261 /* 3DNow! instructions, ignore prefixes */
8262 s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
8263 case 0x110 ... 0x117:
8264 case 0x128 ... 0x12f:
8265 case 0x138 ... 0x13a:
8266 case 0x150 ... 0x177:
8267 case 0x17c ... 0x17f:
8268 case 0x1c2:
8269 case 0x1c4 ... 0x1c6:
8270 case 0x1d0 ... 0x1fe:
8271 gen_sse(s, b, pc_start, rex_r);
8272 break;
8273 default:
8274 goto illegal_op;
8275 }
8276 /* lock generation */
8277 if (s->prefix & PREFIX_LOCK)
8278 tcg_gen_helper_0_0(helper_unlock);
8279 return s->pc;
8280 illegal_op:
8281 if (s->prefix & PREFIX_LOCK)
8282 tcg_gen_helper_0_0(helper_unlock);
8283 /* XXX: ensure that no lock was generated */
8284 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
8285 return s->pc;
8286}
8287
/* One-time TCG front-end initialisation: sanity-check the CCTable entry
   size, create the global TCG values that mirror fixed CPUState fields
   (env pointer, cc_op, cc_src, cc_dst, cc_tmp), and register every TCG
   helper function by name. Must run before any code is translated. */
void optimize_flags_init(void)
{
#if TCG_TARGET_REG_BITS == 32
    assert(sizeof(CCTable) == (1 << 3));  /* 8-byte entries on 32-bit TCG hosts */
#else
    assert(sizeof(CCTable) == (1 << 4));  /* 16-byte entries on 64-bit TCG hosts */
#endif
    /* Map fixed CPU state onto global TCG values: 'env' lives in the
       reserved host register TCG_AREG0, the cc_* fields are env-relative
       memory slots. */
    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
    cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
                                   TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
    cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
    cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
    cpu_cc_tmp = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_tmp), "cc_tmp");

    /* register helpers */

    /* Redefining DEF_HELPER and re-including helper.h expands to one
       tcg_register_helper() call per declared helper. */
#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
#include "helper.h"
}
8310
8311/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
8312 basic block 'tb'. If search_pc is TRUE, also generate PC
8313 information for each intermediate instruction. */
/* Translate guest code starting at tb->pc into TCG intermediate ops in
   gen_opc_buf/gen_opparam_buf. If search_pc is non-zero, also fill the
   gen_opc_* side tables (guest PC, cc_op, instruction-start flags, icount)
   so a position in the op stream can later be mapped back to guest state
   (see gen_pc_load). */
#ifndef VBOX
static inline void gen_intermediate_code_internal(CPUState *env,
#else /* VBOX */
DECLINLINE(void) gen_intermediate_code_internal(CPUState *env,
#endif /* VBOX */
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int j, lj, cflags;
    uint64_t flags;
    target_ulong pc_start;
    target_ulong cs_base;
    int num_insns;
    int max_insns;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    /* Unpack the static CPU state snapshot (tb->flags) into the
       disassembly context. */
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
#ifdef VBOX_WITH_CALL_RECORD
    dc->vme = !!(env->cr[4] & CR4_VME_MASK);
    /* VBox call recording: only for paged, 32-bit, interrupts-disabled
       code that is not executing in raw ring-0. */
    if (    !(env->state & CPU_RAW_RING0)
        &&  (env->cr[0] & CR0_PG_MASK)
        &&  !(env->eflags & X86_EFL_IF)
        &&  dc->code32)
        dc->record_call = 1;
    else
        dc->record_call = 0;
#endif
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    /* Condition codes start unknown; disas_insn tracks them lazily. */
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        /* mem_index selects the softmmu access variant by privilege:
           user (cpl 3) vs. kernel. */
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
    dc->cpuid_ext2_features = env->cpuid_ext2_features;
    dc->cpuid_ext3_features = env->cpuid_ext3_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    /* Direct block chaining (jmp_opt) is disabled whenever single-step,
       TF, or IRQ inhibition requires control to return after the block. */
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    /* Allocate the TCG temporaries shared by all of instruction decode. */
    cpu_T[0] = tcg_temp_new(TCG_TYPE_TL);
    cpu_T[1] = tcg_temp_new(TCG_TYPE_TL);
    cpu_A0 = tcg_temp_new(TCG_TYPE_TL);
    cpu_T3 = tcg_temp_new(TCG_TYPE_TL);

    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
    cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
    cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
    cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    gen_icount_start();
    /* Main translation loop: one guest instruction per iteration, until a
       jump/exception, a translation-buffer limit, or a stepping condition
       ends the block. */
    for(;;) {
        /* Emit a debug exception for any breakpoint at the current PC. */
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            /* Record per-op guest state; zero the instr-start flags of
               any op slots emitted since the previous instruction. */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        }
        /* With icount, the last instruction of an I/O block is bracketed
           by gen_io_start/gen_io_end. */
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();

        pc_ptr = disas_insn(dc, pc_ptr);
        num_insns++;
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;
#ifdef VBOX
#ifdef DEBUG
/*
        if(cpu_check_code_raw(env, pc_ptr, env->hflags | (env->eflags & (IOPL_MASK | TF_MASK | VM_MASK))) == ERROR_SUCCESS)
        {
            //should never happen as the jump to the patch code terminates the translation block
            dprintf(("QEmu is about to execute instructions in our patch block at %08X!!\n", pc_ptr));
        }
*/
#endif
        /* VBox may request that only a single instruction be translated;
           the flag is one-shot and cleared here. */
        if (env->state & CPU_EMULATE_SINGLE_INSTR)
        {
            env->state &= ~CPU_EMULATE_SINGLE_INSTR;
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
#endif /* VBOX */

        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32) ||
            num_insns >= max_insns) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    /* we don't forget to fill the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
    }
#endif

    /* tb->size/icount are only meaningful on the initial translation
       pass, not when re-walking an existing block for a PC search. */
    if (!search_pc) {
        tb->size = pc_ptr - pc_start;
        tb->icount = num_insns;
    }
}
8521
/* Translate the basic block 'tb' into TCG ops (normal translation pass;
   no PC side tables are generated). */
void gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}
8526
/* Re-translate 'tb' with search_pc enabled so the gen_opc_* side tables
   are filled; used to map a faulting position back to guest state. */
void gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}
8531
8532void gen_pc_load(CPUState *env, TranslationBlock *tb,
8533 unsigned long searched_pc, int pc_pos, void *puc)
8534{
8535 int cc_op;
8536#ifdef DEBUG_DISAS
8537 if (loglevel & CPU_LOG_TB_OP) {
8538 int i;
8539 fprintf(logfile, "RESTORE:\n");
8540 for(i = 0;i <= pc_pos; i++) {
8541 if (gen_opc_instr_start[i]) {
8542 fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
8543 }
8544 }
8545 fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
8546 searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
8547 (uint32_t)tb->cs_base);
8548 }
8549#endif
8550 env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
8551 cc_op = gen_opc_cc_op[pc_pos];
8552 if (cc_op != CC_OP_DYNAMIC)
8553 env->cc_op = cc_op;
8554}
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette