VirtualBox

source: vbox/trunk/src/recompiler_new/target-i386/translate.c@ 15009

Last change on this file since 15009 was 15009, checked in by vboxsync, 16 years ago

new_recompiler: cleanup, optimization, compile with the right tool - gets rid of the nasty bug with bootmenu

  • Property svn:eol-style set to native
File size: 275.4 KB
Line 
1/*
2 * i386 translation
3 *
4 * Copyright (c) 2003 Fabrice Bellard
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
20
21/*
22 * Sun LGPL Disclaimer: For the avoidance of doubt, except that if any license choice
23 * other than GPL or LGPL is available it will apply instead, Sun elects to use only
24 * the Lesser General Public License version 2.1 (LGPLv2) at this time for any software where
25 * a choice of LGPL license versions is made available with the language indicating
26 * that LGPLv2 or any later version may be used, or where a choice of which version
27 * of the LGPL is applied is otherwise unspecified.
28 */
29#include <stdarg.h>
30#include <stdlib.h>
31#include <stdio.h>
32#include <string.h>
33#ifndef VBOX
34#include <inttypes.h>
35#include <signal.h>
36#include <assert.h>
37#endif /* !VBOX */
38
39#include "cpu.h"
40#include "exec-all.h"
41#include "disas.h"
42#include "helper.h"
43#include "tcg-op.h"
44
/* Instruction prefix flag bits accumulated in DisasContext.prefix. */
#define PREFIX_REPZ 0x01
#define PREFIX_REPNZ 0x02
#define PREFIX_LOCK 0x04
#define PREFIX_DATA 0x08
#define PREFIX_ADR 0x10

#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#ifndef VBOX
#define X86_64_DEF(x...) x
#else
#define X86_64_DEF(x...) x
#endif
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#if 1
#define BUGGY_64(x) NULL
#endif
#else
#define X86_64_ONLY(x) NULL
#ifndef VBOX
#define X86_64_DEF(x...)
#else
#define X86_64_DEF(x)
#endif
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif

//#define MACRO_TEST 1

/* global register indexes */
static TCGv cpu_env, cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp;
/* local temps */
static TCGv cpu_T[2], cpu_T3;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
static TCGv cpu_tmp5, cpu_tmp6;

#include "gen-icount.h"

#ifdef TARGET_X86_64
/* Non-zero while decoding an insn with a REX prefix: byte accesses to
   regs 4-7 then use the low byte (REG_B_OFFSET) instead of the legacy
   AH/CH/DH/BH high-byte encoding — see gen_op_mov_reg_v(). */
static int x86_64_hregs;
#endif
92
93#ifdef VBOX
94
95/* Special/override code readers to hide patched code. */
96
97uint8_t ldub_code_raw(target_ulong pc)
98{
99 uint8_t b;
100
101 if (!remR3GetOpcode(cpu_single_env, pc, &b))
102 b = ldub_code(pc);
103 return b;
104}
105#define ldub_code(a) ldub_code_raw(a)
106
107uint16_t lduw_code_raw(target_ulong pc)
108{
109 return (ldub_code(pc+1) << 8) | ldub_code(pc);
110}
111#define lduw_code(a) lduw_code_raw(a)
112
113
114uint32_t ldl_code_raw(target_ulong pc)
115{
116 return (ldub_code(pc+3) << 24) | (ldub_code(pc+2) << 16) | (ldub_code(pc+1) << 8) | ldub_code(pc);
117}
118#define ldl_code(a) ldl_code_raw(a)
119
120#endif /* VBOX */
121
122
/* Per-instruction/per-block state of the x86 -> TCG translator. */
typedef struct DisasContext {
    /* current insn context */
    int override; /* segment override register index, -1 if no override */
    int prefix;   /* PREFIX_* flag bits seen on the current insn */
    int aflag, dflag; /* address/operand size; 2 selects the 64-bit path,
                         non-zero selects 32-bit, 0 selects 16-bit */
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b; /* REX.X / REX.B prefix bits of the current insn */
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
#ifdef VBOX
    int vme;    /* CR4.VME */
    int record_call;    /* record calls for CSAM or not? */
#endif
    int cpl;    /* current privilege level */
    int iopl;   /* I/O privilege level */
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb; /* block being translated */
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    /* CPUID feature masks used to gate instruction decoding
       (presumably copied from CPUState at block start — confirm). */
    int cpuid_features;
    int cpuid_ext_features;
    int cpuid_ext2_features;
    int cpuid_ext3_features;
} DisasContext;
164
165static void gen_eob(DisasContext *s);
166static void gen_jmp(DisasContext *s, target_ulong eip);
167static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
168
169#ifdef VBOX
170static void gen_check_external_event();
171#endif
172
/* i386 arith/logic operations (same order as the /r field encoding of
   the 0x80..0x83 group) */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};

/* i386 shift ops (same order as the /r field of the shift groups) */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7,
};

/* jcc condition base codes; bit 0 of the opcode inverts the condition */
enum {
    JCC_O,
    JCC_B,
    JCC_Z,
    JCC_BE,
    JCC_S,
    JCC_P,
    JCC_L,
    JCC_LE,
};

/* operand size */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};

enum {
    /* I386 int registers */
    OR_EAX,   /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16,    /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};
231
/* T0 = 0 */
#ifndef VBOX
static inline void gen_op_movl_T0_0(void)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_0(void)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}
240
/* T0 = val (signed 32-bit immediate, sign-extended to target width) */
#ifndef VBOX
static inline void gen_op_movl_T0_im(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_im(int32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[0], val);
}
249
/* T0 = val (unsigned 32-bit immediate) */
#ifndef VBOX
static inline void gen_op_movl_T0_imu(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_imu(uint32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[0], val);
}
258
/* T1 = val (signed 32-bit immediate) */
#ifndef VBOX
static inline void gen_op_movl_T1_im(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T1_im(int32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[1], val);
}
267
/* T1 = val (unsigned 32-bit immediate) */
#ifndef VBOX
static inline void gen_op_movl_T1_imu(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T1_imu(uint32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[1], val);
}
276
/* A0 (address temp) = val (unsigned 32-bit immediate) */
#ifndef VBOX
static inline void gen_op_movl_A0_im(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_A0_im(uint32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_A0, val);
}
285
#ifdef TARGET_X86_64
/* A0 = val (full 64-bit immediate; 64-bit targets only) */
#ifndef VBOX
static inline void gen_op_movq_A0_im(int64_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movq_A0_im(int64_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif
296
/* T0 = val (immediate at full target_ulong width) */
#ifndef VBOX
static inline void gen_movtl_T0_im(target_ulong val)
#else /* VBOX */
DECLINLINE(void) gen_movtl_T0_im(target_ulong val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[0], val);
}
305
/* T1 = val (immediate at full target_ulong width) */
#ifndef VBOX
static inline void gen_movtl_T1_im(target_ulong val)
#else /* VBOX */
DECLINLINE(void) gen_movtl_T1_im(target_ulong val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[1], val);
}
314
/* T0 &= 0xffff (truncate to 16-bit operand) */
#ifndef VBOX
static inline void gen_op_andl_T0_ffff(void)
#else /* VBOX */
DECLINLINE(void) gen_op_andl_T0_ffff(void)
#endif /* VBOX */
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}
323
/* T0 &= val */
#ifndef VBOX
static inline void gen_op_andl_T0_im(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_andl_T0_im(uint32_t val)
#endif /* VBOX */
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}
332
/* T0 = T1 */
#ifndef VBOX
static inline void gen_op_movl_T0_T1(void)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_T1(void)
#endif /* VBOX */
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}
341
/* A0 &= 0xffff (wrap address for 16-bit addressing) */
#ifndef VBOX
static inline void gen_op_andl_A0_ffff(void)
#else /* VBOX */
DECLINLINE(void) gen_op_andl_A0_ffff(void)
#endif /* VBOX */
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}
350
#ifdef TARGET_X86_64

/* number of operand sizes handled (byte/word/long/quad) */
#define NB_OP_SIZES 4

#else /* !TARGET_X86_64 */

/* number of operand sizes handled (byte/word/long) */
#define NB_OP_SIZES 3

#endif /* !TARGET_X86_64 */

/* Byte offsets of the 8/16/32-bit sub-registers inside a target_ulong
   CPUState register slot, for both host endiannesses.  _H is the legacy
   high-byte (AH..BH) lane, _LH the upper 32 bits of a 64-bit slot. */
#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif
374
/* Store temp t0 into general register 'reg' at operand size 'ot'.
   Byte stores: regs 4-7 without REX map to the legacy AH/CH/DH/BH lane
   (slot reg-4 at REG_H_OFFSET).  32-bit stores on a 64-bit target clear
   the upper half, matching hardware zero-extension. */
#ifndef VBOX
static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_v(int ot, int reg, TCGv t0)
#endif /* VBOX */
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            /* legacy high-byte register encoding */
            tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
411
/* reg = T0 at operand size 'ot' */
#ifndef VBOX
static inline void gen_op_mov_reg_T0(int ot, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_T0(int ot, int reg)
#endif /* VBOX */
{
    gen_op_mov_reg_v(ot, reg, cpu_T[0]);
}
420
/* reg = T1 at operand size 'ot' */
#ifndef VBOX
static inline void gen_op_mov_reg_T1(int ot, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_T1(int ot, int reg)
#endif /* VBOX */
{
    gen_op_mov_reg_v(ot, reg, cpu_T[1]);
}
429
/* Store the address temp A0 into general register 'reg'.
   'size' is an address-size code: 0 = 16-bit, 1 = 32-bit, 2 = 64-bit
   (note: NOT the OT_* encoding).  32-bit stores on a 64-bit target
   clear the upper half. */
#ifndef VBOX
static inline void gen_op_mov_reg_A0(int size, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_A0(int size, int reg)
#endif /* VBOX */
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}
459
460#ifndef VBOX
461static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg)
462#else /* VBOX */
463DECLINLINE(void) gen_op_mov_v_reg(int ot, TCGv t0, int reg)
464#endif /* VBOX */
465{
466 switch(ot) {
467 case OT_BYTE:
468 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
469#ifndef VBOX
470 goto std_case;
471#else
472 tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
473#endif
474 } else {
475 tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
476 }
477 break;
478 default:
479 std_case:
480 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
481 break;
482 }
483}
484
/* T[t_index] = reg at operand size 'ot' */
#ifndef VBOX
static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_TN_reg(int ot, int t_index, int reg)
#endif /* VBOX */
{
    gen_op_mov_v_reg(ot, cpu_T[t_index], reg);
}
493
/* A0 = low 32 bits of register 'reg', zero-extended */
#ifndef VBOX
static inline void gen_op_movl_A0_reg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_A0_reg(int reg)
#endif /* VBOX */
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}
502
/* A0 += val; on 64-bit targets the result is wrapped to 32 bits,
   matching 32-bit address-size arithmetic */
#ifndef VBOX
static inline void gen_op_addl_A0_im(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_A0_im(int32_t val)
#endif /* VBOX */
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
514
#ifdef TARGET_X86_64
/* A0 += val at full 64-bit width (no wrapping) */
#ifndef VBOX
static inline void gen_op_addq_A0_im(int64_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_addq_A0_im(int64_t val)
#endif /* VBOX */
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif
525
/* A0 += val, using 64-bit arithmetic in 64-bit code segments and
   32-bit wrapping arithmetic otherwise */
static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}
535
/* T0 += T1 */
#ifndef VBOX
static inline void gen_op_addl_T0_T1(void)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_T0_T1(void)
#endif /* VBOX */
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}
544
/* eip = T0 (indirect jump target) */
#ifndef VBOX
static inline void gen_op_jmp_T0(void)
#else /* VBOX */
DECLINLINE(void) gen_op_jmp_T0(void)
#endif /* VBOX */
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}
553
/* reg += val at address-size 'size' (0 = 16-bit: only the low word is
   written back; 1 = 32-bit: wrapped to 32 bits on 64-bit targets;
   2 = 64-bit). */
#ifndef VBOX
static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_add_reg_im(int size, int reg, int32_t val)
#endif /* VBOX */
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}
583
/* reg += T0 at address-size 'size' (same size encoding and write-back
   rules as gen_op_add_reg_im) */
#ifndef VBOX
static inline void gen_op_add_reg_T0(int size, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_add_reg_T0(int size, int reg)
#endif /* VBOX */
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}
613
/* cc_op = val (record which lazy-flags operation produced cc_src/cc_dst) */
#ifndef VBOX
static inline void gen_op_set_cc_op(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_set_cc_op(int32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_i32(cpu_cc_op, val);
}
622
/* A0 += reg << shift (SIB-style scaled index); wrapped to 32 bits on
   64-bit targets since this is the 32-bit address path */
#ifndef VBOX
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_A0_reg_sN(int shift, int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
#ifdef VBOX
/* Hook called before reading a segment base: would resync a stale
   (lazily-loaded) segment via helper_sync_seg.  Currently compiled out. */
DECLINLINE(void) gen_op_seg_check(int reg, bool keepA0)
{
    /* Segments do not appear to get out of sync; if they ever do,
       enable the code below. */
#if 0
    /* Our segments could be outdated, thus check for newselector field to see if update really needed */
    int skip_label;
    TCGv t0, a0;

    /* For other segments this check is waste of time, and also TCG is unable to cope with this code,
       for data/stack segments, as expects alive cpu_T[0] */
    if (reg != R_GS)
        return;

    if (keepA0)
    {
        /* we need to store old cpu_A0 */
        a0 = tcg_temp_local_new(TCG_TYPE_TL);
        tcg_gen_mov_tl(a0, cpu_A0);
    }

    skip_label = gen_new_label();
    t0 = tcg_temp_local_new(TCG_TYPE_TL);

    /* skip the sync unless a new selector is pending and we are not in V86 mode */
    tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, segs[reg].newselector) + REG_L_OFFSET);
    /* NOTE(review): brcondi_i32 on a TL-sized temp looks suspicious on
       64-bit hosts — confirm if this code is ever re-enabled. */
    tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, skip_label);
    tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, eflags) + REG_L_OFFSET);
    tcg_gen_andi_tl(t0, t0, VM_MASK);
    tcg_gen_brcondi_i32(TCG_COND_NE, t0, 0, skip_label);
    tcg_gen_movi_tl(t0, reg);

    tcg_gen_helper_0_1(helper_sync_seg, t0);

    tcg_temp_free(t0);

    gen_set_label(skip_label);
    if (keepA0)
    {
        /* restore the caller's address temp */
        tcg_gen_mov_tl(cpu_A0, a0);
        tcg_temp_free(a0);
    }
#endif /* 0 */
}
#endif
681
/* A0 = low 32 bits of segment base of 'reg' */
#ifndef VBOX
static inline void gen_op_movl_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_A0_seg(int reg)
#endif /* VBOX */
{
#ifdef VBOX
    gen_op_seg_check(reg, false);
#endif
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}
693
/* A0 += segment base of 'reg'; wrapped to 32 bits on 64-bit targets
   (32-bit address path) */
#ifndef VBOX
static inline void gen_op_addl_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_A0_seg(int reg)
#endif /* VBOX */
{
#ifdef VBOX
    gen_op_seg_check(reg, true);
#endif
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
709
#ifdef TARGET_X86_64
/* A0 = full 64-bit segment base of 'reg' (long-mode FS/GS path) */
#ifndef VBOX
static inline void gen_op_movq_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movq_A0_seg(int reg)
#endif /* VBOX */
{
#ifdef VBOX
    gen_op_seg_check(reg, false);
#endif
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
}
722
/* A0 += full 64-bit segment base of 'reg' (no wrapping) */
#ifndef VBOX
static inline void gen_op_addq_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addq_A0_seg(int reg)
#endif /* VBOX */
{
#ifdef VBOX
    gen_op_seg_check(reg, true);
#endif
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
735
/* A0 = full 64-bit value of register 'reg' */
#ifndef VBOX
static inline void gen_op_movq_A0_reg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movq_A0_reg(int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
}
744
/* A0 += reg << shift (64-bit scaled index, no wrapping) */
#ifndef VBOX
static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addq_A0_reg_sN(int shift, int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
#endif
757
/* T0 = sign-extended load from [A0].  'idx' packs the operand size in
   its low 2 bits and (mem_index + 1) in the remaining bits (callers
   pass ot + s->mem_index, so s->mem_index appears to be pre-shifted). */
#ifndef VBOX
static inline void gen_op_lds_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_lds_T0_A0(int idx)
#endif /* VBOX */
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 2:
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}
778
/* t0 = zero-extended load from [a0]; 'idx' encoded as in
   gen_op_lds_T0_A0 (low 2 bits = size, upper bits = mem_index + 1) */
#ifndef VBOX
static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0)
#else /* VBOX */
DECLINLINE(void) gen_op_ld_v(int idx, TCGv t0, TCGv a0)
#endif /* VBOX */
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(t0, a0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(t0, a0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(t0, a0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(t0, a0, mem_index);
        break;
    }
}
802
/* XXX: always use ldu or lds */
/* T0 = zero-extended load from [A0] */
#ifndef VBOX
static inline void gen_op_ld_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_ld_T0_A0(int idx)
#endif /* VBOX */
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}
812
/* T0 = zero-extended load from [A0] (alias of gen_op_ld_T0_A0) */
#ifndef VBOX
static inline void gen_op_ldu_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_ldu_T0_A0(int idx)
#endif /* VBOX */
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}
821
/* T1 = zero-extended load from [A0] */
#ifndef VBOX
static inline void gen_op_ld_T1_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_ld_T1_A0(int idx)
#endif /* VBOX */
{
    gen_op_ld_v(idx, cpu_T[1], cpu_A0);
}
830
/* Store t0 to [a0]; 'idx' encoded as in gen_op_ld_v
   (low 2 bits = size, upper bits = mem_index + 1) */
#ifndef VBOX
static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0)
#else /* VBOX */
DECLINLINE(void) gen_op_st_v(int idx, TCGv t0, TCGv a0)
#endif /* VBOX */
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(t0, a0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(t0, a0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(t0, a0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(t0, a0, mem_index);
        break;
    }
}
854
/* store T0 to [A0] */
#ifndef VBOX
static inline void gen_op_st_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_st_T0_A0(int idx)
#endif /* VBOX */
{
    gen_op_st_v(idx, cpu_T[0], cpu_A0);
}
863
/* store T1 to [A0] */
#ifndef VBOX
static inline void gen_op_st_T1_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_st_T1_A0(int idx)
#endif /* VBOX */
{
    gen_op_st_v(idx, cpu_T[1], cpu_A0);
}
872
#ifdef VBOX
/* Emit an inline fast-path check for pending external events (forced
   exits, timers, DMA, hard interrupts): fall through when none of the
   CPU_INTERRUPT_EXTERNAL_* bits is set, otherwise call the helper.
   '(void)' gives both functions proper prototypes, consistent with the
   rest of this file (empty parens declare unspecified parameters). */
static void gen_check_external_event(void)
{
    int skip_label;
    TCGv t0;

    skip_label = gen_new_label();
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    /* t0 = cpu_tmp0; */

    tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, interrupt_request));
    /* Keep in sync with helper_check_external_event() */
    tcg_gen_andi_tl(t0, t0,
                    CPU_INTERRUPT_EXTERNAL_EXIT
                    | CPU_INTERRUPT_EXTERNAL_TIMER
                    | CPU_INTERRUPT_EXTERNAL_DMA
                    | CPU_INTERRUPT_EXTERNAL_HARD);
    /** @todo: predict branch as taken */
    /* NOTE(review): brcondi_i32 on a TL-sized temp — confirm on 64-bit hosts */
    tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, skip_label);
    tcg_temp_free(t0);

    tcg_gen_helper_0_0(helper_check_external_event);

    gen_set_label(skip_label);
}

/* Unconditional variant: always call the event-check helper. */
static void gen_check_external_event2(void)
{
    tcg_gen_helper_0_0(helper_check_external_event);
}

#endif
905
/* eip = pc (store an immediate instruction pointer) */
#ifndef VBOX
static inline void gen_jmp_im(target_ulong pc)
#else /* VBOX */
DECLINLINE(void) gen_jmp_im(target_ulong pc)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}
915
#ifdef VBOX
/* Update eip, optionally dumping CPU state when VBOX_DUMP_STATE builds */
DECLINLINE(void) gen_update_eip(target_ulong pc)
{
    gen_jmp_im(pc);
#ifdef VBOX_DUMP_STATE
    tcg_gen_helper_0_0(helper_dump_state);
#endif
}

#endif
926
/* A0 = effective source address of a string op: seg:([R]E)SI, honouring
   a segment override and the current address size (aflag: 2=64, !0=32,
   0=16-bit with mandatory segment addition). */
#ifndef VBOX
static inline void gen_string_movl_A0_ESI(DisasContext *s)
#else /* VBOX */
DECLINLINE(void) gen_string_movl_A0_ESI(DisasContext *s)
#endif /* VBOX */
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}
965
/* A0 = effective destination address of a string op: ES:([R]E)DI.
   The destination segment is always ES and cannot be overridden. */
#ifndef VBOX
static inline void gen_string_movl_A0_EDI(DisasContext *s)
#else /* VBOX */
DECLINLINE(void) gen_string_movl_A0_EDI(DisasContext *s)
#endif /* VBOX */
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}
990
991#ifndef VBOX
992static inline void gen_op_movl_T0_Dshift(int ot)
993#else /* VBOX */
994DECLINLINE(void) gen_op_movl_T0_Dshift(int ot)
995#endif /* VBOX */
996{
997 tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
998 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
999};
1000
1001static void gen_extu(int ot, TCGv reg)
1002{
1003 switch(ot) {
1004 case OT_BYTE:
1005 tcg_gen_ext8u_tl(reg, reg);
1006 break;
1007 case OT_WORD:
1008 tcg_gen_ext16u_tl(reg, reg);
1009 break;
1010 case OT_LONG:
1011 tcg_gen_ext32u_tl(reg, reg);
1012 break;
1013 default:
1014 break;
1015 }
1016}
1017
1018static void gen_exts(int ot, TCGv reg)
1019{
1020 switch(ot) {
1021 case OT_BYTE:
1022 tcg_gen_ext8s_tl(reg, reg);
1023 break;
1024 case OT_WORD:
1025 tcg_gen_ext16s_tl(reg, reg);
1026 break;
1027 case OT_LONG:
1028 tcg_gen_ext32s_tl(reg, reg);
1029 break;
1030 default:
1031 break;
1032 }
1033}
1034
/* Branch to label1 if (R/E)CX, truncated to the address size, is
   non-zero.  'size': 0 = CX, 1 = ECX, 2 = RCX (size+1 is the OT_* code). */
#ifndef VBOX
static inline void gen_op_jnz_ecx(int size, int label1)
#else /* VBOX */
DECLINLINE(void) gen_op_jnz_ecx(int size, int label1)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
}
1045
/* Branch to label1 if (R/E)CX, truncated to the address size, is zero
   (loop/rep termination test). */
#ifndef VBOX
static inline void gen_op_jz_ecx(int size, int label1)
#else /* VBOX */
DECLINLINE(void) gen_op_jz_ecx(int size, int label1)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
}
1056
/* I/O helper dispatch tables, indexed by operand size (OT_BYTE..OT_LONG) */
static void *helper_in_func[3] = {
    helper_inb,
    helper_inw,
    helper_inl,
};

static void *helper_out_func[3] = {
    helper_outb,
    helper_outw,
    helper_outl,
};

/* I/O permission-bitmap check helpers, same indexing */
static void *gen_check_io_func[3] = {
    helper_check_iob,
    helper_check_iow,
    helper_check_iol,
};
1074
/* Emit the privilege checks for an IN/OUT-style insn whose port is in T0:
   - TSS I/O permission bitmap check when CPL > IOPL or in vm86 mode;
   - SVM I/O intercept check when the guest runs under SVM (HF_SVMI).
   Flags and eip are materialized first so the helpers can raise faults. */
static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
                         uint32_t svm_flags)
{
    int state_saved;
    target_ulong next_eip;

    state_saved = 0;
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        state_saved = 1;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_1(gen_check_io_func[ot],
                           cpu_tmp2_i32);
    }
    if(s->flags & HF_SVMI_MASK) {
        if (!state_saved) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(cur_eip);
            state_saved = 1;
        }
        /* encode the access size into the SVM exit info */
        svm_flags |= (1 << (4 + ot));
        next_eip = s->pc - s->cs_base;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_3(helper_svm_check_io,
                           cpu_tmp2_i32,
                           tcg_const_i32(svm_flags),
                           tcg_const_i32(next_eip - cur_eip));
    }
}
1107
/* One MOVS iteration: load from seg:SI, store to ES:DI, then advance
   both index registers by the DF-directed operand size. */
#ifndef VBOX
static inline void gen_movs(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_movs(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
1122
/* Flush the translation-time cc_op into the CPU state and mark it
   dynamic, so later code must read cc_op at run time. */
#ifndef VBOX
static inline void gen_update_cc_op(DisasContext *s)
#else /* VBOX */
DECLINLINE(void) gen_update_cc_op(DisasContext *s)
#endif /* VBOX */
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}
1134
/* Lazy flags from a one-operand result: cc_dst = T0, cc_src unused. */
static void gen_op_update1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
1140
/* Lazy flags from a two-operand result: cc_src = T1, cc_dst = T0. */
static void gen_op_update2_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
1146
/* Lazy flags for CMP: cc_src = T1 (subtrahend), cc_dst = T0 - T1. */
#ifndef VBOX
static inline void gen_op_cmpl_T0_T1_cc(void)
#else /* VBOX */
DECLINLINE(void) gen_op_cmpl_T0_T1_cc(void)
#endif /* VBOX */
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}
1156
/* Lazy flags for TEST: cc_dst = T0 & T1, cc_src unused. */
#ifndef VBOX
static inline void gen_op_testl_T0_T1_cc(void)
#else /* VBOX */
DECLINLINE(void) gen_op_testl_T0_T1_cc(void)
#endif /* VBOX */
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}
1166
/* Lazy flags for NEG: cc_src = -T0 (the original value), cc_dst = T0. */
static void gen_op_update_neg_cc(void)
{
    tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
1172
/* compute eflags.C to reg */
/* Index cc_table by the runtime cc_op and make an indirect call to the
   entry's compute_c function pointer; its i32 result is widened into
   'reg'.  The shift scales cc_op by the table entry size: 8 bytes on
   32-bit hosts (shift 3), 16 bytes on 64-bit hosts (shift 4). */
static void gen_compute_eflags_c(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}
1194
/* compute all eflags to cc_src */
/* Same dispatch scheme as gen_compute_eflags_c, but through the
   table entry's compute_all function pointer; the full flags word is
   widened into 'reg'. */
static void gen_compute_eflags(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}
1216
/* Slow-path SETcc: evaluate condition 'jcc_op' (JCC_O/B/Z/BE/S/P/L/LE)
   from the full computed EFLAGS and leave the 0/1 result in T0.
   Bit positions extracted below: CF=0, PF=2, ZF=6, SF=7, OF=11. */
#ifndef VBOX
static inline void gen_setcc_slow_T0(DisasContext *s, int jcc_op)
#else /* VBOX */
DECLINLINE(void) gen_setcc_slow_T0(DisasContext *s, int jcc_op)
#endif /* VBOX */
{
    /* flush the statically-known cc_op so the eflags helpers see it */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    switch(jcc_op) {
    case JCC_O:
        /* OF (bit 11) */
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 11);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_B:
        /* CF only: the cheaper compute_c helper suffices */
        gen_compute_eflags_c(cpu_T[0]);
        break;
    case JCC_Z:
        /* ZF (bit 6) */
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 6);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_BE:
        /* CF | ZF: OR bit 6 down onto bit 0 and mask */
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 6);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_S:
        /* SF (bit 7) */
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 7);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_P:
        /* PF (bit 2) */
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 2);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_L:
        /* SF != OF */
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 7); /* CC_S */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    default:
    case JCC_LE:
        /* (SF != OF) | ZF — also the fallback for unexpected jcc_op */
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp4, cpu_tmp0, 7); /* CC_S */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 6); /* CC_Z */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    }
}
1274
1275/* return true if setcc_slow is not needed (WARNING: must be kept in
1276 sync with gen_jcc1) */
1277static int is_fast_jcc_case(DisasContext *s, int b)
1278{
1279 int jcc_op;
1280 jcc_op = (b >> 1) & 7;
1281 switch(s->cc_op) {
1282 /* we optimize the cmp/jcc case */
1283 case CC_OP_SUBB:
1284 case CC_OP_SUBW:
1285 case CC_OP_SUBL:
1286 case CC_OP_SUBQ:
1287 if (jcc_op == JCC_O || jcc_op == JCC_P)
1288 goto slow_jcc;
1289 break;
1290
1291 /* some jumps are easy to compute */
1292 case CC_OP_ADDB:
1293 case CC_OP_ADDW:
1294 case CC_OP_ADDL:
1295 case CC_OP_ADDQ:
1296
1297 case CC_OP_LOGICB:
1298 case CC_OP_LOGICW:
1299 case CC_OP_LOGICL:
1300 case CC_OP_LOGICQ:
1301
1302 case CC_OP_INCB:
1303 case CC_OP_INCW:
1304 case CC_OP_INCL:
1305 case CC_OP_INCQ:
1306
1307 case CC_OP_DECB:
1308 case CC_OP_DECW:
1309 case CC_OP_DECL:
1310 case CC_OP_DECQ:
1311
1312 case CC_OP_SHLB:
1313 case CC_OP_SHLW:
1314 case CC_OP_SHLL:
1315 case CC_OP_SHLQ:
1316 if (jcc_op != JCC_Z && jcc_op != JCC_S)
1317 goto slow_jcc;
1318 break;
1319 default:
1320 slow_jcc:
1321 return 0;
1322 }
1323 return 1;
1324}
1325
/* generate a conditional jump to label 'l1' according to jump opcode
   value 'b'. In the fast case, T0 is guaranted not to be used. */
/* The low bit of 'b' inverts the condition; bits 1..3 select JCC_*.
   For known SUB/ADD/... cc_ops the branch is emitted directly from
   cc_dst/cc_src without materializing EFLAGS; otherwise it falls back
   to gen_setcc_slow_T0. */
#ifndef VBOX
static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
#else /* VBOX */
DECLINLINE(void) gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
#endif /* VBOX */
{
    int inv, jcc_op, size, cond;
    TCGv t0;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    switch(cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:

        /* size in log2(bytes): 0=byte, 1=word, 2=long, 3=quad */
        size = cc_op - CC_OP_SUBB;
        switch(jcc_op) {
        case JCC_Z:
        fast_jcc_z:
            /* ZF: branch on the size-masked result (cc_dst) == 0 */
            switch(size) {
            case 0:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xff);
                t0 = cpu_tmp0;
                break;
            case 1:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffff);
                t0 = cpu_tmp0;
                break;
#ifdef TARGET_X86_64
            case 2:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffffffff);
                t0 = cpu_tmp0;
                break;
#endif
            default:
                t0 = cpu_cc_dst;
                break;
            }
            tcg_gen_brcondi_tl(inv ? TCG_COND_NE : TCG_COND_EQ, t0, 0, l1);
            break;
        case JCC_S:
        fast_jcc_s:
            /* SF: branch on the sign bit of the result for this size */
            switch(size) {
            case 0:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
            case 1:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x8000);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
#ifdef TARGET_X86_64
            case 2:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80000000);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
#endif
            default:
                /* full-width: a signed compare against 0 tests the sign */
                tcg_gen_brcondi_tl(inv ? TCG_COND_GE : TCG_COND_LT, cpu_cc_dst,
                                   0, l1);
                break;
            }
            break;

        case JCC_B:
            cond = inv ? TCG_COND_GEU : TCG_COND_LTU;
            goto fast_jcc_b;
        case JCC_BE:
            cond = inv ? TCG_COND_GTU : TCG_COND_LEU;
        fast_jcc_b:
            /* reconstruct op1 = cc_dst + cc_src (cc_dst holds op1 - op2,
               cc_src holds op2) and compare unsigned against op2 */
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
            switch(size) {
            case 0:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xff);
                break;
            case 1:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffff);
                break;
#ifdef TARGET_X86_64
            case 2:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffffffff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffffffff);
                break;
#endif
            default:
                t0 = cpu_cc_src;
                break;
            }
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
            break;

        case JCC_L:
            cond = inv ? TCG_COND_GE : TCG_COND_LT;
            goto fast_jcc_l;
        case JCC_LE:
            cond = inv ? TCG_COND_GT : TCG_COND_LE;
        fast_jcc_l:
            /* same reconstruction as above, but sign-extended and
               compared signed */
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
            switch(size) {
            case 0:
                t0 = cpu_tmp0;
                tcg_gen_ext8s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext8s_tl(t0, cpu_cc_src);
                break;
            case 1:
                t0 = cpu_tmp0;
                tcg_gen_ext16s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext16s_tl(t0, cpu_cc_src);
                break;
#ifdef TARGET_X86_64
            case 2:
                t0 = cpu_tmp0;
                tcg_gen_ext32s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext32s_tl(t0, cpu_cc_src);
                break;
#endif
            default:
                t0 = cpu_cc_src;
                break;
            }
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
            break;

        default:
            goto slow_jcc;
        }
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_ADCB:
    case CC_OP_ADCW:
    case CC_OP_ADCL:
    case CC_OP_ADCQ:

    case CC_OP_SBBB:
    case CC_OP_SBBW:
    case CC_OP_SBBL:
    case CC_OP_SBBQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:

    case CC_OP_SARB:
    case CC_OP_SARW:
    case CC_OP_SARL:
    case CC_OP_SARQ:
        /* only Z and S come straight from cc_dst for these ops;
           '& 3' recovers the operand size within each 4-entry group */
        switch(jcc_op) {
        case JCC_Z:
            size = (cc_op - CC_OP_ADDB) & 3;
            goto fast_jcc_z;
        case JCC_S:
            size = (cc_op - CC_OP_ADDB) & 3;
            goto fast_jcc_s;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        /* generic path: compute the condition into T0, branch on it */
        gen_setcc_slow_T0(s, jcc_op);
        tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE,
                           cpu_T[0], 0, l1);
        break;
    }
}
1527
/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
/* Emit the "exit REP loop when ECX == 0" prologue: if ECX != 0 fall
   through to the string op (via l1), otherwise reach l2 which jumps to
   next_eip.  Returns l2 so callers can branch back to the loop exit. */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx(s->aflag, l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}
1542
/* Emit one STOS iteration: store rAX (T0) of size 'ot' to [EDI] and
   advance EDI by the direction-flag shift.  Note the load from EAX is
   always OT_LONG; the store itself is sized by 'ot'. */
#ifndef VBOX
static inline void gen_stos(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_stos(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
1555
/* Emit one LODS iteration: load size 'ot' from [ESI] into rAX and
   advance ESI by the direction-flag shift. */
#ifndef VBOX
static inline void gen_lods(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_lods(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
}
1568
/* Emit one SCAS iteration: compare rAX against [EDI] (setting the lazy
   CMP flags) and advance EDI by the direction-flag shift. */
#ifndef VBOX
static inline void gen_scas(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_scas(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
1582
/* Emit one CMPS iteration: compare [ESI] with [EDI] (lazy CMP flags)
   and advance both index registers by the direction-flag shift. */
#ifndef VBOX
static inline void gen_cmps(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_cmps(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}
1598
/* Emit one INS iteration: read size 'ot' from the I/O port in DX and
   store it to [EDI], advancing EDI.  Bracketed with gen_io_start/end
   when instruction counting is active. */
#ifndef VBOX
static inline void gen_ins(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_ins(DisasContext *s, int ot)
#endif /* VBOX */
{
    if (use_icount)
        gen_io_start();
    gen_string_movl_A0_EDI(s);
    /* Note: we must do this dummy write first to be restartable in
       case of page fault. */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    /* port number = DX (masked to 16 bits) */
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
    if (use_icount)
        gen_io_end();
}
1622
/* Emit one OUTS iteration: load size 'ot' from [ESI] and write it to
   the I/O port in DX, advancing ESI.  Bracketed with gen_io_start/end
   when instruction counting is active. */
#ifndef VBOX
static inline void gen_outs(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_outs(DisasContext *s, int ot)
#endif /* VBOX */
{
    if (use_icount)
        gen_io_start();
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);

    /* port number = DX (masked to 16 bits), value = T0 */
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
    tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);

    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    if (use_icount)
        gen_io_end();
}
1645
/* same method as Valgrind : we generate jumps to current or next
   instruction */
/* GEN_REPZ(op) expands to gen_repz_<op>(): the REP-prefixed form of a
   string instruction.  It emits the ECX==0 early exit, one iteration of
   gen_<op>, an ECX decrement, and a jump back to cur_eip to re-test.
   The VBOX variant differs only in the DECLINLINE annotation. */
#ifndef VBOX
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
#else /* VBOX */
#define GEN_REPZ(op)                                                          \
DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot,                     \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    /* a loop would cause two single step exceptions if ECX = 1               \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
#endif /* VBOX */
1681
/* GEN_REPZ2(op) is GEN_REPZ for the flag-testing string ops (SCAS/CMPS):
   after each iteration it additionally tests ZF and exits the loop via
   l2 when the REPZ/REPNZ termination condition holds ('nz' selects
   REPNZ semantics).  The VBOX variant differs only in DECLINLINE. */
#ifndef VBOX
#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2);                \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
#else /* VBOX */
#define GEN_REPZ2(op)                                                         \
DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot,                     \
                                 target_ulong cur_eip,                        \
                                 target_ulong next_eip,                       \
                                 int nz)                                      \
{                                                                             \
    int l2;                                                                   \
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_add_reg_im(s->aflag, R_ECX, -1);                                   \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2);                \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx(s->aflag, l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
#endif /* VBOX */

/* instantiate gen_repz_movs/stos/lods/ins/outs/scas/cmps */
GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)
1727
/* FPU arithmetic helpers indexed by the instruction's /r field
   (0=add 1=mul 2=com 3=comp 4=sub 5=subr 6=div 7=divr); index 3 reuses
   fcom — the pop for FCOMP is presumably emitted by the caller. */
static void *helper_fp_arith_ST0_FT0[8] = {
    helper_fadd_ST0_FT0,
    helper_fmul_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fsub_ST0_FT0,
    helper_fsubr_ST0_FT0,
    helper_fdiv_ST0_FT0,
    helper_fdivr_ST0_FT0,
};
1738
/* NOTE the exception in "r" op ordering */
/* Same table for the ST(i) <- ST(i) op ST0 forms: sub/subr and
   div/divr are swapped relative to the table above, and the compare
   slots (2,3) are NULL since they have no STN destination form. */
static void *helper_fp_arith_STN_ST0[8] = {
    helper_fadd_STN_ST0,
    helper_fmul_STN_ST0,
    NULL,
    NULL,
    helper_fsubr_STN_ST0,
    helper_fsub_STN_ST0,
    helper_fdivr_STN_ST0,
    helper_fdiv_STN_ST0,
};
1750
/* if d == OR_TMP0, it means memory operand (address in A0) */
/* Emit one ALU operation 'op' of size 'ot': left operand/destination is
   register 'd' (or the memory operand at A0 when d == OR_TMP0), right
   operand is T1.  Updates the lazy flags state (cc_src/cc_dst/cc_op). */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    /* load the left operand into T0 */
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg(ot, 0, d);
    } else {
        gen_op_ld_T0_A0(ot + s1->mem_index);
    }
    switch(op) {
    case OP_ADCL:
        /* ADC consumes the current carry: flush the known cc_op and
           materialize C into tmp4 before adding it in */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        /* cc_op is computed at run time: CC_OP_ADDB + ot + 4*carry
           (relies on the ADC group immediately following the ADD group
           in the CC_OP enum — the carry in tmp4 is 0 or 1) */
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_SBBL:
        /* SBB: same scheme as ADC, subtracting the borrow */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_ADDB + ot;
        break;
    case OP_SUBL:
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    default:
        /* unexpected op values are treated as AND */
    case OP_ANDL:
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_ORL:
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_XORL:
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_CMPL:
        /* CMP only sets flags; no result is written back */
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    }
}
1846
/* if d == OR_TMP0, it means memory operand (address in A0) */
/* Emit INC (c > 0) or DEC of size 'ot' on destination 'd'.  The current
   carry is computed into cc_src first — presumably because INC/DEC
   leave CF unchanged, so the flag helpers must recover it from there. */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    /* flush the known cc_op so gen_compute_eflags_c below sees it */
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    else
        gen_op_st_T0_A0(ot + s1->mem_index);
    gen_compute_eflags_c(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}
1870
/* Emit SHL/SHR/SAR of size 'ot' on operand 'op1' (OR_TMP0 = memory at
   A0) with variable count in T1.  is_right selects right shifts,
   is_arith selects SAR.  Flags are updated only when the masked count
   is non-zero, so cc_op becomes dynamic. */
static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
                            int is_right, int is_arith)
{
    target_ulong mask;
    int shift_label;
    TCGv t0, t1;

    /* hardware masks the count to 6 bits for 64-bit ops, 5 otherwise */
    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

    /* T3 = value shifted by count-1, kept for the flag helpers
       (presumably so CF can be recovered from its last bit out).
       NOTE(review): when the masked count is 0, tmp5 is -1 and these
       shifts have a formally out-of-range count; T3 is only consumed
       on the count != 0 path below. */
    tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);

    if (is_right) {
        if (is_arith) {
            gen_exts(ot, cpu_T[0]);
            tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        } else {
            gen_extu(ot, cpu_T[0]);
            tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* XXX: inefficient */
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);

    tcg_gen_mov_tl(t0, cpu_T[0]);
    tcg_gen_mov_tl(t1, cpu_T3);

    /* skip the flag update entirely when the count was zero */
    shift_label = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_T[1], 0, shift_label);

    tcg_gen_mov_tl(cpu_cc_src, t1);
    tcg_gen_mov_tl(cpu_cc_dst, t0);
    if (is_right)
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    else
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);

    gen_set_label(shift_label);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
1941
/* Immediate-count variant of gen_shift_rm_T1: the count 'op2' is known
   at translation time, so the flag update (and the count == 0 no-op
   case) is resolved statically instead of with run-time branches. */
static void gen_shift_rm_im(DisasContext *s, int ot, int op1, int op2,
                            int is_right, int is_arith)
{
    int mask;

    /* hardware masks the count to 6 bits for 64-bit ops, 5 otherwise */
    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    op2 &= mask;
    if (op2 != 0) {
        /* tmp4 = value shifted by count-1, saved for the flag helpers */
        if (is_right) {
            if (is_arith) {
                gen_exts(ot, cpu_T[0]);
                tcg_gen_sari_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_sari_tl(cpu_T[0], cpu_T[0], op2);
            } else {
                gen_extu(ot, cpu_T[0]);
                tcg_gen_shri_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], op2);
            }
        } else {
            tcg_gen_shli_tl(cpu_tmp4, cpu_T[0], op2 - 1);
            tcg_gen_shli_tl(cpu_T[0], cpu_T[0], op2);
        }
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (op2 != 0) {
        tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        if (is_right)
            s->cc_op = CC_OP_SARB + ot;
        else
            s->cc_op = CC_OP_SHLB + ot;
    }
}
1992
1993#ifndef VBOX
1994static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
1995#else /* VBOX */
1996DECLINLINE(void) tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
1997#endif /* VBOX */
1998{
1999 if (arg2 >= 0)
2000 tcg_gen_shli_tl(ret, arg1, arg2);
2001 else
2002 tcg_gen_shri_tl(ret, arg1, -arg2);
2003}
2004
/* XXX: add faster immediate case */
/* Emit ROL/ROR of size 'ot' on operand 'op1' (OR_TMP0 = memory at A0)
   with variable count in T1.  The rotate is composed from two opposing
   shifts; CF and OF are updated only when the masked count is non-zero,
   so cc_op becomes dynamic. */
static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
                          int is_right)
{
    target_ulong mask;
    int label1, label2, data_bits;
    TCGv t0, t1, t2, a0;

    /* XXX: inefficient, but we must use local temps */
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);
    t2 = tcg_temp_local_new(TCG_TYPE_TL);
    a0 = tcg_temp_local_new(TCG_TYPE_TL);

    /* count mask: 6 bits for 64-bit operands, 5 otherwise */
    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0) {
        tcg_gen_mov_tl(a0, cpu_A0);
        gen_op_ld_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_v_reg(ot, t0, op1);
    }

    tcg_gen_mov_tl(t1, cpu_T[1]);

    tcg_gen_andi_tl(t1, t1, mask);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label1);

    /* effective rotate count = count mod data_bits for sub-long sizes */
    if (ot <= OT_WORD)
        tcg_gen_andi_tl(cpu_tmp0, t1, (1 << (3 + ot)) - 1);
    else
        tcg_gen_mov_tl(cpu_tmp0, t1);

    gen_extu(ot, t0);
    tcg_gen_mov_tl(t2, t0);      /* keep pre-rotate value for OF below */

    data_bits = 8 << ot;
    /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
       fix TCG definition) */
    /* rotate = (x >> n) | (x << (data_bits - n)), or the mirror image */
    if (is_right) {
        tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shl_tl(t0, t0, cpu_tmp0);
    } else {
        tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shr_tl(t0, t0, cpu_tmp0);
    }
    tcg_gen_or_tl(t0, t0, cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0) {
        gen_op_st_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_reg_v(ot, op1, t0);
    }

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* flags are left untouched when the masked count was zero */
    label2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label2);

    gen_compute_eflags(cpu_cc_src);
    tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
    /* OF = MSB of (old ^ new), moved into the CC_O bit position */
    tcg_gen_xor_tl(cpu_tmp0, t2, t0);
    tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
    /* CF = bit rotated around: MSB of the result for ROR, LSB for ROL */
    if (is_right) {
        tcg_gen_shri_tl(t0, t0, data_bits - 1);
    }
    tcg_gen_andi_tl(t0, t0, CC_C);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);

    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(a0);
}
2101
/* RCL/RCR helpers indexed by ot + 4*is_right (see gen_rotc_rm_T1);
   the 64-bit entries are NULL on 32-bit-only targets. */
static void *helper_rotc[8] = {
    helper_rclb,
    helper_rclw,
    helper_rcll,
    X86_64_ONLY(helper_rclq),
    helper_rcrb,
    helper_rcrw,
    helper_rcrl,
    X86_64_ONLY(helper_rcrq),
};
2112
/* XXX: add faster immediate = 1 case */
/* Emit RCL/RCR (rotate through carry) of size 'ot' on operand 'op1'
   via the helper_rotc table; the count is in T1.  The flag update is
   conditional on cpu_cc_tmp != -1, which appears to be the helper's
   "count was zero, flags unchanged" sentinel (TODO confirm in the
   rcl/rcr helpers). */
static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
                           int is_right)
{
    int label1;

    /* helpers read the live EFLAGS, so flush the known cc_op first */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
                       cpu_T[0], cpu_T[0], cpu_T[1]);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_tmp, -1, label1);

    tcg_gen_mov_tl(cpu_cc_src, cpu_cc_tmp);
    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label1);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
2147
/* XXX: add faster immediate case */
/* Emit SHLD/SHRD: double-precision shift of operand 'op1' (OR_TMP0 =
   memory at A0), with the second operand in T1 and the shift count in
   T3.  Flags are updated only when the masked count is non-zero, so
   cc_op becomes dynamic. */
static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
                                int is_right)
{
    int label1, label2, data_bits;
    target_ulong mask;
    TCGv t0, t1, t2, a0;

    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);
    t2 = tcg_temp_local_new(TCG_TYPE_TL);
    a0 = tcg_temp_local_new(TCG_TYPE_TL);

    /* count mask: 6 bits for 64-bit operands, 5 otherwise */
    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0) {
        tcg_gen_mov_tl(a0, cpu_A0);
        gen_op_ld_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_v_reg(ot, t0, op1);
    }

    tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);

    tcg_gen_mov_tl(t1, cpu_T[1]);
    tcg_gen_mov_tl(t2, cpu_T3);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);

    /* tmp5 = count - 1, used to capture the last bit shifted out (CF) */
    tcg_gen_addi_tl(cpu_tmp5, t2, -1);
    if (ot == OT_WORD) {
        /* Note: we implement the Intel behaviour for shift count > 16 */
        if (is_right) {
            /* build the 32-bit value t1:t0 and shift it as one unit */
            tcg_gen_andi_tl(t0, t0, 0xffff);
            tcg_gen_shli_tl(cpu_tmp0, t1, 16);
            tcg_gen_or_tl(t0, t0, cpu_tmp0);
            tcg_gen_ext32u_tl(t0, t0);

            tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);

            /* only needed if count > 16, but a test would complicate */
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
            tcg_gen_shl_tl(cpu_tmp0, t0, cpu_tmp5);

            tcg_gen_shr_tl(t0, t0, t2);

            tcg_gen_or_tl(t0, t0, cpu_tmp0);
        } else {
            /* XXX: not optimal */
            tcg_gen_andi_tl(t0, t0, 0xffff);
            tcg_gen_shli_tl(t1, t1, 16);
            tcg_gen_or_tl(t1, t1, t0);
            tcg_gen_ext32u_tl(t1, t1);

            tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
            tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
            tcg_gen_shr_tl(cpu_tmp6, t1, cpu_tmp0);
            tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);

            tcg_gen_shl_tl(t0, t0, t2);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
            tcg_gen_shr_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);
        }
    } else {
        data_bits = 8 << ot;
        if (is_right) {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(t0, t0);

            /* tmp4 = last bit shifted out (future CF) */
            tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);

            /* result = (t0 >> count) | (t1 << (data_bits - count)) */
            tcg_gen_shr_tl(t0, t0, t2);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
            tcg_gen_shl_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);

        } else {
            if (ot == OT_LONG)
                tcg_gen_ext32u_tl(t1, t1);

            tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);

            /* result = (t0 << count) | (t1 >> (data_bits - count)) */
            tcg_gen_shl_tl(t0, t0, t2);
            tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
            tcg_gen_shr_tl(t1, t1, cpu_tmp5);
            tcg_gen_or_tl(t0, t0, t1);
        }
    }
    tcg_gen_mov_tl(t1, cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0) {
        gen_op_st_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_reg_v(ot, op1, t0);
    }

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* flags are left untouched when the masked count was zero */
    label2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label2);

    tcg_gen_mov_tl(cpu_cc_src, t1);
    tcg_gen_mov_tl(cpu_cc_dst, t0);
    if (is_right) {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    } else {
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
    }
    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(a0);
}
2276
2277static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
2278{
2279 if (s != OR_TMP1)
2280 gen_op_mov_TN_reg(ot, 1, s);
2281 switch(op) {
2282 case OP_ROL:
2283 gen_rot_rm_T1(s1, ot, d, 0);
2284 break;
2285 case OP_ROR:
2286 gen_rot_rm_T1(s1, ot, d, 1);
2287 break;
2288 case OP_SHL:
2289 case OP_SHL1:
2290 gen_shift_rm_T1(s1, ot, d, 0, 0);
2291 break;
2292 case OP_SHR:
2293 gen_shift_rm_T1(s1, ot, d, 1, 0);
2294 break;
2295 case OP_SAR:
2296 gen_shift_rm_T1(s1, ot, d, 1, 1);
2297 break;
2298 case OP_RCL:
2299 gen_rotc_rm_T1(s1, ot, d, 0);
2300 break;
2301 case OP_RCR:
2302 gen_rotc_rm_T1(s1, ot, d, 1);
2303 break;
2304 }
2305}
2306
2307static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
2308{
2309 switch(op) {
2310 case OP_SHL:
2311 case OP_SHL1:
2312 gen_shift_rm_im(s1, ot, d, c, 0, 0);
2313 break;
2314 case OP_SHR:
2315 gen_shift_rm_im(s1, ot, d, c, 1, 0);
2316 break;
2317 case OP_SAR:
2318 gen_shift_rm_im(s1, ot, d, c, 1, 1);
2319 break;
2320 default:
2321 /* currently not optimized */
2322 gen_op_movl_T1_im(c);
2323 gen_shift(s1, op, ot, d, OR_TMP1);
2324 break;
2325 }
2326}
2327
/* Decode the ModR/M (and optional SIB + displacement) bytes at s->pc
   and emit code computing the effective address into A0, including any
   segment base.  Always returns OR_A0/0 through reg_ptr/offset_ptr.
   The 16-bit path is the trailing 'else'; s->aflag selects 32/64-bit
   addressing. */
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    /* a segment-override prefix forces the segment addition */
    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {

        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        /* rm == 4 means a SIB byte follows */
        if (base == 4) {
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                /* mod=0, base=5: no base, disp32 only; in 64-bit code
                   without SIB this is RIP-relative addressing */
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(base);
                if (disp != 0) {
                    gen_op_addq_A0_im(disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg(base);
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
            /* no base register: A0 = displacement alone */
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_im(disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN(scale, index);
            } else
#endif
            {
                gen_op_addl_A0_reg_sN(scale, index);
            }
        }
        if (must_add_seg) {
            /* default segment: SS for EBP/ESP-based addressing, else DS */
            if (override < 0) {
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(override);
            } else
#endif
            {
                gen_op_addl_A0_seg(override);
            }
        }
    } else {
        /* 16-bit addressing: fixed base/index register pairs per rm */
        switch (mod) {
        case 0:
            if (rm == 6) {
                /* mod=0, rm=6: disp16 only */
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 1:
            gen_op_movl_A0_reg(R_EBX);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 2:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_ESI);
            break;
        case 3:
            gen_op_movl_A0_reg(R_EBP);
            gen_op_addl_A0_reg_sN(0, R_EDI);
            break;
        case 4:
            gen_op_movl_A0_reg(R_ESI);
            break;
        case 5:
            gen_op_movl_A0_reg(R_EDI);
            break;
        case 6:
            gen_op_movl_A0_reg(R_EBP);
            break;
        default:
        case 7:
            gen_op_movl_A0_reg(R_EBX);
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        /* 16-bit effective addresses wrap at 64K */
        gen_op_andl_A0_ffff();
    no_rm:
        if (must_add_seg) {
            /* BP-based forms (rm 2,3,6) default to SS, others to DS */
            if (override < 0) {
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(override);
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
2511
2512static void gen_nop_modrm(DisasContext *s, int modrm)
2513{
2514 int mod, rm, base, code;
2515
2516 mod = (modrm >> 6) & 3;
2517 if (mod == 3)
2518 return;
2519 rm = modrm & 7;
2520
2521 if (s->aflag) {
2522
2523 base = rm;
2524
2525 if (base == 4) {
2526 code = ldub_code(s->pc++);
2527 base = (code & 7);
2528 }
2529
2530 switch (mod) {
2531 case 0:
2532 if (base == 5) {
2533 s->pc += 4;
2534 }
2535 break;
2536 case 1:
2537 s->pc++;
2538 break;
2539 default:
2540 case 2:
2541 s->pc += 4;
2542 break;
2543 }
2544 } else {
2545 switch (mod) {
2546 case 0:
2547 if (rm == 6) {
2548 s->pc += 2;
2549 }
2550 break;
2551 case 1:
2552 s->pc++;
2553 break;
2554 default:
2555 case 2:
2556 s->pc += 2;
2557 break;
2558 }
2559 }
2560}
2561
2562/* used for LEA and MOV AX, mem */
2563static void gen_add_A0_ds_seg(DisasContext *s)
2564{
2565 int override, must_add_seg;
2566 must_add_seg = s->addseg;
2567 override = R_DS;
2568 if (s->override >= 0) {
2569 override = s->override;
2570 must_add_seg = 1;
2571 } else {
2572 override = R_DS;
2573 }
2574 if (must_add_seg) {
2575#ifdef TARGET_X86_64
2576 if (CODE64(s)) {
2577 gen_op_addq_A0_seg(override);
2578 } else
2579#endif
2580 {
2581 gen_op_addl_A0_seg(override);
2582 }
2583 }
2584}
2585
/* generate modrm register move or memory load/store of 'reg' with operand
   type 'ot'.  TMP0 is used as the data source/destination if reg ==
   OR_TMP0.  mod == 3 selects a register operand; otherwise the effective
   address is computed and memory is accessed. */
static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
{
    int mod, rm, opreg, disp;

    mod = (modrm >> 6) & 3;
    rm = (modrm & 7) | REX_B(s);
    if (mod == 3) {
        /* register operand */
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_mov_reg_T0(ot, rm);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    } else {
        /* memory operand: address computed into A0 */
        gen_lea_modrm(s, modrm, &opreg, &disp);
        if (is_store) {
            if (reg != OR_TMP0)
                gen_op_mov_TN_reg(ot, 0, reg);
            gen_op_st_T0_A0(ot + s->mem_index);
        } else {
            gen_op_ld_T0_A0(ot + s->mem_index);
            if (reg != OR_TMP0)
                gen_op_mov_reg_T0(ot, reg);
        }
    }
}
2617
#ifndef VBOX
static inline uint32_t insn_get(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(uint32_t) insn_get(DisasContext *s, int ot)
#endif /* VBOX */
{
    /* Fetch an immediate operand of type 'ot' from the instruction
       stream and advance s->pc past it (1, 2 or 4 bytes). */
    uint32_t ret;

    switch(ot) {
    case OT_BYTE:
        ret = ldub_code(s->pc);
        s->pc++;
        break;
    case OT_WORD:
        ret = lduw_code(s->pc);
        s->pc += 2;
        break;
    default:
    case OT_LONG:
        ret = ldl_code(s->pc);
        s->pc += 4;
        break;
    }
    return ret;
}
2643
2644#ifndef VBOX
2645static inline int insn_const_size(unsigned int ot)
2646#else /* VBOX */
2647DECLINLINE(int) insn_const_size(unsigned int ot)
2648#endif /* VBOX */
2649{
2650 if (ot <= OT_LONG)
2651 return 1 << ot;
2652 else
2653 return 4;
2654}
2655
#ifndef VBOX
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
#else /* VBOX */
DECLINLINE(void) gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
#endif /* VBOX */
{
    /* Emit a jump to guest address cs_base + eip.  Uses direct TB
       chaining via slot 'tb_num' when the target stays on a page this
       TB already covers; otherwise falls back to a generic end of
       block. */
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
#ifdef VBOX
        /* VBox: poll for pending external events before chaining */
        gen_check_external_event(s);
#endif /* VBOX */
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(eip);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
2683
#ifndef VBOX
static inline void gen_jcc(DisasContext *s, int b,
#else /* VBOX */
DECLINLINE(void) gen_jcc(DisasContext *s, int b,
#endif /* VBOX */
                          target_ulong val, target_ulong next_eip)
{
    /* Emit a conditional jump on condition code 'b': taken -> 'val',
       not taken -> 'next_eip'.  When jmp_opt is set both edges use
       direct TB chaining; otherwise EIP is updated and the block ends
       with gen_eob(). */
    int l1, l2, cc_op;

    cc_op = s->cc_op;
    if (s->cc_op != CC_OP_DYNAMIC) {
        /* flush the static flags state; gen_jcc1 uses the saved cc_op */
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
    if (s->jmp_opt) {
        l1 = gen_new_label();
        gen_jcc1(s, cc_op, b, l1);

        /* fall-through edge */
        gen_goto_tb(s, 0, next_eip);

        /* taken edge */
        gen_set_label(l1);
        gen_goto_tb(s, 1, val);
        s->is_jmp = 3;
    } else {

        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_jcc1(s, cc_op, b, l1);

        /* not taken: continue at next_eip */
        gen_jmp_im(next_eip);
        tcg_gen_br(l2);

        /* taken: continue at val */
        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
2722
/* Emit code leaving 1 in T0 if condition code 'b' holds, 0 otherwise
   (SETcc). */
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op, l1;
    TCGv t0;

    if (is_fast_jcc_case(s, b)) {
        /* nominal case: we use a jump */
        /* XXX: make it faster by adding new instructions in TCG */
        t0 = tcg_temp_local_new(TCG_TYPE_TL);
        tcg_gen_movi_tl(t0, 0);
        l1 = gen_new_label();
        /* test the inverted condition and skip the '1' store if it holds */
        gen_jcc1(s, s->cc_op, b ^ 1, l1);
        tcg_gen_movi_tl(t0, 1);
        gen_set_label(l1);
        tcg_gen_mov_tl(cpu_T[0], t0);
        tcg_temp_free(t0);
    } else {
        /* slow case: it is more efficient not to generate a jump,
           although it is questionable whether this optimization is
           worth it */
        inv = b & 1;
        jcc_op = (b >> 1) & 7;
        gen_setcc_slow_T0(s, jcc_op);
        if (inv) {
            /* odd condition codes are the negation of the even ones */
            tcg_gen_xori_tl(cpu_T[0], cpu_T[0], 1);
        }
    }
}
2751
#ifndef VBOX
static inline void gen_op_movl_T0_seg(int seg_reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_seg(int seg_reg)
#endif /* VBOX */
{
    /* Load the selector of segment register 'seg_reg' into T0. */
    tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                     offsetof(CPUX86State,segs[seg_reg].selector));
}
2761
#ifndef VBOX
static inline void gen_op_movl_seg_T0_vm(int seg_reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_seg_T0_vm(int seg_reg)
#endif /* VBOX */
{
    /* Real-mode / vm86 segment load: selector = T0 & 0xffff and
       base = selector << 4, with no descriptor table access. */
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
    tcg_gen_st32_tl(cpu_T[0], cpu_env,
                    offsetof(CPUX86State,segs[seg_reg].selector));
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
    tcg_gen_st_tl(cpu_T[0], cpu_env,
                  offsetof(CPUX86State,segs[seg_reg].base));
#ifdef VBOX
    /* VBox additionally keeps the cached descriptor flags consistent:
       present, system-segment style writable data (plus code flag for
       CS).  NOTE(review): declaration after statements here relies on
       C99/GNU C. */
    int flags = DESC_P_MASK | DESC_S_MASK | DESC_W_MASK;
    if (seg_reg == R_CS)
        flags |= DESC_CS_MASK;
    gen_op_movl_T0_im(flags);
    tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[seg_reg].flags));
#endif
}
2782
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* protected mode: the helper performs the descriptor checks and
           may raise an exception, so flags and EIP must be synced first */
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        /* real or vm86 mode: direct load, no checks */
        gen_op_movl_seg_T0_vm(seg_reg);
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
2806
2807#ifndef VBOX
2808static inline int svm_is_rep(int prefixes)
2809#else /* VBOX */
2810DECLINLINE(int) svm_is_rep(int prefixes)
2811#endif /* VBOX */
2812{
2813 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2814}
2815
#ifndef VBOX
static inline void
#else /* VBOX */
DECLINLINE(void)
#endif /* VBOX */
gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
                              uint32_t type, uint64_t param)
{
    /* Emit an SVM intercept check of exit code 'type' with exit-info
       'param'.  The helper may cause a #VMEXIT, so flags and EIP are
       synchronized first. */
    /* no SVM activated; fast case */
    if (likely(!(s->flags & HF_SVMI_MASK)))
        return;
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(pc_start - s->cs_base);
    tcg_gen_helper_0_2(helper_svm_check_intercept_param,
                       tcg_const_i32(type), tcg_const_i64(param));
}
2833
2834#ifndef VBOX
2835static inline void
2836#else /* VBOX */
2837DECLINLINE(void)
2838#endif
2839gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2840{
2841 gen_svm_check_intercept_param(s, pc_start, type, 0);
2842}
2843
#ifndef VBOX
static inline void gen_stack_update(DisasContext *s, int addend)
#else /* VBOX */
DECLINLINE(void) gen_stack_update(DisasContext *s, int addend)
#endif /* VBOX */
{
    /* Add 'addend' to the stack pointer using the width implied by the
       current mode: 64-bit (size index 2), 32-bit (1) or 16-bit (0). */
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_add_reg_im(2, R_ESP, addend);
    } else
#endif
    if (s->ss32) {
        gen_op_add_reg_im(1, R_ESP, addend);
    } else {
        gen_op_add_reg_im(0, R_ESP, addend);
    }
}
2861
/* generate a push of T0. It depends on ss32, addseg and dflag */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        /* 64-bit mode: push 8 bytes, or 2 with a 16-bit operand size */
        gen_op_movq_A0_reg(R_ESP);
        if (s->dflag) {
            gen_op_addq_A0_im(-8);
            gen_op_st_T0_A0(OT_QUAD + s->mem_index);
        } else {
            gen_op_addq_A0_im(-2);
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
        }
        gen_op_mov_reg_A0(2, R_ESP);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (!s->dflag)
            gen_op_addl_A0_im(-2);
        else
            gen_op_addl_A0_im(-4);
        if (s->ss32) {
            if (s->addseg) {
                /* keep the unsegmented SP value in T1 for the final
                   register update */
                tcg_gen_mov_tl(cpu_T[1], cpu_A0);
                gen_op_addl_A0_seg(R_SS);
            }
        } else {
            /* 16-bit stack: SP wraps at 64K and the SS base is always
               added */
            gen_op_andl_A0_ffff();
            tcg_gen_mov_tl(cpu_T[1], cpu_A0);
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
        /* write back the new stack pointer */
        if (s->ss32 && !s->addseg)
            gen_op_mov_reg_A0(1, R_ESP);
        else
            gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
    }
}
2901
2902/* generate a push. It depends on ss32, addseg and dflag */
2903/* slower version for T1, only used for call Ev */
2904static void gen_push_T1(DisasContext *s)
2905{
2906#ifdef TARGET_X86_64
2907 if (CODE64(s)) {
2908 gen_op_movq_A0_reg(R_ESP);
2909 if (s->dflag) {
2910 gen_op_addq_A0_im(-8);
2911 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2912 } else {
2913 gen_op_addq_A0_im(-2);
2914 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2915 }
2916 gen_op_mov_reg_A0(2, R_ESP);
2917 } else
2918#endif
2919 {
2920 gen_op_movl_A0_reg(R_ESP);
2921 if (!s->dflag)
2922 gen_op_addl_A0_im(-2);
2923 else
2924 gen_op_addl_A0_im(-4);
2925 if (s->ss32) {
2926 if (s->addseg) {
2927 gen_op_addl_A0_seg(R_SS);
2928 }
2929 } else {
2930 gen_op_andl_A0_ffff();
2931 gen_op_addl_A0_seg(R_SS);
2932 }
2933 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2934
2935 if (s->ss32 && !s->addseg)
2936 gen_op_mov_reg_A0(1, R_ESP);
2937 else
2938 gen_stack_update(s, (-2) << s->dflag);
2939 }
2940}
2941
/* two step pop is necessary for precise exceptions */
static void gen_pop_T0(DisasContext *s)
{
    /* Load the value at the top of the stack into T0 without touching
       ESP; the pointer adjustment is done afterwards by
       gen_pop_update(), so a fault here leaves ESP unchanged. */
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg(R_ESP);
        gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
    } else
#endif
    {
        gen_op_movl_A0_reg(R_ESP);
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_seg(R_SS);
        } else {
            /* 16-bit stack: wrap at 64K and always add the SS base */
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_seg(R_SS);
        }
        gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
    }
}
2963
2964static void gen_pop_update(DisasContext *s)
2965{
2966#ifdef TARGET_X86_64
2967 if (CODE64(s) && s->dflag) {
2968 gen_stack_update(s, 8);
2969 } else
2970#endif
2971 {
2972 gen_stack_update(s, 2 << s->dflag);
2973 }
2974}
2975
/* Compute the address of the stack top into A0; the unsegmented offset
   is preserved in T1 for later stack-pointer updates. */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
}
2985
/* PUSHA/PUSHAD: push EAX..EDI in register order (EAX at the highest
   address).  NOTE: wrap around in 16 bit not fully handled */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    gen_op_addl_A0_im(-16 << s->dflag);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    /* T1 keeps the final (unsegmented) stack pointer value */
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    /* store registers 7..0 upwards from the new stack top */
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
        gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
3004
/* POPA/POPAD: pop EDI..EAX.  NOTE: wrap around in 16 bit not fully
   handled */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_reg(R_ESP);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    /* T1 keeps the final (unsegmented) stack pointer value */
    tcg_gen_mov_tl(cpu_T[1], cpu_A0);
    tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);
    if (s->addseg)
        gen_op_addl_A0_seg(R_SS);
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
            gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
        }
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
}
3026
/* ENTER: allocate a stack frame of 'esp_addend' bytes with 'level'
   nesting levels; level > 0 is delegated to a helper. */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    /* the architectural nesting level is the immediate modulo 32 */
    level &= 0x1f;
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        /* NOTE(review): A0 is loaded with the 32-bit register move even
           in 64-bit mode — looks suspicious for RSP values above 4G;
           confirm against upstream, which uses a full-width load. */
        gen_op_movl_A0_reg(R_ESP);
        gen_op_addq_A0_im(-opsize);
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);

        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter64_level,
                               tcg_const_i32(level),
                               tcg_const_i32((ot == OT_QUAD)),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
        gen_op_mov_reg_T1(OT_QUAD, R_ESP);
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_reg(R_ESP);
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        tcg_gen_mov_tl(cpu_T[1], cpu_A0);
        if (s->addseg)
            gen_op_addl_A0_seg(R_SS);
        /* push bp */
        gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
        gen_op_st_T0_A0(ot + s->mem_index);
        if (level) {
            /* XXX: must save state */
            tcg_gen_helper_0_3(helper_enter_level,
                               tcg_const_i32(level),
                               tcg_const_i32(s->dflag),
                               cpu_T[1]);
        }
        gen_op_mov_reg_T1(ot, R_EBP);
        /* SP = frame base - esp_addend - level saved frame pointers */
        tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
        gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
    }
}
3082
/* Raise exception 'trapno' at guest EIP 'cur_eip'; flags and EIP are
   synced first and the translation block is terminated. */
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
    s->is_jmp = 3;
}
3091
/* an interrupt is different from an exception because of the
   privilege checks.  'cur_eip' is the faulting instruction, 'next_eip'
   the return address pushed on the stack. */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_2(helper_raise_interrupt,
                       tcg_const_i32(intno),
                       tcg_const_i32(next_eip - cur_eip));
    s->is_jmp = 3;
}
3105
/* Hand control to the debugger at 'cur_eip'; ends the block. */
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    tcg_gen_helper_0_0(helper_debug);
    s->is_jmp = 3;
}
3114
/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
#ifdef VBOX
    /* VBox: poll for pending external events at block exit */
    gen_check_external_event(s);
#endif /* VBOX */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        /* clear the pending interrupt inhibition before leaving */
        tcg_gen_helper_0_0(helper_reset_inhibit_irq);
    }
    if (s->singlestep_enabled) {
        tcg_gen_helper_0_0(helper_debug);
    } else if (s->tf) {
        /* TF set: deliver a single-step trap instead of a plain exit */
        tcg_gen_helper_0_0(helper_single_step);
    } else {
        tcg_gen_exit_tb(0);
    }
    s->is_jmp = 3;
}
3136
/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
        if (s->cc_op != CC_OP_DYNAMIC) {
            /* flush static flags state before chaining */
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        /* chaining disabled: set EIP and take a generic block exit */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
3153
3154static void gen_jmp(DisasContext *s, target_ulong eip)
3155{
3156 gen_jmp_tb(s, eip, 0);
3157}
3158
#ifndef VBOX
static inline void gen_ldq_env_A0(int idx, int offset)
#else /* VBOX */
DECLINLINE(void) gen_ldq_env_A0(int idx, int offset)
#endif /* VBOX */
{
    /* Load 64 bits from guest address A0 into the CPU state field at
       'offset'; the memory index is recovered from 'idx'. */
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
}
3169
#ifndef VBOX
static inline void gen_stq_env_A0(int idx, int offset)
#else /* VBOX */
DECLINLINE(void) gen_stq_env_A0(int idx, int offset)
#endif /* VBOX */
{
    /* Store 64 bits from the CPU state field at 'offset' to guest
       address A0; the memory index is recovered from 'idx'. */
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
}
3180
#ifndef VBOX
static inline void gen_ldo_env_A0(int idx, int offset)
#else /* VBOX */
DECLINLINE(void) gen_ldo_env_A0(int idx, int offset)
#endif /* VBOX */
{
    /* Load a 128-bit XMM value from guest address A0 into the CPU state
       field at 'offset', as two 64-bit halves. */
    int mem_index = (idx >> 2) - 1;
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
}
3194
#ifndef VBOX
static inline void gen_sto_env_A0(int idx, int offset)
#else /* VBOX */
DECLINLINE(void) gen_sto_env_A0(int idx, int offset)
#endif /* VBOX */
{
    /* Store a 128-bit XMM value from the CPU state field at 'offset' to
       guest address A0, as two 64-bit halves. */
    int mem_index = (idx >> 2) - 1;
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
    tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
    tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
}
3208
#ifndef VBOX
static inline void gen_op_movo(int d_offset, int s_offset)
#else /* VBOX */
DECLINLINE(void) gen_op_movo(int d_offset, int s_offset)
#endif /* VBOX */
{
    /* Copy a 128-bit value between two CPU state offsets, as two 64-bit
       halves. */
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
}
3220
#ifndef VBOX
static inline void gen_op_movq(int d_offset, int s_offset)
#else /* VBOX */
DECLINLINE(void) gen_op_movq(int d_offset, int s_offset)
#endif /* VBOX */
{
    /* Copy a 64-bit value between two CPU state offsets. */
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}
3230
#ifndef VBOX
static inline void gen_op_movl(int d_offset, int s_offset)
#else /* VBOX */
DECLINLINE(void) gen_op_movl(int d_offset, int s_offset)
#endif /* VBOX */
{
    /* Copy a 32-bit value between two CPU state offsets. */
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
    tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
}
3240
#ifndef VBOX
static inline void gen_op_movq_env_0(int d_offset)
#else /* VBOX */
DECLINLINE(void) gen_op_movq_env_0(int d_offset)
#endif /* VBOX */
{
    /* Zero the 64-bit CPU state field at 'd_offset'. */
    tcg_gen_movi_i64(cpu_tmp1_i64, 0);
    tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
}
3250
/* Marker values for the dispatch tables below: SSE_SPECIAL entries are
   decoded inline in gen_sse(); SSE_DUMMY entries need no helper beyond
   the generic MMX state setup (femms / emms / 3DNow!). */
#define SSE_SPECIAL ((void *)1)
#define SSE_DUMMY ((void *)2)

#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
                     helper_ ## x ## ss, helper_ ## x ## sd, }

/* Helper dispatch for the 0x0f <b> opcode space, indexed by the second
   opcode byte and the mandatory prefix: [0] none, [1] 0x66, [2] 0xf3,
   [3] 0xf2. */
static void *sse_op_table1[256][4] = {
    /* 3DNow! extensions */
    [0x0e] = { SSE_DUMMY }, /* femms */
    [0x0f] = { SSE_DUMMY }, /* pf... */
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
    [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
    [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
    [0x2e] = { helper_ucomiss, helper_ucomisd },
    [0x2f] = { helper_comiss, helper_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
    [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
    [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
    [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
    [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
               helper_cvtss2sd, helper_cvtsd2ss },
    [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq),
    [0xc6] = { helper_shufps, helper_shufpd },

    [0x38] = { SSE_SPECIAL, SSE_SPECIAL, NULL, SSE_SPECIAL }, /* SSSE3/SSE4 */
    [0x3a] = { SSE_SPECIAL, SSE_SPECIAL }, /* SSSE3/SSE4 */

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, helper_punpcklqdq_xmm },
    [0x6d] = { NULL, helper_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movqdu */
    [0x70] = { helper_pshufw_mmx,
               helper_pshufd_xmm,
               helper_pshufhw_xmm,
               helper_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_DUMMY }, /* emms */
    [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
    [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL },  /* movntq, movntq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
3386
/* Immediate-form MMX/SSE shifts (opcodes 0x0f 0x71/0x72/0x73), indexed
   by 8 * size-group + ModRM reg field; second column is the XMM form. */
static void *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, helper_psrldq_xmm },
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, helper_pslldq_xmm },
};
3399
/* Scalar integer <-> float conversions, in groups of four (ss, sd, and
   their 64-bit X86_64_ONLY variants): cvtsi2*, cvtt*2si, cvt*2si. */
static void *sse_op_table3[4 * 3] = {
    helper_cvtsi2ss,
    helper_cvtsi2sd,
    X86_64_ONLY(helper_cvtsq2ss),
    X86_64_ONLY(helper_cvtsq2sd),

    helper_cvttss2si,
    helper_cvttsd2si,
    X86_64_ONLY(helper_cvttss2sq),
    X86_64_ONLY(helper_cvttsd2sq),

    helper_cvtss2si,
    helper_cvtsd2si,
    X86_64_ONLY(helper_cvtss2sq),
    X86_64_ONLY(helper_cvtsd2sq),
};
3416
/* CMPPS/CMPPD/CMPSS/CMPSD predicates, indexed by the 3-bit immediate. */
static void *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
3427
/* 3DNow! operations, indexed by the trailing opcode byte. */
static void *sse_op_table5[256] = {
    [0x0c] = helper_pi2fw,
    [0x0d] = helper_pi2fd,
    [0x1c] = helper_pf2iw,
    [0x1d] = helper_pf2id,
    [0x8a] = helper_pfnacc,
    [0x8e] = helper_pfpnacc,
    [0x90] = helper_pfcmpge,
    [0x94] = helper_pfmin,
    [0x96] = helper_pfrcp,
    [0x97] = helper_pfrsqrt,
    [0x9a] = helper_pfsub,
    [0x9e] = helper_pfadd,
    [0xa0] = helper_pfcmpgt,
    [0xa4] = helper_pfmax,
    [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
    [0xa7] = helper_movq, /* pfrsqit1 */
    [0xaa] = helper_pfsubr,
    [0xae] = helper_pfacc,
    [0xb0] = helper_pfcmpeq,
    [0xb4] = helper_pfmul,
    [0xb6] = helper_movq, /* pfrcpit2 */
    [0xb7] = helper_pmulhrw_mmx,
    [0xbb] = helper_pswapd,
    [0xbf] = helper_pavgb_mmx /* pavgusb */
};
3454
/* Entry for the three-byte opcode tables: helper pair (MMX, XMM forms)
   plus the CPUID extended-feature bit required by the instruction. */
struct sse_op_helper_s {
    void *op[2]; uint32_t ext_mask;
};
#define SSSE3_OP(x) { MMX_OP2(x), CPUID_EXT_SSSE3 }
#define SSE41_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE41 }
#define SSE42_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE42 }
#define SSE41_SPECIAL { { NULL, SSE_SPECIAL }, CPUID_EXT_SSE41 }
/* 0x0f 0x38 opcode space (SSSE3 / SSE4.1 / SSE4.2), indexed by the third
   opcode byte. */
static struct sse_op_helper_s sse_op_table6[256] = {
    [0x00] = SSSE3_OP(pshufb),
    [0x01] = SSSE3_OP(phaddw),
    [0x02] = SSSE3_OP(phaddd),
    [0x03] = SSSE3_OP(phaddsw),
    [0x04] = SSSE3_OP(pmaddubsw),
    [0x05] = SSSE3_OP(phsubw),
    [0x06] = SSSE3_OP(phsubd),
    [0x07] = SSSE3_OP(phsubsw),
    [0x08] = SSSE3_OP(psignb),
    [0x09] = SSSE3_OP(psignw),
    [0x0a] = SSSE3_OP(psignd),
    [0x0b] = SSSE3_OP(pmulhrsw),
    [0x10] = SSE41_OP(pblendvb),
    [0x14] = SSE41_OP(blendvps),
    [0x15] = SSE41_OP(blendvpd),
    [0x17] = SSE41_OP(ptest),
    [0x1c] = SSSE3_OP(pabsb),
    [0x1d] = SSSE3_OP(pabsw),
    [0x1e] = SSSE3_OP(pabsd),
    [0x20] = SSE41_OP(pmovsxbw),
    [0x21] = SSE41_OP(pmovsxbd),
    [0x22] = SSE41_OP(pmovsxbq),
    [0x23] = SSE41_OP(pmovsxwd),
    [0x24] = SSE41_OP(pmovsxwq),
    [0x25] = SSE41_OP(pmovsxdq),
    [0x28] = SSE41_OP(pmuldq),
    [0x29] = SSE41_OP(pcmpeqq),
    [0x2a] = SSE41_SPECIAL, /* movntqda */
    [0x2b] = SSE41_OP(packusdw),
    [0x30] = SSE41_OP(pmovzxbw),
    [0x31] = SSE41_OP(pmovzxbd),
    [0x32] = SSE41_OP(pmovzxbq),
    [0x33] = SSE41_OP(pmovzxwd),
    [0x34] = SSE41_OP(pmovzxwq),
    [0x35] = SSE41_OP(pmovzxdq),
    [0x37] = SSE42_OP(pcmpgtq),
    [0x38] = SSE41_OP(pminsb),
    [0x39] = SSE41_OP(pminsd),
    [0x3a] = SSE41_OP(pminuw),
    [0x3b] = SSE41_OP(pminud),
    [0x3c] = SSE41_OP(pmaxsb),
    [0x3d] = SSE41_OP(pmaxsd),
    [0x3e] = SSE41_OP(pmaxuw),
    [0x3f] = SSE41_OP(pmaxud),
    [0x40] = SSE41_OP(pmulld),
    [0x41] = SSE41_OP(phminposuw),
};
3510
/* 0x0f 0x3a opcode space (SSSE3 / SSE4.1 / SSE4.2 with an immediate
   byte), indexed by the third opcode byte. */
static struct sse_op_helper_s sse_op_table7[256] = {
    [0x08] = SSE41_OP(roundps),
    [0x09] = SSE41_OP(roundpd),
    [0x0a] = SSE41_OP(roundss),
    [0x0b] = SSE41_OP(roundsd),
    [0x0c] = SSE41_OP(blendps),
    [0x0d] = SSE41_OP(blendpd),
    [0x0e] = SSE41_OP(pblendw),
    [0x0f] = SSSE3_OP(palignr),
    [0x14] = SSE41_SPECIAL, /* pextrb */
    [0x15] = SSE41_SPECIAL, /* pextrw */
    [0x16] = SSE41_SPECIAL, /* pextrd/pextrq */
    [0x17] = SSE41_SPECIAL, /* extractps */
    [0x20] = SSE41_SPECIAL, /* pinsrb */
    [0x21] = SSE41_SPECIAL, /* insertps */
    [0x22] = SSE41_SPECIAL, /* pinsrd/pinsrq */
    [0x40] = SSE41_OP(dpps),
    [0x41] = SSE41_OP(dppd),
    [0x42] = SSE41_OP(mpsadbw),
    [0x60] = SSE42_OP(pcmpestrm),
    [0x61] = SSE42_OP(pcmpestri),
    [0x62] = SSE42_OP(pcmpistrm),
    [0x63] = SSE42_OP(pcmpistri),
};
3535
/* Translate one MMX/SSE/SSE2/SSE3/SSSE3/SSE4/3DNow! instruction into TCG ops.
   'b' is the opcode byte following 0x0F (0x38/0x3a three-byte escapes are
   read inline); 'pc_start' is the instruction start address used for fault
   reporting; 'rex_r' is the REX.R extension of the modrm reg field.
   Emits an exception (#NM, #UD) and returns early when the instruction is
   not executable in the current CPU state. */
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
{
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
    int modrm, mod, rm, reg, reg_addr, offset_addr;
    void *sse_op2;

    b &= 0xff;
    /* b1 encodes the mandatory prefix and selects the column of
       sse_op_table1: 0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2. */
    if (s->prefix & PREFIX_DATA)
        b1 = 1;
    else if (s->prefix & PREFIX_REPZ)
        b1 = 2;
    else if (s->prefix & PREFIX_REPNZ)
        b1 = 3;
    else
        b1 = 0;
    sse_op2 = sse_op_table1[b][b1];
    if (!sse_op2)
        goto illegal_op;
    /* Opcodes 0x10..0x5f, 0xc2 and 0xc6 always operate on XMM registers;
       otherwise any mandatory prefix promotes the MMX form to XMM. */
    if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
        is_xmm = 1;
    } else {
        if (b1 == 0) {
            /* MMX case */
            is_xmm = 0;
        } else {
            is_xmm = 1;
        }
    }
    /* simple MMX/SSE operation */
    if (s->flags & HF_TS_MASK) {
        /* CR0.TS set: raise #NM so the OS can lazily restore FPU state */
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        return;
    }
    if (s->flags & HF_EM_MASK) {
    illegal_op:
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
        return;
    }
    /* XMM forms require CR4.OSFXSR, except the 0f38/0f3a escapes without a
       0x66 prefix (those may still address MMX operands). */
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
        if ((b != 0x38 && b != 0x3a) || (s->prefix & PREFIX_DATA))
            goto illegal_op;
    if (b == 0x0e) {
        if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
            goto illegal_op;
        /* femms */
        tcg_gen_helper_0_0(helper_emms);
        return;
    }
    if (b == 0x77) {
        /* emms */
        tcg_gen_helper_0_0(helper_emms);
        return;
    }
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
       the static cpu state) */
    if (!is_xmm) {
        tcg_gen_helper_0_0(helper_enter_mmx);
    }

    modrm = ldub_code(s->pc++);
    reg = ((modrm >> 3) & 7);
    if (is_xmm)
        reg |= rex_r;
    mod = (modrm >> 6) & 3;
    if (sse_op2 == SSE_SPECIAL) {
        /* Irregular encodings: re-dispatch on (prefix << 8) | opcode. */
        b |= (b1 << 8);
        switch(b) {
        case 0x0e7: /* movntq */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            break;
        case 0x1e7: /* movntdq */
        case 0x02b: /* movntps */
        case 0x12b: /* movntpd */
        case 0x3f0: /* lddqu */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            break;
        case 0x6e: /* movd mm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,fpregs[reg].mmx));
                tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
            }
            break;
        case 0x16e: /* movd xmm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg]));
                tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg]));
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
            }
            break;
        case 0x6f: /* movq mm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                /* register-to-register: copy via a 64-bit temp */
                rm = (modrm & 7);
                tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
                               offsetof(CPUX86State,fpregs[rm].mmx));
                tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
                               offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x010: /* movups */
        case 0x110: /* movupd */
        case 0x028: /* movaps */
        case 0x128: /* movapd */
        case 0x16f: /* movdqa xmm, ea */
        case 0x26f: /* movdqu xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
                            offsetof(CPUX86State,xmm_regs[rm]));
            }
            break;
        case 0x210: /* movss xmm, ea */
            if (mod != 3) {
                /* from memory: load low dword, zero the upper three */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                /* register form only moves the low dword */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
            }
            break;
        case 0x310: /* movsd xmm, ea */
            if (mod != 3) {
                /* from memory: load low qword, zero the high qword */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x012: /* movlps */
        case 0x112: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                /* movhlps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x212: /* movsldup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
            }
            /* duplicate the even dwords into the odd slots */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
            break;
        case 0x312: /* movddup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            /* duplicate the low qword into the high qword */
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            break;
        case 0x016: /* movhps */
        case 0x116: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                /* movlhps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x216: /* movshdup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
            }
            /* duplicate the odd dwords into the even slots */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            break;
        case 0x7e: /* movd ea, mm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                tcg_gen_ld_i64(cpu_T[0], cpu_env,
                               offsetof(CPUX86State,fpregs[reg].mmx));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x17e: /* movd ea, xmm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                tcg_gen_ld_i64(cpu_T[0], cpu_env,
                               offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x27e: /* movq xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            /* high qword of the destination is zeroed */
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x7f: /* movq ea, mm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
                            offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x011: /* movups */
        case 0x111: /* movupd */
        case 0x029: /* movaps */
        case 0x129: /* movapd */
        case 0x17f: /* movdqa ea, xmm */
        case 0x27f: /* movdqu ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
                            offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x211: /* movss ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            }
            break;
        case 0x311: /* movsd ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            }
            break;
        case 0x013: /* movlps */
        case 0x113: /* movlpd */
            /* store forms are memory-only */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x017: /* movhps */
        case 0x117: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x71: /* shift mm, im */
        case 0x72:
        case 0x73:
        case 0x171: /* shift xmm, im */
        case 0x172:
        case 0x173:
            /* Immediate-count shift group: the shift count is materialized
               into the scratch xmm_t0/mmx_t0 operand. */
            val = ldub_code(s->pc++);
            if (is_xmm) {
                gen_op_movl_T0_im(val);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
                op1_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                gen_op_movl_T0_im(val);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
                gen_op_movl_T0_0();
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
                op1_offset = offsetof(CPUX86State,mmx_t0);
            }
            /* table2 row = opcode (0x71..0x73), column within row = /r field */
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
            if (!sse_op2)
                goto illegal_op;
            if (is_xmm) {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0x050: /* movmskps */
            rm = (modrm & 7) | REX_B(s);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                             offsetof(CPUX86State,xmm_regs[rm]));
            tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x150: /* movmskpd */
            rm = (modrm & 7) | REX_B(s);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
                             offsetof(CPUX86State,xmm_regs[rm]));
            tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x02a: /* cvtpi2ps */
        case 0x12a: /* cvtpi2pd */
            /* MMX source: enter MMX mode first */
            tcg_gen_helper_0_0(helper_enter_mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            switch(b >> 8) {
            case 0x0:
                tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
                break;
            default:
            case 0x1:
                tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
                break;
            }
            break;
        case 0x22a: /* cvtsi2ss */
        case 0x32a: /* cvtsi2sd */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            /* pick 32- vs 64-bit integer variant from sse_op_table3 */
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
            if (ot == OT_LONG) {
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
            } else {
                tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_T[0]);
            }
            break;
        case 0x02c: /* cvttps2pi */
        case 0x12c: /* cvttpd2pi */
        case 0x02d: /* cvtps2pi */
        case 0x12d: /* cvtpd2pi */
            /* MMX destination: enter MMX mode first */
            tcg_gen_helper_0_0(helper_enter_mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                gen_ldo_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            switch(b) {
            case 0x02c:
                tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
                break;
            case 0x12c:
                tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
                break;
            case 0x02d:
                tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
                break;
            case 0x12d:
                tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
                break;
            }
            break;
        case 0x22c: /* cvttss2si */
        case 0x32c: /* cvttsd2si */
        case 0x22d: /* cvtss2si */
        case 0x32d: /* cvtsd2si */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if ((b >> 8) & 1) {
                    /* sd variant: 64-bit source */
                    gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
                } else {
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                    tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                }
                op2_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            /* truncating vs rounding, 32- vs 64-bit, ss vs sd variant */
            sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
                                    (b & 1) * 4];
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
            if (ot == OT_LONG) {
                tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
                tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            } else {
                tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
            }
            gen_op_mov_reg_T0(ot, reg);
            break;
        case 0xc4: /* pinsrw */
        case 0x1c4:
            s->rip_offset = 1; /* one immediate byte follows the modrm */
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
                                offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
            } else {
                val &= 3;
                tcg_gen_st16_tl(cpu_T[0], cpu_env,
                                offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
            }
            break;
        case 0xc5: /* pextrw */
        case 0x1c5:
            if (mod != 3)
                goto illegal_op;
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                rm = (modrm & 7) | REX_B(s);
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
            } else {
                val &= 3;
                rm = (modrm & 7);
                tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
                                 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
            }
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0(ot, reg);
            break;
        case 0x1d6: /* movq ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x2d6: /* movq2dq */
            tcg_gen_helper_0_0(helper_enter_mmx);
            rm = (modrm & 7);
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                        offsetof(CPUX86State,fpregs[rm].mmx));
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x3d6: /* movdq2q */
            tcg_gen_helper_0_0(helper_enter_mmx);
            rm = (modrm & 7) | REX_B(s);
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            break;
        case 0xd7: /* pmovmskb */
        case 0x1d7:
            if (mod != 3)
                goto illegal_op;
            if (b1) {
                rm = (modrm & 7) | REX_B(s);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
                tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
            } else {
                rm = (modrm & 7);
                tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
                tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
            }
            tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0(OT_LONG, reg);
            break;
        case 0x138:
            if (s->prefix & PREFIX_REPNZ)
                goto crc32;
            /* fallthrough: handled like the no-prefix 0F 38 group */
        case 0x038:
            /* three-byte opcode: 'modrm' actually holds the third opcode
               byte; the real modrm follows it */
            b = modrm;
            modrm = ldub_code(s->pc++);
            rm = modrm & 7;
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;

            sse_op2 = sse_op_table6[b].op[b1];
            if (!sse_op2)
                goto illegal_op;
            if (!(s->cpuid_ext_features & sse_op_table6[b].ext_mask))
                goto illegal_op;

            if (b1) {
                op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
                if (mod == 3) {
                    op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
                } else {
                    op2_offset = offsetof(CPUX86State,xmm_t0);
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    /* pmovsx/pmovzx read only as many source bytes as
                       they widen, so load just that much */
                    switch (b) {
                    case 0x20: case 0x30: /* pmovsxbw, pmovzxbw */
                    case 0x23: case 0x33: /* pmovsxwd, pmovzxwd */
                    case 0x25: case 0x35: /* pmovsxdq, pmovzxdq */
                        gen_ldq_env_A0(s->mem_index, op2_offset +
                                        offsetof(XMMReg, XMM_Q(0)));
                        break;
                    case 0x21: case 0x31: /* pmovsxbd, pmovzxbd */
                    case 0x24: case 0x34: /* pmovsxwq, pmovzxwq */
                        tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
                                           (s->mem_index >> 2) - 1);
                        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, op2_offset +
                                        offsetof(XMMReg, XMM_L(0)));
                        break;
                    case 0x22: case 0x32: /* pmovsxbq, pmovzxbq */
                        tcg_gen_qemu_ld16u(cpu_tmp0, cpu_A0,
                                           (s->mem_index >> 2) - 1);
                        tcg_gen_st16_tl(cpu_tmp0, cpu_env, op2_offset +
                                        offsetof(XMMReg, XMM_W(0)));
                        break;
                    case 0x2a: /* movntqda */
                        /* pure load into the destination; nothing else to do */
                        gen_ldo_env_A0(s->mem_index, op1_offset);
                        return;
                    default:
                        gen_ldo_env_A0(s->mem_index, op2_offset);
                    }
                }
            } else {
                op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
                if (mod == 3) {
                    op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
                } else {
                    op2_offset = offsetof(CPUX86State,mmx_t0);
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    gen_ldq_env_A0(s->mem_index, op2_offset);
                }
            }
            if (sse_op2 == SSE_SPECIAL)
                goto illegal_op;

            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);

            if (b == 0x17) /* ptest writes EFLAGS */
                s->cc_op = CC_OP_EFLAGS;
            break;
        case 0x338: /* crc32 */
        crc32:
            b = modrm;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;

            if (b != 0xf0 && b != 0xf1)
                goto illegal_op;
            if (!(s->cpuid_ext_features & CPUID_EXT_SSE42))
                goto illegal_op;

            if (b == 0xf0)
                ot = OT_BYTE;
            else if (b == 0xf1 && s->dflag != 2)
                if (s->prefix & PREFIX_DATA)
                    ot = OT_WORD;
                else
                    ot = OT_LONG;
            else
                ot = OT_QUAD;

            gen_op_mov_TN_reg(OT_LONG, 0, reg);
            tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            /* helper takes (crc_in, data, bit width of data) */
            tcg_gen_helper_1_3(helper_crc32, cpu_T[0], cpu_tmp2_i32,
                               cpu_T[0], tcg_const_i32(8 << ot));

            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            gen_op_mov_reg_T0(ot, reg);
            break;
        case 0x03a:
        case 0x13a:
            /* three-byte 0F 3A opcodes: immediate-carrying SSE4/SSSE3 ops */
            b = modrm;
            modrm = ldub_code(s->pc++);
            rm = modrm & 7;
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;

            sse_op2 = sse_op_table7[b].op[b1];
            if (!sse_op2)
                goto illegal_op;
            if (!(s->cpuid_ext_features & sse_op_table7[b].ext_mask))
                goto illegal_op;

            if (sse_op2 == SSE_SPECIAL) {
                /* pextr*/pinsr*/insertps/extractps: irregular operands */
                ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
                rm = (modrm & 7) | REX_B(s);
                if (mod != 3)
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                reg = ((modrm >> 3) & 7) | rex_r;
                val = ldub_code(s->pc++);
                switch (b) {
                case 0x14: /* pextrb */
                    tcg_gen_ld8u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
                                            xmm_regs[reg].XMM_B(val & 15)));
                    if (mod == 3)
                        gen_op_mov_reg_T0(ot, rm);
                    else
                        tcg_gen_qemu_st8(cpu_T[0], cpu_A0,
                                         (s->mem_index >> 2) - 1);
                    break;
                case 0x15: /* pextrw */
                    tcg_gen_ld16u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
                                            xmm_regs[reg].XMM_W(val & 7)));
                    if (mod == 3)
                        gen_op_mov_reg_T0(ot, rm);
                    else
                        tcg_gen_qemu_st16(cpu_T[0], cpu_A0,
                                          (s->mem_index >> 2) - 1);
                    break;
                case 0x16:
                    if (ot == OT_LONG) { /* pextrd */
                        tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
                                        offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_L(val & 3)));
                        if (mod == 3)
                            gen_op_mov_reg_v(ot, rm, cpu_tmp2_i32);
                        else
                            tcg_gen_qemu_st32(cpu_tmp2_i32, cpu_A0,
                                              (s->mem_index >> 2) - 1);
                    } else { /* pextrq */
                        tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
                                        offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_Q(val & 1)));
                        if (mod == 3)
                            gen_op_mov_reg_v(ot, rm, cpu_tmp1_i64);
                        else
                            tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
                                              (s->mem_index >> 2) - 1);
                    }
                    break;
                case 0x17: /* extractps */
                    tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
                                            xmm_regs[reg].XMM_L(val & 3)));
                    if (mod == 3)
                        gen_op_mov_reg_T0(ot, rm);
                    else
                        tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
                                          (s->mem_index >> 2) - 1);
                    break;
                case 0x20: /* pinsrb */
                    if (mod == 3)
                        gen_op_mov_TN_reg(OT_LONG, 0, rm);
                    else
                        tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0,
                                          (s->mem_index >> 2) - 1);
                    tcg_gen_st8_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
                                            xmm_regs[reg].XMM_B(val & 15)));
                    break;
                case 0x21: /* insertps */
                    /* imm8 layout: [7:6] source dword, [5:4] destination
                       dword, [3:0] zero-mask for the destination dwords */
                    if (mod == 3)
                        tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
                                        offsetof(CPUX86State,xmm_regs[rm]
                                                .XMM_L((val >> 6) & 3)));
                    else
                        tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
                                           (s->mem_index >> 2) - 1);
                    tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
                                    offsetof(CPUX86State,xmm_regs[reg]
                                            .XMM_L((val >> 4) & 3)));
                    if ((val >> 0) & 1)
                        tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
                                        cpu_env, offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_L(0)));
                    if ((val >> 1) & 1)
                        tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
                                        cpu_env, offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_L(1)));
                    if ((val >> 2) & 1)
                        tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
                                        cpu_env, offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_L(2)));
                    if ((val >> 3) & 1)
                        tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
                                        cpu_env, offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_L(3)));
                    break;
                case 0x22:
                    if (ot == OT_LONG) { /* pinsrd */
                        if (mod == 3)
                            gen_op_mov_v_reg(ot, cpu_tmp2_i32, rm);
                        else
                            tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
                                               (s->mem_index >> 2) - 1);
                        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
                                        offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_L(val & 3)));
                    } else { /* pinsrq */
                        if (mod == 3)
                            gen_op_mov_v_reg(ot, cpu_tmp1_i64, rm);
                        else
                            tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
                                              (s->mem_index >> 2) - 1);
                        tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
                                        offsetof(CPUX86State,
                                                xmm_regs[reg].XMM_Q(val & 1)));
                    }
                    break;
                }
                return;
            }

            if (b1) {
                op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
                if (mod == 3) {
                    op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
                } else {
                    op2_offset = offsetof(CPUX86State,xmm_t0);
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    gen_ldo_env_A0(s->mem_index, op2_offset);
                }
            } else {
                op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
                if (mod == 3) {
                    op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
                } else {
                    op2_offset = offsetof(CPUX86State,mmx_t0);
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    gen_ldq_env_A0(s->mem_index, op2_offset);
                }
            }
            val = ldub_code(s->pc++);

            if ((b & 0xfc) == 0x60) { /* pcmpXstrX */
                s->cc_op = CC_OP_EFLAGS;

                if (s->dflag == 2)
                    /* The helper must use entire 64-bit gp registers */
                    val |= 1 << 8;
            }

            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
            break;
        default:
            goto illegal_op;
        }
    } else {
        /* generic MMX or SSE operation */
        switch(b) {
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
        case 0xc2: /* compare insns */
            s->rip_offset = 1; /* immediate byte follows the modrm */
            break;
        default:
            break;
        }
        if (is_xmm) {
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
                                b == 0xc2)) {
                    /* specific case for SSE single instructions */
                    if (b1 == 2) {
                        /* 32 bit access */
                        gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                        tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                    } else {
                        /* 64 bit access */
                        gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
                    }
                } else {
                    gen_ldo_env_A0(s->mem_index, op2_offset);
                }
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
        } else {
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0(s->mem_index, op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
        }
        switch(b) {
        case 0x0f: /* 3DNow! data insns */
            if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
                goto illegal_op;
            /* 3DNow! puts its real opcode in the trailing immediate byte */
            val = ldub_code(s->pc++);
            sse_op2 = sse_op_table5[val];
            if (!sse_op2)
                goto illegal_op;
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
            val = ldub_code(s->pc++);
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
            break;
        case 0xc2:
            /* compare insns */
            val = ldub_code(s->pc++);
            if (val >= 8)
                goto illegal_op;
            sse_op2 = sse_op_table4[val][b1];
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        case 0xf7:
            /* maskmov : we must prepare A0 */
            if (mod != 3)
                goto illegal_op;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg(R_EDI);
            } else
#endif
            {
                gen_op_movl_A0_reg(R_EDI);
                if (s->aflag == 0)
                    gen_op_andl_A0_ffff();
            }
            gen_add_A0_ds_seg(s);

            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
            break;
        default:
            tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
            tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
            tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
            break;
        }
        if (b == 0x2e || b == 0x2f) {
            /* ucomis/comis update EFLAGS */
            s->cc_op = CC_OP_EFLAGS;
        }
    }
}
4472
4473#ifdef VBOX
4474/* Checks if it's an invalid lock sequence. Only a few instructions
4475 can be used together with the lock prefix and of those only the
4476 form that write a memory operand. So, this is kind of annoying
4477 work to do...
4478 The AMD manual lists the following instructions.
4479 ADC
4480 ADD
4481 AND
4482 BTC
4483 BTR
4484 BTS
4485 CMPXCHG
4486 CMPXCHG8B
4487 CMPXCHG16B
4488 DEC
4489 INC
4490 NEG
4491 NOT
4492 OR
4493 SBB
4494 SUB
4495 XADD
4496 XCHG
4497 XOR */
4498static bool is_invalid_lock_sequence(DisasContext *s, target_ulong pc_start, int b)
4499{
4500 target_ulong pc = s->pc;
4501 int modrm, mod, op;
4502
4503 /* X={8,16,32,64} Y={16,32,64} */
4504 switch (b)
4505 {
4506 /* /2: ADC reg/memX, immX */
4507 /* /0: ADD reg/memX, immX */
4508 /* /4: AND reg/memX, immX */
4509 /* /1: OR reg/memX, immX */
4510 /* /3: SBB reg/memX, immX */
4511 /* /5: SUB reg/memX, immX */
4512 /* /6: XOR reg/memX, immX */
4513 case 0x80:
4514 case 0x81:
4515 case 0x83:
4516 modrm = ldub_code(pc++);
4517 op = (modrm >> 3) & 7;
4518 if (op == 7) /* /7: CMP */
4519 break;
4520 mod = (modrm >> 6) & 3;
4521 if (mod == 3) /* register destination */
4522 break;
4523 return false;
4524
4525 case 0x10: /* /r: ADC reg/mem8, reg8 */
4526 case 0x11: /* /r: ADC reg/memX, regY */
4527 case 0x00: /* /r: ADD reg/mem8, reg8 */
4528 case 0x01: /* /r: ADD reg/memX, regY */
4529 case 0x20: /* /r: AND reg/mem8, reg8 */
4530 case 0x21: /* /r: AND reg/memY, regY */
4531 case 0x08: /* /r: OR reg/mem8, reg8 */
4532 case 0x09: /* /r: OR reg/memY, regY */
4533 case 0x18: /* /r: SBB reg/mem8, reg8 */
4534 case 0x19: /* /r: SBB reg/memY, regY */
4535 case 0x28: /* /r: SUB reg/mem8, reg8 */
4536 case 0x29: /* /r: SUB reg/memY, regY */
4537 case 0x86: /* /r: XCHG reg/mem8, reg8 or XCHG reg8, reg/mem8 */
4538 case 0x87: /* /r: XCHG reg/memY, regY or XCHG regY, reg/memY */
4539 case 0x30: /* /r: XOR reg/mem8, reg8 */
4540 case 0x31: /* /r: XOR reg/memY, regY */
4541 modrm = ldub_code(pc++);
4542 mod = (modrm >> 6) & 3;
4543 if (mod == 3) /* register destination */
4544 break;
4545 return false;
4546
4547 /* /1: DEC reg/memX */
4548 /* /0: INC reg/memX */
4549 case 0xfe:
4550 case 0xff:
4551 modrm = ldub_code(pc++);
4552 mod = (modrm >> 6) & 3;
4553 if (mod == 3) /* register destination */
4554 break;
4555 return false;
4556
4557 /* /3: NEG reg/memX */
4558 /* /2: NOT reg/memX */
4559 case 0xf6:
4560 case 0xf7:
4561 modrm = ldub_code(pc++);
4562 mod = (modrm >> 6) & 3;
4563 if (mod == 3) /* register destination */
4564 break;
4565 return false;
4566
4567 case 0x0f:
4568 b = ldub_code(pc++);
4569 switch (b)
4570 {
4571 /* /7: BTC reg/memY, imm8 */
4572 /* /6: BTR reg/memY, imm8 */
4573 /* /5: BTS reg/memY, imm8 */
4574 case 0xba:
4575 modrm = ldub_code(pc++);
4576 op = (modrm >> 3) & 7;
4577 if (op < 5)
4578 break;
4579 mod = (modrm >> 6) & 3;
4580 if (mod == 3) /* register destination */
4581 break;
4582 return false;
4583
4584 case 0xbb: /* /r: BTC reg/memY, regY */
4585 case 0xb3: /* /r: BTR reg/memY, regY */
4586 case 0xab: /* /r: BTS reg/memY, regY */
4587 case 0xb0: /* /r: CMPXCHG reg/mem8, reg8 */
4588 case 0xb1: /* /r: CMPXCHG reg/memY, regY */
4589 case 0xc0: /* /r: XADD reg/mem8, reg8 */
4590 case 0xc1: /* /r: XADD reg/memY, regY */
4591 modrm = ldub_code(pc++);
4592 mod = (modrm >> 6) & 3;
4593 if (mod == 3) /* register destination */
4594 break;
4595 return false;
4596
4597 /* /1: CMPXCHG8B mem64 or CMPXCHG16B mem128 */
4598 case 0xc7:
4599 modrm = ldub_code(pc++);
4600 op = (modrm >> 3) & 7;
4601 if (op != 1)
4602 break;
4603 return false;
4604 }
4605 break;
4606 }
4607
4608 /* illegal sequence. The s->pc is past the lock prefix and that
4609 is sufficient for the TB, I think. */
4610 Log(("illegal lock sequence %RGv (b=%#x)\n", pc_start, b));
4611 return true;
4612}
4613#endif /* VBOX */
4614
4615
4616/* convert one instruction. s->is_jmp is set if the translation must
4617 be stopped. Return the next pc value */
4618static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
4619{
4620 int b, prefixes, aflag, dflag;
4621 int shift, ot;
4622 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
4623 target_ulong next_eip, tval;
4624 int rex_w, rex_r;
4625
4626 if (unlikely(loglevel & CPU_LOG_TB_OP))
4627 tcg_gen_debug_insn_start(pc_start);
4628 s->pc = pc_start;
4629 prefixes = 0;
4630 aflag = s->code32;
4631 dflag = s->code32;
4632 s->override = -1;
4633 rex_w = -1;
4634 rex_r = 0;
4635#ifdef TARGET_X86_64
4636 s->rex_x = 0;
4637 s->rex_b = 0;
4638 x86_64_hregs = 0;
4639#endif
4640 s->rip_offset = 0; /* for relative ip address */
4641#ifdef VBOX
4642 /* nike: seems only slow down things */
4643# if 0
4644 /* Always update EIP. Otherwise one must be very careful with generated code that can raise exceptions. */
4645
4646 gen_update_eip(pc_start - s->cs_base);
4647# endif
4648#endif
4649
4650 next_byte:
4651 b = ldub_code(s->pc);
4652 s->pc++;
4653 /* check prefixes */
4654#ifdef TARGET_X86_64
4655 if (CODE64(s)) {
4656 switch (b) {
4657 case 0xf3:
4658 prefixes |= PREFIX_REPZ;
4659 goto next_byte;
4660 case 0xf2:
4661 prefixes |= PREFIX_REPNZ;
4662 goto next_byte;
4663 case 0xf0:
4664 prefixes |= PREFIX_LOCK;
4665 goto next_byte;
4666 case 0x2e:
4667 s->override = R_CS;
4668 goto next_byte;
4669 case 0x36:
4670 s->override = R_SS;
4671 goto next_byte;
4672 case 0x3e:
4673 s->override = R_DS;
4674 goto next_byte;
4675 case 0x26:
4676 s->override = R_ES;
4677 goto next_byte;
4678 case 0x64:
4679 s->override = R_FS;
4680 goto next_byte;
4681 case 0x65:
4682 s->override = R_GS;
4683 goto next_byte;
4684 case 0x66:
4685 prefixes |= PREFIX_DATA;
4686 goto next_byte;
4687 case 0x67:
4688 prefixes |= PREFIX_ADR;
4689 goto next_byte;
4690 case 0x40 ... 0x4f:
4691 /* REX prefix */
4692 rex_w = (b >> 3) & 1;
4693 rex_r = (b & 0x4) << 1;
4694 s->rex_x = (b & 0x2) << 2;
4695 REX_B(s) = (b & 0x1) << 3;
4696 x86_64_hregs = 1; /* select uniform byte register addressing */
4697 goto next_byte;
4698 }
4699 if (rex_w == 1) {
4700 /* 0x66 is ignored if rex.w is set */
4701 dflag = 2;
4702 } else {
4703 if (prefixes & PREFIX_DATA)
4704 dflag ^= 1;
4705 }
4706 if (!(prefixes & PREFIX_ADR))
4707 aflag = 2;
4708 } else
4709#endif
4710 {
4711 switch (b) {
4712 case 0xf3:
4713 prefixes |= PREFIX_REPZ;
4714 goto next_byte;
4715 case 0xf2:
4716 prefixes |= PREFIX_REPNZ;
4717 goto next_byte;
4718 case 0xf0:
4719 prefixes |= PREFIX_LOCK;
4720 goto next_byte;
4721 case 0x2e:
4722 s->override = R_CS;
4723 goto next_byte;
4724 case 0x36:
4725 s->override = R_SS;
4726 goto next_byte;
4727 case 0x3e:
4728 s->override = R_DS;
4729 goto next_byte;
4730 case 0x26:
4731 s->override = R_ES;
4732 goto next_byte;
4733 case 0x64:
4734 s->override = R_FS;
4735 goto next_byte;
4736 case 0x65:
4737 s->override = R_GS;
4738 goto next_byte;
4739 case 0x66:
4740 prefixes |= PREFIX_DATA;
4741 goto next_byte;
4742 case 0x67:
4743 prefixes |= PREFIX_ADR;
4744 goto next_byte;
4745 }
4746 if (prefixes & PREFIX_DATA)
4747 dflag ^= 1;
4748 if (prefixes & PREFIX_ADR)
4749 aflag ^= 1;
4750 }
4751
4752 s->prefix = prefixes;
4753 s->aflag = aflag;
4754 s->dflag = dflag;
4755
4756 /* lock generation */
4757#ifndef VBOX
4758 if (prefixes & PREFIX_LOCK)
4759 tcg_gen_helper_0_0(helper_lock);
4760#else /* VBOX */
4761 if (prefixes & PREFIX_LOCK) {
4762 if (is_invalid_lock_sequence(s, pc_start, b)) {
4763 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
4764 return s->pc;
4765 }
4766 tcg_gen_helper_0_0(helper_lock);
4767 }
4768#endif /* VBOX */
4769
4770 /* now check op code */
4771 reswitch:
4772 switch(b) {
4773 case 0x0f:
4774 /**************************/
4775 /* extended op code */
4776 b = ldub_code(s->pc++) | 0x100;
4777 goto reswitch;
4778
4779 /**************************/
4780 /* arith & logic */
4781 case 0x00 ... 0x05:
4782 case 0x08 ... 0x0d:
4783 case 0x10 ... 0x15:
4784 case 0x18 ... 0x1d:
4785 case 0x20 ... 0x25:
4786 case 0x28 ... 0x2d:
4787 case 0x30 ... 0x35:
4788 case 0x38 ... 0x3d:
4789 {
4790 int op, f, val;
4791 op = (b >> 3) & 7;
4792 f = (b >> 1) & 3;
4793
4794 if ((b & 1) == 0)
4795 ot = OT_BYTE;
4796 else
4797 ot = dflag + OT_WORD;
4798
4799 switch(f) {
4800 case 0: /* OP Ev, Gv */
4801 modrm = ldub_code(s->pc++);
4802 reg = ((modrm >> 3) & 7) | rex_r;
4803 mod = (modrm >> 6) & 3;
4804 rm = (modrm & 7) | REX_B(s);
4805 if (mod != 3) {
4806 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4807 opreg = OR_TMP0;
4808 } else if (op == OP_XORL && rm == reg) {
4809 xor_zero:
4810 /* xor reg, reg optimisation */
4811 gen_op_movl_T0_0();
4812 s->cc_op = CC_OP_LOGICB + ot;
4813 gen_op_mov_reg_T0(ot, reg);
4814 gen_op_update1_cc();
4815 break;
4816 } else {
4817 opreg = rm;
4818 }
4819 gen_op_mov_TN_reg(ot, 1, reg);
4820 gen_op(s, op, ot, opreg);
4821 break;
4822 case 1: /* OP Gv, Ev */
4823 modrm = ldub_code(s->pc++);
4824 mod = (modrm >> 6) & 3;
4825 reg = ((modrm >> 3) & 7) | rex_r;
4826 rm = (modrm & 7) | REX_B(s);
4827 if (mod != 3) {
4828 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4829 gen_op_ld_T1_A0(ot + s->mem_index);
4830 } else if (op == OP_XORL && rm == reg) {
4831 goto xor_zero;
4832 } else {
4833 gen_op_mov_TN_reg(ot, 1, rm);
4834 }
4835 gen_op(s, op, ot, reg);
4836 break;
4837 case 2: /* OP A, Iv */
4838 val = insn_get(s, ot);
4839 gen_op_movl_T1_im(val);
4840 gen_op(s, op, ot, OR_EAX);
4841 break;
4842 }
4843 }
4844 break;
4845
4846 case 0x82:
4847 if (CODE64(s))
4848 goto illegal_op;
4849 case 0x80: /* GRP1 */
4850 case 0x81:
4851 case 0x83:
4852 {
4853 int val;
4854
4855 if ((b & 1) == 0)
4856 ot = OT_BYTE;
4857 else
4858 ot = dflag + OT_WORD;
4859
4860 modrm = ldub_code(s->pc++);
4861 mod = (modrm >> 6) & 3;
4862 rm = (modrm & 7) | REX_B(s);
4863 op = (modrm >> 3) & 7;
4864
4865 if (mod != 3) {
4866 if (b == 0x83)
4867 s->rip_offset = 1;
4868 else
4869 s->rip_offset = insn_const_size(ot);
4870 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4871 opreg = OR_TMP0;
4872 } else {
4873 opreg = rm;
4874 }
4875
4876 switch(b) {
4877 default:
4878 case 0x80:
4879 case 0x81:
4880 case 0x82:
4881 val = insn_get(s, ot);
4882 break;
4883 case 0x83:
4884 val = (int8_t)insn_get(s, OT_BYTE);
4885 break;
4886 }
4887 gen_op_movl_T1_im(val);
4888 gen_op(s, op, ot, opreg);
4889 }
4890 break;
4891
4892 /**************************/
4893 /* inc, dec, and other misc arith */
4894 case 0x40 ... 0x47: /* inc Gv */
4895 ot = dflag ? OT_LONG : OT_WORD;
4896 gen_inc(s, ot, OR_EAX + (b & 7), 1);
4897 break;
4898 case 0x48 ... 0x4f: /* dec Gv */
4899 ot = dflag ? OT_LONG : OT_WORD;
4900 gen_inc(s, ot, OR_EAX + (b & 7), -1);
4901 break;
4902 case 0xf6: /* GRP3 */
4903 case 0xf7:
4904 if ((b & 1) == 0)
4905 ot = OT_BYTE;
4906 else
4907 ot = dflag + OT_WORD;
4908
4909 modrm = ldub_code(s->pc++);
4910 mod = (modrm >> 6) & 3;
4911 rm = (modrm & 7) | REX_B(s);
4912 op = (modrm >> 3) & 7;
4913 if (mod != 3) {
4914 if (op == 0)
4915 s->rip_offset = insn_const_size(ot);
4916 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4917 gen_op_ld_T0_A0(ot + s->mem_index);
4918 } else {
4919 gen_op_mov_TN_reg(ot, 0, rm);
4920 }
4921
4922 switch(op) {
4923 case 0: /* test */
4924 val = insn_get(s, ot);
4925 gen_op_movl_T1_im(val);
4926 gen_op_testl_T0_T1_cc();
4927 s->cc_op = CC_OP_LOGICB + ot;
4928 break;
4929 case 2: /* not */
4930 tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
4931 if (mod != 3) {
4932 gen_op_st_T0_A0(ot + s->mem_index);
4933 } else {
4934 gen_op_mov_reg_T0(ot, rm);
4935 }
4936 break;
4937 case 3: /* neg */
4938 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
4939 if (mod != 3) {
4940 gen_op_st_T0_A0(ot + s->mem_index);
4941 } else {
4942 gen_op_mov_reg_T0(ot, rm);
4943 }
4944 gen_op_update_neg_cc();
4945 s->cc_op = CC_OP_SUBB + ot;
4946 break;
4947 case 4: /* mul */
4948 switch(ot) {
4949 case OT_BYTE:
4950 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
4951 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4952 tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
4953 /* XXX: use 32 bit mul which could be faster */
4954 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4955 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4956 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4957 tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
4958 s->cc_op = CC_OP_MULB;
4959 break;
4960 case OT_WORD:
4961 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
4962 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
4963 tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
4964 /* XXX: use 32 bit mul which could be faster */
4965 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4966 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4967 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4968 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
4969 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4970 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4971 s->cc_op = CC_OP_MULW;
4972 break;
4973 default:
4974 case OT_LONG:
4975#ifdef TARGET_X86_64
4976 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4977 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
4978 tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
4979 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4980 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4981 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4982 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
4983 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4984 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4985#else
4986 {
4987 TCGv t0, t1;
4988 t0 = tcg_temp_new(TCG_TYPE_I64);
4989 t1 = tcg_temp_new(TCG_TYPE_I64);
4990 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4991 tcg_gen_extu_i32_i64(t0, cpu_T[0]);
4992 tcg_gen_extu_i32_i64(t1, cpu_T[1]);
4993 tcg_gen_mul_i64(t0, t0, t1);
4994 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4995 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4996 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4997 tcg_gen_shri_i64(t0, t0, 32);
4998 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4999 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5000 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
5001 }
5002#endif
5003 s->cc_op = CC_OP_MULL;
5004 break;
5005#ifdef TARGET_X86_64
5006 case OT_QUAD:
5007 tcg_gen_helper_0_1(helper_mulq_EAX_T0, cpu_T[0]);
5008 s->cc_op = CC_OP_MULQ;
5009 break;
5010#endif
5011 }
5012 break;
5013 case 5: /* imul */
5014 switch(ot) {
5015 case OT_BYTE:
5016 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
5017 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5018 tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
5019 /* XXX: use 32 bit mul which could be faster */
5020 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5021 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5022 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5023 tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
5024 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5025 s->cc_op = CC_OP_MULB;
5026 break;
5027 case OT_WORD:
5028 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
5029 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5030 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5031 /* XXX: use 32 bit mul which could be faster */
5032 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5033 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5034 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5035 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5036 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5037 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
5038 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5039 s->cc_op = CC_OP_MULW;
5040 break;
5041 default:
5042 case OT_LONG:
5043#ifdef TARGET_X86_64
5044 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5045 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5046 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
5047 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5048 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5049 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5050 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
5051 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5052 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
5053 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5054#else
5055 {
5056 TCGv t0, t1;
5057 t0 = tcg_temp_new(TCG_TYPE_I64);
5058 t1 = tcg_temp_new(TCG_TYPE_I64);
5059 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5060 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
5061 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
5062 tcg_gen_mul_i64(t0, t0, t1);
5063 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5064 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5065 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5066 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
5067 tcg_gen_shri_i64(t0, t0, 32);
5068 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5069 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5070 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5071 }
5072#endif
5073 s->cc_op = CC_OP_MULL;
5074 break;
5075#ifdef TARGET_X86_64
5076 case OT_QUAD:
5077 tcg_gen_helper_0_1(helper_imulq_EAX_T0, cpu_T[0]);
5078 s->cc_op = CC_OP_MULQ;
5079 break;
5080#endif
5081 }
5082 break;
5083 case 6: /* div */
5084 switch(ot) {
5085 case OT_BYTE:
5086 gen_jmp_im(pc_start - s->cs_base);
5087 tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
5088 break;
5089 case OT_WORD:
5090 gen_jmp_im(pc_start - s->cs_base);
5091 tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
5092 break;
5093 default:
5094 case OT_LONG:
5095 gen_jmp_im(pc_start - s->cs_base);
5096 tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
5097 break;
5098#ifdef TARGET_X86_64
5099 case OT_QUAD:
5100 gen_jmp_im(pc_start - s->cs_base);
5101 tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
5102 break;
5103#endif
5104 }
5105 break;
5106 case 7: /* idiv */
5107 switch(ot) {
5108 case OT_BYTE:
5109 gen_jmp_im(pc_start - s->cs_base);
5110 tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
5111 break;
5112 case OT_WORD:
5113 gen_jmp_im(pc_start - s->cs_base);
5114 tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
5115 break;
5116 default:
5117 case OT_LONG:
5118 gen_jmp_im(pc_start - s->cs_base);
5119 tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
5120 break;
5121#ifdef TARGET_X86_64
5122 case OT_QUAD:
5123 gen_jmp_im(pc_start - s->cs_base);
5124 tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
5125 break;
5126#endif
5127 }
5128 break;
5129 default:
5130 goto illegal_op;
5131 }
5132 break;
5133
5134 case 0xfe: /* GRP4 */
5135 case 0xff: /* GRP5 */
5136 if ((b & 1) == 0)
5137 ot = OT_BYTE;
5138 else
5139 ot = dflag + OT_WORD;
5140
5141 modrm = ldub_code(s->pc++);
5142 mod = (modrm >> 6) & 3;
5143 rm = (modrm & 7) | REX_B(s);
5144 op = (modrm >> 3) & 7;
5145 if (op >= 2 && b == 0xfe) {
5146 goto illegal_op;
5147 }
5148 if (CODE64(s)) {
5149 if (op == 2 || op == 4) {
5150 /* operand size for jumps is 64 bit */
5151 ot = OT_QUAD;
5152 } else if (op == 3 || op == 5) {
5153 /* for call calls, the operand is 16 or 32 bit, even
5154 in long mode */
5155 ot = dflag ? OT_LONG : OT_WORD;
5156 } else if (op == 6) {
5157 /* default push size is 64 bit */
5158 ot = dflag ? OT_QUAD : OT_WORD;
5159 }
5160 }
5161 if (mod != 3) {
5162 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5163 if (op >= 2 && op != 3 && op != 5)
5164 gen_op_ld_T0_A0(ot + s->mem_index);
5165 } else {
5166 gen_op_mov_TN_reg(ot, 0, rm);
5167 }
5168
5169 switch(op) {
5170 case 0: /* inc Ev */
5171 if (mod != 3)
5172 opreg = OR_TMP0;
5173 else
5174 opreg = rm;
5175 gen_inc(s, ot, opreg, 1);
5176 break;
5177 case 1: /* dec Ev */
5178 if (mod != 3)
5179 opreg = OR_TMP0;
5180 else
5181 opreg = rm;
5182 gen_inc(s, ot, opreg, -1);
5183 break;
5184 case 2: /* call Ev */
5185 /* XXX: optimize if memory (no 'and' is necessary) */
5186#ifdef VBOX_WITH_CALL_RECORD
5187 if (s->record_call)
5188 gen_op_record_call();
5189#endif
5190 if (s->dflag == 0)
5191 gen_op_andl_T0_ffff();
5192 next_eip = s->pc - s->cs_base;
5193 gen_movtl_T1_im(next_eip);
5194 gen_push_T1(s);
5195 gen_op_jmp_T0();
5196 gen_eob(s);
5197 break;
5198 case 3: /* lcall Ev */
5199 gen_op_ld_T1_A0(ot + s->mem_index);
5200 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5201 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5202 do_lcall:
5203 if (s->pe && !s->vm86) {
5204 if (s->cc_op != CC_OP_DYNAMIC)
5205 gen_op_set_cc_op(s->cc_op);
5206 gen_jmp_im(pc_start - s->cs_base);
5207 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5208 tcg_gen_helper_0_4(helper_lcall_protected,
5209 cpu_tmp2_i32, cpu_T[1],
5210 tcg_const_i32(dflag),
5211 tcg_const_i32(s->pc - pc_start));
5212 } else {
5213 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5214 tcg_gen_helper_0_4(helper_lcall_real,
5215 cpu_tmp2_i32, cpu_T[1],
5216 tcg_const_i32(dflag),
5217 tcg_const_i32(s->pc - s->cs_base));
5218 }
5219 gen_eob(s);
5220 break;
5221 case 4: /* jmp Ev */
5222 if (s->dflag == 0)
5223 gen_op_andl_T0_ffff();
5224 gen_op_jmp_T0();
5225 gen_eob(s);
5226 break;
5227 case 5: /* ljmp Ev */
5228 gen_op_ld_T1_A0(ot + s->mem_index);
5229 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5230 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5231 do_ljmp:
5232 if (s->pe && !s->vm86) {
5233 if (s->cc_op != CC_OP_DYNAMIC)
5234 gen_op_set_cc_op(s->cc_op);
5235 gen_jmp_im(pc_start - s->cs_base);
5236 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5237 tcg_gen_helper_0_3(helper_ljmp_protected,
5238 cpu_tmp2_i32,
5239 cpu_T[1],
5240 tcg_const_i32(s->pc - pc_start));
5241 } else {
5242 gen_op_movl_seg_T0_vm(R_CS);
5243 gen_op_movl_T0_T1();
5244 gen_op_jmp_T0();
5245 }
5246 gen_eob(s);
5247 break;
5248 case 6: /* push Ev */
5249 gen_push_T0(s);
5250 break;
5251 default:
5252 goto illegal_op;
5253 }
5254 break;
5255
5256 case 0x84: /* test Ev, Gv */
5257 case 0x85:
5258 if ((b & 1) == 0)
5259 ot = OT_BYTE;
5260 else
5261 ot = dflag + OT_WORD;
5262
5263 modrm = ldub_code(s->pc++);
5264 mod = (modrm >> 6) & 3;
5265 rm = (modrm & 7) | REX_B(s);
5266 reg = ((modrm >> 3) & 7) | rex_r;
5267
5268 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5269 gen_op_mov_TN_reg(ot, 1, reg);
5270 gen_op_testl_T0_T1_cc();
5271 s->cc_op = CC_OP_LOGICB + ot;
5272 break;
5273
5274 case 0xa8: /* test eAX, Iv */
5275 case 0xa9:
5276 if ((b & 1) == 0)
5277 ot = OT_BYTE;
5278 else
5279 ot = dflag + OT_WORD;
5280 val = insn_get(s, ot);
5281
5282 gen_op_mov_TN_reg(ot, 0, OR_EAX);
5283 gen_op_movl_T1_im(val);
5284 gen_op_testl_T0_T1_cc();
5285 s->cc_op = CC_OP_LOGICB + ot;
5286 break;
5287
5288 case 0x98: /* CWDE/CBW */
5289#ifdef TARGET_X86_64
5290 if (dflag == 2) {
5291 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5292 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5293 gen_op_mov_reg_T0(OT_QUAD, R_EAX);
5294 } else
5295#endif
5296 if (dflag == 1) {
5297 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5298 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5299 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5300 } else {
5301 gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
5302 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5303 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5304 }
5305 break;
5306 case 0x99: /* CDQ/CWD */
5307#ifdef TARGET_X86_64
5308 if (dflag == 2) {
5309 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5310 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
5311 gen_op_mov_reg_T0(OT_QUAD, R_EDX);
5312 } else
5313#endif
5314 if (dflag == 1) {
5315 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5316 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5317 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
5318 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5319 } else {
5320 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5321 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5322 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
5323 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5324 }
5325 break;
5326 case 0x1af: /* imul Gv, Ev */
5327 case 0x69: /* imul Gv, Ev, I */
5328 case 0x6b:
5329 ot = dflag + OT_WORD;
5330 modrm = ldub_code(s->pc++);
5331 reg = ((modrm >> 3) & 7) | rex_r;
5332 if (b == 0x69)
5333 s->rip_offset = insn_const_size(ot);
5334 else if (b == 0x6b)
5335 s->rip_offset = 1;
5336 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5337 if (b == 0x69) {
5338 val = insn_get(s, ot);
5339 gen_op_movl_T1_im(val);
5340 } else if (b == 0x6b) {
5341 val = (int8_t)insn_get(s, OT_BYTE);
5342 gen_op_movl_T1_im(val);
5343 } else {
5344 gen_op_mov_TN_reg(ot, 1, reg);
5345 }
5346
5347#ifdef TARGET_X86_64
5348 if (ot == OT_QUAD) {
5349 tcg_gen_helper_1_2(helper_imulq_T0_T1, cpu_T[0], cpu_T[0], cpu_T[1]);
5350 } else
5351#endif
5352 if (ot == OT_LONG) {
5353#ifdef TARGET_X86_64
5354 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5355 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
5356 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5357 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5358 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
5359 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5360#else
5361 {
5362 TCGv t0, t1;
5363 t0 = tcg_temp_new(TCG_TYPE_I64);
5364 t1 = tcg_temp_new(TCG_TYPE_I64);
5365 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
5366 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
5367 tcg_gen_mul_i64(t0, t0, t1);
5368 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5369 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5370 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
5371 tcg_gen_shri_i64(t0, t0, 32);
5372 tcg_gen_trunc_i64_i32(cpu_T[1], t0);
5373 tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
5374 }
5375#endif
5376 } else {
5377 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5378 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5379 /* XXX: use 32 bit mul which could be faster */
5380 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5381 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5382 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5383 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5384 }
5385 gen_op_mov_reg_T0(ot, reg);
5386 s->cc_op = CC_OP_MULB + ot;
5387 break;
5388 case 0x1c0:
5389 case 0x1c1: /* xadd Ev, Gv */
5390 if ((b & 1) == 0)
5391 ot = OT_BYTE;
5392 else
5393 ot = dflag + OT_WORD;
5394 modrm = ldub_code(s->pc++);
5395 reg = ((modrm >> 3) & 7) | rex_r;
5396 mod = (modrm >> 6) & 3;
5397 if (mod == 3) {
5398 rm = (modrm & 7) | REX_B(s);
5399 gen_op_mov_TN_reg(ot, 0, reg);
5400 gen_op_mov_TN_reg(ot, 1, rm);
5401 gen_op_addl_T0_T1();
5402 gen_op_mov_reg_T1(ot, reg);
5403 gen_op_mov_reg_T0(ot, rm);
5404 } else {
5405 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5406 gen_op_mov_TN_reg(ot, 0, reg);
5407 gen_op_ld_T1_A0(ot + s->mem_index);
5408 gen_op_addl_T0_T1();
5409 gen_op_st_T0_A0(ot + s->mem_index);
5410 gen_op_mov_reg_T1(ot, reg);
5411 }
5412 gen_op_update2_cc();
5413 s->cc_op = CC_OP_ADDB + ot;
5414 break;
5415 case 0x1b0:
5416 case 0x1b1: /* cmpxchg Ev, Gv */
5417 {
5418 int label1, label2;
5419 TCGv t0, t1, t2, a0;
5420
5421 if ((b & 1) == 0)
5422 ot = OT_BYTE;
5423 else
5424 ot = dflag + OT_WORD;
5425 modrm = ldub_code(s->pc++);
5426 reg = ((modrm >> 3) & 7) | rex_r;
5427 mod = (modrm >> 6) & 3;
5428 t0 = tcg_temp_local_new(TCG_TYPE_TL);
5429 t1 = tcg_temp_local_new(TCG_TYPE_TL);
5430 t2 = tcg_temp_local_new(TCG_TYPE_TL);
5431 a0 = tcg_temp_local_new(TCG_TYPE_TL);
5432 gen_op_mov_v_reg(ot, t1, reg);
5433 if (mod == 3) {
5434 rm = (modrm & 7) | REX_B(s);
5435 gen_op_mov_v_reg(ot, t0, rm);
5436 } else {
5437 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5438 tcg_gen_mov_tl(a0, cpu_A0);
5439 gen_op_ld_v(ot + s->mem_index, t0, a0);
5440 rm = 0; /* avoid warning */
5441 }
5442 label1 = gen_new_label();
5443 tcg_gen_ld_tl(t2, cpu_env, offsetof(CPUState, regs[R_EAX]));
5444 tcg_gen_sub_tl(t2, t2, t0);
5445 gen_extu(ot, t2);
5446 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
5447 if (mod == 3) {
5448 label2 = gen_new_label();
5449 gen_op_mov_reg_v(ot, R_EAX, t0);
5450 tcg_gen_br(label2);
5451 gen_set_label(label1);
5452 gen_op_mov_reg_v(ot, rm, t1);
5453 gen_set_label(label2);
5454 } else {
5455 tcg_gen_mov_tl(t1, t0);
5456 gen_op_mov_reg_v(ot, R_EAX, t0);
5457 gen_set_label(label1);
5458 /* always store */
5459 gen_op_st_v(ot + s->mem_index, t1, a0);
5460 }
5461 tcg_gen_mov_tl(cpu_cc_src, t0);
5462 tcg_gen_mov_tl(cpu_cc_dst, t2);
5463 s->cc_op = CC_OP_SUBB + ot;
5464 tcg_temp_free(t0);
5465 tcg_temp_free(t1);
5466 tcg_temp_free(t2);
5467 tcg_temp_free(a0);
5468 }
5469 break;
5470 case 0x1c7: /* cmpxchg8b */
5471 modrm = ldub_code(s->pc++);
5472 mod = (modrm >> 6) & 3;
5473 if ((mod == 3) || ((modrm & 0x38) != 0x8))
5474 goto illegal_op;
5475#ifdef TARGET_X86_64
5476 if (dflag == 2) {
5477 if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
5478 goto illegal_op;
5479 gen_jmp_im(pc_start - s->cs_base);
5480 if (s->cc_op != CC_OP_DYNAMIC)
5481 gen_op_set_cc_op(s->cc_op);
5482 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5483 tcg_gen_helper_0_1(helper_cmpxchg16b, cpu_A0);
5484 } else
5485#endif
5486 {
5487 if (!(s->cpuid_features & CPUID_CX8))
5488 goto illegal_op;
5489 gen_jmp_im(pc_start - s->cs_base);
5490 if (s->cc_op != CC_OP_DYNAMIC)
5491 gen_op_set_cc_op(s->cc_op);
5492 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5493 tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
5494 }
5495 s->cc_op = CC_OP_EFLAGS;
5496 break;
5497
5498 /**************************/
5499 /* push/pop */
5500 case 0x50 ... 0x57: /* push */
5501 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
5502 gen_push_T0(s);
5503 break;
5504 case 0x58 ... 0x5f: /* pop */
5505 if (CODE64(s)) {
5506 ot = dflag ? OT_QUAD : OT_WORD;
5507 } else {
5508 ot = dflag + OT_WORD;
5509 }
5510 gen_pop_T0(s);
5511 /* NOTE: order is important for pop %sp */
5512 gen_pop_update(s);
5513 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
5514 break;
5515 case 0x60: /* pusha */
5516 if (CODE64(s))
5517 goto illegal_op;
5518 gen_pusha(s);
5519 break;
5520 case 0x61: /* popa */
5521 if (CODE64(s))
5522 goto illegal_op;
5523 gen_popa(s);
5524 break;
5525 case 0x68: /* push Iv */
5526 case 0x6a:
5527 if (CODE64(s)) {
5528 ot = dflag ? OT_QUAD : OT_WORD;
5529 } else {
5530 ot = dflag + OT_WORD;
5531 }
5532 if (b == 0x68)
5533 val = insn_get(s, ot);
5534 else
5535 val = (int8_t)insn_get(s, OT_BYTE);
5536 gen_op_movl_T0_im(val);
5537 gen_push_T0(s);
5538 break;
5539 case 0x8f: /* pop Ev */
5540 if (CODE64(s)) {
5541 ot = dflag ? OT_QUAD : OT_WORD;
5542 } else {
5543 ot = dflag + OT_WORD;
5544 }
5545 modrm = ldub_code(s->pc++);
5546 mod = (modrm >> 6) & 3;
5547 gen_pop_T0(s);
5548 if (mod == 3) {
5549 /* NOTE: order is important for pop %sp */
5550 gen_pop_update(s);
5551 rm = (modrm & 7) | REX_B(s);
5552 gen_op_mov_reg_T0(ot, rm);
5553 } else {
5554 /* NOTE: order is important too for MMU exceptions */
5555 s->popl_esp_hack = 1 << ot;
5556 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5557 s->popl_esp_hack = 0;
5558 gen_pop_update(s);
5559 }
5560 break;
5561 case 0xc8: /* enter */
5562 {
5563 int level;
5564 val = lduw_code(s->pc);
5565 s->pc += 2;
5566 level = ldub_code(s->pc++);
5567 gen_enter(s, val, level);
5568 }
5569 break;
5570 case 0xc9: /* leave */
5571 /* XXX: exception not precise (ESP is updated before potential exception) */
5572 if (CODE64(s)) {
5573 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
5574 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
5575 } else if (s->ss32) {
5576 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
5577 gen_op_mov_reg_T0(OT_LONG, R_ESP);
5578 } else {
5579 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
5580 gen_op_mov_reg_T0(OT_WORD, R_ESP);
5581 }
5582 gen_pop_T0(s);
5583 if (CODE64(s)) {
5584 ot = dflag ? OT_QUAD : OT_WORD;
5585 } else {
5586 ot = dflag + OT_WORD;
5587 }
5588 gen_op_mov_reg_T0(ot, R_EBP);
5589 gen_pop_update(s);
5590 break;
5591 case 0x06: /* push es */
5592 case 0x0e: /* push cs */
5593 case 0x16: /* push ss */
5594 case 0x1e: /* push ds */
5595 if (CODE64(s))
5596 goto illegal_op;
5597 gen_op_movl_T0_seg(b >> 3);
5598 gen_push_T0(s);
5599 break;
5600 case 0x1a0: /* push fs */
5601 case 0x1a8: /* push gs */
5602 gen_op_movl_T0_seg((b >> 3) & 7);
5603 gen_push_T0(s);
5604 break;
5605 case 0x07: /* pop es */
5606 case 0x17: /* pop ss */
5607 case 0x1f: /* pop ds */
5608 if (CODE64(s))
5609 goto illegal_op;
5610 reg = b >> 3;
5611 gen_pop_T0(s);
5612 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5613 gen_pop_update(s);
5614 if (reg == R_SS) {
5615 /* if reg == SS, inhibit interrupts/trace. */
5616 /* If several instructions disable interrupts, only the
5617 _first_ does it */
5618 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5619 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5620 s->tf = 0;
5621 }
5622 if (s->is_jmp) {
5623 gen_jmp_im(s->pc - s->cs_base);
5624 gen_eob(s);
5625 }
5626 break;
5627 case 0x1a1: /* pop fs */
5628 case 0x1a9: /* pop gs */
5629 gen_pop_T0(s);
5630 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
5631 gen_pop_update(s);
5632 if (s->is_jmp) {
5633 gen_jmp_im(s->pc - s->cs_base);
5634 gen_eob(s);
5635 }
5636 break;
5637
5638 /**************************/
5639 /* mov */
5640 case 0x88:
5641 case 0x89: /* mov Gv, Ev */
5642 if ((b & 1) == 0)
5643 ot = OT_BYTE;
5644 else
5645 ot = dflag + OT_WORD;
5646 modrm = ldub_code(s->pc++);
5647 reg = ((modrm >> 3) & 7) | rex_r;
5648
5649 /* generate a generic store */
5650 gen_ldst_modrm(s, modrm, ot, reg, 1);
5651 break;
5652 case 0xc6:
5653 case 0xc7: /* mov Ev, Iv */
5654 if ((b & 1) == 0)
5655 ot = OT_BYTE;
5656 else
5657 ot = dflag + OT_WORD;
5658 modrm = ldub_code(s->pc++);
5659 mod = (modrm >> 6) & 3;
5660 if (mod != 3) {
5661 s->rip_offset = insn_const_size(ot);
5662 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5663 }
5664 val = insn_get(s, ot);
5665 gen_op_movl_T0_im(val);
5666 if (mod != 3)
5667 gen_op_st_T0_A0(ot + s->mem_index);
5668 else
5669 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
5670 break;
5671 case 0x8a:
5672 case 0x8b: /* mov Ev, Gv */
5673#ifdef VBOX /* dtrace hot fix */
5674 if (prefixes & PREFIX_LOCK)
5675 goto illegal_op;
5676#endif
5677 if ((b & 1) == 0)
5678 ot = OT_BYTE;
5679 else
5680 ot = OT_WORD + dflag;
5681 modrm = ldub_code(s->pc++);
5682 reg = ((modrm >> 3) & 7) | rex_r;
5683
5684 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5685 gen_op_mov_reg_T0(ot, reg);
5686 break;
5687 case 0x8e: /* mov seg, Gv */
5688 modrm = ldub_code(s->pc++);
5689 reg = (modrm >> 3) & 7;
/* Only segment registers 0..5 (ES,CS,SS,DS,FS,GS) exist, and a
   MOV to CS is undefined — reject both. */
5690 if (reg >= 6 || reg == R_CS)
5691 goto illegal_op;
5692 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5693 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5694 if (reg == R_SS) {
5695 /* if reg == SS, inhibit interrupts/trace */
5696 /* If several instructions disable interrupts, only the
5697 _first_ does it */
5698 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5699 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5700 s->tf = 0;
5701 }
/* Segment load may require ending the TB (see s->is_jmp); sync
   EIP and close the block if so. */
5702 if (s->is_jmp) {
5703 gen_jmp_im(s->pc - s->cs_base);
5704 gen_eob(s);
5705 }
5706 break;
5707 case 0x8c: /* mov Gv, seg */
5708 modrm = ldub_code(s->pc++);
5709 reg = (modrm >> 3) & 7;
5710 mod = (modrm >> 6) & 3;
5711 if (reg >= 6)
5712 goto illegal_op;
5713 gen_op_movl_T0_seg(reg);
5714 if (mod == 3)
5715 ot = OT_WORD + dflag;
5716 else
5717 ot = OT_WORD;
5718 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5719 break;
5720
5721 case 0x1b6: /* movzbS Gv, Eb */
5722 case 0x1b7: /* movzwS Gv, Ew */
5723 case 0x1be: /* movsbS Gv, Eb */
5724 case 0x1bf: /* movswS Gv, Ew */
5725 {
5726 int d_ot;
5727 /* d_ot is the size of destination */
5728 d_ot = dflag + OT_WORD;
5729 /* ot is the size of source */
5730 ot = (b & 1) + OT_BYTE;
5731 modrm = ldub_code(s->pc++);
5732 reg = ((modrm >> 3) & 7) | rex_r;
5733 mod = (modrm >> 6) & 3;
5734 rm = (modrm & 7) | REX_B(s);
5735
5736 if (mod == 3) {
/* Register source: load it, then extend in place. Bit 3 of the
   opcode (b & 8) distinguishes movsx (sign-extend, 0xbe/0xbf)
   from movzx (zero-extend, 0xb6/0xb7). */
5737 gen_op_mov_TN_reg(ot, 0, rm);
5738 switch(ot | (b & 8)) {
5739 case OT_BYTE:
5740 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
5741 break;
5742 case OT_BYTE | 8:
5743 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5744 break;
5745 case OT_WORD:
5746 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
5747 break;
5748 default:
5749 case OT_WORD | 8:
5750 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5751 break;
5752 }
5753 gen_op_mov_reg_T0(d_ot, reg);
5754 } else {
/* Memory source: a signed or unsigned load does the extension
   directly. */
5755 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5756 if (b & 8) {
5757 gen_op_lds_T0_A0(ot + s->mem_index);
5758 } else {
5759 gen_op_ldu_T0_A0(ot + s->mem_index);
5760 }
5761 gen_op_mov_reg_T0(d_ot, reg);
5762 }
5763 }
5764 break;
5765
5766 case 0x8d: /* lea */
5767 ot = dflag + OT_WORD;
5768 modrm = ldub_code(s->pc++);
5769 mod = (modrm >> 6) & 3;
/* LEA requires a memory-form operand; a register operand is #UD. */
5770 if (mod == 3)
5771 goto illegal_op;
5772 reg = ((modrm >> 3) & 7) | rex_r;
5773 /* we must ensure that no segment is added */
/* Drop any segment override and temporarily disable addseg so
   gen_lea_modrm() leaves the raw effective address in A0, with no
   segment base folded in; addseg is restored afterwards. */
5774 s->override = -1;
5775 val = s->addseg;
5776 s->addseg = 0;
5777 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5778 s->addseg = val;
5779 gen_op_mov_reg_A0(ot - OT_WORD, reg);
5780 break;
5781
5782 case 0xa0: /* mov EAX, Ov */
5783 case 0xa1:
5784 case 0xa2: /* mov Ov, EAX */
5785 case 0xa3:
5786 {
5787 target_ulong offset_addr;
5788
5789 if ((b & 1) == 0)
5790 ot = OT_BYTE;
5791 else
5792 ot = dflag + OT_WORD;
5793#ifdef TARGET_X86_64
5794 if (s->aflag == 2) {
5795 offset_addr = ldq_code(s->pc);
5796 s->pc += 8;
5797 gen_op_movq_A0_im(offset_addr);
5798 } else
5799#endif
5800 {
5801 if (s->aflag) {
5802 offset_addr = insn_get(s, OT_LONG);
5803 } else {
5804 offset_addr = insn_get(s, OT_WORD);
5805 }
5806 gen_op_movl_A0_im(offset_addr);
5807 }
5808 gen_add_A0_ds_seg(s);
5809 if ((b & 2) == 0) {
5810 gen_op_ld_T0_A0(ot + s->mem_index);
5811 gen_op_mov_reg_T0(ot, R_EAX);
5812 } else {
5813 gen_op_mov_TN_reg(ot, 0, R_EAX);
5814 gen_op_st_T0_A0(ot + s->mem_index);
5815 }
5816 }
5817 break;
5818 case 0xd7: /* xlat */
5819#ifdef TARGET_X86_64
5820 if (s->aflag == 2) {
5821 gen_op_movq_A0_reg(R_EBX);
5822 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5823 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5824 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5825 } else
5826#endif
5827 {
5828 gen_op_movl_A0_reg(R_EBX);
5829 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5830 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5831 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5832 if (s->aflag == 0)
5833 gen_op_andl_A0_ffff();
5834 else
5835 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
5836 }
5837 gen_add_A0_ds_seg(s);
5838 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
5839 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
5840 break;
5841 case 0xb0 ... 0xb7: /* mov R, Ib */
5842 val = insn_get(s, OT_BYTE);
5843 gen_op_movl_T0_im(val);
5844 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
5845 break;
5846 case 0xb8 ... 0xbf: /* mov R, Iv */
5847#ifdef TARGET_X86_64
5848 if (dflag == 2) {
5849 uint64_t tmp;
5850 /* 64 bit case */
5851 tmp = ldq_code(s->pc);
5852 s->pc += 8;
5853 reg = (b & 7) | REX_B(s);
5854 gen_movtl_T0_im(tmp);
5855 gen_op_mov_reg_T0(OT_QUAD, reg);
5856 } else
5857#endif
5858 {
5859 ot = dflag ? OT_LONG : OT_WORD;
5860 val = insn_get(s, ot);
5861 reg = (b & 7) | REX_B(s);
5862 gen_op_movl_T0_im(val);
5863 gen_op_mov_reg_T0(ot, reg);
5864 }
5865 break;
5866
5867 case 0x91 ... 0x97: /* xchg R, EAX */
5868 ot = dflag + OT_WORD;
5869 reg = (b & 7) | REX_B(s);
5870 rm = R_EAX;
5871 goto do_xchg_reg;
5872 case 0x86:
5873 case 0x87: /* xchg Ev, Gv */
5874 if ((b & 1) == 0)
5875 ot = OT_BYTE;
5876 else
5877 ot = dflag + OT_WORD;
5878 modrm = ldub_code(s->pc++);
5879 reg = ((modrm >> 3) & 7) | rex_r;
5880 mod = (modrm >> 6) & 3;
5881 if (mod == 3) {
/* Register-register exchange: swap through T0/T1, no memory
   access and no bus lock needed. */
5882 rm = (modrm & 7) | REX_B(s);
5883 do_xchg_reg:
5884 gen_op_mov_TN_reg(ot, 0, reg);
5885 gen_op_mov_TN_reg(ot, 1, rm);
5886 gen_op_mov_reg_T0(ot, rm);
5887 gen_op_mov_reg_T1(ot, reg);
5888 } else {
5889 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5890 gen_op_mov_TN_reg(ot, 0, reg);
5891 /* for xchg, lock is implicit */
/* NOTE(review): the lock is emitted here only when the LOCK
   prefix is ABSENT — presumably because an explicit LOCK prefix
   already caused helper_lock/unlock to be generated by the common
   prefix handling (not visible in this chunk); confirm there. */
5892 if (!(prefixes & PREFIX_LOCK))
5893 tcg_gen_helper_0_0(helper_lock);
5894 gen_op_ld_T1_A0(ot + s->mem_index);
5895 gen_op_st_T0_A0(ot + s->mem_index);
5896 if (!(prefixes & PREFIX_LOCK))
5897 tcg_gen_helper_0_0(helper_unlock);
5898 gen_op_mov_reg_T1(ot, reg);
5899 }
5900 break;
5901 case 0xc4: /* les Gv */
5902 if (CODE64(s))
5903 goto illegal_op;
5904 op = R_ES;
5905 goto do_lxx;
5906 case 0xc5: /* lds Gv */
5907 if (CODE64(s))
5908 goto illegal_op;
5909 op = R_DS;
5910 goto do_lxx;
5911 case 0x1b2: /* lss Gv */
5912 op = R_SS;
5913 goto do_lxx;
5914 case 0x1b4: /* lfs Gv */
5915 op = R_FS;
5916 goto do_lxx;
5917 case 0x1b5: /* lgs Gv */
5918 op = R_GS;
5919 do_lxx:
5920 ot = dflag ? OT_LONG : OT_WORD;
5921 modrm = ldub_code(s->pc++);
5922 reg = ((modrm >> 3) & 7) | rex_r;
5923 mod = (modrm >> 6) & 3;
5924 if (mod == 3)
5925 goto illegal_op;
5926 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5927 gen_op_ld_T1_A0(ot + s->mem_index);
5928 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5929 /* load the segment first to handle exceptions properly */
5930 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5931 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
5932 /* then put the data */
5933 gen_op_mov_reg_T1(ot, reg);
5934 if (s->is_jmp) {
5935 gen_jmp_im(s->pc - s->cs_base);
5936 gen_eob(s);
5937 }
5938 break;
5939
5940 /************************/
5941 /* shifts */
5942 case 0xc0:
5943 case 0xc1:
5944 /* shift Ev,Ib */
5945 shift = 2;
5946 grp2:
5947 {
5948 if ((b & 1) == 0)
5949 ot = OT_BYTE;
5950 else
5951 ot = dflag + OT_WORD;
5952
5953 modrm = ldub_code(s->pc++);
5954 mod = (modrm >> 6) & 3;
5955 op = (modrm >> 3) & 7;
5956
5957 if (mod != 3) {
5958 if (shift == 2) {
5959 s->rip_offset = 1;
5960 }
5961 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5962 opreg = OR_TMP0;
5963 } else {
5964 opreg = (modrm & 7) | REX_B(s);
5965 }
5966
5967 /* simpler op */
5968 if (shift == 0) {
5969 gen_shift(s, op, ot, opreg, OR_ECX);
5970 } else {
5971 if (shift == 2) {
5972 shift = ldub_code(s->pc++);
5973 }
5974 gen_shifti(s, op, ot, opreg, shift);
5975 }
5976 }
5977 break;
5978 case 0xd0:
5979 case 0xd1:
5980 /* shift Ev,1 */
5981 shift = 1;
5982 goto grp2;
5983 case 0xd2:
5984 case 0xd3:
5985 /* shift Ev,cl */
5986 shift = 0;
5987 goto grp2;
5988
5989 case 0x1a4: /* shld imm */
5990 op = 0;
5991 shift = 1;
5992 goto do_shiftd;
5993 case 0x1a5: /* shld cl */
5994 op = 0;
5995 shift = 0;
5996 goto do_shiftd;
5997 case 0x1ac: /* shrd imm */
5998 op = 1;
5999 shift = 1;
6000 goto do_shiftd;
6001 case 0x1ad: /* shrd cl */
6002 op = 1;
6003 shift = 0;
6004 do_shiftd:
6005 ot = dflag + OT_WORD;
6006 modrm = ldub_code(s->pc++);
6007 mod = (modrm >> 6) & 3;
6008 rm = (modrm & 7) | REX_B(s);
6009 reg = ((modrm >> 3) & 7) | rex_r;
6010 if (mod != 3) {
6011 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6012 opreg = OR_TMP0;
6013 } else {
6014 opreg = rm;
6015 }
6016 gen_op_mov_TN_reg(ot, 1, reg);
6017
6018 if (shift) {
6019 val = ldub_code(s->pc++);
6020 tcg_gen_movi_tl(cpu_T3, val);
6021 } else {
6022 tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
6023 }
6024 gen_shiftd_rm_T1_T3(s, ot, opreg, op);
6025 break;
6026
6027 /************************/
6028 /* floats */
6029 case 0xd8 ... 0xdf:
6030 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
6031 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
6032 /* XXX: what to do if illegal op ? */
6033 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6034 break;
6035 }
6036 modrm = ldub_code(s->pc++);
6037 mod = (modrm >> 6) & 3;
6038 rm = modrm & 7;
6039 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
6040 if (mod != 3) {
6041 /* memory op */
6042 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6043 switch(op) {
6044 case 0x00 ... 0x07: /* fxxxs */
6045 case 0x10 ... 0x17: /* fixxxl */
6046 case 0x20 ... 0x27: /* fxxxl */
6047 case 0x30 ... 0x37: /* fixxx */
6048 {
6049 int op1;
6050 op1 = op & 7;
6051
6052 switch(op >> 4) {
6053 case 0:
6054 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6055 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6056 tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
6057 break;
6058 case 1:
6059 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6060 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6061 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
6062 break;
6063 case 2:
6064 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6065 (s->mem_index >> 2) - 1);
6066 tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
6067 break;
6068 case 3:
6069 default:
6070 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
6071 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6072 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
6073 break;
6074 }
6075
6076 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
6077 if (op1 == 3) {
6078 /* fcomp needs pop */
6079 tcg_gen_helper_0_0(helper_fpop);
6080 }
6081 }
6082 break;
6083 case 0x08: /* flds */
6084 case 0x0a: /* fsts */
6085 case 0x0b: /* fstps */
6086 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
6087 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
6088 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
6089 switch(op & 7) {
6090 case 0:
6091 switch(op >> 4) {
6092 case 0:
6093 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6094 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6095 tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
6096 break;
6097 case 1:
6098 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6099 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6100 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
6101 break;
6102 case 2:
6103 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6104 (s->mem_index >> 2) - 1);
6105 tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
6106 break;
6107 case 3:
6108 default:
6109 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
6110 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6111 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
6112 break;
6113 }
6114 break;
6115 case 1:
6116 /* XXX: the corresponding CPUID bit must be tested ! */
6117 switch(op >> 4) {
6118 case 1:
6119 tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
6120 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6121 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6122 break;
6123 case 2:
6124 tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
6125 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6126 (s->mem_index >> 2) - 1);
6127 break;
6128 case 3:
6129 default:
6130 tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
6131 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6132 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6133 break;
6134 }
6135 tcg_gen_helper_0_0(helper_fpop);
6136 break;
6137 default:
6138 switch(op >> 4) {
6139 case 0:
6140 tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
6141 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6142 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6143 break;
6144 case 1:
6145 tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
6146 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6147 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6148 break;
6149 case 2:
6150 tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
6151 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6152 (s->mem_index >> 2) - 1);
6153 break;
6154 case 3:
6155 default:
6156 tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
6157 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6158 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6159 break;
6160 }
6161 if ((op & 7) == 3)
6162 tcg_gen_helper_0_0(helper_fpop);
6163 break;
6164 }
6165 break;
6166 case 0x0c: /* fldenv mem */
6167 if (s->cc_op != CC_OP_DYNAMIC)
6168 gen_op_set_cc_op(s->cc_op);
6169 gen_jmp_im(pc_start - s->cs_base);
6170 tcg_gen_helper_0_2(helper_fldenv,
6171 cpu_A0, tcg_const_i32(s->dflag));
6172 break;
6173 case 0x0d: /* fldcw mem */
6174 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
6175 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6176 tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
6177 break;
6178 case 0x0e: /* fnstenv mem */
6179 if (s->cc_op != CC_OP_DYNAMIC)
6180 gen_op_set_cc_op(s->cc_op);
6181 gen_jmp_im(pc_start - s->cs_base);
6182 tcg_gen_helper_0_2(helper_fstenv,
6183 cpu_A0, tcg_const_i32(s->dflag));
6184 break;
6185 case 0x0f: /* fnstcw mem */
6186 tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
6187 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6188 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6189 break;
6190 case 0x1d: /* fldt mem */
6191 if (s->cc_op != CC_OP_DYNAMIC)
6192 gen_op_set_cc_op(s->cc_op);
6193 gen_jmp_im(pc_start - s->cs_base);
6194 tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
6195 break;
6196 case 0x1f: /* fstpt mem */
6197 if (s->cc_op != CC_OP_DYNAMIC)
6198 gen_op_set_cc_op(s->cc_op);
6199 gen_jmp_im(pc_start - s->cs_base);
6200 tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
6201 tcg_gen_helper_0_0(helper_fpop);
6202 break;
6203 case 0x2c: /* frstor mem */
6204 if (s->cc_op != CC_OP_DYNAMIC)
6205 gen_op_set_cc_op(s->cc_op);
6206 gen_jmp_im(pc_start - s->cs_base);
6207 tcg_gen_helper_0_2(helper_frstor,
6208 cpu_A0, tcg_const_i32(s->dflag));
6209 break;
6210 case 0x2e: /* fnsave mem */
6211 if (s->cc_op != CC_OP_DYNAMIC)
6212 gen_op_set_cc_op(s->cc_op);
6213 gen_jmp_im(pc_start - s->cs_base);
6214 tcg_gen_helper_0_2(helper_fsave,
6215 cpu_A0, tcg_const_i32(s->dflag));
6216 break;
6217 case 0x2f: /* fnstsw mem */
6218 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
6219 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6220 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6221 break;
6222 case 0x3c: /* fbld */
6223 if (s->cc_op != CC_OP_DYNAMIC)
6224 gen_op_set_cc_op(s->cc_op);
6225 gen_jmp_im(pc_start - s->cs_base);
6226 tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
6227 break;
6228 case 0x3e: /* fbstp */
6229 if (s->cc_op != CC_OP_DYNAMIC)
6230 gen_op_set_cc_op(s->cc_op);
6231 gen_jmp_im(pc_start - s->cs_base);
6232 tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
6233 tcg_gen_helper_0_0(helper_fpop);
6234 break;
6235 case 0x3d: /* fildll */
6236 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6237 (s->mem_index >> 2) - 1);
6238 tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
6239 break;
6240 case 0x3f: /* fistpll */
6241 tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
6242 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6243 (s->mem_index >> 2) - 1);
6244 tcg_gen_helper_0_0(helper_fpop);
6245 break;
6246 default:
6247 goto illegal_op;
6248 }
6249 } else {
6250 /* register float ops */
6251 opreg = rm;
6252
6253 switch(op) {
6254 case 0x08: /* fld sti */
6255 tcg_gen_helper_0_0(helper_fpush);
6256 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
6257 break;
6258 case 0x09: /* fxchg sti */
6259 case 0x29: /* fxchg4 sti, undocumented op */
6260 case 0x39: /* fxchg7 sti, undocumented op */
6261 tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
6262 break;
6263 case 0x0a: /* grp d9/2 */
6264 switch(rm) {
6265 case 0: /* fnop */
6266 /* check exceptions (FreeBSD FPU probe) */
6267 if (s->cc_op != CC_OP_DYNAMIC)
6268 gen_op_set_cc_op(s->cc_op);
6269 gen_jmp_im(pc_start - s->cs_base);
6270 tcg_gen_helper_0_0(helper_fwait);
6271 break;
6272 default:
6273 goto illegal_op;
6274 }
6275 break;
6276 case 0x0c: /* grp d9/4 */
6277 switch(rm) {
6278 case 0: /* fchs */
6279 tcg_gen_helper_0_0(helper_fchs_ST0);
6280 break;
6281 case 1: /* fabs */
6282 tcg_gen_helper_0_0(helper_fabs_ST0);
6283 break;
6284 case 4: /* ftst */
6285 tcg_gen_helper_0_0(helper_fldz_FT0);
6286 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6287 break;
6288 case 5: /* fxam */
6289 tcg_gen_helper_0_0(helper_fxam_ST0);
6290 break;
6291 default:
6292 goto illegal_op;
6293 }
6294 break;
6295 case 0x0d: /* grp d9/5 */
6296 {
6297 switch(rm) {
6298 case 0:
6299 tcg_gen_helper_0_0(helper_fpush);
6300 tcg_gen_helper_0_0(helper_fld1_ST0);
6301 break;
6302 case 1:
6303 tcg_gen_helper_0_0(helper_fpush);
6304 tcg_gen_helper_0_0(helper_fldl2t_ST0);
6305 break;
6306 case 2:
6307 tcg_gen_helper_0_0(helper_fpush);
6308 tcg_gen_helper_0_0(helper_fldl2e_ST0);
6309 break;
6310 case 3:
6311 tcg_gen_helper_0_0(helper_fpush);
6312 tcg_gen_helper_0_0(helper_fldpi_ST0);
6313 break;
6314 case 4:
6315 tcg_gen_helper_0_0(helper_fpush);
6316 tcg_gen_helper_0_0(helper_fldlg2_ST0);
6317 break;
6318 case 5:
6319 tcg_gen_helper_0_0(helper_fpush);
6320 tcg_gen_helper_0_0(helper_fldln2_ST0);
6321 break;
6322 case 6:
6323 tcg_gen_helper_0_0(helper_fpush);
6324 tcg_gen_helper_0_0(helper_fldz_ST0);
6325 break;
6326 default:
6327 goto illegal_op;
6328 }
6329 }
6330 break;
6331 case 0x0e: /* grp d9/6 */
6332 switch(rm) {
6333 case 0: /* f2xm1 */
6334 tcg_gen_helper_0_0(helper_f2xm1);
6335 break;
6336 case 1: /* fyl2x */
6337 tcg_gen_helper_0_0(helper_fyl2x);
6338 break;
6339 case 2: /* fptan */
6340 tcg_gen_helper_0_0(helper_fptan);
6341 break;
6342 case 3: /* fpatan */
6343 tcg_gen_helper_0_0(helper_fpatan);
6344 break;
6345 case 4: /* fxtract */
6346 tcg_gen_helper_0_0(helper_fxtract);
6347 break;
6348 case 5: /* fprem1 */
6349 tcg_gen_helper_0_0(helper_fprem1);
6350 break;
6351 case 6: /* fdecstp */
6352 tcg_gen_helper_0_0(helper_fdecstp);
6353 break;
6354 default:
6355 case 7: /* fincstp */
6356 tcg_gen_helper_0_0(helper_fincstp);
6357 break;
6358 }
6359 break;
6360 case 0x0f: /* grp d9/7 */
6361 switch(rm) {
6362 case 0: /* fprem */
6363 tcg_gen_helper_0_0(helper_fprem);
6364 break;
6365 case 1: /* fyl2xp1 */
6366 tcg_gen_helper_0_0(helper_fyl2xp1);
6367 break;
6368 case 2: /* fsqrt */
6369 tcg_gen_helper_0_0(helper_fsqrt);
6370 break;
6371 case 3: /* fsincos */
6372 tcg_gen_helper_0_0(helper_fsincos);
6373 break;
6374 case 5: /* fscale */
6375 tcg_gen_helper_0_0(helper_fscale);
6376 break;
6377 case 4: /* frndint */
6378 tcg_gen_helper_0_0(helper_frndint);
6379 break;
6380 case 6: /* fsin */
6381 tcg_gen_helper_0_0(helper_fsin);
6382 break;
6383 default:
6384 case 7: /* fcos */
6385 tcg_gen_helper_0_0(helper_fcos);
6386 break;
6387 }
6388 break;
6389 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
6390 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
6391 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
6392 {
6393 int op1;
6394
6395 op1 = op & 7;
6396 if (op >= 0x20) {
6397 tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
6398 if (op >= 0x30)
6399 tcg_gen_helper_0_0(helper_fpop);
6400 } else {
6401 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6402 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
6403 }
6404 }
6405 break;
6406 case 0x02: /* fcom */
6407 case 0x22: /* fcom2, undocumented op */
6408 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6409 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6410 break;
6411 case 0x03: /* fcomp */
6412 case 0x23: /* fcomp3, undocumented op */
6413 case 0x32: /* fcomp5, undocumented op */
6414 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6415 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6416 tcg_gen_helper_0_0(helper_fpop);
6417 break;
6418 case 0x15: /* da/5 */
6419 switch(rm) {
6420 case 1: /* fucompp */
6421 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
6422 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6423 tcg_gen_helper_0_0(helper_fpop);
6424 tcg_gen_helper_0_0(helper_fpop);
6425 break;
6426 default:
6427 goto illegal_op;
6428 }
6429 break;
6430 case 0x1c:
6431 switch(rm) {
6432 case 0: /* feni (287 only, just do nop here) */
6433 break;
6434 case 1: /* fdisi (287 only, just do nop here) */
6435 break;
6436 case 2: /* fclex */
6437 tcg_gen_helper_0_0(helper_fclex);
6438 break;
6439 case 3: /* fninit */
6440 tcg_gen_helper_0_0(helper_fninit);
6441 break;
6442 case 4: /* fsetpm (287 only, just do nop here) */
6443 break;
6444 default:
6445 goto illegal_op;
6446 }
6447 break;
6448 case 0x1d: /* fucomi */
6449 if (s->cc_op != CC_OP_DYNAMIC)
6450 gen_op_set_cc_op(s->cc_op);
6451 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6452 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
6453 s->cc_op = CC_OP_EFLAGS;
6454 break;
6455 case 0x1e: /* fcomi */
6456 if (s->cc_op != CC_OP_DYNAMIC)
6457 gen_op_set_cc_op(s->cc_op);
6458 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6459 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
6460 s->cc_op = CC_OP_EFLAGS;
6461 break;
6462 case 0x28: /* ffree sti */
6463 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
6464 break;
6465 case 0x2a: /* fst sti */
6466 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
6467 break;
6468 case 0x2b: /* fstp sti */
6469 case 0x0b: /* fstp1 sti, undocumented op */
6470 case 0x3a: /* fstp8 sti, undocumented op */
6471 case 0x3b: /* fstp9 sti, undocumented op */
6472 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
6473 tcg_gen_helper_0_0(helper_fpop);
6474 break;
6475 case 0x2c: /* fucom st(i) */
6476 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6477 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6478 break;
6479 case 0x2d: /* fucomp st(i) */
6480 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6481 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6482 tcg_gen_helper_0_0(helper_fpop);
6483 break;
6484 case 0x33: /* de/3 */
6485 switch(rm) {
6486 case 1: /* fcompp */
6487 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
6488 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6489 tcg_gen_helper_0_0(helper_fpop);
6490 tcg_gen_helper_0_0(helper_fpop);
6491 break;
6492 default:
6493 goto illegal_op;
6494 }
6495 break;
6496 case 0x38: /* ffreep sti, undocumented op */
6497 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
6498 tcg_gen_helper_0_0(helper_fpop);
6499 break;
6500 case 0x3c: /* df/4 */
6501 switch(rm) {
6502 case 0:
6503 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
6504 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6505 gen_op_mov_reg_T0(OT_WORD, R_EAX);
6506 break;
6507 default:
6508 goto illegal_op;
6509 }
6510 break;
6511 case 0x3d: /* fucomip */
6512 if (s->cc_op != CC_OP_DYNAMIC)
6513 gen_op_set_cc_op(s->cc_op);
6514 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6515 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
6516 tcg_gen_helper_0_0(helper_fpop);
6517 s->cc_op = CC_OP_EFLAGS;
6518 break;
6519 case 0x3e: /* fcomip */
6520 if (s->cc_op != CC_OP_DYNAMIC)
6521 gen_op_set_cc_op(s->cc_op);
6522 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6523 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
6524 tcg_gen_helper_0_0(helper_fpop);
6525 s->cc_op = CC_OP_EFLAGS;
6526 break;
6527 case 0x10 ... 0x13: /* fcmovxx */
6528 case 0x18 ... 0x1b:
6529 {
6530 int op1, l1;
6531 static const uint8_t fcmov_cc[8] = {
6532 (JCC_B << 1),
6533 (JCC_Z << 1),
6534 (JCC_BE << 1),
6535 (JCC_P << 1),
6536 };
6537 op1 = fcmov_cc[op & 3] | (((op >> 3) & 1) ^ 1);
6538 l1 = gen_new_label();
6539 gen_jcc1(s, s->cc_op, op1, l1);
6540 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
6541 gen_set_label(l1);
6542 }
6543 break;
6544 default:
6545 goto illegal_op;
6546 }
6547 }
6548 break;
6549 /************************/
6550 /* string ops */
6551
6552 case 0xa4: /* movsS */
6553 case 0xa5:
6554 if ((b & 1) == 0)
6555 ot = OT_BYTE;
6556 else
6557 ot = dflag + OT_WORD;
6558
6559 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6560 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6561 } else {
6562 gen_movs(s, ot);
6563 }
6564 break;
6565
6566 case 0xaa: /* stosS */
6567 case 0xab:
6568 if ((b & 1) == 0)
6569 ot = OT_BYTE;
6570 else
6571 ot = dflag + OT_WORD;
6572
6573 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6574 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6575 } else {
6576 gen_stos(s, ot);
6577 }
6578 break;
6579 case 0xac: /* lodsS */
6580 case 0xad:
6581 if ((b & 1) == 0)
6582 ot = OT_BYTE;
6583 else
6584 ot = dflag + OT_WORD;
6585 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6586 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6587 } else {
6588 gen_lods(s, ot);
6589 }
6590 break;
6591 case 0xae: /* scasS */
6592 case 0xaf:
6593 if ((b & 1) == 0)
6594 ot = OT_BYTE;
6595 else
6596 ot = dflag + OT_WORD;
6597 if (prefixes & PREFIX_REPNZ) {
6598 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6599 } else if (prefixes & PREFIX_REPZ) {
6600 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6601 } else {
6602 gen_scas(s, ot);
6603 s->cc_op = CC_OP_SUBB + ot;
6604 }
6605 break;
6606
6607 case 0xa6: /* cmpsS */
6608 case 0xa7:
6609 if ((b & 1) == 0)
6610 ot = OT_BYTE;
6611 else
6612 ot = dflag + OT_WORD;
6613 if (prefixes & PREFIX_REPNZ) {
6614 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6615 } else if (prefixes & PREFIX_REPZ) {
6616 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6617 } else {
6618 gen_cmps(s, ot);
6619 s->cc_op = CC_OP_SUBB + ot;
6620 }
6621 break;
6622 case 0x6c: /* insS */
6623 case 0x6d:
6624 if ((b & 1) == 0)
6625 ot = OT_BYTE;
6626 else
6627 ot = dflag ? OT_LONG : OT_WORD;
6628 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6629 gen_op_andl_T0_ffff();
6630 gen_check_io(s, ot, pc_start - s->cs_base,
6631 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
6632 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6633 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6634 } else {
6635 gen_ins(s, ot);
6636 if (use_icount) {
6637 gen_jmp(s, s->pc - s->cs_base);
6638 }
6639 }
6640 break;
6641 case 0x6e: /* outsS */
6642 case 0x6f:
6643 if ((b & 1) == 0)
6644 ot = OT_BYTE;
6645 else
6646 ot = dflag ? OT_LONG : OT_WORD;
6647 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6648 gen_op_andl_T0_ffff();
6649 gen_check_io(s, ot, pc_start - s->cs_base,
6650 svm_is_rep(prefixes) | 4);
6651 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6652 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6653 } else {
6654 gen_outs(s, ot);
6655 if (use_icount) {
6656 gen_jmp(s, s->pc - s->cs_base);
6657 }
6658 }
6659 break;
6660
6661 /************************/
6662 /* port I/O */
6663
6664 case 0xe4:
6665 case 0xe5:
6666 if ((b & 1) == 0)
6667 ot = OT_BYTE;
6668 else
6669 ot = dflag ? OT_LONG : OT_WORD;
6670 val = ldub_code(s->pc++);
6671 gen_op_movl_T0_im(val);
6672 gen_check_io(s, ot, pc_start - s->cs_base,
6673 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6674 if (use_icount)
6675 gen_io_start();
6676 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6677 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
6678 gen_op_mov_reg_T1(ot, R_EAX);
6679 if (use_icount) {
6680 gen_io_end();
6681 gen_jmp(s, s->pc - s->cs_base);
6682 }
6683 break;
5684 /* out Ib, AL/AX/EAX: port number is an immediate byte */
6684 case 0xe6:
6685 case 0xe7:
6686 if ((b & 1) == 0)
6687 ot = OT_BYTE;
6688 else
6689 ot = dflag ? OT_LONG : OT_WORD;
6690 val = ldub_code(s->pc++);
6691 gen_op_movl_T0_im(val);
6692 gen_check_io(s, ot, pc_start - s->cs_base,
6693 svm_is_rep(prefixes));
/* VBOX-only: after the permission check, an OUT to port 0x80 (the
   classic POST / I/O-delay port) is discarded entirely — no out op
   is emitted for it. */
6694#ifdef VBOX /* bird: linux is writing to this port for delaying I/O. */
6695 if (val == 0x80)
6696 break;
6697#endif /* VBOX */
6698 gen_op_mov_TN_reg(ot, 1, R_EAX);
6699
6700 if (use_icount)
6701 gen_io_start();
/* Port number is masked to 16 bits before calling the out helper. */
6702 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6703 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6704 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6705 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6706 if (use_icount) {
6707 gen_io_end();
6708 gen_jmp(s, s->pc - s->cs_base);
6709 }
6710 break;
6711 case 0xec:
6712 case 0xed:
6713 if ((b & 1) == 0)
6714 ot = OT_BYTE;
6715 else
6716 ot = dflag ? OT_LONG : OT_WORD;
6717 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6718 gen_op_andl_T0_ffff();
6719 gen_check_io(s, ot, pc_start - s->cs_base,
6720 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6721 if (use_icount)
6722 gen_io_start();
6723 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6724 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
6725 gen_op_mov_reg_T1(ot, R_EAX);
6726 if (use_icount) {
6727 gen_io_end();
6728 gen_jmp(s, s->pc - s->cs_base);
6729 }
6730 break;
6731 case 0xee:
6732 case 0xef:
6733 if ((b & 1) == 0)
6734 ot = OT_BYTE;
6735 else
6736 ot = dflag ? OT_LONG : OT_WORD;
6737 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6738 gen_op_andl_T0_ffff();
6739 gen_check_io(s, ot, pc_start - s->cs_base,
6740 svm_is_rep(prefixes));
6741 gen_op_mov_TN_reg(ot, 1, R_EAX);
6742
6743 if (use_icount)
6744 gen_io_start();
6745 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6746 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6747 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6748 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6749 if (use_icount) {
6750 gen_io_end();
6751 gen_jmp(s, s->pc - s->cs_base);
6752 }
6753 break;
6754
6755 /************************/
6756 /* control */
6757 case 0xc2: /* ret im */
/* NOTE(review): ldsw_code() sign-extends the imm16, so an immediate
   above 0x7fff makes 'val' negative and the stack adjustment below
   shrinks instead of grows — the Intel SDM defines RET imm16 as an
   unsigned immediate; verify (later QEMU loads it unsigned). */
6758 val = ldsw_code(s->pc);
6759 s->pc += 2;
6760 gen_pop_T0(s);
/* In 64-bit mode a non-16-bit operand size forces a 64-bit pop. */
6761 if (CODE64(s) && s->dflag)
6762 s->dflag = 2;
6763 gen_stack_update(s, val + (2 << s->dflag));
6764 if (s->dflag == 0)
6765 gen_op_andl_T0_ffff();
6766 gen_op_jmp_T0();
6767 gen_eob(s);
6768 break;
6769 case 0xc3: /* ret */
6770 gen_pop_T0(s);
6771 gen_pop_update(s);
6772 if (s->dflag == 0)
6773 gen_op_andl_T0_ffff();
6774 gen_op_jmp_T0();
6775 gen_eob(s);
6776 break;
6777 case 0xca: /* lret im */
/* NOTE(review): same sign-extension concern as RET imm16 above. */
6778 val = ldsw_code(s->pc);
6779 s->pc += 2;
6780 do_lret:
6781 if (s->pe && !s->vm86) {
/* Protected mode: the privilege/selector checks are done in the
   helper, so flags and EIP must be synced first. */
6782 if (s->cc_op != CC_OP_DYNAMIC)
6783 gen_op_set_cc_op(s->cc_op);
6784 gen_jmp_im(pc_start - s->cs_base);
6785 tcg_gen_helper_0_2(helper_lret_protected,
6786 tcg_const_i32(s->dflag),
6787 tcg_const_i32(val));
6788 } else {
/* Real / VM86 mode: pop EIP and CS inline. */
6789 gen_stack_A0(s);
6790 /* pop offset */
6791 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6792 if (s->dflag == 0)
6793 gen_op_andl_T0_ffff();
6794 /* NOTE: keeping EIP updated is not a problem in case of
6795 exception */
6796 gen_op_jmp_T0();
6797 /* pop selector */
6798 gen_op_addl_A0_im(2 << s->dflag);
6799 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6800 gen_op_movl_seg_T0_vm(R_CS);
6801 /* add stack offset */
6802 gen_stack_update(s, val + (4 << s->dflag));
6803 }
6804 gen_eob(s);
6805 break;
6806 case 0xcb: /* lret */
6807 val = 0;
6808 goto do_lret;
    case 0xcf: /* iret */
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET);
        if (!s->pe) {
            /* real mode */
            tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
            s->cc_op = CC_OP_EFLAGS;   /* helper rewrites EFLAGS wholesale */
        } else if (s->vm86) {
#ifdef VBOX
            /* VBox: with VME enabled, a 16-bit IRET is legal in V86 mode even
               when IOPL != 3 (virtual interrupt flag emulation); 32-bit IRET
               (dflag != 0) still faults. */
            if (s->iopl != 3 && (!s->vme || s->dflag)) {
#else
            if (s->iopl != 3) {
#endif
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
                s->cc_op = CC_OP_EFLAGS;
            }
        } else {
            /* Protected mode: full task/privilege handling in the helper;
               pass the next-instruction EIP for nested-task bookkeeping. */
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_iret_protected,
                               tcg_const_i32(s->dflag),
                               tcg_const_i32(s->pc - s->cs_base));
            s->cc_op = CC_OP_EFLAGS;
        }
        gen_eob(s);
        break;
    case 0xe8: /* call im */
        {
            /* Relative near call: displacement is sign-extended and added
               to the address of the next instruction. */
            if (dflag)
                tval = (int32_t)insn_get(s, OT_LONG);
            else
                tval = (int16_t)insn_get(s, OT_WORD);
            next_eip = s->pc - s->cs_base;
            tval += next_eip;
            if (s->dflag == 0)
                tval &= 0xffff;     /* 16-bit operand size wraps EIP */
            gen_movtl_T0_im(next_eip);
            gen_push_T0(s);         /* push return address */
            gen_jmp(s, tval);
        }
        break;
    case 0x9a: /* lcall im */
        {
            /* Direct far call: ptr16:16 or ptr16:32 immediate.
               Invalid in 64-bit mode. */
            unsigned int selector, offset;

            if (CODE64(s))
                goto illegal_op;
            ot = dflag ? OT_LONG : OT_WORD;
            offset = insn_get(s, ot);
            selector = insn_get(s, OT_WORD);

            /* T0 = selector, T1 = offset; shared tail does the work. */
            gen_op_movl_T0_im(selector);
            gen_op_movl_T1_imu(offset);
        }
        goto do_lcall;
    case 0xe9: /* jmp im */
        /* Relative near jump, displacement relative to next instruction. */
        if (dflag)
            tval = (int32_t)insn_get(s, OT_LONG);
        else
            tval = (int16_t)insn_get(s, OT_WORD);
        tval += s->pc - s->cs_base;
        if (s->dflag == 0)
            tval &= 0xffff;
        gen_jmp(s, tval);
        break;
    case 0xea: /* ljmp im */
        {
            /* Direct far jump, same immediate layout as lcall above. */
            unsigned int selector, offset;

            if (CODE64(s))
                goto illegal_op;
            ot = dflag ? OT_LONG : OT_WORD;
            offset = insn_get(s, ot);
            selector = insn_get(s, OT_WORD);

            gen_op_movl_T0_im(selector);
            gen_op_movl_T1_imu(offset);
        }
        goto do_ljmp;
    case 0xeb: /* jmp Jb */
        /* Short (8-bit displacement) unconditional jump. */
        tval = (int8_t)insn_get(s, OT_BYTE);
        tval += s->pc - s->cs_base;
        if (s->dflag == 0)
            tval &= 0xffff;
        gen_jmp(s, tval);
        break;
    case 0x70 ... 0x7f: /* jcc Jb */
        /* Short conditional jump; condition encoded in low nibble of b. */
        tval = (int8_t)insn_get(s, OT_BYTE);
        goto do_jcc;
    case 0x180 ... 0x18f: /* jcc Jv */
        /* Near conditional jump with 16/32-bit displacement. */
        if (dflag) {
            tval = (int32_t)insn_get(s, OT_LONG);
        } else {
            tval = (int16_t)insn_get(s, OT_WORD);
        }
    do_jcc:
        next_eip = s->pc - s->cs_base;
        tval += next_eip;
        if (s->dflag == 0)
            tval &= 0xffff;
        gen_jcc(s, b, tval, next_eip);
        break;
6913
    case 0x190 ... 0x19f: /* setcc Gv */
        /* Store 0/1 into an 8-bit r/m depending on the condition in b. */
        modrm = ldub_code(s->pc++);
        gen_setcc(s, b);
        gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
        break;
    case 0x140 ... 0x14f: /* cmov Gv, Ev */
        {
            int l1;
            TCGv t0;

            ot = dflag + OT_WORD;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            /* Local temp: must survive the conditional branch below. */
            t0 = tcg_temp_local_new(TCG_TYPE_TL);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_mov_v_reg(ot, t0, rm);
            }
#ifdef TARGET_X86_64
            if (ot == OT_LONG) {
                /* 32-bit cmov in 64-bit mode: write low 32 bits only when
                   the condition holds, but always clear the high half
                   (matches hardware zero-extension of 32-bit writes). */
                /* XXX: specific Intel behaviour ? */
                l1 = gen_new_label();
                gen_jcc1(s, s->cc_op, b ^ 1, l1);  /* skip store if !cond */
                tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
                gen_set_label(l1);
                tcg_gen_movi_tl(cpu_tmp0, 0);
                tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
            } else
#endif
            {
                l1 = gen_new_label();
                gen_jcc1(s, s->cc_op, b ^ 1, l1);  /* inverted condition */
                gen_op_mov_reg_v(ot, reg, t0);
                gen_set_label(l1);
            }
            tcg_temp_free(t0);
        }
        break;
6956
6957 /************************/
6958 /* flags */
    case 0x9c: /* pushf */
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF);
#ifdef VBOX
        /* VBox: VME lets a 16-bit pushf through in V86 mode with IOPL < 3
           (VIF is substituted for IF by the _vme helper below). */
        if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
#else
        if (s->vm86 && s->iopl != 3) {
#endif
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
#ifdef VBOX
            if (s->vm86 && s->vme && s->iopl != 3)
                tcg_gen_helper_1_0(helper_read_eflags_vme, cpu_T[0]);
            else
#endif
                tcg_gen_helper_1_0(helper_read_eflags, cpu_T[0]);
            gen_push_T0(s);
        }
        break;
    case 0x9d: /* popf */
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF);
#ifdef VBOX
        if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
#else
        if (s->vm86 && s->iopl != 3) {
#endif
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_pop_T0(s);
            /* The writable-flags mask shrinks with privilege:
               CPL 0 may change IOPL; CPL <= IOPL may change IF;
               otherwise neither. 16-bit operand size masks to 0xffff. */
            if (s->cpl == 0) {
                if (s->dflag) {
                    tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
                                       tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
                } else {
                    tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
                                       tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
                }
            } else {
                if (s->cpl <= s->iopl) {
                    if (s->dflag) {
                        tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
                                           tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
                    } else {
                        tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
                                           tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
                    }
                } else {
                    if (s->dflag) {
                        tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
                                           tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
                    } else {
#ifdef VBOX
                        /* VBox: VME 16-bit popf writes VIF instead of IF. */
                        if (s->vm86 && s->vme)
                            tcg_gen_helper_0_1(helper_write_eflags_vme, cpu_T[0]);
                        else
#endif
                            tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
                                               tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
                    }
                }
            }
            gen_pop_update(s);
            s->cc_op = CC_OP_EFLAGS;
            /* abort translation because TF flag may change */
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    case 0x9e: /* sahf */
        /* In 64-bit mode SAHF exists only with the LAHF_LM CPUID bit. */
        if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
            goto illegal_op;
        gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_compute_eflags(cpu_cc_src);
        /* Keep only OF from the old flags, merge S/Z/A/P/C from AH. */
        tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
        tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0x9f: /* lahf */
        if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_compute_eflags(cpu_T[0]);
        /* Note: gen_compute_eflags() only gives the condition codes */
        tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);   /* bit 1 is always set */
        gen_op_mov_reg_T0(OT_BYTE, R_AH);
        break;
    case 0xf5: /* cmc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_compute_eflags(cpu_cc_src);
        tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);  /* toggle CF */
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0xf8: /* clc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_compute_eflags(cpu_cc_src);
        tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C); /* clear CF */
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0xf9: /* stc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_compute_eflags(cpu_cc_src);
        tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);   /* set CF */
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0xfc: /* cld */
        /* env->df is the string-op direction increment: +1 = forward. */
        tcg_gen_movi_i32(cpu_tmp2_i32, 1);
        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
        break;
    case 0xfd: /* std */
        tcg_gen_movi_i32(cpu_tmp2_i32, -1);
        tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
        break;
7079
7080 /************************/
7081 /* bit operations */
    case 0x1ba: /* bt/bts/btr/btc Gv, im */
        /* Immediate-bit-offset form; sub-op selected by modrm reg field. */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        op = (modrm >> 3) & 7;
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        if (mod != 3) {
            s->rip_offset = 1;  /* imm8 follows the modrm/displacement */
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T0_A0(ot + s->mem_index);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
        }
        /* load shift */
        val = ldub_code(s->pc++);
        gen_op_movl_T1_im(val);
        if (op < 4)
            goto illegal_op;    /* /0../3 are undefined for 0F BA */
        op -= 4;                /* map to 0=bt 1=bts 2=btr 3=btc */
        goto bt_op;
    case 0x1a3: /* bt Gv, Ev */
        op = 0;
        goto do_btx;
    case 0x1ab: /* bts */
        op = 1;
        goto do_btx;
    case 0x1b3: /* btr */
        op = 2;
        goto do_btx;
    case 0x1bb: /* btc */
        op = 3;
    do_btx:
        /* Register-bit-offset form. */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        gen_op_mov_TN_reg(OT_LONG, 1, reg);
        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            /* specific case: we need to add a displacement */
            /* For memory operands the bit offset may index outside the
               addressed word: fold (offset >> (3+ot)) words into A0. */
            gen_exts(ot, cpu_T[1]);
            tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
            tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
            tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
            gen_op_ld_T0_A0(ot + s->mem_index);
        } else {
            gen_op_mov_TN_reg(ot, 0, rm);
        }
    bt_op:
        /* Bit position modulo operand width; result goes to CF via cc_src. */
        tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
        switch(op) {
        case 0: /* bt: test only */
            tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
            tcg_gen_movi_tl(cpu_cc_dst, 0);
            break;
        case 1: /* bts: set bit */
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);  /* old bit -> CF */
            tcg_gen_movi_tl(cpu_tmp0, 1);
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
            tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
            break;
        case 2: /* btr: reset bit */
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
            tcg_gen_movi_tl(cpu_tmp0, 1);
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
            tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
            tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
            break;
        default:
        case 3: /* btc: complement bit */
            tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
            tcg_gen_movi_tl(cpu_tmp0, 1);
            tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
            tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
            break;
        }
        s->cc_op = CC_OP_SARB + ot;
        if (op != 0) {
            /* Write back the modified value and expose the old bit as CF. */
            if (mod != 3)
                gen_op_st_T0_A0(ot + s->mem_index);
            else
                gen_op_mov_reg_T0(ot, rm);
            tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
            tcg_gen_movi_tl(cpu_cc_dst, 0);
        }
        break;
    case 0x1bc: /* bsf */
    case 0x1bd: /* bsr */
        {
            int label1;
            TCGv t0;

            ot = dflag + OT_WORD;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            gen_extu(ot, cpu_T[0]);
            label1 = gen_new_label();
            /* cc_dst = 0 encodes ZF=1 (source was zero, dest unchanged). */
            tcg_gen_movi_tl(cpu_cc_dst, 0);
            t0 = tcg_temp_local_new(TCG_TYPE_TL);  /* survives the branch */
            tcg_gen_mov_tl(t0, cpu_T[0]);
            tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, label1);
            if (b & 1) {
                tcg_gen_helper_1_1(helper_bsr, cpu_T[0], t0);
            } else {
                tcg_gen_helper_1_1(helper_bsf, cpu_T[0], t0);
            }
            gen_op_mov_reg_T0(ot, reg);
            tcg_gen_movi_tl(cpu_cc_dst, 1);        /* non-zero -> ZF=0 */
            gen_set_label(label1);
            tcg_gen_discard_tl(cpu_cc_src);
            s->cc_op = CC_OP_LOGICB + ot;
            tcg_temp_free(t0);
        }
        break;
7198 /************************/
7199 /* bcd */
    case 0x27: /* daa */
        /* BCD adjust instructions: invalid in 64-bit mode, all flag
           effects computed by helpers on a flushed EFLAGS state. */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        tcg_gen_helper_0_0(helper_daa);
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0x2f: /* das */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        tcg_gen_helper_0_0(helper_das);
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0x37: /* aaa */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        tcg_gen_helper_0_0(helper_aaa);
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0x3f: /* aas */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        tcg_gen_helper_0_0(helper_aas);
        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0xd4: /* aam */
        if (CODE64(s))
            goto illegal_op;
        val = ldub_code(s->pc++);
        if (val == 0) {
            /* AAM with base 0 raises #DE at translation time. */
            gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
        } else {
            tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
            s->cc_op = CC_OP_LOGICB;
        }
        break;
    case 0xd5: /* aad */
        if (CODE64(s))
            goto illegal_op;
        val = ldub_code(s->pc++);
        tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
        s->cc_op = CC_OP_LOGICB;
        break;
7250 /************************/
7251 /* misc */
    case 0x90: /* nop */
        /* XXX: xchg + rex handling */
        /* XXX: correct lock test for all insn */
        if (prefixes & PREFIX_LOCK)
            goto illegal_op;
        if (prefixes & PREFIX_REPZ) {
            /* F3 90 = PAUSE: only effect here is the SVM intercept check. */
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
        }
        break;
    case 0x9b: /* fwait */
        /* #NM if the FPU is flagged not-present/task-switched (MP+TS). */
        if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
            (HF_MP_MASK | HF_TS_MASK)) {
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_0(helper_fwait);
        }
        break;
    case 0xcc: /* int3 */
#ifdef VBOX
        /* VBox: in V86 mode without VME, INT3 with IOPL < 3 faults. */
        if (s->vm86 && s->iopl != 3 && !s->vme) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else
#endif
            gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
        break;
    case 0xcd: /* int N */
        val = ldub_code(s->pc++);
#ifdef VBOX
        if (s->vm86 && s->iopl != 3 && !s->vme) {
#else
        if (s->vm86 && s->iopl != 3) {
#endif
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
        }
        break;
    case 0xce: /* into */
        /* #OF if OF set; helper needs the instruction length for EIP. */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(pc_start - s->cs_base);
        tcg_gen_helper_0_1(helper_into, tcg_const_i32(s->pc - pc_start));
        break;
    case 0xf1: /* icebp (undocumented, exits to external debugger) */
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP);
#if 1
        gen_debug(s, pc_start - s->cs_base);
#else
        /* start debug */
        tb_flush(cpu_single_env);
        cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
#endif
        break;
    case 0xfa: /* cli */
        if (!s->vm86) {
            if (s->cpl <= s->iopl) {
                tcg_gen_helper_0_0(helper_cli);
            } else {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            }
        } else {
            if (s->iopl == 3) {
                tcg_gen_helper_0_0(helper_cli);
#ifdef VBOX
            /* VBox: VME in V86 mode clears VIF instead of faulting. */
            } else if (s->iopl != 3 && s->vme) {
                tcg_gen_helper_0_0(helper_cli_vme);
#endif
            } else {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            }
        }
        break;
    case 0xfb: /* sti */
        if (!s->vm86) {
            if (s->cpl <= s->iopl) {
            gen_sti:
                tcg_gen_helper_0_0(helper_sti);
                /* interruptions are enabled only the first insn after sti */
                /* If several instructions disable interrupts, only the
                   _first_ does it */
                if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
                    tcg_gen_helper_0_0(helper_set_inhibit_irq);
                /* give a chance to handle pending irqs */
                gen_jmp_im(s->pc - s->cs_base);
                gen_eob(s);
            } else {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            }
        } else {
            if (s->iopl == 3) {
                goto gen_sti;
#ifdef VBOX
            /* VBox: VME sets VIF; note no IRQ-inhibit window here. */
            } else if (s->iopl != 3 && s->vme) {
                tcg_gen_helper_0_0(helper_sti_vme);
                /* give a chance to handle pending irqs */
                gen_jmp_im(s->pc - s->cs_base);
                gen_eob(s);
#endif
            } else {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            }
        }
        break;
    case 0x62: /* bound */
        /* Check register against a [lo, hi] pair in memory; helper raises
           #BR on violation. Memory operand is mandatory. */
        if (CODE64(s))
            goto illegal_op;
        ot = dflag ? OT_LONG : OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        gen_op_mov_TN_reg(ot, 0, reg);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_jmp_im(pc_start - s->cs_base);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        if (ot == OT_WORD)
            tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
        else
            tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
        break;
    case 0x1c8 ... 0x1cf: /* bswap reg */
        reg = (b & 7) | REX_B(s);
#ifdef TARGET_X86_64
        if (dflag == 2) {
            /* 64-bit bswap (REX.W). */
            gen_op_mov_TN_reg(OT_QUAD, 0, reg);
            tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
            gen_op_mov_reg_T0(OT_QUAD, reg);
        } else
        {
            /* 32-bit bswap on a 64-bit target: truncate, swap, zero-extend. */
            TCGv tmp0;
            gen_op_mov_TN_reg(OT_LONG, 0, reg);

            tmp0 = tcg_temp_new(TCG_TYPE_I32);
            tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
            tcg_gen_bswap_i32(tmp0, tmp0);
            tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
            gen_op_mov_reg_T0(OT_LONG, reg);
        }
#else
        {
            gen_op_mov_TN_reg(OT_LONG, 0, reg);
            tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
            gen_op_mov_reg_T0(OT_LONG, reg);
        }
#endif
        break;
    case 0xd6: /* salc */
        /* Undocumented: AL = CF ? 0xff : 0x00 (via negate of carry). */
        if (CODE64(s))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_compute_eflags_c(cpu_T[0]);
        tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
        gen_op_mov_reg_T0(OT_BYTE, R_EAX);
        break;
    case 0xe0: /* loopnz */
    case 0xe1: /* loopz */
    case 0xe2: /* loop */
    case 0xe3: /* jecxz */
        {
            /* Labels: l1 = branch taken, l2 = common exit,
               l3 = not taken (fall through to next_eip). */
            int l1, l2, l3;

            tval = (int8_t)insn_get(s, OT_BYTE);
            next_eip = s->pc - s->cs_base;
            tval += next_eip;
            if (s->dflag == 0)
                tval &= 0xffff;

            l1 = gen_new_label();
            l2 = gen_new_label();
            l3 = gen_new_label();
            b &= 3;
            switch(b) {
            case 0: /* loopnz */
            case 1: /* loopz */
                /* Decrement (E)CX, stop if zero, then test ZF. */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_op_add_reg_im(s->aflag, R_ECX, -1);
                gen_op_jz_ecx(s->aflag, l3);
                gen_compute_eflags(cpu_tmp0);
                tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
                if (b == 0) {
                    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
                } else {
                    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, l1);
                }
                break;
            case 2: /* loop */
                gen_op_add_reg_im(s->aflag, R_ECX, -1);
                gen_op_jnz_ecx(s->aflag, l1);
                break;
            default:
            case 3: /* jcxz */
                gen_op_jz_ecx(s->aflag, l1);
                break;
            }

            gen_set_label(l3);          /* not taken */
            gen_jmp_im(next_eip);
            tcg_gen_br(l2);

            gen_set_label(l1);          /* taken */
            gen_jmp_im(tval);
            gen_set_label(l2);
            gen_eob(s);
        }
        break;
    case 0x130: /* wrmsr */
    case 0x132: /* rdmsr */
        /* CPL 0 only; bit 1 of b distinguishes rdmsr from wrmsr. */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            if (b & 2) {
                tcg_gen_helper_0_0(helper_rdmsr);
            } else {
                tcg_gen_helper_0_0(helper_wrmsr);
            }
        }
        break;
    case 0x131: /* rdtsc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(pc_start - s->cs_base);
        /* With icount, bracket the TSC read and end the TB so the
           instruction counter stays accurate. */
        if (use_icount)
            gen_io_start();
        tcg_gen_helper_0_0(helper_rdtsc);
        if (use_icount) {
            gen_io_end();
            gen_jmp(s, s->pc - s->cs_base);
        }
        break;
    case 0x133: /* rdpmc */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(pc_start - s->cs_base);
        tcg_gen_helper_0_0(helper_rdpmc);
        break;
    case 0x134: /* sysenter */
#ifndef VBOX
        /* For Intel SYSENTER is valid on 64-bit */
        if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
#else
        /** @todo: make things right */
        /* VBox currently rejects SYSENTER in 64-bit mode unconditionally. */
        if (CODE64(s))
#endif
            goto illegal_op;
        if (!s->pe) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            /* Helper changes privilege level; flags state must be flushed
               and left dynamic. */
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            }
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_0(helper_sysenter);
            gen_eob(s);
        }
        break;
    case 0x135: /* sysexit */
#ifndef VBOX
        /* For Intel SYSEXIT is valid on 64-bit */
        if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
#else
        /** @todo: make things right */
        if (CODE64(s))
#endif
            goto illegal_op;
        if (!s->pe) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            }
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_1(helper_sysexit, tcg_const_i32(dflag));
            gen_eob(s);
        }
        break;
#ifdef TARGET_X86_64
    case 0x105: /* syscall */
        /* XXX: is it usable in real mode ? */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_jmp_im(pc_start - s->cs_base);
        /* Helper receives instruction length to compute the return RIP. */
        tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
        gen_eob(s);
        break;
    case 0x107: /* sysret */
        if (!s->pe) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC) {
                gen_op_set_cc_op(s->cc_op);
                s->cc_op = CC_OP_DYNAMIC;
            }
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
            /* condition codes are modified only in long mode */
            if (s->lma)
                s->cc_op = CC_OP_EFLAGS;
            gen_eob(s);
        }
        break;
#endif
    case 0x1a2: /* cpuid */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(pc_start - s->cs_base);
        tcg_gen_helper_0_0(helper_cpuid);
        break;
    case 0xf4: /* hlt */
        /* Privileged: #GP unless CPL 0. The helper halts the vCPU, so
           mark the translation as finished (is_jmp = 3). */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_1(helper_hlt, tcg_const_i32(s->pc - pc_start));
            s->is_jmp = 3;
        }
        break;
    case 0x100:
        /* 0F 00 group: system segment register loads/stores and
           selector verification, selected by the modrm reg field. */
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        switch(op) {
        case 0: /* sldt */
            if (!s->pe || s->vm86)
                goto illegal_op;
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ);
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
            ot = OT_WORD;
            if (mod == 3)
                ot += s->dflag;  /* register destination can take full width */
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
            break;
        case 2: /* lldt */
            if (!s->pe || s->vm86)
                goto illegal_op;
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE);
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
            }
            break;
        case 1: /* str */
            if (!s->pe || s->vm86)
                goto illegal_op;
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ);
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
            ot = OT_WORD;
            if (mod == 3)
                ot += s->dflag;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
            break;
        case 3: /* ltr */
            if (!s->pe || s->vm86)
                goto illegal_op;
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE);
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
                gen_jmp_im(pc_start - s->cs_base);
                tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
                tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
            }
            break;
        case 4: /* verr */
        case 5: /* verw */
            if (!s->pe || s->vm86)
                goto illegal_op;
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            if (op == 4)
                tcg_gen_helper_0_1(helper_verr, cpu_T[0]);
            else
                tcg_gen_helper_0_1(helper_verw, cpu_T[0]);
            s->cc_op = CC_OP_EFLAGS;  /* helpers set ZF */
            break;
        default:
            goto illegal_op;
        }
        break;
    case 0x101:
        /* 0F 01 group: descriptor table ops, MONITOR/MWAIT, SVM
           instructions, SMSW/LMSW, INVLPG/SWAPGS, plus VBox's RDTSCP. */
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
        rm = modrm & 7;

#ifdef VBOX
        /* 0f 01 f9 */
        /* VBox extension: decode RDTSCP here, gated on CPUID bit. */
        if (modrm == 0xf9)
        {
            if (!(s->cpuid_ext2_features & CPUID_EXT2_RDTSCP))
                goto illegal_op;
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_0(helper_rdtscp);
            break;
        }
#endif
        switch(op) {
        case 0: /* sgdt */
            if (mod == 3)
                goto illegal_op;
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            /* Store 16-bit limit then the base at offset +2. */
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
            gen_op_st_T0_A0(OT_WORD + s->mem_index);
            gen_add_A0_im(s, 2);
            tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
            if (!s->dflag)
                gen_op_andl_T0_im(0xffffff);  /* 16-bit op size: 24-bit base */
            gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
            break;
        case 1:
            if (mod == 3) {
                switch (rm) {
                case 0: /* monitor */
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
                        s->cpl != 0)
                        goto illegal_op;
                    if (s->cc_op != CC_OP_DYNAMIC)
                        gen_op_set_cc_op(s->cc_op);
                    gen_jmp_im(pc_start - s->cs_base);
                    /* Monitored address is DS:(E/R)AX. */
#ifdef TARGET_X86_64
                    if (s->aflag == 2) {
                        gen_op_movq_A0_reg(R_EAX);
                    } else
#endif
                    {
                        gen_op_movl_A0_reg(R_EAX);
                        if (s->aflag == 0)
                            gen_op_andl_A0_ffff();
                    }
                    gen_add_A0_ds_seg(s);
                    tcg_gen_helper_0_1(helper_monitor, cpu_A0);
                    break;
                case 1: /* mwait */
                    if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
                        s->cpl != 0)
                        goto illegal_op;
                    if (s->cc_op != CC_OP_DYNAMIC) {
                        gen_op_set_cc_op(s->cc_op);
                        s->cc_op = CC_OP_DYNAMIC;
                    }
                    gen_jmp_im(pc_start - s->cs_base);
                    tcg_gen_helper_0_1(helper_mwait, tcg_const_i32(s->pc - pc_start));
                    gen_eob(s);
                    break;
                default:
                    goto illegal_op;
                }
            } else { /* sidt */
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ);
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
                gen_op_st_T0_A0(OT_WORD + s->mem_index);
                gen_add_A0_im(s, 2);
                tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
                if (!s->dflag)
                    gen_op_andl_T0_im(0xffffff);
                gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
            }
            break;
        case 2: /* lgdt */
        case 3: /* lidt */
            if (mod == 3) {
                /* mod==3 encodings are the SVM instruction family. */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                switch(rm) {
                case 0: /* VMRUN */
                    if (!(s->flags & HF_SVME_MASK) || !s->pe)
                        goto illegal_op;
                    if (s->cpl != 0) {
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                        break;
                    } else {
                        tcg_gen_helper_0_2(helper_vmrun,
                                           tcg_const_i32(s->aflag),
                                           tcg_const_i32(s->pc - pc_start));
                        tcg_gen_exit_tb(0);
                        s->is_jmp = 3;  /* guest world switch ends the TB */
                    }
                    break;
                case 1: /* VMMCALL */
                    if (!(s->flags & HF_SVME_MASK))
                        goto illegal_op;
                    tcg_gen_helper_0_0(helper_vmmcall);
                    break;
                case 2: /* VMLOAD */
                    if (!(s->flags & HF_SVME_MASK) || !s->pe)
                        goto illegal_op;
                    if (s->cpl != 0) {
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                        break;
                    } else {
                        tcg_gen_helper_0_1(helper_vmload,
                                           tcg_const_i32(s->aflag));
                    }
                    break;
                case 3: /* VMSAVE */
                    if (!(s->flags & HF_SVME_MASK) || !s->pe)
                        goto illegal_op;
                    if (s->cpl != 0) {
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                        break;
                    } else {
                        tcg_gen_helper_0_1(helper_vmsave,
                                           tcg_const_i32(s->aflag));
                    }
                    break;
                case 4: /* STGI */
                    /* Also allowed with only the SKINIT CPUID bit. */
                    if ((!(s->flags & HF_SVME_MASK) &&
                         !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
                        !s->pe)
                        goto illegal_op;
                    if (s->cpl != 0) {
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                        break;
                    } else {
                        tcg_gen_helper_0_0(helper_stgi);
                    }
                    break;
                case 5: /* CLGI */
                    if (!(s->flags & HF_SVME_MASK) || !s->pe)
                        goto illegal_op;
                    if (s->cpl != 0) {
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                        break;
                    } else {
                        tcg_gen_helper_0_0(helper_clgi);
                    }
                    break;
                case 6: /* SKINIT */
                    if ((!(s->flags & HF_SVME_MASK) &&
                         !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
                        !s->pe)
                        goto illegal_op;
                    tcg_gen_helper_0_0(helper_skinit);
                    break;
                case 7: /* INVLPGA */
                    if (!(s->flags & HF_SVME_MASK) || !s->pe)
                        goto illegal_op;
                    if (s->cpl != 0) {
                        gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
                        break;
                    } else {
                        tcg_gen_helper_0_1(helper_invlpga,
                                           tcg_const_i32(s->aflag));
                    }
                    break;
                default:
                    goto illegal_op;
                }
            } else if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                /* Actual lgdt/lidt: load limit (word) and base. */
                gen_svm_check_intercept(s, pc_start,
                    op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE);
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T1_A0(OT_WORD + s->mem_index);
                gen_add_A0_im(s, 2);
                gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
                if (!s->dflag)
                    gen_op_andl_T0_im(0xffffff);
                if (op == 2) {
                    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
                    tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
                } else {
                    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
                    tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
                }
            }
            break;
        case 4: /* smsw */
            /* Read low word of CR0; not privileged. */
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0);
            tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
            break;
        case 6: /* lmsw */
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
                gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
                tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
                /* CR0 change may alter paging/PE: end the TB. */
                gen_jmp_im(s->pc - s->cs_base);
                gen_eob(s);
            }
            break;
        case 7: /* invlpg */
            if (s->cpl != 0) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                if (mod == 3) {
#ifdef TARGET_X86_64
                    if (CODE64(s) && rm == 0) {
                        /* swapgs: exchange GS.base with MSR_KERNEL_GS_BASE. */
                        tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
                        tcg_gen_ld_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,kernelgsbase));
                        tcg_gen_st_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
                        tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,kernelgsbase));
                    } else
#endif
                    {
                        goto illegal_op;
                    }
                } else {
                    if (s->cc_op != CC_OP_DYNAMIC)
                        gen_op_set_cc_op(s->cc_op);
                    gen_jmp_im(pc_start - s->cs_base);
                    gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                    tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
                    /* TLB flush invalidates translations: end the TB. */
                    gen_jmp_im(s->pc - s->cs_base);
                    gen_eob(s);
                }
            }
            break;
        default:
            goto illegal_op;
        }
        break;
    case 0x108: /* invd */
    case 0x109: /* wbinvd */
        /* Cache control: privileged, but a no-op in this emulator beyond
           the SVM intercept check. */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD);
            /* nothing to do */
        }
        break;
    case 0x63: /* arpl or movslS (x86_64) */
#ifdef TARGET_X86_64
        if (CODE64(s)) {
            /* In 64-bit mode opcode 0x63 is MOVSXD: sign-extend a 32-bit
               source into the destination register. */
            int d_ot;
            /* d_ot is the size of destination */
            d_ot = dflag + OT_WORD;

            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            rm = (modrm & 7) | REX_B(s);

            if (mod == 3) {
                gen_op_mov_TN_reg(OT_LONG, 0, rm);
                /* sign extend */
                if (d_ot == OT_QUAD)
                    tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
                gen_op_mov_reg_T0(d_ot, reg);
            } else {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if (d_ot == OT_QUAD) {
                    gen_op_lds_T0_A0(OT_LONG + s->mem_index);
                } else {
                    gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                }
                gen_op_mov_reg_T0(d_ot, reg);
            }
        } else
#endif
        {
            /* ARPL: raise dest RPL to src RPL if lower, setting ZF. */
            int label1;
            TCGv t0, t1, t2, a0;

            if (!s->pe || s->vm86)
                goto illegal_op;

            t0 = tcg_temp_local_new(TCG_TYPE_TL);
            t1 = tcg_temp_local_new(TCG_TYPE_TL);
            t2 = tcg_temp_local_new(TCG_TYPE_TL);
#ifdef VBOX
            a0 = tcg_temp_local_new(TCG_TYPE_TL);
#endif
            ot = OT_WORD;
            modrm = ldub_code(s->pc++);
            reg = (modrm >> 3) & 7;
            mod = (modrm >> 6) & 3;
            rm = modrm & 7;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
#ifdef VBOX
                tcg_gen_mov_tl(a0, cpu_A0);
#endif
                gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
            } else {
                gen_op_mov_v_reg(ot, t0, rm);
            }
            gen_op_mov_v_reg(ot, t1, reg);
            tcg_gen_andi_tl(cpu_tmp0, t0, 3);  /* dest RPL */
            tcg_gen_andi_tl(t1, t1, 3);        /* src RPL */
            tcg_gen_movi_tl(t2, 0);            /* t2 -> ZF contribution */
            label1 = gen_new_label();
            tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, t1, label1);
            tcg_gen_andi_tl(t0, t0, ~3);
            tcg_gen_or_tl(t0, t0, t1);         /* adopt the higher RPL */
            tcg_gen_movi_tl(t2, CC_Z);         /* ZF = 1: adjusted */
            gen_set_label(label1);
            if (mod != 3) {
#ifdef VBOX
                /* cpu_A0 doesn't survive branch */
                gen_op_st_v(ot + s->mem_index, t0, a0);
#else
                gen_op_st_v(ot + s->mem_index, t0, cpu_A0);
#endif
            } else {
                gen_op_mov_reg_v(ot, rm, t0);
            }
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_compute_eflags(cpu_cc_src);
            tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
            tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t2);
            s->cc_op = CC_OP_EFLAGS;
            tcg_temp_free(t0);
            tcg_temp_free(t1);
            tcg_temp_free(t2);
#ifdef VBOX
            tcg_temp_free(a0);
#endif
        }
        break;
7992 case 0x102: /* lar */
7993 case 0x103: /* lsl */
7994 {
7995 int label1;
7996 TCGv t0;
7997 if (!s->pe || s->vm86)
7998 goto illegal_op;
7999 ot = dflag ? OT_LONG : OT_WORD;
8000 modrm = ldub_code(s->pc++);
8001 reg = ((modrm >> 3) & 7) | rex_r;
8002 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
8003 t0 = tcg_temp_local_new(TCG_TYPE_TL);
8004 if (s->cc_op != CC_OP_DYNAMIC)
8005 gen_op_set_cc_op(s->cc_op);
8006 if (b == 0x102)
8007 tcg_gen_helper_1_1(helper_lar, t0, cpu_T[0]);
8008 else
8009 tcg_gen_helper_1_1(helper_lsl, t0, cpu_T[0]);
8010 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
8011 label1 = gen_new_label();
8012 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
8013 gen_op_mov_reg_v(ot, reg, t0);
8014 gen_set_label(label1);
8015 s->cc_op = CC_OP_EFLAGS;
8016 tcg_temp_free(t0);
8017 }
8018 break;
8019 case 0x118:
8020 modrm = ldub_code(s->pc++);
8021 mod = (modrm >> 6) & 3;
8022 op = (modrm >> 3) & 7;
8023 switch(op) {
8024 case 0: /* prefetchnta */
8025 case 1: /* prefetchnt0 */
8026 case 2: /* prefetchnt0 */
8027 case 3: /* prefetchnt0 */
8028 if (mod == 3)
8029 goto illegal_op;
8030 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8031 /* nothing more to do */
8032 break;
8033 default: /* nop (multi byte) */
8034 gen_nop_modrm(s, modrm);
8035 break;
8036 }
8037 break;
8038 case 0x119 ... 0x11f: /* nop (multi byte) */
8039 modrm = ldub_code(s->pc++);
8040 gen_nop_modrm(s, modrm);
8041 break;
8042 case 0x120: /* mov reg, crN */
8043 case 0x122: /* mov crN, reg */
8044 if (s->cpl != 0) {
8045 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8046 } else {
8047 modrm = ldub_code(s->pc++);
8048 if ((modrm & 0xc0) != 0xc0)
8049 goto illegal_op;
8050 rm = (modrm & 7) | REX_B(s);
8051 reg = ((modrm >> 3) & 7) | rex_r;
8052 if (CODE64(s))
8053 ot = OT_QUAD;
8054 else
8055 ot = OT_LONG;
8056 switch(reg) {
8057 case 0:
8058 case 2:
8059 case 3:
8060 case 4:
8061 case 8:
8062 if (s->cc_op != CC_OP_DYNAMIC)
8063 gen_op_set_cc_op(s->cc_op);
8064 gen_jmp_im(pc_start - s->cs_base);
8065 if (b & 2) {
8066 gen_op_mov_TN_reg(ot, 0, rm);
8067 tcg_gen_helper_0_2(helper_write_crN,
8068 tcg_const_i32(reg), cpu_T[0]);
8069 gen_jmp_im(s->pc - s->cs_base);
8070 gen_eob(s);
8071 } else {
8072 tcg_gen_helper_1_1(helper_read_crN,
8073 cpu_T[0], tcg_const_i32(reg));
8074 gen_op_mov_reg_T0(ot, rm);
8075 }
8076 break;
8077 default:
8078 goto illegal_op;
8079 }
8080 }
8081 break;
8082 case 0x121: /* mov reg, drN */
8083 case 0x123: /* mov drN, reg */
8084 if (s->cpl != 0) {
8085 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8086 } else {
8087 modrm = ldub_code(s->pc++);
8088 if ((modrm & 0xc0) != 0xc0)
8089 goto illegal_op;
8090 rm = (modrm & 7) | REX_B(s);
8091 reg = ((modrm >> 3) & 7) | rex_r;
8092 if (CODE64(s))
8093 ot = OT_QUAD;
8094 else
8095 ot = OT_LONG;
8096 /* XXX: do it dynamically with CR4.DE bit */
8097 if (reg == 4 || reg == 5 || reg >= 8)
8098 goto illegal_op;
8099 if (b & 2) {
8100 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
8101 gen_op_mov_TN_reg(ot, 0, rm);
8102 tcg_gen_helper_0_2(helper_movl_drN_T0,
8103 tcg_const_i32(reg), cpu_T[0]);
8104 gen_jmp_im(s->pc - s->cs_base);
8105 gen_eob(s);
8106 } else {
8107 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
8108 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
8109 gen_op_mov_reg_T0(ot, rm);
8110 }
8111 }
8112 break;
8113 case 0x106: /* clts */
8114 if (s->cpl != 0) {
8115 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8116 } else {
8117 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
8118 tcg_gen_helper_0_0(helper_clts);
8119 /* abort block because static cpu state changed */
8120 gen_jmp_im(s->pc - s->cs_base);
8121 gen_eob(s);
8122 }
8123 break;
8124 /* MMX/3DNow!/SSE/SSE2/SSE3/SSSE3/SSE4 support */
8125 case 0x1c3: /* MOVNTI reg, mem */
8126 if (!(s->cpuid_features & CPUID_SSE2))
8127 goto illegal_op;
8128 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
8129 modrm = ldub_code(s->pc++);
8130 mod = (modrm >> 6) & 3;
8131 if (mod == 3)
8132 goto illegal_op;
8133 reg = ((modrm >> 3) & 7) | rex_r;
8134 /* generate a generic store */
8135 gen_ldst_modrm(s, modrm, ot, reg, 1);
8136 break;
8137 case 0x1ae:
8138 modrm = ldub_code(s->pc++);
8139 mod = (modrm >> 6) & 3;
8140 op = (modrm >> 3) & 7;
8141 switch(op) {
8142 case 0: /* fxsave */
8143 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
8144 (s->flags & HF_EM_MASK))
8145 goto illegal_op;
8146 if (s->flags & HF_TS_MASK) {
8147 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8148 break;
8149 }
8150 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8151 if (s->cc_op != CC_OP_DYNAMIC)
8152 gen_op_set_cc_op(s->cc_op);
8153 gen_jmp_im(pc_start - s->cs_base);
8154 tcg_gen_helper_0_2(helper_fxsave,
8155 cpu_A0, tcg_const_i32((s->dflag == 2)));
8156 break;
8157 case 1: /* fxrstor */
8158 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
8159 (s->flags & HF_EM_MASK))
8160 goto illegal_op;
8161 if (s->flags & HF_TS_MASK) {
8162 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8163 break;
8164 }
8165 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8166 if (s->cc_op != CC_OP_DYNAMIC)
8167 gen_op_set_cc_op(s->cc_op);
8168 gen_jmp_im(pc_start - s->cs_base);
8169 tcg_gen_helper_0_2(helper_fxrstor,
8170 cpu_A0, tcg_const_i32((s->dflag == 2)));
8171 break;
8172 case 2: /* ldmxcsr */
8173 case 3: /* stmxcsr */
8174 if (s->flags & HF_TS_MASK) {
8175 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8176 break;
8177 }
8178 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
8179 mod == 3)
8180 goto illegal_op;
8181 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8182 if (op == 2) {
8183 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
8184 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
8185 } else {
8186 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
8187 gen_op_st_T0_A0(OT_LONG + s->mem_index);
8188 }
8189 break;
8190 case 5: /* lfence */
8191 case 6: /* mfence */
8192 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
8193 goto illegal_op;
8194 break;
8195 case 7: /* sfence / clflush */
8196 if ((modrm & 0xc7) == 0xc0) {
8197 /* sfence */
8198 /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
8199 if (!(s->cpuid_features & CPUID_SSE))
8200 goto illegal_op;
8201 } else {
8202 /* clflush */
8203 if (!(s->cpuid_features & CPUID_CLFLUSH))
8204 goto illegal_op;
8205 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8206 }
8207 break;
8208 default:
8209 goto illegal_op;
8210 }
8211 break;
8212 case 0x10d: /* 3DNow! prefetch(w) */
8213 modrm = ldub_code(s->pc++);
8214 mod = (modrm >> 6) & 3;
8215 if (mod == 3)
8216 goto illegal_op;
8217 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8218 /* ignore for now */
8219 break;
8220 case 0x1aa: /* rsm */
8221 gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM);
8222 if (!(s->flags & HF_SMM_MASK))
8223 goto illegal_op;
8224 if (s->cc_op != CC_OP_DYNAMIC) {
8225 gen_op_set_cc_op(s->cc_op);
8226 s->cc_op = CC_OP_DYNAMIC;
8227 }
8228 gen_jmp_im(s->pc - s->cs_base);
8229 tcg_gen_helper_0_0(helper_rsm);
8230 gen_eob(s);
8231 break;
8232 case 0x1b8: /* SSE4.2 popcnt */
8233 if ((prefixes & (PREFIX_REPZ | PREFIX_LOCK | PREFIX_REPNZ)) !=
8234 PREFIX_REPZ)
8235 goto illegal_op;
8236 if (!(s->cpuid_ext_features & CPUID_EXT_POPCNT))
8237 goto illegal_op;
8238
8239 modrm = ldub_code(s->pc++);
8240 reg = ((modrm >> 3) & 7);
8241
8242 if (s->prefix & PREFIX_DATA)
8243 ot = OT_WORD;
8244 else if (s->dflag != 2)
8245 ot = OT_LONG;
8246 else
8247 ot = OT_QUAD;
8248
8249 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
8250 tcg_gen_helper_1_2(helper_popcnt,
8251 cpu_T[0], cpu_T[0], tcg_const_i32(ot));
8252 gen_op_mov_reg_T0(ot, reg);
8253
8254 s->cc_op = CC_OP_EFLAGS;
8255 break;
8256 case 0x10e ... 0x10f:
8257 /* 3DNow! instructions, ignore prefixes */
8258 s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
8259 case 0x110 ... 0x117:
8260 case 0x128 ... 0x12f:
8261 case 0x138 ... 0x13a:
8262 case 0x150 ... 0x177:
8263 case 0x17c ... 0x17f:
8264 case 0x1c2:
8265 case 0x1c4 ... 0x1c6:
8266 case 0x1d0 ... 0x1fe:
8267 gen_sse(s, b, pc_start, rex_r);
8268 break;
8269 default:
8270 goto illegal_op;
8271 }
8272 /* lock generation */
8273 if (s->prefix & PREFIX_LOCK)
8274 tcg_gen_helper_0_0(helper_unlock);
8275 return s->pc;
8276 illegal_op:
8277 if (s->prefix & PREFIX_LOCK)
8278 tcg_gen_helper_0_0(helper_unlock);
8279 /* XXX: ensure that no lock was generated */
8280 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
8281 return s->pc;
8282}
8283
/* One-time translator initialization: registers the fixed 'env' pointer
   and the lazy condition-code state fields as TCG globals, and registers
   every helper function with the code generator.  Must run before any
   translation takes place. */
void optimize_flags_init(void)
{
    /* NOTE(review): these asserts appear to pin the CCTable entry size to a
       power of two matching the host pointer width (8 bytes on 32-bit
       hosts, 16 on 64-bit), presumably because it is indexed with shifts
       elsewhere -- confirm against the CCTable users. */
#if TCG_TARGET_REG_BITS == 32
    assert(sizeof(CCTable) == (1 << 3));
#else
    assert(sizeof(CCTable) == (1 << 4));
#endif
    /* the CPU state pointer lives permanently in host register TCG_AREG0 */
    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
    /* lazy EFLAGS evaluation state, mapped onto fields of CPUState */
    cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
                                   TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
    cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
    cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
    cpu_cc_tmp = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_tmp), "cc_tmp");

    /* register helpers */
    /* DEF_HELPER is redefined so that re-including helper.h expands each
       helper declaration into a tcg_register_helper() call */
#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
#include "helper.h"
}
8306
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction (consumed later by
   gen_pc_load to reconstruct guest state). */
#ifndef VBOX
static inline void gen_intermediate_code_internal(CPUState *env,
#else /* VBOX */
DECLINLINE(void) gen_intermediate_code_internal(CPUState *env,
#endif /* VBOX */
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;   /* per-TB decoder state, stack allocated */
    target_ulong pc_ptr;            /* guest PC of the instruction being decoded */
    uint16_t *gen_opc_end;          /* hard limit of the micro-op buffer */
    int j, lj, cflags;
    uint64_t flags;                 /* tb->flags: static CPU state for this TB */
    target_ulong pc_start;
    target_ulong cs_base;
    int num_insns;
    int max_insns;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    /* Unpack the translation-relevant static CPU state from the TB flags
       so the decoder does not have to consult 'env' for it. */
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
#ifdef VBOX_WITH_CALL_RECORD
    dc->vme = !!(env->cr[4] & CR4_VME_MASK);
    /* enable call recording only for paged, 32-bit code with interrupts
       disabled, when not executing in raw ring-0 (VBox call logging) */
    if (    !(env->state & CPU_RAW_RING0)
        &&  (env->cr[0] & CR0_PG_MASK)
        &&  !(env->eflags & X86_EFL_IF)
        &&  dc->code32)
        dc->record_call = 1;
    else
        dc->record_call = 0;
#endif
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;      /* condition codes start out "unknown" */
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;  /* CPL 3: user-mode access index */
        else
            dc->mem_index = 1 * 4;  /* CPL 0..2: supervisor access index */
    }
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
    dc->cpuid_ext2_features = env->cpuid_ext2_features;
    dc->cpuid_ext3_features = env->cpuid_ext3_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    /* direct TB chaining is only allowed when neither single-stepping nor
       an interrupt-inhibit window is pending (and, without soft MMU, never
       -- see the CONFIG_SOFTMMU clause) */
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    /* allocate the TCG temporaries shared by the whole decoder */
    cpu_T[0] = tcg_temp_new(TCG_TYPE_TL);
    cpu_T[1] = tcg_temp_new(TCG_TYPE_TL);
    cpu_A0 = tcg_temp_new(TCG_TYPE_TL);
    cpu_T3 = tcg_temp_new(TCG_TYPE_TL);

    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
    cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
    cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
    cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;                        /* index of last filled gen_opc_* slot */
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;  /* 0 means "no explicit limit" */

    gen_icount_start();
    for(;;) {
        /* emit a debug trap if a breakpoint is set on this instruction */
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            /* record guest PC / cc_op / icount for the current micro-op
               slot; zero-fill the instr_start flags of any ops emitted
               since the previous instruction boundary */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        }
        /* the last instruction of an I/O-bounded TB must run with exact
           icount accounting */
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();

        pc_ptr = disas_insn(dc, pc_ptr);
        num_insns++;
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;
#ifdef VBOX
#ifdef DEBUG
/*
 if(cpu_check_code_raw(env, pc_ptr, env->hflags | (env->eflags & (IOPL_MASK | TF_MASK | VM_MASK))) == ERROR_SUCCESS)
 {
    //should never happen as the jump to the patch code terminates the translation block
    dprintf(("QEmu is about to execute instructions in our patch block at %08X!!\n", pc_ptr));
 }
*/
#endif
        /* VBox single-instruction emulation: consume the request and close
           the TB after exactly one instruction */
        if (env->state & CPU_EMULATE_SINGLE_INSTR)
        {
            env->state &= ~CPU_EMULATE_SINGLE_INSTR;
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
#endif /* VBOX */

        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32) ||
            num_insns >= max_insns) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    /* we don't forget to fill the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
        disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
    }
#endif

    /* size/icount are only meaningful on a real (non-search) translation */
    if (!search_pc) {
        tb->size = pc_ptr - pc_start;
        tb->icount = num_insns;
    }
}
8517
/* Translate basic block 'tb' to micro-ops (no per-op PC bookkeeping). */
void gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}
8522
/* Re-translate 'tb' while recording per-micro-op guest PC information
   (search_pc mode), so guest state can be reconstructed afterwards via
   gen_pc_load. */
void gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}
8527
8528void gen_pc_load(CPUState *env, TranslationBlock *tb,
8529 unsigned long searched_pc, int pc_pos, void *puc)
8530{
8531 int cc_op;
8532#ifdef DEBUG_DISAS
8533 if (loglevel & CPU_LOG_TB_OP) {
8534 int i;
8535 fprintf(logfile, "RESTORE:\n");
8536 for(i = 0;i <= pc_pos; i++) {
8537 if (gen_opc_instr_start[i]) {
8538 fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
8539 }
8540 }
8541 fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
8542 searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
8543 (uint32_t)tb->cs_base);
8544 }
8545#endif
8546 env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
8547 cc_op = gen_opc_cc_op[pc_pos];
8548 if (cc_op != CC_OP_DYNAMIC)
8549 env->cc_op = cc_op;
8550}
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette