VirtualBox

source: vbox/trunk/src/recompiler_new/target-i386/translate.c@14582

Last change on this file since 14582 was 14582, checked in by vboxsync, 16 years ago

Fixed REM problem with loading the wrong register, which led to an
inability to perform 'CMP AH, AL'; flush the disasm log file on TB end

/*
 * i386 translation
 *
 * Copyright (c) 2003 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */

/*
 * Sun LGPL Disclaimer: For the avoidance of doubt, except that if any license choice
 * other than GPL or LGPL is available it will apply instead, Sun elects to use only
 * the Lesser General Public License version 2.1 (LGPLv2) at this time for any software where
 * a choice of LGPL license versions is made available with the language indicating
 * that LGPLv2 or any later version may be used, or where a choice of which version
 * of the LGPL is applied is otherwise unspecified.
 */
#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#ifndef VBOX
#include <inttypes.h>
#include <signal.h>
#include <assert.h>
#endif /* !VBOX */

#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "helper.h"
#include "tcg-op.h"

#define PREFIX_REPZ 0x01
#define PREFIX_REPNZ 0x02
#define PREFIX_LOCK 0x04
#define PREFIX_DATA 0x08
#define PREFIX_ADR 0x10

#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#ifndef VBOX
#define X86_64_DEF(x...) x
#else
#define X86_64_DEF(x...) x
#endif
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#if 1
#define BUGGY_64(x) NULL
#endif
#else
#define X86_64_ONLY(x) NULL
#ifndef VBOX
#define X86_64_DEF(x...)
#else
#define X86_64_DEF(x)
#endif
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif

//#define MACRO_TEST 1

/* global register indexes */
static TCGv cpu_env, cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp;
/* local temps */
static TCGv cpu_T[2], cpu_T3;
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
static TCGv cpu_tmp5, cpu_tmp6;

#include "gen-icount.h"

#ifdef TARGET_X86_64
static int x86_64_hregs;
#endif

#ifdef VBOX

/* Special/override code readers to hide patched code. */

uint8_t ldub_code_raw(target_ulong pc)
{
    uint8_t b;

    if (!remR3GetOpcode(cpu_single_env, pc, &b))
        b = ldub_code(pc);
    return b;
}
#define ldub_code(a) ldub_code_raw(a)

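/* Editor's note: the wide readers below are deliberately composed from
   single-byte ldub_code() fetches (least significant byte first, matching
   the little-endian x86 instruction stream), so that every opcode byte is
   routed through the override above and patched bytes stay hidden. */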
uint16_t lduw_code_raw(target_ulong pc)
{
    return (ldub_code(pc+1) << 8) | ldub_code(pc);
}
#define lduw_code(a) lduw_code_raw(a)


uint32_t ldl_code_raw(target_ulong pc)
{
    return (ldub_code(pc+3) << 24) | (ldub_code(pc+2) << 16) | (ldub_code(pc+1) << 8) | ldub_code(pc);
}
#define ldl_code(a) ldl_code_raw(a)

#endif /* VBOX */


typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;
    int aflag, dflag;
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b;
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
#ifdef VBOX
    int vme;    /* CR4.VME */
    int record_call; /* record calls for CSAM or not? */
#endif
    int cpl;
    int iopl;
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    uint64_t flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;
    int cpuid_ext_features;
    int cpuid_ext2_features;
    int cpuid_ext3_features;
} DisasContext;

static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);

#ifdef VBOX
static void gen_check_external_event();
#endif

/* i386 arith/logic operations */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};

/* i386 shift ops */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7,
};

enum {
    JCC_O,
    JCC_B,
    JCC_Z,
    JCC_BE,
    JCC_S,
    JCC_P,
    JCC_L,
    JCC_LE,
};

/* operand size */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};

enum {
    /* I386 int registers */
    OR_EAX, /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16, /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};

#ifndef VBOX
static inline void gen_op_movl_T0_0(void)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_0(void)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[0], 0);
}

#ifndef VBOX
static inline void gen_op_movl_T0_im(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_im(int32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

#ifndef VBOX
static inline void gen_op_movl_T0_imu(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_imu(uint32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

#ifndef VBOX
static inline void gen_op_movl_T1_im(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T1_im(int32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

#ifndef VBOX
static inline void gen_op_movl_T1_imu(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T1_imu(uint32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

#ifndef VBOX
static inline void gen_op_movl_A0_im(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_A0_im(uint32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_A0, val);
}

#ifdef TARGET_X86_64
#ifndef VBOX
static inline void gen_op_movq_A0_im(int64_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_movq_A0_im(int64_t val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_A0, val);
}
#endif

#ifndef VBOX
static inline void gen_movtl_T0_im(target_ulong val)
#else /* VBOX */
DECLINLINE(void) gen_movtl_T0_im(target_ulong val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[0], val);
}

#ifndef VBOX
static inline void gen_movtl_T1_im(target_ulong val)
#else /* VBOX */
DECLINLINE(void) gen_movtl_T1_im(target_ulong val)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

#ifndef VBOX
static inline void gen_op_andl_T0_ffff(void)
#else /* VBOX */
DECLINLINE(void) gen_op_andl_T0_ffff(void)
#endif /* VBOX */
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
}

#ifndef VBOX
static inline void gen_op_andl_T0_im(uint32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_andl_T0_im(uint32_t val)
#endif /* VBOX */
{
    tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
}

#ifndef VBOX
static inline void gen_op_movl_T0_T1(void)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_T1(void)
#endif /* VBOX */
{
    tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
}

#ifndef VBOX
static inline void gen_op_andl_A0_ffff(void)
#else /* VBOX */
DECLINLINE(void) gen_op_andl_A0_ffff(void)
#endif /* VBOX */
{
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
}

#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#endif /* !TARGET_X86_64 */

#if defined(WORDS_BIGENDIAN)
#define REG_B_OFFSET (sizeof(target_ulong) - 1)
#define REG_H_OFFSET (sizeof(target_ulong) - 2)
#define REG_W_OFFSET (sizeof(target_ulong) - 2)
#define REG_L_OFFSET (sizeof(target_ulong) - 4)
#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
#else
#define REG_B_OFFSET 0
#define REG_H_OFFSET 1
#define REG_W_OFFSET 0
#define REG_L_OFFSET 0
#define REG_LH_OFFSET 4
#endif

#ifndef VBOX
static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_v(int ot, int reg, TCGv t0)
#endif /* VBOX */
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
            tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
        } else {
            tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    case OT_WORD:
        tcg_gen_st16_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case OT_LONG:
        tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case OT_QUAD:
        tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case OT_LONG:
        tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}

#ifndef VBOX
static inline void gen_op_mov_reg_T0(int ot, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_T0(int ot, int reg)
#endif /* VBOX */
{
    gen_op_mov_reg_v(ot, reg, cpu_T[0]);
}

#ifndef VBOX
static inline void gen_op_mov_reg_T1(int ot, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_T1(int ot, int reg)
#endif /* VBOX */
{
    gen_op_mov_reg_v(ot, reg, cpu_T[1]);
}

#ifndef VBOX
static inline void gen_op_mov_reg_A0(int size, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_reg_A0(int size, int reg)
#endif /* VBOX */
{
    switch(size) {
    case 0:
        tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
#ifdef TARGET_X86_64
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        /* high part of register set to zero */
        tcg_gen_movi_tl(cpu_tmp0, 0);
        tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
        break;
    default:
    case 2:
        tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#else
    default:
    case 1:
        tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
        break;
#endif
    }
}

#ifndef VBOX
static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_v_reg(int ot, TCGv t0, int reg)
#endif /* VBOX */
{
    switch(ot) {
    case OT_BYTE:
        if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
#ifndef VBOX
            goto std_case;
#else
            tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
#endif
        } else {
            tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
        }
        break;
    default:
    std_case:
        tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
    }
}

#ifndef VBOX
static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_mov_TN_reg(int ot, int t_index, int reg)
#endif /* VBOX */
{
    gen_op_mov_v_reg(ot, cpu_T[t_index], reg);
}

#ifndef VBOX
static inline void gen_op_movl_A0_reg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_A0_reg(int reg)
#endif /* VBOX */
{
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
}

#ifndef VBOX
static inline void gen_op_addl_A0_im(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_A0_im(int32_t val)
#endif /* VBOX */
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
#ifndef VBOX
static inline void gen_op_addq_A0_im(int64_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_addq_A0_im(int64_t val)
#endif /* VBOX */
{
    tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
}
#endif

static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}

#ifndef VBOX
static inline void gen_op_addl_T0_T1(void)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_T0_T1(void)
#endif /* VBOX */
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

#ifndef VBOX
static inline void gen_op_jmp_T0(void)
#else /* VBOX */
DECLINLINE(void) gen_op_jmp_T0(void)
#endif /* VBOX */
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
}

#ifndef VBOX
static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_add_reg_im(int size, int reg, int32_t val)
#endif /* VBOX */
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}

#ifndef VBOX
static inline void gen_op_add_reg_T0(int size, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_add_reg_T0(int size, int reg)
#endif /* VBOX */
{
    switch(size) {
    case 0:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
        break;
    case 1:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
#ifdef TARGET_X86_64
        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
#endif
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#ifdef TARGET_X86_64
    case 2:
        tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
        tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
        break;
#endif
    }
}

#ifndef VBOX
static inline void gen_op_set_cc_op(int32_t val)
#else /* VBOX */
DECLINLINE(void) gen_op_set_cc_op(int32_t val)
#endif /* VBOX */
{
    tcg_gen_movi_i32(cpu_cc_op, val);
}

#ifndef VBOX
static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_A0_reg_sN(int shift, int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}
#ifdef VBOX
DECLINLINE(void) gen_op_seg_check(int reg, bool keepA0)
{
    /* It seems segments don't get out of sync; if they in fact do, enable the code below. */
#if 0
    /* Our segments could be outdated, so check the newselector field to see whether an update is really needed. */
    int skip_label;
    TCGv t0, a0;

    /* For the other segments this check is a waste of time, and TCG cannot cope with this code
       for data/stack segments anyway, as it expects cpu_T[0] to be live. */
    if (reg != R_GS)
        return;

    if (keepA0)
    {
        /* we need to store old cpu_A0 */
        a0 = tcg_temp_local_new(TCG_TYPE_TL);
        tcg_gen_mov_tl(a0, cpu_A0);
    }

    skip_label = gen_new_label();
    t0 = tcg_temp_local_new(TCG_TYPE_TL);

    tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, segs[reg].newselector) + REG_L_OFFSET);
    tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, skip_label);
    tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, eflags) + REG_L_OFFSET);
    tcg_gen_andi_tl(t0, t0, VM_MASK);
    tcg_gen_brcondi_i32(TCG_COND_NE, t0, 0, skip_label);
    tcg_gen_movi_tl(t0, reg);

    tcg_gen_helper_0_1(helper_sync_seg, t0);

    tcg_temp_free(t0);

    gen_set_label(skip_label);
    if (keepA0)
    {
        tcg_gen_mov_tl(cpu_A0, a0);
        tcg_temp_free(a0);
    }
#endif /* 0 */
}
#endif

#ifndef VBOX
static inline void gen_op_movl_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_A0_seg(int reg)
#endif /* VBOX */
{
#ifdef VBOX
    gen_op_seg_check(reg, false);
#endif
    tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
}

#ifndef VBOX
static inline void gen_op_addl_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addl_A0_seg(int reg)
#endif /* VBOX */
{
#ifdef VBOX
    gen_op_seg_check(reg, true);
#endif
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
#ifdef TARGET_X86_64
    tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
#endif
}

#ifdef TARGET_X86_64
#ifndef VBOX
static inline void gen_op_movq_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movq_A0_seg(int reg)
#endif /* VBOX */
{
#ifdef VBOX
    gen_op_seg_check(reg, false);
#endif
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
}

#ifndef VBOX
static inline void gen_op_addq_A0_seg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addq_A0_seg(int reg)
#endif /* VBOX */
{
#ifdef VBOX
    gen_op_seg_check(reg, true);
#endif
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}

#ifndef VBOX
static inline void gen_op_movq_A0_reg(int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_movq_A0_reg(int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
}

#ifndef VBOX
static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
#else /* VBOX */
DECLINLINE(void) gen_op_addq_A0_reg_sN(int shift, int reg)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
    if (shift != 0)
        tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
    tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
}
#endif

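/* Editor's note: for the load/store generators below, 'idx' packs two
   things: the low two bits select the operand size (OT_BYTE..OT_QUAD) and
   the upper bits carry the softmmu memory index. Callers pass
   'ot + s->mem_index', so s->mem_index is evidently stored pre-shifted,
   and '(idx >> 2) - 1' recovers the MMU index. */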
#ifndef VBOX
static inline void gen_op_lds_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_lds_T0_A0(int idx)
#endif /* VBOX */
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
        break;
    default:
    case 2:
        tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
        break;
    }
}

#ifndef VBOX
static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0)
#else /* VBOX */
DECLINLINE(void) gen_op_ld_v(int idx, TCGv t0, TCGv a0)
#endif /* VBOX */
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_ld8u(t0, a0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_ld16u(t0, a0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_ld32u(t0, a0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_ld64(t0, a0, mem_index);
        break;
    }
}

/* XXX: always use ldu or lds */
#ifndef VBOX
static inline void gen_op_ld_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_ld_T0_A0(int idx)
#endif /* VBOX */
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}

#ifndef VBOX
static inline void gen_op_ldu_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_ldu_T0_A0(int idx)
#endif /* VBOX */
{
    gen_op_ld_v(idx, cpu_T[0], cpu_A0);
}

#ifndef VBOX
static inline void gen_op_ld_T1_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_ld_T1_A0(int idx)
#endif /* VBOX */
{
    gen_op_ld_v(idx, cpu_T[1], cpu_A0);
}

#ifndef VBOX
static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0)
#else /* VBOX */
DECLINLINE(void) gen_op_st_v(int idx, TCGv t0, TCGv a0)
#endif /* VBOX */
{
    int mem_index = (idx >> 2) - 1;
    switch(idx & 3) {
    case 0:
        tcg_gen_qemu_st8(t0, a0, mem_index);
        break;
    case 1:
        tcg_gen_qemu_st16(t0, a0, mem_index);
        break;
    case 2:
        tcg_gen_qemu_st32(t0, a0, mem_index);
        break;
    default:
    case 3:
        tcg_gen_qemu_st64(t0, a0, mem_index);
        break;
    }
}

#ifndef VBOX
static inline void gen_op_st_T0_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_st_T0_A0(int idx)
#endif /* VBOX */
{
    gen_op_st_v(idx, cpu_T[0], cpu_A0);
}

#ifndef VBOX
static inline void gen_op_st_T1_A0(int idx)
#else /* VBOX */
DECLINLINE(void) gen_op_st_T1_A0(int idx)
#endif /* VBOX */
{
    gen_op_st_v(idx, cpu_T[1], cpu_A0);
}

#ifdef VBOX
static void gen_check_external_event()
{
    int skip_label;
    TCGv t0;

    skip_label = gen_new_label();
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    /* t0 = cpu_tmp0; */

    tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, interrupt_request));
    /* Keep in sync with helper_check_external_event() */
    tcg_gen_andi_tl(t0, t0,
                    CPU_INTERRUPT_EXTERNAL_EXIT
                    | CPU_INTERRUPT_EXTERNAL_TIMER
                    | CPU_INTERRUPT_EXTERNAL_DMA
                    | CPU_INTERRUPT_EXTERNAL_HARD);
    /** @todo: predict branch as taken */
    tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, skip_label);
    tcg_temp_free(t0);

    tcg_gen_helper_0_0(helper_check_external_event);

    gen_set_label(skip_label);
}

static void gen_check_external_event2()
{
    tcg_gen_helper_0_0(helper_check_external_event);
}

#endif

#ifndef VBOX
static inline void gen_jmp_im(target_ulong pc)
#else /* VBOX */
DECLINLINE(void) gen_jmp_im(target_ulong pc)
#endif /* VBOX */
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
}

#ifdef VBOX
DECLINLINE(void) gen_update_eip(target_ulong pc)
{
    gen_jmp_im(pc);
#ifdef VBOX_DUMP_STATE
    tcg_gen_helper_0_0(helper_dump_state);
#endif
}

#endif

#ifndef VBOX
static inline void gen_string_movl_A0_ESI(DisasContext *s)
#else /* VBOX */
DECLINLINE(void) gen_string_movl_A0_ESI(DisasContext *s)
#endif /* VBOX */
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        if (override >= 0) {
            gen_op_movq_A0_seg(override);
            gen_op_addq_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movq_A0_reg(R_ESI);
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(override);
            gen_op_addl_A0_reg_sN(0, R_ESI);
        } else {
            gen_op_movl_A0_reg(R_ESI);
        }
    } else {
        /* 16 bit address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg(R_ESI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(override);
    }
}

#ifndef VBOX
static inline void gen_string_movl_A0_EDI(DisasContext *s)
#else /* VBOX */
DECLINLINE(void) gen_string_movl_A0_EDI(DisasContext *s)
#endif /* VBOX */
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg(R_EDI);
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(R_ES);
            gen_op_addl_A0_reg_sN(0, R_EDI);
        } else {
            gen_op_movl_A0_reg(R_EDI);
        }
    } else {
        gen_op_movl_A0_reg(R_EDI);
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(R_ES);
    }
}

#ifndef VBOX
static inline void gen_op_movl_T0_Dshift(int ot)
#else /* VBOX */
DECLINLINE(void) gen_op_movl_T0_Dshift(int ot)
#endif /* VBOX */
{
    tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
    tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
}

static void gen_extu(int ot, TCGv reg)
{
    switch(ot) {
    case OT_BYTE:
        tcg_gen_ext8u_tl(reg, reg);
        break;
    case OT_WORD:
        tcg_gen_ext16u_tl(reg, reg);
        break;
    case OT_LONG:
        tcg_gen_ext32u_tl(reg, reg);
        break;
    default:
        break;
    }
}

static void gen_exts(int ot, TCGv reg)
{
    switch(ot) {
    case OT_BYTE:
        tcg_gen_ext8s_tl(reg, reg);
        break;
    case OT_WORD:
        tcg_gen_ext16s_tl(reg, reg);
        break;
    case OT_LONG:
        tcg_gen_ext32s_tl(reg, reg);
        break;
    default:
        break;
    }
}

#ifndef VBOX
static inline void gen_op_jnz_ecx(int size, int label1)
#else /* VBOX */
DECLINLINE(void) gen_op_jnz_ecx(int size, int label1)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
}

#ifndef VBOX
static inline void gen_op_jz_ecx(int size, int label1)
#else /* VBOX */
DECLINLINE(void) gen_op_jz_ecx(int size, int label1)
#endif /* VBOX */
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
    gen_extu(size + 1, cpu_tmp0);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
}

static void *helper_in_func[3] = {
    helper_inb,
    helper_inw,
    helper_inl,
};

static void *helper_out_func[3] = {
    helper_outb,
    helper_outw,
    helper_outl,
};

static void *gen_check_io_func[3] = {
    helper_check_iob,
    helper_check_iow,
    helper_check_iol,
};

static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
                         uint32_t svm_flags)
{
    int state_saved;
    target_ulong next_eip;

    state_saved = 0;
    if (s->pe && (s->cpl > s->iopl || s->vm86)) {
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        state_saved = 1;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_1(gen_check_io_func[ot],
                           cpu_tmp2_i32);
    }
    if (s->flags & HF_SVMI_MASK) {
        if (!state_saved) {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(cur_eip);
            state_saved = 1;
        }
        svm_flags |= (1 << (4 + ot));
        next_eip = s->pc - s->cs_base;
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
        tcg_gen_helper_0_3(helper_svm_check_io,
                           cpu_tmp2_i32,
                           tcg_const_i32(svm_flags),
                           tcg_const_i32(next_eip - cur_eip));
    }
#ifdef VBOX
    gen_check_external_event2(s);
#endif /* VBOX */
}

#ifndef VBOX
static inline void gen_movs(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_movs(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

#ifndef VBOX
static inline void gen_update_cc_op(DisasContext *s)
#else /* VBOX */
DECLINLINE(void) gen_update_cc_op(DisasContext *s)
#endif /* VBOX */
{
    if (s->cc_op != CC_OP_DYNAMIC) {
        gen_op_set_cc_op(s->cc_op);
        s->cc_op = CC_OP_DYNAMIC;
    }
}

static void gen_op_update1_cc(void)
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static void gen_op_update2_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

#ifndef VBOX
static inline void gen_op_cmpl_T0_T1_cc(void)
#else /* VBOX */
DECLINLINE(void) gen_op_cmpl_T0_T1_cc(void)
#endif /* VBOX */
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

#ifndef VBOX
static inline void gen_op_testl_T0_T1_cc(void)
#else /* VBOX */
DECLINLINE(void) gen_op_testl_T0_T1_cc(void)
#endif /* VBOX */
{
    tcg_gen_discard_tl(cpu_cc_src);
    tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
}

static void gen_op_update_neg_cc(void)
{
    tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

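/* Editor's note on the lazy-flags dispatch below: cc_op indexes the global
   cc_table[], whose entries hold the compute_all/compute_c helper pointers
   for each CC_OP_* variant. The generated code scales cc_op by
   sizeof(CCTable) (a shift by 3 on 32-bit hosts, 4 on 64-bit ones, i.e.
   two function pointers per entry), loads the function pointer and emits
   an indirect call to it. */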
/* compute eflags.C to reg */
static void gen_compute_eflags_c(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_c));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}

/* compute all eflags to cc_src */
static void gen_compute_eflags(TCGv reg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
    tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#else
    tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
    tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
    tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
                     (long)cc_table + offsetof(CCTable, compute_all));
    tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
    tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
                 1, &cpu_tmp2_i32, 0, NULL);
#endif
    tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
}

#ifndef VBOX
static inline void gen_setcc_slow_T0(DisasContext *s, int jcc_op)
#else /* VBOX */
DECLINLINE(void) gen_setcc_slow_T0(DisasContext *s, int jcc_op)
#endif /* VBOX */
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    switch(jcc_op) {
    case JCC_O:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 11);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_B:
        gen_compute_eflags_c(cpu_T[0]);
        break;
    case JCC_Z:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 6);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_BE:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 6);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_S:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 7);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_P:
        gen_compute_eflags(cpu_T[0]);
        tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 2);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    case JCC_L:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 7); /* CC_S */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    default:
    case JCC_LE:
        gen_compute_eflags(cpu_tmp0);
        tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
        tcg_gen_shri_tl(cpu_tmp4, cpu_tmp0, 7); /* CC_S */
        tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 6); /* CC_Z */
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
        tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
        break;
    }
}

/* return true if setcc_slow is not needed (WARNING: must be kept in
   sync with gen_jcc1) */
static int is_fast_jcc_case(DisasContext *s, int b)
{
    int jcc_op;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        if (jcc_op == JCC_O || jcc_op == JCC_P)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        if (jcc_op != JCC_Z && jcc_op != JCC_S)
            goto slow_jcc;
        break;
    default:
    slow_jcc:
        return 0;
    }
    return 1;
}

/* generate a conditional jump to label 'l1' according to jump opcode
   value 'b'. In the fast case, T0 is guaranteed not to be used. */
#ifndef VBOX
static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
#else /* VBOX */
DECLINLINE(void) gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
#endif /* VBOX */
{
    int inv, jcc_op, size, cond;
    TCGv t0;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    switch(cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:

        size = cc_op - CC_OP_SUBB;
        switch(jcc_op) {
        case JCC_Z:
        fast_jcc_z:
            switch(size) {
            case 0:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xff);
                t0 = cpu_tmp0;
                break;
            case 1:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffff);
                t0 = cpu_tmp0;
                break;
#ifdef TARGET_X86_64
            case 2:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffffffff);
                t0 = cpu_tmp0;
                break;
#endif
            default:
                t0 = cpu_cc_dst;
                break;
            }
            tcg_gen_brcondi_tl(inv ? TCG_COND_NE : TCG_COND_EQ, t0, 0, l1);
            break;
        case JCC_S:
        fast_jcc_s:
            switch(size) {
            case 0:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
            case 1:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x8000);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
#ifdef TARGET_X86_64
            case 2:
                tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80000000);
                tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
                                   0, l1);
                break;
#endif
            default:
                tcg_gen_brcondi_tl(inv ? TCG_COND_GE : TCG_COND_LT, cpu_cc_dst,
                                   0, l1);
                break;
            }
            break;

        case JCC_B:
            cond = inv ? TCG_COND_GEU : TCG_COND_LTU;
            goto fast_jcc_b;
        case JCC_BE:
            cond = inv ? TCG_COND_GTU : TCG_COND_LEU;
        fast_jcc_b:
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
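            /* Editor's note: after a SUB/CMP, cc_dst holds the result
               (src1 - src2) and cc_src holds src2, so src1 is recovered
               here as cc_dst + cc_src; the compare below is then done
               directly on src1 vs src2. */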
            switch(size) {
            case 0:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xff);
                break;
            case 1:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffff);
                break;
#ifdef TARGET_X86_64
            case 2:
                t0 = cpu_tmp0;
                tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffffffff);
                tcg_gen_andi_tl(t0, cpu_cc_src, 0xffffffff);
                break;
#endif
            default:
                t0 = cpu_cc_src;
                break;
            }
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
            break;

        case JCC_L:
            cond = inv ? TCG_COND_GE : TCG_COND_LT;
            goto fast_jcc_l;
        case JCC_LE:
            cond = inv ? TCG_COND_GT : TCG_COND_LE;
        fast_jcc_l:
            tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
            switch(size) {
            case 0:
                t0 = cpu_tmp0;
                tcg_gen_ext8s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext8s_tl(t0, cpu_cc_src);
                break;
            case 1:
                t0 = cpu_tmp0;
                tcg_gen_ext16s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext16s_tl(t0, cpu_cc_src);
                break;
#ifdef TARGET_X86_64
            case 2:
                t0 = cpu_tmp0;
                tcg_gen_ext32s_tl(cpu_tmp4, cpu_tmp4);
                tcg_gen_ext32s_tl(t0, cpu_cc_src);
                break;
#endif
            default:
                t0 = cpu_cc_src;
                break;
            }
            tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
            break;

        default:
            goto slow_jcc;
        }
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_ADCB:
    case CC_OP_ADCW:
    case CC_OP_ADCL:
    case CC_OP_ADCQ:

    case CC_OP_SBBB:
    case CC_OP_SBBW:
    case CC_OP_SBBL:
    case CC_OP_SBBQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:

    case CC_OP_SARB:
    case CC_OP_SARW:
    case CC_OP_SARL:
    case CC_OP_SARQ:
        switch(jcc_op) {
        case JCC_Z:
            size = (cc_op - CC_OP_ADDB) & 3;
            goto fast_jcc_z;
        case JCC_S:
            size = (cc_op - CC_OP_ADDB) & 3;
            goto fast_jcc_s;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        gen_setcc_slow_T0(s, jcc_op);
        tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE,
                           cpu_T[0], 0, l1);
        break;
    }
}

/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx(s->aflag, l1);
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);
    gen_set_label(l1);
    return l2;
}

#ifndef VBOX
static inline void gen_stos(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_stos(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

#ifndef VBOX
static inline void gen_lods(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_lods(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_op_mov_reg_T0(ot, R_EAX);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
}

#ifndef VBOX
static inline void gen_scas(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_scas(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

#ifndef VBOX
static inline void gen_cmps(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_cmps(DisasContext *s, int ot)
#endif /* VBOX */
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0(ot + s->mem_index);
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    gen_op_add_reg_T0(s->aflag, R_EDI);
}

#ifndef VBOX
static inline void gen_ins(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_ins(DisasContext *s, int ot)
#endif /* VBOX */
{
    if (use_icount)
        gen_io_start();
    gen_string_movl_A0_EDI(s);
    /* Note: we must do this dummy write first to be restartable in
       case of page fault. */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
    gen_op_st_T0_A0(ot + s->mem_index);
    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_EDI);
    if (use_icount)
        gen_io_end();
}

#ifndef VBOX
static inline void gen_outs(DisasContext *s, int ot)
#else /* VBOX */
DECLINLINE(void) gen_outs(DisasContext *s, int ot)
#endif /* VBOX */
{
    if (use_icount)
        gen_io_start();
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0(ot + s->mem_index);

    gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
    tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
    tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
    tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
    tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);

    gen_op_movl_T0_Dshift(ot);
    gen_op_add_reg_T0(s->aflag, R_ESI);
    if (use_icount)
        gen_io_end();
}

/* same method as Valgrind : we generate jumps to current or next
   instruction */
#ifndef VBOX
#define GEN_REPZ(op) \
static inline void gen_repz_ ## op(DisasContext *s, int ot, \
                                   target_ulong cur_eip, target_ulong next_eip) \
{ \
    int l2; \
    gen_update_cc_op(s); \
    l2 = gen_jz_ecx_string(s, next_eip); \
    gen_ ## op(s, ot); \
    gen_op_add_reg_im(s->aflag, R_ECX, -1); \
    /* a loop would cause two single step exceptions if ECX = 1 \
       before rep string_insn */ \
    if (!s->jmp_opt) \
        gen_op_jz_ecx(s->aflag, l2); \
    gen_jmp(s, cur_eip); \
}
#else /* VBOX */
#define GEN_REPZ(op) \
DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot, \
                                 target_ulong cur_eip, target_ulong next_eip) \
{ \
    int l2; \
    gen_update_cc_op(s); \
    l2 = gen_jz_ecx_string(s, next_eip); \
    gen_ ## op(s, ot); \
    gen_op_add_reg_im(s->aflag, R_ECX, -1); \
    /* a loop would cause two single step exceptions if ECX = 1 \
       before rep string_insn */ \
    if (!s->jmp_opt) \
        gen_op_jz_ecx(s->aflag, l2); \
    gen_jmp(s, cur_eip); \
}
#endif /* VBOX */

#ifndef VBOX
#define GEN_REPZ2(op) \
static inline void gen_repz_ ## op(DisasContext *s, int ot, \
                                   target_ulong cur_eip, \
                                   target_ulong next_eip, \
                                   int nz) \
{ \
    int l2; \
    gen_update_cc_op(s); \
    l2 = gen_jz_ecx_string(s, next_eip); \
    gen_ ## op(s, ot); \
    gen_op_add_reg_im(s->aflag, R_ECX, -1); \
    gen_op_set_cc_op(CC_OP_SUBB + ot); \
    gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2); \
    if (!s->jmp_opt) \
        gen_op_jz_ecx(s->aflag, l2); \
    gen_jmp(s, cur_eip); \
}
#else /* VBOX */
#define GEN_REPZ2(op) \
DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot, \
                                 target_ulong cur_eip, \
                                 target_ulong next_eip, \
                                 int nz) \
{ \
    int l2; \
    gen_update_cc_op(s); \
    l2 = gen_jz_ecx_string(s, next_eip); \
    gen_ ## op(s, ot); \
    gen_op_add_reg_im(s->aflag, R_ECX, -1); \
    gen_op_set_cc_op(CC_OP_SUBB + ot); \
    gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2); \
    if (!s->jmp_opt) \
        gen_op_jz_ecx(s->aflag, l2); \
    gen_jmp(s, cur_eip); \
}
#endif /* VBOX */

GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)

static void *helper_fp_arith_ST0_FT0[8] = {
    helper_fadd_ST0_FT0,
    helper_fmul_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fcom_ST0_FT0,
    helper_fsub_ST0_FT0,
    helper_fsubr_ST0_FT0,
    helper_fdiv_ST0_FT0,
    helper_fdivr_ST0_FT0,
};

/* NOTE the exception in "r" op ordering */
static void *helper_fp_arith_STN_ST0[8] = {
    helper_fadd_STN_ST0,
    helper_fmul_STN_ST0,
    NULL,
    NULL,
    helper_fsubr_STN_ST0,
    helper_fsub_STN_ST0,
    helper_fdivr_STN_ST0,
    helper_fdiv_STN_ST0,
};

/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg(ot, 0, d);
    } else {
        gen_op_ld_T0_A0(ot + s1->mem_index);
    }
    switch(op) {
    case OP_ADCL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
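        /* Editor's note: the cc_op must record whether the carry-in was
           set, so the three ops below compute
           cc_op = CC_OP_ADDB + ot + (carry ? 4 : 0) at run time; a set
           carry selects the CC_OP_ADC* entry, since the four ADC variants
           directly follow the four ADD variants in the enum. */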
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_SBBL:
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        gen_compute_eflags_c(cpu_tmp4);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
        tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
        tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
        s1->cc_op = CC_OP_DYNAMIC;
        break;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_ADDB + ot;
        break;
    case OP_SUBL:
        tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update2_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    default:
    case OP_ANDL:
        tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_ORL:
        tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_XORL:
        tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        if (d != OR_TMP0)
            gen_op_mov_reg_T0(ot, d);
        else
            gen_op_st_T0_A0(ot + s1->mem_index);
        gen_op_update1_cc();
        s1->cc_op = CC_OP_LOGICB + ot;
        break;
    case OP_CMPL:
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        break;
    }
}

/* if d == OR_TMP0, it means memory operand (address in A0) */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg(ot, 0, d);
    else
        gen_op_ld_T0_A0(ot + s1->mem_index);
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0(ot, d);
    else
        gen_op_st_T0_A0(ot + s1->mem_index);
    gen_compute_eflags_c(cpu_cc_src);
    tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
}

static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
                            int is_right, int is_arith)
{
    target_ulong mask;
    int shift_label;
    TCGv t0, t1;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);

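    /* Editor's note: T3 receives the value shifted by (count - 1): its
       top bit (left shift) or bottom bit (right shift) is the last bit
       shifted out, i.e. the carry that the flag update below needs. */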
    tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);

    if (is_right) {
        if (is_arith) {
            gen_exts(ot, cpu_T[0]);
            tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        } else {
            gen_extu(ot, cpu_T[0]);
            tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
            tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
        }
    } else {
        tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
        tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* XXX: inefficient */
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);

    tcg_gen_mov_tl(t0, cpu_T[0]);
    tcg_gen_mov_tl(t1, cpu_T3);

    shift_label = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_T[1], 0, shift_label);

    tcg_gen_mov_tl(cpu_cc_src, t1);
    tcg_gen_mov_tl(cpu_cc_dst, t0);
    if (is_right)
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
    else
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);

    gen_set_label(shift_label);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
}

static void gen_shift_rm_im(DisasContext *s, int ot, int op1, int op2,
                            int is_right, int is_arith)
{
    int mask;

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    op2 &= mask;
    if (op2 != 0) {
        if (is_right) {
            if (is_arith) {
                gen_exts(ot, cpu_T[0]);
                tcg_gen_sari_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_sari_tl(cpu_T[0], cpu_T[0], op2);
            } else {
                gen_extu(ot, cpu_T[0]);
                tcg_gen_shri_tl(cpu_tmp4, cpu_T[0], op2 - 1);
                tcg_gen_shri_tl(cpu_T[0], cpu_T[0], op2);
            }
        } else {
            tcg_gen_shli_tl(cpu_tmp4, cpu_T[0], op2 - 1);
            tcg_gen_shli_tl(cpu_T[0], cpu_T[0], op2);
        }
    }

    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags if non zero shift */
    if (op2 != 0) {
        tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
        tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
        if (is_right)
            s->cc_op = CC_OP_SARB + ot;
        else
            s->cc_op = CC_OP_SHLB + ot;
    }
}

#ifndef VBOX
static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
#else /* VBOX */
DECLINLINE(void) tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
#endif /* VBOX */
{
    if (arg2 >= 0)
        tcg_gen_shli_tl(ret, arg1, arg2);
    else
        tcg_gen_shri_tl(ret, arg1, -arg2);
}

/* XXX: add faster immediate case */
static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
                          int is_right)
{
    target_ulong mask;
    int label1, label2, data_bits;
    TCGv t0, t1, t2, a0;

    /* XXX: inefficient, but we must use local temps */
    t0 = tcg_temp_local_new(TCG_TYPE_TL);
    t1 = tcg_temp_local_new(TCG_TYPE_TL);
    t2 = tcg_temp_local_new(TCG_TYPE_TL);
    a0 = tcg_temp_local_new(TCG_TYPE_TL);

    if (ot == OT_QUAD)
        mask = 0x3f;
    else
        mask = 0x1f;

    /* load */
    if (op1 == OR_TMP0) {
        tcg_gen_mov_tl(a0, cpu_A0);
        gen_op_ld_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_v_reg(ot, t0, op1);
    }

    tcg_gen_mov_tl(t1, cpu_T[1]);

    tcg_gen_andi_tl(t1, t1, mask);

    /* Must test zero case to avoid using undefined behaviour in TCG
       shifts. */
    label1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label1);

    if (ot <= OT_WORD)
        tcg_gen_andi_tl(cpu_tmp0, t1, (1 << (3 + ot)) - 1);
    else
        tcg_gen_mov_tl(cpu_tmp0, t1);

    gen_extu(ot, t0);
    tcg_gen_mov_tl(t2, t0);

    data_bits = 8 << ot;
    /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
       fix TCG definition) */
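    /* Editor's note: the rotation is synthesized from two shifts, e.g.
       rotate-right by n = (x >> n) | (x << (bits - n)), and symmetrically
       for rotate-left; the zero-count case was branched around above. */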
    if (is_right) {
        tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shl_tl(t0, t0, cpu_tmp0);
    } else {
        tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp0);
        tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
        tcg_gen_shr_tl(t0, t0, cpu_tmp0);
    }
    tcg_gen_or_tl(t0, t0, cpu_tmp4);

    gen_set_label(label1);
    /* store */
    if (op1 == OR_TMP0) {
        gen_op_st_v(ot + s->mem_index, t0, a0);
    } else {
        gen_op_mov_reg_v(ot, op1, t0);
    }

    /* update eflags */
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    label2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label2);

    gen_compute_eflags(cpu_cc_src);
    tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
    tcg_gen_xor_tl(cpu_tmp0, t2, t0);
    tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
    if (is_right) {
        tcg_gen_shri_tl(t0, t0, data_bits - 1);
    }
    tcg_gen_andi_tl(t0, t0, CC_C);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);

    tcg_gen_discard_tl(cpu_cc_dst);
    tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);

    gen_set_label(label2);
    s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(a0);
}

static void *helper_rotc[8] = {
    helper_rclb,
    helper_rclw,
    helper_rcll,
    X86_64_ONLY(helper_rclq),
    helper_rcrb,
    helper_rcrw,
    helper_rcrl,
    X86_64_ONLY(helper_rcrq),
};

/* XXX: add faster immediate = 1 case */
static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
                           int is_right)
{
    int label1;

    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);

    /* load */
    if (op1 == OR_TMP0)
        gen_op_ld_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_TN_reg(ot, 0, op1);

    tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
                       cpu_T[0], cpu_T[0], cpu_T[1]);
    /* store */
    if (op1 == OR_TMP0)
        gen_op_st_T0_A0(ot + s->mem_index);
    else
        gen_op_mov_reg_T0(ot, op1);

    /* update eflags */
    label1 = gen_new_label();
2141 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_tmp, -1, label1);
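    /* the rcl/rcr helpers leave cpu_cc_tmp at -1 when the flags were
       not touched (zero count); the EFLAGS update below is then
       skipped */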
2142
2143 tcg_gen_mov_tl(cpu_cc_src, cpu_cc_tmp);
2144 tcg_gen_discard_tl(cpu_cc_dst);
2145 tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
2146
2147 gen_set_label(label1);
2148 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2149}
2150
2151/* XXX: add faster immediate case */
2152static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
2153 int is_right)
2154{
2155 int label1, label2, data_bits;
2156 target_ulong mask;
2157 TCGv t0, t1, t2, a0;
2158
2159 t0 = tcg_temp_local_new(TCG_TYPE_TL);
2160 t1 = tcg_temp_local_new(TCG_TYPE_TL);
2161 t2 = tcg_temp_local_new(TCG_TYPE_TL);
2162 a0 = tcg_temp_local_new(TCG_TYPE_TL);
2163
2164 if (ot == OT_QUAD)
2165 mask = 0x3f;
2166 else
2167 mask = 0x1f;
2168
2169 /* load */
2170 if (op1 == OR_TMP0) {
2171 tcg_gen_mov_tl(a0, cpu_A0);
2172 gen_op_ld_v(ot + s->mem_index, t0, a0);
2173 } else {
2174 gen_op_mov_v_reg(ot, t0, op1);
2175 }
2176
2177 tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);
2178
2179 tcg_gen_mov_tl(t1, cpu_T[1]);
2180 tcg_gen_mov_tl(t2, cpu_T3);
2181
2182 /* Must test zero case to avoid using undefined behaviour in TCG
2183 shifts. */
2184 label1 = gen_new_label();
2185 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
2186
2187 tcg_gen_addi_tl(cpu_tmp5, t2, -1);
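    /* shifting by count - 1 leaves the last bit to fall out where the
       lazy-flag code expects it (cpu_tmp4 below becomes CC_SRC). The
       16-bit forms first concatenate both halves into one 32-bit value
       so that, per the Intel behaviour noted below, counts above 16
       keep shifting in source bits. */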
2188 if (ot == OT_WORD) {
2189 /* Note: we implement the Intel behaviour for shift count > 16 */
2190 if (is_right) {
2191 tcg_gen_andi_tl(t0, t0, 0xffff);
2192 tcg_gen_shli_tl(cpu_tmp0, t1, 16);
2193 tcg_gen_or_tl(t0, t0, cpu_tmp0);
2194 tcg_gen_ext32u_tl(t0, t0);
2195
2196 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
2197
2198 /* only needed if count > 16, but a test would complicate the generated code */
2199 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
2200 tcg_gen_shl_tl(cpu_tmp0, t0, cpu_tmp5);
2201
2202 tcg_gen_shr_tl(t0, t0, t2);
2203
2204 tcg_gen_or_tl(t0, t0, cpu_tmp0);
2205 } else {
2206 /* XXX: not optimal */
2207 tcg_gen_andi_tl(t0, t0, 0xffff);
2208 tcg_gen_shli_tl(t1, t1, 16);
2209 tcg_gen_or_tl(t1, t1, t0);
2210 tcg_gen_ext32u_tl(t1, t1);
2211
2212 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
2213 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
2214 tcg_gen_shr_tl(cpu_tmp6, t1, cpu_tmp0);
2215 tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);
2216
2217 tcg_gen_shl_tl(t0, t0, t2);
2218 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
2219 tcg_gen_shr_tl(t1, t1, cpu_tmp5);
2220 tcg_gen_or_tl(t0, t0, t1);
2221 }
2222 } else {
2223 data_bits = 8 << ot;
2224 if (is_right) {
2225 if (ot == OT_LONG)
2226 tcg_gen_ext32u_tl(t0, t0);
2227
2228 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
2229
2230 tcg_gen_shr_tl(t0, t0, t2);
2231 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
2232 tcg_gen_shl_tl(t1, t1, cpu_tmp5);
2233 tcg_gen_or_tl(t0, t0, t1);
2234
2235 } else {
2236 if (ot == OT_LONG)
2237 tcg_gen_ext32u_tl(t1, t1);
2238
2239 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
2240
2241 tcg_gen_shl_tl(t0, t0, t2);
2242 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
2243 tcg_gen_shr_tl(t1, t1, cpu_tmp5);
2244 tcg_gen_or_tl(t0, t0, t1);
2245 }
2246 }
2247 tcg_gen_mov_tl(t1, cpu_tmp4);
2248
2249 gen_set_label(label1);
2250 /* store */
2251 if (op1 == OR_TMP0) {
2252 gen_op_st_v(ot + s->mem_index, t0, a0);
2253 } else {
2254 gen_op_mov_reg_v(ot, op1, t0);
2255 }
2256
2257 /* update eflags */
2258 if (s->cc_op != CC_OP_DYNAMIC)
2259 gen_op_set_cc_op(s->cc_op);
2260
2261 label2 = gen_new_label();
2262 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label2);
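    /* lazy flags: CC_SRC takes the last bit shifted out (saved in
       cpu_tmp4, now in t1), CC_DST the result, and CC_OP_SARB/SHLB + ot
       tells the flag evaluator how to rebuild EFLAGS on demand */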
2263
2264 tcg_gen_mov_tl(cpu_cc_src, t1);
2265 tcg_gen_mov_tl(cpu_cc_dst, t0);
2266 if (is_right) {
2267 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
2268 } else {
2269 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
2270 }
2271 gen_set_label(label2);
2272 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2273
2274 tcg_temp_free(t0);
2275 tcg_temp_free(t1);
2276 tcg_temp_free(t2);
2277 tcg_temp_free(a0);
2278}
2279
2280static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
2281{
2282 if (s != OR_TMP1)
2283 gen_op_mov_TN_reg(ot, 1, s);
2284 switch(op) {
2285 case OP_ROL:
2286 gen_rot_rm_T1(s1, ot, d, 0);
2287 break;
2288 case OP_ROR:
2289 gen_rot_rm_T1(s1, ot, d, 1);
2290 break;
2291 case OP_SHL:
2292 case OP_SHL1:
2293 gen_shift_rm_T1(s1, ot, d, 0, 0);
2294 break;
2295 case OP_SHR:
2296 gen_shift_rm_T1(s1, ot, d, 1, 0);
2297 break;
2298 case OP_SAR:
2299 gen_shift_rm_T1(s1, ot, d, 1, 1);
2300 break;
2301 case OP_RCL:
2302 gen_rotc_rm_T1(s1, ot, d, 0);
2303 break;
2304 case OP_RCR:
2305 gen_rotc_rm_T1(s1, ot, d, 1);
2306 break;
2307 }
2308}
2309
2310static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
2311{
2312 switch(op) {
2313 case OP_SHL:
2314 case OP_SHL1:
2315 gen_shift_rm_im(s1, ot, d, c, 0, 0);
2316 break;
2317 case OP_SHR:
2318 gen_shift_rm_im(s1, ot, d, c, 1, 0);
2319 break;
2320 case OP_SAR:
2321 gen_shift_rm_im(s1, ot, d, c, 1, 1);
2322 break;
2323 default:
2324 /* currently not optimized */
2325 gen_op_movl_T1_im(c);
2326 gen_shift(s1, op, ot, d, OR_TMP1);
2327 break;
2328 }
2329}
2330
2331static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
2332{
2333 target_long disp;
2334 int havesib;
2335 int base;
2336 int index;
2337 int scale;
2338 int opreg;
2339 int mod, rm, code, override, must_add_seg;
2340
2341 override = s->override;
2342 must_add_seg = s->addseg;
2343 if (override >= 0)
2344 must_add_seg = 1;
2345 mod = (modrm >> 6) & 3;
2346 rm = modrm & 7;
2347
2348 if (s->aflag) {
2349
2350 havesib = 0;
2351 base = rm;
2352 index = 0;
2353 scale = 0;
2354
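        /* rm == 4 escapes to a SIB byte: scale[7:6] index[5:3]
           base[2:0], with REX.X extending the index and REX.B (applied
           below) the base. Example: 8B 44 9E 10 = mov eax,
           [esi + ebx*4 + 0x10]: modrm 0x44 (mod=1, rm=4 -> SIB),
           sib 0x9E (scale=2, index=EBX, base=ESI), disp8 0x10. */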
2355 if (base == 4) {
2356 havesib = 1;
2357 code = ldub_code(s->pc++);
2358 scale = (code >> 6) & 3;
2359 index = ((code >> 3) & 7) | REX_X(s);
2360 base = (code & 7);
2361 }
2362 base |= REX_B(s);
2363
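        /* displacement: mod == 0 has none unless (base & 7) == 5, which
           selects disp32 (RIP-relative in 64-bit code when there is no
           SIB byte); mod == 1 is a sign-extended disp8; mod == 2 is a
           disp32 */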
2364 switch (mod) {
2365 case 0:
2366 if ((base & 7) == 5) {
2367 base = -1;
2368 disp = (int32_t)ldl_code(s->pc);
2369 s->pc += 4;
2370 if (CODE64(s) && !havesib) {
2371 disp += s->pc + s->rip_offset;
2372 }
2373 } else {
2374 disp = 0;
2375 }
2376 break;
2377 case 1:
2378 disp = (int8_t)ldub_code(s->pc++);
2379 break;
2380 default:
2381 case 2:
2382 disp = ldl_code(s->pc);
2383 s->pc += 4;
2384 break;
2385 }
2386
2387 if (base >= 0) {
2388 /* for correct popl handling with esp */
2389 if (base == 4 && s->popl_esp_hack)
2390 disp += s->popl_esp_hack;
2391#ifdef TARGET_X86_64
2392 if (s->aflag == 2) {
2393 gen_op_movq_A0_reg(base);
2394 if (disp != 0) {
2395 gen_op_addq_A0_im(disp);
2396 }
2397 } else
2398#endif
2399 {
2400 gen_op_movl_A0_reg(base);
2401 if (disp != 0)
2402 gen_op_addl_A0_im(disp);
2403 }
2404 } else {
2405#ifdef TARGET_X86_64
2406 if (s->aflag == 2) {
2407 gen_op_movq_A0_im(disp);
2408 } else
2409#endif
2410 {
2411 gen_op_movl_A0_im(disp);
2412 }
2413 }
2414 /* XXX: index == 4 is always invalid */
2415 if (havesib && (index != 4 || scale != 0)) {
2416#ifdef TARGET_X86_64
2417 if (s->aflag == 2) {
2418 gen_op_addq_A0_reg_sN(scale, index);
2419 } else
2420#endif
2421 {
2422 gen_op_addl_A0_reg_sN(scale, index);
2423 }
2424 }
2425 if (must_add_seg) {
2426 if (override < 0) {
2427 if (base == R_EBP || base == R_ESP)
2428 override = R_SS;
2429 else
2430 override = R_DS;
2431 }
2432#ifdef TARGET_X86_64
2433 if (s->aflag == 2) {
2434 gen_op_addq_A0_seg(override);
2435 } else
2436#endif
2437 {
2438 gen_op_addl_A0_seg(override);
2439 }
2440 }
2441 } else {
2442 switch (mod) {
2443 case 0:
2444 if (rm == 6) {
2445 disp = lduw_code(s->pc);
2446 s->pc += 2;
2447 gen_op_movl_A0_im(disp);
2448 rm = 0; /* avoid SS override */
2449 goto no_rm;
2450 } else {
2451 disp = 0;
2452 }
2453 break;
2454 case 1:
2455 disp = (int8_t)ldub_code(s->pc++);
2456 break;
2457 default:
2458 case 2:
2459 disp = lduw_code(s->pc);
2460 s->pc += 2;
2461 break;
2462 }
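        /* 16-bit addressing: rm selects one of the classic base/index
           pairs (BX+SI, BX+DI, BP+SI, BP+DI, SI, DI, BP, BX) */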
2463 switch(rm) {
2464 case 0:
2465 gen_op_movl_A0_reg(R_EBX);
2466 gen_op_addl_A0_reg_sN(0, R_ESI);
2467 break;
2468 case 1:
2469 gen_op_movl_A0_reg(R_EBX);
2470 gen_op_addl_A0_reg_sN(0, R_EDI);
2471 break;
2472 case 2:
2473 gen_op_movl_A0_reg(R_EBP);
2474 gen_op_addl_A0_reg_sN(0, R_ESI);
2475 break;
2476 case 3:
2477 gen_op_movl_A0_reg(R_EBP);
2478 gen_op_addl_A0_reg_sN(0, R_EDI);
2479 break;
2480 case 4:
2481 gen_op_movl_A0_reg(R_ESI);
2482 break;
2483 case 5:
2484 gen_op_movl_A0_reg(R_EDI);
2485 break;
2486 case 6:
2487 gen_op_movl_A0_reg(R_EBP);
2488 break;
2489 default:
2490 case 7:
2491 gen_op_movl_A0_reg(R_EBX);
2492 break;
2493 }
2494 if (disp != 0)
2495 gen_op_addl_A0_im(disp);
2496 gen_op_andl_A0_ffff();
2497 no_rm:
2498 if (must_add_seg) {
2499 if (override < 0) {
2500 if (rm == 2 || rm == 3 || rm == 6)
2501 override = R_SS;
2502 else
2503 override = R_DS;
2504 }
2505 gen_op_addl_A0_seg(override);
2506 }
2507 }
2508
2509 opreg = OR_A0;
2510 disp = 0;
2511 *reg_ptr = opreg;
2512 *offset_ptr = disp;
2513}
2514
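/* Consume the bytes of a modrm addressing form without generating any
   code; used for NOP/hint encodings whose memory operand is never
   accessed. */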
2515static void gen_nop_modrm(DisasContext *s, int modrm)
2516{
2517 int mod, rm, base, code;
2518
2519 mod = (modrm >> 6) & 3;
2520 if (mod == 3)
2521 return;
2522 rm = modrm & 7;
2523
2524 if (s->aflag) {
2525
2526 base = rm;
2527
2528 if (base == 4) {
2529 code = ldub_code(s->pc++);
2530 base = (code & 7);
2531 }
2532
2533 switch (mod) {
2534 case 0:
2535 if (base == 5) {
2536 s->pc += 4;
2537 }
2538 break;
2539 case 1:
2540 s->pc++;
2541 break;
2542 default:
2543 case 2:
2544 s->pc += 4;
2545 break;
2546 }
2547 } else {
2548 switch (mod) {
2549 case 0:
2550 if (rm == 6) {
2551 s->pc += 2;
2552 }
2553 break;
2554 case 1:
2555 s->pc++;
2556 break;
2557 default:
2558 case 2:
2559 s->pc += 2;
2560 break;
2561 }
2562 }
2563}
2564
2565/* used for LEA and MOV AX, mem */
2566static void gen_add_A0_ds_seg(DisasContext *s)
2567{
2568 int override, must_add_seg;
2569 must_add_seg = s->addseg;
2570 override = R_DS;
2571 if (s->override >= 0) {
2572 override = s->override;
2573 must_add_seg = 1;
2574 }
2577 if (must_add_seg) {
2578#ifdef TARGET_X86_64
2579 if (CODE64(s)) {
2580 gen_op_addq_A0_seg(override);
2581 } else
2582#endif
2583 {
2584 gen_op_addl_A0_seg(override);
2585 }
2586 }
2587}
2588
2589/* generate modrm memory load or store of 'reg'. TMP0 is used if reg ==
2590 OR_TMP0 */
2591static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
2592{
2593 int mod, rm, opreg, disp;
2594
2595 mod = (modrm >> 6) & 3;
2596 rm = (modrm & 7) | REX_B(s);
2597 if (mod == 3) {
2598 if (is_store) {
2599 if (reg != OR_TMP0)
2600 gen_op_mov_TN_reg(ot, 0, reg);
2601 gen_op_mov_reg_T0(ot, rm);
2602 } else {
2603 gen_op_mov_TN_reg(ot, 0, rm);
2604 if (reg != OR_TMP0)
2605 gen_op_mov_reg_T0(ot, reg);
2606 }
2607 } else {
2608 gen_lea_modrm(s, modrm, &opreg, &disp);
2609 if (is_store) {
2610 if (reg != OR_TMP0)
2611 gen_op_mov_TN_reg(ot, 0, reg);
2612 gen_op_st_T0_A0(ot + s->mem_index);
2613 } else {
2614 gen_op_ld_T0_A0(ot + s->mem_index);
2615 if (reg != OR_TMP0)
2616 gen_op_mov_reg_T0(ot, reg);
2617 }
2618 }
2619}
2620
2621#ifndef VBOX
2622static inline uint32_t insn_get(DisasContext *s, int ot)
2623#else /* VBOX */
2624DECLINLINE(uint32_t) insn_get(DisasContext *s, int ot)
2625#endif /* VBOX */
2626{
2627 uint32_t ret;
2628
2629 switch(ot) {
2630 case OT_BYTE:
2631 ret = ldub_code(s->pc);
2632 s->pc++;
2633 break;
2634 case OT_WORD:
2635 ret = lduw_code(s->pc);
2636 s->pc += 2;
2637 break;
2638 default:
2639 case OT_LONG:
2640 ret = ldl_code(s->pc);
2641 s->pc += 4;
2642 break;
2643 }
2644 return ret;
2645}
2646
2647#ifndef VBOX
2648static inline int insn_const_size(unsigned int ot)
2649#else /* VBOX */
2650DECLINLINE(int) insn_const_size(unsigned int ot)
2651#endif /* VBOX */
2652{
2653 if (ot <= OT_LONG)
2654 return 1 << ot;
2655 else
2656 return 4;
2657}
2658
2659#ifndef VBOX
2660static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2661#else /* VBOX */
2662DECLINLINE(void) gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2663#endif /* VBOX */
2664{
2665 TranslationBlock *tb;
2666 target_ulong pc;
2667
2668 pc = s->cs_base + eip;
2669 tb = s->tb;
2670 /* NOTE: we handle the case where the TB spans two pages here */
2671 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
2672 (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
2673 /* jump to same page: we can use a direct jump */
2674 tcg_gen_goto_tb(tb_num);
2675 gen_jmp_im(eip);
2676 tcg_gen_exit_tb((long)tb + tb_num);
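        /* (long)tb + tb_num hands both the TB pointer and the jump slot
           back to the execution loop, which uses them to patch this
           direct jump once the target TB exists */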
2677 } else {
2678 /* jump to another page: currently not optimized */
2679 gen_jmp_im(eip);
2680 gen_eob(s);
2681 }
2682}
2683
2684#ifndef VBOX
2685static inline void gen_jcc(DisasContext *s, int b,
2686#else /* VBOX */
2687DECLINLINE(void) gen_jcc(DisasContext *s, int b,
2688#endif /* VBOX */
2689 target_ulong val, target_ulong next_eip)
2690{
2691 int l1, l2, cc_op;
2692
2693#ifdef VBOX
2694 gen_check_external_event(s);
2695#endif /* VBOX */
2696 cc_op = s->cc_op;
2697 if (s->cc_op != CC_OP_DYNAMIC) {
2698 gen_op_set_cc_op(s->cc_op);
2699 s->cc_op = CC_OP_DYNAMIC;
2700 }
2701 if (s->jmp_opt) {
2702 l1 = gen_new_label();
2703 gen_jcc1(s, cc_op, b, l1);
2704
2705 gen_goto_tb(s, 0, next_eip);
2706
2707 gen_set_label(l1);
2708 gen_goto_tb(s, 1, val);
2709 s->is_jmp = 3;
2710 } else {
2711
2712 l1 = gen_new_label();
2713 l2 = gen_new_label();
2714 gen_jcc1(s, cc_op, b, l1);
2715
2716 gen_jmp_im(next_eip);
2717 tcg_gen_br(l2);
2718
2719 gen_set_label(l1);
2720 gen_jmp_im(val);
2721 gen_set_label(l2);
2722 gen_eob(s);
2723 }
2724}
2725
2726static void gen_setcc(DisasContext *s, int b)
2727{
2728 int inv, jcc_op, l1;
2729 TCGv t0;
2730
2731 if (is_fast_jcc_case(s, b)) {
2732 /* nominal case: we use a jump */
2733 /* XXX: make it faster by adding new instructions in TCG */
2734 t0 = tcg_temp_local_new(TCG_TYPE_TL);
2735 tcg_gen_movi_tl(t0, 0);
2736 l1 = gen_new_label();
2737 gen_jcc1(s, s->cc_op, b ^ 1, l1);
2738 tcg_gen_movi_tl(t0, 1);
2739 gen_set_label(l1);
2740 tcg_gen_mov_tl(cpu_T[0], t0);
2741 tcg_temp_free(t0);
2742 } else {
2743 /* slow case: it is more efficient not to generate a jump,
2744 although it is questionable whether this optimization is
2745 worthwhile */
2746 inv = b & 1;
2747 jcc_op = (b >> 1) & 7;
2748 gen_setcc_slow_T0(s, jcc_op);
2749 if (inv) {
2750 tcg_gen_xori_tl(cpu_T[0], cpu_T[0], 1);
2751 }
2752 }
2753}
2754
2755#ifndef VBOX
2756static inline void gen_op_movl_T0_seg(int seg_reg)
2757#else /* VBOX */
2758DECLINLINE(void) gen_op_movl_T0_seg(int seg_reg)
2759#endif /* VBOX */
2760{
2761 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
2762 offsetof(CPUX86State,segs[seg_reg].selector));
2763}
2764
2765#ifndef VBOX
2766static inline void gen_op_movl_seg_T0_vm(int seg_reg)
2767#else /* VBOX */
2768DECLINLINE(void) gen_op_movl_seg_T0_vm(int seg_reg)
2769#endif /* VBOX */
2770{
2771 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
2772 tcg_gen_st32_tl(cpu_T[0], cpu_env,
2773 offsetof(CPUX86State,segs[seg_reg].selector));
2774 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
2775 tcg_gen_st_tl(cpu_T[0], cpu_env,
2776 offsetof(CPUX86State,segs[seg_reg].base));
2777#ifdef VBOX
2778 int flags = DESC_P_MASK | DESC_S_MASK | DESC_W_MASK;
2779 if (seg_reg == R_CS)
2780 flags |= DESC_CS_MASK;
2781 gen_op_movl_T0_im(flags);
2782 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[seg_reg].flags));
2783#endif
2784}
2785
2786/* move T0 to seg_reg and determine whether the CPU state may change.
2787 Never call this function with seg_reg == R_CS */
2788static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
2789{
2790 if (s->pe && !s->vm86) {
2791 /* XXX: optimize by finding processor state dynamically */
2792 if (s->cc_op != CC_OP_DYNAMIC)
2793 gen_op_set_cc_op(s->cc_op);
2794 gen_jmp_im(cur_eip);
2795 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2796 tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
2797 /* abort translation because the addseg value may change or
2798 because ss32 may change. For R_SS, translation must always
2799 stop as a special handling must be done to disable hardware
2800 interrupts for the next instruction */
2801 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
2802 s->is_jmp = 3;
2803 } else {
2804 gen_op_movl_seg_T0_vm(seg_reg);
2805 if (seg_reg == R_SS)
2806 s->is_jmp = 3;
2807 }
2808}
2809
2810#ifndef VBOX
2811static inline int svm_is_rep(int prefixes)
2812#else /* VBOX */
2813DECLINLINE(int) svm_is_rep(int prefixes)
2814#endif /* VBOX */
2815{
2816 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2817}
2818
2819#ifndef VBOX
2820static inline void
2821#else /* VBOX */
2822DECLINLINE(void)
2823#endif /* VBOX */
2824gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
2825 uint32_t type, uint64_t param)
2826{
2827 /* SVM not active: nothing to intercept, fast path */
2828 if (likely(!(s->flags & HF_SVMI_MASK)))
2829 return;
2830 if (s->cc_op != CC_OP_DYNAMIC)
2831 gen_op_set_cc_op(s->cc_op);
2832 gen_jmp_im(pc_start - s->cs_base);
2833 tcg_gen_helper_0_2(helper_svm_check_intercept_param,
2834 tcg_const_i32(type), tcg_const_i64(param));
2835}
2836
2837#ifndef VBOX
2838static inline void
2839#else /* VBOX */
2840DECLINLINE(void)
2841#endif
2842gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2843{
2844 gen_svm_check_intercept_param(s, pc_start, type, 0);
2845}
2846
2847#ifndef VBOX
2848static inline void gen_stack_update(DisasContext *s, int addend)
2849#else /* VBOX */
2850DECLINLINE(void) gen_stack_update(DisasContext *s, int addend)
2851#endif /* VBOX */
2852{
2853#ifdef TARGET_X86_64
2854 if (CODE64(s)) {
2855 gen_op_add_reg_im(2, R_ESP, addend);
2856 } else
2857#endif
2858 if (s->ss32) {
2859 gen_op_add_reg_im(1, R_ESP, addend);
2860 } else {
2861 gen_op_add_reg_im(0, R_ESP, addend);
2862 }
2863}
2864
2865/* generate a push. It depends on ss32, addseg and dflag */
2866static void gen_push_T0(DisasContext *s)
2867{
2868#ifdef TARGET_X86_64
2869 if (CODE64(s)) {
2870 gen_op_movq_A0_reg(R_ESP);
2871 if (s->dflag) {
2872 gen_op_addq_A0_im(-8);
2873 gen_op_st_T0_A0(OT_QUAD + s->mem_index);
2874 } else {
2875 gen_op_addq_A0_im(-2);
2876 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2877 }
2878 gen_op_mov_reg_A0(2, R_ESP);
2879 } else
2880#endif
2881 {
2882 gen_op_movl_A0_reg(R_ESP);
2883 if (!s->dflag)
2884 gen_op_addl_A0_im(-2);
2885 else
2886 gen_op_addl_A0_im(-4);
2887 if (s->ss32) {
2888 if (s->addseg) {
2889 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2890 gen_op_addl_A0_seg(R_SS);
2891 }
2892 } else {
2893 gen_op_andl_A0_ffff();
2894 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2895 gen_op_addl_A0_seg(R_SS);
2896 }
2897 gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
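        /* cpu_T[1] holds the new ESP offset prior to segmentation; the
           writeback below uses it, or A0 directly when ss32 && !addseg,
           in which case no segment base was added */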
2898 if (s->ss32 && !s->addseg)
2899 gen_op_mov_reg_A0(1, R_ESP);
2900 else
2901 gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
2902 }
2903}
2904
2905/* generate a push. It depends on ss32, addseg and dflag */
2906/* slower version for T1, only used for call Ev */
2907static void gen_push_T1(DisasContext *s)
2908{
2909#ifdef TARGET_X86_64
2910 if (CODE64(s)) {
2911 gen_op_movq_A0_reg(R_ESP);
2912 if (s->dflag) {
2913 gen_op_addq_A0_im(-8);
2914 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2915 } else {
2916 gen_op_addq_A0_im(-2);
2917 gen_op_st_T1_A0(OT_WORD + s->mem_index);
2918 }
2919 gen_op_mov_reg_A0(2, R_ESP);
2920 } else
2921#endif
2922 {
2923 gen_op_movl_A0_reg(R_ESP);
2924 if (!s->dflag)
2925 gen_op_addl_A0_im(-2);
2926 else
2927 gen_op_addl_A0_im(-4);
2928 if (s->ss32) {
2929 if (s->addseg) {
2930 gen_op_addl_A0_seg(R_SS);
2931 }
2932 } else {
2933 gen_op_andl_A0_ffff();
2934 gen_op_addl_A0_seg(R_SS);
2935 }
2936 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2937
2938 if (s->ss32 && !s->addseg)
2939 gen_op_mov_reg_A0(1, R_ESP);
2940 else
2941 gen_stack_update(s, (-2) << s->dflag);
2942 }
2943}
2944
2945/* a two-step pop is necessary for precise exceptions */
2946static void gen_pop_T0(DisasContext *s)
2947{
2948#ifdef TARGET_X86_64
2949 if (CODE64(s)) {
2950 gen_op_movq_A0_reg(R_ESP);
2951 gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
2952 } else
2953#endif
2954 {
2955 gen_op_movl_A0_reg(R_ESP);
2956 if (s->ss32) {
2957 if (s->addseg)
2958 gen_op_addl_A0_seg(R_SS);
2959 } else {
2960 gen_op_andl_A0_ffff();
2961 gen_op_addl_A0_seg(R_SS);
2962 }
2963 gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
2964 }
2965}
2966
2967static void gen_pop_update(DisasContext *s)
2968{
2969#ifdef TARGET_X86_64
2970 if (CODE64(s) && s->dflag) {
2971 gen_stack_update(s, 8);
2972 } else
2973#endif
2974 {
2975 gen_stack_update(s, 2 << s->dflag);
2976 }
2977}
2978
2979static void gen_stack_A0(DisasContext *s)
2980{
2981 gen_op_movl_A0_reg(R_ESP);
2982 if (!s->ss32)
2983 gen_op_andl_A0_ffff();
2984 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2985 if (s->addseg)
2986 gen_op_addl_A0_seg(R_SS);
2987}
2988
2989/* NOTE: wrap-around in 16-bit mode is not fully handled */
2990static void gen_pusha(DisasContext *s)
2991{
2992 int i;
2993 gen_op_movl_A0_reg(R_ESP);
2994 gen_op_addl_A0_im(-16 << s->dflag);
2995 if (!s->ss32)
2996 gen_op_andl_A0_ffff();
2997 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2998 if (s->addseg)
2999 gen_op_addl_A0_seg(R_SS);
3000 for(i = 0;i < 8; i++) {
3001 gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
3002 gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
3003 gen_op_addl_A0_im(2 << s->dflag);
3004 }
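    /* stores run from the lowest address up (EDI first, EAX last),
       which is equivalent to pushing EAX..EDI in architectural order;
       the new ESP was saved in cpu_T[1] before segmentation */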
3005 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
3006}
3007
3008/* NOTE: wrap-around in 16-bit mode is not fully handled */
3009static void gen_popa(DisasContext *s)
3010{
3011 int i;
3012 gen_op_movl_A0_reg(R_ESP);
3013 if (!s->ss32)
3014 gen_op_andl_A0_ffff();
3015 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3016 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);
3017 if (s->addseg)
3018 gen_op_addl_A0_seg(R_SS);
3019 for(i = 0;i < 8; i++) {
3020 /* ESP is not reloaded */
3021 if (i != 3) {
3022 gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
3023 gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
3024 }
3025 gen_op_addl_A0_im(2 << s->dflag);
3026 }
3027 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
3028}
3029
3030static void gen_enter(DisasContext *s, int esp_addend, int level)
3031{
3032 int ot, opsize;
3033
3034 level &= 0x1f;
3035#ifdef TARGET_X86_64
3036 if (CODE64(s)) {
3037 ot = s->dflag ? OT_QUAD : OT_WORD;
3038 opsize = 1 << ot;
3039
3040 gen_op_movl_A0_reg(R_ESP);
3041 gen_op_addq_A0_im(-opsize);
3042 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3043
3044 /* push bp */
3045 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
3046 gen_op_st_T0_A0(ot + s->mem_index);
3047 if (level) {
3048 /* XXX: must save state */
3049 tcg_gen_helper_0_3(helper_enter64_level,
3050 tcg_const_i32(level),
3051 tcg_const_i32((ot == OT_QUAD)),
3052 cpu_T[1]);
3053 }
3054 gen_op_mov_reg_T1(ot, R_EBP);
3055 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
3056 gen_op_mov_reg_T1(OT_QUAD, R_ESP);
3057 } else
3058#endif
3059 {
3060 ot = s->dflag + OT_WORD;
3061 opsize = 2 << s->dflag;
3062
3063 gen_op_movl_A0_reg(R_ESP);
3064 gen_op_addl_A0_im(-opsize);
3065 if (!s->ss32)
3066 gen_op_andl_A0_ffff();
3067 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3068 if (s->addseg)
3069 gen_op_addl_A0_seg(R_SS);
3070 /* push bp */
3071 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
3072 gen_op_st_T0_A0(ot + s->mem_index);
3073 if (level) {
3074 /* XXX: must save state */
3075 tcg_gen_helper_0_3(helper_enter_level,
3076 tcg_const_i32(level),
3077 tcg_const_i32(s->dflag),
3078 cpu_T[1]);
3079 }
3080 gen_op_mov_reg_T1(ot, R_EBP);
3081 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
3082 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
3083 }
3084}
3085
3086static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
3087{
3088 if (s->cc_op != CC_OP_DYNAMIC)
3089 gen_op_set_cc_op(s->cc_op);
3090 gen_jmp_im(cur_eip);
3091 tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
3092 s->is_jmp = 3;
3093}
3094
3095/* an interrupt is different from an exception because of the
3096 privilege checks */
3097static void gen_interrupt(DisasContext *s, int intno,
3098 target_ulong cur_eip, target_ulong next_eip)
3099{
3100 if (s->cc_op != CC_OP_DYNAMIC)
3101 gen_op_set_cc_op(s->cc_op);
3102 gen_jmp_im(cur_eip);
3103 tcg_gen_helper_0_2(helper_raise_interrupt,
3104 tcg_const_i32(intno),
3105 tcg_const_i32(next_eip - cur_eip));
3106 s->is_jmp = 3;
3107}
3108
3109static void gen_debug(DisasContext *s, target_ulong cur_eip)
3110{
3111 if (s->cc_op != CC_OP_DYNAMIC)
3112 gen_op_set_cc_op(s->cc_op);
3113 gen_jmp_im(cur_eip);
3114 tcg_gen_helper_0_0(helper_debug);
3115 s->is_jmp = 3;
3116}
3117
3118/* generate a generic end of block. Trace exception is also generated
3119 if needed */
3120static void gen_eob(DisasContext *s)
3121{
3122 if (s->cc_op != CC_OP_DYNAMIC)
3123 gen_op_set_cc_op(s->cc_op);
3124 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
3125 tcg_gen_helper_0_0(helper_reset_inhibit_irq);
3126 }
3127 if (s->singlestep_enabled) {
3128 tcg_gen_helper_0_0(helper_debug);
3129 } else if (s->tf) {
3130 tcg_gen_helper_0_0(helper_single_step);
3131 } else {
3132 tcg_gen_exit_tb(0);
3133 }
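    /* 3 == DISAS_TB_JUMP (exec-all.h): tell the decode loop that this
       block has ended */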
3134 s->is_jmp = 3;
3135}
3136
3137/* generate a jump to eip. No segment change may happen before this,
3138 as a direct jump to the next block may occur */
3139static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
3140{
3141 if (s->jmp_opt) {
3142 if (s->cc_op != CC_OP_DYNAMIC) {
3143 gen_op_set_cc_op(s->cc_op);
3144 s->cc_op = CC_OP_DYNAMIC;
3145 }
3146 gen_goto_tb(s, tb_num, eip);
3147 s->is_jmp = 3;
3148 } else {
3149 gen_jmp_im(eip);
3150 gen_eob(s);
3151 }
3152}
3153
3154static void gen_jmp(DisasContext *s, target_ulong eip)
3155{
3156#ifdef VBOX
3157 gen_check_external_event(s);
3158#endif /* VBOX */
3159 gen_jmp_tb(s, eip, 0);
3160}
3161
3162#ifndef VBOX
3163static inline void gen_ldq_env_A0(int idx, int offset)
3164#else /* VBOX */
3165DECLINLINE(void) gen_ldq_env_A0(int idx, int offset)
3166#endif /* VBOX */
3167{
3168 int mem_index = (idx >> 2) - 1;
3169 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
3170 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
3171}
3172
3173#ifndef VBOX
3174static inline void gen_stq_env_A0(int idx, int offset)
3175#else /* VBOX */
3176DECLINLINE(void) gen_stq_env_A0(int idx, int offset)
3177#endif /* VBOX */
3178{
3179 int mem_index = (idx >> 2) - 1;
3180 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
3181 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
3182}
3183
3184#ifndef VBOX
3185static inline void gen_ldo_env_A0(int idx, int offset)
3186#else /* VBOX */
3187DECLINLINE(void) gen_ldo_env_A0(int idx, int offset)
3188#endif /* VBOX */
3189{
3190 int mem_index = (idx >> 2) - 1;
3191 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
3192 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
3193 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
3194 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
3195 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
3196}
3197
3198#ifndef VBOX
3199static inline void gen_sto_env_A0(int idx, int offset)
3200#else /* VBOX */
3201DECLINLINE(void) gen_sto_env_A0(int idx, int offset)
3202#endif /* VBOX */
3203{
3204 int mem_index = (idx >> 2) - 1;
3205 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
3206 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
3207 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
3208 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
3209 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
3210}
3211
3212#ifndef VBOX
3213static inline void gen_op_movo(int d_offset, int s_offset)
3214#else /* VBOX */
3215DECLINLINE(void) gen_op_movo(int d_offset, int s_offset)
3216#endif /* VBOX */
3217{
3218 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
3219 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3220 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
3221 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
3222}
3223
3224#ifndef VBOX
3225static inline void gen_op_movq(int d_offset, int s_offset)
3226#else /* VBOX */
3227DECLINLINE(void) gen_op_movq(int d_offset, int s_offset)
3228#endif /* VBOX */
3229{
3230 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
3231 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3232}
3233
3234#ifndef VBOX
3235static inline void gen_op_movl(int d_offset, int s_offset)
3236#else /* VBOX */
3237DECLINLINE(void) gen_op_movl(int d_offset, int s_offset)
3238#endif /* VBOX */
3239{
3240 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
3241 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
3242}
3243
3244#ifndef VBOX
3245static inline void gen_op_movq_env_0(int d_offset)
3246#else /* VBOX */
3247DECLINLINE(void) gen_op_movq_env_0(int d_offset)
3248#endif /* VBOX */
3249{
3250 tcg_gen_movi_i64(cpu_tmp1_i64, 0);
3251 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3252}
3253
3254#define SSE_SPECIAL ((void *)1)
3255#define SSE_DUMMY ((void *)2)
3256
3257#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
3258#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
3259 helper_ ## x ## ss, helper_ ## x ## sd, }
3260
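/* Indexed by the second opcode byte and by b1, the mandatory-prefix
   column computed in gen_sse(): 0 = no prefix (ps), 1 = 0x66 (pd),
   2 = 0xF3 (ss), 3 = 0xF2 (sd). SSE_SPECIAL entries are decoded by
   hand in gen_sse(); a NULL slot means illegal with that prefix. */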
3261static void *sse_op_table1[256][4] = {
3262 /* 3DNow! extensions */
3263 [0x0e] = { SSE_DUMMY }, /* femms */
3264 [0x0f] = { SSE_DUMMY }, /* pf... */
3265 /* pure SSE operations */
3266 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
3267 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
3268 [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
3269 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
3270 [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
3271 [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
3272 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
3273 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
3274
3275 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
3276 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
3277 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
3278 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
3279 [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
3280 [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
3281 [0x2e] = { helper_ucomiss, helper_ucomisd },
3282 [0x2f] = { helper_comiss, helper_comisd },
3283 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
3284 [0x51] = SSE_FOP(sqrt),
3285 [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
3286 [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
3287 [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
3288 [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
3289 [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
3290 [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
3291 [0x58] = SSE_FOP(add),
3292 [0x59] = SSE_FOP(mul),
3293 [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
3294 helper_cvtss2sd, helper_cvtsd2ss },
3295 [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
3296 [0x5c] = SSE_FOP(sub),
3297 [0x5d] = SSE_FOP(min),
3298 [0x5e] = SSE_FOP(div),
3299 [0x5f] = SSE_FOP(max),
3300
3301 [0xc2] = SSE_FOP(cmpeq),
3302 [0xc6] = { helper_shufps, helper_shufpd },
3303
3304 [0x38] = { SSE_SPECIAL, SSE_SPECIAL, NULL, SSE_SPECIAL }, /* SSSE3/SSE4 */
3305 [0x3a] = { SSE_SPECIAL, SSE_SPECIAL }, /* SSSE3/SSE4 */
3306
3307 /* MMX ops and their SSE extensions */
3308 [0x60] = MMX_OP2(punpcklbw),
3309 [0x61] = MMX_OP2(punpcklwd),
3310 [0x62] = MMX_OP2(punpckldq),
3311 [0x63] = MMX_OP2(packsswb),
3312 [0x64] = MMX_OP2(pcmpgtb),
3313 [0x65] = MMX_OP2(pcmpgtw),
3314 [0x66] = MMX_OP2(pcmpgtl),
3315 [0x67] = MMX_OP2(packuswb),
3316 [0x68] = MMX_OP2(punpckhbw),
3317 [0x69] = MMX_OP2(punpckhwd),
3318 [0x6a] = MMX_OP2(punpckhdq),
3319 [0x6b] = MMX_OP2(packssdw),
3320 [0x6c] = { NULL, helper_punpcklqdq_xmm },
3321 [0x6d] = { NULL, helper_punpckhqdq_xmm },
3322 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
3323 [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
3324 [0x70] = { helper_pshufw_mmx,
3325 helper_pshufd_xmm,
3326 helper_pshufhw_xmm,
3327 helper_pshuflw_xmm },
3328 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
3329 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
3330 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
3331 [0x74] = MMX_OP2(pcmpeqb),
3332 [0x75] = MMX_OP2(pcmpeqw),
3333 [0x76] = MMX_OP2(pcmpeql),
3334 [0x77] = { SSE_DUMMY }, /* emms */
3335 [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
3336 [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
3337 [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, movq */
3338 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
3339 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
3340 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
3341 [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
3342 [0xd1] = MMX_OP2(psrlw),
3343 [0xd2] = MMX_OP2(psrld),
3344 [0xd3] = MMX_OP2(psrlq),
3345 [0xd4] = MMX_OP2(paddq),
3346 [0xd5] = MMX_OP2(pmullw),
3347 [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
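    /* 0xd6: movq (66), movq2dq (F3), movdq2q (F2), decoded as specials */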
3348 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
3349 [0xd8] = MMX_OP2(psubusb),
3350 [0xd9] = MMX_OP2(psubusw),
3351 [0xda] = MMX_OP2(pminub),
3352 [0xdb] = MMX_OP2(pand),
3353 [0xdc] = MMX_OP2(paddusb),
3354 [0xdd] = MMX_OP2(paddusw),
3355 [0xde] = MMX_OP2(pmaxub),
3356 [0xdf] = MMX_OP2(pandn),
3357 [0xe0] = MMX_OP2(pavgb),
3358 [0xe1] = MMX_OP2(psraw),
3359 [0xe2] = MMX_OP2(psrad),
3360 [0xe3] = MMX_OP2(pavgw),
3361 [0xe4] = MMX_OP2(pmulhuw),
3362 [0xe5] = MMX_OP2(pmulhw),
3363 [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
3364 [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntdq */
3365 [0xe8] = MMX_OP2(psubsb),
3366 [0xe9] = MMX_OP2(psubsw),
3367 [0xea] = MMX_OP2(pminsw),
3368 [0xeb] = MMX_OP2(por),
3369 [0xec] = MMX_OP2(paddsb),
3370 [0xed] = MMX_OP2(paddsw),
3371 [0xee] = MMX_OP2(pmaxsw),
3372 [0xef] = MMX_OP2(pxor),
3373 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
3374 [0xf1] = MMX_OP2(psllw),
3375 [0xf2] = MMX_OP2(pslld),
3376 [0xf3] = MMX_OP2(psllq),
3377 [0xf4] = MMX_OP2(pmuludq),
3378 [0xf5] = MMX_OP2(pmaddwd),
3379 [0xf6] = MMX_OP2(psadbw),
3380 [0xf7] = MMX_OP2(maskmov),
3381 [0xf8] = MMX_OP2(psubb),
3382 [0xf9] = MMX_OP2(psubw),
3383 [0xfa] = MMX_OP2(psubl),
3384 [0xfb] = MMX_OP2(psubq),
3385 [0xfc] = MMX_OP2(paddb),
3386 [0xfd] = MMX_OP2(paddw),
3387 [0xfe] = MMX_OP2(paddl),
3388};
3389
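/* Shift-by-immediate groups 0x71/0x72/0x73, indexed below as
   ((b - 1) & 3) * 8 + reg: a row per operand width (w/d/q) and a slot
   per modrm reg field (2 = srl, 4 = sra, 6 = sll; the xmm-only byte
   shifts psrldq/pslldq sit at 3 and 7 of the q row). */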
3390static void *sse_op_table2[3 * 8][2] = {
3391 [0 + 2] = MMX_OP2(psrlw),
3392 [0 + 4] = MMX_OP2(psraw),
3393 [0 + 6] = MMX_OP2(psllw),
3394 [8 + 2] = MMX_OP2(psrld),
3395 [8 + 4] = MMX_OP2(psrad),
3396 [8 + 6] = MMX_OP2(pslld),
3397 [16 + 2] = MMX_OP2(psrlq),
3398 [16 + 3] = { NULL, helper_psrldq_xmm },
3399 [16 + 6] = MMX_OP2(psllq),
3400 [16 + 7] = { NULL, helper_pslldq_xmm },
3401};
3402
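/* Scalar int<->float converters in three groups of four (cvtsi2ss/sd,
   cvttss/sd2si, cvtss/sd2si); gen_sse() adds (dflag == 2) * 2 so the
   64-bit X86_64_ONLY variants are selected in long mode. */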
3403static void *sse_op_table3[4 * 3] = {
3404 helper_cvtsi2ss,
3405 helper_cvtsi2sd,
3406 X86_64_ONLY(helper_cvtsq2ss),
3407 X86_64_ONLY(helper_cvtsq2sd),
3408
3409 helper_cvttss2si,
3410 helper_cvttsd2si,
3411 X86_64_ONLY(helper_cvttss2sq),
3412 X86_64_ONLY(helper_cvttsd2sq),
3413
3414 helper_cvtss2si,
3415 helper_cvtsd2si,
3416 X86_64_ONLY(helper_cvtss2sq),
3417 X86_64_ONLY(helper_cvtsd2sq),
3418};
3419
3420static void *sse_op_table4[8][4] = {
3421 SSE_FOP(cmpeq),
3422 SSE_FOP(cmplt),
3423 SSE_FOP(cmple),
3424 SSE_FOP(cmpunord),
3425 SSE_FOP(cmpneq),
3426 SSE_FOP(cmpnlt),
3427 SSE_FOP(cmpnle),
3428 SSE_FOP(cmpord),
3429};
3430
3431static void *sse_op_table5[256] = {
3432 [0x0c] = helper_pi2fw,
3433 [0x0d] = helper_pi2fd,
3434 [0x1c] = helper_pf2iw,
3435 [0x1d] = helper_pf2id,
3436 [0x8a] = helper_pfnacc,
3437 [0x8e] = helper_pfpnacc,
3438 [0x90] = helper_pfcmpge,
3439 [0x94] = helper_pfmin,
3440 [0x96] = helper_pfrcp,
3441 [0x97] = helper_pfrsqrt,
3442 [0x9a] = helper_pfsub,
3443 [0x9e] = helper_pfadd,
3444 [0xa0] = helper_pfcmpgt,
3445 [0xa4] = helper_pfmax,
3446 [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
3447 [0xa7] = helper_movq, /* pfrsqit1 */
3448 [0xaa] = helper_pfsubr,
3449 [0xae] = helper_pfacc,
3450 [0xb0] = helper_pfcmpeq,
3451 [0xb4] = helper_pfmul,
3452 [0xb6] = helper_movq, /* pfrcpit2 */
3453 [0xb7] = helper_pmulhrw_mmx,
3454 [0xbb] = helper_pswapd,
3455 [0xbf] = helper_pavgb_mmx /* pavgusb */
3456};
3457
3458struct sse_op_helper_s {
3459 void *op[2]; uint32_t ext_mask;
3460};
3461#define SSSE3_OP(x) { MMX_OP2(x), CPUID_EXT_SSSE3 }
3462#define SSE41_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE41 }
3463#define SSE42_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE42 }
3464#define SSE41_SPECIAL { { NULL, SSE_SPECIAL }, CPUID_EXT_SSE41 }
3465static struct sse_op_helper_s sse_op_table6[256] = {
3466 [0x00] = SSSE3_OP(pshufb),
3467 [0x01] = SSSE3_OP(phaddw),
3468 [0x02] = SSSE3_OP(phaddd),
3469 [0x03] = SSSE3_OP(phaddsw),
3470 [0x04] = SSSE3_OP(pmaddubsw),
3471 [0x05] = SSSE3_OP(phsubw),
3472 [0x06] = SSSE3_OP(phsubd),
3473 [0x07] = SSSE3_OP(phsubsw),
3474 [0x08] = SSSE3_OP(psignb),
3475 [0x09] = SSSE3_OP(psignw),
3476 [0x0a] = SSSE3_OP(psignd),
3477 [0x0b] = SSSE3_OP(pmulhrsw),
3478 [0x10] = SSE41_OP(pblendvb),
3479 [0x14] = SSE41_OP(blendvps),
3480 [0x15] = SSE41_OP(blendvpd),
3481 [0x17] = SSE41_OP(ptest),
3482 [0x1c] = SSSE3_OP(pabsb),
3483 [0x1d] = SSSE3_OP(pabsw),
3484 [0x1e] = SSSE3_OP(pabsd),
3485 [0x20] = SSE41_OP(pmovsxbw),
3486 [0x21] = SSE41_OP(pmovsxbd),
3487 [0x22] = SSE41_OP(pmovsxbq),
3488 [0x23] = SSE41_OP(pmovsxwd),
3489 [0x24] = SSE41_OP(pmovsxwq),
3490 [0x25] = SSE41_OP(pmovsxdq),
3491 [0x28] = SSE41_OP(pmuldq),
3492 [0x29] = SSE41_OP(pcmpeqq),
3493 [0x2a] = SSE41_SPECIAL, /* movntdqa */
3494 [0x2b] = SSE41_OP(packusdw),
3495 [0x30] = SSE41_OP(pmovzxbw),
3496 [0x31] = SSE41_OP(pmovzxbd),
3497 [0x32] = SSE41_OP(pmovzxbq),
3498 [0x33] = SSE41_OP(pmovzxwd),
3499 [0x34] = SSE41_OP(pmovzxwq),
3500 [0x35] = SSE41_OP(pmovzxdq),
3501 [0x37] = SSE42_OP(pcmpgtq),
3502 [0x38] = SSE41_OP(pminsb),
3503 [0x39] = SSE41_OP(pminsd),
3504 [0x3a] = SSE41_OP(pminuw),
3505 [0x3b] = SSE41_OP(pminud),
3506 [0x3c] = SSE41_OP(pmaxsb),
3507 [0x3d] = SSE41_OP(pmaxsd),
3508 [0x3e] = SSE41_OP(pmaxuw),
3509 [0x3f] = SSE41_OP(pmaxud),
3510 [0x40] = SSE41_OP(pmulld),
3511 [0x41] = SSE41_OP(phminposuw),
3512};
3513
3514static struct sse_op_helper_s sse_op_table7[256] = {
3515 [0x08] = SSE41_OP(roundps),
3516 [0x09] = SSE41_OP(roundpd),
3517 [0x0a] = SSE41_OP(roundss),
3518 [0x0b] = SSE41_OP(roundsd),
3519 [0x0c] = SSE41_OP(blendps),
3520 [0x0d] = SSE41_OP(blendpd),
3521 [0x0e] = SSE41_OP(pblendw),
3522 [0x0f] = SSSE3_OP(palignr),
3523 [0x14] = SSE41_SPECIAL, /* pextrb */
3524 [0x15] = SSE41_SPECIAL, /* pextrw */
3525 [0x16] = SSE41_SPECIAL, /* pextrd/pextrq */
3526 [0x17] = SSE41_SPECIAL, /* extractps */
3527 [0x20] = SSE41_SPECIAL, /* pinsrb */
3528 [0x21] = SSE41_SPECIAL, /* insertps */
3529 [0x22] = SSE41_SPECIAL, /* pinsrd/pinsrq */
3530 [0x40] = SSE41_OP(dpps),
3531 [0x41] = SSE41_OP(dppd),
3532 [0x42] = SSE41_OP(mpsadbw),
3533 [0x60] = SSE42_OP(pcmpestrm),
3534 [0x61] = SSE42_OP(pcmpestri),
3535 [0x62] = SSE42_OP(pcmpistrm),
3536 [0x63] = SSE42_OP(pcmpistri),
3537};
3538
3539static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
3540{
3541 int b1, op1_offset, op2_offset, is_xmm, val, ot;
3542 int modrm, mod, rm, reg, reg_addr, offset_addr;
3543 void *sse_op2;
3544
3545 b &= 0xff;
3546 if (s->prefix & PREFIX_DATA)
3547 b1 = 1;
3548 else if (s->prefix & PREFIX_REPZ)
3549 b1 = 2;
3550 else if (s->prefix & PREFIX_REPNZ)
3551 b1 = 3;
3552 else
3553 b1 = 0;
3554 sse_op2 = sse_op_table1[b][b1];
3555 if (!sse_op2)
3556 goto illegal_op;
3557 if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
3558 is_xmm = 1;
3559 } else {
3560 if (b1 == 0) {
3561 /* MMX case */
3562 is_xmm = 0;
3563 } else {
3564 is_xmm = 1;
3565 }
3566 }
3567 /* simple MMX/SSE operation */
3568 if (s->flags & HF_TS_MASK) {
3569 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
3570 return;
3571 }
3572 if (s->flags & HF_EM_MASK) {
3573 illegal_op:
3574 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
3575 return;
3576 }
3577 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
3578 if ((b != 0x38 && b != 0x3a) || (s->prefix & PREFIX_DATA))
3579 goto illegal_op;
3580 if (b == 0x0e) {
3581 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3582 goto illegal_op;
3583 /* femms */
3584 tcg_gen_helper_0_0(helper_emms);
3585 return;
3586 }
3587 if (b == 0x77) {
3588 /* emms */
3589 tcg_gen_helper_0_0(helper_emms);
3590 return;
3591 }
3592 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
3593 the static cpu state) */
3594 if (!is_xmm) {
3595 tcg_gen_helper_0_0(helper_enter_mmx);
3596 }
3597
3598 modrm = ldub_code(s->pc++);
3599 reg = ((modrm >> 3) & 7);
3600 if (is_xmm)
3601 reg |= rex_r;
3602 mod = (modrm >> 6) & 3;
3603 if (sse_op2 == SSE_SPECIAL) {
3604 b |= (b1 << 8);
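        /* fold the prefix column into bits 8..9 of b so the cases below
           can match prefixed forms directly, e.g. 0x16f = 66 0F 6F
           (movdqa) */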
3605 switch(b) {
3606 case 0x0e7: /* movntq */
3607 if (mod == 3)
3608 goto illegal_op;
3609 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3610 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3611 break;
3612 case 0x1e7: /* movntdq */
3613 case 0x02b: /* movntps */
3614 case 0x12b: /* movntpd */
3615 case 0x3f0: /* lddqu */
3616 if (mod == 3)
3617 goto illegal_op;
3618 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3619 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3620 break;
3621 case 0x6e: /* movd mm, ea */
3622#ifdef TARGET_X86_64
3623 if (s->dflag == 2) {
3624 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3625 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
3626 } else
3627#endif
3628 {
3629 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3630 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3631 offsetof(CPUX86State,fpregs[reg].mmx));
3632 tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
3633 }
3634 break;
3635 case 0x16e: /* movd xmm, ea */
3636#ifdef TARGET_X86_64
3637 if (s->dflag == 2) {
3638 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3639 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3640 offsetof(CPUX86State,xmm_regs[reg]));
3641 tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
3642 } else
3643#endif
3644 {
3645 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3646 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3647 offsetof(CPUX86State,xmm_regs[reg]));
3648 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3649 tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
3650 }
3651 break;
3652 case 0x6f: /* movq mm, ea */
3653 if (mod != 3) {
3654 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3655 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3656 } else {
3657 rm = (modrm & 7);
3658 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
3659 offsetof(CPUX86State,fpregs[rm].mmx));
3660 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
3661 offsetof(CPUX86State,fpregs[reg].mmx));
3662 }
3663 break;
3664 case 0x010: /* movups */
3665 case 0x110: /* movupd */
3666 case 0x028: /* movaps */
3667 case 0x128: /* movapd */
3668 case 0x16f: /* movdqa xmm, ea */
3669 case 0x26f: /* movdqu xmm, ea */
3670 if (mod != 3) {
3671 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3672 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3673 } else {
3674 rm = (modrm & 7) | REX_B(s);
3675 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
3676 offsetof(CPUX86State,xmm_regs[rm]));
3677 }
3678 break;
3679 case 0x210: /* movss xmm, ea */
3680 if (mod != 3) {
3681 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3682 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3683 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3684 gen_op_movl_T0_0();
3685 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3686 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3687 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3688 } else {
3689 rm = (modrm & 7) | REX_B(s);
3690 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3691 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3692 }
3693 break;
3694 case 0x310: /* movsd xmm, ea */
3695 if (mod != 3) {
3696 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3697 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3698 gen_op_movl_T0_0();
3699 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3700 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3701 } else {
3702 rm = (modrm & 7) | REX_B(s);
3703 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3704 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3705 }
3706 break;
3707 case 0x012: /* movlps */
3708 case 0x112: /* movlpd */
3709 if (mod != 3) {
3710 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3711 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3712 } else {
3713 /* movhlps */
3714 rm = (modrm & 7) | REX_B(s);
3715 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3716 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3717 }
3718 break;
3719 case 0x212: /* movsldup */
3720 if (mod != 3) {
3721 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3722 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3723 } else {
3724 rm = (modrm & 7) | REX_B(s);
3725 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3726 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3727 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3728 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
3729 }
3730 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3731 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3732 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3733 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3734 break;
3735 case 0x312: /* movddup */
3736 if (mod != 3) {
3737 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3738 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3739 } else {
3740 rm = (modrm & 7) | REX_B(s);
3741 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3742 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3743 }
3744 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3745 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3746 break;
3747 case 0x016: /* movhps */
3748 case 0x116: /* movhpd */
3749 if (mod != 3) {
3750 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3751 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3752 } else {
3753 /* movlhps */
3754 rm = (modrm & 7) | REX_B(s);
3755 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3756 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3757 }
3758 break;
3759 case 0x216: /* movshdup */
3760 if (mod != 3) {
3761 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3762 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3763 } else {
3764 rm = (modrm & 7) | REX_B(s);
3765 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3766 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
3767 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3768 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
3769 }
3770 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3771 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3772 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3773 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3774 break;
3775 case 0x7e: /* movd ea, mm */
3776#ifdef TARGET_X86_64
3777 if (s->dflag == 2) {
3778 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3779 offsetof(CPUX86State,fpregs[reg].mmx));
3780 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3781 } else
3782#endif
3783 {
3784 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3785 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
3786 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3787 }
3788 break;
3789 case 0x17e: /* movd ea, xmm */
3790#ifdef TARGET_X86_64
3791 if (s->dflag == 2) {
3792 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3793 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3794 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3795 } else
3796#endif
3797 {
3798 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3799 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3800 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3801 }
3802 break;
3803 case 0x27e: /* movq xmm, ea */
3804 if (mod != 3) {
3805 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3806 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3807 } else {
3808 rm = (modrm & 7) | REX_B(s);
3809 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3810 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3811 }
3812 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3813 break;
3814 case 0x7f: /* movq ea, mm */
3815 if (mod != 3) {
3816 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3817 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3818 } else {
3819 rm = (modrm & 7);
3820 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3821 offsetof(CPUX86State,fpregs[reg].mmx));
3822 }
3823 break;
3824 case 0x011: /* movups */
3825 case 0x111: /* movupd */
3826 case 0x029: /* movaps */
3827 case 0x129: /* movapd */
3828 case 0x17f: /* movdqa ea, xmm */
3829 case 0x27f: /* movdqu ea, xmm */
3830 if (mod != 3) {
3831 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3832 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3833 } else {
3834 rm = (modrm & 7) | REX_B(s);
3835 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
3836 offsetof(CPUX86State,xmm_regs[reg]));
3837 }
3838 break;
3839 case 0x211: /* movss ea, xmm */
3840 if (mod != 3) {
3841 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3842 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3843 gen_op_st_T0_A0(OT_LONG + s->mem_index);
3844 } else {
3845 rm = (modrm & 7) | REX_B(s);
3846 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
3847 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3848 }
3849 break;
3850 case 0x311: /* movsd ea, xmm */
3851 if (mod != 3) {
3852 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3853 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3854 } else {
3855 rm = (modrm & 7) | REX_B(s);
3856 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3857 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3858 }
3859 break;
3860 case 0x013: /* movlps */
3861 case 0x113: /* movlpd */
3862 if (mod != 3) {
3863 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3864 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3865 } else {
3866 goto illegal_op;
3867 }
3868 break;
3869 case 0x017: /* movhps */
3870 case 0x117: /* movhpd */
3871 if (mod != 3) {
3872 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3873 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3874 } else {
3875 goto illegal_op;
3876 }
3877 break;
3878 case 0x71: /* shift mm, im */
3879 case 0x72:
3880 case 0x73:
3881 case 0x171: /* shift xmm, im */
3882 case 0x172:
3883 case 0x173:
3884 val = ldub_code(s->pc++);
3885 if (is_xmm) {
3886 gen_op_movl_T0_im(val);
3887 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3888 gen_op_movl_T0_0();
3889 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
3890 op1_offset = offsetof(CPUX86State,xmm_t0);
3891 } else {
3892 gen_op_movl_T0_im(val);
3893 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
3894 gen_op_movl_T0_0();
3895 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
3896 op1_offset = offsetof(CPUX86State,mmx_t0);
3897 }
3898 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
3899 if (!sse_op2)
3900 goto illegal_op;
3901 if (is_xmm) {
3902 rm = (modrm & 7) | REX_B(s);
3903 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3904 } else {
3905 rm = (modrm & 7);
3906 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3907 }
3908 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3909 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3910 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3911 break;
3912 case 0x050: /* movmskps */
3913 rm = (modrm & 7) | REX_B(s);
3914 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3915 offsetof(CPUX86State,xmm_regs[rm]));
3916 tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
3917 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3918 gen_op_mov_reg_T0(OT_LONG, reg);
3919 break;
3920 case 0x150: /* movmskpd */
3921 rm = (modrm & 7) | REX_B(s);
3922 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3923 offsetof(CPUX86State,xmm_regs[rm]));
3924 tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
3925 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3926 gen_op_mov_reg_T0(OT_LONG, reg);
3927 break;
3928 case 0x02a: /* cvtpi2ps */
3929 case 0x12a: /* cvtpi2pd */
3930 tcg_gen_helper_0_0(helper_enter_mmx);
3931 if (mod != 3) {
3932 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3933 op2_offset = offsetof(CPUX86State,mmx_t0);
3934 gen_ldq_env_A0(s->mem_index, op2_offset);
3935 } else {
3936 rm = (modrm & 7);
3937 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3938 }
3939 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3940 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3941 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3942 switch(b >> 8) {
3943 case 0x0:
3944 tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
3945 break;
3946 default:
3947 case 0x1:
3948 tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
3949 break;
3950 }
3951 break;
3952 case 0x22a: /* cvtsi2ss */
3953 case 0x32a: /* cvtsi2sd */
3954 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3955 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3956 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3957 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3958 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3959 if (ot == OT_LONG) {
3960 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3961 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
3962 } else {
3963 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_T[0]);
3964 }
3965 break;
3966 case 0x02c: /* cvttps2pi */
3967 case 0x12c: /* cvttpd2pi */
3968 case 0x02d: /* cvtps2pi */
3969 case 0x12d: /* cvtpd2pi */
3970 tcg_gen_helper_0_0(helper_enter_mmx);
3971 if (mod != 3) {
3972 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3973 op2_offset = offsetof(CPUX86State,xmm_t0);
3974 gen_ldo_env_A0(s->mem_index, op2_offset);
3975 } else {
3976 rm = (modrm & 7) | REX_B(s);
3977 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3978 }
3979 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
3980 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3981 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3982 switch(b) {
3983 case 0x02c:
3984 tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
3985 break;
3986 case 0x12c:
3987 tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
3988 break;
3989 case 0x02d:
3990 tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
3991 break;
3992 case 0x12d:
3993 tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
3994 break;
3995 }
3996 break;
3997 case 0x22c: /* cvttss2si */
3998 case 0x32c: /* cvttsd2si */
3999 case 0x22d: /* cvtss2si */
4000 case 0x32d: /* cvtsd2si */
4001 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4002 if (mod != 3) {
4003 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4004 if ((b >> 8) & 1) {
4005 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
4006 } else {
4007 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4008 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
4009 }
4010 op2_offset = offsetof(CPUX86State,xmm_t0);
4011 } else {
4012 rm = (modrm & 7) | REX_B(s);
4013 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
4014 }
4015 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
4016 (b & 1) * 4];
4017 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
4018 if (ot == OT_LONG) {
4019 tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
4020 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4021 } else {
4022 tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
4023 }
4024 gen_op_mov_reg_T0(ot, reg);
4025 break;
4026 case 0xc4: /* pinsrw */
4027 case 0x1c4:
4028 s->rip_offset = 1;
4029 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4030 val = ldub_code(s->pc++);
4031 if (b1) {
4032 val &= 7;
4033 tcg_gen_st16_tl(cpu_T[0], cpu_env,
4034 offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
4035 } else {
4036 val &= 3;
4037 tcg_gen_st16_tl(cpu_T[0], cpu_env,
4038 offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
4039 }
4040 break;
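/* e.g. '66 0f c4 c1 03' (pinsrw xmm0, ecx, 3): the 0x66 prefix sets
   b1, so the low word of ECX lands in word 3 of xmm0; without the
   prefix the same opcode targets an mmx register (imm masked to 3). */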
4041 case 0xc5: /* pextrw */
4042 case 0x1c5:
4043 if (mod != 3)
4044 goto illegal_op;
4045 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4046 val = ldub_code(s->pc++);
4047 if (b1) {
4048 val &= 7;
4049 rm = (modrm & 7) | REX_B(s);
4050 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
4051 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
4052 } else {
4053 val &= 3;
4054 rm = (modrm & 7);
4055 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
4056 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
4057 }
4058 reg = ((modrm >> 3) & 7) | rex_r;
4059 gen_op_mov_reg_T0(ot, reg);
4060 break;
4061 case 0x1d6: /* movq ea, xmm */
4062 if (mod != 3) {
4063 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4064 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
4065 } else {
4066 rm = (modrm & 7) | REX_B(s);
4067 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
4068 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
4069 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
4070 }
4071 break;
4072 case 0x2d6: /* movq2dq */
4073 tcg_gen_helper_0_0(helper_enter_mmx);
4074 rm = (modrm & 7);
4075 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
4076 offsetof(CPUX86State,fpregs[rm].mmx));
4077 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
4078 break;
4079 case 0x3d6: /* movdq2q */
4080 tcg_gen_helper_0_0(helper_enter_mmx);
4081 rm = (modrm & 7) | REX_B(s);
4082 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
4083 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
4084 break;
4085 case 0xd7: /* pmovmskb */
4086 case 0x1d7:
4087 if (mod != 3)
4088 goto illegal_op;
4089 if (b1) {
4090 rm = (modrm & 7) | REX_B(s);
4091 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
4092 tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
4093 } else {
4094 rm = (modrm & 7);
4095 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
4096 tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
4097 }
4098 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4099 reg = ((modrm >> 3) & 7) | rex_r;
4100 gen_op_mov_reg_T0(OT_LONG, reg);
4101 break;
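/* pmovmskb gathers the sign bit of each byte of the source into the
   low bits of the destination (a 16-bit mask for the xmm form); the
   helper result is zero-extended into the GP register. */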
4102 case 0x138:
4103 if (s->prefix & PREFIX_REPNZ)
4104 goto crc32;
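/* fall through */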
4105 case 0x038:
4106 b = modrm;
4107 modrm = ldub_code(s->pc++);
4108 rm = modrm & 7;
4109 reg = ((modrm >> 3) & 7) | rex_r;
4110 mod = (modrm >> 6) & 3;
4111
4112 sse_op2 = sse_op_table6[b].op[b1];
4113 if (!sse_op2)
4114 goto illegal_op;
4115 if (!(s->cpuid_ext_features & sse_op_table6[b].ext_mask))
4116 goto illegal_op;
4117
4118 if (b1) {
4119 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4120 if (mod == 3) {
4121 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
4122 } else {
4123 op2_offset = offsetof(CPUX86State,xmm_t0);
4124 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4125 switch (b) {
4126 case 0x20: case 0x30: /* pmovsxbw, pmovzxbw */
4127 case 0x23: case 0x33: /* pmovsxwd, pmovzxwd */
4128 case 0x25: case 0x35: /* pmovsxdq, pmovzxdq */
4129 gen_ldq_env_A0(s->mem_index, op2_offset +
4130 offsetof(XMMReg, XMM_Q(0)));
4131 break;
4132 case 0x21: case 0x31: /* pmovsxbd, pmovzxbd */
4133 case 0x24: case 0x34: /* pmovsxwq, pmovzxwq */
4134 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
4135 (s->mem_index >> 2) - 1);
4136 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, op2_offset +
4137 offsetof(XMMReg, XMM_L(0)));
4138 break;
4139 case 0x22: case 0x32: /* pmovsxbq, pmovzxbq */
4140 tcg_gen_qemu_ld16u(cpu_tmp0, cpu_A0,
4141 (s->mem_index >> 2) - 1);
4142 tcg_gen_st16_tl(cpu_tmp0, cpu_env, op2_offset +
4143 offsetof(XMMReg, XMM_W(0)));
4144 break;
4145 case 0x2a: /* movntdqa */
4146 gen_ldo_env_A0(s->mem_index, op1_offset);
4147 return;
4148 default:
4149 gen_ldo_env_A0(s->mem_index, op2_offset);
4150 }
4151 }
4152 } else {
4153 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4154 if (mod == 3) {
4155 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4156 } else {
4157 op2_offset = offsetof(CPUX86State,mmx_t0);
4158 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4159 gen_ldq_env_A0(s->mem_index, op2_offset);
4160 }
4161 }
4162 if (sse_op2 == SSE_SPECIAL)
4163 goto illegal_op;
4164
4165 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4166 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4167 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4168
4169 if (b == 0x17)
4170 s->cc_op = CC_OP_EFLAGS;
4171 break;
4172 case 0x338: /* crc32 */
4173 crc32:
4174 b = modrm;
4175 modrm = ldub_code(s->pc++);
4176 reg = ((modrm >> 3) & 7) | rex_r;
4177
4178 if (b != 0xf0 && b != 0xf1)
4179 goto illegal_op;
4180 if (!(s->cpuid_ext_features & CPUID_EXT_SSE42))
4181 goto illegal_op;
4182
4183 if (b == 0xf0)
4184 ot = OT_BYTE;
4185 else if (b == 0xf1 && s->dflag != 2) {
4186 if (s->prefix & PREFIX_DATA)
4187 ot = OT_WORD;
4188 else
4189 ot = OT_LONG;
4190 } else
4191 ot = OT_QUAD;
4192
4193 gen_op_mov_TN_reg(OT_LONG, 0, reg);
4194 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4195 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4196 tcg_gen_helper_1_3(helper_crc32, cpu_T[0], cpu_tmp2_i32,
4197 cpu_T[0], tcg_const_i32(8 << ot));
4198
4199 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4200 gen_op_mov_reg_T0(ot, reg);
4201 break;
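/* e.g. 'f2 0f 38 f1 c8' (crc32 ecx, eax): ot resolves to OT_LONG
   here, so helper_crc32 folds all 32 bits of EAX into the CRC
   accumulated in ECX. */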
4202 case 0x03a:
4203 case 0x13a:
4204 b = modrm;
4205 modrm = ldub_code(s->pc++);
4206 rm = modrm & 7;
4207 reg = ((modrm >> 3) & 7) | rex_r;
4208 mod = (modrm >> 6) & 3;
4209
4210 sse_op2 = sse_op_table7[b].op[b1];
4211 if (!sse_op2)
4212 goto illegal_op;
4213 if (!(s->cpuid_ext_features & sse_op_table7[b].ext_mask))
4214 goto illegal_op;
4215
4216 if (sse_op2 == SSE_SPECIAL) {
4217 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4218 rm = (modrm & 7) | REX_B(s);
4219 if (mod != 3)
4220 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4221 reg = ((modrm >> 3) & 7) | rex_r;
4222 val = ldub_code(s->pc++);
4223 switch (b) {
4224 case 0x14: /* pextrb */
4225 tcg_gen_ld8u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4226 xmm_regs[reg].XMM_B(val & 15)));
4227 if (mod == 3)
4228 gen_op_mov_reg_T0(ot, rm);
4229 else
4230 tcg_gen_qemu_st8(cpu_T[0], cpu_A0,
4231 (s->mem_index >> 2) - 1);
4232 break;
4233 case 0x15: /* pextrw */
4234 tcg_gen_ld16u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4235 xmm_regs[reg].XMM_W(val & 7)));
4236 if (mod == 3)
4237 gen_op_mov_reg_T0(ot, rm);
4238 else
4239 tcg_gen_qemu_st16(cpu_T[0], cpu_A0,
4240 (s->mem_index >> 2) - 1);
4241 break;
4242 case 0x16:
4243 if (ot == OT_LONG) { /* pextrd */
4244 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
4245 offsetof(CPUX86State,
4246 xmm_regs[reg].XMM_L(val & 3)));
4247 if (mod == 3)
4248 gen_op_mov_reg_v(ot, rm, cpu_tmp2_i32);
4249 else
4250 tcg_gen_qemu_st32(cpu_tmp2_i32, cpu_A0,
4251 (s->mem_index >> 2) - 1);
4252 } else { /* pextrq */
4253 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
4254 offsetof(CPUX86State,
4255 xmm_regs[reg].XMM_Q(val & 1)));
4256 if (mod == 3)
4257 gen_op_mov_reg_v(ot, rm, cpu_tmp1_i64);
4258 else
4259 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
4260 (s->mem_index >> 2) - 1);
4261 }
4262 break;
4263 case 0x17: /* extractps */
4264 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4265 xmm_regs[reg].XMM_L(val & 3)));
4266 if (mod == 3)
4267 gen_op_mov_reg_T0(ot, rm);
4268 else
4269 tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
4270 (s->mem_index >> 2) - 1);
4271 break;
4272 case 0x20: /* pinsrb */
4273 if (mod == 3)
4274 gen_op_mov_TN_reg(OT_LONG, 0, rm);
4275 else
4276 tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0,
4277 (s->mem_index >> 2) - 1);
4278 tcg_gen_st8_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4279 xmm_regs[reg].XMM_B(val & 15)));
4280 break;
4281 case 0x21: /* insertps */
4282 if (mod == 3)
4283 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
4284 offsetof(CPUX86State,xmm_regs[rm]
4285 .XMM_L((val >> 6) & 3)));
4286 else
4287 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
4288 (s->mem_index >> 2) - 1);
4289 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
4290 offsetof(CPUX86State,xmm_regs[reg]
4291 .XMM_L((val >> 4) & 3)));
4292 if ((val >> 0) & 1)
4293 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4294 cpu_env, offsetof(CPUX86State,
4295 xmm_regs[reg].XMM_L(0)));
4296 if ((val >> 1) & 1)
4297 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4298 cpu_env, offsetof(CPUX86State,
4299 xmm_regs[reg].XMM_L(1)));
4300 if ((val >> 2) & 1)
4301 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4302 cpu_env, offsetof(CPUX86State,
4303 xmm_regs[reg].XMM_L(2)));
4304 if ((val >> 3) & 1)
4305 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4306 cpu_env, offsetof(CPUX86State,
4307 xmm_regs[reg].XMM_L(3)));
4308 break;
4309 case 0x22:
4310 if (ot == OT_LONG) { /* pinsrd */
4311 if (mod == 3)
4312 gen_op_mov_v_reg(ot, cpu_tmp2_i32, rm);
4313 else
4314 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
4315 (s->mem_index >> 2) - 1);
4316 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
4317 offsetof(CPUX86State,
4318 xmm_regs[reg].XMM_L(val & 3)));
4319 } else { /* pinsrq */
4320 if (mod == 3)
4321 gen_op_mov_v_reg(ot, cpu_tmp1_i64, rm);
4322 else
4323 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
4324 (s->mem_index >> 2) - 1);
4325 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
4326 offsetof(CPUX86State,
4327 xmm_regs[reg].XMM_Q(val & 1)));
4328 }
4329 break;
4330 }
4331 return;
4332 }
4333
4334 if (b1) {
4335 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4336 if (mod == 3) {
4337 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
4338 } else {
4339 op2_offset = offsetof(CPUX86State,xmm_t0);
4340 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4341 gen_ldo_env_A0(s->mem_index, op2_offset);
4342 }
4343 } else {
4344 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4345 if (mod == 3) {
4346 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4347 } else {
4348 op2_offset = offsetof(CPUX86State,mmx_t0);
4349 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4350 gen_ldq_env_A0(s->mem_index, op2_offset);
4351 }
4352 }
4353 val = ldub_code(s->pc++);
4354
4355 if ((b & 0xfc) == 0x60) { /* pcmpXstrX */
4356 s->cc_op = CC_OP_EFLAGS;
4357
4358 if (s->dflag == 2)
4359 /* The helper must use entire 64-bit gp registers */
4360 val |= 1 << 8;
4361 }
4362
4363 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4364 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4365 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
4366 break;
4367 default:
4368 goto illegal_op;
4369 }
4370 } else {
4371 /* generic MMX or SSE operation */
4372 switch(b) {
4373 case 0x70: /* pshufx insn */
4374 case 0xc6: /* pshufx insn */
4375 case 0xc2: /* compare insns */
4376 s->rip_offset = 1;
4377 break;
4378 default:
4379 break;
4380 }
4381 if (is_xmm) {
4382 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4383 if (mod != 3) {
4384 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4385 op2_offset = offsetof(CPUX86State,xmm_t0);
4386 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
4387 b == 0xc2)) {
4388 /* specific case for SSE single instructions */
4389 if (b1 == 2) {
4390 /* 32 bit access */
4391 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4392 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
4393 } else {
4394 /* 64 bit access */
4395 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
4396 }
4397 } else {
4398 gen_ldo_env_A0(s->mem_index, op2_offset);
4399 }
4400 } else {
4401 rm = (modrm & 7) | REX_B(s);
4402 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
4403 }
4404 } else {
4405 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4406 if (mod != 3) {
4407 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4408 op2_offset = offsetof(CPUX86State,mmx_t0);
4409 gen_ldq_env_A0(s->mem_index, op2_offset);
4410 } else {
4411 rm = (modrm & 7);
4412 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4413 }
4414 }
4415 switch(b) {
4416 case 0x0f: /* 3DNow! data insns */
4417 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
4418 goto illegal_op;
4419 val = ldub_code(s->pc++);
4420 sse_op2 = sse_op_table5[val];
4421 if (!sse_op2)
4422 goto illegal_op;
4423 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4424 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4425 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4426 break;
4427 case 0x70: /* pshufx insn */
4428 case 0xc6: /* pshufx insn */
4429 val = ldub_code(s->pc++);
4430 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4431 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4432 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
4433 break;
4434 case 0xc2:
4435 /* compare insns */
4436 val = ldub_code(s->pc++);
4437 if (val >= 8)
4438 goto illegal_op;
4439 sse_op2 = sse_op_table4[val][b1];
4440 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4441 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4442 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4443 break;
4444 case 0xf7:
4445 /* maskmov: we must prepare A0 */
4446 if (mod != 3)
4447 goto illegal_op;
4448#ifdef TARGET_X86_64
4449 if (s->aflag == 2) {
4450 gen_op_movq_A0_reg(R_EDI);
4451 } else
4452#endif
4453 {
4454 gen_op_movl_A0_reg(R_EDI);
4455 if (s->aflag == 0)
4456 gen_op_andl_A0_ffff();
4457 }
4458 gen_add_A0_ds_seg(s);
4459
4460 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4461 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4462 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
4463 break;
4464 default:
4465 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4466 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4467 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4468 break;
4469 }
4470 if (b == 0x2e || b == 0x2f) {
4471 s->cc_op = CC_OP_EFLAGS;
4472 }
4473 }
4474}
4475
4476#ifdef VBOX
4477/* Checks whether this is an invalid lock sequence. Only a few
4478 instructions can be used together with the lock prefix, and of
4479 those only the forms that write to a memory operand. So, this is
4480 kind of annoying work to do...
4481 The AMD manual lists the following instructions.
4482 ADC
4483 ADD
4484 AND
4485 BTC
4486 BTR
4487 BTS
4488 CMPXCHG
4489 CMPXCHG8B
4490 CMPXCHG16B
4491 DEC
4492 INC
4493 NEG
4494 NOT
4495 OR
4496 SBB
4497 SUB
4498 XADD
4499 XCHG
4500 XOR */
4501static bool is_invalid_lock_sequence(DisasContext *s, target_ulong pc_start, int b)
4502{
4503 target_ulong pc = s->pc;
4504 int modrm, mod, op;
4505
4506 /* X={8,16,32,64} Y={16,32,64} */
4507 switch (b)
4508 {
4509 /* /2: ADC reg/memX, immX */
4510 /* /0: ADD reg/memX, immX */
4511 /* /4: AND reg/memX, immX */
4512 /* /1: OR reg/memX, immX */
4513 /* /3: SBB reg/memX, immX */
4514 /* /5: SUB reg/memX, immX */
4515 /* /6: XOR reg/memX, immX */
4516 case 0x80:
4517 case 0x81:
4518 case 0x83:
4519 modrm = ldub_code(pc++);
4520 op = (modrm >> 3) & 7;
4521 if (op == 7) /* /7: CMP */
4522 break;
4523 mod = (modrm >> 6) & 3;
4524 if (mod == 3) /* register destination */
4525 break;
4526 return false;
4527
4528 case 0x10: /* /r: ADC reg/mem8, reg8 */
4529 case 0x11: /* /r: ADC reg/memY, regY */
4530 case 0x00: /* /r: ADD reg/mem8, reg8 */
4531 case 0x01: /* /r: ADD reg/memY, regY */
4532 case 0x20: /* /r: AND reg/mem8, reg8 */
4533 case 0x21: /* /r: AND reg/memY, regY */
4534 case 0x08: /* /r: OR reg/mem8, reg8 */
4535 case 0x09: /* /r: OR reg/memY, regY */
4536 case 0x18: /* /r: SBB reg/mem8, reg8 */
4537 case 0x19: /* /r: SBB reg/memY, regY */
4538 case 0x28: /* /r: SUB reg/mem8, reg8 */
4539 case 0x29: /* /r: SUB reg/memY, regY */
4540 case 0x86: /* /r: XCHG reg/mem8, reg8 or XCHG reg8, reg/mem8 */
4541 case 0x87: /* /r: XCHG reg/memY, regY or XCHG regY, reg/memY */
4542 case 0x30: /* /r: XOR reg/mem8, reg8 */
4543 case 0x31: /* /r: XOR reg/memY, regY */
4544 modrm = ldub_code(pc++);
4545 mod = (modrm >> 6) & 3;
4546 if (mod == 3) /* register destination */
4547 break;
4548 return false;
4549
4550 /* /1: DEC reg/memX */
4551 /* /0: INC reg/memX */
4552 case 0xfe:
4553 case 0xff:
4554 modrm = ldub_code(pc++);
4555 mod = (modrm >> 6) & 3;
4556 if (mod == 3) /* register destination */
4557 break;
4558 return false;
4559
4560 /* /3: NEG reg/memX */
4561 /* /2: NOT reg/memX */
4562 case 0xf6:
4563 case 0xf7:
4564 modrm = ldub_code(pc++);
4565 mod = (modrm >> 6) & 3;
4566 if (mod == 3) /* register destination */
4567 break;
4568 return false;
4569
4570 case 0x0f:
4571 b = ldub_code(pc++);
4572 switch (b)
4573 {
4574 /* /7: BTC reg/memY, imm8 */
4575 /* /6: BTR reg/memY, imm8 */
4576 /* /5: BTS reg/memY, imm8 */
4577 case 0xba:
4578 modrm = ldub_code(pc++);
4579 op = (modrm >> 3) & 7;
4580 if (op < 5)
4581 break;
4582 mod = (modrm >> 6) & 3;
4583 if (mod == 3) /* register destination */
4584 break;
4585 return false;
4586
4587 case 0xbb: /* /r: BTC reg/memY, regY */
4588 case 0xb3: /* /r: BTR reg/memY, regY */
4589 case 0xab: /* /r: BTS reg/memY, regY */
4590 case 0xb0: /* /r: CMPXCHG reg/mem8, reg8 */
4591 case 0xb1: /* /r: CMPXCHG reg/memY, regY */
4592 case 0xc0: /* /r: XADD reg/mem8, reg8 */
4593 case 0xc1: /* /r: XADD reg/memY, regY */
4594 modrm = ldub_code(pc++);
4595 mod = (modrm >> 6) & 3;
4596 if (mod == 3) /* register destination */
4597 break;
4598 return false;
4599
4600 /* /1: CMPXCHG8B mem64 or CMPXCHG16B mem128 */
4601 case 0xc7:
4602 modrm = ldub_code(pc++);
4603 op = (modrm >> 3) & 7;
4604 if (op != 1)
4605 break;
4606 return false;
4607 }
4608 break;
4609 }
4610
4611 /* illegal sequence. The s->pc is past the lock prefix and that
4612 is sufficient for the TB, I think. */
4613 Log(("illegal lock sequence %RGv (b=%#x)\n", pc_start, b));
4614 return true;
4615}
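/* Example encodings (a sketch of how the check above behaves):
   f0 01 08 lock add [eax], ecx -> mod == 0, memory destination,
   returns false (sequence accepted);
   f0 01 c8 lock add eax, ecx -> mod == 3, register destination,
   breaks out and returns true (#UD). */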
4616#endif /* VBOX */
4617
4618
4619/* convert one instruction. s->is_jmp is set if the translation must
4620 be stopped. Return the next pc value */
4621static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
4622{
4623 int b, prefixes, aflag, dflag;
4624 int shift, ot;
4625 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
4626 target_ulong next_eip, tval;
4627 int rex_w, rex_r;
4628
4629 if (unlikely(loglevel & CPU_LOG_TB_OP))
4630 tcg_gen_debug_insn_start(pc_start);
4631 s->pc = pc_start;
4632 prefixes = 0;
4633 aflag = s->code32;
4634 dflag = s->code32;
4635 s->override = -1;
4636 rex_w = -1;
4637 rex_r = 0;
4638#ifdef TARGET_X86_64
4639 s->rex_x = 0;
4640 s->rex_b = 0;
4641 x86_64_hregs = 0;
4642#endif
4643 s->rip_offset = 0; /* for relative ip address */
4644#ifdef VBOX
4645 /* Always update EIP. Otherwise one must be very careful with generated code that can raise exceptions. */
4646 gen_update_eip(pc_start - s->cs_base);
4647#endif
4648 next_byte:
4649 b = ldub_code(s->pc);
4650 s->pc++;
4651 /* check prefixes */
4652#ifdef TARGET_X86_64
4653 if (CODE64(s)) {
4654 switch (b) {
4655 case 0xf3:
4656 prefixes |= PREFIX_REPZ;
4657 goto next_byte;
4658 case 0xf2:
4659 prefixes |= PREFIX_REPNZ;
4660 goto next_byte;
4661 case 0xf0:
4662 prefixes |= PREFIX_LOCK;
4663 goto next_byte;
4664 case 0x2e:
4665 s->override = R_CS;
4666 goto next_byte;
4667 case 0x36:
4668 s->override = R_SS;
4669 goto next_byte;
4670 case 0x3e:
4671 s->override = R_DS;
4672 goto next_byte;
4673 case 0x26:
4674 s->override = R_ES;
4675 goto next_byte;
4676 case 0x64:
4677 s->override = R_FS;
4678 goto next_byte;
4679 case 0x65:
4680 s->override = R_GS;
4681 goto next_byte;
4682 case 0x66:
4683 prefixes |= PREFIX_DATA;
4684 goto next_byte;
4685 case 0x67:
4686 prefixes |= PREFIX_ADR;
4687 goto next_byte;
4688 case 0x40 ... 0x4f:
4689 /* REX prefix */
4690 rex_w = (b >> 3) & 1;
4691 rex_r = (b & 0x4) << 1;
4692 s->rex_x = (b & 0x2) << 2;
4693 REX_B(s) = (b & 0x1) << 3;
4694 x86_64_hregs = 1; /* select uniform byte register addressing */
4695 goto next_byte;
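/* e.g. REX byte 0x4d (0100.1101b) yields rex_w=1, rex_r=8, rex_x=0
   and REX_B=8, so '4d 01 c8' decodes as 'add r8, r9': 64-bit
   operand size with both the modrm reg and rm fields extended. */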
4696 }
4697 if (rex_w == 1) {
4698 /* 0x66 is ignored if rex.w is set */
4699 dflag = 2;
4700 } else {
4701 if (prefixes & PREFIX_DATA)
4702 dflag ^= 1;
4703 }
4704 if (!(prefixes & PREFIX_ADR))
4705 aflag = 2;
4706 } else
4707#endif
4708 {
4709 switch (b) {
4710 case 0xf3:
4711 prefixes |= PREFIX_REPZ;
4712 goto next_byte;
4713 case 0xf2:
4714 prefixes |= PREFIX_REPNZ;
4715 goto next_byte;
4716 case 0xf0:
4717 prefixes |= PREFIX_LOCK;
4718 goto next_byte;
4719 case 0x2e:
4720 s->override = R_CS;
4721 goto next_byte;
4722 case 0x36:
4723 s->override = R_SS;
4724 goto next_byte;
4725 case 0x3e:
4726 s->override = R_DS;
4727 goto next_byte;
4728 case 0x26:
4729 s->override = R_ES;
4730 goto next_byte;
4731 case 0x64:
4732 s->override = R_FS;
4733 goto next_byte;
4734 case 0x65:
4735 s->override = R_GS;
4736 goto next_byte;
4737 case 0x66:
4738 prefixes |= PREFIX_DATA;
4739 goto next_byte;
4740 case 0x67:
4741 prefixes |= PREFIX_ADR;
4742 goto next_byte;
4743 }
4744 if (prefixes & PREFIX_DATA)
4745 dflag ^= 1;
4746 if (prefixes & PREFIX_ADR)
4747 aflag ^= 1;
4748 }
4749
4750 s->prefix = prefixes;
4751 s->aflag = aflag;
4752 s->dflag = dflag;
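/* e.g. in 32-bit code (code32 = 1) the bytes '66 40' flip dflag to 0
   via the 0x66 data-size prefix, so the 0x40 opcode decodes as
   'inc ax' instead of 'inc eax'. */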
4753
4754 /* lock generation */
4755#ifndef VBOX
4756 if (prefixes & PREFIX_LOCK)
4757 tcg_gen_helper_0_0(helper_lock);
4758#else /* VBOX */
4759 if (prefixes & PREFIX_LOCK) {
4760 if (is_invalid_lock_sequence(s, pc_start, b)) {
4761 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
4762 return s->pc;
4763 }
4764 tcg_gen_helper_0_0(helper_lock);
4765 }
4766#endif /* VBOX */
4767
4768 /* now check op code */
4769 reswitch:
4770 switch(b) {
4771 case 0x0f:
4772 /**************************/
4773 /* extended op code */
4774 b = ldub_code(s->pc++) | 0x100;
4775 goto reswitch;
4776
4777 /**************************/
4778 /* arith & logic */
4779 case 0x00 ... 0x05:
4780 case 0x08 ... 0x0d:
4781 case 0x10 ... 0x15:
4782 case 0x18 ... 0x1d:
4783 case 0x20 ... 0x25:
4784 case 0x28 ... 0x2d:
4785 case 0x30 ... 0x35:
4786 case 0x38 ... 0x3d:
4787 {
4788 int op, f, val;
4789 op = (b >> 3) & 7;
4790 f = (b >> 1) & 3;
4791
4792 if ((b & 1) == 0)
4793 ot = OT_BYTE;
4794 else
4795 ot = dflag + OT_WORD;
4796
4797 switch(f) {
4798 case 0: /* OP Ev, Gv */
4799 modrm = ldub_code(s->pc++);
4800 reg = ((modrm >> 3) & 7) | rex_r;
4801 mod = (modrm >> 6) & 3;
4802 rm = (modrm & 7) | REX_B(s);
4803 if (mod != 3) {
4804 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4805 opreg = OR_TMP0;
4806 } else if (op == OP_XORL && rm == reg) {
4807 xor_zero:
4808 /* xor reg, reg optimisation */
4809 gen_op_movl_T0_0();
4810 s->cc_op = CC_OP_LOGICB + ot;
4811 gen_op_mov_reg_T0(ot, reg);
4812 gen_op_update1_cc();
4813 break;
4814 } else {
4815 opreg = rm;
4816 }
4817 gen_op_mov_TN_reg(ot, 1, reg);
4818 gen_op(s, op, ot, opreg);
4819 break;
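/* e.g. '31 c0' (xor eax, eax) takes the xor_zero path: EAX is
   zeroed and the flags are computed from the constant 0 without
   ever reading the old register value. */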
4820 case 1: /* OP Gv, Ev */
4821 modrm = ldub_code(s->pc++);
4822 mod = (modrm >> 6) & 3;
4823 reg = ((modrm >> 3) & 7) | rex_r;
4824 rm = (modrm & 7) | REX_B(s);
4825 if (mod != 3) {
4826 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4827 gen_op_ld_T1_A0(ot + s->mem_index);
4828 } else if (op == OP_XORL && rm == reg) {
4829 goto xor_zero;
4830 } else {
4831 gen_op_mov_TN_reg(ot, 1, rm);
4832 }
4833 gen_op(s, op, ot, reg);
4834 break;
4835 case 2: /* OP A, Iv */
4836 val = insn_get(s, ot);
4837 gen_op_movl_T1_im(val);
4838 gen_op(s, op, ot, OR_EAX);
4839 break;
4840 }
4841 }
4842 break;
4843
4844 case 0x82:
4845 if (CODE64(s))
4846 goto illegal_op;
4847 case 0x80: /* GRP1 */
4848 case 0x81:
4849 case 0x83:
4850 {
4851 int val;
4852
4853 if ((b & 1) == 0)
4854 ot = OT_BYTE;
4855 else
4856 ot = dflag + OT_WORD;
4857
4858 modrm = ldub_code(s->pc++);
4859 mod = (modrm >> 6) & 3;
4860 rm = (modrm & 7) | REX_B(s);
4861 op = (modrm >> 3) & 7;
4862
4863 if (mod != 3) {
4864 if (b == 0x83)
4865 s->rip_offset = 1;
4866 else
4867 s->rip_offset = insn_const_size(ot);
4868 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4869 opreg = OR_TMP0;
4870 } else {
4871 opreg = rm;
4872 }
4873
4874 switch(b) {
4875 default:
4876 case 0x80:
4877 case 0x81:
4878 case 0x82:
4879 val = insn_get(s, ot);
4880 break;
4881 case 0x83:
4882 val = (int8_t)insn_get(s, OT_BYTE);
4883 break;
4884 }
4885 gen_op_movl_T1_im(val);
4886 gen_op(s, op, ot, opreg);
4887 }
4888 break;
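/* e.g. '83 c0 05' (add eax, 5): for b == 0x83 the imm8 is
   sign-extended above, so '83 c0 ff' adds -1 (0xffffffff at
   OT_LONG). */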
4889
4890 /**************************/
4891 /* inc, dec, and other misc arith */
4892 case 0x40 ... 0x47: /* inc Gv */
4893 ot = dflag ? OT_LONG : OT_WORD;
4894 gen_inc(s, ot, OR_EAX + (b & 7), 1);
4895 break;
4896 case 0x48 ... 0x4f: /* dec Gv */
4897 ot = dflag ? OT_LONG : OT_WORD;
4898 gen_inc(s, ot, OR_EAX + (b & 7), -1);
4899 break;
4900 case 0xf6: /* GRP3 */
4901 case 0xf7:
4902 if ((b & 1) == 0)
4903 ot = OT_BYTE;
4904 else
4905 ot = dflag + OT_WORD;
4906
4907 modrm = ldub_code(s->pc++);
4908 mod = (modrm >> 6) & 3;
4909 rm = (modrm & 7) | REX_B(s);
4910 op = (modrm >> 3) & 7;
4911 if (mod != 3) {
4912 if (op == 0)
4913 s->rip_offset = insn_const_size(ot);
4914 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4915 gen_op_ld_T0_A0(ot + s->mem_index);
4916 } else {
4917 gen_op_mov_TN_reg(ot, 0, rm);
4918 }
4919
4920 switch(op) {
4921 case 0: /* test */
4922 val = insn_get(s, ot);
4923 gen_op_movl_T1_im(val);
4924 gen_op_testl_T0_T1_cc();
4925 s->cc_op = CC_OP_LOGICB + ot;
4926 break;
4927 case 2: /* not */
4928 tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
4929 if (mod != 3) {
4930 gen_op_st_T0_A0(ot + s->mem_index);
4931 } else {
4932 gen_op_mov_reg_T0(ot, rm);
4933 }
4934 break;
4935 case 3: /* neg */
4936 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
4937 if (mod != 3) {
4938 gen_op_st_T0_A0(ot + s->mem_index);
4939 } else {
4940 gen_op_mov_reg_T0(ot, rm);
4941 }
4942 gen_op_update_neg_cc();
4943 s->cc_op = CC_OP_SUBB + ot;
4944 break;
4945 case 4: /* mul */
4946 switch(ot) {
4947 case OT_BYTE:
4948 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
4949 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4950 tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
4951 /* XXX: use 32 bit mul which could be faster */
4952 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4953 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4954 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4955 tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
4956 s->cc_op = CC_OP_MULB;
4957 break;
4958 case OT_WORD:
4959 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
4960 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
4961 tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
4962 /* XXX: use 32 bit mul which could be faster */
4963 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4964 gen_op_mov_reg_T0(OT_WORD, R_EAX);
4965 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4966 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
4967 gen_op_mov_reg_T0(OT_WORD, R_EDX);
4968 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4969 s->cc_op = CC_OP_MULW;
4970 break;
4971 default:
4972 case OT_LONG:
4973#ifdef TARGET_X86_64
4974 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4975 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
4976 tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
4977 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4978 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4979 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4980 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
4981 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4982 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4983#else
4984 {
4985 TCGv t0, t1;
4986 t0 = tcg_temp_new(TCG_TYPE_I64);
4987 t1 = tcg_temp_new(TCG_TYPE_I64);
4988 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
4989 tcg_gen_extu_i32_i64(t0, cpu_T[0]);
4990 tcg_gen_extu_i32_i64(t1, cpu_T[1]);
4991 tcg_gen_mul_i64(t0, t0, t1);
4992 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4993 gen_op_mov_reg_T0(OT_LONG, R_EAX);
4994 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
4995 tcg_gen_shri_i64(t0, t0, 32);
4996 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
4997 gen_op_mov_reg_T0(OT_LONG, R_EDX);
4998 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
4999 }
5000#endif
5001 s->cc_op = CC_OP_MULL;
5002 break;
5003#ifdef TARGET_X86_64
5004 case OT_QUAD:
5005 tcg_gen_helper_0_1(helper_mulq_EAX_T0, cpu_T[0]);
5006 s->cc_op = CC_OP_MULQ;
5007 break;
5008#endif
5009 }
5010 break;
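/* e.g. 'mul ecx' (f7 e1) with EAX=0x80000000 and ECX=2: the
   widening multiply above leaves EDX:EAX = 0x00000001:00000000, and
   cc_src (the high half) being non-zero is what makes CC_OP_MULL
   report CF/OF set. */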
5011 case 5: /* imul */
5012 switch(ot) {
5013 case OT_BYTE:
5014 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
5015 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5016 tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
5017 /* XXX: use 32 bit mul which could be faster */
5018 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5019 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5020 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5021 tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
5022 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5023 s->cc_op = CC_OP_MULB;
5024 break;
5025 case OT_WORD:
5026 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
5027 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5028 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5029 /* XXX: use 32 bit mul which could be faster */
5030 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5031 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5032 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5033 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5034 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5035 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
5036 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5037 s->cc_op = CC_OP_MULW;
5038 break;
5039 default:
5040 case OT_LONG:
5041#ifdef TARGET_X86_64
5042 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5043 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5044 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
5045 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5046 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5047 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5048 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
5049 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5050 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
5051 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5052#else
5053 {
5054 TCGv t0, t1;
5055 t0 = tcg_temp_new(TCG_TYPE_I64);
5056 t1 = tcg_temp_new(TCG_TYPE_I64);
5057 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5058 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
5059 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
5060 tcg_gen_mul_i64(t0, t0, t1);
5061 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5062 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5063 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5064 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
5065 tcg_gen_shri_i64(t0, t0, 32);
5066 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5067 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5068 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5069 }
5070#endif
5071 s->cc_op = CC_OP_MULL;
5072 break;
5073#ifdef TARGET_X86_64
5074 case OT_QUAD:
5075 tcg_gen_helper_0_1(helper_imulq_EAX_T0, cpu_T[0]);
5076 s->cc_op = CC_OP_MULQ;
5077 break;
5078#endif
5079 }
5080 break;
5081 case 6: /* div */
5082 switch(ot) {
5083 case OT_BYTE:
5084 gen_jmp_im(pc_start - s->cs_base);
5085 tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
5086 break;
5087 case OT_WORD:
5088 gen_jmp_im(pc_start - s->cs_base);
5089 tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
5090 break;
5091 default:
5092 case OT_LONG:
5093 gen_jmp_im(pc_start - s->cs_base);
5094 tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
5095 break;
5096#ifdef TARGET_X86_64
5097 case OT_QUAD:
5098 gen_jmp_im(pc_start - s->cs_base);
5099 tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
5100 break;
5101#endif
5102 }
5103 break;
5104 case 7: /* idiv */
5105 switch(ot) {
5106 case OT_BYTE:
5107 gen_jmp_im(pc_start - s->cs_base);
5108 tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
5109 break;
5110 case OT_WORD:
5111 gen_jmp_im(pc_start - s->cs_base);
5112 tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
5113 break;
5114 default:
5115 case OT_LONG:
5116 gen_jmp_im(pc_start - s->cs_base);
5117 tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
5118 break;
5119#ifdef TARGET_X86_64
5120 case OT_QUAD:
5121 gen_jmp_im(pc_start - s->cs_base);
5122 tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
5123 break;
5124#endif
5125 }
5126 break;
5127 default:
5128 goto illegal_op;
5129 }
5130 break;
5131
5132 case 0xfe: /* GRP4 */
5133 case 0xff: /* GRP5 */
5134 if ((b & 1) == 0)
5135 ot = OT_BYTE;
5136 else
5137 ot = dflag + OT_WORD;
5138
5139 modrm = ldub_code(s->pc++);
5140 mod = (modrm >> 6) & 3;
5141 rm = (modrm & 7) | REX_B(s);
5142 op = (modrm >> 3) & 7;
5143 if (op >= 2 && b == 0xfe) {
5144 goto illegal_op;
5145 }
5146 if (CODE64(s)) {
5147 if (op == 2 || op == 4) {
5148 /* operand size for jumps is 64 bit */
5149 ot = OT_QUAD;
5150 } else if (op == 3 || op == 5) {
5151 /* for far calls and far jumps, the operand is 16 or 32 bit,
5152 even in long mode */
5153 ot = dflag ? OT_LONG : OT_WORD;
5154 } else if (op == 6) {
5155 /* default push size is 64 bit */
5156 ot = dflag ? OT_QUAD : OT_WORD;
5157 }
5158 }
5159 if (mod != 3) {
5160 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5161 if (op >= 2 && op != 3 && op != 5)
5162 gen_op_ld_T0_A0(ot + s->mem_index);
5163 } else {
5164 gen_op_mov_TN_reg(ot, 0, rm);
5165 }
5166
5167 switch(op) {
5168 case 0: /* inc Ev */
5169 if (mod != 3)
5170 opreg = OR_TMP0;
5171 else
5172 opreg = rm;
5173 gen_inc(s, ot, opreg, 1);
5174 break;
5175 case 1: /* dec Ev */
5176 if (mod != 3)
5177 opreg = OR_TMP0;
5178 else
5179 opreg = rm;
5180 gen_inc(s, ot, opreg, -1);
5181 break;
5182 case 2: /* call Ev */
5183 /* XXX: optimize if memory (no 'and' is necessary) */
5184#ifdef VBOX_WITH_CALL_RECORD
5185 if (s->record_call)
5186 gen_op_record_call();
5187#endif
5188 if (s->dflag == 0)
5189 gen_op_andl_T0_ffff();
5190 next_eip = s->pc - s->cs_base;
5191 gen_movtl_T1_im(next_eip);
5192 gen_push_T1(s);
5193 gen_op_jmp_T0();
5194 gen_eob(s);
5195 break;
5196 case 3: /* lcall Ev */
5197 gen_op_ld_T1_A0(ot + s->mem_index);
5198 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5199 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5200 do_lcall:
5201 if (s->pe && !s->vm86) {
5202 if (s->cc_op != CC_OP_DYNAMIC)
5203 gen_op_set_cc_op(s->cc_op);
5204 gen_jmp_im(pc_start - s->cs_base);
5205 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5206 tcg_gen_helper_0_4(helper_lcall_protected,
5207 cpu_tmp2_i32, cpu_T[1],
5208 tcg_const_i32(dflag),
5209 tcg_const_i32(s->pc - pc_start));
5210 } else {
5211 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5212 tcg_gen_helper_0_4(helper_lcall_real,
5213 cpu_tmp2_i32, cpu_T[1],
5214 tcg_const_i32(dflag),
5215 tcg_const_i32(s->pc - s->cs_base));
5216 }
5217 gen_eob(s);
5218 break;
5219 case 4: /* jmp Ev */
5220 if (s->dflag == 0)
5221 gen_op_andl_T0_ffff();
5222 gen_op_jmp_T0();
5223 gen_eob(s);
5224 break;
5225 case 5: /* ljmp Ev */
5226 gen_op_ld_T1_A0(ot + s->mem_index);
5227 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5228 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5229 do_ljmp:
5230 if (s->pe && !s->vm86) {
5231 if (s->cc_op != CC_OP_DYNAMIC)
5232 gen_op_set_cc_op(s->cc_op);
5233 gen_jmp_im(pc_start - s->cs_base);
5234 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5235 tcg_gen_helper_0_3(helper_ljmp_protected,
5236 cpu_tmp2_i32,
5237 cpu_T[1],
5238 tcg_const_i32(s->pc - pc_start));
5239 } else {
5240 gen_op_movl_seg_T0_vm(R_CS);
5241 gen_op_movl_T0_T1();
5242 gen_op_jmp_T0();
5243 }
5244 gen_eob(s);
5245 break;
5246 case 6: /* push Ev */
5247 gen_push_T0(s);
5248 break;
5249 default:
5250 goto illegal_op;
5251 }
5252 break;
5253
5254 case 0x84: /* test Ev, Gv */
5255 case 0x85:
5256 if ((b & 1) == 0)
5257 ot = OT_BYTE;
5258 else
5259 ot = dflag + OT_WORD;
5260
5261 modrm = ldub_code(s->pc++);
5262 mod = (modrm >> 6) & 3;
5263 rm = (modrm & 7) | REX_B(s);
5264 reg = ((modrm >> 3) & 7) | rex_r;
5265
5266 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5267 gen_op_mov_TN_reg(ot, 1, reg);
5268 gen_op_testl_T0_T1_cc();
5269 s->cc_op = CC_OP_LOGICB + ot;
5270 break;
5271
5272 case 0xa8: /* test eAX, Iv */
5273 case 0xa9:
5274 if ((b & 1) == 0)
5275 ot = OT_BYTE;
5276 else
5277 ot = dflag + OT_WORD;
5278 val = insn_get(s, ot);
5279
5280 gen_op_mov_TN_reg(ot, 0, OR_EAX);
5281 gen_op_movl_T1_im(val);
5282 gen_op_testl_T0_T1_cc();
5283 s->cc_op = CC_OP_LOGICB + ot;
5284 break;
5285
5286 case 0x98: /* CWDE/CBW */
5287#ifdef TARGET_X86_64
5288 if (dflag == 2) {
5289 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5290 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5291 gen_op_mov_reg_T0(OT_QUAD, R_EAX);
5292 } else
5293#endif
5294 if (dflag == 1) {
5295 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5296 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5297 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5298 } else {
5299 gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
5300 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5301 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5302 }
5303 break;
5304 case 0x99: /* CDQ/CWD */
5305#ifdef TARGET_X86_64
5306 if (dflag == 2) {
5307 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5308 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
5309 gen_op_mov_reg_T0(OT_QUAD, R_EDX);
5310 } else
5311#endif
5312 if (dflag == 1) {
5313 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5314 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5315 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
5316 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5317 } else {
5318 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5319 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5320 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
5321 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5322 }
5323 break;
5324 case 0x1af: /* imul Gv, Ev */
5325 case 0x69: /* imul Gv, Ev, I */
5326 case 0x6b:
5327 ot = dflag + OT_WORD;
5328 modrm = ldub_code(s->pc++);
5329 reg = ((modrm >> 3) & 7) | rex_r;
5330 if (b == 0x69)
5331 s->rip_offset = insn_const_size(ot);
5332 else if (b == 0x6b)
5333 s->rip_offset = 1;
5334 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5335 if (b == 0x69) {
5336 val = insn_get(s, ot);
5337 gen_op_movl_T1_im(val);
5338 } else if (b == 0x6b) {
5339 val = (int8_t)insn_get(s, OT_BYTE);
5340 gen_op_movl_T1_im(val);
5341 } else {
5342 gen_op_mov_TN_reg(ot, 1, reg);
5343 }
5344
5345#ifdef TARGET_X86_64
5346 if (ot == OT_QUAD) {
5347 tcg_gen_helper_1_2(helper_imulq_T0_T1, cpu_T[0], cpu_T[0], cpu_T[1]);
5348 } else
5349#endif
5350 if (ot == OT_LONG) {
5351#ifdef TARGET_X86_64
5352 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5353 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
5354 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5355 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5356 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
5357 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5358#else
5359 {
5360 TCGv t0, t1;
5361 t0 = tcg_temp_new(TCG_TYPE_I64);
5362 t1 = tcg_temp_new(TCG_TYPE_I64);
5363 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
5364 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
5365 tcg_gen_mul_i64(t0, t0, t1);
5366 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5367 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5368 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
5369 tcg_gen_shri_i64(t0, t0, 32);
5370 tcg_gen_trunc_i64_i32(cpu_T[1], t0);
5371 tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
5372 }
5373#endif
5374 } else {
5375 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5376 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5377 /* XXX: use 32 bit mul which could be faster */
5378 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5379 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5380 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5381 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5382 }
5383 gen_op_mov_reg_T0(ot, reg);
5384 s->cc_op = CC_OP_MULB + ot;
5385 break;
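/* e.g. '6b c0 10' (imul eax, eax, 16): the imm8 path above puts 16
   in T1; cc_src = result - sext(result) is non-zero exactly when
   the signed product overflowed the operand size, which
   CC_OP_MULB + ot later turns into CF/OF. */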
5386 case 0x1c0:
5387 case 0x1c1: /* xadd Ev, Gv */
5388 if ((b & 1) == 0)
5389 ot = OT_BYTE;
5390 else
5391 ot = dflag + OT_WORD;
5392 modrm = ldub_code(s->pc++);
5393 reg = ((modrm >> 3) & 7) | rex_r;
5394 mod = (modrm >> 6) & 3;
5395 if (mod == 3) {
5396 rm = (modrm & 7) | REX_B(s);
5397 gen_op_mov_TN_reg(ot, 0, reg);
5398 gen_op_mov_TN_reg(ot, 1, rm);
5399 gen_op_addl_T0_T1();
5400 gen_op_mov_reg_T1(ot, reg);
5401 gen_op_mov_reg_T0(ot, rm);
5402 } else {
5403 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5404 gen_op_mov_TN_reg(ot, 0, reg);
5405 gen_op_ld_T1_A0(ot + s->mem_index);
5406 gen_op_addl_T0_T1();
5407 gen_op_st_T0_A0(ot + s->mem_index);
5408 gen_op_mov_reg_T1(ot, reg);
5409 }
5410 gen_op_update2_cc();
5411 s->cc_op = CC_OP_ADDB + ot;
5412 break;
5413 case 0x1b0:
5414 case 0x1b1: /* cmpxchg Ev, Gv */
5415 {
5416 int label1, label2;
5417 TCGv t0, t1, t2, a0;
5418
5419 if ((b & 1) == 0)
5420 ot = OT_BYTE;
5421 else
5422 ot = dflag + OT_WORD;
5423 modrm = ldub_code(s->pc++);
5424 reg = ((modrm >> 3) & 7) | rex_r;
5425 mod = (modrm >> 6) & 3;
5426 t0 = tcg_temp_local_new(TCG_TYPE_TL);
5427 t1 = tcg_temp_local_new(TCG_TYPE_TL);
5428 t2 = tcg_temp_local_new(TCG_TYPE_TL);
5429 a0 = tcg_temp_local_new(TCG_TYPE_TL);
5430 gen_op_mov_v_reg(ot, t1, reg);
5431 if (mod == 3) {
5432 rm = (modrm & 7) | REX_B(s);
5433 gen_op_mov_v_reg(ot, t0, rm);
5434 } else {
5435 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5436 tcg_gen_mov_tl(a0, cpu_A0);
5437 gen_op_ld_v(ot + s->mem_index, t0, a0);
5438 rm = 0; /* avoid warning */
5439 }
5440 label1 = gen_new_label();
5441 tcg_gen_ld_tl(t2, cpu_env, offsetof(CPUState, regs[R_EAX]));
5442 tcg_gen_sub_tl(t2, t2, t0);
5443 gen_extu(ot, t2);
5444 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
5445 if (mod == 3) {
5446 label2 = gen_new_label();
5447 gen_op_mov_reg_v(ot, R_EAX, t0);
5448 tcg_gen_br(label2);
5449 gen_set_label(label1);
5450 gen_op_mov_reg_v(ot, rm, t1);
5451 gen_set_label(label2);
5452 } else {
5453 tcg_gen_mov_tl(t1, t0);
5454 gen_op_mov_reg_v(ot, R_EAX, t0);
5455 gen_set_label(label1);
5456 /* always store */
5457 gen_op_st_v(ot + s->mem_index, t1, a0);
5458 }
5459 tcg_gen_mov_tl(cpu_cc_src, t0);
5460 tcg_gen_mov_tl(cpu_cc_dst, t2);
5461 s->cc_op = CC_OP_SUBB + ot;
5462 tcg_temp_free(t0);
5463 tcg_temp_free(t1);
5464 tcg_temp_free(t2);
5465 tcg_temp_free(a0);
5466 }
5467 break;
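/* Sketch of the generated cmpxchg: if EAX == dest then dest = reg
   (t1), else EAX = dest. For the memory form t1 is preloaded with
   the old value so the store always happens, matching the
   unconditional write-back of the real instruction. */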
5468 case 0x1c7: /* cmpxchg8b */
5469 modrm = ldub_code(s->pc++);
5470 mod = (modrm >> 6) & 3;
5471 if ((mod == 3) || ((modrm & 0x38) != 0x8))
5472 goto illegal_op;
5473#ifdef TARGET_X86_64
5474 if (dflag == 2) {
5475 if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
5476 goto illegal_op;
5477 gen_jmp_im(pc_start - s->cs_base);
5478 if (s->cc_op != CC_OP_DYNAMIC)
5479 gen_op_set_cc_op(s->cc_op);
5480 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5481 tcg_gen_helper_0_1(helper_cmpxchg16b, cpu_A0);
5482 } else
5483#endif
5484 {
5485 if (!(s->cpuid_features & CPUID_CX8))
5486 goto illegal_op;
5487 gen_jmp_im(pc_start - s->cs_base);
5488 if (s->cc_op != CC_OP_DYNAMIC)
5489 gen_op_set_cc_op(s->cc_op);
5490 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5491 tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
5492 }
5493 s->cc_op = CC_OP_EFLAGS;
5494 break;
5495
5496 /**************************/
5497 /* push/pop */
5498 case 0x50 ... 0x57: /* push */
5499 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
5500 gen_push_T0(s);
5501 break;
5502 case 0x58 ... 0x5f: /* pop */
5503 if (CODE64(s)) {
5504 ot = dflag ? OT_QUAD : OT_WORD;
5505 } else {
5506 ot = dflag + OT_WORD;
5507 }
5508 gen_pop_T0(s);
5509 /* NOTE: order is important for pop %sp */
5510 gen_pop_update(s);
5511 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
5512 break;
5513 case 0x60: /* pusha */
5514 if (CODE64(s))
5515 goto illegal_op;
5516 gen_pusha(s);
5517 break;
5518 case 0x61: /* popa */
5519 if (CODE64(s))
5520 goto illegal_op;
5521 gen_popa(s);
5522 break;
5523 case 0x68: /* push Iv */
5524 case 0x6a:
5525 if (CODE64(s)) {
5526 ot = dflag ? OT_QUAD : OT_WORD;
5527 } else {
5528 ot = dflag + OT_WORD;
5529 }
5530 if (b == 0x68)
5531 val = insn_get(s, ot);
5532 else
5533 val = (int8_t)insn_get(s, OT_BYTE);
5534 gen_op_movl_T0_im(val);
5535 gen_push_T0(s);
5536 break;
5537 case 0x8f: /* pop Ev */
5538 if (CODE64(s)) {
5539 ot = dflag ? OT_QUAD : OT_WORD;
5540 } else {
5541 ot = dflag + OT_WORD;
5542 }
5543 modrm = ldub_code(s->pc++);
5544 mod = (modrm >> 6) & 3;
5545 gen_pop_T0(s);
5546 if (mod == 3) {
5547 /* NOTE: order is important for pop %sp */
5548 gen_pop_update(s);
5549 rm = (modrm & 7) | REX_B(s);
5550 gen_op_mov_reg_T0(ot, rm);
5551 } else {
5552 /* NOTE: order is important too for MMU exceptions */
5553 s->popl_esp_hack = 1 << ot;
5554 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5555 s->popl_esp_hack = 0;
5556 gen_pop_update(s);
5557 }
5558 break;
5559 case 0xc8: /* enter */
5560 {
5561 int level;
5562 val = lduw_code(s->pc);
5563 s->pc += 2;
5564 level = ldub_code(s->pc++);
5565 gen_enter(s, val, level);
5566 }
5567 break;
5568 case 0xc9: /* leave */
5569 /* XXX: exception not precise (ESP is updated before potential exception) */
5570 if (CODE64(s)) {
5571 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
5572 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
5573 } else if (s->ss32) {
5574 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
5575 gen_op_mov_reg_T0(OT_LONG, R_ESP);
5576 } else {
5577 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
5578 gen_op_mov_reg_T0(OT_WORD, R_ESP);
5579 }
5580 gen_pop_T0(s);
5581 if (CODE64(s)) {
5582 ot = dflag ? OT_QUAD : OT_WORD;
5583 } else {
5584 ot = dflag + OT_WORD;
5585 }
5586 gen_op_mov_reg_T0(ot, R_EBP);
5587 gen_pop_update(s);
5588 break;
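/* 'leave' above is essentially 'mov (e)sp, (e)bp; pop (e)bp', with
   the pop width taken from dflag; e.g. a bare 'c9' in 32-bit code
   restores EBP from the stack and raises ESP by 4. */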
5589 case 0x06: /* push es */
5590 case 0x0e: /* push cs */
5591 case 0x16: /* push ss */
5592 case 0x1e: /* push ds */
5593 if (CODE64(s))
5594 goto illegal_op;
5595 gen_op_movl_T0_seg(b >> 3);
5596 gen_push_T0(s);
5597 break;
5598 case 0x1a0: /* push fs */
5599 case 0x1a8: /* push gs */
5600 gen_op_movl_T0_seg((b >> 3) & 7);
5601 gen_push_T0(s);
5602 break;
5603 case 0x07: /* pop es */
5604 case 0x17: /* pop ss */
5605 case 0x1f: /* pop ds */
5606 if (CODE64(s))
5607 goto illegal_op;
5608 reg = b >> 3;
5609 gen_pop_T0(s);
5610 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5611 gen_pop_update(s);
5612 if (reg == R_SS) {
5613 /* if reg == SS, inhibit interrupts/trace. */
5614 /* If several instructions disable interrupts, only the
5615 _first_ does it */
5616 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5617 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5618 s->tf = 0;
5619 }
5620 if (s->is_jmp) {
5621 gen_jmp_im(s->pc - s->cs_base);
5622 gen_eob(s);
5623 }
5624 break;
5625 case 0x1a1: /* pop fs */
5626 case 0x1a9: /* pop gs */
5627 gen_pop_T0(s);
5628 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
5629 gen_pop_update(s);
5630 if (s->is_jmp) {
5631 gen_jmp_im(s->pc - s->cs_base);
5632 gen_eob(s);
5633 }
5634 break;
5635
5636 /**************************/
5637 /* mov */
5638 case 0x88:
5639 case 0x89: /* mov Gv, Ev */
5640 if ((b & 1) == 0)
5641 ot = OT_BYTE;
5642 else
5643 ot = dflag + OT_WORD;
5644 modrm = ldub_code(s->pc++);
5645 reg = ((modrm >> 3) & 7) | rex_r;
5646
5647 /* generate a generic store */
5648 gen_ldst_modrm(s, modrm, ot, reg, 1);
5649 break;
5650 case 0xc6:
5651 case 0xc7: /* mov Ev, Iv */
5652 if ((b & 1) == 0)
5653 ot = OT_BYTE;
5654 else
5655 ot = dflag + OT_WORD;
5656 modrm = ldub_code(s->pc++);
5657 mod = (modrm >> 6) & 3;
5658 if (mod != 3) {
5659 s->rip_offset = insn_const_size(ot);
5660 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5661 }
5662 val = insn_get(s, ot);
5663 gen_op_movl_T0_im(val);
5664 if (mod != 3)
5665 gen_op_st_T0_A0(ot + s->mem_index);
5666 else
5667 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
5668 break;
5669 case 0x8a:
5670 case 0x8b: /* mov Ev, Gv */
5671#ifdef VBOX /* dtrace hot fix */
5672 if (prefixes & PREFIX_LOCK)
5673 goto illegal_op;
5674#endif
5675 if ((b & 1) == 0)
5676 ot = OT_BYTE;
5677 else
5678 ot = OT_WORD + dflag;
5679 modrm = ldub_code(s->pc++);
5680 reg = ((modrm >> 3) & 7) | rex_r;
5681
5682 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5683 gen_op_mov_reg_T0(ot, reg);
5684 break;
5685 case 0x8e: /* mov seg, Gv */
5686 modrm = ldub_code(s->pc++);
5687 reg = (modrm >> 3) & 7;
5688 if (reg >= 6 || reg == R_CS)
5689 goto illegal_op;
5690 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5691 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5692 if (reg == R_SS) {
5693 /* if reg == SS, inhibit interrupts/trace */
5694 /* If several instructions disable interrupts, only the
5695 _first_ does it */
5696 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5697 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5698 s->tf = 0;
5699 }
5700 if (s->is_jmp) {
5701 gen_jmp_im(s->pc - s->cs_base);
5702 gen_eob(s);
5703 }
5704 break;
5705 case 0x8c: /* mov Gv, seg */
5706 modrm = ldub_code(s->pc++);
5707 reg = (modrm >> 3) & 7;
5708 mod = (modrm >> 6) & 3;
5709 if (reg >= 6)
5710 goto illegal_op;
5711 gen_op_movl_T0_seg(reg);
5712 if (mod == 3)
5713 ot = OT_WORD + dflag;
5714 else
5715 ot = OT_WORD;
5716 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5717 break;
5718
5719 case 0x1b6: /* movzbS Gv, Eb */
5720 case 0x1b7: /* movzwS Gv, Ew */
5721 case 0x1be: /* movsbS Gv, Eb */
5722 case 0x1bf: /* movswS Gv, Ew */
5723 {
5724 int d_ot;
5725 /* d_ot is the size of destination */
5726 d_ot = dflag + OT_WORD;
5727 /* ot is the size of source */
5728 ot = (b & 1) + OT_BYTE;
5729 modrm = ldub_code(s->pc++);
5730 reg = ((modrm >> 3) & 7) | rex_r;
5731 mod = (modrm >> 6) & 3;
5732 rm = (modrm & 7) | REX_B(s);
5733
5734 if (mod == 3) {
5735 gen_op_mov_TN_reg(ot, 0, rm);
5736 switch(ot | (b & 8)) {
5737 case OT_BYTE:
5738 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
5739 break;
5740 case OT_BYTE | 8:
5741 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5742 break;
5743 case OT_WORD:
5744 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
5745 break;
5746 default:
5747 case OT_WORD | 8:
5748 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5749 break;
5750 }
5751 gen_op_mov_reg_T0(d_ot, reg);
5752 } else {
5753 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5754 if (b & 8) {
5755 gen_op_lds_T0_A0(ot + s->mem_index);
5756 } else {
5757 gen_op_ldu_T0_A0(ot + s->mem_index);
5758 }
5759 gen_op_mov_reg_T0(d_ot, reg);
5760 }
5761 }
5762 break;
5763
5764 case 0x8d: /* lea */
5765 ot = dflag + OT_WORD;
5766 modrm = ldub_code(s->pc++);
5767 mod = (modrm >> 6) & 3;
5768 if (mod == 3)
5769 goto illegal_op;
5770 reg = ((modrm >> 3) & 7) | rex_r;
5771 /* we must ensure that no segment is added */
5772 s->override = -1;
5773 val = s->addseg;
5774 s->addseg = 0;
5775 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5776 s->addseg = val;
5777 gen_op_mov_reg_A0(ot - OT_WORD, reg);
5778 break;
5779
5780 case 0xa0: /* mov EAX, Ov */
5781 case 0xa1:
5782 case 0xa2: /* mov Ov, EAX */
5783 case 0xa3:
5784 {
5785 target_ulong offset_addr;
5786
5787 if ((b & 1) == 0)
5788 ot = OT_BYTE;
5789 else
5790 ot = dflag + OT_WORD;
5791#ifdef TARGET_X86_64
5792 if (s->aflag == 2) {
5793 offset_addr = ldq_code(s->pc);
5794 s->pc += 8;
5795 gen_op_movq_A0_im(offset_addr);
5796 } else
5797#endif
5798 {
5799 if (s->aflag) {
5800 offset_addr = insn_get(s, OT_LONG);
5801 } else {
5802 offset_addr = insn_get(s, OT_WORD);
5803 }
5804 gen_op_movl_A0_im(offset_addr);
5805 }
5806 gen_add_A0_ds_seg(s);
5807 if ((b & 2) == 0) {
5808 gen_op_ld_T0_A0(ot + s->mem_index);
5809 gen_op_mov_reg_T0(ot, R_EAX);
5810 } else {
5811 gen_op_mov_TN_reg(ot, 0, R_EAX);
5812 gen_op_st_T0_A0(ot + s->mem_index);
5813 }
5814 }
5815 break;
5816 case 0xd7: /* xlat */
5817#ifdef TARGET_X86_64
5818 if (s->aflag == 2) {
5819 gen_op_movq_A0_reg(R_EBX);
5820 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5821 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5822 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5823 } else
5824#endif
5825 {
5826 gen_op_movl_A0_reg(R_EBX);
5827 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5828 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5829 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5830 if (s->aflag == 0)
5831 gen_op_andl_A0_ffff();
5832 else
5833 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
5834 }
5835 gen_add_A0_ds_seg(s);
5836 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
5837 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
5838 break;
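/* xlat is AL = [seg:(e)bx + AL]; e.g. a bare 'd7' reloads AL from
   the 256-byte table that (E)BX points to in DS (unless
   overridden). */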
5839 case 0xb0 ... 0xb7: /* mov R, Ib */
5840 val = insn_get(s, OT_BYTE);
5841 gen_op_movl_T0_im(val);
5842 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
5843 break;
5844 case 0xb8 ... 0xbf: /* mov R, Iv */
5845#ifdef TARGET_X86_64
5846 if (dflag == 2) {
5847 uint64_t tmp;
5848 /* 64 bit case */
5849 tmp = ldq_code(s->pc);
5850 s->pc += 8;
5851 reg = (b & 7) | REX_B(s);
5852 gen_movtl_T0_im(tmp);
5853 gen_op_mov_reg_T0(OT_QUAD, reg);
5854 } else
5855#endif
5856 {
5857 ot = dflag ? OT_LONG : OT_WORD;
5858 val = insn_get(s, ot);
5859 reg = (b & 7) | REX_B(s);
5860 gen_op_movl_T0_im(val);
5861 gen_op_mov_reg_T0(ot, reg);
5862 }
5863 break;
5864
5865 case 0x91 ... 0x97: /* xchg R, EAX */
5866 ot = dflag + OT_WORD;
5867 reg = (b & 7) | REX_B(s);
5868 rm = R_EAX;
5869 goto do_xchg_reg;
5870 case 0x86:
5871 case 0x87: /* xchg Ev, Gv */
5872 if ((b & 1) == 0)
5873 ot = OT_BYTE;
5874 else
5875 ot = dflag + OT_WORD;
5876 modrm = ldub_code(s->pc++);
5877 reg = ((modrm >> 3) & 7) | rex_r;
5878 mod = (modrm >> 6) & 3;
5879 if (mod == 3) {
5880 rm = (modrm & 7) | REX_B(s);
5881 do_xchg_reg:
5882 gen_op_mov_TN_reg(ot, 0, reg);
5883 gen_op_mov_TN_reg(ot, 1, rm);
5884 gen_op_mov_reg_T0(ot, rm);
5885 gen_op_mov_reg_T1(ot, reg);
5886 } else {
5887 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5888 gen_op_mov_TN_reg(ot, 0, reg);
5889 /* for xchg, lock is implicit */
5890 if (!(prefixes & PREFIX_LOCK))
5891 tcg_gen_helper_0_0(helper_lock);
5892 gen_op_ld_T1_A0(ot + s->mem_index);
5893 gen_op_st_T0_A0(ot + s->mem_index);
5894 if (!(prefixes & PREFIX_LOCK))
5895 tcg_gen_helper_0_0(helper_unlock);
5896 gen_op_mov_reg_T1(ot, reg);
5897 }
5898 break;
5899 case 0xc4: /* les Gv */
5900 if (CODE64(s))
5901 goto illegal_op;
5902 op = R_ES;
5903 goto do_lxx;
5904 case 0xc5: /* lds Gv */
5905 if (CODE64(s))
5906 goto illegal_op;
5907 op = R_DS;
5908 goto do_lxx;
5909 case 0x1b2: /* lss Gv */
5910 op = R_SS;
5911 goto do_lxx;
5912 case 0x1b4: /* lfs Gv */
5913 op = R_FS;
5914 goto do_lxx;
5915 case 0x1b5: /* lgs Gv */
5916 op = R_GS;
5917 do_lxx:
5918 ot = dflag ? OT_LONG : OT_WORD;
5919 modrm = ldub_code(s->pc++);
5920 reg = ((modrm >> 3) & 7) | rex_r;
5921 mod = (modrm >> 6) & 3;
5922 if (mod == 3)
5923 goto illegal_op;
5924 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5925 gen_op_ld_T1_A0(ot + s->mem_index);
5926 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5927 /* load the segment first to handle exceptions properly */
5928 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5929 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
5930 /* then put the data */
5931 gen_op_mov_reg_T1(ot, reg);
5932 if (s->is_jmp) {
5933 gen_jmp_im(s->pc - s->cs_base);
5934 gen_eob(s);
5935 }
5936 break;
5937
5938 /************************/
5939 /* shifts */
5940 case 0xc0:
5941 case 0xc1:
5942 /* shift Ev,Ib */
5943 shift = 2;
5944 grp2:
5945 {
5946 if ((b & 1) == 0)
5947 ot = OT_BYTE;
5948 else
5949 ot = dflag + OT_WORD;
5950
5951 modrm = ldub_code(s->pc++);
5952 mod = (modrm >> 6) & 3;
5953 op = (modrm >> 3) & 7;
5954
5955 if (mod != 3) {
5956 if (shift == 2) {
5957 s->rip_offset = 1;
5958 }
5959 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5960 opreg = OR_TMP0;
5961 } else {
5962 opreg = (modrm & 7) | REX_B(s);
5963 }
5964
5965 /* simpler op: shift count comes from CL */
5966 if (shift == 0) {
5967 gen_shift(s, op, ot, opreg, OR_ECX);
5968 } else {
5969 if (shift == 2) {
5970 shift = ldub_code(s->pc++);
5971 }
5972 gen_shifti(s, op, ot, opreg, shift);
5973 }
5974 }
5975 break;
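/* e.g. 'c1 e0 04' (shl eax, 4): shift == 2 selects the imm8 form,
   the count byte is fetched after the modrm, and gen_shifti()
   emits the immediate-count shift. */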
5976 case 0xd0:
5977 case 0xd1:
5978 /* shift Ev,1 */
5979 shift = 1;
5980 goto grp2;
5981 case 0xd2:
5982 case 0xd3:
5983 /* shift Ev,cl */
5984 shift = 0;
5985 goto grp2;
5986
5987 case 0x1a4: /* shld imm */
5988 op = 0;
5989 shift = 1;
5990 goto do_shiftd;
5991 case 0x1a5: /* shld cl */
5992 op = 0;
5993 shift = 0;
5994 goto do_shiftd;
5995 case 0x1ac: /* shrd imm */
5996 op = 1;
5997 shift = 1;
5998 goto do_shiftd;
5999 case 0x1ad: /* shrd cl */
6000 op = 1;
6001 shift = 0;
6002 do_shiftd:
6003 ot = dflag + OT_WORD;
6004 modrm = ldub_code(s->pc++);
6005 mod = (modrm >> 6) & 3;
6006 rm = (modrm & 7) | REX_B(s);
6007 reg = ((modrm >> 3) & 7) | rex_r;
6008 if (mod != 3) {
6009 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6010 opreg = OR_TMP0;
6011 } else {
6012 opreg = rm;
6013 }
6014 gen_op_mov_TN_reg(ot, 1, reg);
6015
6016 if (shift) {
6017 val = ldub_code(s->pc++);
6018 tcg_gen_movi_tl(cpu_T3, val);
6019 } else {
6020 tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
6021 }
6022 gen_shiftd_rm_T1_T3(s, ot, opreg, op);
6023 break;
6024
6025 /************************/
6026 /* floats */
6027 case 0xd8 ... 0xdf:
6028 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
6029 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
6030 /* XXX: what to do if illegal op? */
6031 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6032 break;
6033 }
6034 modrm = ldub_code(s->pc++);
6035 mod = (modrm >> 6) & 3;
6036 rm = modrm & 7;
6037 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
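/* op packs the low opcode bits with the modrm reg field: e.g.
   'd8 04 24' (fadd dword [esp]) gives op == 0x00, dispatched below
   as the m32fp arithmetic group with op1 == 0 (fadd). */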
6038 if (mod != 3) {
6039 /* memory op */
6040 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6041 switch(op) {
6042 case 0x00 ... 0x07: /* fxxxs */
6043 case 0x10 ... 0x17: /* fixxxl */
6044 case 0x20 ... 0x27: /* fxxxl */
6045 case 0x30 ... 0x37: /* fixxx */
6046 {
6047 int op1;
6048 op1 = op & 7;
6049
6050 switch(op >> 4) {
6051 case 0:
6052 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6053 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6054 tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
6055 break;
6056 case 1:
6057 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6058 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6059 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
6060 break;
6061 case 2:
6062 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6063 (s->mem_index >> 2) - 1);
6064 tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
6065 break;
6066 case 3:
6067 default:
6068 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
6069 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6070 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
6071 break;
6072 }
6073
6074 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
6075 if (op1 == 3) {
6076 /* fcomp needs pop */
6077 tcg_gen_helper_0_0(helper_fpop);
6078 }
6079 }
6080 break;
6081 case 0x08: /* flds */
6082 case 0x0a: /* fsts */
6083 case 0x0b: /* fstps */
6084 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
6085 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
6086 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
6087 switch(op & 7) {
6088 case 0:
6089 switch(op >> 4) {
6090 case 0:
6091 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6092 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6093 tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
6094 break;
6095 case 1:
6096 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6097 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6098 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
6099 break;
6100 case 2:
6101 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6102 (s->mem_index >> 2) - 1);
6103 tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
6104 break;
6105 case 3:
6106 default:
6107 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
6108 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6109 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
6110 break;
6111 }
6112 break;
6113 case 1:
6114                    /* XXX: the corresponding CPUID bit (SSE3, for FISTTP) must be tested! */
6115 switch(op >> 4) {
6116 case 1:
6117 tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
6118 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6119 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6120 break;
6121 case 2:
6122 tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
6123 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6124 (s->mem_index >> 2) - 1);
6125 break;
6126 case 3:
6127 default:
6128 tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
6129 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6130 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6131 break;
6132 }
6133 tcg_gen_helper_0_0(helper_fpop);
6134 break;
6135 default:
6136 switch(op >> 4) {
6137 case 0:
6138 tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
6139 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6140 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6141 break;
6142 case 1:
6143 tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
6144 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6145 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6146 break;
6147 case 2:
6148 tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
6149 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6150 (s->mem_index >> 2) - 1);
6151 break;
6152 case 3:
6153 default:
6154 tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
6155 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6156 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6157 break;
6158 }
6159 if ((op & 7) == 3)
6160 tcg_gen_helper_0_0(helper_fpop);
6161 break;
6162 }
6163 break;
6164 case 0x0c: /* fldenv mem */
6165 if (s->cc_op != CC_OP_DYNAMIC)
6166 gen_op_set_cc_op(s->cc_op);
6167 gen_jmp_im(pc_start - s->cs_base);
6168 tcg_gen_helper_0_2(helper_fldenv,
6169 cpu_A0, tcg_const_i32(s->dflag));
6170 break;
6171 case 0x0d: /* fldcw mem */
6172 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
6173 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6174 tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
6175 break;
6176 case 0x0e: /* fnstenv mem */
6177 if (s->cc_op != CC_OP_DYNAMIC)
6178 gen_op_set_cc_op(s->cc_op);
6179 gen_jmp_im(pc_start - s->cs_base);
6180 tcg_gen_helper_0_2(helper_fstenv,
6181 cpu_A0, tcg_const_i32(s->dflag));
6182 break;
6183 case 0x0f: /* fnstcw mem */
6184 tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
6185 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6186 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6187 break;
6188 case 0x1d: /* fldt mem */
6189 if (s->cc_op != CC_OP_DYNAMIC)
6190 gen_op_set_cc_op(s->cc_op);
6191 gen_jmp_im(pc_start - s->cs_base);
6192 tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
6193 break;
6194 case 0x1f: /* fstpt mem */
6195 if (s->cc_op != CC_OP_DYNAMIC)
6196 gen_op_set_cc_op(s->cc_op);
6197 gen_jmp_im(pc_start - s->cs_base);
6198 tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
6199 tcg_gen_helper_0_0(helper_fpop);
6200 break;
6201 case 0x2c: /* frstor mem */
6202 if (s->cc_op != CC_OP_DYNAMIC)
6203 gen_op_set_cc_op(s->cc_op);
6204 gen_jmp_im(pc_start - s->cs_base);
6205 tcg_gen_helper_0_2(helper_frstor,
6206 cpu_A0, tcg_const_i32(s->dflag));
6207 break;
6208 case 0x2e: /* fnsave mem */
6209 if (s->cc_op != CC_OP_DYNAMIC)
6210 gen_op_set_cc_op(s->cc_op);
6211 gen_jmp_im(pc_start - s->cs_base);
6212 tcg_gen_helper_0_2(helper_fsave,
6213 cpu_A0, tcg_const_i32(s->dflag));
6214 break;
6215 case 0x2f: /* fnstsw mem */
6216 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
6217 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6218 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6219 break;
6220 case 0x3c: /* fbld */
6221 if (s->cc_op != CC_OP_DYNAMIC)
6222 gen_op_set_cc_op(s->cc_op);
6223 gen_jmp_im(pc_start - s->cs_base);
6224 tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
6225 break;
6226 case 0x3e: /* fbstp */
6227 if (s->cc_op != CC_OP_DYNAMIC)
6228 gen_op_set_cc_op(s->cc_op);
6229 gen_jmp_im(pc_start - s->cs_base);
6230 tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
6231 tcg_gen_helper_0_0(helper_fpop);
6232 break;
6233 case 0x3d: /* fildll */
6234 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6235 (s->mem_index >> 2) - 1);
6236 tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
6237 break;
6238 case 0x3f: /* fistpll */
6239 tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
6240 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6241 (s->mem_index >> 2) - 1);
6242 tcg_gen_helper_0_0(helper_fpop);
6243 break;
6244 default:
6245 goto illegal_op;
6246 }
6247 } else {
6248 /* register float ops */
6249 opreg = rm;
6250
6251 switch(op) {
6252 case 0x08: /* fld sti */
6253 tcg_gen_helper_0_0(helper_fpush);
6254 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
6255 break;
6256 case 0x09: /* fxchg sti */
6257 case 0x29: /* fxchg4 sti, undocumented op */
6258 case 0x39: /* fxchg7 sti, undocumented op */
6259 tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
6260 break;
6261 case 0x0a: /* grp d9/2 */
6262 switch(rm) {
6263 case 0: /* fnop */
6264 /* check exceptions (FreeBSD FPU probe) */
6265 if (s->cc_op != CC_OP_DYNAMIC)
6266 gen_op_set_cc_op(s->cc_op);
6267 gen_jmp_im(pc_start - s->cs_base);
6268 tcg_gen_helper_0_0(helper_fwait);
6269 break;
6270 default:
6271 goto illegal_op;
6272 }
6273 break;
6274 case 0x0c: /* grp d9/4 */
6275 switch(rm) {
6276 case 0: /* fchs */
6277 tcg_gen_helper_0_0(helper_fchs_ST0);
6278 break;
6279 case 1: /* fabs */
6280 tcg_gen_helper_0_0(helper_fabs_ST0);
6281 break;
6282 case 4: /* ftst */
6283 tcg_gen_helper_0_0(helper_fldz_FT0);
6284 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6285 break;
6286 case 5: /* fxam */
6287 tcg_gen_helper_0_0(helper_fxam_ST0);
6288 break;
6289 default:
6290 goto illegal_op;
6291 }
6292 break;
6293 case 0x0d: /* grp d9/5 */
6294 {
6295 switch(rm) {
6296 case 0:
6297 tcg_gen_helper_0_0(helper_fpush);
6298 tcg_gen_helper_0_0(helper_fld1_ST0);
6299 break;
6300 case 1:
6301 tcg_gen_helper_0_0(helper_fpush);
6302 tcg_gen_helper_0_0(helper_fldl2t_ST0);
6303 break;
6304 case 2:
6305 tcg_gen_helper_0_0(helper_fpush);
6306 tcg_gen_helper_0_0(helper_fldl2e_ST0);
6307 break;
6308 case 3:
6309 tcg_gen_helper_0_0(helper_fpush);
6310 tcg_gen_helper_0_0(helper_fldpi_ST0);
6311 break;
6312 case 4:
6313 tcg_gen_helper_0_0(helper_fpush);
6314 tcg_gen_helper_0_0(helper_fldlg2_ST0);
6315 break;
6316 case 5:
6317 tcg_gen_helper_0_0(helper_fpush);
6318 tcg_gen_helper_0_0(helper_fldln2_ST0);
6319 break;
6320 case 6:
6321 tcg_gen_helper_0_0(helper_fpush);
6322 tcg_gen_helper_0_0(helper_fldz_ST0);
6323 break;
6324 default:
6325 goto illegal_op;
6326 }
6327 }
6328 break;
6329 case 0x0e: /* grp d9/6 */
6330 switch(rm) {
6331 case 0: /* f2xm1 */
6332 tcg_gen_helper_0_0(helper_f2xm1);
6333 break;
6334 case 1: /* fyl2x */
6335 tcg_gen_helper_0_0(helper_fyl2x);
6336 break;
6337 case 2: /* fptan */
6338 tcg_gen_helper_0_0(helper_fptan);
6339 break;
6340 case 3: /* fpatan */
6341 tcg_gen_helper_0_0(helper_fpatan);
6342 break;
6343 case 4: /* fxtract */
6344 tcg_gen_helper_0_0(helper_fxtract);
6345 break;
6346 case 5: /* fprem1 */
6347 tcg_gen_helper_0_0(helper_fprem1);
6348 break;
6349 case 6: /* fdecstp */
6350 tcg_gen_helper_0_0(helper_fdecstp);
6351 break;
6352 default:
6353 case 7: /* fincstp */
6354 tcg_gen_helper_0_0(helper_fincstp);
6355 break;
6356 }
6357 break;
6358 case 0x0f: /* grp d9/7 */
6359 switch(rm) {
6360 case 0: /* fprem */
6361 tcg_gen_helper_0_0(helper_fprem);
6362 break;
6363 case 1: /* fyl2xp1 */
6364 tcg_gen_helper_0_0(helper_fyl2xp1);
6365 break;
6366 case 2: /* fsqrt */
6367 tcg_gen_helper_0_0(helper_fsqrt);
6368 break;
6369 case 3: /* fsincos */
6370 tcg_gen_helper_0_0(helper_fsincos);
6371 break;
6372 case 5: /* fscale */
6373 tcg_gen_helper_0_0(helper_fscale);
6374 break;
6375 case 4: /* frndint */
6376 tcg_gen_helper_0_0(helper_frndint);
6377 break;
6378 case 6: /* fsin */
6379 tcg_gen_helper_0_0(helper_fsin);
6380 break;
6381 default:
6382 case 7: /* fcos */
6383 tcg_gen_helper_0_0(helper_fcos);
6384 break;
6385 }
6386 break;
6387 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
6388 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
6389 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
6390 {
6391 int op1;
6392
6393 op1 = op & 7;
6394 if (op >= 0x20) {
6395 tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
6396 if (op >= 0x30)
6397 tcg_gen_helper_0_0(helper_fpop);
6398 } else {
6399 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6400 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
6401 }
6402 }
6403 break;
6404 case 0x02: /* fcom */
6405 case 0x22: /* fcom2, undocumented op */
6406 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6407 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6408 break;
6409 case 0x03: /* fcomp */
6410 case 0x23: /* fcomp3, undocumented op */
6411 case 0x32: /* fcomp5, undocumented op */
6412 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6413 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6414 tcg_gen_helper_0_0(helper_fpop);
6415 break;
6416 case 0x15: /* da/5 */
6417 switch(rm) {
6418 case 1: /* fucompp */
6419 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
6420 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6421 tcg_gen_helper_0_0(helper_fpop);
6422 tcg_gen_helper_0_0(helper_fpop);
6423 break;
6424 default:
6425 goto illegal_op;
6426 }
6427 break;
6428 case 0x1c:
6429 switch(rm) {
6430 case 0: /* feni (287 only, just do nop here) */
6431 break;
6432 case 1: /* fdisi (287 only, just do nop here) */
6433 break;
6434 case 2: /* fclex */
6435 tcg_gen_helper_0_0(helper_fclex);
6436 break;
6437 case 3: /* fninit */
6438 tcg_gen_helper_0_0(helper_fninit);
6439 break;
6440 case 4: /* fsetpm (287 only, just do nop here) */
6441 break;
6442 default:
6443 goto illegal_op;
6444 }
6445 break;
6446 case 0x1d: /* fucomi */
6447 if (s->cc_op != CC_OP_DYNAMIC)
6448 gen_op_set_cc_op(s->cc_op);
6449 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6450 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
6451 s->cc_op = CC_OP_EFLAGS;
6452 break;
6453 case 0x1e: /* fcomi */
6454 if (s->cc_op != CC_OP_DYNAMIC)
6455 gen_op_set_cc_op(s->cc_op);
6456 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6457 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
6458 s->cc_op = CC_OP_EFLAGS;
6459 break;
6460 case 0x28: /* ffree sti */
6461 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
6462 break;
6463 case 0x2a: /* fst sti */
6464 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
6465 break;
6466 case 0x2b: /* fstp sti */
6467 case 0x0b: /* fstp1 sti, undocumented op */
6468 case 0x3a: /* fstp8 sti, undocumented op */
6469 case 0x3b: /* fstp9 sti, undocumented op */
6470 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
6471 tcg_gen_helper_0_0(helper_fpop);
6472 break;
6473 case 0x2c: /* fucom st(i) */
6474 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6475 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6476 break;
6477 case 0x2d: /* fucomp st(i) */
6478 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6479 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6480 tcg_gen_helper_0_0(helper_fpop);
6481 break;
6482 case 0x33: /* de/3 */
6483 switch(rm) {
6484 case 1: /* fcompp */
6485 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
6486 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6487 tcg_gen_helper_0_0(helper_fpop);
6488 tcg_gen_helper_0_0(helper_fpop);
6489 break;
6490 default:
6491 goto illegal_op;
6492 }
6493 break;
6494 case 0x38: /* ffreep sti, undocumented op */
6495 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
6496 tcg_gen_helper_0_0(helper_fpop);
6497 break;
6498 case 0x3c: /* df/4 */
6499 switch(rm) {
6500 case 0:
6501 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
6502 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6503 gen_op_mov_reg_T0(OT_WORD, R_EAX);
6504 break;
6505 default:
6506 goto illegal_op;
6507 }
6508 break;
6509 case 0x3d: /* fucomip */
6510 if (s->cc_op != CC_OP_DYNAMIC)
6511 gen_op_set_cc_op(s->cc_op);
6512 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6513 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
6514 tcg_gen_helper_0_0(helper_fpop);
6515 s->cc_op = CC_OP_EFLAGS;
6516 break;
6517 case 0x3e: /* fcomip */
6518 if (s->cc_op != CC_OP_DYNAMIC)
6519 gen_op_set_cc_op(s->cc_op);
6520 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6521 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
6522 tcg_gen_helper_0_0(helper_fpop);
6523 s->cc_op = CC_OP_EFLAGS;
6524 break;
6525 case 0x10 ... 0x13: /* fcmovxx */
6526 case 0x18 ... 0x1b:
6527 {
6528 int op1, l1;
6529 static const uint8_t fcmov_cc[8] = {
6530 (JCC_B << 1),
6531 (JCC_Z << 1),
6532 (JCC_BE << 1),
6533 (JCC_P << 1),
6534 };
6535 op1 = fcmov_cc[op & 3] | (((op >> 3) & 1) ^ 1);
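                    /* fcmov_cc holds jcc condition codes shifted left by one;
                       bit 0 of op1 negates the condition, so the branch below
                       skips the fmov exactly when the fcmov condition fails */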
6536 l1 = gen_new_label();
6537 gen_jcc1(s, s->cc_op, op1, l1);
6538 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
6539 gen_set_label(l1);
6540 }
6541 break;
6542 default:
6543 goto illegal_op;
6544 }
6545 }
6546 break;
6547 /************************/
6548 /* string ops */
6549
6550 case 0xa4: /* movsS */
6551 case 0xa5:
6552 if ((b & 1) == 0)
6553 ot = OT_BYTE;
6554 else
6555 ot = dflag + OT_WORD;
6556
6557 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6558 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6559 } else {
6560 gen_movs(s, ot);
6561 }
6562 break;
6563
6564 case 0xaa: /* stosS */
6565 case 0xab:
6566 if ((b & 1) == 0)
6567 ot = OT_BYTE;
6568 else
6569 ot = dflag + OT_WORD;
6570
6571 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6572 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6573 } else {
6574 gen_stos(s, ot);
6575 }
6576 break;
6577 case 0xac: /* lodsS */
6578 case 0xad:
6579 if ((b & 1) == 0)
6580 ot = OT_BYTE;
6581 else
6582 ot = dflag + OT_WORD;
6583 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6584 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6585 } else {
6586 gen_lods(s, ot);
6587 }
6588 break;
6589 case 0xae: /* scasS */
6590 case 0xaf:
6591 if ((b & 1) == 0)
6592 ot = OT_BYTE;
6593 else
6594 ot = dflag + OT_WORD;
6595 if (prefixes & PREFIX_REPNZ) {
6596 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6597 } else if (prefixes & PREFIX_REPZ) {
6598 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6599 } else {
6600 gen_scas(s, ot);
6601 s->cc_op = CC_OP_SUBB + ot;
6602 }
6603 break;
6604
6605 case 0xa6: /* cmpsS */
6606 case 0xa7:
6607 if ((b & 1) == 0)
6608 ot = OT_BYTE;
6609 else
6610 ot = dflag + OT_WORD;
6611 if (prefixes & PREFIX_REPNZ) {
6612 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6613 } else if (prefixes & PREFIX_REPZ) {
6614 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6615 } else {
6616 gen_cmps(s, ot);
6617 s->cc_op = CC_OP_SUBB + ot;
6618 }
6619 break;
6620 case 0x6c: /* insS */
6621 case 0x6d:
6622 if ((b & 1) == 0)
6623 ot = OT_BYTE;
6624 else
6625 ot = dflag ? OT_LONG : OT_WORD;
6626 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6627 gen_op_andl_T0_ffff();
6628 gen_check_io(s, ot, pc_start - s->cs_base,
6629 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
6630 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6631 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6632 } else {
6633 gen_ins(s, ot);
6634 if (use_icount) {
6635 gen_jmp(s, s->pc - s->cs_base);
6636 }
6637 }
6638 break;
6639 case 0x6e: /* outsS */
6640 case 0x6f:
6641 if ((b & 1) == 0)
6642 ot = OT_BYTE;
6643 else
6644 ot = dflag ? OT_LONG : OT_WORD;
6645 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6646 gen_op_andl_T0_ffff();
6647 gen_check_io(s, ot, pc_start - s->cs_base,
6648 svm_is_rep(prefixes) | 4);
6649 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6650 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6651 } else {
6652 gen_outs(s, ot);
6653 if (use_icount) {
6654 gen_jmp(s, s->pc - s->cs_base);
6655 }
6656 }
6657 break;
6658
6659 /************************/
6660 /* port I/O */
6661
6662 case 0xe4:
6663 case 0xe5:
6664 if ((b & 1) == 0)
6665 ot = OT_BYTE;
6666 else
6667 ot = dflag ? OT_LONG : OT_WORD;
6668 val = ldub_code(s->pc++);
6669 gen_op_movl_T0_im(val);
6670 gen_check_io(s, ot, pc_start - s->cs_base,
6671 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
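        /* with icount enabled, I/O accesses must be bracketed by
           gen_io_start()/gen_io_end() and the TB terminated right after,
           so that the instruction count stays exact */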
6672 if (use_icount)
6673 gen_io_start();
6674 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6675 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
6676 gen_op_mov_reg_T1(ot, R_EAX);
6677 if (use_icount) {
6678 gen_io_end();
6679 gen_jmp(s, s->pc - s->cs_base);
6680 }
6681 break;
6682 case 0xe6:
6683 case 0xe7:
6684 if ((b & 1) == 0)
6685 ot = OT_BYTE;
6686 else
6687 ot = dflag ? OT_LONG : OT_WORD;
6688 val = ldub_code(s->pc++);
6689 gen_op_movl_T0_im(val);
6690 gen_check_io(s, ot, pc_start - s->cs_base,
6691 svm_is_rep(prefixes));
6692#ifdef VBOX /* bird: Linux writes to this port to delay I/O. */
6693 if (val == 0x80)
6694 break;
6695#endif /* VBOX */
6696 gen_op_mov_TN_reg(ot, 1, R_EAX);
6697
6698 if (use_icount)
6699 gen_io_start();
6700 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6701 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
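        /* I/O port numbers are only 16 bits wide */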
6702 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6703 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6704 if (use_icount) {
6705 gen_io_end();
6706 gen_jmp(s, s->pc - s->cs_base);
6707 }
6708 break;
6709 case 0xec:
6710 case 0xed:
6711 if ((b & 1) == 0)
6712 ot = OT_BYTE;
6713 else
6714 ot = dflag ? OT_LONG : OT_WORD;
6715 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6716 gen_op_andl_T0_ffff();
6717 gen_check_io(s, ot, pc_start - s->cs_base,
6718 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6719 if (use_icount)
6720 gen_io_start();
6721 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6722 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
6723 gen_op_mov_reg_T1(ot, R_EAX);
6724 if (use_icount) {
6725 gen_io_end();
6726 gen_jmp(s, s->pc - s->cs_base);
6727 }
6728 break;
6729 case 0xee:
6730 case 0xef:
6731 if ((b & 1) == 0)
6732 ot = OT_BYTE;
6733 else
6734 ot = dflag ? OT_LONG : OT_WORD;
6735 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6736 gen_op_andl_T0_ffff();
6737 gen_check_io(s, ot, pc_start - s->cs_base,
6738 svm_is_rep(prefixes));
6739 gen_op_mov_TN_reg(ot, 1, R_EAX);
6740
6741 if (use_icount)
6742 gen_io_start();
6743 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6744 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6745 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6746 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6747 if (use_icount) {
6748 gen_io_end();
6749 gen_jmp(s, s->pc - s->cs_base);
6750 }
6751 break;
6752
6753 /************************/
6754 /* control */
6755 case 0xc2: /* ret im */
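        /* near RET imm16: pop the return address, then release imm16 extra
           bytes of stack; 2 << dflag is the size of the popped EIP */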
6756 val = ldsw_code(s->pc);
6757 s->pc += 2;
6758 gen_pop_T0(s);
6759 if (CODE64(s) && s->dflag)
6760 s->dflag = 2;
6761 gen_stack_update(s, val + (2 << s->dflag));
6762 if (s->dflag == 0)
6763 gen_op_andl_T0_ffff();
6764 gen_op_jmp_T0();
6765 gen_eob(s);
6766 break;
6767 case 0xc3: /* ret */
6768 gen_pop_T0(s);
6769 gen_pop_update(s);
6770 if (s->dflag == 0)
6771 gen_op_andl_T0_ffff();
6772 gen_op_jmp_T0();
6773 gen_eob(s);
6774 break;
6775 case 0xca: /* lret im */
6776 val = ldsw_code(s->pc);
6777 s->pc += 2;
6778 do_lret:
6779 if (s->pe && !s->vm86) {
6780 if (s->cc_op != CC_OP_DYNAMIC)
6781 gen_op_set_cc_op(s->cc_op);
6782 gen_jmp_im(pc_start - s->cs_base);
6783 tcg_gen_helper_0_2(helper_lret_protected,
6784 tcg_const_i32(s->dflag),
6785 tcg_const_i32(val));
6786 } else {
6787 gen_stack_A0(s);
6788 /* pop offset */
6789 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6790 if (s->dflag == 0)
6791 gen_op_andl_T0_ffff();
6792 /* NOTE: keeping EIP updated is not a problem in case of
6793 exception */
6794 gen_op_jmp_T0();
6795 /* pop selector */
6796 gen_op_addl_A0_im(2 << s->dflag);
6797 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6798 gen_op_movl_seg_T0_vm(R_CS);
6799 /* add stack offset */
6800 gen_stack_update(s, val + (4 << s->dflag));
6801 }
6802 gen_eob(s);
6803 break;
6804 case 0xcb: /* lret */
6805 val = 0;
6806 goto do_lret;
6807 case 0xcf: /* iret */
6808 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET);
6809 if (!s->pe) {
6810 /* real mode */
6811 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6812 s->cc_op = CC_OP_EFLAGS;
6813 } else if (s->vm86) {
6814#ifdef VBOX
6815 if (s->iopl != 3 && (!s->vme || s->dflag)) {
6816#else
6817 if (s->iopl != 3) {
6818#endif
6819 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6820 } else {
6821 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6822 s->cc_op = CC_OP_EFLAGS;
6823 }
6824 } else {
6825 if (s->cc_op != CC_OP_DYNAMIC)
6826 gen_op_set_cc_op(s->cc_op);
6827 gen_jmp_im(pc_start - s->cs_base);
6828 tcg_gen_helper_0_2(helper_iret_protected,
6829 tcg_const_i32(s->dflag),
6830 tcg_const_i32(s->pc - s->cs_base));
6831 s->cc_op = CC_OP_EFLAGS;
6832 }
6833 gen_eob(s);
6834 break;
6835 case 0xe8: /* call im */
6836 {
6837 if (dflag)
6838 tval = (int32_t)insn_get(s, OT_LONG);
6839 else
6840 tval = (int16_t)insn_get(s, OT_WORD);
6841 next_eip = s->pc - s->cs_base;
6842 tval += next_eip;
6843 if (s->dflag == 0)
6844 tval &= 0xffff;
6845 gen_movtl_T0_im(next_eip);
6846 gen_push_T0(s);
6847 gen_jmp(s, tval);
6848 }
6849 break;
6850 case 0x9a: /* lcall im */
6851 {
6852 unsigned int selector, offset;
6853
6854 if (CODE64(s))
6855 goto illegal_op;
6856 ot = dflag ? OT_LONG : OT_WORD;
6857 offset = insn_get(s, ot);
6858 selector = insn_get(s, OT_WORD);
6859
6860 gen_op_movl_T0_im(selector);
6861 gen_op_movl_T1_imu(offset);
6862 }
6863 goto do_lcall;
6864 case 0xe9: /* jmp im */
6865 if (dflag)
6866 tval = (int32_t)insn_get(s, OT_LONG);
6867 else
6868 tval = (int16_t)insn_get(s, OT_WORD);
6869 tval += s->pc - s->cs_base;
6870 if (s->dflag == 0)
6871 tval &= 0xffff;
6872 gen_jmp(s, tval);
6873 break;
6874 case 0xea: /* ljmp im */
6875 {
6876 unsigned int selector, offset;
6877
6878 if (CODE64(s))
6879 goto illegal_op;
6880 ot = dflag ? OT_LONG : OT_WORD;
6881 offset = insn_get(s, ot);
6882 selector = insn_get(s, OT_WORD);
6883
6884 gen_op_movl_T0_im(selector);
6885 gen_op_movl_T1_imu(offset);
6886 }
6887 goto do_ljmp;
6888 case 0xeb: /* jmp Jb */
6889 tval = (int8_t)insn_get(s, OT_BYTE);
6890 tval += s->pc - s->cs_base;
6891 if (s->dflag == 0)
6892 tval &= 0xffff;
6893 gen_jmp(s, tval);
6894 break;
6895 case 0x70 ... 0x7f: /* jcc Jb */
6896 tval = (int8_t)insn_get(s, OT_BYTE);
6897 goto do_jcc;
6898 case 0x180 ... 0x18f: /* jcc Jv */
6899 if (dflag) {
6900 tval = (int32_t)insn_get(s, OT_LONG);
6901 } else {
6902 tval = (int16_t)insn_get(s, OT_WORD);
6903 }
6904 do_jcc:
6905 next_eip = s->pc - s->cs_base;
6906 tval += next_eip;
6907 if (s->dflag == 0)
6908 tval &= 0xffff;
6909 gen_jcc(s, b, tval, next_eip);
6910 break;
6911
6912 case 0x190 ... 0x19f: /* setcc Gv */
6913 modrm = ldub_code(s->pc++);
6914 gen_setcc(s, b);
6915 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
6916 break;
6917 case 0x140 ... 0x14f: /* cmov Gv, Ev */
6918 {
6919 int l1;
6920 TCGv t0;
6921
6922 ot = dflag + OT_WORD;
6923 modrm = ldub_code(s->pc++);
6924 reg = ((modrm >> 3) & 7) | rex_r;
6925 mod = (modrm >> 6) & 3;
6926 t0 = tcg_temp_local_new(TCG_TYPE_TL);
6927 if (mod != 3) {
6928 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6929 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
6930 } else {
6931 rm = (modrm & 7) | REX_B(s);
6932 gen_op_mov_v_reg(ot, t0, rm);
6933 }
6934#ifdef TARGET_X86_64
6935 if (ot == OT_LONG) {
6936                /* XXX: Intel-specific behaviour? A 32-bit CMOV always zero-extends the destination in 64-bit mode, even when the condition is false. */
6937 l1 = gen_new_label();
6938 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6939 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
6940 gen_set_label(l1);
6941 tcg_gen_movi_tl(cpu_tmp0, 0);
6942 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
6943 } else
6944#endif
6945 {
6946 l1 = gen_new_label();
6947 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6948 gen_op_mov_reg_v(ot, reg, t0);
6949 gen_set_label(l1);
6950 }
6951 tcg_temp_free(t0);
6952 }
6953 break;
6954
6955 /************************/
6956 /* flags */
6957 case 0x9c: /* pushf */
6958 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF);
6959#ifdef VBOX
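        /* CR4.VME: in V86 mode with IOPL < 3, a 16-bit PUSHF is legal and
           reads VIF instead of IF; a 32-bit operand size still faults */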
6960 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
6961#else
6962 if (s->vm86 && s->iopl != 3) {
6963#endif
6964 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6965 } else {
6966 if (s->cc_op != CC_OP_DYNAMIC)
6967 gen_op_set_cc_op(s->cc_op);
6968#ifdef VBOX
6969 if (s->vm86 && s->vme && s->iopl != 3)
6970 tcg_gen_helper_1_0(helper_read_eflags_vme, cpu_T[0]);
6971 else
6972#endif
6973 tcg_gen_helper_1_0(helper_read_eflags, cpu_T[0]);
6974 gen_push_T0(s);
6975 }
6976 break;
6977 case 0x9d: /* popf */
6978 gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF);
6979#ifdef VBOX
6980 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
6981#else
6982 if (s->vm86 && s->iopl != 3) {
6983#endif
6984 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6985 } else {
6986 gen_pop_T0(s);
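            /* the set of EFLAGS bits POPF may change depends on privilege:
               CPL 0 may also write IOPL, CPL <= IOPL may write IF, and
               anyone may write TF/AC/ID/NT */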
6987 if (s->cpl == 0) {
6988 if (s->dflag) {
6989 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6990 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
6991 } else {
6992 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6993 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
6994 }
6995 } else {
6996 if (s->cpl <= s->iopl) {
6997 if (s->dflag) {
6998 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
6999 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
7000 } else {
7001 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7002 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
7003 }
7004 } else {
7005 if (s->dflag) {
7006 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7007 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
7008 } else {
7009#ifdef VBOX
7010 if (s->vm86 && s->vme)
7011 tcg_gen_helper_0_1(helper_write_eflags_vme, cpu_T[0]);
7012 else
7013#endif
7014 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7015 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
7016 }
7017 }
7018 }
7019 gen_pop_update(s);
7020 s->cc_op = CC_OP_EFLAGS;
7021 /* abort translation because TF flag may change */
7022 gen_jmp_im(s->pc - s->cs_base);
7023 gen_eob(s);
7024 }
7025 break;
7026 case 0x9e: /* sahf */
7027 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
7028 goto illegal_op;
7029 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
7030 if (s->cc_op != CC_OP_DYNAMIC)
7031 gen_op_set_cc_op(s->cc_op);
7032 gen_compute_eflags(cpu_cc_src);
7033 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
7034 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
7035 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
7036 s->cc_op = CC_OP_EFLAGS;
7037 break;
7038 case 0x9f: /* lahf */
7039 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
7040 goto illegal_op;
7041 if (s->cc_op != CC_OP_DYNAMIC)
7042 gen_op_set_cc_op(s->cc_op);
7043 gen_compute_eflags(cpu_T[0]);
7044 /* Note: gen_compute_eflags() only gives the condition codes */
7045 tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);
7046 gen_op_mov_reg_T0(OT_BYTE, R_AH);
7047 break;
7048 case 0xf5: /* cmc */
7049 if (s->cc_op != CC_OP_DYNAMIC)
7050 gen_op_set_cc_op(s->cc_op);
7051 gen_compute_eflags(cpu_cc_src);
7052 tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
7053 s->cc_op = CC_OP_EFLAGS;
7054 break;
7055 case 0xf8: /* clc */
7056 if (s->cc_op != CC_OP_DYNAMIC)
7057 gen_op_set_cc_op(s->cc_op);
7058 gen_compute_eflags(cpu_cc_src);
7059 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
7060 s->cc_op = CC_OP_EFLAGS;
7061 break;
7062 case 0xf9: /* stc */
7063 if (s->cc_op != CC_OP_DYNAMIC)
7064 gen_op_set_cc_op(s->cc_op);
7065 gen_compute_eflags(cpu_cc_src);
7066 tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
7067 s->cc_op = CC_OP_EFLAGS;
7068 break;
7069 case 0xfc: /* cld */
7070 tcg_gen_movi_i32(cpu_tmp2_i32, 1);
7071 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
7072 break;
7073 case 0xfd: /* std */
7074 tcg_gen_movi_i32(cpu_tmp2_i32, -1);
7075 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
7076 break;
7077
7078 /************************/
7079 /* bit operations */
7080 case 0x1ba: /* bt/bts/btr/btc Gv, im */
7081 ot = dflag + OT_WORD;
7082 modrm = ldub_code(s->pc++);
7083 op = (modrm >> 3) & 7;
7084 mod = (modrm >> 6) & 3;
7085 rm = (modrm & 7) | REX_B(s);
7086 if (mod != 3) {
7087 s->rip_offset = 1;
7088 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7089 gen_op_ld_T0_A0(ot + s->mem_index);
7090 } else {
7091 gen_op_mov_TN_reg(ot, 0, rm);
7092 }
7093 /* load shift */
7094 val = ldub_code(s->pc++);
7095 gen_op_movl_T1_im(val);
7096 if (op < 4)
7097 goto illegal_op;
7098 op -= 4;
7099 goto bt_op;
7100 case 0x1a3: /* bt Gv, Ev */
7101 op = 0;
7102 goto do_btx;
7103 case 0x1ab: /* bts */
7104 op = 1;
7105 goto do_btx;
7106 case 0x1b3: /* btr */
7107 op = 2;
7108 goto do_btx;
7109 case 0x1bb: /* btc */
7110 op = 3;
7111 do_btx:
7112 ot = dflag + OT_WORD;
7113 modrm = ldub_code(s->pc++);
7114 reg = ((modrm >> 3) & 7) | rex_r;
7115 mod = (modrm >> 6) & 3;
7116 rm = (modrm & 7) | REX_B(s);
7117 gen_op_mov_TN_reg(OT_LONG, 1, reg);
7118 if (mod != 3) {
7119 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7120            /* specific case: the register bit index may point outside the
7121               operand, so fold (bit_index >> (3 + ot)) words into A0 as a
7122               displacement */
7121 gen_exts(ot, cpu_T[1]);
7122 tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
7123 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
7124 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
7125 gen_op_ld_T0_A0(ot + s->mem_index);
7126 } else {
7127 gen_op_mov_TN_reg(ot, 0, rm);
7128 }
7129 bt_op:
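        /* common tail for BT/BTS/BTR/BTC: T1 is reduced to the bit index
           within the operand; op = 0..3 selects test/set/reset/complement */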
7130 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
7131 switch(op) {
7132 case 0:
7133 tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
7134 tcg_gen_movi_tl(cpu_cc_dst, 0);
7135 break;
7136 case 1:
7137 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7138 tcg_gen_movi_tl(cpu_tmp0, 1);
7139 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7140 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7141 break;
7142 case 2:
7143 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7144 tcg_gen_movi_tl(cpu_tmp0, 1);
7145 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7146 tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
7147 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7148 break;
7149 default:
7150 case 3:
7151 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7152 tcg_gen_movi_tl(cpu_tmp0, 1);
7153 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7154 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7155 break;
7156 }
7157 s->cc_op = CC_OP_SARB + ot;
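        /* flags are reported through the SAR scheme: CF is bit 0 of
           cpu_cc_src, i.e. the value of the tested bit */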
7158 if (op != 0) {
7159 if (mod != 3)
7160 gen_op_st_T0_A0(ot + s->mem_index);
7161 else
7162 gen_op_mov_reg_T0(ot, rm);
7163 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
7164 tcg_gen_movi_tl(cpu_cc_dst, 0);
7165 }
7166 break;
7167 case 0x1bc: /* bsf */
7168 case 0x1bd: /* bsr */
7169 {
7170 int label1;
7171 TCGv t0;
7172
7173 ot = dflag + OT_WORD;
7174 modrm = ldub_code(s->pc++);
7175 reg = ((modrm >> 3) & 7) | rex_r;
7176 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
7177 gen_extu(ot, cpu_T[0]);
7178 label1 = gen_new_label();
7179 tcg_gen_movi_tl(cpu_cc_dst, 0);
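            /* cc_dst stays 0 (ZF set) when the source is zero and the
               destination register is left unmodified; otherwise it is set
               to 1 below, clearing ZF */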
7180 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7181 tcg_gen_mov_tl(t0, cpu_T[0]);
7182 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, label1);
7183 if (b & 1) {
7184 tcg_gen_helper_1_1(helper_bsr, cpu_T[0], t0);
7185 } else {
7186 tcg_gen_helper_1_1(helper_bsf, cpu_T[0], t0);
7187 }
7188 gen_op_mov_reg_T0(ot, reg);
7189 tcg_gen_movi_tl(cpu_cc_dst, 1);
7190 gen_set_label(label1);
7191 tcg_gen_discard_tl(cpu_cc_src);
7192 s->cc_op = CC_OP_LOGICB + ot;
7193 tcg_temp_free(t0);
7194 }
7195 break;
7196 /************************/
7197 /* bcd */
7198 case 0x27: /* daa */
7199 if (CODE64(s))
7200 goto illegal_op;
7201 if (s->cc_op != CC_OP_DYNAMIC)
7202 gen_op_set_cc_op(s->cc_op);
7203 tcg_gen_helper_0_0(helper_daa);
7204 s->cc_op = CC_OP_EFLAGS;
7205 break;
7206 case 0x2f: /* das */
7207 if (CODE64(s))
7208 goto illegal_op;
7209 if (s->cc_op != CC_OP_DYNAMIC)
7210 gen_op_set_cc_op(s->cc_op);
7211 tcg_gen_helper_0_0(helper_das);
7212 s->cc_op = CC_OP_EFLAGS;
7213 break;
7214 case 0x37: /* aaa */
7215 if (CODE64(s))
7216 goto illegal_op;
7217 if (s->cc_op != CC_OP_DYNAMIC)
7218 gen_op_set_cc_op(s->cc_op);
7219 tcg_gen_helper_0_0(helper_aaa);
7220 s->cc_op = CC_OP_EFLAGS;
7221 break;
7222 case 0x3f: /* aas */
7223 if (CODE64(s))
7224 goto illegal_op;
7225 if (s->cc_op != CC_OP_DYNAMIC)
7226 gen_op_set_cc_op(s->cc_op);
7227 tcg_gen_helper_0_0(helper_aas);
7228 s->cc_op = CC_OP_EFLAGS;
7229 break;
7230 case 0xd4: /* aam */
7231 if (CODE64(s))
7232 goto illegal_op;
7233 val = ldub_code(s->pc++);
7234 if (val == 0) {
7235 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
7236 } else {
7237 tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
7238 s->cc_op = CC_OP_LOGICB;
7239 }
7240 break;
7241 case 0xd5: /* aad */
7242 if (CODE64(s))
7243 goto illegal_op;
7244 val = ldub_code(s->pc++);
7245 tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
7246 s->cc_op = CC_OP_LOGICB;
7247 break;
7248 /************************/
7249 /* misc */
7250 case 0x90: /* nop */
7251 /* XXX: xchg + rex handling */
7252 /* XXX: correct lock test for all insn */
7253 if (prefixes & PREFIX_LOCK)
7254 goto illegal_op;
7255 if (prefixes & PREFIX_REPZ) {
7256 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
7257 }
7258 break;
7259 case 0x9b: /* fwait */
7260 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
7261 (HF_MP_MASK | HF_TS_MASK)) {
7262 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7263 } else {
7264 if (s->cc_op != CC_OP_DYNAMIC)
7265 gen_op_set_cc_op(s->cc_op);
7266 gen_jmp_im(pc_start - s->cs_base);
7267 tcg_gen_helper_0_0(helper_fwait);
7268 }
7269 break;
7270 case 0xcc: /* int3 */
7271#ifdef VBOX
7272 if (s->vm86 && s->iopl != 3 && !s->vme) {
7273 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7274 } else
7275#endif
7276 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
7277 break;
7278 case 0xcd: /* int N */
7279 val = ldub_code(s->pc++);
7280#ifdef VBOX
7281 if (s->vm86 && s->iopl != 3 && !s->vme) {
7282#else
7283 if (s->vm86 && s->iopl != 3) {
7284#endif
7285 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7286 } else {
7287 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
7288 }
7289 break;
7290 case 0xce: /* into */
7291 if (CODE64(s))
7292 goto illegal_op;
7293 if (s->cc_op != CC_OP_DYNAMIC)
7294 gen_op_set_cc_op(s->cc_op);
7295 gen_jmp_im(pc_start - s->cs_base);
7296 tcg_gen_helper_0_1(helper_into, tcg_const_i32(s->pc - pc_start));
7297 break;
7298 case 0xf1: /* icebp (undocumented, exits to external debugger) */
7299 gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP);
7300#if 1
7301 gen_debug(s, pc_start - s->cs_base);
7302#else
7303 /* start debug */
7304 tb_flush(cpu_single_env);
7305 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
7306#endif
7307 break;
7308 case 0xfa: /* cli */
7309 if (!s->vm86) {
7310 if (s->cpl <= s->iopl) {
7311 tcg_gen_helper_0_0(helper_cli);
7312 } else {
7313 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7314 }
7315 } else {
7316 if (s->iopl == 3) {
7317 tcg_gen_helper_0_0(helper_cli);
7318#ifdef VBOX
7319 } else if (s->iopl != 3 && s->vme) {
7320 tcg_gen_helper_0_0(helper_cli_vme);
7321#endif
7322 } else {
7323 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7324 }
7325 }
7326 break;
7327 case 0xfb: /* sti */
7328 if (!s->vm86) {
7329 if (s->cpl <= s->iopl) {
7330 gen_sti:
7331 tcg_gen_helper_0_0(helper_sti);
7332                /* interrupts are re-enabled only after the insn following STI */
7333                /* if several consecutive insns inhibit interrupts, only the
7334                   _first_ one sets the inhibit flag */
7335 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
7336 tcg_gen_helper_0_0(helper_set_inhibit_irq);
7337 /* give a chance to handle pending irqs */
7338 gen_jmp_im(s->pc - s->cs_base);
7339 gen_eob(s);
7340 } else {
7341 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7342 }
7343 } else {
7344 if (s->iopl == 3) {
7345 goto gen_sti;
7346#ifdef VBOX
7347 } else if (s->iopl != 3 && s->vme) {
7348 tcg_gen_helper_0_0(helper_sti_vme);
7349 /* give a chance to handle pending irqs */
7350 gen_jmp_im(s->pc - s->cs_base);
7351 gen_eob(s);
7352#endif
7353 } else {
7354 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7355 }
7356 }
7357 break;
7358 case 0x62: /* bound */
7359 if (CODE64(s))
7360 goto illegal_op;
7361 ot = dflag ? OT_LONG : OT_WORD;
7362 modrm = ldub_code(s->pc++);
7363 reg = (modrm >> 3) & 7;
7364 mod = (modrm >> 6) & 3;
7365 if (mod == 3)
7366 goto illegal_op;
7367 gen_op_mov_TN_reg(ot, 0, reg);
7368 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7369 gen_jmp_im(pc_start - s->cs_base);
7370 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7371 if (ot == OT_WORD)
7372 tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
7373 else
7374 tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
7375 break;
7376 case 0x1c8 ... 0x1cf: /* bswap reg */
7377 reg = (b & 7) | REX_B(s);
7378#ifdef TARGET_X86_64
7379 if (dflag == 2) {
7380 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
7381 tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
7382 gen_op_mov_reg_T0(OT_QUAD, reg);
7383 } else
7384 {
7385 TCGv tmp0;
7386 gen_op_mov_TN_reg(OT_LONG, 0, reg);
7387
7388 tmp0 = tcg_temp_new(TCG_TYPE_I32);
7389 tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
7390 tcg_gen_bswap_i32(tmp0, tmp0);
7391 tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
7392 gen_op_mov_reg_T0(OT_LONG, reg);
7393 }
7394#else
7395 {
7396 gen_op_mov_TN_reg(OT_LONG, 0, reg);
7397 tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
7398 gen_op_mov_reg_T0(OT_LONG, reg);
7399 }
7400#endif
7401 break;
7402 case 0xd6: /* salc */
7403 if (CODE64(s))
7404 goto illegal_op;
7405 if (s->cc_op != CC_OP_DYNAMIC)
7406 gen_op_set_cc_op(s->cc_op);
7407 gen_compute_eflags_c(cpu_T[0]);
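        /* SALC: AL = CF ? 0xff : 0x00, computed as AL = -CF */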
7408 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
7409 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
7410 break;
7411 case 0xe0: /* loopnz */
7412 case 0xe1: /* loopz */
7413 case 0xe2: /* loop */
7414 case 0xe3: /* jecxz */
7415 {
7416 int l1, l2, l3;
7417
7418 tval = (int8_t)insn_get(s, OT_BYTE);
7419 next_eip = s->pc - s->cs_base;
7420 tval += next_eip;
7421 if (s->dflag == 0)
7422 tval &= 0xffff;
7423
7424 l1 = gen_new_label();
7425 l2 = gen_new_label();
7426 l3 = gen_new_label();
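            /* l1: branch taken (EIP = tval), l2: common end of block,
               l3: fall-through (EIP = next_eip) for loopnz/loopz */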
7427 b &= 3;
7428 switch(b) {
7429 case 0: /* loopnz */
7430 case 1: /* loopz */
7431 if (s->cc_op != CC_OP_DYNAMIC)
7432 gen_op_set_cc_op(s->cc_op);
7433 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7434 gen_op_jz_ecx(s->aflag, l3);
7435 gen_compute_eflags(cpu_tmp0);
7436 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
7437 if (b == 0) {
7438 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
7439 } else {
7440 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, l1);
7441 }
7442 break;
7443 case 2: /* loop */
7444 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7445 gen_op_jnz_ecx(s->aflag, l1);
7446 break;
7447 default:
7448 case 3: /* jcxz */
7449 gen_op_jz_ecx(s->aflag, l1);
7450 break;
7451 }
7452
7453 gen_set_label(l3);
7454 gen_jmp_im(next_eip);
7455 tcg_gen_br(l2);
7456
7457 gen_set_label(l1);
7458 gen_jmp_im(tval);
7459 gen_set_label(l2);
7460 gen_eob(s);
7461 }
7462 break;
7463 case 0x130: /* wrmsr */
7464 case 0x132: /* rdmsr */
7465 if (s->cpl != 0) {
7466 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7467 } else {
7468 if (s->cc_op != CC_OP_DYNAMIC)
7469 gen_op_set_cc_op(s->cc_op);
7470 gen_jmp_im(pc_start - s->cs_base);
7471 if (b & 2) {
7472 tcg_gen_helper_0_0(helper_rdmsr);
7473 } else {
7474 tcg_gen_helper_0_0(helper_wrmsr);
7475 }
7476 }
7477 break;
7478 case 0x131: /* rdtsc */
7479 if (s->cc_op != CC_OP_DYNAMIC)
7480 gen_op_set_cc_op(s->cc_op);
7481 gen_jmp_im(pc_start - s->cs_base);
7482 if (use_icount)
7483 gen_io_start();
7484 tcg_gen_helper_0_0(helper_rdtsc);
7485 if (use_icount) {
7486 gen_io_end();
7487 gen_jmp(s, s->pc - s->cs_base);
7488 }
7489 break;
7490 case 0x133: /* rdpmc */
7491 if (s->cc_op != CC_OP_DYNAMIC)
7492 gen_op_set_cc_op(s->cc_op);
7493 gen_jmp_im(pc_start - s->cs_base);
7494 tcg_gen_helper_0_0(helper_rdpmc);
7495 break;
7496 case 0x134: /* sysenter */
7497#ifndef VBOX
7498        /* On Intel CPUs, SYSENTER is valid in 64-bit mode */
7499 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
7500#else
7501 /** @todo: make things right */
7502 if (CODE64(s))
7503#endif
7504 goto illegal_op;
7505 if (!s->pe) {
7506 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7507 } else {
7508 if (s->cc_op != CC_OP_DYNAMIC) {
7509 gen_op_set_cc_op(s->cc_op);
7510 s->cc_op = CC_OP_DYNAMIC;
7511 }
7512 gen_jmp_im(pc_start - s->cs_base);
7513 tcg_gen_helper_0_0(helper_sysenter);
7514 gen_eob(s);
7515 }
7516 break;
7517 case 0x135: /* sysexit */
7518#ifndef VBOX
7519        /* On Intel CPUs, SYSEXIT is valid in 64-bit mode */
7520 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
7521#else
7522 /** @todo: make things right */
7523 if (CODE64(s))
7524#endif
7525 goto illegal_op;
7526 if (!s->pe) {
7527 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7528 } else {
7529 if (s->cc_op != CC_OP_DYNAMIC) {
7530 gen_op_set_cc_op(s->cc_op);
7531 s->cc_op = CC_OP_DYNAMIC;
7532 }
7533 gen_jmp_im(pc_start - s->cs_base);
7534 tcg_gen_helper_0_1(helper_sysexit, tcg_const_i32(dflag));
7535 gen_eob(s);
7536 }
7537 break;
7538#ifdef TARGET_X86_64
7539 case 0x105: /* syscall */
7540        /* XXX: is it usable in real mode? */
7541 if (s->cc_op != CC_OP_DYNAMIC) {
7542 gen_op_set_cc_op(s->cc_op);
7543 s->cc_op = CC_OP_DYNAMIC;
7544 }
7545 gen_jmp_im(pc_start - s->cs_base);
7546 tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
7547 gen_eob(s);
7548 break;
7549 case 0x107: /* sysret */
7550 if (!s->pe) {
7551 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7552 } else {
7553 if (s->cc_op != CC_OP_DYNAMIC) {
7554 gen_op_set_cc_op(s->cc_op);
7555 s->cc_op = CC_OP_DYNAMIC;
7556 }
7557 gen_jmp_im(pc_start - s->cs_base);
7558 tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
7559 /* condition codes are modified only in long mode */
7560 if (s->lma)
7561 s->cc_op = CC_OP_EFLAGS;
7562 gen_eob(s);
7563 }
7564 break;
7565#endif
7566 case 0x1a2: /* cpuid */
7567 if (s->cc_op != CC_OP_DYNAMIC)
7568 gen_op_set_cc_op(s->cc_op);
7569 gen_jmp_im(pc_start - s->cs_base);
7570 tcg_gen_helper_0_0(helper_cpuid);
7571 break;
7572 case 0xf4: /* hlt */
7573 if (s->cpl != 0) {
7574 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7575 } else {
7576 if (s->cc_op != CC_OP_DYNAMIC)
7577 gen_op_set_cc_op(s->cc_op);
7578 gen_jmp_im(pc_start - s->cs_base);
7579 tcg_gen_helper_0_1(helper_hlt, tcg_const_i32(s->pc - pc_start));
7580 s->is_jmp = 3;
7581 }
7582 break;
7583 case 0x100:
7584 modrm = ldub_code(s->pc++);
7585 mod = (modrm >> 6) & 3;
7586 op = (modrm >> 3) & 7;
7587 switch(op) {
7588 case 0: /* sldt */
7589 if (!s->pe || s->vm86)
7590 goto illegal_op;
7591 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ);
7592 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
7593 ot = OT_WORD;
7594 if (mod == 3)
7595 ot += s->dflag;
7596 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
7597 break;
7598 case 2: /* lldt */
7599 if (!s->pe || s->vm86)
7600 goto illegal_op;
7601 if (s->cpl != 0) {
7602 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7603 } else {
7604 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE);
7605 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7606 gen_jmp_im(pc_start - s->cs_base);
7607 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7608 tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
7609 }
7610 break;
7611 case 1: /* str */
7612 if (!s->pe || s->vm86)
7613 goto illegal_op;
7614 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ);
7615 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
7616 ot = OT_WORD;
7617 if (mod == 3)
7618 ot += s->dflag;
7619 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
7620 break;
7621 case 3: /* ltr */
7622 if (!s->pe || s->vm86)
7623 goto illegal_op;
7624 if (s->cpl != 0) {
7625 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7626 } else {
7627 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE);
7628 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7629 gen_jmp_im(pc_start - s->cs_base);
7630 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7631 tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
7632 }
7633 break;
7634 case 4: /* verr */
7635 case 5: /* verw */
7636 if (!s->pe || s->vm86)
7637 goto illegal_op;
7638 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7639 if (s->cc_op != CC_OP_DYNAMIC)
7640 gen_op_set_cc_op(s->cc_op);
7641 if (op == 4)
7642 tcg_gen_helper_0_1(helper_verr, cpu_T[0]);
7643 else
7644 tcg_gen_helper_0_1(helper_verw, cpu_T[0]);
7645 s->cc_op = CC_OP_EFLAGS;
7646 break;
7647 default:
7648 goto illegal_op;
7649 }
7650 break;
7651 case 0x101:
7652 modrm = ldub_code(s->pc++);
7653 mod = (modrm >> 6) & 3;
7654 op = (modrm >> 3) & 7;
7655 rm = modrm & 7;
7656
7657#ifdef VBOX
7658        /* 0f 01 f9 = rdtscp */
7659 if (modrm == 0xf9)
7660 {
7661 if (!(s->cpuid_ext2_features & CPUID_EXT2_RDTSCP))
7662 goto illegal_op;
7663 gen_jmp_im(pc_start - s->cs_base);
7664 tcg_gen_helper_0_0(helper_rdtscp);
7665 break;
7666 }
7667#endif
7668 switch(op) {
7669 case 0: /* sgdt */
7670 if (mod == 3)
7671 goto illegal_op;
7672 gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ);
7673 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7674 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
7675 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7676 gen_add_A0_im(s, 2);
7677 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
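            /* with a 16-bit operand size only 24 bits of the base are
               stored; the top byte reads as zero */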
7678 if (!s->dflag)
7679 gen_op_andl_T0_im(0xffffff);
7680 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7681 break;
7682 case 1:
7683 if (mod == 3) {
7684 switch (rm) {
7685 case 0: /* monitor */
7686 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7687 s->cpl != 0)
7688 goto illegal_op;
7689 if (s->cc_op != CC_OP_DYNAMIC)
7690 gen_op_set_cc_op(s->cc_op);
7691 gen_jmp_im(pc_start - s->cs_base);
7692#ifdef TARGET_X86_64
7693 if (s->aflag == 2) {
7694 gen_op_movq_A0_reg(R_EAX);
7695 } else
7696#endif
7697 {
7698 gen_op_movl_A0_reg(R_EAX);
7699 if (s->aflag == 0)
7700 gen_op_andl_A0_ffff();
7701 }
7702 gen_add_A0_ds_seg(s);
7703 tcg_gen_helper_0_1(helper_monitor, cpu_A0);
7704 break;
7705 case 1: /* mwait */
7706 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7707 s->cpl != 0)
7708 goto illegal_op;
7709 if (s->cc_op != CC_OP_DYNAMIC) {
7710 gen_op_set_cc_op(s->cc_op);
7711 s->cc_op = CC_OP_DYNAMIC;
7712 }
7713 gen_jmp_im(pc_start - s->cs_base);
7714 tcg_gen_helper_0_1(helper_mwait, tcg_const_i32(s->pc - pc_start));
7715 gen_eob(s);
7716 break;
7717 default:
7718 goto illegal_op;
7719 }
7720 } else { /* sidt */
7721 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ);
7722 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7723 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
7724 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7725 gen_add_A0_im(s, 2);
7726 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
7727 if (!s->dflag)
7728 gen_op_andl_T0_im(0xffffff);
7729 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7730 }
7731 break;
7732 case 2: /* lgdt */
7733 case 3: /* lidt */
7734 if (mod == 3) {
7735 if (s->cc_op != CC_OP_DYNAMIC)
7736 gen_op_set_cc_op(s->cc_op);
7737 gen_jmp_im(pc_start - s->cs_base);
7738 switch(rm) {
7739 case 0: /* VMRUN */
7740 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7741 goto illegal_op;
7742 if (s->cpl != 0) {
7743 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7744 break;
7745 } else {
7746 tcg_gen_helper_0_2(helper_vmrun,
7747 tcg_const_i32(s->aflag),
7748 tcg_const_i32(s->pc - pc_start));
7749 tcg_gen_exit_tb(0);
7750 s->is_jmp = 3;
7751 }
7752 break;
7753 case 1: /* VMMCALL */
7754 if (!(s->flags & HF_SVME_MASK))
7755 goto illegal_op;
7756 tcg_gen_helper_0_0(helper_vmmcall);
7757 break;
7758 case 2: /* VMLOAD */
7759 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7760 goto illegal_op;
7761 if (s->cpl != 0) {
7762 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7763 break;
7764 } else {
7765 tcg_gen_helper_0_1(helper_vmload,
7766 tcg_const_i32(s->aflag));
7767 }
7768 break;
7769 case 3: /* VMSAVE */
7770 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7771 goto illegal_op;
7772 if (s->cpl != 0) {
7773 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7774 break;
7775 } else {
7776 tcg_gen_helper_0_1(helper_vmsave,
7777 tcg_const_i32(s->aflag));
7778 }
7779 break;
7780 case 4: /* STGI */
7781 if ((!(s->flags & HF_SVME_MASK) &&
7782 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7783 !s->pe)
7784 goto illegal_op;
7785 if (s->cpl != 0) {
7786 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7787 break;
7788 } else {
7789 tcg_gen_helper_0_0(helper_stgi);
7790 }
7791 break;
7792 case 5: /* CLGI */
7793 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7794 goto illegal_op;
7795 if (s->cpl != 0) {
7796 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7797 break;
7798 } else {
7799 tcg_gen_helper_0_0(helper_clgi);
7800 }
7801 break;
7802 case 6: /* SKINIT */
7803 if ((!(s->flags & HF_SVME_MASK) &&
7804 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7805 !s->pe)
7806 goto illegal_op;
7807 tcg_gen_helper_0_0(helper_skinit);
7808 break;
7809 case 7: /* INVLPGA */
7810 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7811 goto illegal_op;
7812 if (s->cpl != 0) {
7813 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7814 break;
7815 } else {
7816 tcg_gen_helper_0_1(helper_invlpga,
7817 tcg_const_i32(s->aflag));
7818 }
7819 break;
7820 default:
7821 goto illegal_op;
7822 }
7823 } else if (s->cpl != 0) {
7824 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7825 } else {
7826 gen_svm_check_intercept(s, pc_start,
7827 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE);
7828 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7829 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
7830 gen_add_A0_im(s, 2);
7831 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7832 if (!s->dflag)
7833 gen_op_andl_T0_im(0xffffff);
7834 if (op == 2) {
7835 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
7836 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
7837 } else {
7838 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
7839 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
7840 }
7841 }
7842 break;
7843 case 4: /* smsw */
7844 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0);
7845 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
7846 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
7847 break;
7848 case 6: /* lmsw */
7849 if (s->cpl != 0) {
7850 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7851 } else {
7852 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
7853 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7854 tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
7855 gen_jmp_im(s->pc - s->cs_base);
7856 gen_eob(s);
7857 }
7858 break;
7859 case 7: /* invlpg */
7860 if (s->cpl != 0) {
7861 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7862 } else {
7863 if (mod == 3) {
7864#ifdef TARGET_X86_64
7865 if (CODE64(s) && rm == 0) {
7866 /* swapgs */
7867 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7868 tcg_gen_ld_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,kernelgsbase));
7869 tcg_gen_st_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7870 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,kernelgsbase));
7871 } else
7872#endif
7873 {
7874 goto illegal_op;
7875 }
7876 } else {
7877 if (s->cc_op != CC_OP_DYNAMIC)
7878 gen_op_set_cc_op(s->cc_op);
7879 gen_jmp_im(pc_start - s->cs_base);
7880 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7881 tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
7882 gen_jmp_im(s->pc - s->cs_base);
7883 gen_eob(s);
7884 }
7885 }
7886 break;
7887 default:
7888 goto illegal_op;
7889 }
7890 break;
7891 case 0x108: /* invd */
7892 case 0x109: /* wbinvd */
7893 if (s->cpl != 0) {
7894 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7895 } else {
7896 gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD);
7897 /* nothing to do */
7898 }
7899 break;
7900 case 0x63: /* arpl or movslS (x86_64) */
7901#ifdef TARGET_X86_64
7902 if (CODE64(s)) {
7903 int d_ot;
7904            /* d_ot is the size of the destination */
7905 d_ot = dflag + OT_WORD;
7906
7907 modrm = ldub_code(s->pc++);
7908 reg = ((modrm >> 3) & 7) | rex_r;
7909 mod = (modrm >> 6) & 3;
7910 rm = (modrm & 7) | REX_B(s);
7911
7912 if (mod == 3) {
7913 gen_op_mov_TN_reg(OT_LONG, 0, rm);
7914 /* sign extend */
7915 if (d_ot == OT_QUAD)
7916 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
7917 gen_op_mov_reg_T0(d_ot, reg);
7918 } else {
7919 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7920 if (d_ot == OT_QUAD) {
7921 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
7922 } else {
7923 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
7924 }
7925 gen_op_mov_reg_T0(d_ot, reg);
7926 }
7927 } else
7928#endif
7929 {
7930 int label1;
7931 TCGv t0, t1, t2, a0;
7932
7933 if (!s->pe || s->vm86)
7934 goto illegal_op;
7935
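            /* ARPL dst, src: if RPL(dst) < RPL(src), raise RPL(dst) to
               RPL(src) and set ZF, otherwise clear ZF (t2 holds the new
               ZF value merged into EFLAGS below) */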
7936 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7937 t1 = tcg_temp_local_new(TCG_TYPE_TL);
7938 t2 = tcg_temp_local_new(TCG_TYPE_TL);
7939#ifdef VBOX
7940 a0 = tcg_temp_local_new(TCG_TYPE_TL);
7941#endif
7942 ot = OT_WORD;
7943 modrm = ldub_code(s->pc++);
7944 reg = (modrm >> 3) & 7;
7945 mod = (modrm >> 6) & 3;
7946 rm = modrm & 7;
7947 if (mod != 3) {
7948 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7949#ifdef VBOX
7950 tcg_gen_mov_tl(a0, cpu_A0);
7951#endif
7952 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
7953 } else {
7954 gen_op_mov_v_reg(ot, t0, rm);
7955 }
7956 gen_op_mov_v_reg(ot, t1, reg);
7957 tcg_gen_andi_tl(cpu_tmp0, t0, 3);
7958 tcg_gen_andi_tl(t1, t1, 3);
7959 tcg_gen_movi_tl(t2, 0);
7960 label1 = gen_new_label();
7961 tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, t1, label1);
7962 tcg_gen_andi_tl(t0, t0, ~3);
7963 tcg_gen_or_tl(t0, t0, t1);
7964 tcg_gen_movi_tl(t2, CC_Z);
7965 gen_set_label(label1);
7966 if (mod != 3) {
7967#ifdef VBOX
7968                /* cpu_A0 does not survive the branch, so use the saved copy */
7969 gen_op_st_v(ot + s->mem_index, t0, a0);
7970#else
7971 gen_op_st_v(ot + s->mem_index, t0, cpu_A0);
7972#endif
7973 } else {
7974 gen_op_mov_reg_v(ot, rm, t0);
7975 }
7976 if (s->cc_op != CC_OP_DYNAMIC)
7977 gen_op_set_cc_op(s->cc_op);
7978 gen_compute_eflags(cpu_cc_src);
7979 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
7980 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t2);
7981 s->cc_op = CC_OP_EFLAGS;
7982 tcg_temp_free(t0);
7983 tcg_temp_free(t1);
7984 tcg_temp_free(t2);
7985#ifdef VBOX
7986 tcg_temp_free(a0);
7987#endif
7988 }
7989 break;
7990 case 0x102: /* lar */
7991 case 0x103: /* lsl */
7992 {
7993 int label1;
7994 TCGv t0;
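                /* LAR/LSL: the helper leaves ZF in cc_src; the result is
                   committed to the destination register only when ZF is set */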
7995 if (!s->pe || s->vm86)
7996 goto illegal_op;
7997 ot = dflag ? OT_LONG : OT_WORD;
7998 modrm = ldub_code(s->pc++);
7999 reg = ((modrm >> 3) & 7) | rex_r;
8000 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
8001 t0 = tcg_temp_local_new(TCG_TYPE_TL);
8002 if (s->cc_op != CC_OP_DYNAMIC)
8003 gen_op_set_cc_op(s->cc_op);
8004 if (b == 0x102)
8005 tcg_gen_helper_1_1(helper_lar, t0, cpu_T[0]);
8006 else
8007 tcg_gen_helper_1_1(helper_lsl, t0, cpu_T[0]);
8008 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
8009 label1 = gen_new_label();
8010 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
8011 gen_op_mov_reg_v(ot, reg, t0);
8012 gen_set_label(label1);
8013 s->cc_op = CC_OP_EFLAGS;
8014 tcg_temp_free(t0);
8015 }
8016 break;
8017 case 0x118:
8018 modrm = ldub_code(s->pc++);
8019 mod = (modrm >> 6) & 3;
8020 op = (modrm >> 3) & 7;
8021 switch(op) {
8022 case 0: /* prefetchnta */
8023        case 1: /* prefetcht0 */
8024        case 2: /* prefetcht1 */
8025        case 3: /* prefetcht2 */
8026 if (mod == 3)
8027 goto illegal_op;
8028 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8029 /* nothing more to do */
8030 break;
8031 default: /* nop (multi byte) */
8032 gen_nop_modrm(s, modrm);
8033 break;
8034 }
8035 break;
8036 case 0x119 ... 0x11f: /* nop (multi byte) */
8037 modrm = ldub_code(s->pc++);
8038 gen_nop_modrm(s, modrm);
8039 break;
8040 case 0x120: /* mov reg, crN */
8041 case 0x122: /* mov crN, reg */
8042 if (s->cpl != 0) {
8043 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8044 } else {
8045 modrm = ldub_code(s->pc++);
8046 if ((modrm & 0xc0) != 0xc0)
8047 goto illegal_op;
8048 rm = (modrm & 7) | REX_B(s);
8049 reg = ((modrm >> 3) & 7) | rex_r;
8050 if (CODE64(s))
8051 ot = OT_QUAD;
8052 else
8053 ot = OT_LONG;
8054 switch(reg) {
8055 case 0:
8056 case 2:
8057 case 3:
8058 case 4:
8059 case 8:
8060 if (s->cc_op != CC_OP_DYNAMIC)
8061 gen_op_set_cc_op(s->cc_op);
8062 gen_jmp_im(pc_start - s->cs_base);
8063 if (b & 2) {
8064 gen_op_mov_TN_reg(ot, 0, rm);
8065 tcg_gen_helper_0_2(helper_write_crN,
8066 tcg_const_i32(reg), cpu_T[0]);
8067 gen_jmp_im(s->pc - s->cs_base);
8068 gen_eob(s);
8069 } else {
8070 tcg_gen_helper_1_1(helper_read_crN,
8071 cpu_T[0], tcg_const_i32(reg));
8072 gen_op_mov_reg_T0(ot, rm);
8073 }
8074 break;
8075 default:
8076 goto illegal_op;
8077 }
8078 }
8079 break;
8080 case 0x121: /* mov reg, drN */
8081 case 0x123: /* mov drN, reg */
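        /* Debug register moves are privileged as well.  DR4/DR5 are always
           rejected here (see the CR4.DE note below), and the SVM intercepts
           are checked before the actual access. */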
8082 if (s->cpl != 0) {
8083 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8084 } else {
8085 modrm = ldub_code(s->pc++);
8086 if ((modrm & 0xc0) != 0xc0)
8087 goto illegal_op;
8088 rm = (modrm & 7) | REX_B(s);
8089 reg = ((modrm >> 3) & 7) | rex_r;
8090 if (CODE64(s))
8091 ot = OT_QUAD;
8092 else
8093 ot = OT_LONG;
8094 /* XXX: do it dynamically with CR4.DE bit */
8095 if (reg == 4 || reg == 5 || reg >= 8)
8096 goto illegal_op;
8097 if (b & 2) {
8098 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
8099 gen_op_mov_TN_reg(ot, 0, rm);
8100 tcg_gen_helper_0_2(helper_movl_drN_T0,
8101 tcg_const_i32(reg), cpu_T[0]);
8102 gen_jmp_im(s->pc - s->cs_base);
8103 gen_eob(s);
8104 } else {
8105 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
8106 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
8107 gen_op_mov_reg_T0(ot, rm);
8108 }
8109 }
8110 break;
8111 case 0x106: /* clts */
8112 if (s->cpl != 0) {
8113 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8114 } else {
8115 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
8116 tcg_gen_helper_0_0(helper_clts);
8117 /* abort block because static cpu state changed */
8118 gen_jmp_im(s->pc - s->cs_base);
8119 gen_eob(s);
8120 }
8121 break;
8122 /* MMX/3DNow!/SSE/SSE2/SSE3/SSSE3/SSE4 support */
8123 case 0x1c3: /* MOVNTI reg, mem */
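    /* MOVNTI is a store with a non-temporal cache hint; the hint is
       irrelevant for emulation, so it is handled as a plain store. */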
8124 if (!(s->cpuid_features & CPUID_SSE2))
8125 goto illegal_op;
8126 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
8127 modrm = ldub_code(s->pc++);
8128 mod = (modrm >> 6) & 3;
8129 if (mod == 3)
8130 goto illegal_op;
8131 reg = ((modrm >> 3) & 7) | rex_r;
8132 /* generate a generic store */
8133 gen_ldst_modrm(s, modrm, ot, reg, 1);
8134 break;
8135 case 0x1ae:
8136 modrm = ldub_code(s->pc++);
8137 mod = (modrm >> 6) & 3;
8138 op = (modrm >> 3) & 7;
8139 switch(op) {
8140 case 0: /* fxsave */
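        /* FXSAVE stores the 512-byte FPU/MMX/SSE state image at the
           effective address; the extra argument tells the helper whether
           the 64-bit (REX.W) format is in effect. */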
8141 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
8142 (s->flags & HF_EM_MASK))
8143 goto illegal_op;
8144 if (s->flags & HF_TS_MASK) {
8145 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8146 break;
8147 }
8148 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8149 if (s->cc_op != CC_OP_DYNAMIC)
8150 gen_op_set_cc_op(s->cc_op);
8151 gen_jmp_im(pc_start - s->cs_base);
8152 tcg_gen_helper_0_2(helper_fxsave,
8153 cpu_A0, tcg_const_i32((s->dflag == 2)));
8154 break;
8155 case 1: /* fxrstor */
8156 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
8157 (s->flags & HF_EM_MASK))
8158 goto illegal_op;
8159 if (s->flags & HF_TS_MASK) {
8160 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8161 break;
8162 }
8163 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8164 if (s->cc_op != CC_OP_DYNAMIC)
8165 gen_op_set_cc_op(s->cc_op);
8166 gen_jmp_im(pc_start - s->cs_base);
8167 tcg_gen_helper_0_2(helper_fxrstor,
8168 cpu_A0, tcg_const_i32((s->dflag == 2)));
8169 break;
8170 case 2: /* ldmxcsr */
8171 case 3: /* stmxcsr */
8172 if (s->flags & HF_TS_MASK) {
8173 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
8174 break;
8175 }
8176 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
8177 mod == 3)
8178 goto illegal_op;
8179 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8180 if (op == 2) {
8181 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
8182 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
8183 } else {
8184 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
8185 gen_op_st_T0_A0(OT_LONG + s->mem_index);
8186 }
8187 break;
8188 case 5: /* lfence */
8189 case 6: /* mfence */
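        /* XXX: LFENCE and MFENCE are architecturally SSE2 instructions;
           checking CPUID_SSE here is slightly lax. */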
8190 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
8191 goto illegal_op;
8192 break;
8193 case 7: /* sfence / clflush */
8194 if ((modrm & 0xc7) == 0xc0) {
8195 /* sfence */
8196 /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
8197 if (!(s->cpuid_features & CPUID_SSE))
8198 goto illegal_op;
8199 } else {
8200 /* clflush */
8201 if (!(s->cpuid_features & CPUID_CLFLUSH))
8202 goto illegal_op;
8203 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8204 }
8205 break;
8206 default:
8207 goto illegal_op;
8208 }
8209 break;
8210 case 0x10d: /* 3DNow! prefetch(w) */
8211 modrm = ldub_code(s->pc++);
8212 mod = (modrm >> 6) & 3;
8213 if (mod == 3)
8214 goto illegal_op;
8215 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8216 /* ignore for now */
8217 break;
8218 case 0x1aa: /* rsm */
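    /* RSM returns from System Management Mode; the helper reloads the
       saved CPU state, so the block must be terminated afterwards. */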
8219 gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM);
8220 if (!(s->flags & HF_SMM_MASK))
8221 goto illegal_op;
8222 if (s->cc_op != CC_OP_DYNAMIC) {
8223 gen_op_set_cc_op(s->cc_op);
8224 s->cc_op = CC_OP_DYNAMIC;
8225 }
8226 gen_jmp_im(s->pc - s->cs_base);
8227 tcg_gen_helper_0_0(helper_rsm);
8228 gen_eob(s);
8229 break;
8230 case 0x1b8: /* SSE4.2 popcnt */
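    /* POPCNT is encoded as F3 0F B8: the mandatory REPZ prefix must be
       present and LOCK/REPNZ must not, hence the prefix check below. */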
8231 if ((prefixes & (PREFIX_REPZ | PREFIX_LOCK | PREFIX_REPNZ)) !=
8232 PREFIX_REPZ)
8233 goto illegal_op;
8234 if (!(s->cpuid_ext_features & CPUID_EXT_POPCNT))
8235 goto illegal_op;
8236
8237 modrm = ldub_code(s->pc++);
8238 reg = ((modrm >> 3) & 7) | rex_r;
8239
8240 if (s->prefix & PREFIX_DATA)
8241 ot = OT_WORD;
8242 else if (s->dflag != 2)
8243 ot = OT_LONG;
8244 else
8245 ot = OT_QUAD;
8246
8247 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
8248 tcg_gen_helper_1_2(helper_popcnt,
8249 cpu_T[0], cpu_T[0], tcg_const_i32(ot));
8250 gen_op_mov_reg_T0(ot, reg);
8251
8252 s->cc_op = CC_OP_EFLAGS;
8253 break;
8254 case 0x10e ... 0x10f:
8255 /* 3DNow! instructions, ignore prefixes */
8256 s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
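        /* fall through to the common MMX/SSE decoder */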
8257 case 0x110 ... 0x117:
8258 case 0x128 ... 0x12f:
8259 case 0x138 ... 0x13a:
8260 case 0x150 ... 0x177:
8261 case 0x17c ... 0x17f:
8262 case 0x1c2:
8263 case 0x1c4 ... 0x1c6:
8264 case 0x1d0 ... 0x1fe:
8265 gen_sse(s, b, pc_start, rex_r);
8266 break;
8267 default:
8268 goto illegal_op;
8269 }
8270 /* lock generation */
8271 if (s->prefix & PREFIX_LOCK)
8272 tcg_gen_helper_0_0(helper_unlock);
8273 return s->pc;
8274 illegal_op:
8275 if (s->prefix & PREFIX_LOCK)
8276 tcg_gen_helper_0_0(helper_unlock);
8277 /* XXX: ensure that no lock was generated */
8278 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
8279 return s->pc;
8280}
8281
8282void optimize_flags_init(void)
8283{
8284#if TCG_TARGET_REG_BITS == 32
8285 assert(sizeof(CCTable) == (1 << 3));
8286#else
8287 assert(sizeof(CCTable) == (1 << 4));
8288#endif
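    /* the asserts above guarantee that sizeof(CCTable) is a power of two,
       so generated code can index cc_table for a given cc_op with a
       simple shift */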
8289 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
8290 cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
8291 TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
8292 cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
8293 TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
8294 cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
8295 TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
8296 cpu_cc_tmp = tcg_global_mem_new(TCG_TYPE_TL,
8297 TCG_AREG0, offsetof(CPUState, cc_tmp), "cc_tmp");
8298
8299 /* register helpers */
8300
8301#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
8302#include "helper.h"
8303}
8304
8305/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
8306 basic block 'tb'. If search_pc is TRUE, also generate PC
8307 information for each intermediate instruction. */
8308#ifndef VBOX
8309static inline void gen_intermediate_code_internal(CPUState *env,
8310#else /* VBOX */
8311DECLINLINE(void) gen_intermediate_code_internal(CPUState *env,
8312#endif /* VBOX */
8313 TranslationBlock *tb,
8314 int search_pc)
8315{
8316 DisasContext dc1, *dc = &dc1;
8317 target_ulong pc_ptr;
8318 uint16_t *gen_opc_end;
8319 int j, lj, cflags;
8320 uint64_t flags;
8321 target_ulong pc_start;
8322 target_ulong cs_base;
8323 int num_insns;
8324 int max_insns;
8325
8326 /* generate intermediate code */
8327 pc_start = tb->pc;
8328 cs_base = tb->cs_base;
8329 flags = tb->flags;
8330 cflags = tb->cflags;
8331
8332 dc->pe = (flags >> HF_PE_SHIFT) & 1;
8333 dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
8334 dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
8335 dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
8336 dc->f_st = 0;
8337 dc->vm86 = (flags >> VM_SHIFT) & 1;
8338#ifdef VBOX_WITH_CALL_RECORD
8339 dc->vme = !!(env->cr[4] & CR4_VME_MASK);
8340 if ( !(env->state & CPU_RAW_RING0)
8341 && (env->cr[0] & CR0_PG_MASK)
8342 && !(env->eflags & X86_EFL_IF)
8343 && dc->code32)
8344 dc->record_call = 1;
8345 else
8346 dc->record_call = 0;
8347#endif
8348 dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
8349 dc->iopl = (flags >> IOPL_SHIFT) & 3;
8350 dc->tf = (flags >> TF_SHIFT) & 1;
8351 dc->singlestep_enabled = env->singlestep_enabled;
8352 dc->cc_op = CC_OP_DYNAMIC;
8353 dc->cs_base = cs_base;
8354 dc->tb = tb;
8355 dc->popl_esp_hack = 0;
8356 /* select memory access functions */
8357 dc->mem_index = 0;
8358 if (flags & HF_SOFTMMU_MASK) {
8359 if (dc->cpl == 3)
8360 dc->mem_index = 2 * 4;
8361 else
8362 dc->mem_index = 1 * 4;
8363 }
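    /* mem_index selects among the raw/kernel/user load-store variants;
       each step of 4 covers the four access sizes (OT_BYTE..OT_QUAD) */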
8364 dc->cpuid_features = env->cpuid_features;
8365 dc->cpuid_ext_features = env->cpuid_ext_features;
8366 dc->cpuid_ext2_features = env->cpuid_ext2_features;
8367 dc->cpuid_ext3_features = env->cpuid_ext3_features;
8368#ifdef TARGET_X86_64
8369 dc->lma = (flags >> HF_LMA_SHIFT) & 1;
8370 dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
8371#endif
8372 dc->flags = flags;
8373 dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
8374 (flags & HF_INHIBIT_IRQ_MASK)
8375#ifndef CONFIG_SOFTMMU
8376 || (flags & HF_SOFTMMU_MASK)
8377#endif
8378 );
8379#if 0
8380 /* check addseg logic */
8381 if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
8382 printf("ERROR addseg\n");
8383#endif
8384
8385 cpu_T[0] = tcg_temp_new(TCG_TYPE_TL);
8386 cpu_T[1] = tcg_temp_new(TCG_TYPE_TL);
8387 cpu_A0 = tcg_temp_new(TCG_TYPE_TL);
8388 cpu_T3 = tcg_temp_new(TCG_TYPE_TL);
8389
8390 cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
8391 cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
8392 cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
8393 cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
8394 cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
8395 cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
8396 cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
8397 cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
8398 cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);
8399
8400 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
8401
8402 dc->is_jmp = DISAS_NEXT;
8403 pc_ptr = pc_start;
8404 lj = -1;
8405 num_insns = 0;
8406 max_insns = tb->cflags & CF_COUNT_MASK;
8407 if (max_insns == 0)
8408 max_insns = CF_COUNT_MASK;
8409
8410 gen_icount_start();
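    /* main decode loop: translate one guest instruction per iteration
       until an end-of-block condition is reached */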
8411 for(;;) {
8412 if (env->nb_breakpoints > 0) {
8413 for(j = 0; j < env->nb_breakpoints; j++) {
8414 if (env->breakpoints[j] == pc_ptr) {
8415 gen_debug(dc, pc_ptr - dc->cs_base);
8416 break;
8417 }
8418 }
8419 }
8420 if (search_pc) {
8421 j = gen_opc_ptr - gen_opc_buf;
8422 if (lj < j) {
8423 lj++;
8424 while (lj < j)
8425 gen_opc_instr_start[lj++] = 0;
8426 }
8427 gen_opc_pc[lj] = pc_ptr;
8428 gen_opc_cc_op[lj] = dc->cc_op;
8429 gen_opc_instr_start[lj] = 1;
8430 gen_opc_icount[lj] = num_insns;
8431 }
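        /* with search_pc, the tables filled above map every generated
           micro-op back to the guest PC and cc_op state so that
           gen_pc_load() can restore them after a fault */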
8432 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
8433 gen_io_start();
8434
8435 pc_ptr = disas_insn(dc, pc_ptr);
8436 num_insns++;
8437 /* stop translation if indicated */
8438 if (dc->is_jmp)
8439 break;
8440#ifdef VBOX
8441#ifdef DEBUG
8442/*
8443 if(cpu_check_code_raw(env, pc_ptr, env->hflags | (env->eflags & (IOPL_MASK | TF_MASK | VM_MASK))) == ERROR_SUCCESS)
8444 {
8445 //should never happen as the jump to the patch code terminates the translation block
8446 dprintf(("QEMU is about to execute instructions in our patch block at %08X!!\n", pc_ptr));
8447 }
8448*/
8449#endif
8450 if (env->state & CPU_EMULATE_SINGLE_INSTR)
8451 {
8452 env->state &= ~CPU_EMULATE_SINGLE_INSTR;
8453 gen_jmp_im(pc_ptr - dc->cs_base);
8454 gen_eob(dc);
8455 break;
8456 }
8457#endif /* VBOX */
8458
8459 /* in single-step mode, we generate only one instruction and
8460 then generate an exception */
8461 /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
8462 the flag and abort the translation to give the irqs a
8463 chance to be taken */
8464 if (dc->tf || dc->singlestep_enabled ||
8465 (flags & HF_INHIBIT_IRQ_MASK)) {
8466 gen_jmp_im(pc_ptr - dc->cs_base);
8467 gen_eob(dc);
8468 break;
8469 }
8470 /* if the translation grows too long, stop generation as well */
8471 if (gen_opc_ptr >= gen_opc_end ||
8472 (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32) ||
8473 num_insns >= max_insns) {
8474 gen_jmp_im(pc_ptr - dc->cs_base);
8475 gen_eob(dc);
8476 break;
8477 }
8478 }
8479 if (tb->cflags & CF_LAST_IO)
8480 gen_io_end();
8481 gen_icount_end(tb, num_insns);
8482 *gen_opc_ptr = INDEX_op_end;
8483 /* make sure the remaining slots are filled as well */
8484 if (search_pc) {
8485 j = gen_opc_ptr - gen_opc_buf;
8486 lj++;
8487 while (lj <= j)
8488 gen_opc_instr_start[lj++] = 0;
8489 }
8490
8491#ifdef DEBUG_DISAS
8492 if (loglevel & CPU_LOG_TB_CPU) {
8493 cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
8494 }
8495 if (loglevel & CPU_LOG_TB_IN_ASM) {
8496 int disas_flags;
8497 fprintf(logfile, "----------------\n");
8498 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
8499#ifdef TARGET_X86_64
8500 if (dc->code64)
8501 disas_flags = 2;
8502 else
8503#endif
8504 disas_flags = !dc->code32;
8505 target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
8506 fprintf(logfile, "\n");
8507 }
8508#endif
8509
8510 if (!search_pc) {
8511 tb->size = pc_ptr - pc_start;
8512 tb->icount = num_insns;
8513 }
8514}
8515
8516void gen_intermediate_code(CPUState *env, TranslationBlock *tb)
8517{
8518 gen_intermediate_code_internal(env, tb, 0);
8519}
8520
8521void gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
8522{
8523 gen_intermediate_code_internal(env, tb, 1);
8524}
8525
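/* Restore the guest EIP (and cc_op, when statically known) for the
   micro-op at pc_pos; called when a fault occurs inside a translated
   block and the CPU state must be re-synchronized. */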
8526void gen_pc_load(CPUState *env, TranslationBlock *tb,
8527 unsigned long searched_pc, int pc_pos, void *puc)
8528{
8529 int cc_op;
8530#ifdef DEBUG_DISAS
8531 if (loglevel & CPU_LOG_TB_OP) {
8532 int i;
8533 fprintf(logfile, "RESTORE:\n");
8534 for(i = 0; i <= pc_pos; i++) {
8535 if (gen_opc_instr_start[i]) {
8536 fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
8537 }
8538 }
8539 fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
8540 searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
8541 (uint32_t)tb->cs_base);
8542 }
8543#endif
8544 env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
8545 cc_op = gen_opc_cc_op[pc_pos];
8546 if (cc_op != CC_OP_DYNAMIC)
8547 env->cc_op = cc_op;
8548}