VirtualBox

source: vbox/trunk/src/recompiler/target-i386/translate.c @ 36061

Last change on this file since 36061 was 36056, checked in by vboxsync, 14 years ago

.remstep hacking.

  • Property svn:eol-style set to native
File size: 277.0 KB
1/*
2 * i386 translation
3 *
4 * Copyright (c) 2003 Fabrice Bellard
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
20
21/*
22 * Oracle LGPL Disclaimer: For the avoidance of doubt, except that if any license choice
23 * other than GPL or LGPL is available it will apply instead, Oracle elects to use only
24 * the Lesser General Public License version 2.1 (LGPLv2) at this time for any software where
25 * a choice of LGPL license versions is made available with the language indicating
26 * that LGPLv2 or any later version may be used, or where a choice of which version
27 * of the LGPL is applied is otherwise unspecified.
28 */
29
30#include <stdarg.h>
31#include <stdlib.h>
32#include <stdio.h>
33#include <string.h>
34#ifndef VBOX
35#include <inttypes.h>
36#include <signal.h>
37#include <assert.h>
38#endif /* !VBOX */
39
40#include "cpu.h"
41#include "exec-all.h"
42#include "disas.h"
43#include "helper.h"
44#include "tcg-op.h"
45
46#define PREFIX_REPZ 0x01
47#define PREFIX_REPNZ 0x02
48#define PREFIX_LOCK 0x04
49#define PREFIX_DATA 0x08
50#define PREFIX_ADR 0x10
51
52#ifdef TARGET_X86_64
53#define X86_64_ONLY(x) x
54#ifndef VBOX
55#define X86_64_DEF(x...) x
56#else
57#define X86_64_DEF(x...) x
58#endif
59#define CODE64(s) ((s)->code64)
60#define REX_X(s) ((s)->rex_x)
61#define REX_B(s) ((s)->rex_b)
62/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
63#if 1
64#define BUGGY_64(x) NULL
65#endif
66#else
67#define X86_64_ONLY(x) NULL
68#ifndef VBOX
69#define X86_64_DEF(x...)
70#else
71#define X86_64_DEF(x)
72#endif
73#define CODE64(s) 0
74#define REX_X(s) 0
75#define REX_B(s) 0
76#endif
77
78//#define MACRO_TEST 1
79
80/* global register indexes */
81static TCGv cpu_env, cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp;
82/* local temps */
83static TCGv cpu_T[2], cpu_T3;
84/* local register indexes (only used inside old micro ops) */
85static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1;
86static TCGv cpu_tmp5, cpu_tmp6;
87
88#include "gen-icount.h"
89
90#ifdef TARGET_X86_64
91static int x86_64_hregs;
92#endif
93
94#ifdef VBOX
95
96/* Special/override code readers to hide patched code. */
97
98uint8_t ldub_code_raw(target_ulong pc)
99{
100 uint8_t b;
101
102 if (!remR3GetOpcode(cpu_single_env, pc, &b))
103 b = ldub_code(pc);
104 return b;
105}
106#define ldub_code(a) ldub_code_raw(a)
107
108uint16_t lduw_code_raw(target_ulong pc)
109{
110 return (ldub_code(pc+1) << 8) | ldub_code(pc);
111}
112#define lduw_code(a) lduw_code_raw(a)
113
114
115uint32_t ldl_code_raw(target_ulong pc)
116{
117 return (ldub_code(pc+3) << 24) | (ldub_code(pc+2) << 16) | (ldub_code(pc+1) << 8) | ldub_code(pc);
118}
119#define ldl_code(a) ldl_code_raw(a)
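/* Example: the byte-wise readers assemble little-endian values, so bytes
   EF BE at pc make lduw_code_raw(pc) return (0xBE << 8) | 0xEF = 0xBEEF.
   Going through ldub_code for every byte keeps the remR3GetOpcode
   patch-hiding hook authoritative for each byte of a multi-byte fetch. */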
120
121#endif /* VBOX */
122
123
124typedef struct DisasContext {
125 /* current insn context */
126 int override; /* -1 if no override */
127 int prefix;
128 int aflag, dflag;
129 target_ulong pc; /* pc = eip + cs_base */
130 int is_jmp; /* 1 means jump (stop translation), 2 means CPU
131 static state change (stop translation) */
132 /* current block context */
133 target_ulong cs_base; /* base of CS segment */
134 int pe; /* protected mode */
135 int code32; /* 32 bit code segment */
136#ifdef TARGET_X86_64
137 int lma; /* long mode active */
138 int code64; /* 64 bit code segment */
139 int rex_x, rex_b;
140#endif
141 int ss32; /* 32 bit stack segment */
142 int cc_op; /* current CC operation */
143 int addseg; /* non zero if any of DS/ES/SS has a non zero base */
144 int f_st; /* currently unused */
145 int vm86; /* vm86 mode */
146#ifdef VBOX
147 int vme; /* CR4.VME */
148 int pvi; /* CR4.PVI */
149 int record_call; /* record calls for CSAM or not? */
150#endif
151 int cpl;
152 int iopl;
153 int tf; /* TF cpu flag */
154 int singlestep_enabled; /* "hardware" single step enabled */
155 int jmp_opt; /* use direct block chaining for direct jumps */
156 int mem_index; /* select memory access functions */
157 uint64_t flags; /* all execution flags */
158 struct TranslationBlock *tb;
159 int popl_esp_hack; /* for correct popl with esp base handling */
160 int rip_offset; /* only used in x86_64, but left for simplicity */
161 int cpuid_features;
162 int cpuid_ext_features;
163 int cpuid_ext2_features;
164 int cpuid_ext3_features;
165} DisasContext;
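/* Note: s->pc is the linear decode address (eip + cs_base), so code that
   needs a segment-relative eip, e.g. gen_check_io() below, derives it as
   s->pc - s->cs_base before handing it to gen_jmp_im(). */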
166
167static void gen_eob(DisasContext *s);
168static void gen_jmp(DisasContext *s, target_ulong eip);
169static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
170
171#ifdef VBOX
172static void gen_check_external_event(void);
173#endif
174
175/* i386 arith/logic operations */
176enum {
177 OP_ADDL,
178 OP_ORL,
179 OP_ADCL,
180 OP_SBBL,
181 OP_ANDL,
182 OP_SUBL,
183 OP_XORL,
184 OP_CMPL,
185};
186
187/* i386 shift ops */
188enum {
189 OP_ROL,
190 OP_ROR,
191 OP_RCL,
192 OP_RCR,
193 OP_SHL,
194 OP_SHR,
195 OP_SHL1, /* undocumented */
196 OP_SAR = 7,
197};
198
199enum {
200 JCC_O,
201 JCC_B,
202 JCC_Z,
203 JCC_BE,
204 JCC_S,
205 JCC_P,
206 JCC_L,
207 JCC_LE,
208};
209
210/* operand size */
211enum {
212 OT_BYTE = 0,
213 OT_WORD,
214 OT_LONG,
215 OT_QUAD,
216};
217
218enum {
219 /* I386 int registers */
220 OR_EAX, /* MUST be even numbered */
221 OR_ECX,
222 OR_EDX,
223 OR_EBX,
224 OR_ESP,
225 OR_EBP,
226 OR_ESI,
227 OR_EDI,
228
229 OR_TMP0 = 16, /* temporary operand register */
230 OR_TMP1,
231 OR_A0, /* temporary register used when doing address evaluation */
232};
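/* The first eight indices match the i386 ModRM register encoding
   (0 = EAX, 1 = ECX, ... 7 = EDI), so a decoded reg field can be used
   directly as an index into CPUState.regs[]. */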
233
234#ifndef VBOX
235static inline void gen_op_movl_T0_0(void)
236#else /* VBOX */
237DECLINLINE(void) gen_op_movl_T0_0(void)
238#endif /* VBOX */
239{
240 tcg_gen_movi_tl(cpu_T[0], 0);
241}
242
243#ifndef VBOX
244static inline void gen_op_movl_T0_im(int32_t val)
245#else /* VBOX */
246DECLINLINE(void) gen_op_movl_T0_im(int32_t val)
247#endif /* VBOX */
248{
249 tcg_gen_movi_tl(cpu_T[0], val);
250}
251
252#ifndef VBOX
253static inline void gen_op_movl_T0_imu(uint32_t val)
254#else /* VBOX */
255DECLINLINE(void) gen_op_movl_T0_imu(uint32_t val)
256#endif /* VBOX */
257{
258 tcg_gen_movi_tl(cpu_T[0], val);
259}
260
261#ifndef VBOX
262static inline void gen_op_movl_T1_im(int32_t val)
263#else /* VBOX */
264DECLINLINE(void) gen_op_movl_T1_im(int32_t val)
265#endif /* VBOX */
266{
267 tcg_gen_movi_tl(cpu_T[1], val);
268}
269
270#ifndef VBOX
271static inline void gen_op_movl_T1_imu(uint32_t val)
272#else /* VBOX */
273DECLINLINE(void) gen_op_movl_T1_imu(uint32_t val)
274#endif /* VBOX */
275{
276 tcg_gen_movi_tl(cpu_T[1], val);
277}
278
279#ifndef VBOX
280static inline void gen_op_movl_A0_im(uint32_t val)
281#else /* VBOX */
282DECLINLINE(void) gen_op_movl_A0_im(uint32_t val)
283#endif /* VBOX */
284{
285 tcg_gen_movi_tl(cpu_A0, val);
286}
287
288#ifdef TARGET_X86_64
289#ifndef VBOX
290static inline void gen_op_movq_A0_im(int64_t val)
291#else /* VBOX */
292DECLINLINE(void) gen_op_movq_A0_im(int64_t val)
293#endif /* VBOX */
294{
295 tcg_gen_movi_tl(cpu_A0, val);
296}
297#endif
298
299#ifndef VBOX
300static inline void gen_movtl_T0_im(target_ulong val)
301#else /* VBOX */
302DECLINLINE(void) gen_movtl_T0_im(target_ulong val)
303#endif /* VBOX */
304{
305 tcg_gen_movi_tl(cpu_T[0], val);
306}
307
308#ifndef VBOX
309static inline void gen_movtl_T1_im(target_ulong val)
310#else /* VBOX */
311DECLINLINE(void) gen_movtl_T1_im(target_ulong val)
312#endif /* VBOX */
313{
314 tcg_gen_movi_tl(cpu_T[1], val);
315}
316
317#ifndef VBOX
318static inline void gen_op_andl_T0_ffff(void)
319#else /* VBOX */
320DECLINLINE(void) gen_op_andl_T0_ffff(void)
321#endif /* VBOX */
322{
323 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
324}
325
326#ifndef VBOX
327static inline void gen_op_andl_T0_im(uint32_t val)
328#else /* VBOX */
329DECLINLINE(void) gen_op_andl_T0_im(uint32_t val)
330#endif /* VBOX */
331{
332 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val);
333}
334
335#ifndef VBOX
336static inline void gen_op_movl_T0_T1(void)
337#else /* VBOX */
338DECLINLINE(void) gen_op_movl_T0_T1(void)
339#endif /* VBOX */
340{
341 tcg_gen_mov_tl(cpu_T[0], cpu_T[1]);
342}
343
344#ifndef VBOX
345static inline void gen_op_andl_A0_ffff(void)
346#else /* VBOX */
347DECLINLINE(void) gen_op_andl_A0_ffff(void)
348#endif /* VBOX */
349{
350 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff);
351}
352
353#ifdef TARGET_X86_64
354
355#define NB_OP_SIZES 4
356
357#else /* !TARGET_X86_64 */
358
359#define NB_OP_SIZES 3
360
361#endif /* !TARGET_X86_64 */
362
363#if defined(WORDS_BIGENDIAN)
364#define REG_B_OFFSET (sizeof(target_ulong) - 1)
365#define REG_H_OFFSET (sizeof(target_ulong) - 2)
366#define REG_W_OFFSET (sizeof(target_ulong) - 2)
367#define REG_L_OFFSET (sizeof(target_ulong) - 4)
368#define REG_LH_OFFSET (sizeof(target_ulong) - 8)
369#else
370#define REG_B_OFFSET 0
371#define REG_H_OFFSET 1
372#define REG_W_OFFSET 0
373#define REG_L_OFFSET 0
374#define REG_LH_OFFSET 4
375#endif
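/* Example: on a little-endian host, AH of regs[R_EAX] is the byte at
   offset REG_H_OFFSET = 1; on a big-endian host the same byte sits at
   sizeof(target_ulong) - 2. The OT_BYTE cases below use regs[reg - 4]
   plus REG_H_OFFSET to reach the legacy high-byte registers AH/CH/DH/BH. */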
376
377#ifndef VBOX
378static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0)
379#else /* VBOX */
380DECLINLINE(void) gen_op_mov_reg_v(int ot, int reg, TCGv t0)
381#endif /* VBOX */
382{
383 switch(ot) {
384 case OT_BYTE:
385 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
386 tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
387 } else {
388 tcg_gen_st8_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
389 }
390 break;
391 case OT_WORD:
392 tcg_gen_st16_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
393 break;
394#ifdef TARGET_X86_64
395 case OT_LONG:
396 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
397 /* high part of register set to zero */
398 tcg_gen_movi_tl(cpu_tmp0, 0);
399 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
400 break;
401 default:
402 case OT_QUAD:
403 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
404 break;
405#else
406 default:
407 case OT_LONG:
408 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
409 break;
410#endif
411 }
412}
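/* Note: the OT_LONG case above mirrors hardware long mode semantics,
   where a 32 bit register write zero-extends into the full 64 bit
   register (e.g. "mov eax, 1" also clears RAX[63:32]). */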
413
414#ifndef VBOX
415static inline void gen_op_mov_reg_T0(int ot, int reg)
416#else /* VBOX */
417DECLINLINE(void) gen_op_mov_reg_T0(int ot, int reg)
418#endif /* VBOX */
419{
420 gen_op_mov_reg_v(ot, reg, cpu_T[0]);
421}
422
423#ifndef VBOX
424static inline void gen_op_mov_reg_T1(int ot, int reg)
425#else /* VBOX */
426DECLINLINE(void) gen_op_mov_reg_T1(int ot, int reg)
427#endif /* VBOX */
428{
429 gen_op_mov_reg_v(ot, reg, cpu_T[1]);
430}
431
432#ifndef VBOX
433static inline void gen_op_mov_reg_A0(int size, int reg)
434#else /* VBOX */
435DECLINLINE(void) gen_op_mov_reg_A0(int size, int reg)
436#endif /* VBOX */
437{
438 switch(size) {
439 case 0:
440 tcg_gen_st16_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
441 break;
442#ifdef TARGET_X86_64
443 case 1:
444 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
445 /* high part of register set to zero */
446 tcg_gen_movi_tl(cpu_tmp0, 0);
447 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
448 break;
449 default:
450 case 2:
451 tcg_gen_st_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
452 break;
453#else
454 default:
455 case 1:
456 tcg_gen_st32_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
457 break;
458#endif
459 }
460}
461
462#ifndef VBOX
463static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg)
464#else /* VBOX */
465DECLINLINE(void) gen_op_mov_v_reg(int ot, TCGv t0, int reg)
466#endif /* VBOX */
467{
468 switch(ot) {
469 case OT_BYTE:
470 if (reg < 4 X86_64_DEF( || reg >= 8 || x86_64_hregs)) {
471#ifndef VBOX
472 goto std_case;
473#else
474 tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_B_OFFSET);
475#endif
476 } else {
477 tcg_gen_ld8u_tl(t0, cpu_env, offsetof(CPUState, regs[reg - 4]) + REG_H_OFFSET);
478 }
479 break;
480 default:
481#ifndef VBOX
482 std_case:
483#endif
484 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, regs[reg]));
485 break;
486 }
487}
488
489#ifndef VBOX
490static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg)
491#else /* VBOX */
492DECLINLINE(void) gen_op_mov_TN_reg(int ot, int t_index, int reg)
493#endif /* VBOX */
494{
495 gen_op_mov_v_reg(ot, cpu_T[t_index], reg);
496}
497
498#ifndef VBOX
499static inline void gen_op_movl_A0_reg(int reg)
500#else /* VBOX */
501DECLINLINE(void) gen_op_movl_A0_reg(int reg)
502#endif /* VBOX */
503{
504 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
505}
506
507#ifndef VBOX
508static inline void gen_op_addl_A0_im(int32_t val)
509#else /* VBOX */
510DECLINLINE(void) gen_op_addl_A0_im(int32_t val)
511#endif /* VBOX */
512{
513 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
514#ifdef TARGET_X86_64
515 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
516#endif
517}
518
519#ifdef TARGET_X86_64
520#ifndef VBOX
521static inline void gen_op_addq_A0_im(int64_t val)
522#else /* VBOX */
523DECLINLINE(void) gen_op_addq_A0_im(int64_t val)
524#endif /* VBOX */
525{
526 tcg_gen_addi_tl(cpu_A0, cpu_A0, val);
527}
528#endif
529
530static void gen_add_A0_im(DisasContext *s, int val)
531{
532#ifdef TARGET_X86_64
533 if (CODE64(s))
534 gen_op_addq_A0_im(val);
535 else
536#endif
537 gen_op_addl_A0_im(val);
538}
539
540#ifndef VBOX
541static inline void gen_op_addl_T0_T1(void)
542#else /* VBOX */
543DECLINLINE(void) gen_op_addl_T0_T1(void)
544#endif /* VBOX */
545{
546 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
547}
548
549#ifndef VBOX
550static inline void gen_op_jmp_T0(void)
551#else /* VBOX */
552DECLINLINE(void) gen_op_jmp_T0(void)
553#endif /* VBOX */
554{
555 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip));
556}
557
558#ifndef VBOX
559static inline void gen_op_add_reg_im(int size, int reg, int32_t val)
560#else /* VBOX */
561DECLINLINE(void) gen_op_add_reg_im(int size, int reg, int32_t val)
562#endif /* VBOX */
563{
564 switch(size) {
565 case 0:
566 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
567 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
568 tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
569 break;
570 case 1:
571 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
572 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
573#ifdef TARGET_X86_64
574 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
575#endif
576 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
577 break;
578#ifdef TARGET_X86_64
579 case 2:
580 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
581 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, val);
582 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
583 break;
584#endif
585 }
586}
587
588#ifndef VBOX
589static inline void gen_op_add_reg_T0(int size, int reg)
590#else /* VBOX */
591DECLINLINE(void) gen_op_add_reg_T0(int size, int reg)
592#endif /* VBOX */
593{
594 switch(size) {
595 case 0:
596 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
597 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
598 tcg_gen_st16_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_W_OFFSET);
599 break;
600 case 1:
601 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
602 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
603#ifdef TARGET_X86_64
604 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffff);
605#endif
606 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
607 break;
608#ifdef TARGET_X86_64
609 case 2:
610 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
611 tcg_gen_add_tl(cpu_tmp0, cpu_tmp0, cpu_T[0]);
612 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
613 break;
614#endif
615 }
616}
617
618#ifndef VBOX
619static inline void gen_op_set_cc_op(int32_t val)
620#else /* VBOX */
621DECLINLINE(void) gen_op_set_cc_op(int32_t val)
622#endif /* VBOX */
623{
624 tcg_gen_movi_i32(cpu_cc_op, val);
625}
626
627#ifndef VBOX
628static inline void gen_op_addl_A0_reg_sN(int shift, int reg)
629#else /* VBOX */
630DECLINLINE(void) gen_op_addl_A0_reg_sN(int shift, int reg)
631#endif /* VBOX */
632{
633 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
634 if (shift != 0)
635 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
636 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
637#ifdef TARGET_X86_64
638 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
639#endif
640}
641#ifdef VBOX
642DECLINLINE(void) gen_op_seg_check(int reg, bool keepA0)
643{
644 /* It seems segments don't get out of sync - if in fact they do, enable the code below. */
645#ifdef FORCE_SEGMENT_SYNC
646#if 1
647 TCGv t0;
648
649 /* Considering the poor quality of the TCG optimizer, it's better to call the helper directly */
650 t0 = tcg_temp_local_new(TCG_TYPE_TL);
651 tcg_gen_movi_tl(t0, reg);
652 tcg_gen_helper_0_1(helper_sync_seg, t0);
653 tcg_temp_free(t0);
654#else
655 /* Our segments could be outdated; check the newselector field to see whether an update is really needed */
656 int skip_label;
657 TCGv t0, a0;
658
659 /* For other segments this check is a waste of time; TCG is also unable to cope with this code
660 for data/stack segments, as it expects cpu_T[0] to be alive */
661 if (reg != R_GS)
662 return;
663
664 if (keepA0)
665 {
666 /* we need to store old cpu_A0 */
667 a0 = tcg_temp_local_new(TCG_TYPE_TL);
668 tcg_gen_mov_tl(a0, cpu_A0);
669 }
670
671 skip_label = gen_new_label();
672 t0 = tcg_temp_local_new(TCG_TYPE_TL);
673
674 tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, segs[reg].newselector) + REG_L_OFFSET);
675 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, skip_label);
676 tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, eflags) + REG_L_OFFSET);
677 tcg_gen_andi_tl(t0, t0, VM_MASK);
678 tcg_gen_brcondi_i32(TCG_COND_NE, t0, 0, skip_label);
679 tcg_gen_movi_tl(t0, reg);
680
681 tcg_gen_helper_0_1(helper_sync_seg, t0);
682
683 tcg_temp_free(t0);
684
685 gen_set_label(skip_label);
686 if (keepA0)
687 {
688 tcg_gen_mov_tl(cpu_A0, a0);
689 tcg_temp_free(a0);
690 }
691#endif /* 0 */
692#endif /* FORCE_SEGMENT_SYNC */
693}
694#endif
695
696#ifndef VBOX
697static inline void gen_op_movl_A0_seg(int reg)
698#else /* VBOX */
699DECLINLINE(void) gen_op_movl_A0_seg(int reg)
700#endif /* VBOX */
701{
702#ifdef VBOX
703 gen_op_seg_check(reg, false);
704#endif
705 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base) + REG_L_OFFSET);
706}
707
708#ifndef VBOX
709static inline void gen_op_addl_A0_seg(int reg)
710#else /* VBOX */
711DECLINLINE(void) gen_op_addl_A0_seg(int reg)
712#endif /* VBOX */
713{
714#ifdef VBOX
715 gen_op_seg_check(reg, true);
716#endif
717 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
718 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
719#ifdef TARGET_X86_64
720 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
721#endif
722}
723
724#ifdef TARGET_X86_64
725#ifndef VBOX
726static inline void gen_op_movq_A0_seg(int reg)
727#else /* VBOX */
728DECLINLINE(void) gen_op_movq_A0_seg(int reg)
729#endif /* VBOX */
730{
731#ifdef VBOX
732 gen_op_seg_check(reg, false);
733#endif
734 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, segs[reg].base));
735}
736
737#ifndef VBOX
738static inline void gen_op_addq_A0_seg(int reg)
739#else /* VBOX */
740DECLINLINE(void) gen_op_addq_A0_seg(int reg)
741#endif /* VBOX */
742{
743#ifdef VBOX
744 gen_op_seg_check(reg, true);
745#endif
746 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, segs[reg].base));
747 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
748}
749
750#ifndef VBOX
751static inline void gen_op_movq_A0_reg(int reg)
752#else /* VBOX */
753DECLINLINE(void) gen_op_movq_A0_reg(int reg)
754#endif /* VBOX */
755{
756 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]));
757}
758
759#ifndef VBOX
760static inline void gen_op_addq_A0_reg_sN(int shift, int reg)
761#else /* VBOX */
762DECLINLINE(void) gen_op_addq_A0_reg_sN(int shift, int reg)
763#endif /* VBOX */
764{
765 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]));
766 if (shift != 0)
767 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, shift);
768 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
769}
770#endif
771
772#ifndef VBOX
773static inline void gen_op_lds_T0_A0(int idx)
774#else /* VBOX */
775DECLINLINE(void) gen_op_lds_T0_A0(int idx)
776#endif /* VBOX */
777{
778 int mem_index = (idx >> 2) - 1;
779 switch(idx & 3) {
780 case 0:
781 tcg_gen_qemu_ld8s(cpu_T[0], cpu_A0, mem_index);
782 break;
783 case 1:
784 tcg_gen_qemu_ld16s(cpu_T[0], cpu_A0, mem_index);
785 break;
786 default:
787 case 2:
788 tcg_gen_qemu_ld32s(cpu_T[0], cpu_A0, mem_index);
789 break;
790 }
791}
792
793#ifndef VBOX
794static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0)
795#else /* VBOX */
796DECLINLINE(void) gen_op_ld_v(int idx, TCGv t0, TCGv a0)
797#endif /* VBOX */
798{
799 int mem_index = (idx >> 2) - 1;
800 switch(idx & 3) {
801 case 0:
802 tcg_gen_qemu_ld8u(t0, a0, mem_index);
803 break;
804 case 1:
805 tcg_gen_qemu_ld16u(t0, a0, mem_index);
806 break;
807 case 2:
808 tcg_gen_qemu_ld32u(t0, a0, mem_index);
809 break;
810 default:
811 case 3:
812 tcg_gen_qemu_ld64(t0, a0, mem_index);
813 break;
814 }
815}
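/* The idx argument is a packed value: callers pass "ot + s->mem_index",
   with the operand size (OT_BYTE..OT_QUAD) in bits 1:0 and s->mem_index
   presumably pre-biased so that "(idx >> 2) - 1" above recovers the
   softmmu memory index. */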
816
817/* XXX: always use ldu or lds */
818#ifndef VBOX
819static inline void gen_op_ld_T0_A0(int idx)
820#else /* VBOX */
821DECLINLINE(void) gen_op_ld_T0_A0(int idx)
822#endif /* VBOX */
823{
824 gen_op_ld_v(idx, cpu_T[0], cpu_A0);
825}
826
827#ifndef VBOX
828static inline void gen_op_ldu_T0_A0(int idx)
829#else /* VBOX */
830DECLINLINE(void) gen_op_ldu_T0_A0(int idx)
831#endif /* VBOX */
832{
833 gen_op_ld_v(idx, cpu_T[0], cpu_A0);
834}
835
836#ifndef VBOX
837static inline void gen_op_ld_T1_A0(int idx)
838#else /* VBOX */
839DECLINLINE(void) gen_op_ld_T1_A0(int idx)
840#endif /* VBOX */
841{
842 gen_op_ld_v(idx, cpu_T[1], cpu_A0);
843}
844
845#ifndef VBOX
846static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0)
847#else /* VBOX */
848DECLINLINE(void) gen_op_st_v(int idx, TCGv t0, TCGv a0)
849#endif /* VBOX */
850{
851 int mem_index = (idx >> 2) - 1;
852 switch(idx & 3) {
853 case 0:
854 tcg_gen_qemu_st8(t0, a0, mem_index);
855 break;
856 case 1:
857 tcg_gen_qemu_st16(t0, a0, mem_index);
858 break;
859 case 2:
860 tcg_gen_qemu_st32(t0, a0, mem_index);
861 break;
862 default:
863 case 3:
864 tcg_gen_qemu_st64(t0, a0, mem_index);
865 break;
866 }
867}
868
869#ifndef VBOX
870static inline void gen_op_st_T0_A0(int idx)
871#else /* VBOX */
872DECLINLINE(void) gen_op_st_T0_A0(int idx)
873#endif /* VBOX */
874{
875 gen_op_st_v(idx, cpu_T[0], cpu_A0);
876}
877
878#ifndef VBOX
879static inline void gen_op_st_T1_A0(int idx)
880#else /* VBOX */
881DECLINLINE(void) gen_op_st_T1_A0(int idx)
882#endif /* VBOX */
883{
884 gen_op_st_v(idx, cpu_T[1], cpu_A0);
885}
886
887#ifdef VBOX
888static void gen_check_external_event(void)
889{
890#if 1
891 /** @todo: once TCG codegen improves, we may want to use the version
892 from the #else branch below */
893 tcg_gen_helper_0_0(helper_check_external_event);
894#else
895 int skip_label;
896 TCGv t0;
897
898 skip_label = gen_new_label();
899 t0 = tcg_temp_local_new(TCG_TYPE_TL);
900 /* t0 = cpu_tmp0; */
901
902 tcg_gen_ld32u_tl(t0, cpu_env, offsetof(CPUState, interrupt_request));
903 /* Keep in sync with helper_check_external_event() */
904 tcg_gen_andi_tl(t0, t0,
905 CPU_INTERRUPT_EXTERNAL_EXIT
906 | CPU_INTERRUPT_EXTERNAL_TIMER
907 | CPU_INTERRUPT_EXTERNAL_DMA
908 | CPU_INTERRUPT_EXTERNAL_HARD);
909 /** @todo: predict branch as taken */
910 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, skip_label);
911 tcg_temp_free(t0);
912
913 tcg_gen_helper_0_0(helper_check_external_event);
914
915 gen_set_label(skip_label);
916#endif
917}
918
919#if 0 /* unused code? */
920static void gen_check_external_event2()
921{
922 tcg_gen_helper_0_0(helper_check_external_event);
923}
924#endif
925
926#endif
927
928#ifndef VBOX
929static inline void gen_jmp_im(target_ulong pc)
930#else /* VBOX */
931DECLINLINE(void) gen_jmp_im(target_ulong pc)
932#endif /* VBOX */
933{
934 tcg_gen_movi_tl(cpu_tmp0, pc);
935 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, eip));
936}
937
938#ifdef VBOX
939DECLINLINE(void) gen_update_eip(target_ulong pc)
940{
941 gen_jmp_im(pc);
942#ifdef VBOX_DUMP_STATE
943 tcg_gen_helper_0_0(helper_dump_state);
944#endif
945}
946
947#endif
948
949#ifndef VBOX
950static inline void gen_string_movl_A0_ESI(DisasContext *s)
951#else /* VBOX */
952DECLINLINE(void) gen_string_movl_A0_ESI(DisasContext *s)
953#endif /* VBOX */
954{
955 int override;
956
957 override = s->override;
958#ifdef TARGET_X86_64
959 if (s->aflag == 2) {
960 if (override >= 0) {
961 gen_op_movq_A0_seg(override);
962 gen_op_addq_A0_reg_sN(0, R_ESI);
963 } else {
964 gen_op_movq_A0_reg(R_ESI);
965 }
966 } else
967#endif
968 if (s->aflag) {
969 /* 32 bit address */
970 if (s->addseg && override < 0)
971 override = R_DS;
972 if (override >= 0) {
973 gen_op_movl_A0_seg(override);
974 gen_op_addl_A0_reg_sN(0, R_ESI);
975 } else {
976 gen_op_movl_A0_reg(R_ESI);
977 }
978 } else {
979 /* 16 bit address, always override */
980 if (override < 0)
981 override = R_DS;
982 gen_op_movl_A0_reg(R_ESI);
983 gen_op_andl_A0_ffff();
984 gen_op_addl_A0_seg(override);
985 }
986}
987
988#ifndef VBOX
989static inline void gen_string_movl_A0_EDI(DisasContext *s)
990#else /* VBOX */
991DECLINLINE(void) gen_string_movl_A0_EDI(DisasContext *s)
992#endif /* VBOX */
993{
994#ifdef TARGET_X86_64
995 if (s->aflag == 2) {
996 gen_op_movq_A0_reg(R_EDI);
997 } else
998#endif
999 if (s->aflag) {
1000 if (s->addseg) {
1001 gen_op_movl_A0_seg(R_ES);
1002 gen_op_addl_A0_reg_sN(0, R_EDI);
1003 } else {
1004 gen_op_movl_A0_reg(R_EDI);
1005 }
1006 } else {
1007 gen_op_movl_A0_reg(R_EDI);
1008 gen_op_andl_A0_ffff();
1009 gen_op_addl_A0_seg(R_ES);
1010 }
1011}
1012
1013#ifndef VBOX
1014static inline void gen_op_movl_T0_Dshift(int ot)
1015#else /* VBOX */
1016DECLINLINE(void) gen_op_movl_T0_Dshift(int ot)
1017#endif /* VBOX */
1018{
1019 tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df));
1020 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], ot);
1021}
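/* env->df is kept as +1 or -1 according to EFLAGS.DF, so shifting it
   left by ot yields the signed per-iteration delta (+/-1, 2, 4 or 8
   bytes) that the string ops below add to ESI/EDI. */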
1022
1023static void gen_extu(int ot, TCGv reg)
1024{
1025 switch(ot) {
1026 case OT_BYTE:
1027 tcg_gen_ext8u_tl(reg, reg);
1028 break;
1029 case OT_WORD:
1030 tcg_gen_ext16u_tl(reg, reg);
1031 break;
1032 case OT_LONG:
1033 tcg_gen_ext32u_tl(reg, reg);
1034 break;
1035 default:
1036 break;
1037 }
1038}
1039
1040static void gen_exts(int ot, TCGv reg)
1041{
1042 switch(ot) {
1043 case OT_BYTE:
1044 tcg_gen_ext8s_tl(reg, reg);
1045 break;
1046 case OT_WORD:
1047 tcg_gen_ext16s_tl(reg, reg);
1048 break;
1049 case OT_LONG:
1050 tcg_gen_ext32s_tl(reg, reg);
1051 break;
1052 default:
1053 break;
1054 }
1055}
1056
1057#ifndef VBOX
1058static inline void gen_op_jnz_ecx(int size, int label1)
1059#else /* VBOX */
1060DECLINLINE(void) gen_op_jnz_ecx(int size, int label1)
1061#endif /* VBOX */
1062{
1063 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
1064 gen_extu(size + 1, cpu_tmp0);
1065 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, label1);
1066}
1067
1068#ifndef VBOX
1069static inline void gen_op_jz_ecx(int size, int label1)
1070#else /* VBOX */
1071DECLINLINE(void) gen_op_jz_ecx(int size, int label1)
1072#endif /* VBOX */
1073{
1074 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX]));
1075 gen_extu(size + 1, cpu_tmp0);
1076 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
1077}
1078
1079static void *helper_in_func[3] = {
1080 helper_inb,
1081 helper_inw,
1082 helper_inl,
1083};
1084
1085static void *helper_out_func[3] = {
1086 helper_outb,
1087 helper_outw,
1088 helper_outl,
1089};
1090
1091static void *gen_check_io_func[3] = {
1092 helper_check_iob,
1093 helper_check_iow,
1094 helper_check_iol,
1095};
1096
1097static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip,
1098 uint32_t svm_flags)
1099{
1100 int state_saved;
1101 target_ulong next_eip;
1102
1103 state_saved = 0;
1104 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
1105 if (s->cc_op != CC_OP_DYNAMIC)
1106 gen_op_set_cc_op(s->cc_op);
1107 gen_jmp_im(cur_eip);
1108 state_saved = 1;
1109 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
1110 tcg_gen_helper_0_1(gen_check_io_func[ot],
1111 cpu_tmp2_i32);
1112 }
1113 if(s->flags & HF_SVMI_MASK) {
1114 if (!state_saved) {
1115 if (s->cc_op != CC_OP_DYNAMIC)
1116 gen_op_set_cc_op(s->cc_op);
1117 gen_jmp_im(cur_eip);
1118 state_saved = 1;
1119 }
1120 svm_flags |= (1 << (4 + ot));
1121 next_eip = s->pc - s->cs_base;
1122 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
1123 tcg_gen_helper_0_3(helper_svm_check_io,
1124 cpu_tmp2_i32,
1125 tcg_const_i32(svm_flags),
1126 tcg_const_i32(next_eip - cur_eip));
1127 }
1128}
1129
1130#ifndef VBOX
1131static inline void gen_movs(DisasContext *s, int ot)
1132#else /* VBOX */
1133DECLINLINE(void) gen_movs(DisasContext *s, int ot)
1134#endif /* VBOX */
1135{
1136 gen_string_movl_A0_ESI(s);
1137 gen_op_ld_T0_A0(ot + s->mem_index);
1138 gen_string_movl_A0_EDI(s);
1139 gen_op_st_T0_A0(ot + s->mem_index);
1140 gen_op_movl_T0_Dshift(ot);
1141 gen_op_add_reg_T0(s->aflag, R_ESI);
1142 gen_op_add_reg_T0(s->aflag, R_EDI);
1143}
1144
1145#ifndef VBOX
1146static inline void gen_update_cc_op(DisasContext *s)
1147#else /* VBOX */
1148DECLINLINE(void) gen_update_cc_op(DisasContext *s)
1149#endif /* VBOX */
1150{
1151 if (s->cc_op != CC_OP_DYNAMIC) {
1152 gen_op_set_cc_op(s->cc_op);
1153 s->cc_op = CC_OP_DYNAMIC;
1154 }
1155}
1156
1157static void gen_op_update1_cc(void)
1158{
1159 tcg_gen_discard_tl(cpu_cc_src);
1160 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1161}
1162
1163static void gen_op_update2_cc(void)
1164{
1165 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1166 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1167}
1168
1169#ifndef VBOX
1170static inline void gen_op_cmpl_T0_T1_cc(void)
1171#else /* VBOX */
1172DECLINLINE(void) gen_op_cmpl_T0_T1_cc(void)
1173#endif /* VBOX */
1174{
1175 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1176 tcg_gen_sub_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
1177}
1178
1179#ifndef VBOX
1180static inline void gen_op_testl_T0_T1_cc(void)
1181#else /* VBOX */
1182DECLINLINE(void) gen_op_testl_T0_T1_cc(void)
1183#endif /* VBOX */
1184{
1185 tcg_gen_discard_tl(cpu_cc_src);
1186 tcg_gen_and_tl(cpu_cc_dst, cpu_T[0], cpu_T[1]);
1187}
1188
1189static void gen_op_update_neg_cc(void)
1190{
1191 tcg_gen_neg_tl(cpu_cc_src, cpu_T[0]);
1192 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1193}
1194
1195/* compute eflags.C to reg */
1196static void gen_compute_eflags_c(TCGv reg)
1197{
1198#if TCG_TARGET_REG_BITS == 32
1199 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
1200 tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
1201 (long)cc_table + offsetof(CCTable, compute_c));
1202 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
1203 tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
1204 1, &cpu_tmp2_i32, 0, NULL);
1205#else
1206 tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
1207 tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
1208 tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
1209 (long)cc_table + offsetof(CCTable, compute_c));
1210 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
1211 tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
1212 1, &cpu_tmp2_i32, 0, NULL);
1213#endif
1214 tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
1215}
1216
1217/* compute all eflags to reg */
1218static void gen_compute_eflags(TCGv reg)
1219{
1220#if TCG_TARGET_REG_BITS == 32
1221 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3);
1222 tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32,
1223 (long)cc_table + offsetof(CCTable, compute_all));
1224 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0);
1225 tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE,
1226 1, &cpu_tmp2_i32, 0, NULL);
1227#else
1228 tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op);
1229 tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4);
1230 tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64,
1231 (long)cc_table + offsetof(CCTable, compute_all));
1232 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0);
1233 tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE,
1234 1, &cpu_tmp2_i32, 0, NULL);
1235#endif
1236 tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32);
1237}
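/* Both eflags helpers dispatch through cc_table[cc_op]: the shift
   (3 on 32 bit hosts, 4 on 64 bit hosts) is log2(sizeof(CCTable)), i.e.
   two function pointers, and offsetof() selects the compute_c or
   compute_all entry, which is then invoked as a pure indirect call whose
   single result lands in cpu_tmp2_i32. */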
1238
1239#ifndef VBOX
1240static inline void gen_setcc_slow_T0(DisasContext *s, int jcc_op)
1241#else /* VBOX */
1242DECLINLINE(void) gen_setcc_slow_T0(DisasContext *s, int jcc_op)
1243#endif /* VBOX */
1244{
1245 if (s->cc_op != CC_OP_DYNAMIC)
1246 gen_op_set_cc_op(s->cc_op);
1247 switch(jcc_op) {
1248 case JCC_O:
1249 gen_compute_eflags(cpu_T[0]);
1250 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 11);
1251 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1252 break;
1253 case JCC_B:
1254 gen_compute_eflags_c(cpu_T[0]);
1255 break;
1256 case JCC_Z:
1257 gen_compute_eflags(cpu_T[0]);
1258 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 6);
1259 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1260 break;
1261 case JCC_BE:
1262 gen_compute_eflags(cpu_tmp0);
1263 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 6);
1264 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1265 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1266 break;
1267 case JCC_S:
1268 gen_compute_eflags(cpu_T[0]);
1269 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 7);
1270 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1271 break;
1272 case JCC_P:
1273 gen_compute_eflags(cpu_T[0]);
1274 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 2);
1275 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1276 break;
1277 case JCC_L:
1278 gen_compute_eflags(cpu_tmp0);
1279 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
1280 tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 7); /* CC_S */
1281 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1282 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1283 break;
1284 default:
1285 case JCC_LE:
1286 gen_compute_eflags(cpu_tmp0);
1287 tcg_gen_shri_tl(cpu_T[0], cpu_tmp0, 11); /* CC_O */
1288 tcg_gen_shri_tl(cpu_tmp4, cpu_tmp0, 7); /* CC_S */
1289 tcg_gen_shri_tl(cpu_tmp0, cpu_tmp0, 6); /* CC_Z */
1290 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1291 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
1292 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 1);
1293 break;
1294 }
1295}
1296
1297/* return true if setcc_slow is not needed (WARNING: must be kept in
1298 sync with gen_jcc1) */
1299static int is_fast_jcc_case(DisasContext *s, int b)
1300{
1301 int jcc_op;
1302 jcc_op = (b >> 1) & 7;
1303 switch(s->cc_op) {
1304 /* we optimize the cmp/jcc case */
1305 case CC_OP_SUBB:
1306 case CC_OP_SUBW:
1307 case CC_OP_SUBL:
1308 case CC_OP_SUBQ:
1309 if (jcc_op == JCC_O || jcc_op == JCC_P)
1310 goto slow_jcc;
1311 break;
1312
1313 /* some jumps are easy to compute */
1314 case CC_OP_ADDB:
1315 case CC_OP_ADDW:
1316 case CC_OP_ADDL:
1317 case CC_OP_ADDQ:
1318
1319 case CC_OP_LOGICB:
1320 case CC_OP_LOGICW:
1321 case CC_OP_LOGICL:
1322 case CC_OP_LOGICQ:
1323
1324 case CC_OP_INCB:
1325 case CC_OP_INCW:
1326 case CC_OP_INCL:
1327 case CC_OP_INCQ:
1328
1329 case CC_OP_DECB:
1330 case CC_OP_DECW:
1331 case CC_OP_DECL:
1332 case CC_OP_DECQ:
1333
1334 case CC_OP_SHLB:
1335 case CC_OP_SHLW:
1336 case CC_OP_SHLL:
1337 case CC_OP_SHLQ:
1338 if (jcc_op != JCC_Z && jcc_op != JCC_S)
1339 goto slow_jcc;
1340 break;
1341 default:
1342 slow_jcc:
1343 return 0;
1344 }
1345 return 1;
1346}
1347
1348/* generate a conditional jump to label 'l1' according to jump opcode
1349 value 'b'. In the fast case, T0 is guaranteed not to be used. */
1350#ifndef VBOX
1351static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
1352#else /* VBOX */
1353DECLINLINE(void) gen_jcc1(DisasContext *s, int cc_op, int b, int l1)
1354#endif /* VBOX */
1355{
1356 int inv, jcc_op, size, cond;
1357 TCGv t0;
1358
1359 inv = b & 1;
1360 jcc_op = (b >> 1) & 7;
1361
1362 switch(cc_op) {
1363 /* we optimize the cmp/jcc case */
1364 case CC_OP_SUBB:
1365 case CC_OP_SUBW:
1366 case CC_OP_SUBL:
1367 case CC_OP_SUBQ:
1368
1369 size = cc_op - CC_OP_SUBB;
1370 switch(jcc_op) {
1371 case JCC_Z:
1372 fast_jcc_z:
1373 switch(size) {
1374 case 0:
1375 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xff);
1376 t0 = cpu_tmp0;
1377 break;
1378 case 1:
1379 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffff);
1380 t0 = cpu_tmp0;
1381 break;
1382#ifdef TARGET_X86_64
1383 case 2:
1384 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0xffffffff);
1385 t0 = cpu_tmp0;
1386 break;
1387#endif
1388 default:
1389 t0 = cpu_cc_dst;
1390 break;
1391 }
1392 tcg_gen_brcondi_tl(inv ? TCG_COND_NE : TCG_COND_EQ, t0, 0, l1);
1393 break;
1394 case JCC_S:
1395 fast_jcc_s:
1396 switch(size) {
1397 case 0:
1398 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80);
1399 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
1400 0, l1);
1401 break;
1402 case 1:
1403 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x8000);
1404 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
1405 0, l1);
1406 break;
1407#ifdef TARGET_X86_64
1408 case 2:
1409 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_dst, 0x80000000);
1410 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE, cpu_tmp0,
1411 0, l1);
1412 break;
1413#endif
1414 default:
1415 tcg_gen_brcondi_tl(inv ? TCG_COND_GE : TCG_COND_LT, cpu_cc_dst,
1416 0, l1);
1417 break;
1418 }
1419 break;
1420
1421 case JCC_B:
1422 cond = inv ? TCG_COND_GEU : TCG_COND_LTU;
1423 goto fast_jcc_b;
1424 case JCC_BE:
1425 cond = inv ? TCG_COND_GTU : TCG_COND_LEU;
1426 fast_jcc_b:
1427 tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
1428 switch(size) {
1429 case 0:
1430 t0 = cpu_tmp0;
1431 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xff);
1432 tcg_gen_andi_tl(t0, cpu_cc_src, 0xff);
1433 break;
1434 case 1:
1435 t0 = cpu_tmp0;
1436 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffff);
1437 tcg_gen_andi_tl(t0, cpu_cc_src, 0xffff);
1438 break;
1439#ifdef TARGET_X86_64
1440 case 2:
1441 t0 = cpu_tmp0;
1442 tcg_gen_andi_tl(cpu_tmp4, cpu_tmp4, 0xffffffff);
1443 tcg_gen_andi_tl(t0, cpu_cc_src, 0xffffffff);
1444 break;
1445#endif
1446 default:
1447 t0 = cpu_cc_src;
1448 break;
1449 }
1450 tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
1451 break;
1452
1453 case JCC_L:
1454 cond = inv ? TCG_COND_GE : TCG_COND_LT;
1455 goto fast_jcc_l;
1456 case JCC_LE:
1457 cond = inv ? TCG_COND_GT : TCG_COND_LE;
1458 fast_jcc_l:
1459 tcg_gen_add_tl(cpu_tmp4, cpu_cc_dst, cpu_cc_src);
1460 switch(size) {
1461 case 0:
1462 t0 = cpu_tmp0;
1463 tcg_gen_ext8s_tl(cpu_tmp4, cpu_tmp4);
1464 tcg_gen_ext8s_tl(t0, cpu_cc_src);
1465 break;
1466 case 1:
1467 t0 = cpu_tmp0;
1468 tcg_gen_ext16s_tl(cpu_tmp4, cpu_tmp4);
1469 tcg_gen_ext16s_tl(t0, cpu_cc_src);
1470 break;
1471#ifdef TARGET_X86_64
1472 case 2:
1473 t0 = cpu_tmp0;
1474 tcg_gen_ext32s_tl(cpu_tmp4, cpu_tmp4);
1475 tcg_gen_ext32s_tl(t0, cpu_cc_src);
1476 break;
1477#endif
1478 default:
1479 t0 = cpu_cc_src;
1480 break;
1481 }
1482 tcg_gen_brcond_tl(cond, cpu_tmp4, t0, l1);
1483 break;
1484
1485 default:
1486 goto slow_jcc;
1487 }
1488 break;
1489
1490 /* some jumps are easy to compute */
1491 case CC_OP_ADDB:
1492 case CC_OP_ADDW:
1493 case CC_OP_ADDL:
1494 case CC_OP_ADDQ:
1495
1496 case CC_OP_ADCB:
1497 case CC_OP_ADCW:
1498 case CC_OP_ADCL:
1499 case CC_OP_ADCQ:
1500
1501 case CC_OP_SBBB:
1502 case CC_OP_SBBW:
1503 case CC_OP_SBBL:
1504 case CC_OP_SBBQ:
1505
1506 case CC_OP_LOGICB:
1507 case CC_OP_LOGICW:
1508 case CC_OP_LOGICL:
1509 case CC_OP_LOGICQ:
1510
1511 case CC_OP_INCB:
1512 case CC_OP_INCW:
1513 case CC_OP_INCL:
1514 case CC_OP_INCQ:
1515
1516 case CC_OP_DECB:
1517 case CC_OP_DECW:
1518 case CC_OP_DECL:
1519 case CC_OP_DECQ:
1520
1521 case CC_OP_SHLB:
1522 case CC_OP_SHLW:
1523 case CC_OP_SHLL:
1524 case CC_OP_SHLQ:
1525
1526 case CC_OP_SARB:
1527 case CC_OP_SARW:
1528 case CC_OP_SARL:
1529 case CC_OP_SARQ:
1530 switch(jcc_op) {
1531 case JCC_Z:
1532 size = (cc_op - CC_OP_ADDB) & 3;
1533 goto fast_jcc_z;
1534 case JCC_S:
1535 size = (cc_op - CC_OP_ADDB) & 3;
1536 goto fast_jcc_s;
1537 default:
1538 goto slow_jcc;
1539 }
1540 break;
1541 default:
1542 slow_jcc:
1543 gen_setcc_slow_T0(s, jcc_op);
1544 tcg_gen_brcondi_tl(inv ? TCG_COND_EQ : TCG_COND_NE,
1545 cpu_T[0], 0, l1);
1546 break;
1547 }
1548}
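/* Example: b packs an inversion flag in bit 0 and the jcc opcode above
   it, so b = (JCC_Z << 1) branches when ZF is set (JZ) while
   b = (JCC_Z << 1) | 1 branches when ZF is clear (JNZ); GEN_REPZ2 below
   relies on exactly this encoding. */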
1549
1550/* XXX: does not work with gdbstub "ice" single step - not a
1551 serious problem */
1552static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
1553{
1554 int l1, l2;
1555
1556 l1 = gen_new_label();
1557 l2 = gen_new_label();
1558 gen_op_jnz_ecx(s->aflag, l1);
1559 gen_set_label(l2);
1560 gen_jmp_tb(s, next_eip, 1);
1561 gen_set_label(l1);
1562 return l2;
1563}
1564
1565#ifndef VBOX
1566static inline void gen_stos(DisasContext *s, int ot)
1567#else /* VBOX */
1568DECLINLINE(void) gen_stos(DisasContext *s, int ot)
1569#endif /* VBOX */
1570{
1571 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1572 gen_string_movl_A0_EDI(s);
1573 gen_op_st_T0_A0(ot + s->mem_index);
1574 gen_op_movl_T0_Dshift(ot);
1575 gen_op_add_reg_T0(s->aflag, R_EDI);
1576}
1577
1578#ifndef VBOX
1579static inline void gen_lods(DisasContext *s, int ot)
1580#else /* VBOX */
1581DECLINLINE(void) gen_lods(DisasContext *s, int ot)
1582#endif /* VBOX */
1583{
1584 gen_string_movl_A0_ESI(s);
1585 gen_op_ld_T0_A0(ot + s->mem_index);
1586 gen_op_mov_reg_T0(ot, R_EAX);
1587 gen_op_movl_T0_Dshift(ot);
1588 gen_op_add_reg_T0(s->aflag, R_ESI);
1589}
1590
1591#ifndef VBOX
1592static inline void gen_scas(DisasContext *s, int ot)
1593#else /* VBOX */
1594DECLINLINE(void) gen_scas(DisasContext *s, int ot)
1595#endif /* VBOX */
1596{
1597 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
1598 gen_string_movl_A0_EDI(s);
1599 gen_op_ld_T1_A0(ot + s->mem_index);
1600 gen_op_cmpl_T0_T1_cc();
1601 gen_op_movl_T0_Dshift(ot);
1602 gen_op_add_reg_T0(s->aflag, R_EDI);
1603}
1604
1605#ifndef VBOX
1606static inline void gen_cmps(DisasContext *s, int ot)
1607#else /* VBOX */
1608DECLINLINE(void) gen_cmps(DisasContext *s, int ot)
1609#endif /* VBOX */
1610{
1611 gen_string_movl_A0_ESI(s);
1612 gen_op_ld_T0_A0(ot + s->mem_index);
1613 gen_string_movl_A0_EDI(s);
1614 gen_op_ld_T1_A0(ot + s->mem_index);
1615 gen_op_cmpl_T0_T1_cc();
1616 gen_op_movl_T0_Dshift(ot);
1617 gen_op_add_reg_T0(s->aflag, R_ESI);
1618 gen_op_add_reg_T0(s->aflag, R_EDI);
1619}
1620
1621#ifndef VBOX
1622static inline void gen_ins(DisasContext *s, int ot)
1623#else /* VBOX */
1624DECLINLINE(void) gen_ins(DisasContext *s, int ot)
1625#endif /* VBOX */
1626{
1627 if (use_icount)
1628 gen_io_start();
1629 gen_string_movl_A0_EDI(s);
1630 /* Note: we must do this dummy write first to be restartable in
1631 case of page fault. */
1632 gen_op_movl_T0_0();
1633 gen_op_st_T0_A0(ot + s->mem_index);
1634 gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
1635 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
1636 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
1637 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);
1638 gen_op_st_T0_A0(ot + s->mem_index);
1639 gen_op_movl_T0_Dshift(ot);
1640 gen_op_add_reg_T0(s->aflag, R_EDI);
1641 if (use_icount)
1642 gen_io_end();
1643}
1644
1645#ifndef VBOX
1646static inline void gen_outs(DisasContext *s, int ot)
1647#else /* VBOX */
1648DECLINLINE(void) gen_outs(DisasContext *s, int ot)
1649#endif /* VBOX */
1650{
1651 if (use_icount)
1652 gen_io_start();
1653 gen_string_movl_A0_ESI(s);
1654 gen_op_ld_T0_A0(ot + s->mem_index);
1655
1656 gen_op_mov_TN_reg(OT_WORD, 1, R_EDX);
1657 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]);
1658 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
1659 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]);
1660 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
1661
1662 gen_op_movl_T0_Dshift(ot);
1663 gen_op_add_reg_T0(s->aflag, R_ESI);
1664 if (use_icount)
1665 gen_io_end();
1666}
1667
1668/* same method as Valgrind: we generate jumps to the current or next
1669 instruction */
1670#ifndef VBOX
1671#define GEN_REPZ(op) \
1672static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1673 target_ulong cur_eip, target_ulong next_eip) \
1674{ \
1675 int l2; \
1676 gen_update_cc_op(s); \
1677 l2 = gen_jz_ecx_string(s, next_eip); \
1678 gen_ ## op(s, ot); \
1679 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1680 /* a loop would cause two single step exceptions if ECX = 1 \
1681 before rep string_insn */ \
1682 if (!s->jmp_opt) \
1683 gen_op_jz_ecx(s->aflag, l2); \
1684 gen_jmp(s, cur_eip); \
1685}
1686#else /* VBOX */
1687#define GEN_REPZ(op) \
1688DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot, \
1689 target_ulong cur_eip, target_ulong next_eip) \
1690{ \
1691 int l2; \
1692 gen_update_cc_op(s); \
1693 l2 = gen_jz_ecx_string(s, next_eip); \
1694 gen_ ## op(s, ot); \
1695 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1696 /* a loop would cause two single step exceptions if ECX = 1 \
1697 before rep string_insn */ \
1698 if (!s->jmp_opt) \
1699 gen_op_jz_ecx(s->aflag, l2); \
1700 gen_jmp(s, cur_eip); \
1701}
1702#endif /* VBOX */
1703
1704#ifndef VBOX
1705#define GEN_REPZ2(op) \
1706static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1707 target_ulong cur_eip, \
1708 target_ulong next_eip, \
1709 int nz) \
1710{ \
1711 int l2; \
1712 gen_update_cc_op(s); \
1713 l2 = gen_jz_ecx_string(s, next_eip); \
1714 gen_ ## op(s, ot); \
1715 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1716 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1717 gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2); \
1718 if (!s->jmp_opt) \
1719 gen_op_jz_ecx(s->aflag, l2); \
1720 gen_jmp(s, cur_eip); \
1721}
1722#else /* VBOX */
1723#define GEN_REPZ2(op) \
1724DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot, \
1725 target_ulong cur_eip, \
1726 target_ulong next_eip, \
1727 int nz) \
1728{ \
1729 int l2;\
1730 gen_update_cc_op(s); \
1731 l2 = gen_jz_ecx_string(s, next_eip); \
1732 gen_ ## op(s, ot); \
1733 gen_op_add_reg_im(s->aflag, R_ECX, -1); \
1734 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1735 gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2); \
1736 if (!s->jmp_opt) \
1737 gen_op_jz_ecx(s->aflag, l2); \
1738 gen_jmp(s, cur_eip); \
1739}
1740#endif /* VBOX */
1741
1742GEN_REPZ(movs)
1743GEN_REPZ(stos)
1744GEN_REPZ(lods)
1745GEN_REPZ(ins)
1746GEN_REPZ(outs)
1747GEN_REPZ2(scas)
1748GEN_REPZ2(cmps)
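/* These expansions define gen_repz_movs/stos/lods/ins/outs and, with the
   extra nz argument, gen_repz_scas/cmps. Through (JCC_Z << 1) | (nz ^ 1)
   the exit jump to l2 is taken on ZF clear for nz = 0 (REPZ) and on ZF
   set for nz = 1 (REPNZ). */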
1749
1750static void *helper_fp_arith_ST0_FT0[8] = {
1751 helper_fadd_ST0_FT0,
1752 helper_fmul_ST0_FT0,
1753 helper_fcom_ST0_FT0,
1754 helper_fcom_ST0_FT0,
1755 helper_fsub_ST0_FT0,
1756 helper_fsubr_ST0_FT0,
1757 helper_fdiv_ST0_FT0,
1758 helper_fdivr_ST0_FT0,
1759};
1760
1761/* NOTE the exception in "r" op ordering */
1762static void *helper_fp_arith_STN_ST0[8] = {
1763 helper_fadd_STN_ST0,
1764 helper_fmul_STN_ST0,
1765 NULL,
1766 NULL,
1767 helper_fsubr_STN_ST0,
1768 helper_fsub_STN_ST0,
1769 helper_fdivr_STN_ST0,
1770 helper_fdiv_STN_ST0,
1771};
1772
1773/* if d == OR_TMP0, it means memory operand (address in A0) */
1774static void gen_op(DisasContext *s1, int op, int ot, int d)
1775{
1776 if (d != OR_TMP0) {
1777 gen_op_mov_TN_reg(ot, 0, d);
1778 } else {
1779 gen_op_ld_T0_A0(ot + s1->mem_index);
1780 }
1781 switch(op) {
1782 case OP_ADCL:
1783 if (s1->cc_op != CC_OP_DYNAMIC)
1784 gen_op_set_cc_op(s1->cc_op);
1785 gen_compute_eflags_c(cpu_tmp4);
1786 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1787 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1788 if (d != OR_TMP0)
1789 gen_op_mov_reg_T0(ot, d);
1790 else
1791 gen_op_st_T0_A0(ot + s1->mem_index);
1792 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1793 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1794 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
1795 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
1796 tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_ADDB + ot);
1797 s1->cc_op = CC_OP_DYNAMIC;
1798 break;
1799 case OP_SBBL:
1800 if (s1->cc_op != CC_OP_DYNAMIC)
1801 gen_op_set_cc_op(s1->cc_op);
1802 gen_compute_eflags_c(cpu_tmp4);
1803 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1804 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp4);
1805 if (d != OR_TMP0)
1806 gen_op_mov_reg_T0(ot, d);
1807 else
1808 gen_op_st_T0_A0(ot + s1->mem_index);
1809 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]);
1810 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1811 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp4);
1812 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_tmp2_i32, 2);
1813 tcg_gen_addi_i32(cpu_cc_op, cpu_tmp2_i32, CC_OP_SUBB + ot);
1814 s1->cc_op = CC_OP_DYNAMIC;
1815 break;
1816 case OP_ADDL:
1817 gen_op_addl_T0_T1();
1818 if (d != OR_TMP0)
1819 gen_op_mov_reg_T0(ot, d);
1820 else
1821 gen_op_st_T0_A0(ot + s1->mem_index);
1822 gen_op_update2_cc();
1823 s1->cc_op = CC_OP_ADDB + ot;
1824 break;
1825 case OP_SUBL:
1826 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1827 if (d != OR_TMP0)
1828 gen_op_mov_reg_T0(ot, d);
1829 else
1830 gen_op_st_T0_A0(ot + s1->mem_index);
1831 gen_op_update2_cc();
1832 s1->cc_op = CC_OP_SUBB + ot;
1833 break;
1834 default:
1835 case OP_ANDL:
1836 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1837 if (d != OR_TMP0)
1838 gen_op_mov_reg_T0(ot, d);
1839 else
1840 gen_op_st_T0_A0(ot + s1->mem_index);
1841 gen_op_update1_cc();
1842 s1->cc_op = CC_OP_LOGICB + ot;
1843 break;
1844 case OP_ORL:
1845 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1846 if (d != OR_TMP0)
1847 gen_op_mov_reg_T0(ot, d);
1848 else
1849 gen_op_st_T0_A0(ot + s1->mem_index);
1850 gen_op_update1_cc();
1851 s1->cc_op = CC_OP_LOGICB + ot;
1852 break;
1853 case OP_XORL:
1854 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1855 if (d != OR_TMP0)
1856 gen_op_mov_reg_T0(ot, d);
1857 else
1858 gen_op_st_T0_A0(ot + s1->mem_index);
1859 gen_op_update1_cc();
1860 s1->cc_op = CC_OP_LOGICB + ot;
1861 break;
1862 case OP_CMPL:
1863 gen_op_cmpl_T0_T1_cc();
1864 s1->cc_op = CC_OP_SUBB + ot;
1865 break;
1866 }
1867}
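/* Note on OP_ADCL/OP_SBBL above: the carry-in is shifted left by 2
   (i.e. multiplied by 4) before being added to CC_OP_ADDB/CC_OP_SUBB + ot,
   which relies on the ADC/SBB groups sitting exactly four entries after
   the ADD/SUB groups in the CC_OP enum; the flags source is thus selected
   without a runtime branch. */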
1868
1869/* if d == OR_TMP0, it means memory operand (address in A0) */
1870static void gen_inc(DisasContext *s1, int ot, int d, int c)
1871{
1872 if (d != OR_TMP0)
1873 gen_op_mov_TN_reg(ot, 0, d);
1874 else
1875 gen_op_ld_T0_A0(ot + s1->mem_index);
1876 if (s1->cc_op != CC_OP_DYNAMIC)
1877 gen_op_set_cc_op(s1->cc_op);
1878 if (c > 0) {
1879 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], 1);
1880 s1->cc_op = CC_OP_INCB + ot;
1881 } else {
1882 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], -1);
1883 s1->cc_op = CC_OP_DECB + ot;
1884 }
1885 if (d != OR_TMP0)
1886 gen_op_mov_reg_T0(ot, d);
1887 else
1888 gen_op_st_T0_A0(ot + s1->mem_index);
1889 gen_compute_eflags_c(cpu_cc_src);
1890 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
1891}
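/* INC/DEC must preserve CF, so the pre-operation carry is materialized
   into cc_src here; the CC_OP_INCB/CC_OP_DECB evaluators can then take
   CF from cc_src instead of recomputing it from the result. */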
1892
1893static void gen_shift_rm_T1(DisasContext *s, int ot, int op1,
1894 int is_right, int is_arith)
1895{
1896 target_ulong mask;
1897 int shift_label;
1898 TCGv t0, t1;
1899
1900 if (ot == OT_QUAD)
1901 mask = 0x3f;
1902 else
1903 mask = 0x1f;
1904
1905 /* load */
1906 if (op1 == OR_TMP0)
1907 gen_op_ld_T0_A0(ot + s->mem_index);
1908 else
1909 gen_op_mov_TN_reg(ot, 0, op1);
1910
1911 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], mask);
1912
1913 tcg_gen_addi_tl(cpu_tmp5, cpu_T[1], -1);
1914
1915 if (is_right) {
1916 if (is_arith) {
1917 gen_exts(ot, cpu_T[0]);
1918 tcg_gen_sar_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1919 tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1920 } else {
1921 gen_extu(ot, cpu_T[0]);
1922 tcg_gen_shr_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1923 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1924 }
1925 } else {
1926 tcg_gen_shl_tl(cpu_T3, cpu_T[0], cpu_tmp5);
1927 tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
1928 }
1929
1930 /* store */
1931 if (op1 == OR_TMP0)
1932 gen_op_st_T0_A0(ot + s->mem_index);
1933 else
1934 gen_op_mov_reg_T0(ot, op1);
1935
1936 /* update eflags if non zero shift */
1937 if (s->cc_op != CC_OP_DYNAMIC)
1938 gen_op_set_cc_op(s->cc_op);
1939
1940 /* XXX: inefficient */
1941 t0 = tcg_temp_local_new(TCG_TYPE_TL);
1942 t1 = tcg_temp_local_new(TCG_TYPE_TL);
1943
1944 tcg_gen_mov_tl(t0, cpu_T[0]);
1945 tcg_gen_mov_tl(t1, cpu_T3);
1946
1947 shift_label = gen_new_label();
1948 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_T[1], 0, shift_label);
1949
1950 tcg_gen_mov_tl(cpu_cc_src, t1);
1951 tcg_gen_mov_tl(cpu_cc_dst, t0);
1952 if (is_right)
1953 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
1954 else
1955 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
1956
1957 gen_set_label(shift_label);
1958 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1959
1960 tcg_temp_free(t0);
1961 tcg_temp_free(t1);
1962}
1963
1964static void gen_shift_rm_im(DisasContext *s, int ot, int op1, int op2,
1965 int is_right, int is_arith)
1966{
1967 int mask;
1968
1969 if (ot == OT_QUAD)
1970 mask = 0x3f;
1971 else
1972 mask = 0x1f;
1973
1974 /* load */
1975 if (op1 == OR_TMP0)
1976 gen_op_ld_T0_A0(ot + s->mem_index);
1977 else
1978 gen_op_mov_TN_reg(ot, 0, op1);
1979
1980 op2 &= mask;
1981 if (op2 != 0) {
1982 if (is_right) {
1983 if (is_arith) {
1984 gen_exts(ot, cpu_T[0]);
1985 tcg_gen_sari_tl(cpu_tmp4, cpu_T[0], op2 - 1);
1986 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], op2);
1987 } else {
1988 gen_extu(ot, cpu_T[0]);
1989 tcg_gen_shri_tl(cpu_tmp4, cpu_T[0], op2 - 1);
1990 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], op2);
1991 }
1992 } else {
1993 tcg_gen_shli_tl(cpu_tmp4, cpu_T[0], op2 - 1);
1994 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], op2);
1995 }
1996 }
1997
1998 /* store */
1999 if (op1 == OR_TMP0)
2000 gen_op_st_T0_A0(ot + s->mem_index);
2001 else
2002 gen_op_mov_reg_T0(ot, op1);
2003
2004 /* update eflags if non zero shift */
2005 if (op2 != 0) {
2006 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
2007 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
2008 if (is_right)
2009 s->cc_op = CC_OP_SARB + ot;
2010 else
2011 s->cc_op = CC_OP_SHLB + ot;
2012 }
2013}
2014
2015#ifndef VBOX
2016static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
2017#else /* VBOX */
2018DECLINLINE(void) tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)
2019#endif /* VBOX */
2020{
2021 if (arg2 >= 0)
2022 tcg_gen_shli_tl(ret, arg1, arg2);
2023 else
2024 tcg_gen_shri_tl(ret, arg1, -arg2);
2025}
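/* Example: tcg_gen_lshift() shifts left for a non-negative count and
   right by the negated count otherwise. gen_rot_rm_T1() below uses
   tcg_gen_lshift(x, x, 11 - (data_bits - 1)) to move bit data_bits - 1
   of a value into the CC_O position (bit 11) for any operand size. */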
2026
2027/* XXX: add faster immediate case */
2028static void gen_rot_rm_T1(DisasContext *s, int ot, int op1,
2029 int is_right)
2030{
2031 target_ulong mask;
2032 int label1, label2, data_bits;
2033 TCGv t0, t1, t2, a0;
2034
2035 /* XXX: inefficient, but we must use local temps */
2036 t0 = tcg_temp_local_new(TCG_TYPE_TL);
2037 t1 = tcg_temp_local_new(TCG_TYPE_TL);
2038 t2 = tcg_temp_local_new(TCG_TYPE_TL);
2039 a0 = tcg_temp_local_new(TCG_TYPE_TL);
2040
2041 if (ot == OT_QUAD)
2042 mask = 0x3f;
2043 else
2044 mask = 0x1f;
2045
2046 /* load */
2047 if (op1 == OR_TMP0) {
2048 tcg_gen_mov_tl(a0, cpu_A0);
2049 gen_op_ld_v(ot + s->mem_index, t0, a0);
2050 } else {
2051 gen_op_mov_v_reg(ot, t0, op1);
2052 }
2053
2054 tcg_gen_mov_tl(t1, cpu_T[1]);
2055
2056 tcg_gen_andi_tl(t1, t1, mask);
2057
2058 /* Must test zero case to avoid using undefined behaviour in TCG
2059 shifts. */
2060 label1 = gen_new_label();
2061 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label1);
2062
2063 if (ot <= OT_WORD)
2064 tcg_gen_andi_tl(cpu_tmp0, t1, (1 << (3 + ot)) - 1);
2065 else
2066 tcg_gen_mov_tl(cpu_tmp0, t1);
2067
2068 gen_extu(ot, t0);
2069 tcg_gen_mov_tl(t2, t0);
2070
2071 data_bits = 8 << ot;
2072 /* XXX: rely on behaviour of shifts when operand 2 overflows (XXX:
2073 fix TCG definition) */
2074 if (is_right) {
2075 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp0);
2076 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
2077 tcg_gen_shl_tl(t0, t0, cpu_tmp0);
2078 } else {
2079 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp0);
2080 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(data_bits), cpu_tmp0);
2081 tcg_gen_shr_tl(t0, t0, cpu_tmp0);
2082 }
2083 tcg_gen_or_tl(t0, t0, cpu_tmp4);
2084
2085 gen_set_label(label1);
2086 /* store */
2087 if (op1 == OR_TMP0) {
2088 gen_op_st_v(ot + s->mem_index, t0, a0);
2089 } else {
2090 gen_op_mov_reg_v(ot, op1, t0);
2091 }
2092
2093 /* update eflags */
2094 if (s->cc_op != CC_OP_DYNAMIC)
2095 gen_op_set_cc_op(s->cc_op);
2096
2097 label2 = gen_new_label();
2098 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, label2);
2099
2100 gen_compute_eflags(cpu_cc_src);
2101 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~(CC_O | CC_C));
2102 tcg_gen_xor_tl(cpu_tmp0, t2, t0);
2103 tcg_gen_lshift(cpu_tmp0, cpu_tmp0, 11 - (data_bits - 1));
2104 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_O);
2105 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
2106 if (is_right) {
2107 tcg_gen_shri_tl(t0, t0, data_bits - 1);
2108 }
2109 tcg_gen_andi_tl(t0, t0, CC_C);
2110 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
2111
2112 tcg_gen_discard_tl(cpu_cc_dst);
2113 tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
2114
2115 gen_set_label(label2);
2116 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2117
2118 tcg_temp_free(t0);
2119 tcg_temp_free(t1);
2120 tcg_temp_free(t2);
2121 tcg_temp_free(a0);
2122}
2123
2124static void *helper_rotc[8] = {
2125 helper_rclb,
2126 helper_rclw,
2127 helper_rcll,
2128 X86_64_ONLY(helper_rclq),
2129 helper_rcrb,
2130 helper_rcrw,
2131 helper_rcrl,
2132 X86_64_ONLY(helper_rcrq),
2133};
2134
2135/* XXX: add faster immediate = 1 case */
2136static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1,
2137 int is_right)
2138{
2139 int label1;
2140
2141 if (s->cc_op != CC_OP_DYNAMIC)
2142 gen_op_set_cc_op(s->cc_op);
2143
2144 /* load */
2145 if (op1 == OR_TMP0)
2146 gen_op_ld_T0_A0(ot + s->mem_index);
2147 else
2148 gen_op_mov_TN_reg(ot, 0, op1);
2149
2150 tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)],
2151 cpu_T[0], cpu_T[0], cpu_T[1]);
2152 /* store */
2153 if (op1 == OR_TMP0)
2154 gen_op_st_T0_A0(ot + s->mem_index);
2155 else
2156 gen_op_mov_reg_T0(ot, op1);
2157
2158 /* update eflags */
2159 label1 = gen_new_label();
2160 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_tmp, -1, label1);
2161
2162 tcg_gen_mov_tl(cpu_cc_src, cpu_cc_tmp);
2163 tcg_gen_discard_tl(cpu_cc_dst);
2164 tcg_gen_movi_i32(cpu_cc_op, CC_OP_EFLAGS);
2165
2166 gen_set_label(label1);
2167 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2168}
2169
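/* Double-precision shifts (SHLD/SHRD): T1 supplies the bits shifted in and
   cpu_T3 the count. For the 16-bit forms the two operands are concatenated
   into one 32-bit value so that counts above 16 behave the Intel way. */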
2170/* XXX: add faster immediate case */
2171static void gen_shiftd_rm_T1_T3(DisasContext *s, int ot, int op1,
2172 int is_right)
2173{
2174 int label1, label2, data_bits;
2175 target_ulong mask;
2176 TCGv t0, t1, t2, a0;
2177
2178 t0 = tcg_temp_local_new(TCG_TYPE_TL);
2179 t1 = tcg_temp_local_new(TCG_TYPE_TL);
2180 t2 = tcg_temp_local_new(TCG_TYPE_TL);
2181 a0 = tcg_temp_local_new(TCG_TYPE_TL);
2182
2183 if (ot == OT_QUAD)
2184 mask = 0x3f;
2185 else
2186 mask = 0x1f;
2187
2188 /* load */
2189 if (op1 == OR_TMP0) {
2190 tcg_gen_mov_tl(a0, cpu_A0);
2191 gen_op_ld_v(ot + s->mem_index, t0, a0);
2192 } else {
2193 gen_op_mov_v_reg(ot, t0, op1);
2194 }
2195
2196 tcg_gen_andi_tl(cpu_T3, cpu_T3, mask);
2197
2198 tcg_gen_mov_tl(t1, cpu_T[1]);
2199 tcg_gen_mov_tl(t2, cpu_T3);
2200
2201 /* Must test zero case to avoid using undefined behaviour in TCG
2202 shifts. */
2203 label1 = gen_new_label();
2204 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
2205
2206 tcg_gen_addi_tl(cpu_tmp5, t2, -1);
2207 if (ot == OT_WORD) {
2208 /* Note: we implement the Intel behaviour for shift count > 16 */
2209 if (is_right) {
2210 tcg_gen_andi_tl(t0, t0, 0xffff);
2211 tcg_gen_shli_tl(cpu_tmp0, t1, 16);
2212 tcg_gen_or_tl(t0, t0, cpu_tmp0);
2213 tcg_gen_ext32u_tl(t0, t0);
2214
2215 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
2216
2217 /* only needed if count > 16, but testing for that would complicate the code */
2218 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
2219 tcg_gen_shl_tl(cpu_tmp0, t0, cpu_tmp5);
2220
2221 tcg_gen_shr_tl(t0, t0, t2);
2222
2223 tcg_gen_or_tl(t0, t0, cpu_tmp0);
2224 } else {
2225 /* XXX: not optimal */
2226 tcg_gen_andi_tl(t0, t0, 0xffff);
2227 tcg_gen_shli_tl(t1, t1, 16);
2228 tcg_gen_or_tl(t1, t1, t0);
2229 tcg_gen_ext32u_tl(t1, t1);
2230
2231 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
2232 tcg_gen_sub_tl(cpu_tmp0, tcg_const_tl(32), cpu_tmp5);
2233 tcg_gen_shr_tl(cpu_tmp6, t1, cpu_tmp0);
2234 tcg_gen_or_tl(cpu_tmp4, cpu_tmp4, cpu_tmp6);
2235
2236 tcg_gen_shl_tl(t0, t0, t2);
2237 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(32), t2);
2238 tcg_gen_shr_tl(t1, t1, cpu_tmp5);
2239 tcg_gen_or_tl(t0, t0, t1);
2240 }
2241 } else {
2242 data_bits = 8 << ot;
2243 if (is_right) {
2244 if (ot == OT_LONG)
2245 tcg_gen_ext32u_tl(t0, t0);
2246
2247 tcg_gen_shr_tl(cpu_tmp4, t0, cpu_tmp5);
2248
2249 tcg_gen_shr_tl(t0, t0, t2);
2250 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
2251 tcg_gen_shl_tl(t1, t1, cpu_tmp5);
2252 tcg_gen_or_tl(t0, t0, t1);
2253
2254 } else {
2255 if (ot == OT_LONG)
2256 tcg_gen_ext32u_tl(t1, t1);
2257
2258 tcg_gen_shl_tl(cpu_tmp4, t0, cpu_tmp5);
2259
2260 tcg_gen_shl_tl(t0, t0, t2);
2261 tcg_gen_sub_tl(cpu_tmp5, tcg_const_tl(data_bits), t2);
2262 tcg_gen_shr_tl(t1, t1, cpu_tmp5);
2263 tcg_gen_or_tl(t0, t0, t1);
2264 }
2265 }
2266 tcg_gen_mov_tl(t1, cpu_tmp4);
2267
2268 gen_set_label(label1);
2269 /* store */
2270 if (op1 == OR_TMP0) {
2271 gen_op_st_v(ot + s->mem_index, t0, a0);
2272 } else {
2273 gen_op_mov_reg_v(ot, op1, t0);
2274 }
2275
2276 /* update eflags */
2277 if (s->cc_op != CC_OP_DYNAMIC)
2278 gen_op_set_cc_op(s->cc_op);
2279
2280 label2 = gen_new_label();
2281 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label2);
2282
2283 tcg_gen_mov_tl(cpu_cc_src, t1);
2284 tcg_gen_mov_tl(cpu_cc_dst, t0);
2285 if (is_right) {
2286 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SARB + ot);
2287 } else {
2288 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SHLB + ot);
2289 }
2290 gen_set_label(label2);
2291 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
2292
2293 tcg_temp_free(t0);
2294 tcg_temp_free(t1);
2295 tcg_temp_free(t2);
2296 tcg_temp_free(a0);
2297}
2298
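/* Dispatch a shift/rotate with the count in T1 (loaded from register s
   unless it is already OR_TMP1). RCL/RCR go through out-of-line helpers
   because they also consume the carry flag. */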
2299static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
2300{
2301 if (s != OR_TMP1)
2302 gen_op_mov_TN_reg(ot, 1, s);
2303 switch(op) {
2304 case OP_ROL:
2305 gen_rot_rm_T1(s1, ot, d, 0);
2306 break;
2307 case OP_ROR:
2308 gen_rot_rm_T1(s1, ot, d, 1);
2309 break;
2310 case OP_SHL:
2311 case OP_SHL1:
2312 gen_shift_rm_T1(s1, ot, d, 0, 0);
2313 break;
2314 case OP_SHR:
2315 gen_shift_rm_T1(s1, ot, d, 1, 0);
2316 break;
2317 case OP_SAR:
2318 gen_shift_rm_T1(s1, ot, d, 1, 1);
2319 break;
2320 case OP_RCL:
2321 gen_rotc_rm_T1(s1, ot, d, 0);
2322 break;
2323 case OP_RCR:
2324 gen_rotc_rm_T1(s1, ot, d, 1);
2325 break;
2326 }
2327}
2328
2329static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
2330{
2331 switch(op) {
2332 case OP_SHL:
2333 case OP_SHL1:
2334 gen_shift_rm_im(s1, ot, d, c, 0, 0);
2335 break;
2336 case OP_SHR:
2337 gen_shift_rm_im(s1, ot, d, c, 1, 0);
2338 break;
2339 case OP_SAR:
2340 gen_shift_rm_im(s1, ot, d, c, 1, 1);
2341 break;
2342 default:
2343 /* currently not optimized */
2344 gen_op_movl_T1_im(c);
2345 gen_shift(s1, op, ot, d, OR_TMP1);
2346 break;
2347 }
2348}
2349
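/* ModRM decoding: mod is bits 7:6, reg/opcode bits 5:3, rm bits 2:0. With
   32/64-bit addressing, rm == 4 announces a SIB byte holding scale (7:6),
   index (5:3) and base (2:0). The computed effective address is left in A0. */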
2350static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
2351{
2352 target_long disp;
2353 int havesib;
2354 int base;
2355 int index;
2356 int scale;
2357 int opreg;
2358 int mod, rm, code, override, must_add_seg;
2359
2360 override = s->override;
2361 must_add_seg = s->addseg;
2362 if (override >= 0)
2363 must_add_seg = 1;
2364 mod = (modrm >> 6) & 3;
2365 rm = modrm & 7;
2366
2367 if (s->aflag) {
2368
2369 havesib = 0;
2370 base = rm;
2371 index = 0;
2372 scale = 0;
2373
2374 if (base == 4) {
2375 havesib = 1;
2376 code = ldub_code(s->pc++);
2377 scale = (code >> 6) & 3;
2378 index = ((code >> 3) & 7) | REX_X(s);
2379 base = (code & 7);
2380 }
2381 base |= REX_B(s);
2382
2383 switch (mod) {
2384 case 0:
2385 if ((base & 7) == 5) {
2386 base = -1;
2387 disp = (int32_t)ldl_code(s->pc);
2388 s->pc += 4;
2389 if (CODE64(s) && !havesib) {
2390 disp += s->pc + s->rip_offset;
2391 }
2392 } else {
2393 disp = 0;
2394 }
2395 break;
2396 case 1:
2397 disp = (int8_t)ldub_code(s->pc++);
2398 break;
2399 default:
2400 case 2:
2401#ifdef VBOX
2402 disp = (int32_t)ldl_code(s->pc);
2403#else
2404 disp = ldl_code(s->pc);
2405#endif
2406 s->pc += 4;
2407 break;
2408 }
2409
2410 if (base >= 0) {
2411 /* for correct popl handling with esp */
2412 if (base == 4 && s->popl_esp_hack)
2413 disp += s->popl_esp_hack;
2414#ifdef TARGET_X86_64
2415 if (s->aflag == 2) {
2416 gen_op_movq_A0_reg(base);
2417 if (disp != 0) {
2418 gen_op_addq_A0_im(disp);
2419 }
2420 } else
2421#endif
2422 {
2423 gen_op_movl_A0_reg(base);
2424 if (disp != 0)
2425 gen_op_addl_A0_im(disp);
2426 }
2427 } else {
2428#ifdef TARGET_X86_64
2429 if (s->aflag == 2) {
2430 gen_op_movq_A0_im(disp);
2431 } else
2432#endif
2433 {
2434 gen_op_movl_A0_im(disp);
2435 }
2436 }
2437 /* index == 4 means no index */
2438 if (havesib && (index != 4)) {
2439#ifdef TARGET_X86_64
2440 if (s->aflag == 2) {
2441 gen_op_addq_A0_reg_sN(scale, index);
2442 } else
2443#endif
2444 {
2445 gen_op_addl_A0_reg_sN(scale, index);
2446 }
2447 }
2448 if (must_add_seg) {
2449 if (override < 0) {
2450 if (base == R_EBP || base == R_ESP)
2451 override = R_SS;
2452 else
2453 override = R_DS;
2454 }
2455#ifdef TARGET_X86_64
2456 if (s->aflag == 2) {
2457 gen_op_addq_A0_seg(override);
2458 } else
2459#endif
2460 {
2461 gen_op_addl_A0_seg(override);
2462 }
2463 }
2464 } else {
2465 switch (mod) {
2466 case 0:
2467 if (rm == 6) {
2468 disp = lduw_code(s->pc);
2469 s->pc += 2;
2470 gen_op_movl_A0_im(disp);
2471 rm = 0; /* avoid SS override */
2472 goto no_rm;
2473 } else {
2474 disp = 0;
2475 }
2476 break;
2477 case 1:
2478 disp = (int8_t)ldub_code(s->pc++);
2479 break;
2480 default:
2481 case 2:
2482 disp = lduw_code(s->pc);
2483 s->pc += 2;
2484 break;
2485 }
2486 switch(rm) {
2487 case 0:
2488 gen_op_movl_A0_reg(R_EBX);
2489 gen_op_addl_A0_reg_sN(0, R_ESI);
2490 break;
2491 case 1:
2492 gen_op_movl_A0_reg(R_EBX);
2493 gen_op_addl_A0_reg_sN(0, R_EDI);
2494 break;
2495 case 2:
2496 gen_op_movl_A0_reg(R_EBP);
2497 gen_op_addl_A0_reg_sN(0, R_ESI);
2498 break;
2499 case 3:
2500 gen_op_movl_A0_reg(R_EBP);
2501 gen_op_addl_A0_reg_sN(0, R_EDI);
2502 break;
2503 case 4:
2504 gen_op_movl_A0_reg(R_ESI);
2505 break;
2506 case 5:
2507 gen_op_movl_A0_reg(R_EDI);
2508 break;
2509 case 6:
2510 gen_op_movl_A0_reg(R_EBP);
2511 break;
2512 default:
2513 case 7:
2514 gen_op_movl_A0_reg(R_EBX);
2515 break;
2516 }
2517 if (disp != 0)
2518 gen_op_addl_A0_im(disp);
2519 gen_op_andl_A0_ffff();
2520 no_rm:
2521 if (must_add_seg) {
2522 if (override < 0) {
2523 if (rm == 2 || rm == 3 || rm == 6)
2524 override = R_SS;
2525 else
2526 override = R_DS;
2527 }
2528 gen_op_addl_A0_seg(override);
2529 }
2530 }
2531
2532 opreg = OR_A0;
2533 disp = 0;
2534 *reg_ptr = opreg;
2535 *offset_ptr = disp;
2536}
2537
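/* Consume the memory operand bytes of hint/multi-byte NOP encodings
   (e.g. 0f 1f /r) without generating any code; only the instruction
   length matters here. */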
2538static void gen_nop_modrm(DisasContext *s, int modrm)
2539{
2540 int mod, rm, base, code;
2541
2542 mod = (modrm >> 6) & 3;
2543 if (mod == 3)
2544 return;
2545 rm = modrm & 7;
2546
2547 if (s->aflag) {
2548
2549 base = rm;
2550
2551 if (base == 4) {
2552 code = ldub_code(s->pc++);
2553 base = (code & 7);
2554 }
2555
2556 switch (mod) {
2557 case 0:
2558 if (base == 5) {
2559 s->pc += 4;
2560 }
2561 break;
2562 case 1:
2563 s->pc++;
2564 break;
2565 default:
2566 case 2:
2567 s->pc += 4;
2568 break;
2569 }
2570 } else {
2571 switch (mod) {
2572 case 0:
2573 if (rm == 6) {
2574 s->pc += 2;
2575 }
2576 break;
2577 case 1:
2578 s->pc++;
2579 break;
2580 default:
2581 case 2:
2582 s->pc += 2;
2583 break;
2584 }
2585 }
2586}
2587
2588/* used for LEA and MOV AX, mem */
2589static void gen_add_A0_ds_seg(DisasContext *s)
2590{
2591 int override, must_add_seg;
2592 must_add_seg = s->addseg;
2593 override = R_DS;
2594 if (s->override >= 0) {
2595 override = s->override;
2596 must_add_seg = 1;
2597 }
2600 if (must_add_seg) {
2601#ifdef TARGET_X86_64
2602 if (CODE64(s)) {
2603 gen_op_addq_A0_seg(override);
2604 } else
2605#endif
2606 {
2607 gen_op_addl_A0_seg(override);
2608 }
2609 }
2610}
2611
2612/* generate modrm memory load or store of 'reg'. TMP0 is used if reg ==
2613 OR_TMP0 */
2614static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
2615{
2616 int mod, rm, opreg, disp;
2617
2618 mod = (modrm >> 6) & 3;
2619 rm = (modrm & 7) | REX_B(s);
2620 if (mod == 3) {
2621 if (is_store) {
2622 if (reg != OR_TMP0)
2623 gen_op_mov_TN_reg(ot, 0, reg);
2624 gen_op_mov_reg_T0(ot, rm);
2625 } else {
2626 gen_op_mov_TN_reg(ot, 0, rm);
2627 if (reg != OR_TMP0)
2628 gen_op_mov_reg_T0(ot, reg);
2629 }
2630 } else {
2631 gen_lea_modrm(s, modrm, &opreg, &disp);
2632 if (is_store) {
2633 if (reg != OR_TMP0)
2634 gen_op_mov_TN_reg(ot, 0, reg);
2635 gen_op_st_T0_A0(ot + s->mem_index);
2636 } else {
2637 gen_op_ld_T0_A0(ot + s->mem_index);
2638 if (reg != OR_TMP0)
2639 gen_op_mov_reg_T0(ot, reg);
2640 }
2641 }
2642}
2643
2644#ifndef VBOX
2645static inline uint32_t insn_get(DisasContext *s, int ot)
2646#else /* VBOX */
2647DECLINLINE(uint32_t) insn_get(DisasContext *s, int ot)
2648#endif /* VBOX */
2649{
2650 uint32_t ret;
2651
2652 switch(ot) {
2653 case OT_BYTE:
2654 ret = ldub_code(s->pc);
2655 s->pc++;
2656 break;
2657 case OT_WORD:
2658 ret = lduw_code(s->pc);
2659 s->pc += 2;
2660 break;
2661 default:
2662 case OT_LONG:
2663 ret = ldl_code(s->pc);
2664 s->pc += 4;
2665 break;
2666 }
2667 return ret;
2668}
2669
2670#ifndef VBOX
2671static inline int insn_const_size(unsigned int ot)
2672#else /* VBOX */
2673DECLINLINE(int) insn_const_size(unsigned int ot)
2674#endif /* VBOX */
2675{
2676 if (ot <= OT_LONG)
2677 return 1 << ot;
2678 else
2679 return 4;
2680}
2681
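/* Chain to the next translation block: if the target stays on the same
   page(s) as this TB, emit a patchable direct jump (goto_tb plus exit_tb
   with the TB pointer and slot encoded in the return value); otherwise
   fall back to a full end-of-block so the new eip is looked up at run time. */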
2682#ifndef VBOX
2683static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2684#else /* VBOX */
2685DECLINLINE(void) gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
2686#endif /* VBOX */
2687{
2688 TranslationBlock *tb;
2689 target_ulong pc;
2690
2691 pc = s->cs_base + eip;
2692 tb = s->tb;
2693 /* NOTE: we handle the case where the TB spans two pages here */
2694 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
2695 (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
2696#ifdef VBOX
2697 gen_check_external_event(s);
2698#endif /* VBOX */
2699 /* jump to same page: we can use a direct jump */
2700 tcg_gen_goto_tb(tb_num);
2701 gen_jmp_im(eip);
2702 tcg_gen_exit_tb((long)tb + tb_num);
2703 } else {
2704 /* jump to another page: currently not optimized */
2705 gen_jmp_im(eip);
2706 gen_eob(s);
2707 }
2708}
2709
2710#ifndef VBOX
2711static inline void gen_jcc(DisasContext *s, int b,
2712#else /* VBOX */
2713DECLINLINE(void) gen_jcc(DisasContext *s, int b,
2714#endif /* VBOX */
2715 target_ulong val, target_ulong next_eip)
2716{
2717 int l1, l2, cc_op;
2718
2719 cc_op = s->cc_op;
2720 if (s->cc_op != CC_OP_DYNAMIC) {
2721 gen_op_set_cc_op(s->cc_op);
2722 s->cc_op = CC_OP_DYNAMIC;
2723 }
2724 if (s->jmp_opt) {
2725 l1 = gen_new_label();
2726 gen_jcc1(s, cc_op, b, l1);
2727
2728 gen_goto_tb(s, 0, next_eip);
2729
2730 gen_set_label(l1);
2731 gen_goto_tb(s, 1, val);
2732 s->is_jmp = 3;
2733 } else {
2734
2735 l1 = gen_new_label();
2736 l2 = gen_new_label();
2737 gen_jcc1(s, cc_op, b, l1);
2738
2739 gen_jmp_im(next_eip);
2740 tcg_gen_br(l2);
2741
2742 gen_set_label(l1);
2743 gen_jmp_im(val);
2744 gen_set_label(l2);
2745 gen_eob(s);
2746 }
2747}
2748
2749static void gen_setcc(DisasContext *s, int b)
2750{
2751 int inv, jcc_op, l1;
2752 TCGv t0;
2753
2754 if (is_fast_jcc_case(s, b)) {
2755 /* nominal case: we use a jump */
2756 /* XXX: make it faster by adding new instructions in TCG */
2757 t0 = tcg_temp_local_new(TCG_TYPE_TL);
2758 tcg_gen_movi_tl(t0, 0);
2759 l1 = gen_new_label();
2760 gen_jcc1(s, s->cc_op, b ^ 1, l1);
2761 tcg_gen_movi_tl(t0, 1);
2762 gen_set_label(l1);
2763 tcg_gen_mov_tl(cpu_T[0], t0);
2764 tcg_temp_free(t0);
2765 } else {
2766 /* slow case: it is more efficient not to generate a jump,
2767 although it is questionable whether this optimization is
2768 worth it */
2769 inv = b & 1;
2770 jcc_op = (b >> 1) & 7;
2771 gen_setcc_slow_T0(s, jcc_op);
2772 if (inv) {
2773 tcg_gen_xori_tl(cpu_T[0], cpu_T[0], 1);
2774 }
2775 }
2776}
2777
2778#ifndef VBOX
2779static inline void gen_op_movl_T0_seg(int seg_reg)
2780#else /* VBOX */
2781DECLINLINE(void) gen_op_movl_T0_seg(int seg_reg)
2782#endif /* VBOX */
2783{
2784 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
2785 offsetof(CPUX86State,segs[seg_reg].selector));
2786}
2787
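/* Real-mode/vm86 segment load: the cached base becomes selector << 4. The
   VBOX block below also refreshes the cached flags and limit, presumably to
   keep the recompiler's descriptor state consistent with the VMM. */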
2788#ifndef VBOX
2789static inline void gen_op_movl_seg_T0_vm(int seg_reg)
2790#else /* VBOX */
2791DECLINLINE(void) gen_op_movl_seg_T0_vm(int seg_reg)
2792#endif /* VBOX */
2793{
2794 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff);
2795 tcg_gen_st32_tl(cpu_T[0], cpu_env,
2796 offsetof(CPUX86State,segs[seg_reg].selector));
2797 tcg_gen_shli_tl(cpu_T[0], cpu_T[0], 4);
2798 tcg_gen_st_tl(cpu_T[0], cpu_env,
2799 offsetof(CPUX86State,segs[seg_reg].base));
2800#ifdef VBOX
2801 int flags = DESC_P_MASK | DESC_S_MASK | DESC_W_MASK;
2802 if (seg_reg == R_CS)
2803 flags |= DESC_CS_MASK;
2804 gen_op_movl_T0_im(flags);
2805 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[seg_reg].flags));
2806
2807 /* Set the limit to 0xffff. */
2808 gen_op_movl_T0_im(0xffff);
2809 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[seg_reg].limit));
2810#endif
2811}
2812
2813/* move T0 to seg_reg and compute if the CPU state may change. Never
2814 call this function with seg_reg == R_CS */
2815static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
2816{
2817 if (s->pe && !s->vm86) {
2818 /* XXX: optimize by finding processor state dynamically */
2819 if (s->cc_op != CC_OP_DYNAMIC)
2820 gen_op_set_cc_op(s->cc_op);
2821 gen_jmp_im(cur_eip);
2822 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
2823 tcg_gen_helper_0_2(helper_load_seg, tcg_const_i32(seg_reg), cpu_tmp2_i32);
2824 /* abort translation because the addseg value may change or
2825 because ss32 may change. For R_SS, translation must always
2826 stop, as special handling is needed to disable hardware
2827 interrupts for the next instruction */
2828 if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
2829 s->is_jmp = 3;
2830 } else {
2831 gen_op_movl_seg_T0_vm(seg_reg);
2832 if (seg_reg == R_SS)
2833 s->is_jmp = 3;
2834 }
2835}
2836
2837#ifndef VBOX
2838static inline int svm_is_rep(int prefixes)
2839#else /* VBOX */
2840DECLINLINE(int) svm_is_rep(int prefixes)
2841#endif /* VBOX */
2842{
2843 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 8 : 0);
2844}
2845
2846#ifndef VBOX
2847static inline void
2848#else /* VBOX */
2849DECLINLINE(void)
2850#endif /* VBOX */
2851gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start,
2852 uint32_t type, uint64_t param)
2853{
2854 /* SVM not active: fast case */
2855 if (likely(!(s->flags & HF_SVMI_MASK)))
2856 return;
2857 if (s->cc_op != CC_OP_DYNAMIC)
2858 gen_op_set_cc_op(s->cc_op);
2859 gen_jmp_im(pc_start - s->cs_base);
2860 tcg_gen_helper_0_2(helper_svm_check_intercept_param,
2861 tcg_const_i32(type), tcg_const_i64(param));
2862}
2863
2864#ifndef VBOX
2865static inline void
2866#else /* VBOX */
2867DECLINLINE(void)
2868#endif
2869gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type)
2870{
2871 gen_svm_check_intercept_param(s, pc_start, type, 0);
2872}
2873
2874#ifndef VBOX
2875static inline void gen_stack_update(DisasContext *s, int addend)
2876#else /* VBOX */
2877DECLINLINE(void) gen_stack_update(DisasContext *s, int addend)
2878#endif /* VBOX */
2879{
2880#ifdef TARGET_X86_64
2881 if (CODE64(s)) {
2882 gen_op_add_reg_im(2, R_ESP, addend);
2883 } else
2884#endif
2885 if (s->ss32) {
2886 gen_op_add_reg_im(1, R_ESP, addend);
2887 } else {
2888 gen_op_add_reg_im(0, R_ESP, addend);
2889 }
2890}
2891
2892/* generate a push. It depends on ss32, addseg and dflag */
2893static void gen_push_T0(DisasContext *s)
2894{
2895#ifdef TARGET_X86_64
2896 if (CODE64(s)) {
2897 gen_op_movq_A0_reg(R_ESP);
2898 if (s->dflag) {
2899 gen_op_addq_A0_im(-8);
2900 gen_op_st_T0_A0(OT_QUAD + s->mem_index);
2901 } else {
2902 gen_op_addq_A0_im(-2);
2903 gen_op_st_T0_A0(OT_WORD + s->mem_index);
2904 }
2905 gen_op_mov_reg_A0(2, R_ESP);
2906 } else
2907#endif
2908 {
2909 gen_op_movl_A0_reg(R_ESP);
2910 if (!s->dflag)
2911 gen_op_addl_A0_im(-2);
2912 else
2913 gen_op_addl_A0_im(-4);
2914 if (s->ss32) {
2915 if (s->addseg) {
2916 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2917 gen_op_addl_A0_seg(R_SS);
2918 }
2919 } else {
2920 gen_op_andl_A0_ffff();
2921 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
2922 gen_op_addl_A0_seg(R_SS);
2923 }
2924 gen_op_st_T0_A0(s->dflag + 1 + s->mem_index);
2925 if (s->ss32 && !s->addseg)
2926 gen_op_mov_reg_A0(1, R_ESP);
2927 else
2928 gen_op_mov_reg_T1(s->ss32 + 1, R_ESP);
2929 }
2930}
2931
2932/* generate a push. It depends on ss32, addseg and dflag */
2933/* slower version for T1, only used for call Ev */
2934static void gen_push_T1(DisasContext *s)
2935{
2936#ifdef TARGET_X86_64
2937 if (CODE64(s)) {
2938 gen_op_movq_A0_reg(R_ESP);
2939 if (s->dflag) {
2940 gen_op_addq_A0_im(-8);
2941 gen_op_st_T1_A0(OT_QUAD + s->mem_index);
2942 } else {
2943 gen_op_addq_A0_im(-2);
2944 gen_op_st_T1_A0(OT_WORD + s->mem_index);
2945 }
2946 gen_op_mov_reg_A0(2, R_ESP);
2947 } else
2948#endif
2949 {
2950 gen_op_movl_A0_reg(R_ESP);
2951 if (!s->dflag)
2952 gen_op_addl_A0_im(-2);
2953 else
2954 gen_op_addl_A0_im(-4);
2955 if (s->ss32) {
2956 if (s->addseg) {
2957 gen_op_addl_A0_seg(R_SS);
2958 }
2959 } else {
2960 gen_op_andl_A0_ffff();
2961 gen_op_addl_A0_seg(R_SS);
2962 }
2963 gen_op_st_T1_A0(s->dflag + 1 + s->mem_index);
2964
2965 if (s->ss32 && !s->addseg)
2966 gen_op_mov_reg_A0(1, R_ESP);
2967 else
2968 gen_stack_update(s, (-2) << s->dflag);
2969 }
2970}
2971
2972 /* a two-step pop is necessary for precise exceptions */
2973static void gen_pop_T0(DisasContext *s)
2974{
2975#ifdef TARGET_X86_64
2976 if (CODE64(s)) {
2977 gen_op_movq_A0_reg(R_ESP);
2978 gen_op_ld_T0_A0((s->dflag ? OT_QUAD : OT_WORD) + s->mem_index);
2979 } else
2980#endif
2981 {
2982 gen_op_movl_A0_reg(R_ESP);
2983 if (s->ss32) {
2984 if (s->addseg)
2985 gen_op_addl_A0_seg(R_SS);
2986 } else {
2987 gen_op_andl_A0_ffff();
2988 gen_op_addl_A0_seg(R_SS);
2989 }
2990 gen_op_ld_T0_A0(s->dflag + 1 + s->mem_index);
2991 }
2992}
2993
2994static void gen_pop_update(DisasContext *s)
2995{
2996#ifdef TARGET_X86_64
2997 if (CODE64(s) && s->dflag) {
2998 gen_stack_update(s, 8);
2999 } else
3000#endif
3001 {
3002 gen_stack_update(s, 2 << s->dflag);
3003 }
3004}
3005
3006static void gen_stack_A0(DisasContext *s)
3007{
3008 gen_op_movl_A0_reg(R_ESP);
3009 if (!s->ss32)
3010 gen_op_andl_A0_ffff();
3011 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3012 if (s->addseg)
3013 gen_op_addl_A0_seg(R_SS);
3014}
3015
3016 /* NOTE: wrap-around in 16-bit mode is not fully handled */
3017static void gen_pusha(DisasContext *s)
3018{
3019 int i;
3020 gen_op_movl_A0_reg(R_ESP);
3021 gen_op_addl_A0_im(-16 << s->dflag);
3022 if (!s->ss32)
3023 gen_op_andl_A0_ffff();
3024 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3025 if (s->addseg)
3026 gen_op_addl_A0_seg(R_SS);
3027 for(i = 0;i < 8; i++) {
3028 gen_op_mov_TN_reg(OT_LONG, 0, 7 - i);
3029 gen_op_st_T0_A0(OT_WORD + s->dflag + s->mem_index);
3030 gen_op_addl_A0_im(2 << s->dflag);
3031 }
3032 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
3033}
3034
3035 /* NOTE: wrap-around in 16-bit mode is not fully handled */
3036static void gen_popa(DisasContext *s)
3037{
3038 int i;
3039 gen_op_movl_A0_reg(R_ESP);
3040 if (!s->ss32)
3041 gen_op_andl_A0_ffff();
3042 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3043 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], 16 << s->dflag);
3044 if (s->addseg)
3045 gen_op_addl_A0_seg(R_SS);
3046 for(i = 0;i < 8; i++) {
3047 /* ESP is not reloaded */
3048 if (i != 3) {
3049 gen_op_ld_T0_A0(OT_WORD + s->dflag + s->mem_index);
3050 gen_op_mov_reg_T0(OT_WORD + s->dflag, 7 - i);
3051 }
3052 gen_op_addl_A0_im(2 << s->dflag);
3053 }
3054 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
3055}
3056
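/* ENTER: push rBP, let the helper copy 'level' nested frame pointers,
   point rBP at the new frame and lower rSP by esp_addend plus the frame
   area. */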
3057static void gen_enter(DisasContext *s, int esp_addend, int level)
3058{
3059 int ot, opsize;
3060
3061 level &= 0x1f;
3062#ifdef TARGET_X86_64
3063 if (CODE64(s)) {
3064 ot = s->dflag ? OT_QUAD : OT_WORD;
3065 opsize = 1 << ot;
3066
3067 gen_op_movl_A0_reg(R_ESP);
3068 gen_op_addq_A0_im(-opsize);
3069 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3070
3071 /* push bp */
3072 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
3073 gen_op_st_T0_A0(ot + s->mem_index);
3074 if (level) {
3075 /* XXX: must save state */
3076 tcg_gen_helper_0_3(helper_enter64_level,
3077 tcg_const_i32(level),
3078 tcg_const_i32((ot == OT_QUAD)),
3079 cpu_T[1]);
3080 }
3081 gen_op_mov_reg_T1(ot, R_EBP);
3082 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
3083 gen_op_mov_reg_T1(OT_QUAD, R_ESP);
3084 } else
3085#endif
3086 {
3087 ot = s->dflag + OT_WORD;
3088 opsize = 2 << s->dflag;
3089
3090 gen_op_movl_A0_reg(R_ESP);
3091 gen_op_addl_A0_im(-opsize);
3092 if (!s->ss32)
3093 gen_op_andl_A0_ffff();
3094 tcg_gen_mov_tl(cpu_T[1], cpu_A0);
3095 if (s->addseg)
3096 gen_op_addl_A0_seg(R_SS);
3097 /* push bp */
3098 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
3099 gen_op_st_T0_A0(ot + s->mem_index);
3100 if (level) {
3101 /* XXX: must save state */
3102 tcg_gen_helper_0_3(helper_enter_level,
3103 tcg_const_i32(level),
3104 tcg_const_i32(s->dflag),
3105 cpu_T[1]);
3106 }
3107 gen_op_mov_reg_T1(ot, R_EBP);
3108 tcg_gen_addi_tl(cpu_T[1], cpu_T[1], -esp_addend + (-opsize * level));
3109 gen_op_mov_reg_T1(OT_WORD + s->ss32, R_ESP);
3110 }
3111}
3112
3113static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
3114{
3115 if (s->cc_op != CC_OP_DYNAMIC)
3116 gen_op_set_cc_op(s->cc_op);
3117 gen_jmp_im(cur_eip);
3118 tcg_gen_helper_0_1(helper_raise_exception, tcg_const_i32(trapno));
3119 s->is_jmp = 3;
3120}
3121
3122/* an interrupt is different from an exception because of the
3123 privilege checks */
3124static void gen_interrupt(DisasContext *s, int intno,
3125 target_ulong cur_eip, target_ulong next_eip)
3126{
3127 if (s->cc_op != CC_OP_DYNAMIC)
3128 gen_op_set_cc_op(s->cc_op);
3129 gen_jmp_im(cur_eip);
3130 tcg_gen_helper_0_2(helper_raise_interrupt,
3131 tcg_const_i32(intno),
3132 tcg_const_i32(next_eip - cur_eip));
3133 s->is_jmp = 3;
3134}
3135
3136static void gen_debug(DisasContext *s, target_ulong cur_eip)
3137{
3138 if (s->cc_op != CC_OP_DYNAMIC)
3139 gen_op_set_cc_op(s->cc_op);
3140 gen_jmp_im(cur_eip);
3141 tcg_gen_helper_0_0(helper_debug);
3142 s->is_jmp = 3;
3143}
3144
3145 /* generate a generic end of block. A trace exception is also generated
3146 if needed */
3147static void gen_eob(DisasContext *s)
3148{
3149 if (s->cc_op != CC_OP_DYNAMIC)
3150 gen_op_set_cc_op(s->cc_op);
3151 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
3152 tcg_gen_helper_0_0(helper_reset_inhibit_irq);
3153 }
3154
3155#ifdef VBOX
3156 gen_check_external_event(s);
3157#endif /* VBOX */
3158
3159 if ( s->singlestep_enabled
3160#ifdef VBOX
3161 && ( !(cpu_single_env->state & CPU_EMULATE_SINGLE_STEP)
3162 || !(s->prefix & (PREFIX_REPNZ | PREFIX_REPZ) ))
3163#endif
3164 ) {
3165 tcg_gen_helper_0_0(helper_debug);
3166 } else if (s->tf) {
3167 tcg_gen_helper_0_0(helper_single_step);
3168 } else {
3169 tcg_gen_exit_tb(0);
3170 }
3171 s->is_jmp = 3;
3172}
3173
3174 /* generate a jump to eip. No segment change may happen before this, as a
3175 direct call to the next block may occur */
3176static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
3177{
3178 if (s->jmp_opt) {
3179 if (s->cc_op != CC_OP_DYNAMIC) {
3180 gen_op_set_cc_op(s->cc_op);
3181 s->cc_op = CC_OP_DYNAMIC;
3182 }
3183 gen_goto_tb(s, tb_num, eip);
3184 s->is_jmp = 3;
3185 } else {
3186 gen_jmp_im(eip);
3187 gen_eob(s);
3188 }
3189}
3190
3191static void gen_jmp(DisasContext *s, target_ulong eip)
3192{
3193 gen_jmp_tb(s, eip, 0);
3194}
3195
3196#ifndef VBOX
3197static inline void gen_ldq_env_A0(int idx, int offset)
3198#else /* VBOX */
3199DECLINLINE(void) gen_ldq_env_A0(int idx, int offset)
3200#endif /* VBOX */
3201{
3202 int mem_index = (idx >> 2) - 1;
3203 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
3204 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset);
3205}
3206
3207#ifndef VBOX
3208static inline void gen_stq_env_A0(int idx, int offset)
3209#else /* VBOX */
3210DECLINLINE(void) gen_stq_env_A0(int idx, int offset)
3211#endif /* VBOX */
3212{
3213 int mem_index = (idx >> 2) - 1;
3214 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset);
3215 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
3216}
3217
3218#ifndef VBOX
3219static inline void gen_ldo_env_A0(int idx, int offset)
3220#else /* VBOX */
3221DECLINLINE(void) gen_ldo_env_A0(int idx, int offset)
3222#endif /* VBOX */
3223{
3224 int mem_index = (idx >> 2) - 1;
3225 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, mem_index);
3226 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
3227 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
3228 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_tmp0, mem_index);
3229 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
3230}
3231
3232#ifndef VBOX
3233static inline void gen_sto_env_A0(int idx, int offset)
3234#else /* VBOX */
3235DECLINLINE(void) gen_sto_env_A0(int idx, int offset)
3236#endif /* VBOX */
3237{
3238 int mem_index = (idx >> 2) - 1;
3239 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(0)));
3240 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, mem_index);
3241 tcg_gen_addi_tl(cpu_tmp0, cpu_A0, 8);
3242 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, offset + offsetof(XMMReg, XMM_Q(1)));
3243 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_tmp0, mem_index);
3244}
3245
3246#ifndef VBOX
3247static inline void gen_op_movo(int d_offset, int s_offset)
3248#else /* VBOX */
3249DECLINLINE(void) gen_op_movo(int d_offset, int s_offset)
3250#endif /* VBOX */
3251{
3252 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
3253 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3254 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset + 8);
3255 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset + 8);
3256}
3257
3258#ifndef VBOX
3259static inline void gen_op_movq(int d_offset, int s_offset)
3260#else /* VBOX */
3261DECLINLINE(void) gen_op_movq(int d_offset, int s_offset)
3262#endif /* VBOX */
3263{
3264 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset);
3265 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3266}
3267
3268#ifndef VBOX
3269static inline void gen_op_movl(int d_offset, int s_offset)
3270#else /* VBOX */
3271DECLINLINE(void) gen_op_movl(int d_offset, int s_offset)
3272#endif /* VBOX */
3273{
3274 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset);
3275 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, d_offset);
3276}
3277
3278#ifndef VBOX
3279static inline void gen_op_movq_env_0(int d_offset)
3280#else /* VBOX */
3281DECLINLINE(void) gen_op_movq_env_0(int d_offset)
3282#endif /* VBOX */
3283{
3284 tcg_gen_movi_i64(cpu_tmp1_i64, 0);
3285 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env, d_offset);
3286}
3287
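/* Sentinels for the dispatch tables below: SSE_SPECIAL entries are
   hand-decoded in gen_sse(), while SSE_DUMMY flags opcodes (femms, emms,
   the 3DNow! group) that are handled without a helper from the table. */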
3288#define SSE_SPECIAL ((void *)1)
3289#define SSE_DUMMY ((void *)2)
3290
3291#define MMX_OP2(x) { helper_ ## x ## _mmx, helper_ ## x ## _xmm }
3292#define SSE_FOP(x) { helper_ ## x ## ps, helper_ ## x ## pd, \
3293 helper_ ## x ## ss, helper_ ## x ## sd, }
3294
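/* Main 0x0f two-byte opcode table, indexed by [opcode][b1], where b1
   encodes the mandatory prefix: 0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2. */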
3295static void *sse_op_table1[256][4] = {
3296 /* 3DNow! extensions */
3297 [0x0e] = { SSE_DUMMY }, /* femms */
3298 [0x0f] = { SSE_DUMMY }, /* pf... */
3299 /* pure SSE operations */
3300 [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
3301 [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
3302 [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
3303 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */
3304 [0x14] = { helper_punpckldq_xmm, helper_punpcklqdq_xmm },
3305 [0x15] = { helper_punpckhdq_xmm, helper_punpckhqdq_xmm },
3306 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
3307 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */
3308
3309 [0x28] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
3310 [0x29] = { SSE_SPECIAL, SSE_SPECIAL }, /* movaps, movapd */
3311 [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
3312 [0x2b] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntps, movntpd */
3313 [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttss2si, cvttsd2si */
3314 [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtss2si, cvtsd2si */
3315 [0x2e] = { helper_ucomiss, helper_ucomisd },
3316 [0x2f] = { helper_comiss, helper_comisd },
3317 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
3318 [0x51] = SSE_FOP(sqrt),
3319 [0x52] = { helper_rsqrtps, NULL, helper_rsqrtss, NULL },
3320 [0x53] = { helper_rcpps, NULL, helper_rcpss, NULL },
3321 [0x54] = { helper_pand_xmm, helper_pand_xmm }, /* andps, andpd */
3322 [0x55] = { helper_pandn_xmm, helper_pandn_xmm }, /* andnps, andnpd */
3323 [0x56] = { helper_por_xmm, helper_por_xmm }, /* orps, orpd */
3324 [0x57] = { helper_pxor_xmm, helper_pxor_xmm }, /* xorps, xorpd */
3325 [0x58] = SSE_FOP(add),
3326 [0x59] = SSE_FOP(mul),
3327 [0x5a] = { helper_cvtps2pd, helper_cvtpd2ps,
3328 helper_cvtss2sd, helper_cvtsd2ss },
3329 [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq, helper_cvttps2dq },
3330 [0x5c] = SSE_FOP(sub),
3331 [0x5d] = SSE_FOP(min),
3332 [0x5e] = SSE_FOP(div),
3333 [0x5f] = SSE_FOP(max),
3334
3335 [0xc2] = SSE_FOP(cmpeq),
3336 [0xc6] = { helper_shufps, helper_shufpd },
3337
3338 [0x38] = { SSE_SPECIAL, SSE_SPECIAL, NULL, SSE_SPECIAL }, /* SSSE3/SSE4 */
3339 [0x3a] = { SSE_SPECIAL, SSE_SPECIAL }, /* SSSE3/SSE4 */
3340
3341 /* MMX ops and their SSE extensions */
3342 [0x60] = MMX_OP2(punpcklbw),
3343 [0x61] = MMX_OP2(punpcklwd),
3344 [0x62] = MMX_OP2(punpckldq),
3345 [0x63] = MMX_OP2(packsswb),
3346 [0x64] = MMX_OP2(pcmpgtb),
3347 [0x65] = MMX_OP2(pcmpgtw),
3348 [0x66] = MMX_OP2(pcmpgtl),
3349 [0x67] = MMX_OP2(packuswb),
3350 [0x68] = MMX_OP2(punpckhbw),
3351 [0x69] = MMX_OP2(punpckhwd),
3352 [0x6a] = MMX_OP2(punpckhdq),
3353 [0x6b] = MMX_OP2(packssdw),
3354 [0x6c] = { NULL, helper_punpcklqdq_xmm },
3355 [0x6d] = { NULL, helper_punpckhqdq_xmm },
3356 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
3357 [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
3358 [0x70] = { helper_pshufw_mmx,
3359 helper_pshufd_xmm,
3360 helper_pshufhw_xmm,
3361 helper_pshuflw_xmm },
3362 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
3363 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
3364 [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
3365 [0x74] = MMX_OP2(pcmpeqb),
3366 [0x75] = MMX_OP2(pcmpeqw),
3367 [0x76] = MMX_OP2(pcmpeql),
3368 [0x77] = { SSE_DUMMY }, /* emms */
3369 [0x7c] = { NULL, helper_haddpd, NULL, helper_haddps },
3370 [0x7d] = { NULL, helper_hsubpd, NULL, helper_hsubps },
3371 [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, movq */
3372 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
3373 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
3374 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
3375 [0xd0] = { NULL, helper_addsubpd, NULL, helper_addsubps },
3376 [0xd1] = MMX_OP2(psrlw),
3377 [0xd2] = MMX_OP2(psrld),
3378 [0xd3] = MMX_OP2(psrlq),
3379 [0xd4] = MMX_OP2(paddq),
3380 [0xd5] = MMX_OP2(pmullw),
3381 [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
3382 [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
3383 [0xd8] = MMX_OP2(psubusb),
3384 [0xd9] = MMX_OP2(psubusw),
3385 [0xda] = MMX_OP2(pminub),
3386 [0xdb] = MMX_OP2(pand),
3387 [0xdc] = MMX_OP2(paddusb),
3388 [0xdd] = MMX_OP2(paddusw),
3389 [0xde] = MMX_OP2(pmaxub),
3390 [0xdf] = MMX_OP2(pandn),
3391 [0xe0] = MMX_OP2(pavgb),
3392 [0xe1] = MMX_OP2(psraw),
3393 [0xe2] = MMX_OP2(psrad),
3394 [0xe3] = MMX_OP2(pavgw),
3395 [0xe4] = MMX_OP2(pmulhuw),
3396 [0xe5] = MMX_OP2(pmulhw),
3397 [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd, helper_cvtpd2dq },
3398 [0xe7] = { SSE_SPECIAL, SSE_SPECIAL }, /* movntq, movntdq */
3399 [0xe8] = MMX_OP2(psubsb),
3400 [0xe9] = MMX_OP2(psubsw),
3401 [0xea] = MMX_OP2(pminsw),
3402 [0xeb] = MMX_OP2(por),
3403 [0xec] = MMX_OP2(paddsb),
3404 [0xed] = MMX_OP2(paddsw),
3405 [0xee] = MMX_OP2(pmaxsw),
3406 [0xef] = MMX_OP2(pxor),
3407 [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
3408 [0xf1] = MMX_OP2(psllw),
3409 [0xf2] = MMX_OP2(pslld),
3410 [0xf3] = MMX_OP2(psllq),
3411 [0xf4] = MMX_OP2(pmuludq),
3412 [0xf5] = MMX_OP2(pmaddwd),
3413 [0xf6] = MMX_OP2(psadbw),
3414 [0xf7] = MMX_OP2(maskmov),
3415 [0xf8] = MMX_OP2(psubb),
3416 [0xf9] = MMX_OP2(psubw),
3417 [0xfa] = MMX_OP2(psubl),
3418 [0xfb] = MMX_OP2(psubq),
3419 [0xfc] = MMX_OP2(paddb),
3420 [0xfd] = MMX_OP2(paddw),
3421 [0xfe] = MMX_OP2(paddl),
3422};
3423
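/* Immediate-form shift groups 0x71/0x72/0x73, indexed by
   (width group) * 8 + (modrm reg field): /2 = logical right, /4 =
   arithmetic right, /6 = left; /3 and /7 are the xmm-only dq shifts. */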
3424static void *sse_op_table2[3 * 8][2] = {
3425 [0 + 2] = MMX_OP2(psrlw),
3426 [0 + 4] = MMX_OP2(psraw),
3427 [0 + 6] = MMX_OP2(psllw),
3428 [8 + 2] = MMX_OP2(psrld),
3429 [8 + 4] = MMX_OP2(psrad),
3430 [8 + 6] = MMX_OP2(pslld),
3431 [16 + 2] = MMX_OP2(psrlq),
3432 [16 + 3] = { NULL, helper_psrldq_xmm },
3433 [16 + 6] = MMX_OP2(psllq),
3434 [16 + 7] = { NULL, helper_pslldq_xmm },
3435};
3436
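/* Integer <-> scalar-float conversion helpers in three groups of four:
   row = group * 4 + (64-bit integer operand ? 2 : 0) + (sd ? 1 : 0),
   the groups being cvtsi2s*, cvtts*2si and cvts*2si. */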
3437static void *sse_op_table3[4 * 3] = {
3438 helper_cvtsi2ss,
3439 helper_cvtsi2sd,
3440 X86_64_ONLY(helper_cvtsq2ss),
3441 X86_64_ONLY(helper_cvtsq2sd),
3442
3443 helper_cvttss2si,
3444 helper_cvttsd2si,
3445 X86_64_ONLY(helper_cvttss2sq),
3446 X86_64_ONLY(helper_cvttsd2sq),
3447
3448 helper_cvtss2si,
3449 helper_cvtsd2si,
3450 X86_64_ONLY(helper_cvtss2sq),
3451 X86_64_ONLY(helper_cvtsd2sq),
3452};
3453
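/* CMPPS/CMPPD/CMPSS/CMPSD predicates, indexed by the imm8 selector
   (0 = eq, 1 = lt, 2 = le, 3 = unord, 4 = neq, 5 = nlt, 6 = nle, 7 = ord). */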
3454static void *sse_op_table4[8][4] = {
3455 SSE_FOP(cmpeq),
3456 SSE_FOP(cmplt),
3457 SSE_FOP(cmple),
3458 SSE_FOP(cmpunord),
3459 SSE_FOP(cmpneq),
3460 SSE_FOP(cmpnlt),
3461 SSE_FOP(cmpnle),
3462 SSE_FOP(cmpord),
3463};
3464
3465static void *sse_op_table5[256] = {
3466 [0x0c] = helper_pi2fw,
3467 [0x0d] = helper_pi2fd,
3468 [0x1c] = helper_pf2iw,
3469 [0x1d] = helper_pf2id,
3470 [0x8a] = helper_pfnacc,
3471 [0x8e] = helper_pfpnacc,
3472 [0x90] = helper_pfcmpge,
3473 [0x94] = helper_pfmin,
3474 [0x96] = helper_pfrcp,
3475 [0x97] = helper_pfrsqrt,
3476 [0x9a] = helper_pfsub,
3477 [0x9e] = helper_pfadd,
3478 [0xa0] = helper_pfcmpgt,
3479 [0xa4] = helper_pfmax,
3480 [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */
3481 [0xa7] = helper_movq, /* pfrsqit1 */
3482 [0xaa] = helper_pfsubr,
3483 [0xae] = helper_pfacc,
3484 [0xb0] = helper_pfcmpeq,
3485 [0xb4] = helper_pfmul,
3486 [0xb6] = helper_movq, /* pfrcpit2 */
3487 [0xb7] = helper_pmulhrw_mmx,
3488 [0xbb] = helper_pswapd,
3489 [0xbf] = helper_pavgb_mmx /* pavgusb */
3490};
3491
3492struct sse_op_helper_s {
3493 void *op[2]; uint32_t ext_mask;
3494};
3495#define SSSE3_OP(x) { MMX_OP2(x), CPUID_EXT_SSSE3 }
3496#define SSE41_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE41 }
3497#define SSE42_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE42 }
3498#define SSE41_SPECIAL { { NULL, SSE_SPECIAL }, CPUID_EXT_SSE41 }
3499static struct sse_op_helper_s sse_op_table6[256] = {
3500 [0x00] = SSSE3_OP(pshufb),
3501 [0x01] = SSSE3_OP(phaddw),
3502 [0x02] = SSSE3_OP(phaddd),
3503 [0x03] = SSSE3_OP(phaddsw),
3504 [0x04] = SSSE3_OP(pmaddubsw),
3505 [0x05] = SSSE3_OP(phsubw),
3506 [0x06] = SSSE3_OP(phsubd),
3507 [0x07] = SSSE3_OP(phsubsw),
3508 [0x08] = SSSE3_OP(psignb),
3509 [0x09] = SSSE3_OP(psignw),
3510 [0x0a] = SSSE3_OP(psignd),
3511 [0x0b] = SSSE3_OP(pmulhrsw),
3512 [0x10] = SSE41_OP(pblendvb),
3513 [0x14] = SSE41_OP(blendvps),
3514 [0x15] = SSE41_OP(blendvpd),
3515 [0x17] = SSE41_OP(ptest),
3516 [0x1c] = SSSE3_OP(pabsb),
3517 [0x1d] = SSSE3_OP(pabsw),
3518 [0x1e] = SSSE3_OP(pabsd),
3519 [0x20] = SSE41_OP(pmovsxbw),
3520 [0x21] = SSE41_OP(pmovsxbd),
3521 [0x22] = SSE41_OP(pmovsxbq),
3522 [0x23] = SSE41_OP(pmovsxwd),
3523 [0x24] = SSE41_OP(pmovsxwq),
3524 [0x25] = SSE41_OP(pmovsxdq),
3525 [0x28] = SSE41_OP(pmuldq),
3526 [0x29] = SSE41_OP(pcmpeqq),
3527 [0x2a] = SSE41_SPECIAL, /* movntdqa */
3528 [0x2b] = SSE41_OP(packusdw),
3529 [0x30] = SSE41_OP(pmovzxbw),
3530 [0x31] = SSE41_OP(pmovzxbd),
3531 [0x32] = SSE41_OP(pmovzxbq),
3532 [0x33] = SSE41_OP(pmovzxwd),
3533 [0x34] = SSE41_OP(pmovzxwq),
3534 [0x35] = SSE41_OP(pmovzxdq),
3535 [0x37] = SSE42_OP(pcmpgtq),
3536 [0x38] = SSE41_OP(pminsb),
3537 [0x39] = SSE41_OP(pminsd),
3538 [0x3a] = SSE41_OP(pminuw),
3539 [0x3b] = SSE41_OP(pminud),
3540 [0x3c] = SSE41_OP(pmaxsb),
3541 [0x3d] = SSE41_OP(pmaxsd),
3542 [0x3e] = SSE41_OP(pmaxuw),
3543 [0x3f] = SSE41_OP(pmaxud),
3544 [0x40] = SSE41_OP(pmulld),
3545 [0x41] = SSE41_OP(phminposuw),
3546};
3547
3548static struct sse_op_helper_s sse_op_table7[256] = {
3549 [0x08] = SSE41_OP(roundps),
3550 [0x09] = SSE41_OP(roundpd),
3551 [0x0a] = SSE41_OP(roundss),
3552 [0x0b] = SSE41_OP(roundsd),
3553 [0x0c] = SSE41_OP(blendps),
3554 [0x0d] = SSE41_OP(blendpd),
3555 [0x0e] = SSE41_OP(pblendw),
3556 [0x0f] = SSSE3_OP(palignr),
3557 [0x14] = SSE41_SPECIAL, /* pextrb */
3558 [0x15] = SSE41_SPECIAL, /* pextrw */
3559 [0x16] = SSE41_SPECIAL, /* pextrd/pextrq */
3560 [0x17] = SSE41_SPECIAL, /* extractps */
3561 [0x20] = SSE41_SPECIAL, /* pinsrb */
3562 [0x21] = SSE41_SPECIAL, /* insertps */
3563 [0x22] = SSE41_SPECIAL, /* pinsrd/pinsrq */
3564 [0x40] = SSE41_OP(dpps),
3565 [0x41] = SSE41_OP(dppd),
3566 [0x42] = SSE41_OP(mpsadbw),
3567 [0x60] = SSE42_OP(pcmpestrm),
3568 [0x61] = SSE42_OP(pcmpestri),
3569 [0x62] = SSE42_OP(pcmpistrm),
3570 [0x63] = SSE42_OP(pcmpistri),
3571};
3572
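/* Decode and emit one MMX/SSE/SSE2+ instruction. b is the 0x0f-escaped
   opcode byte (masked to 8 bits on entry); for SSE_SPECIAL entries the
   prefix index b1 is folded into bits 8+ of b, so that e.g. case 0x16f
   is the 0x66-prefixed form of opcode 0x6f. */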
3573static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
3574{
3575 int b1, op1_offset, op2_offset, is_xmm, val, ot;
3576 int modrm, mod, rm, reg, reg_addr, offset_addr;
3577 void *sse_op2;
3578
3579 b &= 0xff;
3580 if (s->prefix & PREFIX_DATA)
3581 b1 = 1;
3582 else if (s->prefix & PREFIX_REPZ)
3583 b1 = 2;
3584 else if (s->prefix & PREFIX_REPNZ)
3585 b1 = 3;
3586 else
3587 b1 = 0;
3588 sse_op2 = sse_op_table1[b][b1];
3589 if (!sse_op2)
3590 goto illegal_op;
3591 if ((b <= 0x5f && b >= 0x10) || b == 0xc6 || b == 0xc2) {
3592 is_xmm = 1;
3593 } else {
3594 if (b1 == 0) {
3595 /* MMX case */
3596 is_xmm = 0;
3597 } else {
3598 is_xmm = 1;
3599 }
3600 }
3601 /* simple MMX/SSE operation */
3602 if (s->flags & HF_TS_MASK) {
3603 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
3604 return;
3605 }
3606 if (s->flags & HF_EM_MASK) {
3607 illegal_op:
3608 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
3609 return;
3610 }
3611 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
3612 if ((b != 0x38 && b != 0x3a) || (s->prefix & PREFIX_DATA))
3613 goto illegal_op;
3614 if (b == 0x0e) {
3615 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
3616 goto illegal_op;
3617 /* femms */
3618 tcg_gen_helper_0_0(helper_emms);
3619 return;
3620 }
3621 if (b == 0x77) {
3622 /* emms */
3623 tcg_gen_helper_0_0(helper_emms);
3624 return;
3625 }
3626 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
3627 the static cpu state) */
3628 if (!is_xmm) {
3629 tcg_gen_helper_0_0(helper_enter_mmx);
3630 }
3631
3632 modrm = ldub_code(s->pc++);
3633 reg = ((modrm >> 3) & 7);
3634 if (is_xmm)
3635 reg |= rex_r;
3636 mod = (modrm >> 6) & 3;
3637 if (sse_op2 == SSE_SPECIAL) {
3638 b |= (b1 << 8);
3639 switch(b) {
3640 case 0x0e7: /* movntq */
3641 if (mod == 3)
3642 goto illegal_op;
3643 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3644 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3645 break;
3646 case 0x1e7: /* movntdq */
3647 case 0x02b: /* movntps */
3648 case 0x12b: /* movntpd */
3649 if (mod == 3)
3650 goto illegal_op;
3651 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3652 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3653 break;
3654 case 0x3f0: /* lddqu */
3655 if (mod == 3)
3656 goto illegal_op;
3657 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3658 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3659 break;
3660 case 0x6e: /* movd mm, ea */
3661#ifdef TARGET_X86_64
3662 if (s->dflag == 2) {
3663 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3664 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,fpregs[reg].mmx));
3665 } else
3666#endif
3667 {
3668 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3669 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3670 offsetof(CPUX86State,fpregs[reg].mmx));
3671 tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]);
3672 }
3673 break;
3674 case 0x16e: /* movd xmm, ea */
3675#ifdef TARGET_X86_64
3676 if (s->dflag == 2) {
3677 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
3678 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3679 offsetof(CPUX86State,xmm_regs[reg]));
3680 tcg_gen_helper_0_2(helper_movq_mm_T0_xmm, cpu_ptr0, cpu_T[0]);
3681 } else
3682#endif
3683 {
3684 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
3685 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3686 offsetof(CPUX86State,xmm_regs[reg]));
3687 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
3688 tcg_gen_helper_0_2(helper_movl_mm_T0_xmm, cpu_ptr0, cpu_tmp2_i32);
3689 }
3690 break;
3691 case 0x6f: /* movq mm, ea */
3692 if (mod != 3) {
3693 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3694 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3695 } else {
3696 rm = (modrm & 7);
3697 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
3698 offsetof(CPUX86State,fpregs[rm].mmx));
3699 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
3700 offsetof(CPUX86State,fpregs[reg].mmx));
3701 }
3702 break;
3703 case 0x010: /* movups */
3704 case 0x110: /* movupd */
3705 case 0x028: /* movaps */
3706 case 0x128: /* movapd */
3707 case 0x16f: /* movdqa xmm, ea */
3708 case 0x26f: /* movdqu xmm, ea */
3709 if (mod != 3) {
3710 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3711 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3712 } else {
3713 rm = (modrm & 7) | REX_B(s);
3714 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
3715 offsetof(CPUX86State,xmm_regs[rm]));
3716 }
3717 break;
3718 case 0x210: /* movss xmm, ea */
3719 if (mod != 3) {
3720 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3721 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
3722 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3723 gen_op_movl_T0_0();
3724 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3725 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3726 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3727 } else {
3728 rm = (modrm & 7) | REX_B(s);
3729 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3730 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3731 }
3732 break;
3733 case 0x310: /* movsd xmm, ea */
3734 if (mod != 3) {
3735 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3736 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3737 gen_op_movl_T0_0();
3738 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3739 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3740 } else {
3741 rm = (modrm & 7) | REX_B(s);
3742 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3743 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3744 }
3745 break;
3746 case 0x012: /* movlps */
3747 case 0x112: /* movlpd */
3748 if (mod != 3) {
3749 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3750 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3751 } else {
3752 /* movhlps */
3753 rm = (modrm & 7) | REX_B(s);
3754 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3755 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3756 }
3757 break;
3758 case 0x212: /* movsldup */
3759 if (mod != 3) {
3760 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3761 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3762 } else {
3763 rm = (modrm & 7) | REX_B(s);
3764 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3765 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
3766 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3767 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
3768 }
3769 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3770 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3771 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3772 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
3773 break;
3774 case 0x312: /* movddup */
3775 if (mod != 3) {
3776 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3777 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3778 } else {
3779 rm = (modrm & 7) | REX_B(s);
3780 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3781 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3782 }
3783 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3784 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3785 break;
3786 case 0x016: /* movhps */
3787 case 0x116: /* movhpd */
3788 if (mod != 3) {
3789 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3790 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3791 } else {
3792 /* movlhps */
3793 rm = (modrm & 7) | REX_B(s);
3794 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
3795 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3796 }
3797 break;
3798 case 0x216: /* movshdup */
3799 if (mod != 3) {
3800 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3801 gen_ldo_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3802 } else {
3803 rm = (modrm & 7) | REX_B(s);
3804 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
3805 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
3806 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
3807 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
3808 }
3809 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
3810 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
3811 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
3812 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
3813 break;
3814 case 0x7e: /* movd ea, mm */
3815#ifdef TARGET_X86_64
3816 if (s->dflag == 2) {
3817 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3818 offsetof(CPUX86State,fpregs[reg].mmx));
3819 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3820 } else
3821#endif
3822 {
3823 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3824 offsetof(CPUX86State,fpregs[reg].mmx.MMX_L(0)));
3825 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3826 }
3827 break;
3828 case 0x17e: /* movd ea, xmm */
3829#ifdef TARGET_X86_64
3830 if (s->dflag == 2) {
3831 tcg_gen_ld_i64(cpu_T[0], cpu_env,
3832 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3833 gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
3834 } else
3835#endif
3836 {
3837 tcg_gen_ld32u_tl(cpu_T[0], cpu_env,
3838 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3839 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
3840 }
3841 break;
3842 case 0x27e: /* movq xmm, ea */
3843 if (mod != 3) {
3844 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3845 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3846 } else {
3847 rm = (modrm & 7) | REX_B(s);
3848 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
3849 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
3850 }
3851 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3852 break;
3853 case 0x7f: /* movq ea, mm */
3854 if (mod != 3) {
3855 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3856 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,fpregs[reg].mmx));
3857 } else {
3858 rm = (modrm & 7);
3859 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3860 offsetof(CPUX86State,fpregs[reg].mmx));
3861 }
3862 break;
3863 case 0x011: /* movups */
3864 case 0x111: /* movupd */
3865 case 0x029: /* movaps */
3866 case 0x129: /* movapd */
3867 case 0x17f: /* movdqa ea, xmm */
3868 case 0x27f: /* movdqu ea, xmm */
3869 if (mod != 3) {
3870 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3871 gen_sto_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg]));
3872 } else {
3873 rm = (modrm & 7) | REX_B(s);
3874 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
3875 offsetof(CPUX86State,xmm_regs[reg]));
3876 }
3877 break;
3878 case 0x211: /* movss ea, xmm */
3879 if (mod != 3) {
3880 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3881 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3882 gen_op_st_T0_A0(OT_LONG + s->mem_index);
3883 } else {
3884 rm = (modrm & 7) | REX_B(s);
3885 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
3886 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
3887 }
3888 break;
3889 case 0x311: /* movsd ea, xmm */
3890 if (mod != 3) {
3891 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3892 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3893 } else {
3894 rm = (modrm & 7) | REX_B(s);
3895 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3896 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3897 }
3898 break;
3899 case 0x013: /* movlps */
3900 case 0x113: /* movlpd */
3901 if (mod != 3) {
3902 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3903 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3904 } else {
3905 goto illegal_op;
3906 }
3907 break;
3908 case 0x017: /* movhps */
3909 case 0x117: /* movhpd */
3910 if (mod != 3) {
3911 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3912 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
3913 } else {
3914 goto illegal_op;
3915 }
3916 break;
3917 case 0x71: /* shift mm, im */
3918 case 0x72:
3919 case 0x73:
3920 case 0x171: /* shift xmm, im */
3921 case 0x172:
3922 case 0x173:
3923 val = ldub_code(s->pc++);
3924 if (is_xmm) {
3925 gen_op_movl_T0_im(val);
3926 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3927 gen_op_movl_T0_0();
3928 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(1)));
3929 op1_offset = offsetof(CPUX86State,xmm_t0);
3930 } else {
3931 gen_op_movl_T0_im(val);
3932 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(0)));
3933 gen_op_movl_T0_0();
3934 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,mmx_t0.MMX_L(1)));
3935 op1_offset = offsetof(CPUX86State,mmx_t0);
3936 }
3937 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + ((modrm >> 3) & 7)][b1];
3938 if (!sse_op2)
3939 goto illegal_op;
3940 if (is_xmm) {
3941 rm = (modrm & 7) | REX_B(s);
3942 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3943 } else {
3944 rm = (modrm & 7);
3945 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3946 }
3947 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
3948 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset);
3949 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
3950 break;
3951 case 0x050: /* movmskps */
3952 rm = (modrm & 7) | REX_B(s);
3953 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3954 offsetof(CPUX86State,xmm_regs[rm]));
3955 tcg_gen_helper_1_1(helper_movmskps, cpu_tmp2_i32, cpu_ptr0);
3956 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3957 gen_op_mov_reg_T0(OT_LONG, reg);
3958 break;
3959 case 0x150: /* movmskpd */
3960 rm = (modrm & 7) | REX_B(s);
3961 tcg_gen_addi_ptr(cpu_ptr0, cpu_env,
3962 offsetof(CPUX86State,xmm_regs[rm]));
3963 tcg_gen_helper_1_1(helper_movmskpd, cpu_tmp2_i32, cpu_ptr0);
3964 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
3965 gen_op_mov_reg_T0(OT_LONG, reg);
3966 break;
3967 case 0x02a: /* cvtpi2ps */
3968 case 0x12a: /* cvtpi2pd */
3969 tcg_gen_helper_0_0(helper_enter_mmx);
3970 if (mod != 3) {
3971 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3972 op2_offset = offsetof(CPUX86State,mmx_t0);
3973 gen_ldq_env_A0(s->mem_index, op2_offset);
3974 } else {
3975 rm = (modrm & 7);
3976 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3977 }
3978 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3979 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3980 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
3981 switch(b >> 8) {
3982 case 0x0:
3983 tcg_gen_helper_0_2(helper_cvtpi2ps, cpu_ptr0, cpu_ptr1);
3984 break;
3985 default:
3986 case 0x1:
3987 tcg_gen_helper_0_2(helper_cvtpi2pd, cpu_ptr0, cpu_ptr1);
3988 break;
3989 }
3990 break;
3991 case 0x22a: /* cvtsi2ss */
3992 case 0x32a: /* cvtsi2sd */
3993 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
3994 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3995 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3996 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
3997 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)];
3998 if (ot == OT_LONG) {
3999 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4000 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_tmp2_i32);
4001 } else {
4002 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_T[0]);
4003 }
4004 break;
4005 case 0x02c: /* cvttps2pi */
4006 case 0x12c: /* cvttpd2pi */
4007 case 0x02d: /* cvtps2pi */
4008 case 0x12d: /* cvtpd2pi */
4009 tcg_gen_helper_0_0(helper_enter_mmx);
4010 if (mod != 3) {
4011 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4012 op2_offset = offsetof(CPUX86State,xmm_t0);
4013 gen_ldo_env_A0(s->mem_index, op2_offset);
4014 } else {
4015 rm = (modrm & 7) | REX_B(s);
4016 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
4017 }
4018 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
4019 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4020 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4021 switch(b) {
4022 case 0x02c:
4023 tcg_gen_helper_0_2(helper_cvttps2pi, cpu_ptr0, cpu_ptr1);
4024 break;
4025 case 0x12c:
4026 tcg_gen_helper_0_2(helper_cvttpd2pi, cpu_ptr0, cpu_ptr1);
4027 break;
4028 case 0x02d:
4029 tcg_gen_helper_0_2(helper_cvtps2pi, cpu_ptr0, cpu_ptr1);
4030 break;
4031 case 0x12d:
4032 tcg_gen_helper_0_2(helper_cvtpd2pi, cpu_ptr0, cpu_ptr1);
4033 break;
4034 }
4035 break;
4036 case 0x22c: /* cvttss2si */
4037 case 0x32c: /* cvttsd2si */
4038 case 0x22d: /* cvtss2si */
4039 case 0x32d: /* cvtsd2si */
4040 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4041 if (mod != 3) {
4042 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4043 if ((b >> 8) & 1) {
4044 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
4045 } else {
4046 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4047 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
4048 }
4049 op2_offset = offsetof(CPUX86State,xmm_t0);
4050 } else {
4051 rm = (modrm & 7) | REX_B(s);
4052 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
4053 }
4054 sse_op2 = sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
4055 (b & 1) * 4];
4056 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset);
4057 if (ot == OT_LONG) {
4058 tcg_gen_helper_1_1(sse_op2, cpu_tmp2_i32, cpu_ptr0);
4059 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4060 } else {
4061 tcg_gen_helper_1_1(sse_op2, cpu_T[0], cpu_ptr0);
4062 }
4063 gen_op_mov_reg_T0(ot, reg);
4064 break;
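        /* Worked examples of the sse_op_table3 index above (reading aid):
             cvttsd2si r32, xmm (b = 0x32c, dflag != 2):
               0*2 + ((0x32c >> 8) - 2) + 4 + (0x32c & 1)*4 = 0 + 1 + 4 + 0 = 5
             cvtsd2si r64, xmm (b = 0x32d, dflag == 2):
               1*2 + 1 + 4 + 1*4 = 11
           The (s->dflag == 2)*2 term selects the 64-bit destination
           variants and (b & 1)*4 distinguishes the rounding (cvt...)
           converters from the truncating (cvtt...) ones. */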
4065 case 0xc4: /* pinsrw */
4066 case 0x1c4:
4067 s->rip_offset = 1;
4068 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4069 val = ldub_code(s->pc++);
4070 if (b1) {
4071 val &= 7;
4072 tcg_gen_st16_tl(cpu_T[0], cpu_env,
4073 offsetof(CPUX86State,xmm_regs[reg].XMM_W(val)));
4074 } else {
4075 val &= 3;
4076 tcg_gen_st16_tl(cpu_T[0], cpu_env,
4077 offsetof(CPUX86State,fpregs[reg].mmx.MMX_W(val)));
4078 }
4079 break;
4080 case 0xc5: /* pextrw */
4081 case 0x1c5:
4082 if (mod != 3)
4083 goto illegal_op;
4084 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4085 val = ldub_code(s->pc++);
4086 if (b1) {
4087 val &= 7;
4088 rm = (modrm & 7) | REX_B(s);
4089 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
4090 offsetof(CPUX86State,xmm_regs[rm].XMM_W(val)));
4091 } else {
4092 val &= 3;
4093 rm = (modrm & 7);
4094 tcg_gen_ld16u_tl(cpu_T[0], cpu_env,
4095 offsetof(CPUX86State,fpregs[rm].mmx.MMX_W(val)));
4096 }
4097 reg = ((modrm >> 3) & 7) | rex_r;
4098 gen_op_mov_reg_T0(ot, reg);
4099 break;
4100 case 0x1d6: /* movq ea, xmm */
4101 if (mod != 3) {
4102 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4103 gen_stq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
4104 } else {
4105 rm = (modrm & 7) | REX_B(s);
4106 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
4107 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
4108 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
4109 }
4110 break;
4111 case 0x2d6: /* movq2dq */
4112 tcg_gen_helper_0_0(helper_enter_mmx);
4113 rm = (modrm & 7);
4114 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
4115 offsetof(CPUX86State,fpregs[rm].mmx));
4116 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
4117 break;
4118 case 0x3d6: /* movdq2q */
4119 tcg_gen_helper_0_0(helper_enter_mmx);
4120 rm = (modrm & 7) | REX_B(s);
4121 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
4122 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
4123 break;
4124 case 0xd7: /* pmovmskb */
4125 case 0x1d7:
4126 if (mod != 3)
4127 goto illegal_op;
4128 if (b1) {
4129 rm = (modrm & 7) | REX_B(s);
4130 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm]));
4131 tcg_gen_helper_1_1(helper_pmovmskb_xmm, cpu_tmp2_i32, cpu_ptr0);
4132 } else {
4133 rm = (modrm & 7);
4134 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx));
4135 tcg_gen_helper_1_1(helper_pmovmskb_mmx, cpu_tmp2_i32, cpu_ptr0);
4136 }
4137 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
4138 reg = ((modrm >> 3) & 7) | rex_r;
4139 gen_op_mov_reg_T0(OT_LONG, reg);
4140 break;
4141 case 0x138:
4142 if (s->prefix & PREFIX_REPNZ)
4143 goto crc32; /* else fall through */
4144 case 0x038:
4145 b = modrm;
4146 modrm = ldub_code(s->pc++);
4147 rm = modrm & 7;
4148 reg = ((modrm >> 3) & 7) | rex_r;
4149 mod = (modrm >> 6) & 3;
4150
4151 sse_op2 = sse_op_table6[b].op[b1];
4152 if (!sse_op2)
4153 goto illegal_op;
4154 if (!(s->cpuid_ext_features & sse_op_table6[b].ext_mask))
4155 goto illegal_op;
4156
4157 if (b1) {
4158 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4159 if (mod == 3) {
4160 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
4161 } else {
4162 op2_offset = offsetof(CPUX86State,xmm_t0);
4163 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4164 switch (b) {
4165 case 0x20: case 0x30: /* pmovsxbw, pmovzxbw */
4166 case 0x23: case 0x33: /* pmovsxwd, pmovzxwd */
4167 case 0x25: case 0x35: /* pmovsxdq, pmovzxdq */
4168 gen_ldq_env_A0(s->mem_index, op2_offset +
4169 offsetof(XMMReg, XMM_Q(0)));
4170 break;
4171 case 0x21: case 0x31: /* pmovsxbd, pmovzxbd */
4172 case 0x24: case 0x34: /* pmovsxwq, pmovzxwq */
4173 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0,
4174 (s->mem_index >> 2) - 1);
4175 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0);
4176 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, op2_offset +
4177 offsetof(XMMReg, XMM_L(0)));
4178 break;
4179 case 0x22: case 0x32: /* pmovsxbq, pmovzxbq */
4180 tcg_gen_qemu_ld16u(cpu_tmp0, cpu_A0,
4181 (s->mem_index >> 2) - 1);
4182 tcg_gen_st16_tl(cpu_tmp0, cpu_env, op2_offset +
4183 offsetof(XMMReg, XMM_W(0)));
4184 break;
4185 case 0x2a: /* movntdqa */

4186 gen_ldo_env_A0(s->mem_index, op1_offset);
4187 return;
4188 default:
4189 gen_ldo_env_A0(s->mem_index, op2_offset);
4190 }
4191 }
4192 } else {
4193 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4194 if (mod == 3) {
4195 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4196 } else {
4197 op2_offset = offsetof(CPUX86State,mmx_t0);
4198 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4199 gen_ldq_env_A0(s->mem_index, op2_offset);
4200 }
4201 }
4202 if (sse_op2 == SSE_SPECIAL)
4203 goto illegal_op;
4204
4205 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4206 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4207 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4208
4209 if (b == 0x17)
4210 s->cc_op = CC_OP_EFLAGS;
4211 break;
4212 case 0x338: /* crc32 */
4213 crc32:
4214 b = modrm;
4215 modrm = ldub_code(s->pc++);
4216 reg = ((modrm >> 3) & 7) | rex_r;
4217
4218 if (b != 0xf0 && b != 0xf1)
4219 goto illegal_op;
4220 if (!(s->cpuid_ext_features & CPUID_EXT_SSE42))
4221 goto illegal_op;
4222
4223 if (b == 0xf0)
4224 ot = OT_BYTE;
4225 else if (b == 0xf1 && s->dflag != 2)
4226 if (s->prefix & PREFIX_DATA)
4227 ot = OT_WORD;
4228 else
4229 ot = OT_LONG;
4230 else
4231 ot = OT_QUAD;
4232
4233 gen_op_mov_TN_reg(OT_LONG, 0, reg);
4234 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
4235 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4236 tcg_gen_helper_1_3(helper_crc32, cpu_T[0], cpu_tmp2_i32,
4237 cpu_T[0], tcg_const_i32(8 << ot));
4238
4239 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4240 gen_op_mov_reg_T0(ot, reg);
4241 break;
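        /* Illustrative sketch (guarded out, hypothetical name): a bit-serial
           software model of what the crc32 helper above is expected to
           implement. SSE4.2's crc32 uses the CRC-32C (Castagnoli)
           polynomial, reflected constant 0x82F63B78, with no pre/post
           inversion; 'bits' matches the tcg_const_i32(8 << ot) operand. */
#if 0
        static uint32_t crc32c_model(uint32_t crc, uint64_t data, int bits)
        {
            int i;
            for (i = 0; i < bits; i++) {
                uint32_t bit = (crc ^ (uint32_t)(data >> i)) & 1;
                crc = (crc >> 1) ^ (bit ? 0x82F63B78u : 0);
            }
            return crc;
        }
#endif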
4242 case 0x03a:
4243 case 0x13a:
4244 b = modrm;
4245 modrm = ldub_code(s->pc++);
4246 rm = modrm & 7;
4247 reg = ((modrm >> 3) & 7) | rex_r;
4248 mod = (modrm >> 6) & 3;
4249
4250 sse_op2 = sse_op_table7[b].op[b1];
4251 if (!sse_op2)
4252 goto illegal_op;
4253 if (!(s->cpuid_ext_features & sse_op_table7[b].ext_mask))
4254 goto illegal_op;
4255
4256 if (sse_op2 == SSE_SPECIAL) {
4257 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
4258 rm = (modrm & 7) | REX_B(s);
4259 if (mod != 3)
4260 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4261 reg = ((modrm >> 3) & 7) | rex_r;
4262 val = ldub_code(s->pc++);
4263 switch (b) {
4264 case 0x14: /* pextrb */
4265 tcg_gen_ld8u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4266 xmm_regs[reg].XMM_B(val & 15)));
4267 if (mod == 3)
4268 gen_op_mov_reg_T0(ot, rm);
4269 else
4270 tcg_gen_qemu_st8(cpu_T[0], cpu_A0,
4271 (s->mem_index >> 2) - 1);
4272 break;
4273 case 0x15: /* pextrw */
4274 tcg_gen_ld16u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4275 xmm_regs[reg].XMM_W(val & 7)));
4276 if (mod == 3)
4277 gen_op_mov_reg_T0(ot, rm);
4278 else
4279 tcg_gen_qemu_st16(cpu_T[0], cpu_A0,
4280 (s->mem_index >> 2) - 1);
4281 break;
4282 case 0x16:
4283 if (ot == OT_LONG) { /* pextrd */
4284 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
4285 offsetof(CPUX86State,
4286 xmm_regs[reg].XMM_L(val & 3)));
4287 if (mod == 3)
4288 gen_op_mov_reg_v(ot, rm, cpu_tmp2_i32);
4289 else
4290 tcg_gen_qemu_st32(cpu_tmp2_i32, cpu_A0,
4291 (s->mem_index >> 2) - 1);
4292 } else { /* pextrq */
4293 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env,
4294 offsetof(CPUX86State,
4295 xmm_regs[reg].XMM_Q(val & 1)));
4296 if (mod == 3)
4297 gen_op_mov_reg_v(ot, rm, cpu_tmp1_i64);
4298 else
4299 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
4300 (s->mem_index >> 2) - 1);
4301 }
4302 break;
4303 case 0x17: /* extractps */
4304 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4305 xmm_regs[reg].XMM_L(val & 3)));
4306 if (mod == 3)
4307 gen_op_mov_reg_T0(ot, rm);
4308 else
4309 tcg_gen_qemu_st32(cpu_T[0], cpu_A0,
4310 (s->mem_index >> 2) - 1);
4311 break;
4312 case 0x20: /* pinsrb */
4313 if (mod == 3)
4314 gen_op_mov_TN_reg(OT_LONG, 0, rm);
4315 else
4316 tcg_gen_qemu_ld8u(cpu_T[0], cpu_A0,
4317 (s->mem_index >> 2) - 1);
4318 tcg_gen_st8_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,
4319 xmm_regs[reg].XMM_B(val & 15)));
4320 break;
4321 case 0x21: /* insertps */
4322 if (mod == 3)
4323 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env,
4324 offsetof(CPUX86State,xmm_regs[rm]
4325 .XMM_L((val >> 6) & 3)));
4326 else
4327 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
4328 (s->mem_index >> 2) - 1);
4329 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
4330 offsetof(CPUX86State,xmm_regs[reg]
4331 .XMM_L((val >> 4) & 3)));
4332 if ((val >> 0) & 1)
4333 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4334 cpu_env, offsetof(CPUX86State,
4335 xmm_regs[reg].XMM_L(0)));
4336 if ((val >> 1) & 1)
4337 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4338 cpu_env, offsetof(CPUX86State,
4339 xmm_regs[reg].XMM_L(1)));
4340 if ((val >> 2) & 1)
4341 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4342 cpu_env, offsetof(CPUX86State,
4343 xmm_regs[reg].XMM_L(2)));
4344 if ((val >> 3) & 1)
4345 tcg_gen_st_i32(tcg_const_i32(0 /*float32_zero*/),
4346 cpu_env, offsetof(CPUX86State,
4347 xmm_regs[reg].XMM_L(3)));
4348 break;
4349 case 0x22:
4350 if (ot == OT_LONG) { /* pinsrd */
4351 if (mod == 3)
4352 gen_op_mov_v_reg(ot, cpu_tmp2_i32, rm);
4353 else
4354 tcg_gen_qemu_ld32u(cpu_tmp2_i32, cpu_A0,
4355 (s->mem_index >> 2) - 1);
4356 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env,
4357 offsetof(CPUX86State,
4358 xmm_regs[reg].XMM_L(val & 3)));
4359 } else { /* pinsrq */
4360 if (mod == 3)
4361 gen_op_mov_v_reg(ot, cpu_tmp1_i64, rm);
4362 else
4363 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
4364 (s->mem_index >> 2) - 1);
4365 tcg_gen_st_i64(cpu_tmp1_i64, cpu_env,
4366 offsetof(CPUX86State,
4367 xmm_regs[reg].XMM_Q(val & 1)));
4368 }
4369 break;
4370 }
4371 return;
4372 }
4373
4374 if (b1) {
4375 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4376 if (mod == 3) {
4377 op2_offset = offsetof(CPUX86State,xmm_regs[rm | REX_B(s)]);
4378 } else {
4379 op2_offset = offsetof(CPUX86State,xmm_t0);
4380 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4381 gen_ldo_env_A0(s->mem_index, op2_offset);
4382 }
4383 } else {
4384 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4385 if (mod == 3) {
4386 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4387 } else {
4388 op2_offset = offsetof(CPUX86State,mmx_t0);
4389 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4390 gen_ldq_env_A0(s->mem_index, op2_offset);
4391 }
4392 }
4393 val = ldub_code(s->pc++);
4394
4395 if ((b & 0xfc) == 0x60) { /* pcmpXstrX */
4396 s->cc_op = CC_OP_EFLAGS;
4397
4398 if (s->dflag == 2)
4399 /* The helper must use entire 64-bit gp registers */
4400 val |= 1 << 8;
4401 }
4402
4403 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4404 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4405 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
4406 break;
4407 default:
4408 goto illegal_op;
4409 }
4410 } else {
4411 /* generic MMX or SSE operation */
4412 switch(b) {
4413 case 0x70: /* pshufx insn */
4414 case 0xc6: /* shufps/shufpd insn */
4415 case 0xc2: /* compare insns */
4416 s->rip_offset = 1;
4417 break;
4418 default:
4419 break;
4420 }
4421 if (is_xmm) {
4422 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
4423 if (mod != 3) {
4424 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4425 op2_offset = offsetof(CPUX86State,xmm_t0);
4426 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
4427 b == 0xc2)) {
4428 /* specific case for SSE single instructions */
4429 if (b1 == 2) {
4430 /* 32 bit access */
4431 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
4432 tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,xmm_t0.XMM_L(0)));
4433 } else {
4434 /* 64 bit access */
4435 gen_ldq_env_A0(s->mem_index, offsetof(CPUX86State,xmm_t0.XMM_D(0)));
4436 }
4437 } else {
4438 gen_ldo_env_A0(s->mem_index, op2_offset);
4439 }
4440 } else {
4441 rm = (modrm & 7) | REX_B(s);
4442 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
4443 }
4444 } else {
4445 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
4446 if (mod != 3) {
4447 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4448 op2_offset = offsetof(CPUX86State,mmx_t0);
4449 gen_ldq_env_A0(s->mem_index, op2_offset);
4450 } else {
4451 rm = (modrm & 7);
4452 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
4453 }
4454 }
4455 switch(b) {
4456 case 0x0f: /* 3DNow! data insns */
4457 if (!(s->cpuid_ext2_features & CPUID_EXT2_3DNOW))
4458 goto illegal_op;
4459 val = ldub_code(s->pc++);
4460 sse_op2 = sse_op_table5[val];
4461 if (!sse_op2)
4462 goto illegal_op;
4463 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4464 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4465 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4466 break;
4467 case 0x70: /* pshufx insn */
4468 case 0xc6: /* shufps/shufpd insn */
4469 val = ldub_code(s->pc++);
4470 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4471 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4472 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, tcg_const_i32(val));
4473 break;
4474 case 0xc2:
4475 /* compare insns */
4476 val = ldub_code(s->pc++);
4477 if (val >= 8)
4478 goto illegal_op;
4479 sse_op2 = sse_op_table4[val][b1];
4480 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4481 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4482 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4483 break;
4484 case 0xf7:
4485 /* maskmov: we must prepare A0 */
4486 if (mod != 3)
4487 goto illegal_op;
4488#ifdef TARGET_X86_64
4489 if (s->aflag == 2) {
4490 gen_op_movq_A0_reg(R_EDI);
4491 } else
4492#endif
4493 {
4494 gen_op_movl_A0_reg(R_EDI);
4495 if (s->aflag == 0)
4496 gen_op_andl_A0_ffff();
4497 }
4498 gen_add_A0_ds_seg(s);
4499
4500 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4501 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4502 tcg_gen_helper_0_3(sse_op2, cpu_ptr0, cpu_ptr1, cpu_A0);
4503 break;
4504 default:
4505 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
4506 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
4507 tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);
4508 break;
4509 }
4510 if (b == 0x2e || b == 0x2f) {
4511 s->cc_op = CC_OP_EFLAGS;
4512 }
4513 }
4514}
4515
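/* Reading aid for gen_sse() above: operands are addressed as byte offsets
   into CPUX86State and handed to helpers as host pointers, e.g. for a
   register-to-register packed op:

     op1_offset = offsetof(CPUX86State, xmm_regs[reg]);
     op2_offset = offsetof(CPUX86State, xmm_regs[rm]);
     tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset);
     tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset);
     tcg_gen_helper_0_2(sse_op2, cpu_ptr0, cpu_ptr1);

   so helpers always receive pointers into the CPU state, never values;
   memory operands are first staged into xmm_t0/mmx_t0 the same way. */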
4516#ifdef VBOX
4517/* Checks if it's an invalid lock sequence. Only a few instructions
4518 can be used together with the lock prefix, and of those only the
4519 forms that write a memory operand. So, this is somewhat tedious
4520 work to sort out...
4521 The AMD manual lists the following instructions.
4522 ADC
4523 ADD
4524 AND
4525 BTC
4526 BTR
4527 BTS
4528 CMPXCHG
4529 CMPXCHG8B
4530 CMPXCHG16B
4531 DEC
4532 INC
4533 NEG
4534 NOT
4535 OR
4536 SBB
4537 SUB
4538 XADD
4539 XCHG
4540 XOR */
4541static bool is_invalid_lock_sequence(DisasContext *s, target_ulong pc_start, int b)
4542{
4543 target_ulong pc = s->pc;
4544 int modrm, mod, op;
4545
4546 /* X={8,16,32,64} Y={16,32,64} */
4547 switch (b)
4548 {
4549 /* /2: ADC reg/memX, immX */
4550 /* /0: ADD reg/memX, immX */
4551 /* /4: AND reg/memX, immX */
4552 /* /1: OR reg/memX, immX */
4553 /* /3: SBB reg/memX, immX */
4554 /* /5: SUB reg/memX, immX */
4555 /* /6: XOR reg/memX, immX */
4556 case 0x80:
4557 case 0x81:
4558 case 0x83:
4559 modrm = ldub_code(pc++);
4560 op = (modrm >> 3) & 7;
4561 if (op == 7) /* /7: CMP */
4562 break;
4563 mod = (modrm >> 6) & 3;
4564 if (mod == 3) /* register destination */
4565 break;
4566 return false;
4567
4568 case 0x10: /* /r: ADC reg/mem8, reg8 */
4569 case 0x11: /* /r: ADC reg/memX, regY */
4570 case 0x00: /* /r: ADD reg/mem8, reg8 */
4571 case 0x01: /* /r: ADD reg/memX, regY */
4572 case 0x20: /* /r: AND reg/mem8, reg8 */
4573 case 0x21: /* /r: AND reg/memY, regY */
4574 case 0x08: /* /r: OR reg/mem8, reg8 */
4575 case 0x09: /* /r: OR reg/memY, regY */
4576 case 0x18: /* /r: SBB reg/mem8, reg8 */
4577 case 0x19: /* /r: SBB reg/memY, regY */
4578 case 0x28: /* /r: SUB reg/mem8, reg8 */
4579 case 0x29: /* /r: SUB reg/memY, regY */
4580 case 0x86: /* /r: XCHG reg/mem8, reg8 or XCHG reg8, reg/mem8 */
4581 case 0x87: /* /r: XCHG reg/memY, regY or XCHG regY, reg/memY */
4582 case 0x30: /* /r: XOR reg/mem8, reg8 */
4583 case 0x31: /* /r: XOR reg/memY, regY */
4584 modrm = ldub_code(pc++);
4585 mod = (modrm >> 6) & 3;
4586 if (mod == 3) /* register destination */
4587 break;
4588 return false;
4589
4590 /* /1: DEC reg/memX */
4591 /* /0: INC reg/memX */
4592 case 0xfe:
4593 case 0xff:
4594 modrm = ldub_code(pc++);
4595 mod = (modrm >> 6) & 3;
4596 if (mod == 3) /* register destination */
4597 break;
4598 return false;
4599
4600 /* /3: NEG reg/memX */
4601 /* /2: NOT reg/memX */
4602 case 0xf6:
4603 case 0xf7:
4604 modrm = ldub_code(pc++);
4605 mod = (modrm >> 6) & 3;
4606 if (mod == 3) /* register destination */
4607 break;
4608 return false;
4609
4610 case 0x0f:
4611 b = ldub_code(pc++);
4612 switch (b)
4613 {
4614 /* /7: BTC reg/memY, imm8 */
4615 /* /6: BTR reg/memY, imm8 */
4616 /* /5: BTS reg/memY, imm8 */
4617 case 0xba:
4618 modrm = ldub_code(pc++);
4619 op = (modrm >> 3) & 7;
4620 if (op < 5)
4621 break;
4622 mod = (modrm >> 6) & 3;
4623 if (mod == 3) /* register destination */
4624 break;
4625 return false;
4626
4627 case 0xbb: /* /r: BTC reg/memY, regY */
4628 case 0xb3: /* /r: BTR reg/memY, regY */
4629 case 0xab: /* /r: BTS reg/memY, regY */
4630 case 0xb0: /* /r: CMPXCHG reg/mem8, reg8 */
4631 case 0xb1: /* /r: CMPXCHG reg/memY, regY */
4632 case 0xc0: /* /r: XADD reg/mem8, reg8 */
4633 case 0xc1: /* /r: XADD reg/memY, regY */
4634 modrm = ldub_code(pc++);
4635 mod = (modrm >> 6) & 3;
4636 if (mod == 3) /* register destination */
4637 break;
4638 return false;
4639
4640 /* /1: CMPXCHG8B mem64 or CMPXCHG16B mem128 */
4641 case 0xc7:
4642 modrm = ldub_code(pc++);
4643 op = (modrm >> 3) & 7;
4644 if (op != 1)
4645 break;
4646 return false;
4647 }
4648 break;
4649 }
4650
4651 /* Illegal sequence. s->pc is already past the lock prefix, which
4652 should be sufficient for the TB. */
4653 Log(("illegal lock sequence %RGv (b=%#x)\n", pc_start, b));
4654 return true;
4655}
4656#endif /* VBOX */
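/* Illustrative probes for is_invalid_lock_sequence() above (assuming a
   DisasContext whose s->pc points just past the LOCK prefix):
     F0 00 03     lock add [ebx], al     -> false (mem destination, /r ADD)
     F0 00 C3     lock add bl, al        -> true  (register destination)
     F0 80 3B 01  lock cmp byte [ebx], 1 -> true  (/7 is CMP, not lockable)
     F0 90        lock nop               -> true  (opcode not in the list)
   A 'true' return makes disas_insn() raise #UD instead of translating. */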
4657
4658
4659/* convert one instruction. s->is_jmp is set if the translation must
4660 be stopped. Return the next pc value */
4661static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
4662{
4663 int b, prefixes, aflag, dflag;
4664 int shift, ot;
4665 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
4666 target_ulong next_eip, tval;
4667 int rex_w, rex_r;
4668
4669 if (unlikely(loglevel & CPU_LOG_TB_OP))
4670 tcg_gen_debug_insn_start(pc_start);
4671
4672 s->pc = pc_start;
4673 prefixes = 0;
4674 aflag = s->code32;
4675 dflag = s->code32;
4676 s->override = -1;
4677 rex_w = -1;
4678 rex_r = 0;
4679#ifdef TARGET_X86_64
4680 s->rex_x = 0;
4681 s->rex_b = 0;
4682 x86_64_hregs = 0;
4683#endif
4684 s->rip_offset = 0; /* for relative ip address */
4685#ifdef VBOX
4686 /* nike: seems to only slow things down */
4687# if 0
4688 /* Always update EIP. Otherwise one must be very careful with generated code that can raise exceptions. */
4689
4690 gen_update_eip(pc_start - s->cs_base);
4691# endif
4692#endif
4693
4694 next_byte:
4695 b = ldub_code(s->pc);
4696 s->pc++;
4697 /* check prefixes */
4698#ifdef TARGET_X86_64
4699 if (CODE64(s)) {
4700 switch (b) {
4701 case 0xf3:
4702 prefixes |= PREFIX_REPZ;
4703 goto next_byte;
4704 case 0xf2:
4705 prefixes |= PREFIX_REPNZ;
4706 goto next_byte;
4707 case 0xf0:
4708 prefixes |= PREFIX_LOCK;
4709 goto next_byte;
4710 case 0x2e:
4711 s->override = R_CS;
4712 goto next_byte;
4713 case 0x36:
4714 s->override = R_SS;
4715 goto next_byte;
4716 case 0x3e:
4717 s->override = R_DS;
4718 goto next_byte;
4719 case 0x26:
4720 s->override = R_ES;
4721 goto next_byte;
4722 case 0x64:
4723 s->override = R_FS;
4724 goto next_byte;
4725 case 0x65:
4726 s->override = R_GS;
4727 goto next_byte;
4728 case 0x66:
4729 prefixes |= PREFIX_DATA;
4730 goto next_byte;
4731 case 0x67:
4732 prefixes |= PREFIX_ADR;
4733 goto next_byte;
4734 case 0x40 ... 0x4f:
4735 /* REX prefix */
4736 rex_w = (b >> 3) & 1;
4737 rex_r = (b & 0x4) << 1;
4738 s->rex_x = (b & 0x2) << 2;
4739 REX_B(s) = (b & 0x1) << 3;
4740 x86_64_hregs = 1; /* select uniform byte register addressing */
4741 goto next_byte;
4742 }
4743 if (rex_w == 1) {
4744 /* 0x66 is ignored if rex.w is set */
4745 dflag = 2;
4746 } else {
4747 if (prefixes & PREFIX_DATA)
4748 dflag ^= 1;
4749 }
4750 if (!(prefixes & PREFIX_ADR))
4751 aflag = 2;
4752 } else
4753#endif
4754 {
4755 switch (b) {
4756 case 0xf3:
4757 prefixes |= PREFIX_REPZ;
4758 goto next_byte;
4759 case 0xf2:
4760 prefixes |= PREFIX_REPNZ;
4761 goto next_byte;
4762 case 0xf0:
4763 prefixes |= PREFIX_LOCK;
4764 goto next_byte;
4765 case 0x2e:
4766 s->override = R_CS;
4767 goto next_byte;
4768 case 0x36:
4769 s->override = R_SS;
4770 goto next_byte;
4771 case 0x3e:
4772 s->override = R_DS;
4773 goto next_byte;
4774 case 0x26:
4775 s->override = R_ES;
4776 goto next_byte;
4777 case 0x64:
4778 s->override = R_FS;
4779 goto next_byte;
4780 case 0x65:
4781 s->override = R_GS;
4782 goto next_byte;
4783 case 0x66:
4784 prefixes |= PREFIX_DATA;
4785 goto next_byte;
4786 case 0x67:
4787 prefixes |= PREFIX_ADR;
4788 goto next_byte;
4789 }
4790 if (prefixes & PREFIX_DATA)
4791 dflag ^= 1;
4792 if (prefixes & PREFIX_ADR)
4793 aflag ^= 1;
4794 }
4795
4796 s->prefix = prefixes;
4797 s->aflag = aflag;
4798 s->dflag = dflag;
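    /* Worked examples of the prefix state just gathered (reading aid):
         32-bit code, 66 B8 34 12 (mov ax, 0x1234):
           0x66 sets PREFIX_DATA, so dflag toggles 1 -> 0 (16-bit operand).
         64-bit code, 48 01 D8 (add rax, rbx):
           0x48 is REX.W, so rex_w = 1 and dflag is forced to 2 (64-bit),
           while aflag defaults to 2 unless a 0x67 prefix is present. */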
4799
4800 /* lock generation */
4801#ifndef VBOX
4802 if (prefixes & PREFIX_LOCK)
4803 tcg_gen_helper_0_0(helper_lock);
4804#else /* VBOX */
4805 if (prefixes & PREFIX_LOCK) {
4806 if (is_invalid_lock_sequence(s, pc_start, b)) {
4807 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
4808 return s->pc;
4809 }
4810 tcg_gen_helper_0_0(helper_lock);
4811 }
4812#endif /* VBOX */
4813
4814 /* now check op code */
4815 reswitch:
4816 switch(b) {
4817 case 0x0f:
4818 /**************************/
4819 /* extended op code */
4820 b = ldub_code(s->pc++) | 0x100;
4821 goto reswitch;
4822
4823 /**************************/
4824 /* arith & logic */
4825 case 0x00 ... 0x05:
4826 case 0x08 ... 0x0d:
4827 case 0x10 ... 0x15:
4828 case 0x18 ... 0x1d:
4829 case 0x20 ... 0x25:
4830 case 0x28 ... 0x2d:
4831 case 0x30 ... 0x35:
4832 case 0x38 ... 0x3d:
4833 {
4834 int op, f, val;
4835 op = (b >> 3) & 7;
4836 f = (b >> 1) & 3;
4837
4838 if ((b & 1) == 0)
4839 ot = OT_BYTE;
4840 else
4841 ot = dflag + OT_WORD;
4842
4843 switch(f) {
4844 case 0: /* OP Ev, Gv */
4845 modrm = ldub_code(s->pc++);
4846 reg = ((modrm >> 3) & 7) | rex_r;
4847 mod = (modrm >> 6) & 3;
4848 rm = (modrm & 7) | REX_B(s);
4849 if (mod != 3) {
4850 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4851 opreg = OR_TMP0;
4852 } else if (op == OP_XORL && rm == reg) {
4853 xor_zero:
4854 /* xor reg, reg optimisation */
4855 gen_op_movl_T0_0();
4856 s->cc_op = CC_OP_LOGICB + ot;
4857 gen_op_mov_reg_T0(ot, reg);
4858 gen_op_update1_cc();
4859 break;
4860 } else {
4861 opreg = rm;
4862 }
4863 gen_op_mov_TN_reg(ot, 1, reg);
4864 gen_op(s, op, ot, opreg);
4865 break;
4866 case 1: /* OP Gv, Ev */
4867 modrm = ldub_code(s->pc++);
4868 mod = (modrm >> 6) & 3;
4869 reg = ((modrm >> 3) & 7) | rex_r;
4870 rm = (modrm & 7) | REX_B(s);
4871 if (mod != 3) {
4872 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4873 gen_op_ld_T1_A0(ot + s->mem_index);
4874 } else if (op == OP_XORL && rm == reg) {
4875 goto xor_zero;
4876 } else {
4877 gen_op_mov_TN_reg(ot, 1, rm);
4878 }
4879 gen_op(s, op, ot, reg);
4880 break;
4881 case 2: /* OP A, Iv */
4882 val = insn_get(s, ot);
4883 gen_op_movl_T1_im(val);
4884 gen_op(s, op, ot, OR_EAX);
4885 break;
4886 }
4887 }
4888 break;
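        /* ModRM decode examples for the group above (reading aid):
             01 D8  add eax, ebx -> modrm = 0xD8: mod = 3, reg = 3 (EBX),
               rm = 0 (EAX); f = 0 ("OP Ev, Gv"), register form, so the
               generated op adds EBX into EAX.
             31 C0  xor eax, eax -> op == OP_XORL and rm == reg, so the
               xor_zero fast path stores constant 0 and skips the ALU op. */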
4889
4890 case 0x82:
4891 if (CODE64(s))
4892 goto illegal_op; /* else fall through */
4893 case 0x80: /* GRP1 */
4894 case 0x81:
4895 case 0x83:
4896 {
4897 int val;
4898
4899 if ((b & 1) == 0)
4900 ot = OT_BYTE;
4901 else
4902 ot = dflag + OT_WORD;
4903
4904 modrm = ldub_code(s->pc++);
4905 mod = (modrm >> 6) & 3;
4906 rm = (modrm & 7) | REX_B(s);
4907 op = (modrm >> 3) & 7;
4908
4909 if (mod != 3) {
4910 if (b == 0x83)
4911 s->rip_offset = 1;
4912 else
4913 s->rip_offset = insn_const_size(ot);
4914 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4915 opreg = OR_TMP0;
4916 } else {
4917 opreg = rm;
4918 }
4919
4920 switch(b) {
4921 default:
4922 case 0x80:
4923 case 0x81:
4924 case 0x82:
4925 val = insn_get(s, ot);
4926 break;
4927 case 0x83:
4928 val = (int8_t)insn_get(s, OT_BYTE);
4929 break;
4930 }
4931 gen_op_movl_T1_im(val);
4932 gen_op(s, op, ot, opreg);
4933 }
4934 break;
4935
4936 /**************************/
4937 /* inc, dec, and other misc arith */
4938 case 0x40 ... 0x47: /* inc Gv */
4939 ot = dflag ? OT_LONG : OT_WORD;
4940 gen_inc(s, ot, OR_EAX + (b & 7), 1);
4941 break;
4942 case 0x48 ... 0x4f: /* dec Gv */
4943 ot = dflag ? OT_LONG : OT_WORD;
4944 gen_inc(s, ot, OR_EAX + (b & 7), -1);
4945 break;
4946 case 0xf6: /* GRP3 */
4947 case 0xf7:
4948 if ((b & 1) == 0)
4949 ot = OT_BYTE;
4950 else
4951 ot = dflag + OT_WORD;
4952
4953 modrm = ldub_code(s->pc++);
4954 mod = (modrm >> 6) & 3;
4955 rm = (modrm & 7) | REX_B(s);
4956 op = (modrm >> 3) & 7;
4957 if (mod != 3) {
4958 if (op == 0)
4959 s->rip_offset = insn_const_size(ot);
4960 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4961 gen_op_ld_T0_A0(ot + s->mem_index);
4962 } else {
4963 gen_op_mov_TN_reg(ot, 0, rm);
4964 }
4965
4966 switch(op) {
4967 case 0: /* test */
4968 val = insn_get(s, ot);
4969 gen_op_movl_T1_im(val);
4970 gen_op_testl_T0_T1_cc();
4971 s->cc_op = CC_OP_LOGICB + ot;
4972 break;
4973 case 2: /* not */
4974 tcg_gen_not_tl(cpu_T[0], cpu_T[0]);
4975 if (mod != 3) {
4976 gen_op_st_T0_A0(ot + s->mem_index);
4977 } else {
4978 gen_op_mov_reg_T0(ot, rm);
4979 }
4980 break;
4981 case 3: /* neg */
4982 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
4983 if (mod != 3) {
4984 gen_op_st_T0_A0(ot + s->mem_index);
4985 } else {
4986 gen_op_mov_reg_T0(ot, rm);
4987 }
4988 gen_op_update_neg_cc();
4989 s->cc_op = CC_OP_SUBB + ot;
4990 break;
4991 case 4: /* mul */
4992 switch(ot) {
4993 case OT_BYTE:
4994 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
4995 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
4996 tcg_gen_ext8u_tl(cpu_T[1], cpu_T[1]);
4997 /* XXX: use 32 bit mul which could be faster */
4998 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
4999 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5000 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5001 tcg_gen_andi_tl(cpu_cc_src, cpu_T[0], 0xff00);
5002 s->cc_op = CC_OP_MULB;
5003 break;
5004 case OT_WORD:
5005 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
5006 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
5007 tcg_gen_ext16u_tl(cpu_T[1], cpu_T[1]);
5008 /* XXX: use 32 bit mul which could be faster */
5009 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5010 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5011 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5012 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
5013 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5014 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
5015 s->cc_op = CC_OP_MULW;
5016 break;
5017 default:
5018 case OT_LONG:
5019#ifdef TARGET_X86_64
5020 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5021 tcg_gen_ext32u_tl(cpu_T[0], cpu_T[0]);
5022 tcg_gen_ext32u_tl(cpu_T[1], cpu_T[1]);
5023 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5024 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5025 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5026 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
5027 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5028 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
5029#else
5030 {
5031 TCGv t0, t1;
5032 t0 = tcg_temp_new(TCG_TYPE_I64);
5033 t1 = tcg_temp_new(TCG_TYPE_I64);
5034 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5035 tcg_gen_extu_i32_i64(t0, cpu_T[0]);
5036 tcg_gen_extu_i32_i64(t1, cpu_T[1]);
5037 tcg_gen_mul_i64(t0, t0, t1);
5038 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5039 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5040 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5041 tcg_gen_shri_i64(t0, t0, 32);
5042 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5043 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5044 tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
5045 }
5046#endif
5047 s->cc_op = CC_OP_MULL;
5048 break;
5049#ifdef TARGET_X86_64
5050 case OT_QUAD:
5051 tcg_gen_helper_0_1(helper_mulq_EAX_T0, cpu_T[0]);
5052 s->cc_op = CC_OP_MULQ;
5053 break;
5054#endif
5055 }
5056 break;
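            /* Flag-computation example for the byte mul above: cc_dst holds
               the full product and cc_src its high byte (product & 0xff00);
               with CC_OP_MULB the flags code is expected to report
               CF = OF = (cc_src != 0), i.e. exactly when the result does
               not fit in AL. E.g. AL = 0x40, BL = 0x04: AX = 0x0100,
               cc_src = 0x0100, so CF = OF = 1. */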
5057 case 5: /* imul */
5058 switch(ot) {
5059 case OT_BYTE:
5060 gen_op_mov_TN_reg(OT_BYTE, 1, R_EAX);
5061 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5062 tcg_gen_ext8s_tl(cpu_T[1], cpu_T[1]);
5063 /* XXX: use 32 bit mul which could be faster */
5064 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5065 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5066 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5067 tcg_gen_ext8s_tl(cpu_tmp0, cpu_T[0]);
5068 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5069 s->cc_op = CC_OP_MULB;
5070 break;
5071 case OT_WORD:
5072 gen_op_mov_TN_reg(OT_WORD, 1, R_EAX);
5073 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5074 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5075 /* XXX: use 32 bit mul which could be faster */
5076 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5077 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5078 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5079 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5080 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5081 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 16);
5082 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5083 s->cc_op = CC_OP_MULW;
5084 break;
5085 default:
5086 case OT_LONG:
5087#ifdef TARGET_X86_64
5088 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5089 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5090 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
5091 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5092 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5093 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5094 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
5095 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5096 tcg_gen_shri_tl(cpu_T[0], cpu_T[0], 32);
5097 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5098#else
5099 {
5100 TCGv t0, t1;
5101 t0 = tcg_temp_new(TCG_TYPE_I64);
5102 t1 = tcg_temp_new(TCG_TYPE_I64);
5103 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX);
5104 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
5105 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
5106 tcg_gen_mul_i64(t0, t0, t1);
5107 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5108 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5109 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5110 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
5111 tcg_gen_shri_i64(t0, t0, 32);
5112 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5113 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5114 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5115 }
5116#endif
5117 s->cc_op = CC_OP_MULL;
5118 break;
5119#ifdef TARGET_X86_64
5120 case OT_QUAD:
5121 tcg_gen_helper_0_1(helper_imulq_EAX_T0, cpu_T[0]);
5122 s->cc_op = CC_OP_MULQ;
5123 break;
5124#endif
5125 }
5126 break;
5127 case 6: /* div */
5128 switch(ot) {
5129 case OT_BYTE:
5130 gen_jmp_im(pc_start - s->cs_base);
5131 tcg_gen_helper_0_1(helper_divb_AL, cpu_T[0]);
5132 break;
5133 case OT_WORD:
5134 gen_jmp_im(pc_start - s->cs_base);
5135 tcg_gen_helper_0_1(helper_divw_AX, cpu_T[0]);
5136 break;
5137 default:
5138 case OT_LONG:
5139 gen_jmp_im(pc_start - s->cs_base);
5140 tcg_gen_helper_0_1(helper_divl_EAX, cpu_T[0]);
5141 break;
5142#ifdef TARGET_X86_64
5143 case OT_QUAD:
5144 gen_jmp_im(pc_start - s->cs_base);
5145 tcg_gen_helper_0_1(helper_divq_EAX, cpu_T[0]);
5146 break;
5147#endif
5148 }
5149 break;
5150 case 7: /* idiv */
5151 switch(ot) {
5152 case OT_BYTE:
5153 gen_jmp_im(pc_start - s->cs_base);
5154 tcg_gen_helper_0_1(helper_idivb_AL, cpu_T[0]);
5155 break;
5156 case OT_WORD:
5157 gen_jmp_im(pc_start - s->cs_base);
5158 tcg_gen_helper_0_1(helper_idivw_AX, cpu_T[0]);
5159 break;
5160 default:
5161 case OT_LONG:
5162 gen_jmp_im(pc_start - s->cs_base);
5163 tcg_gen_helper_0_1(helper_idivl_EAX, cpu_T[0]);
5164 break;
5165#ifdef TARGET_X86_64
5166 case OT_QUAD:
5167 gen_jmp_im(pc_start - s->cs_base);
5168 tcg_gen_helper_0_1(helper_idivq_EAX, cpu_T[0]);
5169 break;
5170#endif
5171 }
5172 break;
5173 default:
5174 goto illegal_op;
5175 }
5176 break;
5177
5178 case 0xfe: /* GRP4 */
5179 case 0xff: /* GRP5 */
5180 if ((b & 1) == 0)
5181 ot = OT_BYTE;
5182 else
5183 ot = dflag + OT_WORD;
5184
5185 modrm = ldub_code(s->pc++);
5186 mod = (modrm >> 6) & 3;
5187 rm = (modrm & 7) | REX_B(s);
5188 op = (modrm >> 3) & 7;
5189 if (op >= 2 && b == 0xfe) {
5190 goto illegal_op;
5191 }
5192 if (CODE64(s)) {
5193 if (op == 2 || op == 4) {
5194 /* operand size for jumps is 64 bit */
5195 ot = OT_QUAD;
5196 } else if (op == 3 || op == 5) {
5197 /* for calls, the operand is 16 or 32 bit, even
5198 in long mode */
5199 ot = dflag ? OT_LONG : OT_WORD;
5200 } else if (op == 6) {
5201 /* default push size is 64 bit */
5202 ot = dflag ? OT_QUAD : OT_WORD;
5203 }
5204 }
5205 if (mod != 3) {
5206 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5207 if (op >= 2 && op != 3 && op != 5)
5208 gen_op_ld_T0_A0(ot + s->mem_index);
5209 } else {
5210 gen_op_mov_TN_reg(ot, 0, rm);
5211 }
5212
5213 switch(op) {
5214 case 0: /* inc Ev */
5215 if (mod != 3)
5216 opreg = OR_TMP0;
5217 else
5218 opreg = rm;
5219 gen_inc(s, ot, opreg, 1);
5220 break;
5221 case 1: /* dec Ev */
5222 if (mod != 3)
5223 opreg = OR_TMP0;
5224 else
5225 opreg = rm;
5226 gen_inc(s, ot, opreg, -1);
5227 break;
5228 case 2: /* call Ev */
5229 /* XXX: optimize if memory (no 'and' is necessary) */
5230#ifdef VBOX_WITH_CALL_RECORD
5231 if (s->record_call)
5232 gen_op_record_call();
5233#endif
5234 if (s->dflag == 0)
5235 gen_op_andl_T0_ffff();
5236 next_eip = s->pc - s->cs_base;
5237 gen_movtl_T1_im(next_eip);
5238 gen_push_T1(s);
5239 gen_op_jmp_T0();
5240 gen_eob(s);
5241 break;
5242 case 3: /* lcall Ev */
5243 gen_op_ld_T1_A0(ot + s->mem_index);
5244 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5245 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5246 do_lcall:
5247 if (s->pe && !s->vm86) {
5248 if (s->cc_op != CC_OP_DYNAMIC)
5249 gen_op_set_cc_op(s->cc_op);
5250 gen_jmp_im(pc_start - s->cs_base);
5251 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5252 tcg_gen_helper_0_4(helper_lcall_protected,
5253 cpu_tmp2_i32, cpu_T[1],
5254 tcg_const_i32(dflag),
5255 tcg_const_i32(s->pc - pc_start));
5256 } else {
5257 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5258 tcg_gen_helper_0_4(helper_lcall_real,
5259 cpu_tmp2_i32, cpu_T[1],
5260 tcg_const_i32(dflag),
5261 tcg_const_i32(s->pc - s->cs_base));
5262 }
5263 gen_eob(s);
5264 break;
5265 case 4: /* jmp Ev */
5266 if (s->dflag == 0)
5267 gen_op_andl_T0_ffff();
5268 gen_op_jmp_T0();
5269 gen_eob(s);
5270 break;
5271 case 5: /* ljmp Ev */
5272 gen_op_ld_T1_A0(ot + s->mem_index);
5273 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5274 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5275 do_ljmp:
5276 if (s->pe && !s->vm86) {
5277 if (s->cc_op != CC_OP_DYNAMIC)
5278 gen_op_set_cc_op(s->cc_op);
5279 gen_jmp_im(pc_start - s->cs_base);
5280 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
5281 tcg_gen_helper_0_3(helper_ljmp_protected,
5282 cpu_tmp2_i32,
5283 cpu_T[1],
5284 tcg_const_i32(s->pc - pc_start));
5285 } else {
5286 gen_op_movl_seg_T0_vm(R_CS);
5287 gen_op_movl_T0_T1();
5288 gen_op_jmp_T0();
5289 }
5290 gen_eob(s);
5291 break;
5292 case 6: /* push Ev */
5293 gen_push_T0(s);
5294 break;
5295 default:
5296 goto illegal_op;
5297 }
5298 break;
5299
5300 case 0x84: /* test Ev, Gv */
5301 case 0x85:
5302 if ((b & 1) == 0)
5303 ot = OT_BYTE;
5304 else
5305 ot = dflag + OT_WORD;
5306
5307 modrm = ldub_code(s->pc++);
5308 mod = (modrm >> 6) & 3;
5309 rm = (modrm & 7) | REX_B(s);
5310 reg = ((modrm >> 3) & 7) | rex_r;
5311
5312 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5313 gen_op_mov_TN_reg(ot, 1, reg);
5314 gen_op_testl_T0_T1_cc();
5315 s->cc_op = CC_OP_LOGICB + ot;
5316 break;
5317
5318 case 0xa8: /* test eAX, Iv */
5319 case 0xa9:
5320 if ((b & 1) == 0)
5321 ot = OT_BYTE;
5322 else
5323 ot = dflag + OT_WORD;
5324 val = insn_get(s, ot);
5325
5326 gen_op_mov_TN_reg(ot, 0, OR_EAX);
5327 gen_op_movl_T1_im(val);
5328 gen_op_testl_T0_T1_cc();
5329 s->cc_op = CC_OP_LOGICB + ot;
5330 break;
5331
5332 case 0x98: /* CWDE/CBW */
5333#ifdef TARGET_X86_64
5334 if (dflag == 2) {
5335 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5336 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5337 gen_op_mov_reg_T0(OT_QUAD, R_EAX);
5338 } else
5339#endif
5340 if (dflag == 1) {
5341 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5342 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5343 gen_op_mov_reg_T0(OT_LONG, R_EAX);
5344 } else {
5345 gen_op_mov_TN_reg(OT_BYTE, 0, R_EAX);
5346 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5347 gen_op_mov_reg_T0(OT_WORD, R_EAX);
5348 }
5349 break;
5350 case 0x99: /* CDQ/CWD */
5351#ifdef TARGET_X86_64
5352 if (dflag == 2) {
5353 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5354 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 63);
5355 gen_op_mov_reg_T0(OT_QUAD, R_EDX);
5356 } else
5357#endif
5358 if (dflag == 1) {
5359 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5360 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5361 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 31);
5362 gen_op_mov_reg_T0(OT_LONG, R_EDX);
5363 } else {
5364 gen_op_mov_TN_reg(OT_WORD, 0, R_EAX);
5365 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5366 tcg_gen_sari_tl(cpu_T[0], cpu_T[0], 15);
5367 gen_op_mov_reg_T0(OT_WORD, R_EDX);
5368 }
5369 break;
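        /* Worked example for the 16-bit CWD path above: AX = 0x8000 is
           sign-extended and arithmetically shifted right by 15, so DX
           becomes 0xFFFF; AX = 0x7FFF would leave DX = 0x0000. */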
5370 case 0x1af: /* imul Gv, Ev */
5371 case 0x69: /* imul Gv, Ev, I */
5372 case 0x6b:
5373 ot = dflag + OT_WORD;
5374 modrm = ldub_code(s->pc++);
5375 reg = ((modrm >> 3) & 7) | rex_r;
5376 if (b == 0x69)
5377 s->rip_offset = insn_const_size(ot);
5378 else if (b == 0x6b)
5379 s->rip_offset = 1;
5380 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5381 if (b == 0x69) {
5382 val = insn_get(s, ot);
5383 gen_op_movl_T1_im(val);
5384 } else if (b == 0x6b) {
5385 val = (int8_t)insn_get(s, OT_BYTE);
5386 gen_op_movl_T1_im(val);
5387 } else {
5388 gen_op_mov_TN_reg(ot, 1, reg);
5389 }
5390
5391#ifdef TARGET_X86_64
5392 if (ot == OT_QUAD) {
5393 tcg_gen_helper_1_2(helper_imulq_T0_T1, cpu_T[0], cpu_T[0], cpu_T[1]);
5394 } else
5395#endif
5396 if (ot == OT_LONG) {
5397#ifdef TARGET_X86_64
5398 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
5399 tcg_gen_ext32s_tl(cpu_T[1], cpu_T[1]);
5400 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5401 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5402 tcg_gen_ext32s_tl(cpu_tmp0, cpu_T[0]);
5403 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5404#else
5405 {
5406 TCGv t0, t1;
5407 t0 = tcg_temp_new(TCG_TYPE_I64);
5408 t1 = tcg_temp_new(TCG_TYPE_I64);
5409 tcg_gen_ext_i32_i64(t0, cpu_T[0]);
5410 tcg_gen_ext_i32_i64(t1, cpu_T[1]);
5411 tcg_gen_mul_i64(t0, t0, t1);
5412 tcg_gen_trunc_i64_i32(cpu_T[0], t0);
5413 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5414 tcg_gen_sari_tl(cpu_tmp0, cpu_T[0], 31);
5415 tcg_gen_shri_i64(t0, t0, 32);
5416 tcg_gen_trunc_i64_i32(cpu_T[1], t0);
5417 tcg_gen_sub_tl(cpu_cc_src, cpu_T[1], cpu_tmp0);
5418 }
5419#endif
5420 } else {
5421 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5422 tcg_gen_ext16s_tl(cpu_T[1], cpu_T[1]);
5423 /* XXX: use 32 bit mul which could be faster */
5424 tcg_gen_mul_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
5425 tcg_gen_mov_tl(cpu_cc_dst, cpu_T[0]);
5426 tcg_gen_ext16s_tl(cpu_tmp0, cpu_T[0]);
5427 tcg_gen_sub_tl(cpu_cc_src, cpu_T[0], cpu_tmp0);
5428 }
5429 gen_op_mov_reg_T0(ot, reg);
5430 s->cc_op = CC_OP_MULB + ot;
5431 break;
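        /* Overflow detection example for the 16-bit imul path above:
           cc_src = result - sext16(result) is non-zero exactly when the
           signed product does not fit in 16 bits. E.g. 0x4000 * 4 =
           0x10000: the low word is 0x0000, sext16 gives 0, so
           cc_src = 0x10000 and CF/OF are expected to be reported set. */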
5432 case 0x1c0:
5433 case 0x1c1: /* xadd Ev, Gv */
5434 if ((b & 1) == 0)
5435 ot = OT_BYTE;
5436 else
5437 ot = dflag + OT_WORD;
5438 modrm = ldub_code(s->pc++);
5439 reg = ((modrm >> 3) & 7) | rex_r;
5440 mod = (modrm >> 6) & 3;
5441 if (mod == 3) {
5442 rm = (modrm & 7) | REX_B(s);
5443 gen_op_mov_TN_reg(ot, 0, reg);
5444 gen_op_mov_TN_reg(ot, 1, rm);
5445 gen_op_addl_T0_T1();
5446 gen_op_mov_reg_T1(ot, reg);
5447 gen_op_mov_reg_T0(ot, rm);
5448 } else {
5449 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5450 gen_op_mov_TN_reg(ot, 0, reg);
5451 gen_op_ld_T1_A0(ot + s->mem_index);
5452 gen_op_addl_T0_T1();
5453 gen_op_st_T0_A0(ot + s->mem_index);
5454 gen_op_mov_reg_T1(ot, reg);
5455 }
5456 gen_op_update2_cc();
5457 s->cc_op = CC_OP_ADDB + ot;
5458 break;
5459 case 0x1b0:
5460 case 0x1b1: /* cmpxchg Ev, Gv */
5461 {
5462 int label1, label2;
5463 TCGv t0, t1, t2, a0;
5464
5465 if ((b & 1) == 0)
5466 ot = OT_BYTE;
5467 else
5468 ot = dflag + OT_WORD;
5469 modrm = ldub_code(s->pc++);
5470 reg = ((modrm >> 3) & 7) | rex_r;
5471 mod = (modrm >> 6) & 3;
5472 t0 = tcg_temp_local_new(TCG_TYPE_TL);
5473 t1 = tcg_temp_local_new(TCG_TYPE_TL);
5474 t2 = tcg_temp_local_new(TCG_TYPE_TL);
5475 a0 = tcg_temp_local_new(TCG_TYPE_TL);
5476 gen_op_mov_v_reg(ot, t1, reg);
5477 if (mod == 3) {
5478 rm = (modrm & 7) | REX_B(s);
5479 gen_op_mov_v_reg(ot, t0, rm);
5480 } else {
5481 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5482 tcg_gen_mov_tl(a0, cpu_A0);
5483 gen_op_ld_v(ot + s->mem_index, t0, a0);
5484 rm = 0; /* avoid warning */
5485 }
5486 label1 = gen_new_label();
5487 tcg_gen_ld_tl(t2, cpu_env, offsetof(CPUState, regs[R_EAX]));
5488 tcg_gen_sub_tl(t2, t2, t0);
5489 gen_extu(ot, t2);
5490 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, label1);
5491 if (mod == 3) {
5492 label2 = gen_new_label();
5493 gen_op_mov_reg_v(ot, R_EAX, t0);
5494 tcg_gen_br(label2);
5495 gen_set_label(label1);
5496 gen_op_mov_reg_v(ot, rm, t1);
5497 gen_set_label(label2);
5498 } else {
5499 tcg_gen_mov_tl(t1, t0);
5500 gen_op_mov_reg_v(ot, R_EAX, t0);
5501 gen_set_label(label1);
5502 /* always store */
5503 gen_op_st_v(ot + s->mem_index, t1, a0);
5504 }
5505 tcg_gen_mov_tl(cpu_cc_src, t0);
5506 tcg_gen_mov_tl(cpu_cc_dst, t2);
5507 s->cc_op = CC_OP_SUBB + ot;
5508 tcg_temp_free(t0);
5509 tcg_temp_free(t1);
5510 tcg_temp_free(t2);
5511 tcg_temp_free(a0);
5512 }
5513 break;
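        /* Note on the memory form above: the "always store" writes t1 back
           on both outcomes (old value on mismatch, new value on match), so
           the write cycle -- and any fault it raises -- happens
           unconditionally, as on real hardware. Example: EAX = 1,
           [mem] = 1, reg = 7 -> 7 is stored and ZF is set; with [mem] = 2
           the value 2 is rewritten, EAX becomes 2 and ZF is cleared. */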
5514 case 0x1c7: /* cmpxchg8b */
5515 modrm = ldub_code(s->pc++);
5516 mod = (modrm >> 6) & 3;
5517 if ((mod == 3) || ((modrm & 0x38) != 0x8))
5518 goto illegal_op;
5519#ifdef TARGET_X86_64
5520 if (dflag == 2) {
5521 if (!(s->cpuid_ext_features & CPUID_EXT_CX16))
5522 goto illegal_op;
5523 gen_jmp_im(pc_start - s->cs_base);
5524 if (s->cc_op != CC_OP_DYNAMIC)
5525 gen_op_set_cc_op(s->cc_op);
5526 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5527 tcg_gen_helper_0_1(helper_cmpxchg16b, cpu_A0);
5528 } else
5529#endif
5530 {
5531 if (!(s->cpuid_features & CPUID_CX8))
5532 goto illegal_op;
5533 gen_jmp_im(pc_start - s->cs_base);
5534 if (s->cc_op != CC_OP_DYNAMIC)
5535 gen_op_set_cc_op(s->cc_op);
5536 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5537 tcg_gen_helper_0_1(helper_cmpxchg8b, cpu_A0);
5538 }
5539 s->cc_op = CC_OP_EFLAGS;
5540 break;
5541
5542 /**************************/
5543 /* push/pop */
5544 case 0x50 ... 0x57: /* push */
5545 gen_op_mov_TN_reg(OT_LONG, 0, (b & 7) | REX_B(s));
5546 gen_push_T0(s);
5547 break;
5548 case 0x58 ... 0x5f: /* pop */
5549 if (CODE64(s)) {
5550 ot = dflag ? OT_QUAD : OT_WORD;
5551 } else {
5552 ot = dflag + OT_WORD;
5553 }
5554 gen_pop_T0(s);
5555 /* NOTE: order is important for pop %sp */
5556 gen_pop_update(s);
5557 gen_op_mov_reg_T0(ot, (b & 7) | REX_B(s));
5558 break;
5559 case 0x60: /* pusha */
5560 if (CODE64(s))
5561 goto illegal_op;
5562 gen_pusha(s);
5563 break;
5564 case 0x61: /* popa */
5565 if (CODE64(s))
5566 goto illegal_op;
5567 gen_popa(s);
5568 break;
5569 case 0x68: /* push Iv */
5570 case 0x6a:
5571 if (CODE64(s)) {
5572 ot = dflag ? OT_QUAD : OT_WORD;
5573 } else {
5574 ot = dflag + OT_WORD;
5575 }
5576 if (b == 0x68)
5577 val = insn_get(s, ot);
5578 else
5579 val = (int8_t)insn_get(s, OT_BYTE);
5580 gen_op_movl_T0_im(val);
5581 gen_push_T0(s);
5582 break;
5583 case 0x8f: /* pop Ev */
5584 if (CODE64(s)) {
5585 ot = dflag ? OT_QUAD : OT_WORD;
5586 } else {
5587 ot = dflag + OT_WORD;
5588 }
5589 modrm = ldub_code(s->pc++);
5590 mod = (modrm >> 6) & 3;
5591 gen_pop_T0(s);
5592 if (mod == 3) {
5593 /* NOTE: order is important for pop %sp */
5594 gen_pop_update(s);
5595 rm = (modrm & 7) | REX_B(s);
5596 gen_op_mov_reg_T0(ot, rm);
5597 } else {
5598 /* NOTE: order is important too for MMU exceptions */
5599 s->popl_esp_hack = 1 << ot;
5600 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5601 s->popl_esp_hack = 0;
5602 gen_pop_update(s);
5603 }
5604 break;
5605 case 0xc8: /* enter */
5606 {
5607 int level;
5608 val = lduw_code(s->pc);
5609 s->pc += 2;
5610 level = ldub_code(s->pc++);
5611 gen_enter(s, val, level);
5612 }
5613 break;
5614 case 0xc9: /* leave */
5615 /* XXX: exception not precise (ESP is updated before potential exception) */
5616 if (CODE64(s)) {
5617 gen_op_mov_TN_reg(OT_QUAD, 0, R_EBP);
5618 gen_op_mov_reg_T0(OT_QUAD, R_ESP);
5619 } else if (s->ss32) {
5620 gen_op_mov_TN_reg(OT_LONG, 0, R_EBP);
5621 gen_op_mov_reg_T0(OT_LONG, R_ESP);
5622 } else {
5623 gen_op_mov_TN_reg(OT_WORD, 0, R_EBP);
5624 gen_op_mov_reg_T0(OT_WORD, R_ESP);
5625 }
5626 gen_pop_T0(s);
5627 if (CODE64(s)) {
5628 ot = dflag ? OT_QUAD : OT_WORD;
5629 } else {
5630 ot = dflag + OT_WORD;
5631 }
5632 gen_op_mov_reg_T0(ot, R_EBP);
5633 gen_pop_update(s);
5634 break;
5635 case 0x06: /* push es */
5636 case 0x0e: /* push cs */
5637 case 0x16: /* push ss */
5638 case 0x1e: /* push ds */
5639 if (CODE64(s))
5640 goto illegal_op;
5641 gen_op_movl_T0_seg(b >> 3);
5642 gen_push_T0(s);
5643 break;
5644 case 0x1a0: /* push fs */
5645 case 0x1a8: /* push gs */
5646 gen_op_movl_T0_seg((b >> 3) & 7);
5647 gen_push_T0(s);
5648 break;
5649 case 0x07: /* pop es */
5650 case 0x17: /* pop ss */
5651 case 0x1f: /* pop ds */
5652 if (CODE64(s))
5653 goto illegal_op;
5654 reg = b >> 3;
5655 gen_pop_T0(s);
5656 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5657 gen_pop_update(s);
5658 if (reg == R_SS) {
5659 /* if reg == SS, inhibit interrupts/trace. */
5660 /* If several instructions disable interrupts, only the
5661 _first_ does it */
5662 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5663 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5664 s->tf = 0;
5665 }
5666 if (s->is_jmp) {
5667 gen_jmp_im(s->pc - s->cs_base);
5668 gen_eob(s);
5669 }
5670 break;
5671 case 0x1a1: /* pop fs */
5672 case 0x1a9: /* pop gs */
5673 gen_pop_T0(s);
5674 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
5675 gen_pop_update(s);
5676 if (s->is_jmp) {
5677 gen_jmp_im(s->pc - s->cs_base);
5678 gen_eob(s);
5679 }
5680 break;
5681
5682 /**************************/
5683 /* mov */
5684 case 0x88:
5685 case 0x89: /* mov Gv, Ev */
5686 if ((b & 1) == 0)
5687 ot = OT_BYTE;
5688 else
5689 ot = dflag + OT_WORD;
5690 modrm = ldub_code(s->pc++);
5691 reg = ((modrm >> 3) & 7) | rex_r;
5692
5693 /* generate a generic store */
5694 gen_ldst_modrm(s, modrm, ot, reg, 1);
5695 break;
5696 case 0xc6:
5697 case 0xc7: /* mov Ev, Iv */
5698 if ((b & 1) == 0)
5699 ot = OT_BYTE;
5700 else
5701 ot = dflag + OT_WORD;
5702 modrm = ldub_code(s->pc++);
5703 mod = (modrm >> 6) & 3;
5704 if (mod != 3) {
5705 s->rip_offset = insn_const_size(ot);
5706 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5707 }
5708 val = insn_get(s, ot);
5709 gen_op_movl_T0_im(val);
5710 if (mod != 3)
5711 gen_op_st_T0_A0(ot + s->mem_index);
5712 else
5713 gen_op_mov_reg_T0(ot, (modrm & 7) | REX_B(s));
5714 break;
5715 case 0x8a:
5716 case 0x8b: /* mov Ev, Gv */
5717#ifdef VBOX /* dtrace hot fix */
5718 if (prefixes & PREFIX_LOCK)
5719 goto illegal_op;
5720#endif
5721 if ((b & 1) == 0)
5722 ot = OT_BYTE;
5723 else
5724 ot = OT_WORD + dflag;
5725 modrm = ldub_code(s->pc++);
5726 reg = ((modrm >> 3) & 7) | rex_r;
5727
5728 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5729 gen_op_mov_reg_T0(ot, reg);
5730 break;
5731 case 0x8e: /* mov seg, Gv */
5732 modrm = ldub_code(s->pc++);
5733 reg = (modrm >> 3) & 7;
5734 if (reg >= 6 || reg == R_CS)
5735 goto illegal_op;
5736 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5737 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
5738 if (reg == R_SS) {
5739 /* if reg == SS, inhibit interrupts/trace */
5740 /* If several instructions disable interrupts, only the
5741 _first_ does it */
5742 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5743 tcg_gen_helper_0_0(helper_set_inhibit_irq);
5744 s->tf = 0;
5745 }
5746 if (s->is_jmp) {
5747 gen_jmp_im(s->pc - s->cs_base);
5748 gen_eob(s);
5749 }
5750 break;
5751 case 0x8c: /* mov Gv, seg */
5752 modrm = ldub_code(s->pc++);
5753 reg = (modrm >> 3) & 7;
5754 mod = (modrm >> 6) & 3;
5755 if (reg >= 6)
5756 goto illegal_op;
5757 gen_op_movl_T0_seg(reg);
5758 if (mod == 3)
5759 ot = OT_WORD + dflag;
5760 else
5761 ot = OT_WORD;
5762 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5763 break;
5764
5765 case 0x1b6: /* movzbS Gv, Eb */
5766 case 0x1b7: /* movzwS Gv, Ew */
5767 case 0x1be: /* movsbS Gv, Eb */
5768 case 0x1bf: /* movswS Gv, Ew */
5769 {
5770 int d_ot;
5771 /* d_ot is the size of destination */
5772 d_ot = dflag + OT_WORD;
5773 /* ot is the size of source */
5774 ot = (b & 1) + OT_BYTE;
5775 modrm = ldub_code(s->pc++);
5776 reg = ((modrm >> 3) & 7) | rex_r;
5777 mod = (modrm >> 6) & 3;
5778 rm = (modrm & 7) | REX_B(s);
5779
5780 if (mod == 3) {
5781 gen_op_mov_TN_reg(ot, 0, rm);
5782 switch(ot | (b & 8)) {
5783 case OT_BYTE:
5784 tcg_gen_ext8u_tl(cpu_T[0], cpu_T[0]);
5785 break;
5786 case OT_BYTE | 8:
5787 tcg_gen_ext8s_tl(cpu_T[0], cpu_T[0]);
5788 break;
5789 case OT_WORD:
5790 tcg_gen_ext16u_tl(cpu_T[0], cpu_T[0]);
5791 break;
5792 default:
5793 case OT_WORD | 8:
5794 tcg_gen_ext16s_tl(cpu_T[0], cpu_T[0]);
5795 break;
5796 }
5797 gen_op_mov_reg_T0(d_ot, reg);
5798 } else {
5799 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5800 if (b & 8) {
5801 gen_op_lds_T0_A0(ot + s->mem_index);
5802 } else {
5803 gen_op_ldu_T0_A0(ot + s->mem_index);
5804 }
5805 gen_op_mov_reg_T0(d_ot, reg);
5806 }
5807 }
5808 break;
5809
5810 case 0x8d: /* lea */
5811 ot = dflag + OT_WORD;
5812 modrm = ldub_code(s->pc++);
5813 mod = (modrm >> 6) & 3;
5814 if (mod == 3)
5815 goto illegal_op;
5816 reg = ((modrm >> 3) & 7) | rex_r;
5817 /* we must ensure that no segment is added */
5818 s->override = -1;
5819 val = s->addseg;
5820 s->addseg = 0;
5821 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5822 s->addseg = val;
5823 gen_op_mov_reg_A0(ot - OT_WORD, reg);
5824 break;
5825
5826 case 0xa0: /* mov EAX, Ov */
5827 case 0xa1:
5828 case 0xa2: /* mov Ov, EAX */
5829 case 0xa3:
5830 {
5831 target_ulong offset_addr;
5832
5833 if ((b & 1) == 0)
5834 ot = OT_BYTE;
5835 else
5836 ot = dflag + OT_WORD;
5837#ifdef TARGET_X86_64
5838 if (s->aflag == 2) {
5839 offset_addr = ldq_code(s->pc);
5840 s->pc += 8;
5841 gen_op_movq_A0_im(offset_addr);
5842 } else
5843#endif
5844 {
5845 if (s->aflag) {
5846 offset_addr = insn_get(s, OT_LONG);
5847 } else {
5848 offset_addr = insn_get(s, OT_WORD);
5849 }
5850 gen_op_movl_A0_im(offset_addr);
5851 }
5852 gen_add_A0_ds_seg(s);
5853 if ((b & 2) == 0) {
5854 gen_op_ld_T0_A0(ot + s->mem_index);
5855 gen_op_mov_reg_T0(ot, R_EAX);
5856 } else {
5857 gen_op_mov_TN_reg(ot, 0, R_EAX);
5858 gen_op_st_T0_A0(ot + s->mem_index);
5859 }
5860 }
5861 break;
5862 case 0xd7: /* xlat */
5863#ifdef TARGET_X86_64
5864 if (s->aflag == 2) {
5865 gen_op_movq_A0_reg(R_EBX);
5866 gen_op_mov_TN_reg(OT_QUAD, 0, R_EAX);
5867 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5868 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5869 } else
5870#endif
5871 {
5872 gen_op_movl_A0_reg(R_EBX);
5873 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX);
5874 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xff);
5875 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_T[0]);
5876 if (s->aflag == 0)
5877 gen_op_andl_A0_ffff();
5878 else
5879 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffffffff);
5880 }
5881 gen_add_A0_ds_seg(s);
5882 gen_op_ldu_T0_A0(OT_BYTE + s->mem_index);
5883 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
5884 break;
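        /* xlat worked example (aflag == 1): AL = 0x05, EBX = table base ->
           A0 = (EBX + 0x05) masked to 32 bits plus the DS segment base,
           and AL is reloaded with the byte found at that address. */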
5885 case 0xb0 ... 0xb7: /* mov R, Ib */
5886 val = insn_get(s, OT_BYTE);
5887 gen_op_movl_T0_im(val);
5888 gen_op_mov_reg_T0(OT_BYTE, (b & 7) | REX_B(s));
5889 break;
5890 case 0xb8 ... 0xbf: /* mov R, Iv */
5891#ifdef TARGET_X86_64
5892 if (dflag == 2) {
5893 uint64_t tmp;
5894 /* 64 bit case */
5895 tmp = ldq_code(s->pc);
5896 s->pc += 8;
5897 reg = (b & 7) | REX_B(s);
5898 gen_movtl_T0_im(tmp);
5899 gen_op_mov_reg_T0(OT_QUAD, reg);
5900 } else
5901#endif
5902 {
5903 ot = dflag ? OT_LONG : OT_WORD;
5904 val = insn_get(s, ot);
5905 reg = (b & 7) | REX_B(s);
5906 gen_op_movl_T0_im(val);
5907 gen_op_mov_reg_T0(ot, reg);
5908 }
5909 break;
5910
5911 case 0x91 ... 0x97: /* xchg R, EAX */
5912 ot = dflag + OT_WORD;
5913 reg = (b & 7) | REX_B(s);
5914 rm = R_EAX;
5915 goto do_xchg_reg;
5916 case 0x86:
5917 case 0x87: /* xchg Ev, Gv */
5918 if ((b & 1) == 0)
5919 ot = OT_BYTE;
5920 else
5921 ot = dflag + OT_WORD;
5922 modrm = ldub_code(s->pc++);
5923 reg = ((modrm >> 3) & 7) | rex_r;
5924 mod = (modrm >> 6) & 3;
5925 if (mod == 3) {
5926 rm = (modrm & 7) | REX_B(s);
5927 do_xchg_reg:
5928 gen_op_mov_TN_reg(ot, 0, reg);
5929 gen_op_mov_TN_reg(ot, 1, rm);
5930 gen_op_mov_reg_T0(ot, rm);
5931 gen_op_mov_reg_T1(ot, reg);
5932 } else {
5933 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5934 gen_op_mov_TN_reg(ot, 0, reg);
5935 /* for xchg, lock is implicit */
5936 if (!(prefixes & PREFIX_LOCK))
5937 tcg_gen_helper_0_0(helper_lock);
5938 gen_op_ld_T1_A0(ot + s->mem_index);
5939 gen_op_st_T0_A0(ot + s->mem_index);
5940 if (!(prefixes & PREFIX_LOCK))
5941 tcg_gen_helper_0_0(helper_unlock);
5942 gen_op_mov_reg_T1(ot, reg);
5943 }
5944 break;
5945 case 0xc4: /* les Gv */
5946 if (CODE64(s))
5947 goto illegal_op;
5948 op = R_ES;
5949 goto do_lxx;
5950 case 0xc5: /* lds Gv */
5951 if (CODE64(s))
5952 goto illegal_op;
5953 op = R_DS;
5954 goto do_lxx;
5955 case 0x1b2: /* lss Gv */
5956 op = R_SS;
5957 goto do_lxx;
5958 case 0x1b4: /* lfs Gv */
5959 op = R_FS;
5960 goto do_lxx;
5961 case 0x1b5: /* lgs Gv */
5962 op = R_GS;
5963 do_lxx:
5964 ot = dflag ? OT_LONG : OT_WORD;
5965 modrm = ldub_code(s->pc++);
5966 reg = ((modrm >> 3) & 7) | rex_r;
5967 mod = (modrm >> 6) & 3;
5968 if (mod == 3)
5969 goto illegal_op;
5970 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5971 gen_op_ld_T1_A0(ot + s->mem_index);
5972 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
5973 /* load the segment first to handle exceptions properly */
5974 gen_op_ldu_T0_A0(OT_WORD + s->mem_index);
5975 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
5976 /* then put the data */
5977 gen_op_mov_reg_T1(ot, reg);
5978 if (s->is_jmp) {
5979 gen_jmp_im(s->pc - s->cs_base);
5980 gen_eob(s);
5981 }
5982 break;
5983
5984 /************************/
5985 /* shifts */
5986 case 0xc0:
5987 case 0xc1:
5988 /* shift Ev,Ib */
5989 shift = 2;
5990 grp2:
5991 {
5992 if ((b & 1) == 0)
5993 ot = OT_BYTE;
5994 else
5995 ot = dflag + OT_WORD;
5996
5997 modrm = ldub_code(s->pc++);
5998 mod = (modrm >> 6) & 3;
5999 op = (modrm >> 3) & 7;
6000
6001 if (mod != 3) {
6002 if (shift == 2) {
6003 s->rip_offset = 1;
6004 }
6005 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6006 opreg = OR_TMP0;
6007 } else {
6008 opreg = (modrm & 7) | REX_B(s);
6009 }
6010
6011 /* simpler op */
6012 if (shift == 0) {
6013 gen_shift(s, op, ot, opreg, OR_ECX);
6014 } else {
6015 if (shift == 2) {
6016 shift = ldub_code(s->pc++);
6017 }
6018 gen_shifti(s, op, ot, opreg, shift);
6019 }
6020 }
6021 break;
6022 case 0xd0:
6023 case 0xd1:
6024 /* shift Ev,1 */
6025 shift = 1;
6026 goto grp2;
6027 case 0xd2:
6028 case 0xd3:
6029 /* shift Ev,cl */
6030 shift = 0;
6031 goto grp2;
6032
6033 case 0x1a4: /* shld imm */
6034 op = 0;
6035 shift = 1;
6036 goto do_shiftd;
6037 case 0x1a5: /* shld cl */
6038 op = 0;
6039 shift = 0;
6040 goto do_shiftd;
6041 case 0x1ac: /* shrd imm */
6042 op = 1;
6043 shift = 1;
6044 goto do_shiftd;
6045 case 0x1ad: /* shrd cl */
6046 op = 1;
6047 shift = 0;
6048 do_shiftd:
6049 ot = dflag + OT_WORD;
6050 modrm = ldub_code(s->pc++);
6051 mod = (modrm >> 6) & 3;
6052 rm = (modrm & 7) | REX_B(s);
6053 reg = ((modrm >> 3) & 7) | rex_r;
6054 if (mod != 3) {
6055 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6056 opreg = OR_TMP0;
6057 } else {
6058 opreg = rm;
6059 }
6060 gen_op_mov_TN_reg(ot, 1, reg);
6061
6062 if (shift) {
6063 val = ldub_code(s->pc++);
6064 tcg_gen_movi_tl(cpu_T3, val);
6065 } else {
6066 tcg_gen_ld_tl(cpu_T3, cpu_env, offsetof(CPUState, regs[R_ECX]));
6067 }
6068 gen_shiftd_rm_T1_T3(s, ot, opreg, op);
6069 break;
6070
6071 /************************/
6072 /* floats */
6073 case 0xd8 ... 0xdf:
6074 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
6075 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
6076 /* XXX: what to do if illegal op? */
6077 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6078 break;
6079 }
6080 modrm = ldub_code(s->pc++);
6081 mod = (modrm >> 6) & 3;
6082 rm = modrm & 7;
6083 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
6084 if (mod != 3) {
6085 /* memory op */
6086 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6087 switch(op) {
6088 case 0x00 ... 0x07: /* fxxxs */
6089 case 0x10 ... 0x17: /* fixxxl */
6090 case 0x20 ... 0x27: /* fxxxl */
6091 case 0x30 ... 0x37: /* fixxx */
6092 {
6093 int op1;
6094 op1 = op & 7;
6095
6096 switch(op >> 4) {
6097 case 0:
6098 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6099 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6100 tcg_gen_helper_0_1(helper_flds_FT0, cpu_tmp2_i32);
6101 break;
6102 case 1:
6103 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6104 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6105 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
6106 break;
6107 case 2:
6108 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6109 (s->mem_index >> 2) - 1);
6110 tcg_gen_helper_0_1(helper_fldl_FT0, cpu_tmp1_i64);
6111 break;
6112 case 3:
6113 default:
6114 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
6115 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6116 tcg_gen_helper_0_1(helper_fildl_FT0, cpu_tmp2_i32);
6117 break;
6118 }
6119
6120 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
6121 if (op1 == 3) {
6122 /* fcomp needs pop */
6123 tcg_gen_helper_0_0(helper_fpop);
6124 }
6125 }
6126 break;
6127 case 0x08: /* flds */
6128 case 0x0a: /* fsts */
6129 case 0x0b: /* fstps */
6130 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
6131 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
6132 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
6133 switch(op & 7) {
6134 case 0:
6135 switch(op >> 4) {
6136 case 0:
6137 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6138 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6139 tcg_gen_helper_0_1(helper_flds_ST0, cpu_tmp2_i32);
6140 break;
6141 case 1:
6142 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
6143 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6144 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
6145 break;
6146 case 2:
6147 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6148 (s->mem_index >> 2) - 1);
6149 tcg_gen_helper_0_1(helper_fldl_ST0, cpu_tmp1_i64);
6150 break;
6151 case 3:
6152 default:
6153 gen_op_lds_T0_A0(OT_WORD + s->mem_index);
6154 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6155 tcg_gen_helper_0_1(helper_fildl_ST0, cpu_tmp2_i32);
6156 break;
6157 }
6158 break;
6159 case 1:
6160 /* XXX: the corresponding CPUID bit must be tested ! */
6161 switch(op >> 4) {
6162 case 1:
6163 tcg_gen_helper_1_0(helper_fisttl_ST0, cpu_tmp2_i32);
6164 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6165 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6166 break;
6167 case 2:
6168 tcg_gen_helper_1_0(helper_fisttll_ST0, cpu_tmp1_i64);
6169 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6170 (s->mem_index >> 2) - 1);
6171 break;
6172 case 3:
6173 default:
6174 tcg_gen_helper_1_0(helper_fistt_ST0, cpu_tmp2_i32);
6175 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6176 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6177 break;
6178 }
6179 tcg_gen_helper_0_0(helper_fpop);
6180 break;
6181 default:
6182 switch(op >> 4) {
6183 case 0:
6184 tcg_gen_helper_1_0(helper_fsts_ST0, cpu_tmp2_i32);
6185 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6186 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6187 break;
6188 case 1:
6189 tcg_gen_helper_1_0(helper_fistl_ST0, cpu_tmp2_i32);
6190 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6191 gen_op_st_T0_A0(OT_LONG + s->mem_index);
6192 break;
6193 case 2:
6194 tcg_gen_helper_1_0(helper_fstl_ST0, cpu_tmp1_i64);
6195 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6196 (s->mem_index >> 2) - 1);
6197 break;
6198 case 3:
6199 default:
6200 tcg_gen_helper_1_0(helper_fist_ST0, cpu_tmp2_i32);
6201 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6202 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6203 break;
6204 }
6205 if ((op & 7) == 3)
6206 tcg_gen_helper_0_0(helper_fpop);
6207 break;
6208 }
6209 break;
6210 case 0x0c: /* fldenv mem */
6211 if (s->cc_op != CC_OP_DYNAMIC)
6212 gen_op_set_cc_op(s->cc_op);
6213 gen_jmp_im(pc_start - s->cs_base);
6214 tcg_gen_helper_0_2(helper_fldenv,
6215 cpu_A0, tcg_const_i32(s->dflag));
6216 break;
6217 case 0x0d: /* fldcw mem */
6218 gen_op_ld_T0_A0(OT_WORD + s->mem_index);
6219 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6220 tcg_gen_helper_0_1(helper_fldcw, cpu_tmp2_i32);
6221 break;
6222 case 0x0e: /* fnstenv mem */
6223 if (s->cc_op != CC_OP_DYNAMIC)
6224 gen_op_set_cc_op(s->cc_op);
6225 gen_jmp_im(pc_start - s->cs_base);
6226 tcg_gen_helper_0_2(helper_fstenv,
6227 cpu_A0, tcg_const_i32(s->dflag));
6228 break;
6229 case 0x0f: /* fnstcw mem */
6230 tcg_gen_helper_1_0(helper_fnstcw, cpu_tmp2_i32);
6231 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6232 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6233 break;
6234 case 0x1d: /* fldt mem */
6235 if (s->cc_op != CC_OP_DYNAMIC)
6236 gen_op_set_cc_op(s->cc_op);
6237 gen_jmp_im(pc_start - s->cs_base);
6238 tcg_gen_helper_0_1(helper_fldt_ST0, cpu_A0);
6239 break;
6240 case 0x1f: /* fstpt mem */
6241 if (s->cc_op != CC_OP_DYNAMIC)
6242 gen_op_set_cc_op(s->cc_op);
6243 gen_jmp_im(pc_start - s->cs_base);
6244 tcg_gen_helper_0_1(helper_fstt_ST0, cpu_A0);
6245 tcg_gen_helper_0_0(helper_fpop);
6246 break;
6247 case 0x2c: /* frstor mem */
6248 if (s->cc_op != CC_OP_DYNAMIC)
6249 gen_op_set_cc_op(s->cc_op);
6250 gen_jmp_im(pc_start - s->cs_base);
6251 tcg_gen_helper_0_2(helper_frstor,
6252 cpu_A0, tcg_const_i32(s->dflag));
6253 break;
6254 case 0x2e: /* fnsave mem */
6255 if (s->cc_op != CC_OP_DYNAMIC)
6256 gen_op_set_cc_op(s->cc_op);
6257 gen_jmp_im(pc_start - s->cs_base);
6258 tcg_gen_helper_0_2(helper_fsave,
6259 cpu_A0, tcg_const_i32(s->dflag));
6260 break;
6261 case 0x2f: /* fnstsw mem */
6262 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
6263 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6264 gen_op_st_T0_A0(OT_WORD + s->mem_index);
6265 break;
6266 case 0x3c: /* fbld */
6267 if (s->cc_op != CC_OP_DYNAMIC)
6268 gen_op_set_cc_op(s->cc_op);
6269 gen_jmp_im(pc_start - s->cs_base);
6270 tcg_gen_helper_0_1(helper_fbld_ST0, cpu_A0);
6271 break;
6272 case 0x3e: /* fbstp */
6273 if (s->cc_op != CC_OP_DYNAMIC)
6274 gen_op_set_cc_op(s->cc_op);
6275 gen_jmp_im(pc_start - s->cs_base);
6276 tcg_gen_helper_0_1(helper_fbst_ST0, cpu_A0);
6277 tcg_gen_helper_0_0(helper_fpop);
6278 break;
6279 case 0x3d: /* fildll */
6280 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0,
6281 (s->mem_index >> 2) - 1);
6282 tcg_gen_helper_0_1(helper_fildll_ST0, cpu_tmp1_i64);
6283 break;
6284 case 0x3f: /* fistpll */
6285 tcg_gen_helper_1_0(helper_fistll_ST0, cpu_tmp1_i64);
6286 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0,
6287 (s->mem_index >> 2) - 1);
6288 tcg_gen_helper_0_0(helper_fpop);
6289 break;
6290 default:
6291 goto illegal_op;
6292 }
6293 } else {
6294 /* register float ops */
6295 opreg = rm;
6296
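/* register-operand FPU forms: opreg is the ST(i) index from the modrm rm
   field; several undocumented aliases map onto the documented handlers */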
6297 switch(op) {
6298 case 0x08: /* fld sti */
6299 tcg_gen_helper_0_0(helper_fpush);
6300 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32((opreg + 1) & 7));
6301 break;
6302 case 0x09: /* fxchg sti */
6303 case 0x29: /* fxchg4 sti, undocumented op */
6304 case 0x39: /* fxchg7 sti, undocumented op */
6305 tcg_gen_helper_0_1(helper_fxchg_ST0_STN, tcg_const_i32(opreg));
6306 break;
6307 case 0x0a: /* grp d9/2 */
6308 switch(rm) {
6309 case 0: /* fnop */
6310 /* check exceptions (FreeBSD FPU probe) */
6311 if (s->cc_op != CC_OP_DYNAMIC)
6312 gen_op_set_cc_op(s->cc_op);
6313 gen_jmp_im(pc_start - s->cs_base);
6314 tcg_gen_helper_0_0(helper_fwait);
6315 break;
6316 default:
6317 goto illegal_op;
6318 }
6319 break;
6320 case 0x0c: /* grp d9/4 */
6321 switch(rm) {
6322 case 0: /* fchs */
6323 tcg_gen_helper_0_0(helper_fchs_ST0);
6324 break;
6325 case 1: /* fabs */
6326 tcg_gen_helper_0_0(helper_fabs_ST0);
6327 break;
6328 case 4: /* ftst */
6329 tcg_gen_helper_0_0(helper_fldz_FT0);
6330 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6331 break;
6332 case 5: /* fxam */
6333 tcg_gen_helper_0_0(helper_fxam_ST0);
6334 break;
6335 default:
6336 goto illegal_op;
6337 }
6338 break;
6339 case 0x0d: /* grp d9/5 */
6340 {
6341 switch(rm) {
6342 case 0: /* fld1 */
6343 tcg_gen_helper_0_0(helper_fpush);
6344 tcg_gen_helper_0_0(helper_fld1_ST0);
6345 break;
6346 case 1: /* fldl2t */
6347 tcg_gen_helper_0_0(helper_fpush);
6348 tcg_gen_helper_0_0(helper_fldl2t_ST0);
6349 break;
6350 case 2: /* fldl2e */
6351 tcg_gen_helper_0_0(helper_fpush);
6352 tcg_gen_helper_0_0(helper_fldl2e_ST0);
6353 break;
6354 case 3: /* fldpi */
6355 tcg_gen_helper_0_0(helper_fpush);
6356 tcg_gen_helper_0_0(helper_fldpi_ST0);
6357 break;
6358 case 4: /* fldlg2 */
6359 tcg_gen_helper_0_0(helper_fpush);
6360 tcg_gen_helper_0_0(helper_fldlg2_ST0);
6361 break;
6362 case 5: /* fldln2 */
6363 tcg_gen_helper_0_0(helper_fpush);
6364 tcg_gen_helper_0_0(helper_fldln2_ST0);
6365 break;
6366 case 6: /* fldz */
6367 tcg_gen_helper_0_0(helper_fpush);
6368 tcg_gen_helper_0_0(helper_fldz_ST0);
6369 break;
6370 default:
6371 goto illegal_op;
6372 }
6373 }
6374 break;
6375 case 0x0e: /* grp d9/6 */
6376 switch(rm) {
6377 case 0: /* f2xm1 */
6378 tcg_gen_helper_0_0(helper_f2xm1);
6379 break;
6380 case 1: /* fyl2x */
6381 tcg_gen_helper_0_0(helper_fyl2x);
6382 break;
6383 case 2: /* fptan */
6384 tcg_gen_helper_0_0(helper_fptan);
6385 break;
6386 case 3: /* fpatan */
6387 tcg_gen_helper_0_0(helper_fpatan);
6388 break;
6389 case 4: /* fxtract */
6390 tcg_gen_helper_0_0(helper_fxtract);
6391 break;
6392 case 5: /* fprem1 */
6393 tcg_gen_helper_0_0(helper_fprem1);
6394 break;
6395 case 6: /* fdecstp */
6396 tcg_gen_helper_0_0(helper_fdecstp);
6397 break;
6398 default:
6399 case 7: /* fincstp */
6400 tcg_gen_helper_0_0(helper_fincstp);
6401 break;
6402 }
6403 break;
6404 case 0x0f: /* grp d9/7 */
6405 switch(rm) {
6406 case 0: /* fprem */
6407 tcg_gen_helper_0_0(helper_fprem);
6408 break;
6409 case 1: /* fyl2xp1 */
6410 tcg_gen_helper_0_0(helper_fyl2xp1);
6411 break;
6412 case 2: /* fsqrt */
6413 tcg_gen_helper_0_0(helper_fsqrt);
6414 break;
6415 case 3: /* fsincos */
6416 tcg_gen_helper_0_0(helper_fsincos);
6417 break;
6418 case 5: /* fscale */
6419 tcg_gen_helper_0_0(helper_fscale);
6420 break;
6421 case 4: /* frndint */
6422 tcg_gen_helper_0_0(helper_frndint);
6423 break;
6424 case 6: /* fsin */
6425 tcg_gen_helper_0_0(helper_fsin);
6426 break;
6427 default:
6428 case 7: /* fcos */
6429 tcg_gen_helper_0_0(helper_fcos);
6430 break;
6431 }
6432 break;
6433 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
6434 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
6435 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
6436 {
6437 int op1;
6438
6439 op1 = op & 7;
6440 if (op >= 0x20) {
6441 tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));
6442 if (op >= 0x30)
6443 tcg_gen_helper_0_0(helper_fpop);
6444 } else {
6445 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6446 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);
6447 }
6448 }
6449 break;
6450 case 0x02: /* fcom */
6451 case 0x22: /* fcom2, undocumented op */
6452 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6453 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6454 break;
6455 case 0x03: /* fcomp */
6456 case 0x23: /* fcomp3, undocumented op */
6457 case 0x32: /* fcomp5, undocumented op */
6458 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6459 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6460 tcg_gen_helper_0_0(helper_fpop);
6461 break;
6462 case 0x15: /* da/5 */
6463 switch(rm) {
6464 case 1: /* fucompp */
6465 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
6466 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6467 tcg_gen_helper_0_0(helper_fpop);
6468 tcg_gen_helper_0_0(helper_fpop);
6469 break;
6470 default:
6471 goto illegal_op;
6472 }
6473 break;
6474 case 0x1c:
6475 switch(rm) {
6476 case 0: /* feni (287 only, just do nop here) */
6477 break;
6478 case 1: /* fdisi (287 only, just do nop here) */
6479 break;
6480 case 2: /* fclex */
6481 tcg_gen_helper_0_0(helper_fclex);
6482 break;
6483 case 3: /* fninit */
6484 tcg_gen_helper_0_0(helper_fninit);
6485 break;
6486 case 4: /* fsetpm (287 only, just do nop here) */
6487 break;
6488 default:
6489 goto illegal_op;
6490 }
6491 break;
6492 case 0x1d: /* fucomi */
6493 if (s->cc_op != CC_OP_DYNAMIC)
6494 gen_op_set_cc_op(s->cc_op);
6495 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6496 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
6497 s->cc_op = CC_OP_EFLAGS;
6498 break;
6499 case 0x1e: /* fcomi */
6500 if (s->cc_op != CC_OP_DYNAMIC)
6501 gen_op_set_cc_op(s->cc_op);
6502 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6503 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
6504 s->cc_op = CC_OP_EFLAGS;
6505 break;
6506 case 0x28: /* ffree sti */
6507 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
6508 break;
6509 case 0x2a: /* fst sti */
6510 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
6511 break;
6512 case 0x2b: /* fstp sti */
6513 case 0x0b: /* fstp1 sti, undocumented op */
6514 case 0x3a: /* fstp8 sti, undocumented op */
6515 case 0x3b: /* fstp9 sti, undocumented op */
6516 tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
6517 tcg_gen_helper_0_0(helper_fpop);
6518 break;
6519 case 0x2c: /* fucom st(i) */
6520 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6521 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6522 break;
6523 case 0x2d: /* fucomp st(i) */
6524 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6525 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
6526 tcg_gen_helper_0_0(helper_fpop);
6527 break;
6528 case 0x33: /* de/3 */
6529 switch(rm) {
6530 case 1: /* fcompp */
6531 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
6532 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
6533 tcg_gen_helper_0_0(helper_fpop);
6534 tcg_gen_helper_0_0(helper_fpop);
6535 break;
6536 default:
6537 goto illegal_op;
6538 }
6539 break;
6540 case 0x38: /* ffreep sti, undocumented op */
6541 tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
6542 tcg_gen_helper_0_0(helper_fpop);
6543 break;
6544 case 0x3c: /* df/4 */
6545 switch(rm) {
6546 case 0:
6547 tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
6548 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
6549 gen_op_mov_reg_T0(OT_WORD, R_EAX);
6550 break;
6551 default:
6552 goto illegal_op;
6553 }
6554 break;
6555 case 0x3d: /* fucomip */
6556 if (s->cc_op != CC_OP_DYNAMIC)
6557 gen_op_set_cc_op(s->cc_op);
6558 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6559 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
6560 tcg_gen_helper_0_0(helper_fpop);
6561 s->cc_op = CC_OP_EFLAGS;
6562 break;
6563 case 0x3e: /* fcomip */
6564 if (s->cc_op != CC_OP_DYNAMIC)
6565 gen_op_set_cc_op(s->cc_op);
6566 tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
6567 tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
6568 tcg_gen_helper_0_0(helper_fpop);
6569 s->cc_op = CC_OP_EFLAGS;
6570 break;
6571 case 0x10 ... 0x13: /* fcmovxx */
6572 case 0x18 ... 0x1b:
6573 {
6574 int op1, l1;
6575 static const uint8_t fcmov_cc[8] = {
6576 (JCC_B << 1),
6577 (JCC_Z << 1),
6578 (JCC_BE << 1),
6579 (JCC_P << 1),
6580 };
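/* the low two opcode bits pick the condition (b/e/be/u); bit 3 of 'op'
   distinguishes the negated forms (fcmovnb etc.) */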
6581 op1 = fcmov_cc[op & 3] | (((op >> 3) & 1) ^ 1);
6582 l1 = gen_new_label();
6583 gen_jcc1(s, s->cc_op, op1, l1);
6584 tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
6585 gen_set_label(l1);
6586 }
6587 break;
6588 default:
6589 goto illegal_op;
6590 }
6591 }
6592 break;
6593 /************************/
6594 /* string ops */
6595
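/* each string insn comes in a byte flavour (opcode bit 0 clear) and a
   word/long/quad flavour; REP/REPZ/REPNZ prefixes dispatch to the
   gen_repz_* loop generators */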
6596 case 0xa4: /* movsS */
6597 case 0xa5:
6598 if ((b & 1) == 0)
6599 ot = OT_BYTE;
6600 else
6601 ot = dflag + OT_WORD;
6602
6603 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6604 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6605 } else {
6606 gen_movs(s, ot);
6607 }
6608 break;
6609
6610 case 0xaa: /* stosS */
6611 case 0xab:
6612 if ((b & 1) == 0)
6613 ot = OT_BYTE;
6614 else
6615 ot = dflag + OT_WORD;
6616
6617 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6618 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6619 } else {
6620 gen_stos(s, ot);
6621 }
6622 break;
6623 case 0xac: /* lodsS */
6624 case 0xad:
6625 if ((b & 1) == 0)
6626 ot = OT_BYTE;
6627 else
6628 ot = dflag + OT_WORD;
6629 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6630 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6631 } else {
6632 gen_lods(s, ot);
6633 }
6634 break;
6635 case 0xae: /* scasS */
6636 case 0xaf:
6637 if ((b & 1) == 0)
6638 ot = OT_BYTE;
6639 else
6640 ot = dflag + OT_WORD;
6641 if (prefixes & PREFIX_REPNZ) {
6642 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6643 } else if (prefixes & PREFIX_REPZ) {
6644 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6645 } else {
6646 gen_scas(s, ot);
6647 s->cc_op = CC_OP_SUBB + ot;
6648 }
6649 break;
6650
6651 case 0xa6: /* cmpsS */
6652 case 0xa7:
6653 if ((b & 1) == 0)
6654 ot = OT_BYTE;
6655 else
6656 ot = dflag + OT_WORD;
6657 if (prefixes & PREFIX_REPNZ) {
6658 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
6659 } else if (prefixes & PREFIX_REPZ) {
6660 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
6661 } else {
6662 gen_cmps(s, ot);
6663 s->cc_op = CC_OP_SUBB + ot;
6664 }
6665 break;
6666 case 0x6c: /* insS */
6667 case 0x6d:
6668 if ((b & 1) == 0)
6669 ot = OT_BYTE;
6670 else
6671 ot = dflag ? OT_LONG : OT_WORD;
6672 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6673 gen_op_andl_T0_ffff();
6674 gen_check_io(s, ot, pc_start - s->cs_base,
6675 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes) | 4);
6676 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6677 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6678 } else {
6679 gen_ins(s, ot);
6680 if (use_icount) {
6681 gen_jmp(s, s->pc - s->cs_base);
6682 }
6683 }
6684 break;
6685 case 0x6e: /* outsS */
6686 case 0x6f:
6687 if ((b & 1) == 0)
6688 ot = OT_BYTE;
6689 else
6690 ot = dflag ? OT_LONG : OT_WORD;
6691 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6692 gen_op_andl_T0_ffff();
6693 gen_check_io(s, ot, pc_start - s->cs_base,
6694 svm_is_rep(prefixes) | 4);
6695 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
6696 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
6697 } else {
6698 gen_outs(s, ot);
6699 if (use_icount) {
6700 gen_jmp(s, s->pc - s->cs_base);
6701 }
6702 }
6703 break;
6704
6705 /************************/
6706 /* port I/O */
6707
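/* gen_check_io emits the IOPL/TSS I/O-permission check (plus the SVM IOIO
   intercept); with icount enabled, I/O insns are bracketed by
   gen_io_start/gen_io_end and the TB ends so the instruction count stays
   exact */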
6708 case 0xe4:
6709 case 0xe5:
6710 if ((b & 1) == 0)
6711 ot = OT_BYTE;
6712 else
6713 ot = dflag ? OT_LONG : OT_WORD;
6714 val = ldub_code(s->pc++);
6715 gen_op_movl_T0_im(val);
6716 gen_check_io(s, ot, pc_start - s->cs_base,
6717 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6718 if (use_icount)
6719 gen_io_start();
6720 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6721 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
6722 gen_op_mov_reg_T1(ot, R_EAX);
6723 if (use_icount) {
6724 gen_io_end();
6725 gen_jmp(s, s->pc - s->cs_base);
6726 }
6727 break;
6728 case 0xe6:
6729 case 0xe7:
6730 if ((b & 1) == 0)
6731 ot = OT_BYTE;
6732 else
6733 ot = dflag ? OT_LONG : OT_WORD;
6734 val = ldub_code(s->pc++);
6735 gen_op_movl_T0_im(val);
6736 gen_check_io(s, ot, pc_start - s->cs_base,
6737 svm_is_rep(prefixes));
6738#ifdef VBOX /* bird: Linux writes to this port to delay I/O. */
6739 if (val == 0x80)
6740 break;
6741#endif /* VBOX */
6742 gen_op_mov_TN_reg(ot, 1, R_EAX);
6743
6744 if (use_icount)
6745 gen_io_start();
6746 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6747 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6748 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6749 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6750 if (use_icount) {
6751 gen_io_end();
6752 gen_jmp(s, s->pc - s->cs_base);
6753 }
6754 break;
6755 case 0xec:
6756 case 0xed:
6757 if ((b & 1) == 0)
6758 ot = OT_BYTE;
6759 else
6760 ot = dflag ? OT_LONG : OT_WORD;
6761 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6762 gen_op_andl_T0_ffff();
6763 gen_check_io(s, ot, pc_start - s->cs_base,
6764 SVM_IOIO_TYPE_MASK | svm_is_rep(prefixes));
6765 if (use_icount)
6766 gen_io_start();
6767 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6768 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
6769 gen_op_mov_reg_T1(ot, R_EAX);
6770 if (use_icount) {
6771 gen_io_end();
6772 gen_jmp(s, s->pc - s->cs_base);
6773 }
6774 break;
6775 case 0xee:
6776 case 0xef:
6777 if ((b & 1) == 0)
6778 ot = OT_BYTE;
6779 else
6780 ot = dflag ? OT_LONG : OT_WORD;
6781 gen_op_mov_TN_reg(OT_WORD, 0, R_EDX);
6782 gen_op_andl_T0_ffff();
6783 gen_check_io(s, ot, pc_start - s->cs_base,
6784 svm_is_rep(prefixes));
6785 gen_op_mov_TN_reg(ot, 1, R_EAX);
6786
6787 if (use_icount)
6788 gen_io_start();
6789 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
6790 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
6791 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
6792 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
6793 if (use_icount) {
6794 gen_io_end();
6795 gen_jmp(s, s->pc - s->cs_base);
6796 }
6797 break;
6798
6799 /************************/
6800 /* control */
6801 case 0xc2: /* ret im */
6802 val = ldsw_code(s->pc);
6803 s->pc += 2;
6804 gen_pop_T0(s);
6805 if (CODE64(s) && s->dflag)
6806 s->dflag = 2;
6807 gen_stack_update(s, val + (2 << s->dflag));
6808 if (s->dflag == 0)
6809 gen_op_andl_T0_ffff();
6810 gen_op_jmp_T0();
6811 gen_eob(s);
6812 break;
6813 case 0xc3: /* ret */
6814 gen_pop_T0(s);
6815 gen_pop_update(s);
6816 if (s->dflag == 0)
6817 gen_op_andl_T0_ffff();
6818 gen_op_jmp_T0();
6819 gen_eob(s);
6820 break;
6821 case 0xca: /* lret im */
6822 val = ldsw_code(s->pc);
6823 s->pc += 2;
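/* do_lret: protected mode defers to helper_lret_protected for privilege
   and selector checks; real/vm86 mode pops CS:EIP inline and adds the
   immediate to ESP */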
6824 do_lret:
6825 if (s->pe && !s->vm86) {
6826 if (s->cc_op != CC_OP_DYNAMIC)
6827 gen_op_set_cc_op(s->cc_op);
6828 gen_jmp_im(pc_start - s->cs_base);
6829 tcg_gen_helper_0_2(helper_lret_protected,
6830 tcg_const_i32(s->dflag),
6831 tcg_const_i32(val));
6832 } else {
6833 gen_stack_A0(s);
6834 /* pop offset */
6835 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6836 if (s->dflag == 0)
6837 gen_op_andl_T0_ffff();
6838 /* NOTE: keeping EIP updated is not a problem in case of
6839 exception */
6840 gen_op_jmp_T0();
6841 /* pop selector */
6842 gen_op_addl_A0_im(2 << s->dflag);
6843 gen_op_ld_T0_A0(1 + s->dflag + s->mem_index);
6844 gen_op_movl_seg_T0_vm(R_CS);
6845 /* add stack offset */
6846 gen_stack_update(s, val + (4 << s->dflag));
6847 }
6848 gen_eob(s);
6849 break;
6850 case 0xcb: /* lret */
6851 val = 0;
6852 goto do_lret;
6853 case 0xcf: /* iret */
6854 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IRET);
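/* three iret flavours: real mode uses helper_iret_real, vm86 is gated by
   IOPL (and by VME in the VBOX build), protected mode goes through
   helper_iret_protected */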
6855 if (!s->pe) {
6856 /* real mode */
6857 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6858 s->cc_op = CC_OP_EFLAGS;
6859 } else if (s->vm86) {
6860#ifdef VBOX
6861 if (s->iopl != 3 && (!s->vme || s->dflag)) {
6862#else
6863 if (s->iopl != 3) {
6864#endif
6865 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6866 } else {
6867 tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
6868 s->cc_op = CC_OP_EFLAGS;
6869 }
6870 } else {
6871 if (s->cc_op != CC_OP_DYNAMIC)
6872 gen_op_set_cc_op(s->cc_op);
6873 gen_jmp_im(pc_start - s->cs_base);
6874 tcg_gen_helper_0_2(helper_iret_protected,
6875 tcg_const_i32(s->dflag),
6876 tcg_const_i32(s->pc - s->cs_base));
6877 s->cc_op = CC_OP_EFLAGS;
6878 }
6879 gen_eob(s);
6880 break;
6881 case 0xe8: /* call im */
6882 {
6883 if (dflag)
6884 tval = (int32_t)insn_get(s, OT_LONG);
6885 else
6886 tval = (int16_t)insn_get(s, OT_WORD);
6887 next_eip = s->pc - s->cs_base;
6888 tval += next_eip;
6889 if (s->dflag == 0)
6890 tval &= 0xffff;
6891 else if (!CODE64(s))
6892 tval &= 0xffffffff;
6893 gen_movtl_T0_im(next_eip);
6894 gen_push_T0(s);
6895 gen_jmp(s, tval);
6896 }
6897 break;
6898 case 0x9a: /* lcall im */
6899 {
6900 unsigned int selector, offset;
6901
6902 if (CODE64(s))
6903 goto illegal_op;
6904 ot = dflag ? OT_LONG : OT_WORD;
6905 offset = insn_get(s, ot);
6906 selector = insn_get(s, OT_WORD);
6907
6908 gen_op_movl_T0_im(selector);
6909 gen_op_movl_T1_imu(offset);
6910 }
6911 goto do_lcall;
6912 case 0xe9: /* jmp im */
6913 if (dflag)
6914 tval = (int32_t)insn_get(s, OT_LONG);
6915 else
6916 tval = (int16_t)insn_get(s, OT_WORD);
6917 tval += s->pc - s->cs_base;
6918 if (s->dflag == 0)
6919 tval &= 0xffff;
6920 else if(!CODE64(s))
6921 tval &= 0xffffffff;
6922 gen_jmp(s, tval);
6923 break;
6924 case 0xea: /* ljmp im */
6925 {
6926 unsigned int selector, offset;
6927
6928 if (CODE64(s))
6929 goto illegal_op;
6930 ot = dflag ? OT_LONG : OT_WORD;
6931 offset = insn_get(s, ot);
6932 selector = insn_get(s, OT_WORD);
6933
6934 gen_op_movl_T0_im(selector);
6935 gen_op_movl_T1_imu(offset);
6936 }
6937 goto do_ljmp;
6938 case 0xeb: /* jmp Jb */
6939 tval = (int8_t)insn_get(s, OT_BYTE);
6940 tval += s->pc - s->cs_base;
6941 if (s->dflag == 0)
6942 tval &= 0xffff;
6943 gen_jmp(s, tval);
6944 break;
6945 case 0x70 ... 0x7f: /* jcc Jb */
6946 tval = (int8_t)insn_get(s, OT_BYTE);
6947 goto do_jcc;
6948 case 0x180 ... 0x18f: /* jcc Jv */
6949 if (dflag) {
6950 tval = (int32_t)insn_get(s, OT_LONG);
6951 } else {
6952 tval = (int16_t)insn_get(s, OT_WORD);
6953 }
6954 do_jcc:
6955 next_eip = s->pc - s->cs_base;
6956 tval += next_eip;
6957 if (s->dflag == 0)
6958 tval &= 0xffff;
6959 gen_jcc(s, b, tval, next_eip);
6960 break;
6961
6962 case 0x190 ... 0x19f: /* setcc Gv */
6963 modrm = ldub_code(s->pc++);
6964 gen_setcc(s, b);
6965 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
6966 break;
6967 case 0x140 ... 0x14f: /* cmov Gv, Ev */
6968 {
6969 int l1;
6970 TCGv t0;
6971
6972 ot = dflag + OT_WORD;
6973 modrm = ldub_code(s->pc++);
6974 reg = ((modrm >> 3) & 7) | rex_r;
6975 mod = (modrm >> 6) & 3;
6976 t0 = tcg_temp_local_new(TCG_TYPE_TL);
6977 if (mod != 3) {
6978 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6979 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
6980 } else {
6981 rm = (modrm & 7) | REX_B(s);
6982 gen_op_mov_v_reg(ot, t0, rm);
6983 }
6984#ifdef TARGET_X86_64
6985 if (ot == OT_LONG) {
6986 /* XXX: specific Intel behaviour ? */
6987 l1 = gen_new_label();
6988 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6989 tcg_gen_st32_tl(t0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET);
6990 gen_set_label(l1);
6991 tcg_gen_movi_tl(cpu_tmp0, 0);
6992 tcg_gen_st32_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg]) + REG_LH_OFFSET);
6993 } else
6994#endif
6995 {
6996 l1 = gen_new_label();
6997 gen_jcc1(s, s->cc_op, b ^ 1, l1);
6998 gen_op_mov_reg_v(ot, reg, t0);
6999 gen_set_label(l1);
7000 }
7001 tcg_temp_free(t0);
7002 }
7003 break;
7004
7005 /************************/
7006 /* flags */
7007 case 0x9c: /* pushf */
7008 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PUSHF);
7009#ifdef VBOX
7010 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
7011#else
7012 if (s->vm86 && s->iopl != 3) {
7013#endif
7014 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7015 } else {
7016 if (s->cc_op != CC_OP_DYNAMIC)
7017 gen_op_set_cc_op(s->cc_op);
7018#ifdef VBOX
7019 if (s->vm86 && s->vme && s->iopl != 3)
7020 tcg_gen_helper_1_0(helper_read_eflags_vme, cpu_T[0]);
7021 else
7022#endif
7023 tcg_gen_helper_1_0(helper_read_eflags, cpu_T[0]);
7024 gen_push_T0(s);
7025 }
7026 break;
7027 case 0x9d: /* popf */
7028 gen_svm_check_intercept(s, pc_start, SVM_EXIT_POPF);
7029#ifdef VBOX
7030 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
7031#else
7032 if (s->vm86 && s->iopl != 3) {
7033#endif
7034 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7035 } else {
7036 gen_pop_T0(s);
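/* the writable EFLAGS mask shrinks with privilege: CPL 0 may also change
   IOPL, CPL <= IOPL may change IF, everyone may change TF/AC/ID/NT */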
7037 if (s->cpl == 0) {
7038 if (s->dflag) {
7039 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7040 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
7041 } else {
7042 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7043 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
7044 }
7045 } else {
7046 if (s->cpl <= s->iopl) {
7047 if (s->dflag) {
7048 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7049 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
7050 } else {
7051 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7052 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
7053 }
7054 } else {
7055 if (s->dflag) {
7056 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7057 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
7058 } else {
7059#ifdef VBOX
7060 if (s->vm86 && s->vme)
7061 tcg_gen_helper_0_1(helper_write_eflags_vme, cpu_T[0]);
7062 else
7063#endif
7064 tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
7065 tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
7066 }
7067 }
7068 }
7069 gen_pop_update(s);
7070 s->cc_op = CC_OP_EFLAGS;
7071 /* abort translation because TF flag may change */
7072 gen_jmp_im(s->pc - s->cs_base);
7073 gen_eob(s);
7074 }
7075 break;
7076 case 0x9e: /* sahf */
7077 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
7078 goto illegal_op;
7079 gen_op_mov_TN_reg(OT_BYTE, 0, R_AH);
7080 if (s->cc_op != CC_OP_DYNAMIC)
7081 gen_op_set_cc_op(s->cc_op);
7082 gen_compute_eflags(cpu_cc_src);
7083 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, CC_O);
7084 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], CC_S | CC_Z | CC_A | CC_P | CC_C);
7085 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_T[0]);
7086 s->cc_op = CC_OP_EFLAGS;
7087 break;
7088 case 0x9f: /* lahf */
7089 if (CODE64(s) && !(s->cpuid_ext3_features & CPUID_EXT3_LAHF_LM))
7090 goto illegal_op;
7091 if (s->cc_op != CC_OP_DYNAMIC)
7092 gen_op_set_cc_op(s->cc_op);
7093 gen_compute_eflags(cpu_T[0]);
7094 /* Note: gen_compute_eflags() only gives the condition codes */
7095 tcg_gen_ori_tl(cpu_T[0], cpu_T[0], 0x02);
7096 gen_op_mov_reg_T0(OT_BYTE, R_AH);
7097 break;
7098 case 0xf5: /* cmc */
7099 if (s->cc_op != CC_OP_DYNAMIC)
7100 gen_op_set_cc_op(s->cc_op);
7101 gen_compute_eflags(cpu_cc_src);
7102 tcg_gen_xori_tl(cpu_cc_src, cpu_cc_src, CC_C);
7103 s->cc_op = CC_OP_EFLAGS;
7104 break;
7105 case 0xf8: /* clc */
7106 if (s->cc_op != CC_OP_DYNAMIC)
7107 gen_op_set_cc_op(s->cc_op);
7108 gen_compute_eflags(cpu_cc_src);
7109 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_C);
7110 s->cc_op = CC_OP_EFLAGS;
7111 break;
7112 case 0xf9: /* stc */
7113 if (s->cc_op != CC_OP_DYNAMIC)
7114 gen_op_set_cc_op(s->cc_op);
7115 gen_compute_eflags(cpu_cc_src);
7116 tcg_gen_ori_tl(cpu_cc_src, cpu_cc_src, CC_C);
7117 s->cc_op = CC_OP_EFLAGS;
7118 break;
7119 case 0xfc: /* cld */
7120 tcg_gen_movi_i32(cpu_tmp2_i32, 1);
7121 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
7122 break;
7123 case 0xfd: /* std */
7124 tcg_gen_movi_i32(cpu_tmp2_i32, -1);
7125 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, offsetof(CPUState, df));
7126 break;
7127
7128 /************************/
7129 /* bit operations */
7130 case 0x1ba: /* bt/bts/btr/btc Gv, im */
7131 ot = dflag + OT_WORD;
7132 modrm = ldub_code(s->pc++);
7133 op = (modrm >> 3) & 7;
7134 mod = (modrm >> 6) & 3;
7135 rm = (modrm & 7) | REX_B(s);
7136 if (mod != 3) {
7137 s->rip_offset = 1;
7138 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7139 gen_op_ld_T0_A0(ot + s->mem_index);
7140 } else {
7141 gen_op_mov_TN_reg(ot, 0, rm);
7142 }
7143 /* load shift */
7144 val = ldub_code(s->pc++);
7145 gen_op_movl_T1_im(val);
7146 if (op < 4)
7147 goto illegal_op;
7148 op -= 4;
7149 goto bt_op;
7150 case 0x1a3: /* bt Gv, Ev */
7151 op = 0;
7152 goto do_btx;
7153 case 0x1ab: /* bts */
7154 op = 1;
7155 goto do_btx;
7156 case 0x1b3: /* btr */
7157 op = 2;
7158 goto do_btx;
7159 case 0x1bb: /* btc */
7160 op = 3;
7161 do_btx:
7162 ot = dflag + OT_WORD;
7163 modrm = ldub_code(s->pc++);
7164 reg = ((modrm >> 3) & 7) | rex_r;
7165 mod = (modrm >> 6) & 3;
7166 rm = (modrm & 7) | REX_B(s);
7167 gen_op_mov_TN_reg(OT_LONG, 1, reg);
7168 if (mod != 3) {
7169 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7170 /* memory operand: the bit offset also selects the addressed unit, so add it to the address as a displacement */
7171 gen_exts(ot, cpu_T[1]);
7172 tcg_gen_sari_tl(cpu_tmp0, cpu_T[1], 3 + ot);
7173 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, ot);
7174 tcg_gen_add_tl(cpu_A0, cpu_A0, cpu_tmp0);
7175 gen_op_ld_T0_A0(ot + s->mem_index);
7176 } else {
7177 gen_op_mov_TN_reg(ot, 0, rm);
7178 }
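/* bt_op: op selects bt/bts/btr/btc; the tested bit ends up in CF via
   cpu_cc_src, and the modifying forms write the operand back below */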
7179 bt_op:
7180 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], (1 << (3 + ot)) - 1);
7181 switch(op) {
7182 case 0:
7183 tcg_gen_shr_tl(cpu_cc_src, cpu_T[0], cpu_T[1]);
7184 tcg_gen_movi_tl(cpu_cc_dst, 0);
7185 break;
7186 case 1:
7187 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7188 tcg_gen_movi_tl(cpu_tmp0, 1);
7189 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7190 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7191 break;
7192 case 2:
7193 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7194 tcg_gen_movi_tl(cpu_tmp0, 1);
7195 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7196 tcg_gen_not_tl(cpu_tmp0, cpu_tmp0);
7197 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7198 break;
7199 default:
7200 case 3:
7201 tcg_gen_shr_tl(cpu_tmp4, cpu_T[0], cpu_T[1]);
7202 tcg_gen_movi_tl(cpu_tmp0, 1);
7203 tcg_gen_shl_tl(cpu_tmp0, cpu_tmp0, cpu_T[1]);
7204 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
7205 break;
7206 }
7207 s->cc_op = CC_OP_SARB + ot;
7208 if (op != 0) {
7209 if (mod != 3)
7210 gen_op_st_T0_A0(ot + s->mem_index);
7211 else
7212 gen_op_mov_reg_T0(ot, rm);
7213 tcg_gen_mov_tl(cpu_cc_src, cpu_tmp4);
7214 tcg_gen_movi_tl(cpu_cc_dst, 0);
7215 }
7216 break;
7217 case 0x1bc: /* bsf */
7218 case 0x1bd: /* bsr */
7219 {
7220 int label1;
7221 TCGv t0;
7222
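/* a zero source sets ZF and leaves the destination unchanged; otherwise
   the helper returns the index of the lowest (bsf) or highest (bsr) set
   bit */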
7223 ot = dflag + OT_WORD;
7224 modrm = ldub_code(s->pc++);
7225 reg = ((modrm >> 3) & 7) | rex_r;
7226 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
7227 gen_extu(ot, cpu_T[0]);
7228 label1 = gen_new_label();
7229 tcg_gen_movi_tl(cpu_cc_dst, 0);
7230 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7231 tcg_gen_mov_tl(t0, cpu_T[0]);
7232 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, label1);
7233 if (b & 1) {
7234 tcg_gen_helper_1_1(helper_bsr, cpu_T[0], t0);
7235 } else {
7236 tcg_gen_helper_1_1(helper_bsf, cpu_T[0], t0);
7237 }
7238 gen_op_mov_reg_T0(ot, reg);
7239 tcg_gen_movi_tl(cpu_cc_dst, 1);
7240 gen_set_label(label1);
7241 tcg_gen_discard_tl(cpu_cc_src);
7242 s->cc_op = CC_OP_LOGICB + ot;
7243 tcg_temp_free(t0);
7244 }
7245 break;
7246 /************************/
7247 /* bcd */
7248 case 0x27: /* daa */
7249 if (CODE64(s))
7250 goto illegal_op;
7251 if (s->cc_op != CC_OP_DYNAMIC)
7252 gen_op_set_cc_op(s->cc_op);
7253 tcg_gen_helper_0_0(helper_daa);
7254 s->cc_op = CC_OP_EFLAGS;
7255 break;
7256 case 0x2f: /* das */
7257 if (CODE64(s))
7258 goto illegal_op;
7259 if (s->cc_op != CC_OP_DYNAMIC)
7260 gen_op_set_cc_op(s->cc_op);
7261 tcg_gen_helper_0_0(helper_das);
7262 s->cc_op = CC_OP_EFLAGS;
7263 break;
7264 case 0x37: /* aaa */
7265 if (CODE64(s))
7266 goto illegal_op;
7267 if (s->cc_op != CC_OP_DYNAMIC)
7268 gen_op_set_cc_op(s->cc_op);
7269 tcg_gen_helper_0_0(helper_aaa);
7270 s->cc_op = CC_OP_EFLAGS;
7271 break;
7272 case 0x3f: /* aas */
7273 if (CODE64(s))
7274 goto illegal_op;
7275 if (s->cc_op != CC_OP_DYNAMIC)
7276 gen_op_set_cc_op(s->cc_op);
7277 tcg_gen_helper_0_0(helper_aas);
7278 s->cc_op = CC_OP_EFLAGS;
7279 break;
7280 case 0xd4: /* aam */
7281 if (CODE64(s))
7282 goto illegal_op;
7283 val = ldub_code(s->pc++);
7284 if (val == 0) {
7285 gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
7286 } else {
7287 tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
7288 s->cc_op = CC_OP_LOGICB;
7289 }
7290 break;
7291 case 0xd5: /* aad */
7292 if (CODE64(s))
7293 goto illegal_op;
7294 val = ldub_code(s->pc++);
7295 tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
7296 s->cc_op = CC_OP_LOGICB;
7297 break;
7298 /************************/
7299 /* misc */
7300 case 0x90: /* nop */
7301 /* XXX: xchg + rex handling */
7302 /* XXX: correct lock test for all insn */
7303 if (prefixes & PREFIX_LOCK)
7304 goto illegal_op;
7305 if (prefixes & PREFIX_REPZ) {
7306 gen_svm_check_intercept(s, pc_start, SVM_EXIT_PAUSE);
7307 }
7308 break;
7309 case 0x9b: /* fwait */
7310 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
7311 (HF_MP_MASK | HF_TS_MASK)) {
7312 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
7313 } else {
7314 if (s->cc_op != CC_OP_DYNAMIC)
7315 gen_op_set_cc_op(s->cc_op);
7316 gen_jmp_im(pc_start - s->cs_base);
7317 tcg_gen_helper_0_0(helper_fwait);
7318 }
7319 break;
7320 case 0xcc: /* int3 */
7321#ifdef VBOX
7322 if (s->vm86 && s->iopl != 3 && !s->vme) {
7323 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7324 } else
7325#endif
7326 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
7327 break;
7328 case 0xcd: /* int N */
7329 val = ldub_code(s->pc++);
7330#ifdef VBOX
7331 if (s->vm86 && s->iopl != 3 && !s->vme) {
7332#else
7333 if (s->vm86 && s->iopl != 3) {
7334#endif
7335 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7336 } else {
7337 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
7338 }
7339 break;
7340 case 0xce: /* into */
7341 if (CODE64(s))
7342 goto illegal_op;
7343 if (s->cc_op != CC_OP_DYNAMIC)
7344 gen_op_set_cc_op(s->cc_op);
7345 gen_jmp_im(pc_start - s->cs_base);
7346 tcg_gen_helper_0_1(helper_into, tcg_const_i32(s->pc - pc_start));
7347 break;
7348 case 0xf1: /* icebp (undocumented, exits to external debugger) */
7349 gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP);
7350#if 1
7351 gen_debug(s, pc_start - s->cs_base);
7352#else
7353 /* start debug */
7354 tb_flush(cpu_single_env);
7355 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
7356#endif
7357 break;
7358 case 0xfa: /* cli */
7359 if (!s->vm86) {
7360 if (s->cpl <= s->iopl) {
7361 tcg_gen_helper_0_0(helper_cli);
7362 } else {
7363 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7364 }
7365 } else {
7366 if (s->iopl == 3) {
7367 tcg_gen_helper_0_0(helper_cli);
7368#ifdef VBOX
7369 } else if (s->iopl != 3 && s->vme) {
7370 tcg_gen_helper_0_0(helper_cli_vme);
7371#endif
7372 } else {
7373 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7374 }
7375 }
7376 break;
7377 case 0xfb: /* sti */
7378 if (!s->vm86) {
7379 if (s->cpl <= s->iopl) {
7380 gen_sti:
7381 tcg_gen_helper_0_0(helper_sti);
7382 /* interrupts are enabled only after the insn following sti */
7383 /* if several consecutive insns inhibit interrupts, only the
7384 _first_ one opens the one-insn inhibit window */
7385 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
7386 tcg_gen_helper_0_0(helper_set_inhibit_irq);
7387 /* give a chance to handle pending irqs */
7388 gen_jmp_im(s->pc - s->cs_base);
7389 gen_eob(s);
7390 } else {
7391 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7392 }
7393 } else {
7394 if (s->iopl == 3) {
7395 goto gen_sti;
7396#ifdef VBOX
7397 } else if (s->iopl != 3 && s->vme) {
7398 tcg_gen_helper_0_0(helper_sti_vme);
7399 /* give a chance to handle pending irqs */
7400 gen_jmp_im(s->pc - s->cs_base);
7401 gen_eob(s);
7402#endif
7403 } else {
7404 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7405 }
7406 }
7407 break;
7408 case 0x62: /* bound */
7409 if (CODE64(s))
7410 goto illegal_op;
7411 ot = dflag ? OT_LONG : OT_WORD;
7412 modrm = ldub_code(s->pc++);
7413 reg = (modrm >> 3) & 7;
7414 mod = (modrm >> 6) & 3;
7415 if (mod == 3)
7416 goto illegal_op;
7417 gen_op_mov_TN_reg(ot, 0, reg);
7418 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7419 gen_jmp_im(pc_start - s->cs_base);
7420 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7421 if (ot == OT_WORD)
7422 tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
7423 else
7424 tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
7425 break;
7426 case 0x1c8 ... 0x1cf: /* bswap reg */
7427 reg = (b & 7) | REX_B(s);
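/* bswap: dflag == 2 swaps the whole 64-bit register, otherwise only the
   low 32 bits are byte-swapped (and zero-extended on 64-bit targets) */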
7428#ifdef TARGET_X86_64
7429 if (dflag == 2) {
7430 gen_op_mov_TN_reg(OT_QUAD, 0, reg);
7431 tcg_gen_bswap_i64(cpu_T[0], cpu_T[0]);
7432 gen_op_mov_reg_T0(OT_QUAD, reg);
7433 } else
7434 {
7435 TCGv tmp0;
7436 gen_op_mov_TN_reg(OT_LONG, 0, reg);
7437
7438 tmp0 = tcg_temp_new(TCG_TYPE_I32);
7439 tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
7440 tcg_gen_bswap_i32(tmp0, tmp0);
7441 tcg_gen_extu_i32_i64(cpu_T[0], tmp0);
7442 gen_op_mov_reg_T0(OT_LONG, reg);
7443 }
7444#else
7445 {
7446 gen_op_mov_TN_reg(OT_LONG, 0, reg);
7447 tcg_gen_bswap_i32(cpu_T[0], cpu_T[0]);
7448 gen_op_mov_reg_T0(OT_LONG, reg);
7449 }
7450#endif
7451 break;
7452 case 0xd6: /* salc */
7453 if (CODE64(s))
7454 goto illegal_op;
7455 if (s->cc_op != CC_OP_DYNAMIC)
7456 gen_op_set_cc_op(s->cc_op);
7457 gen_compute_eflags_c(cpu_T[0]);
7458 tcg_gen_neg_tl(cpu_T[0], cpu_T[0]);
7459 gen_op_mov_reg_T0(OT_BYTE, R_EAX);
7460 break;
7461 case 0xe0: /* loopnz */
7462 case 0xe1: /* loopz */
7463 case 0xe2: /* loop */
7464 case 0xe3: /* jecxz */
7465 {
7466 int l1, l2, l3;
7467
7468 tval = (int8_t)insn_get(s, OT_BYTE);
7469 next_eip = s->pc - s->cs_base;
7470 tval += next_eip;
7471 if (s->dflag == 0)
7472 tval &= 0xffff;
7473
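/* l1 = branch taken, l3 = not taken (also reached when ECX hits zero in
   loopz/loopnz), l2 = common exit; the loop forms decrement ECX first */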
7474 l1 = gen_new_label();
7475 l2 = gen_new_label();
7476 l3 = gen_new_label();
7477 b &= 3;
7478 switch(b) {
7479 case 0: /* loopnz */
7480 case 1: /* loopz */
7481 if (s->cc_op != CC_OP_DYNAMIC)
7482 gen_op_set_cc_op(s->cc_op);
7483 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7484 gen_op_jz_ecx(s->aflag, l3);
7485 gen_compute_eflags(cpu_tmp0);
7486 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, CC_Z);
7487 if (b == 0) {
7488 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
7489 } else {
7490 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_tmp0, 0, l1);
7491 }
7492 break;
7493 case 2: /* loop */
7494 gen_op_add_reg_im(s->aflag, R_ECX, -1);
7495 gen_op_jnz_ecx(s->aflag, l1);
7496 break;
7497 default:
7498 case 3: /* jcxz/jecxz */
7499 gen_op_jz_ecx(s->aflag, l1);
7500 break;
7501 }
7502
7503 gen_set_label(l3);
7504 gen_jmp_im(next_eip);
7505 tcg_gen_br(l2);
7506
7507 gen_set_label(l1);
7508 gen_jmp_im(tval);
7509 gen_set_label(l2);
7510 gen_eob(s);
7511 }
7512 break;
7513 case 0x130: /* wrmsr */
7514 case 0x132: /* rdmsr */
7515 if (s->cpl != 0) {
7516 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7517 } else {
7518 if (s->cc_op != CC_OP_DYNAMIC)
7519 gen_op_set_cc_op(s->cc_op);
7520 gen_jmp_im(pc_start - s->cs_base);
7521 if (b & 2) {
7522 tcg_gen_helper_0_0(helper_rdmsr);
7523 } else {
7524 tcg_gen_helper_0_0(helper_wrmsr);
7525 }
7526 }
7527 break;
7528 case 0x131: /* rdtsc */
7529 if (s->cc_op != CC_OP_DYNAMIC)
7530 gen_op_set_cc_op(s->cc_op);
7531 gen_jmp_im(pc_start - s->cs_base);
7532 if (use_icount)
7533 gen_io_start();
7534 tcg_gen_helper_0_0(helper_rdtsc);
7535 if (use_icount) {
7536 gen_io_end();
7537 gen_jmp(s, s->pc - s->cs_base);
7538 }
7539 break;
7540 case 0x133: /* rdpmc */
7541 if (s->cc_op != CC_OP_DYNAMIC)
7542 gen_op_set_cc_op(s->cc_op);
7543 gen_jmp_im(pc_start - s->cs_base);
7544 tcg_gen_helper_0_0(helper_rdpmc);
7545 break;
7546 case 0x134: /* sysenter */
7547#ifndef VBOX
7548 /* on Intel CPUs, SYSENTER remains valid in 64-bit mode */
7549 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
7550#else
7551 /** @todo: make things right */
7552 if (CODE64(s))
7553#endif
7554 goto illegal_op;
7555 if (!s->pe) {
7556 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7557 } else {
7558 if (s->cc_op != CC_OP_DYNAMIC) {
7559 gen_op_set_cc_op(s->cc_op);
7560 s->cc_op = CC_OP_DYNAMIC;
7561 }
7562 gen_jmp_im(pc_start - s->cs_base);
7563 tcg_gen_helper_0_0(helper_sysenter);
7564 gen_eob(s);
7565 }
7566 break;
7567 case 0x135: /* sysexit */
7568#ifndef VBOX
7569 /* on Intel CPUs, SYSEXIT remains valid in 64-bit mode */
7570 if (CODE64(s) && cpu_single_env->cpuid_vendor1 != CPUID_VENDOR_INTEL_1)
7571#else
7572 /** @todo: make things right */
7573 if (CODE64(s))
7574#endif
7575 goto illegal_op;
7576 if (!s->pe) {
7577 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7578 } else {
7579 if (s->cc_op != CC_OP_DYNAMIC) {
7580 gen_op_set_cc_op(s->cc_op);
7581 s->cc_op = CC_OP_DYNAMIC;
7582 }
7583 gen_jmp_im(pc_start - s->cs_base);
7584 tcg_gen_helper_0_1(helper_sysexit, tcg_const_i32(dflag));
7585 gen_eob(s);
7586 }
7587 break;
7588#ifdef TARGET_X86_64
7589 case 0x105: /* syscall */
7590 /* XXX: is it usable in real mode ? */
7591 if (s->cc_op != CC_OP_DYNAMIC) {
7592 gen_op_set_cc_op(s->cc_op);
7593 s->cc_op = CC_OP_DYNAMIC;
7594 }
7595 gen_jmp_im(pc_start - s->cs_base);
7596 tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
7597 gen_eob(s);
7598 break;
7599 case 0x107: /* sysret */
7600 if (!s->pe) {
7601 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7602 } else {
7603 if (s->cc_op != CC_OP_DYNAMIC) {
7604 gen_op_set_cc_op(s->cc_op);
7605 s->cc_op = CC_OP_DYNAMIC;
7606 }
7607 gen_jmp_im(pc_start - s->cs_base);
7608 tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
7609 /* condition codes are modified only in long mode */
7610 if (s->lma)
7611 s->cc_op = CC_OP_EFLAGS;
7612 gen_eob(s);
7613 }
7614 break;
7615#endif
7616 case 0x1a2: /* cpuid */
7617 if (s->cc_op != CC_OP_DYNAMIC)
7618 gen_op_set_cc_op(s->cc_op);
7619 gen_jmp_im(pc_start - s->cs_base);
7620 tcg_gen_helper_0_0(helper_cpuid);
7621 break;
7622 case 0xf4: /* hlt */
7623 if (s->cpl != 0) {
7624 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7625 } else {
7626 if (s->cc_op != CC_OP_DYNAMIC)
7627 gen_op_set_cc_op(s->cc_op);
7628 gen_jmp_im(pc_start - s->cs_base);
7629 tcg_gen_helper_0_1(helper_hlt, tcg_const_i32(s->pc - pc_start));
7630 s->is_jmp = 3;
7631 }
7632 break;
7633 case 0x100:
7634 modrm = ldub_code(s->pc++);
7635 mod = (modrm >> 6) & 3;
7636 op = (modrm >> 3) & 7;
7637 switch(op) {
7638 case 0: /* sldt */
7639 if (!s->pe || s->vm86)
7640 goto illegal_op;
7641 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_READ);
7642 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,ldt.selector));
7643 ot = OT_WORD;
7644 if (mod == 3)
7645 ot += s->dflag;
7646 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
7647 break;
7648 case 2: /* lldt */
7649 if (!s->pe || s->vm86)
7650 goto illegal_op;
7651 if (s->cpl != 0) {
7652 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7653 } else {
7654 gen_svm_check_intercept(s, pc_start, SVM_EXIT_LDTR_WRITE);
7655 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7656 gen_jmp_im(pc_start - s->cs_base);
7657 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7658 tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
7659 }
7660 break;
7661 case 1: /* str */
7662 if (!s->pe || s->vm86)
7663 goto illegal_op;
7664 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_READ);
7665 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,tr.selector));
7666 ot = OT_WORD;
7667 if (mod == 3)
7668 ot += s->dflag;
7669 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
7670 break;
7671 case 3: /* ltr */
7672 if (!s->pe || s->vm86)
7673 goto illegal_op;
7674 if (s->cpl != 0) {
7675 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7676 } else {
7677 gen_svm_check_intercept(s, pc_start, SVM_EXIT_TR_WRITE);
7678 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7679 gen_jmp_im(pc_start - s->cs_base);
7680 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
7681 tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
7682 }
7683 break;
7684 case 4: /* verr */
7685 case 5: /* verw */
7686 if (!s->pe || s->vm86)
7687 goto illegal_op;
7688 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7689 if (s->cc_op != CC_OP_DYNAMIC)
7690 gen_op_set_cc_op(s->cc_op);
7691 if (op == 4)
7692 tcg_gen_helper_0_1(helper_verr, cpu_T[0]);
7693 else
7694 tcg_gen_helper_0_1(helper_verw, cpu_T[0]);
7695 s->cc_op = CC_OP_EFLAGS;
7696 break;
7697 default:
7698 goto illegal_op;
7699 }
7700 break;
7701 case 0x101:
7702 modrm = ldub_code(s->pc++);
7703 mod = (modrm >> 6) & 3;
7704 op = (modrm >> 3) & 7;
7705 rm = modrm & 7;
7706
7707#ifdef VBOX
7708 /* 0f 01 f9 */
7709 if (modrm == 0xf9)
7710 {
7711 if (!(s->cpuid_ext2_features & CPUID_EXT2_RDTSCP))
7712 goto illegal_op;
7713 gen_jmp_im(pc_start - s->cs_base);
7714 tcg_gen_helper_0_0(helper_rdtscp);
7715 break;
7716 }
7717#endif
7718 switch(op) {
7719 case 0: /* sgdt */
7720 if (mod == 3)
7721 goto illegal_op;
7722 gen_svm_check_intercept(s, pc_start, SVM_EXIT_GDTR_READ);
7723 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7724 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.limit));
7725 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7726 gen_add_A0_im(s, 2);
7727 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, gdt.base));
7728 if (!s->dflag)
7729 gen_op_andl_T0_im(0xffffff);
7730 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7731 break;
7732 case 1:
7733 if (mod == 3) {
7734 switch (rm) {
7735 case 0: /* monitor */
7736 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7737 s->cpl != 0)
7738 goto illegal_op;
7739 if (s->cc_op != CC_OP_DYNAMIC)
7740 gen_op_set_cc_op(s->cc_op);
7741 gen_jmp_im(pc_start - s->cs_base);
7742#ifdef TARGET_X86_64
7743 if (s->aflag == 2) {
7744 gen_op_movq_A0_reg(R_EAX);
7745 } else
7746#endif
7747 {
7748 gen_op_movl_A0_reg(R_EAX);
7749 if (s->aflag == 0)
7750 gen_op_andl_A0_ffff();
7751 }
7752 gen_add_A0_ds_seg(s);
7753 tcg_gen_helper_0_1(helper_monitor, cpu_A0);
7754 break;
7755 case 1: /* mwait */
7756 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
7757 s->cpl != 0)
7758 goto illegal_op;
7759 if (s->cc_op != CC_OP_DYNAMIC) {
7760 gen_op_set_cc_op(s->cc_op);
7761 s->cc_op = CC_OP_DYNAMIC;
7762 }
7763 gen_jmp_im(pc_start - s->cs_base);
7764 tcg_gen_helper_0_1(helper_mwait, tcg_const_i32(s->pc - pc_start));
7765 gen_eob(s);
7766 break;
7767 default:
7768 goto illegal_op;
7769 }
7770 } else { /* sidt */
7771 gen_svm_check_intercept(s, pc_start, SVM_EXIT_IDTR_READ);
7772 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7773 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.limit));
7774 gen_op_st_T0_A0(OT_WORD + s->mem_index);
7775 gen_add_A0_im(s, 2);
7776 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, idt.base));
7777 if (!s->dflag)
7778 gen_op_andl_T0_im(0xffffff);
7779 gen_op_st_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7780 }
7781 break;
7782 case 2: /* lgdt */
7783 case 3: /* lidt */
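/* with mod == 3, this group encodes the SVM instructions instead: rm
   selects VMRUN, VMMCALL, VMLOAD, VMSAVE, STGI, CLGI, SKINIT or INVLPGA */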
7784 if (mod == 3) {
7785 if (s->cc_op != CC_OP_DYNAMIC)
7786 gen_op_set_cc_op(s->cc_op);
7787 gen_jmp_im(pc_start - s->cs_base);
7788 switch(rm) {
7789 case 0: /* VMRUN */
7790 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7791 goto illegal_op;
7792 if (s->cpl != 0) {
7793 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7794 break;
7795 } else {
7796 tcg_gen_helper_0_2(helper_vmrun,
7797 tcg_const_i32(s->aflag),
7798 tcg_const_i32(s->pc - pc_start));
7799 tcg_gen_exit_tb(0);
7800 s->is_jmp = 3;
7801 }
7802 break;
7803 case 1: /* VMMCALL */
7804 if (!(s->flags & HF_SVME_MASK))
7805 goto illegal_op;
7806 tcg_gen_helper_0_0(helper_vmmcall);
7807 break;
7808 case 2: /* VMLOAD */
7809 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7810 goto illegal_op;
7811 if (s->cpl != 0) {
7812 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7813 break;
7814 } else {
7815 tcg_gen_helper_0_1(helper_vmload,
7816 tcg_const_i32(s->aflag));
7817 }
7818 break;
7819 case 3: /* VMSAVE */
7820 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7821 goto illegal_op;
7822 if (s->cpl != 0) {
7823 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7824 break;
7825 } else {
7826 tcg_gen_helper_0_1(helper_vmsave,
7827 tcg_const_i32(s->aflag));
7828 }
7829 break;
7830 case 4: /* STGI */
7831 if ((!(s->flags & HF_SVME_MASK) &&
7832 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7833 !s->pe)
7834 goto illegal_op;
7835 if (s->cpl != 0) {
7836 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7837 break;
7838 } else {
7839 tcg_gen_helper_0_0(helper_stgi);
7840 }
7841 break;
7842 case 5: /* CLGI */
7843 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7844 goto illegal_op;
7845 if (s->cpl != 0) {
7846 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7847 break;
7848 } else {
7849 tcg_gen_helper_0_0(helper_clgi);
7850 }
7851 break;
7852 case 6: /* SKINIT */
7853 if ((!(s->flags & HF_SVME_MASK) &&
7854 !(s->cpuid_ext3_features & CPUID_EXT3_SKINIT)) ||
7855 !s->pe)
7856 goto illegal_op;
7857 tcg_gen_helper_0_0(helper_skinit);
7858 break;
7859 case 7: /* INVLPGA */
7860 if (!(s->flags & HF_SVME_MASK) || !s->pe)
7861 goto illegal_op;
7862 if (s->cpl != 0) {
7863 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7864 break;
7865 } else {
7866 tcg_gen_helper_0_1(helper_invlpga,
7867 tcg_const_i32(s->aflag));
7868 }
7869 break;
7870 default:
7871 goto illegal_op;
7872 }
7873 } else if (s->cpl != 0) {
7874 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7875 } else {
7876 gen_svm_check_intercept(s, pc_start,
7877 op==2 ? SVM_EXIT_GDTR_WRITE : SVM_EXIT_IDTR_WRITE);
7878 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7879 gen_op_ld_T1_A0(OT_WORD + s->mem_index);
7880 gen_add_A0_im(s, 2);
7881 gen_op_ld_T0_A0(CODE64(s) + OT_LONG + s->mem_index);
7882 if (!s->dflag)
7883 gen_op_andl_T0_im(0xffffff);
7884 if (op == 2) {
7885 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,gdt.base));
7886 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,gdt.limit));
7887 } else {
7888 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,idt.base));
7889 tcg_gen_st32_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,idt.limit));
7890 }
7891 }
7892 break;
7893 case 4: /* smsw */
7894 gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0);
7895 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
7896 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
7897 break;
7898 case 6: /* lmsw */
7899 if (s->cpl != 0) {
7900 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7901 } else {
7902 gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
7903 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
7904 tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
7905 gen_jmp_im(s->pc - s->cs_base);
7906 gen_eob(s);
7907 }
7908 break;
7909 case 7: /* invlpg */
7910 if (s->cpl != 0) {
7911 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7912 } else {
7913 if (mod == 3) {
7914#ifdef TARGET_X86_64
7915 if (CODE64(s) && rm == 0) {
7916 /* swapgs */
7917 tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7918 tcg_gen_ld_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,kernelgsbase));
7919 tcg_gen_st_tl(cpu_T[1], cpu_env, offsetof(CPUX86State,segs[R_GS].base));
7920 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,kernelgsbase));
7921 } else
7922#endif
7923 {
7924 goto illegal_op;
7925 }
7926 } else {
7927 if (s->cc_op != CC_OP_DYNAMIC)
7928 gen_op_set_cc_op(s->cc_op);
7929 gen_jmp_im(pc_start - s->cs_base);
7930 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7931 tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
7932 gen_jmp_im(s->pc - s->cs_base);
7933 gen_eob(s);
7934 }
7935 }
7936 break;
7937 default:
7938 goto illegal_op;
7939 }
7940 break;
7941 case 0x108: /* invd */
7942 case 0x109: /* wbinvd */
7943 if (s->cpl != 0) {
7944 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
7945 } else {
7946 gen_svm_check_intercept(s, pc_start, (b & 2) ? SVM_EXIT_INVD : SVM_EXIT_WBINVD);
7947 /* nothing to do */
7948 }
7949 break;
7950 case 0x63: /* arpl or movslS (x86_64) */
7951#ifdef TARGET_X86_64
7952 if (CODE64(s)) {
7953 int d_ot;
7954 /* d_ot is the size of the destination */
7955 d_ot = dflag + OT_WORD;
7956
7957 modrm = ldub_code(s->pc++);
7958 reg = ((modrm >> 3) & 7) | rex_r;
7959 mod = (modrm >> 6) & 3;
7960 rm = (modrm & 7) | REX_B(s);
7961
7962 if (mod == 3) {
7963 gen_op_mov_TN_reg(OT_LONG, 0, rm);
7964 /* sign extend */
7965 if (d_ot == OT_QUAD)
7966 tcg_gen_ext32s_tl(cpu_T[0], cpu_T[0]);
7967 gen_op_mov_reg_T0(d_ot, reg);
7968 } else {
7969 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7970 if (d_ot == OT_QUAD) {
7971 gen_op_lds_T0_A0(OT_LONG + s->mem_index);
7972 } else {
7973 gen_op_ld_T0_A0(OT_LONG + s->mem_index);
7974 }
7975 gen_op_mov_reg_T0(d_ot, reg);
7976 }
7977 } else
7978#endif
7979 {
7980 int label1;
7981 TCGv t0, t1, t2, a0;
7982
7983 if (!s->pe || s->vm86)
7984 goto illegal_op;
7985
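/* arpl: if the destination selector's RPL (low two bits) is below the
   source's, raise it and set ZF; t2 carries the new ZF value */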
7986 t0 = tcg_temp_local_new(TCG_TYPE_TL);
7987 t1 = tcg_temp_local_new(TCG_TYPE_TL);
7988 t2 = tcg_temp_local_new(TCG_TYPE_TL);
7989#ifdef VBOX
7990 a0 = tcg_temp_local_new(TCG_TYPE_TL);
7991#endif
7992 ot = OT_WORD;
7993 modrm = ldub_code(s->pc++);
7994 reg = (modrm >> 3) & 7;
7995 mod = (modrm >> 6) & 3;
7996 rm = modrm & 7;
7997 if (mod != 3) {
7998 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
7999#ifdef VBOX
8000 tcg_gen_mov_tl(a0, cpu_A0);
8001#endif
8002 gen_op_ld_v(ot + s->mem_index, t0, cpu_A0);
8003 } else {
8004 gen_op_mov_v_reg(ot, t0, rm);
8005 }
8006 gen_op_mov_v_reg(ot, t1, reg);
8007 tcg_gen_andi_tl(cpu_tmp0, t0, 3);
8008 tcg_gen_andi_tl(t1, t1, 3);
8009 tcg_gen_movi_tl(t2, 0);
8010 label1 = gen_new_label();
8011 tcg_gen_brcond_tl(TCG_COND_GE, cpu_tmp0, t1, label1);
8012 tcg_gen_andi_tl(t0, t0, ~3);
8013 tcg_gen_or_tl(t0, t0, t1);
8014 tcg_gen_movi_tl(t2, CC_Z);
8015 gen_set_label(label1);
8016 if (mod != 3) {
8017#ifdef VBOX
8018 /* cpu_A0 is a global and doesn't survive the branch; use the saved copy */
8019 gen_op_st_v(ot + s->mem_index, t0, a0);
8020#else
8021 gen_op_st_v(ot + s->mem_index, t0, cpu_A0);
8022#endif
8023 } else {
8024 gen_op_mov_reg_v(ot, rm, t0);
8025 }
8026 if (s->cc_op != CC_OP_DYNAMIC)
8027 gen_op_set_cc_op(s->cc_op);
8028 gen_compute_eflags(cpu_cc_src);
8029 tcg_gen_andi_tl(cpu_cc_src, cpu_cc_src, ~CC_Z);
8030 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t2);
8031 s->cc_op = CC_OP_EFLAGS;
8032 tcg_temp_free(t0);
8033 tcg_temp_free(t1);
8034 tcg_temp_free(t2);
8035#ifdef VBOX
8036 tcg_temp_free(a0);
8037#endif
8038 }
8039 break;
8040 case 0x102: /* lar */
8041 case 0x103: /* lsl */
8042 {
8043 int label1;
8044 TCGv t0;
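/* lar/lsl: the helper validates the selector and sets ZF on success; the
   destination register is written only on the ZF-set path */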
8045 if (!s->pe || s->vm86)
8046 goto illegal_op;
8047 ot = dflag ? OT_LONG : OT_WORD;
8048 modrm = ldub_code(s->pc++);
8049 reg = ((modrm >> 3) & 7) | rex_r;
8050 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
8051 t0 = tcg_temp_local_new(TCG_TYPE_TL);
8052 if (s->cc_op != CC_OP_DYNAMIC)
8053 gen_op_set_cc_op(s->cc_op);
8054 if (b == 0x102)
8055 tcg_gen_helper_1_1(helper_lar, t0, cpu_T[0]);
8056 else
8057 tcg_gen_helper_1_1(helper_lsl, t0, cpu_T[0]);
8058 tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
8059 label1 = gen_new_label();
8060 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, label1);
8061 gen_op_mov_reg_v(ot, reg, t0);
8062 gen_set_label(label1);
8063 s->cc_op = CC_OP_EFLAGS;
8064 tcg_temp_free(t0);
8065 }
8066 break;
8067 case 0x118:
8068 modrm = ldub_code(s->pc++);
8069 mod = (modrm >> 6) & 3;
8070 op = (modrm >> 3) & 7;
8071 switch(op) {
8072 case 0: /* prefetchnta */
8073 case 1: /* prefetchnt0 */
8074 case 2: /* prefetchnt0 */
8075 case 3: /* prefetchnt0 */
8076 if (mod == 3)
8077 goto illegal_op;
8078 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
8079 /* nothing more to do */
8080 break;
8081 default: /* nop (multi byte) */
8082 gen_nop_modrm(s, modrm);
8083 break;
8084 }
8085 break;
8086 case 0x119 ... 0x11f: /* nop (multi byte) */
8087 modrm = ldub_code(s->pc++);
8088 gen_nop_modrm(s, modrm);
8089 break;
8090 case 0x120: /* mov reg, crN */
8091 case 0x122: /* mov crN, reg */
8092 if (s->cpl != 0) {
8093 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
8094 } else {
8095 modrm = ldub_code(s->pc++);
8096#ifndef VBOX /* mod bits are always understood to be 11 (0xc0) regardless of actual content; see AMD manuals */
8097 if ((modrm & 0xc0) != 0xc0)
8098 goto illegal_op;
8099#endif
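            /* writes to control registers can change paging or protection
               state that is baked into translated code, so the write path
               below ends the TB; reads fall through normally */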
            rm = (modrm & 7) | REX_B(s);
            reg = ((modrm >> 3) & 7) | rex_r;
            if (CODE64(s))
                ot = OT_QUAD;
            else
                ot = OT_LONG;
            switch(reg) {
            case 0:
            case 2:
            case 3:
            case 4:
            case 8:
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_jmp_im(pc_start - s->cs_base);
                if (b & 2) {
                    gen_op_mov_TN_reg(ot, 0, rm);
                    tcg_gen_helper_0_2(helper_write_crN,
                                       tcg_const_i32(reg), cpu_T[0]);
                    gen_jmp_im(s->pc - s->cs_base);
                    gen_eob(s);
                } else {
                    tcg_gen_helper_1_1(helper_read_crN,
                                       cpu_T[0], tcg_const_i32(reg));
                    gen_op_mov_reg_T0(ot, rm);
                }
                break;
            default:
                goto illegal_op;
            }
        }
        break;
    case 0x121: /* mov reg, drN */
    case 0x123: /* mov drN, reg */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            modrm = ldub_code(s->pc++);
#ifndef VBOX /* mod bits are always understood to be 11 (0xc0) regardless of actual content; see AMD manuals */
            if ((modrm & 0xc0) != 0xc0)
                goto illegal_op;
#endif
            rm = (modrm & 7) | REX_B(s);
            reg = ((modrm >> 3) & 7) | rex_r;
            if (CODE64(s))
                ot = OT_QUAD;
            else
                ot = OT_LONG;
            /* XXX: do it dynamically with CR4.DE bit */
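            /* DR4/DR5 alias DR6/DR7 only while CR4.DE is clear and raise
               #UD otherwise; this static check always rejects them, which
               the XXX above notes should really depend on CR4.DE */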
            if (reg == 4 || reg == 5 || reg >= 8)
                goto illegal_op;
            if (b & 2) {
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
                gen_op_mov_TN_reg(ot, 0, rm);
                tcg_gen_helper_0_2(helper_movl_drN_T0,
                                   tcg_const_i32(reg), cpu_T[0]);
                gen_jmp_im(s->pc - s->cs_base);
                gen_eob(s);
            } else {
                gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_DR0 + reg);
                tcg_gen_ld_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,dr[reg]));
                gen_op_mov_reg_T0(ot, rm);
            }
        }
        break;
    case 0x106: /* clts */
        if (s->cpl != 0) {
            gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
        } else {
            gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
            tcg_gen_helper_0_0(helper_clts);
            /* abort block because static cpu state changed */
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    /* MMX/3DNow!/SSE/SSE2/SSE3/SSSE3/SSE4 support */
    case 0x1c3: /* MOVNTI reg, mem */
        if (!(s->cpuid_features & CPUID_SSE2))
            goto illegal_op;
        ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        reg = ((modrm >> 3) & 7) | rex_r;
        /* generate a generic store */
        gen_ldst_modrm(s, modrm, ot, reg, 1);
        break;
    case 0x1ae:
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        op = (modrm >> 3) & 7;
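        /* 0F AE group: the reg field selects fxsave(0), fxrstor(1),
           ldmxcsr(2), stmxcsr(3), lfence(5), mfence(6) and
           sfence/clflush(7) */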
        switch(op) {
        case 0: /* fxsave */
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
                (s->flags & HF_EM_MASK))
                goto illegal_op;
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_fxsave,
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
            break;
        case 1: /* fxrstor */
            if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
                (s->flags & HF_EM_MASK))
                goto illegal_op;
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            tcg_gen_helper_0_2(helper_fxrstor,
                               cpu_A0, tcg_const_i32((s->dflag == 2)));
            break;
        case 2: /* ldmxcsr */
        case 3: /* stmxcsr */
            if (s->flags & HF_TS_MASK) {
                gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
                break;
            }
            if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
                mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            if (op == 2) {
                gen_op_ld_T0_A0(OT_LONG + s->mem_index);
                tcg_gen_st32_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
            } else {
                tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State, mxcsr));
                gen_op_st_T0_A0(OT_LONG + s->mem_index);
            }
            break;
        case 5: /* lfence */
        case 6: /* mfence */
            if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
                goto illegal_op;
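            /* no code is generated: the recompiler performs guest memory
               accesses in program order on a single thread, so the fences
               have nothing to order here */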
            break;
        case 7: /* sfence / clflush */
            if ((modrm & 0xc7) == 0xc0) {
                /* sfence */
                /* XXX: also check for cpuid_ext2_features & CPUID_EXT2_EMMX */
                if (!(s->cpuid_features & CPUID_SSE))
                    goto illegal_op;
            } else {
                /* clflush */
                if (!(s->cpuid_features & CPUID_CLFLUSH))
                    goto illegal_op;
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            }
            break;
        default:
            goto illegal_op;
        }
        break;
    case 0x10d: /* 3DNow! prefetch(w) */
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        /* ignore for now */
        break;
    case 0x1aa: /* rsm */
        gen_svm_check_intercept(s, pc_start, SVM_EXIT_RSM);
        if (!(s->flags & HF_SMM_MASK))
            goto illegal_op;
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_jmp_im(s->pc - s->cs_base);
        tcg_gen_helper_0_0(helper_rsm);
        gen_eob(s);
        break;
    case 0x1b8: /* SSE4.2 popcnt */
        if ((prefixes & (PREFIX_REPZ | PREFIX_LOCK | PREFIX_REPNZ)) !=
             PREFIX_REPZ)
            goto illegal_op;
        if (!(s->cpuid_ext_features & CPUID_EXT_POPCNT))
            goto illegal_op;

        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7);

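        /* operand size: the 0x66 prefix selects 16 bit, REX.W
           (dflag == 2) selects 64 bit, otherwise 32 bit */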
        if (s->prefix & PREFIX_DATA)
            ot = OT_WORD;
        else if (s->dflag != 2)
            ot = OT_LONG;
        else
            ot = OT_QUAD;

        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        tcg_gen_helper_1_2(helper_popcnt,
                           cpu_T[0], cpu_T[0], tcg_const_i32(ot));
        gen_op_mov_reg_T0(ot, reg);

        s->cc_op = CC_OP_EFLAGS;
        break;
    case 0x10e ... 0x10f:
        /* 3DNow! instructions, ignore prefixes */
        s->prefix &= ~(PREFIX_REPZ | PREFIX_REPNZ | PREFIX_DATA);
    case 0x110 ... 0x117:
    case 0x128 ... 0x12f:
    case 0x138 ... 0x13a:
    case 0x150 ... 0x177:
    case 0x17c ... 0x17f:
    case 0x1c2:
    case 0x1c4 ... 0x1c6:
    case 0x1d0 ... 0x1fe:
        gen_sse(s, b, pc_start, rex_r);
        break;
    default:
        goto illegal_op;
    }
    /* lock generation */
    if (s->prefix & PREFIX_LOCK)
        tcg_gen_helper_0_0(helper_unlock);
    return s->pc;
 illegal_op:
    if (s->prefix & PREFIX_LOCK)
        tcg_gen_helper_0_0(helper_unlock);
    /* XXX: ensure that no lock was generated */
    gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
    return s->pc;
}

void optimize_flags_init(void)
{
#if TCG_TARGET_REG_BITS == 32
    assert(sizeof(CCTable) == (1 << 3));
#else
    assert(sizeof(CCTable) == (1 << 4));
#endif
    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
    cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
                                   TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
    cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
    cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
    cpu_cc_tmp = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, cc_tmp), "cc_tmp");

    /* register helpers */
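    /* re-including helper.h with DEF_HELPER redefined turns every helper
       declaration into a tcg_register_helper() call, registering each
       helper function under its own name */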

#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
#include "helper.h"
}

/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction. */
#ifndef VBOX
static inline void gen_intermediate_code_internal(CPUState *env,
#else /* VBOX */
DECLINLINE(void) gen_intermediate_code_internal(CPUState *env,
#endif /* VBOX */
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int j, lj, cflags;
    uint64_t flags;
    target_ulong pc_start;
    target_ulong cs_base;
    int num_insns;
    int max_insns;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
#ifdef VBOX
    dc->vme = !!(env->cr[4] & CR4_VME_MASK);
    dc->pvi = !!(env->cr[4] & CR4_PVI_MASK);
#ifdef VBOX_WITH_CALL_RECORD
    if (   !(env->state & CPU_RAW_RING0)
        && (env->cr[0] & CR0_PG_MASK)
        && !(env->eflags & X86_EFL_IF)
        && dc->code32)
        dc->record_call = 1;
    else
        dc->record_call = 0;
#endif
#endif
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
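    /* mem_index is added to the OT_* operand size when picking a
       load/store op; the combined index encodes both the access size and
       which MMU mode (kernel vs. user softmmu, or raw when 0) the
       generated accessors use */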
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
    dc->cpuid_ext2_features = env->cpuid_ext2_features;
    dc->cpuid_ext3_features = env->cpuid_ext3_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
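    /* jmp_opt enables direct TB chaining for jumps; it must stay off when
       single-stepping or while interrupts are inhibited, since control
       has to return to the main loop after every instruction */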
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    cpu_T[0] = tcg_temp_new(TCG_TYPE_TL);
    cpu_T[1] = tcg_temp_new(TCG_TYPE_TL);
    cpu_A0 = tcg_temp_new(TCG_TYPE_TL);
    cpu_T3 = tcg_temp_new(TCG_TYPE_TL);

    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
    cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
    cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
    cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    gen_icount_start();
    for(;;) {
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        }
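        /* the gen_opc_* side tables map each generated micro-op back to
           the guest PC and cc_op state; gen_pc_load() uses them to
           restore precise state when a fault occurs inside a TB */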
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();

        pc_ptr = disas_insn(dc, pc_ptr);
        num_insns++;
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;
#ifdef VBOX
#ifdef DEBUG
/*
        if (cpu_check_code_raw(env, pc_ptr, env->hflags | (env->eflags & (IOPL_MASK | TF_MASK | VM_MASK))) == ERROR_SUCCESS)
        {
            // should never happen as the jump to the patch code terminates the translation block
            dprintf(("QEmu is about to execute instructions in our patch block at %08X!!\n", pc_ptr));
        }
*/
#endif
        if (env->state & CPU_EMULATE_SINGLE_INSTR)
        {
            env->state &= ~CPU_EMULATE_SINGLE_INSTR;
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
#endif /* VBOX */

        /* in single step mode, we generate only one instruction and
           generate an exception */
        /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* stop generation as well if the translation has grown too long */
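        /* the TARGET_PAGE_SIZE - 32 bound keeps the TB within one guest
           page, with headroom for one maximum-length x86 instruction
           (15 bytes) starting near the boundary */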
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32) ||
            num_insns >= max_insns) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    /* don't forget to fill in the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
    }
#endif

    if (!search_pc) {
        tb->size = pc_ptr - pc_start;
        tb->icount = num_insns;
    }
}

void gen_intermediate_code(CPUState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

void gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}

void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    int cc_op;
#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP) {
        int i;
        fprintf(logfile, "RESTORE:\n");
        for(i = 0; i <= pc_pos; i++) {
            if (gen_opc_instr_start[i]) {
                fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
            }
        }
        fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
                searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
                (uint32_t)tb->cs_base);
    }
#endif
    env->eip = gen_opc_pc[pc_pos] - tb->cs_base;
    cc_op = gen_opc_cc_op[pc_pos];
    if (cc_op != CC_OP_DYNAMIC)
        env->cc_op = cc_op;
}