VirtualBox

source: vbox/trunk/src/recompiler/new/target-i386/translate.c@ 1344

Last change on this file since 1344 was 644, checked in by vboxsync, 18 years ago

Merged in current upstream changes.

  • Property svn:eol-style set to native
File size: 198.6 KB
Line 
1/*
2 * i386 translation
3 *
4 * Copyright (c) 2003 Fabrice Bellard
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
20#include <stdarg.h>
21#include <stdlib.h>
22#include <stdio.h>
23#include <string.h>
24#include <inttypes.h>
25#ifndef VBOX
26#include <signal.h>
27#include <assert.h>
28#endif /* !VBOX */
29
30#include "cpu.h"
31#include "exec-all.h"
32#include "disas.h"
33
34/* XXX: move that elsewhere */
35static uint16_t *gen_opc_ptr;
36static uint32_t *gen_opparam_ptr;
37
38#define PREFIX_REPZ 0x01
39#define PREFIX_REPNZ 0x02
40#define PREFIX_LOCK 0x04
41#define PREFIX_DATA 0x08
42#define PREFIX_ADR 0x10
43
44#ifdef TARGET_X86_64
45#define X86_64_ONLY(x) x
46#define X86_64_DEF(x...) x
47#define CODE64(s) ((s)->code64)
48#define REX_X(s) ((s)->rex_x)
49#define REX_B(s) ((s)->rex_b)
50/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
51#if 1
52#define BUGGY_64(x) NULL
53#endif
54#else
55#define X86_64_ONLY(x) NULL
56#define X86_64_DEF(x...)
57#define CODE64(s) 0
58#define REX_X(s) 0
59#define REX_B(s) 0
60#endif
61
62#ifdef TARGET_X86_64
63static int x86_64_hregs;
64#endif
65
66#ifdef USE_DIRECT_JUMP
67#define TBPARAM(x)
68#else
69#define TBPARAM(x) (long)(x)
70#endif
71
72#ifdef VBOX
73/* Special/override code readers to hide patched code. */
74
75uint8_t ldub_code_raw(target_ulong pc)
76{
77 uint8_t b;
78
79 if (!remR3GetOpcode(cpu_single_env, pc, &b))
80 b = ldub_code(pc);
81 return b;
82}
83#define ldub_code(a) ldub_code_raw(a)
84
85uint16_t lduw_code_raw(target_ulong pc)
86{
87 return (ldub_code(pc+1) << 8) | ldub_code(pc);
88}
89#define lduw_code(a) lduw_code_raw(a)
90
91
92uint32_t ldl_code_raw(target_ulong pc)
93{
94 return (ldub_code(pc+3) << 24) | (ldub_code(pc+2) << 16) | (ldub_code(pc+1) << 8) | ldub_code(pc);
95}
96#define ldl_code(a) ldl_code_raw(a)
97
98#endif /* VBOX */
99
100
/* Per-instruction / per-translation-block decoder state. */
typedef struct DisasContext {
    /* current insn context */
    int override; /* segment override register index, -1 if no override */
    int prefix;   /* PREFIX_* bits accumulated for the current insn */
    int aflag, dflag; /* address/operand size: 0 = 16 bit, nonzero = 32 bit
                         (aflag == 2 selects 64 bit under TARGET_X86_64) */
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b; /* REX prefix index/base register extension bits */
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation (CC_OP_DYNAMIC when unknown) */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;    /* current privilege level (compared against iopl) */
    int iopl;   /* I/O privilege level */
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions (raw/kernel/user rows) */
    int flags;  /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;     /* presumably CPUID feature masks used to gate
                               opcode decoding; not referenced in this chunk */
    int cpuid_ext_features;
} DisasContext;
136
137static void gen_eob(DisasContext *s);
138static void gen_jmp(DisasContext *s, target_ulong eip);
139static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
140
141/* i386 arith/logic operations */
142enum {
143 OP_ADDL,
144 OP_ORL,
145 OP_ADCL,
146 OP_SBBL,
147 OP_ANDL,
148 OP_SUBL,
149 OP_XORL,
150 OP_CMPL,
151};
152
153/* i386 shift ops */
154enum {
155 OP_ROL,
156 OP_ROR,
157 OP_RCL,
158 OP_RCR,
159 OP_SHL,
160 OP_SHR,
161 OP_SHL1, /* undocumented */
162 OP_SAR = 7,
163};
164
165enum {
166#define DEF(s, n, copy_size) INDEX_op_ ## s,
167#include "opc.h"
168#undef DEF
169 NB_OPS,
170};
171
172#include "gen-op.h"
173
174/* operand size */
175enum {
176 OT_BYTE = 0,
177 OT_WORD,
178 OT_LONG,
179 OT_QUAD,
180};
181
182enum {
183 /* I386 int registers */
184 OR_EAX, /* MUST be even numbered */
185 OR_ECX,
186 OR_EDX,
187 OR_EBX,
188 OR_ESP,
189 OR_EBP,
190 OR_ESI,
191 OR_EDI,
192
193 OR_TMP0 = 16, /* temporary operand register */
194 OR_TMP1,
195 OR_A0, /* temporary register used when doing address evaluation */
196};
197
198#ifdef TARGET_X86_64
199
200#define NB_OP_SIZES 4
201
202#define DEF_REGS(prefix, suffix) \
203 prefix ## EAX ## suffix,\
204 prefix ## ECX ## suffix,\
205 prefix ## EDX ## suffix,\
206 prefix ## EBX ## suffix,\
207 prefix ## ESP ## suffix,\
208 prefix ## EBP ## suffix,\
209 prefix ## ESI ## suffix,\
210 prefix ## EDI ## suffix,\
211 prefix ## R8 ## suffix,\
212 prefix ## R9 ## suffix,\
213 prefix ## R10 ## suffix,\
214 prefix ## R11 ## suffix,\
215 prefix ## R12 ## suffix,\
216 prefix ## R13 ## suffix,\
217 prefix ## R14 ## suffix,\
218 prefix ## R15 ## suffix,
219
220#define DEF_BREGS(prefixb, prefixh, suffix) \
221 \
222static void prefixb ## ESP ## suffix ## _wrapper(void) \
223{ \
224 if (x86_64_hregs) \
225 prefixb ## ESP ## suffix (); \
226 else \
227 prefixh ## EAX ## suffix (); \
228} \
229 \
230static void prefixb ## EBP ## suffix ## _wrapper(void) \
231{ \
232 if (x86_64_hregs) \
233 prefixb ## EBP ## suffix (); \
234 else \
235 prefixh ## ECX ## suffix (); \
236} \
237 \
238static void prefixb ## ESI ## suffix ## _wrapper(void) \
239{ \
240 if (x86_64_hregs) \
241 prefixb ## ESI ## suffix (); \
242 else \
243 prefixh ## EDX ## suffix (); \
244} \
245 \
246static void prefixb ## EDI ## suffix ## _wrapper(void) \
247{ \
248 if (x86_64_hregs) \
249 prefixb ## EDI ## suffix (); \
250 else \
251 prefixh ## EBX ## suffix (); \
252}
253
254DEF_BREGS(gen_op_movb_, gen_op_movh_, _T0)
255DEF_BREGS(gen_op_movb_, gen_op_movh_, _T1)
256DEF_BREGS(gen_op_movl_T0_, gen_op_movh_T0_, )
257DEF_BREGS(gen_op_movl_T1_, gen_op_movh_T1_, )
258
259#else /* !TARGET_X86_64 */
260
261#define NB_OP_SIZES 3
262
263#define DEF_REGS(prefix, suffix) \
264 prefix ## EAX ## suffix,\
265 prefix ## ECX ## suffix,\
266 prefix ## EDX ## suffix,\
267 prefix ## EBX ## suffix,\
268 prefix ## ESP ## suffix,\
269 prefix ## EBP ## suffix,\
270 prefix ## ESI ## suffix,\
271 prefix ## EDI ## suffix,
272
273#endif /* !TARGET_X86_64 */
274
275static GenOpFunc *gen_op_mov_reg_T0[NB_OP_SIZES][CPU_NB_REGS] = {
276 [OT_BYTE] = {
277 gen_op_movb_EAX_T0,
278 gen_op_movb_ECX_T0,
279 gen_op_movb_EDX_T0,
280 gen_op_movb_EBX_T0,
281#ifdef TARGET_X86_64
282 gen_op_movb_ESP_T0_wrapper,
283 gen_op_movb_EBP_T0_wrapper,
284 gen_op_movb_ESI_T0_wrapper,
285 gen_op_movb_EDI_T0_wrapper,
286 gen_op_movb_R8_T0,
287 gen_op_movb_R9_T0,
288 gen_op_movb_R10_T0,
289 gen_op_movb_R11_T0,
290 gen_op_movb_R12_T0,
291 gen_op_movb_R13_T0,
292 gen_op_movb_R14_T0,
293 gen_op_movb_R15_T0,
294#else
295 gen_op_movh_EAX_T0,
296 gen_op_movh_ECX_T0,
297 gen_op_movh_EDX_T0,
298 gen_op_movh_EBX_T0,
299#endif
300 },
301 [OT_WORD] = {
302 DEF_REGS(gen_op_movw_, _T0)
303 },
304 [OT_LONG] = {
305 DEF_REGS(gen_op_movl_, _T0)
306 },
307#ifdef TARGET_X86_64
308 [OT_QUAD] = {
309 DEF_REGS(gen_op_movq_, _T0)
310 },
311#endif
312};
313
314static GenOpFunc *gen_op_mov_reg_T1[NB_OP_SIZES][CPU_NB_REGS] = {
315 [OT_BYTE] = {
316 gen_op_movb_EAX_T1,
317 gen_op_movb_ECX_T1,
318 gen_op_movb_EDX_T1,
319 gen_op_movb_EBX_T1,
320#ifdef TARGET_X86_64
321 gen_op_movb_ESP_T1_wrapper,
322 gen_op_movb_EBP_T1_wrapper,
323 gen_op_movb_ESI_T1_wrapper,
324 gen_op_movb_EDI_T1_wrapper,
325 gen_op_movb_R8_T1,
326 gen_op_movb_R9_T1,
327 gen_op_movb_R10_T1,
328 gen_op_movb_R11_T1,
329 gen_op_movb_R12_T1,
330 gen_op_movb_R13_T1,
331 gen_op_movb_R14_T1,
332 gen_op_movb_R15_T1,
333#else
334 gen_op_movh_EAX_T1,
335 gen_op_movh_ECX_T1,
336 gen_op_movh_EDX_T1,
337 gen_op_movh_EBX_T1,
338#endif
339 },
340 [OT_WORD] = {
341 DEF_REGS(gen_op_movw_, _T1)
342 },
343 [OT_LONG] = {
344 DEF_REGS(gen_op_movl_, _T1)
345 },
346#ifdef TARGET_X86_64
347 [OT_QUAD] = {
348 DEF_REGS(gen_op_movq_, _T1)
349 },
350#endif
351};
352
353static GenOpFunc *gen_op_mov_reg_A0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
354 [0] = {
355 DEF_REGS(gen_op_movw_, _A0)
356 },
357 [1] = {
358 DEF_REGS(gen_op_movl_, _A0)
359 },
360#ifdef TARGET_X86_64
361 [2] = {
362 DEF_REGS(gen_op_movq_, _A0)
363 },
364#endif
365};
366
367static GenOpFunc *gen_op_mov_TN_reg[NB_OP_SIZES][2][CPU_NB_REGS] =
368{
369 [OT_BYTE] = {
370 {
371 gen_op_movl_T0_EAX,
372 gen_op_movl_T0_ECX,
373 gen_op_movl_T0_EDX,
374 gen_op_movl_T0_EBX,
375#ifdef TARGET_X86_64
376 gen_op_movl_T0_ESP_wrapper,
377 gen_op_movl_T0_EBP_wrapper,
378 gen_op_movl_T0_ESI_wrapper,
379 gen_op_movl_T0_EDI_wrapper,
380 gen_op_movl_T0_R8,
381 gen_op_movl_T0_R9,
382 gen_op_movl_T0_R10,
383 gen_op_movl_T0_R11,
384 gen_op_movl_T0_R12,
385 gen_op_movl_T0_R13,
386 gen_op_movl_T0_R14,
387 gen_op_movl_T0_R15,
388#else
389 gen_op_movh_T0_EAX,
390 gen_op_movh_T0_ECX,
391 gen_op_movh_T0_EDX,
392 gen_op_movh_T0_EBX,
393#endif
394 },
395 {
396 gen_op_movl_T1_EAX,
397 gen_op_movl_T1_ECX,
398 gen_op_movl_T1_EDX,
399 gen_op_movl_T1_EBX,
400#ifdef TARGET_X86_64
401 gen_op_movl_T1_ESP_wrapper,
402 gen_op_movl_T1_EBP_wrapper,
403 gen_op_movl_T1_ESI_wrapper,
404 gen_op_movl_T1_EDI_wrapper,
405 gen_op_movl_T1_R8,
406 gen_op_movl_T1_R9,
407 gen_op_movl_T1_R10,
408 gen_op_movl_T1_R11,
409 gen_op_movl_T1_R12,
410 gen_op_movl_T1_R13,
411 gen_op_movl_T1_R14,
412 gen_op_movl_T1_R15,
413#else
414 gen_op_movh_T1_EAX,
415 gen_op_movh_T1_ECX,
416 gen_op_movh_T1_EDX,
417 gen_op_movh_T1_EBX,
418#endif
419 },
420 },
421 [OT_WORD] = {
422 {
423 DEF_REGS(gen_op_movl_T0_, )
424 },
425 {
426 DEF_REGS(gen_op_movl_T1_, )
427 },
428 },
429 [OT_LONG] = {
430 {
431 DEF_REGS(gen_op_movl_T0_, )
432 },
433 {
434 DEF_REGS(gen_op_movl_T1_, )
435 },
436 },
437#ifdef TARGET_X86_64
438 [OT_QUAD] = {
439 {
440 DEF_REGS(gen_op_movl_T0_, )
441 },
442 {
443 DEF_REGS(gen_op_movl_T1_, )
444 },
445 },
446#endif
447};
448
449static GenOpFunc *gen_op_movl_A0_reg[CPU_NB_REGS] = {
450 DEF_REGS(gen_op_movl_A0_, )
451};
452
453static GenOpFunc *gen_op_addl_A0_reg_sN[4][CPU_NB_REGS] = {
454 [0] = {
455 DEF_REGS(gen_op_addl_A0_, )
456 },
457 [1] = {
458 DEF_REGS(gen_op_addl_A0_, _s1)
459 },
460 [2] = {
461 DEF_REGS(gen_op_addl_A0_, _s2)
462 },
463 [3] = {
464 DEF_REGS(gen_op_addl_A0_, _s3)
465 },
466};
467
468#ifdef TARGET_X86_64
469static GenOpFunc *gen_op_movq_A0_reg[CPU_NB_REGS] = {
470 DEF_REGS(gen_op_movq_A0_, )
471};
472
473static GenOpFunc *gen_op_addq_A0_reg_sN[4][CPU_NB_REGS] = {
474 [0] = {
475 DEF_REGS(gen_op_addq_A0_, )
476 },
477 [1] = {
478 DEF_REGS(gen_op_addq_A0_, _s1)
479 },
480 [2] = {
481 DEF_REGS(gen_op_addq_A0_, _s2)
482 },
483 [3] = {
484 DEF_REGS(gen_op_addq_A0_, _s3)
485 },
486};
487#endif
488
489static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
490 [0] = {
491 DEF_REGS(gen_op_cmovw_, _T1_T0)
492 },
493 [1] = {
494 DEF_REGS(gen_op_cmovl_, _T1_T0)
495 },
496#ifdef TARGET_X86_64
497 [2] = {
498 DEF_REGS(gen_op_cmovq_, _T1_T0)
499 },
500#endif
501};
502
/* Handlers for the plain logic ops, indexed by the OP_* enum above.
   The NULL slots (add/adc/sbb/sub/cmp) are dispatched through dedicated
   paths in gen_op() instead. */
static GenOpFunc *gen_op_arith_T0_T1_cc[8] = {
    NULL,              /* OP_ADDL */
    gen_op_orl_T0_T1,  /* OP_ORL */
    NULL,              /* OP_ADCL */
    NULL,              /* OP_SBBL */
    gen_op_andl_T0_T1, /* OP_ANDL */
    NULL,              /* OP_SUBL */
    gen_op_xorl_T0_T1, /* OP_XORL */
    NULL,              /* OP_CMPL */
};
513
514#define DEF_ARITHC(SUFFIX)\
515 {\
516 gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
517 gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
518 },\
519 {\
520 gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
521 gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
522 },\
523 {\
524 gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
525 gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
526 },\
527 {\
528 X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
529 X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
530 },
531
532static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
533 DEF_ARITHC( )
534};
535
536static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
537 DEF_ARITHC(_raw)
538#ifndef CONFIG_USER_ONLY
539 DEF_ARITHC(_kernel)
540 DEF_ARITHC(_user)
541#endif
542};
543
/* Byte-sized CC_OP_* base value for each arith op, indexed by the
   OP_ADDL..OP_CMPL enum above. */
static const int cc_op_arithb[8] = {
    CC_OP_ADDB,   /* OP_ADDL */
    CC_OP_LOGICB, /* OP_ORL */
    CC_OP_ADDB,   /* OP_ADCL */
    CC_OP_SUBB,   /* OP_SBBL */
    CC_OP_LOGICB, /* OP_ANDL */
    CC_OP_SUBB,   /* OP_SUBL */
    CC_OP_LOGICB, /* OP_XORL */
    CC_OP_SUBB,   /* OP_CMPL */
};
554
555#define DEF_CMPXCHG(SUFFIX)\
556 gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
557 gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
558 gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
559 X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),
560
561static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
562 DEF_CMPXCHG( )
563};
564
565static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
566 DEF_CMPXCHG(_raw)
567#ifndef CONFIG_USER_ONLY
568 DEF_CMPXCHG(_kernel)
569 DEF_CMPXCHG(_user)
570#endif
571};
572
573#define DEF_SHIFT(SUFFIX)\
574 {\
575 gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
576 gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
577 gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
578 gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
579 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
580 gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
581 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
582 gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
583 },\
584 {\
585 gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
586 gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
587 gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
588 gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
589 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
590 gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
591 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
592 gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
593 },\
594 {\
595 gen_op_roll ## SUFFIX ## _T0_T1_cc,\
596 gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
597 gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
598 gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
599 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
600 gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
601 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
602 gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
603 },\
604 {\
605 X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
606 X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
607 X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
608 X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
609 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
610 X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
611 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
612 X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
613 },
614
615static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
616 DEF_SHIFT( )
617};
618
619static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
620 DEF_SHIFT(_raw)
621#ifndef CONFIG_USER_ONLY
622 DEF_SHIFT(_kernel)
623 DEF_SHIFT(_user)
624#endif
625};
626
627#define DEF_SHIFTD(SUFFIX, op)\
628 {\
629 NULL,\
630 NULL,\
631 },\
632 {\
633 gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
634 gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
635 },\
636 {\
637 gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
638 gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
639 },\
640 {\
641X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
642 gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
643 },
644
645static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
646 DEF_SHIFTD(, im)
647};
648
649static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
650 DEF_SHIFTD(, ECX)
651};
652
653static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
654 DEF_SHIFTD(_raw, im)
655#ifndef CONFIG_USER_ONLY
656 DEF_SHIFTD(_kernel, im)
657 DEF_SHIFTD(_user, im)
658#endif
659};
660
661static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
662 DEF_SHIFTD(_raw, ECX)
663#ifndef CONFIG_USER_ONLY
664 DEF_SHIFTD(_kernel, ECX)
665 DEF_SHIFTD(_user, ECX)
666#endif
667};
668
669static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
670 [0] = {
671 gen_op_btw_T0_T1_cc,
672 gen_op_btsw_T0_T1_cc,
673 gen_op_btrw_T0_T1_cc,
674 gen_op_btcw_T0_T1_cc,
675 },
676 [1] = {
677 gen_op_btl_T0_T1_cc,
678 gen_op_btsl_T0_T1_cc,
679 gen_op_btrl_T0_T1_cc,
680 gen_op_btcl_T0_T1_cc,
681 },
682#ifdef TARGET_X86_64
683 [2] = {
684 gen_op_btq_T0_T1_cc,
685 gen_op_btsq_T0_T1_cc,
686 gen_op_btrq_T0_T1_cc,
687 gen_op_btcq_T0_T1_cc,
688 },
689#endif
690};
691
692static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
693 gen_op_add_bitw_A0_T1,
694 gen_op_add_bitl_A0_T1,
695 X86_64_ONLY(gen_op_add_bitq_A0_T1),
696};
697
698static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
699 [0] = {
700 gen_op_bsfw_T0_cc,
701 gen_op_bsrw_T0_cc,
702 },
703 [1] = {
704 gen_op_bsfl_T0_cc,
705 gen_op_bsrl_T0_cc,
706 },
707#ifdef TARGET_X86_64
708 [2] = {
709 gen_op_bsfq_T0_cc,
710 gen_op_bsrq_T0_cc,
711 },
712#endif
713};
714
715static GenOpFunc *gen_op_lds_T0_A0[3 * 4] = {
716 gen_op_ldsb_raw_T0_A0,
717 gen_op_ldsw_raw_T0_A0,
718 X86_64_ONLY(gen_op_ldsl_raw_T0_A0),
719 NULL,
720#ifndef CONFIG_USER_ONLY
721 gen_op_ldsb_kernel_T0_A0,
722 gen_op_ldsw_kernel_T0_A0,
723 X86_64_ONLY(gen_op_ldsl_kernel_T0_A0),
724 NULL,
725
726 gen_op_ldsb_user_T0_A0,
727 gen_op_ldsw_user_T0_A0,
728 X86_64_ONLY(gen_op_ldsl_user_T0_A0),
729 NULL,
730#endif
731};
732
733static GenOpFunc *gen_op_ldu_T0_A0[3 * 4] = {
734 gen_op_ldub_raw_T0_A0,
735 gen_op_lduw_raw_T0_A0,
736 NULL,
737 NULL,
738
739#ifndef CONFIG_USER_ONLY
740 gen_op_ldub_kernel_T0_A0,
741 gen_op_lduw_kernel_T0_A0,
742 NULL,
743 NULL,
744
745 gen_op_ldub_user_T0_A0,
746 gen_op_lduw_user_T0_A0,
747 NULL,
748 NULL,
749#endif
750};
751
752/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
753static GenOpFunc *gen_op_ld_T0_A0[3 * 4] = {
754 gen_op_ldub_raw_T0_A0,
755 gen_op_lduw_raw_T0_A0,
756 gen_op_ldl_raw_T0_A0,
757 X86_64_ONLY(gen_op_ldq_raw_T0_A0),
758
759#ifndef CONFIG_USER_ONLY
760 gen_op_ldub_kernel_T0_A0,
761 gen_op_lduw_kernel_T0_A0,
762 gen_op_ldl_kernel_T0_A0,
763 X86_64_ONLY(gen_op_ldq_kernel_T0_A0),
764
765 gen_op_ldub_user_T0_A0,
766 gen_op_lduw_user_T0_A0,
767 gen_op_ldl_user_T0_A0,
768 X86_64_ONLY(gen_op_ldq_user_T0_A0),
769#endif
770};
771
772static GenOpFunc *gen_op_ld_T1_A0[3 * 4] = {
773 gen_op_ldub_raw_T1_A0,
774 gen_op_lduw_raw_T1_A0,
775 gen_op_ldl_raw_T1_A0,
776 X86_64_ONLY(gen_op_ldq_raw_T1_A0),
777
778#ifndef CONFIG_USER_ONLY
779 gen_op_ldub_kernel_T1_A0,
780 gen_op_lduw_kernel_T1_A0,
781 gen_op_ldl_kernel_T1_A0,
782 X86_64_ONLY(gen_op_ldq_kernel_T1_A0),
783
784 gen_op_ldub_user_T1_A0,
785 gen_op_lduw_user_T1_A0,
786 gen_op_ldl_user_T1_A0,
787 X86_64_ONLY(gen_op_ldq_user_T1_A0),
788#endif
789};
790
791static GenOpFunc *gen_op_st_T0_A0[3 * 4] = {
792 gen_op_stb_raw_T0_A0,
793 gen_op_stw_raw_T0_A0,
794 gen_op_stl_raw_T0_A0,
795 X86_64_ONLY(gen_op_stq_raw_T0_A0),
796
797#ifndef CONFIG_USER_ONLY
798 gen_op_stb_kernel_T0_A0,
799 gen_op_stw_kernel_T0_A0,
800 gen_op_stl_kernel_T0_A0,
801 X86_64_ONLY(gen_op_stq_kernel_T0_A0),
802
803 gen_op_stb_user_T0_A0,
804 gen_op_stw_user_T0_A0,
805 gen_op_stl_user_T0_A0,
806 X86_64_ONLY(gen_op_stq_user_T0_A0),
807#endif
808};
809
810static GenOpFunc *gen_op_st_T1_A0[3 * 4] = {
811 NULL,
812 gen_op_stw_raw_T1_A0,
813 gen_op_stl_raw_T1_A0,
814 X86_64_ONLY(gen_op_stq_raw_T1_A0),
815
816#ifndef CONFIG_USER_ONLY
817 NULL,
818 gen_op_stw_kernel_T1_A0,
819 gen_op_stl_kernel_T1_A0,
820 X86_64_ONLY(gen_op_stq_kernel_T1_A0),
821
822 NULL,
823 gen_op_stw_user_T1_A0,
824 gen_op_stl_user_T1_A0,
825 X86_64_ONLY(gen_op_stq_user_T1_A0),
826#endif
827};
828
#ifdef VBOX
/* Emit a check for pending external (host) events into the translated
   code stream; simply wraps the corresponding micro-op.
   Fixed: declared with a proper (void) prototype instead of the old-style
   empty parameter list, which declares an unprototyped function in C. */
static void gen_check_external_event(void)
{
    gen_op_check_external_event();
}
#endif /* VBOX */
835
/* Emit code that loads the given pc (eip) immediate into the CPU state,
   picking the narrowest immediate form that can represent it. */
static inline void gen_jmp_im(target_ulong pc)
{
#ifdef VBOX
    /* VBox: emit the external-event check before eip is updated */
    gen_check_external_event();
#endif /* VBOX */
#ifdef TARGET_X86_64
    if (pc == (uint32_t)pc) {
        /* fits zero-extended in 32 bits */
        gen_op_movl_eip_im(pc);
    } else if (pc == (int32_t)pc) {
        /* fits as a sign-extended 32-bit immediate */
        gen_op_movq_eip_im(pc);
    } else {
        /* full 64-bit immediate, passed as two 32-bit halves */
        gen_op_movq_eip_im64(pc >> 32, pc);
    }
#else
    gen_op_movl_eip_im(pc);
#endif
}
853
/* Load A0 with the segment-adjusted string source address (default
   DS:[(E)SI]), honoring segment overrides and the current address size. */
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64 bit address: only an explicit override contributes a base */
        if (override >= 0) {
            gen_op_movq_A0_seg(offsetof(CPUX86State,segs[override].base));
            gen_op_addq_A0_reg_sN[0][R_ESI]();
        } else {
            gen_op_movq_A0_reg[R_ESI]();
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;   /* non-zero seg bases in use: add DS base */
        if (override >= 0) {
            gen_op_movl_A0_seg(offsetof(CPUX86State,segs[override].base));
            gen_op_addl_A0_reg_sN[0][R_ESI]();
        } else {
            gen_op_movl_A0_reg[R_ESI]();
        }
    } else {
        /* 16 address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg[R_ESI]();
        gen_op_andl_A0_ffff();   /* 16-bit wraparound of SI */
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
    }
}
888
/* Load A0 with the string destination address ES:[(E)DI]; the ES
   segment cannot be overridden for string destinations. */
static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64 bit address: RDI used flat */
        gen_op_movq_A0_reg[R_EDI]();
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
            gen_op_addl_A0_reg_sN[0][R_EDI]();
        } else {
            gen_op_movl_A0_reg[R_EDI]();
        }
    } else {
        /* 16 bit address: mask DI and always add the ES base */
        gen_op_movl_A0_reg[R_EDI]();
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
    }
}
909
910static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
911 gen_op_movl_T0_Dshiftb,
912 gen_op_movl_T0_Dshiftw,
913 gen_op_movl_T0_Dshiftl,
914 X86_64_ONLY(gen_op_movl_T0_Dshiftq),
915};
916
917static GenOpFunc1 *gen_op_jnz_ecx[3] = {
918 gen_op_jnz_ecxw,
919 gen_op_jnz_ecxl,
920 X86_64_ONLY(gen_op_jnz_ecxq),
921};
922
923static GenOpFunc1 *gen_op_jz_ecx[3] = {
924 gen_op_jz_ecxw,
925 gen_op_jz_ecxl,
926 X86_64_ONLY(gen_op_jz_ecxq),
927};
928
929static GenOpFunc *gen_op_dec_ECX[3] = {
930 gen_op_decw_ECX,
931 gen_op_decl_ECX,
932 X86_64_ONLY(gen_op_decq_ECX),
933};
934
935static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
936 {
937 gen_op_jnz_subb,
938 gen_op_jnz_subw,
939 gen_op_jnz_subl,
940 X86_64_ONLY(gen_op_jnz_subq),
941 },
942 {
943 gen_op_jz_subb,
944 gen_op_jz_subw,
945 gen_op_jz_subl,
946 X86_64_ONLY(gen_op_jz_subq),
947 },
948};
949
950static GenOpFunc *gen_op_in_DX_T0[3] = {
951 gen_op_inb_DX_T0,
952 gen_op_inw_DX_T0,
953 gen_op_inl_DX_T0,
954};
955
956static GenOpFunc *gen_op_out_DX_T0[3] = {
957 gen_op_outb_DX_T0,
958 gen_op_outw_DX_T0,
959 gen_op_outl_DX_T0,
960};
961
962static GenOpFunc *gen_op_in[3] = {
963 gen_op_inb_T0_T1,
964 gen_op_inw_T0_T1,
965 gen_op_inl_T0_T1,
966};
967
968static GenOpFunc *gen_op_out[3] = {
969 gen_op_outb_T0_T1,
970 gen_op_outw_T0_T1,
971 gen_op_outl_T0_T1,
972};
973
974static GenOpFunc *gen_check_io_T0[3] = {
975 gen_op_check_iob_T0,
976 gen_op_check_iow_T0,
977 gen_op_check_iol_T0,
978};
979
980static GenOpFunc *gen_check_io_DX[3] = {
981 gen_op_check_iob_DX,
982 gen_op_check_iow_DX,
983 gen_op_check_iol_DX,
984};
985
986static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
987{
988 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
989 if (s->cc_op != CC_OP_DYNAMIC)
990 gen_op_set_cc_op(s->cc_op);
991 gen_jmp_im(cur_eip);
992 if (use_dx)
993 gen_check_io_DX[ot]();
994 else
995 gen_check_io_T0[ot]();
996 }
997}
998
/* Emit one MOVS iteration: load from the SI-side address, store to the
   DI-side address, then advance both index registers by the element
   step (presumably +/- size per EFLAGS.DF — see Dshift ops). */
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0[ot + s->mem_index]();
    /* T0 <- signed per-element step */
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
1020
1021static inline void gen_update_cc_op(DisasContext *s)
1022{
1023 if (s->cc_op != CC_OP_DYNAMIC) {
1024 gen_op_set_cc_op(s->cc_op);
1025 s->cc_op = CC_OP_DYNAMIC;
1026 }
1027}
1028
1029/* XXX: does not work with gdbstub "ice" single step - not a
1030 serious problem */
1031static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
1032{
1033 int l1, l2;
1034
1035 l1 = gen_new_label();
1036 l2 = gen_new_label();
1037 gen_op_jnz_ecx[s->aflag](l1);
1038 gen_set_label(l2);
1039 gen_jmp_tb(s, next_eip, 1);
1040 gen_set_label(l1);
1041 return l2;
1042}
1043
/* Emit one STOS iteration: store the accumulator (from EAX) at the
   DI-side address, then advance EDI by the element step. */
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0[ot + s->mem_index]();
    /* T0 <- signed per-element step (presumably per EFLAGS.DF) */
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1061
/* Emit one LODS iteration: load the element at the SI-side address
   into the accumulator (EAX), then advance ESI by the element step. */
static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_op_mov_reg_T0[ot][R_EAX]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1079
/* Emit one SCAS iteration: compare the accumulator (EAX in T0) with
   the element at the DI-side address (T1), set flags, advance EDI. */
static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0[ot + s->mem_index]();
    gen_op_cmpl_T0_T1_cc();   /* flags only; no value written back */
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1098
/* Emit one CMPS iteration: load both the SI-side (T0) and DI-side (T1)
   elements, compare them for flags, then advance ESI and EDI. */
static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0[ot + s->mem_index]();
    gen_op_cmpl_T0_T1_cc();   /* flags only */
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
1121
/* Emit one INS iteration: read from port DX into memory at the DI-side
   address, then advance EDI. */
static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    /* a zero is stored first — presumably so a write page fault is
       raised before the port read has side effects; TODO confirm */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_in_DX_T0[ot]();
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1141
/* Emit one OUTS iteration: load the element at the SI-side address and
   write it to port DX, then advance ESI. */
static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_op_out_DX_T0[ot]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1159
1160/* same method as Valgrind : we generate jumps to current or next
1161 instruction */
1162#define GEN_REPZ(op) \
1163static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1164 target_ulong cur_eip, target_ulong next_eip) \
1165{ \
1166 int l2;\
1167 gen_update_cc_op(s); \
1168 l2 = gen_jz_ecx_string(s, next_eip); \
1169 gen_ ## op(s, ot); \
1170 gen_op_dec_ECX[s->aflag](); \
1171 /* a loop would cause two single step exceptions if ECX = 1 \
1172 before rep string_insn */ \
1173 if (!s->jmp_opt) \
1174 gen_op_jz_ecx[s->aflag](l2); \
1175 gen_jmp(s, cur_eip); \
1176}
1177
1178#define GEN_REPZ2(op) \
1179static inline void gen_repz_ ## op(DisasContext *s, int ot, \
1180 target_ulong cur_eip, \
1181 target_ulong next_eip, \
1182 int nz) \
1183{ \
1184 int l2;\
1185 gen_update_cc_op(s); \
1186 l2 = gen_jz_ecx_string(s, next_eip); \
1187 gen_ ## op(s, ot); \
1188 gen_op_dec_ECX[s->aflag](); \
1189 gen_op_set_cc_op(CC_OP_SUBB + ot); \
1190 gen_op_string_jnz_sub[nz][ot](l2);\
1191 if (!s->jmp_opt) \
1192 gen_op_jz_ecx[s->aflag](l2); \
1193 gen_jmp(s, cur_eip); \
1194}
1195
1196GEN_REPZ(movs)
1197GEN_REPZ(stos)
1198GEN_REPZ(lods)
1199GEN_REPZ(ins)
1200GEN_REPZ(outs)
1201GEN_REPZ2(scas)
1202GEN_REPZ2(cmps)
1203
1204enum {
1205 JCC_O,
1206 JCC_B,
1207 JCC_Z,
1208 JCC_BE,
1209 JCC_S,
1210 JCC_P,
1211 JCC_L,
1212 JCC_LE,
1213};
1214
1215static GenOpFunc1 *gen_jcc_sub[4][8] = {
1216 [OT_BYTE] = {
1217 NULL,
1218 gen_op_jb_subb,
1219 gen_op_jz_subb,
1220 gen_op_jbe_subb,
1221 gen_op_js_subb,
1222 NULL,
1223 gen_op_jl_subb,
1224 gen_op_jle_subb,
1225 },
1226 [OT_WORD] = {
1227 NULL,
1228 gen_op_jb_subw,
1229 gen_op_jz_subw,
1230 gen_op_jbe_subw,
1231 gen_op_js_subw,
1232 NULL,
1233 gen_op_jl_subw,
1234 gen_op_jle_subw,
1235 },
1236 [OT_LONG] = {
1237 NULL,
1238 gen_op_jb_subl,
1239 gen_op_jz_subl,
1240 gen_op_jbe_subl,
1241 gen_op_js_subl,
1242 NULL,
1243 gen_op_jl_subl,
1244 gen_op_jle_subl,
1245 },
1246#ifdef TARGET_X86_64
1247 [OT_QUAD] = {
1248 NULL,
1249 BUGGY_64(gen_op_jb_subq),
1250 gen_op_jz_subq,
1251 BUGGY_64(gen_op_jbe_subq),
1252 gen_op_js_subq,
1253 NULL,
1254 BUGGY_64(gen_op_jl_subq),
1255 BUGGY_64(gen_op_jle_subq),
1256 },
1257#endif
1258};
1259static GenOpFunc1 *gen_op_loop[3][4] = {
1260 [0] = {
1261 gen_op_loopnzw,
1262 gen_op_loopzw,
1263 gen_op_jnz_ecxw,
1264 },
1265 [1] = {
1266 gen_op_loopnzl,
1267 gen_op_loopzl,
1268 gen_op_jnz_ecxl,
1269 },
1270#ifdef TARGET_X86_64
1271 [2] = {
1272 gen_op_loopnzq,
1273 gen_op_loopzq,
1274 gen_op_jnz_ecxq,
1275 },
1276#endif
1277};
1278
1279static GenOpFunc *gen_setcc_slow[8] = {
1280 gen_op_seto_T0_cc,
1281 gen_op_setb_T0_cc,
1282 gen_op_setz_T0_cc,
1283 gen_op_setbe_T0_cc,
1284 gen_op_sets_T0_cc,
1285 gen_op_setp_T0_cc,
1286 gen_op_setl_T0_cc,
1287 gen_op_setle_T0_cc,
1288};
1289
1290static GenOpFunc *gen_setcc_sub[4][8] = {
1291 [OT_BYTE] = {
1292 NULL,
1293 gen_op_setb_T0_subb,
1294 gen_op_setz_T0_subb,
1295 gen_op_setbe_T0_subb,
1296 gen_op_sets_T0_subb,
1297 NULL,
1298 gen_op_setl_T0_subb,
1299 gen_op_setle_T0_subb,
1300 },
1301 [OT_WORD] = {
1302 NULL,
1303 gen_op_setb_T0_subw,
1304 gen_op_setz_T0_subw,
1305 gen_op_setbe_T0_subw,
1306 gen_op_sets_T0_subw,
1307 NULL,
1308 gen_op_setl_T0_subw,
1309 gen_op_setle_T0_subw,
1310 },
1311 [OT_LONG] = {
1312 NULL,
1313 gen_op_setb_T0_subl,
1314 gen_op_setz_T0_subl,
1315 gen_op_setbe_T0_subl,
1316 gen_op_sets_T0_subl,
1317 NULL,
1318 gen_op_setl_T0_subl,
1319 gen_op_setle_T0_subl,
1320 },
1321#ifdef TARGET_X86_64
1322 [OT_QUAD] = {
1323 NULL,
1324 gen_op_setb_T0_subq,
1325 gen_op_setz_T0_subq,
1326 gen_op_setbe_T0_subq,
1327 gen_op_sets_T0_subq,
1328 NULL,
1329 gen_op_setl_T0_subq,
1330 gen_op_setle_T0_subq,
1331 },
1332#endif
1333};
1334
/* FPU ops ST0 <- ST0 op FT0, indexed by the 3-bit /reg opcode field.
   Slots 2 and 3 share the fcom handler (FCOM vs FCOMP — the pop is
   presumably emitted by the caller; TODO confirm). */
static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
    gen_op_fadd_ST0_FT0,
    gen_op_fmul_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fsub_ST0_FT0,
    gen_op_fsubr_ST0_FT0,
    gen_op_fdiv_ST0_FT0,
    gen_op_fdivr_ST0_FT0,
};
1345
/* NOTE the exception in "r" op ordering */
/* FPU ops ST(n) <- ST(n) op ST0; sub/div are swapped with their
   reversed forms relative to the table above. */
static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
    gen_op_fadd_STN_ST0,
    gen_op_fmul_STN_ST0,
    NULL,                   /* fcom has no ST(n)-destination form */
    NULL,
    gen_op_fsubr_STN_ST0,   /* note: reversed vs slot 4 above */
    gen_op_fsub_STN_ST0,
    gen_op_fdivr_STN_ST0,   /* note: reversed vs slot 6 above */
    gen_op_fdiv_STN_ST0,
};
1357
/* if d == OR_TMP0, it means memory operand (address in A0) */
/* Emit one ALU operation `op` of size `ot` with destination `d`;
   the second operand is expected in T1 (all helpers are *_T0_T1_*). */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    GenOpFunc *gen_update_cc;

    /* fetch the destination operand into T0 */
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg[ot][0][d]();
    } else {
        gen_op_ld_T0_A0[ot + s1->mem_index]();
    }
    switch(op) {
    case OP_ADCL:
    case OP_SBBL:
        /* adc/sbb consume the current carry: flags must be materialized
           before the op, and become dynamic afterwards */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        if (d != OR_TMP0) {
            gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
            gen_op_mov_reg_T0[ot][d]();
        } else {
            gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
        }
        s1->cc_op = CC_OP_DYNAMIC;
        goto the_end;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        s1->cc_op = CC_OP_ADDB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    case OP_SUBL:
        gen_op_subl_T0_T1();
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    default:
    case OP_ANDL:
    case OP_ORL:
    case OP_XORL:
        /* logic ops dispatch through the table; NULL slots are the
           ops handled by the cases above */
        gen_op_arith_T0_T1_cc[op]();
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_CMPL:
        /* cmp writes flags only, no result */
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = NULL;
        break;
    }
    if (op != OP_CMPL) {
        if (d != OR_TMP0)
            gen_op_mov_reg_T0[ot][d]();
        else
            gen_op_st_T0_A0[ot + s1->mem_index]();
    }
    /* the flags update must happen after the memory write (precise
       exception support) */
    if (gen_update_cc)
        gen_update_cc();
 the_end: ;
}
1417
1418/* if d == OR_TMP0, it means memory operand (address in A0) */
1419static void gen_inc(DisasContext *s1, int ot, int d, int c)
1420{
1421 if (d != OR_TMP0)
1422 gen_op_mov_TN_reg[ot][0][d]();
1423 else
1424 gen_op_ld_T0_A0[ot + s1->mem_index]();
1425 if (s1->cc_op != CC_OP_DYNAMIC)
1426 gen_op_set_cc_op(s1->cc_op);
1427 if (c > 0) {
1428 gen_op_incl_T0();
1429 s1->cc_op = CC_OP_INCB + ot;
1430 } else {
1431 gen_op_decl_T0();
1432 s1->cc_op = CC_OP_DECB + ot;
1433 }
1434 if (d != OR_TMP0)
1435 gen_op_mov_reg_T0[ot][d]();
1436 else
1437 gen_op_st_T0_A0[ot + s1->mem_index]();
1438 gen_op_update_inc_cc();
1439}
1440
1441static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
1442{
1443 if (d != OR_TMP0)
1444 gen_op_mov_TN_reg[ot][0][d]();
1445 else
1446 gen_op_ld_T0_A0[ot + s1->mem_index]();
1447 if (s != OR_TMP1)
1448 gen_op_mov_TN_reg[ot][1][s]();
1449 /* for zero counts, flags are not updated, so must do it dynamically */
1450 if (s1->cc_op != CC_OP_DYNAMIC)
1451 gen_op_set_cc_op(s1->cc_op);
1452
1453 if (d != OR_TMP0)
1454 gen_op_shift_T0_T1_cc[ot][op]();
1455 else
1456 gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
1457 if (d != OR_TMP0)
1458 gen_op_mov_reg_T0[ot][d]();
1459 s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1460}
1461
/* Shift by an immediate count 'c': load the constant into T1 and reuse
   the variable-count shift generator. */
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    /* currently not optimized */
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);
}
1468
/* Decode the modrm/SIB/displacement bytes of a memory operand and emit
   the ops computing its effective address into A0, including any
   segment base.  Advances s->pc past the consumed bytes.  The outputs
   *reg_ptr/*offset_ptr are always OR_A0/0 (kept for interface
   compatibility with callers). */
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64 bit addressing */
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            /* base 4 means an SIB byte follows */
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                /* no base register, 32 bit displacement */
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    /* RIP-relative addressing: displacement is taken
                       from the end of the instruction */
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg[base]();
                if (disp != 0) {
                    if ((int32_t)disp == disp)
                        gen_op_addq_A0_im(disp);
                    else
                        gen_op_addq_A0_im64(disp >> 32, disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg[base]();
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
            /* displacement only */
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                if ((int32_t)disp == disp)
                    gen_op_movq_A0_im(disp);
                else
                    gen_op_movq_A0_im64(disp >> 32, disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN[scale][index]();
            } else
#endif
            {
                gen_op_addl_A0_reg_sN[scale][index]();
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                /* default segment: SS for EBP/ESP bases, DS otherwise */
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
            } else
#endif
            {
                gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
            }
        }
    } else {
        /* 16 bit addressing */
        switch (mod) {
        case 0:
            if (rm == 6) {
                /* displacement-only form */
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        /* the eight 16 bit base/index register combinations */
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg[R_EBX]();
            gen_op_addl_A0_reg_sN[0][R_ESI]();
            break;
        case 1:
            gen_op_movl_A0_reg[R_EBX]();
            gen_op_addl_A0_reg_sN[0][R_EDI]();
            break;
        case 2:
            gen_op_movl_A0_reg[R_EBP]();
            gen_op_addl_A0_reg_sN[0][R_ESI]();
            break;
        case 3:
            gen_op_movl_A0_reg[R_EBP]();
            gen_op_addl_A0_reg_sN[0][R_EDI]();
            break;
        case 4:
            gen_op_movl_A0_reg[R_ESI]();
            break;
        case 5:
            gen_op_movl_A0_reg[R_EDI]();
            break;
        case 6:
            gen_op_movl_A0_reg[R_EBP]();
            break;
        default:
        case 7:
            gen_op_movl_A0_reg[R_EBX]();
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        gen_op_andl_A0_ffff();
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                /* BP-based forms default to SS */
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
1658
1659static void gen_nop_modrm(DisasContext *s, int modrm)
1660{
1661 int mod, rm, base, code;
1662
1663 mod = (modrm >> 6) & 3;
1664 if (mod == 3)
1665 return;
1666 rm = modrm & 7;
1667
1668 if (s->aflag) {
1669
1670 base = rm;
1671
1672 if (base == 4) {
1673 code = ldub_code(s->pc++);
1674 base = (code & 7);
1675 }
1676
1677 switch (mod) {
1678 case 0:
1679 if (base == 5) {
1680 s->pc += 4;
1681 }
1682 break;
1683 case 1:
1684 s->pc++;
1685 break;
1686 default:
1687 case 2:
1688 s->pc += 4;
1689 break;
1690 }
1691 } else {
1692 switch (mod) {
1693 case 0:
1694 if (rm == 6) {
1695 s->pc += 2;
1696 }
1697 break;
1698 case 1:
1699 s->pc++;
1700 break;
1701 default:
1702 case 2:
1703 s->pc += 2;
1704 break;
1705 }
1706 }
1707}
1708
1709/* used for LEA and MOV AX, mem */
1710static void gen_add_A0_ds_seg(DisasContext *s)
1711{
1712 int override, must_add_seg;
1713 must_add_seg = s->addseg;
1714 override = R_DS;
1715 if (s->override >= 0) {
1716 override = s->override;
1717 must_add_seg = 1;
1718 } else {
1719 override = R_DS;
1720 }
1721 if (must_add_seg) {
1722#ifdef TARGET_X86_64
1723 if (CODE64(s)) {
1724 gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
1725 } else
1726#endif
1727 {
1728 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1729 }
1730 }
1731}
1732
1733/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
1734 OR_TMP0 */
1735static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
1736{
1737 int mod, rm, opreg, disp;
1738
1739 mod = (modrm >> 6) & 3;
1740 rm = (modrm & 7) | REX_B(s);
1741 if (mod == 3) {
1742 if (is_store) {
1743 if (reg != OR_TMP0)
1744 gen_op_mov_TN_reg[ot][0][reg]();
1745 gen_op_mov_reg_T0[ot][rm]();
1746 } else {
1747 gen_op_mov_TN_reg[ot][0][rm]();
1748 if (reg != OR_TMP0)
1749 gen_op_mov_reg_T0[ot][reg]();
1750 }
1751 } else {
1752 gen_lea_modrm(s, modrm, &opreg, &disp);
1753 if (is_store) {
1754 if (reg != OR_TMP0)
1755 gen_op_mov_TN_reg[ot][0][reg]();
1756 gen_op_st_T0_A0[ot + s->mem_index]();
1757 } else {
1758 gen_op_ld_T0_A0[ot + s->mem_index]();
1759 if (reg != OR_TMP0)
1760 gen_op_mov_reg_T0[ot][reg]();
1761 }
1762 }
1763}
1764
1765static inline uint32_t insn_get(DisasContext *s, int ot)
1766{
1767 uint32_t ret;
1768
1769 switch(ot) {
1770 case OT_BYTE:
1771 ret = ldub_code(s->pc);
1772 s->pc++;
1773 break;
1774 case OT_WORD:
1775 ret = lduw_code(s->pc);
1776 s->pc += 2;
1777 break;
1778 default:
1779 case OT_LONG:
1780 ret = ldl_code(s->pc);
1781 s->pc += 4;
1782 break;
1783 }
1784 return ret;
1785}
1786
1787static inline int insn_const_size(unsigned int ot)
1788{
1789 if (ot <= OT_LONG)
1790 return 1 << ot;
1791 else
1792 return 4;
1793}
1794
/* Emit the exit of a TB ending in a direct jump to 'eip'.  If the
   target lies on one of the (up to two) pages this TB occupies, a
   patchable direct jump + exit_tb(tb | tb_num) is emitted so the TB
   can be chained; otherwise a plain end-of-block is generated. */
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        if (tb_num == 0)
            gen_op_goto_tb0(TBPARAM(tb));
        else
            gen_op_goto_tb1(TBPARAM(tb));
        gen_jmp_im(eip);
        /* return (tb + tb_num) so the caller knows which jump slot to
           patch for chaining */
        gen_op_movl_T0_im((long)tb + tb_num);
        gen_op_exit_tb();
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
1819
/* Emit a conditional jump for condition code 'b': taken target 'val',
   fall-through target 'next_eip'.  When TB chaining is allowed
   (s->jmp_opt) a fast flag test specialized on the current cc_op is
   used where possible; otherwise the condition is evaluated via the
   slow helpers and a generic end-of-block is emitted. */
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    TranslationBlock *tb;
    int inv, jcc_op;
    GenOpFunc1 *func;
    target_ulong tmp;
    int l1, l2;

    inv = b & 1;            /* odd condition codes are the negations */
    jcc_op = (b >> 1) & 7;

    if (s->jmp_opt) {
#ifdef VBOX
        gen_check_external_event(s);
#endif /* VBOX */
        switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
        case CC_OP_SUBB:
        case CC_OP_SUBW:
        case CC_OP_SUBL:
        case CC_OP_SUBQ:
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
            break;

        /* some jumps are easy to compute */
        case CC_OP_ADDB:
        case CC_OP_ADDW:
        case CC_OP_ADDL:
        case CC_OP_ADDQ:

        case CC_OP_ADCB:
        case CC_OP_ADCW:
        case CC_OP_ADCL:
        case CC_OP_ADCQ:

        case CC_OP_SBBB:
        case CC_OP_SBBW:
        case CC_OP_SBBL:
        case CC_OP_SBBQ:

        case CC_OP_LOGICB:
        case CC_OP_LOGICW:
        case CC_OP_LOGICL:
        case CC_OP_LOGICQ:

        case CC_OP_INCB:
        case CC_OP_INCW:
        case CC_OP_INCL:
        case CC_OP_INCQ:

        case CC_OP_DECB:
        case CC_OP_DECW:
        case CC_OP_DECL:
        case CC_OP_DECQ:

        case CC_OP_SHLB:
        case CC_OP_SHLW:
        case CC_OP_SHLL:
        case CC_OP_SHLQ:

        case CC_OP_SARB:
        case CC_OP_SARW:
        case CC_OP_SARL:
        case CC_OP_SARQ:
            /* for all these ops, ZF and SF can be tested directly on
               the stored result; '% 4' extracts the operand size from
               the cc_op group */
            switch(jcc_op) {
            case JCC_Z:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            case JCC_S:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            default:
                func = NULL;
                break;
            }
            break;
        default:
            func = NULL;
            break;
        }

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }

        if (!func) {
            /* no fast test available: evaluate into T0 and branch on it */
            gen_setcc_slow[jcc_op]();
            func = gen_op_jnz_T0_label;
        }

        if (inv) {
            /* negated condition: swap the two targets */
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        tb = s->tb;

        l1 = gen_new_label();
        func(l1);

        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);

        s->is_jmp = 3;
    } else {
        /* slow path without TB chaining */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_setcc_slow[jcc_op]();
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jnz_T0_label(l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);
        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
1951
/* Emit the evaluation of condition code 'b' into T0 (0 or 1), using a
   fast cc_op-specialized helper when one exists, else the slow path. */
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    inv = b & 1;            /* odd condition codes are the negations */
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
    /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;

    /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        /* ZF and SF can be tested directly on the stored result;
           '% 4' extracts the operand size from the cc_op group */
        switch(jcc_op) {
        case JCC_Z:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        case JCC_S:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        /* materialize the flags and use the generic evaluator */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    if (inv) {
        gen_op_xor_T0_1();
    }
}
2018
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* protected mode: the load may fault, so flags and EIP must be
           up to date before calling the helper */
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        gen_op_movl_seg_T0(seg_reg);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        /* real/vm86 mode: simple base update, cannot fault */
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
2041
2042static inline void gen_stack_update(DisasContext *s, int addend)
2043{
2044#ifdef TARGET_X86_64
2045 if (CODE64(s)) {
2046 if (addend == 8)
2047 gen_op_addq_ESP_8();
2048 else
2049 gen_op_addq_ESP_im(addend);
2050 } else
2051#endif
2052 if (s->ss32) {
2053 if (addend == 2)
2054 gen_op_addl_ESP_2();
2055 else if (addend == 4)
2056 gen_op_addl_ESP_4();
2057 else
2058 gen_op_addl_ESP_im(addend);
2059 } else {
2060 if (addend == 2)
2061 gen_op_addw_ESP_2();
2062 else if (addend == 4)
2063 gen_op_addw_ESP_4();
2064 else
2065 gen_op_addw_ESP_im(addend);
2066 }
2067}
2068
/* generate a push. It depends on ss32, addseg and dflag */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        /* 64 bit mode: push is 8 bytes (or 2 with 0x66 prefix) */
        gen_op_movq_A0_reg[R_ESP]();
        if (s->dflag) {
            gen_op_subq_A0_8();
            gen_op_st_T0_A0[OT_QUAD + s->mem_index]();
        } else {
            gen_op_subq_A0_2();
            gen_op_st_T0_A0[OT_WORD + s->mem_index]();
        }
        gen_op_movq_ESP_A0();
    } else
#endif
    {
        gen_op_movl_A0_reg[R_ESP]();
        if (!s->dflag)
            gen_op_subl_A0_2();
        else
            gen_op_subl_A0_4();
        /* T1 keeps the new (unsegmented) ESP value when A0 has to be
           biased by the SS base */
        if (s->ss32) {
            if (s->addseg) {
                gen_op_movl_T1_A0();
                gen_op_addl_A0_SS();
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_movl_T1_A0();
            gen_op_addl_A0_SS();
        }
        gen_op_st_T0_A0[s->dflag + 1 + s->mem_index]();
        /* write ESP back only after the store (precise exceptions) */
        if (s->ss32 && !s->addseg)
            gen_op_movl_ESP_A0();
        else
            gen_op_mov_reg_T1[s->ss32 + 1][R_ESP]();
    }
}
2108
2109/* generate a push. It depends on ss32, addseg and dflag */
2110/* slower version for T1, only used for call Ev */
2111static void gen_push_T1(DisasContext *s)
2112{
2113#ifdef TARGET_X86_64
2114 if (CODE64(s)) {
2115 gen_op_movq_A0_reg[R_ESP]();
2116 if (s->dflag) {
2117 gen_op_subq_A0_8();
2118 gen_op_st_T1_A0[OT_QUAD + s->mem_index]();
2119 } else {
2120 gen_op_subq_A0_2();
2121 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
2122 }
2123 gen_op_movq_ESP_A0();
2124 } else
2125#endif
2126 {
2127 gen_op_movl_A0_reg[R_ESP]();
2128 if (!s->dflag)
2129 gen_op_subl_A0_2();
2130 else
2131 gen_op_subl_A0_4();
2132 if (s->ss32) {
2133 if (s->addseg) {
2134 gen_op_addl_A0_SS();
2135 }
2136 } else {
2137 gen_op_andl_A0_ffff();
2138 gen_op_addl_A0_SS();
2139 }
2140 gen_op_st_T1_A0[s->dflag + 1 + s->mem_index]();
2141
2142 if (s->ss32 && !s->addseg)
2143 gen_op_movl_ESP_A0();
2144 else
2145 gen_stack_update(s, (-2) << s->dflag);
2146 }
2147}
2148
2149/* two step pop is necessary for precise exceptions */
2150static void gen_pop_T0(DisasContext *s)
2151{
2152#ifdef TARGET_X86_64
2153 if (CODE64(s)) {
2154 gen_op_movq_A0_reg[R_ESP]();
2155 gen_op_ld_T0_A0[(s->dflag ? OT_QUAD : OT_WORD) + s->mem_index]();
2156 } else
2157#endif
2158 {
2159 gen_op_movl_A0_reg[R_ESP]();
2160 if (s->ss32) {
2161 if (s->addseg)
2162 gen_op_addl_A0_SS();
2163 } else {
2164 gen_op_andl_A0_ffff();
2165 gen_op_addl_A0_SS();
2166 }
2167 gen_op_ld_T0_A0[s->dflag + 1 + s->mem_index]();
2168 }
2169}
2170
/* Second half of a pop: adjust the stack pointer by the popped size. */
static void gen_pop_update(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s) && s->dflag) {
        gen_stack_update(s, 8);
    } else
#endif
    {
        gen_stack_update(s, 2 << s->dflag);
    }
}
2182
/* Compute the linear address of the stack top into A0 (and the raw
   offset into T1), honouring ss32 and the SS base when addseg. */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_ESP();
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
}
2192
/* NOTE: wrap around in 16 bit not fully handled */
/* Emit PUSHA/PUSHAD: store EAX..EDI (in that architectural order,
   i.e. regs 7-i counting down) below ESP, then commit the new ESP
   kept in T1. */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_ESP();
    gen_op_addl_A0_im(-16 << s->dflag);     /* room for 8 regs */
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();                    /* T1 = final ESP */
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg[OT_LONG][0][7 - i]();
        gen_op_st_T0_A0[OT_WORD + s->dflag + s->mem_index]();
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
}
2211
/* NOTE: wrap around in 16 bit not fully handled */
/* Emit POPA/POPAD: reload EDI..EAX from the stack (skipping the saved
   ESP slot), then commit the new ESP kept in T1. */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_ESP();
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    gen_op_addl_T1_im(16 << s->dflag);      /* T1 = final ESP */
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0[OT_WORD + s->dflag + s->mem_index]();
            gen_op_mov_reg_T0[OT_WORD + s->dflag][7 - i]();
        }
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
}
2233
/* Emit the ENTER instruction: push EBP, optionally copy 'level' frame
   pointers (via the enter_level helpers), set EBP to the new frame and
   reserve 'esp_addend' bytes of locals. */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    level &= 0x1f;      /* hardware masks the nesting level to 5 bits */
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        gen_op_movl_A0_ESP();
        gen_op_addq_A0_im(-opsize);
        gen_op_movl_T1_A0();        /* T1 = new frame pointer */

        /* push bp */
        gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
        gen_op_st_T0_A0[ot + s->mem_index]();
        if (level) {
            gen_op_enter64_level(level, (ot == OT_QUAD));
        }
        gen_op_mov_reg_T1[ot][R_EBP]();
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1[OT_QUAD][R_ESP]();
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_ESP();
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        gen_op_movl_T1_A0();        /* T1 = new frame pointer */
        if (s->addseg)
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
        /* push bp */
        gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
        gen_op_st_T0_A0[ot + s->mem_index]();
        if (level) {
            gen_op_enter_level(level, s->dflag);
        }
        gen_op_mov_reg_T1[ot][R_EBP]();
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
    }
}
2281
/* Emit the raise of exception 'trapno' at 'cur_eip': flags and EIP are
   synchronized first so the guest state is precise, then the block ends. */
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_exception(trapno);
    s->is_jmp = 3;
}
2290
/* an interrupt is different from an exception because of the
   privilege checks */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    /* pass the instruction length so the helper can compute the return
       address to push */
    gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
    s->is_jmp = 3;
}
2302
/* Emit a debug trap at 'cur_eip' with synchronized flags/EIP, then end
   the translation block. */
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_debug();
    s->is_jmp = 3;
}
2311
/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        /* the one-instruction IRQ inhibition (mov ss/sti) expires here */
        gen_op_reset_inhibit_irq();
    }
    if (s->singlestep_enabled) {
        gen_op_debug();
    } else if (s->tf) {
        /* TF set: raise single step trap after the instruction */
        gen_op_raise_exception(EXCP01_SSTP);
    } else {
        /* T0 = 0 means "no chaining" to the exit_tb caller */
        gen_op_movl_T0_0();
        gen_op_exit_tb();
    }
    s->is_jmp = 3;
}
2331
/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
#ifdef VBOX
        /* VBox: poll for pending external events before a chained jump */
        gen_check_external_event(s);
#endif /* VBOX */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
2351
/* Convenience wrapper: jump to eip using TB chaining slot 0. */
static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
2356
/* Load the target_ulong immediate 'val' into T0, using the short
   32 bit form when the value fits in a sign-extended 32 bit word. */
static void gen_movtl_T0_im(target_ulong val)
{
#ifdef TARGET_X86_64
    if ((int32_t)val == val) {
        gen_op_movl_T0_im(val);
    } else {
        gen_op_movq_T0_im64(val >> 32, val);
    }
#else
    gen_op_movl_T0_im(val);
#endif
}
2369
/* Load the target_ulong immediate 'val' into T1, using the short
   32 bit form when the value fits in a sign-extended 32 bit word. */
static void gen_movtl_T1_im(target_ulong val)
{
#ifdef TARGET_X86_64
    if ((int32_t)val == val) {
        gen_op_movl_T1_im(val);
    } else {
        gen_op_movq_T1_im64(val >> 32, val);
    }
#else
    gen_op_movl_T1_im(val);
#endif
}
2382
/* Add the immediate 'val' to A0 with the pointer width implied by the
   current code size. */
static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}
2392
/* 64 bit load from [A0] into an env field, indexed by mem_index >> 2
   (raw/kernel/user access variants). */
static GenOpFunc1 *gen_ldq_env_A0[3] = {
    gen_op_ldq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldq_kernel_env_A0,
    gen_op_ldq_user_env_A0,
#endif
};
2400
/* 64 bit store of an env field to [A0], indexed by mem_index >> 2. */
static GenOpFunc1 *gen_stq_env_A0[3] = {
    gen_op_stq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_stq_kernel_env_A0,
    gen_op_stq_user_env_A0,
#endif
};
2408
/* 128 bit (octa) load from [A0] into an env field, indexed by
   mem_index >> 2. */
static GenOpFunc1 *gen_ldo_env_A0[3] = {
    gen_op_ldo_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldo_kernel_env_A0,
    gen_op_ldo_user_env_A0,
#endif
};
2416
/* 128 bit (octa) store of an env field to [A0], indexed by
   mem_index >> 2. */
static GenOpFunc1 *gen_sto_env_A0[3] = {
    gen_op_sto_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_sto_kernel_env_A0,
    gen_op_sto_user_env_A0,
#endif
};
2424
/* Sentinel for sse_op_table1 entries that need instruction-specific
   handling in gen_sse() instead of a plain helper call. */
#define SSE_SPECIAL ((GenOpFunc2 *)1)

/* Table entry with an MMX and an SSE (xmm) variant of the same op. */
#define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
/* Table entry with the ps/pd/ss/sd floating point variants. */
#define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
                     gen_op_ ## x ## ss, gen_op_ ## x ## sd, }
2430
/* Main SSE/MMX dispatch table, indexed by second opcode byte and by
   prefix: [0] = none (ps/MMX), [1] = 0x66 (pd/SSE2 int), [2] = 0xF3
   (ss), [3] = 0xF2 (sd).  SSE_SPECIAL entries are decoded by hand in
   gen_sse(); NULL/absent entries are invalid. */
static GenOpFunc2 *sse_op_table1[256][4] = {
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
    [0x14] = { gen_op_punpckldq_xmm, gen_op_punpcklqdq_xmm },
    [0x15] = { gen_op_punpckhdq_xmm, gen_op_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttss2si, cvttsd2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtss2si, cvtsd2si */
    [0x2e] = { gen_op_ucomiss, gen_op_ucomisd },
    [0x2f] = { gen_op_comiss, gen_op_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { gen_op_rsqrtps, NULL, gen_op_rsqrtss, NULL },
    [0x53] = { gen_op_rcpps, NULL, gen_op_rcpss, NULL },
    [0x54] = { gen_op_pand_xmm, gen_op_pand_xmm }, /* andps, andpd */
    [0x55] = { gen_op_pandn_xmm, gen_op_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { gen_op_por_xmm, gen_op_por_xmm }, /* orps, orpd */
    [0x57] = { gen_op_pxor_xmm, gen_op_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { gen_op_cvtps2pd, gen_op_cvtpd2ps,
               gen_op_cvtss2sd, gen_op_cvtsd2ss },
    [0x5b] = { gen_op_cvtdq2ps, gen_op_cvtps2dq, gen_op_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq),    /* cmpps etc., predicate via sse_op_table4 */
    [0xc6] = { (GenOpFunc2 *)gen_op_shufps, (GenOpFunc2 *)gen_op_shufpd },

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, gen_op_punpcklqdq_xmm },
    [0x6d] = { NULL, gen_op_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0x70] = { (GenOpFunc2 *)gen_op_pshufw_mmx,
               (GenOpFunc2 *)gen_op_pshufd_xmm,
               (GenOpFunc2 *)gen_op_pshufhw_xmm,
               (GenOpFunc2 *)gen_op_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_SPECIAL }, /* emms */
    [0x7c] = { NULL, gen_op_haddpd, NULL, gen_op_haddps },
    [0x7d] = { NULL, gen_op_hsubpd, NULL, gen_op_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, gen_op_addsubpd, NULL, gen_op_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, gen_op_cvttpd2dq, gen_op_cvtdq2pd, gen_op_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL },  /* movntq, movntdq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
2553
/* Immediate-count MMX/SSE shifts (0F 71/72/73 groups), indexed by
   8 * (opcode - 0x71) + modrm reg field; column picks mmx vs xmm. */
static GenOpFunc2 *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, gen_op_psrldq_xmm },     /* xmm only */
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, gen_op_pslldq_xmm },     /* xmm only */
};
2566
/* Scalar int<->float conversions in groups of four
   (ss, sd, 64-bit ss, 64-bit sd): cvtsi2*, cvtt*2si, cvt*2si.
   The q variants only exist on x86_64 builds. */
static GenOpFunc1 *sse_op_table3[4 * 3] = {
    gen_op_cvtsi2ss,
    gen_op_cvtsi2sd,
    X86_64_ONLY(gen_op_cvtsq2ss),
    X86_64_ONLY(gen_op_cvtsq2sd),

    gen_op_cvttss2si,
    gen_op_cvttsd2si,
    X86_64_ONLY(gen_op_cvttss2sq),
    X86_64_ONLY(gen_op_cvttsd2sq),

    gen_op_cvtss2si,
    gen_op_cvtsd2si,
    X86_64_ONLY(gen_op_cvtss2sq),
    X86_64_ONLY(gen_op_cvtsd2sq),
};
2583
/* cmpps/cmppd/cmpss/cmpsd helpers, indexed by the imm8 predicate. */
static GenOpFunc2 *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
2594
/* Translate one MMX/SSE/SSE2/SSE3 instruction.  'b' is the opcode byte
   (masked to 8 bits below), 'pc_start' the guest address of the insn and
   'rex_r' the REX.R extension for the modrm 'reg' field.  Generates #NM
   when CR0.TS is set, and #UD for illegal encodings, when CR0.EM is set,
   or when an XMM form is used with CR4.OSFXSR clear. */
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
{
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
    int modrm, mod, rm, reg, reg_addr, offset_addr;
    GenOpFunc2 *sse_op2;
    GenOpFunc3 *sse_op3;

    b &= 0xff;
    /* b1 selects the sse_op_table1 column: 0 = no prefix (MMX),
       1 = 0x66, 2 = 0xf3, 3 = 0xf2 */
    if (s->prefix & PREFIX_DATA)
        b1 = 1;
    else if (s->prefix & PREFIX_REPZ)
        b1 = 2;
    else if (s->prefix & PREFIX_REPNZ)
        b1 = 3;
    else
        b1 = 0;
    sse_op2 = sse_op_table1[b][b1];
    if (!sse_op2)
        goto illegal_op;
    /* 0x10..0x5f, 0xc2 (cmpps) and 0xc6 (shufps) are XMM even without
       a prefix; the remaining opcodes are MMX unless prefixed. */
    if (b <= 0x5f || b == 0xc6 || b == 0xc2) {
        is_xmm = 1;
    } else {
        if (b1 == 0) {
            /* MMX case */
            is_xmm = 0;
        } else {
            is_xmm = 1;
        }
    }
    /* simple MMX/SSE operation */
    if (s->flags & HF_TS_MASK) {
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        return;
    }
    if (s->flags & HF_EM_MASK) {
    illegal_op:
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
        return;
    }
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
        goto illegal_op;
    if (b == 0x77) {
        /* emms */
        gen_op_emms();
        return;
    }
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
       the static cpu state) */
    if (!is_xmm) {
        gen_op_enter_mmx();
    }

    modrm = ldub_code(s->pc++);
    reg = ((modrm >> 3) & 7);
    if (is_xmm)
        reg |= rex_r;   /* REX.R extends only XMM register numbers */
    mod = (modrm >> 6) & 3;
    if (sse_op2 == SSE_SPECIAL) {
        /* individually decoded insns: fold the prefix into the opcode
           so each prefix variant gets its own case label */
        b |= (b1 << 8);
        switch(b) {
        case 0x0e7: /* movntq */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
            break;
        case 0x1e7: /* movntdq */
        case 0x02b: /* movntps */
        case 0x12b: /* movntpd */
        case 0x3f0: /* lddqu */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            break;
        case 0x6e: /* movd mm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                gen_op_movq_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                gen_op_movl_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x16e: /* movd xmm, ea */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 0);
                gen_op_movq_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
            } else
#endif
            {
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
                gen_op_movl_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x6f: /* movq mm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[reg].mmx),
                            offsetof(CPUX86State,fpregs[rm].mmx));
            }
            break;
        case 0x010: /* movups */
        case 0x110: /* movupd */
        case 0x028: /* movaps */
        case 0x128: /* movapd */
        case 0x16f: /* movdqa xmm, ea */
        case 0x26f: /* movdqu xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
                            offsetof(CPUX86State,xmm_regs[rm]));
            }
            break;
        case 0x210: /* movss xmm, ea */
            if (mod != 3) {
                /* memory form zero-extends into the upper three dwords */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
            }
            break;
        case 0x310: /* movsd xmm, ea */
            if (mod != 3) {
                /* memory form zero-extends into the upper qword */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x012: /* movlps */
        case 0x112: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                /* movhlps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x212: /* movsldup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
            }
            /* duplicate the even dwords into the odd slots */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
            break;
        case 0x312: /* movddup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            break;
        case 0x016: /* movhps */
        case 0x116: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                /* movlhps */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x216: /* movshdup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
            }
            /* duplicate the odd dwords into the even slots */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            break;
        case 0x7e: /* movd ea, mm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_op_movq_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                gen_op_movl_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x17e: /* movd ea, xmm */
#ifdef TARGET_X86_64
            if (s->dflag == 2) {
                gen_op_movq_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
                gen_ldst_modrm(s, modrm, OT_QUAD, OR_TMP0, 1);
            } else
#endif
            {
                gen_op_movl_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
                gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            }
            break;
        case 0x27e: /* movq xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            /* upper qword of the destination is zeroed */
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x7f: /* movq ea, mm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
                            offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x011: /* movups */
        case 0x111: /* movupd */
        case 0x029: /* movaps */
        case 0x129: /* movapd */
        case 0x17f: /* movdqa ea, xmm */
        case 0x27f: /* movdqu ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
                            offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x211: /* movss ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_st_T0_A0[OT_LONG + s->mem_index]();
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            }
            break;
        case 0x311: /* movsd ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            }
            break;
        case 0x013: /* movlps */
        case 0x113: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x017: /* movhps */
        case 0x117: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x71: /* shift mm, im */
        case 0x72:
        case 0x73:
        case 0x171: /* shift xmm, im */
        case 0x172:
        case 0x173:
            /* build the shift count in {xmm,mmx}_t0 (upper dword zeroed),
               then apply the helper from sse_op_table2 */
            val = ldub_code(s->pc++);
            if (is_xmm) {
                gen_op_movl_T0_im(val);
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
                op1_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                gen_op_movl_T0_im(val);
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
                op1_offset = offsetof(CPUX86State,mmx_t0);
            }
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
            if (!sse_op2)
                goto illegal_op;
            if (is_xmm) {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            /* note: args reversed vs the generic path — here op2 is the
               register being shifted and op1 holds the count */
            sse_op2(op2_offset, op1_offset);
            break;
        case 0x050: /* movmskps */
            rm = (modrm & 7) | REX_B(s);
            gen_op_movmskps(offsetof(CPUX86State,xmm_regs[rm]));
            gen_op_mov_reg_T0[OT_LONG][reg]();
            break;
        case 0x150: /* movmskpd */
            rm = (modrm & 7) | REX_B(s);
            gen_op_movmskpd(offsetof(CPUX86State,xmm_regs[rm]));
            gen_op_mov_reg_T0[OT_LONG][reg]();
            break;
        case 0x02a: /* cvtpi2ps */
        case 0x12a: /* cvtpi2pd */
            gen_op_enter_mmx();
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            switch(b >> 8) {
            case 0x0:
                gen_op_cvtpi2ps(op1_offset, op2_offset);
                break;
            default:
            case 0x1:
                gen_op_cvtpi2pd(op1_offset, op2_offset);
                break;
            }
            break;
        case 0x22a: /* cvtsi2ss */
        case 0x32a: /* cvtsi2sd */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            /* (b >> 8) is 2 or 3 here, selecting the ss/sd column */
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)](op1_offset);
            break;
        case 0x02c: /* cvttps2pi */
        case 0x12c: /* cvttpd2pi */
        case 0x02d: /* cvtps2pi */
        case 0x12d: /* cvtpd2pi */
            gen_op_enter_mmx();
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
            switch(b) {
            case 0x02c:
                gen_op_cvttps2pi(op1_offset, op2_offset);
                break;
            case 0x12c:
                gen_op_cvttpd2pi(op1_offset, op2_offset);
                break;
            case 0x02d:
                gen_op_cvtps2pi(op1_offset, op2_offset);
                break;
            case 0x12d:
                gen_op_cvtpd2pi(op1_offset, op2_offset);
                break;
            }
            break;
        case 0x22c: /* cvttss2si */
        case 0x32c: /* cvttsd2si */
        case 0x22d: /* cvtss2si */
        case 0x32d: /* cvtsd2si */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if ((b >> 8) & 1) {
                    /* sd variant: 64-bit source */
                    gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
                } else {
                    gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
                    gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                }
                op2_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            /* +4 selects the truncating group, +8 the rounding group */
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
                          (b & 1) * 4](op2_offset);
            gen_op_mov_reg_T0[ot][reg]();
            break;
        case 0xc4: /* pinsrw */
        case 0x1c4:
            /* one immediate byte follows the modrm (needed for correct
               RIP-relative address fixups) */
            s->rip_offset = 1;
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                gen_op_pinsrw_xmm(offsetof(CPUX86State,xmm_regs[reg]), val);
            } else {
                val &= 3;
                gen_op_pinsrw_mmx(offsetof(CPUX86State,fpregs[reg].mmx), val);
            }
            break;
        case 0xc5: /* pextrw */
        case 0x1c5:
            if (mod != 3)
                goto illegal_op;
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                rm = (modrm & 7) | REX_B(s);
                gen_op_pextrw_xmm(offsetof(CPUX86State,xmm_regs[rm]), val);
            } else {
                val &= 3;
                rm = (modrm & 7);
                gen_op_pextrw_mmx(offsetof(CPUX86State,fpregs[rm].mmx), val);
            }
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0[OT_LONG][reg]();
            break;
        case 0x1d6: /* movq ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x2d6: /* movq2dq */
            gen_op_enter_mmx();
            rm = (modrm & 7);
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                        offsetof(CPUX86State,fpregs[rm].mmx));
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x3d6: /* movdq2q */
            gen_op_enter_mmx();
            rm = (modrm & 7) | REX_B(s);
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            break;
        case 0xd7: /* pmovmskb */
        case 0x1d7:
            if (mod != 3)
                goto illegal_op;
            if (b1) {
                rm = (modrm & 7) | REX_B(s);
                gen_op_pmovmskb_xmm(offsetof(CPUX86State,xmm_regs[rm]));
            } else {
                rm = (modrm & 7);
                gen_op_pmovmskb_mmx(offsetof(CPUX86State,fpregs[rm].mmx));
            }
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0[OT_LONG][reg]();
            break;
        default:
            goto illegal_op;
        }
    } else {
        /* generic MMX or SSE operation */
        switch(b) {
        case 0xf7:
            /* maskmov : we must prepare A0 */
            if (mod != 3)
                goto illegal_op;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg[R_EDI]();
            } else
#endif
            {
                gen_op_movl_A0_reg[R_EDI]();
                if (s->aflag == 0)
                    gen_op_andl_A0_ffff();
            }
            gen_add_A0_ds_seg(s);
            break;
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
        case 0xc2: /* compare insns */
            s->rip_offset = 1;
            break;
        default:
            break;
        }
        if (is_xmm) {
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
                                b == 0xc2)) {
                    /* specific case for SSE single instructions */
                    if (b1 == 2) {
                        /* 32 bit access */
                        gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
                        gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                    } else {
                        /* 64 bit access */
                        gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_D(0)));
                    }
                } else {
                    gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
                }
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
        } else {
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
        }
        switch(b) {
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
            /* these helpers take an extra imm8 arg; the table stores
               them cast to GenOpFunc2, so cast back before calling */
            val = ldub_code(s->pc++);
            sse_op3 = (GenOpFunc3 *)sse_op2;
            sse_op3(op1_offset, op2_offset, val);
            break;
        case 0xc2:
            /* compare insns */
            val = ldub_code(s->pc++);
            if (val >= 8)
                goto illegal_op;
            sse_op2 = sse_op_table4[val][b1];
            sse_op2(op1_offset, op2_offset);
            break;
        default:
            sse_op2(op1_offset, op2_offset);
            break;
        }
        if (b == 0x2e || b == 0x2f) {
            /* (u)comiss/(u)comisd update EFLAGS directly */
            s->cc_op = CC_OP_EFLAGS;
        }
    }
}
3192
3193
3194/* convert one instruction. s->is_jmp is set if the translation must
3195 be stopped. Return the next pc value */
3196static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3197{
3198 int b, prefixes, aflag, dflag;
3199 int shift, ot;
3200 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3201 target_ulong next_eip, tval;
3202 int rex_w, rex_r;
3203
3204 s->pc = pc_start;
3205 prefixes = 0;
3206 aflag = s->code32;
3207 dflag = s->code32;
3208 s->override = -1;
3209 rex_w = -1;
3210 rex_r = 0;
3211#ifdef TARGET_X86_64
3212 s->rex_x = 0;
3213 s->rex_b = 0;
3214 x86_64_hregs = 0;
3215#endif
3216 s->rip_offset = 0; /* for relative ip address */
3217 next_byte:
3218 b = ldub_code(s->pc);
3219 s->pc++;
3220 /* check prefixes */
3221#ifdef TARGET_X86_64
3222 if (CODE64(s)) {
3223 switch (b) {
3224 case 0xf3:
3225 prefixes |= PREFIX_REPZ;
3226 goto next_byte;
3227 case 0xf2:
3228 prefixes |= PREFIX_REPNZ;
3229 goto next_byte;
3230 case 0xf0:
3231 prefixes |= PREFIX_LOCK;
3232 goto next_byte;
3233 case 0x2e:
3234 s->override = R_CS;
3235 goto next_byte;
3236 case 0x36:
3237 s->override = R_SS;
3238 goto next_byte;
3239 case 0x3e:
3240 s->override = R_DS;
3241 goto next_byte;
3242 case 0x26:
3243 s->override = R_ES;
3244 goto next_byte;
3245 case 0x64:
3246 s->override = R_FS;
3247 goto next_byte;
3248 case 0x65:
3249 s->override = R_GS;
3250 goto next_byte;
3251 case 0x66:
3252 prefixes |= PREFIX_DATA;
3253 goto next_byte;
3254 case 0x67:
3255 prefixes |= PREFIX_ADR;
3256 goto next_byte;
3257 case 0x40 ... 0x4f:
3258 /* REX prefix */
3259 rex_w = (b >> 3) & 1;
3260 rex_r = (b & 0x4) << 1;
3261 s->rex_x = (b & 0x2) << 2;
3262 REX_B(s) = (b & 0x1) << 3;
3263 x86_64_hregs = 1; /* select uniform byte register addressing */
3264 goto next_byte;
3265 }
3266 if (rex_w == 1) {
3267 /* 0x66 is ignored if rex.w is set */
3268 dflag = 2;
3269 } else {
3270 if (prefixes & PREFIX_DATA)
3271 dflag ^= 1;
3272 }
3273 if (!(prefixes & PREFIX_ADR))
3274 aflag = 2;
3275 } else
3276#endif
3277 {
3278 switch (b) {
3279 case 0xf3:
3280 prefixes |= PREFIX_REPZ;
3281 goto next_byte;
3282 case 0xf2:
3283 prefixes |= PREFIX_REPNZ;
3284 goto next_byte;
3285 case 0xf0:
3286 prefixes |= PREFIX_LOCK;
3287 goto next_byte;
3288 case 0x2e:
3289 s->override = R_CS;
3290 goto next_byte;
3291 case 0x36:
3292 s->override = R_SS;
3293 goto next_byte;
3294 case 0x3e:
3295 s->override = R_DS;
3296 goto next_byte;
3297 case 0x26:
3298 s->override = R_ES;
3299 goto next_byte;
3300 case 0x64:
3301 s->override = R_FS;
3302 goto next_byte;
3303 case 0x65:
3304 s->override = R_GS;
3305 goto next_byte;
3306 case 0x66:
3307 prefixes |= PREFIX_DATA;
3308 goto next_byte;
3309 case 0x67:
3310 prefixes |= PREFIX_ADR;
3311 goto next_byte;
3312 }
3313 if (prefixes & PREFIX_DATA)
3314 dflag ^= 1;
3315 if (prefixes & PREFIX_ADR)
3316 aflag ^= 1;
3317 }
3318
3319 s->prefix = prefixes;
3320 s->aflag = aflag;
3321 s->dflag = dflag;
3322
3323 /* lock generation */
3324 if (prefixes & PREFIX_LOCK)
3325 gen_op_lock();
3326
3327 /* now check op code */
3328 reswitch:
3329 switch(b) {
3330 case 0x0f:
3331 /**************************/
3332 /* extended op code */
3333 b = ldub_code(s->pc++) | 0x100;
3334 goto reswitch;
3335
3336 /**************************/
3337 /* arith & logic */
3338 case 0x00 ... 0x05:
3339 case 0x08 ... 0x0d:
3340 case 0x10 ... 0x15:
3341 case 0x18 ... 0x1d:
3342 case 0x20 ... 0x25:
3343 case 0x28 ... 0x2d:
3344 case 0x30 ... 0x35:
3345 case 0x38 ... 0x3d:
3346 {
3347 int op, f, val;
3348 op = (b >> 3) & 7;
3349 f = (b >> 1) & 3;
3350
3351 if ((b & 1) == 0)
3352 ot = OT_BYTE;
3353 else
3354 ot = dflag + OT_WORD;
3355
3356 switch(f) {
3357 case 0: /* OP Ev, Gv */
3358 modrm = ldub_code(s->pc++);
3359 reg = ((modrm >> 3) & 7) | rex_r;
3360 mod = (modrm >> 6) & 3;
3361 rm = (modrm & 7) | REX_B(s);
3362 if (mod != 3) {
3363 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3364 opreg = OR_TMP0;
3365 } else if (op == OP_XORL && rm == reg) {
3366 xor_zero:
3367 /* xor reg, reg optimisation */
3368 gen_op_movl_T0_0();
3369 s->cc_op = CC_OP_LOGICB + ot;
3370 gen_op_mov_reg_T0[ot][reg]();
3371 gen_op_update1_cc();
3372 break;
3373 } else {
3374 opreg = rm;
3375 }
3376 gen_op_mov_TN_reg[ot][1][reg]();
3377 gen_op(s, op, ot, opreg);
3378 break;
3379 case 1: /* OP Gv, Ev */
3380 modrm = ldub_code(s->pc++);
3381 mod = (modrm >> 6) & 3;
3382 reg = ((modrm >> 3) & 7) | rex_r;
3383 rm = (modrm & 7) | REX_B(s);
3384 if (mod != 3) {
3385 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3386 gen_op_ld_T1_A0[ot + s->mem_index]();
3387 } else if (op == OP_XORL && rm == reg) {
3388 goto xor_zero;
3389 } else {
3390 gen_op_mov_TN_reg[ot][1][rm]();
3391 }
3392 gen_op(s, op, ot, reg);
3393 break;
3394 case 2: /* OP A, Iv */
3395 val = insn_get(s, ot);
3396 gen_op_movl_T1_im(val);
3397 gen_op(s, op, ot, OR_EAX);
3398 break;
3399 }
3400 }
3401 break;
3402
3403 case 0x80: /* GRP1 */
3404 case 0x81:
3405 case 0x82:
3406 case 0x83:
3407 {
3408 int val;
3409
3410 if ((b & 1) == 0)
3411 ot = OT_BYTE;
3412 else
3413 ot = dflag + OT_WORD;
3414
3415 modrm = ldub_code(s->pc++);
3416 mod = (modrm >> 6) & 3;
3417 rm = (modrm & 7) | REX_B(s);
3418 op = (modrm >> 3) & 7;
3419
3420 if (mod != 3) {
3421 if (b == 0x83)
3422 s->rip_offset = 1;
3423 else
3424 s->rip_offset = insn_const_size(ot);
3425 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3426 opreg = OR_TMP0;
3427 } else {
3428 opreg = rm;
3429 }
3430
3431 switch(b) {
3432 default:
3433 case 0x80:
3434 case 0x81:
3435 case 0x82:
3436 val = insn_get(s, ot);
3437 break;
3438 case 0x83:
3439 val = (int8_t)insn_get(s, OT_BYTE);
3440 break;
3441 }
3442 gen_op_movl_T1_im(val);
3443 gen_op(s, op, ot, opreg);
3444 }
3445 break;
3446
3447 /**************************/
3448 /* inc, dec, and other misc arith */
3449 case 0x40 ... 0x47: /* inc Gv */
3450 ot = dflag ? OT_LONG : OT_WORD;
3451 gen_inc(s, ot, OR_EAX + (b & 7), 1);
3452 break;
3453 case 0x48 ... 0x4f: /* dec Gv */
3454 ot = dflag ? OT_LONG : OT_WORD;
3455 gen_inc(s, ot, OR_EAX + (b & 7), -1);
3456 break;
3457 case 0xf6: /* GRP3 */
3458 case 0xf7:
3459 if ((b & 1) == 0)
3460 ot = OT_BYTE;
3461 else
3462 ot = dflag + OT_WORD;
3463
3464 modrm = ldub_code(s->pc++);
3465 mod = (modrm >> 6) & 3;
3466 rm = (modrm & 7) | REX_B(s);
3467 op = (modrm >> 3) & 7;
3468 if (mod != 3) {
3469 if (op == 0)
3470 s->rip_offset = insn_const_size(ot);
3471 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3472 gen_op_ld_T0_A0[ot + s->mem_index]();
3473 } else {
3474 gen_op_mov_TN_reg[ot][0][rm]();
3475 }
3476
3477 switch(op) {
3478 case 0: /* test */
3479 val = insn_get(s, ot);
3480 gen_op_movl_T1_im(val);
3481 gen_op_testl_T0_T1_cc();
3482 s->cc_op = CC_OP_LOGICB + ot;
3483 break;
3484 case 2: /* not */
3485 gen_op_notl_T0();
3486 if (mod != 3) {
3487 gen_op_st_T0_A0[ot + s->mem_index]();
3488 } else {
3489 gen_op_mov_reg_T0[ot][rm]();
3490 }
3491 break;
3492 case 3: /* neg */
3493 gen_op_negl_T0();
3494 if (mod != 3) {
3495 gen_op_st_T0_A0[ot + s->mem_index]();
3496 } else {
3497 gen_op_mov_reg_T0[ot][rm]();
3498 }
3499 gen_op_update_neg_cc();
3500 s->cc_op = CC_OP_SUBB + ot;
3501 break;
3502 case 4: /* mul */
3503 switch(ot) {
3504 case OT_BYTE:
3505 gen_op_mulb_AL_T0();
3506 s->cc_op = CC_OP_MULB;
3507 break;
3508 case OT_WORD:
3509 gen_op_mulw_AX_T0();
3510 s->cc_op = CC_OP_MULW;
3511 break;
3512 default:
3513 case OT_LONG:
3514 gen_op_mull_EAX_T0();
3515 s->cc_op = CC_OP_MULL;
3516 break;
3517#ifdef TARGET_X86_64
3518 case OT_QUAD:
3519 gen_op_mulq_EAX_T0();
3520 s->cc_op = CC_OP_MULQ;
3521 break;
3522#endif
3523 }
3524 break;
3525 case 5: /* imul */
3526 switch(ot) {
3527 case OT_BYTE:
3528 gen_op_imulb_AL_T0();
3529 s->cc_op = CC_OP_MULB;
3530 break;
3531 case OT_WORD:
3532 gen_op_imulw_AX_T0();
3533 s->cc_op = CC_OP_MULW;
3534 break;
3535 default:
3536 case OT_LONG:
3537 gen_op_imull_EAX_T0();
3538 s->cc_op = CC_OP_MULL;
3539 break;
3540#ifdef TARGET_X86_64
3541 case OT_QUAD:
3542 gen_op_imulq_EAX_T0();
3543 s->cc_op = CC_OP_MULQ;
3544 break;
3545#endif
3546 }
3547 break;
3548 case 6: /* div */
3549 switch(ot) {
3550 case OT_BYTE:
3551 gen_jmp_im(pc_start - s->cs_base);
3552 gen_op_divb_AL_T0();
3553 break;
3554 case OT_WORD:
3555 gen_jmp_im(pc_start - s->cs_base);
3556 gen_op_divw_AX_T0();
3557 break;
3558 default:
3559 case OT_LONG:
3560 gen_jmp_im(pc_start - s->cs_base);
3561 gen_op_divl_EAX_T0();
3562 break;
3563#ifdef TARGET_X86_64
3564 case OT_QUAD:
3565 gen_jmp_im(pc_start - s->cs_base);
3566 gen_op_divq_EAX_T0();
3567 break;
3568#endif
3569 }
3570 break;
3571 case 7: /* idiv */
3572 switch(ot) {
3573 case OT_BYTE:
3574 gen_jmp_im(pc_start - s->cs_base);
3575 gen_op_idivb_AL_T0();
3576 break;
3577 case OT_WORD:
3578 gen_jmp_im(pc_start - s->cs_base);
3579 gen_op_idivw_AX_T0();
3580 break;
3581 default:
3582 case OT_LONG:
3583 gen_jmp_im(pc_start - s->cs_base);
3584 gen_op_idivl_EAX_T0();
3585 break;
3586#ifdef TARGET_X86_64
3587 case OT_QUAD:
3588 gen_jmp_im(pc_start - s->cs_base);
3589 gen_op_idivq_EAX_T0();
3590 break;
3591#endif
3592 }
3593 break;
3594 default:
3595 goto illegal_op;
3596 }
3597 break;
3598
3599 case 0xfe: /* GRP4 */
3600 case 0xff: /* GRP5 */
3601 if ((b & 1) == 0)
3602 ot = OT_BYTE;
3603 else
3604 ot = dflag + OT_WORD;
3605
3606 modrm = ldub_code(s->pc++);
3607 mod = (modrm >> 6) & 3;
3608 rm = (modrm & 7) | REX_B(s);
3609 op = (modrm >> 3) & 7;
3610 if (op >= 2 && b == 0xfe) {
3611 goto illegal_op;
3612 }
3613 if (CODE64(s)) {
3614 if (op == 2 || op == 4) {
3615 /* operand size for jumps is 64 bit */
3616 ot = OT_QUAD;
3617 } else if (op == 3 || op == 5) {
3618 /* for call calls, the operand is 16 or 32 bit, even
3619 in long mode */
3620 ot = dflag ? OT_LONG : OT_WORD;
3621 } else if (op == 6) {
3622 /* default push size is 64 bit */
3623 ot = dflag ? OT_QUAD : OT_WORD;
3624 }
3625 }
3626 if (mod != 3) {
3627 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3628 if (op >= 2 && op != 3 && op != 5)
3629 gen_op_ld_T0_A0[ot + s->mem_index]();
3630 } else {
3631 gen_op_mov_TN_reg[ot][0][rm]();
3632 }
3633
3634 switch(op) {
3635 case 0: /* inc Ev */
3636 if (mod != 3)
3637 opreg = OR_TMP0;
3638 else
3639 opreg = rm;
3640 gen_inc(s, ot, opreg, 1);
3641 break;
3642 case 1: /* dec Ev */
3643 if (mod != 3)
3644 opreg = OR_TMP0;
3645 else
3646 opreg = rm;
3647 gen_inc(s, ot, opreg, -1);
3648 break;
3649 case 2: /* call Ev */
3650 /* XXX: optimize if memory (no 'and' is necessary) */
3651 if (s->dflag == 0)
3652 gen_op_andl_T0_ffff();
3653 next_eip = s->pc - s->cs_base;
3654 gen_movtl_T1_im(next_eip);
3655 gen_push_T1(s);
3656 gen_op_jmp_T0();
3657 gen_eob(s);
3658 break;
3659 case 3: /* lcall Ev */
3660 gen_op_ld_T1_A0[ot + s->mem_index]();
3661 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3662 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3663 do_lcall:
3664 if (s->pe && !s->vm86) {
3665 if (s->cc_op != CC_OP_DYNAMIC)
3666 gen_op_set_cc_op(s->cc_op);
3667 gen_jmp_im(pc_start - s->cs_base);
3668 gen_op_lcall_protected_T0_T1(dflag, s->pc - pc_start);
3669 } else {
3670 gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
3671 }
3672 gen_eob(s);
3673 break;
3674 case 4: /* jmp Ev */
3675 if (s->dflag == 0)
3676 gen_op_andl_T0_ffff();
3677 gen_op_jmp_T0();
3678 gen_eob(s);
3679 break;
3680 case 5: /* ljmp Ev */
3681 gen_op_ld_T1_A0[ot + s->mem_index]();
3682 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3683 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3684 do_ljmp:
3685 if (s->pe && !s->vm86) {
3686 if (s->cc_op != CC_OP_DYNAMIC)
3687 gen_op_set_cc_op(s->cc_op);
3688 gen_jmp_im(pc_start - s->cs_base);
3689 gen_op_ljmp_protected_T0_T1(s->pc - pc_start);
3690 } else {
3691 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3692 gen_op_movl_T0_T1();
3693 gen_op_jmp_T0();
3694 }
3695 gen_eob(s);
3696 break;
3697 case 6: /* push Ev */
3698 gen_push_T0(s);
3699 break;
3700 default:
3701 goto illegal_op;
3702 }
3703 break;
3704
3705 case 0x84: /* test Ev, Gv */
3706 case 0x85:
3707 if ((b & 1) == 0)
3708 ot = OT_BYTE;
3709 else
3710 ot = dflag + OT_WORD;
3711
3712 modrm = ldub_code(s->pc++);
3713 mod = (modrm >> 6) & 3;
3714 rm = (modrm & 7) | REX_B(s);
3715 reg = ((modrm >> 3) & 7) | rex_r;
3716
3717 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3718 gen_op_mov_TN_reg[ot][1][reg]();
3719 gen_op_testl_T0_T1_cc();
3720 s->cc_op = CC_OP_LOGICB + ot;
3721 break;
3722
3723 case 0xa8: /* test eAX, Iv */
3724 case 0xa9:
3725 if ((b & 1) == 0)
3726 ot = OT_BYTE;
3727 else
3728 ot = dflag + OT_WORD;
3729 val = insn_get(s, ot);
3730
3731 gen_op_mov_TN_reg[ot][0][OR_EAX]();
3732 gen_op_movl_T1_im(val);
3733 gen_op_testl_T0_T1_cc();
3734 s->cc_op = CC_OP_LOGICB + ot;
3735 break;
3736
    case 0x98: /* CWDE/CBW */
        /* sign-extend AL->AX, AX->EAX or (64-bit) EAX->RAX depending
           on the current operand size */
#ifdef TARGET_X86_64
        if (dflag == 2) {
            gen_op_movslq_RAX_EAX();
        } else
#endif
        if (dflag == 1)
            gen_op_movswl_EAX_AX();
        else
            gen_op_movsbw_AX_AL();
        break;
    case 0x99: /* CDQ/CWD */
        /* sign-extend the accumulator into DX:AX / EDX:EAX / RDX:RAX */
#ifdef TARGET_X86_64
        if (dflag == 2) {
            gen_op_movsqo_RDX_RAX();
        } else
#endif
        if (dflag == 1)
            gen_op_movslq_EDX_EAX();
        else
            gen_op_movswl_DX_AX();
        break;
    case 0x1af: /* imul Gv, Ev */
    case 0x69: /* imul Gv, Ev, I */
    case 0x6b:
        /* two/three operand forms of signed multiply; 0x69 takes a full
           sized immediate, 0x6b a sign-extended byte */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        /* rip_offset: size of the immediate still to be fetched after
           the modrm bytes (needed for RIP-relative addressing) */
        if (b == 0x69)
            s->rip_offset = insn_const_size(ot);
        else if (b == 0x6b)
            s->rip_offset = 1;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        if (b == 0x69) {
            val = insn_get(s, ot);
            gen_op_movl_T1_im(val);
        } else if (b == 0x6b) {
            val = (int8_t)insn_get(s, OT_BYTE);
            gen_op_movl_T1_im(val);
        } else {
            gen_op_mov_TN_reg[ot][1][reg]();
        }

#ifdef TARGET_X86_64
        if (ot == OT_QUAD) {
            gen_op_imulq_T0_T1();
        } else
#endif
        if (ot == OT_LONG) {
            gen_op_imull_T0_T1();
        } else {
            gen_op_imulw_T0_T1();
        }
        gen_op_mov_reg_T0[ot][reg]();
        s->cc_op = CC_OP_MULB + ot;
        break;
    case 0x1c0:
    case 0x1c1: /* xadd Ev, Gv */
        /* exchange-and-add: dest gets sum, source register gets the
           original destination value */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        if (mod == 3) {
            /* register destination */
            rm = (modrm & 7) | REX_B(s);
            gen_op_mov_TN_reg[ot][0][reg]();
            gen_op_mov_TN_reg[ot][1][rm]();
            gen_op_addl_T0_T1();
            gen_op_mov_reg_T1[ot][reg]();
            gen_op_mov_reg_T0[ot][rm]();
        } else {
            /* memory destination: store sum, then write old value back
               to the source register */
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_mov_TN_reg[ot][0][reg]();
            gen_op_ld_T1_A0[ot + s->mem_index]();
            gen_op_addl_T0_T1();
            gen_op_st_T0_A0[ot + s->mem_index]();
            gen_op_mov_reg_T1[ot][reg]();
        }
        gen_op_update2_cc();
        s->cc_op = CC_OP_ADDB + ot;
        break;
    case 0x1b0:
    case 0x1b1: /* cmpxchg Ev, Gv */
        /* compare eAX with dest; if equal store Gv into dest, else load
           dest into eAX -- done inside the gen_op helper */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        gen_op_mov_TN_reg[ot][1][reg]();
        if (mod == 3) {
            rm = (modrm & 7) | REX_B(s);
            gen_op_mov_TN_reg[ot][0][rm]();
            gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
            gen_op_mov_reg_T0[ot][rm]();
        } else {
            /* memory form: the helper performs the conditional store */
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T0_A0[ot + s->mem_index]();
            gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
        }
        /* flags are those of the implied compare (subtract) */
        s->cc_op = CC_OP_SUBB + ot;
        break;
    case 0x1c7: /* cmpxchg8b */
        /* 64-bit compare-exchange against EDX:EAX; only the memory
           form exists (mod == 3 is reserved/illegal here) */
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        /* helper reads/writes eflags directly, so flush lazy cc state */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_cmpxchg8b();
        s->cc_op = CC_OP_EFLAGS;
        break;
3853
    /**************************/
    /* push/pop */
    case 0x50 ... 0x57: /* push */
        gen_op_mov_TN_reg[OT_LONG][0][(b & 7) | REX_B(s)]();
        gen_push_T0(s);
        break;
    case 0x58 ... 0x5f: /* pop */
        /* in 64-bit mode the default operand size for pop is 64 bits;
           a 0x66 prefix (dflag == 0) selects 16 bits */
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        gen_pop_T0(s);
        /* NOTE: order is important for pop %sp */
        gen_pop_update(s);
        gen_op_mov_reg_T0[ot][(b & 7) | REX_B(s)]();
        break;
    case 0x60: /* pusha */
        /* pusha/popa do not exist in 64-bit mode */
        if (CODE64(s))
            goto illegal_op;
        gen_pusha(s);
        break;
    case 0x61: /* popa */
        if (CODE64(s))
            goto illegal_op;
        gen_popa(s);
        break;
    case 0x68: /* push Iv */
    case 0x6a:
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        /* 0x68 pushes a full-size immediate, 0x6a a sign-extended byte */
        if (b == 0x68)
            val = insn_get(s, ot);
        else
            val = (int8_t)insn_get(s, OT_BYTE);
        gen_op_movl_T0_im(val);
        gen_push_T0(s);
        break;
    case 0x8f: /* pop Ev */
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        gen_pop_T0(s);
        if (mod == 3) {
            /* NOTE: order is important for pop %sp */
            gen_pop_update(s);
            rm = (modrm & 7) | REX_B(s);
            gen_op_mov_reg_T0[ot][rm]();
        } else {
            /* NOTE: order is important too for MMU exceptions */
            /* popl_esp_hack: the effective address must be computed with
               ESP already adjusted past the popped value */
            s->popl_esp_hack = 1 << ot;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
            s->popl_esp_hack = 0;
            gen_pop_update(s);
        }
        break;
    case 0xc8: /* enter */
        {
            int level;
            /* 16-bit frame size followed by a byte nesting level */
            val = lduw_code(s->pc);
            s->pc += 2;
            level = ldub_code(s->pc++);
            gen_enter(s, val, level);
        }
        break;
    case 0xc9: /* leave */
        /* XXX: exception not precise (ESP is updated before potential exception) */
        /* ESP <- EBP, then pop EBP; register width depends on mode and
           stack-segment size */
        if (CODE64(s)) {
            gen_op_mov_TN_reg[OT_QUAD][0][R_EBP]();
            gen_op_mov_reg_T0[OT_QUAD][R_ESP]();
        } else if (s->ss32) {
            gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
            gen_op_mov_reg_T0[OT_LONG][R_ESP]();
        } else {
            gen_op_mov_TN_reg[OT_WORD][0][R_EBP]();
            gen_op_mov_reg_T0[OT_WORD][R_ESP]();
        }
        gen_pop_T0(s);
        if (CODE64(s)) {
            ot = dflag ? OT_QUAD : OT_WORD;
        } else {
            ot = dflag + OT_WORD;
        }
        gen_op_mov_reg_T0[ot][R_EBP]();
        gen_pop_update(s);
        break;
    case 0x06: /* push es */
    case 0x0e: /* push cs */
    case 0x16: /* push ss */
    case 0x1e: /* push ds */
        /* single-byte segment pushes are invalid in 64-bit mode; the
           segment index is encoded in bits 3-4 of the opcode */
        if (CODE64(s))
            goto illegal_op;
        gen_op_movl_T0_seg(b >> 3);
        gen_push_T0(s);
        break;
    case 0x1a0: /* push fs */
    case 0x1a8: /* push gs */
        gen_op_movl_T0_seg((b >> 3) & 7);
        gen_push_T0(s);
        break;
    case 0x07: /* pop es */
    case 0x17: /* pop ss */
    case 0x1f: /* pop ds */
        if (CODE64(s))
            goto illegal_op;
        reg = b >> 3;
        gen_pop_T0(s);
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
        gen_pop_update(s);
        if (reg == R_SS) {
            /* if reg == SS, inhibit interrupts/trace. */
            /* If several instructions disable interrupts, only the
               _first_ does it */
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
                gen_op_set_inhibit_irq();
            s->tf = 0;
        }
        if (s->is_jmp) {
            /* segment load may have forced an end of TB (e.g. base
               change in real mode) */
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    case 0x1a1: /* pop fs */
    case 0x1a9: /* pop gs */
        gen_pop_T0(s);
        gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
        gen_pop_update(s);
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
3993
    /**************************/
    /* mov */
    case 0x88:
    case 0x89: /* mov Gv, Ev */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;

        /* generate a generic store */
        gen_ldst_modrm(s, modrm, ot, reg, 1);
        break;
    case 0xc6:
    case 0xc7: /* mov Ev, Iv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        if (mod != 3) {
            /* immediate follows the addressing bytes: record its size
               for RIP-relative address computation */
            s->rip_offset = insn_const_size(ot);
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        }
        val = insn_get(s, ot);
        gen_op_movl_T0_im(val);
        if (mod != 3)
            gen_op_st_T0_A0[ot + s->mem_index]();
        else
            gen_op_mov_reg_T0[ot][(modrm & 7) | REX_B(s)]();
        break;
    case 0x8a:
    case 0x8b: /* mov Ev, Gv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = OT_WORD + dflag;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;

        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
        gen_op_mov_reg_T0[ot][reg]();
        break;
    case 0x8e: /* mov seg, Gv */
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        /* CS cannot be loaded with mov; reg values 6/7 are reserved */
        if (reg >= 6 || reg == R_CS)
            goto illegal_op;
        gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
        gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
        if (reg == R_SS) {
            /* if reg == SS, inhibit interrupts/trace */
            /* If several instructions disable interrupts, only the
               _first_ does it */
            if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
                gen_op_set_inhibit_irq();
            s->tf = 0;
        }
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
    case 0x8c: /* mov Gv, seg */
        modrm = ldub_code(s->pc++);
        reg = (modrm >> 3) & 7;
        mod = (modrm >> 6) & 3;
        if (reg >= 6)
            goto illegal_op;
        gen_op_movl_T0_seg(reg);
        /* memory stores are always 16 bits; register destination uses
           the full operand size (upper bits zeroed by the mov op) */
        if (mod == 3)
            ot = OT_WORD + dflag;
        else
            ot = OT_WORD;
        gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
        break;
4072
    case 0x1b6: /* movzbS Gv, Eb */
    case 0x1b7: /* movzwS Gv, Eb */
    case 0x1be: /* movsbS Gv, Eb */
    case 0x1bf: /* movswS Gv, Eb */
        {
            int d_ot;
            /* d_ot is the size of destination */
            d_ot = dflag + OT_WORD;
            /* ot is the size of source */
            ot = (b & 1) + OT_BYTE;
            modrm = ldub_code(s->pc++);
            reg = ((modrm >> 3) & 7) | rex_r;
            mod = (modrm >> 6) & 3;
            rm = (modrm & 7) | REX_B(s);

            if (mod == 3) {
                /* register source: extend in place; bit 3 of the opcode
                   selects sign (movs*) vs zero (movz*) extension */
                gen_op_mov_TN_reg[ot][0][rm]();
                switch(ot | (b & 8)) {
                case OT_BYTE:
                    gen_op_movzbl_T0_T0();
                    break;
                case OT_BYTE | 8:
                    gen_op_movsbl_T0_T0();
                    break;
                case OT_WORD:
                    gen_op_movzwl_T0_T0();
                    break;
                default:
                case OT_WORD | 8:
                    gen_op_movswl_T0_T0();
                    break;
                }
                gen_op_mov_reg_T0[d_ot][reg]();
            } else {
                /* memory source: the load op performs the extension */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                if (b & 8) {
                    gen_op_lds_T0_A0[ot + s->mem_index]();
                } else {
                    gen_op_ldu_T0_A0[ot + s->mem_index]();
                }
                gen_op_mov_reg_T0[d_ot][reg]();
            }
        }
        break;
4117
    case 0x8d: /* lea */
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        /* lea requires a memory operand */
        if (mod == 3)
            goto illegal_op;
        reg = ((modrm >> 3) & 7) | rex_r;
        /* we must ensure that no segment is added */
        s->override = -1;
        /* temporarily disable addseg so only the raw effective address
           is computed, then restore the previous setting */
        val = s->addseg;
        s->addseg = 0;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        s->addseg = val;
        gen_op_mov_reg_A0[ot - OT_WORD][reg]();
        break;
4133
    case 0xa0: /* mov EAX, Ov */
    case 0xa1:
    case 0xa2: /* mov Ov, EAX */
    case 0xa3:
        {
            target_ulong offset_addr;

            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                /* 64-bit address size: the moffs is a full 8-byte
                   absolute address */
                offset_addr = ldq_code(s->pc);
                s->pc += 8;
                /* use the shorter 32-bit immediate op when possible */
                if (offset_addr == (int32_t)offset_addr)
                    gen_op_movq_A0_im(offset_addr);
                else
                    gen_op_movq_A0_im64(offset_addr >> 32, offset_addr);
            } else
#endif
            {
                if (s->aflag) {
                    offset_addr = insn_get(s, OT_LONG);
                } else {
                    offset_addr = insn_get(s, OT_WORD);
                }
                gen_op_movl_A0_im(offset_addr);
            }
            gen_add_A0_ds_seg(s);
            /* bit 1 of the opcode selects direction: load vs store */
            if ((b & 2) == 0) {
                gen_op_ld_T0_A0[ot + s->mem_index]();
                gen_op_mov_reg_T0[ot][R_EAX]();
            } else {
                gen_op_mov_TN_reg[ot][0][R_EAX]();
                gen_op_st_T0_A0[ot + s->mem_index]();
            }
        }
        break;
    case 0xd7: /* xlat */
        /* AL = [DS:(E/R)BX + unsigned AL] */
#ifdef TARGET_X86_64
        if (s->aflag == 2) {
            gen_op_movq_A0_reg[R_EBX]();
            gen_op_addq_A0_AL();
        } else
#endif
        {
            gen_op_movl_A0_reg[R_EBX]();
            gen_op_addl_A0_AL();
            /* 16-bit address size: wrap the address at 64K */
            if (s->aflag == 0)
                gen_op_andl_A0_ffff();
        }
        gen_add_A0_ds_seg(s);
        gen_op_ldu_T0_A0[OT_BYTE + s->mem_index]();
        gen_op_mov_reg_T0[OT_BYTE][R_EAX]();
        break;
    case 0xb0 ... 0xb7: /* mov R, Ib */
        /* byte register <- immediate; register index in low opcode bits */
        val = insn_get(s, OT_BYTE);
        gen_op_movl_T0_im(val);
        gen_op_mov_reg_T0[OT_BYTE][(b & 7) | REX_B(s)]();
        break;
    case 0xb8 ... 0xbf: /* mov R, Iv */
#ifdef TARGET_X86_64
        if (dflag == 2) {
            uint64_t tmp;
            /* 64 bit case */
            tmp = ldq_code(s->pc);
            s->pc += 8;
            reg = (b & 7) | REX_B(s);
            gen_movtl_T0_im(tmp);
            gen_op_mov_reg_T0[OT_QUAD][reg]();
        } else
#endif
        {
            ot = dflag ? OT_LONG : OT_WORD;
            val = insn_get(s, ot);
            reg = (b & 7) | REX_B(s);
            gen_op_movl_T0_im(val);
            gen_op_mov_reg_T0[ot][reg]();
        }
        break;
4215
    case 0x91 ... 0x97: /* xchg R, EAX */
        ot = dflag + OT_WORD;
        reg = (b & 7) | REX_B(s);
        rm = R_EAX;
        goto do_xchg_reg;
    case 0x86:
    case 0x87: /* xchg Ev, Gv */
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        if (mod == 3) {
            rm = (modrm & 7) | REX_B(s);
        do_xchg_reg:
            /* register-register swap via the two temporaries */
            gen_op_mov_TN_reg[ot][0][reg]();
            gen_op_mov_TN_reg[ot][1][rm]();
            gen_op_mov_reg_T0[ot][rm]();
            gen_op_mov_reg_T1[ot][reg]();
        } else {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_mov_TN_reg[ot][0][reg]();
            /* for xchg, lock is implicit */
            /* NOTE(review): lock ops are emitted only when no explicit
               LOCK prefix was decoded -- presumably the prefix handling
               elsewhere emits the lock/unlock pair in that case */
            if (!(prefixes & PREFIX_LOCK))
                gen_op_lock();
            gen_op_ld_T1_A0[ot + s->mem_index]();
            gen_op_st_T0_A0[ot + s->mem_index]();
            if (!(prefixes & PREFIX_LOCK))
                gen_op_unlock();
            gen_op_mov_reg_T1[ot][reg]();
        }
        break;
    case 0xc4: /* les Gv */
        /* les/lds use opcodes reused by other encodings in 64-bit mode */
        if (CODE64(s))
            goto illegal_op;
        op = R_ES;
        goto do_lxx;
    case 0xc5: /* lds Gv */
        if (CODE64(s))
            goto illegal_op;
        op = R_DS;
        goto do_lxx;
    case 0x1b2: /* lss Gv */
        op = R_SS;
        goto do_lxx;
    case 0x1b4: /* lfs Gv */
        op = R_FS;
        goto do_lxx;
    case 0x1b5: /* lgs Gv */
        op = R_GS;
    do_lxx:
        /* load far pointer: offset into a register, selector into the
           segment register chosen above */
        ot = dflag ? OT_LONG : OT_WORD;
        modrm = ldub_code(s->pc++);
        reg = ((modrm >> 3) & 7) | rex_r;
        mod = (modrm >> 6) & 3;
        if (mod == 3)
            goto illegal_op;
        gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
        gen_op_ld_T1_A0[ot + s->mem_index]();
        gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
        /* load the segment first to handle exceptions properly */
        gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
        gen_movl_seg_T0(s, op, pc_start - s->cs_base);
        /* then put the data */
        gen_op_mov_reg_T1[ot][reg]();
        if (s->is_jmp) {
            gen_jmp_im(s->pc - s->cs_base);
            gen_eob(s);
        }
        break;
4288
    /************************/
    /* shifts */
    case 0xc0:
    case 0xc1:
        /* shift Ev,Ib */
        /* shift encodes the count source: 0 = CL, 1 = constant 1,
           2 = immediate byte (read below) */
        shift = 2;
    grp2:
        {
            if ((b & 1) == 0)
                ot = OT_BYTE;
            else
                ot = dflag + OT_WORD;

            modrm = ldub_code(s->pc++);
            mod = (modrm >> 6) & 3;
            /* op selects rol/ror/rcl/rcr/shl/shr/sal/sar */
            op = (modrm >> 3) & 7;

            if (mod != 3) {
                if (shift == 2) {
                    /* one immediate byte follows the modrm bytes */
                    s->rip_offset = 1;
                }
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                opreg = OR_TMP0;
            } else {
                opreg = (modrm & 7) | REX_B(s);
            }

            /* simpler op */
            if (shift == 0) {
                gen_shift(s, op, ot, opreg, OR_ECX);
            } else {
                if (shift == 2) {
                    shift = ldub_code(s->pc++);
                }
                gen_shifti(s, op, ot, opreg, shift);
            }
        }
        break;
    case 0xd0:
    case 0xd1:
        /* shift Ev,1 */
        shift = 1;
        goto grp2;
    case 0xd2:
    case 0xd3:
        /* shift Ev,cl */
        shift = 0;
        goto grp2;
4337
    case 0x1a4: /* shld imm */
        /* op: 0 = shld (left), 1 = shrd (right);
           shift: 1 = immediate count, 0 = count in CL */
        op = 0;
        shift = 1;
        goto do_shiftd;
    case 0x1a5: /* shld cl */
        op = 0;
        shift = 0;
        goto do_shiftd;
    case 0x1ac: /* shrd imm */
        op = 1;
        shift = 1;
        goto do_shiftd;
    case 0x1ad: /* shrd cl */
        op = 1;
        shift = 0;
    do_shiftd:
        ot = dflag + OT_WORD;
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = (modrm & 7) | REX_B(s);
        reg = ((modrm >> 3) & 7) | rex_r;

        if (mod != 3) {
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_op_ld_T0_A0[ot + s->mem_index]();
        } else {
            gen_op_mov_TN_reg[ot][0][rm]();
        }
        gen_op_mov_TN_reg[ot][1][reg]();

        if (shift) {
            /* immediate count is masked to the operand width */
            val = ldub_code(s->pc++);
            if (ot == OT_QUAD)
                val &= 0x3f;
            else
                val &= 0x1f;
            if (val) {
                if (mod == 3)
                    gen_op_shiftd_T0_T1_im_cc[ot][op](val);
                else
                    gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
                if (op == 0 && ot != OT_WORD)
                    s->cc_op = CC_OP_SHLB + ot;
                else
                    s->cc_op = CC_OP_SARB + ot;
            }
        } else {
            /* CL count is only known at run time */
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            if (mod == 3)
                gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
            else
                gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
            s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
        }
        if (mod == 3) {
            gen_op_mov_reg_T0[ot][rm]();
        }
        break;
4397
    /************************/
    /* floats */
    case 0xd8 ... 0xdf:
        /* x87 FPU escape opcodes; the real operation is selected by the
           low 3 opcode bits combined with the modrm reg field (op) */
        if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
            /* if CR0.EM or CR0.TS are set, generate an FPU exception */
            /* XXX: what to do if illegal op ? */
            gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
            break;
        }
        modrm = ldub_code(s->pc++);
        mod = (modrm >> 6) & 3;
        rm = modrm & 7;
        op = ((b & 7) << 3) | ((modrm >> 3) & 7);
        if (mod != 3) {
            /* memory op */
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            switch(op) {
            case 0x00 ... 0x07: /* fxxxs */
            case 0x10 ... 0x17: /* fixxxl */
            case 0x20 ... 0x27: /* fxxxl */
            case 0x30 ... 0x37: /* fixxx */
                {
                    int op1;
                    op1 = op & 7;

                    /* op >> 4 selects the memory operand format:
                       0 = float32, 1 = int32, 2 = float64, 3 = int16 */
                    switch(op >> 4) {
                    case 0:
                        gen_op_flds_FT0_A0();
                        break;
                    case 1:
                        gen_op_fildl_FT0_A0();
                        break;
                    case 2:
                        gen_op_fldl_FT0_A0();
                        break;
                    case 3:
                    default:
                        gen_op_fild_FT0_A0();
                        break;
                    }

                    /* arithmetic between ST0 and the loaded operand */
                    gen_op_fp_arith_ST0_FT0[op1]();
                    if (op1 == 3) {
                        /* fcomp needs pop */
                        gen_op_fpop();
                    }
                }
                break;
            case 0x08: /* flds */
            case 0x0a: /* fsts */
            case 0x0b: /* fstps */
            case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
            case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
            case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
                switch(op & 7) {
                case 0:
                    /* load into a fresh ST0 */
                    switch(op >> 4) {
                    case 0:
                        gen_op_flds_ST0_A0();
                        break;
                    case 1:
                        gen_op_fildl_ST0_A0();
                        break;
                    case 2:
                        gen_op_fldl_ST0_A0();
                        break;
                    case 3:
                    default:
                        gen_op_fild_ST0_A0();
                        break;
                    }
                    break;
                case 1:
                    /* fisttp: store with truncation, then pop (SSE3) */
                    switch(op >> 4) {
                    case 1:
                        gen_op_fisttl_ST0_A0();
                        break;
                    case 2:
                        gen_op_fisttll_ST0_A0();
                        break;
                    case 3:
                    default:
                        gen_op_fistt_ST0_A0();
                    }
                    gen_op_fpop();
                    break;
                default:
                    /* plain store; (op & 7) == 3 variants also pop */
                    switch(op >> 4) {
                    case 0:
                        gen_op_fsts_ST0_A0();
                        break;
                    case 1:
                        gen_op_fistl_ST0_A0();
                        break;
                    case 2:
                        gen_op_fstl_ST0_A0();
                        break;
                    case 3:
                    default:
                        gen_op_fist_ST0_A0();
                        break;
                    }
                    if ((op & 7) == 3)
                        gen_op_fpop();
                    break;
                }
                break;
            case 0x0c: /* fldenv mem */
                gen_op_fldenv_A0(s->dflag);
                break;
            case 0x0d: /* fldcw mem */
                gen_op_fldcw_A0();
                break;
            case 0x0e: /* fnstenv mem */
                gen_op_fnstenv_A0(s->dflag);
                break;
            case 0x0f: /* fnstcw mem */
                gen_op_fnstcw_A0();
                break;
            case 0x1d: /* fldt mem */
                gen_op_fldt_ST0_A0();
                break;
            case 0x1f: /* fstpt mem */
                gen_op_fstt_ST0_A0();
                gen_op_fpop();
                break;
            case 0x2c: /* frstor mem */
                gen_op_frstor_A0(s->dflag);
                break;
            case 0x2e: /* fnsave mem */
                gen_op_fnsave_A0(s->dflag);
                break;
            case 0x2f: /* fnstsw mem */
                gen_op_fnstsw_A0();
                break;
            case 0x3c: /* fbld */
                gen_op_fbld_ST0_A0();
                break;
            case 0x3e: /* fbstp */
                gen_op_fbst_ST0_A0();
                gen_op_fpop();
                break;
            case 0x3d: /* fildll */
                gen_op_fildll_ST0_A0();
                break;
            case 0x3f: /* fistpll */
                gen_op_fistll_ST0_A0();
                gen_op_fpop();
                break;
            default:
                goto illegal_op;
            }
        } else {
            /* register float ops */
            opreg = rm;

            switch(op) {
            case 0x08: /* fld sti */
                gen_op_fpush();
                /* source index shifts by one because of the push */
                gen_op_fmov_ST0_STN((opreg + 1) & 7);
                break;
            case 0x09: /* fxchg sti */
            case 0x29: /* fxchg4 sti, undocumented op */
            case 0x39: /* fxchg7 sti, undocumented op */
                gen_op_fxchg_ST0_STN(opreg);
                break;
            case 0x0a: /* grp d9/2 */
                switch(rm) {
                case 0: /* fnop */
                    /* check exceptions (FreeBSD FPU probe) */
                    if (s->cc_op != CC_OP_DYNAMIC)
                        gen_op_set_cc_op(s->cc_op);
                    gen_jmp_im(pc_start - s->cs_base);
                    gen_op_fwait();
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x0c: /* grp d9/4 */
                switch(rm) {
                case 0: /* fchs */
                    gen_op_fchs_ST0();
                    break;
                case 1: /* fabs */
                    gen_op_fabs_ST0();
                    break;
                case 4: /* ftst */
                    gen_op_fldz_FT0();
                    gen_op_fcom_ST0_FT0();
                    break;
                case 5: /* fxam */
                    gen_op_fxam_ST0();
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x0d: /* grp d9/5 */
                {
                    /* constant loads: fld1, fldl2t, fldl2e, fldpi,
                       fldlg2, fldln2, fldz */
                    switch(rm) {
                    case 0:
                        gen_op_fpush();
                        gen_op_fld1_ST0();
                        break;
                    case 1:
                        gen_op_fpush();
                        gen_op_fldl2t_ST0();
                        break;
                    case 2:
                        gen_op_fpush();
                        gen_op_fldl2e_ST0();
                        break;
                    case 3:
                        gen_op_fpush();
                        gen_op_fldpi_ST0();
                        break;
                    case 4:
                        gen_op_fpush();
                        gen_op_fldlg2_ST0();
                        break;
                    case 5:
                        gen_op_fpush();
                        gen_op_fldln2_ST0();
                        break;
                    case 6:
                        gen_op_fpush();
                        gen_op_fldz_ST0();
                        break;
                    default:
                        goto illegal_op;
                    }
                }
                break;
            case 0x0e: /* grp d9/6 */
                switch(rm) {
                case 0: /* f2xm1 */
                    gen_op_f2xm1();
                    break;
                case 1: /* fyl2x */
                    gen_op_fyl2x();
                    break;
                case 2: /* fptan */
                    gen_op_fptan();
                    break;
                case 3: /* fpatan */
                    gen_op_fpatan();
                    break;
                case 4: /* fxtract */
                    gen_op_fxtract();
                    break;
                case 5: /* fprem1 */
                    gen_op_fprem1();
                    break;
                case 6: /* fdecstp */
                    gen_op_fdecstp();
                    break;
                default:
                case 7: /* fincstp */
                    gen_op_fincstp();
                    break;
                }
                break;
            case 0x0f: /* grp d9/7 */
                switch(rm) {
                case 0: /* fprem */
                    gen_op_fprem();
                    break;
                case 1: /* fyl2xp1 */
                    gen_op_fyl2xp1();
                    break;
                case 2: /* fsqrt */
                    gen_op_fsqrt();
                    break;
                case 3: /* fsincos */
                    gen_op_fsincos();
                    break;
                case 5: /* fscale */
                    gen_op_fscale();
                    break;
                case 4: /* frndint */
                    gen_op_frndint();
                    break;
                case 6: /* fsin */
                    gen_op_fsin();
                    break;
                default:
                case 7: /* fcos */
                    gen_op_fcos();
                    break;
                }
                break;
            case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
            case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
            case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
                {
                    int op1;

                    op1 = op & 7;
                    if (op >= 0x20) {
                        /* result goes to ST(i); 0x30+ forms also pop */
                        gen_op_fp_arith_STN_ST0[op1](opreg);
                        if (op >= 0x30)
                            gen_op_fpop();
                    } else {
                        /* result goes to ST0 */
                        gen_op_fmov_FT0_STN(opreg);
                        gen_op_fp_arith_ST0_FT0[op1]();
                    }
                }
                break;
            case 0x02: /* fcom */
            case 0x22: /* fcom2, undocumented op */
                gen_op_fmov_FT0_STN(opreg);
                gen_op_fcom_ST0_FT0();
                break;
            case 0x03: /* fcomp */
            case 0x23: /* fcomp3, undocumented op */
            case 0x32: /* fcomp5, undocumented op */
                gen_op_fmov_FT0_STN(opreg);
                gen_op_fcom_ST0_FT0();
                gen_op_fpop();
                break;
            case 0x15: /* da/5 */
                switch(rm) {
                case 1: /* fucompp */
                    gen_op_fmov_FT0_STN(1);
                    gen_op_fucom_ST0_FT0();
                    gen_op_fpop();
                    gen_op_fpop();
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x1c:
                switch(rm) {
                case 0: /* feni (287 only, just do nop here) */
                    break;
                case 1: /* fdisi (287 only, just do nop here) */
                    break;
                case 2: /* fclex */
                    gen_op_fclex();
                    break;
                case 3: /* fninit */
                    gen_op_fninit();
                    break;
                case 4: /* fsetpm (287 only, just do nop here) */
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x1d: /* fucomi */
                /* fucomi/fcomi write EFLAGS directly, so lazy cc state
                   must be flushed first */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_op_fmov_FT0_STN(opreg);
                gen_op_fucomi_ST0_FT0();
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x1e: /* fcomi */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_op_fmov_FT0_STN(opreg);
                gen_op_fcomi_ST0_FT0();
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x28: /* ffree sti */
                gen_op_ffree_STN(opreg);
                break;
            case 0x2a: /* fst sti */
                gen_op_fmov_STN_ST0(opreg);
                break;
            case 0x2b: /* fstp sti */
            case 0x0b: /* fstp1 sti, undocumented op */
            case 0x3a: /* fstp8 sti, undocumented op */
            case 0x3b: /* fstp9 sti, undocumented op */
                gen_op_fmov_STN_ST0(opreg);
                gen_op_fpop();
                break;
            case 0x2c: /* fucom st(i) */
                gen_op_fmov_FT0_STN(opreg);
                gen_op_fucom_ST0_FT0();
                break;
            case 0x2d: /* fucomp st(i) */
                gen_op_fmov_FT0_STN(opreg);
                gen_op_fucom_ST0_FT0();
                gen_op_fpop();
                break;
            case 0x33: /* de/3 */
                switch(rm) {
                case 1: /* fcompp */
                    gen_op_fmov_FT0_STN(1);
                    gen_op_fcom_ST0_FT0();
                    gen_op_fpop();
                    gen_op_fpop();
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x38: /* ffreep sti, undocumented op */
                gen_op_ffree_STN(opreg);
                gen_op_fpop();
                break;
            case 0x3c: /* df/4 */
                switch(rm) {
                case 0:
                    gen_op_fnstsw_EAX();
                    break;
                default:
                    goto illegal_op;
                }
                break;
            case 0x3d: /* fucomip */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_op_fmov_FT0_STN(opreg);
                gen_op_fucomi_ST0_FT0();
                gen_op_fpop();
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x3e: /* fcomip */
                if (s->cc_op != CC_OP_DYNAMIC)
                    gen_op_set_cc_op(s->cc_op);
                gen_op_fmov_FT0_STN(opreg);
                gen_op_fcomi_ST0_FT0();
                gen_op_fpop();
                s->cc_op = CC_OP_EFLAGS;
                break;
            case 0x10 ... 0x13: /* fcmovxx */
            case 0x18 ... 0x1b:
                {
                    int op1;
                    /* condition codes for fcmovb/e/be/u; bit 0 of op1
                       (from opcode bit 3) inverts the condition */
                    const static uint8_t fcmov_cc[8] = {
                        (JCC_B << 1),
                        (JCC_Z << 1),
                        (JCC_BE << 1),
                        (JCC_P << 1),
                    };
                    op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
                    gen_setcc(s, op1);
                    gen_op_fcmov_ST0_STN_T0(opreg);
                }
                break;
            default:
                goto illegal_op;
            }
        }
#ifdef USE_CODE_COPY
        s->tb->cflags |= CF_TB_FP_USED;
#endif
        break;
    /************************/
    /* string ops */

    case 0xa4: /* movsS */
    case 0xa5:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        /* with a REP prefix, the repz helper generates the whole loop */
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_movs(s, ot);
        }
        break;

    case 0xaa: /* stosS */
    case 0xab:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;

        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_stos(s, ot);
        }
        break;
    case 0xac: /* lodsS */
    case 0xad:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_lods(s, ot);
        }
        break;
    case 0xae: /* scasS */
    case 0xaf:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        /* scas/cmps distinguish repz from repnz (last arg selects the
           termination condition) */
        if (prefixes & PREFIX_REPNZ) {
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
        } else if (prefixes & PREFIX_REPZ) {
            gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
        } else {
            gen_scas(s, ot);
            s->cc_op = CC_OP_SUBB + ot;
        }
        break;

    case 0xa6: /* cmpsS */
    case 0xa7:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag + OT_WORD;
        if (prefixes & PREFIX_REPNZ) {
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
        } else if (prefixes & PREFIX_REPZ) {
            gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
        } else {
            gen_cmps(s, ot);
            s->cc_op = CC_OP_SUBB + ot;
        }
        break;
    case 0x6c: /* insS */
    case 0x6d:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        /* I/O permission check may raise #GP before any transfer */
        gen_check_io(s, ot, 1, pc_start - s->cs_base);
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_ins(s, ot);
        }
        break;
    case 0x6e: /* outsS */
    case 0x6f:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        gen_check_io(s, ot, 1, pc_start - s->cs_base);
        if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
            gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
        } else {
            gen_outs(s, ot);
        }
        break;
4948
    /************************/
    /* port I/O */
    case 0xe4: /* in al/eAX, Ib */
    case 0xe5:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        val = ldub_code(s->pc++);
        gen_op_movl_T0_im(val);
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
        gen_op_in[ot]();
        gen_op_mov_reg_T1[ot][R_EAX]();
        break;
    case 0xe6: /* out Ib, al/eAX */
    case 0xe7:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        val = ldub_code(s->pc++);
        gen_op_movl_T0_im(val);
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
#ifdef VBOX /* bird: linux is writing to this port for delaying I/O. */
        /* skip emitting the actual write for the POST/delay port */
        if (val == 0x80)
            break;
#endif /* VBOX */
        gen_op_mov_TN_reg[ot][1][R_EAX]();
        gen_op_out[ot]();
        break;
    case 0xec: /* in al/eAX, dx */
    case 0xed:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        /* port number comes from DX (16 bits) */
        gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
        gen_op_in[ot]();
        gen_op_mov_reg_T1[ot][R_EAX]();
        break;
    case 0xee: /* out dx, al/eAX */
    case 0xef:
        if ((b & 1) == 0)
            ot = OT_BYTE;
        else
            ot = dflag ? OT_LONG : OT_WORD;
        gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
        gen_op_andl_T0_ffff();
        gen_check_io(s, ot, 0, pc_start - s->cs_base);
        gen_op_mov_TN_reg[ot][1][R_EAX]();
        gen_op_out[ot]();
        break;
5003
    /************************/
    /* control */
    case 0xc2: /* ret im */
        val = ldsw_code(s->pc);
        s->pc += 2;
        gen_pop_T0(s);
        /* in 64-bit mode near ret defaults to a 64-bit operand unless a
           16-bit operand-size prefix was used */
        if (CODE64(s) && s->dflag)
            s->dflag = 2;
        /* release the return address plus the immediate byte count */
        gen_stack_update(s, val + (2 << s->dflag));
        if (s->dflag == 0)
            gen_op_andl_T0_ffff();
        gen_op_jmp_T0();
        gen_eob(s);
        break;
    case 0xc3: /* ret */
        gen_pop_T0(s);
        gen_pop_update(s);
        if (s->dflag == 0)
            gen_op_andl_T0_ffff();
        gen_op_jmp_T0();
        gen_eob(s);
        break;
    case 0xca: /* lret im */
        val = ldsw_code(s->pc);
        s->pc += 2;
    do_lret:
        if (s->pe && !s->vm86) {
            /* protected mode far return: full privilege/descriptor
               handling in the helper */
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            gen_op_lret_protected(s->dflag, val);
        } else {
            gen_stack_A0(s);
            /* pop offset */
            gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
            if (s->dflag == 0)
                gen_op_andl_T0_ffff();
            /* NOTE: keeping EIP updated is not a problem in case of
               exception */
            gen_op_jmp_T0();
            /* pop selector */
            gen_op_addl_A0_im(2 << s->dflag);
            gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
            gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
            /* add stack offset */
            gen_stack_update(s, val + (4 << s->dflag));
        }
        gen_eob(s);
        break;
    case 0xcb: /* lret */
        /* same as lret im with a zero stack adjustment */
        val = 0;
        goto do_lret;
    case 0xcf: /* iret */
        if (!s->pe) {
            /* real mode */
            gen_op_iret_real(s->dflag);
            s->cc_op = CC_OP_EFLAGS;
        } else if (s->vm86) {
            /* vm86 mode: iret requires IOPL 3, otherwise #GP */
            if (s->iopl != 3) {
                gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
            } else {
                gen_op_iret_real(s->dflag);
                s->cc_op = CC_OP_EFLAGS;
            }
        } else {
            if (s->cc_op != CC_OP_DYNAMIC)
                gen_op_set_cc_op(s->cc_op);
            gen_jmp_im(pc_start - s->cs_base);
            gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
            s->cc_op = CC_OP_EFLAGS;
        }
        gen_eob(s);
        break;
5077 case 0xe8: /* call im */
5078 {
5079 if (dflag)
5080 tval = (int32_t)insn_get(s, OT_LONG);
5081 else
5082 tval = (int16_t)insn_get(s, OT_WORD);
5083 next_eip = s->pc - s->cs_base;
5084 tval += next_eip;
5085 if (s->dflag == 0)
5086 tval &= 0xffff;
5087 gen_movtl_T0_im(next_eip);
5088 gen_push_T0(s);
5089 gen_jmp(s, tval);
5090 }
5091 break;
5092 case 0x9a: /* lcall im */
5093 {
5094 unsigned int selector, offset;
5095
5096 if (CODE64(s))
5097 goto illegal_op;
5098 ot = dflag ? OT_LONG : OT_WORD;
5099 offset = insn_get(s, ot);
5100 selector = insn_get(s, OT_WORD);
5101
5102 gen_op_movl_T0_im(selector);
5103 gen_op_movl_T1_imu(offset);
5104 }
5105 goto do_lcall;
5106 case 0xe9: /* jmp im */
5107 if (dflag)
5108 tval = (int32_t)insn_get(s, OT_LONG);
5109 else
5110 tval = (int16_t)insn_get(s, OT_WORD);
5111 tval += s->pc - s->cs_base;
5112 if (s->dflag == 0)
5113 tval &= 0xffff;
5114 gen_jmp(s, tval);
5115 break;
5116 case 0xea: /* ljmp im */
5117 {
5118 unsigned int selector, offset;
5119
5120 if (CODE64(s))
5121 goto illegal_op;
5122 ot = dflag ? OT_LONG : OT_WORD;
5123 offset = insn_get(s, ot);
5124 selector = insn_get(s, OT_WORD);
5125
5126 gen_op_movl_T0_im(selector);
5127 gen_op_movl_T1_imu(offset);
5128 }
5129 goto do_ljmp;
5130 case 0xeb: /* jmp Jb */
5131 tval = (int8_t)insn_get(s, OT_BYTE);
5132 tval += s->pc - s->cs_base;
5133 if (s->dflag == 0)
5134 tval &= 0xffff;
5135 gen_jmp(s, tval);
5136 break;
5137 case 0x70 ... 0x7f: /* jcc Jb */
5138 tval = (int8_t)insn_get(s, OT_BYTE);
5139 goto do_jcc;
5140 case 0x180 ... 0x18f: /* jcc Jv */
5141 if (dflag) {
5142 tval = (int32_t)insn_get(s, OT_LONG);
5143 } else {
5144 tval = (int16_t)insn_get(s, OT_WORD);
5145 }
5146 do_jcc:
5147 next_eip = s->pc - s->cs_base;
5148 tval += next_eip;
5149 if (s->dflag == 0)
5150 tval &= 0xffff;
5151 gen_jcc(s, b, tval, next_eip);
5152 break;
5153
5154 case 0x190 ... 0x19f: /* setcc Gv */
5155 modrm = ldub_code(s->pc++);
5156 gen_setcc(s, b);
5157 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5158 break;
5159 case 0x140 ... 0x14f: /* cmov Gv, Ev */
5160 ot = dflag + OT_WORD;
5161 modrm = ldub_code(s->pc++);
5162 reg = ((modrm >> 3) & 7) | rex_r;
5163 mod = (modrm >> 6) & 3;
5164 gen_setcc(s, b);
5165 if (mod != 3) {
5166 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5167 gen_op_ld_T1_A0[ot + s->mem_index]();
5168 } else {
5169 rm = (modrm & 7) | REX_B(s);
5170 gen_op_mov_TN_reg[ot][1][rm]();
5171 }
5172 gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5173 break;
5174
5175 /************************/
5176 /* flags */
5177 case 0x9c: /* pushf */
5178 if (s->vm86 && s->iopl != 3) {
5179 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5180 } else {
5181 if (s->cc_op != CC_OP_DYNAMIC)
5182 gen_op_set_cc_op(s->cc_op);
5183 gen_op_movl_T0_eflags();
5184 gen_push_T0(s);
5185 }
5186 break;
5187 case 0x9d: /* popf */
5188 if (s->vm86 && s->iopl != 3) {
5189 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5190 } else {
5191 gen_pop_T0(s);
5192 if (s->cpl == 0) {
5193 if (s->dflag) {
5194 gen_op_movl_eflags_T0_cpl0();
5195 } else {
5196 gen_op_movw_eflags_T0_cpl0();
5197 }
5198 } else {
5199 if (s->cpl <= s->iopl) {
5200 if (s->dflag) {
5201 gen_op_movl_eflags_T0_io();
5202 } else {
5203 gen_op_movw_eflags_T0_io();
5204 }
5205 } else {
5206 if (s->dflag) {
5207 gen_op_movl_eflags_T0();
5208 } else {
5209 gen_op_movw_eflags_T0();
5210 }
5211 }
5212 }
5213 gen_pop_update(s);
5214 s->cc_op = CC_OP_EFLAGS;
5215 /* abort translation because TF flag may change */
5216 gen_jmp_im(s->pc - s->cs_base);
5217 gen_eob(s);
5218 }
5219 break;
5220 case 0x9e: /* sahf */
5221 if (CODE64(s))
5222 goto illegal_op;
5223 gen_op_mov_TN_reg[OT_BYTE][0][R_AH]();
5224 if (s->cc_op != CC_OP_DYNAMIC)
5225 gen_op_set_cc_op(s->cc_op);
5226 gen_op_movb_eflags_T0();
5227 s->cc_op = CC_OP_EFLAGS;
5228 break;
5229 case 0x9f: /* lahf */
5230 if (CODE64(s))
5231 goto illegal_op;
5232 if (s->cc_op != CC_OP_DYNAMIC)
5233 gen_op_set_cc_op(s->cc_op);
5234 gen_op_movl_T0_eflags();
5235 gen_op_mov_reg_T0[OT_BYTE][R_AH]();
5236 break;
5237 case 0xf5: /* cmc */
5238 if (s->cc_op != CC_OP_DYNAMIC)
5239 gen_op_set_cc_op(s->cc_op);
5240 gen_op_cmc();
5241 s->cc_op = CC_OP_EFLAGS;
5242 break;
5243 case 0xf8: /* clc */
5244 if (s->cc_op != CC_OP_DYNAMIC)
5245 gen_op_set_cc_op(s->cc_op);
5246 gen_op_clc();
5247 s->cc_op = CC_OP_EFLAGS;
5248 break;
5249 case 0xf9: /* stc */
5250 if (s->cc_op != CC_OP_DYNAMIC)
5251 gen_op_set_cc_op(s->cc_op);
5252 gen_op_stc();
5253 s->cc_op = CC_OP_EFLAGS;
5254 break;
5255 case 0xfc: /* cld */
5256 gen_op_cld();
5257 break;
5258 case 0xfd: /* std */
5259 gen_op_std();
5260 break;
5261
5262 /************************/
5263 /* bit operations */
5264 case 0x1ba: /* bt/bts/btr/btc Gv, im */
5265 ot = dflag + OT_WORD;
5266 modrm = ldub_code(s->pc++);
5267 op = (modrm >> 3) & 7;
5268 mod = (modrm >> 6) & 3;
5269 rm = (modrm & 7) | REX_B(s);
5270 if (mod != 3) {
5271 s->rip_offset = 1;
5272 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5273 gen_op_ld_T0_A0[ot + s->mem_index]();
5274 } else {
5275 gen_op_mov_TN_reg[ot][0][rm]();
5276 }
5277 /* load shift */
5278 val = ldub_code(s->pc++);
5279 gen_op_movl_T1_im(val);
5280 if (op < 4)
5281 goto illegal_op;
5282 op -= 4;
5283 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5284 s->cc_op = CC_OP_SARB + ot;
5285 if (op != 0) {
5286 if (mod != 3)
5287 gen_op_st_T0_A0[ot + s->mem_index]();
5288 else
5289 gen_op_mov_reg_T0[ot][rm]();
5290 gen_op_update_bt_cc();
5291 }
5292 break;
5293 case 0x1a3: /* bt Gv, Ev */
5294 op = 0;
5295 goto do_btx;
5296 case 0x1ab: /* bts */
5297 op = 1;
5298 goto do_btx;
5299 case 0x1b3: /* btr */
5300 op = 2;
5301 goto do_btx;
5302 case 0x1bb: /* btc */
5303 op = 3;
5304 do_btx:
5305 ot = dflag + OT_WORD;
5306 modrm = ldub_code(s->pc++);
5307 reg = ((modrm >> 3) & 7) | rex_r;
5308 mod = (modrm >> 6) & 3;
5309 rm = (modrm & 7) | REX_B(s);
5310 gen_op_mov_TN_reg[OT_LONG][1][reg]();
5311 if (mod != 3) {
5312 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5313 /* specific case: we need to add a displacement */
5314 gen_op_add_bit_A0_T1[ot - OT_WORD]();
5315 gen_op_ld_T0_A0[ot + s->mem_index]();
5316 } else {
5317 gen_op_mov_TN_reg[ot][0][rm]();
5318 }
5319 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5320 s->cc_op = CC_OP_SARB + ot;
5321 if (op != 0) {
5322 if (mod != 3)
5323 gen_op_st_T0_A0[ot + s->mem_index]();
5324 else
5325 gen_op_mov_reg_T0[ot][rm]();
5326 gen_op_update_bt_cc();
5327 }
5328 break;
5329 case 0x1bc: /* bsf */
5330 case 0x1bd: /* bsr */
5331 ot = dflag + OT_WORD;
5332 modrm = ldub_code(s->pc++);
5333 reg = ((modrm >> 3) & 7) | rex_r;
5334 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5335 /* NOTE: in order to handle the 0 case, we must load the
5336 result. It could be optimized with a generated jump */
5337 gen_op_mov_TN_reg[ot][1][reg]();
5338 gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5339 gen_op_mov_reg_T1[ot][reg]();
5340 s->cc_op = CC_OP_LOGICB + ot;
5341 break;
5342 /************************/
5343 /* bcd */
5344 case 0x27: /* daa */
5345 if (CODE64(s))
5346 goto illegal_op;
5347 if (s->cc_op != CC_OP_DYNAMIC)
5348 gen_op_set_cc_op(s->cc_op);
5349 gen_op_daa();
5350 s->cc_op = CC_OP_EFLAGS;
5351 break;
5352 case 0x2f: /* das */
5353 if (CODE64(s))
5354 goto illegal_op;
5355 if (s->cc_op != CC_OP_DYNAMIC)
5356 gen_op_set_cc_op(s->cc_op);
5357 gen_op_das();
5358 s->cc_op = CC_OP_EFLAGS;
5359 break;
5360 case 0x37: /* aaa */
5361 if (CODE64(s))
5362 goto illegal_op;
5363 if (s->cc_op != CC_OP_DYNAMIC)
5364 gen_op_set_cc_op(s->cc_op);
5365 gen_op_aaa();
5366 s->cc_op = CC_OP_EFLAGS;
5367 break;
5368 case 0x3f: /* aas */
5369 if (CODE64(s))
5370 goto illegal_op;
5371 if (s->cc_op != CC_OP_DYNAMIC)
5372 gen_op_set_cc_op(s->cc_op);
5373 gen_op_aas();
5374 s->cc_op = CC_OP_EFLAGS;
5375 break;
5376 case 0xd4: /* aam */
5377 if (CODE64(s))
5378 goto illegal_op;
5379 val = ldub_code(s->pc++);
5380 gen_op_aam(val);
5381 s->cc_op = CC_OP_LOGICB;
5382 break;
5383 case 0xd5: /* aad */
5384 if (CODE64(s))
5385 goto illegal_op;
5386 val = ldub_code(s->pc++);
5387 gen_op_aad(val);
5388 s->cc_op = CC_OP_LOGICB;
5389 break;
5390 /************************/
5391 /* misc */
5392 case 0x90: /* nop */
5393 /* XXX: xchg + rex handling */
5394 /* XXX: correct lock test for all insn */
5395 if (prefixes & PREFIX_LOCK)
5396 goto illegal_op;
5397 break;
5398 case 0x9b: /* fwait */
5399 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5400 (HF_MP_MASK | HF_TS_MASK)) {
5401 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5402 } else {
5403 if (s->cc_op != CC_OP_DYNAMIC)
5404 gen_op_set_cc_op(s->cc_op);
5405 gen_jmp_im(pc_start - s->cs_base);
5406 gen_op_fwait();
5407 }
5408 break;
5409 case 0xcc: /* int3 */
5410 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5411 break;
5412 case 0xcd: /* int N */
5413 val = ldub_code(s->pc++);
5414 if (s->vm86 && s->iopl != 3) {
5415 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5416 } else {
5417 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5418 }
5419 break;
5420 case 0xce: /* into */
5421 if (CODE64(s))
5422 goto illegal_op;
5423 if (s->cc_op != CC_OP_DYNAMIC)
5424 gen_op_set_cc_op(s->cc_op);
5425 gen_jmp_im(pc_start - s->cs_base);
5426 gen_op_into(s->pc - pc_start);
5427 break;
5428 case 0xf1: /* icebp (undocumented, exits to external debugger) */
5429#if 1
5430 gen_debug(s, pc_start - s->cs_base);
5431#else
5432 /* start debug */
5433 tb_flush(cpu_single_env);
5434 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
5435#endif
5436 break;
5437 case 0xfa: /* cli */
5438 if (!s->vm86) {
5439 if (s->cpl <= s->iopl) {
5440 gen_op_cli();
5441 } else {
5442 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5443 }
5444 } else {
5445 if (s->iopl == 3) {
5446 gen_op_cli();
5447 } else {
5448 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5449 }
5450 }
5451 break;
5452 case 0xfb: /* sti */
5453 if (!s->vm86) {
5454 if (s->cpl <= s->iopl) {
5455 gen_sti:
5456 gen_op_sti();
5457 /* interruptions are enabled only the first insn after sti */
5458 /* If several instructions disable interrupts, only the
5459 _first_ does it */
5460 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5461 gen_op_set_inhibit_irq();
5462 /* give a chance to handle pending irqs */
5463 gen_jmp_im(s->pc - s->cs_base);
5464 gen_eob(s);
5465 } else {
5466 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5467 }
5468 } else {
5469 if (s->iopl == 3) {
5470 goto gen_sti;
5471 } else {
5472 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5473 }
5474 }
5475 break;
5476 case 0x62: /* bound */
5477 if (CODE64(s))
5478 goto illegal_op;
5479 ot = dflag ? OT_LONG : OT_WORD;
5480 modrm = ldub_code(s->pc++);
5481 reg = (modrm >> 3) & 7;
5482 mod = (modrm >> 6) & 3;
5483 if (mod == 3)
5484 goto illegal_op;
5485 gen_op_mov_TN_reg[ot][0][reg]();
5486 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5487 gen_jmp_im(pc_start - s->cs_base);
5488 if (ot == OT_WORD)
5489 gen_op_boundw();
5490 else
5491 gen_op_boundl();
5492 break;
5493 case 0x1c8 ... 0x1cf: /* bswap reg */
5494 reg = (b & 7) | REX_B(s);
5495#ifdef TARGET_X86_64
5496 if (dflag == 2) {
5497 gen_op_mov_TN_reg[OT_QUAD][0][reg]();
5498 gen_op_bswapq_T0();
5499 gen_op_mov_reg_T0[OT_QUAD][reg]();
5500 } else
5501#endif
5502 {
5503 gen_op_mov_TN_reg[OT_LONG][0][reg]();
5504 gen_op_bswapl_T0();
5505 gen_op_mov_reg_T0[OT_LONG][reg]();
5506 }
5507 break;
5508 case 0xd6: /* salc */
5509 if (CODE64(s))
5510 goto illegal_op;
5511 if (s->cc_op != CC_OP_DYNAMIC)
5512 gen_op_set_cc_op(s->cc_op);
5513 gen_op_salc();
5514 break;
5515 case 0xe0: /* loopnz */
5516 case 0xe1: /* loopz */
5517 if (s->cc_op != CC_OP_DYNAMIC)
5518 gen_op_set_cc_op(s->cc_op);
5519 /* FALL THRU */
5520 case 0xe2: /* loop */
5521 case 0xe3: /* jecxz */
5522 {
5523 int l1, l2;
5524
5525 tval = (int8_t)insn_get(s, OT_BYTE);
5526 next_eip = s->pc - s->cs_base;
5527 tval += next_eip;
5528 if (s->dflag == 0)
5529 tval &= 0xffff;
5530
5531 l1 = gen_new_label();
5532 l2 = gen_new_label();
5533 b &= 3;
5534 if (b == 3) {
5535 gen_op_jz_ecx[s->aflag](l1);
5536 } else {
5537 gen_op_dec_ECX[s->aflag]();
5538 if (b <= 1)
5539 gen_op_mov_T0_cc();
5540 gen_op_loop[s->aflag][b](l1);
5541 }
5542
5543 gen_jmp_im(next_eip);
5544 gen_op_jmp_label(l2);
5545 gen_set_label(l1);
5546 gen_jmp_im(tval);
5547 gen_set_label(l2);
5548 gen_eob(s);
5549 }
5550 break;
5551 case 0x130: /* wrmsr */
5552 case 0x132: /* rdmsr */
5553 if (s->cpl != 0) {
5554 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5555 } else {
5556 if (b & 2)
5557 gen_op_rdmsr();
5558 else
5559 gen_op_wrmsr();
5560 }
5561 break;
5562 case 0x131: /* rdtsc */
5563 gen_jmp_im(pc_start - s->cs_base);
5564 gen_op_rdtsc();
5565 break;
5566 case 0x134: /* sysenter */
5567 if (CODE64(s))
5568 goto illegal_op;
5569 if (!s->pe) {
5570 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5571 } else {
5572 if (s->cc_op != CC_OP_DYNAMIC) {
5573 gen_op_set_cc_op(s->cc_op);
5574 s->cc_op = CC_OP_DYNAMIC;
5575 }
5576 gen_jmp_im(pc_start - s->cs_base);
5577 gen_op_sysenter();
5578 gen_eob(s);
5579 }
5580 break;
5581 case 0x135: /* sysexit */
5582 if (CODE64(s))
5583 goto illegal_op;
5584 if (!s->pe) {
5585 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5586 } else {
5587 if (s->cc_op != CC_OP_DYNAMIC) {
5588 gen_op_set_cc_op(s->cc_op);
5589 s->cc_op = CC_OP_DYNAMIC;
5590 }
5591 gen_jmp_im(pc_start - s->cs_base);
5592 gen_op_sysexit();
5593 gen_eob(s);
5594 }
5595 break;
5596#ifdef TARGET_X86_64
5597 case 0x105: /* syscall */
5598 /* XXX: is it usable in real mode ? */
5599 if (s->cc_op != CC_OP_DYNAMIC) {
5600 gen_op_set_cc_op(s->cc_op);
5601 s->cc_op = CC_OP_DYNAMIC;
5602 }
5603 gen_jmp_im(pc_start - s->cs_base);
5604 gen_op_syscall(s->pc - pc_start);
5605 gen_eob(s);
5606 break;
5607 case 0x107: /* sysret */
5608 if (!s->pe) {
5609 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5610 } else {
5611 if (s->cc_op != CC_OP_DYNAMIC) {
5612 gen_op_set_cc_op(s->cc_op);
5613 s->cc_op = CC_OP_DYNAMIC;
5614 }
5615 gen_jmp_im(pc_start - s->cs_base);
5616 gen_op_sysret(s->dflag);
5617 /* condition codes are modified only in long mode */
5618 if (s->lma)
5619 s->cc_op = CC_OP_EFLAGS;
5620 gen_eob(s);
5621 }
5622 break;
5623#endif
5624 case 0x1a2: /* cpuid */
5625 gen_op_cpuid();
5626 break;
5627 case 0xf4: /* hlt */
5628 if (s->cpl != 0) {
5629 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5630 } else {
5631 if (s->cc_op != CC_OP_DYNAMIC)
5632 gen_op_set_cc_op(s->cc_op);
5633 gen_jmp_im(s->pc - s->cs_base);
5634 gen_op_hlt();
5635 s->is_jmp = 3;
5636 }
5637 break;
5638 case 0x100:
5639 modrm = ldub_code(s->pc++);
5640 mod = (modrm >> 6) & 3;
5641 op = (modrm >> 3) & 7;
5642 switch(op) {
5643 case 0: /* sldt */
5644 if (!s->pe || s->vm86)
5645 goto illegal_op;
5646 gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
5647 ot = OT_WORD;
5648 if (mod == 3)
5649 ot += s->dflag;
5650 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5651 break;
5652 case 2: /* lldt */
5653 if (!s->pe || s->vm86)
5654 goto illegal_op;
5655 if (s->cpl != 0) {
5656 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5657 } else {
5658 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5659 gen_jmp_im(pc_start - s->cs_base);
5660 gen_op_lldt_T0();
5661 }
5662 break;
5663 case 1: /* str */
5664 if (!s->pe || s->vm86)
5665 goto illegal_op;
5666 gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
5667 ot = OT_WORD;
5668 if (mod == 3)
5669 ot += s->dflag;
5670 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5671 break;
5672 case 3: /* ltr */
5673 if (!s->pe || s->vm86)
5674 goto illegal_op;
5675 if (s->cpl != 0) {
5676 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5677 } else {
5678 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5679 gen_jmp_im(pc_start - s->cs_base);
5680 gen_op_ltr_T0();
5681 }
5682 break;
5683 case 4: /* verr */
5684 case 5: /* verw */
5685 if (!s->pe || s->vm86)
5686 goto illegal_op;
5687 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5688 if (s->cc_op != CC_OP_DYNAMIC)
5689 gen_op_set_cc_op(s->cc_op);
5690 if (op == 4)
5691 gen_op_verr();
5692 else
5693 gen_op_verw();
5694 s->cc_op = CC_OP_EFLAGS;
5695 break;
5696 default:
5697 goto illegal_op;
5698 }
5699 break;
5700 case 0x101:
5701 modrm = ldub_code(s->pc++);
5702 mod = (modrm >> 6) & 3;
5703 op = (modrm >> 3) & 7;
5704 rm = modrm & 7;
5705 switch(op) {
5706 case 0: /* sgdt */
5707 if (mod == 3)
5708 goto illegal_op;
5709 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5710 gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
5711 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
5712 gen_add_A0_im(s, 2);
5713 gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
5714 if (!s->dflag)
5715 gen_op_andl_T0_im(0xffffff);
5716 gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5717 break;
5718 case 1:
5719 if (mod == 3) {
5720 switch (rm) {
5721 case 0: /* monitor */
5722 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5723 s->cpl != 0)
5724 goto illegal_op;
5725 gen_jmp_im(pc_start - s->cs_base);
5726#ifdef TARGET_X86_64
5727 if (s->aflag == 2) {
5728 gen_op_movq_A0_reg[R_EBX]();
5729 gen_op_addq_A0_AL();
5730 } else
5731#endif
5732 {
5733 gen_op_movl_A0_reg[R_EBX]();
5734 gen_op_addl_A0_AL();
5735 if (s->aflag == 0)
5736 gen_op_andl_A0_ffff();
5737 }
5738 gen_add_A0_ds_seg(s);
5739 gen_op_monitor();
5740 break;
5741 case 1: /* mwait */
5742 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5743 s->cpl != 0)
5744 goto illegal_op;
5745 if (s->cc_op != CC_OP_DYNAMIC) {
5746 gen_op_set_cc_op(s->cc_op);
5747 s->cc_op = CC_OP_DYNAMIC;
5748 }
5749 gen_jmp_im(s->pc - s->cs_base);
5750 gen_op_mwait();
5751 gen_eob(s);
5752 break;
5753 default:
5754 goto illegal_op;
5755 }
5756 } else { /* sidt */
5757 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5758 gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
5759 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
5760 gen_add_A0_im(s, 2);
5761 gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
5762 if (!s->dflag)
5763 gen_op_andl_T0_im(0xffffff);
5764 gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5765 }
5766 break;
5767 case 2: /* lgdt */
5768 case 3: /* lidt */
5769 if (mod == 3)
5770 goto illegal_op;
5771 if (s->cpl != 0) {
5772 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5773 } else {
5774 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5775 gen_op_ld_T1_A0[OT_WORD + s->mem_index]();
5776 gen_add_A0_im(s, 2);
5777 gen_op_ld_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5778 if (!s->dflag)
5779 gen_op_andl_T0_im(0xffffff);
5780 if (op == 2) {
5781 gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
5782 gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
5783 } else {
5784 gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
5785 gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
5786 }
5787 }
5788 break;
5789 case 4: /* smsw */
5790 gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
5791 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
5792 break;
5793 case 6: /* lmsw */
5794 if (s->cpl != 0) {
5795 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5796 } else {
5797 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5798 gen_op_lmsw_T0();
5799 gen_jmp_im(s->pc - s->cs_base);
5800 gen_eob(s);
5801 }
5802 break;
5803 case 7: /* invlpg */
5804 if (s->cpl != 0) {
5805 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5806 } else {
5807 if (mod == 3) {
5808#ifdef TARGET_X86_64
5809 if (CODE64(s) && rm == 0) {
5810 /* swapgs */
5811 gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
5812 gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
5813 gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
5814 gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
5815 } else
5816#endif
5817 {
5818 goto illegal_op;
5819 }
5820 } else {
5821 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5822 gen_op_invlpg_A0();
5823 gen_jmp_im(s->pc - s->cs_base);
5824 gen_eob(s);
5825 }
5826 }
5827 break;
5828 default:
5829 goto illegal_op;
5830 }
5831 break;
5832 case 0x108: /* invd */
5833 case 0x109: /* wbinvd */
5834 if (s->cpl != 0) {
5835 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5836 } else {
5837 /* nothing to do */
5838 }
5839 break;
5840 case 0x63: /* arpl or movslS (x86_64) */
5841#ifdef TARGET_X86_64
5842 if (CODE64(s)) {
5843 int d_ot;
5844 /* d_ot is the size of destination */
5845 d_ot = dflag + OT_WORD;
5846
5847 modrm = ldub_code(s->pc++);
5848 reg = ((modrm >> 3) & 7) | rex_r;
5849 mod = (modrm >> 6) & 3;
5850 rm = (modrm & 7) | REX_B(s);
5851
5852 if (mod == 3) {
5853 gen_op_mov_TN_reg[OT_LONG][0][rm]();
5854 /* sign extend */
5855 if (d_ot == OT_QUAD)
5856 gen_op_movslq_T0_T0();
5857 gen_op_mov_reg_T0[d_ot][reg]();
5858 } else {
5859 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5860 if (d_ot == OT_QUAD) {
5861 gen_op_lds_T0_A0[OT_LONG + s->mem_index]();
5862 } else {
5863 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
5864 }
5865 gen_op_mov_reg_T0[d_ot][reg]();
5866 }
5867 } else
5868#endif
5869 {
5870 if (!s->pe || s->vm86)
5871 goto illegal_op;
5872 ot = dflag ? OT_LONG : OT_WORD;
5873 modrm = ldub_code(s->pc++);
5874 reg = (modrm >> 3) & 7;
5875 mod = (modrm >> 6) & 3;
5876 rm = modrm & 7;
5877 if (mod != 3) {
5878 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5879 gen_op_ld_T0_A0[ot + s->mem_index]();
5880 } else {
5881 gen_op_mov_TN_reg[ot][0][rm]();
5882 }
5883 if (s->cc_op != CC_OP_DYNAMIC)
5884 gen_op_set_cc_op(s->cc_op);
5885 gen_op_arpl();
5886 s->cc_op = CC_OP_EFLAGS;
5887 if (mod != 3) {
5888 gen_op_st_T0_A0[ot + s->mem_index]();
5889 } else {
5890 gen_op_mov_reg_T0[ot][rm]();
5891 }
5892 gen_op_arpl_update();
5893 }
5894 break;
5895 case 0x102: /* lar */
5896 case 0x103: /* lsl */
5897 if (!s->pe || s->vm86)
5898 goto illegal_op;
5899 ot = dflag ? OT_LONG : OT_WORD;
5900 modrm = ldub_code(s->pc++);
5901 reg = ((modrm >> 3) & 7) | rex_r;
5902 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5903 gen_op_mov_TN_reg[ot][1][reg]();
5904 if (s->cc_op != CC_OP_DYNAMIC)
5905 gen_op_set_cc_op(s->cc_op);
5906 if (b == 0x102)
5907 gen_op_lar();
5908 else
5909 gen_op_lsl();
5910 s->cc_op = CC_OP_EFLAGS;
5911 gen_op_mov_reg_T1[ot][reg]();
5912 break;
5913 case 0x118:
5914 modrm = ldub_code(s->pc++);
5915 mod = (modrm >> 6) & 3;
5916 op = (modrm >> 3) & 7;
5917 switch(op) {
5918 case 0: /* prefetchnta */
5919 case 1: /* prefetchnt0 */
5920 case 2: /* prefetchnt0 */
5921 case 3: /* prefetchnt0 */
5922 if (mod == 3)
5923 goto illegal_op;
5924 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5925 /* nothing more to do */
5926 break;
5927 default: /* nop (multi byte) */
5928 gen_nop_modrm(s, modrm);
5929 break;
5930 }
5931 break;
5932 case 0x119 ... 0x11f: /* nop (multi byte) */
5933 modrm = ldub_code(s->pc++);
5934 gen_nop_modrm(s, modrm);
5935 break;
5936 case 0x120: /* mov reg, crN */
5937 case 0x122: /* mov crN, reg */
5938 if (s->cpl != 0) {
5939 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5940 } else {
5941 modrm = ldub_code(s->pc++);
5942 if ((modrm & 0xc0) != 0xc0)
5943 goto illegal_op;
5944 rm = (modrm & 7) | REX_B(s);
5945 reg = ((modrm >> 3) & 7) | rex_r;
5946 if (CODE64(s))
5947 ot = OT_QUAD;
5948 else
5949 ot = OT_LONG;
5950 switch(reg) {
5951 case 0:
5952 case 2:
5953 case 3:
5954 case 4:
5955 case 8:
5956 if (b & 2) {
5957 gen_op_mov_TN_reg[ot][0][rm]();
5958 gen_op_movl_crN_T0(reg);
5959 gen_jmp_im(s->pc - s->cs_base);
5960 gen_eob(s);
5961 } else {
5962#if !defined(CONFIG_USER_ONLY)
5963 if (reg == 8)
5964 gen_op_movtl_T0_cr8();
5965 else
5966#endif
5967 gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
5968 gen_op_mov_reg_T0[ot][rm]();
5969 }
5970 break;
5971 default:
5972 goto illegal_op;
5973 }
5974 }
5975 break;
5976 case 0x121: /* mov reg, drN */
5977 case 0x123: /* mov drN, reg */
5978 if (s->cpl != 0) {
5979 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5980 } else {
5981 modrm = ldub_code(s->pc++);
5982 if ((modrm & 0xc0) != 0xc0)
5983 goto illegal_op;
5984 rm = (modrm & 7) | REX_B(s);
5985 reg = ((modrm >> 3) & 7) | rex_r;
5986 if (CODE64(s))
5987 ot = OT_QUAD;
5988 else
5989 ot = OT_LONG;
5990 /* XXX: do it dynamically with CR4.DE bit */
5991 if (reg == 4 || reg == 5 || reg >= 8)
5992 goto illegal_op;
5993 if (b & 2) {
5994 gen_op_mov_TN_reg[ot][0][rm]();
5995 gen_op_movl_drN_T0(reg);
5996 gen_jmp_im(s->pc - s->cs_base);
5997 gen_eob(s);
5998 } else {
5999 gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
6000 gen_op_mov_reg_T0[ot][rm]();
6001 }
6002 }
6003 break;
6004 case 0x106: /* clts */
6005 if (s->cpl != 0) {
6006 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
6007 } else {
6008 gen_op_clts();
6009 /* abort block because static cpu state changed */
6010 gen_jmp_im(s->pc - s->cs_base);
6011 gen_eob(s);
6012 }
6013 break;
6014 /* MMX/SSE/SSE2/PNI support */
6015 case 0x1c3: /* MOVNTI reg, mem */
6016 if (!(s->cpuid_features & CPUID_SSE2))
6017 goto illegal_op;
6018 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
6019 modrm = ldub_code(s->pc++);
6020 mod = (modrm >> 6) & 3;
6021 if (mod == 3)
6022 goto illegal_op;
6023 reg = ((modrm >> 3) & 7) | rex_r;
6024 /* generate a generic store */
6025 gen_ldst_modrm(s, modrm, ot, reg, 1);
6026 break;
6027 case 0x1ae:
6028 modrm = ldub_code(s->pc++);
6029 mod = (modrm >> 6) & 3;
6030 op = (modrm >> 3) & 7;
6031 switch(op) {
6032 case 0: /* fxsave */
6033 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6034 (s->flags & HF_EM_MASK))
6035 goto illegal_op;
6036 if (s->flags & HF_TS_MASK) {
6037 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6038 break;
6039 }
6040 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6041 gen_op_fxsave_A0((s->dflag == 2));
6042 break;
6043 case 1: /* fxrstor */
6044 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6045 (s->flags & HF_EM_MASK))
6046 goto illegal_op;
6047 if (s->flags & HF_TS_MASK) {
6048 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6049 break;
6050 }
6051 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6052 gen_op_fxrstor_A0((s->dflag == 2));
6053 break;
6054 case 2: /* ldmxcsr */
6055 case 3: /* stmxcsr */
6056 if (s->flags & HF_TS_MASK) {
6057 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6058 break;
6059 }
6060 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6061 mod == 3)
6062 goto illegal_op;
6063 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6064 if (op == 2) {
6065 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
6066 gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
6067 } else {
6068 gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
6069 gen_op_st_T0_A0[OT_LONG + s->mem_index]();
6070 }
6071 break;
6072 case 5: /* lfence */
6073 case 6: /* mfence */
6074 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6075 goto illegal_op;
6076 break;
6077 case 7: /* sfence / clflush */
6078 if ((modrm & 0xc7) == 0xc0) {
6079 /* sfence */
6080 if (!(s->cpuid_features & CPUID_SSE))
6081 goto illegal_op;
6082 } else {
6083 /* clflush */
6084 if (!(s->cpuid_features & CPUID_CLFLUSH))
6085 goto illegal_op;
6086 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6087 }
6088 break;
6089 default:
6090 goto illegal_op;
6091 }
6092 break;
6093 case 0x10d: /* prefetch */
6094 modrm = ldub_code(s->pc++);
6095 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6096 /* ignore for now */
6097 break;
6098 case 0x1aa: /* rsm */
6099 if (!(s->flags & HF_SMM_MASK))
6100 goto illegal_op;
6101 if (s->cc_op != CC_OP_DYNAMIC) {
6102 gen_op_set_cc_op(s->cc_op);
6103 s->cc_op = CC_OP_DYNAMIC;
6104 }
6105 gen_jmp_im(s->pc - s->cs_base);
6106 gen_op_rsm();
6107 gen_eob(s);
6108 break;
6109 case 0x110 ... 0x117:
6110 case 0x128 ... 0x12f:
6111 case 0x150 ... 0x177:
6112 case 0x17c ... 0x17f:
6113 case 0x1c2:
6114 case 0x1c4 ... 0x1c6:
6115 case 0x1d0 ... 0x1fe:
6116 gen_sse(s, b, pc_start, rex_r);
6117 break;
6118 default:
6119 goto illegal_op;
6120 }
6121 /* lock generation */
6122 if (s->prefix & PREFIX_LOCK)
6123 gen_op_unlock();
6124 return s->pc;
6125 illegal_op:
6126 if (s->prefix & PREFIX_LOCK)
6127 gen_op_unlock();
6128 /* XXX: ensure that no lock was generated */
6129 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6130 return s->pc;
6131}
6132
/* Convenience masks over the x86 EFLAGS condition-code bits used by the
   flag read/write tables below:
   CC_OSZAPC = all six arithmetic status flags (O, S, Z, A, P, C);
   CC_OSZAP  = the same set without the carry flag. */
#define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
#define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
6135
/* flags read by an operation */
/* Table indexed by micro-op number (INDEX_op_*) giving the set of EFLAGS
   condition-code bits each generated op READS, as a mask of CC_* bits.
   Ops not listed read no flags (designated initializers leave the rest 0).
   Presumably consumed by the translator's flag optimisation pass to decide
   when lazily-computed condition codes must be materialised -- see the
   "flag optimisation" note below; confirm against the pass that walks
   gen_opc_buf. */
static uint16_t opc_read_flags[NB_OPS] = {
    /* BCD adjust ops consume the auxiliary (and some the carry) flag */
    [INDEX_op_aas] = CC_A,
    [INDEX_op_aaa] = CC_A,
    [INDEX_op_das] = CC_A | CC_C,
    [INDEX_op_daa] = CC_A | CC_C,

    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_C,

    [INDEX_op_into] = CC_O,

    /* conditional jumps after a sub: each variant reads only the flag
       bits its condition actually tests */
    [INDEX_op_jb_subb] = CC_C,
    [INDEX_op_jb_subw] = CC_C,
    [INDEX_op_jb_subl] = CC_C,

    [INDEX_op_jz_subb] = CC_Z,
    [INDEX_op_jz_subw] = CC_Z,
    [INDEX_op_jz_subl] = CC_Z,

    [INDEX_op_jbe_subb] = CC_Z | CC_C,
    [INDEX_op_jbe_subw] = CC_Z | CC_C,
    [INDEX_op_jbe_subl] = CC_Z | CC_C,

    [INDEX_op_js_subb] = CC_S,
    [INDEX_op_js_subw] = CC_S,
    [INDEX_op_js_subl] = CC_S,

    [INDEX_op_jl_subb] = CC_O | CC_S,
    [INDEX_op_jl_subw] = CC_O | CC_S,
    [INDEX_op_jl_subl] = CC_O | CC_S,

    [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,

    /* loopz/loopnz additionally test ZF */
    [INDEX_op_loopnzw] = CC_Z,
    [INDEX_op_loopnzl] = CC_Z,
    [INDEX_op_loopzw] = CC_Z,
    [INDEX_op_loopzl] = CC_Z,

    /* setcc variants computed from the saved cc state */
    [INDEX_op_seto_T0_cc] = CC_O,
    [INDEX_op_setb_T0_cc] = CC_C,
    [INDEX_op_setz_T0_cc] = CC_Z,
    [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
    [INDEX_op_sets_T0_cc] = CC_S,
    [INDEX_op_setp_T0_cc] = CC_P,
    [INDEX_op_setl_T0_cc] = CC_O | CC_S,
    [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,

    /* setcc variants specialised for a preceding sub */
    [INDEX_op_setb_T0_subb] = CC_C,
    [INDEX_op_setb_T0_subw] = CC_C,
    [INDEX_op_setb_T0_subl] = CC_C,

    [INDEX_op_setz_T0_subb] = CC_Z,
    [INDEX_op_setz_T0_subw] = CC_Z,
    [INDEX_op_setz_T0_subl] = CC_Z,

    [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,

    [INDEX_op_sets_T0_subb] = CC_S,
    [INDEX_op_sets_T0_subw] = CC_S,
    [INDEX_op_sets_T0_subl] = CC_S,

    [INDEX_op_setl_T0_subb] = CC_O | CC_S,
    [INDEX_op_setl_T0_subw] = CC_O | CC_S,
    [INDEX_op_setl_T0_subl] = CC_O | CC_S,

    [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,

    /* pushf/lahf-style reads need every arithmetic flag */
    [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
    [INDEX_op_cmc] = CC_C,
    [INDEX_op_salc] = CC_C,

    /* needed for correct flag optimisation before string ops */
    [INDEX_op_jnz_ecxw] = CC_OSZAPC,
    [INDEX_op_jnz_ecxl] = CC_OSZAPC,
    [INDEX_op_jz_ecxw] = CC_OSZAPC,
    [INDEX_op_jz_ecxl] = CC_OSZAPC,

#ifdef TARGET_X86_64
    /* 64-bit (quad) counterparts of the entries above */
    [INDEX_op_jb_subq] = CC_C,
    [INDEX_op_jz_subq] = CC_Z,
    [INDEX_op_jbe_subq] = CC_Z | CC_C,
    [INDEX_op_js_subq] = CC_S,
    [INDEX_op_jl_subq] = CC_O | CC_S,
    [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzq] = CC_Z,
    [INDEX_op_loopzq] = CC_Z,

    [INDEX_op_setb_T0_subq] = CC_C,
    [INDEX_op_setz_T0_subq] = CC_Z,
    [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
    [INDEX_op_sets_T0_subq] = CC_S,
    [INDEX_op_setl_T0_subq] = CC_O | CC_S,
    [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_jnz_ecxq] = CC_OSZAPC,
    [INDEX_op_jz_ecxq] = CC_OSZAPC,
#endif

/* adc/sbb and rcl/rcr consume the incoming carry flag; this macro stamps
   out the entries for each memory-access variant suffix (the op names are
   built by token pasting, e.g. INDEX_op_adcb_raw_T0_T1_cc) */
#define DEF_READF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)

    /* instantiate for the plain, raw, kernel and user access variants */
    DEF_READF( )
    DEF_READF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_READF(_kernel)
    DEF_READF(_user)
#endif
};
6268
/* flags written by an operation: for each micro-op index, the set of
   x86 condition-code bits (CC_*) that the op may modify.  Consumed by
   optimize_flags(), which replaces an op with its flag-free variant
   when none of the bits it writes is ever read. */
static uint16_t opc_write_flags[NB_OPS] = {
    [INDEX_op_update2_cc] = CC_OSZAPC,
    [INDEX_op_update1_cc] = CC_OSZAPC,
    [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_update_neg_cc] = CC_OSZAPC,
    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_OSZAPC,
    [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,

    /* multiplies (8/16/32 and, on x86-64, 64 bit widths) */
    [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
    [INDEX_op_imull_T0_T1] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)

    /* sse */
    [INDEX_op_ucomiss] = CC_OSZAPC,
    [INDEX_op_ucomisd] = CC_OSZAPC,
    [INDEX_op_comiss] = CC_OSZAPC,
    [INDEX_op_comisd] = CC_OSZAPC,

    /* bcd */
    [INDEX_op_aam] = CC_OSZAPC,
    [INDEX_op_aad] = CC_OSZAPC,
    [INDEX_op_aas] = CC_OSZAPC,
    [INDEX_op_aaa] = CC_OSZAPC,
    [INDEX_op_das] = CC_OSZAPC,
    [INDEX_op_daa] = CC_OSZAPC,

    /* direct loads of EFLAGS (sahf, popf, ...); the byte form cannot
       touch O, hence the narrower mask */
    [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
    [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_clc] = CC_C,
    [INDEX_op_stc] = CC_C,
    [INDEX_op_cmc] = CC_C,

    /* bit test / set / reset / complement */
    [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)

    /* bit scan forward / reverse */
    [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
    [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)

    /* ops whose only architectural flag result is a single bit or the
       Z/P/C subset (segment checks, FPU compares) */
    [INDEX_op_cmpxchg8b] = CC_Z,
    [INDEX_op_lar] = CC_Z,
    [INDEX_op_lsl] = CC_Z,
    [INDEX_op_verr] = CC_Z,
    [INDEX_op_verw] = CC_Z,
    [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
    [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,

/* Expanded once per memory-access suffix below (plain, _raw and, for
   softmmu builds, _kernel/_user) so that every read-modify-write
   variant of these ops is covered as well. */
#define DEF_WRITEF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)


    DEF_WRITEF( )
    DEF_WRITEF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_WRITEF(_kernel)
    DEF_WRITEF(_user)
#endif
};
6419
/* simpler form of an operation if no flags need to be generated:
   maps a flag-producing micro-op to an equivalent op that skips the
   flag computation.  Entries left at 0 here are filled in by
   optimize_flags_init() to map each op to itself (i.e. "no simpler
   form available"). */
static uint16_t opc_simpler[NB_OPS] = {
    /* pure flag-update ops become nops when their flags are dead */
    [INDEX_op_update2_cc] = INDEX_op_nop,
    [INDEX_op_update1_cc] = INDEX_op_nop,
    [INDEX_op_update_neg_cc] = INDEX_op_nop,
#if 0
    /* broken: CC_OP logic must be rewritten */
    [INDEX_op_update_inc_cc] = INDEX_op_nop,
#endif

    /* shifts: drop the _cc suffix variant */
    [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
    [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
    [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
    X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)

    [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
    [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
    [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
    X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)

    [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
    [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
    [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
    X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)

/* Rotates are expanded once per memory-access suffix (plain, _raw and,
   for softmmu builds, _kernel/_user) to cover the read-modify-write
   variants too. */
#define DEF_SIMPLER(SUFFIX)\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)

    DEF_SIMPLER( )
    DEF_SIMPLER(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SIMPLER(_kernel)
    DEF_SIMPLER(_user)
#endif
};
6463
6464void optimize_flags_init(void)
6465{
6466 int i;
6467 /* put default values in arrays */
6468 for(i = 0; i < NB_OPS; i++) {
6469 if (opc_simpler[i] == 0)
6470 opc_simpler[i] = i;
6471 }
6472}
6473
6474/* CPU flags computation optimization: we move backward thru the
6475 generated code to see which flags are needed. The operation is
6476 modified if suitable */
6477static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
6478{
6479 uint16_t *opc_ptr;
6480 int live_flags, write_flags, op;
6481
6482 opc_ptr = opc_buf + opc_buf_len;
6483 /* live_flags contains the flags needed by the next instructions
6484 in the code. At the end of the bloc, we consider that all the
6485 flags are live. */
6486 live_flags = CC_OSZAPC;
6487 while (opc_ptr > opc_buf) {
6488 op = *--opc_ptr;
6489 /* if none of the flags written by the instruction is used,
6490 then we can try to find a simpler instruction */
6491 write_flags = opc_write_flags[op];
6492 if ((live_flags & write_flags) == 0) {
6493 *opc_ptr = opc_simpler[op];
6494 }
6495 /* compute the live flags before the instruction */
6496 live_flags &= ~write_flags;
6497 live_flags |= opc_read_flags[op];
6498 }
6499}
6500
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction (used to recover the
   guest PC after a fault inside the block).  Always returns 0. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int flags, j, lj, cflags;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    /* unpack the CPU mode bits from tb->flags into the disassembly
       context */
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions */
    /* NOTE(review): the factors of 4 presumably index the per-suffix
       op variants (_raw/_kernel/_user seen in the op tables) -- confirm
       against the memory-op dispatch code */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    /* direct block chaining is only allowed when no single-step /
       IRQ-inhibit condition forces us back to the main loop */
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    /* reset the micro-op output buffers */
    gen_opc_ptr = gen_opc_buf;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    gen_opparam_ptr = gen_opparam_buf;
    nb_gen_labels = 0;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;   /* last index written into the gen_opc_* side tables */

    /* main translation loop: one guest instruction per iteration */
    for(;;) {
        /* emit a debug trap if a breakpoint is set at this PC */
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            /* record, per emitted micro-op, the guest PC and cc_op so
               the state can be reconstructed at any op boundary */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        }
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;

#ifdef VBOX
#ifdef DEBUG
/*
        if(cpu_check_code_raw(env, pc_ptr, env->hflags | (env->eflags & (IOPL_MASK | TF_MASK | VM_MASK))) == ERROR_SUCCESS)
        {
            //should never happen as the jump to the patch code terminates the translation block
            dprintf(("QEmu is about to execute instructions in our patch block at %08X!!\n", pc_ptr));
        }
*/
#endif
        /* VirtualBox: single-instruction emulation was requested --
           consume the flag and end the block after this instruction */
        if (env->state & CPU_EMULATE_SINGLE_INSTR)
        {
            env->state &= ~CPU_EMULATE_SINGLE_INSTR;
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
#endif /* VBOX */

        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    /* we don't forget to fill the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    /* optional logging of CPU state, guest assembly and micro-ops */
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
        if (loglevel & CPU_LOG_TB_OP) {
            fprintf(logfile, "OP:\n");
            dump_ops(gen_opc_buf, gen_opparam_buf);
            fprintf(logfile, "\n");
        }
    }
#endif

    /* optimize flag computations */
    optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP_OPT) {
        fprintf(logfile, "AFTER FLAGS OPT:\n");
        dump_ops(gen_opc_buf, gen_opparam_buf);
        fprintf(logfile, "\n");
    }
#endif
    /* when searching for a PC, tb->size was set by a previous full
       translation; do not clobber it */
    if (!search_pc)
        tb->size = pc_ptr - pc_start;
    return 0;
}
6682
6683int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
6684{
6685 return gen_intermediate_code_internal(env, tb, 0);
6686}
6687
6688int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
6689{
6690 return gen_intermediate_code_internal(env, tb, 1);
6691}
6692
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette