VirtualBox

source: vbox/trunk/src/recompiler/new/target-i386/translate.c@ 372

Last change on this file since 372 was 1, checked in by vboxsync, 55 years ago

import

  • Property svn:eol-style set to native
File size: 197.6 KB
Line 
1/*
2 * i386 translation
3 *
4 * Copyright (c) 2003 Fabrice Bellard
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
20#include <stdarg.h>
21#include <stdlib.h>
22#include <stdio.h>
23#include <string.h>
24#include <inttypes.h>
25#ifndef VBOX
26#include <signal.h>
27#include <assert.h>
28#endif /* !VBOX */
29
30#include "cpu.h"
31#include "exec-all.h"
32#include "disas.h"
33
/* XXX: move that elsewhere */
/* Write cursors into the micro-op and micro-op-parameter buffers that the
   translator fills while decoding a translation block. */
static uint16_t *gen_opc_ptr;
static uint32_t *gen_opparam_ptr;

/* Instruction-prefix bits accumulated during decode (stored in
   DisasContext.prefix). */
#define PREFIX_REPZ 0x01
#define PREFIX_REPNZ 0x02
#define PREFIX_LOCK 0x04
#define PREFIX_DATA 0x08   /* operand-size override (0x66) */
#define PREFIX_ADR 0x10    /* address-size override (0x67) */
43
/* On TARGET_X86_64 builds the decoder tracks REX prefix state and 64-bit
   code segments; on 32-bit targets the same helpers collapse to constants
   (or NULL table entries) so the decoder source is shared. */
#ifdef TARGET_X86_64
#define X86_64_ONLY(x) x
#define X86_64_DEF(x...) x
#define CODE64(s) ((s)->code64)
#define REX_X(s) ((s)->rex_x)
#define REX_B(s) ((s)->rex_b)
/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
#if 1
#define BUGGY_64(x) NULL
#endif
#else
#define X86_64_ONLY(x) NULL
#define X86_64_DEF(x...)
#define CODE64(s) 0
#define REX_X(s) 0
#define REX_B(s) 0
#endif

#ifdef TARGET_X86_64
/* Nonzero once a REX prefix has been seen for the current insn: byte regs
   4-7 then mean SPL/BPL/SIL/DIL instead of AH/CH/DH/BH (see DEF_BREGS). */
static int x86_64_hregs;
#endif

/* With USE_DIRECT_JUMP the TB pointer parameter is patched directly into
   the generated code, so the micro-op takes no parameter. */
#ifdef USE_DIRECT_JUMP
#define TBPARAM(x)
#else
#define TBPARAM(x) (long)(x)
#endif
71
72#ifdef VBOX
73/* Special/override code readers to hide patched code. */
74
75uint8_t ldub_code_raw(target_ulong pc)
76{
77 uint8_t b;
78
79 if (!remR3GetOpcode(cpu_single_env, pc, &b))
80 b = ldub_code(pc);
81 return b;
82}
83#define ldub_code(a) ldub_code_raw(a)
84
/* 16-bit code fetch built from patch-aware byte fetches, assembled
   little-endian (low byte at pc). */
uint16_t lduw_code_raw(target_ulong pc)
{
    return (ldub_code(pc+1) << 8) | ldub_code(pc);
}
#define lduw_code(a) lduw_code_raw(a)


/* 32-bit code fetch built from patch-aware byte fetches, little-endian. */
uint32_t ldl_code_raw(target_ulong pc)
{
    return (ldub_code(pc+3) << 24) | (ldub_code(pc+2) << 16) | (ldub_code(pc+1) << 8) | ldub_code(pc);
}
#define ldl_code(a) ldl_code_raw(a)

#endif /* VBOX */
99
100
/* Per-instruction / per-translation-block decoder state. */
typedef struct DisasContext {
    /* current insn context */
    int override; /* -1 if no override */
    int prefix;   /* mask of PREFIX_* bits seen on the current insn */
    int aflag, dflag; /* address/operand size: 0=16 bit, 1=32 bit
                         (aflag==2 means 64 bit on TARGET_X86_64) */
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b; /* REX.X / REX.B prefix bits for the current insn */
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
    int cpl;    /* current privilege level */
    int iopl;   /* I/O privilege level (EFLAGS.IOPL) */
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions */
    int flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;     /* CPUID feature bits -- TODO confirm exact leaf */
    int cpuid_ext_features; /* CPUID extended feature bits */
} DisasContext;
136
/* Forward declarations for end-of-block / jump emission helpers. */
static void gen_eob(DisasContext *s);
static void gen_jmp(DisasContext *s, target_ulong eip);
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);

/* i386 arith/logic operations -- order matches the /r extension encoding
   of the 0x80..0x83 group. */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};

/* i386 shift ops -- order matches the /r extension of the 0xC0/0xD0 group;
   index 6 is the undocumented alias of SHL. */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented */
    OP_SAR = 7,
};
164
/* Build the INDEX_op_* enumeration from the micro-op list in opc.h. */
enum {
#define DEF(s, n, copy_size) INDEX_op_ ## s,
#include "opc.h"
#undef DEF
    NB_OPS,
};

#include "gen-op.h"

/* operand size -- used as the first index of most dispatch tables below */
enum {
    OT_BYTE = 0,
    OT_WORD,
    OT_LONG,
    OT_QUAD,
};
181
/* Operand register identifiers: the first eight match the hardware
   register encoding; values >= OR_TMP0 denote internal temporaries. */
enum {
    /* I386 int registers */
    OR_EAX, /* MUST be even numbered */
    OR_ECX,
    OR_EDX,
    OR_EBX,
    OR_ESP,
    OR_EBP,
    OR_ESI,
    OR_EDI,

    OR_TMP0 = 16, /* temporary operand register */
    OR_TMP1,
    OR_A0, /* temporary register used when doing address evaluation */
};
197
/* DEF_REGS expands to the per-register initializer list for the dispatch
   tables below (16 regs on x86_64, 8 otherwise).  DEF_BREGS generates
   run-time wrappers for byte-register slots 4-7: with a REX prefix
   (x86_64_hregs set) they access SPL/BPL/SIL/DIL, otherwise the legacy
   high bytes AH/CH/DH/BH. */
#ifdef TARGET_X86_64

#define NB_OP_SIZES 4

#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,\
  prefix ## R8 ## suffix,\
  prefix ## R9 ## suffix,\
  prefix ## R10 ## suffix,\
  prefix ## R11 ## suffix,\
  prefix ## R12 ## suffix,\
  prefix ## R13 ## suffix,\
  prefix ## R14 ## suffix,\
  prefix ## R15 ## suffix,

#define DEF_BREGS(prefixb, prefixh, suffix) \
 \
static void prefixb ## ESP ## suffix ## _wrapper(void) \
{ \
  if (x86_64_hregs) \
    prefixb ## ESP ## suffix (); \
  else \
    prefixh ## EAX ## suffix (); \
} \
 \
static void prefixb ## EBP ## suffix ## _wrapper(void) \
{ \
  if (x86_64_hregs) \
    prefixb ## EBP ## suffix (); \
  else \
    prefixh ## ECX ## suffix (); \
} \
 \
static void prefixb ## ESI ## suffix ## _wrapper(void) \
{ \
  if (x86_64_hregs) \
    prefixb ## ESI ## suffix (); \
  else \
    prefixh ## EDX ## suffix (); \
} \
 \
static void prefixb ## EDI ## suffix ## _wrapper(void) \
{ \
  if (x86_64_hregs) \
    prefixb ## EDI ## suffix (); \
  else \
    prefixh ## EBX ## suffix (); \
}

DEF_BREGS(gen_op_movb_, gen_op_movh_, _T0)
DEF_BREGS(gen_op_movb_, gen_op_movh_, _T1)
DEF_BREGS(gen_op_movl_T0_, gen_op_movh_T0_, )
DEF_BREGS(gen_op_movl_T1_, gen_op_movh_T1_, )

#else /* !TARGET_X86_64 */

#define NB_OP_SIZES 3

#define DEF_REGS(prefix, suffix) \
  prefix ## EAX ## suffix,\
  prefix ## ECX ## suffix,\
  prefix ## EDX ## suffix,\
  prefix ## EBX ## suffix,\
  prefix ## ESP ## suffix,\
  prefix ## EBP ## suffix,\
  prefix ## ESI ## suffix,\
  prefix ## EDI ## suffix,

#endif /* !TARGET_X86_64 */
274
/* Store T0 into a register, indexed by [operand size][register].  In the
   OT_BYTE row, slots 4-7 are AH/CH/DH/BH on 32-bit builds; on x86_64 the
   *_wrapper entries choose between SPL..DIL and AH..BH at run time. */
static GenOpFunc *gen_op_mov_reg_T0[NB_OP_SIZES][CPU_NB_REGS] = {
    [OT_BYTE] = {
        gen_op_movb_EAX_T0,
        gen_op_movb_ECX_T0,
        gen_op_movb_EDX_T0,
        gen_op_movb_EBX_T0,
#ifdef TARGET_X86_64
        gen_op_movb_ESP_T0_wrapper,
        gen_op_movb_EBP_T0_wrapper,
        gen_op_movb_ESI_T0_wrapper,
        gen_op_movb_EDI_T0_wrapper,
        gen_op_movb_R8_T0,
        gen_op_movb_R9_T0,
        gen_op_movb_R10_T0,
        gen_op_movb_R11_T0,
        gen_op_movb_R12_T0,
        gen_op_movb_R13_T0,
        gen_op_movb_R14_T0,
        gen_op_movb_R15_T0,
#else
        gen_op_movh_EAX_T0,
        gen_op_movh_ECX_T0,
        gen_op_movh_EDX_T0,
        gen_op_movh_EBX_T0,
#endif
    },
    [OT_WORD] = {
        DEF_REGS(gen_op_movw_, _T0)
    },
    [OT_LONG] = {
        DEF_REGS(gen_op_movl_, _T0)
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        DEF_REGS(gen_op_movq_, _T0)
    },
#endif
};
313
/* Store T1 into a register -- same layout and byte-row conventions as
   gen_op_mov_reg_T0 above. */
static GenOpFunc *gen_op_mov_reg_T1[NB_OP_SIZES][CPU_NB_REGS] = {
    [OT_BYTE] = {
        gen_op_movb_EAX_T1,
        gen_op_movb_ECX_T1,
        gen_op_movb_EDX_T1,
        gen_op_movb_EBX_T1,
#ifdef TARGET_X86_64
        gen_op_movb_ESP_T1_wrapper,
        gen_op_movb_EBP_T1_wrapper,
        gen_op_movb_ESI_T1_wrapper,
        gen_op_movb_EDI_T1_wrapper,
        gen_op_movb_R8_T1,
        gen_op_movb_R9_T1,
        gen_op_movb_R10_T1,
        gen_op_movb_R11_T1,
        gen_op_movb_R12_T1,
        gen_op_movb_R13_T1,
        gen_op_movb_R14_T1,
        gen_op_movb_R15_T1,
#else
        gen_op_movh_EAX_T1,
        gen_op_movh_ECX_T1,
        gen_op_movh_EDX_T1,
        gen_op_movh_EBX_T1,
#endif
    },
    [OT_WORD] = {
        DEF_REGS(gen_op_movw_, _T1)
    },
    [OT_LONG] = {
        DEF_REGS(gen_op_movl_, _T1)
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        DEF_REGS(gen_op_movq_, _T1)
    },
#endif
};
352
/* Store A0 into a register; index 0 = 16 bit, 1 = 32 bit, 2 = 64 bit
   (no byte row, hence NB_OP_SIZES - 1). */
static GenOpFunc *gen_op_mov_reg_A0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_movw_, _A0)
    },
    [1] = {
        DEF_REGS(gen_op_movl_, _A0)
    },
#ifdef TARGET_X86_64
    [2] = {
        DEF_REGS(gen_op_movq_, _A0)
    },
#endif
};
366
/* Load a register into T0 or T1, indexed by [operand size][T index][reg].
   The OT_WORD/OT_LONG/OT_QUAD rows all use the movl_TN loaders -- those
   presumably read the full register into the target_ulong temporary, so a
   single reader serves every non-byte size (matches the duplicated
   DEF_REGS rows below -- TODO confirm against gen-op.h). */
static GenOpFunc *gen_op_mov_TN_reg[NB_OP_SIZES][2][CPU_NB_REGS] =
{
    [OT_BYTE] = {
        {
            gen_op_movl_T0_EAX,
            gen_op_movl_T0_ECX,
            gen_op_movl_T0_EDX,
            gen_op_movl_T0_EBX,
#ifdef TARGET_X86_64
            gen_op_movl_T0_ESP_wrapper,
            gen_op_movl_T0_EBP_wrapper,
            gen_op_movl_T0_ESI_wrapper,
            gen_op_movl_T0_EDI_wrapper,
            gen_op_movl_T0_R8,
            gen_op_movl_T0_R9,
            gen_op_movl_T0_R10,
            gen_op_movl_T0_R11,
            gen_op_movl_T0_R12,
            gen_op_movl_T0_R13,
            gen_op_movl_T0_R14,
            gen_op_movl_T0_R15,
#else
            gen_op_movh_T0_EAX,
            gen_op_movh_T0_ECX,
            gen_op_movh_T0_EDX,
            gen_op_movh_T0_EBX,
#endif
        },
        {
            gen_op_movl_T1_EAX,
            gen_op_movl_T1_ECX,
            gen_op_movl_T1_EDX,
            gen_op_movl_T1_EBX,
#ifdef TARGET_X86_64
            gen_op_movl_T1_ESP_wrapper,
            gen_op_movl_T1_EBP_wrapper,
            gen_op_movl_T1_ESI_wrapper,
            gen_op_movl_T1_EDI_wrapper,
            gen_op_movl_T1_R8,
            gen_op_movl_T1_R9,
            gen_op_movl_T1_R10,
            gen_op_movl_T1_R11,
            gen_op_movl_T1_R12,
            gen_op_movl_T1_R13,
            gen_op_movl_T1_R14,
            gen_op_movl_T1_R15,
#else
            gen_op_movh_T1_EAX,
            gen_op_movh_T1_ECX,
            gen_op_movh_T1_EDX,
            gen_op_movh_T1_EBX,
#endif
        },
    },
    [OT_WORD] = {
        {
            DEF_REGS(gen_op_movl_T0_, )
        },
        {
            DEF_REGS(gen_op_movl_T1_, )
        },
    },
    [OT_LONG] = {
        {
            DEF_REGS(gen_op_movl_T0_, )
        },
        {
            DEF_REGS(gen_op_movl_T1_, )
        },
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        {
            DEF_REGS(gen_op_movl_T0_, )
        },
        {
            DEF_REGS(gen_op_movl_T1_, )
        },
    },
#endif
};
448
/* Load a 32-bit register into the address temporary A0. */
static GenOpFunc *gen_op_movl_A0_reg[CPU_NB_REGS] = {
    DEF_REGS(gen_op_movl_A0_, )
};

/* A0 += reg << N for 32-bit addressing; first index is the SIB scale
   shift (0..3). */
static GenOpFunc *gen_op_addl_A0_reg_sN[4][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_addl_A0_, )
    },
    [1] = {
        DEF_REGS(gen_op_addl_A0_, _s1)
    },
    [2] = {
        DEF_REGS(gen_op_addl_A0_, _s2)
    },
    [3] = {
        DEF_REGS(gen_op_addl_A0_, _s3)
    },
};
467
/* 64-bit counterparts of the A0 load/add-scaled tables above. */
#ifdef TARGET_X86_64
static GenOpFunc *gen_op_movq_A0_reg[CPU_NB_REGS] = {
    DEF_REGS(gen_op_movq_A0_, )
};

static GenOpFunc *gen_op_addq_A0_reg_sN[4][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_addq_A0_, )
    },
    [1] = {
        DEF_REGS(gen_op_addq_A0_, _s1)
    },
    [2] = {
        DEF_REGS(gen_op_addq_A0_, _s2)
    },
    [3] = {
        DEF_REGS(gen_op_addq_A0_, _s3)
    },
};
#endif
488
/* CMOVcc: conditionally move T1 into a register; index 0 = 16 bit,
   1 = 32 bit, 2 = 64 bit (no byte form on x86). */
static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
    [0] = {
        DEF_REGS(gen_op_cmovw_, _T1_T0)
    },
    [1] = {
        DEF_REGS(gen_op_cmovl_, _T1_T0)
    },
#ifdef TARGET_X86_64
    [2] = {
        DEF_REGS(gen_op_cmovq_, _T1_T0)
    },
#endif
};

/* Plain logic ops indexed by OP_*; NULL slots (add/adc/sbb/sub/cmp) are
   handled by dedicated paths in gen_op() below. */
static GenOpFunc *gen_op_arith_T0_T1_cc[8] = {
    NULL,
    gen_op_orl_T0_T1,
    NULL,
    NULL,
    gen_op_andl_T0_T1,
    NULL,
    gen_op_xorl_T0_T1,
    NULL,
};
513
/* ADC/SBB (carry-using arithmetic) per size; quad entries are NULL on
   32-bit builds via X86_64_ONLY.  The _mem variant is indexed by
   ot + mem_index, hence 3 * 4 rows (raw/kernel/user x 4 sizes). */
#define DEF_ARITHC(SUFFIX)\
  {\
    gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
    gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
  },\
  {\
    gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
    gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
  },\
  {\
    gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
    gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
  },\
  {\
    X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
    X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
  },

static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
    DEF_ARITHC( )
};

static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
    DEF_ARITHC(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_ARITHC(_kernel)
    DEF_ARITHC(_user)
#endif
};
543
/* Byte-size CC_OP for each OP_* arith op (caller adds the operand size):
   ADD/ADC -> ADDB, OR/AND/XOR -> LOGICB, SBB/SUB/CMP -> SUBB. */
static const int cc_op_arithb[8] = {
    CC_OP_ADDB,
    CC_OP_LOGICB,
    CC_OP_ADDB,
    CC_OP_SUBB,
    CC_OP_LOGICB,
    CC_OP_SUBB,
    CC_OP_LOGICB,
    CC_OP_SUBB,
};

/* CMPXCHG per size; _mem variant indexed by ot + mem_index. */
#define DEF_CMPXCHG(SUFFIX)\
  gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
  gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
  gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
  X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),

static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
    DEF_CMPXCHG( )
};

static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
    DEF_CMPXCHG(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_CMPXCHG(_kernel)
    DEF_CMPXCHG(_user)
#endif
};
572
/* Rotate/shift ops per size, second index follows the OP_ROL..OP_SAR
   enum; slot 6 repeats SHL on purpose (OP_SHL1, the undocumented alias).
   Quad row is NULL on 32-bit builds. */
#define DEF_SHIFT(SUFFIX)\
  {\
    gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
    gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
    gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
    gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
    gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
    gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
    gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
    gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
  },\
  {\
    gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
    gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
    gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
    gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
    gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
    gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
    gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
    gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
  },\
  {\
    gen_op_roll ## SUFFIX ## _T0_T1_cc,\
    gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
    gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
    gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
    gen_op_shll ## SUFFIX ## _T0_T1_cc,\
    gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
    gen_op_shll ## SUFFIX ## _T0_T1_cc,\
    gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
  },\
  {\
    X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
    X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
    X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
    X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
    X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
    X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
    X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
    X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
  },

static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
    DEF_SHIFT( )
};

static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
    DEF_SHIFT(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFT(_kernel)
    DEF_SHIFT(_user)
#endif
};
626
/* SHLD/SHRD (double-precision shifts) per size; no byte form exists, so
   the first row is NULL.  'op' selects the count source: an immediate
   (im) or the CL register (ECX). */
#define DEF_SHIFTD(SUFFIX, op)\
  {\
    NULL,\
    NULL,\
  },\
  {\
    gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
    gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
  },\
  {\
    gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
    gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
  },\
  {\
X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
           gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
  },

static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
    DEF_SHIFTD(, im)
};

static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
    DEF_SHIFTD(, ECX)
};

static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
    DEF_SHIFTD(_raw, im)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFTD(_kernel, im)
    DEF_SHIFTD(_user, im)
#endif
};

static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
    DEF_SHIFTD(_raw, ECX)
#ifndef CONFIG_USER_ONLY
    DEF_SHIFTD(_kernel, ECX)
    DEF_SHIFTD(_user, ECX)
#endif
};
668
/* Bit-test ops BT/BTS/BTR/BTC: [size (0=w,1=l,2=q)][op]. */
static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
    [0] = {
        gen_op_btw_T0_T1_cc,
        gen_op_btsw_T0_T1_cc,
        gen_op_btrw_T0_T1_cc,
        gen_op_btcw_T0_T1_cc,
    },
    [1] = {
        gen_op_btl_T0_T1_cc,
        gen_op_btsl_T0_T1_cc,
        gen_op_btrl_T0_T1_cc,
        gen_op_btcl_T0_T1_cc,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_btq_T0_T1_cc,
        gen_op_btsq_T0_T1_cc,
        gen_op_btrq_T0_T1_cc,
        gen_op_btcq_T0_T1_cc,
    },
#endif
};

/* Add the word/dword/qword offset of bit index T1 to A0 (memory bit ops). */
static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
    gen_op_add_bitw_A0_T1,
    gen_op_add_bitl_A0_T1,
    X86_64_ONLY(gen_op_add_bitq_A0_T1),
};

/* BSF/BSR: [size][0=bsf, 1=bsr]. */
static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
    [0] = {
        gen_op_bsfw_T0_cc,
        gen_op_bsrw_T0_cc,
    },
    [1] = {
        gen_op_bsfl_T0_cc,
        gen_op_bsrl_T0_cc,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_bsfq_T0_cc,
        gen_op_bsrq_T0_cc,
    },
#endif
};
714
/* Sign-extending loads into T0, indexed by ot + mem_index (raw/kernel/
   user blocks of 4).  Sign-extended 32-bit loads only exist on x86_64;
   there is no quad entry. */
static GenOpFunc *gen_op_lds_T0_A0[3 * 4] = {
    gen_op_ldsb_raw_T0_A0,
    gen_op_ldsw_raw_T0_A0,
    X86_64_ONLY(gen_op_ldsl_raw_T0_A0),
    NULL,
#ifndef CONFIG_USER_ONLY
    gen_op_ldsb_kernel_T0_A0,
    gen_op_ldsw_kernel_T0_A0,
    X86_64_ONLY(gen_op_ldsl_kernel_T0_A0),
    NULL,

    gen_op_ldsb_user_T0_A0,
    gen_op_ldsw_user_T0_A0,
    X86_64_ONLY(gen_op_ldsl_user_T0_A0),
    NULL,
#endif
};

/* Zero-extending byte/word loads into T0; 32/64-bit slots are NULL
   (plain loads in gen_op_ld_T0_A0 already zero-extend those). */
static GenOpFunc *gen_op_ldu_T0_A0[3 * 4] = {
    gen_op_ldub_raw_T0_A0,
    gen_op_lduw_raw_T0_A0,
    NULL,
    NULL,

#ifndef CONFIG_USER_ONLY
    gen_op_ldub_kernel_T0_A0,
    gen_op_lduw_kernel_T0_A0,
    NULL,
    NULL,

    gen_op_ldub_user_T0_A0,
    gen_op_lduw_user_T0_A0,
    NULL,
    NULL,
#endif
};
751
752/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
753static GenOpFunc *gen_op_ld_T0_A0[3 * 4] = {
754 gen_op_ldub_raw_T0_A0,
755 gen_op_lduw_raw_T0_A0,
756 gen_op_ldl_raw_T0_A0,
757 X86_64_ONLY(gen_op_ldq_raw_T0_A0),
758
759#ifndef CONFIG_USER_ONLY
760 gen_op_ldub_kernel_T0_A0,
761 gen_op_lduw_kernel_T0_A0,
762 gen_op_ldl_kernel_T0_A0,
763 X86_64_ONLY(gen_op_ldq_kernel_T0_A0),
764
765 gen_op_ldub_user_T0_A0,
766 gen_op_lduw_user_T0_A0,
767 gen_op_ldl_user_T0_A0,
768 X86_64_ONLY(gen_op_ldq_user_T0_A0),
769#endif
770};
771
772static GenOpFunc *gen_op_ld_T1_A0[3 * 4] = {
773 gen_op_ldub_raw_T1_A0,
774 gen_op_lduw_raw_T1_A0,
775 gen_op_ldl_raw_T1_A0,
776 X86_64_ONLY(gen_op_ldq_raw_T1_A0),
777
778#ifndef CONFIG_USER_ONLY
779 gen_op_ldub_kernel_T1_A0,
780 gen_op_lduw_kernel_T1_A0,
781 gen_op_ldl_kernel_T1_A0,
782 X86_64_ONLY(gen_op_ldq_kernel_T1_A0),
783
784 gen_op_ldub_user_T1_A0,
785 gen_op_lduw_user_T1_A0,
786 gen_op_ldl_user_T1_A0,
787 X86_64_ONLY(gen_op_ldq_user_T1_A0),
788#endif
789};
790
/* Stores from T0/T1 to [A0], indexed by ot + mem_index.  The T1 table
   has no byte entry (no decoder path stores a byte from T1). */
static GenOpFunc *gen_op_st_T0_A0[3 * 4] = {
    gen_op_stb_raw_T0_A0,
    gen_op_stw_raw_T0_A0,
    gen_op_stl_raw_T0_A0,
    X86_64_ONLY(gen_op_stq_raw_T0_A0),

#ifndef CONFIG_USER_ONLY
    gen_op_stb_kernel_T0_A0,
    gen_op_stw_kernel_T0_A0,
    gen_op_stl_kernel_T0_A0,
    X86_64_ONLY(gen_op_stq_kernel_T0_A0),

    gen_op_stb_user_T0_A0,
    gen_op_stw_user_T0_A0,
    gen_op_stl_user_T0_A0,
    X86_64_ONLY(gen_op_stq_user_T0_A0),
#endif
};

static GenOpFunc *gen_op_st_T1_A0[3 * 4] = {
    NULL,
    gen_op_stw_raw_T1_A0,
    gen_op_stl_raw_T1_A0,
    X86_64_ONLY(gen_op_stq_raw_T1_A0),

#ifndef CONFIG_USER_ONLY
    NULL,
    gen_op_stw_kernel_T1_A0,
    gen_op_stl_kernel_T1_A0,
    X86_64_ONLY(gen_op_stq_kernel_T1_A0),

    NULL,
    gen_op_stw_user_T1_A0,
    gen_op_stl_user_T1_A0,
    X86_64_ONLY(gen_op_stq_user_T1_A0),
#endif
};
828
#ifdef VBOX
/* Emit the micro-op that polls for pending VBox external events at the
   next EIP update (see gen_jmp_im).  Takes no arguments: declare with
   (void) -- an empty parameter list is an old-style non-prototype
   declaration in C and disables argument checking. */
static void gen_check_external_event(void)
{
    gen_op_check_external_event();
}
#endif /* VBOX */
835
/* Emit code that sets env->eip to the known constant pc.  On x86_64 the
   immediate is encoded in the cheapest form that reproduces the value:
   zero-extended 32 bit, sign-extended 32 bit, or a full 64-bit pair. */
static inline void gen_jmp_im(target_ulong pc)
{
#ifdef VBOX
    /* VBox: poll for external events whenever EIP is materialised */
    gen_check_external_event();
#endif /* VBOX */
#ifdef TARGET_X86_64
    if (pc == (uint32_t)pc) {
        gen_op_movl_eip_im(pc);
    } else if (pc == (int32_t)pc) {
        gen_op_movq_eip_im(pc);
    } else {
        gen_op_movq_eip_im64(pc >> 32, pc);
    }
#else
    gen_op_movl_eip_im(pc);
#endif
}
853
/* Compute the source address of a string op into A0: segment base
   (honouring a segment-override prefix, default DS) plus (R)ESI,
   truncated per the current address size. */
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64 bit address: segment base only applies with an override */
        if (override >= 0) {
            gen_op_movq_A0_seg(offsetof(CPUX86State,segs[override].base));
            gen_op_addq_A0_reg_sN[0][R_ESI]();
        } else {
            gen_op_movq_A0_reg[R_ESI]();
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(offsetof(CPUX86State,segs[override].base));
            gen_op_addl_A0_reg_sN[0][R_ESI]();
        } else {
            gen_op_movl_A0_reg[R_ESI]();
        }
    } else {
        /* 16 address, always override */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg[R_ESI]();
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
    }
}
888
/* Compute the destination address of a string op into A0: always the ES
   segment (not overridable per the x86 string-op rules) plus (R)EDI. */
static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_movq_A0_reg[R_EDI]();
    } else
#endif
    if (s->aflag) {
        if (s->addseg) {
            gen_op_movl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
            gen_op_addl_A0_reg_sN[0][R_EDI]();
        } else {
            gen_op_movl_A0_reg[R_EDI]();
        }
    } else {
        /* 16 bit address: mask EDI to 16 bits, then add the ES base */
        gen_op_movl_A0_reg[R_EDI]();
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
    }
}
909
/* Load into T0 the per-element step for string ops (+/- element size
   depending on EFLAGS.DF), per operand size. */
static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
    gen_op_movl_T0_Dshiftb,
    gen_op_movl_T0_Dshiftw,
    gen_op_movl_T0_Dshiftl,
    X86_64_ONLY(gen_op_movl_T0_Dshiftq),
};

/* Conditional jumps on (E/R)CX != 0 / == 0, indexed by address size. */
static GenOpFunc1 *gen_op_jnz_ecx[3] = {
    gen_op_jnz_ecxw,
    gen_op_jnz_ecxl,
    X86_64_ONLY(gen_op_jnz_ecxq),
};

static GenOpFunc1 *gen_op_jz_ecx[3] = {
    gen_op_jz_ecxw,
    gen_op_jz_ecxl,
    X86_64_ONLY(gen_op_jz_ecxq),
};

/* Decrement (E/R)CX, indexed by address size (REP counters). */
static GenOpFunc *gen_op_dec_ECX[3] = {
    gen_op_decw_ECX,
    gen_op_decl_ECX,
    X86_64_ONLY(gen_op_decq_ECX),
};

/* REPZ/REPNZ termination jumps on the last compare result:
   [0] = jump while not equal (REPZ), [1] = jump while equal (REPNZ). */
static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
    {
        gen_op_jnz_subb,
        gen_op_jnz_subw,
        gen_op_jnz_subl,
        X86_64_ONLY(gen_op_jnz_subq),
    },
    {
        gen_op_jz_subb,
        gen_op_jz_subw,
        gen_op_jz_subl,
        X86_64_ONLY(gen_op_jz_subq),
    },
};
949
/* Port I/O micro-ops, indexed by operand size (b/w/l only). */
static GenOpFunc *gen_op_in_DX_T0[3] = {
    gen_op_inb_DX_T0,
    gen_op_inw_DX_T0,
    gen_op_inl_DX_T0,
};

static GenOpFunc *gen_op_out_DX_T0[3] = {
    gen_op_outb_DX_T0,
    gen_op_outw_DX_T0,
    gen_op_outl_DX_T0,
};

static GenOpFunc *gen_op_in[3] = {
    gen_op_inb_T0_T1,
    gen_op_inw_T0_T1,
    gen_op_inl_T0_T1,
};

static GenOpFunc *gen_op_out[3] = {
    gen_op_outb_T0_T1,
    gen_op_outw_T0_T1,
    gen_op_outl_T0_T1,
};

/* TSS I/O-permission-bitmap checks for the port in T0 or DX. */
static GenOpFunc *gen_check_io_T0[3] = {
    gen_op_check_iob_T0,
    gen_op_check_iow_T0,
    gen_op_check_iol_T0,
};

static GenOpFunc *gen_check_io_DX[3] = {
    gen_op_check_iob_DX,
    gen_op_check_iow_DX,
    gen_op_check_iol_DX,
};
985
986static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
987{
988 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
989 if (s->cc_op != CC_OP_DYNAMIC)
990 gen_op_set_cc_op(s->cc_op);
991 gen_jmp_im(cur_eip);
992 if (use_dx)
993 gen_check_io_DX[ot]();
994 else
995 gen_check_io_T0[ot]();
996 }
997}
998
/* Emit one MOVS step: load from DS:(E)SI, store to ES:(E)DI, then step
   both index registers by the DF-directed element size. */
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
1020
1021static inline void gen_update_cc_op(DisasContext *s)
1022{
1023 if (s->cc_op != CC_OP_DYNAMIC) {
1024 gen_op_set_cc_op(s->cc_op);
1025 s->cc_op = CC_OP_DYNAMIC;
1026 }
1027}
1028
/* XXX: does not work with gdbstub "ice" single step - not a
   serious problem */
/* Emit the "if (E/R)CX == 0 jump past the string op" prologue for REP.
   Returns label l2 (placed just before the jump to next_eip) so the
   REP body can branch back to the exit path. */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx[s->aflag](l1);   /* CX != 0: fall into the body at l1 */
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);     /* CX == 0: skip to next instruction */
    gen_set_label(l1);
    return l2;
}
1043
/* Emit one STOS step: store (R)AX to ES:(E)DI and step EDI. */
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1061
/* Emit one LODS step: load from DS:(E)SI into (R)AX and step ESI. */
static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_op_mov_reg_T0[ot][R_EAX]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1079
/* Emit one SCAS step: compare (R)AX with ES:(E)DI (flags only) and
   step EDI. */
static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0[ot + s->mem_index]();
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1098
/* Emit one CMPS step: compare DS:(E)SI with ES:(E)DI (flags only) and
   step both index registers. */
static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0[ot + s->mem_index]();
    gen_op_cmpl_T0_T1_cc();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
1121
/* Emit one INS step: read from port DX into ES:(E)DI and step EDI. */
static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    gen_op_movl_T0_0();
    /* store a dummy 0 first, then the real value after the port read --
       presumably so any write page fault is raised before the I/O side
       effect happens (TODO confirm against upstream qemu) */
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_in_DX_T0[ot]();
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1141
/* Emit one OUTS step: write DS:(E)SI to port DX and step ESI. */
static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_op_out_DX_T0[ot]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1159
/* same method as Valgrind : we generate jumps to current or next
   instruction */
/* GEN_REPZ builds gen_repz_<op>() for plain REP string ops; GEN_REPZ2
   additionally tests ZF after each element for REPZ/REPNZ SCAS/CMPS
   (nz selects which sense terminates).  One iteration is emitted per
   executed TB; the trailing gen_jmp() re-enters at cur_eip to loop. */
#define GEN_REPZ(op) \
static inline void gen_repz_ ## op(DisasContext *s, int ot, \
 target_ulong cur_eip, target_ulong next_eip) \
{ \
 int l2;\
 gen_update_cc_op(s); \
 l2 = gen_jz_ecx_string(s, next_eip); \
 gen_ ## op(s, ot); \
 gen_op_dec_ECX[s->aflag](); \
 /* a loop would cause two single step exceptions if ECX = 1 \
 before rep string_insn */ \
 if (!s->jmp_opt) \
 gen_op_jz_ecx[s->aflag](l2); \
 gen_jmp(s, cur_eip); \
}

#define GEN_REPZ2(op) \
static inline void gen_repz_ ## op(DisasContext *s, int ot, \
 target_ulong cur_eip, \
 target_ulong next_eip, \
 int nz) \
{ \
 int l2;\
 gen_update_cc_op(s); \
 l2 = gen_jz_ecx_string(s, next_eip); \
 gen_ ## op(s, ot); \
 gen_op_dec_ECX[s->aflag](); \
 gen_op_set_cc_op(CC_OP_SUBB + ot); \
 gen_op_string_jnz_sub[nz][ot](l2);\
 if (!s->jmp_opt) \
 gen_op_jz_ecx[s->aflag](l2); \
 gen_jmp(s, cur_eip); \
}

GEN_REPZ(movs)
GEN_REPZ(stos)
GEN_REPZ(lods)
GEN_REPZ(ins)
GEN_REPZ(outs)
GEN_REPZ2(scas)
GEN_REPZ2(cmps)
1203
/* Condition-code indices for Jcc/SETcc/CMOVcc -- order matches the low
   bits of the x86 condition encoding (bit 0 of the opcode inverts). */
enum {
    JCC_O,
    JCC_B,
    JCC_Z,
    JCC_BE,
    JCC_S,
    JCC_P,
    JCC_L,
    JCC_LE,
};
1214
/* Fast-path conditional jumps usable when cc_op is a SUB of the given
   size: [size][JCC_*].  NULL slots (O, P) have no subtraction shortcut
   and are presumably handled by the caller's slow path; BUGGY_64 quad
   entries are disabled (see the BUGGY_64 macro above). */
static GenOpFunc1 *gen_jcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_jb_subb,
        gen_op_jz_subb,
        gen_op_jbe_subb,
        gen_op_js_subb,
        NULL,
        gen_op_jl_subb,
        gen_op_jle_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_jb_subw,
        gen_op_jz_subw,
        gen_op_jbe_subw,
        gen_op_js_subw,
        NULL,
        gen_op_jl_subw,
        gen_op_jle_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_jb_subl,
        gen_op_jz_subl,
        gen_op_jbe_subl,
        gen_op_js_subl,
        NULL,
        gen_op_jl_subl,
        gen_op_jle_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        BUGGY_64(gen_op_jb_subq),
        gen_op_jz_subq,
        BUGGY_64(gen_op_jbe_subq),
        gen_op_js_subq,
        NULL,
        BUGGY_64(gen_op_jl_subq),
        BUGGY_64(gen_op_jle_subq),
    },
#endif
};
/* LOOPNZ/LOOPZ/LOOP helpers per address size; the fourth slot of each
   row is left NULL (presumably JECXZ goes through gen_op_jz_ecx). */
static GenOpFunc1 *gen_op_loop[3][4] = {
    [0] = {
        gen_op_loopnzw,
        gen_op_loopzw,
        gen_op_jnz_ecxw,
    },
    [1] = {
        gen_op_loopnzl,
        gen_op_loopzl,
        gen_op_jnz_ecxl,
    },
#ifdef TARGET_X86_64
    [2] = {
        gen_op_loopnzq,
        gen_op_loopzq,
        gen_op_jnz_ecxq,
    },
#endif
};
1278
/* SETcc helpers computing the condition from the full dynamic flags
   state -- works for every cc_op. */
static GenOpFunc *gen_setcc_slow[8] = {
    gen_op_seto_T0_cc,
    gen_op_setb_T0_cc,
    gen_op_setz_T0_cc,
    gen_op_setbe_T0_cc,
    gen_op_sets_T0_cc,
    gen_op_setp_T0_cc,
    gen_op_setl_T0_cc,
    gen_op_setle_T0_cc,
};

/* SETcc fast path when cc_op is a SUB of the given size; NULL slots
   (O, P) fall back to gen_setcc_slow. */
static GenOpFunc *gen_setcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_setb_T0_subb,
        gen_op_setz_T0_subb,
        gen_op_setbe_T0_subb,
        gen_op_sets_T0_subb,
        NULL,
        gen_op_setl_T0_subb,
        gen_op_setle_T0_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_setb_T0_subw,
        gen_op_setz_T0_subw,
        gen_op_setbe_T0_subw,
        gen_op_sets_T0_subw,
        NULL,
        gen_op_setl_T0_subw,
        gen_op_setle_T0_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_setb_T0_subl,
        gen_op_setz_T0_subl,
        gen_op_setbe_T0_subl,
        gen_op_sets_T0_subl,
        NULL,
        gen_op_setl_T0_subl,
        gen_op_setle_T0_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        gen_op_setb_T0_subq,
        gen_op_setz_T0_subq,
        gen_op_setbe_T0_subq,
        gen_op_sets_T0_subq,
        NULL,
        gen_op_setl_T0_subq,
        gen_op_setle_T0_subq,
    },
#endif
};
1334
/* x87 arithmetic ops ST0 <- ST0 op FT0, indexed by the /r extension;
   slots 2 and 3 (FCOM/FCOMP) both emit a compare -- the pop is handled
   by the decoder. */
static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
    gen_op_fadd_ST0_FT0,
    gen_op_fmul_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fsub_ST0_FT0,
    gen_op_fsubr_ST0_FT0,
    gen_op_fdiv_ST0_FT0,
    gen_op_fdivr_ST0_FT0,
};

/* NOTE the exception in "r" op ordering */
/* For ST(i) <- ST(i) op ST0, sub/subr and div/divr are swapped relative
   to the table above because the operand roles reverse. */
static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
    gen_op_fadd_STN_ST0,
    gen_op_fmul_STN_ST0,
    NULL,
    NULL,
    gen_op_fsubr_STN_ST0,
    gen_op_fsub_STN_ST0,
    gen_op_fdivr_STN_ST0,
    gen_op_fdiv_STN_ST0,
};
1357
/* if d == OR_TMP0, it means memory operand (address in A0) */
/* Emit a two-operand ALU op (T1 is the second operand, already loaded).
   ADC/SBB take a special path because they consume the current flags;
   for the rest, the new cc_op is recorded and the flags update is
   emitted last. */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    GenOpFunc *gen_update_cc;

    /* fetch the destination operand into T0 */
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg[ot][0][d]();
    } else {
        gen_op_ld_T0_A0[ot + s1->mem_index]();
    }
    switch(op) {
    case OP_ADCL:
    case OP_SBBL:
        /* carry-using ops: flush cc state first, the micro-op reads it */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        if (d != OR_TMP0) {
            gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
            gen_op_mov_reg_T0[ot][d]();
        } else {
            gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
        }
        s1->cc_op = CC_OP_DYNAMIC;
        goto the_end;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        s1->cc_op = CC_OP_ADDB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    case OP_SUBL:
        gen_op_subl_T0_T1();
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    default:
    case OP_ANDL:
    case OP_ORL:
    case OP_XORL:
        gen_op_arith_T0_T1_cc[op]();
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_CMPL:
        /* compare: flags only, no writeback */
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = NULL;
        break;
    }
    if (op != OP_CMPL) {
        if (d != OR_TMP0)
            gen_op_mov_reg_T0[ot][d]();
        else
            gen_op_st_T0_A0[ot + s1->mem_index]();
    }
    /* the flags update must happen after the memory write (precise
       exception support) */
    if (gen_update_cc)
        gen_update_cc();
 the_end: ;
}
1417
/* Emit ops for inc (c > 0) or dec (c <= 0) of size 'ot'. If d ==
   OR_TMP0, it means memory operand (address in A0). inc/dec preserve
   CF, hence the explicit cc_op materialization before the op. */
static void gen_inc(DisasContext *s1, int ot, int d, int c)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg[ot][0][d]();
    else
        gen_op_ld_T0_A0[ot + s1->mem_index]();
    /* CF is kept from the previous operation: flush the current flag
       state so the inc/dec cc_op can reuse it */
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);
    if (c > 0) {
        gen_op_incl_T0();
        s1->cc_op = CC_OP_INCB + ot;
    } else {
        gen_op_decl_T0();
        s1->cc_op = CC_OP_DECB + ot;
    }
    if (d != OR_TMP0)
        gen_op_mov_reg_T0[ot][d]();
    else
        gen_op_st_T0_A0[ot + s1->mem_index]();
    gen_op_update_inc_cc();
}
1440
/* Emit ops for a shift/rotate 'op' of size 'ot' with a variable count.
   d is the destination (OR_TMP0 = memory, address in A0), s is the
   count register (OR_TMP1 means the count is already in T1). */
static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
{
    if (d != OR_TMP0)
        gen_op_mov_TN_reg[ot][0][d]();
    else
        gen_op_ld_T0_A0[ot + s1->mem_index]();
    if (s != OR_TMP1)
        gen_op_mov_TN_reg[ot][1][s]();
    /* for zero counts, flags are not updated, so must do it dynamically */
    if (s1->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s1->cc_op);

    if (d != OR_TMP0)
        gen_op_shift_T0_T1_cc[ot][op]();
    else
        gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
    if (d != OR_TMP0)
        gen_op_mov_reg_T0[ot][d]();
    s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
}
1461
/* Shift/rotate with an immediate count 'c': load the count into T1 and
   reuse the variable count path. */
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    /* currently not optimized */
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);
}
1468
/* Decode the memory operand of a ModRM byte and emit ops computing the
   effective address into A0, including segment base when needed.
   Advances s->pc past the SIB byte and displacement. The outputs
   *reg_ptr/*offset_ptr are always OR_A0/0 (historical interface). */
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    override = s->override;
    must_add_seg = s->addseg;
    /* an explicit segment prefix always forces the segment base add */
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32/64 bit addressing */
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;

        if (base == 4) {
            /* base == 4 selects a SIB byte */
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                /* no base register, disp32 only */
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    /* x86-64 RIP-relative addressing; rip_offset
                       accounts for an immediate that follows */
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }

        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg[base]();
                if (disp != 0) {
                    if ((int32_t)disp == disp)
                        gen_op_addq_A0_im(disp);
                    else
                        gen_op_addq_A0_im64(disp >> 32, disp);
                }
            } else
#endif
            {
                gen_op_movl_A0_reg[base]();
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
            /* displacement only */
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                if ((int32_t)disp == disp)
                    gen_op_movq_A0_im(disp);
                else
                    gen_op_movq_A0_im64(disp >> 32, disp);
            } else
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN[scale][index]();
            } else
#endif
            {
                gen_op_addl_A0_reg_sN[scale][index]();
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                /* default segment: SS for EBP/ESP based addressing,
                   DS otherwise */
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
            } else
#endif
            {
                gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
            }
        }
    } else {
        /* 16 bit addressing: fixed base/index register combinations */
        switch (mod) {
        case 0:
            if (rm == 6) {
                /* disp16 only, no register */
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg[R_EBX]();
            gen_op_addl_A0_reg_sN[0][R_ESI]();
            break;
        case 1:
            gen_op_movl_A0_reg[R_EBX]();
            gen_op_addl_A0_reg_sN[0][R_EDI]();
            break;
        case 2:
            gen_op_movl_A0_reg[R_EBP]();
            gen_op_addl_A0_reg_sN[0][R_ESI]();
            break;
        case 3:
            gen_op_movl_A0_reg[R_EBP]();
            gen_op_addl_A0_reg_sN[0][R_EDI]();
            break;
        case 4:
            gen_op_movl_A0_reg[R_ESI]();
            break;
        case 5:
            gen_op_movl_A0_reg[R_EDI]();
            break;
        case 6:
            gen_op_movl_A0_reg[R_EBP]();
            break;
        default:
        case 7:
            gen_op_movl_A0_reg[R_EBX]();
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        /* 16 bit addresses wrap at 64K */
        gen_op_andl_A0_ffff();
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                /* BP based modes default to SS */
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
1658
1659static void gen_nop_modrm(DisasContext *s, int modrm)
1660{
1661 int mod, rm, base, code;
1662
1663 mod = (modrm >> 6) & 3;
1664 if (mod == 3)
1665 return;
1666 rm = modrm & 7;
1667
1668 if (s->aflag) {
1669
1670 base = rm;
1671
1672 if (base == 4) {
1673 code = ldub_code(s->pc++);
1674 base = (code & 7);
1675 }
1676
1677 switch (mod) {
1678 case 0:
1679 if (base == 5) {
1680 s->pc += 4;
1681 }
1682 break;
1683 case 1:
1684 s->pc++;
1685 break;
1686 default:
1687 case 2:
1688 s->pc += 4;
1689 break;
1690 }
1691 } else {
1692 switch (mod) {
1693 case 0:
1694 if (rm == 6) {
1695 s->pc += 2;
1696 }
1697 break;
1698 case 1:
1699 s->pc++;
1700 break;
1701 default:
1702 case 2:
1703 s->pc += 2;
1704 break;
1705 }
1706 }
1707}
1708
1709/* used for LEA and MOV AX, mem */
1710static void gen_add_A0_ds_seg(DisasContext *s)
1711{
1712 int override, must_add_seg;
1713 must_add_seg = s->addseg;
1714 override = R_DS;
1715 if (s->override >= 0) {
1716 override = s->override;
1717 must_add_seg = 1;
1718 } else {
1719 override = R_DS;
1720 }
1721 if (must_add_seg) {
1722#ifdef TARGET_X86_64
1723 if (CODE64(s)) {
1724 gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
1725 } else
1726#endif
1727 {
1728 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1729 }
1730 }
1731}
1732
1733/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
1734 OR_TMP0 */
1735static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
1736{
1737 int mod, rm, opreg, disp;
1738
1739 mod = (modrm >> 6) & 3;
1740 rm = (modrm & 7) | REX_B(s);
1741 if (mod == 3) {
1742 if (is_store) {
1743 if (reg != OR_TMP0)
1744 gen_op_mov_TN_reg[ot][0][reg]();
1745 gen_op_mov_reg_T0[ot][rm]();
1746 } else {
1747 gen_op_mov_TN_reg[ot][0][rm]();
1748 if (reg != OR_TMP0)
1749 gen_op_mov_reg_T0[ot][reg]();
1750 }
1751 } else {
1752 gen_lea_modrm(s, modrm, &opreg, &disp);
1753 if (is_store) {
1754 if (reg != OR_TMP0)
1755 gen_op_mov_TN_reg[ot][0][reg]();
1756 gen_op_st_T0_A0[ot + s->mem_index]();
1757 } else {
1758 gen_op_ld_T0_A0[ot + s->mem_index]();
1759 if (reg != OR_TMP0)
1760 gen_op_mov_reg_T0[ot][reg]();
1761 }
1762 }
1763}
1764
1765static inline uint32_t insn_get(DisasContext *s, int ot)
1766{
1767 uint32_t ret;
1768
1769 switch(ot) {
1770 case OT_BYTE:
1771 ret = ldub_code(s->pc);
1772 s->pc++;
1773 break;
1774 case OT_WORD:
1775 ret = lduw_code(s->pc);
1776 s->pc += 2;
1777 break;
1778 default:
1779 case OT_LONG:
1780 ret = ldl_code(s->pc);
1781 s->pc += 4;
1782 break;
1783 }
1784 return ret;
1785}
1786
1787static inline int insn_const_size(unsigned int ot)
1788{
1789 if (ot <= OT_LONG)
1790 return 1 << ot;
1791 else
1792 return 4;
1793}
1794
/* Emit a jump to 'eip', using direct TB chaining (slot 'tb_num') when
   the target lies on the same guest page as this TB. */
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK))  {
        /* jump to same page: we can use a direct jump */
        if (tb_num == 0)
            gen_op_goto_tb0(TBPARAM(tb));
        else
            gen_op_goto_tb1(TBPARAM(tb));
        gen_jmp_im(eip);
        /* return (tb | tb_num) so the main loop can patch the chain */
        gen_op_movl_T0_im((long)tb + tb_num);
        gen_op_exit_tb();
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
1819
/* Emit a conditional jump: to 'val' when condition 'b' holds,
   otherwise to 'next_eip'. Bit 0 of 'b' inverts the condition. When TB
   chaining is allowed (s->jmp_opt) the compare/jcc pair is optimized by
   testing the saved operands directly where possible. */
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    TranslationBlock *tb;
    int inv, jcc_op;
    GenOpFunc1 *func;
    target_ulong tmp;
    int l1, l2;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;

    if (s->jmp_opt) {
#ifdef VBOX
        gen_check_external_event(s);
#endif /* VBOX */
        switch(s->cc_op) {
            /* we optimize the cmp/jcc case */
        case CC_OP_SUBB:
        case CC_OP_SUBW:
        case CC_OP_SUBL:
        case CC_OP_SUBQ:
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
            break;

            /* some jumps are easy to compute */
        case CC_OP_ADDB:
        case CC_OP_ADDW:
        case CC_OP_ADDL:
        case CC_OP_ADDQ:

        case CC_OP_ADCB:
        case CC_OP_ADCW:
        case CC_OP_ADCL:
        case CC_OP_ADCQ:

        case CC_OP_SBBB:
        case CC_OP_SBBW:
        case CC_OP_SBBL:
        case CC_OP_SBBQ:

        case CC_OP_LOGICB:
        case CC_OP_LOGICW:
        case CC_OP_LOGICL:
        case CC_OP_LOGICQ:

        case CC_OP_INCB:
        case CC_OP_INCW:
        case CC_OP_INCL:
        case CC_OP_INCQ:

        case CC_OP_DECB:
        case CC_OP_DECW:
        case CC_OP_DECL:
        case CC_OP_DECQ:

        case CC_OP_SHLB:
        case CC_OP_SHLW:
        case CC_OP_SHLL:
        case CC_OP_SHLQ:

        case CC_OP_SARB:
        case CC_OP_SARW:
        case CC_OP_SARL:
        case CC_OP_SARQ:
            /* for these ops only Z and S can be tested from CC_DST;
               '% 4' extracts the operand size from the cc_op group */
            switch(jcc_op) {
            case JCC_Z:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            case JCC_S:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            default:
                func = NULL;
                break;
            }
            break;
        default:
            func = NULL;
            break;
        }

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }

        if (!func) {
            /* fall back: compute the condition into T0, jump on T0 */
            gen_setcc_slow[jcc_op]();
            func = gen_op_jnz_T0_label;
        }

        /* inverted condition: swap taken and fall-through targets */
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        tb = s->tb;

        l1 = gen_new_label();
        func(l1);

        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);

        s->is_jmp = 3;
    } else {
        /* no TB chaining: compute the condition and emit explicit
           EIP updates followed by an end of block */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_setcc_slow[jcc_op]();
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jnz_T0_label(l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);
        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
1951
/* Emit ops computing condition 'b' into T0 (0 or 1), used by setcc and
   cmov. Uses the optimized sub/test paths when the current cc_op allows
   it, otherwise falls back to the slow full-flags computation. */
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    inv = b & 1;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;

        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:

    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:

    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:

    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:

    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        /* only Z and S can be read directly from CC_DST here */
        switch(jcc_op) {
        case JCC_Z:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        case JCC_S:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    /* odd condition codes are the negations of the even ones */
    if (inv) {
        gen_op_xor_T0_1();
    }
}
2018
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* protected mode: the load can fault, so the flags and EIP must
           be in sync before the helper call */
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        gen_op_movl_seg_T0(seg_reg);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        /* real/vm86 mode: simple base/selector update, cannot fault */
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
2041
2042static inline void gen_stack_update(DisasContext *s, int addend)
2043{
2044#ifdef TARGET_X86_64
2045 if (CODE64(s)) {
2046 if (addend == 8)
2047 gen_op_addq_ESP_8();
2048 else
2049 gen_op_addq_ESP_im(addend);
2050 } else
2051#endif
2052 if (s->ss32) {
2053 if (addend == 2)
2054 gen_op_addl_ESP_2();
2055 else if (addend == 4)
2056 gen_op_addl_ESP_4();
2057 else
2058 gen_op_addl_ESP_im(addend);
2059 } else {
2060 if (addend == 2)
2061 gen_op_addw_ESP_2();
2062 else if (addend == 4)
2063 gen_op_addw_ESP_4();
2064 else
2065 gen_op_addw_ESP_im(addend);
2066 }
2067}
2068
/* generate a push of T0. It depends on ss32, addseg and dflag */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg[R_ESP]();
        if (s->dflag) {
            /* 64 bit operand size */
            gen_op_subq_A0_8();
            gen_op_st_T0_A0[OT_QUAD + s->mem_index]();
        } else {
            /* 16 bit operand size (0x66 prefix) */
            gen_op_subq_A0_2();
            gen_op_st_T0_A0[OT_WORD + s->mem_index]();
        }
        gen_op_movq_ESP_A0();
    } else
#endif
    {
        gen_op_movl_A0_reg[R_ESP]();
        if (!s->dflag)
            gen_op_subl_A0_2();
        else
            gen_op_subl_A0_4();
        if (s->ss32) {
            if (s->addseg) {
                /* save the new ESP value in T1 before adding the SS base */
                gen_op_movl_T1_A0();
                gen_op_addl_A0_SS();
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_movl_T1_A0();
            gen_op_addl_A0_SS();
        }
        gen_op_st_T0_A0[s->dflag + 1 + s->mem_index]();
        /* ESP is only updated after the store succeeded (precise
           exception support) */
        if (s->ss32 && !s->addseg)
            gen_op_movl_ESP_A0();
        else
            gen_op_mov_reg_T1[s->ss32 + 1][R_ESP]();
    }
}
2108
2109/* generate a push. It depends on ss32, addseg and dflag */
2110/* slower version for T1, only used for call Ev */
2111static void gen_push_T1(DisasContext *s)
2112{
2113#ifdef TARGET_X86_64
2114 if (CODE64(s)) {
2115 gen_op_movq_A0_reg[R_ESP]();
2116 if (s->dflag) {
2117 gen_op_subq_A0_8();
2118 gen_op_st_T1_A0[OT_QUAD + s->mem_index]();
2119 } else {
2120 gen_op_subq_A0_2();
2121 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
2122 }
2123 gen_op_movq_ESP_A0();
2124 } else
2125#endif
2126 {
2127 gen_op_movl_A0_reg[R_ESP]();
2128 if (!s->dflag)
2129 gen_op_subl_A0_2();
2130 else
2131 gen_op_subl_A0_4();
2132 if (s->ss32) {
2133 if (s->addseg) {
2134 gen_op_addl_A0_SS();
2135 }
2136 } else {
2137 gen_op_andl_A0_ffff();
2138 gen_op_addl_A0_SS();
2139 }
2140 gen_op_st_T1_A0[s->dflag + 1 + s->mem_index]();
2141
2142 if (s->ss32 && !s->addseg)
2143 gen_op_movl_ESP_A0();
2144 else
2145 gen_stack_update(s, (-2) << s->dflag);
2146 }
2147}
2148
/* two step pop is necessary for precise exceptions: this only loads the
   top of stack into T0; gen_pop_update() adjusts ESP afterwards */
static void gen_pop_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        gen_op_movq_A0_reg[R_ESP]();
        gen_op_ld_T0_A0[(s->dflag ? OT_QUAD : OT_WORD) + s->mem_index]();
    } else
#endif
    {
        gen_op_movl_A0_reg[R_ESP]();
        if (s->ss32) {
            if (s->addseg)
                gen_op_addl_A0_SS();
        } else {
            gen_op_andl_A0_ffff();
            gen_op_addl_A0_SS();
        }
        gen_op_ld_T0_A0[s->dflag + 1 + s->mem_index]();
    }
}
2170
2171static void gen_pop_update(DisasContext *s)
2172{
2173#ifdef TARGET_X86_64
2174 if (CODE64(s) && s->dflag) {
2175 gen_stack_update(s, 8);
2176 } else
2177#endif
2178 {
2179 gen_stack_update(s, 2 << s->dflag);
2180 }
2181}
2182
/* Compute the linear address of the stack top into A0; the unadjusted
   (offset) value is also left in T1 for later ESP updates. */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_ESP();
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
}
2192
/* PUSHA/PUSHAD: push the 8 general registers in the order
   EAX..EDI (EDI first at the lowest address).
   NOTE: wrap around in 16 bit not fully handled */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_ESP();
    /* pre-decrement by the total frame size (8 regs * 2 or 4 bytes) */
    gen_op_addl_A0_im(-16 << s->dflag);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg[OT_LONG][0][7 - i]();
        gen_op_st_T0_A0[OT_WORD + s->dflag + s->mem_index]();
        gen_op_addl_A0_im(2 << s->dflag);
    }
    /* commit the new ESP only after all stores (precise exceptions) */
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
}
2211
/* POPA/POPAD: pop the 8 general registers (the stored ESP is skipped).
   NOTE: wrap around in 16 bit not fully handled */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_ESP();
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    /* T1 = final ESP value after popping the whole frame */
    gen_op_addl_T1_im(16 << s->dflag);
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0[OT_WORD + s->dflag + s->mem_index]();
            gen_op_mov_reg_T0[OT_WORD + s->dflag][7 - i]();
        }
        gen_op_addl_A0_im(2 << s->dflag);
    }
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
}
2233
/* ENTER instruction: allocate a stack frame of 'esp_addend' bytes with
   'level' (0..31) nested frame pointers copied by a helper. */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    /* the nesting level is taken modulo 32 (architectural behavior) */
    level &= 0x1f;
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;

        gen_op_movl_A0_ESP();
        gen_op_addq_A0_im(-opsize);
        gen_op_movl_T1_A0();

        /* push bp */
        gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
        gen_op_st_T0_A0[ot + s->mem_index]();
        if (level) {
            gen_op_enter64_level(level, (ot == OT_QUAD));
        }
        gen_op_mov_reg_T1[ot][R_EBP]();
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1[OT_QUAD][R_ESP]();
    } else
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;

        gen_op_movl_A0_ESP();
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        gen_op_movl_T1_A0();
        if (s->addseg)
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
        /* push bp */
        gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
        gen_op_st_T0_A0[ot + s->mem_index]();
        if (level) {
            /* helper copies the nested frame pointers */
            gen_op_enter_level(level, s->dflag);
        }
        gen_op_mov_reg_T1[ot][R_EBP]();
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
    }
}
2281
/* Raise exception 'trapno' at 'cur_eip': flush lazy flags, reload EIP,
   then call the raise helper. Ends the translation block. */
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_exception(trapno);
    s->is_jmp = 3;
}
2290
/* an interrupt (INT n) is different from an exception because of the
   privilege checks; next_eip - cur_eip gives the instruction length
   needed to compute the return address */
static void gen_interrupt(DisasContext *s, int intno,
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
    s->is_jmp = 3;
}
2302
/* Emit a debug trap at 'cur_eip' (used for breakpoints). Ends the
   translation block. */
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_debug();
    s->is_jmp = 3;
}
2311
/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    /* an interrupt-inhibiting instruction (mov ss, sti) just executed:
       re-enable interrupt recognition for the next block */
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        gen_op_reset_inhibit_irq();
    }
    if (s->singlestep_enabled) {
        gen_op_debug();
    } else if (s->tf) {
        /* TF set: raise the single step trap after each instruction */
        gen_op_raise_exception(EXCP01_SSTP);
    } else {
        /* T0 = 0 means "no chained TB" to the main loop */
        gen_op_movl_T0_0();
        gen_op_exit_tb();
    }
    s->is_jmp = 3;
}
2331
/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
#ifdef VBOX
        gen_check_external_event(s);
#endif /* VBOX */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        /* TB chaining allowed: try a direct jump */
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
2351
/* Unconditional jump to 'eip' using TB chaining slot 0. */
static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
2356
/* Load a target_ulong immediate into T0; on x86-64 a 64 bit move is
   only emitted when the value does not fit in a sign-extended 32 bit
   immediate. */
static void gen_movtl_T0_im(target_ulong val)
{
#ifdef TARGET_X86_64
    if ((int32_t)val == val) {
        gen_op_movl_T0_im(val);
    } else {
        gen_op_movq_T0_im64(val >> 32, val);
    }
#else
    gen_op_movl_T0_im(val);
#endif
}
2369
/* Same as gen_movtl_T0_im() but targets T1. */
static void gen_movtl_T1_im(target_ulong val)
{
#ifdef TARGET_X86_64
    if ((int32_t)val == val) {
        gen_op_movl_T1_im(val);
    } else {
        gen_op_movq_T1_im64(val >> 32, val);
    }
#else
    gen_op_movl_T1_im(val);
#endif
}
2382
/* Add immediate 'val' to A0, using the 64 bit op in long mode so the
   upper address bits are preserved. */
static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}
2392
/* 64 bit load from guest memory (address in A0) into an env field,
   indexed by mem_index >> 2 (raw/kernel/user access variant) */
static GenOpFunc1 *gen_ldq_env_A0[3] = {
    gen_op_ldq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldq_kernel_env_A0,
    gen_op_ldq_user_env_A0,
#endif
};
2400
/* 64 bit store of an env field to guest memory (address in A0) */
static GenOpFunc1 *gen_stq_env_A0[3] = {
    gen_op_stq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_stq_kernel_env_A0,
    gen_op_stq_user_env_A0,
#endif
};
2408
/* 128 bit (octet/XMM) load from guest memory into an env field */
static GenOpFunc1 *gen_ldo_env_A0[3] = {
    gen_op_ldo_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldo_kernel_env_A0,
    gen_op_ldo_user_env_A0,
#endif
};
2416
/* 128 bit (octet/XMM) store of an env field to guest memory */
static GenOpFunc1 *gen_sto_env_A0[3] = {
    gen_op_sto_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_sto_kernel_env_A0,
    gen_op_sto_user_env_A0,
#endif
};
2424
/* sentinel in sse_op_table1: the opcode needs ad hoc handling in
   gen_sse() instead of a simple two operand op */
#define SSE_SPECIAL ((GenOpFunc2 *)1)

/* table entry helpers: MMX_OP2 fills the MMX and SSE(66) columns,
   SSE_FOP fills the ps/pd/ss/sd float op columns */
#define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
#define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
                     gen_op_ ## x ## ss, gen_op_ ## x ## sd, }
2430
/* dispatch table for the 0x0f two byte opcode space, indexed by the
   second opcode byte and the mandatory prefix (0: none, 1: 0x66,
   2: 0xf3, 3: 0xf2). NULL = illegal, SSE_SPECIAL = handled inline. */
static GenOpFunc2 *sse_op_table1[256][4] = {
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
    [0x14] = { gen_op_punpckldq_xmm, gen_op_punpcklqdq_xmm },
    [0x15] = { gen_op_punpckhdq_xmm, gen_op_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
    [0x2e] = { gen_op_ucomiss, gen_op_ucomisd },
    [0x2f] = { gen_op_comiss, gen_op_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { gen_op_rsqrtps, NULL, gen_op_rsqrtss, NULL },
    [0x53] = { gen_op_rcpps, NULL, gen_op_rcpss, NULL },
    [0x54] = { gen_op_pand_xmm, gen_op_pand_xmm }, /* andps, andpd */
    [0x55] = { gen_op_pandn_xmm, gen_op_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { gen_op_por_xmm, gen_op_por_xmm }, /* orps, orpd */
    [0x57] = { gen_op_pxor_xmm, gen_op_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { gen_op_cvtps2pd, gen_op_cvtpd2ps,
               gen_op_cvtss2sd, gen_op_cvtsd2ss },
    [0x5b] = { gen_op_cvtdq2ps, gen_op_cvtps2dq, gen_op_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq),
    [0xc6] = { (GenOpFunc2 *)gen_op_shufps, (GenOpFunc2 *)gen_op_shufpd },

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, gen_op_punpcklqdq_xmm },
    [0x6d] = { NULL, gen_op_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movdqu */
    [0x70] = { (GenOpFunc2 *)gen_op_pshufw_mmx,
               (GenOpFunc2 *)gen_op_pshufd_xmm,
               (GenOpFunc2 *)gen_op_pshufhw_xmm,
               (GenOpFunc2 *)gen_op_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_SPECIAL }, /* emms */
    [0x7c] = { NULL, gen_op_haddpd, NULL, gen_op_haddps },
    [0x7d] = { NULL, gen_op_hsubpd, NULL, gen_op_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, gen_op_addsubpd, NULL, gen_op_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, gen_op_cvttpd2dq, gen_op_cvtdq2pd, gen_op_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL },  /* movntq, movntdq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
2553
/* immediate shift group (0x0f 0x71/0x72/0x73): row = operand width
   (0: word, 8: dword, 16: qword/dq), column = modrm reg field;
   second entry of each pair is the XMM (0x66 prefix) variant */
static GenOpFunc2 *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, gen_op_psrldq_xmm },
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, gen_op_pslldq_xmm },
};
2566
/* scalar int<->float conversions, in groups of 4: cvtsi2ss/sd and
   their 64 bit forms, then the truncating cvtt*2si/sq, then cvt*2si/sq;
   the 64 bit entries are NULL on 32 bit hosts via X86_64_ONLY */
static GenOpFunc1 *sse_op_table3[4 * 3] = {
    gen_op_cvtsi2ss,
    gen_op_cvtsi2sd,
    X86_64_ONLY(gen_op_cvtsq2ss),
    X86_64_ONLY(gen_op_cvtsq2sd),

    gen_op_cvttss2si,
    gen_op_cvttsd2si,
    X86_64_ONLY(gen_op_cvttss2sq),
    X86_64_ONLY(gen_op_cvttsd2sq),

    gen_op_cvtss2si,
    gen_op_cvtsd2si,
    X86_64_ONLY(gen_op_cvtss2sq),
    X86_64_ONLY(gen_op_cvtsd2sq),
};
2583
/* cmpps/cmppd/cmpss/cmpsd: indexed by the imm8 comparison predicate
   (eq, lt, le, unord, neq, nlt, nle, ord), then by prefix */
static GenOpFunc2 *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
2594
/* Translate one MMX/SSE/SSE2/SSE3 instruction.  'b' is the second
   opcode byte of the 0F map (only the low 8 bits are used); the
   mandatory prefix recorded in s->prefix selects the operand form.
   'rex_r' carries the REX.R extension for the modrm reg field.
   Emits the micro-ops for the instruction into the current
   translation block; invalid encodings emit an #UD exception and
   faulting conditions (CR0.TS / CR0.EM / missing CR4.OSFXSR) emit
   the corresponding exception instead of translating the op. */
static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
{
    int b1, op1_offset, op2_offset, is_xmm, val, ot;
    int modrm, mod, rm, reg, reg_addr, offset_addr;
    GenOpFunc2 *sse_op2;
    GenOpFunc3 *sse_op3;

    /* b1 encodes the mandatory prefix: 0 = none (MMX form),
       1 = 0x66, 2 = 0xf3, 3 = 0xf2. */
    b &= 0xff;
    if (s->prefix & PREFIX_DATA)
        b1 = 1;
    else if (s->prefix & PREFIX_REPZ)
        b1 = 2;
    else if (s->prefix & PREFIX_REPNZ)
        b1 = 3;
    else
        b1 = 0;
    sse_op2 = sse_op_table1[b][b1];
    if (!sse_op2)
        goto illegal_op;
    /* Decide whether the operands are XMM or MMX registers: the
       0x00-0x5f range plus shufps/cmpps are always XMM; elsewhere a
       mandatory prefix implies the XMM form. */
    if (b <= 0x5f || b == 0xc6 || b == 0xc2) {
        is_xmm = 1;
    } else {
        if (b1 == 0) {
            /* MMX case */
            is_xmm = 0;
        } else {
            is_xmm = 1;
        }
    }
    /* simple MMX/SSE operation */
    if (s->flags & HF_TS_MASK) {
        /* CR0.TS set: raise device-not-available (#NM) for lazy
           FPU/SIMD context switching. */
        gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
        return;
    }
    if (s->flags & HF_EM_MASK) {
        /* CR0.EM set: FPU emulation mode, these insns are invalid.
           The label doubles as the common #UD exit for all the
           'goto illegal_op' paths below. */
    illegal_op:
        gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
        return;
    }
    /* SSE (XMM) forms additionally require CR4.OSFXSR. */
    if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
        goto illegal_op;
    if (b == 0x77) {
        /* emms */
        gen_op_emms();
        return;
    }
    /* prepare MMX state (XXX: optimize by storing fptt and fptags in
       the static cpu state) */
    if (!is_xmm) {
        gen_op_enter_mmx();
    }

    modrm = ldub_code(s->pc++);
    reg = ((modrm >> 3) & 7);
    /* REX.R only extends the register number for XMM regs; MMX regs
       are always mm0-mm7. */
    if (is_xmm)
        reg |= rex_r;
    mod = (modrm >> 6) & 3;
    if (sse_op2 == SSE_SPECIAL) {
        /* Irregular encodings, handled case by case.  Fold the
           mandatory prefix into bits 8-9 of b so one switch can
           discriminate all prefix variants. */
        b |= (b1 << 8);
        switch(b) {
        case 0x0e7: /* movntq */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
            break;
        case 0x1e7: /* movntdq */
        case 0x02b: /* movntps */
        case 0x12b: /* movntpd */
        case 0x3f0: /* lddqu */
            /* non-temporal / unaligned 128-bit moves: memory operand
               only. */
            if (mod == 3)
                goto illegal_op;
            gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
            gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            break;
        case 0x6e: /* movd mm, ea */
            gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
            gen_op_movl_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
            break;
        case 0x16e: /* movd xmm, ea */
            gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
            gen_op_movl_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
            break;
        case 0x6f: /* movq mm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[reg].mmx),
                            offsetof(CPUX86State,fpregs[rm].mmx));
            }
            break;
        case 0x010: /* movups */
        case 0x110: /* movupd */
        case 0x028: /* movaps */
        case 0x128: /* movapd */
        case 0x16f: /* movdqa xmm, ea */
        case 0x26f: /* movdqu xmm, ea */
            /* full 128-bit register loads/moves (alignment faults are
               not checked by the translator). */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
                            offsetof(CPUX86State,xmm_regs[rm]));
            }
            break;
        case 0x210: /* movss xmm, ea */
            if (mod != 3) {
                /* from memory: low dword loaded, upper three dwords
                   zeroed. */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                /* register-to-register: only the low dword moves. */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
            }
            break;
        case 0x310: /* movsd xmm, ea */
            if (mod != 3) {
                /* from memory: low qword loaded, high qword zeroed. */
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x012: /* movlps */
        case 0x112: /* movlpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                /* movhlps: register form of 0F 12 copies the source's
                   high qword into the destination's low qword. */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x212: /* movsldup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
            }
            /* duplicate the even dwords into the odd slots. */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
            break;
        case 0x312: /* movddup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            /* duplicate the low qword into the high qword. */
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            break;
        case 0x016: /* movhps */
        case 0x116: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                /* movlhps: register form of 0F 16 copies the source's
                   low qword into the destination's high qword. */
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            break;
        case 0x216: /* movshdup */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
                gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
            }
            /* duplicate the odd dwords into the even slots. */
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
            gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
                        offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
            break;
        case 0x7e: /* movd ea, mm */
            gen_op_movl_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
            gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            break;
        case 0x17e: /* movd ea, xmm */
            gen_op_movl_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
            gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
            break;
        case 0x27e: /* movq xmm, ea */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            }
            /* high qword of the destination is zeroed. */
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x7f: /* movq ea, mm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
            } else {
                rm = (modrm & 7);
                gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
                            offsetof(CPUX86State,fpregs[reg].mmx));
            }
            break;
        case 0x011: /* movups */
        case 0x111: /* movupd */
        case 0x029: /* movaps */
        case 0x129: /* movapd */
        case 0x17f: /* movdqa ea, xmm */
        case 0x27f: /* movdqu ea, xmm */
            /* store direction of the full 128-bit moves above. */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
                            offsetof(CPUX86State,xmm_regs[reg]));
            }
            break;
        case 0x211: /* movss ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
                gen_op_st_T0_A0[OT_LONG + s->mem_index]();
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
            }
            break;
        case 0x311: /* movsd ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            }
            break;
        case 0x013: /* movlps */
        case 0x113: /* movlpd */
            /* store forms: memory operand only. */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x017: /* movhps */
        case 0x117: /* movhpd */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            } else {
                goto illegal_op;
            }
            break;
        case 0x71: /* shift mm, im */
        case 0x72:
        case 0x73:
        case 0x171: /* shift xmm, im */
        case 0x172:
        case 0x173:
            /* Build the immediate shift count as a 64-bit value in the
               scratch register (xmm_t0/mmx_t0), then dispatch through
               sse_op_table2 on opcode group and modrm /reg. */
            val = ldub_code(s->pc++);
            if (is_xmm) {
                gen_op_movl_T0_im(val);
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
                op1_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                gen_op_movl_T0_im(val);
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
                gen_op_movl_T0_0();
                gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
                op1_offset = offsetof(CPUX86State,mmx_t0);
            }
            sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
            if (!sse_op2)
                goto illegal_op;
            if (is_xmm) {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            /* note the operand order: the shifted register is the
               first argument, the immediate-count scratch the
               second. */
            sse_op2(op2_offset, op1_offset);
            break;
        case 0x050: /* movmskps */
            rm = (modrm & 7) | REX_B(s);
            gen_op_movmskps(offsetof(CPUX86State,xmm_regs[rm]));
            gen_op_mov_reg_T0[OT_LONG][reg]();
            break;
        case 0x150: /* movmskpd */
            rm = (modrm & 7) | REX_B(s);
            gen_op_movmskpd(offsetof(CPUX86State,xmm_regs[rm]));
            gen_op_mov_reg_T0[OT_LONG][reg]();
            break;
        case 0x02a: /* cvtpi2ps */
        case 0x12a: /* cvtpi2pd */
            /* source is an MMX register/qword, so MMX state must be
               entered even though the op was classified as XMM. */
            gen_op_enter_mmx();
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            switch(b >> 8) {
            case 0x0:
                gen_op_cvtpi2ps(op1_offset, op2_offset);
                break;
            default:
            case 0x1:
                gen_op_cvtpi2pd(op1_offset, op2_offset);
                break;
            }
            break;
        case 0x22a: /* cvtsi2ss */
        case 0x32a: /* cvtsi2sd */
            /* REX.W selects the 64-bit integer source form. */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)](op1_offset);
            break;
        case 0x02c: /* cvttps2pi */
        case 0x12c: /* cvttpd2pi */
        case 0x02d: /* cvtps2pi */
        case 0x12d: /* cvtpd2pi */
            /* destination is an MMX register. */
            gen_op_enter_mmx();
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
            switch(b) {
            case 0x02c:
                gen_op_cvttps2pi(op1_offset, op2_offset);
                break;
            case 0x12c:
                gen_op_cvttpd2pi(op1_offset, op2_offset);
                break;
            case 0x02d:
                gen_op_cvtps2pi(op1_offset, op2_offset);
                break;
            case 0x12d:
                gen_op_cvtpd2pi(op1_offset, op2_offset);
                break;
            }
            break;
        case 0x22c: /* cvttss2si */
        case 0x32c: /* cvttsd2si */
        case 0x22d: /* cvtss2si */
        case 0x32d: /* cvtsd2si */
            ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                /* (b >> 8) & 1 distinguishes the sd (qword source)
                   forms from the ss (dword source) forms. */
                if ((b >> 8) & 1) {
                    gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
                } else {
                    gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
                    gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                }
                op2_offset = offsetof(CPUX86State,xmm_t0);
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
            /* +4 selects the truncating row of the table, the extra
               (b & 1) * 4 the non-truncating one (0x2d forms). */
            sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
                          (b & 1) * 4](op2_offset);
            gen_op_mov_reg_T0[ot][reg]();
            break;
        case 0xc4: /* pinsrw */
        case 0x1c4:
            /* rip_offset: the imm8 follows the modrm bytes, needed for
               RIP-relative addressing. */
            s->rip_offset = 1;
            gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;  /* 8 word lanes in an XMM register */
                gen_op_pinsrw_xmm(offsetof(CPUX86State,xmm_regs[reg]), val);
            } else {
                val &= 3;  /* 4 word lanes in an MMX register */
                gen_op_pinsrw_mmx(offsetof(CPUX86State,fpregs[reg].mmx), val);
            }
            break;
        case 0xc5: /* pextrw */
        case 0x1c5:
            if (mod != 3)
                goto illegal_op;
            val = ldub_code(s->pc++);
            if (b1) {
                val &= 7;
                rm = (modrm & 7) | REX_B(s);
                gen_op_pextrw_xmm(offsetof(CPUX86State,xmm_regs[rm]), val);
            } else {
                val &= 3;
                rm = (modrm & 7);
                gen_op_pextrw_mmx(offsetof(CPUX86State,fpregs[rm].mmx), val);
            }
            /* destination is a general-purpose register, so recompute
               reg with the GP-register REX.R extension. */
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0[OT_LONG][reg]();
            break;
        case 0x1d6: /* movq ea, xmm */
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
            } else {
                rm = (modrm & 7) | REX_B(s);
                gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
                            offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
                gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
            }
            break;
        case 0x2d6: /* movq2dq */
            gen_op_enter_mmx();
            rm = (modrm & 7);
            gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
                        offsetof(CPUX86State,fpregs[rm].mmx));
            gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
            break;
        case 0x3d6: /* movdq2q */
            gen_op_enter_mmx();
            rm = (modrm & 7) | REX_B(s);
            gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx),
                        offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
            break;
        case 0xd7: /* pmovmskb */
        case 0x1d7:
            if (mod != 3)
                goto illegal_op;
            if (b1) {
                rm = (modrm & 7) | REX_B(s);
                gen_op_pmovmskb_xmm(offsetof(CPUX86State,xmm_regs[rm]));
            } else {
                rm = (modrm & 7);
                gen_op_pmovmskb_mmx(offsetof(CPUX86State,fpregs[rm].mmx));
            }
            /* destination is a general-purpose register. */
            reg = ((modrm >> 3) & 7) | rex_r;
            gen_op_mov_reg_T0[OT_LONG][reg]();
            break;
        default:
            goto illegal_op;
        }
    } else {
        /* generic MMX or SSE operation */
        switch(b) {
        case 0xf7:
            /* maskmov : we must prepare A0 */
            if (mod != 3)
                goto illegal_op;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg[R_EDI]();
            } else
#endif
            {
                gen_op_movl_A0_reg[R_EDI]();
                if (s->aflag == 0)
                    gen_op_andl_A0_ffff();
            }
            gen_add_A0_ds_seg(s);
            break;
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
        case 0xc2: /* compare insns */
            /* these take a trailing imm8 after the modrm bytes. */
            s->rip_offset = 1;
            break;
        default:
            break;
        }
        /* resolve the two operand locations as CPUX86State offsets;
           memory sources are loaded into the xmm_t0/mmx_t0 scratch. */
        if (is_xmm) {
            op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,xmm_t0);
                if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f && b != 0x5b) ||
                                b == 0xc2)) {
                    /* specific case for SSE single instructions */
                    if (b1 == 2) {
                        /* 32 bit access */
                        gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
                        gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
                    } else {
                        /* 64 bit access */
                        gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_D(0)));
                    }
                } else {
                    gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
                }
            } else {
                rm = (modrm & 7) | REX_B(s);
                op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
            }
        } else {
            op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
            if (mod != 3) {
                gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
                op2_offset = offsetof(CPUX86State,mmx_t0);
                gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
            } else {
                rm = (modrm & 7);
                op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
            }
        }
        switch(b) {
        case 0x70: /* pshufx insn */
        case 0xc6: /* pshufx insn */
            /* the table entry actually holds a three-argument
               generator (dest, src, imm8) for these opcodes. */
            val = ldub_code(s->pc++);
            sse_op3 = (GenOpFunc3 *)sse_op2;
            sse_op3(op1_offset, op2_offset, val);
            break;
        case 0xc2:
            /* compare insns */
            val = ldub_code(s->pc++);
            if (val >= 8)
                goto illegal_op;
            sse_op2 = sse_op_table4[val][b1];
            sse_op2(op1_offset, op2_offset);
            break;
        default:
            sse_op2(op1_offset, op2_offset);
            break;
        }
        /* [u]comiss/[u]comisd (0F 2E/2F) set EFLAGS directly. */
        if (b == 0x2e || b == 0x2f) {
            s->cc_op = CC_OP_EFLAGS;
        }
    }
}
3160
3161
3162/* convert one instruction. s->is_jmp is set if the translation must
3163 be stopped. Return the next pc value */
3164static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3165{
3166 int b, prefixes, aflag, dflag;
3167 int shift, ot;
3168 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3169 target_ulong next_eip, tval;
3170 int rex_w, rex_r;
3171
3172 s->pc = pc_start;
3173 prefixes = 0;
3174 aflag = s->code32;
3175 dflag = s->code32;
3176 s->override = -1;
3177 rex_w = -1;
3178 rex_r = 0;
3179#ifdef TARGET_X86_64
3180 s->rex_x = 0;
3181 s->rex_b = 0;
3182 x86_64_hregs = 0;
3183#endif
3184 s->rip_offset = 0; /* for relative ip address */
3185 next_byte:
3186 b = ldub_code(s->pc);
3187 s->pc++;
3188 /* check prefixes */
3189#ifdef TARGET_X86_64
3190 if (CODE64(s)) {
3191 switch (b) {
3192 case 0xf3:
3193 prefixes |= PREFIX_REPZ;
3194 goto next_byte;
3195 case 0xf2:
3196 prefixes |= PREFIX_REPNZ;
3197 goto next_byte;
3198 case 0xf0:
3199 prefixes |= PREFIX_LOCK;
3200 goto next_byte;
3201 case 0x2e:
3202 s->override = R_CS;
3203 goto next_byte;
3204 case 0x36:
3205 s->override = R_SS;
3206 goto next_byte;
3207 case 0x3e:
3208 s->override = R_DS;
3209 goto next_byte;
3210 case 0x26:
3211 s->override = R_ES;
3212 goto next_byte;
3213 case 0x64:
3214 s->override = R_FS;
3215 goto next_byte;
3216 case 0x65:
3217 s->override = R_GS;
3218 goto next_byte;
3219 case 0x66:
3220 prefixes |= PREFIX_DATA;
3221 goto next_byte;
3222 case 0x67:
3223 prefixes |= PREFIX_ADR;
3224 goto next_byte;
3225 case 0x40 ... 0x4f:
3226 /* REX prefix */
3227 rex_w = (b >> 3) & 1;
3228 rex_r = (b & 0x4) << 1;
3229 s->rex_x = (b & 0x2) << 2;
3230 REX_B(s) = (b & 0x1) << 3;
3231 x86_64_hregs = 1; /* select uniform byte register addressing */
3232 goto next_byte;
3233 }
3234 if (rex_w == 1) {
3235 /* 0x66 is ignored if rex.w is set */
3236 dflag = 2;
3237 } else {
3238 if (prefixes & PREFIX_DATA)
3239 dflag ^= 1;
3240 }
3241 if (!(prefixes & PREFIX_ADR))
3242 aflag = 2;
3243 } else
3244#endif
3245 {
3246 switch (b) {
3247 case 0xf3:
3248 prefixes |= PREFIX_REPZ;
3249 goto next_byte;
3250 case 0xf2:
3251 prefixes |= PREFIX_REPNZ;
3252 goto next_byte;
3253 case 0xf0:
3254 prefixes |= PREFIX_LOCK;
3255 goto next_byte;
3256 case 0x2e:
3257 s->override = R_CS;
3258 goto next_byte;
3259 case 0x36:
3260 s->override = R_SS;
3261 goto next_byte;
3262 case 0x3e:
3263 s->override = R_DS;
3264 goto next_byte;
3265 case 0x26:
3266 s->override = R_ES;
3267 goto next_byte;
3268 case 0x64:
3269 s->override = R_FS;
3270 goto next_byte;
3271 case 0x65:
3272 s->override = R_GS;
3273 goto next_byte;
3274 case 0x66:
3275 prefixes |= PREFIX_DATA;
3276 goto next_byte;
3277 case 0x67:
3278 prefixes |= PREFIX_ADR;
3279 goto next_byte;
3280 }
3281 if (prefixes & PREFIX_DATA)
3282 dflag ^= 1;
3283 if (prefixes & PREFIX_ADR)
3284 aflag ^= 1;
3285 }
3286
3287 s->prefix = prefixes;
3288 s->aflag = aflag;
3289 s->dflag = dflag;
3290
3291 /* lock generation */
3292 if (prefixes & PREFIX_LOCK)
3293 gen_op_lock();
3294
3295 /* now check op code */
3296 reswitch:
3297 switch(b) {
3298 case 0x0f:
3299 /**************************/
3300 /* extended op code */
3301 b = ldub_code(s->pc++) | 0x100;
3302 goto reswitch;
3303
3304 /**************************/
3305 /* arith & logic */
3306 case 0x00 ... 0x05:
3307 case 0x08 ... 0x0d:
3308 case 0x10 ... 0x15:
3309 case 0x18 ... 0x1d:
3310 case 0x20 ... 0x25:
3311 case 0x28 ... 0x2d:
3312 case 0x30 ... 0x35:
3313 case 0x38 ... 0x3d:
3314 {
3315 int op, f, val;
3316 op = (b >> 3) & 7;
3317 f = (b >> 1) & 3;
3318
3319 if ((b & 1) == 0)
3320 ot = OT_BYTE;
3321 else
3322 ot = dflag + OT_WORD;
3323
3324 switch(f) {
3325 case 0: /* OP Ev, Gv */
3326 modrm = ldub_code(s->pc++);
3327 reg = ((modrm >> 3) & 7) | rex_r;
3328 mod = (modrm >> 6) & 3;
3329 rm = (modrm & 7) | REX_B(s);
3330 if (mod != 3) {
3331 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3332 opreg = OR_TMP0;
3333 } else if (op == OP_XORL && rm == reg) {
3334 xor_zero:
3335 /* xor reg, reg optimisation */
3336 gen_op_movl_T0_0();
3337 s->cc_op = CC_OP_LOGICB + ot;
3338 gen_op_mov_reg_T0[ot][reg]();
3339 gen_op_update1_cc();
3340 break;
3341 } else {
3342 opreg = rm;
3343 }
3344 gen_op_mov_TN_reg[ot][1][reg]();
3345 gen_op(s, op, ot, opreg);
3346 break;
3347 case 1: /* OP Gv, Ev */
3348 modrm = ldub_code(s->pc++);
3349 mod = (modrm >> 6) & 3;
3350 reg = ((modrm >> 3) & 7) | rex_r;
3351 rm = (modrm & 7) | REX_B(s);
3352 if (mod != 3) {
3353 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3354 gen_op_ld_T1_A0[ot + s->mem_index]();
3355 } else if (op == OP_XORL && rm == reg) {
3356 goto xor_zero;
3357 } else {
3358 gen_op_mov_TN_reg[ot][1][rm]();
3359 }
3360 gen_op(s, op, ot, reg);
3361 break;
3362 case 2: /* OP A, Iv */
3363 val = insn_get(s, ot);
3364 gen_op_movl_T1_im(val);
3365 gen_op(s, op, ot, OR_EAX);
3366 break;
3367 }
3368 }
3369 break;
3370
3371 case 0x80: /* GRP1 */
3372 case 0x81:
3373 case 0x82:
3374 case 0x83:
3375 {
3376 int val;
3377
3378 if ((b & 1) == 0)
3379 ot = OT_BYTE;
3380 else
3381 ot = dflag + OT_WORD;
3382
3383 modrm = ldub_code(s->pc++);
3384 mod = (modrm >> 6) & 3;
3385 rm = (modrm & 7) | REX_B(s);
3386 op = (modrm >> 3) & 7;
3387
3388 if (mod != 3) {
3389 if (b == 0x83)
3390 s->rip_offset = 1;
3391 else
3392 s->rip_offset = insn_const_size(ot);
3393 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3394 opreg = OR_TMP0;
3395 } else {
3396 opreg = rm;
3397 }
3398
3399 switch(b) {
3400 default:
3401 case 0x80:
3402 case 0x81:
3403 case 0x82:
3404 val = insn_get(s, ot);
3405 break;
3406 case 0x83:
3407 val = (int8_t)insn_get(s, OT_BYTE);
3408 break;
3409 }
3410 gen_op_movl_T1_im(val);
3411 gen_op(s, op, ot, opreg);
3412 }
3413 break;
3414
3415 /**************************/
3416 /* inc, dec, and other misc arith */
3417 case 0x40 ... 0x47: /* inc Gv */
3418 ot = dflag ? OT_LONG : OT_WORD;
3419 gen_inc(s, ot, OR_EAX + (b & 7), 1);
3420 break;
3421 case 0x48 ... 0x4f: /* dec Gv */
3422 ot = dflag ? OT_LONG : OT_WORD;
3423 gen_inc(s, ot, OR_EAX + (b & 7), -1);
3424 break;
3425 case 0xf6: /* GRP3 */
3426 case 0xf7:
3427 if ((b & 1) == 0)
3428 ot = OT_BYTE;
3429 else
3430 ot = dflag + OT_WORD;
3431
3432 modrm = ldub_code(s->pc++);
3433 mod = (modrm >> 6) & 3;
3434 rm = (modrm & 7) | REX_B(s);
3435 op = (modrm >> 3) & 7;
3436 if (mod != 3) {
3437 if (op == 0)
3438 s->rip_offset = insn_const_size(ot);
3439 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3440 gen_op_ld_T0_A0[ot + s->mem_index]();
3441 } else {
3442 gen_op_mov_TN_reg[ot][0][rm]();
3443 }
3444
3445 switch(op) {
3446 case 0: /* test */
3447 val = insn_get(s, ot);
3448 gen_op_movl_T1_im(val);
3449 gen_op_testl_T0_T1_cc();
3450 s->cc_op = CC_OP_LOGICB + ot;
3451 break;
3452 case 2: /* not */
3453 gen_op_notl_T0();
3454 if (mod != 3) {
3455 gen_op_st_T0_A0[ot + s->mem_index]();
3456 } else {
3457 gen_op_mov_reg_T0[ot][rm]();
3458 }
3459 break;
3460 case 3: /* neg */
3461 gen_op_negl_T0();
3462 if (mod != 3) {
3463 gen_op_st_T0_A0[ot + s->mem_index]();
3464 } else {
3465 gen_op_mov_reg_T0[ot][rm]();
3466 }
3467 gen_op_update_neg_cc();
3468 s->cc_op = CC_OP_SUBB + ot;
3469 break;
3470 case 4: /* mul */
3471 switch(ot) {
3472 case OT_BYTE:
3473 gen_op_mulb_AL_T0();
3474 s->cc_op = CC_OP_MULB;
3475 break;
3476 case OT_WORD:
3477 gen_op_mulw_AX_T0();
3478 s->cc_op = CC_OP_MULW;
3479 break;
3480 default:
3481 case OT_LONG:
3482 gen_op_mull_EAX_T0();
3483 s->cc_op = CC_OP_MULL;
3484 break;
3485#ifdef TARGET_X86_64
3486 case OT_QUAD:
3487 gen_op_mulq_EAX_T0();
3488 s->cc_op = CC_OP_MULQ;
3489 break;
3490#endif
3491 }
3492 break;
3493 case 5: /* imul */
3494 switch(ot) {
3495 case OT_BYTE:
3496 gen_op_imulb_AL_T0();
3497 s->cc_op = CC_OP_MULB;
3498 break;
3499 case OT_WORD:
3500 gen_op_imulw_AX_T0();
3501 s->cc_op = CC_OP_MULW;
3502 break;
3503 default:
3504 case OT_LONG:
3505 gen_op_imull_EAX_T0();
3506 s->cc_op = CC_OP_MULL;
3507 break;
3508#ifdef TARGET_X86_64
3509 case OT_QUAD:
3510 gen_op_imulq_EAX_T0();
3511 s->cc_op = CC_OP_MULQ;
3512 break;
3513#endif
3514 }
3515 break;
3516 case 6: /* div */
3517 switch(ot) {
3518 case OT_BYTE:
3519 gen_jmp_im(pc_start - s->cs_base);
3520 gen_op_divb_AL_T0();
3521 break;
3522 case OT_WORD:
3523 gen_jmp_im(pc_start - s->cs_base);
3524 gen_op_divw_AX_T0();
3525 break;
3526 default:
3527 case OT_LONG:
3528 gen_jmp_im(pc_start - s->cs_base);
3529 gen_op_divl_EAX_T0();
3530 break;
3531#ifdef TARGET_X86_64
3532 case OT_QUAD:
3533 gen_jmp_im(pc_start - s->cs_base);
3534 gen_op_divq_EAX_T0();
3535 break;
3536#endif
3537 }
3538 break;
3539 case 7: /* idiv */
3540 switch(ot) {
3541 case OT_BYTE:
3542 gen_jmp_im(pc_start - s->cs_base);
3543 gen_op_idivb_AL_T0();
3544 break;
3545 case OT_WORD:
3546 gen_jmp_im(pc_start - s->cs_base);
3547 gen_op_idivw_AX_T0();
3548 break;
3549 default:
3550 case OT_LONG:
3551 gen_jmp_im(pc_start - s->cs_base);
3552 gen_op_idivl_EAX_T0();
3553 break;
3554#ifdef TARGET_X86_64
3555 case OT_QUAD:
3556 gen_jmp_im(pc_start - s->cs_base);
3557 gen_op_idivq_EAX_T0();
3558 break;
3559#endif
3560 }
3561 break;
3562 default:
3563 goto illegal_op;
3564 }
3565 break;
3566
3567 case 0xfe: /* GRP4 */
3568 case 0xff: /* GRP5 */
3569 if ((b & 1) == 0)
3570 ot = OT_BYTE;
3571 else
3572 ot = dflag + OT_WORD;
3573
3574 modrm = ldub_code(s->pc++);
3575 mod = (modrm >> 6) & 3;
3576 rm = (modrm & 7) | REX_B(s);
3577 op = (modrm >> 3) & 7;
3578 if (op >= 2 && b == 0xfe) {
3579 goto illegal_op;
3580 }
3581 if (CODE64(s)) {
3582 if (op == 2 || op == 4) {
3583 /* operand size for jumps is 64 bit */
3584 ot = OT_QUAD;
3585 } else if (op == 3 || op == 5) {
3586 /* for call calls, the operand is 16 or 32 bit, even
3587 in long mode */
3588 ot = dflag ? OT_LONG : OT_WORD;
3589 } else if (op == 6) {
3590 /* default push size is 64 bit */
3591 ot = dflag ? OT_QUAD : OT_WORD;
3592 }
3593 }
3594 if (mod != 3) {
3595 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3596 if (op >= 2 && op != 3 && op != 5)
3597 gen_op_ld_T0_A0[ot + s->mem_index]();
3598 } else {
3599 gen_op_mov_TN_reg[ot][0][rm]();
3600 }
3601
3602 switch(op) {
3603 case 0: /* inc Ev */
3604 if (mod != 3)
3605 opreg = OR_TMP0;
3606 else
3607 opreg = rm;
3608 gen_inc(s, ot, opreg, 1);
3609 break;
3610 case 1: /* dec Ev */
3611 if (mod != 3)
3612 opreg = OR_TMP0;
3613 else
3614 opreg = rm;
3615 gen_inc(s, ot, opreg, -1);
3616 break;
3617 case 2: /* call Ev */
3618 /* XXX: optimize if memory (no 'and' is necessary) */
3619 if (s->dflag == 0)
3620 gen_op_andl_T0_ffff();
3621 next_eip = s->pc - s->cs_base;
3622 gen_movtl_T1_im(next_eip);
3623 gen_push_T1(s);
3624 gen_op_jmp_T0();
3625 gen_eob(s);
3626 break;
3627 case 3: /* lcall Ev */
3628 gen_op_ld_T1_A0[ot + s->mem_index]();
3629 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3630 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3631 do_lcall:
3632 if (s->pe && !s->vm86) {
3633 if (s->cc_op != CC_OP_DYNAMIC)
3634 gen_op_set_cc_op(s->cc_op);
3635 gen_jmp_im(pc_start - s->cs_base);
3636 gen_op_lcall_protected_T0_T1(dflag, s->pc - pc_start);
3637 } else {
3638 gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
3639 }
3640 gen_eob(s);
3641 break;
3642 case 4: /* jmp Ev */
3643 if (s->dflag == 0)
3644 gen_op_andl_T0_ffff();
3645 gen_op_jmp_T0();
3646 gen_eob(s);
3647 break;
3648 case 5: /* ljmp Ev */
3649 gen_op_ld_T1_A0[ot + s->mem_index]();
3650 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3651 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3652 do_ljmp:
3653 if (s->pe && !s->vm86) {
3654 if (s->cc_op != CC_OP_DYNAMIC)
3655 gen_op_set_cc_op(s->cc_op);
3656 gen_jmp_im(pc_start - s->cs_base);
3657 gen_op_ljmp_protected_T0_T1(s->pc - pc_start);
3658 } else {
3659 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3660 gen_op_movl_T0_T1();
3661 gen_op_jmp_T0();
3662 }
3663 gen_eob(s);
3664 break;
3665 case 6: /* push Ev */
3666 gen_push_T0(s);
3667 break;
3668 default:
3669 goto illegal_op;
3670 }
3671 break;
3672
3673 case 0x84: /* test Ev, Gv */
3674 case 0x85:
3675 if ((b & 1) == 0)
3676 ot = OT_BYTE;
3677 else
3678 ot = dflag + OT_WORD;
3679
3680 modrm = ldub_code(s->pc++);
3681 mod = (modrm >> 6) & 3;
3682 rm = (modrm & 7) | REX_B(s);
3683 reg = ((modrm >> 3) & 7) | rex_r;
3684
3685 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3686 gen_op_mov_TN_reg[ot][1][reg]();
3687 gen_op_testl_T0_T1_cc();
3688 s->cc_op = CC_OP_LOGICB + ot;
3689 break;
3690
3691 case 0xa8: /* test eAX, Iv */
3692 case 0xa9:
3693 if ((b & 1) == 0)
3694 ot = OT_BYTE;
3695 else
3696 ot = dflag + OT_WORD;
3697 val = insn_get(s, ot);
3698
3699 gen_op_mov_TN_reg[ot][0][OR_EAX]();
3700 gen_op_movl_T1_im(val);
3701 gen_op_testl_T0_T1_cc();
3702 s->cc_op = CC_OP_LOGICB + ot;
3703 break;
3704
3705 case 0x98: /* CWDE/CBW */
3706#ifdef TARGET_X86_64
3707 if (dflag == 2) {
3708 gen_op_movslq_RAX_EAX();
3709 } else
3710#endif
3711 if (dflag == 1)
3712 gen_op_movswl_EAX_AX();
3713 else
3714 gen_op_movsbw_AX_AL();
3715 break;
3716 case 0x99: /* CDQ/CWD */
3717#ifdef TARGET_X86_64
3718 if (dflag == 2) {
3719 gen_op_movsqo_RDX_RAX();
3720 } else
3721#endif
3722 if (dflag == 1)
3723 gen_op_movslq_EDX_EAX();
3724 else
3725 gen_op_movswl_DX_AX();
3726 break;
3727 case 0x1af: /* imul Gv, Ev */
3728 case 0x69: /* imul Gv, Ev, I */
3729 case 0x6b:
3730 ot = dflag + OT_WORD;
3731 modrm = ldub_code(s->pc++);
3732 reg = ((modrm >> 3) & 7) | rex_r;
 /* rip_offset: immediate bytes that still follow the modrm/displacement;
    presumably used so RIP-relative addressing points past the immediate
    — TODO confirm against gen_lea_modrm */
3733 if (b == 0x69)
3734 s->rip_offset = insn_const_size(ot);
3735 else if (b == 0x6b)
3736 s->rip_offset = 1;
3737 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
 /* second factor: full immediate (0x69), sign-extended byte (0x6b),
    or the destination register itself (0x0f 0xaf) */
3738 if (b == 0x69) {
3739 val = insn_get(s, ot);
3740 gen_op_movl_T1_im(val);
3741 } else if (b == 0x6b) {
3742 val = (int8_t)insn_get(s, OT_BYTE);
3743 gen_op_movl_T1_im(val);
3744 } else {
3745 gen_op_mov_TN_reg[ot][1][reg]();
3746 }
3747
3748#ifdef TARGET_X86_64
3749 if (ot == OT_QUAD) {
3750 gen_op_imulq_T0_T1();
3751 } else
3752#endif
3753 if (ot == OT_LONG) {
3754 gen_op_imull_T0_T1();
3755 } else {
3756 gen_op_imulw_T0_T1();
3757 }
3758 gen_op_mov_reg_T0[ot][reg]();
3759 s->cc_op = CC_OP_MULB + ot;
3760 break;
3761 case 0x1c0:
3762 case 0x1c1: /* xadd Ev, Gv */
 /* XADD: dest receives dest+src, src register receives old dest. */
3763 if ((b & 1) == 0)
3764 ot = OT_BYTE;
3765 else
3766 ot = dflag + OT_WORD;
3767 modrm = ldub_code(s->pc++);
3768 reg = ((modrm >> 3) & 7) | rex_r;
3769 mod = (modrm >> 6) & 3;
3770 if (mod == 3) {
 /* register destination: swap-and-add entirely through T0/T1 */
3771 rm = (modrm & 7) | REX_B(s);
3772 gen_op_mov_TN_reg[ot][0][reg]();
3773 gen_op_mov_TN_reg[ot][1][rm]();
3774 gen_op_addl_T0_T1();
3775 gen_op_mov_reg_T1[ot][reg]();
3776 gen_op_mov_reg_T0[ot][rm]();
3777 } else {
 /* memory destination: load, add, store sum, return old value in reg */
3778 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3779 gen_op_mov_TN_reg[ot][0][reg]();
3780 gen_op_ld_T1_A0[ot + s->mem_index]();
3781 gen_op_addl_T0_T1();
3782 gen_op_st_T0_A0[ot + s->mem_index]();
3783 gen_op_mov_reg_T1[ot][reg]();
3784 }
3785 gen_op_update2_cc();
3786 s->cc_op = CC_OP_ADDB + ot; /* flags as for an ADD of size ot */
3787 break;
3788 case 0x1b0:
3789 case 0x1b1: /* cmpxchg Ev, Gv */
 /* CMPXCHG: compare accumulator with dest; helper op performs the
    conditional store/accumulator update and flag computation. */
3790 if ((b & 1) == 0)
3791 ot = OT_BYTE;
3792 else
3793 ot = dflag + OT_WORD;
3794 modrm = ldub_code(s->pc++);
3795 reg = ((modrm >> 3) & 7) | rex_r;
3796 mod = (modrm >> 6) & 3;
3797 gen_op_mov_TN_reg[ot][1][reg]();
3798 if (mod == 3) {
3799 rm = (modrm & 7) | REX_B(s);
3800 gen_op_mov_TN_reg[ot][0][rm]();
3801 gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
3802 gen_op_mov_reg_T0[ot][rm]();
3803 } else {
3804 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3805 gen_op_ld_T0_A0[ot + s->mem_index]();
3806 gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
3807 }
3808 s->cc_op = CC_OP_SUBB + ot; /* flags reflect the compare (SUB) */
3809 break;
3810 case 0x1c7: /* cmpxchg8b */
3811 modrm = ldub_code(s->pc++);
3812 mod = (modrm >> 6) & 3;
3813 if (mod == 3)
3814 goto illegal_op; /* register form is undefined for cmpxchg8b */
 /* helper reads live EFLAGS, so flush any lazy cc state first */
3815 if (s->cc_op != CC_OP_DYNAMIC)
3816 gen_op_set_cc_op(s->cc_op);
3817 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3818 gen_op_cmpxchg8b();
3819 s->cc_op = CC_OP_EFLAGS;
3820 break;
3821
3822 /**************************/
3823 /* push/pop */
3824 case 0x50 ... 0x57: /* push */
3825 gen_op_mov_TN_reg[OT_LONG][0][(b & 7) | REX_B(s)]();
3826 gen_push_T0(s);
3827 break;
3828 case 0x58 ... 0x5f: /* pop */
 /* in 64-bit mode stack operands default to 64 bits (66h gives 16) */
3829 if (CODE64(s)) {
3830 ot = dflag ? OT_QUAD : OT_WORD;
3831 } else {
3832 ot = dflag + OT_WORD;
3833 }
3834 gen_pop_T0(s);
3835 /* NOTE: order is important for pop %sp */
3836 gen_pop_update(s);
3837 gen_op_mov_reg_T0[ot][(b & 7) | REX_B(s)]();
3838 break;
3839 case 0x60: /* pusha */
3840 if (CODE64(s))
3841 goto illegal_op; /* PUSHA/POPA do not exist in 64-bit mode */
3842 gen_pusha(s);
3843 break;
3844 case 0x61: /* popa */
3845 if (CODE64(s))
3846 goto illegal_op;
3847 gen_popa(s);
3848 break;
3849 case 0x68: /* push Iv */
3850 case 0x6a:
3851 if (CODE64(s)) {
3852 ot = dflag ? OT_QUAD : OT_WORD;
3853 } else {
3854 ot = dflag + OT_WORD;
3855 }
 /* 0x68 pushes a full-size immediate, 0x6a a sign-extended byte */
3856 if (b == 0x68)
3857 val = insn_get(s, ot);
3858 else
3859 val = (int8_t)insn_get(s, OT_BYTE);
3860 gen_op_movl_T0_im(val);
3861 gen_push_T0(s);
3862 break;
3863 case 0x8f: /* pop Ev */
3864 if (CODE64(s)) {
3865 ot = dflag ? OT_QUAD : OT_WORD;
3866 } else {
3867 ot = dflag + OT_WORD;
3868 }
3869 modrm = ldub_code(s->pc++);
3870 mod = (modrm >> 6) & 3;
3871 gen_pop_T0(s);
3872 if (mod == 3) {
3873 /* NOTE: order is important for pop %sp */
3874 gen_pop_update(s);
3875 rm = (modrm & 7) | REX_B(s);
3876 gen_op_mov_reg_T0[ot][rm]();
3877 } else {
3878 /* NOTE: order is important too for MMU exceptions */
 /* popl_esp_hack makes ESP-relative addressing account for the
    value already popped off the stack */
3879 s->popl_esp_hack = 1 << ot;
3880 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3881 s->popl_esp_hack = 0;
3882 gen_pop_update(s);
3883 }
3884 break;
3885 case 0xc8: /* enter */
3886 {
3887 int level;
3888 val = lduw_code(s->pc); /* frame size (16-bit immediate) */
3889 s->pc += 2;
3890 level = ldub_code(s->pc++); /* nesting level */
3891 gen_enter(s, val, level);
3892 }
3893 break;
3894 case 0xc9: /* leave */
3895 /* XXX: exception not precise (ESP is updated before potential exception) */
 /* LEAVE: mov xBP -> xSP, then pop xBP; widths follow stack mode */
3896 if (CODE64(s)) {
3897 gen_op_mov_TN_reg[OT_QUAD][0][R_EBP]();
3898 gen_op_mov_reg_T0[OT_QUAD][R_ESP]();
3899 } else if (s->ss32) {
3900 gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
3901 gen_op_mov_reg_T0[OT_LONG][R_ESP]();
3902 } else {
3903 gen_op_mov_TN_reg[OT_WORD][0][R_EBP]();
3904 gen_op_mov_reg_T0[OT_WORD][R_ESP]();
3905 }
3906 gen_pop_T0(s);
3907 if (CODE64(s)) {
3908 ot = dflag ? OT_QUAD : OT_WORD;
3909 } else {
3910 ot = dflag + OT_WORD;
3911 }
3912 gen_op_mov_reg_T0[ot][R_EBP]();
3913 gen_pop_update(s);
3914 break;
3915 case 0x06: /* push es */
3916 case 0x0e: /* push cs */
3917 case 0x16: /* push ss */
3918 case 0x1e: /* push ds */
3919 if (CODE64(s))
3920 goto illegal_op; /* legacy segment push/pop invalid in 64-bit mode */
3921 gen_op_movl_T0_seg(b >> 3); /* segment index encoded in opcode bits 3-5 */
3922 gen_push_T0(s);
3923 break;
3924 case 0x1a0: /* push fs */
3925 case 0x1a8: /* push gs */
3926 gen_op_movl_T0_seg((b >> 3) & 7);
3927 gen_push_T0(s);
3928 break;
3929 case 0x07: /* pop es */
3930 case 0x17: /* pop ss */
3931 case 0x1f: /* pop ds */
3932 if (CODE64(s))
3933 goto illegal_op;
3934 reg = b >> 3;
3935 gen_pop_T0(s);
3936 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
3937 gen_pop_update(s);
3938 if (reg == R_SS) {
3939 /* if reg == SS, inhibit interrupts/trace. */
3940 /* If several instructions disable interrupts, only the
3941 _first_ does it */
3942 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
3943 gen_op_set_inhibit_irq();
3944 s->tf = 0;
3945 }
 /* segment load may have forced an end-of-block (CPL/flags change) */
3946 if (s->is_jmp) {
3947 gen_jmp_im(s->pc - s->cs_base);
3948 gen_eob(s);
3949 }
3950 break;
3951 case 0x1a1: /* pop fs */
3952 case 0x1a9: /* pop gs */
3953 gen_pop_T0(s);
3954 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
3955 gen_pop_update(s);
3956 if (s->is_jmp) {
3957 gen_jmp_im(s->pc - s->cs_base);
3958 gen_eob(s);
3959 }
3960 break;
3961
3962 /**************************/
3963 /* mov */
3964 case 0x88:
3965 case 0x89: /* mov Gv, Ev */
3966 if ((b & 1) == 0)
3967 ot = OT_BYTE;
3968 else
3969 ot = dflag + OT_WORD;
3970 modrm = ldub_code(s->pc++);
3971 reg = ((modrm >> 3) & 7) | rex_r;
3972
3973 /* generate a generic store */
3974 gen_ldst_modrm(s, modrm, ot, reg, 1);
3975 break;
3976 case 0xc6:
3977 case 0xc7: /* mov Ev, Iv */
3978 if ((b & 1) == 0)
3979 ot = OT_BYTE;
3980 else
3981 ot = dflag + OT_WORD;
3982 modrm = ldub_code(s->pc++);
3983 mod = (modrm >> 6) & 3;
3984 if (mod != 3) {
 /* compute the address before reading the immediate so that
    RIP-relative displacements can skip the trailing immediate */
3985 s->rip_offset = insn_const_size(ot);
3986 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3987 }
3988 val = insn_get(s, ot);
3989 gen_op_movl_T0_im(val);
3990 if (mod != 3)
3991 gen_op_st_T0_A0[ot + s->mem_index]();
3992 else
3993 gen_op_mov_reg_T0[ot][(modrm & 7) | REX_B(s)]();
3994 break;
3995 case 0x8a:
3996 case 0x8b: /* mov Ev, Gv */
3997 if ((b & 1) == 0)
3998 ot = OT_BYTE;
3999 else
4000 ot = OT_WORD + dflag;
4001 modrm = ldub_code(s->pc++);
4002 reg = ((modrm >> 3) & 7) | rex_r;
4003
4004 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
4005 gen_op_mov_reg_T0[ot][reg]();
4006 break;
4007 case 0x8e: /* mov seg, Gv */
4008 modrm = ldub_code(s->pc++);
4009 reg = (modrm >> 3) & 7;
4010 if (reg >= 6 || reg == R_CS)
4011 goto illegal_op; /* CS is not a valid MOV destination */
4012 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
4013 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
4014 if (reg == R_SS) {
4015 /* if reg == SS, inhibit interrupts/trace */
4016 /* If several instructions disable interrupts, only the
4017 _first_ does it */
4018 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
4019 gen_op_set_inhibit_irq();
4020 s->tf = 0;
4021 }
4022 if (s->is_jmp) {
4023 gen_jmp_im(s->pc - s->cs_base);
4024 gen_eob(s);
4025 }
4026 break;
4027 case 0x8c: /* mov Gv, seg */
4028 modrm = ldub_code(s->pc++);
4029 reg = (modrm >> 3) & 7;
4030 mod = (modrm >> 6) & 3;
4031 if (reg >= 6)
4032 goto illegal_op;
4033 gen_op_movl_T0_seg(reg);
 /* register destination may be widened; memory stores are 16-bit */
4034 if (mod == 3)
4035 ot = OT_WORD + dflag;
4036 else
4037 ot = OT_WORD;
4038 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4039 break;
4040
4041 case 0x1b6: /* movzbS Gv, Eb */
4042 case 0x1b7: /* movzwS Gv, Eb */
4043 case 0x1be: /* movsbS Gv, Eb */
4044 case 0x1bf: /* movswS Gv, Eb */
4045 {
4046 int d_ot;
4047 /* d_ot is the size of destination */
4048 d_ot = dflag + OT_WORD;
4049 /* ot is the size of source */
4050 ot = (b & 1) + OT_BYTE;
4051 modrm = ldub_code(s->pc++);
4052 reg = ((modrm >> 3) & 7) | rex_r;
4053 mod = (modrm >> 6) & 3;
4054 rm = (modrm & 7) | REX_B(s);
4055
4056 if (mod == 3) {
 /* bit 3 of the opcode distinguishes sign- (0x1bx) from
    zero-extension (0x1b6/0x1b7) */
4057 gen_op_mov_TN_reg[ot][0][rm]();
4058 switch(ot | (b & 8)) {
4059 case OT_BYTE:
4060 gen_op_movzbl_T0_T0();
4061 break;
4062 case OT_BYTE | 8:
4063 gen_op_movsbl_T0_T0();
4064 break;
4065 case OT_WORD:
4066 gen_op_movzwl_T0_T0();
4067 break;
4068 default:
4069 case OT_WORD | 8:
4070 gen_op_movswl_T0_T0();
4071 break;
4072 }
4073 gen_op_mov_reg_T0[d_ot][reg]();
4074 } else {
 /* memory source: signed/unsigned load ops do the extension */
4075 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4076 if (b & 8) {
4077 gen_op_lds_T0_A0[ot + s->mem_index]();
4078 } else {
4079 gen_op_ldu_T0_A0[ot + s->mem_index]();
4080 }
4081 gen_op_mov_reg_T0[d_ot][reg]();
4082 }
4083 }
4084 break;
4085
4086 case 0x8d: /* lea */
4087 ot = dflag + OT_WORD;
4088 modrm = ldub_code(s->pc++);
4089 mod = (modrm >> 6) & 3;
4090 if (mod == 3)
4091 goto illegal_op; /* LEA requires a memory operand */
4092 reg = ((modrm >> 3) & 7) | rex_r;
4093 /* we must ensure that no segment is added */
 /* temporarily disable segment override and addseg so A0 holds the
    raw effective address; restore addseg afterwards */
4094 s->override = -1;
4095 val = s->addseg;
4096 s->addseg = 0;
4097 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4098 s->addseg = val;
4099 gen_op_mov_reg_A0[ot - OT_WORD][reg]();
4100 break;
4101
4102 case 0xa0: /* mov EAX, Ov */
4103 case 0xa1:
4104 case 0xa2: /* mov Ov, EAX */
4105 case 0xa3:
4106 {
4107 target_ulong offset_addr;
4108
4109 if ((b & 1) == 0)
4110 ot = OT_BYTE;
4111 else
4112 ot = dflag + OT_WORD;
 /* the moffs form carries an absolute address whose width follows
    the address-size (aflag): 64, 32 or 16 bits */
4113#ifdef TARGET_X86_64
4114 if (s->aflag == 2) {
4115 offset_addr = ldq_code(s->pc);
4116 s->pc += 8;
 /* use the short immediate form when the address sign-extends */
4117 if (offset_addr == (int32_t)offset_addr)
4118 gen_op_movq_A0_im(offset_addr);
4119 else
4120 gen_op_movq_A0_im64(offset_addr >> 32, offset_addr);
4121 } else
4122#endif
4123 {
4124 if (s->aflag) {
4125 offset_addr = insn_get(s, OT_LONG);
4126 } else {
4127 offset_addr = insn_get(s, OT_WORD);
4128 }
4129 gen_op_movl_A0_im(offset_addr);
4130 }
4131 gen_add_A0_ds_seg(s);
 /* bit 1 of the opcode selects load (0xa0/a1) vs. store (0xa2/a3) */
4132 if ((b & 2) == 0) {
4133 gen_op_ld_T0_A0[ot + s->mem_index]();
4134 gen_op_mov_reg_T0[ot][R_EAX]();
4135 } else {
4136 gen_op_mov_TN_reg[ot][0][R_EAX]();
4137 gen_op_st_T0_A0[ot + s->mem_index]();
4138 }
4139 }
4140 break;
4141 case 0xd7: /* xlat */
 /* AL = [xBX + unsigned AL] in the DS (or overridden) segment */
4142#ifdef TARGET_X86_64
4143 if (s->aflag == 2) {
4144 gen_op_movq_A0_reg[R_EBX]();
4145 gen_op_addq_A0_AL();
4146 } else
4147#endif
4148 {
4149 gen_op_movl_A0_reg[R_EBX]();
4150 gen_op_addl_A0_AL();
4151 if (s->aflag == 0)
4152 gen_op_andl_A0_ffff(); /* 16-bit addressing wraps at 64K */
4153 }
4154 gen_add_A0_ds_seg(s);
4155 gen_op_ldu_T0_A0[OT_BYTE + s->mem_index]();
4156 gen_op_mov_reg_T0[OT_BYTE][R_EAX]();
4157 break;
4158 case 0xb0 ... 0xb7: /* mov R, Ib */
4159 val = insn_get(s, OT_BYTE);
4160 gen_op_movl_T0_im(val);
4161 gen_op_mov_reg_T0[OT_BYTE][(b & 7) | REX_B(s)]();
4162 break;
4163 case 0xb8 ... 0xbf: /* mov R, Iv */
4164#ifdef TARGET_X86_64
4165 if (dflag == 2) {
4166 uint64_t tmp;
4167 /* 64 bit case */
 /* the only instruction with a full 64-bit immediate */
4168 tmp = ldq_code(s->pc);
4169 s->pc += 8;
4170 reg = (b & 7) | REX_B(s);
4171 gen_movtl_T0_im(tmp);
4172 gen_op_mov_reg_T0[OT_QUAD][reg]();
4173 } else
4174#endif
4175 {
4176 ot = dflag ? OT_LONG : OT_WORD;
4177 val = insn_get(s, ot);
4178 reg = (b & 7) | REX_B(s);
4179 gen_op_movl_T0_im(val);
4180 gen_op_mov_reg_T0[ot][reg]();
4181 }
4182 break;
4183
4184 case 0x91 ... 0x97: /* xchg R, EAX */
4185 ot = dflag + OT_WORD;
4186 reg = (b & 7) | REX_B(s);
4187 rm = R_EAX;
4188 goto do_xchg_reg;
4189 case 0x86:
4190 case 0x87: /* xchg Ev, Gv */
4191 if ((b & 1) == 0)
4192 ot = OT_BYTE;
4193 else
4194 ot = dflag + OT_WORD;
4195 modrm = ldub_code(s->pc++);
4196 reg = ((modrm >> 3) & 7) | rex_r;
4197 mod = (modrm >> 6) & 3;
4198 if (mod == 3) {
4199 rm = (modrm & 7) | REX_B(s);
4200 do_xchg_reg:
4201 gen_op_mov_TN_reg[ot][0][reg]();
4202 gen_op_mov_TN_reg[ot][1][rm]();
4203 gen_op_mov_reg_T0[ot][rm]();
4204 gen_op_mov_reg_T1[ot][reg]();
4205 } else {
4206 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4207 gen_op_mov_TN_reg[ot][0][reg]();
4208 /* for xchg, lock is implicit */
 /* if a LOCK prefix was present the generic prefix handling
    already emitted lock/unlock, so only lock here without it */
4209 if (!(prefixes & PREFIX_LOCK))
4210 gen_op_lock();
4211 gen_op_ld_T1_A0[ot + s->mem_index]();
4212 gen_op_st_T0_A0[ot + s->mem_index]();
4213 if (!(prefixes & PREFIX_LOCK))
4214 gen_op_unlock();
4215 gen_op_mov_reg_T1[ot][reg]();
4216 }
4217 break;
4218 case 0xc4: /* les Gv */
4219 if (CODE64(s))
4220 goto illegal_op; /* LES/LDS encodings are reused by 64-bit mode */
4221 op = R_ES;
4222 goto do_lxx;
4223 case 0xc5: /* lds Gv */
4224 if (CODE64(s))
4225 goto illegal_op;
4226 op = R_DS;
4227 goto do_lxx;
4228 case 0x1b2: /* lss Gv */
4229 op = R_SS;
4230 goto do_lxx;
4231 case 0x1b4: /* lfs Gv */
4232 op = R_FS;
4233 goto do_lxx;
4234 case 0x1b5: /* lgs Gv */
4235 op = R_GS;
4236 do_lxx:
 /* far pointer load: offset followed in memory by a 16-bit selector */
4237 ot = dflag ? OT_LONG : OT_WORD;
4238 modrm = ldub_code(s->pc++);
4239 reg = ((modrm >> 3) & 7) | rex_r;
4240 mod = (modrm >> 6) & 3;
4241 if (mod == 3)
4242 goto illegal_op;
4243 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4244 gen_op_ld_T1_A0[ot + s->mem_index]();
4245 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4246 /* load the segment first to handle exceptions properly */
4247 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
4248 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4249 /* then put the data */
4250 gen_op_mov_reg_T1[ot][reg]();
4251 if (s->is_jmp) {
4252 gen_jmp_im(s->pc - s->cs_base);
4253 gen_eob(s);
4254 }
4255 break;
4256
4257 /************************/
4258 /* shifts */
4259 case 0xc0:
4260 case 0xc1:
4261 /* shift Ev,Ib */
 /* 'shift' encodes the count source: 0 = CL, 1 = constant 1,
    2 = immediate byte still to be read */
4262 shift = 2;
4263 grp2:
4264 {
4265 if ((b & 1) == 0)
4266 ot = OT_BYTE;
4267 else
4268 ot = dflag + OT_WORD;
4269
4270 modrm = ldub_code(s->pc++);
4271 mod = (modrm >> 6) & 3;
4272 op = (modrm >> 3) & 7; /* rol/ror/rcl/rcr/shl/shr/sal/sar */
4273
4274 if (mod != 3) {
4275 if (shift == 2) {
4276 s->rip_offset = 1; /* count immediate follows the modrm */
4277 }
4278 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4279 opreg = OR_TMP0;
4280 } else {
4281 opreg = (modrm & 7) | REX_B(s);
4282 }
4283
4284 /* simpler op */
4285 if (shift == 0) {
4286 gen_shift(s, op, ot, opreg, OR_ECX);
4287 } else {
4288 if (shift == 2) {
4289 shift = ldub_code(s->pc++);
4290 }
4291 gen_shifti(s, op, ot, opreg, shift);
4292 }
4293 }
4294 break;
4295 case 0xd0:
4296 case 0xd1:
4297 /* shift Ev,1 */
4298 shift = 1;
4299 goto grp2;
4300 case 0xd2:
4301 case 0xd3:
4302 /* shift Ev,cl */
4303 shift = 0;
4304 goto grp2;
4305
4306 case 0x1a4: /* shld imm */
 /* double-precision shifts: op 0 = SHLD, 1 = SHRD;
    shift 1 = immediate count, 0 = count in CL */
4307 op = 0;
4308 shift = 1;
4309 goto do_shiftd;
4310 case 0x1a5: /* shld cl */
4311 op = 0;
4312 shift = 0;
4313 goto do_shiftd;
4314 case 0x1ac: /* shrd imm */
4315 op = 1;
4316 shift = 1;
4317 goto do_shiftd;
4318 case 0x1ad: /* shrd cl */
4319 op = 1;
4320 shift = 0;
4321 do_shiftd:
4322 ot = dflag + OT_WORD;
4323 modrm = ldub_code(s->pc++);
4324 mod = (modrm >> 6) & 3;
4325 rm = (modrm & 7) | REX_B(s);
4326 reg = ((modrm >> 3) & 7) | rex_r;
4327
4328 if (mod != 3) {
4329 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4330 gen_op_ld_T0_A0[ot + s->mem_index]();
4331 } else {
4332 gen_op_mov_TN_reg[ot][0][rm]();
4333 }
4334 gen_op_mov_TN_reg[ot][1][reg]();
4335
4336 if (shift) {
4337 val = ldub_code(s->pc++);
 /* count is masked mod 64 for quad operands, mod 32 otherwise */
4338 if (ot == OT_QUAD)
4339 val &= 0x3f;
4340 else
4341 val &= 0x1f;
4342 if (val) {
4343 if (mod == 3)
4344 gen_op_shiftd_T0_T1_im_cc[ot][op](val);
4345 else
4346 gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
4347 if (op == 0 && ot != OT_WORD)
4348 s->cc_op = CC_OP_SHLB + ot;
4349 else
4350 s->cc_op = CC_OP_SARB + ot;
4351 }
4352 } else {
 /* CL count unknown at translate time: flags become dynamic */
4353 if (s->cc_op != CC_OP_DYNAMIC)
4354 gen_op_set_cc_op(s->cc_op);
4355 if (mod == 3)
4356 gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
4357 else
4358 gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
4359 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
4360 }
4361 if (mod == 3) {
4362 gen_op_mov_reg_T0[ot][rm]();
4363 }
4364 break;
4365
4366 /************************/
4367 /* floats */
4368 case 0xd8 ... 0xdf:
 /* x87 escape opcodes: 'op' below combines the low 3 bits of the
    opcode with the modrm reg field to index the FPU operation */
4369 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4370 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4371 /* XXX: what to do if illegal op ? */
4372 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4373 break;
4374 }
4375 modrm = ldub_code(s->pc++);
4376 mod = (modrm >> 6) & 3;
4377 rm = modrm & 7;
4378 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4379 if (mod != 3) {
4380 /* memory op */
4381 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4382 switch(op) {
4383 case 0x00 ... 0x07: /* fxxxs */
4384 case 0x10 ... 0x17: /* fixxxl */
4385 case 0x20 ... 0x27: /* fxxxl */
4386 case 0x30 ... 0x37: /* fixxx */
4387 {
4388 int op1;
4389 op1 = op & 7;
4390
 /* load the memory operand into FT0 in the format selected
    by op bits 4-5, then apply the arithmetic op to ST0 */
4391 switch(op >> 4) {
4392 case 0:
4393 gen_op_flds_FT0_A0();
4394 break;
4395 case 1:
4396 gen_op_fildl_FT0_A0();
4397 break;
4398 case 2:
4399 gen_op_fldl_FT0_A0();
4400 break;
4401 case 3:
4402 default:
4403 gen_op_fild_FT0_A0();
4404 break;
4405 }
4406
4407 gen_op_fp_arith_ST0_FT0[op1]();
4408 if (op1 == 3) {
4409 /* fcomp needs pop */
4410 gen_op_fpop();
4411 }
4412 }
4413 break;
4414 case 0x08: /* flds */
4415 case 0x0a: /* fsts */
4416 case 0x0b: /* fstps */
4417 case 0x18 ... 0x1b: /* fildl, fisttpl, fistl, fistpl */
4418 case 0x28 ... 0x2b: /* fldl, fisttpll, fstl, fstpl */
4419 case 0x38 ... 0x3b: /* filds, fisttps, fists, fistps */
4420 switch(op & 7) {
4421 case 0:
 /* loads: push from memory in the format given by op>>4 */
4422 switch(op >> 4) {
4423 case 0:
4424 gen_op_flds_ST0_A0();
4425 break;
4426 case 1:
4427 gen_op_fildl_ST0_A0();
4428 break;
4429 case 2:
4430 gen_op_fldl_ST0_A0();
4431 break;
4432 case 3:
4433 default:
4434 gen_op_fild_ST0_A0();
4435 break;
4436 }
4437 break;
4438 case 1:
 /* fisttp variants: store with truncation, then pop (SSE3) */
4439 switch(op >> 4) {
4440 case 1:
4441 gen_op_fisttl_ST0_A0();
4442 break;
4443 case 2:
4444 gen_op_fisttll_ST0_A0();
4445 break;
4446 case 3:
4447 default:
4448 gen_op_fistt_ST0_A0();
4449 }
4450 gen_op_fpop();
4451 break;
4452 default:
 /* stores; (op & 7) == 3 are the popping fstp/fistp forms */
4453 switch(op >> 4) {
4454 case 0:
4455 gen_op_fsts_ST0_A0();
4456 break;
4457 case 1:
4458 gen_op_fistl_ST0_A0();
4459 break;
4460 case 2:
4461 gen_op_fstl_ST0_A0();
4462 break;
4463 case 3:
4464 default:
4465 gen_op_fist_ST0_A0();
4466 break;
4467 }
4468 if ((op & 7) == 3)
4469 gen_op_fpop();
4470 break;
4471 }
4472 break;
4473 case 0x0c: /* fldenv mem */
4474 gen_op_fldenv_A0(s->dflag);
4475 break;
4476 case 0x0d: /* fldcw mem */
4477 gen_op_fldcw_A0();
4478 break;
4479 case 0x0e: /* fnstenv mem */
4480 gen_op_fnstenv_A0(s->dflag);
4481 break;
4482 case 0x0f: /* fnstcw mem */
4483 gen_op_fnstcw_A0();
4484 break;
4485 case 0x1d: /* fldt mem */
4486 gen_op_fldt_ST0_A0();
4487 break;
4488 case 0x1f: /* fstpt mem */
4489 gen_op_fstt_ST0_A0();
4490 gen_op_fpop();
4491 break;
4492 case 0x2c: /* frstor mem */
4493 gen_op_frstor_A0(s->dflag);
4494 break;
4495 case 0x2e: /* fnsave mem */
4496 gen_op_fnsave_A0(s->dflag);
4497 break;
4498 case 0x2f: /* fnstsw mem */
4499 gen_op_fnstsw_A0();
4500 break;
4501 case 0x3c: /* fbld */
4502 gen_op_fbld_ST0_A0();
4503 break;
4504 case 0x3e: /* fbstp */
4505 gen_op_fbst_ST0_A0();
4506 gen_op_fpop();
4507 break;
4508 case 0x3d: /* fildll */
4509 gen_op_fildll_ST0_A0();
4510 break;
4511 case 0x3f: /* fistpll */
4512 gen_op_fistll_ST0_A0();
4513 gen_op_fpop();
4514 break;
4515 default:
4516 goto illegal_op;
4517 }
4518 } else {
4519 /* register float ops */
4520 opreg = rm; /* ST(i) index comes from the modrm r/m field */
4521
4522 switch(op) {
4523 case 0x08: /* fld sti */
4524 gen_op_fpush();
 /* +1 because the push just moved the stack top */
4525 gen_op_fmov_ST0_STN((opreg + 1) & 7);
4526 break;
4527 case 0x09: /* fxchg sti */
4528 case 0x29: /* fxchg4 sti, undocumented op */
4529 case 0x39: /* fxchg7 sti, undocumented op */
4530 gen_op_fxchg_ST0_STN(opreg);
4531 break;
4532 case 0x0a: /* grp d9/2 */
4533 switch(rm) {
4534 case 0: /* fnop */
4535 /* check exceptions (FreeBSD FPU probe) */
4536 if (s->cc_op != CC_OP_DYNAMIC)
4537 gen_op_set_cc_op(s->cc_op);
4538 gen_jmp_im(pc_start - s->cs_base);
4539 gen_op_fwait();
4540 break;
4541 default:
4542 goto illegal_op;
4543 }
4544 break;
4545 case 0x0c: /* grp d9/4 */
4546 switch(rm) {
4547 case 0: /* fchs */
4548 gen_op_fchs_ST0();
4549 break;
4550 case 1: /* fabs */
4551 gen_op_fabs_ST0();
4552 break;
4553 case 4: /* ftst */
4554 gen_op_fldz_FT0();
4555 gen_op_fcom_ST0_FT0();
4556 break;
4557 case 5: /* fxam */
4558 gen_op_fxam_ST0();
4559 break;
4560 default:
4561 goto illegal_op;
4562 }
4563 break;
4564 case 0x0d: /* grp d9/5 */
 /* constant loads: fld1, fldl2t, fldl2e, fldpi, fldlg2,
    fldln2, fldz */
4565 {
4566 switch(rm) {
4567 case 0:
4568 gen_op_fpush();
4569 gen_op_fld1_ST0();
4570 break;
4571 case 1:
4572 gen_op_fpush();
4573 gen_op_fldl2t_ST0();
4574 break;
4575 case 2:
4576 gen_op_fpush();
4577 gen_op_fldl2e_ST0();
4578 break;
4579 case 3:
4580 gen_op_fpush();
4581 gen_op_fldpi_ST0();
4582 break;
4583 case 4:
4584 gen_op_fpush();
4585 gen_op_fldlg2_ST0();
4586 break;
4587 case 5:
4588 gen_op_fpush();
4589 gen_op_fldln2_ST0();
4590 break;
4591 case 6:
4592 gen_op_fpush();
4593 gen_op_fldz_ST0();
4594 break;
4595 default:
4596 goto illegal_op;
4597 }
4598 }
4599 break;
4600 case 0x0e: /* grp d9/6 */
4601 switch(rm) {
4602 case 0: /* f2xm1 */
4603 gen_op_f2xm1();
4604 break;
4605 case 1: /* fyl2x */
4606 gen_op_fyl2x();
4607 break;
4608 case 2: /* fptan */
4609 gen_op_fptan();
4610 break;
4611 case 3: /* fpatan */
4612 gen_op_fpatan();
4613 break;
4614 case 4: /* fxtract */
4615 gen_op_fxtract();
4616 break;
4617 case 5: /* fprem1 */
4618 gen_op_fprem1();
4619 break;
4620 case 6: /* fdecstp */
4621 gen_op_fdecstp();
4622 break;
4623 default:
4624 case 7: /* fincstp */
4625 gen_op_fincstp();
4626 break;
4627 }
4628 break;
4629 case 0x0f: /* grp d9/7 */
4630 switch(rm) {
4631 case 0: /* fprem */
4632 gen_op_fprem();
4633 break;
4634 case 1: /* fyl2xp1 */
4635 gen_op_fyl2xp1();
4636 break;
4637 case 2: /* fsqrt */
4638 gen_op_fsqrt();
4639 break;
4640 case 3: /* fsincos */
4641 gen_op_fsincos();
4642 break;
4643 case 5: /* fscale */
4644 gen_op_fscale();
4645 break;
4646 case 4: /* frndint */
4647 gen_op_frndint();
4648 break;
4649 case 6: /* fsin */
4650 gen_op_fsin();
4651 break;
4652 default:
4653 case 7: /* fcos */
4654 gen_op_fcos();
4655 break;
4656 }
4657 break;
4658 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4659 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4660 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4661 {
4662 int op1;
4663
4664 op1 = op & 7;
4665 if (op >= 0x20) {
 /* reversed form writes ST(i); 0x30.. also pops */
4666 gen_op_fp_arith_STN_ST0[op1](opreg);
4667 if (op >= 0x30)
4668 gen_op_fpop();
4669 } else {
4670 gen_op_fmov_FT0_STN(opreg);
4671 gen_op_fp_arith_ST0_FT0[op1]();
4672 }
4673 }
4674 break;
4675 case 0x02: /* fcom */
4676 case 0x22: /* fcom2, undocumented op */
4677 gen_op_fmov_FT0_STN(opreg);
4678 gen_op_fcom_ST0_FT0();
4679 break;
4680 case 0x03: /* fcomp */
4681 case 0x23: /* fcomp3, undocumented op */
4682 case 0x32: /* fcomp5, undocumented op */
4683 gen_op_fmov_FT0_STN(opreg);
4684 gen_op_fcom_ST0_FT0();
4685 gen_op_fpop();
4686 break;
4687 case 0x15: /* da/5 */
4688 switch(rm) {
4689 case 1: /* fucompp */
4690 gen_op_fmov_FT0_STN(1);
4691 gen_op_fucom_ST0_FT0();
4692 gen_op_fpop();
4693 gen_op_fpop();
4694 break;
4695 default:
4696 goto illegal_op;
4697 }
4698 break;
4699 case 0x1c:
4700 switch(rm) {
4701 case 0: /* feni (287 only, just do nop here) */
4702 break;
4703 case 1: /* fdisi (287 only, just do nop here) */
4704 break;
4705 case 2: /* fclex */
4706 gen_op_fclex();
4707 break;
4708 case 3: /* fninit */
4709 gen_op_fninit();
4710 break;
4711 case 4: /* fsetpm (287 only, just do nop here) */
4712 break;
4713 default:
4714 goto illegal_op;
4715 }
4716 break;
4717 case 0x1d: /* fucomi */
 /* fcomi/fucomi write EFLAGS directly, so flush lazy flags */
4718 if (s->cc_op != CC_OP_DYNAMIC)
4719 gen_op_set_cc_op(s->cc_op);
4720 gen_op_fmov_FT0_STN(opreg);
4721 gen_op_fucomi_ST0_FT0();
4722 s->cc_op = CC_OP_EFLAGS;
4723 break;
4724 case 0x1e: /* fcomi */
4725 if (s->cc_op != CC_OP_DYNAMIC)
4726 gen_op_set_cc_op(s->cc_op);
4727 gen_op_fmov_FT0_STN(opreg);
4728 gen_op_fcomi_ST0_FT0();
4729 s->cc_op = CC_OP_EFLAGS;
4730 break;
4731 case 0x28: /* ffree sti */
4732 gen_op_ffree_STN(opreg);
4733 break;
4734 case 0x2a: /* fst sti */
4735 gen_op_fmov_STN_ST0(opreg);
4736 break;
4737 case 0x2b: /* fstp sti */
4738 case 0x0b: /* fstp1 sti, undocumented op */
4739 case 0x3a: /* fstp8 sti, undocumented op */
4740 case 0x3b: /* fstp9 sti, undocumented op */
4741 gen_op_fmov_STN_ST0(opreg);
4742 gen_op_fpop();
4743 break;
4744 case 0x2c: /* fucom st(i) */
4745 gen_op_fmov_FT0_STN(opreg);
4746 gen_op_fucom_ST0_FT0();
4747 break;
4748 case 0x2d: /* fucomp st(i) */
4749 gen_op_fmov_FT0_STN(opreg);
4750 gen_op_fucom_ST0_FT0();
4751 gen_op_fpop();
4752 break;
4753 case 0x33: /* de/3 */
4754 switch(rm) {
4755 case 1: /* fcompp */
4756 gen_op_fmov_FT0_STN(1);
4757 gen_op_fcom_ST0_FT0();
4758 gen_op_fpop();
4759 gen_op_fpop();
4760 break;
4761 default:
4762 goto illegal_op;
4763 }
4764 break;
4765 case 0x38: /* ffreep sti, undocumented op */
4766 gen_op_ffree_STN(opreg);
4767 gen_op_fpop();
4768 break;
4769 case 0x3c: /* df/4 */
4770 switch(rm) {
4771 case 0:
4772 gen_op_fnstsw_EAX();
4773 break;
4774 default:
4775 goto illegal_op;
4776 }
4777 break;
4778 case 0x3d: /* fucomip */
4779 if (s->cc_op != CC_OP_DYNAMIC)
4780 gen_op_set_cc_op(s->cc_op);
4781 gen_op_fmov_FT0_STN(opreg);
4782 gen_op_fucomi_ST0_FT0();
4783 gen_op_fpop();
4784 s->cc_op = CC_OP_EFLAGS;
4785 break;
4786 case 0x3e: /* fcomip */
4787 if (s->cc_op != CC_OP_DYNAMIC)
4788 gen_op_set_cc_op(s->cc_op);
4789 gen_op_fmov_FT0_STN(opreg);
4790 gen_op_fcomi_ST0_FT0();
4791 gen_op_fpop();
4792 s->cc_op = CC_OP_EFLAGS;
4793 break;
4794 case 0x10 ... 0x13: /* fcmovxx */
4795 case 0x18 ... 0x1b:
4796 {
4797 int op1;
 /* map the fcmov condition encoding onto the jcc codes;
    bit 0 of (op>>3) selects the negated condition */
4798 const static uint8_t fcmov_cc[8] = {
4799 (JCC_B << 1),
4800 (JCC_Z << 1),
4801 (JCC_BE << 1),
4802 (JCC_P << 1),
4803 };
4804 op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
4805 gen_setcc(s, op1);
4806 gen_op_fcmov_ST0_STN_T0(opreg);
4807 }
4808 break;
4809 default:
4810 goto illegal_op;
4811 }
4812 }
4813#ifdef USE_CODE_COPY
4814 s->tb->cflags |= CF_TB_FP_USED;
4815#endif
4816 break;
4817 /************************/
4818 /* string ops */
 /* each string op either expands inline or goes through the
    REP/REPZ/REPNZ loop generators depending on the prefixes */
4819
4820 case 0xa4: /* movsS */
4821 case 0xa5:
4822 if ((b & 1) == 0)
4823 ot = OT_BYTE;
4824 else
4825 ot = dflag + OT_WORD;
4826
4827 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4828 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4829 } else {
4830 gen_movs(s, ot);
4831 }
4832 break;
4833
4834 case 0xaa: /* stosS */
4835 case 0xab:
4836 if ((b & 1) == 0)
4837 ot = OT_BYTE;
4838 else
4839 ot = dflag + OT_WORD;
4840
4841 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4842 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4843 } else {
4844 gen_stos(s, ot);
4845 }
4846 break;
4847 case 0xac: /* lodsS */
4848 case 0xad:
4849 if ((b & 1) == 0)
4850 ot = OT_BYTE;
4851 else
4852 ot = dflag + OT_WORD;
4853 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4854 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4855 } else {
4856 gen_lods(s, ot);
4857 }
4858 break;
4859 case 0xae: /* scasS */
4860 case 0xaf:
4861 if ((b & 1) == 0)
4862 ot = OT_BYTE;
4863 else
4864 ot = dflag + OT_WORD;
 /* scas/cmps distinguish REPNZ (repeat while not equal) from
    REPZ (repeat while equal) via the extra flag argument */
4865 if (prefixes & PREFIX_REPNZ) {
4866 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4867 } else if (prefixes & PREFIX_REPZ) {
4868 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4869 } else {
4870 gen_scas(s, ot);
4871 s->cc_op = CC_OP_SUBB + ot;
4872 }
4873 break;
4874
4875 case 0xa6: /* cmpsS */
4876 case 0xa7:
4877 if ((b & 1) == 0)
4878 ot = OT_BYTE;
4879 else
4880 ot = dflag + OT_WORD;
4881 if (prefixes & PREFIX_REPNZ) {
4882 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4883 } else if (prefixes & PREFIX_REPZ) {
4884 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4885 } else {
4886 gen_cmps(s, ot);
4887 s->cc_op = CC_OP_SUBB + ot;
4888 }
4889 break;
4890 case 0x6c: /* insS */
4891 case 0x6d:
4892 if ((b & 1) == 0)
4893 ot = OT_BYTE;
4894 else
4895 ot = dflag ? OT_LONG : OT_WORD; /* I/O never uses 64-bit operands */
4896 gen_check_io(s, ot, 1, pc_start - s->cs_base);
4897 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4898 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4899 } else {
4900 gen_ins(s, ot);
4901 }
4902 break;
4903 case 0x6e: /* outsS */
4904 case 0x6f:
4905 if ((b & 1) == 0)
4906 ot = OT_BYTE;
4907 else
4908 ot = dflag ? OT_LONG : OT_WORD;
4909 gen_check_io(s, ot, 1, pc_start - s->cs_base);
4910 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4911 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4912 } else {
4913 gen_outs(s, ot);
4914 }
4915 break;
4916
4917 /************************/
4918 /* port I/O */
4919 case 0xe4:
4920 case 0xe5:
 /* IN al/ax/eax, imm8 */
4921 if ((b & 1) == 0)
4922 ot = OT_BYTE;
4923 else
4924 ot = dflag ? OT_LONG : OT_WORD;
4925 val = ldub_code(s->pc++);
4926 gen_op_movl_T0_im(val);
4927 gen_check_io(s, ot, 0, pc_start - s->cs_base); /* IOPL/TSS permission check */
4928 gen_op_in[ot]();
4929 gen_op_mov_reg_T1[ot][R_EAX]();
4930 break;
4931 case 0xe6:
4932 case 0xe7:
 /* OUT imm8, al/ax/eax */
4933 if ((b & 1) == 0)
4934 ot = OT_BYTE;
4935 else
4936 ot = dflag ? OT_LONG : OT_WORD;
4937 val = ldub_code(s->pc++);
4938 gen_op_movl_T0_im(val);
4939 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4940#ifdef VBOX /* bird: linux is writing to this port for delaying I/O. */
4941 if (val == 0x80)
4942 break;
4943#endif /* VBOX */
4944 gen_op_mov_TN_reg[ot][1][R_EAX]();
4945 gen_op_out[ot]();
4946 break;
4947 case 0xec:
4948 case 0xed:
 /* IN al/ax/eax, dx — port number in DX, masked to 16 bits */
4949 if ((b & 1) == 0)
4950 ot = OT_BYTE;
4951 else
4952 ot = dflag ? OT_LONG : OT_WORD;
4953 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
4954 gen_op_andl_T0_ffff();
4955 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4956 gen_op_in[ot]();
4957 gen_op_mov_reg_T1[ot][R_EAX]();
4958 break;
4959 case 0xee:
4960 case 0xef:
 /* OUT dx, al/ax/eax */
4961 if ((b & 1) == 0)
4962 ot = OT_BYTE;
4963 else
4964 ot = dflag ? OT_LONG : OT_WORD;
4965 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
4966 gen_op_andl_T0_ffff();
4967 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4968 gen_op_mov_TN_reg[ot][1][R_EAX]();
4969 gen_op_out[ot]();
4970 break;
4971
4972 /************************/
4973 /* control */
4974 case 0xc2: /* ret im */
4975 val = ldsw_code(s->pc);
4976 s->pc += 2;
4977 gen_pop_T0(s);
 /* near RET in 64-bit mode pops a 64-bit return address */
4978 if (CODE64(s) && s->dflag)
4979 s->dflag = 2;
4980 gen_stack_update(s, val + (2 << s->dflag)); /* discard imm16 extra bytes */
4981 if (s->dflag == 0)
4982 gen_op_andl_T0_ffff();
4983 gen_op_jmp_T0();
4984 gen_eob(s);
4985 break;
4986 case 0xc3: /* ret */
4987 gen_pop_T0(s);
4988 gen_pop_update(s);
4989 if (s->dflag == 0)
4990 gen_op_andl_T0_ffff();
4991 gen_op_jmp_T0();
4992 gen_eob(s);
4993 break;
4994 case 0xca: /* lret im */
4995 val = ldsw_code(s->pc);
4996 s->pc += 2;
4997 do_lret:
4998 if (s->pe && !s->vm86) {
 /* protected mode far return: helper handles privilege checks */
4999 if (s->cc_op != CC_OP_DYNAMIC)
5000 gen_op_set_cc_op(s->cc_op);
5001 gen_jmp_im(pc_start - s->cs_base);
5002 gen_op_lret_protected(s->dflag, val);
5003 } else {
 /* real/vm86 mode: pop EIP then CS manually */
5004 gen_stack_A0(s);
5005 /* pop offset */
5006 gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
5007 if (s->dflag == 0)
5008 gen_op_andl_T0_ffff();
5009 /* NOTE: keeping EIP updated is not a problem in case of
5010 exception */
5011 gen_op_jmp_T0();
5012 /* pop selector */
5013 gen_op_addl_A0_im(2 << s->dflag);
5014 gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
5015 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
5016 /* add stack offset */
5017 gen_stack_update(s, val + (4 << s->dflag));
5018 }
5019 gen_eob(s);
5020 break;
5021 case 0xcb: /* lret */
5022 val = 0;
5023 goto do_lret;
5024 case 0xcf: /* iret */
5025 if (!s->pe) {
5026 /* real mode */
5027 gen_op_iret_real(s->dflag);
5028 s->cc_op = CC_OP_EFLAGS;
5029 } else if (s->vm86) {
 /* vm86 with IOPL != 3 faults; otherwise behaves like real mode */
5030 if (s->iopl != 3) {
5031 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5032 } else {
5033 gen_op_iret_real(s->dflag);
5034 s->cc_op = CC_OP_EFLAGS;
5035 }
5036 } else {
5037 if (s->cc_op != CC_OP_DYNAMIC)
5038 gen_op_set_cc_op(s->cc_op);
5039 gen_jmp_im(pc_start - s->cs_base);
5040 gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
5041 s->cc_op = CC_OP_EFLAGS;
5042 }
5043 gen_eob(s);
5044 break;
5045 case 0xe8: /* call im */
5046 {
 /* relative near call: displacement added to the next-insn EIP */
5047 if (dflag)
5048 tval = (int32_t)insn_get(s, OT_LONG);
5049 else
5050 tval = (int16_t)insn_get(s, OT_WORD);
5051 next_eip = s->pc - s->cs_base;
5052 tval += next_eip;
5053 if (s->dflag == 0)
5054 tval &= 0xffff;
5055 gen_movtl_T0_im(next_eip);
5056 gen_push_T0(s); /* push return address */
5057 gen_jmp(s, tval);
5058 }
5059 break;
5060 case 0x9a: /* lcall im */
5061 {
5062 unsigned int selector, offset;
5063
5064 if (CODE64(s))
5065 goto illegal_op; /* direct far call has no 64-bit form */
5066 ot = dflag ? OT_LONG : OT_WORD;
5067 offset = insn_get(s, ot);
5068 selector = insn_get(s, OT_WORD);
5069
5070 gen_op_movl_T0_im(selector);
5071 gen_op_movl_T1_imu(offset);
5072 }
5073 goto do_lcall;
5074 case 0xe9: /* jmp im */
5075 if (dflag)
5076 tval = (int32_t)insn_get(s, OT_LONG);
5077 else
5078 tval = (int16_t)insn_get(s, OT_WORD);
5079 tval += s->pc - s->cs_base; /* target relative to next instruction */
5080 if (s->dflag == 0)
5081 tval &= 0xffff;
5082 gen_jmp(s, tval);
5083 break;
5084 case 0xea: /* ljmp im */
5085 {
5086 unsigned int selector, offset;
5087
5088 if (CODE64(s))
5089 goto illegal_op;
5090 ot = dflag ? OT_LONG : OT_WORD;
5091 offset = insn_get(s, ot);
5092 selector = insn_get(s, OT_WORD);
5093
5094 gen_op_movl_T0_im(selector);
5095 gen_op_movl_T1_imu(offset);
5096 }
5097 goto do_ljmp;
5098 case 0xeb: /* jmp Jb */
5099 tval = (int8_t)insn_get(s, OT_BYTE);
5100 tval += s->pc - s->cs_base;
5101 if (s->dflag == 0)
5102 tval &= 0xffff;
5103 gen_jmp(s, tval);
5104 break;
5105 case 0x70 ... 0x7f: /* jcc Jb */
5106 tval = (int8_t)insn_get(s, OT_BYTE);
5107 goto do_jcc;
5108 case 0x180 ... 0x18f: /* jcc Jv */
5109 if (dflag) {
5110 tval = (int32_t)insn_get(s, OT_LONG);
5111 } else {
5112 tval = (int16_t)insn_get(s, OT_WORD);
5113 }
5114 do_jcc:
5115 next_eip = s->pc - s->cs_base;
5116 tval += next_eip;
5117 if (s->dflag == 0)
5118 tval &= 0xffff;
5119 gen_jcc(s, b, tval, next_eip); /* condition decoded from opcode b */
5120 break;
5121
5122 case 0x190 ... 0x19f: /* setcc Gv */
5123 modrm = ldub_code(s->pc++);
5124 gen_setcc(s, b);
5125 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5126 break;
5127 case 0x140 ... 0x14f: /* cmov Gv, Ev */
5128 ot = dflag + OT_WORD;
5129 modrm = ldub_code(s->pc++);
5130 reg = ((modrm >> 3) & 7) | rex_r;
5131 mod = (modrm >> 6) & 3;
5132 gen_setcc(s, b);
5133 if (mod != 3) {
5134 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5135 gen_op_ld_T1_A0[ot + s->mem_index]();
5136 } else {
5137 rm = (modrm & 7) | REX_B(s);
5138 gen_op_mov_TN_reg[ot][1][rm]();
5139 }
5140 gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5141 break;
5142
5143 /************************/
5144 /* flags */
5145 case 0x9c: /* pushf */
5146 if (s->vm86 && s->iopl != 3) {
5147 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5148 } else {
5149 if (s->cc_op != CC_OP_DYNAMIC)
5150 gen_op_set_cc_op(s->cc_op);
5151 gen_op_movl_T0_eflags();
5152 gen_push_T0(s);
5153 }
5154 break;
5155 case 0x9d: /* popf */
5156 if (s->vm86 && s->iopl != 3) {
5157 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5158 } else {
5159 gen_pop_T0(s);
5160 if (s->cpl == 0) {
5161 if (s->dflag) {
5162 gen_op_movl_eflags_T0_cpl0();
5163 } else {
5164 gen_op_movw_eflags_T0_cpl0();
5165 }
5166 } else {
5167 if (s->cpl <= s->iopl) {
5168 if (s->dflag) {
5169 gen_op_movl_eflags_T0_io();
5170 } else {
5171 gen_op_movw_eflags_T0_io();
5172 }
5173 } else {
5174 if (s->dflag) {
5175 gen_op_movl_eflags_T0();
5176 } else {
5177 gen_op_movw_eflags_T0();
5178 }
5179 }
5180 }
5181 gen_pop_update(s);
5182 s->cc_op = CC_OP_EFLAGS;
5183 /* abort translation because TF flag may change */
5184 gen_jmp_im(s->pc - s->cs_base);
5185 gen_eob(s);
5186 }
5187 break;
5188 case 0x9e: /* sahf */
5189 if (CODE64(s))
5190 goto illegal_op;
5191 gen_op_mov_TN_reg[OT_BYTE][0][R_AH]();
5192 if (s->cc_op != CC_OP_DYNAMIC)
5193 gen_op_set_cc_op(s->cc_op);
5194 gen_op_movb_eflags_T0();
5195 s->cc_op = CC_OP_EFLAGS;
5196 break;
5197 case 0x9f: /* lahf */
5198 if (CODE64(s))
5199 goto illegal_op;
5200 if (s->cc_op != CC_OP_DYNAMIC)
5201 gen_op_set_cc_op(s->cc_op);
5202 gen_op_movl_T0_eflags();
5203 gen_op_mov_reg_T0[OT_BYTE][R_AH]();
5204 break;
5205 case 0xf5: /* cmc */
5206 if (s->cc_op != CC_OP_DYNAMIC)
5207 gen_op_set_cc_op(s->cc_op);
5208 gen_op_cmc();
5209 s->cc_op = CC_OP_EFLAGS;
5210 break;
5211 case 0xf8: /* clc */
5212 if (s->cc_op != CC_OP_DYNAMIC)
5213 gen_op_set_cc_op(s->cc_op);
5214 gen_op_clc();
5215 s->cc_op = CC_OP_EFLAGS;
5216 break;
5217 case 0xf9: /* stc */
5218 if (s->cc_op != CC_OP_DYNAMIC)
5219 gen_op_set_cc_op(s->cc_op);
5220 gen_op_stc();
5221 s->cc_op = CC_OP_EFLAGS;
5222 break;
5223 case 0xfc: /* cld */
5224 gen_op_cld();
5225 break;
5226 case 0xfd: /* std */
5227 gen_op_std();
5228 break;
5229
5230 /************************/
5231 /* bit operations */
5232 case 0x1ba: /* bt/bts/btr/btc Gv, im */
5233 ot = dflag + OT_WORD;
5234 modrm = ldub_code(s->pc++);
5235 op = (modrm >> 3) & 7;
5236 mod = (modrm >> 6) & 3;
5237 rm = (modrm & 7) | REX_B(s);
5238 if (mod != 3) {
5239 s->rip_offset = 1;
5240 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5241 gen_op_ld_T0_A0[ot + s->mem_index]();
5242 } else {
5243 gen_op_mov_TN_reg[ot][0][rm]();
5244 }
5245 /* load shift */
5246 val = ldub_code(s->pc++);
5247 gen_op_movl_T1_im(val);
5248 if (op < 4)
5249 goto illegal_op;
5250 op -= 4;
5251 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5252 s->cc_op = CC_OP_SARB + ot;
5253 if (op != 0) {
5254 if (mod != 3)
5255 gen_op_st_T0_A0[ot + s->mem_index]();
5256 else
5257 gen_op_mov_reg_T0[ot][rm]();
5258 gen_op_update_bt_cc();
5259 }
5260 break;
5261 case 0x1a3: /* bt Gv, Ev */
5262 op = 0;
5263 goto do_btx;
5264 case 0x1ab: /* bts */
5265 op = 1;
5266 goto do_btx;
5267 case 0x1b3: /* btr */
5268 op = 2;
5269 goto do_btx;
5270 case 0x1bb: /* btc */
5271 op = 3;
5272 do_btx:
5273 ot = dflag + OT_WORD;
5274 modrm = ldub_code(s->pc++);
5275 reg = ((modrm >> 3) & 7) | rex_r;
5276 mod = (modrm >> 6) & 3;
5277 rm = (modrm & 7) | REX_B(s);
5278 gen_op_mov_TN_reg[OT_LONG][1][reg]();
5279 if (mod != 3) {
5280 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5281 /* specific case: we need to add a displacement */
5282 gen_op_add_bit_A0_T1[ot - OT_WORD]();
5283 gen_op_ld_T0_A0[ot + s->mem_index]();
5284 } else {
5285 gen_op_mov_TN_reg[ot][0][rm]();
5286 }
5287 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5288 s->cc_op = CC_OP_SARB + ot;
5289 if (op != 0) {
5290 if (mod != 3)
5291 gen_op_st_T0_A0[ot + s->mem_index]();
5292 else
5293 gen_op_mov_reg_T0[ot][rm]();
5294 gen_op_update_bt_cc();
5295 }
5296 break;
5297 case 0x1bc: /* bsf */
5298 case 0x1bd: /* bsr */
5299 ot = dflag + OT_WORD;
5300 modrm = ldub_code(s->pc++);
5301 reg = ((modrm >> 3) & 7) | rex_r;
5302 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5303 /* NOTE: in order to handle the 0 case, we must load the
5304 result. It could be optimized with a generated jump */
5305 gen_op_mov_TN_reg[ot][1][reg]();
5306 gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5307 gen_op_mov_reg_T1[ot][reg]();
5308 s->cc_op = CC_OP_LOGICB + ot;
5309 break;
5310 /************************/
5311 /* bcd */
5312 case 0x27: /* daa */
5313 if (CODE64(s))
5314 goto illegal_op;
5315 if (s->cc_op != CC_OP_DYNAMIC)
5316 gen_op_set_cc_op(s->cc_op);
5317 gen_op_daa();
5318 s->cc_op = CC_OP_EFLAGS;
5319 break;
5320 case 0x2f: /* das */
5321 if (CODE64(s))
5322 goto illegal_op;
5323 if (s->cc_op != CC_OP_DYNAMIC)
5324 gen_op_set_cc_op(s->cc_op);
5325 gen_op_das();
5326 s->cc_op = CC_OP_EFLAGS;
5327 break;
5328 case 0x37: /* aaa */
5329 if (CODE64(s))
5330 goto illegal_op;
5331 if (s->cc_op != CC_OP_DYNAMIC)
5332 gen_op_set_cc_op(s->cc_op);
5333 gen_op_aaa();
5334 s->cc_op = CC_OP_EFLAGS;
5335 break;
5336 case 0x3f: /* aas */
5337 if (CODE64(s))
5338 goto illegal_op;
5339 if (s->cc_op != CC_OP_DYNAMIC)
5340 gen_op_set_cc_op(s->cc_op);
5341 gen_op_aas();
5342 s->cc_op = CC_OP_EFLAGS;
5343 break;
5344 case 0xd4: /* aam */
5345 if (CODE64(s))
5346 goto illegal_op;
5347 val = ldub_code(s->pc++);
5348 gen_op_aam(val);
5349 s->cc_op = CC_OP_LOGICB;
5350 break;
5351 case 0xd5: /* aad */
5352 if (CODE64(s))
5353 goto illegal_op;
5354 val = ldub_code(s->pc++);
5355 gen_op_aad(val);
5356 s->cc_op = CC_OP_LOGICB;
5357 break;
5358 /************************/
5359 /* misc */
5360 case 0x90: /* nop */
5361 /* XXX: xchg + rex handling */
5362 /* XXX: correct lock test for all insn */
5363 if (prefixes & PREFIX_LOCK)
5364 goto illegal_op;
5365 break;
5366 case 0x9b: /* fwait */
5367 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5368 (HF_MP_MASK | HF_TS_MASK)) {
5369 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5370 } else {
5371 if (s->cc_op != CC_OP_DYNAMIC)
5372 gen_op_set_cc_op(s->cc_op);
5373 gen_jmp_im(pc_start - s->cs_base);
5374 gen_op_fwait();
5375 }
5376 break;
5377 case 0xcc: /* int3 */
5378 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5379 break;
5380 case 0xcd: /* int N */
5381 val = ldub_code(s->pc++);
5382 if (s->vm86 && s->iopl != 3) {
5383 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5384 } else {
5385 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5386 }
5387 break;
5388 case 0xce: /* into */
5389 if (CODE64(s))
5390 goto illegal_op;
5391 if (s->cc_op != CC_OP_DYNAMIC)
5392 gen_op_set_cc_op(s->cc_op);
5393 gen_jmp_im(pc_start - s->cs_base);
5394 gen_op_into(s->pc - pc_start);
5395 break;
5396 case 0xf1: /* icebp (undocumented, exits to external debugger) */
5397#if 1
5398 gen_debug(s, pc_start - s->cs_base);
5399#else
5400 /* start debug */
5401 tb_flush(cpu_single_env);
5402 cpu_set_log(CPU_LOG_INT | CPU_LOG_TB_IN_ASM);
5403#endif
5404 break;
5405 case 0xfa: /* cli */
5406 if (!s->vm86) {
5407 if (s->cpl <= s->iopl) {
5408 gen_op_cli();
5409 } else {
5410 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5411 }
5412 } else {
5413 if (s->iopl == 3) {
5414 gen_op_cli();
5415 } else {
5416 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5417 }
5418 }
5419 break;
5420 case 0xfb: /* sti */
5421 if (!s->vm86) {
5422 if (s->cpl <= s->iopl) {
5423 gen_sti:
5424 gen_op_sti();
5425 /* interruptions are enabled only the first insn after sti */
5426 /* If several instructions disable interrupts, only the
5427 _first_ does it */
5428 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5429 gen_op_set_inhibit_irq();
5430 /* give a chance to handle pending irqs */
5431 gen_jmp_im(s->pc - s->cs_base);
5432 gen_eob(s);
5433 } else {
5434 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5435 }
5436 } else {
5437 if (s->iopl == 3) {
5438 goto gen_sti;
5439 } else {
5440 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5441 }
5442 }
5443 break;
5444 case 0x62: /* bound */
5445 if (CODE64(s))
5446 goto illegal_op;
5447 ot = dflag ? OT_LONG : OT_WORD;
5448 modrm = ldub_code(s->pc++);
5449 reg = (modrm >> 3) & 7;
5450 mod = (modrm >> 6) & 3;
5451 if (mod == 3)
5452 goto illegal_op;
5453 gen_op_mov_TN_reg[ot][0][reg]();
5454 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5455 gen_jmp_im(pc_start - s->cs_base);
5456 if (ot == OT_WORD)
5457 gen_op_boundw();
5458 else
5459 gen_op_boundl();
5460 break;
5461 case 0x1c8 ... 0x1cf: /* bswap reg */
5462 reg = (b & 7) | REX_B(s);
5463#ifdef TARGET_X86_64
5464 if (dflag == 2) {
5465 gen_op_mov_TN_reg[OT_QUAD][0][reg]();
5466 gen_op_bswapq_T0();
5467 gen_op_mov_reg_T0[OT_QUAD][reg]();
5468 } else
5469#endif
5470 {
5471 gen_op_mov_TN_reg[OT_LONG][0][reg]();
5472 gen_op_bswapl_T0();
5473 gen_op_mov_reg_T0[OT_LONG][reg]();
5474 }
5475 break;
5476 case 0xd6: /* salc */
5477 if (CODE64(s))
5478 goto illegal_op;
5479 if (s->cc_op != CC_OP_DYNAMIC)
5480 gen_op_set_cc_op(s->cc_op);
5481 gen_op_salc();
5482 break;
5483 case 0xe0: /* loopnz */
5484 case 0xe1: /* loopz */
5485 if (s->cc_op != CC_OP_DYNAMIC)
5486 gen_op_set_cc_op(s->cc_op);
5487 /* FALL THRU */
5488 case 0xe2: /* loop */
5489 case 0xe3: /* jecxz */
5490 {
5491 int l1, l2;
5492
5493 tval = (int8_t)insn_get(s, OT_BYTE);
5494 next_eip = s->pc - s->cs_base;
5495 tval += next_eip;
5496 if (s->dflag == 0)
5497 tval &= 0xffff;
5498
5499 l1 = gen_new_label();
5500 l2 = gen_new_label();
5501 b &= 3;
5502 if (b == 3) {
5503 gen_op_jz_ecx[s->aflag](l1);
5504 } else {
5505 gen_op_dec_ECX[s->aflag]();
5506 if (b <= 1)
5507 gen_op_mov_T0_cc();
5508 gen_op_loop[s->aflag][b](l1);
5509 }
5510
5511 gen_jmp_im(next_eip);
5512 gen_op_jmp_label(l2);
5513 gen_set_label(l1);
5514 gen_jmp_im(tval);
5515 gen_set_label(l2);
5516 gen_eob(s);
5517 }
5518 break;
5519 case 0x130: /* wrmsr */
5520 case 0x132: /* rdmsr */
5521 if (s->cpl != 0) {
5522 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5523 } else {
5524 if (b & 2)
5525 gen_op_rdmsr();
5526 else
5527 gen_op_wrmsr();
5528 }
5529 break;
5530 case 0x131: /* rdtsc */
5531 gen_jmp_im(pc_start - s->cs_base);
5532 gen_op_rdtsc();
5533 break;
5534 case 0x134: /* sysenter */
5535 if (CODE64(s))
5536 goto illegal_op;
5537 if (!s->pe) {
5538 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5539 } else {
5540 if (s->cc_op != CC_OP_DYNAMIC) {
5541 gen_op_set_cc_op(s->cc_op);
5542 s->cc_op = CC_OP_DYNAMIC;
5543 }
5544 gen_jmp_im(pc_start - s->cs_base);
5545 gen_op_sysenter();
5546 gen_eob(s);
5547 }
5548 break;
5549 case 0x135: /* sysexit */
5550 if (CODE64(s))
5551 goto illegal_op;
5552 if (!s->pe) {
5553 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5554 } else {
5555 if (s->cc_op != CC_OP_DYNAMIC) {
5556 gen_op_set_cc_op(s->cc_op);
5557 s->cc_op = CC_OP_DYNAMIC;
5558 }
5559 gen_jmp_im(pc_start - s->cs_base);
5560 gen_op_sysexit();
5561 gen_eob(s);
5562 }
5563 break;
5564#ifdef TARGET_X86_64
5565 case 0x105: /* syscall */
5566 /* XXX: is it usable in real mode ? */
5567 if (s->cc_op != CC_OP_DYNAMIC) {
5568 gen_op_set_cc_op(s->cc_op);
5569 s->cc_op = CC_OP_DYNAMIC;
5570 }
5571 gen_jmp_im(pc_start - s->cs_base);
5572 gen_op_syscall(s->pc - pc_start);
5573 gen_eob(s);
5574 break;
5575 case 0x107: /* sysret */
5576 if (!s->pe) {
5577 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5578 } else {
5579 if (s->cc_op != CC_OP_DYNAMIC) {
5580 gen_op_set_cc_op(s->cc_op);
5581 s->cc_op = CC_OP_DYNAMIC;
5582 }
5583 gen_jmp_im(pc_start - s->cs_base);
5584 gen_op_sysret(s->dflag);
5585 /* condition codes are modified only in long mode */
5586 if (s->lma)
5587 s->cc_op = CC_OP_EFLAGS;
5588 gen_eob(s);
5589 }
5590 break;
5591#endif
5592 case 0x1a2: /* cpuid */
5593 gen_op_cpuid();
5594 break;
5595 case 0xf4: /* hlt */
5596 if (s->cpl != 0) {
5597 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5598 } else {
5599 if (s->cc_op != CC_OP_DYNAMIC)
5600 gen_op_set_cc_op(s->cc_op);
5601 gen_jmp_im(s->pc - s->cs_base);
5602 gen_op_hlt();
5603 s->is_jmp = 3;
5604 }
5605 break;
5606 case 0x100:
5607 modrm = ldub_code(s->pc++);
5608 mod = (modrm >> 6) & 3;
5609 op = (modrm >> 3) & 7;
5610 switch(op) {
5611 case 0: /* sldt */
5612 if (!s->pe || s->vm86)
5613 goto illegal_op;
5614 gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
5615 ot = OT_WORD;
5616 if (mod == 3)
5617 ot += s->dflag;
5618 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5619 break;
5620 case 2: /* lldt */
5621 if (!s->pe || s->vm86)
5622 goto illegal_op;
5623 if (s->cpl != 0) {
5624 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5625 } else {
5626 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5627 gen_jmp_im(pc_start - s->cs_base);
5628 gen_op_lldt_T0();
5629 }
5630 break;
5631 case 1: /* str */
5632 if (!s->pe || s->vm86)
5633 goto illegal_op;
5634 gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
5635 ot = OT_WORD;
5636 if (mod == 3)
5637 ot += s->dflag;
5638 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5639 break;
5640 case 3: /* ltr */
5641 if (!s->pe || s->vm86)
5642 goto illegal_op;
5643 if (s->cpl != 0) {
5644 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5645 } else {
5646 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5647 gen_jmp_im(pc_start - s->cs_base);
5648 gen_op_ltr_T0();
5649 }
5650 break;
5651 case 4: /* verr */
5652 case 5: /* verw */
5653 if (!s->pe || s->vm86)
5654 goto illegal_op;
5655 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5656 if (s->cc_op != CC_OP_DYNAMIC)
5657 gen_op_set_cc_op(s->cc_op);
5658 if (op == 4)
5659 gen_op_verr();
5660 else
5661 gen_op_verw();
5662 s->cc_op = CC_OP_EFLAGS;
5663 break;
5664 default:
5665 goto illegal_op;
5666 }
5667 break;
5668 case 0x101:
5669 modrm = ldub_code(s->pc++);
5670 mod = (modrm >> 6) & 3;
5671 op = (modrm >> 3) & 7;
5672 rm = modrm & 7;
5673 switch(op) {
5674 case 0: /* sgdt */
5675 if (mod == 3)
5676 goto illegal_op;
5677 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5678 gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
5679 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
5680 gen_add_A0_im(s, 2);
5681 gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
5682 if (!s->dflag)
5683 gen_op_andl_T0_im(0xffffff);
5684 gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5685 break;
5686 case 1:
5687 if (mod == 3) {
5688 switch (rm) {
5689 case 0: /* monitor */
5690 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5691 s->cpl != 0)
5692 goto illegal_op;
5693 gen_jmp_im(pc_start - s->cs_base);
5694#ifdef TARGET_X86_64
5695 if (s->aflag == 2) {
5696 gen_op_movq_A0_reg[R_EBX]();
5697 gen_op_addq_A0_AL();
5698 } else
5699#endif
5700 {
5701 gen_op_movl_A0_reg[R_EBX]();
5702 gen_op_addl_A0_AL();
5703 if (s->aflag == 0)
5704 gen_op_andl_A0_ffff();
5705 }
5706 gen_add_A0_ds_seg(s);
5707 gen_op_monitor();
5708 break;
5709 case 1: /* mwait */
5710 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5711 s->cpl != 0)
5712 goto illegal_op;
5713 if (s->cc_op != CC_OP_DYNAMIC) {
5714 gen_op_set_cc_op(s->cc_op);
5715 s->cc_op = CC_OP_DYNAMIC;
5716 }
5717 gen_jmp_im(s->pc - s->cs_base);
5718 gen_op_mwait();
5719 gen_eob(s);
5720 break;
5721 default:
5722 goto illegal_op;
5723 }
5724 } else { /* sidt */
5725 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5726 gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
5727 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
5728 gen_add_A0_im(s, 2);
5729 gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
5730 if (!s->dflag)
5731 gen_op_andl_T0_im(0xffffff);
5732 gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5733 }
5734 break;
5735 case 2: /* lgdt */
5736 case 3: /* lidt */
5737 if (mod == 3)
5738 goto illegal_op;
5739 if (s->cpl != 0) {
5740 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5741 } else {
5742 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5743 gen_op_ld_T1_A0[OT_WORD + s->mem_index]();
5744 gen_add_A0_im(s, 2);
5745 gen_op_ld_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5746 if (!s->dflag)
5747 gen_op_andl_T0_im(0xffffff);
5748 if (op == 2) {
5749 gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
5750 gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
5751 } else {
5752 gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
5753 gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
5754 }
5755 }
5756 break;
5757 case 4: /* smsw */
5758 gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
5759 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
5760 break;
5761 case 6: /* lmsw */
5762 if (s->cpl != 0) {
5763 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5764 } else {
5765 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5766 gen_op_lmsw_T0();
5767 gen_jmp_im(s->pc - s->cs_base);
5768 gen_eob(s);
5769 }
5770 break;
5771 case 7: /* invlpg */
5772 if (s->cpl != 0) {
5773 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5774 } else {
5775 if (mod == 3) {
5776#ifdef TARGET_X86_64
5777 if (CODE64(s) && rm == 0) {
5778 /* swapgs */
5779 gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
5780 gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
5781 gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
5782 gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
5783 } else
5784#endif
5785 {
5786 goto illegal_op;
5787 }
5788 } else {
5789 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5790 gen_op_invlpg_A0();
5791 gen_jmp_im(s->pc - s->cs_base);
5792 gen_eob(s);
5793 }
5794 }
5795 break;
5796 default:
5797 goto illegal_op;
5798 }
5799 break;
5800 case 0x108: /* invd */
5801 case 0x109: /* wbinvd */
5802 if (s->cpl != 0) {
5803 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5804 } else {
5805 /* nothing to do */
5806 }
5807 break;
5808 case 0x63: /* arpl or movslS (x86_64) */
5809#ifdef TARGET_X86_64
5810 if (CODE64(s)) {
5811 int d_ot;
5812 /* d_ot is the size of destination */
5813 d_ot = dflag + OT_WORD;
5814
5815 modrm = ldub_code(s->pc++);
5816 reg = ((modrm >> 3) & 7) | rex_r;
5817 mod = (modrm >> 6) & 3;
5818 rm = (modrm & 7) | REX_B(s);
5819
5820 if (mod == 3) {
5821 gen_op_mov_TN_reg[OT_LONG][0][rm]();
5822 /* sign extend */
5823 if (d_ot == OT_QUAD)
5824 gen_op_movslq_T0_T0();
5825 gen_op_mov_reg_T0[d_ot][reg]();
5826 } else {
5827 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5828 if (d_ot == OT_QUAD) {
5829 gen_op_lds_T0_A0[OT_LONG + s->mem_index]();
5830 } else {
5831 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
5832 }
5833 gen_op_mov_reg_T0[d_ot][reg]();
5834 }
5835 } else
5836#endif
5837 {
5838 if (!s->pe || s->vm86)
5839 goto illegal_op;
5840 ot = dflag ? OT_LONG : OT_WORD;
5841 modrm = ldub_code(s->pc++);
5842 reg = (modrm >> 3) & 7;
5843 mod = (modrm >> 6) & 3;
5844 rm = modrm & 7;
5845 if (mod != 3) {
5846 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5847 gen_op_ld_T0_A0[ot + s->mem_index]();
5848 } else {
5849 gen_op_mov_TN_reg[ot][0][rm]();
5850 }
5851 if (s->cc_op != CC_OP_DYNAMIC)
5852 gen_op_set_cc_op(s->cc_op);
5853 gen_op_arpl();
5854 s->cc_op = CC_OP_EFLAGS;
5855 if (mod != 3) {
5856 gen_op_st_T0_A0[ot + s->mem_index]();
5857 } else {
5858 gen_op_mov_reg_T0[ot][rm]();
5859 }
5860 gen_op_arpl_update();
5861 }
5862 break;
5863 case 0x102: /* lar */
5864 case 0x103: /* lsl */
5865 if (!s->pe || s->vm86)
5866 goto illegal_op;
5867 ot = dflag ? OT_LONG : OT_WORD;
5868 modrm = ldub_code(s->pc++);
5869 reg = ((modrm >> 3) & 7) | rex_r;
5870 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5871 gen_op_mov_TN_reg[ot][1][reg]();
5872 if (s->cc_op != CC_OP_DYNAMIC)
5873 gen_op_set_cc_op(s->cc_op);
5874 if (b == 0x102)
5875 gen_op_lar();
5876 else
5877 gen_op_lsl();
5878 s->cc_op = CC_OP_EFLAGS;
5879 gen_op_mov_reg_T1[ot][reg]();
5880 break;
5881 case 0x118:
5882 modrm = ldub_code(s->pc++);
5883 mod = (modrm >> 6) & 3;
5884 op = (modrm >> 3) & 7;
5885 switch(op) {
5886 case 0: /* prefetchnta */
5887 case 1: /* prefetchnt0 */
5888 case 2: /* prefetchnt0 */
5889 case 3: /* prefetchnt0 */
5890 if (mod == 3)
5891 goto illegal_op;
5892 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5893 /* nothing more to do */
5894 break;
5895 default: /* nop (multi byte) */
5896 gen_nop_modrm(s, modrm);
5897 break;
5898 }
5899 break;
5900 case 0x119 ... 0x11f: /* nop (multi byte) */
5901 modrm = ldub_code(s->pc++);
5902 gen_nop_modrm(s, modrm);
5903 break;
5904 case 0x120: /* mov reg, crN */
5905 case 0x122: /* mov crN, reg */
5906 if (s->cpl != 0) {
5907 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5908 } else {
5909 modrm = ldub_code(s->pc++);
5910 if ((modrm & 0xc0) != 0xc0)
5911 goto illegal_op;
5912 rm = (modrm & 7) | REX_B(s);
5913 reg = ((modrm >> 3) & 7) | rex_r;
5914 if (CODE64(s))
5915 ot = OT_QUAD;
5916 else
5917 ot = OT_LONG;
5918 switch(reg) {
5919 case 0:
5920 case 2:
5921 case 3:
5922 case 4:
5923 case 8:
5924 if (b & 2) {
5925 gen_op_mov_TN_reg[ot][0][rm]();
5926 gen_op_movl_crN_T0(reg);
5927 gen_jmp_im(s->pc - s->cs_base);
5928 gen_eob(s);
5929 } else {
5930#if !defined(CONFIG_USER_ONLY)
5931 if (reg == 8)
5932 gen_op_movtl_T0_cr8();
5933 else
5934#endif
5935 gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
5936 gen_op_mov_reg_T0[ot][rm]();
5937 }
5938 break;
5939 default:
5940 goto illegal_op;
5941 }
5942 }
5943 break;
5944 case 0x121: /* mov reg, drN */
5945 case 0x123: /* mov drN, reg */
5946 if (s->cpl != 0) {
5947 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5948 } else {
5949 modrm = ldub_code(s->pc++);
5950 if ((modrm & 0xc0) != 0xc0)
5951 goto illegal_op;
5952 rm = (modrm & 7) | REX_B(s);
5953 reg = ((modrm >> 3) & 7) | rex_r;
5954 if (CODE64(s))
5955 ot = OT_QUAD;
5956 else
5957 ot = OT_LONG;
5958 /* XXX: do it dynamically with CR4.DE bit */
5959 if (reg == 4 || reg == 5 || reg >= 8)
5960 goto illegal_op;
5961 if (b & 2) {
5962 gen_op_mov_TN_reg[ot][0][rm]();
5963 gen_op_movl_drN_T0(reg);
5964 gen_jmp_im(s->pc - s->cs_base);
5965 gen_eob(s);
5966 } else {
5967 gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
5968 gen_op_mov_reg_T0[ot][rm]();
5969 }
5970 }
5971 break;
5972 case 0x106: /* clts */
5973 if (s->cpl != 0) {
5974 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5975 } else {
5976 gen_op_clts();
5977 /* abort block because static cpu state changed */
5978 gen_jmp_im(s->pc - s->cs_base);
5979 gen_eob(s);
5980 }
5981 break;
5982 /* MMX/SSE/SSE2/PNI support */
5983 case 0x1c3: /* MOVNTI reg, mem */
5984 if (!(s->cpuid_features & CPUID_SSE2))
5985 goto illegal_op;
5986 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
5987 modrm = ldub_code(s->pc++);
5988 mod = (modrm >> 6) & 3;
5989 if (mod == 3)
5990 goto illegal_op;
5991 reg = ((modrm >> 3) & 7) | rex_r;
5992 /* generate a generic store */
5993 gen_ldst_modrm(s, modrm, ot, reg, 1);
5994 break;
5995 case 0x1ae:
5996 modrm = ldub_code(s->pc++);
5997 mod = (modrm >> 6) & 3;
5998 op = (modrm >> 3) & 7;
5999 switch(op) {
6000 case 0: /* fxsave */
6001 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6002 (s->flags & HF_EM_MASK))
6003 goto illegal_op;
6004 if (s->flags & HF_TS_MASK) {
6005 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6006 break;
6007 }
6008 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6009 gen_op_fxsave_A0((s->dflag == 2));
6010 break;
6011 case 1: /* fxrstor */
6012 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6013 (s->flags & HF_EM_MASK))
6014 goto illegal_op;
6015 if (s->flags & HF_TS_MASK) {
6016 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6017 break;
6018 }
6019 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6020 gen_op_fxrstor_A0((s->dflag == 2));
6021 break;
6022 case 2: /* ldmxcsr */
6023 case 3: /* stmxcsr */
6024 if (s->flags & HF_TS_MASK) {
6025 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6026 break;
6027 }
6028 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6029 mod == 3)
6030 goto illegal_op;
6031 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6032 if (op == 2) {
6033 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
6034 gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
6035 } else {
6036 gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
6037 gen_op_st_T0_A0[OT_LONG + s->mem_index]();
6038 }
6039 break;
6040 case 5: /* lfence */
6041 case 6: /* mfence */
6042 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6043 goto illegal_op;
6044 break;
6045 case 7: /* sfence / clflush */
6046 if ((modrm & 0xc7) == 0xc0) {
6047 /* sfence */
6048 if (!(s->cpuid_features & CPUID_SSE))
6049 goto illegal_op;
6050 } else {
6051 /* clflush */
6052 if (!(s->cpuid_features & CPUID_CLFLUSH))
6053 goto illegal_op;
6054 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6055 }
6056 break;
6057 default:
6058 goto illegal_op;
6059 }
6060 break;
6061 case 0x10d: /* prefetch */
6062 modrm = ldub_code(s->pc++);
6063 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6064 /* ignore for now */
6065 break;
6066 case 0x1aa: /* rsm */
6067 if (!(s->flags & HF_SMM_MASK))
6068 goto illegal_op;
6069 if (s->cc_op != CC_OP_DYNAMIC) {
6070 gen_op_set_cc_op(s->cc_op);
6071 s->cc_op = CC_OP_DYNAMIC;
6072 }
6073 gen_jmp_im(s->pc - s->cs_base);
6074 gen_op_rsm();
6075 gen_eob(s);
6076 break;
6077 case 0x110 ... 0x117:
6078 case 0x128 ... 0x12f:
6079 case 0x150 ... 0x177:
6080 case 0x17c ... 0x17f:
6081 case 0x1c2:
6082 case 0x1c4 ... 0x1c6:
6083 case 0x1d0 ... 0x1fe:
6084 gen_sse(s, b, pc_start, rex_r);
6085 break;
6086 default:
6087 goto illegal_op;
6088 }
6089 /* lock generation */
6090 if (s->prefix & PREFIX_LOCK)
6091 gen_op_unlock();
6092 return s->pc;
6093 illegal_op:
6094 if (s->prefix & PREFIX_LOCK)
6095 gen_op_unlock();
6096 /* XXX: ensure that no lock was generated */
6097 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6098 return s->pc;
6099}
6100
6101#define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
6102#define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
6103
/* flags read by an operation */
/*
 * Indexed by micro-op number (INDEX_op_*): for each op, the mask of
 * EFLAGS bits (CC_O/CC_S/CC_Z/CC_A/CC_P/CC_C) that the op consumes.
 * Entries not listed explicitly default to 0 (C designated-initializer
 * semantics), i.e. the op reads no flags.  Used by the translator's
 * flag optimisation (see the "flag optimisation" note below).
 */
static uint16_t opc_read_flags[NB_OPS] = {
    /* BCD adjust ops consume the auxiliary-carry (and carry) flags */
    [INDEX_op_aas] = CC_A,
    [INDEX_op_aaa] = CC_A,
    [INDEX_op_das] = CC_A | CC_C,
    [INDEX_op_daa] = CC_A | CC_C,

    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_C,

    /* INTO traps on the overflow flag */
    [INDEX_op_into] = CC_O,

    /* conditional jumps specialised on a preceding sub: each variant
       reads only the flag bits its condition actually tests */
    [INDEX_op_jb_subb] = CC_C,
    [INDEX_op_jb_subw] = CC_C,
    [INDEX_op_jb_subl] = CC_C,

    [INDEX_op_jz_subb] = CC_Z,
    [INDEX_op_jz_subw] = CC_Z,
    [INDEX_op_jz_subl] = CC_Z,

    [INDEX_op_jbe_subb] = CC_Z | CC_C,
    [INDEX_op_jbe_subw] = CC_Z | CC_C,
    [INDEX_op_jbe_subl] = CC_Z | CC_C,

    [INDEX_op_js_subb] = CC_S,
    [INDEX_op_js_subw] = CC_S,
    [INDEX_op_js_subl] = CC_S,

    [INDEX_op_jl_subb] = CC_O | CC_S,
    [INDEX_op_jl_subw] = CC_O | CC_S,
    [INDEX_op_jl_subl] = CC_O | CC_S,

    [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,

    /* loopz/loopnz additionally test ZF */
    [INDEX_op_loopnzw] = CC_Z,
    [INDEX_op_loopnzl] = CC_Z,
    [INDEX_op_loopzw] = CC_Z,
    [INDEX_op_loopzl] = CC_Z,

    /* setcc reading the dynamically computed flags */
    [INDEX_op_seto_T0_cc] = CC_O,
    [INDEX_op_setb_T0_cc] = CC_C,
    [INDEX_op_setz_T0_cc] = CC_Z,
    [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
    [INDEX_op_sets_T0_cc] = CC_S,
    [INDEX_op_setp_T0_cc] = CC_P,
    [INDEX_op_setl_T0_cc] = CC_O | CC_S,
    [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,

    /* setcc specialised on a preceding sub */
    [INDEX_op_setb_T0_subb] = CC_C,
    [INDEX_op_setb_T0_subw] = CC_C,
    [INDEX_op_setb_T0_subl] = CC_C,

    [INDEX_op_setz_T0_subb] = CC_Z,
    [INDEX_op_setz_T0_subw] = CC_Z,
    [INDEX_op_setz_T0_subl] = CC_Z,

    [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
    [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,

    [INDEX_op_sets_T0_subb] = CC_S,
    [INDEX_op_sets_T0_subw] = CC_S,
    [INDEX_op_sets_T0_subl] = CC_S,

    [INDEX_op_setl_T0_subb] = CC_O | CC_S,
    [INDEX_op_setl_T0_subw] = CC_O | CC_S,
    [INDEX_op_setl_T0_subl] = CC_O | CC_S,

    [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
    [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,

    /* reads of the whole (arithmetic) flag set */
    [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
    [INDEX_op_cmc] = CC_C,
    [INDEX_op_salc] = CC_C,

    /* needed for correct flag optimisation before string ops */
    [INDEX_op_jnz_ecxw] = CC_OSZAPC,
    [INDEX_op_jnz_ecxl] = CC_OSZAPC,
    [INDEX_op_jz_ecxw] = CC_OSZAPC,
    [INDEX_op_jz_ecxl] = CC_OSZAPC,

#ifdef TARGET_X86_64
    /* 64-bit (quad) variants of the entries above */
    [INDEX_op_jb_subq] = CC_C,
    [INDEX_op_jz_subq] = CC_Z,
    [INDEX_op_jbe_subq] = CC_Z | CC_C,
    [INDEX_op_js_subq] = CC_S,
    [INDEX_op_jl_subq] = CC_O | CC_S,
    [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_loopnzq] = CC_Z,
    [INDEX_op_loopzq] = CC_Z,

    [INDEX_op_setb_T0_subq] = CC_C,
    [INDEX_op_setz_T0_subq] = CC_Z,
    [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
    [INDEX_op_sets_T0_subq] = CC_S,
    [INDEX_op_setl_T0_subq] = CC_O | CC_S,
    [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,

    [INDEX_op_jnz_ecxq] = CC_OSZAPC,
    [INDEX_op_jz_ecxq] = CC_OSZAPC,
#endif

/* adc/sbb and rcl/rcr consume the incoming carry flag; this macro
   instantiates those entries once per memory-access variant (SUFFIX
   selects the plain/_raw/_kernel/_user op family below).  NOTE: no
   comments inside the macro body — they would interfere with the
   backslash line continuations. */
#define DEF_READF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)

    DEF_READF( )
    DEF_READF(_raw)
#ifndef CONFIG_USER_ONLY
    /* softmmu builds also have per-privilege-level access variants */
    DEF_READF(_kernel)
    DEF_READF(_user)
#endif
};
6236
/* flags written by an operation: for each micro-op index, the mask of
   EFLAGS condition-code bits (CC_*) that the op overwrites.  Consumed by
   optimize_flags(): if none of the bits an op writes are live afterwards,
   the op can be replaced by its flag-less variant from opc_simpler[]. */
static uint16_t opc_write_flags[NB_OPS] = {
    /* generic lazy-flags update helpers: all six arithmetic flags */
    [INDEX_op_update2_cc] = CC_OSZAPC,
    [INDEX_op_update1_cc] = CC_OSZAPC,
    [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_update_neg_cc] = CC_OSZAPC,
    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_OSZAPC,
    [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,

    /* multiplies */
    [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
    [INDEX_op_imull_T0_T1] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)

    /* sse */
    [INDEX_op_ucomiss] = CC_OSZAPC,
    [INDEX_op_ucomisd] = CC_OSZAPC,
    [INDEX_op_comiss] = CC_OSZAPC,
    [INDEX_op_comisd] = CC_OSZAPC,

    /* bcd */
    [INDEX_op_aam] = CC_OSZAPC,
    [INDEX_op_aad] = CC_OSZAPC,
    [INDEX_op_aas] = CC_OSZAPC,
    [INDEX_op_aaa] = CC_OSZAPC,
    [INDEX_op_das] = CC_OSZAPC,
    [INDEX_op_daa] = CC_OSZAPC,

    /* direct EFLAGS loads; the byte form (sahf-style) cannot touch O */
    [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
    [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_clc] = CC_C,
    [INDEX_op_stc] = CC_C,
    [INDEX_op_cmc] = CC_C,

    /* bit test / set / reset / complement */
    [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)

    /* bit scan */
    [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
    [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)

    /* ops that only report success/validity through ZF (plus x87 compares) */
    [INDEX_op_cmpxchg8b] = CC_Z,
    [INDEX_op_lar] = CC_Z,
    [INDEX_op_lsl] = CC_Z,
    [INDEX_op_verr] = CC_Z,
    [INDEX_op_verw] = CC_Z,
    [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
    [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,

/* Per-memory-suffix variants (direct, _raw, _kernel, _user) of the ops
   that both read and write flags or depend on the access path. */
#define DEF_WRITEF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)


    /* instantiate for every memory access flavour */
    DEF_WRITEF( )
    DEF_WRITEF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_WRITEF(_kernel)
    DEF_WRITEF(_user)
#endif
};
6387
/* simpler form of an operation if no flags need to be generated: maps a
   flag-producing micro-op to an equivalent op that skips the flag
   computation.  Slots left at 0 are filled with the identity mapping by
   optimize_flags_init(), so optimize_flags() can index unconditionally. */
static uint16_t opc_simpler[NB_OPS] = {
    /* pure flag-update ops become no-ops when the flags are dead */
    [INDEX_op_update2_cc] = INDEX_op_nop,
    [INDEX_op_update1_cc] = INDEX_op_nop,
    [INDEX_op_update_neg_cc] = INDEX_op_nop,
#if 0
    /* broken: CC_OP logic must be rewritten */
    [INDEX_op_update_inc_cc] = INDEX_op_nop,
#endif

    /* shifts: drop the _cc suffix variant when flags are unused */
    [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
    [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
    [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
    X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)

    [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
    [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
    [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
    X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)

    [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
    [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
    [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
    X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)

/* rotates, per memory access flavour */
#define DEF_SIMPLER(SUFFIX)\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)

    DEF_SIMPLER( )
    DEF_SIMPLER(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SIMPLER(_kernel)
    DEF_SIMPLER(_user)
#endif
};
6431
6432void optimize_flags_init(void)
6433{
6434 int i;
6435 /* put default values in arrays */
6436 for(i = 0; i < NB_OPS; i++) {
6437 if (opc_simpler[i] == 0)
6438 opc_simpler[i] = i;
6439 }
6440}
6441
6442/* CPU flags computation optimization: we move backward thru the
6443 generated code to see which flags are needed. The operation is
6444 modified if suitable */
6445static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
6446{
6447 uint16_t *opc_ptr;
6448 int live_flags, write_flags, op;
6449
6450 opc_ptr = opc_buf + opc_buf_len;
6451 /* live_flags contains the flags needed by the next instructions
6452 in the code. At the end of the bloc, we consider that all the
6453 flags are live. */
6454 live_flags = CC_OSZAPC;
6455 while (opc_ptr > opc_buf) {
6456 op = *--opc_ptr;
6457 /* if none of the flags written by the instruction is used,
6458 then we can try to find a simpler instruction */
6459 write_flags = opc_write_flags[op];
6460 if ((live_flags & write_flags) == 0) {
6461 *opc_ptr = opc_simpler[op];
6462 }
6463 /* compute the live flags before the instruction */
6464 live_flags &= ~write_flags;
6465 live_flags |= opc_read_flags[op];
6466 }
6467}
6468
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction (used when restoring
   guest state from a host fault inside an already-translated block).
   Returns 0. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int flags, j, lj, cflags;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    /* decode the translation-relevant CPU state out of tb->flags */
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    /* flags state unknown at block entry: computed lazily at runtime */
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions: index chooses between the raw,
       kernel and user softmmu helper sets (presumably scaled by 4, one
       helper per access size -- TODO confirm against the op tables) */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    /* direct block chaining is only allowed when nothing forces us to
       stop after every instruction */
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    /* reset the micro-op output buffers */
    gen_opc_ptr = gen_opc_buf;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    gen_opparam_ptr = gen_opparam_buf;
    nb_gen_labels = 0;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;  /* index of the last filled gen_opc_* slot */

    /* main translation loop: one guest instruction per iteration */
    for(;;) {
        /* emit a debug trap before any instruction with a breakpoint */
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            /* record guest PC and cc_op for every micro-op slot emitted
               so far; intermediate slots are zero-filled */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        }
        /* translate one instruction; returns the next guest PC */
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;

#ifdef VBOX
#ifdef DEBUG
/*
    if(cpu_check_code_raw(env, pc_ptr, env->hflags | (env->eflags & (IOPL_MASK | TF_MASK | VM_MASK))) == ERROR_SUCCESS)
    {
        //should never happen as the jump to the patch code terminates the translation block
        dprintf(("QEmu is about to execute instructions in our patch block at %08X!!\n", pc_ptr));
    }
*/
#endif
        /* VBox single-instruction emulation: end the block after exactly
           one instruction and clear the request flag */
        if (env->state & CPU_EMULATE_SINGLE_INSTR)
        {
            env->state &= ~CPU_EMULATE_SINGLE_INSTR;
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
#endif /* VBOX */

        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irq were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too: keep a safety
           margin in the op buffer and stay within one guest page */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    /* we don't forget to fill the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
        if (loglevel & CPU_LOG_TB_OP) {
            fprintf(logfile, "OP:\n");
            dump_ops(gen_opc_buf, gen_opparam_buf);
            fprintf(logfile, "\n");
        }
    }
#endif

    /* optimize flag computations */
    optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP_OPT) {
        fprintf(logfile, "AFTER FLAGS OPT:\n");
        dump_ops(gen_opc_buf, gen_opparam_buf);
        fprintf(logfile, "\n");
    }
#endif
    /* when re-translating for PC search the size is already known */
    if (!search_pc)
        tb->size = pc_ptr - pc_start;
    return 0;
}
6650
6651int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
6652{
6653 return gen_intermediate_code_internal(env, tb, 0);
6654}
6655
6656int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
6657{
6658 return gen_intermediate_code_internal(env, tb, 1);
6659}
6660
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette