VirtualBox

source: vbox/trunk/src/recompiler/target-i386/translate.c@ 1599

Last change on this file since 1599 was 1514, checked in by vboxsync, 18 years ago

Update eip for each instruction. (makes it easier to generate exceptions)
Re-enabled null selector check during memory accesses.

  • Property svn:eol-style set to native
File size: 197.4 KB
Line 
1/*
2 * i386 translation
3 *
4 * Copyright (c) 2003 Fabrice Bellard
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
20#include <stdarg.h>
21#include <stdlib.h>
22#include <stdio.h>
23#include <string.h>
24#include <inttypes.h>
25#include <signal.h>
26#include <assert.h>
27
28#include "cpu.h"
29#include "exec-all.h"
30#include "disas.h"
31
32/* XXX: move that elsewhere */
33static uint16_t *gen_opc_ptr;
34static uint32_t *gen_opparam_ptr;
35
36#define PREFIX_REPZ 0x01
37#define PREFIX_REPNZ 0x02
38#define PREFIX_LOCK 0x04
39#define PREFIX_DATA 0x08
40#define PREFIX_ADR 0x10
41
42#ifdef TARGET_X86_64
43#define X86_64_ONLY(x) x
44#define X86_64_DEF(x...) x
45#define CODE64(s) ((s)->code64)
46#define REX_X(s) ((s)->rex_x)
47#define REX_B(s) ((s)->rex_b)
48/* XXX: gcc generates push/pop in some opcodes, so we cannot use them */
49#if 1
50#define BUGGY_64(x) NULL
51#endif
52#else
53#define X86_64_ONLY(x) NULL
54#define X86_64_DEF(x...)
55#define CODE64(s) 0
56#define REX_X(s) 0
57#define REX_B(s) 0
58#endif
59
60#ifdef TARGET_X86_64
61static int x86_64_hregs;
62#endif
63
64#ifdef USE_DIRECT_JUMP
65#define TBPARAM(x)
66#else
67#define TBPARAM(x) (long)(x)
68#endif
69
70#ifdef VBOX
71/* Special/override code readers to hide patched code. */
72
73uint8_t ldub_code_raw(target_ulong pc)
74{
75 uint8_t b;
76
77 if (!remR3GetOpcode(cpu_single_env, pc, &b))
78 b = ldub_code(pc);
79 return b;
80}
81#define ldub_code(a) ldub_code_raw(a)
82
83uint16_t lduw_code_raw(target_ulong pc)
84{
85 return (ldub_code(pc+1) << 8) | ldub_code(pc);
86}
87#define lduw_code(a) lduw_code_raw(a)
88
89
90uint32_t ldl_code_raw(target_ulong pc)
91{
92 return (ldub_code(pc+3) << 24) | (ldub_code(pc+2) << 16) | (ldub_code(pc+1) << 8) | ldub_code(pc);
93}
94#define ldl_code(a) ldl_code_raw(a)
95
96#endif /* VBOX */
97
98
/* Per-translation state for the x86 front end: per-instruction fields
   are reset for each decoded instruction, per-block fields are fixed
   for the whole translation block. */
typedef struct DisasContext {
    /* current insn context */
    int override; /* segment override register index, -1 if no override */
    int prefix;   /* PREFIX_* bits accumulated for the current insn */
    int aflag, dflag; /* address/operand size; aflag: 0=16, 1=32, 2=64-bit */
    target_ulong pc; /* pc = eip + cs_base */
    int is_jmp; /* 1 = means jump (stop translation), 2 means CPU
                   static state change (stop translation) */
    /* current block context */
    target_ulong cs_base; /* base of CS segment */
    int pe;     /* protected mode */
    int code32; /* 32 bit code segment */
#ifdef TARGET_X86_64
    int lma;    /* long mode active */
    int code64; /* 64 bit code segment */
    int rex_x, rex_b; /* REX prefix index/base register extension bits */
#endif
    int ss32;   /* 32 bit stack segment */
    int cc_op;  /* current CC operation (lazy flags evaluation state) */
    int addseg; /* non zero if either DS/ES/SS have a non zero base */
    int f_st;   /* currently unused */
    int vm86;   /* vm86 mode */
#ifdef VBOX
    int vme;    /* CR4.VME */
#endif
    int cpl;    /* current privilege level */
    int iopl;   /* I/O privilege level */
    int tf;     /* TF cpu flag */
    int singlestep_enabled; /* "hardware" single step enabled */
    int jmp_opt; /* use direct block chaining for direct jumps */
    int mem_index; /* select memory access functions (offset into ld/st tables) */
    int flags; /* all execution flags */
    struct TranslationBlock *tb;
    int popl_esp_hack; /* for correct popl with esp base handling */
    int rip_offset; /* only used in x86_64, but left for simplicity */
    int cpuid_features;     /* cached CPUID feature bits */
    int cpuid_ext_features; /* cached CPUID extended feature bits */
} DisasContext;
137
138static void gen_eob(DisasContext *s);
139static void gen_jmp(DisasContext *s, target_ulong eip);
140static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num);
141
142/* i386 arith/logic operations */
/* i386 arith/logic operations, in ModRM /reg encoding order (the value
   doubles as an index into the gen_op_* dispatch tables) */
enum {
    OP_ADDL,
    OP_ORL,
    OP_ADCL,
    OP_SBBL,
    OP_ANDL,
    OP_SUBL,
    OP_XORL,
    OP_CMPL,
};
153
154/* i386 shift ops */
/* i386 shift ops, in ModRM /reg encoding order */
enum {
    OP_ROL,
    OP_ROR,
    OP_RCL,
    OP_RCR,
    OP_SHL,
    OP_SHR,
    OP_SHL1, /* undocumented alias encoding of SHL (/6) */
    OP_SAR = 7,
};
165
166enum {
167#define DEF(s, n, copy_size) INDEX_op_ ## s,
168#include "opc.h"
169#undef DEF
170 NB_OPS,
171};
172
173#include "gen-op.h"
174
175/* operand size */
176enum {
177 OT_BYTE = 0,
178 OT_WORD,
179 OT_LONG,
180 OT_QUAD,
181};
182
183enum {
184 /* I386 int registers */
185 OR_EAX, /* MUST be even numbered */
186 OR_ECX,
187 OR_EDX,
188 OR_EBX,
189 OR_ESP,
190 OR_EBP,
191 OR_ESI,
192 OR_EDI,
193
194 OR_TMP0 = 16, /* temporary operand register */
195 OR_TMP1,
196 OR_A0, /* temporary register used when doing address evaluation */
197};
198
199#ifdef TARGET_X86_64
200
201#define NB_OP_SIZES 4
202
203#define DEF_REGS(prefix, suffix) \
204 prefix ## EAX ## suffix,\
205 prefix ## ECX ## suffix,\
206 prefix ## EDX ## suffix,\
207 prefix ## EBX ## suffix,\
208 prefix ## ESP ## suffix,\
209 prefix ## EBP ## suffix,\
210 prefix ## ESI ## suffix,\
211 prefix ## EDI ## suffix,\
212 prefix ## R8 ## suffix,\
213 prefix ## R9 ## suffix,\
214 prefix ## R10 ## suffix,\
215 prefix ## R11 ## suffix,\
216 prefix ## R12 ## suffix,\
217 prefix ## R13 ## suffix,\
218 prefix ## R14 ## suffix,\
219 prefix ## R15 ## suffix,
220
221#define DEF_BREGS(prefixb, prefixh, suffix) \
222 \
223static void prefixb ## ESP ## suffix ## _wrapper(void) \
224{ \
225 if (x86_64_hregs) \
226 prefixb ## ESP ## suffix (); \
227 else \
228 prefixh ## EAX ## suffix (); \
229} \
230 \
231static void prefixb ## EBP ## suffix ## _wrapper(void) \
232{ \
233 if (x86_64_hregs) \
234 prefixb ## EBP ## suffix (); \
235 else \
236 prefixh ## ECX ## suffix (); \
237} \
238 \
239static void prefixb ## ESI ## suffix ## _wrapper(void) \
240{ \
241 if (x86_64_hregs) \
242 prefixb ## ESI ## suffix (); \
243 else \
244 prefixh ## EDX ## suffix (); \
245} \
246 \
247static void prefixb ## EDI ## suffix ## _wrapper(void) \
248{ \
249 if (x86_64_hregs) \
250 prefixb ## EDI ## suffix (); \
251 else \
252 prefixh ## EBX ## suffix (); \
253}
254
255DEF_BREGS(gen_op_movb_, gen_op_movh_, _T0)
256DEF_BREGS(gen_op_movb_, gen_op_movh_, _T1)
257DEF_BREGS(gen_op_movl_T0_, gen_op_movh_T0_, )
258DEF_BREGS(gen_op_movl_T1_, gen_op_movh_T1_, )
259
260#else /* !TARGET_X86_64 */
261
262#define NB_OP_SIZES 3
263
264#define DEF_REGS(prefix, suffix) \
265 prefix ## EAX ## suffix,\
266 prefix ## ECX ## suffix,\
267 prefix ## EDX ## suffix,\
268 prefix ## EBX ## suffix,\
269 prefix ## ESP ## suffix,\
270 prefix ## EBP ## suffix,\
271 prefix ## ESI ## suffix,\
272 prefix ## EDI ## suffix,
273
274#endif /* !TARGET_X86_64 */
275
276static GenOpFunc *gen_op_mov_reg_T0[NB_OP_SIZES][CPU_NB_REGS] = {
277 [OT_BYTE] = {
278 gen_op_movb_EAX_T0,
279 gen_op_movb_ECX_T0,
280 gen_op_movb_EDX_T0,
281 gen_op_movb_EBX_T0,
282#ifdef TARGET_X86_64
283 gen_op_movb_ESP_T0_wrapper,
284 gen_op_movb_EBP_T0_wrapper,
285 gen_op_movb_ESI_T0_wrapper,
286 gen_op_movb_EDI_T0_wrapper,
287 gen_op_movb_R8_T0,
288 gen_op_movb_R9_T0,
289 gen_op_movb_R10_T0,
290 gen_op_movb_R11_T0,
291 gen_op_movb_R12_T0,
292 gen_op_movb_R13_T0,
293 gen_op_movb_R14_T0,
294 gen_op_movb_R15_T0,
295#else
296 gen_op_movh_EAX_T0,
297 gen_op_movh_ECX_T0,
298 gen_op_movh_EDX_T0,
299 gen_op_movh_EBX_T0,
300#endif
301 },
302 [OT_WORD] = {
303 DEF_REGS(gen_op_movw_, _T0)
304 },
305 [OT_LONG] = {
306 DEF_REGS(gen_op_movl_, _T0)
307 },
308#ifdef TARGET_X86_64
309 [OT_QUAD] = {
310 DEF_REGS(gen_op_movq_, _T0)
311 },
312#endif
313};
314
315static GenOpFunc *gen_op_mov_reg_T1[NB_OP_SIZES][CPU_NB_REGS] = {
316 [OT_BYTE] = {
317 gen_op_movb_EAX_T1,
318 gen_op_movb_ECX_T1,
319 gen_op_movb_EDX_T1,
320 gen_op_movb_EBX_T1,
321#ifdef TARGET_X86_64
322 gen_op_movb_ESP_T1_wrapper,
323 gen_op_movb_EBP_T1_wrapper,
324 gen_op_movb_ESI_T1_wrapper,
325 gen_op_movb_EDI_T1_wrapper,
326 gen_op_movb_R8_T1,
327 gen_op_movb_R9_T1,
328 gen_op_movb_R10_T1,
329 gen_op_movb_R11_T1,
330 gen_op_movb_R12_T1,
331 gen_op_movb_R13_T1,
332 gen_op_movb_R14_T1,
333 gen_op_movb_R15_T1,
334#else
335 gen_op_movh_EAX_T1,
336 gen_op_movh_ECX_T1,
337 gen_op_movh_EDX_T1,
338 gen_op_movh_EBX_T1,
339#endif
340 },
341 [OT_WORD] = {
342 DEF_REGS(gen_op_movw_, _T1)
343 },
344 [OT_LONG] = {
345 DEF_REGS(gen_op_movl_, _T1)
346 },
347#ifdef TARGET_X86_64
348 [OT_QUAD] = {
349 DEF_REGS(gen_op_movq_, _T1)
350 },
351#endif
352};
353
354static GenOpFunc *gen_op_mov_reg_A0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
355 [0] = {
356 DEF_REGS(gen_op_movw_, _A0)
357 },
358 [1] = {
359 DEF_REGS(gen_op_movl_, _A0)
360 },
361#ifdef TARGET_X86_64
362 [2] = {
363 DEF_REGS(gen_op_movq_, _A0)
364 },
365#endif
366};
367
368static GenOpFunc *gen_op_mov_TN_reg[NB_OP_SIZES][2][CPU_NB_REGS] =
369{
370 [OT_BYTE] = {
371 {
372 gen_op_movl_T0_EAX,
373 gen_op_movl_T0_ECX,
374 gen_op_movl_T0_EDX,
375 gen_op_movl_T0_EBX,
376#ifdef TARGET_X86_64
377 gen_op_movl_T0_ESP_wrapper,
378 gen_op_movl_T0_EBP_wrapper,
379 gen_op_movl_T0_ESI_wrapper,
380 gen_op_movl_T0_EDI_wrapper,
381 gen_op_movl_T0_R8,
382 gen_op_movl_T0_R9,
383 gen_op_movl_T0_R10,
384 gen_op_movl_T0_R11,
385 gen_op_movl_T0_R12,
386 gen_op_movl_T0_R13,
387 gen_op_movl_T0_R14,
388 gen_op_movl_T0_R15,
389#else
390 gen_op_movh_T0_EAX,
391 gen_op_movh_T0_ECX,
392 gen_op_movh_T0_EDX,
393 gen_op_movh_T0_EBX,
394#endif
395 },
396 {
397 gen_op_movl_T1_EAX,
398 gen_op_movl_T1_ECX,
399 gen_op_movl_T1_EDX,
400 gen_op_movl_T1_EBX,
401#ifdef TARGET_X86_64
402 gen_op_movl_T1_ESP_wrapper,
403 gen_op_movl_T1_EBP_wrapper,
404 gen_op_movl_T1_ESI_wrapper,
405 gen_op_movl_T1_EDI_wrapper,
406 gen_op_movl_T1_R8,
407 gen_op_movl_T1_R9,
408 gen_op_movl_T1_R10,
409 gen_op_movl_T1_R11,
410 gen_op_movl_T1_R12,
411 gen_op_movl_T1_R13,
412 gen_op_movl_T1_R14,
413 gen_op_movl_T1_R15,
414#else
415 gen_op_movh_T1_EAX,
416 gen_op_movh_T1_ECX,
417 gen_op_movh_T1_EDX,
418 gen_op_movh_T1_EBX,
419#endif
420 },
421 },
422 [OT_WORD] = {
423 {
424 DEF_REGS(gen_op_movl_T0_, )
425 },
426 {
427 DEF_REGS(gen_op_movl_T1_, )
428 },
429 },
430 [OT_LONG] = {
431 {
432 DEF_REGS(gen_op_movl_T0_, )
433 },
434 {
435 DEF_REGS(gen_op_movl_T1_, )
436 },
437 },
438#ifdef TARGET_X86_64
439 [OT_QUAD] = {
440 {
441 DEF_REGS(gen_op_movl_T0_, )
442 },
443 {
444 DEF_REGS(gen_op_movl_T1_, )
445 },
446 },
447#endif
448};
449
450static GenOpFunc *gen_op_movl_A0_reg[CPU_NB_REGS] = {
451 DEF_REGS(gen_op_movl_A0_, )
452};
453
454static GenOpFunc *gen_op_addl_A0_reg_sN[4][CPU_NB_REGS] = {
455 [0] = {
456 DEF_REGS(gen_op_addl_A0_, )
457 },
458 [1] = {
459 DEF_REGS(gen_op_addl_A0_, _s1)
460 },
461 [2] = {
462 DEF_REGS(gen_op_addl_A0_, _s2)
463 },
464 [3] = {
465 DEF_REGS(gen_op_addl_A0_, _s3)
466 },
467};
468
469#ifdef TARGET_X86_64
470static GenOpFunc *gen_op_movq_A0_reg[CPU_NB_REGS] = {
471 DEF_REGS(gen_op_movq_A0_, )
472};
473
474static GenOpFunc *gen_op_addq_A0_reg_sN[4][CPU_NB_REGS] = {
475 [0] = {
476 DEF_REGS(gen_op_addq_A0_, )
477 },
478 [1] = {
479 DEF_REGS(gen_op_addq_A0_, _s1)
480 },
481 [2] = {
482 DEF_REGS(gen_op_addq_A0_, _s2)
483 },
484 [3] = {
485 DEF_REGS(gen_op_addq_A0_, _s3)
486 },
487};
488#endif
489
490static GenOpFunc *gen_op_cmov_reg_T1_T0[NB_OP_SIZES - 1][CPU_NB_REGS] = {
491 [0] = {
492 DEF_REGS(gen_op_cmovw_, _T1_T0)
493 },
494 [1] = {
495 DEF_REGS(gen_op_cmovl_, _T1_T0)
496 },
497#ifdef TARGET_X86_64
498 [2] = {
499 DEF_REGS(gen_op_cmovq_, _T1_T0)
500 },
501#endif
502};
503
/* Plain logic ops indexed by OP_*; entries needing carry input or a
   dedicated writeback path (ADD, ADC, SBB, SUB, CMP) are NULL and are
   dispatched explicitly in gen_op() instead. */
static GenOpFunc *gen_op_arith_T0_T1_cc[8] = {
    NULL,
    gen_op_orl_T0_T1,
    NULL,
    NULL,
    gen_op_andl_T0_T1,
    NULL,
    gen_op_xorl_T0_T1,
    NULL,
};
514
515#define DEF_ARITHC(SUFFIX)\
516 {\
517 gen_op_adcb ## SUFFIX ## _T0_T1_cc,\
518 gen_op_sbbb ## SUFFIX ## _T0_T1_cc,\
519 },\
520 {\
521 gen_op_adcw ## SUFFIX ## _T0_T1_cc,\
522 gen_op_sbbw ## SUFFIX ## _T0_T1_cc,\
523 },\
524 {\
525 gen_op_adcl ## SUFFIX ## _T0_T1_cc,\
526 gen_op_sbbl ## SUFFIX ## _T0_T1_cc,\
527 },\
528 {\
529 X86_64_ONLY(gen_op_adcq ## SUFFIX ## _T0_T1_cc),\
530 X86_64_ONLY(gen_op_sbbq ## SUFFIX ## _T0_T1_cc),\
531 },
532
533static GenOpFunc *gen_op_arithc_T0_T1_cc[4][2] = {
534 DEF_ARITHC( )
535};
536
537static GenOpFunc *gen_op_arithc_mem_T0_T1_cc[3 * 4][2] = {
538 DEF_ARITHC(_raw)
539#ifndef CONFIG_USER_ONLY
540 DEF_ARITHC(_kernel)
541 DEF_ARITHC(_user)
542#endif
543};
544
/* CC_OP_* base (byte-size) value produced by each OP_* operation; add
   the operand size to get the final cc_op. ADC/SBB map onto ADD/SUB,
   OR/AND/XOR are all "logic", CMP behaves like SUB. */
static const int cc_op_arithb[8] = {
    CC_OP_ADDB,   /* OP_ADDL */
    CC_OP_LOGICB, /* OP_ORL  */
    CC_OP_ADDB,   /* OP_ADCL */
    CC_OP_SUBB,   /* OP_SBBL */
    CC_OP_LOGICB, /* OP_ANDL */
    CC_OP_SUBB,   /* OP_SUBL */
    CC_OP_LOGICB, /* OP_XORL */
    CC_OP_SUBB,   /* OP_CMPL */
};
555
556#define DEF_CMPXCHG(SUFFIX)\
557 gen_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc,\
558 gen_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc,\
559 gen_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc,\
560 X86_64_ONLY(gen_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc),
561
562static GenOpFunc *gen_op_cmpxchg_T0_T1_EAX_cc[4] = {
563 DEF_CMPXCHG( )
564};
565
566static GenOpFunc *gen_op_cmpxchg_mem_T0_T1_EAX_cc[3 * 4] = {
567 DEF_CMPXCHG(_raw)
568#ifndef CONFIG_USER_ONLY
569 DEF_CMPXCHG(_kernel)
570 DEF_CMPXCHG(_user)
571#endif
572};
573
574#define DEF_SHIFT(SUFFIX)\
575 {\
576 gen_op_rolb ## SUFFIX ## _T0_T1_cc,\
577 gen_op_rorb ## SUFFIX ## _T0_T1_cc,\
578 gen_op_rclb ## SUFFIX ## _T0_T1_cc,\
579 gen_op_rcrb ## SUFFIX ## _T0_T1_cc,\
580 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
581 gen_op_shrb ## SUFFIX ## _T0_T1_cc,\
582 gen_op_shlb ## SUFFIX ## _T0_T1_cc,\
583 gen_op_sarb ## SUFFIX ## _T0_T1_cc,\
584 },\
585 {\
586 gen_op_rolw ## SUFFIX ## _T0_T1_cc,\
587 gen_op_rorw ## SUFFIX ## _T0_T1_cc,\
588 gen_op_rclw ## SUFFIX ## _T0_T1_cc,\
589 gen_op_rcrw ## SUFFIX ## _T0_T1_cc,\
590 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
591 gen_op_shrw ## SUFFIX ## _T0_T1_cc,\
592 gen_op_shlw ## SUFFIX ## _T0_T1_cc,\
593 gen_op_sarw ## SUFFIX ## _T0_T1_cc,\
594 },\
595 {\
596 gen_op_roll ## SUFFIX ## _T0_T1_cc,\
597 gen_op_rorl ## SUFFIX ## _T0_T1_cc,\
598 gen_op_rcll ## SUFFIX ## _T0_T1_cc,\
599 gen_op_rcrl ## SUFFIX ## _T0_T1_cc,\
600 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
601 gen_op_shrl ## SUFFIX ## _T0_T1_cc,\
602 gen_op_shll ## SUFFIX ## _T0_T1_cc,\
603 gen_op_sarl ## SUFFIX ## _T0_T1_cc,\
604 },\
605 {\
606 X86_64_ONLY(gen_op_rolq ## SUFFIX ## _T0_T1_cc),\
607 X86_64_ONLY(gen_op_rorq ## SUFFIX ## _T0_T1_cc),\
608 X86_64_ONLY(gen_op_rclq ## SUFFIX ## _T0_T1_cc),\
609 X86_64_ONLY(gen_op_rcrq ## SUFFIX ## _T0_T1_cc),\
610 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
611 X86_64_ONLY(gen_op_shrq ## SUFFIX ## _T0_T1_cc),\
612 X86_64_ONLY(gen_op_shlq ## SUFFIX ## _T0_T1_cc),\
613 X86_64_ONLY(gen_op_sarq ## SUFFIX ## _T0_T1_cc),\
614 },
615
616static GenOpFunc *gen_op_shift_T0_T1_cc[4][8] = {
617 DEF_SHIFT( )
618};
619
620static GenOpFunc *gen_op_shift_mem_T0_T1_cc[3 * 4][8] = {
621 DEF_SHIFT(_raw)
622#ifndef CONFIG_USER_ONLY
623 DEF_SHIFT(_kernel)
624 DEF_SHIFT(_user)
625#endif
626};
627
628#define DEF_SHIFTD(SUFFIX, op)\
629 {\
630 NULL,\
631 NULL,\
632 },\
633 {\
634 gen_op_shldw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
635 gen_op_shrdw ## SUFFIX ## _T0_T1_ ## op ## _cc,\
636 },\
637 {\
638 gen_op_shldl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
639 gen_op_shrdl ## SUFFIX ## _T0_T1_ ## op ## _cc,\
640 },\
641 {\
642X86_64_DEF(gen_op_shldq ## SUFFIX ## _T0_T1_ ## op ## _cc,\
643 gen_op_shrdq ## SUFFIX ## _T0_T1_ ## op ## _cc,)\
644 },
645
646static GenOpFunc1 *gen_op_shiftd_T0_T1_im_cc[4][2] = {
647 DEF_SHIFTD(, im)
648};
649
650static GenOpFunc *gen_op_shiftd_T0_T1_ECX_cc[4][2] = {
651 DEF_SHIFTD(, ECX)
652};
653
654static GenOpFunc1 *gen_op_shiftd_mem_T0_T1_im_cc[3 * 4][2] = {
655 DEF_SHIFTD(_raw, im)
656#ifndef CONFIG_USER_ONLY
657 DEF_SHIFTD(_kernel, im)
658 DEF_SHIFTD(_user, im)
659#endif
660};
661
662static GenOpFunc *gen_op_shiftd_mem_T0_T1_ECX_cc[3 * 4][2] = {
663 DEF_SHIFTD(_raw, ECX)
664#ifndef CONFIG_USER_ONLY
665 DEF_SHIFTD(_kernel, ECX)
666 DEF_SHIFTD(_user, ECX)
667#endif
668};
669
670static GenOpFunc *gen_op_btx_T0_T1_cc[3][4] = {
671 [0] = {
672 gen_op_btw_T0_T1_cc,
673 gen_op_btsw_T0_T1_cc,
674 gen_op_btrw_T0_T1_cc,
675 gen_op_btcw_T0_T1_cc,
676 },
677 [1] = {
678 gen_op_btl_T0_T1_cc,
679 gen_op_btsl_T0_T1_cc,
680 gen_op_btrl_T0_T1_cc,
681 gen_op_btcl_T0_T1_cc,
682 },
683#ifdef TARGET_X86_64
684 [2] = {
685 gen_op_btq_T0_T1_cc,
686 gen_op_btsq_T0_T1_cc,
687 gen_op_btrq_T0_T1_cc,
688 gen_op_btcq_T0_T1_cc,
689 },
690#endif
691};
692
693static GenOpFunc *gen_op_add_bit_A0_T1[3] = {
694 gen_op_add_bitw_A0_T1,
695 gen_op_add_bitl_A0_T1,
696 X86_64_ONLY(gen_op_add_bitq_A0_T1),
697};
698
699static GenOpFunc *gen_op_bsx_T0_cc[3][2] = {
700 [0] = {
701 gen_op_bsfw_T0_cc,
702 gen_op_bsrw_T0_cc,
703 },
704 [1] = {
705 gen_op_bsfl_T0_cc,
706 gen_op_bsrl_T0_cc,
707 },
708#ifdef TARGET_X86_64
709 [2] = {
710 gen_op_bsfq_T0_cc,
711 gen_op_bsrq_T0_cc,
712 },
713#endif
714};
715
716static GenOpFunc *gen_op_lds_T0_A0[3 * 4] = {
717 gen_op_ldsb_raw_T0_A0,
718 gen_op_ldsw_raw_T0_A0,
719 X86_64_ONLY(gen_op_ldsl_raw_T0_A0),
720 NULL,
721#ifndef CONFIG_USER_ONLY
722 gen_op_ldsb_kernel_T0_A0,
723 gen_op_ldsw_kernel_T0_A0,
724 X86_64_ONLY(gen_op_ldsl_kernel_T0_A0),
725 NULL,
726
727 gen_op_ldsb_user_T0_A0,
728 gen_op_ldsw_user_T0_A0,
729 X86_64_ONLY(gen_op_ldsl_user_T0_A0),
730 NULL,
731#endif
732};
733
734static GenOpFunc *gen_op_ldu_T0_A0[3 * 4] = {
735 gen_op_ldub_raw_T0_A0,
736 gen_op_lduw_raw_T0_A0,
737 NULL,
738 NULL,
739
740#ifndef CONFIG_USER_ONLY
741 gen_op_ldub_kernel_T0_A0,
742 gen_op_lduw_kernel_T0_A0,
743 NULL,
744 NULL,
745
746 gen_op_ldub_user_T0_A0,
747 gen_op_lduw_user_T0_A0,
748 NULL,
749 NULL,
750#endif
751};
752
753/* sign does not matter, except for lidt/lgdt call (TODO: fix it) */
754static GenOpFunc *gen_op_ld_T0_A0[3 * 4] = {
755 gen_op_ldub_raw_T0_A0,
756 gen_op_lduw_raw_T0_A0,
757 gen_op_ldl_raw_T0_A0,
758 X86_64_ONLY(gen_op_ldq_raw_T0_A0),
759
760#ifndef CONFIG_USER_ONLY
761 gen_op_ldub_kernel_T0_A0,
762 gen_op_lduw_kernel_T0_A0,
763 gen_op_ldl_kernel_T0_A0,
764 X86_64_ONLY(gen_op_ldq_kernel_T0_A0),
765
766 gen_op_ldub_user_T0_A0,
767 gen_op_lduw_user_T0_A0,
768 gen_op_ldl_user_T0_A0,
769 X86_64_ONLY(gen_op_ldq_user_T0_A0),
770#endif
771};
772
773static GenOpFunc *gen_op_ld_T1_A0[3 * 4] = {
774 gen_op_ldub_raw_T1_A0,
775 gen_op_lduw_raw_T1_A0,
776 gen_op_ldl_raw_T1_A0,
777 X86_64_ONLY(gen_op_ldq_raw_T1_A0),
778
779#ifndef CONFIG_USER_ONLY
780 gen_op_ldub_kernel_T1_A0,
781 gen_op_lduw_kernel_T1_A0,
782 gen_op_ldl_kernel_T1_A0,
783 X86_64_ONLY(gen_op_ldq_kernel_T1_A0),
784
785 gen_op_ldub_user_T1_A0,
786 gen_op_lduw_user_T1_A0,
787 gen_op_ldl_user_T1_A0,
788 X86_64_ONLY(gen_op_ldq_user_T1_A0),
789#endif
790};
791
792static GenOpFunc *gen_op_st_T0_A0[3 * 4] = {
793 gen_op_stb_raw_T0_A0,
794 gen_op_stw_raw_T0_A0,
795 gen_op_stl_raw_T0_A0,
796 X86_64_ONLY(gen_op_stq_raw_T0_A0),
797
798#ifndef CONFIG_USER_ONLY
799 gen_op_stb_kernel_T0_A0,
800 gen_op_stw_kernel_T0_A0,
801 gen_op_stl_kernel_T0_A0,
802 X86_64_ONLY(gen_op_stq_kernel_T0_A0),
803
804 gen_op_stb_user_T0_A0,
805 gen_op_stw_user_T0_A0,
806 gen_op_stl_user_T0_A0,
807 X86_64_ONLY(gen_op_stq_user_T0_A0),
808#endif
809};
810
811static GenOpFunc *gen_op_st_T1_A0[3 * 4] = {
812 NULL,
813 gen_op_stw_raw_T1_A0,
814 gen_op_stl_raw_T1_A0,
815 X86_64_ONLY(gen_op_stq_raw_T1_A0),
816
817#ifndef CONFIG_USER_ONLY
818 NULL,
819 gen_op_stw_kernel_T1_A0,
820 gen_op_stl_kernel_T1_A0,
821 X86_64_ONLY(gen_op_stq_kernel_T1_A0),
822
823 NULL,
824 gen_op_stw_user_T1_A0,
825 gen_op_stl_user_T1_A0,
826 X86_64_ONLY(gen_op_stq_user_T1_A0),
827#endif
828};
829
830#ifdef VBOX
/* Emit the VBox hook that polls for pending external events (the
   actual work is done by the generated op).  Declared with (void):
   an empty parameter list in C declares an unprototyped function and
   disables argument checking. */
static void gen_check_external_event(void)
{
    gen_op_check_external_event();
}
835
/* Store the immediate 'pc' into the guest EIP/RIP.  Identical to the
   eip-store part of gen_jmp_im() but without the VBox external-event
   check, for call sites that only need the eip to be current. */
static inline void gen_update_eip(target_ulong pc)
{
#ifdef TARGET_X86_64
    if (pc == (uint32_t)pc) {
        /* value fits in 32 bits zero-extended */
        gen_op_movl_eip_im(pc);
    } else if (pc == (int32_t)pc) {
        /* value fits in 32 bits sign-extended */
        gen_op_movq_eip_im(pc);
    } else {
        /* full 64-bit immediate, passed as two 32-bit halves */
        gen_op_movq_eip_im64(pc >> 32, pc);
    }
#else
    gen_op_movl_eip_im(pc);
#endif
}
850
851#endif /* VBOX */
852
/* Store the immediate 'pc' into the guest EIP/RIP, choosing the
   smallest encoding that preserves the value.  Under VBox this also
   emits a check for pending external events first. */
static inline void gen_jmp_im(target_ulong pc)
{
#ifdef VBOX
    gen_check_external_event();
#endif
#ifdef TARGET_X86_64
    if (pc == (uint32_t)pc) {
        /* value fits in 32 bits zero-extended */
        gen_op_movl_eip_im(pc);
    } else if (pc == (int32_t)pc) {
        /* value fits in 32 bits sign-extended */
        gen_op_movq_eip_im(pc);
    } else {
        /* full 64-bit immediate, passed as two 32-bit halves */
        gen_op_movq_eip_im64(pc >> 32, pc);
    }
#else
    gen_op_movl_eip_im(pc);
#endif
}
870
/* Load A0 with the effective address of the string source operand
   (seg:ESI), honouring segment overrides and the current address
   size; the default segment is DS. */
static inline void gen_string_movl_A0_ESI(DisasContext *s)
{
    int override;

    override = s->override;
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64 bit address: segment base added only for explicit overrides */
        if (override >= 0) {
            gen_op_movq_A0_seg(offsetof(CPUX86State,segs[override].base));
            gen_op_addq_A0_reg_sN[0][R_ESI]();
        } else {
            gen_op_movq_A0_reg[R_ESI]();
        }
    } else
#endif
    if (s->aflag) {
        /* 32 bit address: only add a segment base when one is non-zero
           (addseg) or an explicit override was given */
        if (s->addseg && override < 0)
            override = R_DS;
        if (override >= 0) {
            gen_op_movl_A0_seg(offsetof(CPUX86State,segs[override].base));
            gen_op_addl_A0_reg_sN[0][R_ESI]();
        } else {
            gen_op_movl_A0_reg[R_ESI]();
        }
    } else {
        /* 16 bit address: mask ESI to 16 bits, always add the segment base */
        if (override < 0)
            override = R_DS;
        gen_op_movl_A0_reg[R_ESI]();
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
    }
}
905
/* Load A0 with the effective address of the string destination operand
   (ES:EDI); ES cannot be overridden for string destinations. */
static inline void gen_string_movl_A0_EDI(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        /* 64 bit address: flat, no segment base */
        gen_op_movq_A0_reg[R_EDI]();
    } else
#endif
    if (s->aflag) {
        /* 32 bit address: add the ES base only when some base is non-zero */
        if (s->addseg) {
            gen_op_movl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
            gen_op_addl_A0_reg_sN[0][R_EDI]();
        } else {
            gen_op_movl_A0_reg[R_EDI]();
        }
    } else {
        /* 16 bit address: mask EDI to 16 bits, always add the ES base */
        gen_op_movl_A0_reg[R_EDI]();
        gen_op_andl_A0_ffff();
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_ES].base));
    }
}
926
927static GenOpFunc *gen_op_movl_T0_Dshift[4] = {
928 gen_op_movl_T0_Dshiftb,
929 gen_op_movl_T0_Dshiftw,
930 gen_op_movl_T0_Dshiftl,
931 X86_64_ONLY(gen_op_movl_T0_Dshiftq),
932};
933
934static GenOpFunc1 *gen_op_jnz_ecx[3] = {
935 gen_op_jnz_ecxw,
936 gen_op_jnz_ecxl,
937 X86_64_ONLY(gen_op_jnz_ecxq),
938};
939
940static GenOpFunc1 *gen_op_jz_ecx[3] = {
941 gen_op_jz_ecxw,
942 gen_op_jz_ecxl,
943 X86_64_ONLY(gen_op_jz_ecxq),
944};
945
946static GenOpFunc *gen_op_dec_ECX[3] = {
947 gen_op_decw_ECX,
948 gen_op_decl_ECX,
949 X86_64_ONLY(gen_op_decq_ECX),
950};
951
952static GenOpFunc1 *gen_op_string_jnz_sub[2][4] = {
953 {
954 gen_op_jnz_subb,
955 gen_op_jnz_subw,
956 gen_op_jnz_subl,
957 X86_64_ONLY(gen_op_jnz_subq),
958 },
959 {
960 gen_op_jz_subb,
961 gen_op_jz_subw,
962 gen_op_jz_subl,
963 X86_64_ONLY(gen_op_jz_subq),
964 },
965};
966
967static GenOpFunc *gen_op_in_DX_T0[3] = {
968 gen_op_inb_DX_T0,
969 gen_op_inw_DX_T0,
970 gen_op_inl_DX_T0,
971};
972
973static GenOpFunc *gen_op_out_DX_T0[3] = {
974 gen_op_outb_DX_T0,
975 gen_op_outw_DX_T0,
976 gen_op_outl_DX_T0,
977};
978
979static GenOpFunc *gen_op_in[3] = {
980 gen_op_inb_T0_T1,
981 gen_op_inw_T0_T1,
982 gen_op_inl_T0_T1,
983};
984
985static GenOpFunc *gen_op_out[3] = {
986 gen_op_outb_T0_T1,
987 gen_op_outw_T0_T1,
988 gen_op_outl_T0_T1,
989};
990
991static GenOpFunc *gen_check_io_T0[3] = {
992 gen_op_check_iob_T0,
993 gen_op_check_iow_T0,
994 gen_op_check_iol_T0,
995};
996
997static GenOpFunc *gen_check_io_DX[3] = {
998 gen_op_check_iob_DX,
999 gen_op_check_iow_DX,
1000 gen_op_check_iol_DX,
1001};
1002
1003static void gen_check_io(DisasContext *s, int ot, int use_dx, target_ulong cur_eip)
1004{
1005 if (s->pe && (s->cpl > s->iopl || s->vm86)) {
1006 if (s->cc_op != CC_OP_DYNAMIC)
1007 gen_op_set_cc_op(s->cc_op);
1008 gen_jmp_im(cur_eip);
1009 if (use_dx)
1010 gen_check_io_DX[ot]();
1011 else
1012 gen_check_io_T0[ot]();
1013 }
1014}
1015
/* MOVS: copy one element of size 'ot' from seg:[ESI] to ES:[EDI],
   then advance both index registers by +/- the element size (sign
   taken from DF via the Dshift helper). */
static inline void gen_movs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_movl_T0_Dshift[ot]();   /* T0 = +/- element size */
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
1037
1038static inline void gen_update_cc_op(DisasContext *s)
1039{
1040 if (s->cc_op != CC_OP_DYNAMIC) {
1041 gen_op_set_cc_op(s->cc_op);
1042 s->cc_op = CC_OP_DYNAMIC;
1043 }
1044}
1045
1046/* XXX: does not work with gdbstub "ice" single step - not a
1047 serious problem */
/* Emit the leading "if ECX == 0, skip to the next instruction" test of
   a REP-prefixed string op.  Returns the label (l2) at the exit jump,
   which the caller may branch back to when an iteration terminates. */
static int gen_jz_ecx_string(DisasContext *s, target_ulong next_eip)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_op_jnz_ecx[s->aflag](l1);   /* ECX != 0: run the string op body */
    gen_set_label(l2);
    gen_jmp_tb(s, next_eip, 1);     /* ECX == 0: jump past the instruction */
    gen_set_label(l1);
    return l2;
}
1060
/* STOS: store AL/AX/EAX to ES:[EDI], then advance EDI by the element
   size (direction from DF via the Dshift helper). */
static inline void gen_stos(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();   /* T0 = EAX */
    gen_string_movl_A0_EDI(s);
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_movl_T0_Dshift[ot]();              /* T0 = +/- element size */
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1078
/* LODS: load one element from seg:[ESI] into AL/AX/EAX, then advance
   ESI by the element size (direction from DF). */
static inline void gen_lods(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_op_mov_reg_T0[ot][R_EAX]();
    gen_op_movl_T0_Dshift[ot]();    /* T0 = +/- element size */
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1096
/* SCAS: compare AL/AX/EAX with ES:[EDI] (sets flags only, like CMP),
   then advance EDI by the element size. */
static inline void gen_scas(DisasContext *s, int ot)
{
    gen_op_mov_TN_reg[OT_LONG][0][R_EAX]();   /* T0 = EAX */
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0[ot + s->mem_index]();
    gen_op_cmpl_T0_T1_cc();                   /* flags from T0 - T1 */
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1115
/* CMPS: compare seg:[ESI] with ES:[EDI] (flags only), then advance
   both index registers by the element size. */
static inline void gen_cmps(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_string_movl_A0_EDI(s);
    gen_op_ld_T1_A0[ot + s->mem_index]();
    gen_op_cmpl_T0_T1_cc();         /* flags from T0 - T1 */
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_ESI_T0();
        gen_op_addw_EDI_T0();
    }
}
1138
/* INS: read one element from port DX and store it to ES:[EDI], then
   advance EDI. */
static inline void gen_ins(DisasContext *s, int ot)
{
    gen_string_movl_A0_EDI(s);
    /* a dummy zero store is done before the port read — presumably to
       take any write fault before the non-restartable I/O access is
       performed; confirm against the op implementation */
    gen_op_movl_T0_0();
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_in_DX_T0[ot]();
    gen_op_st_T0_A0[ot + s->mem_index]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_EDI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_EDI_T0();
    } else {
        gen_op_addw_EDI_T0();
    }
}
1158
/* OUTS: write one element from seg:[ESI] to port DX, then advance ESI. */
static inline void gen_outs(DisasContext *s, int ot)
{
    gen_string_movl_A0_ESI(s);
    gen_op_ld_T0_A0[ot + s->mem_index]();
    gen_op_out_DX_T0[ot]();
    gen_op_movl_T0_Dshift[ot]();
#ifdef TARGET_X86_64
    if (s->aflag == 2) {
        gen_op_addq_ESI_T0();
    } else
#endif
    if (s->aflag) {
        gen_op_addl_ESI_T0();
    } else {
        gen_op_addw_ESI_T0();
    }
}
1176
1177/* same method as Valgrind : we generate jumps to current or next
1178 instruction */
/* Generate a gen_repz_<op>() for REP MOVS/STOS/LODS/INS/OUTS: run one
   iteration per translated pass, decrement ECX, and jump back to
   cur_eip so the ECX test at the top runs again. */
#define GEN_REPZ(op)                                                          \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                 target_ulong cur_eip, target_ulong next_eip) \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    /* a loop would cause two single step exceptions if ECX = 1 \
       before rep string_insn */                                              \
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
1194
/* Generate a gen_repz_<op>() for REPZ/REPNZ SCAS/CMPS: like GEN_REPZ,
   but also terminate the loop on the ZF condition produced by the
   comparison ('nz' selects REPZ vs REPNZ behaviour). */
#define GEN_REPZ2(op)                                                         \
static inline void gen_repz_ ## op(DisasContext *s, int ot,                   \
                                   target_ulong cur_eip,                      \
                                   target_ulong next_eip,                     \
                                   int nz)                                    \
{                                                                             \
    int l2;\
    gen_update_cc_op(s);                                                      \
    l2 = gen_jz_ecx_string(s, next_eip);                                      \
    gen_ ## op(s, ot);                                                        \
    gen_op_dec_ECX[s->aflag]();                                               \
    gen_op_set_cc_op(CC_OP_SUBB + ot);                                        \
    gen_op_string_jnz_sub[nz][ot](l2);\
    if (!s->jmp_opt)                                                          \
        gen_op_jz_ecx[s->aflag](l2);                                          \
    gen_jmp(s, cur_eip);                                                      \
}
1212
1213GEN_REPZ(movs)
1214GEN_REPZ(stos)
1215GEN_REPZ(lods)
1216GEN_REPZ(ins)
1217GEN_REPZ(outs)
1218GEN_REPZ2(scas)
1219GEN_REPZ2(cmps)
1220
1221enum {
1222 JCC_O,
1223 JCC_B,
1224 JCC_Z,
1225 JCC_BE,
1226 JCC_S,
1227 JCC_P,
1228 JCC_L,
1229 JCC_LE,
1230};
1231
/* Fast conditional-jump helpers usable when the flags are known to
   come from a SUB of the given operand size; NULL entries (JCC_O,
   JCC_P) have no fast path and fall back to the slow flag evaluation. */
static GenOpFunc1 *gen_jcc_sub[4][8] = {
    [OT_BYTE] = {
        NULL,
        gen_op_jb_subb,
        gen_op_jz_subb,
        gen_op_jbe_subb,
        gen_op_js_subb,
        NULL,
        gen_op_jl_subb,
        gen_op_jle_subb,
    },
    [OT_WORD] = {
        NULL,
        gen_op_jb_subw,
        gen_op_jz_subw,
        gen_op_jbe_subw,
        gen_op_js_subw,
        NULL,
        gen_op_jl_subw,
        gen_op_jle_subw,
    },
    [OT_LONG] = {
        NULL,
        gen_op_jb_subl,
        gen_op_jz_subl,
        gen_op_jbe_subl,
        gen_op_js_subl,
        NULL,
        gen_op_jl_subl,
        gen_op_jle_subl,
    },
#ifdef TARGET_X86_64
    [OT_QUAD] = {
        NULL,
        /* BUGGY_64 entries are disabled (NULL) — see the macro note
           about gcc-generated push/pop in those ops */
        BUGGY_64(gen_op_jb_subq),
        gen_op_jz_subq,
        BUGGY_64(gen_op_jbe_subq),
        gen_op_js_subq,
        NULL,
        BUGGY_64(gen_op_jl_subq),
        BUGGY_64(gen_op_jle_subq),
    },
#endif
};
1276static GenOpFunc1 *gen_op_loop[3][4] = {
1277 [0] = {
1278 gen_op_loopnzw,
1279 gen_op_loopzw,
1280 gen_op_jnz_ecxw,
1281 },
1282 [1] = {
1283 gen_op_loopnzl,
1284 gen_op_loopzl,
1285 gen_op_jnz_ecxl,
1286 },
1287#ifdef TARGET_X86_64
1288 [2] = {
1289 gen_op_loopnzq,
1290 gen_op_loopzq,
1291 gen_op_jnz_ecxq,
1292 },
1293#endif
1294};
1295
1296static GenOpFunc *gen_setcc_slow[8] = {
1297 gen_op_seto_T0_cc,
1298 gen_op_setb_T0_cc,
1299 gen_op_setz_T0_cc,
1300 gen_op_setbe_T0_cc,
1301 gen_op_sets_T0_cc,
1302 gen_op_setp_T0_cc,
1303 gen_op_setl_T0_cc,
1304 gen_op_setle_T0_cc,
1305};
1306
1307static GenOpFunc *gen_setcc_sub[4][8] = {
1308 [OT_BYTE] = {
1309 NULL,
1310 gen_op_setb_T0_subb,
1311 gen_op_setz_T0_subb,
1312 gen_op_setbe_T0_subb,
1313 gen_op_sets_T0_subb,
1314 NULL,
1315 gen_op_setl_T0_subb,
1316 gen_op_setle_T0_subb,
1317 },
1318 [OT_WORD] = {
1319 NULL,
1320 gen_op_setb_T0_subw,
1321 gen_op_setz_T0_subw,
1322 gen_op_setbe_T0_subw,
1323 gen_op_sets_T0_subw,
1324 NULL,
1325 gen_op_setl_T0_subw,
1326 gen_op_setle_T0_subw,
1327 },
1328 [OT_LONG] = {
1329 NULL,
1330 gen_op_setb_T0_subl,
1331 gen_op_setz_T0_subl,
1332 gen_op_setbe_T0_subl,
1333 gen_op_sets_T0_subl,
1334 NULL,
1335 gen_op_setl_T0_subl,
1336 gen_op_setle_T0_subl,
1337 },
1338#ifdef TARGET_X86_64
1339 [OT_QUAD] = {
1340 NULL,
1341 gen_op_setb_T0_subq,
1342 gen_op_setz_T0_subq,
1343 gen_op_setbe_T0_subq,
1344 gen_op_sets_T0_subq,
1345 NULL,
1346 gen_op_setl_T0_subq,
1347 gen_op_setle_T0_subq,
1348 },
1349#endif
1350};
1351
/* FPU "st(0) = st(0) op FT0" helpers, indexed by the 3-bit /reg field;
   FCOM appears twice because /2 (FCOM) and /3 (FCOMP) share the
   compare, with the pop handled by the caller. */
static GenOpFunc *gen_op_fp_arith_ST0_FT0[8] = {
    gen_op_fadd_ST0_FT0,
    gen_op_fmul_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fcom_ST0_FT0,
    gen_op_fsub_ST0_FT0,
    gen_op_fsubr_ST0_FT0,
    gen_op_fdiv_ST0_FT0,
    gen_op_fdivr_ST0_FT0,
};
1362
1363/* NOTE the exception in "r" op ordering */
/* FPU "st(n) = st(n) op st(0)" helpers, indexed by /reg.
   NOTE the exception in "r" op ordering: for the STN destination forms
   the x86 encoding swaps sub/subr and div/divr relative to the ST0
   table above. */
static GenOpFunc1 *gen_op_fp_arith_STN_ST0[8] = {
    gen_op_fadd_STN_ST0,
    gen_op_fmul_STN_ST0,
    NULL,
    NULL,
    gen_op_fsubr_STN_ST0,
    gen_op_fsub_STN_ST0,
    gen_op_fdivr_STN_ST0,
    gen_op_fdiv_STN_ST0,
};
1374
/* if d == OR_TMP0, it means memory operand (address in A0) */
/* Emit code for a two-operand ALU op (T0 = T0 op T1) of size 'ot',
   updating the lazy condition-code state in s1->cc_op.  The second
   operand is expected in T1. */
static void gen_op(DisasContext *s1, int op, int ot, int d)
{
    GenOpFunc *gen_update_cc;
    
    /* load the destination operand into T0 (register or memory at A0) */
    if (d != OR_TMP0) {
        gen_op_mov_TN_reg[ot][0][d]();
    } else {
        gen_op_ld_T0_A0[ot + s1->mem_index]();
    }
    switch(op) {
    case OP_ADCL:
    case OP_SBBL:
        /* adc/sbb consume the current carry: materialize flags first */
        if (s1->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s1->cc_op);
        if (d != OR_TMP0) {
            gen_op_arithc_T0_T1_cc[ot][op - OP_ADCL]();
            gen_op_mov_reg_T0[ot][d]();
        } else {
            /* combined op+store so flag update happens after the write */
            gen_op_arithc_mem_T0_T1_cc[ot + s1->mem_index][op - OP_ADCL]();
        }
        s1->cc_op = CC_OP_DYNAMIC;
        goto the_end;
    case OP_ADDL:
        gen_op_addl_T0_T1();
        s1->cc_op = CC_OP_ADDB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    case OP_SUBL:
        gen_op_subl_T0_T1();
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = gen_op_update2_cc;
        break;
    default:
    case OP_ANDL:
    case OP_ORL:
    case OP_XORL:
        gen_op_arith_T0_T1_cc[op]();
        s1->cc_op = CC_OP_LOGICB + ot;
        gen_update_cc = gen_op_update1_cc;
        break;
    case OP_CMPL:
        /* cmp sets flags only; no writeback, no deferred cc update */
        gen_op_cmpl_T0_T1_cc();
        s1->cc_op = CC_OP_SUBB + ot;
        gen_update_cc = NULL;
        break;
    }
    if (op != OP_CMPL) {
        if (d != OR_TMP0)
            gen_op_mov_reg_T0[ot][d]();
        else
            gen_op_st_T0_A0[ot + s1->mem_index]();
    }
    /* the flags update must happen after the memory write (precise
       exception support) */
    if (gen_update_cc)
        gen_update_cc();
 the_end: ;
}
1434
1435/* if d == OR_TMP0, it means memory operand (address in A0) */
1436static void gen_inc(DisasContext *s1, int ot, int d, int c)
1437{
1438 if (d != OR_TMP0)
1439 gen_op_mov_TN_reg[ot][0][d]();
1440 else
1441 gen_op_ld_T0_A0[ot + s1->mem_index]();
1442 if (s1->cc_op != CC_OP_DYNAMIC)
1443 gen_op_set_cc_op(s1->cc_op);
1444 if (c > 0) {
1445 gen_op_incl_T0();
1446 s1->cc_op = CC_OP_INCB + ot;
1447 } else {
1448 gen_op_decl_T0();
1449 s1->cc_op = CC_OP_DECB + ot;
1450 }
1451 if (d != OR_TMP0)
1452 gen_op_mov_reg_T0[ot][d]();
1453 else
1454 gen_op_st_T0_A0[ot + s1->mem_index]();
1455 gen_op_update_inc_cc();
1456}
1457
1458static void gen_shift(DisasContext *s1, int op, int ot, int d, int s)
1459{
1460 if (d != OR_TMP0)
1461 gen_op_mov_TN_reg[ot][0][d]();
1462 else
1463 gen_op_ld_T0_A0[ot + s1->mem_index]();
1464 if (s != OR_TMP1)
1465 gen_op_mov_TN_reg[ot][1][s]();
1466 /* for zero counts, flags are not updated, so must do it dynamically */
1467 if (s1->cc_op != CC_OP_DYNAMIC)
1468 gen_op_set_cc_op(s1->cc_op);
1469
1470 if (d != OR_TMP0)
1471 gen_op_shift_T0_T1_cc[ot][op]();
1472 else
1473 gen_op_shift_mem_T0_T1_cc[ot + s1->mem_index][op]();
1474 if (d != OR_TMP0)
1475 gen_op_mov_reg_T0[ot][d]();
1476 s1->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
1477}
1478
/* Emit code for a shift/rotate by the immediate count 'c': loads the
   count into T1 and reuses the variable-count path. */
static void gen_shifti(DisasContext *s1, int op, int ot, int d, int c)
{
    /* currently not optimized */
    gen_op_movl_T1_im(c);
    gen_shift(s1, op, ot, d, OR_TMP1);
}
1485
/* Decode the memory operand of a modrm byte and emit code that leaves
   the effective (possibly segment-adjusted) address in A0.  Consumes
   any SIB byte and displacement from the instruction stream (s->pc is
   advanced).  Handles 16-bit, 32-bit and (ifdef'd) 64-bit addressing,
   including RIP-relative addressing in long mode.  *reg_ptr/*offset_ptr
   are always set to OR_A0/0 on return. */
static void gen_lea_modrm(DisasContext *s, int modrm, int *reg_ptr, int *offset_ptr)
{
    target_long disp;
    int havesib;
    int base;
    int index;
    int scale;
    int opreg;
    int mod, rm, code, override, must_add_seg;

    /* a segment override prefix forces the base addition even when
       addseg optimization would otherwise skip it */
    override = s->override;
    must_add_seg = s->addseg;
    if (override >= 0)
        must_add_seg = 1;
    mod = (modrm >> 6) & 3;
    rm = modrm & 7;

    if (s->aflag) {
        /* 32-bit or 64-bit addressing */
        havesib = 0;
        base = rm;
        index = 0;
        scale = 0;
        
        if (base == 4) {
            /* base == 4 selects the SIB form */
            havesib = 1;
            code = ldub_code(s->pc++);
            scale = (code >> 6) & 3;
            index = ((code >> 3) & 7) | REX_X(s);
            base = (code & 7);
        }
        base |= REX_B(s);

        switch (mod) {
        case 0:
            if ((base & 7) == 5) {
                /* no base register, disp32 only */
                base = -1;
                disp = (int32_t)ldl_code(s->pc);
                s->pc += 4;
                if (CODE64(s) && !havesib) {
                    /* RIP-relative: relative to the end of the insn */
                    disp += s->pc + s->rip_offset;
                }
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = ldl_code(s->pc);
            s->pc += 4;
            break;
        }
        
        if (base >= 0) {
            /* for correct popl handling with esp */
            if (base == 4 && s->popl_esp_hack)
                disp += s->popl_esp_hack;
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_movq_A0_reg[base]();
                if (disp != 0) {
                    /* wide displacements need the 64-bit immediate form */
                    if ((int32_t)disp == disp)
                        gen_op_addq_A0_im(disp);
                    else
                        gen_op_addq_A0_im64(disp >> 32, disp);
                }
            } else 
#endif
            {
                gen_op_movl_A0_reg[base]();
                if (disp != 0)
                    gen_op_addl_A0_im(disp);
            }
        } else {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                if ((int32_t)disp == disp)
                    gen_op_movq_A0_im(disp);
                else
                    gen_op_movq_A0_im64(disp >> 32, disp);
            } else 
#endif
            {
                gen_op_movl_A0_im(disp);
            }
        }
        /* XXX: index == 4 is always invalid */
        if (havesib && (index != 4 || scale != 0)) {
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_reg_sN[scale][index]();
            } else 
#endif
            {
                gen_op_addl_A0_reg_sN[scale][index]();
            }
        }
        if (must_add_seg) {
            if (override < 0) {
                /* EBP/ESP-based addressing defaults to SS */
                if (base == R_EBP || base == R_ESP)
                    override = R_SS;
                else
                    override = R_DS;
            }
#ifdef TARGET_X86_64
            if (s->aflag == 2) {
                gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
            } else 
#endif
            {
                gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
            }
        }
    } else {
        /* 16-bit addressing: fixed base/index register combinations */
        switch (mod) {
        case 0:
            if (rm == 6) {
                /* disp16 only form */
                disp = lduw_code(s->pc);
                s->pc += 2;
                gen_op_movl_A0_im(disp);
                rm = 0; /* avoid SS override */
                goto no_rm;
            } else {
                disp = 0;
            }
            break;
        case 1:
            disp = (int8_t)ldub_code(s->pc++);
            break;
        default:
        case 2:
            disp = lduw_code(s->pc);
            s->pc += 2;
            break;
        }
        switch(rm) {
        case 0:
            gen_op_movl_A0_reg[R_EBX]();
            gen_op_addl_A0_reg_sN[0][R_ESI]();
            break;
        case 1:
            gen_op_movl_A0_reg[R_EBX]();
            gen_op_addl_A0_reg_sN[0][R_EDI]();
            break;
        case 2:
            gen_op_movl_A0_reg[R_EBP]();
            gen_op_addl_A0_reg_sN[0][R_ESI]();
            break;
        case 3:
            gen_op_movl_A0_reg[R_EBP]();
            gen_op_addl_A0_reg_sN[0][R_EDI]();
            break;
        case 4:
            gen_op_movl_A0_reg[R_ESI]();
            break;
        case 5:
            gen_op_movl_A0_reg[R_EDI]();
            break;
        case 6:
            gen_op_movl_A0_reg[R_EBP]();
            break;
        default:
        case 7:
            gen_op_movl_A0_reg[R_EBX]();
            break;
        }
        if (disp != 0)
            gen_op_addl_A0_im(disp);
        /* 16-bit effective addresses wrap at 64K */
        gen_op_andl_A0_ffff();
    no_rm:
        if (must_add_seg) {
            if (override < 0) {
                /* BP-based forms (rm 2, 3, 6) default to SS */
                if (rm == 2 || rm == 3 || rm == 6)
                    override = R_SS;
                else
                    override = R_DS;
            }
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
        }
    }

    opreg = OR_A0;
    disp = 0;
    *reg_ptr = opreg;
    *offset_ptr = disp;
}
1675
1676/* used for LEA and MOV AX, mem */
1677static void gen_add_A0_ds_seg(DisasContext *s)
1678{
1679 int override, must_add_seg;
1680 must_add_seg = s->addseg;
1681 override = R_DS;
1682 if (s->override >= 0) {
1683 override = s->override;
1684 must_add_seg = 1;
1685 } else {
1686 override = R_DS;
1687 }
1688 if (must_add_seg) {
1689#ifdef TARGET_X86_64
1690 if (CODE64(s)) {
1691 gen_op_addq_A0_seg(offsetof(CPUX86State,segs[override].base));
1692 } else
1693#endif
1694 {
1695 gen_op_addl_A0_seg(offsetof(CPUX86State,segs[override].base));
1696 }
1697 }
1698}
1699
1700/* generate modrm memory load or store of 'reg'. TMP0 is used if reg !=
1701 OR_TMP0 */
1702static void gen_ldst_modrm(DisasContext *s, int modrm, int ot, int reg, int is_store)
1703{
1704 int mod, rm, opreg, disp;
1705
1706 mod = (modrm >> 6) & 3;
1707 rm = (modrm & 7) | REX_B(s);
1708 if (mod == 3) {
1709 if (is_store) {
1710 if (reg != OR_TMP0)
1711 gen_op_mov_TN_reg[ot][0][reg]();
1712 gen_op_mov_reg_T0[ot][rm]();
1713 } else {
1714 gen_op_mov_TN_reg[ot][0][rm]();
1715 if (reg != OR_TMP0)
1716 gen_op_mov_reg_T0[ot][reg]();
1717 }
1718 } else {
1719 gen_lea_modrm(s, modrm, &opreg, &disp);
1720 if (is_store) {
1721 if (reg != OR_TMP0)
1722 gen_op_mov_TN_reg[ot][0][reg]();
1723 gen_op_st_T0_A0[ot + s->mem_index]();
1724 } else {
1725 gen_op_ld_T0_A0[ot + s->mem_index]();
1726 if (reg != OR_TMP0)
1727 gen_op_mov_reg_T0[ot][reg]();
1728 }
1729 }
1730}
1731
1732static inline uint32_t insn_get(DisasContext *s, int ot)
1733{
1734 uint32_t ret;
1735
1736 switch(ot) {
1737 case OT_BYTE:
1738 ret = ldub_code(s->pc);
1739 s->pc++;
1740 break;
1741 case OT_WORD:
1742 ret = lduw_code(s->pc);
1743 s->pc += 2;
1744 break;
1745 default:
1746 case OT_LONG:
1747 ret = ldl_code(s->pc);
1748 s->pc += 4;
1749 break;
1750 }
1751 return ret;
1752}
1753
1754static inline int insn_const_size(unsigned int ot)
1755{
1756 if (ot <= OT_LONG)
1757 return 1 << ot;
1758 else
1759 return 4;
1760}
1761
/* Emit a jump to 'eip', using direct TB chaining (goto_tb slot
   'tb_num') when the target lies on one of the pages this TB already
   spans, and a plain EOB otherwise. */
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)
{
    TranslationBlock *tb;
    target_ulong pc;

    pc = s->cs_base + eip;
    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        if (tb_num == 0)
            gen_op_goto_tb0(TBPARAM(tb));
        else
            gen_op_goto_tb1(TBPARAM(tb));
        gen_jmp_im(eip);
        /* return (tb | tb_num) so the caller can patch the chain */
        gen_op_movl_T0_im((long)tb + tb_num);
        gen_op_exit_tb();
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
1786
/* Emit code for a conditional jump: taken target 'val', fall-through
   'next_eip'.  When TB chaining is allowed (s->jmp_opt) an optimized
   condition test is selected from the current lazy cc_op where
   possible; otherwise the flags are computed via the slow setcc path
   and the block ends. */
static inline void gen_jcc(DisasContext *s, int b,
                           target_ulong val, target_ulong next_eip)
{
    TranslationBlock *tb;
    int inv, jcc_op;
    GenOpFunc1 *func;
    target_ulong tmp;
    int l1, l2;

    /* low bit of the opcode inverts the condition */
    inv = b & 1;
    jcc_op = (b >> 1) & 7;
    
    if (s->jmp_opt) {
#ifdef VBOX
        gen_check_external_event(s);
#endif /* VBOX */
        switch(s->cc_op) {
            /* we optimize the cmp/jcc case */
        case CC_OP_SUBB:
        case CC_OP_SUBW:
        case CC_OP_SUBL:
        case CC_OP_SUBQ:
            func = gen_jcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
            break;
            
            /* some jumps are easy to compute */
        case CC_OP_ADDB:
        case CC_OP_ADDW:
        case CC_OP_ADDL:
        case CC_OP_ADDQ:
            
        case CC_OP_ADCB:
        case CC_OP_ADCW:
        case CC_OP_ADCL:
        case CC_OP_ADCQ:
            
        case CC_OP_SBBB:
        case CC_OP_SBBW:
        case CC_OP_SBBL:
        case CC_OP_SBBQ:
            
        case CC_OP_LOGICB:
        case CC_OP_LOGICW:
        case CC_OP_LOGICL:
        case CC_OP_LOGICQ:
            
        case CC_OP_INCB:
        case CC_OP_INCW:
        case CC_OP_INCL:
        case CC_OP_INCQ:
            
        case CC_OP_DECB:
        case CC_OP_DECW:
        case CC_OP_DECL:
        case CC_OP_DECQ:
            
        case CC_OP_SHLB:
        case CC_OP_SHLW:
        case CC_OP_SHLL:
        case CC_OP_SHLQ:
            
        case CC_OP_SARB:
        case CC_OP_SARW:
        case CC_OP_SARL:
        case CC_OP_SARQ:
            /* for these ops only Z and S tests can reuse the sub table;
               (cc_op - CC_OP_ADDB) % 4 recovers the operand size */
            switch(jcc_op) {
            case JCC_Z:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            case JCC_S:
                func = gen_jcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
                break;
            default:
                func = NULL;
                break;
            }
            break;
        default:
            func = NULL;
            break;
        }

        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }

        if (!func) {
            /* no optimized test: compute the condition into T0 */
            gen_setcc_slow[jcc_op]();
            func = gen_op_jnz_T0_label;
        }
        
        /* inverted condition: swap taken and fall-through targets */
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        tb = s->tb;

        l1 = gen_new_label();
        func(l1);

        gen_goto_tb(s, 0, next_eip);

        gen_set_label(l1);
        gen_goto_tb(s, 1, val);

        s->is_jmp = 3;
    } else {
        /* no chaining: compute flags, jump via labels, end the block */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_setcc_slow[jcc_op]();
        if (inv) {
            tmp = val;
            val = next_eip;
            next_eip = tmp;
        }
        l1 = gen_new_label();
        l2 = gen_new_label();
        gen_op_jnz_T0_label(l1);
        gen_jmp_im(next_eip);
        gen_op_jmp_label(l2);
        gen_set_label(l1);
        gen_jmp_im(val);
        gen_set_label(l2);
        gen_eob(s);
    }
}
1918
/* Emit code computing the condition 'b' into T0 (0 or 1), using an
   optimized variant from the current lazy cc_op when one exists and
   falling back to the generic slow path otherwise. */
static void gen_setcc(DisasContext *s, int b)
{
    int inv, jcc_op;
    GenOpFunc *func;

    /* low bit of the opcode inverts the condition */
    inv = b & 1;
    jcc_op = (b >> 1) & 7;
    switch(s->cc_op) {
        /* we optimize the cmp/jcc case */
    case CC_OP_SUBB:
    case CC_OP_SUBW:
    case CC_OP_SUBL:
    case CC_OP_SUBQ:
        func = gen_setcc_sub[s->cc_op - CC_OP_SUBB][jcc_op];
        if (!func)
            goto slow_jcc;
        break;
        
        /* some jumps are easy to compute */
    case CC_OP_ADDB:
    case CC_OP_ADDW:
    case CC_OP_ADDL:
    case CC_OP_ADDQ:
        
    case CC_OP_LOGICB:
    case CC_OP_LOGICW:
    case CC_OP_LOGICL:
    case CC_OP_LOGICQ:
        
    case CC_OP_INCB:
    case CC_OP_INCW:
    case CC_OP_INCL:
    case CC_OP_INCQ:
        
    case CC_OP_DECB:
    case CC_OP_DECW:
    case CC_OP_DECL:
    case CC_OP_DECQ:
        
    case CC_OP_SHLB:
    case CC_OP_SHLW:
    case CC_OP_SHLL:
    case CC_OP_SHLQ:
        /* only Z and S can reuse the sub table for these ops;
           (cc_op - CC_OP_ADDB) % 4 recovers the operand size */
        switch(jcc_op) {
        case JCC_Z:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        case JCC_S:
            func = gen_setcc_sub[(s->cc_op - CC_OP_ADDB) % 4][jcc_op];
            break;
        default:
            goto slow_jcc;
        }
        break;
    default:
    slow_jcc:
        /* generic path: materialize flags, then test them */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        func = gen_setcc_slow[jcc_op];
        break;
    }
    func();
    if (inv) {
        gen_op_xor_T0_1();
    }
}
1985
/* move T0 to seg_reg and compute if the CPU state may change. Never
   call this function with seg_reg == R_CS */
static void gen_movl_seg_T0(DisasContext *s, int seg_reg, target_ulong cur_eip)
{
    if (s->pe && !s->vm86) {
        /* protected mode: full load with checks, may raise exceptions,
           so eip must be up to date */
        /* XXX: optimize by finding processor state dynamically */
        if (s->cc_op != CC_OP_DYNAMIC)
            gen_op_set_cc_op(s->cc_op);
        gen_jmp_im(cur_eip);
        gen_op_movl_seg_T0(seg_reg);
        /* abort translation because the addseg value may change or
           because ss32 may change. For R_SS, translation must always
           stop as a special handling must be done to disable hardware
           interrupts for the next instruction */
        if (seg_reg == R_SS || (s->code32 && seg_reg < R_FS))
            s->is_jmp = 3;
    } else {
        /* real/vm86 mode: simple base/limit update, no checks */
        gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[seg_reg]));
        if (seg_reg == R_SS)
            s->is_jmp = 3;
    }
}
2008
2009static inline void gen_stack_update(DisasContext *s, int addend)
2010{
2011#ifdef TARGET_X86_64
2012 if (CODE64(s)) {
2013 if (addend == 8)
2014 gen_op_addq_ESP_8();
2015 else
2016 gen_op_addq_ESP_im(addend);
2017 } else
2018#endif
2019 if (s->ss32) {
2020 if (addend == 2)
2021 gen_op_addl_ESP_2();
2022 else if (addend == 4)
2023 gen_op_addl_ESP_4();
2024 else
2025 gen_op_addl_ESP_im(addend);
2026 } else {
2027 if (addend == 2)
2028 gen_op_addw_ESP_2();
2029 else if (addend == 4)
2030 gen_op_addw_ESP_4();
2031 else
2032 gen_op_addw_ESP_im(addend);
2033 }
2034}
2035
/* generate a push. It depends on ss32, addseg and dflag */
static void gen_push_T0(DisasContext *s)
{
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        /* long mode: push is 8 bytes (default) or 2 bytes (0x66 prefix) */
        gen_op_movq_A0_reg[R_ESP]();
        if (s->dflag) {
            gen_op_subq_A0_8();
            gen_op_st_T0_A0[OT_QUAD + s->mem_index]();
        } else {
            gen_op_subq_A0_2();
            gen_op_st_T0_A0[OT_WORD + s->mem_index]();
        }
        gen_op_movq_ESP_A0();
    } else 
#endif
    {
        gen_op_movl_A0_reg[R_ESP]();
        if (!s->dflag)
            gen_op_subl_A0_2();
        else
            gen_op_subl_A0_4();
        /* T1 keeps the unadjusted offset for the ESP writeback when the
           SS base is added to A0 */
        if (s->ss32) {
            if (s->addseg) {
                gen_op_movl_T1_A0();
                gen_op_addl_A0_SS();
            }
        } else {
            gen_op_andl_A0_ffff();
            gen_op_movl_T1_A0();
            gen_op_addl_A0_SS();
        }
        gen_op_st_T0_A0[s->dflag + 1 + s->mem_index]();
        /* update ESP only after the store (precise exception support) */
        if (s->ss32 && !s->addseg)
            gen_op_movl_ESP_A0();
        else
            gen_op_mov_reg_T1[s->ss32 + 1][R_ESP]();
    }
}
2075
2076/* generate a push. It depends on ss32, addseg and dflag */
2077/* slower version for T1, only used for call Ev */
2078static void gen_push_T1(DisasContext *s)
2079{
2080#ifdef TARGET_X86_64
2081 if (CODE64(s)) {
2082 gen_op_movq_A0_reg[R_ESP]();
2083 if (s->dflag) {
2084 gen_op_subq_A0_8();
2085 gen_op_st_T1_A0[OT_QUAD + s->mem_index]();
2086 } else {
2087 gen_op_subq_A0_2();
2088 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
2089 }
2090 gen_op_movq_ESP_A0();
2091 } else
2092#endif
2093 {
2094 gen_op_movl_A0_reg[R_ESP]();
2095 if (!s->dflag)
2096 gen_op_subl_A0_2();
2097 else
2098 gen_op_subl_A0_4();
2099 if (s->ss32) {
2100 if (s->addseg) {
2101 gen_op_addl_A0_SS();
2102 }
2103 } else {
2104 gen_op_andl_A0_ffff();
2105 gen_op_addl_A0_SS();
2106 }
2107 gen_op_st_T1_A0[s->dflag + 1 + s->mem_index]();
2108
2109 if (s->ss32 && !s->addseg)
2110 gen_op_movl_ESP_A0();
2111 else
2112 gen_stack_update(s, (-2) << s->dflag);
2113 }
2114}
2115
2116/* two step pop is necessary for precise exceptions */
2117static void gen_pop_T0(DisasContext *s)
2118{
2119#ifdef TARGET_X86_64
2120 if (CODE64(s)) {
2121 gen_op_movq_A0_reg[R_ESP]();
2122 gen_op_ld_T0_A0[(s->dflag ? OT_QUAD : OT_WORD) + s->mem_index]();
2123 } else
2124#endif
2125 {
2126 gen_op_movl_A0_reg[R_ESP]();
2127 if (s->ss32) {
2128 if (s->addseg)
2129 gen_op_addl_A0_SS();
2130 } else {
2131 gen_op_andl_A0_ffff();
2132 gen_op_addl_A0_SS();
2133 }
2134 gen_op_ld_T0_A0[s->dflag + 1 + s->mem_index]();
2135 }
2136}
2137
2138static void gen_pop_update(DisasContext *s)
2139{
2140#ifdef TARGET_X86_64
2141 if (CODE64(s) && s->dflag) {
2142 gen_stack_update(s, 8);
2143 } else
2144#endif
2145 {
2146 gen_stack_update(s, 2 << s->dflag);
2147 }
2148}
2149
/* Compute the current stack address into A0 (with SS base if addseg);
   the raw offset is preserved in T1 for later ESP writeback. */
static void gen_stack_A0(DisasContext *s)
{
    gen_op_movl_A0_ESP();
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
}
2159
/* NOTE: wrap around in 16 bit not fully handled */
/* Emit code for PUSHA/PUSHAD: store the 8 general registers (EAX..EDI,
   highest register first) below the current stack pointer, then write
   the new stack pointer (kept in T1). */
static void gen_pusha(DisasContext *s)
{
    int i;
    gen_op_movl_A0_ESP();
    /* 8 slots of 2 or 4 bytes depending on operand size */
    gen_op_addl_A0_im(-16 <<  s->dflag);
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    for(i = 0;i < 8; i++) {
        gen_op_mov_TN_reg[OT_LONG][0][7 - i]();
        gen_op_st_T0_A0[OT_WORD + s->dflag + s->mem_index]();
        gen_op_addl_A0_im(2 <<  s->dflag);
    }
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
}
2178
/* NOTE: wrap around in 16 bit not fully handled */
/* Emit code for POPA/POPAD: reload the 8 general registers from the
   stack (skipping the saved ESP slot) and advance the stack pointer. */
static void gen_popa(DisasContext *s)
{
    int i;
    gen_op_movl_A0_ESP();
    if (!s->ss32)
        gen_op_andl_A0_ffff();
    gen_op_movl_T1_A0();
    /* final stack pointer: 8 slots of 2 or 4 bytes */
    gen_op_addl_T1_im(16 <<  s->dflag);
    if (s->addseg)
        gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
    for(i = 0;i < 8; i++) {
        /* ESP is not reloaded */
        if (i != 3) {
            gen_op_ld_T0_A0[OT_WORD + s->dflag + s->mem_index]();
            gen_op_mov_reg_T0[OT_WORD + s->dflag][7 - i]();
        }
        gen_op_addl_A0_im(2 <<  s->dflag);
    }
    gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
}
2200
/* Emit code for the ENTER instruction: push EBP, optionally copy
   'level' (0..31) nesting frame pointers, set the new frame pointer
   and reserve 'esp_addend' bytes of locals. */
static void gen_enter(DisasContext *s, int esp_addend, int level)
{
    int ot, opsize;

    /* the nesting level is taken modulo 32 */
    level &= 0x1f;
#ifdef TARGET_X86_64
    if (CODE64(s)) {
        ot = s->dflag ? OT_QUAD : OT_WORD;
        opsize = 1 << ot;
        
        gen_op_movl_A0_ESP();
        gen_op_addq_A0_im(-opsize);
        gen_op_movl_T1_A0();

        /* push bp */
        gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
        gen_op_st_T0_A0[ot + s->mem_index]();
        if (level) {
            gen_op_enter64_level(level, (ot == OT_QUAD));
        }
        gen_op_mov_reg_T1[ot][R_EBP]();
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1[OT_QUAD][R_ESP]();
    } else 
#endif
    {
        ot = s->dflag + OT_WORD;
        opsize = 2 << s->dflag;
        
        gen_op_movl_A0_ESP();
        gen_op_addl_A0_im(-opsize);
        if (!s->ss32)
            gen_op_andl_A0_ffff();
        gen_op_movl_T1_A0();
        if (s->addseg)
            gen_op_addl_A0_seg(offsetof(CPUX86State,segs[R_SS].base));
        /* push bp */
        gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
        gen_op_st_T0_A0[ot + s->mem_index]();
        if (level) {
            gen_op_enter_level(level, s->dflag);
        }
        gen_op_mov_reg_T1[ot][R_EBP]();
        gen_op_addl_T1_im( -esp_addend + (-opsize * level) );
        gen_op_mov_reg_T1[OT_WORD + s->ss32][R_ESP]();
    }
}
2248
/* Emit code raising exception 'trapno' at 'cur_eip': flags and eip are
   synchronized first so the exception state is precise, then the
   translation block is ended. */
static void gen_exception(DisasContext *s, int trapno, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_exception(trapno);
    s->is_jmp = 3;
}
2257
/* an interrupt is different from an exception because of the
   privilege checks */
/* Emit code raising software interrupt 'intno'; the instruction length
   (next_eip - cur_eip) is passed so the handler can compute the return
   address. */
static void gen_interrupt(DisasContext *s, int intno, 
                          target_ulong cur_eip, target_ulong next_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_raise_interrupt(intno, (int)(next_eip - cur_eip));
    s->is_jmp = 3;
}
2269
/* Emit code entering the debug exception handler at 'cur_eip' and end
   the translation block. */
static void gen_debug(DisasContext *s, target_ulong cur_eip)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    gen_jmp_im(cur_eip);
    gen_op_debug();
    s->is_jmp = 3;
}
2278
/* generate a generic end of block. Trace exception is also generated
   if needed */
static void gen_eob(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC)
        gen_op_set_cc_op(s->cc_op);
    /* the interrupt-inhibit window (e.g. after mov ss) lasts exactly
       one instruction, so clear it when leaving the block */
    if (s->tb->flags & HF_INHIBIT_IRQ_MASK) {
        gen_op_reset_inhibit_irq();
    }
    if (s->singlestep_enabled) {
        gen_op_debug();
    } else if (s->tf) {
        /* trap flag set: raise single-step exception */
        gen_op_raise_exception(EXCP01_SSTP);
    } else {
        /* T0 = 0 means "no chained TB" to the execution loop */
        gen_op_movl_T0_0();
        gen_op_exit_tb();
    }
    s->is_jmp = 3;
}
2298
/* generate a jump to eip. No segment change must happen before as a
   direct call to the next block may occur */
static void gen_jmp_tb(DisasContext *s, target_ulong eip, int tb_num)
{
    if (s->jmp_opt) {
#ifdef VBOX
        gen_check_external_event(s);
#endif /* VBOX */
        /* chained jump: flags must be materialized before leaving */
        if (s->cc_op != CC_OP_DYNAMIC) {
            gen_op_set_cc_op(s->cc_op);
            s->cc_op = CC_OP_DYNAMIC;
        }
        gen_goto_tb(s, tb_num, eip);
        s->is_jmp = 3;
    } else {
        gen_jmp_im(eip);
        gen_eob(s);
    }
}
2318
/* Convenience wrapper: jump to eip using goto_tb slot 0. */
static void gen_jmp(DisasContext *s, target_ulong eip)
{
    gen_jmp_tb(s, eip, 0);
}
2323
/* Load a target_ulong immediate into T0, using the shorter 32-bit form
   when the value fits in a sign-extended 32-bit immediate. */
static void gen_movtl_T0_im(target_ulong val)
{
#ifdef TARGET_X86_64
    if ((int32_t)val == val) {
        gen_op_movl_T0_im(val);
    } else {
        gen_op_movq_T0_im64(val >> 32, val);
    }
#else
    gen_op_movl_T0_im(val);
#endif
}
2336
/* Same as gen_movtl_T0_im, but targets T1. */
static void gen_movtl_T1_im(target_ulong val)
{
#ifdef TARGET_X86_64
    if ((int32_t)val == val) {
        gen_op_movl_T1_im(val);
    } else {
        gen_op_movq_T1_im64(val >> 32, val);
    }
#else
    gen_op_movl_T1_im(val);
#endif
}
2349
/* Add the immediate 'val' to A0, with the width dictated by the
   current code size (64-bit add in long mode). */
static void gen_add_A0_im(DisasContext *s, int val)
{
#ifdef TARGET_X86_64
    if (CODE64(s))
        gen_op_addq_A0_im(val);
    else
#endif
        gen_op_addl_A0_im(val);
}
2359
/* 64-bit env loads from [A0], indexed by memory access mode
   (raw / kernel / user), i.e. s->mem_index >> 2. */
static GenOpFunc1 *gen_ldq_env_A0[3] = {
    gen_op_ldq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldq_kernel_env_A0,
    gen_op_ldq_user_env_A0,
#endif
};
2367
/* 64-bit env stores to [A0], indexed like gen_ldq_env_A0. */
static GenOpFunc1 *gen_stq_env_A0[3] = {
    gen_op_stq_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_stq_kernel_env_A0,
    gen_op_stq_user_env_A0,
#endif
};
2375
/* 128-bit (octa) env loads from [A0], indexed like gen_ldq_env_A0. */
static GenOpFunc1 *gen_ldo_env_A0[3] = {
    gen_op_ldo_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_ldo_kernel_env_A0,
    gen_op_ldo_user_env_A0,
#endif
};
2383
/* 128-bit (octa) env stores to [A0], indexed like gen_ldq_env_A0. */
static GenOpFunc1 *gen_sto_env_A0[3] = {
    gen_op_sto_raw_env_A0,
#ifndef CONFIG_USER_ONLY
    gen_op_sto_kernel_env_A0,
    gen_op_sto_user_env_A0,
#endif
};
2391
/* sentinel marking table entries that need hand-written decoding in
   gen_sse() instead of a table-driven op */
#define SSE_SPECIAL ((GenOpFunc2 *)1)

/* build the {mmx, xmm} pair resp. the {ps, pd, ss, sd} quadruple for a
   given op name */
#define MMX_OP2(x) { gen_op_ ## x ## _mmx, gen_op_ ## x ## _xmm }
#define SSE_FOP(x) { gen_op_ ## x ## ps, gen_op_ ## x ## pd, \
                     gen_op_ ## x ## ss, gen_op_ ## x ## sd, }
2397
/* Main SSE/MMX dispatch table indexed by [second opcode byte][prefix:
   0=none (MMX/ps), 1=0x66 (pd), 2=0xF3 (ss), 3=0xF2 (sd)].  NULL means
   illegal opcode, SSE_SPECIAL means hand-decoded in gen_sse(). */
static GenOpFunc2 *sse_op_table1[256][4] = {
    /* pure SSE operations */
    [0x10] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x11] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movups, movupd, movss, movsd */
    [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */
    [0x13] = { SSE_SPECIAL, SSE_SPECIAL },  /* movlps, movlpd */
    [0x14] = { gen_op_punpckldq_xmm, gen_op_punpcklqdq_xmm },
    [0x15] = { gen_op_punpckhdq_xmm, gen_op_punpckhqdq_xmm },
    [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd, movshdup */
    [0x17] = { SSE_SPECIAL, SSE_SPECIAL },  /* movhps, movhpd */

    [0x28] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x29] = { SSE_SPECIAL, SSE_SPECIAL },  /* movaps, movapd */
    [0x2a] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtpi2ps, cvtpi2pd, cvtsi2ss, cvtsi2sd */
    [0x2b] = { SSE_SPECIAL, SSE_SPECIAL },  /* movntps, movntpd */
    [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */
    [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */
    [0x2e] = { gen_op_ucomiss, gen_op_ucomisd },
    [0x2f] = { gen_op_comiss, gen_op_comisd },
    [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */
    [0x51] = SSE_FOP(sqrt),
    [0x52] = { gen_op_rsqrtps, NULL, gen_op_rsqrtss, NULL },
    [0x53] = { gen_op_rcpps, NULL, gen_op_rcpss, NULL },
    [0x54] = { gen_op_pand_xmm, gen_op_pand_xmm }, /* andps, andpd */
    [0x55] = { gen_op_pandn_xmm, gen_op_pandn_xmm }, /* andnps, andnpd */
    [0x56] = { gen_op_por_xmm, gen_op_por_xmm }, /* orps, orpd */
    [0x57] = { gen_op_pxor_xmm, gen_op_pxor_xmm }, /* xorps, xorpd */
    [0x58] = SSE_FOP(add),
    [0x59] = SSE_FOP(mul),
    [0x5a] = { gen_op_cvtps2pd, gen_op_cvtpd2ps, 
               gen_op_cvtss2sd, gen_op_cvtsd2ss },
    [0x5b] = { gen_op_cvtdq2ps, gen_op_cvtps2dq, gen_op_cvttps2dq },
    [0x5c] = SSE_FOP(sub),
    [0x5d] = SSE_FOP(min),
    [0x5e] = SSE_FOP(div),
    [0x5f] = SSE_FOP(max),

    [0xc2] = SSE_FOP(cmpeq), /* cmpps family; imm8 selects the predicate */
    [0xc6] = { (GenOpFunc2 *)gen_op_shufps, (GenOpFunc2 *)gen_op_shufpd },

    /* MMX ops and their SSE extensions */
    [0x60] = MMX_OP2(punpcklbw),
    [0x61] = MMX_OP2(punpcklwd),
    [0x62] = MMX_OP2(punpckldq),
    [0x63] = MMX_OP2(packsswb),
    [0x64] = MMX_OP2(pcmpgtb),
    [0x65] = MMX_OP2(pcmpgtw),
    [0x66] = MMX_OP2(pcmpgtl),
    [0x67] = MMX_OP2(packuswb),
    [0x68] = MMX_OP2(punpckhbw),
    [0x69] = MMX_OP2(punpckhwd),
    [0x6a] = MMX_OP2(punpckhdq),
    [0x6b] = MMX_OP2(packssdw),
    [0x6c] = { NULL, gen_op_punpcklqdq_xmm },
    [0x6d] = { NULL, gen_op_punpckhqdq_xmm },
    [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */
    [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0x70] = { (GenOpFunc2 *)gen_op_pshufw_mmx, 
               (GenOpFunc2 *)gen_op_pshufd_xmm, 
               (GenOpFunc2 *)gen_op_pshufhw_xmm, 
               (GenOpFunc2 *)gen_op_pshuflw_xmm },
    [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */
    [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */
    [0x73] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftq */
    [0x74] = MMX_OP2(pcmpeqb),
    [0x75] = MMX_OP2(pcmpeqw),
    [0x76] = MMX_OP2(pcmpeql),
    [0x77] = { SSE_SPECIAL }, /* emms */
    [0x7c] = { NULL, gen_op_haddpd, NULL, gen_op_haddps },
    [0x7d] = { NULL, gen_op_hsubpd, NULL, gen_op_hsubps },
    [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */
    [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */
    [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */
    [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */
    [0xd0] = { NULL, gen_op_addsubpd, NULL, gen_op_addsubps },
    [0xd1] = MMX_OP2(psrlw),
    [0xd2] = MMX_OP2(psrld),
    [0xd3] = MMX_OP2(psrlq),
    [0xd4] = MMX_OP2(paddq),
    [0xd5] = MMX_OP2(pmullw),
    [0xd6] = { NULL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movq2dq, movdq2q */
    [0xd7] = { SSE_SPECIAL, SSE_SPECIAL }, /* pmovmskb */
    [0xd8] = MMX_OP2(psubusb),
    [0xd9] = MMX_OP2(psubusw),
    [0xda] = MMX_OP2(pminub),
    [0xdb] = MMX_OP2(pand),
    [0xdc] = MMX_OP2(paddusb),
    [0xdd] = MMX_OP2(paddusw),
    [0xde] = MMX_OP2(pmaxub),
    [0xdf] = MMX_OP2(pandn),
    [0xe0] = MMX_OP2(pavgb),
    [0xe1] = MMX_OP2(psraw),
    [0xe2] = MMX_OP2(psrad),
    [0xe3] = MMX_OP2(pavgw),
    [0xe4] = MMX_OP2(pmulhuw),
    [0xe5] = MMX_OP2(pmulhw),
    [0xe6] = { NULL, gen_op_cvttpd2dq, gen_op_cvtdq2pd, gen_op_cvtpd2dq },
    [0xe7] = { SSE_SPECIAL , SSE_SPECIAL },  /* movntq, movntdq */
    [0xe8] = MMX_OP2(psubsb),
    [0xe9] = MMX_OP2(psubsw),
    [0xea] = MMX_OP2(pminsw),
    [0xeb] = MMX_OP2(por),
    [0xec] = MMX_OP2(paddsb),
    [0xed] = MMX_OP2(paddsw),
    [0xee] = MMX_OP2(pmaxsw),
    [0xef] = MMX_OP2(pxor),
    [0xf0] = { NULL, NULL, NULL, SSE_SPECIAL }, /* lddqu */
    [0xf1] = MMX_OP2(psllw),
    [0xf2] = MMX_OP2(pslld),
    [0xf3] = MMX_OP2(psllq),
    [0xf4] = MMX_OP2(pmuludq),
    [0xf5] = MMX_OP2(pmaddwd),
    [0xf6] = MMX_OP2(psadbw),
    [0xf7] = MMX_OP2(maskmov),
    [0xf8] = MMX_OP2(psubb),
    [0xf9] = MMX_OP2(psubw),
    [0xfa] = MMX_OP2(psubl),
    [0xfb] = MMX_OP2(psubq),
    [0xfc] = MMX_OP2(paddb),
    [0xfd] = MMX_OP2(paddw),
    [0xfe] = MMX_OP2(paddl),
};
2520
/* Immediate-count MMX/SSE shifts (opcodes 0x71-0x73), indexed by
   [8 * (opcode - 0x71) + modrm reg field][mmx/xmm].  Unlisted slots
   stay NULL (illegal). */
static GenOpFunc2 *sse_op_table2[3 * 8][2] = {
    [0 + 2] = MMX_OP2(psrlw),
    [0 + 4] = MMX_OP2(psraw),
    [0 + 6] = MMX_OP2(psllw),
    [8 + 2] = MMX_OP2(psrld),
    [8 + 4] = MMX_OP2(psrad),
    [8 + 6] = MMX_OP2(pslld),
    [16 + 2] = MMX_OP2(psrlq),
    [16 + 3] = { NULL, gen_op_psrldq_xmm }, /* byte shift, xmm only */
    [16 + 6] = MMX_OP2(psllq),
    [16 + 7] = { NULL, gen_op_pslldq_xmm }, /* byte shift, xmm only */
};
2533
/* Scalar int<->float conversions, in groups of 4:
   rows: cvtsi2ss/sd (int->fp), cvttss/sd2si (truncating fp->int),
   cvtss/sd2si (rounding fp->int); within each row the 64-bit source/
   destination variants exist only on x86-64. */
static GenOpFunc1 *sse_op_table3[4 * 3] = {
    gen_op_cvtsi2ss,
    gen_op_cvtsi2sd,
    X86_64_ONLY(gen_op_cvtsq2ss),
    X86_64_ONLY(gen_op_cvtsq2sd),

    gen_op_cvttss2si,
    gen_op_cvttsd2si,
    X86_64_ONLY(gen_op_cvttss2sq),
    X86_64_ONLY(gen_op_cvttsd2sq),

    gen_op_cvtss2si,
    gen_op_cvtsd2si,
    X86_64_ONLY(gen_op_cvtss2sq),
    X86_64_ONLY(gen_op_cvtsd2sq),
};
2550
/* CMPPS/CMPPD/CMPSS/CMPSD predicates, indexed by [imm8][ps/pd/ss/sd]. */
static GenOpFunc2 *sse_op_table4[8][4] = {
    SSE_FOP(cmpeq),
    SSE_FOP(cmplt),
    SSE_FOP(cmple),
    SSE_FOP(cmpunord),
    SSE_FOP(cmpneq),
    SSE_FOP(cmpnlt),
    SSE_FOP(cmpnle),
    SSE_FOP(cmpord),
};
2561
2562static void gen_sse(DisasContext *s, int b, target_ulong pc_start, int rex_r)
2563{
2564 int b1, op1_offset, op2_offset, is_xmm, val, ot;
2565 int modrm, mod, rm, reg, reg_addr, offset_addr;
2566 GenOpFunc2 *sse_op2;
2567 GenOpFunc3 *sse_op3;
2568
2569 b &= 0xff;
2570 if (s->prefix & PREFIX_DATA)
2571 b1 = 1;
2572 else if (s->prefix & PREFIX_REPZ)
2573 b1 = 2;
2574 else if (s->prefix & PREFIX_REPNZ)
2575 b1 = 3;
2576 else
2577 b1 = 0;
2578 sse_op2 = sse_op_table1[b][b1];
2579 if (!sse_op2)
2580 goto illegal_op;
2581 if (b <= 0x5f || b == 0xc6 || b == 0xc2) {
2582 is_xmm = 1;
2583 } else {
2584 if (b1 == 0) {
2585 /* MMX case */
2586 is_xmm = 0;
2587 } else {
2588 is_xmm = 1;
2589 }
2590 }
2591 /* simple MMX/SSE operation */
2592 if (s->flags & HF_TS_MASK) {
2593 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
2594 return;
2595 }
2596 if (s->flags & HF_EM_MASK) {
2597 illegal_op:
2598 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
2599 return;
2600 }
2601 if (is_xmm && !(s->flags & HF_OSFXSR_MASK))
2602 goto illegal_op;
2603 if (b == 0x77) {
2604 /* emms */
2605 gen_op_emms();
2606 return;
2607 }
2608 /* prepare MMX state (XXX: optimize by storing fptt and fptags in
2609 the static cpu state) */
2610 if (!is_xmm) {
2611 gen_op_enter_mmx();
2612 }
2613
2614 modrm = ldub_code(s->pc++);
2615 reg = ((modrm >> 3) & 7);
2616 if (is_xmm)
2617 reg |= rex_r;
2618 mod = (modrm >> 6) & 3;
2619 if (sse_op2 == SSE_SPECIAL) {
2620 b |= (b1 << 8);
2621 switch(b) {
2622 case 0x0e7: /* movntq */
2623 if (mod == 3)
2624 goto illegal_op;
2625 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2626 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2627 break;
2628 case 0x1e7: /* movntdq */
2629 case 0x02b: /* movntps */
2630 case 0x12b: /* movntps */
2631 case 0x3f0: /* lddqu */
2632 if (mod == 3)
2633 goto illegal_op;
2634 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2635 gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2636 break;
2637 case 0x6e: /* movd mm, ea */
2638 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2639 gen_op_movl_mm_T0_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2640 break;
2641 case 0x16e: /* movd xmm, ea */
2642 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 0);
2643 gen_op_movl_mm_T0_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2644 break;
2645 case 0x6f: /* movq mm, ea */
2646 if (mod != 3) {
2647 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2648 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2649 } else {
2650 rm = (modrm & 7);
2651 gen_op_movq(offsetof(CPUX86State,fpregs[reg].mmx),
2652 offsetof(CPUX86State,fpregs[rm].mmx));
2653 }
2654 break;
2655 case 0x010: /* movups */
2656 case 0x110: /* movupd */
2657 case 0x028: /* movaps */
2658 case 0x128: /* movapd */
2659 case 0x16f: /* movdqa xmm, ea */
2660 case 0x26f: /* movdqu xmm, ea */
2661 if (mod != 3) {
2662 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2663 gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2664 } else {
2665 rm = (modrm & 7) | REX_B(s);
2666 gen_op_movo(offsetof(CPUX86State,xmm_regs[reg]),
2667 offsetof(CPUX86State,xmm_regs[rm]));
2668 }
2669 break;
2670 case 0x210: /* movss xmm, ea */
2671 if (mod != 3) {
2672 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2673 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
2674 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2675 gen_op_movl_T0_0();
2676 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2677 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2678 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2679 } else {
2680 rm = (modrm & 7) | REX_B(s);
2681 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2682 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2683 }
2684 break;
2685 case 0x310: /* movsd xmm, ea */
2686 if (mod != 3) {
2687 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2688 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2689 gen_op_movl_T0_0();
2690 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2691 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2692 } else {
2693 rm = (modrm & 7) | REX_B(s);
2694 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2695 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2696 }
2697 break;
2698 case 0x012: /* movlps */
2699 case 0x112: /* movlpd */
2700 if (mod != 3) {
2701 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2702 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2703 } else {
2704 /* movhlps */
2705 rm = (modrm & 7) | REX_B(s);
2706 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2707 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
2708 }
2709 break;
2710 case 0x212: /* movsldup */
2711 if (mod != 3) {
2712 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2713 gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2714 } else {
2715 rm = (modrm & 7) | REX_B(s);
2716 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2717 offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)));
2718 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2719 offsetof(CPUX86State,xmm_regs[rm].XMM_L(2)));
2720 }
2721 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2722 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2723 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2724 offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)));
2725 break;
2726 case 0x312: /* movddup */
2727 if (mod != 3) {
2728 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2729 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2730 } else {
2731 rm = (modrm & 7) | REX_B(s);
2732 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2733 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2734 }
2735 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2736 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2737 break;
2738 case 0x016: /* movhps */
2739 case 0x116: /* movhpd */
2740 if (mod != 3) {
2741 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2742 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2743 } else {
2744 /* movlhps */
2745 rm = (modrm & 7) | REX_B(s);
2746 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)),
2747 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2748 }
2749 break;
2750 case 0x216: /* movshdup */
2751 if (mod != 3) {
2752 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2753 gen_ldo_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2754 } else {
2755 rm = (modrm & 7) | REX_B(s);
2756 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)),
2757 offsetof(CPUX86State,xmm_regs[rm].XMM_L(1)));
2758 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)),
2759 offsetof(CPUX86State,xmm_regs[rm].XMM_L(3)));
2760 }
2761 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)),
2762 offsetof(CPUX86State,xmm_regs[reg].XMM_L(1)));
2763 gen_op_movl(offsetof(CPUX86State,xmm_regs[reg].XMM_L(2)),
2764 offsetof(CPUX86State,xmm_regs[reg].XMM_L(3)));
2765 break;
2766 case 0x7e: /* movd ea, mm */
2767 gen_op_movl_T0_mm_mmx(offsetof(CPUX86State,fpregs[reg].mmx));
2768 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2769 break;
2770 case 0x17e: /* movd ea, xmm */
2771 gen_op_movl_T0_mm_xmm(offsetof(CPUX86State,xmm_regs[reg]));
2772 gen_ldst_modrm(s, modrm, OT_LONG, OR_TMP0, 1);
2773 break;
2774 case 0x27e: /* movq xmm, ea */
2775 if (mod != 3) {
2776 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2777 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2778 } else {
2779 rm = (modrm & 7) | REX_B(s);
2780 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)),
2781 offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)));
2782 }
2783 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2784 break;
2785 case 0x7f: /* movq ea, mm */
2786 if (mod != 3) {
2787 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2788 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,fpregs[reg].mmx));
2789 } else {
2790 rm = (modrm & 7);
2791 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
2792 offsetof(CPUX86State,fpregs[reg].mmx));
2793 }
2794 break;
2795 case 0x011: /* movups */
2796 case 0x111: /* movupd */
2797 case 0x029: /* movaps */
2798 case 0x129: /* movapd */
2799 case 0x17f: /* movdqa ea, xmm */
2800 case 0x27f: /* movdqu ea, xmm */
2801 if (mod != 3) {
2802 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2803 gen_sto_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg]));
2804 } else {
2805 rm = (modrm & 7) | REX_B(s);
2806 gen_op_movo(offsetof(CPUX86State,xmm_regs[rm]),
2807 offsetof(CPUX86State,xmm_regs[reg]));
2808 }
2809 break;
2810 case 0x211: /* movss ea, xmm */
2811 if (mod != 3) {
2812 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2813 gen_op_movl_T0_env(offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2814 gen_op_st_T0_A0[OT_LONG + s->mem_index]();
2815 } else {
2816 rm = (modrm & 7) | REX_B(s);
2817 gen_op_movl(offsetof(CPUX86State,xmm_regs[rm].XMM_L(0)),
2818 offsetof(CPUX86State,xmm_regs[reg].XMM_L(0)));
2819 }
2820 break;
2821 case 0x311: /* movsd ea, xmm */
2822 if (mod != 3) {
2823 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2824 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2825 } else {
2826 rm = (modrm & 7) | REX_B(s);
2827 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
2828 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2829 }
2830 break;
2831 case 0x013: /* movlps */
2832 case 0x113: /* movlpd */
2833 if (mod != 3) {
2834 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2835 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
2836 } else {
2837 goto illegal_op;
2838 }
2839 break;
2840 case 0x017: /* movhps */
2841 case 0x117: /* movhpd */
2842 if (mod != 3) {
2843 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2844 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(1)));
2845 } else {
2846 goto illegal_op;
2847 }
2848 break;
2849 case 0x71: /* shift mm, im */
2850 case 0x72:
2851 case 0x73:
2852 case 0x171: /* shift xmm, im */
2853 case 0x172:
2854 case 0x173:
2855 val = ldub_code(s->pc++);
2856 if (is_xmm) {
2857 gen_op_movl_T0_im(val);
2858 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2859 gen_op_movl_T0_0();
2860 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(1)));
2861 op1_offset = offsetof(CPUX86State,xmm_t0);
2862 } else {
2863 gen_op_movl_T0_im(val);
2864 gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(0)));
2865 gen_op_movl_T0_0();
2866 gen_op_movl_env_T0(offsetof(CPUX86State,mmx_t0.MMX_L(1)));
2867 op1_offset = offsetof(CPUX86State,mmx_t0);
2868 }
2869 sse_op2 = sse_op_table2[((b - 1) & 3) * 8 + (((modrm >> 3)) & 7)][b1];
2870 if (!sse_op2)
2871 goto illegal_op;
2872 if (is_xmm) {
2873 rm = (modrm & 7) | REX_B(s);
2874 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2875 } else {
2876 rm = (modrm & 7);
2877 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2878 }
2879 sse_op2(op2_offset, op1_offset);
2880 break;
2881 case 0x050: /* movmskps */
2882 rm = (modrm & 7) | REX_B(s);
2883 gen_op_movmskps(offsetof(CPUX86State,xmm_regs[rm]));
2884 gen_op_mov_reg_T0[OT_LONG][reg]();
2885 break;
2886 case 0x150: /* movmskpd */
2887 rm = (modrm & 7) | REX_B(s);
2888 gen_op_movmskpd(offsetof(CPUX86State,xmm_regs[rm]));
2889 gen_op_mov_reg_T0[OT_LONG][reg]();
2890 break;
2891 case 0x02a: /* cvtpi2ps */
2892 case 0x12a: /* cvtpi2pd */
2893 gen_op_enter_mmx();
2894 if (mod != 3) {
2895 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2896 op2_offset = offsetof(CPUX86State,mmx_t0);
2897 gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
2898 } else {
2899 rm = (modrm & 7);
2900 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
2901 }
2902 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2903 switch(b >> 8) {
2904 case 0x0:
2905 gen_op_cvtpi2ps(op1_offset, op2_offset);
2906 break;
2907 default:
2908 case 0x1:
2909 gen_op_cvtpi2pd(op1_offset, op2_offset);
2910 break;
2911 }
2912 break;
2913 case 0x22a: /* cvtsi2ss */
2914 case 0x32a: /* cvtsi2sd */
2915 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
2916 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
2917 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
2918 sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2)](op1_offset);
2919 break;
2920 case 0x02c: /* cvttps2pi */
2921 case 0x12c: /* cvttpd2pi */
2922 case 0x02d: /* cvtps2pi */
2923 case 0x12d: /* cvtpd2pi */
2924 gen_op_enter_mmx();
2925 if (mod != 3) {
2926 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2927 op2_offset = offsetof(CPUX86State,xmm_t0);
2928 gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
2929 } else {
2930 rm = (modrm & 7) | REX_B(s);
2931 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2932 }
2933 op1_offset = offsetof(CPUX86State,fpregs[reg & 7].mmx);
2934 switch(b) {
2935 case 0x02c:
2936 gen_op_cvttps2pi(op1_offset, op2_offset);
2937 break;
2938 case 0x12c:
2939 gen_op_cvttpd2pi(op1_offset, op2_offset);
2940 break;
2941 case 0x02d:
2942 gen_op_cvtps2pi(op1_offset, op2_offset);
2943 break;
2944 case 0x12d:
2945 gen_op_cvtpd2pi(op1_offset, op2_offset);
2946 break;
2947 }
2948 break;
2949 case 0x22c: /* cvttss2si */
2950 case 0x32c: /* cvttsd2si */
2951 case 0x22d: /* cvtss2si */
2952 case 0x32d: /* cvtsd2si */
2953 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG;
2954 if (mod != 3) {
2955 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
2956 if ((b >> 8) & 1) {
2957 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_Q(0)));
2958 } else {
2959 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
2960 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
2961 }
2962 op2_offset = offsetof(CPUX86State,xmm_t0);
2963 } else {
2964 rm = (modrm & 7) | REX_B(s);
2965 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
2966 }
2967 sse_op_table3[(s->dflag == 2) * 2 + ((b >> 8) - 2) + 4 +
2968 (b & 1) * 4](op2_offset);
2969 gen_op_mov_reg_T0[ot][reg]();
2970 break;
2971 case 0xc4: /* pinsrw */
2972 case 0x1c4:
2973 s->rip_offset = 1;
2974 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
2975 val = ldub_code(s->pc++);
2976 if (b1) {
2977 val &= 7;
2978 gen_op_pinsrw_xmm(offsetof(CPUX86State,xmm_regs[reg]), val);
2979 } else {
2980 val &= 3;
2981 gen_op_pinsrw_mmx(offsetof(CPUX86State,fpregs[reg].mmx), val);
2982 }
2983 break;
2984 case 0xc5: /* pextrw */
2985 case 0x1c5:
2986 if (mod != 3)
2987 goto illegal_op;
2988 val = ldub_code(s->pc++);
2989 if (b1) {
2990 val &= 7;
2991 rm = (modrm & 7) | REX_B(s);
2992 gen_op_pextrw_xmm(offsetof(CPUX86State,xmm_regs[rm]), val);
2993 } else {
2994 val &= 3;
2995 rm = (modrm & 7);
2996 gen_op_pextrw_mmx(offsetof(CPUX86State,fpregs[rm].mmx), val);
2997 }
2998 reg = ((modrm >> 3) & 7) | rex_r;
2999 gen_op_mov_reg_T0[OT_LONG][reg]();
3000 break;
3001 case 0x1d6: /* movq ea, xmm */
3002 if (mod != 3) {
3003 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3004 gen_stq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3005 } else {
3006 rm = (modrm & 7) | REX_B(s);
3007 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3008 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3009 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3010 }
3011 break;
3012 case 0x2d6: /* movq2dq */
3013 gen_op_enter_mmx();
3014 rm = (modrm & 7) | REX_B(s);
3015 gen_op_movq(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(0)),
3016 offsetof(CPUX86State,fpregs[reg & 7].mmx));
3017 gen_op_movq_env_0(offsetof(CPUX86State,xmm_regs[rm].XMM_Q(1)));
3018 break;
3019 case 0x3d6: /* movdq2q */
3020 gen_op_enter_mmx();
3021 rm = (modrm & 7);
3022 gen_op_movq(offsetof(CPUX86State,fpregs[rm].mmx),
3023 offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)));
3024 break;
3025 case 0xd7: /* pmovmskb */
3026 case 0x1d7:
3027 if (mod != 3)
3028 goto illegal_op;
3029 if (b1) {
3030 rm = (modrm & 7) | REX_B(s);
3031 gen_op_pmovmskb_xmm(offsetof(CPUX86State,xmm_regs[rm]));
3032 } else {
3033 rm = (modrm & 7);
3034 gen_op_pmovmskb_mmx(offsetof(CPUX86State,fpregs[rm].mmx));
3035 }
3036 reg = ((modrm >> 3) & 7) | rex_r;
3037 gen_op_mov_reg_T0[OT_LONG][reg]();
3038 break;
3039 default:
3040 goto illegal_op;
3041 }
3042 } else {
3043 /* generic MMX or SSE operation */
3044 switch(b) {
3045 case 0xf7:
3046 /* maskmov : we must prepare A0 */
3047 if (mod != 3)
3048 goto illegal_op;
3049#ifdef TARGET_X86_64
3050 if (s->aflag == 2) {
3051 gen_op_movq_A0_reg[R_EDI]();
3052 } else
3053#endif
3054 {
3055 gen_op_movl_A0_reg[R_EDI]();
3056 if (s->aflag == 0)
3057 gen_op_andl_A0_ffff();
3058 }
3059 gen_add_A0_ds_seg(s);
3060 break;
3061 case 0x70: /* pshufx insn */
3062 case 0xc6: /* pshufx insn */
3063 case 0xc2: /* compare insns */
3064 s->rip_offset = 1;
3065 break;
3066 default:
3067 break;
3068 }
3069 if (is_xmm) {
3070 op1_offset = offsetof(CPUX86State,xmm_regs[reg]);
3071 if (mod != 3) {
3072 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3073 op2_offset = offsetof(CPUX86State,xmm_t0);
3074 if (b1 >= 2 && ((b >= 0x50 && b <= 0x5f) ||
3075 b == 0xc2)) {
3076 /* specific case for SSE single instructions */
3077 if (b1 == 2) {
3078 /* 32 bit access */
3079 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
3080 gen_op_movl_env_T0(offsetof(CPUX86State,xmm_t0.XMM_L(0)));
3081 } else {
3082 /* 64 bit access */
3083 gen_ldq_env_A0[s->mem_index >> 2](offsetof(CPUX86State,xmm_t0.XMM_D(0)));
3084 }
3085 } else {
3086 gen_ldo_env_A0[s->mem_index >> 2](op2_offset);
3087 }
3088 } else {
3089 rm = (modrm & 7) | REX_B(s);
3090 op2_offset = offsetof(CPUX86State,xmm_regs[rm]);
3091 }
3092 } else {
3093 op1_offset = offsetof(CPUX86State,fpregs[reg].mmx);
3094 if (mod != 3) {
3095 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3096 op2_offset = offsetof(CPUX86State,mmx_t0);
3097 gen_ldq_env_A0[s->mem_index >> 2](op2_offset);
3098 } else {
3099 rm = (modrm & 7);
3100 op2_offset = offsetof(CPUX86State,fpregs[rm].mmx);
3101 }
3102 }
3103 switch(b) {
3104 case 0x70: /* pshufx insn */
3105 case 0xc6: /* pshufx insn */
3106 val = ldub_code(s->pc++);
3107 sse_op3 = (GenOpFunc3 *)sse_op2;
3108 sse_op3(op1_offset, op2_offset, val);
3109 break;
3110 case 0xc2:
3111 /* compare insns */
3112 val = ldub_code(s->pc++);
3113 if (val >= 8)
3114 goto illegal_op;
3115 sse_op2 = sse_op_table4[val][b1];
3116 sse_op2(op1_offset, op2_offset);
3117 break;
3118 default:
3119 sse_op2(op1_offset, op2_offset);
3120 break;
3121 }
3122 if (b == 0x2e || b == 0x2f) {
3123 s->cc_op = CC_OP_EFLAGS;
3124 }
3125 }
3126}
3127
3128
3129/* convert one instruction. s->is_jmp is set if the translation must
3130 be stopped. Return the next pc value */
3131static target_ulong disas_insn(DisasContext *s, target_ulong pc_start)
3132{
3133 int b, prefixes, aflag, dflag;
3134 int shift, ot;
3135 int modrm, reg, rm, mod, reg_addr, op, opreg, offset_addr, val;
3136 target_ulong next_eip, tval;
3137 int rex_w, rex_r;
3138
3139 s->pc = pc_start;
3140 prefixes = 0;
3141 aflag = s->code32;
3142 dflag = s->code32;
3143 s->override = -1;
3144 rex_w = -1;
3145 rex_r = 0;
3146#ifdef TARGET_X86_64
3147 s->rex_x = 0;
3148 s->rex_b = 0;
3149 x86_64_hregs = 0;
3150#endif
3151 s->rip_offset = 0; /* for relative ip address */
3152
3153#ifdef VBOX
3154 /* Always update EIP. Otherwise one must be very careful with generated code that can raise exceptions. */
3155 gen_update_eip(pc_start - s->cs_base);
3156#endif
3157
3158 next_byte:
3159 b = ldub_code(s->pc);
3160 s->pc++;
3161 /* check prefixes */
3162#ifdef TARGET_X86_64
3163 if (CODE64(s)) {
3164 switch (b) {
3165 case 0xf3:
3166 prefixes |= PREFIX_REPZ;
3167 goto next_byte;
3168 case 0xf2:
3169 prefixes |= PREFIX_REPNZ;
3170 goto next_byte;
3171 case 0xf0:
3172 prefixes |= PREFIX_LOCK;
3173 goto next_byte;
3174 case 0x2e:
3175 s->override = R_CS;
3176 goto next_byte;
3177 case 0x36:
3178 s->override = R_SS;
3179 goto next_byte;
3180 case 0x3e:
3181 s->override = R_DS;
3182 goto next_byte;
3183 case 0x26:
3184 s->override = R_ES;
3185 goto next_byte;
3186 case 0x64:
3187 s->override = R_FS;
3188 goto next_byte;
3189 case 0x65:
3190 s->override = R_GS;
3191 goto next_byte;
3192 case 0x66:
3193 prefixes |= PREFIX_DATA;
3194 goto next_byte;
3195 case 0x67:
3196 prefixes |= PREFIX_ADR;
3197 goto next_byte;
3198 case 0x40 ... 0x4f:
3199 /* REX prefix */
3200 rex_w = (b >> 3) & 1;
3201 rex_r = (b & 0x4) << 1;
3202 s->rex_x = (b & 0x2) << 2;
3203 REX_B(s) = (b & 0x1) << 3;
3204 x86_64_hregs = 1; /* select uniform byte register addressing */
3205 goto next_byte;
3206 }
3207 if (rex_w == 1) {
3208 /* 0x66 is ignored if rex.w is set */
3209 dflag = 2;
3210 } else {
3211 if (prefixes & PREFIX_DATA)
3212 dflag ^= 1;
3213 }
3214 if (!(prefixes & PREFIX_ADR))
3215 aflag = 2;
3216 } else
3217#endif
3218 {
3219 switch (b) {
3220 case 0xf3:
3221 prefixes |= PREFIX_REPZ;
3222 goto next_byte;
3223 case 0xf2:
3224 prefixes |= PREFIX_REPNZ;
3225 goto next_byte;
3226 case 0xf0:
3227 prefixes |= PREFIX_LOCK;
3228 goto next_byte;
3229 case 0x2e:
3230 s->override = R_CS;
3231 goto next_byte;
3232 case 0x36:
3233 s->override = R_SS;
3234 goto next_byte;
3235 case 0x3e:
3236 s->override = R_DS;
3237 goto next_byte;
3238 case 0x26:
3239 s->override = R_ES;
3240 goto next_byte;
3241 case 0x64:
3242 s->override = R_FS;
3243 goto next_byte;
3244 case 0x65:
3245 s->override = R_GS;
3246 goto next_byte;
3247 case 0x66:
3248 prefixes |= PREFIX_DATA;
3249 goto next_byte;
3250 case 0x67:
3251 prefixes |= PREFIX_ADR;
3252 goto next_byte;
3253 }
3254 if (prefixes & PREFIX_DATA)
3255 dflag ^= 1;
3256 if (prefixes & PREFIX_ADR)
3257 aflag ^= 1;
3258 }
3259
3260 s->prefix = prefixes;
3261 s->aflag = aflag;
3262 s->dflag = dflag;
3263
3264 /* lock generation */
3265 if (prefixes & PREFIX_LOCK)
3266 gen_op_lock();
3267
3268 /* now check op code */
3269 reswitch:
3270 switch(b) {
3271 case 0x0f:
3272 /**************************/
3273 /* extended op code */
3274 b = ldub_code(s->pc++) | 0x100;
3275 goto reswitch;
3276
3277 /**************************/
3278 /* arith & logic */
3279 case 0x00 ... 0x05:
3280 case 0x08 ... 0x0d:
3281 case 0x10 ... 0x15:
3282 case 0x18 ... 0x1d:
3283 case 0x20 ... 0x25:
3284 case 0x28 ... 0x2d:
3285 case 0x30 ... 0x35:
3286 case 0x38 ... 0x3d:
3287 {
3288 int op, f, val;
3289 op = (b >> 3) & 7;
3290 f = (b >> 1) & 3;
3291
3292 if ((b & 1) == 0)
3293 ot = OT_BYTE;
3294 else
3295 ot = dflag + OT_WORD;
3296
3297 switch(f) {
3298 case 0: /* OP Ev, Gv */
3299 modrm = ldub_code(s->pc++);
3300 reg = ((modrm >> 3) & 7) | rex_r;
3301 mod = (modrm >> 6) & 3;
3302 rm = (modrm & 7) | REX_B(s);
3303 if (mod != 3) {
3304 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3305 opreg = OR_TMP0;
3306 } else if (op == OP_XORL && rm == reg) {
3307 xor_zero:
3308 /* xor reg, reg optimisation */
3309 gen_op_movl_T0_0();
3310 s->cc_op = CC_OP_LOGICB + ot;
3311 gen_op_mov_reg_T0[ot][reg]();
3312 gen_op_update1_cc();
3313 break;
3314 } else {
3315 opreg = rm;
3316 }
3317 gen_op_mov_TN_reg[ot][1][reg]();
3318 gen_op(s, op, ot, opreg);
3319 break;
3320 case 1: /* OP Gv, Ev */
3321 modrm = ldub_code(s->pc++);
3322 mod = (modrm >> 6) & 3;
3323 reg = ((modrm >> 3) & 7) | rex_r;
3324 rm = (modrm & 7) | REX_B(s);
3325 if (mod != 3) {
3326 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3327 gen_op_ld_T1_A0[ot + s->mem_index]();
3328 } else if (op == OP_XORL && rm == reg) {
3329 goto xor_zero;
3330 } else {
3331 gen_op_mov_TN_reg[ot][1][rm]();
3332 }
3333 gen_op(s, op, ot, reg);
3334 break;
3335 case 2: /* OP A, Iv */
3336 val = insn_get(s, ot);
3337 gen_op_movl_T1_im(val);
3338 gen_op(s, op, ot, OR_EAX);
3339 break;
3340 }
3341 }
3342 break;
3343
3344 case 0x80: /* GRP1 */
3345 case 0x81:
3346 case 0x82:
3347 case 0x83:
3348 {
3349 int val;
3350
3351 if ((b & 1) == 0)
3352 ot = OT_BYTE;
3353 else
3354 ot = dflag + OT_WORD;
3355
3356 modrm = ldub_code(s->pc++);
3357 mod = (modrm >> 6) & 3;
3358 rm = (modrm & 7) | REX_B(s);
3359 op = (modrm >> 3) & 7;
3360
3361 if (mod != 3) {
3362 if (b == 0x83)
3363 s->rip_offset = 1;
3364 else
3365 s->rip_offset = insn_const_size(ot);
3366 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3367 opreg = OR_TMP0;
3368 } else {
3369 opreg = rm;
3370 }
3371
3372 switch(b) {
3373 default:
3374 case 0x80:
3375 case 0x81:
3376 case 0x82:
3377 val = insn_get(s, ot);
3378 break;
3379 case 0x83:
3380 val = (int8_t)insn_get(s, OT_BYTE);
3381 break;
3382 }
3383 gen_op_movl_T1_im(val);
3384 gen_op(s, op, ot, opreg);
3385 }
3386 break;
3387
3388 /**************************/
3389 /* inc, dec, and other misc arith */
3390 case 0x40 ... 0x47: /* inc Gv */
3391 ot = dflag ? OT_LONG : OT_WORD;
3392 gen_inc(s, ot, OR_EAX + (b & 7), 1);
3393 break;
3394 case 0x48 ... 0x4f: /* dec Gv */
3395 ot = dflag ? OT_LONG : OT_WORD;
3396 gen_inc(s, ot, OR_EAX + (b & 7), -1);
3397 break;
3398 case 0xf6: /* GRP3 */
3399 case 0xf7:
3400 if ((b & 1) == 0)
3401 ot = OT_BYTE;
3402 else
3403 ot = dflag + OT_WORD;
3404
3405 modrm = ldub_code(s->pc++);
3406 mod = (modrm >> 6) & 3;
3407 rm = (modrm & 7) | REX_B(s);
3408 op = (modrm >> 3) & 7;
3409 if (mod != 3) {
3410 if (op == 0)
3411 s->rip_offset = insn_const_size(ot);
3412 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3413 gen_op_ld_T0_A0[ot + s->mem_index]();
3414 } else {
3415 gen_op_mov_TN_reg[ot][0][rm]();
3416 }
3417
3418 switch(op) {
3419 case 0: /* test */
3420 val = insn_get(s, ot);
3421 gen_op_movl_T1_im(val);
3422 gen_op_testl_T0_T1_cc();
3423 s->cc_op = CC_OP_LOGICB + ot;
3424 break;
3425 case 2: /* not */
3426 gen_op_notl_T0();
3427 if (mod != 3) {
3428 gen_op_st_T0_A0[ot + s->mem_index]();
3429 } else {
3430 gen_op_mov_reg_T0[ot][rm]();
3431 }
3432 break;
3433 case 3: /* neg */
3434 gen_op_negl_T0();
3435 if (mod != 3) {
3436 gen_op_st_T0_A0[ot + s->mem_index]();
3437 } else {
3438 gen_op_mov_reg_T0[ot][rm]();
3439 }
3440 gen_op_update_neg_cc();
3441 s->cc_op = CC_OP_SUBB + ot;
3442 break;
3443 case 4: /* mul */
3444 switch(ot) {
3445 case OT_BYTE:
3446 gen_op_mulb_AL_T0();
3447 s->cc_op = CC_OP_MULB;
3448 break;
3449 case OT_WORD:
3450 gen_op_mulw_AX_T0();
3451 s->cc_op = CC_OP_MULW;
3452 break;
3453 default:
3454 case OT_LONG:
3455 gen_op_mull_EAX_T0();
3456 s->cc_op = CC_OP_MULL;
3457 break;
3458#ifdef TARGET_X86_64
3459 case OT_QUAD:
3460 gen_op_mulq_EAX_T0();
3461 s->cc_op = CC_OP_MULQ;
3462 break;
3463#endif
3464 }
3465 break;
3466 case 5: /* imul */
3467 switch(ot) {
3468 case OT_BYTE:
3469 gen_op_imulb_AL_T0();
3470 s->cc_op = CC_OP_MULB;
3471 break;
3472 case OT_WORD:
3473 gen_op_imulw_AX_T0();
3474 s->cc_op = CC_OP_MULW;
3475 break;
3476 default:
3477 case OT_LONG:
3478 gen_op_imull_EAX_T0();
3479 s->cc_op = CC_OP_MULL;
3480 break;
3481#ifdef TARGET_X86_64
3482 case OT_QUAD:
3483 gen_op_imulq_EAX_T0();
3484 s->cc_op = CC_OP_MULQ;
3485 break;
3486#endif
3487 }
3488 break;
3489 case 6: /* div */
3490 switch(ot) {
3491 case OT_BYTE:
3492 gen_jmp_im(pc_start - s->cs_base);
3493 gen_op_divb_AL_T0();
3494 break;
3495 case OT_WORD:
3496 gen_jmp_im(pc_start - s->cs_base);
3497 gen_op_divw_AX_T0();
3498 break;
3499 default:
3500 case OT_LONG:
3501 gen_jmp_im(pc_start - s->cs_base);
3502 gen_op_divl_EAX_T0();
3503 break;
3504#ifdef TARGET_X86_64
3505 case OT_QUAD:
3506 gen_jmp_im(pc_start - s->cs_base);
3507 gen_op_divq_EAX_T0();
3508 break;
3509#endif
3510 }
3511 break;
3512 case 7: /* idiv */
3513 switch(ot) {
3514 case OT_BYTE:
3515 gen_jmp_im(pc_start - s->cs_base);
3516 gen_op_idivb_AL_T0();
3517 break;
3518 case OT_WORD:
3519 gen_jmp_im(pc_start - s->cs_base);
3520 gen_op_idivw_AX_T0();
3521 break;
3522 default:
3523 case OT_LONG:
3524 gen_jmp_im(pc_start - s->cs_base);
3525 gen_op_idivl_EAX_T0();
3526 break;
3527#ifdef TARGET_X86_64
3528 case OT_QUAD:
3529 gen_jmp_im(pc_start - s->cs_base);
3530 gen_op_idivq_EAX_T0();
3531 break;
3532#endif
3533 }
3534 break;
3535 default:
3536 goto illegal_op;
3537 }
3538 break;
3539
3540 case 0xfe: /* GRP4 */
3541 case 0xff: /* GRP5 */
3542 if ((b & 1) == 0)
3543 ot = OT_BYTE;
3544 else
3545 ot = dflag + OT_WORD;
3546
3547 modrm = ldub_code(s->pc++);
3548 mod = (modrm >> 6) & 3;
3549 rm = (modrm & 7) | REX_B(s);
3550 op = (modrm >> 3) & 7;
3551 if (op >= 2 && b == 0xfe) {
3552 goto illegal_op;
3553 }
3554 if (CODE64(s)) {
3555 if (op == 2 || op == 4) {
3556 /* operand size for jumps is 64 bit */
3557 ot = OT_QUAD;
3558 } else if (op == 3 || op == 5) {
3559 /* for call calls, the operand is 16 or 32 bit, even
3560 in long mode */
3561 ot = dflag ? OT_LONG : OT_WORD;
3562 } else if (op == 6) {
3563 /* default push size is 64 bit */
3564 ot = dflag ? OT_QUAD : OT_WORD;
3565 }
3566 }
3567 if (mod != 3) {
3568 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3569 if (op >= 2 && op != 3 && op != 5)
3570 gen_op_ld_T0_A0[ot + s->mem_index]();
3571 } else {
3572 gen_op_mov_TN_reg[ot][0][rm]();
3573 }
3574
3575 switch(op) {
3576 case 0: /* inc Ev */
3577 if (mod != 3)
3578 opreg = OR_TMP0;
3579 else
3580 opreg = rm;
3581 gen_inc(s, ot, opreg, 1);
3582 break;
3583 case 1: /* dec Ev */
3584 if (mod != 3)
3585 opreg = OR_TMP0;
3586 else
3587 opreg = rm;
3588 gen_inc(s, ot, opreg, -1);
3589 break;
3590 case 2: /* call Ev */
3591 /* XXX: optimize if memory (no 'and' is necessary) */
3592 if (s->dflag == 0)
3593 gen_op_andl_T0_ffff();
3594 next_eip = s->pc - s->cs_base;
3595 gen_movtl_T1_im(next_eip);
3596 gen_push_T1(s);
3597 gen_op_jmp_T0();
3598 gen_eob(s);
3599 break;
3600 case 3: /* lcall Ev */
3601 gen_op_ld_T1_A0[ot + s->mem_index]();
3602 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3603 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3604 do_lcall:
3605 if (s->pe && !s->vm86) {
3606 if (s->cc_op != CC_OP_DYNAMIC)
3607 gen_op_set_cc_op(s->cc_op);
3608 gen_jmp_im(pc_start - s->cs_base);
3609 gen_op_lcall_protected_T0_T1(dflag, s->pc - pc_start);
3610 } else {
3611 gen_op_lcall_real_T0_T1(dflag, s->pc - s->cs_base);
3612 }
3613 gen_eob(s);
3614 break;
3615 case 4: /* jmp Ev */
3616 if (s->dflag == 0)
3617 gen_op_andl_T0_ffff();
3618 gen_op_jmp_T0();
3619 gen_eob(s);
3620 break;
3621 case 5: /* ljmp Ev */
3622 gen_op_ld_T1_A0[ot + s->mem_index]();
3623 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
3624 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
3625 do_ljmp:
3626 if (s->pe && !s->vm86) {
3627 if (s->cc_op != CC_OP_DYNAMIC)
3628 gen_op_set_cc_op(s->cc_op);
3629 gen_jmp_im(pc_start - s->cs_base);
3630 gen_op_ljmp_protected_T0_T1(s->pc - pc_start);
3631 } else {
3632 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
3633 gen_op_movl_T0_T1();
3634 gen_op_jmp_T0();
3635 }
3636 gen_eob(s);
3637 break;
3638 case 6: /* push Ev */
3639 gen_push_T0(s);
3640 break;
3641 default:
3642 goto illegal_op;
3643 }
3644 break;
3645
3646 case 0x84: /* test Ev, Gv */
3647 case 0x85:
3648 if ((b & 1) == 0)
3649 ot = OT_BYTE;
3650 else
3651 ot = dflag + OT_WORD;
3652
3653 modrm = ldub_code(s->pc++);
3654 mod = (modrm >> 6) & 3;
3655 rm = (modrm & 7) | REX_B(s);
3656 reg = ((modrm >> 3) & 7) | rex_r;
3657
3658 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3659 gen_op_mov_TN_reg[ot][1][reg]();
3660 gen_op_testl_T0_T1_cc();
3661 s->cc_op = CC_OP_LOGICB + ot;
3662 break;
3663
3664 case 0xa8: /* test eAX, Iv */
3665 case 0xa9:
3666 if ((b & 1) == 0)
3667 ot = OT_BYTE;
3668 else
3669 ot = dflag + OT_WORD;
3670 val = insn_get(s, ot);
3671
3672 gen_op_mov_TN_reg[ot][0][OR_EAX]();
3673 gen_op_movl_T1_im(val);
3674 gen_op_testl_T0_T1_cc();
3675 s->cc_op = CC_OP_LOGICB + ot;
3676 break;
3677
3678 case 0x98: /* CWDE/CBW */
3679#ifdef TARGET_X86_64
3680 if (dflag == 2) {
3681 gen_op_movslq_RAX_EAX();
3682 } else
3683#endif
3684 if (dflag == 1)
3685 gen_op_movswl_EAX_AX();
3686 else
3687 gen_op_movsbw_AX_AL();
3688 break;
3689 case 0x99: /* CDQ/CWD */
3690#ifdef TARGET_X86_64
3691 if (dflag == 2) {
3692 gen_op_movsqo_RDX_RAX();
3693 } else
3694#endif
3695 if (dflag == 1)
3696 gen_op_movslq_EDX_EAX();
3697 else
3698 gen_op_movswl_DX_AX();
3699 break;
3700 case 0x1af: /* imul Gv, Ev */
3701 case 0x69: /* imul Gv, Ev, I */
3702 case 0x6b:
3703 ot = dflag + OT_WORD;
3704 modrm = ldub_code(s->pc++);
3705 reg = ((modrm >> 3) & 7) | rex_r;
3706 if (b == 0x69)
3707 s->rip_offset = insn_const_size(ot);
3708 else if (b == 0x6b)
3709 s->rip_offset = 1;
3710 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3711 if (b == 0x69) {
3712 val = insn_get(s, ot);
3713 gen_op_movl_T1_im(val);
3714 } else if (b == 0x6b) {
3715 val = (int8_t)insn_get(s, OT_BYTE);
3716 gen_op_movl_T1_im(val);
3717 } else {
3718 gen_op_mov_TN_reg[ot][1][reg]();
3719 }
3720
3721#ifdef TARGET_X86_64
3722 if (ot == OT_QUAD) {
3723 gen_op_imulq_T0_T1();
3724 } else
3725#endif
3726 if (ot == OT_LONG) {
3727 gen_op_imull_T0_T1();
3728 } else {
3729 gen_op_imulw_T0_T1();
3730 }
3731 gen_op_mov_reg_T0[ot][reg]();
3732 s->cc_op = CC_OP_MULB + ot;
3733 break;
3734 case 0x1c0:
3735 case 0x1c1: /* xadd Ev, Gv */
3736 if ((b & 1) == 0)
3737 ot = OT_BYTE;
3738 else
3739 ot = dflag + OT_WORD;
3740 modrm = ldub_code(s->pc++);
3741 reg = ((modrm >> 3) & 7) | rex_r;
3742 mod = (modrm >> 6) & 3;
3743 if (mod == 3) {
3744 rm = (modrm & 7) | REX_B(s);
3745 gen_op_mov_TN_reg[ot][0][reg]();
3746 gen_op_mov_TN_reg[ot][1][rm]();
3747 gen_op_addl_T0_T1();
3748 gen_op_mov_reg_T1[ot][reg]();
3749 gen_op_mov_reg_T0[ot][rm]();
3750 } else {
3751 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3752 gen_op_mov_TN_reg[ot][0][reg]();
3753 gen_op_ld_T1_A0[ot + s->mem_index]();
3754 gen_op_addl_T0_T1();
3755 gen_op_st_T0_A0[ot + s->mem_index]();
3756 gen_op_mov_reg_T1[ot][reg]();
3757 }
3758 gen_op_update2_cc();
3759 s->cc_op = CC_OP_ADDB + ot;
3760 break;
3761 case 0x1b0:
3762 case 0x1b1: /* cmpxchg Ev, Gv */
3763 if ((b & 1) == 0)
3764 ot = OT_BYTE;
3765 else
3766 ot = dflag + OT_WORD;
3767 modrm = ldub_code(s->pc++);
3768 reg = ((modrm >> 3) & 7) | rex_r;
3769 mod = (modrm >> 6) & 3;
3770 gen_op_mov_TN_reg[ot][1][reg]();
3771 if (mod == 3) {
3772 rm = (modrm & 7) | REX_B(s);
3773 gen_op_mov_TN_reg[ot][0][rm]();
3774 gen_op_cmpxchg_T0_T1_EAX_cc[ot]();
3775 gen_op_mov_reg_T0[ot][rm]();
3776 } else {
3777 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3778 gen_op_ld_T0_A0[ot + s->mem_index]();
3779 gen_op_cmpxchg_mem_T0_T1_EAX_cc[ot + s->mem_index]();
3780 }
3781 s->cc_op = CC_OP_SUBB + ot;
3782 break;
3783 case 0x1c7: /* cmpxchg8b */
3784 modrm = ldub_code(s->pc++);
3785 mod = (modrm >> 6) & 3;
/* CMPXCHG8B only takes a memory operand; a register encoding (mod == 3)
   is undefined and raises #UD. */
3786 if (mod == 3)
3787 goto illegal_op;
/* Materialize lazy flags first: the helper reads/writes EFLAGS directly
   (it sets ZF according to the compare). */
3788 if (s->cc_op != CC_OP_DYNAMIC)
3789 gen_op_set_cc_op(s->cc_op);
3790 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3791 gen_op_cmpxchg8b();
3792 s->cc_op = CC_OP_EFLAGS;
/* NOTE(review): the modrm reg field is not checked here (0F C7 /1 is the
   only defined encoding) and CPUID.CX8 availability is not tested --
   TODO confirm against a later QEMU translate.c. */
3793 break;
3794
3795 /**************************/
3796 /* push/pop */
3797 case 0x50 ... 0x57: /* push */
3798 gen_op_mov_TN_reg[OT_LONG][0][(b & 7) | REX_B(s)]();
3799 gen_push_T0(s);
3800 break;
3801 case 0x58 ... 0x5f: /* pop */
3802 if (CODE64(s)) {
3803 ot = dflag ? OT_QUAD : OT_WORD;
3804 } else {
3805 ot = dflag + OT_WORD;
3806 }
3807 gen_pop_T0(s);
3808 /* NOTE: order is important for pop %sp */
3809 gen_pop_update(s);
3810 gen_op_mov_reg_T0[ot][(b & 7) | REX_B(s)]();
3811 break;
3812 case 0x60: /* pusha */
3813 if (CODE64(s))
3814 goto illegal_op;
3815 gen_pusha(s);
3816 break;
3817 case 0x61: /* popa */
3818 if (CODE64(s))
3819 goto illegal_op;
3820 gen_popa(s);
3821 break;
3822 case 0x68: /* push Iv */
3823 case 0x6a:
3824 if (CODE64(s)) {
3825 ot = dflag ? OT_QUAD : OT_WORD;
3826 } else {
3827 ot = dflag + OT_WORD;
3828 }
3829 if (b == 0x68)
3830 val = insn_get(s, ot);
3831 else
3832 val = (int8_t)insn_get(s, OT_BYTE);
3833 gen_op_movl_T0_im(val);
3834 gen_push_T0(s);
3835 break;
3836 case 0x8f: /* pop Ev */
3837 if (CODE64(s)) {
3838 ot = dflag ? OT_QUAD : OT_WORD;
3839 } else {
3840 ot = dflag + OT_WORD;
3841 }
3842 modrm = ldub_code(s->pc++);
3843 mod = (modrm >> 6) & 3;
3844 gen_pop_T0(s);
3845 if (mod == 3) {
3846 /* NOTE: order is important for pop %sp */
3847 gen_pop_update(s);
3848 rm = (modrm & 7) | REX_B(s);
3849 gen_op_mov_reg_T0[ot][rm]();
3850 } else {
3851 /* NOTE: order is important too for MMU exceptions */
3852 s->popl_esp_hack = 1 << ot;
3853 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
3854 s->popl_esp_hack = 0;
3855 gen_pop_update(s);
3856 }
3857 break;
3858 case 0xc8: /* enter */
3859 {
3860 int level;
3861 val = lduw_code(s->pc);
3862 s->pc += 2;
3863 level = ldub_code(s->pc++);
3864 gen_enter(s, val, level);
3865 }
3866 break;
3867 case 0xc9: /* leave */
3868 /* XXX: exception not precise (ESP is updated before potential exception) */
3869 if (CODE64(s)) {
3870 gen_op_mov_TN_reg[OT_QUAD][0][R_EBP]();
3871 gen_op_mov_reg_T0[OT_QUAD][R_ESP]();
3872 } else if (s->ss32) {
3873 gen_op_mov_TN_reg[OT_LONG][0][R_EBP]();
3874 gen_op_mov_reg_T0[OT_LONG][R_ESP]();
3875 } else {
3876 gen_op_mov_TN_reg[OT_WORD][0][R_EBP]();
3877 gen_op_mov_reg_T0[OT_WORD][R_ESP]();
3878 }
3879 gen_pop_T0(s);
3880 if (CODE64(s)) {
3881 ot = dflag ? OT_QUAD : OT_WORD;
3882 } else {
3883 ot = dflag + OT_WORD;
3884 }
3885 gen_op_mov_reg_T0[ot][R_EBP]();
3886 gen_pop_update(s);
3887 break;
3888 case 0x06: /* push es */
3889 case 0x0e: /* push cs */
3890 case 0x16: /* push ss */
3891 case 0x1e: /* push ds */
3892 if (CODE64(s))
3893 goto illegal_op;
3894 gen_op_movl_T0_seg(b >> 3);
3895 gen_push_T0(s);
3896 break;
3897 case 0x1a0: /* push fs */
3898 case 0x1a8: /* push gs */
3899 gen_op_movl_T0_seg((b >> 3) & 7);
3900 gen_push_T0(s);
3901 break;
3902 case 0x07: /* pop es */
3903 case 0x17: /* pop ss */
3904 case 0x1f: /* pop ds */
3905 if (CODE64(s))
3906 goto illegal_op;
3907 reg = b >> 3;
3908 gen_pop_T0(s);
3909 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
3910 gen_pop_update(s);
3911 if (reg == R_SS) {
3912 /* if reg == SS, inhibit interrupts/trace. */
3913 /* If several instructions disable interrupts, only the
3914 _first_ does it */
3915 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
3916 gen_op_set_inhibit_irq();
3917 s->tf = 0;
3918 }
3919 if (s->is_jmp) {
3920 gen_jmp_im(s->pc - s->cs_base);
3921 gen_eob(s);
3922 }
3923 break;
3924 case 0x1a1: /* pop fs */
3925 case 0x1a9: /* pop gs */
3926 gen_pop_T0(s);
3927 gen_movl_seg_T0(s, (b >> 3) & 7, pc_start - s->cs_base);
3928 gen_pop_update(s);
3929 if (s->is_jmp) {
3930 gen_jmp_im(s->pc - s->cs_base);
3931 gen_eob(s);
3932 }
3933 break;
3934
3935 /**************************/
3936 /* mov */
3937 case 0x88:
3938 case 0x89: /* mov Gv, Ev */
3939 if ((b & 1) == 0)
3940 ot = OT_BYTE;
3941 else
3942 ot = dflag + OT_WORD;
3943 modrm = ldub_code(s->pc++);
3944 reg = ((modrm >> 3) & 7) | rex_r;
3945
3946 /* generate a generic store */
3947 gen_ldst_modrm(s, modrm, ot, reg, 1);
3948 break;
3949 case 0xc6:
3950 case 0xc7: /* mov Ev, Iv */
3951 if ((b & 1) == 0)
3952 ot = OT_BYTE;
3953 else
3954 ot = dflag + OT_WORD;
3955 modrm = ldub_code(s->pc++);
3956 mod = (modrm >> 6) & 3;
3957 if (mod != 3) {
3958 s->rip_offset = insn_const_size(ot);
3959 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
3960 }
3961 val = insn_get(s, ot);
3962 gen_op_movl_T0_im(val);
3963 if (mod != 3)
3964 gen_op_st_T0_A0[ot + s->mem_index]();
3965 else
3966 gen_op_mov_reg_T0[ot][(modrm & 7) | REX_B(s)]();
3967 break;
3968 case 0x8a:
3969 case 0x8b: /* mov Ev, Gv */
3970 if ((b & 1) == 0)
3971 ot = OT_BYTE;
3972 else
3973 ot = OT_WORD + dflag;
3974 modrm = ldub_code(s->pc++);
3975 reg = ((modrm >> 3) & 7) | rex_r;
3976
3977 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
3978 gen_op_mov_reg_T0[ot][reg]();
3979 break;
3980 case 0x8e: /* mov seg, Gv */
3981 modrm = ldub_code(s->pc++);
3982 reg = (modrm >> 3) & 7;
/* MOV to CS is invalid, as is any segment-register index >= 6. */
3983 if (reg >= 6 || reg == R_CS)
3984 goto illegal_op;
3985 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
3986 gen_movl_seg_T0(s, reg, pc_start - s->cs_base);
3987 if (reg == R_SS) {
3988 /* if reg == SS, inhibit interrupts/trace */
3989 /* If several instructions disable interrupts, only the
3990 _first_ does it */
3991 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
3992 gen_op_set_inhibit_irq();
3993 s->tf = 0;
3994 }
3995 if (s->is_jmp) {
/* A protected-mode segment load can fault or change translation state;
   end the translation block so the new state takes effect. */
3996 gen_jmp_im(s->pc - s->cs_base);
3997 gen_eob(s);
3998 }
3999 break;
4000 case 0x8c: /* mov Gv, seg */
4001 modrm = ldub_code(s->pc++);
4002 reg = (modrm >> 3) & 7;
4003 mod = (modrm >> 6) & 3;
4004 if (reg >= 6)
4005 goto illegal_op;
4006 gen_op_movl_T0_seg(reg);
4007 if (mod == 3)
4008 ot = OT_WORD + dflag;
4009 else
4010 ot = OT_WORD;
4011 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
4012 break;
4013
4014 case 0x1b6: /* movzbS Gv, Eb */
4015 case 0x1b7: /* movzwS Gv, Eb */
4016 case 0x1be: /* movsbS Gv, Eb */
4017 case 0x1bf: /* movswS Gv, Eb */
4018 {
4019 int d_ot;
4020 /* d_ot is the size of destination */
4021 d_ot = dflag + OT_WORD;
4022 /* ot is the size of source */
4023 ot = (b & 1) + OT_BYTE;
4024 modrm = ldub_code(s->pc++);
4025 reg = ((modrm >> 3) & 7) | rex_r;
4026 mod = (modrm >> 6) & 3;
4027 rm = (modrm & 7) | REX_B(s);
4028
4029 if (mod == 3) {
4030 gen_op_mov_TN_reg[ot][0][rm]();
4031 switch(ot | (b & 8)) {
4032 case OT_BYTE:
4033 gen_op_movzbl_T0_T0();
4034 break;
4035 case OT_BYTE | 8:
4036 gen_op_movsbl_T0_T0();
4037 break;
4038 case OT_WORD:
4039 gen_op_movzwl_T0_T0();
4040 break;
4041 default:
4042 case OT_WORD | 8:
4043 gen_op_movswl_T0_T0();
4044 break;
4045 }
4046 gen_op_mov_reg_T0[d_ot][reg]();
4047 } else {
4048 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4049 if (b & 8) {
4050 gen_op_lds_T0_A0[ot + s->mem_index]();
4051 } else {
4052 gen_op_ldu_T0_A0[ot + s->mem_index]();
4053 }
4054 gen_op_mov_reg_T0[d_ot][reg]();
4055 }
4056 }
4057 break;
4058
4059 case 0x8d: /* lea */
4060 ot = dflag + OT_WORD;
4061 modrm = ldub_code(s->pc++);
4062 mod = (modrm >> 6) & 3;
4063 if (mod == 3)
4064 goto illegal_op;
4065 reg = ((modrm >> 3) & 7) | rex_r;
4066 /* we must ensure that no segment is added */
4067 s->override = -1;
4068 val = s->addseg;
4069 s->addseg = 0;
4070 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4071 s->addseg = val;
4072 gen_op_mov_reg_A0[ot - OT_WORD][reg]();
4073 break;
4074
4075 case 0xa0: /* mov EAX, Ov */
4076 case 0xa1:
4077 case 0xa2: /* mov Ov, EAX */
4078 case 0xa3:
4079 {
4080 target_ulong offset_addr;
4081
4082 if ((b & 1) == 0)
4083 ot = OT_BYTE;
4084 else
4085 ot = dflag + OT_WORD;
4086#ifdef TARGET_X86_64
4087 if (s->aflag == 2) {
4088 offset_addr = ldq_code(s->pc);
4089 s->pc += 8;
4090 if (offset_addr == (int32_t)offset_addr)
4091 gen_op_movq_A0_im(offset_addr);
4092 else
4093 gen_op_movq_A0_im64(offset_addr >> 32, offset_addr);
4094 } else
4095#endif
4096 {
4097 if (s->aflag) {
4098 offset_addr = insn_get(s, OT_LONG);
4099 } else {
4100 offset_addr = insn_get(s, OT_WORD);
4101 }
4102 gen_op_movl_A0_im(offset_addr);
4103 }
4104 gen_add_A0_ds_seg(s);
4105 if ((b & 2) == 0) {
4106 gen_op_ld_T0_A0[ot + s->mem_index]();
4107 gen_op_mov_reg_T0[ot][R_EAX]();
4108 } else {
4109 gen_op_mov_TN_reg[ot][0][R_EAX]();
4110 gen_op_st_T0_A0[ot + s->mem_index]();
4111 }
4112 }
4113 break;
4114 case 0xd7: /* xlat */
4115#ifdef TARGET_X86_64
4116 if (s->aflag == 2) {
4117 gen_op_movq_A0_reg[R_EBX]();
4118 gen_op_addq_A0_AL();
4119 } else
4120#endif
4121 {
4122 gen_op_movl_A0_reg[R_EBX]();
4123 gen_op_addl_A0_AL();
4124 if (s->aflag == 0)
4125 gen_op_andl_A0_ffff();
4126 }
4127 gen_add_A0_ds_seg(s);
4128 gen_op_ldu_T0_A0[OT_BYTE + s->mem_index]();
4129 gen_op_mov_reg_T0[OT_BYTE][R_EAX]();
4130 break;
4131 case 0xb0 ... 0xb7: /* mov R, Ib */
4132 val = insn_get(s, OT_BYTE);
4133 gen_op_movl_T0_im(val);
4134 gen_op_mov_reg_T0[OT_BYTE][(b & 7) | REX_B(s)]();
4135 break;
4136 case 0xb8 ... 0xbf: /* mov R, Iv */
4137#ifdef TARGET_X86_64
4138 if (dflag == 2) {
4139 uint64_t tmp;
4140 /* 64 bit case */
4141 tmp = ldq_code(s->pc);
4142 s->pc += 8;
4143 reg = (b & 7) | REX_B(s);
4144 gen_movtl_T0_im(tmp);
4145 gen_op_mov_reg_T0[OT_QUAD][reg]();
4146 } else
4147#endif
4148 {
4149 ot = dflag ? OT_LONG : OT_WORD;
4150 val = insn_get(s, ot);
4151 reg = (b & 7) | REX_B(s);
4152 gen_op_movl_T0_im(val);
4153 gen_op_mov_reg_T0[ot][reg]();
4154 }
4155 break;
4156
4157 case 0x91 ... 0x97: /* xchg R, EAX */
4158 ot = dflag + OT_WORD;
4159 reg = (b & 7) | REX_B(s);
4160 rm = R_EAX;
4161 goto do_xchg_reg;
4162 case 0x86:
4163 case 0x87: /* xchg Ev, Gv */
4164 if ((b & 1) == 0)
4165 ot = OT_BYTE;
4166 else
4167 ot = dflag + OT_WORD;
4168 modrm = ldub_code(s->pc++);
4169 reg = ((modrm >> 3) & 7) | rex_r;
4170 mod = (modrm >> 6) & 3;
4171 if (mod == 3) {
4172 rm = (modrm & 7) | REX_B(s);
4173 do_xchg_reg:
4174 gen_op_mov_TN_reg[ot][0][reg]();
4175 gen_op_mov_TN_reg[ot][1][rm]();
4176 gen_op_mov_reg_T0[ot][rm]();
4177 gen_op_mov_reg_T1[ot][reg]();
4178 } else {
4179 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4180 gen_op_mov_TN_reg[ot][0][reg]();
4181 /* for xchg, lock is implicit */
/* The condition is intentionally inverted: when a LOCK prefix was present
   the dispatcher already bracketed the whole insn with gen_op_lock()/
   gen_op_unlock(), so the pair is emitted here only when it was absent. */
4182 if (!(prefixes & PREFIX_LOCK))
4183 gen_op_lock();
4184 gen_op_ld_T1_A0[ot + s->mem_index]();
4185 gen_op_st_T0_A0[ot + s->mem_index]();
4186 if (!(prefixes & PREFIX_LOCK))
4187 gen_op_unlock();
4188 gen_op_mov_reg_T1[ot][reg]();
4189 }
4190 break;
4191 case 0xc4: /* les Gv */
4192 if (CODE64(s))
4193 goto illegal_op;
4194 op = R_ES;
4195 goto do_lxx;
4196 case 0xc5: /* lds Gv */
4197 if (CODE64(s))
4198 goto illegal_op;
4199 op = R_DS;
4200 goto do_lxx;
4201 case 0x1b2: /* lss Gv */
4202 op = R_SS;
4203 goto do_lxx;
4204 case 0x1b4: /* lfs Gv */
4205 op = R_FS;
4206 goto do_lxx;
4207 case 0x1b5: /* lgs Gv */
4208 op = R_GS;
4209 do_lxx:
4210 ot = dflag ? OT_LONG : OT_WORD;
4211 modrm = ldub_code(s->pc++);
4212 reg = ((modrm >> 3) & 7) | rex_r;
4213 mod = (modrm >> 6) & 3;
4214 if (mod == 3)
4215 goto illegal_op;
4216 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4217 gen_op_ld_T1_A0[ot + s->mem_index]();
4218 gen_add_A0_im(s, 1 << (ot - OT_WORD + 1));
4219 /* load the segment first to handle exceptions properly */
4220 gen_op_ldu_T0_A0[OT_WORD + s->mem_index]();
4221 gen_movl_seg_T0(s, op, pc_start - s->cs_base);
4222 /* then put the data */
4223 gen_op_mov_reg_T1[ot][reg]();
4224 if (s->is_jmp) {
4225 gen_jmp_im(s->pc - s->cs_base);
4226 gen_eob(s);
4227 }
4228 break;
4229
4230 /************************/
4231 /* shifts */
4232 case 0xc0:
4233 case 0xc1:
4234 /* shift Ev,Ib */
4235 shift = 2;
4236 grp2:
4237 {
4238 if ((b & 1) == 0)
4239 ot = OT_BYTE;
4240 else
4241 ot = dflag + OT_WORD;
4242
4243 modrm = ldub_code(s->pc++);
4244 mod = (modrm >> 6) & 3;
4245 op = (modrm >> 3) & 7;
4246
4247 if (mod != 3) {
4248 if (shift == 2) {
4249 s->rip_offset = 1;
4250 }
4251 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4252 opreg = OR_TMP0;
4253 } else {
4254 opreg = (modrm & 7) | REX_B(s);
4255 }
4256
4257 /* simpler op */
4258 if (shift == 0) {
4259 gen_shift(s, op, ot, opreg, OR_ECX);
4260 } else {
4261 if (shift == 2) {
4262 shift = ldub_code(s->pc++);
4263 }
4264 gen_shifti(s, op, ot, opreg, shift);
4265 }
4266 }
4267 break;
4268 case 0xd0:
4269 case 0xd1:
4270 /* shift Ev,1 */
4271 shift = 1;
4272 goto grp2;
4273 case 0xd2:
4274 case 0xd3:
4275 /* shift Ev,cl */
4276 shift = 0;
4277 goto grp2;
4278
4279 case 0x1a4: /* shld imm */
4280 op = 0;
4281 shift = 1;
4282 goto do_shiftd;
4283 case 0x1a5: /* shld cl */
4284 op = 0;
4285 shift = 0;
4286 goto do_shiftd;
4287 case 0x1ac: /* shrd imm */
4288 op = 1;
4289 shift = 1;
4290 goto do_shiftd;
4291 case 0x1ad: /* shrd cl */
4292 op = 1;
4293 shift = 0;
4294 do_shiftd:
4295 ot = dflag + OT_WORD;
4296 modrm = ldub_code(s->pc++);
4297 mod = (modrm >> 6) & 3;
4298 rm = (modrm & 7) | REX_B(s);
4299 reg = ((modrm >> 3) & 7) | rex_r;
4300
4301 if (mod != 3) {
4302 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4303 gen_op_ld_T0_A0[ot + s->mem_index]();
4304 } else {
4305 gen_op_mov_TN_reg[ot][0][rm]();
4306 }
4307 gen_op_mov_TN_reg[ot][1][reg]();
4308
4309 if (shift) {
4310 val = ldub_code(s->pc++);
4311 if (ot == OT_QUAD)
4312 val &= 0x3f;
4313 else
4314 val &= 0x1f;
4315 if (val) {
4316 if (mod == 3)
4317 gen_op_shiftd_T0_T1_im_cc[ot][op](val);
4318 else
4319 gen_op_shiftd_mem_T0_T1_im_cc[ot + s->mem_index][op](val);
4320 if (op == 0 && ot != OT_WORD)
4321 s->cc_op = CC_OP_SHLB + ot;
4322 else
4323 s->cc_op = CC_OP_SARB + ot;
4324 }
4325 } else {
4326 if (s->cc_op != CC_OP_DYNAMIC)
4327 gen_op_set_cc_op(s->cc_op);
4328 if (mod == 3)
4329 gen_op_shiftd_T0_T1_ECX_cc[ot][op]();
4330 else
4331 gen_op_shiftd_mem_T0_T1_ECX_cc[ot + s->mem_index][op]();
4332 s->cc_op = CC_OP_DYNAMIC; /* cannot predict flags after */
4333 }
4334 if (mod == 3) {
4335 gen_op_mov_reg_T0[ot][rm]();
4336 }
4337 break;
4338
4339 /************************/
4340 /* floats */
4341 case 0xd8 ... 0xdf:
4342 if (s->flags & (HF_EM_MASK | HF_TS_MASK)) {
4343 /* if CR0.EM or CR0.TS are set, generate an FPU exception */
4344 /* XXX: what to do if illegal op ? */
4345 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
4346 break;
4347 }
4348 modrm = ldub_code(s->pc++);
4349 mod = (modrm >> 6) & 3;
4350 rm = modrm & 7;
4351 op = ((b & 7) << 3) | ((modrm >> 3) & 7);
4352 if (mod != 3) {
4353 /* memory op */
4354 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
4355 switch(op) {
4356 case 0x00 ... 0x07: /* fxxxs */
4357 case 0x10 ... 0x17: /* fixxxl */
4358 case 0x20 ... 0x27: /* fxxxl */
4359 case 0x30 ... 0x37: /* fixxx */
4360 {
4361 int op1;
4362 op1 = op & 7;
4363
4364 switch(op >> 4) {
4365 case 0:
4366 gen_op_flds_FT0_A0();
4367 break;
4368 case 1:
4369 gen_op_fildl_FT0_A0();
4370 break;
4371 case 2:
4372 gen_op_fldl_FT0_A0();
4373 break;
4374 case 3:
4375 default:
4376 gen_op_fild_FT0_A0();
4377 break;
4378 }
4379
4380 gen_op_fp_arith_ST0_FT0[op1]();
4381 if (op1 == 3) {
4382 /* fcomp needs pop */
4383 gen_op_fpop();
4384 }
4385 }
4386 break;
4387 case 0x08: /* flds */
4388 case 0x0a: /* fsts */
4389 case 0x0b: /* fstps */
4390 case 0x18: /* fildl */
4391 case 0x1a: /* fistl */
4392 case 0x1b: /* fistpl */
4393 case 0x28: /* fldl */
4394 case 0x2a: /* fstl */
4395 case 0x2b: /* fstpl */
4396 case 0x38: /* filds */
4397 case 0x3a: /* fists */
4398 case 0x3b: /* fistps */
4399
4400 switch(op & 7) {
4401 case 0:
4402 switch(op >> 4) {
4403 case 0:
4404 gen_op_flds_ST0_A0();
4405 break;
4406 case 1:
4407 gen_op_fildl_ST0_A0();
4408 break;
4409 case 2:
4410 gen_op_fldl_ST0_A0();
4411 break;
4412 case 3:
4413 default:
4414 gen_op_fild_ST0_A0();
4415 break;
4416 }
4417 break;
4418 default:
4419 switch(op >> 4) {
4420 case 0:
4421 gen_op_fsts_ST0_A0();
4422 break;
4423 case 1:
4424 gen_op_fistl_ST0_A0();
4425 break;
4426 case 2:
4427 gen_op_fstl_ST0_A0();
4428 break;
4429 case 3:
4430 default:
4431 gen_op_fist_ST0_A0();
4432 break;
4433 }
4434 if ((op & 7) == 3)
4435 gen_op_fpop();
4436 break;
4437 }
4438 break;
4439 case 0x0c: /* fldenv mem */
4440 gen_op_fldenv_A0(s->dflag);
4441 break;
4442 case 0x0d: /* fldcw mem */
4443 gen_op_fldcw_A0();
4444 break;
4445 case 0x0e: /* fnstenv mem */
4446 gen_op_fnstenv_A0(s->dflag);
4447 break;
4448 case 0x0f: /* fnstcw mem */
4449 gen_op_fnstcw_A0();
4450 break;
4451 case 0x1d: /* fldt mem */
4452 gen_op_fldt_ST0_A0();
4453 break;
4454 case 0x1f: /* fstpt mem */
4455 gen_op_fstt_ST0_A0();
4456 gen_op_fpop();
4457 break;
4458 case 0x2c: /* frstor mem */
4459 gen_op_frstor_A0(s->dflag);
4460 break;
4461 case 0x2e: /* fnsave mem */
4462 gen_op_fnsave_A0(s->dflag);
4463 break;
4464 case 0x2f: /* fnstsw mem */
4465 gen_op_fnstsw_A0();
4466 break;
4467 case 0x3c: /* fbld */
4468 gen_op_fbld_ST0_A0();
4469 break;
4470 case 0x3e: /* fbstp */
4471 gen_op_fbst_ST0_A0();
4472 gen_op_fpop();
4473 break;
4474 case 0x3d: /* fildll */
4475 gen_op_fildll_ST0_A0();
4476 break;
4477 case 0x3f: /* fistpll */
4478 gen_op_fistll_ST0_A0();
4479 gen_op_fpop();
4480 break;
4481 default:
4482 goto illegal_op;
4483 }
4484 } else {
4485 /* register float ops */
4486 opreg = rm;
4487
4488 switch(op) {
4489 case 0x08: /* fld sti */
4490 gen_op_fpush();
4491 gen_op_fmov_ST0_STN((opreg + 1) & 7);
4492 break;
4493 case 0x09: /* fxchg sti */
4494 case 0x29: /* fxchg4 sti, undocumented op */
4495 case 0x39: /* fxchg7 sti, undocumented op */
4496 gen_op_fxchg_ST0_STN(opreg);
4497 break;
4498 case 0x0a: /* grp d9/2 */
4499 switch(rm) {
4500 case 0: /* fnop */
4501 /* check exceptions (FreeBSD FPU probe) */
4502 if (s->cc_op != CC_OP_DYNAMIC)
4503 gen_op_set_cc_op(s->cc_op);
4504 gen_jmp_im(pc_start - s->cs_base);
4505 gen_op_fwait();
4506 break;
4507 default:
4508 goto illegal_op;
4509 }
4510 break;
4511 case 0x0c: /* grp d9/4 */
4512 switch(rm) {
4513 case 0: /* fchs */
4514 gen_op_fchs_ST0();
4515 break;
4516 case 1: /* fabs */
4517 gen_op_fabs_ST0();
4518 break;
4519 case 4: /* ftst */
4520 gen_op_fldz_FT0();
4521 gen_op_fcom_ST0_FT0();
4522 break;
4523 case 5: /* fxam */
4524 gen_op_fxam_ST0();
4525 break;
4526 default:
4527 goto illegal_op;
4528 }
4529 break;
4530 case 0x0d: /* grp d9/5 */
4531 {
4532 switch(rm) {
4533 case 0:
4534 gen_op_fpush();
4535 gen_op_fld1_ST0();
4536 break;
4537 case 1:
4538 gen_op_fpush();
4539 gen_op_fldl2t_ST0();
4540 break;
4541 case 2:
4542 gen_op_fpush();
4543 gen_op_fldl2e_ST0();
4544 break;
4545 case 3:
4546 gen_op_fpush();
4547 gen_op_fldpi_ST0();
4548 break;
4549 case 4:
4550 gen_op_fpush();
4551 gen_op_fldlg2_ST0();
4552 break;
4553 case 5:
4554 gen_op_fpush();
4555 gen_op_fldln2_ST0();
4556 break;
4557 case 6:
4558 gen_op_fpush();
4559 gen_op_fldz_ST0();
4560 break;
4561 default:
4562 goto illegal_op;
4563 }
4564 }
4565 break;
4566 case 0x0e: /* grp d9/6 */
4567 switch(rm) {
4568 case 0: /* f2xm1 */
4569 gen_op_f2xm1();
4570 break;
4571 case 1: /* fyl2x */
4572 gen_op_fyl2x();
4573 break;
4574 case 2: /* fptan */
4575 gen_op_fptan();
4576 break;
4577 case 3: /* fpatan */
4578 gen_op_fpatan();
4579 break;
4580 case 4: /* fxtract */
4581 gen_op_fxtract();
4582 break;
4583 case 5: /* fprem1 */
4584 gen_op_fprem1();
4585 break;
4586 case 6: /* fdecstp */
4587 gen_op_fdecstp();
4588 break;
4589 default:
4590 case 7: /* fincstp */
4591 gen_op_fincstp();
4592 break;
4593 }
4594 break;
4595 case 0x0f: /* grp d9/7 */
4596 switch(rm) {
4597 case 0: /* fprem */
4598 gen_op_fprem();
4599 break;
4600 case 1: /* fyl2xp1 */
4601 gen_op_fyl2xp1();
4602 break;
4603 case 2: /* fsqrt */
4604 gen_op_fsqrt();
4605 break;
4606 case 3: /* fsincos */
4607 gen_op_fsincos();
4608 break;
4609 case 5: /* fscale */
4610 gen_op_fscale();
4611 break;
4612 case 4: /* frndint */
4613 gen_op_frndint();
4614 break;
4615 case 6: /* fsin */
4616 gen_op_fsin();
4617 break;
4618 default:
4619 case 7: /* fcos */
4620 gen_op_fcos();
4621 break;
4622 }
4623 break;
4624 case 0x00: case 0x01: case 0x04 ... 0x07: /* fxxx st, sti */
4625 case 0x20: case 0x21: case 0x24 ... 0x27: /* fxxx sti, st */
4626 case 0x30: case 0x31: case 0x34 ... 0x37: /* fxxxp sti, st */
4627 {
4628 int op1;
4629
4630 op1 = op & 7;
4631 if (op >= 0x20) {
4632 gen_op_fp_arith_STN_ST0[op1](opreg);
4633 if (op >= 0x30)
4634 gen_op_fpop();
4635 } else {
4636 gen_op_fmov_FT0_STN(opreg);
4637 gen_op_fp_arith_ST0_FT0[op1]();
4638 }
4639 }
4640 break;
4641 case 0x02: /* fcom */
4642 case 0x22: /* fcom2, undocumented op */
4643 gen_op_fmov_FT0_STN(opreg);
4644 gen_op_fcom_ST0_FT0();
4645 break;
4646 case 0x03: /* fcomp */
4647 case 0x23: /* fcomp3, undocumented op */
4648 case 0x32: /* fcomp5, undocumented op */
4649 gen_op_fmov_FT0_STN(opreg);
4650 gen_op_fcom_ST0_FT0();
4651 gen_op_fpop();
4652 break;
4653 case 0x15: /* da/5 */
4654 switch(rm) {
4655 case 1: /* fucompp */
4656 gen_op_fmov_FT0_STN(1);
4657 gen_op_fucom_ST0_FT0();
4658 gen_op_fpop();
4659 gen_op_fpop();
4660 break;
4661 default:
4662 goto illegal_op;
4663 }
4664 break;
4665 case 0x1c:
4666 switch(rm) {
4667 case 0: /* feni (287 only, just do nop here) */
4668 break;
4669 case 1: /* fdisi (287 only, just do nop here) */
4670 break;
4671 case 2: /* fclex */
4672 gen_op_fclex();
4673 break;
4674 case 3: /* fninit */
4675 gen_op_fninit();
4676 break;
4677 case 4: /* fsetpm (287 only, just do nop here) */
4678 break;
4679 default:
4680 goto illegal_op;
4681 }
4682 break;
4683 case 0x1d: /* fucomi */
4684 if (s->cc_op != CC_OP_DYNAMIC)
4685 gen_op_set_cc_op(s->cc_op);
4686 gen_op_fmov_FT0_STN(opreg);
4687 gen_op_fucomi_ST0_FT0();
4688 s->cc_op = CC_OP_EFLAGS;
4689 break;
4690 case 0x1e: /* fcomi */
4691 if (s->cc_op != CC_OP_DYNAMIC)
4692 gen_op_set_cc_op(s->cc_op);
4693 gen_op_fmov_FT0_STN(opreg);
4694 gen_op_fcomi_ST0_FT0();
4695 s->cc_op = CC_OP_EFLAGS;
4696 break;
4697 case 0x28: /* ffree sti */
4698 gen_op_ffree_STN(opreg);
4699 break;
4700 case 0x2a: /* fst sti */
4701 gen_op_fmov_STN_ST0(opreg);
4702 break;
4703 case 0x2b: /* fstp sti */
4704 case 0x0b: /* fstp1 sti, undocumented op */
4705 case 0x3a: /* fstp8 sti, undocumented op */
4706 case 0x3b: /* fstp9 sti, undocumented op */
4707 gen_op_fmov_STN_ST0(opreg);
4708 gen_op_fpop();
4709 break;
4710 case 0x2c: /* fucom st(i) */
4711 gen_op_fmov_FT0_STN(opreg);
4712 gen_op_fucom_ST0_FT0();
4713 break;
4714 case 0x2d: /* fucomp st(i) */
4715 gen_op_fmov_FT0_STN(opreg);
4716 gen_op_fucom_ST0_FT0();
4717 gen_op_fpop();
4718 break;
4719 case 0x33: /* de/3 */
4720 switch(rm) {
4721 case 1: /* fcompp */
4722 gen_op_fmov_FT0_STN(1);
4723 gen_op_fcom_ST0_FT0();
4724 gen_op_fpop();
4725 gen_op_fpop();
4726 break;
4727 default:
4728 goto illegal_op;
4729 }
4730 break;
4731 case 0x38: /* ffreep sti, undocumented op */
4732 gen_op_ffree_STN(opreg);
4733 gen_op_fpop();
4734 break;
4735 case 0x3c: /* df/4 */
4736 switch(rm) {
4737 case 0:
4738 gen_op_fnstsw_EAX();
4739 break;
4740 default:
4741 goto illegal_op;
4742 }
4743 break;
4744 case 0x3d: /* fucomip */
4745 if (s->cc_op != CC_OP_DYNAMIC)
4746 gen_op_set_cc_op(s->cc_op);
4747 gen_op_fmov_FT0_STN(opreg);
4748 gen_op_fucomi_ST0_FT0();
4749 gen_op_fpop();
4750 s->cc_op = CC_OP_EFLAGS;
4751 break;
4752 case 0x3e: /* fcomip */
4753 if (s->cc_op != CC_OP_DYNAMIC)
4754 gen_op_set_cc_op(s->cc_op);
4755 gen_op_fmov_FT0_STN(opreg);
4756 gen_op_fcomi_ST0_FT0();
4757 gen_op_fpop();
4758 s->cc_op = CC_OP_EFLAGS;
4759 break;
4760 case 0x10 ... 0x13: /* fcmovxx */
4761 case 0x18 ... 0x1b:
4762 {
4763 int op1;
4764 const static uint8_t fcmov_cc[8] = {
4765 (JCC_B << 1),
4766 (JCC_Z << 1),
4767 (JCC_BE << 1),
4768 (JCC_P << 1),
4769 };
4770 op1 = fcmov_cc[op & 3] | ((op >> 3) & 1);
4771 gen_setcc(s, op1);
4772 gen_op_fcmov_ST0_STN_T0(opreg);
4773 }
4774 break;
4775 default:
4776 goto illegal_op;
4777 }
4778 }
4779#ifdef USE_CODE_COPY
4780 s->tb->cflags |= CF_TB_FP_USED;
4781#endif
4782 break;
4783 /************************/
4784 /* string ops */
4785
4786 case 0xa4: /* movsS */
4787 case 0xa5:
4788 if ((b & 1) == 0)
4789 ot = OT_BYTE;
4790 else
4791 ot = dflag + OT_WORD;
4792
4793 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4794 gen_repz_movs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4795 } else {
4796 gen_movs(s, ot);
4797 }
4798 break;
4799
4800 case 0xaa: /* stosS */
4801 case 0xab:
4802 if ((b & 1) == 0)
4803 ot = OT_BYTE;
4804 else
4805 ot = dflag + OT_WORD;
4806
4807 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4808 gen_repz_stos(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4809 } else {
4810 gen_stos(s, ot);
4811 }
4812 break;
4813 case 0xac: /* lodsS */
4814 case 0xad:
4815 if ((b & 1) == 0)
4816 ot = OT_BYTE;
4817 else
4818 ot = dflag + OT_WORD;
4819 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4820 gen_repz_lods(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4821 } else {
4822 gen_lods(s, ot);
4823 }
4824 break;
4825 case 0xae: /* scasS */
4826 case 0xaf:
4827 if ((b & 1) == 0)
4828 ot = OT_BYTE;
4829 else
4830 ot = dflag + OT_WORD;
4831 if (prefixes & PREFIX_REPNZ) {
4832 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4833 } else if (prefixes & PREFIX_REPZ) {
4834 gen_repz_scas(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4835 } else {
4836 gen_scas(s, ot);
4837 s->cc_op = CC_OP_SUBB + ot;
4838 }
4839 break;
4840
4841 case 0xa6: /* cmpsS */
4842 case 0xa7:
4843 if ((b & 1) == 0)
4844 ot = OT_BYTE;
4845 else
4846 ot = dflag + OT_WORD;
4847 if (prefixes & PREFIX_REPNZ) {
4848 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 1);
4849 } else if (prefixes & PREFIX_REPZ) {
4850 gen_repz_cmps(s, ot, pc_start - s->cs_base, s->pc - s->cs_base, 0);
4851 } else {
4852 gen_cmps(s, ot);
4853 s->cc_op = CC_OP_SUBB + ot;
4854 }
4855 break;
4856 case 0x6c: /* insS */
4857 case 0x6d:
4858 if ((b & 1) == 0)
4859 ot = OT_BYTE;
4860 else
4861 ot = dflag ? OT_LONG : OT_WORD;
4862 gen_check_io(s, ot, 1, pc_start - s->cs_base);
4863 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4864 gen_repz_ins(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4865 } else {
4866 gen_ins(s, ot);
4867 }
4868 break;
4869 case 0x6e: /* outsS */
4870 case 0x6f:
4871 if ((b & 1) == 0)
4872 ot = OT_BYTE;
4873 else
4874 ot = dflag ? OT_LONG : OT_WORD;
4875 gen_check_io(s, ot, 1, pc_start - s->cs_base);
4876 if (prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) {
4877 gen_repz_outs(s, ot, pc_start - s->cs_base, s->pc - s->cs_base);
4878 } else {
4879 gen_outs(s, ot);
4880 }
4881 break;
4882
4883 /************************/
4884 /* port I/O */
4885 case 0xe4:
4886 case 0xe5:
4887 if ((b & 1) == 0)
4888 ot = OT_BYTE;
4889 else
4890 ot = dflag ? OT_LONG : OT_WORD;
4891 val = ldub_code(s->pc++);
4892 gen_op_movl_T0_im(val);
4893 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4894 gen_op_in[ot]();
4895 gen_op_mov_reg_T1[ot][R_EAX]();
4896 break;
4897 case 0xe6:
4898 case 0xe7:
4899 if ((b & 1) == 0)
4900 ot = OT_BYTE;
4901 else
4902 ot = dflag ? OT_LONG : OT_WORD;
4903 val = ldub_code(s->pc++);
4904 gen_op_movl_T0_im(val);
/* I/O permission check must still run even for the skipped port below. */
4905 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4906#ifdef VBOX /* bird: linux is writing to this port for delaying I/O. */
/* Port 0x80 (POST diagnostic port) is written by Linux purely as an I/O
   delay; VirtualBox discards the write after the permission check. */
4907 if (val == 0x80)
4908 break;
4909#endif /* VBOX */
4910 gen_op_mov_TN_reg[ot][1][R_EAX]();
4911 gen_op_out[ot]();
4912 break;
4913 case 0xec:
4914 case 0xed:
4915 if ((b & 1) == 0)
4916 ot = OT_BYTE;
4917 else
4918 ot = dflag ? OT_LONG : OT_WORD;
4919 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
4920 gen_op_andl_T0_ffff();
4921 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4922 gen_op_in[ot]();
4923 gen_op_mov_reg_T1[ot][R_EAX]();
4924 break;
4925 case 0xee:
4926 case 0xef:
4927 if ((b & 1) == 0)
4928 ot = OT_BYTE;
4929 else
4930 ot = dflag ? OT_LONG : OT_WORD;
4931 gen_op_mov_TN_reg[OT_WORD][0][R_EDX]();
4932 gen_op_andl_T0_ffff();
4933 gen_check_io(s, ot, 0, pc_start - s->cs_base);
4934 gen_op_mov_TN_reg[ot][1][R_EAX]();
4935 gen_op_out[ot]();
4936 break;
4937
4938 /************************/
4939 /* control */
4940 case 0xc2: /* ret im */
4941 val = ldsw_code(s->pc);
4942 s->pc += 2;
4943 gen_pop_T0(s);
/* In 64-bit mode near RET ignores the 32-bit operand size: any non-16-bit
   dflag is forced to the 64-bit width before adjusting RSP. */
4944 if (CODE64(s) && s->dflag)
4945 s->dflag = 2;
/* Discard the return address slot plus the immediate byte count. */
4946 gen_stack_update(s, val + (2 << s->dflag));
4947 if (s->dflag == 0)
4948 gen_op_andl_T0_ffff();
4949 gen_op_jmp_T0();
4950 gen_eob(s);
4951 break;
4952 case 0xc3: /* ret */
4953 gen_pop_T0(s);
4954 gen_pop_update(s);
/* 16-bit operand size truncates the popped target to IP. */
4955 if (s->dflag == 0)
4956 gen_op_andl_T0_ffff();
4957 gen_op_jmp_T0();
4958 gen_eob(s);
4959 break;
4960 case 0xca: /* lret im */
4961 val = ldsw_code(s->pc);
4962 s->pc += 2;
4963 do_lret:
4964 if (s->pe && !s->vm86) {
4965 if (s->cc_op != CC_OP_DYNAMIC)
4966 gen_op_set_cc_op(s->cc_op);
4967 gen_jmp_im(pc_start - s->cs_base);
4968 gen_op_lret_protected(s->dflag, val);
4969 } else {
4970 gen_stack_A0(s);
4971 /* pop offset */
4972 gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
4973 if (s->dflag == 0)
4974 gen_op_andl_T0_ffff();
4975 /* NOTE: keeping EIP updated is not a problem in case of
4976 exception */
4977 gen_op_jmp_T0();
4978 /* pop selector */
4979 gen_op_addl_A0_im(2 << s->dflag);
4980 gen_op_ld_T0_A0[1 + s->dflag + s->mem_index]();
4981 gen_op_movl_seg_T0_vm(offsetof(CPUX86State,segs[R_CS]));
4982 /* add stack offset */
4983 gen_stack_update(s, val + (4 << s->dflag));
4984 }
4985 gen_eob(s);
4986 break;
4987 case 0xcb: /* lret */
4988 val = 0;
4989 goto do_lret;
4990 case 0xcf: /* iret */
4991 if (!s->pe) {
4992 /* real mode */
4993 gen_op_iret_real(s->dflag);
4994 s->cc_op = CC_OP_EFLAGS;
4995 } else if (s->vm86) {
4996#ifdef VBOX
/* VBOX extension: with CR4.VME set, a 16-bit (dflag == 0) IRET is legal
   in v86 mode even when IOPL < 3; 32-bit IRET still requires IOPL == 3. */
4997 if (s->iopl != 3 && (!s->vme || s->dflag)) {
4998#else
4999 if (s->iopl != 3) {
5000#endif
5001 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5002 } else {
5003 gen_op_iret_real(s->dflag);
5004 s->cc_op = CC_OP_EFLAGS;
5005 }
5006 } else {
/* Protected mode: the helper can fault, so flags and EIP must be written
   back before it runs. */
5007 if (s->cc_op != CC_OP_DYNAMIC)
5008 gen_op_set_cc_op(s->cc_op);
5009 gen_jmp_im(pc_start - s->cs_base);
5010 gen_op_iret_protected(s->dflag, s->pc - s->cs_base);
5011 s->cc_op = CC_OP_EFLAGS;
5012 }
5013 gen_eob(s);
5014 break;
5015 case 0xe8: /* call im */
5016 {
5017 if (dflag)
5018 tval = (int32_t)insn_get(s, OT_LONG);
5019 else
5020 tval = (int16_t)insn_get(s, OT_WORD);
5021 next_eip = s->pc - s->cs_base;
5022 tval += next_eip;
5023 if (s->dflag == 0)
5024 tval &= 0xffff;
5025 gen_movtl_T0_im(next_eip);
5026 gen_push_T0(s);
5027 gen_jmp(s, tval);
5028 }
5029 break;
5030 case 0x9a: /* lcall im */
5031 {
5032 unsigned int selector, offset;
5033
5034 if (CODE64(s))
5035 goto illegal_op;
5036 ot = dflag ? OT_LONG : OT_WORD;
5037 offset = insn_get(s, ot);
5038 selector = insn_get(s, OT_WORD);
5039
5040 gen_op_movl_T0_im(selector);
5041 gen_op_movl_T1_imu(offset);
5042 }
5043 goto do_lcall;
5044 case 0xe9: /* jmp im */
5045 if (dflag)
5046 tval = (int32_t)insn_get(s, OT_LONG);
5047 else
5048 tval = (int16_t)insn_get(s, OT_WORD);
5049 tval += s->pc - s->cs_base;
5050 if (s->dflag == 0)
5051 tval &= 0xffff;
5052 gen_jmp(s, tval);
5053 break;
5054 case 0xea: /* ljmp im */
5055 {
5056 unsigned int selector, offset;
5057
5058 if (CODE64(s))
5059 goto illegal_op;
5060 ot = dflag ? OT_LONG : OT_WORD;
5061 offset = insn_get(s, ot);
5062 selector = insn_get(s, OT_WORD);
5063
5064 gen_op_movl_T0_im(selector);
5065 gen_op_movl_T1_imu(offset);
5066 }
5067 goto do_ljmp;
5068 case 0xeb: /* jmp Jb */
5069 tval = (int8_t)insn_get(s, OT_BYTE);
5070 tval += s->pc - s->cs_base;
5071 if (s->dflag == 0)
5072 tval &= 0xffff;
5073 gen_jmp(s, tval);
5074 break;
5075 case 0x70 ... 0x7f: /* jcc Jb */
5076 tval = (int8_t)insn_get(s, OT_BYTE);
5077 goto do_jcc;
5078 case 0x180 ... 0x18f: /* jcc Jv */
5079 if (dflag) {
5080 tval = (int32_t)insn_get(s, OT_LONG);
5081 } else {
5082 tval = (int16_t)insn_get(s, OT_WORD);
5083 }
5084 do_jcc:
5085 next_eip = s->pc - s->cs_base;
5086 tval += next_eip;
5087 if (s->dflag == 0)
5088 tval &= 0xffff;
5089 gen_jcc(s, b, tval, next_eip);
5090 break;
5091
5092 case 0x190 ... 0x19f: /* setcc Gv */
5093 modrm = ldub_code(s->pc++);
5094 gen_setcc(s, b);
5095 gen_ldst_modrm(s, modrm, OT_BYTE, OR_TMP0, 1);
5096 break;
5097 case 0x140 ... 0x14f: /* cmov Gv, Ev */
5098 ot = dflag + OT_WORD;
5099 modrm = ldub_code(s->pc++);
5100 reg = ((modrm >> 3) & 7) | rex_r;
5101 mod = (modrm >> 6) & 3;
5102 gen_setcc(s, b);
5103 if (mod != 3) {
5104 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5105 gen_op_ld_T1_A0[ot + s->mem_index]();
5106 } else {
5107 rm = (modrm & 7) | REX_B(s);
5108 gen_op_mov_TN_reg[ot][1][rm]();
5109 }
5110 gen_op_cmov_reg_T1_T0[ot - OT_WORD][reg]();
5111 break;
5112
5113 /************************/
5114 /* flags */
5115 case 0x9c: /* pushf */
        /* Push EFLAGS.  In vm86 mode with IOPL < 3 this normally
           faults with #GP. */
5116#ifdef VBOX
        /* VBox extension: CR4.VME permits the 16-bit pushf even with
           IOPL < 3 (the 32-bit form, s->dflag != 0, still faults). */
5117 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
5118#else
5119 if (s->vm86 && s->iopl != 3) {
5120#endif
5121 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5122 } else {
            /* Materialise lazily-computed flags before reading EFLAGS. */
5123 if (s->cc_op != CC_OP_DYNAMIC)
5124 gen_op_set_cc_op(s->cc_op);
5125#ifdef VBOX
            /* VME path: presumably substitutes VIF for IF in the pushed
               image per the VME rules — helper body not visible here,
               confirm in the op definitions. */
5126 if (s->vm86 && s->vme && s->iopl != 3)
5127 gen_op_movl_T0_eflags_vme();
5128 else
5129#endif
5130 gen_op_movl_T0_eflags();
5131 gen_push_T0(s);
5132 }
5133 break;
5134 case 0x9d: /* popf */
        /* Pop EFLAGS.  The writable-bit mask depends on privilege, so a
           different helper is chosen for CPL 0, CPL <= IOPL (may write
           IF), and the fully restricted case. */
5135#ifdef VBOX
5136 if (s->vm86 && s->iopl != 3 && (!s->vme || s->dflag)) {
5137#else
5138 if (s->vm86 && s->iopl != 3) {
5139#endif
5140 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5141 } else {
5142 gen_pop_T0(s);
5143 if (s->cpl == 0) {
5144 if (s->dflag) {
5145 gen_op_movl_eflags_T0_cpl0();
5146 } else {
5147 gen_op_movw_eflags_T0_cpl0();
5148 }
5149 } else {
5150 if (s->cpl <= s->iopl) {
5151 if (s->dflag) {
5152 gen_op_movl_eflags_T0_io();
5153 } else {
5154 gen_op_movw_eflags_T0_io();
5155 }
5156 } else {
5157 if (s->dflag) {
5158 gen_op_movl_eflags_T0();
5159 } else {
5160#ifdef VBOX
                        /* VBox: 16-bit popf under VME in vm86 mode uses a
                           dedicated helper (VIF handling; see VME spec). */
5161 if (s->vm86 && s->vme)
5162 gen_op_movw_eflags_T0_vme();
5163 else
5164#endif
5165 gen_op_movw_eflags_T0();
5166 }
5167 }
5168 }
5169 gen_pop_update(s);
5170 s->cc_op = CC_OP_EFLAGS;
5171 /* abort translation because TF flag may change */
5172 gen_jmp_im(s->pc - s->cs_base);
5173 gen_eob(s);
5174 }
5175 break;
5176 case 0x9e: /* sahf */
5177 if (CODE64(s))
5178 goto illegal_op;
5179 gen_op_mov_TN_reg[OT_BYTE][0][R_AH]();
5180 if (s->cc_op != CC_OP_DYNAMIC)
5181 gen_op_set_cc_op(s->cc_op);
5182 gen_op_movb_eflags_T0();
5183 s->cc_op = CC_OP_EFLAGS;
5184 break;
5185 case 0x9f: /* lahf */
5186 if (CODE64(s))
5187 goto illegal_op;
5188 if (s->cc_op != CC_OP_DYNAMIC)
5189 gen_op_set_cc_op(s->cc_op);
5190 gen_op_movl_T0_eflags();
5191 gen_op_mov_reg_T0[OT_BYTE][R_AH]();
5192 break;
5193 case 0xf5: /* cmc */
5194 if (s->cc_op != CC_OP_DYNAMIC)
5195 gen_op_set_cc_op(s->cc_op);
5196 gen_op_cmc();
5197 s->cc_op = CC_OP_EFLAGS;
5198 break;
5199 case 0xf8: /* clc */
5200 if (s->cc_op != CC_OP_DYNAMIC)
5201 gen_op_set_cc_op(s->cc_op);
5202 gen_op_clc();
5203 s->cc_op = CC_OP_EFLAGS;
5204 break;
5205 case 0xf9: /* stc */
5206 if (s->cc_op != CC_OP_DYNAMIC)
5207 gen_op_set_cc_op(s->cc_op);
5208 gen_op_stc();
5209 s->cc_op = CC_OP_EFLAGS;
5210 break;
5211 case 0xfc: /* cld */
5212 gen_op_cld();
5213 break;
5214 case 0xfd: /* std */
5215 gen_op_std();
5216 break;
5217
5218 /************************/
5219 /* bit operations */
5220 case 0x1ba: /* bt/bts/btr/btc Gv, im */
5221 ot = dflag + OT_WORD;
5222 modrm = ldub_code(s->pc++);
5223 op = (modrm >> 3) & 7;
5224 mod = (modrm >> 6) & 3;
5225 rm = (modrm & 7) | REX_B(s);
5226 if (mod != 3) {
5227 s->rip_offset = 1;
5228 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5229 gen_op_ld_T0_A0[ot + s->mem_index]();
5230 } else {
5231 gen_op_mov_TN_reg[ot][0][rm]();
5232 }
5233 /* load shift */
5234 val = ldub_code(s->pc++);
5235 gen_op_movl_T1_im(val);
5236 if (op < 4)
5237 goto illegal_op;
5238 op -= 4;
5239 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5240 s->cc_op = CC_OP_SARB + ot;
5241 if (op != 0) {
5242 if (mod != 3)
5243 gen_op_st_T0_A0[ot + s->mem_index]();
5244 else
5245 gen_op_mov_reg_T0[ot][rm]();
5246 gen_op_update_bt_cc();
5247 }
5248 break;
5249 case 0x1a3: /* bt Gv, Ev */
5250 op = 0;
5251 goto do_btx;
5252 case 0x1ab: /* bts */
5253 op = 1;
5254 goto do_btx;
5255 case 0x1b3: /* btr */
5256 op = 2;
5257 goto do_btx;
5258 case 0x1bb: /* btc */
5259 op = 3;
5260 do_btx:
5261 ot = dflag + OT_WORD;
5262 modrm = ldub_code(s->pc++);
5263 reg = ((modrm >> 3) & 7) | rex_r;
5264 mod = (modrm >> 6) & 3;
5265 rm = (modrm & 7) | REX_B(s);
5266 gen_op_mov_TN_reg[OT_LONG][1][reg]();
5267 if (mod != 3) {
5268 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5269 /* specific case: we need to add a displacement */
5270 gen_op_add_bit_A0_T1[ot - OT_WORD]();
5271 gen_op_ld_T0_A0[ot + s->mem_index]();
5272 } else {
5273 gen_op_mov_TN_reg[ot][0][rm]();
5274 }
5275 gen_op_btx_T0_T1_cc[ot - OT_WORD][op]();
5276 s->cc_op = CC_OP_SARB + ot;
5277 if (op != 0) {
5278 if (mod != 3)
5279 gen_op_st_T0_A0[ot + s->mem_index]();
5280 else
5281 gen_op_mov_reg_T0[ot][rm]();
5282 gen_op_update_bt_cc();
5283 }
5284 break;
5285 case 0x1bc: /* bsf */
5286 case 0x1bd: /* bsr */
5287 ot = dflag + OT_WORD;
5288 modrm = ldub_code(s->pc++);
5289 reg = ((modrm >> 3) & 7) | rex_r;
5290 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5291 /* NOTE: in order to handle the 0 case, we must load the
5292 result. It could be optimized with a generated jump */
5293 gen_op_mov_TN_reg[ot][1][reg]();
5294 gen_op_bsx_T0_cc[ot - OT_WORD][b & 1]();
5295 gen_op_mov_reg_T1[ot][reg]();
5296 s->cc_op = CC_OP_LOGICB + ot;
5297 break;
5298 /************************/
5299 /* bcd */
5300 case 0x27: /* daa */
5301 if (CODE64(s))
5302 goto illegal_op;
5303 if (s->cc_op != CC_OP_DYNAMIC)
5304 gen_op_set_cc_op(s->cc_op);
5305 gen_op_daa();
5306 s->cc_op = CC_OP_EFLAGS;
5307 break;
5308 case 0x2f: /* das */
5309 if (CODE64(s))
5310 goto illegal_op;
5311 if (s->cc_op != CC_OP_DYNAMIC)
5312 gen_op_set_cc_op(s->cc_op);
5313 gen_op_das();
5314 s->cc_op = CC_OP_EFLAGS;
5315 break;
5316 case 0x37: /* aaa */
5317 if (CODE64(s))
5318 goto illegal_op;
5319 if (s->cc_op != CC_OP_DYNAMIC)
5320 gen_op_set_cc_op(s->cc_op);
5321 gen_op_aaa();
5322 s->cc_op = CC_OP_EFLAGS;
5323 break;
5324 case 0x3f: /* aas */
5325 if (CODE64(s))
5326 goto illegal_op;
5327 if (s->cc_op != CC_OP_DYNAMIC)
5328 gen_op_set_cc_op(s->cc_op);
5329 gen_op_aas();
5330 s->cc_op = CC_OP_EFLAGS;
5331 break;
5332 case 0xd4: /* aam */
5333 if (CODE64(s))
5334 goto illegal_op;
5335 val = ldub_code(s->pc++);
5336 gen_op_aam(val);
5337 s->cc_op = CC_OP_LOGICB;
5338 break;
5339 case 0xd5: /* aad */
5340 if (CODE64(s))
5341 goto illegal_op;
5342 val = ldub_code(s->pc++);
5343 gen_op_aad(val);
5344 s->cc_op = CC_OP_LOGICB;
5345 break;
5346 /************************/
5347 /* misc */
5348 case 0x90: /* nop */
5349 /* XXX: xchg + rex handling */
5350 /* XXX: correct lock test for all insn */
5351 if (prefixes & PREFIX_LOCK)
5352 goto illegal_op;
5353 break;
5354 case 0x9b: /* fwait */
5355 if ((s->flags & (HF_MP_MASK | HF_TS_MASK)) ==
5356 (HF_MP_MASK | HF_TS_MASK)) {
5357 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
5358 } else {
5359 if (s->cc_op != CC_OP_DYNAMIC)
5360 gen_op_set_cc_op(s->cc_op);
5361 gen_jmp_im(pc_start - s->cs_base);
5362 gen_op_fwait();
5363 }
5364 break;
5365 case 0xcc: /* int3 */
5366#ifdef VBOX
5367 if (s->vm86 && s->iopl != 3 && !s->vme) {
5368 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5369 }
5370 else
5371#endif
5372 gen_interrupt(s, EXCP03_INT3, pc_start - s->cs_base, s->pc - s->cs_base);
5373 break;
5374 case 0xcd: /* int N */
5375 val = ldub_code(s->pc++);
5376#ifdef VBOX
5377 if (s->vm86 && s->iopl != 3 && !s->vme) {
5378#else
5379 if (s->vm86 && s->iopl != 3) {
5380#endif
5381 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5382 } else {
5383 gen_interrupt(s, val, pc_start - s->cs_base, s->pc - s->cs_base);
5384 }
5385 break;
5386 case 0xce: /* into */
5387 if (CODE64(s))
5388 goto illegal_op;
5389 if (s->cc_op != CC_OP_DYNAMIC)
5390 gen_op_set_cc_op(s->cc_op);
5391 gen_jmp_im(pc_start - s->cs_base);
5392 gen_op_into(s->pc - pc_start);
5393 break;
5394 case 0xf1: /* icebp (undocumented, exits to external debugger) */
5395 gen_debug(s, pc_start - s->cs_base);
5396 break;
5397 case 0xfa: /* cli */
        /* Clear IF.  Outside vm86: allowed when CPL <= IOPL, else #GP.
           In vm86: allowed only with IOPL == 3, except that the VBox
           VME extension routes IOPL < 3 through gen_op_cli_vme()
           (presumably clearing VIF rather than IF — helper not visible
           here, verify against the op implementation). */
5398 if (!s->vm86) {
5399 if (s->cpl <= s->iopl) {
5400 gen_op_cli();
5401 } else {
5402 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5403 }
5404 } else {
5405 if (s->iopl == 3) {
5406 gen_op_cli();
                /* NOTE: the "} else" / "if" split below is deliberate so
                   that both the VBOX and non-VBOX preprocessed variants
                   parse as a valid if/else chain. */
5407#ifdef VBOX
5408 } else
5409 if (s->iopl != 3 && s->vme) {
5410 gen_op_cli_vme();
5411#endif
5412 } else {
5413 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5414 }
5415 }
5416 break;
5417 case 0xfb: /* sti */
        /* Set IF.  Same privilege rules as cli; additionally arms the
           one-instruction interrupt-inhibit window and ends the TB so
           pending interrupts can be serviced. */
5418 if (!s->vm86) {
5419 if (s->cpl <= s->iopl) {
5420 gen_sti:
5421 gen_op_sti();
5422 /* interruptions are enabled only the first insn after sti */
5423 /* If several instructions disable interrupts, only the
5424 _first_ does it */
5425 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
5426 gen_op_set_inhibit_irq();
5427 /* give a chance to handle pending irqs */
5428 gen_jmp_im(s->pc - s->cs_base);
5429 gen_eob(s);
5430 } else {
5431 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5432 }
5433 } else {
5434 if (s->iopl == 3) {
5435 goto gen_sti;
5436#ifdef VBOX
            /* VBox VME path: dedicated helper (VIF semantics), then end
               the TB just like the normal sti path.  Note: unlike
               gen_sti, this path does not arm the inhibit-irq window. */
5437 } else
5438 if (s->iopl != 3 && s->vme) {
5439 gen_op_sti_vme();
5440 /* give a chance to handle pending irqs */
5441 gen_jmp_im(s->pc - s->cs_base);
5442 gen_eob(s);
5443#endif
5444 } else {
5445 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5446 }
5447 }
5448 break;
5449 case 0x62: /* bound */
5450 if (CODE64(s))
5451 goto illegal_op;
5452 ot = dflag ? OT_LONG : OT_WORD;
5453 modrm = ldub_code(s->pc++);
5454 reg = (modrm >> 3) & 7;
5455 mod = (modrm >> 6) & 3;
5456 if (mod == 3)
5457 goto illegal_op;
5458 gen_op_mov_TN_reg[ot][0][reg]();
5459 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5460 gen_jmp_im(pc_start - s->cs_base);
5461 if (ot == OT_WORD)
5462 gen_op_boundw();
5463 else
5464 gen_op_boundl();
5465 break;
5466 case 0x1c8 ... 0x1cf: /* bswap reg */
5467 reg = (b & 7) | REX_B(s);
5468#ifdef TARGET_X86_64
5469 if (dflag == 2) {
5470 gen_op_mov_TN_reg[OT_QUAD][0][reg]();
5471 gen_op_bswapq_T0();
5472 gen_op_mov_reg_T0[OT_QUAD][reg]();
5473 } else
5474#endif
5475 {
5476 gen_op_mov_TN_reg[OT_LONG][0][reg]();
5477 gen_op_bswapl_T0();
5478 gen_op_mov_reg_T0[OT_LONG][reg]();
5479 }
5480 break;
5481 case 0xd6: /* salc */
5482 if (CODE64(s))
5483 goto illegal_op;
5484 if (s->cc_op != CC_OP_DYNAMIC)
5485 gen_op_set_cc_op(s->cc_op);
5486 gen_op_salc();
5487 break;
5488 case 0xe0: /* loopnz */
5489 case 0xe1: /* loopz */
5490 if (s->cc_op != CC_OP_DYNAMIC)
5491 gen_op_set_cc_op(s->cc_op);
5492 /* FALL THRU */
5493 case 0xe2: /* loop */
5494 case 0xe3: /* jecxz */
5495 {
5496 int l1, l2;
5497
5498 tval = (int8_t)insn_get(s, OT_BYTE);
5499 next_eip = s->pc - s->cs_base;
5500 tval += next_eip;
5501 if (s->dflag == 0)
5502 tval &= 0xffff;
5503
5504 l1 = gen_new_label();
5505 l2 = gen_new_label();
5506 b &= 3;
5507 if (b == 3) {
5508 gen_op_jz_ecx[s->aflag](l1);
5509 } else {
5510 gen_op_dec_ECX[s->aflag]();
5511 if (b <= 1)
5512 gen_op_mov_T0_cc();
5513 gen_op_loop[s->aflag][b](l1);
5514 }
5515
5516 gen_jmp_im(next_eip);
5517 gen_op_jmp_label(l2);
5518 gen_set_label(l1);
5519 gen_jmp_im(tval);
5520 gen_set_label(l2);
5521 gen_eob(s);
5522 }
5523 break;
5524 case 0x130: /* wrmsr */
5525 case 0x132: /* rdmsr */
5526 if (s->cpl != 0) {
5527 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5528 } else {
5529 if (b & 2)
5530 gen_op_rdmsr();
5531 else
5532 gen_op_wrmsr();
5533 }
5534 break;
5535 case 0x131: /* rdtsc */
5536 gen_jmp_im(pc_start - s->cs_base);
5537 gen_op_rdtsc();
5538 break;
5539 case 0x134: /* sysenter */
5540 if (CODE64(s))
5541 goto illegal_op;
5542 if (!s->pe) {
5543 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5544 } else {
5545 if (s->cc_op != CC_OP_DYNAMIC) {
5546 gen_op_set_cc_op(s->cc_op);
5547 s->cc_op = CC_OP_DYNAMIC;
5548 }
5549 gen_jmp_im(pc_start - s->cs_base);
5550 gen_op_sysenter();
5551 gen_eob(s);
5552 }
5553 break;
5554 case 0x135: /* sysexit */
5555 if (CODE64(s))
5556 goto illegal_op;
5557 if (!s->pe) {
5558 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5559 } else {
5560 if (s->cc_op != CC_OP_DYNAMIC) {
5561 gen_op_set_cc_op(s->cc_op);
5562 s->cc_op = CC_OP_DYNAMIC;
5563 }
5564 gen_jmp_im(pc_start - s->cs_base);
5565 gen_op_sysexit();
5566 gen_eob(s);
5567 }
5568 break;
5569#ifdef TARGET_X86_64
5570 case 0x105: /* syscall */
5571 /* XXX: is it usable in real mode ? */
5572 if (s->cc_op != CC_OP_DYNAMIC) {
5573 gen_op_set_cc_op(s->cc_op);
5574 s->cc_op = CC_OP_DYNAMIC;
5575 }
5576 gen_jmp_im(pc_start - s->cs_base);
5577 gen_op_syscall(s->pc - pc_start);
5578 gen_eob(s);
5579 break;
5580 case 0x107: /* sysret */
5581 if (!s->pe) {
5582 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5583 } else {
5584 if (s->cc_op != CC_OP_DYNAMIC) {
5585 gen_op_set_cc_op(s->cc_op);
5586 s->cc_op = CC_OP_DYNAMIC;
5587 }
5588 gen_jmp_im(pc_start - s->cs_base);
5589 gen_op_sysret(s->dflag);
5590 /* condition codes are modified only in long mode */
5591 if (s->lma)
5592 s->cc_op = CC_OP_EFLAGS;
5593 gen_eob(s);
5594 }
5595 break;
5596#endif
5597 case 0x1a2: /* cpuid */
5598 gen_op_cpuid();
5599 break;
5600 case 0xf4: /* hlt */
5601 if (s->cpl != 0) {
5602 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5603 } else {
5604 if (s->cc_op != CC_OP_DYNAMIC)
5605 gen_op_set_cc_op(s->cc_op);
5606 gen_jmp_im(s->pc - s->cs_base);
5607 gen_op_hlt();
5608 s->is_jmp = 3;
5609 }
5610 break;
5611 case 0x100:
5612 modrm = ldub_code(s->pc++);
5613 mod = (modrm >> 6) & 3;
5614 op = (modrm >> 3) & 7;
5615 switch(op) {
5616 case 0: /* sldt */
5617 if (!s->pe || s->vm86)
5618 goto illegal_op;
5619 gen_op_movl_T0_env(offsetof(CPUX86State,ldt.selector));
5620 ot = OT_WORD;
5621 if (mod == 3)
5622 ot += s->dflag;
5623 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5624 break;
5625 case 2: /* lldt */
5626 if (!s->pe || s->vm86)
5627 goto illegal_op;
5628 if (s->cpl != 0) {
5629 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5630 } else {
5631 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5632 gen_jmp_im(pc_start - s->cs_base);
5633 gen_op_lldt_T0();
5634 }
5635 break;
5636 case 1: /* str */
5637 if (!s->pe || s->vm86)
5638 goto illegal_op;
5639 gen_op_movl_T0_env(offsetof(CPUX86State,tr.selector));
5640 ot = OT_WORD;
5641 if (mod == 3)
5642 ot += s->dflag;
5643 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 1);
5644 break;
5645 case 3: /* ltr */
5646 if (!s->pe || s->vm86)
5647 goto illegal_op;
5648 if (s->cpl != 0) {
5649 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5650 } else {
5651 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5652 gen_jmp_im(pc_start - s->cs_base);
5653 gen_op_ltr_T0();
5654 }
5655 break;
5656 case 4: /* verr */
5657 case 5: /* verw */
5658 if (!s->pe || s->vm86)
5659 goto illegal_op;
5660 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5661 if (s->cc_op != CC_OP_DYNAMIC)
5662 gen_op_set_cc_op(s->cc_op);
5663 if (op == 4)
5664 gen_op_verr();
5665 else
5666 gen_op_verw();
5667 s->cc_op = CC_OP_EFLAGS;
5668 break;
5669 default:
5670 goto illegal_op;
5671 }
5672 break;
5673 case 0x101:
5674 modrm = ldub_code(s->pc++);
5675 mod = (modrm >> 6) & 3;
5676 op = (modrm >> 3) & 7;
5677 rm = modrm & 7;
5678 switch(op) {
5679 case 0: /* sgdt */
5680 if (mod == 3)
5681 goto illegal_op;
5682 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5683 gen_op_movl_T0_env(offsetof(CPUX86State, gdt.limit));
5684 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
5685 gen_add_A0_im(s, 2);
5686 gen_op_movtl_T0_env(offsetof(CPUX86State, gdt.base));
5687 if (!s->dflag)
5688 gen_op_andl_T0_im(0xffffff);
5689 gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5690 break;
5691 case 1:
5692 if (mod == 3) {
5693 switch (rm) {
5694 case 0: /* monitor */
5695 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5696 s->cpl != 0)
5697 goto illegal_op;
5698 gen_jmp_im(pc_start - s->cs_base);
5699#ifdef TARGET_X86_64
5700 if (s->aflag == 2) {
5701 gen_op_movq_A0_reg[R_EBX]();
5702 gen_op_addq_A0_AL();
5703 } else
5704#endif
5705 {
5706 gen_op_movl_A0_reg[R_EBX]();
5707 gen_op_addl_A0_AL();
5708 if (s->aflag == 0)
5709 gen_op_andl_A0_ffff();
5710 }
5711 gen_add_A0_ds_seg(s);
5712 gen_op_monitor();
5713 break;
5714 case 1: /* mwait */
5715 if (!(s->cpuid_ext_features & CPUID_EXT_MONITOR) ||
5716 s->cpl != 0)
5717 goto illegal_op;
5718 if (s->cc_op != CC_OP_DYNAMIC) {
5719 gen_op_set_cc_op(s->cc_op);
5720 s->cc_op = CC_OP_DYNAMIC;
5721 }
5722 gen_jmp_im(s->pc - s->cs_base);
5723 gen_op_mwait();
5724 gen_eob(s);
5725 break;
5726 default:
5727 goto illegal_op;
5728 }
5729 } else { /* sidt */
5730 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5731 gen_op_movl_T0_env(offsetof(CPUX86State, idt.limit));
5732 gen_op_st_T0_A0[OT_WORD + s->mem_index]();
5733 gen_add_A0_im(s, 2);
5734 gen_op_movtl_T0_env(offsetof(CPUX86State, idt.base));
5735 if (!s->dflag)
5736 gen_op_andl_T0_im(0xffffff);
5737 gen_op_st_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5738 }
5739 break;
5740 case 2: /* lgdt */
5741 case 3: /* lidt */
5742 if (mod == 3)
5743 goto illegal_op;
5744 if (s->cpl != 0) {
5745 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5746 } else {
5747 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5748 gen_op_ld_T1_A0[OT_WORD + s->mem_index]();
5749 gen_add_A0_im(s, 2);
5750 gen_op_ld_T0_A0[CODE64(s) + OT_LONG + s->mem_index]();
5751 if (!s->dflag)
5752 gen_op_andl_T0_im(0xffffff);
5753 if (op == 2) {
5754 gen_op_movtl_env_T0(offsetof(CPUX86State,gdt.base));
5755 gen_op_movl_env_T1(offsetof(CPUX86State,gdt.limit));
5756 } else {
5757 gen_op_movtl_env_T0(offsetof(CPUX86State,idt.base));
5758 gen_op_movl_env_T1(offsetof(CPUX86State,idt.limit));
5759 }
5760 }
5761 break;
5762 case 4: /* smsw */
5763 gen_op_movl_T0_env(offsetof(CPUX86State,cr[0]));
5764 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
5765 break;
5766 case 6: /* lmsw */
5767 if (s->cpl != 0) {
5768 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5769 } else {
5770 gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
5771 gen_op_lmsw_T0();
5772 gen_jmp_im(s->pc - s->cs_base);
5773 gen_eob(s);
5774 }
5775 break;
5776 case 7: /* invlpg */
5777 if (s->cpl != 0) {
5778 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5779 } else {
5780 if (mod == 3) {
5781#ifdef TARGET_X86_64
5782 if (CODE64(s) && (modrm & 7) == 0) {
5783 /* swapgs */
5784 gen_op_movtl_T0_env(offsetof(CPUX86State,segs[R_GS].base));
5785 gen_op_movtl_T1_env(offsetof(CPUX86State,kernelgsbase));
5786 gen_op_movtl_env_T1(offsetof(CPUX86State,segs[R_GS].base));
5787 gen_op_movtl_env_T0(offsetof(CPUX86State,kernelgsbase));
5788 } else
5789#endif
5790 {
5791 goto illegal_op;
5792 }
5793 } else {
5794 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5795 gen_op_invlpg_A0();
5796 gen_jmp_im(s->pc - s->cs_base);
5797 gen_eob(s);
5798 }
5799 }
5800 break;
5801 default:
5802 goto illegal_op;
5803 }
5804 break;
5805 case 0x108: /* invd */
5806 case 0x109: /* wbinvd */
5807 if (s->cpl != 0) {
5808 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5809 } else {
5810 /* nothing to do */
5811 }
5812 break;
5813 case 0x63: /* arpl or movslS (x86_64) */
5814#ifdef TARGET_X86_64
5815 if (CODE64(s)) {
5816 int d_ot;
5817 /* d_ot is the size of destination */
5818 d_ot = dflag + OT_WORD;
5819
5820 modrm = ldub_code(s->pc++);
5821 reg = ((modrm >> 3) & 7) | rex_r;
5822 mod = (modrm >> 6) & 3;
5823 rm = (modrm & 7) | REX_B(s);
5824
5825 if (mod == 3) {
5826 gen_op_mov_TN_reg[OT_LONG][0][rm]();
5827 /* sign extend */
5828 if (d_ot == OT_QUAD)
5829 gen_op_movslq_T0_T0();
5830 gen_op_mov_reg_T0[d_ot][reg]();
5831 } else {
5832 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5833 if (d_ot == OT_QUAD) {
5834 gen_op_lds_T0_A0[OT_LONG + s->mem_index]();
5835 } else {
5836 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
5837 }
5838 gen_op_mov_reg_T0[d_ot][reg]();
5839 }
5840 } else
5841#endif
5842 {
5843 if (!s->pe || s->vm86)
5844 goto illegal_op;
5845 ot = dflag ? OT_LONG : OT_WORD;
5846 modrm = ldub_code(s->pc++);
5847 reg = (modrm >> 3) & 7;
5848 mod = (modrm >> 6) & 3;
5849 rm = modrm & 7;
5850 if (mod != 3) {
5851 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5852 gen_op_ld_T0_A0[ot + s->mem_index]();
5853 } else {
5854 gen_op_mov_TN_reg[ot][0][rm]();
5855 }
5856 if (s->cc_op != CC_OP_DYNAMIC)
5857 gen_op_set_cc_op(s->cc_op);
5858 gen_op_arpl();
5859 s->cc_op = CC_OP_EFLAGS;
5860 if (mod != 3) {
5861 gen_op_st_T0_A0[ot + s->mem_index]();
5862 } else {
5863 gen_op_mov_reg_T0[ot][rm]();
5864 }
5865 gen_op_arpl_update();
5866 }
5867 break;
5868 case 0x102: /* lar */
5869 case 0x103: /* lsl */
5870 if (!s->pe || s->vm86)
5871 goto illegal_op;
5872 ot = dflag ? OT_LONG : OT_WORD;
5873 modrm = ldub_code(s->pc++);
5874 reg = ((modrm >> 3) & 7) | rex_r;
5875 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
5876 gen_op_mov_TN_reg[ot][1][reg]();
5877 if (s->cc_op != CC_OP_DYNAMIC)
5878 gen_op_set_cc_op(s->cc_op);
5879 if (b == 0x102)
5880 gen_op_lar();
5881 else
5882 gen_op_lsl();
5883 s->cc_op = CC_OP_EFLAGS;
5884 gen_op_mov_reg_T1[ot][reg]();
5885 break;
5886 case 0x118:
5887 modrm = ldub_code(s->pc++);
5888 mod = (modrm >> 6) & 3;
5889 op = (modrm >> 3) & 7;
5890 switch(op) {
5891 case 0: /* prefetchnta */
5892 case 1: /* prefetchnt0 */
5893 case 2: /* prefetchnt0 */
5894 case 3: /* prefetchnt0 */
5895 if (mod == 3)
5896 goto illegal_op;
5897 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
5898 /* nothing more to do */
5899 break;
5900 default:
5901 goto illegal_op;
5902 }
5903 break;
5904 case 0x120: /* mov reg, crN */
5905 case 0x122: /* mov crN, reg */
5906 if (s->cpl != 0) {
5907 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5908 } else {
5909 modrm = ldub_code(s->pc++);
5910 if ((modrm & 0xc0) != 0xc0)
5911 goto illegal_op;
5912 rm = (modrm & 7) | REX_B(s);
5913 reg = ((modrm >> 3) & 7) | rex_r;
5914 if (CODE64(s))
5915 ot = OT_QUAD;
5916 else
5917 ot = OT_LONG;
5918 switch(reg) {
5919 case 0:
5920 case 2:
5921 case 3:
5922 case 4:
5923 case 8:
5924 if (b & 2) {
5925 gen_op_mov_TN_reg[ot][0][rm]();
5926 gen_op_movl_crN_T0(reg);
5927 gen_jmp_im(s->pc - s->cs_base);
5928 gen_eob(s);
5929 } else {
5930#if !defined(CONFIG_USER_ONLY)
5931 if (reg == 8)
5932 gen_op_movtl_T0_cr8();
5933 else
5934#endif
5935 gen_op_movtl_T0_env(offsetof(CPUX86State,cr[reg]));
5936 gen_op_mov_reg_T0[ot][rm]();
5937 }
5938 break;
5939 default:
5940 goto illegal_op;
5941 }
5942 }
5943 break;
5944 case 0x121: /* mov reg, drN */
5945 case 0x123: /* mov drN, reg */
5946 if (s->cpl != 0) {
5947 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5948 } else {
5949 modrm = ldub_code(s->pc++);
5950 if ((modrm & 0xc0) != 0xc0)
5951 goto illegal_op;
5952 rm = (modrm & 7) | REX_B(s);
5953 reg = ((modrm >> 3) & 7) | rex_r;
5954 if (CODE64(s))
5955 ot = OT_QUAD;
5956 else
5957 ot = OT_LONG;
5958 /* XXX: do it dynamically with CR4.DE bit */
5959 if (reg == 4 || reg == 5 || reg >= 8)
5960 goto illegal_op;
5961 if (b & 2) {
5962 gen_op_mov_TN_reg[ot][0][rm]();
5963 gen_op_movl_drN_T0(reg);
5964 gen_jmp_im(s->pc - s->cs_base);
5965 gen_eob(s);
5966 } else {
5967 gen_op_movtl_T0_env(offsetof(CPUX86State,dr[reg]));
5968 gen_op_mov_reg_T0[ot][rm]();
5969 }
5970 }
5971 break;
5972 case 0x106: /* clts */
5973 if (s->cpl != 0) {
5974 gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
5975 } else {
5976 gen_op_clts();
5977 /* abort block because static cpu state changed */
5978 gen_jmp_im(s->pc - s->cs_base);
5979 gen_eob(s);
5980 }
5981 break;
5982 /* MMX/SSE/SSE2/PNI support */
5983 case 0x1c3: /* MOVNTI reg, mem */
5984 if (!(s->cpuid_features & CPUID_SSE2))
5985 goto illegal_op;
5986 ot = s->dflag == 2 ? OT_QUAD : OT_LONG;
5987 modrm = ldub_code(s->pc++);
5988 mod = (modrm >> 6) & 3;
5989 if (mod == 3)
5990 goto illegal_op;
5991 reg = ((modrm >> 3) & 7) | rex_r;
5992 /* generate a generic store */
5993 gen_ldst_modrm(s, modrm, ot, reg, 1);
5994 break;
5995 case 0x1ae:
5996 modrm = ldub_code(s->pc++);
5997 mod = (modrm >> 6) & 3;
5998 op = (modrm >> 3) & 7;
5999 switch(op) {
6000 case 0: /* fxsave */
6001 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6002 (s->flags & HF_EM_MASK))
6003 goto illegal_op;
6004 if (s->flags & HF_TS_MASK) {
6005 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6006 break;
6007 }
6008 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6009 gen_op_fxsave_A0((s->dflag == 2));
6010 break;
6011 case 1: /* fxrstor */
6012 if (mod == 3 || !(s->cpuid_features & CPUID_FXSR) ||
6013 (s->flags & HF_EM_MASK))
6014 goto illegal_op;
6015 if (s->flags & HF_TS_MASK) {
6016 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6017 break;
6018 }
6019 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6020 gen_op_fxrstor_A0((s->dflag == 2));
6021 break;
6022 case 2: /* ldmxcsr */
6023 case 3: /* stmxcsr */
6024 if (s->flags & HF_TS_MASK) {
6025 gen_exception(s, EXCP07_PREX, pc_start - s->cs_base);
6026 break;
6027 }
6028 if ((s->flags & HF_EM_MASK) || !(s->flags & HF_OSFXSR_MASK) ||
6029 mod == 3)
6030 goto illegal_op;
6031 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6032 if (op == 2) {
6033 gen_op_ld_T0_A0[OT_LONG + s->mem_index]();
6034 gen_op_movl_env_T0(offsetof(CPUX86State, mxcsr));
6035 } else {
6036 gen_op_movl_T0_env(offsetof(CPUX86State, mxcsr));
6037 gen_op_st_T0_A0[OT_LONG + s->mem_index]();
6038 }
6039 break;
6040 case 5: /* lfence */
6041 case 6: /* mfence */
6042 if ((modrm & 0xc7) != 0xc0 || !(s->cpuid_features & CPUID_SSE))
6043 goto illegal_op;
6044 break;
6045 case 7: /* sfence / clflush */
6046 if ((modrm & 0xc7) == 0xc0) {
6047 /* sfence */
6048 if (!(s->cpuid_features & CPUID_SSE))
6049 goto illegal_op;
6050 } else {
6051 /* clflush */
6052 if (!(s->cpuid_features & CPUID_CLFLUSH))
6053 goto illegal_op;
6054 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6055 }
6056 break;
6057 default:
6058 goto illegal_op;
6059 }
6060 break;
6061 case 0x10d: /* prefetch */
6062 modrm = ldub_code(s->pc++);
6063 gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
6064 /* ignore for now */
6065 break;
6066 case 0x110 ... 0x117:
6067 case 0x128 ... 0x12f:
6068 case 0x150 ... 0x177:
6069 case 0x17c ... 0x17f:
6070 case 0x1c2:
6071 case 0x1c4 ... 0x1c6:
6072 case 0x1d0 ... 0x1fe:
6073 gen_sse(s, b, pc_start, rex_r);
6074 break;
6075 default:
6076 goto illegal_op;
6077 }
6078 /* lock generation */
6079 if (s->prefix & PREFIX_LOCK)
6080 gen_op_unlock();
6081 return s->pc;
6082 illegal_op:
6083 if (s->prefix & PREFIX_LOCK)
6084 gen_op_unlock();
6085 /* XXX: ensure that no lock was generated */
6086 gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
6087 return s->pc;
6088}
6089
/* Convenience masks: the six arithmetic status flags, with and without
   the carry flag. */
6090#define CC_OSZAPC (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C)
6091#define CC_OSZAP (CC_O | CC_S | CC_Z | CC_A | CC_P)
6092
6093/* flags read by an operation */
/* Indexed by micro-op number; each entry is the set of EFLAGS bits the
   op consumes.  Ops not listed read no flags (implicit 0).  Paired with
   opc_write_flags below — presumably consulted by the translator's
   flag-liveness optimisation; the consumer is outside this chunk. */
6094static uint16_t opc_read_flags[NB_OPS] = {
6095 [INDEX_op_aas] = CC_A,
6096 [INDEX_op_aaa] = CC_A,
6097 [INDEX_op_das] = CC_A | CC_C,
6098 [INDEX_op_daa] = CC_A | CC_C,
6099
6100 /* subtle: due to the incl/decl implementation, C is used */
6101 [INDEX_op_update_inc_cc] = CC_C,
6102
6103 [INDEX_op_into] = CC_O,
6104
     /* Conditional jumps on the result of a subtraction, per size. */
6105 [INDEX_op_jb_subb] = CC_C,
6106 [INDEX_op_jb_subw] = CC_C,
6107 [INDEX_op_jb_subl] = CC_C,
6108
6109 [INDEX_op_jz_subb] = CC_Z,
6110 [INDEX_op_jz_subw] = CC_Z,
6111 [INDEX_op_jz_subl] = CC_Z,
6112
6113 [INDEX_op_jbe_subb] = CC_Z | CC_C,
6114 [INDEX_op_jbe_subw] = CC_Z | CC_C,
6115 [INDEX_op_jbe_subl] = CC_Z | CC_C,
6116
6117 [INDEX_op_js_subb] = CC_S,
6118 [INDEX_op_js_subw] = CC_S,
6119 [INDEX_op_js_subl] = CC_S,
6120
6121 [INDEX_op_jl_subb] = CC_O | CC_S,
6122 [INDEX_op_jl_subw] = CC_O | CC_S,
6123 [INDEX_op_jl_subl] = CC_O | CC_S,
6124
6125 [INDEX_op_jle_subb] = CC_O | CC_S | CC_Z,
6126 [INDEX_op_jle_subw] = CC_O | CC_S | CC_Z,
6127 [INDEX_op_jle_subl] = CC_O | CC_S | CC_Z,
6128
     /* loopz/loopnz additionally test ZF. */
6129 [INDEX_op_loopnzw] = CC_Z,
6130 [INDEX_op_loopnzl] = CC_Z,
6131 [INDEX_op_loopzw] = CC_Z,
6132 [INDEX_op_loopzl] = CC_Z,
6133
     /* setcc reading the generic (lazy) cc state. */
6134 [INDEX_op_seto_T0_cc] = CC_O,
6135 [INDEX_op_setb_T0_cc] = CC_C,
6136 [INDEX_op_setz_T0_cc] = CC_Z,
6137 [INDEX_op_setbe_T0_cc] = CC_Z | CC_C,
6138 [INDEX_op_sets_T0_cc] = CC_S,
6139 [INDEX_op_setp_T0_cc] = CC_P,
6140 [INDEX_op_setl_T0_cc] = CC_O | CC_S,
6141 [INDEX_op_setle_T0_cc] = CC_O | CC_S | CC_Z,
6142
     /* setcc specialised on a subtraction result, per size. */
6143 [INDEX_op_setb_T0_subb] = CC_C,
6144 [INDEX_op_setb_T0_subw] = CC_C,
6145 [INDEX_op_setb_T0_subl] = CC_C,
6146
6147 [INDEX_op_setz_T0_subb] = CC_Z,
6148 [INDEX_op_setz_T0_subw] = CC_Z,
6149 [INDEX_op_setz_T0_subl] = CC_Z,
6150
6151 [INDEX_op_setbe_T0_subb] = CC_Z | CC_C,
6152 [INDEX_op_setbe_T0_subw] = CC_Z | CC_C,
6153 [INDEX_op_setbe_T0_subl] = CC_Z | CC_C,
6154
6155 [INDEX_op_sets_T0_subb] = CC_S,
6156 [INDEX_op_sets_T0_subw] = CC_S,
6157 [INDEX_op_sets_T0_subl] = CC_S,
6158
6159 [INDEX_op_setl_T0_subb] = CC_O | CC_S,
6160 [INDEX_op_setl_T0_subw] = CC_O | CC_S,
6161 [INDEX_op_setl_T0_subl] = CC_O | CC_S,
6162
6163 [INDEX_op_setle_T0_subb] = CC_O | CC_S | CC_Z,
6164 [INDEX_op_setle_T0_subw] = CC_O | CC_S | CC_Z,
6165 [INDEX_op_setle_T0_subl] = CC_O | CC_S | CC_Z,
6166
6167 [INDEX_op_movl_T0_eflags] = CC_OSZAPC,
6168 [INDEX_op_cmc] = CC_C,
6169 [INDEX_op_salc] = CC_C,
6170
6171 /* needed for correct flag optimisation before string ops */
     /* (declared as reading everything so live flags are not discarded
        across the rep-prefix loop tests) */
6172 [INDEX_op_jnz_ecxw] = CC_OSZAPC,
6173 [INDEX_op_jnz_ecxl] = CC_OSZAPC,
6174 [INDEX_op_jz_ecxw] = CC_OSZAPC,
6175 [INDEX_op_jz_ecxl] = CC_OSZAPC,
6176
6177#ifdef TARGET_X86_64
     /* 64-bit (quad) variants of the entries above. */
6178 [INDEX_op_jb_subq] = CC_C,
6179 [INDEX_op_jz_subq] = CC_Z,
6180 [INDEX_op_jbe_subq] = CC_Z | CC_C,
6181 [INDEX_op_js_subq] = CC_S,
6182 [INDEX_op_jl_subq] = CC_O | CC_S,
6183 [INDEX_op_jle_subq] = CC_O | CC_S | CC_Z,
6184
6185 [INDEX_op_loopnzq] = CC_Z,
6186 [INDEX_op_loopzq] = CC_Z,
6187
6188 [INDEX_op_setb_T0_subq] = CC_C,
6189 [INDEX_op_setz_T0_subq] = CC_Z,
6190 [INDEX_op_setbe_T0_subq] = CC_Z | CC_C,
6191 [INDEX_op_sets_T0_subq] = CC_S,
6192 [INDEX_op_setl_T0_subq] = CC_O | CC_S,
6193 [INDEX_op_setle_T0_subq] = CC_O | CC_S | CC_Z,
6194
6195 [INDEX_op_jnz_ecxq] = CC_OSZAPC,
6196 [INDEX_op_jz_ecxq] = CC_OSZAPC,
6197#endif
6198
/* adc/sbb and rcl/rcr consume the incoming carry.  DEF_READF stamps out
   the entries for every memory-access variant of these ops: SUFFIX is
   the access-kind tag pasted into the op name (empty = register-only,
   _raw, and — for softmmu builds — _kernel/_user). */
6199#define DEF_READF(SUFFIX)\
6200 [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6201 [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6202 [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6203 X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6204 [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6205 [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6206 [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6207 X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6208\
6209 [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6210 [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6211 [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_C,\
6212 X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_C,)\
6213 [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_C,\
6214 [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_C,\
6215 [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_C,\
6216 X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_C,)
6217
6218 DEF_READF( )
6219 DEF_READF(_raw)
6220#ifndef CONFIG_USER_ONLY
6221 DEF_READF(_kernel)
6222 DEF_READF(_user)
6223#endif
6224};
6225
/* flags written by an operation */
/* For each micro-op, the set of EFLAGS condition-code bits (CC_O, CC_S,
   CC_Z, CC_A, CC_P, CC_C) that the op defines.  optimize_flags() walks
   the op stream backwards and, when none of an op's written flags are
   live afterwards, replaces the op with its entry in opc_simpler[].
   Ops absent from this table write no flags (array is zero-initialized). */
static uint16_t opc_write_flags[NB_OPS] = {
    [INDEX_op_update2_cc] = CC_OSZAPC,
    [INDEX_op_update1_cc] = CC_OSZAPC,
    [INDEX_op_cmpl_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_update_neg_cc] = CC_OSZAPC,
    /* subtle: due to the incl/decl implementation, C is used */
    [INDEX_op_update_inc_cc] = CC_OSZAPC,
    [INDEX_op_testl_T0_T1_cc] = CC_OSZAPC,

    /* multiplies: all arithmetic flags are (re)defined */
    [INDEX_op_mulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_mulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_mull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_mulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulb_AL_T0] = CC_OSZAPC,
    [INDEX_op_imulw_AX_T0] = CC_OSZAPC,
    [INDEX_op_imull_EAX_T0] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_EAX_T0] = CC_OSZAPC,)
    [INDEX_op_imulw_T0_T1] = CC_OSZAPC,
    [INDEX_op_imull_T0_T1] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_imulq_T0_T1] = CC_OSZAPC,)

    /* sse */
    [INDEX_op_ucomiss] = CC_OSZAPC,
    [INDEX_op_ucomisd] = CC_OSZAPC,
    [INDEX_op_comiss] = CC_OSZAPC,
    [INDEX_op_comisd] = CC_OSZAPC,

    /* bcd */
    [INDEX_op_aam] = CC_OSZAPC,
    [INDEX_op_aad] = CC_OSZAPC,
    [INDEX_op_aas] = CC_OSZAPC,
    [INDEX_op_aaa] = CC_OSZAPC,
    [INDEX_op_das] = CC_OSZAPC,
    [INDEX_op_daa] = CC_OSZAPC,

    /* eflags loads: the byte form (sahf-style) cannot touch O */
    [INDEX_op_movb_eflags_T0] = CC_S | CC_Z | CC_A | CC_P | CC_C,
    [INDEX_op_movw_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_io] = CC_OSZAPC,
    [INDEX_op_movw_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_movl_eflags_T0_cpl0] = CC_OSZAPC,
    [INDEX_op_clc] = CC_C,
    [INDEX_op_stc] = CC_C,
    [INDEX_op_cmc] = CC_C,

    /* bit test ops */
    [INDEX_op_btw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btsw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btsl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btsq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btrw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btrl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btrq_T0_T1_cc] = CC_OSZAPC,)
    [INDEX_op_btcw_T0_T1_cc] = CC_OSZAPC,
    [INDEX_op_btcl_T0_T1_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_btcq_T0_T1_cc] = CC_OSZAPC,)

    /* bit scan ops */
    [INDEX_op_bsfw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsfl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsfq_T0_cc] = CC_OSZAPC,)
    [INDEX_op_bsrw_T0_cc] = CC_OSZAPC,
    [INDEX_op_bsrl_T0_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_bsrq_T0_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchgb_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgw_T0_T1_EAX_cc] = CC_OSZAPC,
    [INDEX_op_cmpxchgl_T0_T1_EAX_cc] = CC_OSZAPC,
    X86_64_DEF([INDEX_op_cmpxchgq_T0_T1_EAX_cc] = CC_OSZAPC,)

    [INDEX_op_cmpxchg8b] = CC_Z,
    [INDEX_op_lar] = CC_Z,
    [INDEX_op_lsl] = CC_Z,
    [INDEX_op_verr] = CC_Z,
    [INDEX_op_verw] = CC_Z,
    [INDEX_op_fcomi_ST0_FT0] = CC_Z | CC_P | CC_C,
    [INDEX_op_fucomi_ST0_FT0] = CC_Z | CC_P | CC_C,

/* Entries shared by all memory-access variants of an op; instantiated
   once per access suffix below (plain, _raw, _kernel, _user). */
#define DEF_WRITEF(SUFFIX)\
    [INDEX_op_adcb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_adcl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_adcq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
    [INDEX_op_sbbb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sbbl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sbbq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_rclb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rclw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcll ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rclq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
    [INDEX_op_rcrb ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrw ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    [INDEX_op_rcrl ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,\
    X86_64_DEF([INDEX_op_rcrq ## SUFFIX ## _T0_T1_cc] = CC_O | CC_C,)\
\
    [INDEX_op_shlb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shlw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shll ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shlq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_shrl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_sarb ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarw ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    [INDEX_op_sarl ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_sarq ## SUFFIX ## _T0_T1_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shldw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shldl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shldq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_ECX_cc] = CC_OSZAPC,)\
    [INDEX_op_shrdw ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    [INDEX_op_shrdl ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_shrdq ## SUFFIX ## _T0_T1_im_cc] = CC_OSZAPC,)\
\
    [INDEX_op_cmpxchgb ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgw ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    [INDEX_op_cmpxchgl ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,\
    X86_64_DEF([INDEX_op_cmpxchgq ## SUFFIX ## _T0_T1_EAX_cc] = CC_OSZAPC,)


    DEF_WRITEF( )
    DEF_WRITEF(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_WRITEF(_kernel)
    DEF_WRITEF(_user)
#endif
};
6376
/* simpler form of an operation if no flags need to be generated */
/* Maps a flag-setting micro-op to an equivalent op that skips the
   condition-code computation.  optimize_flags_init() fills every
   remaining (zero) slot with the op's own index, so ops without a
   cheaper form map to themselves. */
static uint16_t opc_simpler[NB_OPS] = {
    /* pure flag-update ops disappear entirely when flags are dead */
    [INDEX_op_update2_cc] = INDEX_op_nop,
    [INDEX_op_update1_cc] = INDEX_op_nop,
    [INDEX_op_update_neg_cc] = INDEX_op_nop,
#if 0
    /* broken: CC_OP logic must be rewritten */
    [INDEX_op_update_inc_cc] = INDEX_op_nop,
#endif

    /* shifts: fall back to the non-_cc variants */
    [INDEX_op_shlb_T0_T1_cc] = INDEX_op_shlb_T0_T1,
    [INDEX_op_shlw_T0_T1_cc] = INDEX_op_shlw_T0_T1,
    [INDEX_op_shll_T0_T1_cc] = INDEX_op_shll_T0_T1,
    X86_64_DEF([INDEX_op_shlq_T0_T1_cc] = INDEX_op_shlq_T0_T1,)

    [INDEX_op_shrb_T0_T1_cc] = INDEX_op_shrb_T0_T1,
    [INDEX_op_shrw_T0_T1_cc] = INDEX_op_shrw_T0_T1,
    [INDEX_op_shrl_T0_T1_cc] = INDEX_op_shrl_T0_T1,
    X86_64_DEF([INDEX_op_shrq_T0_T1_cc] = INDEX_op_shrq_T0_T1,)

    [INDEX_op_sarb_T0_T1_cc] = INDEX_op_sarb_T0_T1,
    [INDEX_op_sarw_T0_T1_cc] = INDEX_op_sarw_T0_T1,
    [INDEX_op_sarl_T0_T1_cc] = INDEX_op_sarl_T0_T1,
    X86_64_DEF([INDEX_op_sarq_T0_T1_cc] = INDEX_op_sarq_T0_T1,)

/* rotate entries, instantiated once per memory-access suffix */
#define DEF_SIMPLER(SUFFIX)\
    [INDEX_op_rolb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rolw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolw ## SUFFIX ## _T0_T1,\
    [INDEX_op_roll ## SUFFIX ## _T0_T1_cc] = INDEX_op_roll ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rolq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rolq ## SUFFIX ## _T0_T1,)\
\
    [INDEX_op_rorb ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorb ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorw ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorw ## SUFFIX ## _T0_T1,\
    [INDEX_op_rorl ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorl ## SUFFIX ## _T0_T1,\
    X86_64_DEF([INDEX_op_rorq ## SUFFIX ## _T0_T1_cc] = INDEX_op_rorq ## SUFFIX ## _T0_T1,)

    DEF_SIMPLER( )
    DEF_SIMPLER(_raw)
#ifndef CONFIG_USER_ONLY
    DEF_SIMPLER(_kernel)
    DEF_SIMPLER(_user)
#endif
};
6420
6421void optimize_flags_init(void)
6422{
6423 int i;
6424 /* put default values in arrays */
6425 for(i = 0; i < NB_OPS; i++) {
6426 if (opc_simpler[i] == 0)
6427 opc_simpler[i] = i;
6428 }
6429}
6430
6431/* CPU flags computation optimization: we move backward thru the
6432 generated code to see which flags are needed. The operation is
6433 modified if suitable */
6434static void optimize_flags(uint16_t *opc_buf, int opc_buf_len)
6435{
6436 uint16_t *opc_ptr;
6437 int live_flags, write_flags, op;
6438
6439 opc_ptr = opc_buf + opc_buf_len;
6440 /* live_flags contains the flags needed by the next instructions
6441 in the code. At the end of the bloc, we consider that all the
6442 flags are live. */
6443 live_flags = CC_OSZAPC;
6444 while (opc_ptr > opc_buf) {
6445 op = *--opc_ptr;
6446 /* if none of the flags written by the instruction is used,
6447 then we can try to find a simpler instruction */
6448 write_flags = opc_write_flags[op];
6449 if ((live_flags & write_flags) == 0) {
6450 *opc_ptr = opc_simpler[op];
6451 }
6452 /* compute the live flags before the instruction */
6453 live_flags &= ~write_flags;
6454 live_flags |= opc_read_flags[op];
6455 }
6456}
6457
/* generate intermediate code in gen_opc_buf and gen_opparam_buf for
   basic block 'tb'. If search_pc is TRUE, also generate PC
   information for each intermediate instruction.
   Returns 0 (always).  The DisasContext is filled from tb->flags,
   then instructions are disassembled one at a time until a jump,
   a single-step/inhibit condition, or a size limit ends the block. */
static inline int gen_intermediate_code_internal(CPUState *env,
                                                 TranslationBlock *tb,
                                                 int search_pc)
{
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_ptr;
    uint16_t *gen_opc_end;
    int flags, j, lj, cflags;
    target_ulong pc_start;
    target_ulong cs_base;

    /* generate intermediate code */
    pc_start = tb->pc;
    cs_base = tb->cs_base;
    flags = tb->flags;
    cflags = tb->cflags;

    /* unpack the CPU mode bits cached in tb->flags into the context */
    dc->pe = (flags >> HF_PE_SHIFT) & 1;
    dc->code32 = (flags >> HF_CS32_SHIFT) & 1;
    dc->ss32 = (flags >> HF_SS32_SHIFT) & 1;
    dc->addseg = (flags >> HF_ADDSEG_SHIFT) & 1;
    dc->f_st = 0;
    dc->vm86 = (flags >> VM_SHIFT) & 1;
#ifdef VBOX
    /* VirtualBox extension: track CR4.VME for virtual-8086 extensions */
    dc->vme = !!(env->cr[4] & CR4_VME_MASK);
#endif
    dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
    dc->iopl = (flags >> IOPL_SHIFT) & 3;
    dc->tf = (flags >> TF_SHIFT) & 1;
    dc->singlestep_enabled = env->singlestep_enabled;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->cs_base = cs_base;
    dc->tb = tb;
    dc->popl_esp_hack = 0;
    /* select memory access functions: index 0 = raw, 4 = kernel,
       8 = user (each suffix group spans 4 access sizes) */
    dc->mem_index = 0;
    if (flags & HF_SOFTMMU_MASK) {
        if (dc->cpl == 3)
            dc->mem_index = 2 * 4;
        else
            dc->mem_index = 1 * 4;
    }
    dc->cpuid_features = env->cpuid_features;
    dc->cpuid_ext_features = env->cpuid_ext_features;
#ifdef TARGET_X86_64
    dc->lma = (flags >> HF_LMA_SHIFT) & 1;
    dc->code64 = (flags >> HF_CS64_SHIFT) & 1;
#endif
    dc->flags = flags;
    /* direct block chaining is only allowed when no per-instruction
       interception (trap flag, single-step, IRQ inhibit) is active */
    dc->jmp_opt = !(dc->tf || env->singlestep_enabled ||
                    (flags & HF_INHIBIT_IRQ_MASK)
#ifndef CONFIG_SOFTMMU
                    || (flags & HF_SOFTMMU_MASK)
#endif
                    );
#if 0
    /* check addseg logic */
    if (!dc->addseg && (dc->vm86 || !dc->pe || !dc->code32))
        printf("ERROR addseg\n");
#endif

    /* reset the micro-op output buffers */
    gen_opc_ptr = gen_opc_buf;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    gen_opparam_ptr = gen_opparam_buf;
    nb_gen_labels = 0;

    dc->is_jmp = DISAS_NEXT;
    pc_ptr = pc_start;
    lj = -1;

    for(;;) {
        /* emit a debug trap before any guest breakpoint address */
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == pc_ptr) {
                    gen_debug(dc, pc_ptr - dc->cs_base);
                    break;
                }
            }
        }
        if (search_pc) {
            /* record guest PC / cc_op for each op index, zero-filling
               gaps so a host-PC lookup can map back to the guest PC */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = pc_ptr;
            gen_opc_cc_op[lj] = dc->cc_op;
            gen_opc_instr_start[lj] = 1;
        }
        pc_ptr = disas_insn(dc, pc_ptr);
        /* stop translation if indicated */
        if (dc->is_jmp)
            break;

#ifdef VBOX
#ifdef DEBUG
/*
        if(cpu_check_code_raw(env, pc_ptr, env->hflags | (env->eflags & (IOPL_MASK | TF_MASK | VM_MASK))) == ERROR_SUCCESS)
        {
            //should never happen as the jump to the patch code terminates the translation block
            dprintf(("QEmu is about to execute instructions in our patch block at %08X!!\n", pc_ptr));
        }
*/
#endif
        /* VirtualBox: one-shot single-instruction emulation request;
           consume the flag and terminate the block after this insn */
        if (env->state & CPU_EMULATE_SINGLE_INSTR)
        {
            env->state &= ~CPU_EMULATE_SINGLE_INSTR;
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
#endif

        /* if single step mode, we generate only one instruction and
           generate an exception */
        /* if irqs were inhibited with HF_INHIBIT_IRQ_MASK, we clear
           the flag and abort the translation to give the irqs a
           chance to happen */
        if (dc->tf || dc->singlestep_enabled ||
            (flags & HF_INHIBIT_IRQ_MASK) ||
            (cflags & CF_SINGLE_INSN)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
        /* if too long translation, stop generation too */
        if (gen_opc_ptr >= gen_opc_end ||
            (pc_ptr - pc_start) >= (TARGET_PAGE_SIZE - 32)) {
            gen_jmp_im(pc_ptr - dc->cs_base);
            gen_eob(dc);
            break;
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    /* we don't forget to fill the last values */
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    }

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int disas_flags;
        fprintf(logfile, "----------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
#ifdef TARGET_X86_64
        if (dc->code64)
            disas_flags = 2;
        else
#endif
            disas_flags = !dc->code32;
        target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
        fprintf(logfile, "\n");
        if (loglevel & CPU_LOG_TB_OP) {
            fprintf(logfile, "OP:\n");
            dump_ops(gen_opc_buf, gen_opparam_buf);
            fprintf(logfile, "\n");
        }
    }
#endif

    /* optimize flag computations */
    optimize_flags(gen_opc_buf, gen_opc_ptr - gen_opc_buf);

#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_OP_OPT) {
        fprintf(logfile, "AFTER FLAGS OPT:\n");
        dump_ops(gen_opc_buf, gen_opparam_buf);
        fprintf(logfile, "\n");
    }
#endif
    /* tb->size is only meaningful for a real translation pass, not
       for a PC-search replay */
    if (!search_pc)
        tb->size = pc_ptr - pc_start;
    return 0;
}
6642
6643int gen_intermediate_code(CPUState *env, TranslationBlock *tb)
6644{
6645 return gen_intermediate_code_internal(env, tb, 0);
6646}
6647
6648int gen_intermediate_code_pc(CPUState *env, TranslationBlock *tb)
6649{
6650 return gen_intermediate_code_internal(env, tb, 1);
6651}
6652
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette