Changeset 36170 in vbox for trunk/src/recompiler/target-i386
- Timestamp: Mar 4, 2011 12:49:02 PM (14 years ago)
- svn:sync-xref-src-repo-rev: 70361
- Location: trunk/src/recompiler/target-i386
- Files: 9 edited
trunk/src/recompiler/target-i386/cpu.h
Diff r36140 → r36170:
- License header: FSF address updated (59 Temple Place → 51 Franklin Street, Fifth Floor, Boston MA 02110-1301).
- New hflag masks HF_IOPL_MASK and HF_VM_MASK; new CR0_PE_SHIFT and CR0_MP_SHIFT; CR4_OSFXSR_SHIFT is added and CR4_OSFXSR_MASK redefined in terms of it.
- Debug-register bits defined: DR6_BD, DR6_BS, DR6_BT, DR6_FIXED_1, DR7_GD, DR7_TYPE_SHIFT, DR7_LEN_SHIFT, DR7_FIXED_1 (see the decode sketch below).
- MSR additions: MSR_IA32_TSC and the MTRR set (MSR_MTRRcap with VCNT/FIXRANGE_SUPPORT/WC_SUPPORTED, MSR_MTRRphysBase(reg)/MSR_MTRRphysMask(reg), the fixed-range MSR_MTRRfix* registers, MSR_MTRRdefType).
- EXCP01_SSTP is renamed to EXCP01_DB.
- CPUX86State gains a tsc field, a union of CPUBreakpoint*/CPUWatchpoint* pointers backing dr[0..3], MTRR state (mtrr_fixed[11], mtrr_deftype, mtrr_var[8]) and a KVM interrupt_bitmap[256 / 64].
- Prototypes are regrouped by defining file (op_helper.c, cpu-exec.c, helper.c, hw/apic.c, hw/pc.c); cpu_x86_handle_mmu_fault and cpu_x86_cpuid are declared here; the VBOX cpu_rdmsr/cpu_wrmsr declarations move into the main VBOX block.
- New inline DR7 decoders hw_breakpoint_enabled(), hw_breakpoint_type() and hw_breakpoint_len(), plus declarations for hw_breakpoint_insert(), hw_breakpoint_remove() and check_hw_breakpoints().
- CPU_SAVE_VERSION is bumped from 7 to 8; the cc_table extern is dropped.
- The CPU_PC_FROM_TB macro is replaced by a cpu_pc_from_tb() inline, a new cpu_get_tb_cpu_state() inline is added, and exec-all.h is now included.
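The debug-register constants and the hw_breakpoint_enabled()/hw_breakpoint_type()/hw_breakpoint_len() inlines are easiest to follow with a concrete decode in hand. The following is a minimal standalone sketch, not VirtualBox/QEMU code: the constant values mirror the new #defines and the per-slot enable decode matches hw_breakpoint_enabled(); the sample DR6/DR7 values and main() are purely illustrative.

    /* Standalone sketch: decoding the debug-register bits that cpu.h now names. */
    #include <stdio.h>
    #include <stdint.h>

    #define DR6_BD (1 << 13)
    #define DR6_BS (1 << 14)
    #define DR6_BT (1 << 15)
    #define DR7_GD (1 << 13)

    int main(void)
    {
        uint32_t dr6 = DR6_BS;       /* e.g. after a single-step trap */
        uint32_t dr7 = 0x00000401;   /* DR7_FIXED_1 | L0 (local enable, slot 0) */

        printf("single-step: %d  debug-reg access: %d  task-switch: %d\n",
               !!(dr6 & DR6_BS), !!(dr6 & DR6_BD), !!(dr6 & DR6_BT));

        for (int i = 0; i < 4; i++) {
            int enable = (dr7 >> (i * 2)) & 3;   /* local/global enable bit pair */
            printf("DR%d: %s\n", i, enable ? "enabled" : "disabled");
        }
        printf("general-detect (DR7.GD): %d\n", !!(dr7 & DR7_GD));
        return 0;
    }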
trunk/src/recompiler/target-i386/exec.h
Diff r36140 → r36170:
- License header: FSF address updated.
- qemu-common.h is now included.
- Declarations that moved to cpu.h or became local are dropped: cpu_x86_update_cr3/cr4, cpu_x86_handle_mmu_fault, cpu_lock/cpu_unlock, raise_interrupt, cpu_loop_exit, the old OPPROTO op_* eflags/cli/sti declarations, and the externs for f15rk, parity_table, rclw_table, rclb_table and the native-FPU save/restore helpers.
- raise_exception() and raise_exception_err() are now declared QEMU_NORETURN.
- compute_eflags() reads the lazily computed flags through helper_cc_compute_all(CC_OP) instead of cc_table[CC_OP].compute_all() (see the sketch below).
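compute_eflags() now reconstructs EFLAGS from the lazily recorded operands via helper_cc_compute_all(CC_OP). The sketch below illustrates that lazy-flags idea in isolation, with invented names and only two 8-bit operation codes; it is a simplified model of the technique, not QEMU's implementation.

    /* Standalone sketch of lazy condition-code evaluation: the translator
     * records only the result/operand and an operation code, and the flags
     * are reconstructed on demand. */
    #include <stdio.h>
    #include <stdint.h>

    enum { CC_OP_LOGICB, CC_OP_ADDB };

    #define CC_C 0x0001
    #define CC_Z 0x0040
    #define CC_S 0x0080

    static uint32_t cc_compute_all(uint32_t cc_op, uint8_t dst, uint8_t src)
    {
        uint32_t flags = 0;
        switch (cc_op) {
        case CC_OP_ADDB:
            if (dst < src)            /* dst holds the result, src one operand */
                flags |= CC_C;
            /* fall through for ZF/SF */
        case CC_OP_LOGICB:
            if (dst == 0)
                flags |= CC_Z;
            if (dst & 0x80)
                flags |= CC_S;
            break;
        }
        return flags;
    }

    int main(void)
    {
        /* 0x80 + 0x80 on 8 bits gives result 0x00 with carry: ZF and CF set */
        printf("flags=%#x\n", cc_compute_all(CC_OP_ADDB, 0x00, 0x80));
        return 0;
    }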
trunk/src/recompiler/target-i386/helper.c
Diff r36140 → r36170:
- License header: FSF address updated; the #include block is tidied, qemu-common.h and kvm.h are pulled in, and svm.h is no longer included directly.
- cpu_x86_init() moves to the end of the file; it now registers breakpoint_handler() as the debug-exception handler and calls kvm_init_vcpu() when KVM is enabled.
- Feature-name typo fixed in ext2_feature_name ("mttr" → "mtrr"); the core2duo ext_features list uses | instead of a stray comma.
- New "phenom" CPU definition (AMD Phenom(tm) 9550 Quad-Core Processor: family 16, model 2, stepping 3, with SVM, 3DNow!, NX, LM and the usual PPRO features).
- x86_defs is iterated with ARRAY_SIZE(); the model parser now accepts values up to 0xff; cpu_x86_register() encodes extended family/model into cpuid_version when family > 0x0f (see the sketch below).
- cpu_reset() logs the reset under CPU_LOG_RESET, clears the debug registers to their DR6_FIXED_1/DR7_FIXED_1 values and removes all BP_CPU breakpoints and watchpoints.
- cpu_dump_state() fetches registers via kvm_arch_get_registers() when KVM is enabled and now also prints DR0-DR3, DR6 and DR7.
- cpu_x86_flush_tlb() is removed.
- New hardware-breakpoint machinery: hw_breakpoint_insert(), hw_breakpoint_remove(), check_hw_breakpoints() and a breakpoint_handler() installed as the CPUDebugExcpHandler.
- host_cpuid() and cpu_x86_cpuid() are added here (the CPUID logic moves out of op_helper.c), including the KVM adjustments: advertising the host vendor, setting the hypervisor bit, and masking LM/SYSCALL/NX/SVM/3DNow! against host capabilities.
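The new cpuid_version encoding in cpu_x86_register() follows the CPUID extended-family/extended-model scheme. A short standalone sketch mirroring the logic visible in the diff (the function name here is made up) reproduces the leaf-1 EAX value the new phenom definition would get:

    /* Standalone sketch of the extended family/model encoding that
     * cpu_x86_register() now emits into env->cpuid_version (CPUID.1:EAX). */
    #include <stdio.h>
    #include <stdint.h>

    static uint32_t encode_cpuid_version(int family, int model, int stepping)
    {
        uint32_t v;
        if (family > 0x0f)
            v = 0xf00 | ((family - 0x0f) << 20);   /* base family 0xF + extended family */
        else
            v = family << 8;
        v |= ((model & 0xf) << 4) | ((model >> 4) << 16);  /* model + extended model */
        v |= stepping;
        return v;
    }

    int main(void)
    {
        /* the new "phenom" entry: family 16, model 2, stepping 3 */
        printf("CPUID.1:EAX = %#x\n", encode_cpuid_version(16, 2, 3)); /* 0x100f23 */
        return 0;
    }

Family 16 lands as base family 0xF plus extended family 1, so family 16, model 2, stepping 3 encodes to 0x100f23.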
trunk/src/recompiler/target-i386/helper.h
Diff r36140 → r36170:
- The file is converted from the old DEF_HELPER(ret, name, params) prototype macros to the def-helper.h scheme: every entry is now DEF_HELPER_N(name, ret, arg-types...) using the short type tags (tl, i32, i64, s32, s64, ptr, int; RTCCUINTREG/RTCCINTREG in the VBOX variants). The mechanism is sketched below.
- New DEF_HELPER_FLAGS_1 entries expose the lazy flag computation (cc_compute_all, cc_compute_c) as TCG helpers marked TCG_CALL_PURE.
- The VBOX-only helpers (write/read_eflags_vme, cli_vme, sti_vme, rdtscp, check_external_event, dump_state, sync_seg) are grouped in a single #ifdef VBOX block near the end, and the trailing #undef DEF_HELPER is replaced by a second #include "def-helper.h".
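The def-helper.h scheme works by expanding the same DEF_HELPER_N() list several times under different macro definitions (prototypes, TCG registration stubs, and so on). The sketch below demonstrates the general X-macro technique in plain C with invented names and simplified argument types; it is not QEMU's actual def-helper.h.

    /* Simplified standalone sketch of the "def-helper" pattern:
     * one list, expanded twice with different macro definitions. */
    #include <stdio.h>

    #define HELPER_LIST \
        DEF_HELPER_0(clts, void) \
        DEF_HELPER_1(invlpg, void, unsigned long) \
        DEF_HELPER_2(outb, void, unsigned int, unsigned int)

    /* expansion 1: prototypes */
    #define DEF_HELPER_0(name, ret)         ret helper_##name(void);
    #define DEF_HELPER_1(name, ret, t1)     ret helper_##name(t1);
    #define DEF_HELPER_2(name, ret, t1, t2) ret helper_##name(t1, t2);
    HELPER_LIST
    #undef DEF_HELPER_0
    #undef DEF_HELPER_1
    #undef DEF_HELPER_2

    /* expansion 2: a table of helper names */
    #define DEF_HELPER_0(name, ret)         "helper_" #name,
    #define DEF_HELPER_1(name, ret, t1)     "helper_" #name,
    #define DEF_HELPER_2(name, ret, t1, t2) "helper_" #name,
    static const char *helper_names[] = { HELPER_LIST };
    #undef DEF_HELPER_0
    #undef DEF_HELPER_1
    #undef DEF_HELPER_2

    int main(void)
    {
        unsigned i;
        for (i = 0; i < sizeof(helper_names) / sizeof(helper_names[0]); i++)
            printf("%s\n", helper_names[i]);
        return 0;
    }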
trunk/src/recompiler/target-i386/helper_template.h
Diff r33656 → r36170:
- License header: FSF address updated.
- The rotate-through-carry helpers fetch the current flags with helper_cc_compute_all(CC_OP) instead of cc_table[CC_OP].compute_all() (the overall pattern is sketched below).
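For context, the rotate helpers first materialize the flags (now via helper_cc_compute_all) and then rotate the operand through the carry bit. Here is a standalone 8-bit sketch of that pattern with illustrative names, using a direct modulo instead of the rclb_table lookup found in op_helper.c:

    /* Standalone sketch: 8-bit rotate-through-carry (RCL) following the
     * helper pattern of fetching flags first, then updating CF. */
    #include <stdio.h>
    #include <stdint.h>

    #define CC_C 0x0001

    static uint8_t rclb(uint8_t t0, int count, uint32_t *eflags)
    {
        count = (count & 0x1f) % 9;     /* 8-bit RCL rotates through 9 positions */
        if (count) {
            uint32_t src = t0;
            uint32_t res = (t0 << count) | (((*eflags & CC_C) != 0) << (count - 1));
            if (count > 1)
                res |= src >> (9 - count);
            uint32_t new_cf = (src >> (8 - count)) & 1;
            *eflags = (*eflags & ~CC_C) | new_cf;
            t0 = (uint8_t)res;
        }
        return t0;
    }

    int main(void)
    {
        uint32_t eflags = CC_C;                  /* carry set on entry */
        uint8_t v = rclb(0x80, 1, &eflags);
        printf("result=%#x carry=%d\n", v, eflags & CC_C);  /* 0x1, carry=1 */
        return 0;
    }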
trunk/src/recompiler/target-i386/op_helper.c
Diff r36140 → r36170:
- License header: FSF address updated; exec-all.h is included.
- New LOG_PCALL()/LOG_PCALL_STATE() macros (built on qemu_log_mask/log_cpu_state_mask) replace the ad-hoc DEBUG_PCALL fprintf(logfile, ...) blocks; the remaining logging switches from logfile/fprintf to qemu_log()/log_cpu_state() (the gating pattern is sketched below).
- parity_table, rclw_table, rclb_table, f15rk and global_cpu_lock become static.
- Every cc_table[CC_OP].compute_all() call site now uses helper_cc_compute_all(CC_OP); the obsolete FORCE_RET() calls are dropped.
- switch_tss() clears the local hardware breakpoints (DR7 local-enable bits) on a task switch.
- do_interrupt() logs the interrupt trace (vector, error code, CPL, CS:EIP, SS:ESP) before the VBOX single-step handling instead of after it.
- check_exception() no longer calls cpu_abort("triple fault") on a second double fault: under SVM it raises SVM_EXIT_SHUTDOWN, otherwise it logs the triple fault, requests a reset (qemu_system_reset_request(), or remR3RaiseRC(env->pVM, VINF_EM_RESET) in the VBOX build) and returns EXCP_HLT.
- raise_interrupt() becomes static and QEMU_NORETURN; raise_exception_err() loses its parenthesized definition.
- helper_single_step() updates the hardware-breakpoint state via check_hw_breakpoints(), sets DR6_BS and raises EXCP01_DB.
- helper_cpuid() shrinks to a call to cpu_x86_cpuid(); the big inline CPUID switch and the VBOX remR3CpuId() path move out of this file.
- helper_syscall() and helper_sysret() are wrapped in TARGET_X86_64; several uninitialized-variable warnings are silenced (ss/ss_e1/ss_e2 initialized to 0) and VBOX /** @todo */ comments are added around the e1 = e2 = 0 workarounds in the far call/return paths.
*/ 3181 3027 ss_e1 = ss_e2 = e1 = e2 = 0; 3182 3028 #endif … … 3233 3079 POPW(ssp, sp, sp_mask, new_eflags); 3234 3080 } 3235 #ifdef DEBUG_PCALL 3236 if (loglevel & CPU_LOG_PCALL) { 3237 fprintf(logfile, "lret new %04x:" TARGET_FMT_lx " s=%d addend=0x%x\n", 3238 new_cs, new_eip, shift, addend); 3239 cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP); 3240 } 3241 #endif 3081 LOG_PCALL("lret new %04x:" TARGET_FMT_lx " s=%d addend=0x%x\n", 3082 new_cs, new_eip, shift, addend); 3083 LOG_PCALL_STATE(env); 3242 3084 if ((new_cs & 0xfffc) == 0) 3243 3085 { … … 3324 3166 POPW(ssp, sp, sp_mask, new_ss); 3325 3167 } 3326 #ifdef DEBUG_PCALL 3327 if (loglevel & CPU_LOG_PCALL) { 3328 fprintf(logfile, "new ss:esp=%04x:" TARGET_FMT_lx "\n", 3168 LOG_PCALL("new ss:esp=%04x:" TARGET_FMT_lx "\n", 3329 3169 new_ss, new_esp); 3330 }3331 #endif3332 3170 if ((new_ss & 0xfffc) == 0) { 3333 3171 #ifdef TARGET_X86_64 … … 3436 3274 3437 3275 #ifdef VBOX 3438 e1 = e2 = 0; 3276 e1 = e2 = 0; /** @todo Why do we do this? */ 3439 3277 remR3TrapClear(env->pVM); 3440 3278 #endif … … 3567 3405 { 3568 3406 } 3407 3408 void helper_movl_drN_T0(int reg, target_ulong t0) 3409 { 3410 } 3569 3411 #else 3570 3412 target_ulong helper_read_crN(int reg) … … 3612 3454 } 3613 3455 } 3456 3457 void helper_movl_drN_T0(int reg, target_ulong t0) 3458 { 3459 int i; 3460 3461 if (reg < 4) { 3462 hw_breakpoint_remove(env, reg); 3463 env->dr[reg] = t0; 3464 hw_breakpoint_insert(env, reg); 3465 } else if (reg == 7) { 3466 for (i = 0; i < 4; i++) 3467 hw_breakpoint_remove(env, i); 3468 env->dr[7] = t0; 3469 for (i = 0; i < 4; i++) 3470 hw_breakpoint_insert(env, i); 3471 } else 3472 env->dr[reg] = t0; 3473 } 3614 3474 #endif 3615 3475 … … 3626 3486 env->cr[0] &= ~CR0_TS_MASK; 3627 3487 env->hflags &= ~HF_TS_MASK; 3628 }3629 3630 /* XXX: do more */3631 void helper_movl_drN_T0(int reg, target_ulong t0)3632 {3633 env->dr[reg] = t0;3634 3488 } 3635 3489 … … 3721 3575 break; 3722 3576 case MSR_IA32_APICBASE: 3723 # ifndef VBOX /* The CPUMSetGuestMsr call below does this now. */3577 # ifndef VBOX /* The CPUMSetGuestMsr call below does this now. 
*/ 3724 3578 cpu_set_apic_base(env, val); 3725 # endif3579 # endif 3726 3580 break; 3727 3581 case MSR_EFER: … … 3739 3593 if (env->cpuid_ext3_features & CPUID_EXT3_SVM) 3740 3594 update_mask |= MSR_EFER_SVME; 3595 if (env->cpuid_ext2_features & CPUID_EXT2_FFXSR) 3596 update_mask |= MSR_EFER_FFXSR; 3741 3597 cpu_load_efer(env, (env->efer & ~update_mask) | 3742 3598 (val & update_mask)); … … 3772 3628 break; 3773 3629 #endif 3630 # ifndef VBOX 3631 case MSR_MTRRphysBase(0): 3632 case MSR_MTRRphysBase(1): 3633 case MSR_MTRRphysBase(2): 3634 case MSR_MTRRphysBase(3): 3635 case MSR_MTRRphysBase(4): 3636 case MSR_MTRRphysBase(5): 3637 case MSR_MTRRphysBase(6): 3638 case MSR_MTRRphysBase(7): 3639 env->mtrr_var[((uint32_t)ECX - MSR_MTRRphysBase(0)) / 2].base = val; 3640 break; 3641 case MSR_MTRRphysMask(0): 3642 case MSR_MTRRphysMask(1): 3643 case MSR_MTRRphysMask(2): 3644 case MSR_MTRRphysMask(3): 3645 case MSR_MTRRphysMask(4): 3646 case MSR_MTRRphysMask(5): 3647 case MSR_MTRRphysMask(6): 3648 case MSR_MTRRphysMask(7): 3649 env->mtrr_var[((uint32_t)ECX - MSR_MTRRphysMask(0)) / 2].mask = val; 3650 break; 3651 case MSR_MTRRfix64K_00000: 3652 env->mtrr_fixed[(uint32_t)ECX - MSR_MTRRfix64K_00000] = val; 3653 break; 3654 case MSR_MTRRfix16K_80000: 3655 case MSR_MTRRfix16K_A0000: 3656 env->mtrr_fixed[(uint32_t)ECX - MSR_MTRRfix16K_80000 + 1] = val; 3657 break; 3658 case MSR_MTRRfix4K_C0000: 3659 case MSR_MTRRfix4K_C8000: 3660 case MSR_MTRRfix4K_D0000: 3661 case MSR_MTRRfix4K_D8000: 3662 case MSR_MTRRfix4K_E0000: 3663 case MSR_MTRRfix4K_E8000: 3664 case MSR_MTRRfix4K_F0000: 3665 case MSR_MTRRfix4K_F8000: 3666 env->mtrr_fixed[(uint32_t)ECX - MSR_MTRRfix4K_C0000 + 3] = val; 3667 break; 3668 case MSR_MTRRdefType: 3669 env->mtrr_deftype = val; 3670 break; 3671 # endif /* !VBOX */ 3774 3672 default: 3775 # ifndef VBOX3673 # ifndef VBOX 3776 3674 /* XXX: exception ? */ 3777 # endif3778 break; 3779 } 3780 3781 # ifdef VBOX3675 # endif 3676 break; 3677 } 3678 3679 # ifdef VBOX 3782 3680 /* call CPUM. */ 3783 3681 if (cpu_wrmsr(env, (uint32_t)ECX, val) != 0) … … 3785 3683 /** @todo be a brave man and raise a \#GP(0) here as we should... */ 3786 3684 } 3787 # endif3685 # endif 3788 3686 } 3789 3687 … … 3791 3689 { 3792 3690 uint64_t val; 3691 3793 3692 helper_svm_check_intercept_param(SVM_EXIT_MSR, 0); 3794 3693 … … 3818 3717 val = env->vm_hsave; 3819 3718 break; 3820 # ifndef VBOX /* forward to CPUMQueryGuestMsr. */3719 # ifndef VBOX /* forward to CPUMQueryGuestMsr. 
*/ 3821 3720 case MSR_IA32_PERF_STATUS: 3822 3721 /* tsc_increment_by_tick */ … … 3825 3724 val |= (((uint64_t)4ULL) << 40); 3826 3725 break; 3827 # endif /* !VBOX */3726 # endif /* !VBOX */ 3828 3727 #ifdef TARGET_X86_64 3829 3728 case MSR_LSTAR: … … 3855 3754 break; 3856 3755 #endif 3756 # ifndef VBOX 3757 case MSR_MTRRphysBase(0): 3758 case MSR_MTRRphysBase(1): 3759 case MSR_MTRRphysBase(2): 3760 case MSR_MTRRphysBase(3): 3761 case MSR_MTRRphysBase(4): 3762 case MSR_MTRRphysBase(5): 3763 case MSR_MTRRphysBase(6): 3764 case MSR_MTRRphysBase(7): 3765 val = env->mtrr_var[((uint32_t)ECX - MSR_MTRRphysBase(0)) / 2].base; 3766 break; 3767 case MSR_MTRRphysMask(0): 3768 case MSR_MTRRphysMask(1): 3769 case MSR_MTRRphysMask(2): 3770 case MSR_MTRRphysMask(3): 3771 case MSR_MTRRphysMask(4): 3772 case MSR_MTRRphysMask(5): 3773 case MSR_MTRRphysMask(6): 3774 case MSR_MTRRphysMask(7): 3775 val = env->mtrr_var[((uint32_t)ECX - MSR_MTRRphysMask(0)) / 2].mask; 3776 break; 3777 case MSR_MTRRfix64K_00000: 3778 val = env->mtrr_fixed[0]; 3779 break; 3780 case MSR_MTRRfix16K_80000: 3781 case MSR_MTRRfix16K_A0000: 3782 val = env->mtrr_fixed[(uint32_t)ECX - MSR_MTRRfix16K_80000 + 1]; 3783 break; 3784 case MSR_MTRRfix4K_C0000: 3785 case MSR_MTRRfix4K_C8000: 3786 case MSR_MTRRfix4K_D0000: 3787 case MSR_MTRRfix4K_D8000: 3788 case MSR_MTRRfix4K_E0000: 3789 case MSR_MTRRfix4K_E8000: 3790 case MSR_MTRRfix4K_F0000: 3791 case MSR_MTRRfix4K_F8000: 3792 val = env->mtrr_fixed[(uint32_t)ECX - MSR_MTRRfix4K_C0000 + 3]; 3793 break; 3794 case MSR_MTRRdefType: 3795 val = env->mtrr_deftype; 3796 break; 3797 case MSR_MTRRcap: 3798 if (env->cpuid_features & CPUID_MTRR) 3799 val = MSR_MTRRcap_VCNT | MSR_MTRRcap_FIXRANGE_SUPPORT | MSR_MTRRcap_WC_SUPPORTED; 3800 else 3801 /* XXX: exception ? */ 3802 val = 0; 3803 break; 3804 # endif /* !VBOX */ 3857 3805 default: 3858 # ifndef VBOX3806 # ifndef VBOX 3859 3807 /* XXX: exception ? 
*/ 3860 3808 val = 0; 3861 # else /* VBOX */3809 # else /* VBOX */ 3862 3810 if (cpu_rdmsr(env, (uint32_t)ECX, &val) != 0) 3863 3811 { … … 3865 3813 val = 0; 3866 3814 } 3867 # endif /* VBOX */3815 # endif /* VBOX */ 3868 3816 break; 3869 3817 } … … 3871 3819 EDX = (uint32_t)(val >> 32); 3872 3820 3873 # ifdef VBOX_STRICT3821 # ifdef VBOX_STRICT 3874 3822 if (cpu_rdmsr(env, (uint32_t)ECX, &val) != 0) 3875 3823 val = 0; 3876 3824 AssertMsg(val == RT_MAKE_U64(EAX, EDX), ("idMsr=%#x val=%#llx eax:edx=%#llx\n", (uint32_t)ECX, val, RT_MAKE_U64(EAX, EDX))); 3877 # endif3825 # endif 3878 3826 } 3879 3827 #endif … … 3886 3834 3887 3835 selector = selector1 & 0xffff; 3888 eflags = cc_table[CC_OP].compute_all();3836 eflags = helper_cc_compute_all(CC_OP); 3889 3837 if (load_segment(&e1, &e2, selector) != 0) 3890 3838 goto fail; … … 3928 3876 3929 3877 selector = selector1 & 0xffff; 3930 eflags = cc_table[CC_OP].compute_all();3878 eflags = helper_cc_compute_all(CC_OP); 3931 3879 if ((selector & 0xfffc) == 0) 3932 3880 goto fail; … … 3974 3922 3975 3923 selector = selector1 & 0xffff; 3976 eflags = cc_table[CC_OP].compute_all();3924 eflags = helper_cc_compute_all(CC_OP); 3977 3925 if ((selector & 0xfffc) == 0) 3978 3926 goto fail; … … 4007 3955 4008 3956 selector = selector1 & 0xffff; 4009 eflags = cc_table[CC_OP].compute_all();3957 eflags = helper_cc_compute_all(CC_OP); 4010 3958 if ((selector & 0xfffc) == 0) 4011 3959 goto fail; … … 4047 3995 } 4048 3996 4049 void fpu_raise_exception(void)3997 static void fpu_raise_exception(void) 4050 3998 { 4051 3999 if (env->cr[0] & CR0_NE_MASK) { … … 4297 4245 ret = floatx_compare(ST0, FT0, &env->fp_status); 4298 4246 env->fpus = (env->fpus & ~0x4500) | fcom_ccval[ret + 1]; 4299 FORCE_RET();4300 4247 } 4301 4248 … … 4306 4253 ret = floatx_compare_quiet(ST0, FT0, &env->fp_status); 4307 4254 env->fpus = (env->fpus & ~0x4500) | fcom_ccval[ret+ 1]; 4308 FORCE_RET();4309 4255 } 4310 4256 … … 4317 4263 4318 4264 ret = floatx_compare(ST0, FT0, &env->fp_status); 4319 eflags = cc_table[CC_OP].compute_all();4265 eflags = helper_cc_compute_all(CC_OP); 4320 4266 eflags = (eflags & ~(CC_Z | CC_P | CC_C)) | fcomi_ccval[ret + 1]; 4321 4267 CC_SRC = eflags; 4322 FORCE_RET();4323 4268 } 4324 4269 … … 4329 4274 4330 4275 ret = floatx_compare_quiet(ST0, FT0, &env->fp_status); 4331 eflags = cc_table[CC_OP].compute_all();4276 eflags = helper_cc_compute_all(CC_OP); 4332 4277 eflags = (eflags & ~(CC_Z | CC_P | CC_C)) | fcomi_ccval[ret + 1]; 4333 4278 CC_SRC = eflags; 4334 FORCE_RET();4335 4279 } 4336 4280 … … 4525 4469 if (env->fpus & FPUS_SE) 4526 4470 fpu_raise_exception(); 4527 FORCE_RET();4528 4471 } 4529 4472 … … 5040 4983 nb_xmm_regs = 8; 5041 4984 addr = ptr + 0xa0; 5042 for(i = 0; i < nb_xmm_regs; i++) { 5043 stq(addr, env->xmm_regs[i].XMM_Q(0)); 5044 stq(addr + 8, env->xmm_regs[i].XMM_Q(1)); 5045 addr += 16; 4985 /* Fast FXSAVE leaves out the XMM registers */ 4986 if (!(env->efer & MSR_EFER_FFXSR) 4987 || (env->hflags & HF_CPL_MASK) 4988 || !(env->hflags & HF_LMA_MASK)) { 4989 for(i = 0; i < nb_xmm_regs; i++) { 4990 stq(addr, env->xmm_regs[i].XMM_Q(0)); 4991 stq(addr + 8, env->xmm_regs[i].XMM_Q(1)); 4992 addr += 16; 4993 } 5046 4994 } 5047 4995 } … … 5080 5028 nb_xmm_regs = 8; 5081 5029 addr = ptr + 0xa0; 5082 for(i = 0; i < nb_xmm_regs; i++) { 5030 /* Fast FXRESTORE leaves out the XMM registers */ 5031 if (!(env->efer & MSR_EFER_FFXSR) 5032 || (env->hflags & HF_CPL_MASK) 5033 || !(env->hflags & HF_LMA_MASK)) { 5034 for(i = 0; i < nb_xmm_regs; i++) { 5083 5035 #if 
!defined(VBOX) || __GNUC__ < 4 5084 env->xmm_regs[i].XMM_Q(0) = ldq(addr);5085 env->xmm_regs[i].XMM_Q(1) = ldq(addr + 8);5036 env->xmm_regs[i].XMM_Q(0) = ldq(addr); 5037 env->xmm_regs[i].XMM_Q(1) = ldq(addr + 8); 5086 5038 #else /* VBOX + __GNUC__ >= 4: gcc 4.x compiler bug - it runs out of registers for the 64-bit value. */ 5087 5039 # if 1 5088 env->xmm_regs[i].XMM_L(0) = ldl(addr);5089 env->xmm_regs[i].XMM_L(1) = ldl(addr + 4);5090 env->xmm_regs[i].XMM_L(2) = ldl(addr + 8);5091 env->xmm_regs[i].XMM_L(3) = ldl(addr + 12);5040 env->xmm_regs[i].XMM_L(0) = ldl(addr); 5041 env->xmm_regs[i].XMM_L(1) = ldl(addr + 4); 5042 env->xmm_regs[i].XMM_L(2) = ldl(addr + 8); 5043 env->xmm_regs[i].XMM_L(3) = ldl(addr + 12); 5092 5044 # else 5093 /* this works fine on Mac OS X, gcc 4.0.1 */5094 uint64_t u64 = ldq(addr);5095 env->xmm_regs[i].XMM_Q(0);5096 u64 = ldq(addr + 4);5097 env->xmm_regs[i].XMM_Q(1) = u64;5045 /* this works fine on Mac OS X, gcc 4.0.1 */ 5046 uint64_t u64 = ldq(addr); 5047 env->xmm_regs[i].XMM_Q(0); 5048 u64 = ldq(addr + 4); 5049 env->xmm_regs[i].XMM_Q(1) = u64; 5098 5050 # endif 5099 5051 #endif 5100 addr += 16; 5052 addr += 16; 5053 } 5101 5054 } 5102 5055 } … … 5429 5382 raise_exception(EXCP05_BOUND); 5430 5383 } 5431 FORCE_RET();5432 5384 } 5433 5385 … … 5440 5392 raise_exception(EXCP05_BOUND); 5441 5393 } 5442 FORCE_RET();5443 5394 } 5444 5395 … … 5520 5471 #endif /* VBOX */ 5521 5472 5473 #if !defined(CONFIG_USER_ONLY) 5522 5474 /* try to fill the TLB and return an exception if error. If retaddr is 5523 5475 NULL, it means that the function was called in C code (i.e. not … … 5552 5504 env = saved_env; 5553 5505 } 5506 #endif 5554 5507 5555 5508 #ifdef VBOX … … 6148 6101 addr = (uint32_t)EAX; 6149 6102 6150 if (loglevel & CPU_LOG_TB_IN_ASM) 6151 fprintf(logfile,"vmrun! " TARGET_FMT_lx "\n", addr); 6103 qemu_log_mask(CPU_LOG_TB_IN_ASM, "vmrun! " TARGET_FMT_lx "\n", addr); 6152 6104 6153 6105 env->vm_vmcb = addr; … … 6269 6221 stl_phys(env->vm_vmcb + offsetof(struct vmcb, control.event_inj), event_inj & ~SVM_EVTINJ_VALID); 6270 6222 6271 if (loglevel & CPU_LOG_TB_IN_ASM) 6272 fprintf(logfile, "Injecting(%#hx): ", valid_err); 6223 qemu_log_mask(CPU_LOG_TB_IN_ASM, "Injecting(%#hx): ", valid_err); 6273 6224 /* FIXME: need to implement valid_err */ 6274 6225 switch (event_inj & SVM_EVTINJ_TYPE_MASK) { … … 6278 6229 env->exception_is_int = 0; 6279 6230 env->exception_next_eip = -1; 6280 if (loglevel & CPU_LOG_TB_IN_ASM) 6281 fprintf(logfile, "INTR"); 6231 qemu_log_mask(CPU_LOG_TB_IN_ASM, "INTR"); 6282 6232 /* XXX: is it always correct ? 
*/ 6283 6233 do_interrupt(vector, 0, 0, 0, 1); … … 6288 6238 env->exception_is_int = 0; 6289 6239 env->exception_next_eip = EIP; 6290 if (loglevel & CPU_LOG_TB_IN_ASM) 6291 fprintf(logfile, "NMI"); 6240 qemu_log_mask(CPU_LOG_TB_IN_ASM, "NMI"); 6292 6241 cpu_loop_exit(); 6293 6242 break; … … 6297 6246 env->exception_is_int = 0; 6298 6247 env->exception_next_eip = -1; 6299 if (loglevel & CPU_LOG_TB_IN_ASM) 6300 fprintf(logfile, "EXEPT"); 6248 qemu_log_mask(CPU_LOG_TB_IN_ASM, "EXEPT"); 6301 6249 cpu_loop_exit(); 6302 6250 break; … … 6306 6254 env->exception_is_int = 1; 6307 6255 env->exception_next_eip = EIP; 6308 if (loglevel & CPU_LOG_TB_IN_ASM) 6309 fprintf(logfile, "SOFT"); 6256 qemu_log_mask(CPU_LOG_TB_IN_ASM, "SOFT"); 6310 6257 cpu_loop_exit(); 6311 6258 break; 6312 6259 } 6313 if (loglevel & CPU_LOG_TB_IN_ASM) 6314 fprintf(logfile, " %#x %#x\n", env->exception_index, env->error_code); 6260 qemu_log_mask(CPU_LOG_TB_IN_ASM, " %#x %#x\n", env->exception_index, env->error_code); 6315 6261 } 6316 6262 } … … 6332 6278 addr = (uint32_t)EAX; 6333 6279 6334 if (loglevel & CPU_LOG_TB_IN_ASM) 6335 fprintf(logfile,"vmload! " TARGET_FMT_lx "\nFS: %016" PRIx64 " | " TARGET_FMT_lx "\n", 6280 qemu_log_mask(CPU_LOG_TB_IN_ASM, "vmload! " TARGET_FMT_lx "\nFS: %016" PRIx64 " | " TARGET_FMT_lx "\n", 6336 6281 addr, ldq_phys(addr + offsetof(struct vmcb, save.fs.base)), 6337 6282 env->segs[R_FS].base); … … 6368 6313 addr = (uint32_t)EAX; 6369 6314 6370 if (loglevel & CPU_LOG_TB_IN_ASM) 6371 fprintf(logfile,"vmsave! " TARGET_FMT_lx "\nFS: %016" PRIx64 " | " TARGET_FMT_lx "\n", 6315 qemu_log_mask(CPU_LOG_TB_IN_ASM, "vmsave! " TARGET_FMT_lx "\nFS: %016" PRIx64 " | " TARGET_FMT_lx "\n", 6372 6316 addr, ldq_phys(addr + offsetof(struct vmcb, save.fs.base)), 6373 6317 env->segs[R_FS].base); … … 6521 6465 uint32_t int_ctl; 6522 6466 6523 if (loglevel & CPU_LOG_TB_IN_ASM) 6524 fprintf(logfile,"vmexit(%08x, %016" PRIx64 ", %016" PRIx64 ", " TARGET_FMT_lx ")!\n", 6467 qemu_log_mask(CPU_LOG_TB_IN_ASM, "vmexit(%08x, %016" PRIx64 ", %016" PRIx64 ", " TARGET_FMT_lx ")!\n", 6525 6468 exit_code, exit_info_1, 6526 6469 ldq_phys(env->vm_vmcb + offsetof(struct vmcb, control.exit_info_2)), … … 6668 6611 6669 6612 /* XXX: suppress */ 6670 void helper_movq( uint64_t *d, uint64_t*s)6671 { 6672 * d = *s;6613 void helper_movq(void *d, void *s) 6614 { 6615 *(uint64_t *)d = *(uint64_t *)s; 6673 6616 } 6674 6617 … … 6740 6683 } 6741 6684 6742 CCTable cc_table[CC_OP_NB] = { 6743 [CC_OP_DYNAMIC] = { /* should never happen */ }, 6744 6745 [CC_OP_EFLAGS] = { compute_all_eflags, compute_c_eflags }, 6746 6747 [CC_OP_MULB] = { compute_all_mulb, compute_c_mull }, 6748 [CC_OP_MULW] = { compute_all_mulw, compute_c_mull }, 6749 [CC_OP_MULL] = { compute_all_mull, compute_c_mull }, 6750 6751 [CC_OP_ADDB] = { compute_all_addb, compute_c_addb }, 6752 [CC_OP_ADDW] = { compute_all_addw, compute_c_addw }, 6753 [CC_OP_ADDL] = { compute_all_addl, compute_c_addl }, 6754 6755 [CC_OP_ADCB] = { compute_all_adcb, compute_c_adcb }, 6756 [CC_OP_ADCW] = { compute_all_adcw, compute_c_adcw }, 6757 [CC_OP_ADCL] = { compute_all_adcl, compute_c_adcl }, 6758 6759 [CC_OP_SUBB] = { compute_all_subb, compute_c_subb }, 6760 [CC_OP_SUBW] = { compute_all_subw, compute_c_subw }, 6761 [CC_OP_SUBL] = { compute_all_subl, compute_c_subl }, 6762 6763 [CC_OP_SBBB] = { compute_all_sbbb, compute_c_sbbb }, 6764 [CC_OP_SBBW] = { compute_all_sbbw, compute_c_sbbw }, 6765 [CC_OP_SBBL] = { compute_all_sbbl, compute_c_sbbl }, 6766 6767 [CC_OP_LOGICB] = { compute_all_logicb, compute_c_logicb 
}, 6768 [CC_OP_LOGICW] = { compute_all_logicw, compute_c_logicw }, 6769 [CC_OP_LOGICL] = { compute_all_logicl, compute_c_logicl }, 6770 6771 [CC_OP_INCB] = { compute_all_incb, compute_c_incl }, 6772 [CC_OP_INCW] = { compute_all_incw, compute_c_incl }, 6773 [CC_OP_INCL] = { compute_all_incl, compute_c_incl }, 6774 6775 [CC_OP_DECB] = { compute_all_decb, compute_c_incl }, 6776 [CC_OP_DECW] = { compute_all_decw, compute_c_incl }, 6777 [CC_OP_DECL] = { compute_all_decl, compute_c_incl }, 6778 6779 [CC_OP_SHLB] = { compute_all_shlb, compute_c_shlb }, 6780 [CC_OP_SHLW] = { compute_all_shlw, compute_c_shlw }, 6781 [CC_OP_SHLL] = { compute_all_shll, compute_c_shll }, 6782 6783 [CC_OP_SARB] = { compute_all_sarb, compute_c_sarl }, 6784 [CC_OP_SARW] = { compute_all_sarw, compute_c_sarl }, 6785 [CC_OP_SARL] = { compute_all_sarl, compute_c_sarl }, 6685 uint32_t helper_cc_compute_all(int op) 6686 { 6687 switch (op) { 6688 default: /* should never happen */ return 0; 6689 6690 case CC_OP_EFLAGS: return compute_all_eflags(); 6691 6692 case CC_OP_MULB: return compute_all_mulb(); 6693 case CC_OP_MULW: return compute_all_mulw(); 6694 case CC_OP_MULL: return compute_all_mull(); 6695 6696 case CC_OP_ADDB: return compute_all_addb(); 6697 case CC_OP_ADDW: return compute_all_addw(); 6698 case CC_OP_ADDL: return compute_all_addl(); 6699 6700 case CC_OP_ADCB: return compute_all_adcb(); 6701 case CC_OP_ADCW: return compute_all_adcw(); 6702 case CC_OP_ADCL: return compute_all_adcl(); 6703 6704 case CC_OP_SUBB: return compute_all_subb(); 6705 case CC_OP_SUBW: return compute_all_subw(); 6706 case CC_OP_SUBL: return compute_all_subl(); 6707 6708 case CC_OP_SBBB: return compute_all_sbbb(); 6709 case CC_OP_SBBW: return compute_all_sbbw(); 6710 case CC_OP_SBBL: return compute_all_sbbl(); 6711 6712 case CC_OP_LOGICB: return compute_all_logicb(); 6713 case CC_OP_LOGICW: return compute_all_logicw(); 6714 case CC_OP_LOGICL: return compute_all_logicl(); 6715 6716 case CC_OP_INCB: return compute_all_incb(); 6717 case CC_OP_INCW: return compute_all_incw(); 6718 case CC_OP_INCL: return compute_all_incl(); 6719 6720 case CC_OP_DECB: return compute_all_decb(); 6721 case CC_OP_DECW: return compute_all_decw(); 6722 case CC_OP_DECL: return compute_all_decl(); 6723 6724 case CC_OP_SHLB: return compute_all_shlb(); 6725 case CC_OP_SHLW: return compute_all_shlw(); 6726 case CC_OP_SHLL: return compute_all_shll(); 6727 6728 case CC_OP_SARB: return compute_all_sarb(); 6729 case CC_OP_SARW: return compute_all_sarw(); 6730 case CC_OP_SARL: return compute_all_sarl(); 6786 6731 6787 6732 #ifdef TARGET_X86_64 6788 [CC_OP_MULQ] = { compute_all_mulq, compute_c_mull }, 6789 6790 [CC_OP_ADDQ] = { compute_all_addq, compute_c_addq }, 6791 6792 [CC_OP_ADCQ] = { compute_all_adcq, compute_c_adcq }, 6793 6794 [CC_OP_SUBQ] = { compute_all_subq, compute_c_subq }, 6795 6796 [CC_OP_SBBQ] = { compute_all_sbbq, compute_c_sbbq }, 6797 6798 [CC_OP_LOGICQ] = { compute_all_logicq, compute_c_logicq }, 6799 6800 [CC_OP_INCQ] = { compute_all_incq, compute_c_incl }, 6801 6802 [CC_OP_DECQ] = { compute_all_decq, compute_c_incl }, 6803 6804 [CC_OP_SHLQ] = { compute_all_shlq, compute_c_shlq }, 6805 6806 [CC_OP_SARQ] = { compute_all_sarq, compute_c_sarl }, 6807 #endif 6808 }; 6809 6733 case CC_OP_MULQ: return compute_all_mulq(); 6734 6735 case CC_OP_ADDQ: return compute_all_addq(); 6736 6737 case CC_OP_ADCQ: return compute_all_adcq(); 6738 6739 case CC_OP_SUBQ: return compute_all_subq(); 6740 6741 case CC_OP_SBBQ: return compute_all_sbbq(); 6742 6743 case CC_OP_LOGICQ: return 
compute_all_logicq(); 6744 6745 case CC_OP_INCQ: return compute_all_incq(); 6746 6747 case CC_OP_DECQ: return compute_all_decq(); 6748 6749 case CC_OP_SHLQ: return compute_all_shlq(); 6750 6751 case CC_OP_SARQ: return compute_all_sarq(); 6752 #endif 6753 } 6754 } 6755 6756 uint32_t helper_cc_compute_c(int op) 6757 { 6758 switch (op) { 6759 default: /* should never happen */ return 0; 6760 6761 case CC_OP_EFLAGS: return compute_c_eflags(); 6762 6763 case CC_OP_MULB: return compute_c_mull(); 6764 case CC_OP_MULW: return compute_c_mull(); 6765 case CC_OP_MULL: return compute_c_mull(); 6766 6767 case CC_OP_ADDB: return compute_c_addb(); 6768 case CC_OP_ADDW: return compute_c_addw(); 6769 case CC_OP_ADDL: return compute_c_addl(); 6770 6771 case CC_OP_ADCB: return compute_c_adcb(); 6772 case CC_OP_ADCW: return compute_c_adcw(); 6773 case CC_OP_ADCL: return compute_c_adcl(); 6774 6775 case CC_OP_SUBB: return compute_c_subb(); 6776 case CC_OP_SUBW: return compute_c_subw(); 6777 case CC_OP_SUBL: return compute_c_subl(); 6778 6779 case CC_OP_SBBB: return compute_c_sbbb(); 6780 case CC_OP_SBBW: return compute_c_sbbw(); 6781 case CC_OP_SBBL: return compute_c_sbbl(); 6782 6783 case CC_OP_LOGICB: return compute_c_logicb(); 6784 case CC_OP_LOGICW: return compute_c_logicw(); 6785 case CC_OP_LOGICL: return compute_c_logicl(); 6786 6787 case CC_OP_INCB: return compute_c_incl(); 6788 case CC_OP_INCW: return compute_c_incl(); 6789 case CC_OP_INCL: return compute_c_incl(); 6790 6791 case CC_OP_DECB: return compute_c_incl(); 6792 case CC_OP_DECW: return compute_c_incl(); 6793 case CC_OP_DECL: return compute_c_incl(); 6794 6795 case CC_OP_SHLB: return compute_c_shlb(); 6796 case CC_OP_SHLW: return compute_c_shlw(); 6797 case CC_OP_SHLL: return compute_c_shll(); 6798 6799 case CC_OP_SARB: return compute_c_sarl(); 6800 case CC_OP_SARW: return compute_c_sarl(); 6801 case CC_OP_SARL: return compute_c_sarl(); 6802 6803 #ifdef TARGET_X86_64 6804 case CC_OP_MULQ: return compute_c_mull(); 6805 6806 case CC_OP_ADDQ: return compute_c_addq(); 6807 6808 case CC_OP_ADCQ: return compute_c_adcq(); 6809 6810 case CC_OP_SUBQ: return compute_c_subq(); 6811 6812 case CC_OP_SBBQ: return compute_c_sbbq(); 6813 6814 case CC_OP_LOGICQ: return compute_c_logicq(); 6815 6816 case CC_OP_INCQ: return compute_c_incl(); 6817 6818 case CC_OP_DECQ: return compute_c_incl(); 6819 6820 case CC_OP_SHLQ: return compute_c_shlq(); 6821 6822 case CC_OP_SARQ: return compute_c_sarl(); 6823 #endif 6824 } 6825 } -
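
A note on the check_exception() change in the op_helper.c diff above: instead of cpu_abort() on a triple fault, the helper now takes the SVM SHUTDOWN exit when an intercept is active (HF_SVMI_MASK), otherwise requests a machine reset (qemu_system_reset_request() upstream, remR3RaiseRC(..., VINF_EM_RESET) under VBOX) and returns EXCP_HLT. The sketch below restates only the escalation rule; classify_exception() and its spelling of the constants are illustrative, not the recompiler's code, and error-code handling is left out.

    /* Contributory faults (#DE, #TS, #NP, #SS, #GP) escalate pairwise to #DF;
     * a fault raised while delivering #DF is a triple fault and resets the CPU.
     * old_vec < 0 means no exception is currently being delivered. */
    #include <stdbool.h>

    #define EXCP08_DBLE  8
    #define EXCP0E_PAGE 14

    static bool is_contributory(int vec)
    {
        return vec == 0 || (vec >= 10 && vec <= 13);
    }

    /* Returns the vector to deliver, or -1 when the machine should reset. */
    static int classify_exception(int old_vec, int new_vec)
    {
        if (old_vec == EXCP08_DBLE)
            return -1;                                   /* triple fault */
        if ((is_contributory(old_vec) && is_contributory(new_vec)) ||
            (old_vec == EXCP0E_PAGE &&
             (is_contributory(new_vec) || new_vec == EXCP0E_PAGE)))
            return EXCP08_DBLE;                          /* double fault */
        return new_vec;
    }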
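
Many hunks above replace the guarded "if (loglevel & CPU_LOG_*) fprintf(logfile, ...)" pattern with qemu_log_mask(), qemu_log() and log_cpu_state_mask(). A minimal sketch of that mask-gated logging style follows; the names here are illustrative stand-ins, not the real QEMU logging API.

    #include <stdarg.h>
    #include <stdio.h>

    /* One bit per log category; the enabled set lives in a single global mask
     * instead of being re-tested ad hoc at every call site. */
    enum { LOG_INT = 1u << 0, LOG_RESET = 1u << 1, LOG_TB_IN_ASM = 1u << 2 };

    static unsigned log_mask;   /* configured once, e.g. from the command line */

    static void log_mask_printf(unsigned category, const char *fmt, ...)
    {
        if (!(log_mask & category))
            return;             /* disabled category: cheap early out */
        va_list ap;
        va_start(ap, fmt);
        vfprintf(stderr, fmt, ap);
        va_end(ap);
    }

A call such as log_mask_printf(LOG_INT, "SMM: enter\n") then replaces the two-line guarded fprintf without changing behaviour.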
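
The new wrmsr/rdmsr cases map the variable-range MTRR MSR pairs, MSR_MTRRphysBase(n) = 0x200 + 2n and MSR_MTRRphysMask(n) = 0x200 + 2n + 1, onto env->mtrr_var[n].base and .mask. A standalone sketch of that decode; the struct and function names are illustrative.

    #include <stdbool.h>
    #include <stdint.h>

    #define MTRR_PHYS_FIRST 0x200u   /* MTRRphysBase(0) */
    #define MTRR_VAR_COUNT  8u

    struct mtrr_var_range { uint64_t base, mask; };

    /* Store a WRMSR value into the right variable-range slot; returns false
     * if the MSR number is not a variable-range MTRR register. */
    static bool mtrr_var_write(struct mtrr_var_range *var, uint32_t msr, uint64_t val)
    {
        if (msr < MTRR_PHYS_FIRST || msr >= MTRR_PHYS_FIRST + 2u * MTRR_VAR_COUNT)
            return false;
        uint32_t idx = (msr - MTRR_PHYS_FIRST) / 2u;
        if ((msr & 1u) == 0)
            var[idx].base = val;      /* even MSR number: PhysBase(idx) */
        else
            var[idx].mask = val;      /* odd MSR number: PhysMask(idx) */
        return true;
    }

The fixed-range MSRs in the same hunks use the same idea with direct offsets into mtrr_fixed[]: slot 0 for the 64K register, slots 1-2 for the 16K pair, slots 3-10 for the eight 4K registers.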
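
The FXSAVE/FXRSTOR hunks skip the XMM register image when EFER.FFXSR is enabled, CPL is 0 and long mode is active; the comments in the hunk call this "Fast FXSAVE"/"Fast FXRESTORE". The predicate below restates that condition; the bit positions are made up for the sketch and are not taken from cpu.h.

    #include <stdbool.h>
    #include <stdint.h>

    /* Illustrative bit layout only. */
    #define EFER_FFXSR_BIT  (1u << 14)   /* assumed position of EFER.FFXSR */
    #define HFLAG_LMA_BIT   (1u << 4)    /* long mode active (made-up bit) */
    #define HFLAG_CPL_MASK  0x3u         /* CPL in the low two bits (made up) */

    static bool fxsave_skips_xmm_area(uint64_t efer, uint32_t hflags)
    {
        return (efer & EFER_FFXSR_BIT) != 0     /* fast FXSAVE enabled */
            && (hflags & HFLAG_CPL_MASK) == 0   /* ring 0 only */
            && (hflags & HFLAG_LMA_BIT) != 0;   /* 64-bit (long) mode */
    }

When the predicate is true, the loop that stores or reloads the 16-byte XMM slots at offset 0xa0 of the FXSAVE area is simply skipped, which is exactly what both hunks do.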
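
The largest rewrite above drops the cc_table[CC_OP] function-pointer array in favour of helper_cc_compute_all(CC_OP) and helper_cc_compute_c(CC_OP), plain switches that can be called like any other helper. The sketch below shows the lazy-flags idea those helpers implement, for a couple of 32-bit cases only; the names and the flag subset are illustrative.

    #include <stdint.h>

    enum { FLAG_C = 0x001, FLAG_Z = 0x040, FLAG_S = 0x080 };   /* EFLAGS subset */
    typedef enum { OP_ADDL, OP_SUBL, OP_LOGICL } cc_op_t;

    /* dst is the instruction's result, src the second operand, mirroring the
     * CC_DST/CC_SRC convention used for these ops. */
    static uint32_t cc_compute_all_sketch(cc_op_t op, uint32_t src, uint32_t dst)
    {
        uint32_t flags = 0;
        switch (op) {
        case OP_ADDL: {
            uint32_t a = dst - src;          /* recover the first operand */
            if (dst < a)
                flags |= FLAG_C;             /* unsigned wrap on the add */
            break;
        }
        case OP_SUBL: {
            uint32_t a = dst + src;          /* recover the first operand */
            if (a < src)
                flags |= FLAG_C;             /* borrow on the subtract */
            break;
        }
        case OP_LOGICL:
            break;                           /* logic ops clear CF */
        }
        if (dst == 0)
            flags |= FLAG_Z;
        if (dst & 0x80000000u)
            flags |= FLAG_S;
        return flags;
    }

helper_cc_compute_c() in the same hunk is the carry-only variant of the same switch.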
trunk/src/recompiler/target-i386/ops_sse.h
r36140 r36170 17 17 * You should have received a copy of the GNU Lesser General Public 18 18 * License along with this library; if not, write to the Free Software 19 * Foundation, Inc., 5 9 Temple Place, Suite 330, Boston, MA 02111-1307USA19 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA 20 20 */ 21 21 … … 69 69 #endif 70 70 } 71 FORCE_RET();72 71 } 73 72 … … 115 114 #endif 116 115 } 117 FORCE_RET();118 116 } 119 117 … … 136 134 #endif 137 135 } 138 FORCE_RET();139 136 } 140 137 … … 174 171 #endif 175 172 } 176 FORCE_RET();177 173 } 178 174 … … 193 189 #endif 194 190 } 195 FORCE_RET();196 191 } 197 192 … … 212 207 #endif 213 208 } 214 FORCE_RET();215 209 } 216 210 … … 227 221 for(i = 16 - shift; i < 16; i++) 228 222 d->B(i) = 0; 229 FORCE_RET();230 223 } 231 224 … … 241 234 for(i = 0; i < shift; i++) 242 235 d->B(i) = 0; 243 FORCE_RET();244 236 } 245 237 #endif … … 443 435 (int16_t)s->W(2*i+1) * (int16_t)d->W(2*i+1); 444 436 } 445 FORCE_RET();446 437 } 447 438 … … 490 481 stb(a0 + i, d->B(i)); 491 482 } 492 FORCE_RET();493 483 } 494 484 … … 928 918 ret = float32_compare_quiet(s0, s1, &env->sse_status); 929 919 CC_SRC = comis_eflags[ret + 1]; 930 FORCE_RET();931 920 } 932 921 … … 940 929 ret = float32_compare(s0, s1, &env->sse_status); 941 930 CC_SRC = comis_eflags[ret + 1]; 942 FORCE_RET();943 931 } 944 932 … … 952 940 ret = float64_compare_quiet(d0, d1, &env->sse_status); 953 941 CC_SRC = comis_eflags[ret + 1]; 954 FORCE_RET();955 942 } 956 943 … … 964 951 ret = float64_compare(d0, d1, &env->sse_status); 965 952 CC_SRC = comis_eflags[ret + 1]; 966 FORCE_RET();967 953 } 968 954 … … 991 977 uint32_t val; 992 978 val = 0; 993 val |= (s-> XMM_B(0) >> 7);994 val |= (s-> XMM_B(1) >> 6) & 0x02;995 val |= (s-> XMM_B(2) >> 5) & 0x04;996 val |= (s-> XMM_B(3) >> 4) & 0x08;997 val |= (s-> XMM_B(4) >> 3) & 0x10;998 val |= (s-> XMM_B(5) >> 2) & 0x20;999 val |= (s-> XMM_B(6) >> 1) & 0x40;1000 val |= (s-> XMM_B(7)) & 0x80;1001 #if SHIFT == 1 1002 val |= (s-> XMM_B(8) << 1) & 0x0100;1003 val |= (s-> XMM_B(9) << 2) & 0x0200;1004 val |= (s-> XMM_B(10) << 3) & 0x0400;1005 val |= (s-> XMM_B(11) << 4) & 0x0800;1006 val |= (s-> XMM_B(12) << 5) & 0x1000;1007 val |= (s-> XMM_B(13) << 6) & 0x2000;1008 val |= (s-> XMM_B(14) << 7) & 0x4000;1009 val |= (s-> XMM_B(15) << 8) & 0x8000;979 val |= (s->B(0) >> 7); 980 val |= (s->B(1) >> 6) & 0x02; 981 val |= (s->B(2) >> 5) & 0x04; 982 val |= (s->B(3) >> 4) & 0x08; 983 val |= (s->B(4) >> 3) & 0x10; 984 val |= (s->B(5) >> 2) & 0x20; 985 val |= (s->B(6) >> 1) & 0x40; 986 val |= (s->B(7)) & 0x80; 987 #if SHIFT == 1 988 val |= (s->B(8) << 1) & 0x0100; 989 val |= (s->B(9) << 2) & 0x0200; 990 val |= (s->B(10) << 3) & 0x0400; 991 val |= (s->B(11) << 4) & 0x0800; 992 val |= (s->B(12) << 5) & 0x1000; 993 val |= (s->B(13) << 6) & 0x2000; 994 val |= (s->B(14) << 7) & 0x4000; 995 val |= (s->B(15) << 8) & 0x8000; 1010 996 #endif 1011 997 return val; … … 1510 1496 d->elem(0) = F(0);\ 1511 1497 d->elem(1) = F(1);\ 1512 d->elem(2) = F(2);\1513 d->elem(3) = F(3);\1514 if (num > 3) {\1515 d->elem(4) = F(4);\1516 d->elem(5) = F(5);\1517 if (num > 5) {\1498 if (num > 2) {\ 1499 d->elem(2) = F(2);\ 1500 d->elem(3) = F(3);\ 1501 if (num > 4) {\ 1502 d->elem(4) = F(4);\ 1503 d->elem(5) = F(5);\ 1518 1504 d->elem(6) = F(6);\ 1519 1505 d->elem(7) = F(7);\ -
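
One detail in the ops_sse.h diff above: glue(helper_pmovmskb, SUFFIX) now reads its source through the width-generic B() accessor instead of XMM_B(). Functionally the helper gathers the sign bit of each source byte into a bitmask; a plain-array sketch of that operation (the function name and array form are illustrative):

    #include <stddef.h>
    #include <stdint.h>

    /* PMOVMSKB: bit i of the result is the most significant bit of byte i
     * of the source (8 bytes for the MMX form, 16 for the SSE form). */
    static uint32_t pmovmskb_sketch(const uint8_t *src, size_t nbytes)
    {
        uint32_t mask = 0;
        for (size_t i = 0; i < nbytes; i++)
            mask |= (uint32_t)(src[i] >> 7) << i;
        return mask;
    }

The open-coded shifts in the helper above are the unrolled form of this loop.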
trunk/src/recompiler/target-i386/ops_sse_header.h
r33656 r36170 16 16 * You should have received a copy of the GNU Lesser General Public 17 17 * License along with this library; if not, write to the Free Software 18 * Foundation, Inc., 5 9 Temple Place, Suite 330, Boston, MA 02111-1307USA18 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA 19 19 */ 20 20 … … 36 36 #endif 37 37 38 DEF_HELPER(void, glue(helper_psrlw, SUFFIX), (Reg *d, Reg *s)) 39 DEF_HELPER(void, glue(helper_psraw, SUFFIX), (Reg *d, Reg *s)) 40 DEF_HELPER(void, glue(helper_psllw, SUFFIX), (Reg *d, Reg *s)) 41 DEF_HELPER(void, glue(helper_psrld, SUFFIX), (Reg *d, Reg *s)) 42 DEF_HELPER(void, glue(helper_psrad, SUFFIX), (Reg *d, Reg *s)) 43 DEF_HELPER(void, glue(helper_pslld, SUFFIX), (Reg *d, Reg *s)) 44 DEF_HELPER(void, glue(helper_psrlq, SUFFIX), (Reg *d, Reg *s)) 45 DEF_HELPER(void, glue(helper_psllq, SUFFIX), (Reg *d, Reg *s)) 46 47 #if SHIFT == 1 48 DEF_HELPER(void, glue(helper_psrldq, SUFFIX), (Reg *d, Reg *s)) 49 DEF_HELPER(void, glue(helper_pslldq, SUFFIX), (Reg *d, Reg *s)) 38 #define dh_alias_Reg ptr 39 #define dh_alias_XMMReg ptr 40 #define dh_alias_MMXReg ptr 41 #define dh_ctype_Reg Reg * 42 #define dh_ctype_XMMReg XMMReg * 43 #define dh_ctype_MMXReg MMXReg * 44 45 DEF_HELPER_2(glue(psrlw, SUFFIX), void, Reg, Reg) 46 DEF_HELPER_2(glue(psraw, SUFFIX), void, Reg, Reg) 47 DEF_HELPER_2(glue(psllw, SUFFIX), void, Reg, Reg) 48 DEF_HELPER_2(glue(psrld, SUFFIX), void, Reg, Reg) 49 DEF_HELPER_2(glue(psrad, SUFFIX), void, Reg, Reg) 50 DEF_HELPER_2(glue(pslld, SUFFIX), void, Reg, Reg) 51 DEF_HELPER_2(glue(psrlq, SUFFIX), void, Reg, Reg) 52 DEF_HELPER_2(glue(psllq, SUFFIX), void, Reg, Reg) 53 54 #if SHIFT == 1 55 DEF_HELPER_2(glue(psrldq, SUFFIX), void, Reg, Reg) 56 DEF_HELPER_2(glue(pslldq, SUFFIX), void, Reg, Reg) 50 57 #endif 51 58 52 59 #define SSE_HELPER_B(name, F)\ 53 DEF_HELPER (void, glue(name, SUFFIX), (Reg *d, Reg *s))60 DEF_HELPER_2(glue(name, SUFFIX), void, Reg, Reg) 54 61 55 62 #define SSE_HELPER_W(name, F)\ 56 DEF_HELPER (void, glue(name, SUFFIX), (Reg *d, Reg *s))63 DEF_HELPER_2(glue(name, SUFFIX), void, Reg, Reg) 57 64 58 65 #define SSE_HELPER_L(name, F)\ 59 DEF_HELPER (void, glue(name, SUFFIX), (Reg *d, Reg *s))66 DEF_HELPER_2(glue(name, SUFFIX), void, Reg, Reg) 60 67 61 68 #define SSE_HELPER_Q(name, F)\ 62 DEF_HELPER (void, glue(name, SUFFIX), (Reg *d, Reg *s))63 64 SSE_HELPER_B( helper_paddb, FADD)65 SSE_HELPER_W( helper_paddw, FADD)66 SSE_HELPER_L( helper_paddl, FADD)67 SSE_HELPER_Q( helper_paddq, FADD)68 69 SSE_HELPER_B( helper_psubb, FSUB)70 SSE_HELPER_W( helper_psubw, FSUB)71 SSE_HELPER_L( helper_psubl, FSUB)72 SSE_HELPER_Q( helper_psubq, FSUB)73 74 SSE_HELPER_B( helper_paddusb, FADDUB)75 SSE_HELPER_B( helper_paddsb, FADDSB)76 SSE_HELPER_B( helper_psubusb, FSUBUB)77 SSE_HELPER_B( helper_psubsb, FSUBSB)78 79 SSE_HELPER_W( helper_paddusw, FADDUW)80 SSE_HELPER_W( helper_paddsw, FADDSW)81 SSE_HELPER_W( helper_psubusw, FSUBUW)82 SSE_HELPER_W( helper_psubsw, FSUBSW)83 84 SSE_HELPER_B( helper_pminub, FMINUB)85 SSE_HELPER_B( helper_pmaxub, FMAXUB)86 87 SSE_HELPER_W( helper_pminsw, FMINSW)88 SSE_HELPER_W( helper_pmaxsw, FMAXSW)89 90 SSE_HELPER_Q( helper_pand, FAND)91 SSE_HELPER_Q( helper_pandn, FANDN)92 SSE_HELPER_Q( helper_por, FOR)93 SSE_HELPER_Q( helper_pxor, FXOR)94 95 SSE_HELPER_B( helper_pcmpgtb, FCMPGTB)96 SSE_HELPER_W( helper_pcmpgtw, FCMPGTW)97 SSE_HELPER_L( helper_pcmpgtl, FCMPGTL)98 99 SSE_HELPER_B( helper_pcmpeqb, FCMPEQ)100 SSE_HELPER_W( helper_pcmpeqw, FCMPEQ)101 SSE_HELPER_L( helper_pcmpeql, FCMPEQ)102 103 
SSE_HELPER_W( helper_pmullw, FMULLW)69 DEF_HELPER_2(glue(name, SUFFIX), void, Reg, Reg) 70 71 SSE_HELPER_B(paddb, FADD) 72 SSE_HELPER_W(paddw, FADD) 73 SSE_HELPER_L(paddl, FADD) 74 SSE_HELPER_Q(paddq, FADD) 75 76 SSE_HELPER_B(psubb, FSUB) 77 SSE_HELPER_W(psubw, FSUB) 78 SSE_HELPER_L(psubl, FSUB) 79 SSE_HELPER_Q(psubq, FSUB) 80 81 SSE_HELPER_B(paddusb, FADDUB) 82 SSE_HELPER_B(paddsb, FADDSB) 83 SSE_HELPER_B(psubusb, FSUBUB) 84 SSE_HELPER_B(psubsb, FSUBSB) 85 86 SSE_HELPER_W(paddusw, FADDUW) 87 SSE_HELPER_W(paddsw, FADDSW) 88 SSE_HELPER_W(psubusw, FSUBUW) 89 SSE_HELPER_W(psubsw, FSUBSW) 90 91 SSE_HELPER_B(pminub, FMINUB) 92 SSE_HELPER_B(pmaxub, FMAXUB) 93 94 SSE_HELPER_W(pminsw, FMINSW) 95 SSE_HELPER_W(pmaxsw, FMAXSW) 96 97 SSE_HELPER_Q(pand, FAND) 98 SSE_HELPER_Q(pandn, FANDN) 99 SSE_HELPER_Q(por, FOR) 100 SSE_HELPER_Q(pxor, FXOR) 101 102 SSE_HELPER_B(pcmpgtb, FCMPGTB) 103 SSE_HELPER_W(pcmpgtw, FCMPGTW) 104 SSE_HELPER_L(pcmpgtl, FCMPGTL) 105 106 SSE_HELPER_B(pcmpeqb, FCMPEQ) 107 SSE_HELPER_W(pcmpeqw, FCMPEQ) 108 SSE_HELPER_L(pcmpeql, FCMPEQ) 109 110 SSE_HELPER_W(pmullw, FMULLW) 104 111 #if SHIFT == 0 105 SSE_HELPER_W( helper_pmulhrw, FMULHRW)106 #endif 107 SSE_HELPER_W( helper_pmulhuw, FMULHUW)108 SSE_HELPER_W( helper_pmulhw, FMULHW)109 110 SSE_HELPER_B( helper_pavgb, FAVG)111 SSE_HELPER_W( helper_pavgw, FAVG)112 113 DEF_HELPER (void, glue(helper_pmuludq, SUFFIX) , (Reg *d, Reg *s))114 DEF_HELPER (void, glue(helper_pmaddwd, SUFFIX) , (Reg *d, Reg *s))115 116 DEF_HELPER (void, glue(helper_psadbw, SUFFIX) , (Reg *d, Reg *s))117 DEF_HELPER (void, glue(helper_maskmov, SUFFIX) , (Reg *d, Reg *s, target_ulong a0))118 DEF_HELPER (void, glue(helper_movl_mm_T0, SUFFIX) , (Reg *d, uint32_t val))112 SSE_HELPER_W(pmulhrw, FMULHRW) 113 #endif 114 SSE_HELPER_W(pmulhuw, FMULHUW) 115 SSE_HELPER_W(pmulhw, FMULHW) 116 117 SSE_HELPER_B(pavgb, FAVG) 118 SSE_HELPER_W(pavgw, FAVG) 119 120 DEF_HELPER_2(glue(pmuludq, SUFFIX), void, Reg, Reg) 121 DEF_HELPER_2(glue(pmaddwd, SUFFIX), void, Reg, Reg) 122 123 DEF_HELPER_2(glue(psadbw, SUFFIX), void, Reg, Reg) 124 DEF_HELPER_3(glue(maskmov, SUFFIX), void, Reg, Reg, tl) 125 DEF_HELPER_2(glue(movl_mm_T0, SUFFIX), void, Reg, i32) 119 126 #ifdef TARGET_X86_64 120 DEF_HELPER (void, glue(helper_movq_mm_T0, SUFFIX) , (Reg *d, uint64_t val))127 DEF_HELPER_2(glue(movq_mm_T0, SUFFIX), void, Reg, i64) 121 128 #endif 122 129 123 130 #if SHIFT == 0 124 DEF_HELPER (void, glue(helper_pshufw, SUFFIX) , (Reg *d, Reg *s, int order))131 DEF_HELPER_3(glue(pshufw, SUFFIX), void, Reg, Reg, int) 125 132 #else 126 DEF_HELPER (void, helper_shufps, (Reg *d, Reg *s, int order))127 DEF_HELPER (void, helper_shufpd, (Reg *d, Reg *s, int order))128 DEF_HELPER (void, glue(helper_pshufd, SUFFIX) , (Reg *d, Reg *s, int order))129 DEF_HELPER (void, glue(helper_pshuflw, SUFFIX) , (Reg *d, Reg *s, int order))130 DEF_HELPER (void, glue(helper_pshufhw, SUFFIX) , (Reg *d, Reg *s, int order))133 DEF_HELPER_3(shufps, void, Reg, Reg, int) 134 DEF_HELPER_3(shufpd, void, Reg, Reg, int) 135 DEF_HELPER_3(glue(pshufd, SUFFIX), void, Reg, Reg, int) 136 DEF_HELPER_3(glue(pshuflw, SUFFIX), void, Reg, Reg, int) 137 DEF_HELPER_3(glue(pshufhw, SUFFIX), void, Reg, Reg, int) 131 138 #endif 132 139 … … 136 143 137 144 #define SSE_HELPER_S(name, F)\ 138 DEF_HELPER (void, helper_ ## name ## ps , (Reg *d, Reg *s)) \139 DEF_HELPER (void, helper_ ## name ## ss , (Reg *d, Reg *s)) \140 DEF_HELPER (void, helper_ ## name ## pd , (Reg *d, Reg *s)) \141 DEF_HELPER (void, helper_ ## name ## sd , (Reg *d, Reg *s))145 DEF_HELPER_2(name ## 
ps , void, Reg, Reg) \ 146 DEF_HELPER_2(name ## ss , void, Reg, Reg) \ 147 DEF_HELPER_2(name ## pd , void, Reg, Reg) \ 148 DEF_HELPER_2(name ## sd , void, Reg, Reg) 142 149 143 150 SSE_HELPER_S(add, FPU_ADD) … … 150 157 151 158 152 DEF_HELPER (void, helper_cvtps2pd, (Reg *d, Reg *s))153 DEF_HELPER (void, helper_cvtpd2ps, (Reg *d, Reg *s))154 DEF_HELPER (void, helper_cvtss2sd, (Reg *d, Reg *s))155 DEF_HELPER (void, helper_cvtsd2ss, (Reg *d, Reg *s))156 DEF_HELPER (void, helper_cvtdq2ps, (Reg *d, Reg *s))157 DEF_HELPER (void, helper_cvtdq2pd, (Reg *d, Reg *s))158 DEF_HELPER (void, helper_cvtpi2ps, (XMMReg *d, MMXReg *s))159 DEF_HELPER (void, helper_cvtpi2pd, (XMMReg *d, MMXReg *s))160 DEF_HELPER (void, helper_cvtsi2ss, (XMMReg *d, uint32_t val))161 DEF_HELPER (void, helper_cvtsi2sd, (XMMReg *d, uint32_t val))159 DEF_HELPER_2(cvtps2pd, void, Reg, Reg) 160 DEF_HELPER_2(cvtpd2ps, void, Reg, Reg) 161 DEF_HELPER_2(cvtss2sd, void, Reg, Reg) 162 DEF_HELPER_2(cvtsd2ss, void, Reg, Reg) 163 DEF_HELPER_2(cvtdq2ps, void, Reg, Reg) 164 DEF_HELPER_2(cvtdq2pd, void, Reg, Reg) 165 DEF_HELPER_2(cvtpi2ps, void, XMMReg, MMXReg) 166 DEF_HELPER_2(cvtpi2pd, void, XMMReg, MMXReg) 167 DEF_HELPER_2(cvtsi2ss, void, XMMReg, i32) 168 DEF_HELPER_2(cvtsi2sd, void, XMMReg, i32) 162 169 163 170 #ifdef TARGET_X86_64 164 DEF_HELPER (void, helper_cvtsq2ss, (XMMReg *d, uint64_t val))165 DEF_HELPER (void, helper_cvtsq2sd, (XMMReg *d, uint64_t val))166 #endif 167 168 DEF_HELPER (void, helper_cvtps2dq, (XMMReg *d, XMMReg *s))169 DEF_HELPER (void, helper_cvtpd2dq, (XMMReg *d, XMMReg *s))170 DEF_HELPER (void, helper_cvtps2pi, (MMXReg *d, XMMReg *s))171 DEF_HELPER (void, helper_cvtpd2pi, (MMXReg *d, XMMReg *s))172 DEF_HELPER (int32_t, helper_cvtss2si, (XMMReg *s))173 DEF_HELPER (int32_t, helper_cvtsd2si, (XMMReg *s))171 DEF_HELPER_2(cvtsq2ss, void, XMMReg, i64) 172 DEF_HELPER_2(cvtsq2sd, void, XMMReg, i64) 173 #endif 174 175 DEF_HELPER_2(cvtps2dq, void, XMMReg, XMMReg) 176 DEF_HELPER_2(cvtpd2dq, void, XMMReg, XMMReg) 177 DEF_HELPER_2(cvtps2pi, void, MMXReg, XMMReg) 178 DEF_HELPER_2(cvtpd2pi, void, MMXReg, XMMReg) 179 DEF_HELPER_1(cvtss2si, s32, XMMReg) 180 DEF_HELPER_1(cvtsd2si, s32, XMMReg) 174 181 #ifdef TARGET_X86_64 175 DEF_HELPER (int64_t, helper_cvtss2sq, (XMMReg *s))176 DEF_HELPER (int64_t, helper_cvtsd2sq, (XMMReg *s))177 #endif 178 179 DEF_HELPER (void, helper_cvttps2dq, (XMMReg *d, XMMReg *s))180 DEF_HELPER (void, helper_cvttpd2dq, (XMMReg *d, XMMReg *s))181 DEF_HELPER (void, helper_cvttps2pi, (MMXReg *d, XMMReg *s))182 DEF_HELPER (void, helper_cvttpd2pi, (MMXReg *d, XMMReg *s))183 DEF_HELPER (int32_t, helper_cvttss2si, (XMMReg *s))184 DEF_HELPER (int32_t, helper_cvttsd2si, (XMMReg *s))182 DEF_HELPER_1(cvtss2sq, s64, XMMReg) 183 DEF_HELPER_1(cvtsd2sq, s64, XMMReg) 184 #endif 185 186 DEF_HELPER_2(cvttps2dq, void, XMMReg, XMMReg) 187 DEF_HELPER_2(cvttpd2dq, void, XMMReg, XMMReg) 188 DEF_HELPER_2(cvttps2pi, void, MMXReg, XMMReg) 189 DEF_HELPER_2(cvttpd2pi, void, MMXReg, XMMReg) 190 DEF_HELPER_1(cvttss2si, s32, XMMReg) 191 DEF_HELPER_1(cvttsd2si, s32, XMMReg) 185 192 #ifdef TARGET_X86_64 186 DEF_HELPER (int64_t, helper_cvttss2sq, (XMMReg *s))187 DEF_HELPER (int64_t, helper_cvttsd2sq, (XMMReg *s))188 #endif 189 190 DEF_HELPER (void, helper_rsqrtps, (XMMReg *d, XMMReg *s))191 DEF_HELPER (void, helper_rsqrtss, (XMMReg *d, XMMReg *s))192 DEF_HELPER (void, helper_rcpps, (XMMReg *d, XMMReg *s))193 DEF_HELPER (void, helper_rcpss, (XMMReg *d, XMMReg *s))194 DEF_HELPER (void, helper_haddps, (XMMReg *d, XMMReg *s))195 DEF_HELPER (void, 
helper_haddpd, (XMMReg *d, XMMReg *s))196 DEF_HELPER (void, helper_hsubps, (XMMReg *d, XMMReg *s))197 DEF_HELPER (void, helper_hsubpd, (XMMReg *d, XMMReg *s))198 DEF_HELPER (void, helper_addsubps, (XMMReg *d, XMMReg *s))199 DEF_HELPER (void, helper_addsubpd, (XMMReg *d, XMMReg *s))193 DEF_HELPER_1(cvttss2sq, s64, XMMReg) 194 DEF_HELPER_1(cvttsd2sq, s64, XMMReg) 195 #endif 196 197 DEF_HELPER_2(rsqrtps, void, XMMReg, XMMReg) 198 DEF_HELPER_2(rsqrtss, void, XMMReg, XMMReg) 199 DEF_HELPER_2(rcpps, void, XMMReg, XMMReg) 200 DEF_HELPER_2(rcpss, void, XMMReg, XMMReg) 201 DEF_HELPER_2(haddps, void, XMMReg, XMMReg) 202 DEF_HELPER_2(haddpd, void, XMMReg, XMMReg) 203 DEF_HELPER_2(hsubps, void, XMMReg, XMMReg) 204 DEF_HELPER_2(hsubpd, void, XMMReg, XMMReg) 205 DEF_HELPER_2(addsubps, void, XMMReg, XMMReg) 206 DEF_HELPER_2(addsubpd, void, XMMReg, XMMReg) 200 207 201 208 #define SSE_HELPER_CMP(name, F)\ 202 DEF_HELPER (void, helper_ ## name ## ps , (Reg *d, Reg *s)) \203 DEF_HELPER (void, helper_ ## name ## ss , (Reg *d, Reg *s)) \204 DEF_HELPER (void, helper_ ## name ## pd , (Reg *d, Reg *s)) \205 DEF_HELPER (void, helper_ ## name ## sd , (Reg *d, Reg *s))209 DEF_HELPER_2( name ## ps , void, Reg, Reg) \ 210 DEF_HELPER_2( name ## ss , void, Reg, Reg) \ 211 DEF_HELPER_2( name ## pd , void, Reg, Reg) \ 212 DEF_HELPER_2( name ## sd , void, Reg, Reg) 206 213 207 214 SSE_HELPER_CMP(cmpeq, FPU_CMPEQ) … … 214 221 SSE_HELPER_CMP(cmpord, FPU_CMPORD) 215 222 216 DEF_HELPER (void, helper_ucomiss, (Reg *d, Reg *s))217 DEF_HELPER (void, helper_comiss, (Reg *d, Reg *s))218 DEF_HELPER (void, helper_ucomisd, (Reg *d, Reg *s))219 DEF_HELPER (void, helper_comisd, (Reg *d, Reg *s))220 DEF_HELPER (uint32_t, helper_movmskps, (Reg *s))221 DEF_HELPER (uint32_t, helper_movmskpd, (Reg *s))222 #endif 223 224 DEF_HELPER (uint32_t, glue(helper_pmovmskb, SUFFIX), (Reg *s))225 DEF_HELPER (void, glue(helper_packsswb, SUFFIX) , (Reg *d, Reg *s))226 DEF_HELPER (void, glue(helper_packuswb, SUFFIX) , (Reg *d, Reg *s))227 DEF_HELPER (void, glue(helper_packssdw, SUFFIX) , (Reg *d, Reg *s))223 DEF_HELPER_2(ucomiss, void, Reg, Reg) 224 DEF_HELPER_2(comiss, void, Reg, Reg) 225 DEF_HELPER_2(ucomisd, void, Reg, Reg) 226 DEF_HELPER_2(comisd, void, Reg, Reg) 227 DEF_HELPER_1(movmskps, i32, Reg) 228 DEF_HELPER_1(movmskpd, i32, Reg) 229 #endif 230 231 DEF_HELPER_1(glue(pmovmskb, SUFFIX), i32, Reg) 232 DEF_HELPER_2(glue(packsswb, SUFFIX), void, Reg, Reg) 233 DEF_HELPER_2(glue(packuswb, SUFFIX), void, Reg, Reg) 234 DEF_HELPER_2(glue(packssdw, SUFFIX), void, Reg, Reg) 228 235 #define UNPCK_OP(base_name, base) \ 229 DEF_HELPER (void, glue(helper_punpck ## base_name ## bw, SUFFIX) , (Reg *d, Reg *s)) \230 DEF_HELPER (void, glue(helper_punpck ## base_name ## wd, SUFFIX) , (Reg *d, Reg *s)) \231 DEF_HELPER (void, glue(helper_punpck ## base_name ## dq, SUFFIX) , (Reg *d, Reg *s))236 DEF_HELPER_2(glue(punpck ## base_name ## bw, SUFFIX) , void, Reg, Reg) \ 237 DEF_HELPER_2(glue(punpck ## base_name ## wd, SUFFIX) , void, Reg, Reg) \ 238 DEF_HELPER_2(glue(punpck ## base_name ## dq, SUFFIX) , void, Reg, Reg) 232 239 233 240 UNPCK_OP(l, 0) … … 235 242 236 243 #if SHIFT == 1 237 DEF_HELPER (void, glue(helper_punpcklqdq, SUFFIX) , (Reg *d, Reg *s))238 DEF_HELPER (void, glue(helper_punpckhqdq, SUFFIX) , (Reg *d, Reg *s))244 DEF_HELPER_2(glue(punpcklqdq, SUFFIX), void, Reg, Reg) 245 DEF_HELPER_2(glue(punpckhqdq, SUFFIX), void, Reg, Reg) 239 246 #endif 240 247 241 248 /* 3DNow! 
float ops */ 242 249 #if SHIFT == 0 243 DEF_HELPER (void, helper_pi2fd, (MMXReg *d, MMXReg *s))244 DEF_HELPER (void, helper_pi2fw, (MMXReg *d, MMXReg *s))245 DEF_HELPER (void, helper_pf2id, (MMXReg *d, MMXReg *s))246 DEF_HELPER (void, helper_pf2iw, (MMXReg *d, MMXReg *s))247 DEF_HELPER (void, helper_pfacc, (MMXReg *d, MMXReg *s))248 DEF_HELPER (void, helper_pfadd, (MMXReg *d, MMXReg *s))249 DEF_HELPER (void, helper_pfcmpeq, (MMXReg *d, MMXReg *s))250 DEF_HELPER (void, helper_pfcmpge, (MMXReg *d, MMXReg *s))251 DEF_HELPER (void, helper_pfcmpgt, (MMXReg *d, MMXReg *s))252 DEF_HELPER (void, helper_pfmax, (MMXReg *d, MMXReg *s))253 DEF_HELPER (void, helper_pfmin, (MMXReg *d, MMXReg *s))254 DEF_HELPER (void, helper_pfmul, (MMXReg *d, MMXReg *s))255 DEF_HELPER (void, helper_pfnacc, (MMXReg *d, MMXReg *s))256 DEF_HELPER (void, helper_pfpnacc, (MMXReg *d, MMXReg *s))257 DEF_HELPER (void, helper_pfrcp, (MMXReg *d, MMXReg *s))258 DEF_HELPER (void, helper_pfrsqrt, (MMXReg *d, MMXReg *s))259 DEF_HELPER (void, helper_pfsub, (MMXReg *d, MMXReg *s))260 DEF_HELPER (void, helper_pfsubr, (MMXReg *d, MMXReg *s))261 DEF_HELPER (void, helper_pswapd, (MMXReg *d, MMXReg *s))250 DEF_HELPER_2(pi2fd, void, MMXReg, MMXReg) 251 DEF_HELPER_2(pi2fw, void, MMXReg, MMXReg) 252 DEF_HELPER_2(pf2id, void, MMXReg, MMXReg) 253 DEF_HELPER_2(pf2iw, void, MMXReg, MMXReg) 254 DEF_HELPER_2(pfacc, void, MMXReg, MMXReg) 255 DEF_HELPER_2(pfadd, void, MMXReg, MMXReg) 256 DEF_HELPER_2(pfcmpeq, void, MMXReg, MMXReg) 257 DEF_HELPER_2(pfcmpge, void, MMXReg, MMXReg) 258 DEF_HELPER_2(pfcmpgt, void, MMXReg, MMXReg) 259 DEF_HELPER_2(pfmax, void, MMXReg, MMXReg) 260 DEF_HELPER_2(pfmin, void, MMXReg, MMXReg) 261 DEF_HELPER_2(pfmul, void, MMXReg, MMXReg) 262 DEF_HELPER_2(pfnacc, void, MMXReg, MMXReg) 263 DEF_HELPER_2(pfpnacc, void, MMXReg, MMXReg) 264 DEF_HELPER_2(pfrcp, void, MMXReg, MMXReg) 265 DEF_HELPER_2(pfrsqrt, void, MMXReg, MMXReg) 266 DEF_HELPER_2(pfsub, void, MMXReg, MMXReg) 267 DEF_HELPER_2(pfsubr, void, MMXReg, MMXReg) 268 DEF_HELPER_2(pswapd, void, MMXReg, MMXReg) 262 269 #endif 263 270 264 271 /* SSSE3 op helpers */ 265 DEF_HELPER (void, glue(helper_phaddw, SUFFIX), (Reg *d, Reg *s))266 DEF_HELPER (void, glue(helper_phaddd, SUFFIX), (Reg *d, Reg *s))267 DEF_HELPER (void, glue(helper_phaddsw, SUFFIX), (Reg *d, Reg *s))268 DEF_HELPER (void, glue(helper_phsubw, SUFFIX), (Reg *d, Reg *s))269 DEF_HELPER (void, glue(helper_phsubd, SUFFIX), (Reg *d, Reg *s))270 DEF_HELPER (void, glue(helper_phsubsw, SUFFIX), (Reg *d, Reg *s))271 DEF_HELPER (void, glue(helper_pabsb, SUFFIX), (Reg *d, Reg *s))272 DEF_HELPER (void, glue(helper_pabsw, SUFFIX), (Reg *d, Reg *s))273 DEF_HELPER (void, glue(helper_pabsd, SUFFIX), (Reg *d, Reg *s))274 DEF_HELPER (void, glue(helper_pmaddubsw, SUFFIX), (Reg *d, Reg *s))275 DEF_HELPER (void, glue(helper_pmulhrsw, SUFFIX), (Reg *d, Reg *s))276 DEF_HELPER (void, glue(helper_pshufb, SUFFIX), (Reg *d, Reg *s))277 DEF_HELPER (void, glue(helper_psignb, SUFFIX), (Reg *d, Reg *s))278 DEF_HELPER (void, glue(helper_psignw, SUFFIX), (Reg *d, Reg *s))279 DEF_HELPER (void, glue(helper_psignd, SUFFIX), (Reg *d, Reg *s))280 DEF_HELPER (void, glue(helper_palignr, SUFFIX), (Reg *d, Reg *s, int32_t shift))272 DEF_HELPER_2(glue(phaddw, SUFFIX), void, Reg, Reg) 273 DEF_HELPER_2(glue(phaddd, SUFFIX), void, Reg, Reg) 274 DEF_HELPER_2(glue(phaddsw, SUFFIX), void, Reg, Reg) 275 DEF_HELPER_2(glue(phsubw, SUFFIX), void, Reg, Reg) 276 DEF_HELPER_2(glue(phsubd, SUFFIX), void, Reg, Reg) 277 DEF_HELPER_2(glue(phsubsw, SUFFIX), void, Reg, Reg) 278 
DEF_HELPER_2(glue(pabsb, SUFFIX), void, Reg, Reg) 279 DEF_HELPER_2(glue(pabsw, SUFFIX), void, Reg, Reg) 280 DEF_HELPER_2(glue(pabsd, SUFFIX), void, Reg, Reg) 281 DEF_HELPER_2(glue(pmaddubsw, SUFFIX), void, Reg, Reg) 282 DEF_HELPER_2(glue(pmulhrsw, SUFFIX), void, Reg, Reg) 283 DEF_HELPER_2(glue(pshufb, SUFFIX), void, Reg, Reg) 284 DEF_HELPER_2(glue(psignb, SUFFIX), void, Reg, Reg) 285 DEF_HELPER_2(glue(psignw, SUFFIX), void, Reg, Reg) 286 DEF_HELPER_2(glue(psignd, SUFFIX), void, Reg, Reg) 287 DEF_HELPER_3(glue(palignr, SUFFIX), void, Reg, Reg, s32) 281 288 282 289 /* SSE4.1 op helpers */ 283 290 #if SHIFT == 1 284 DEF_HELPER (void, glue(helper_pblendvb, SUFFIX), (Reg *d, Reg *s))285 DEF_HELPER (void, glue(helper_blendvps, SUFFIX), (Reg *d, Reg *s))286 DEF_HELPER (void, glue(helper_blendvpd, SUFFIX), (Reg *d, Reg *s))287 DEF_HELPER (void, glue(helper_ptest, SUFFIX), (Reg *d, Reg *s))288 DEF_HELPER (void, glue(helper_pmovsxbw, SUFFIX), (Reg *d, Reg *s))289 DEF_HELPER (void, glue(helper_pmovsxbd, SUFFIX), (Reg *d, Reg *s))290 DEF_HELPER (void, glue(helper_pmovsxbq, SUFFIX), (Reg *d, Reg *s))291 DEF_HELPER (void, glue(helper_pmovsxwd, SUFFIX), (Reg *d, Reg *s))292 DEF_HELPER (void, glue(helper_pmovsxwq, SUFFIX), (Reg *d, Reg *s))293 DEF_HELPER (void, glue(helper_pmovsxdq, SUFFIX), (Reg *d, Reg *s))294 DEF_HELPER (void, glue(helper_pmovzxbw, SUFFIX), (Reg *d, Reg *s))295 DEF_HELPER (void, glue(helper_pmovzxbd, SUFFIX), (Reg *d, Reg *s))296 DEF_HELPER (void, glue(helper_pmovzxbq, SUFFIX), (Reg *d, Reg *s))297 DEF_HELPER (void, glue(helper_pmovzxwd, SUFFIX), (Reg *d, Reg *s))298 DEF_HELPER (void, glue(helper_pmovzxwq, SUFFIX), (Reg *d, Reg *s))299 DEF_HELPER (void, glue(helper_pmovzxdq, SUFFIX), (Reg *d, Reg *s))300 DEF_HELPER (void, glue(helper_pmuldq, SUFFIX), (Reg *d, Reg *s))301 DEF_HELPER (void, glue(helper_pcmpeqq, SUFFIX), (Reg *d, Reg *s))302 DEF_HELPER (void, glue(helper_packusdw, SUFFIX), (Reg *d, Reg *s))303 DEF_HELPER (void, glue(helper_pminsb, SUFFIX), (Reg *d, Reg *s))304 DEF_HELPER (void, glue(helper_pminsd, SUFFIX), (Reg *d, Reg *s))305 DEF_HELPER (void, glue(helper_pminuw, SUFFIX), (Reg *d, Reg *s))306 DEF_HELPER (void, glue(helper_pminud, SUFFIX), (Reg *d, Reg *s))307 DEF_HELPER (void, glue(helper_pmaxsb, SUFFIX), (Reg *d, Reg *s))308 DEF_HELPER (void, glue(helper_pmaxsd, SUFFIX), (Reg *d, Reg *s))309 DEF_HELPER (void, glue(helper_pmaxuw, SUFFIX), (Reg *d, Reg *s))310 DEF_HELPER (void, glue(helper_pmaxud, SUFFIX), (Reg *d, Reg *s))311 DEF_HELPER (void, glue(helper_pmulld, SUFFIX), (Reg *d, Reg *s))312 DEF_HELPER (void, glue(helper_phminposuw, SUFFIX), (Reg *d, Reg *s))313 DEF_HELPER (void, glue(helper_roundps, SUFFIX), (Reg *d, Reg *s, uint32_t mode))314 DEF_HELPER (void, glue(helper_roundpd, SUFFIX), (Reg *d, Reg *s, uint32_t mode))315 DEF_HELPER (void, glue(helper_roundss, SUFFIX), (Reg *d, Reg *s, uint32_t mode))316 DEF_HELPER (void, glue(helper_roundsd, SUFFIX), (Reg *d, Reg *s, uint32_t mode))317 DEF_HELPER (void, glue(helper_blendps, SUFFIX), (Reg *d, Reg *s, uint32_t imm))318 DEF_HELPER (void, glue(helper_blendpd, SUFFIX), (Reg *d, Reg *s, uint32_t imm))319 DEF_HELPER (void, glue(helper_pblendw, SUFFIX), (Reg *d, Reg *s, uint32_t imm))320 DEF_HELPER (void, glue(helper_dpps, SUFFIX), (Reg *d, Reg *s, uint32_t mask))321 DEF_HELPER (void, glue(helper_dppd, SUFFIX), (Reg *d, Reg *s, uint32_t mask))322 DEF_HELPER (void, glue(helper_mpsadbw, SUFFIX), (Reg *d, Reg *s, uint32_t off))291 DEF_HELPER_2(glue(pblendvb, SUFFIX), void, Reg, Reg) 292 DEF_HELPER_2(glue(blendvps, SUFFIX), 
void, Reg, Reg) 293 DEF_HELPER_2(glue(blendvpd, SUFFIX), void, Reg, Reg) 294 DEF_HELPER_2(glue(ptest, SUFFIX), void, Reg, Reg) 295 DEF_HELPER_2(glue(pmovsxbw, SUFFIX), void, Reg, Reg) 296 DEF_HELPER_2(glue(pmovsxbd, SUFFIX), void, Reg, Reg) 297 DEF_HELPER_2(glue(pmovsxbq, SUFFIX), void, Reg, Reg) 298 DEF_HELPER_2(glue(pmovsxwd, SUFFIX), void, Reg, Reg) 299 DEF_HELPER_2(glue(pmovsxwq, SUFFIX), void, Reg, Reg) 300 DEF_HELPER_2(glue(pmovsxdq, SUFFIX), void, Reg, Reg) 301 DEF_HELPER_2(glue(pmovzxbw, SUFFIX), void, Reg, Reg) 302 DEF_HELPER_2(glue(pmovzxbd, SUFFIX), void, Reg, Reg) 303 DEF_HELPER_2(glue(pmovzxbq, SUFFIX), void, Reg, Reg) 304 DEF_HELPER_2(glue(pmovzxwd, SUFFIX), void, Reg, Reg) 305 DEF_HELPER_2(glue(pmovzxwq, SUFFIX), void, Reg, Reg) 306 DEF_HELPER_2(glue(pmovzxdq, SUFFIX), void, Reg, Reg) 307 DEF_HELPER_2(glue(pmuldq, SUFFIX), void, Reg, Reg) 308 DEF_HELPER_2(glue(pcmpeqq, SUFFIX), void, Reg, Reg) 309 DEF_HELPER_2(glue(packusdw, SUFFIX), void, Reg, Reg) 310 DEF_HELPER_2(glue(pminsb, SUFFIX), void, Reg, Reg) 311 DEF_HELPER_2(glue(pminsd, SUFFIX), void, Reg, Reg) 312 DEF_HELPER_2(glue(pminuw, SUFFIX), void, Reg, Reg) 313 DEF_HELPER_2(glue(pminud, SUFFIX), void, Reg, Reg) 314 DEF_HELPER_2(glue(pmaxsb, SUFFIX), void, Reg, Reg) 315 DEF_HELPER_2(glue(pmaxsd, SUFFIX), void, Reg, Reg) 316 DEF_HELPER_2(glue(pmaxuw, SUFFIX), void, Reg, Reg) 317 DEF_HELPER_2(glue(pmaxud, SUFFIX), void, Reg, Reg) 318 DEF_HELPER_2(glue(pmulld, SUFFIX), void, Reg, Reg) 319 DEF_HELPER_2(glue(phminposuw, SUFFIX), void, Reg, Reg) 320 DEF_HELPER_3(glue(roundps, SUFFIX), void, Reg, Reg, i32) 321 DEF_HELPER_3(glue(roundpd, SUFFIX), void, Reg, Reg, i32) 322 DEF_HELPER_3(glue(roundss, SUFFIX), void, Reg, Reg, i32) 323 DEF_HELPER_3(glue(roundsd, SUFFIX), void, Reg, Reg, i32) 324 DEF_HELPER_3(glue(blendps, SUFFIX), void, Reg, Reg, i32) 325 DEF_HELPER_3(glue(blendpd, SUFFIX), void, Reg, Reg, i32) 326 DEF_HELPER_3(glue(pblendw, SUFFIX), void, Reg, Reg, i32) 327 DEF_HELPER_3(glue(dpps, SUFFIX), void, Reg, Reg, i32) 328 DEF_HELPER_3(glue(dppd, SUFFIX), void, Reg, Reg, i32) 329 DEF_HELPER_3(glue(mpsadbw, SUFFIX), void, Reg, Reg, i32) 323 330 #endif 324 331 325 332 /* SSE4.2 op helpers */ 326 333 #if SHIFT == 1 327 DEF_HELPER(void, glue(helper_pcmpgtq, SUFFIX), (Reg *d, Reg *s)) 328 DEF_HELPER(void, glue(helper_pcmpestri, SUFFIX), (Reg *d, Reg *s, uint32_t ctl)) 329 DEF_HELPER(void, glue(helper_pcmpestrm, SUFFIX), (Reg *d, Reg *s, uint32_t ctl)) 330 DEF_HELPER(void, glue(helper_pcmpistri, SUFFIX), (Reg *d, Reg *s, uint32_t ctl)) 331 DEF_HELPER(void, glue(helper_pcmpistrm, SUFFIX), (Reg *d, Reg *s, uint32_t ctl)) 332 DEF_HELPER(target_ulong, helper_crc32, 333 (uint32_t crc1, target_ulong msg, uint32_t len)) 334 DEF_HELPER(target_ulong, helper_popcnt, (target_ulong n, uint32_t type)) 334 DEF_HELPER_2(glue(pcmpgtq, SUFFIX), void, Reg, Reg) 335 DEF_HELPER_3(glue(pcmpestri, SUFFIX), void, Reg, Reg, i32) 336 DEF_HELPER_3(glue(pcmpestrm, SUFFIX), void, Reg, Reg, i32) 337 DEF_HELPER_3(glue(pcmpistri, SUFFIX), void, Reg, Reg, i32) 338 DEF_HELPER_3(glue(pcmpistrm, SUFFIX), void, Reg, Reg, i32) 339 DEF_HELPER_3(crc32, tl, i32, tl, i32) 340 DEF_HELPER_2(popcnt, tl, tl, i32) 335 341 #endif 336 342 -
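
The ops_sse_header.h rewrite above replaces the old DEF_HELPER(return-type, name, (args)) declarations with the DEF_HELPER_n(name, ret, arg-types) form, while still building per-width helper names with glue(name, SUFFIX). The fragment below illustrates only the token-pasting part of that scheme with a stand-in DECLARE2 macro; it is not the real helper.h machinery, and the _mmx/_xmm suffixes are defined locally for the sketch.

    #define glue_(a, b) a##b
    #define glue(a, b)  glue_(a, b)

    /* Stand-in for a two-argument helper declaration macro. */
    #define DECLARE2(name) void glue(glue(helper_, name), SUFFIX)(void *d, void *s);

    #define SUFFIX _mmx
    DECLARE2(paddb)     /* expands to: void helper_paddb_mmx(void *d, void *s); */
    #undef  SUFFIX

    #define SUFFIX _xmm
    DECLARE2(paddb)     /* expands to: void helper_paddb_xmm(void *d, void *s); */
    #undef  SUFFIX

The two-level glue() is what forces SUFFIX to be expanded before the ## paste, so one template list of names can be instantiated once per register width.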
trunk/src/recompiler/target-i386/translate.c
r36140 r36170 16 16 * You should have received a copy of the GNU Lesser General Public 17 17 * License along with this library; if not, write to the Free Software 18 * Foundation, Inc., 5 9 Temple Place, Suite 330, Boston, MA 02111-1307USA18 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA 19 19 */ 20 20 … … 33 33 #include <string.h> 34 34 #ifndef VBOX 35 # 36 # 37 # 35 #include <inttypes.h> 36 #include <signal.h> 37 #include <assert.h> 38 38 #endif /* !VBOX */ 39 39 … … 41 41 #include "exec-all.h" 42 42 #include "disas.h" 43 #include "tcg-op.h" 44 43 45 #include "helper.h" 44 #include "tcg-op.h" 46 #define GEN_HELPER 1 47 #include "helper.h" 45 48 46 49 #define PREFIX_REPZ 0x01 … … 71 74 72 75 /* global register indexes */ 73 static TCGv cpu_env, cpu_A0, cpu_cc_op, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp; 76 static TCGv_ptr cpu_env; 77 static TCGv cpu_A0, cpu_cc_src, cpu_cc_dst, cpu_cc_tmp; 78 static TCGv_i32 cpu_cc_op; 74 79 /* local temps */ 75 80 static TCGv cpu_T[2], cpu_T3; 76 81 /* local register indexes (only used inside old micro ops) */ 77 static TCGv cpu_tmp0, cpu_tmp1_i64, cpu_tmp2_i32, cpu_tmp3_i32, cpu_tmp4, cpu_ptr0, cpu_ptr1; 82 static TCGv cpu_tmp0, cpu_tmp4; 83 static TCGv_ptr cpu_ptr0, cpu_ptr1; 84 static TCGv_i32 cpu_tmp2_i32, cpu_tmp3_i32; 85 static TCGv_i64 cpu_tmp1_i64; 78 86 static TCGv cpu_tmp5, cpu_tmp6; 79 87 … … 658 666 default: 659 667 case 3: 668 /* Should never happen on 32-bit targets. */ 669 #ifdef TARGET_X86_64 660 670 tcg_gen_qemu_ld64(t0, a0, mem_index); 671 #endif 661 672 break; 662 673 } … … 694 705 default: 695 706 case 3: 707 /* Should never happen on 32-bit targets. */ 708 #ifdef TARGET_X86_64 696 709 tcg_gen_qemu_st64(t0, a0, mem_index); 710 #endif 697 711 break; 698 712 } … … 716 730 /** @todo: once TCG codegen improves, we may want to use version 717 731 from else version */ 718 tcg_gen_helper_0_0(helper_check_external_event);732 gen_helper_check_external_event(); 719 733 # else 720 734 int skip_label; … … 736 750 tcg_temp_free(t0); 737 751 738 tcg_gen_helper_0_0(helper_check_external_event);752 gen_helper_check_external_event(); 739 753 740 754 gen_set_label(skip_label); … … 745 759 static void gen_check_external_event2() 746 760 { 747 tcg_gen_helper_0_0(helper_check_external_event);761 gen_helper_check_external_event(); 748 762 } 749 763 # endif … … 762 776 gen_jmp_im(pc); 763 777 # ifdef VBOX_DUMP_STATE 764 tcg_gen_helper_0_0(helper_dump_state);778 gen_helper_dump_state(); 765 779 # endif 766 780 } … … 877 891 } 878 892 879 static void *helper_in_func[3] = { 880 helper_inb, 881 helper_inw, 882 helper_inl, 883 }; 884 885 static void *helper_out_func[3] = { 886 helper_outb, 887 helper_outw, 888 helper_outl, 889 }; 890 891 static void *gen_check_io_func[3] = { 892 helper_check_iob, 893 helper_check_iow, 894 helper_check_iol, 895 }; 893 static void gen_helper_in_func(int ot, TCGv v, TCGv_i32 n) 894 { 895 switch (ot) { 896 case 0: gen_helper_inb(v, n); break; 897 case 1: gen_helper_inw(v, n); break; 898 case 2: gen_helper_inl(v, n); break; 899 } 900 901 } 902 903 static void gen_helper_out_func(int ot, TCGv_i32 v, TCGv_i32 n) 904 { 905 switch (ot) { 906 case 0: gen_helper_outb(v, n); break; 907 case 1: gen_helper_outw(v, n); break; 908 case 2: gen_helper_outl(v, n); break; 909 } 910 911 } 896 912 897 913 static void gen_check_io(DisasContext *s, int ot, target_ulong cur_eip, … … 908 924 state_saved = 1; 909 925 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]); 910 tcg_gen_helper_0_1(gen_check_io_func[ot], 911 cpu_tmp2_i32); 926 
switch (ot) { 927 case 0: gen_helper_check_iob(cpu_tmp2_i32); break; 928 case 1: gen_helper_check_iow(cpu_tmp2_i32); break; 929 case 2: gen_helper_check_iol(cpu_tmp2_i32); break; 930 } 912 931 } 913 932 if(s->flags & HF_SVMI_MASK) { … … 921 940 next_eip = s->pc - s->cs_base; 922 941 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]); 923 tcg_gen_helper_0_3(helper_svm_check_io, 924 cpu_tmp2_i32, 925 tcg_const_i32(svm_flags), 926 tcg_const_i32(next_eip - cur_eip)); 942 gen_helper_svm_check_io(cpu_tmp2_i32, tcg_const_i32(svm_flags), 943 tcg_const_i32(next_eip - cur_eip)); 927 944 } 928 945 } … … 980 997 static void gen_compute_eflags_c(TCGv reg) 981 998 { 982 #if TCG_TARGET_REG_BITS == 32 983 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3); 984 tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 985 (long)cc_table + offsetof(CCTable, compute_c)); 986 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0); 987 tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE, 988 1, &cpu_tmp2_i32, 0, NULL); 989 #else 990 tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op); 991 tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4); 992 tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64, 993 (long)cc_table + offsetof(CCTable, compute_c)); 994 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0); 995 tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE, 996 1, &cpu_tmp2_i32, 0, NULL); 997 #endif 999 gen_helper_cc_compute_c(cpu_tmp2_i32, cpu_cc_op); 998 1000 tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32); 999 1001 } … … 1002 1004 static void gen_compute_eflags(TCGv reg) 1003 1005 { 1004 #if TCG_TARGET_REG_BITS == 32 1005 tcg_gen_shli_i32(cpu_tmp2_i32, cpu_cc_op, 3); 1006 tcg_gen_addi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 1007 (long)cc_table + offsetof(CCTable, compute_all)); 1008 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0); 1009 tcg_gen_call(&tcg_ctx, cpu_tmp2_i32, TCG_CALL_PURE, 1010 1, &cpu_tmp2_i32, 0, NULL); 1011 #else 1012 tcg_gen_extu_i32_tl(cpu_tmp1_i64, cpu_cc_op); 1013 tcg_gen_shli_i64(cpu_tmp1_i64, cpu_tmp1_i64, 4); 1014 tcg_gen_addi_i64(cpu_tmp1_i64, cpu_tmp1_i64, 1015 (long)cc_table + offsetof(CCTable, compute_all)); 1016 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_tmp1_i64, 0); 1017 tcg_gen_call(&tcg_ctx, cpu_tmp1_i64, TCG_CALL_PURE, 1018 1, &cpu_tmp2_i32, 0, NULL); 1019 #endif 1006 gen_helper_cc_compute_all(cpu_tmp2_i32, cpu_cc_op); 1020 1007 tcg_gen_extu_i32_tl(reg, cpu_tmp2_i32); 1021 1008 } … … 1391 1378 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[1]); 1392 1379 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff); 1393 tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[0], cpu_tmp2_i32);1380 gen_helper_in_func(ot, cpu_T[0], cpu_tmp2_i32); 1394 1381 gen_op_st_T0_A0(ot + s->mem_index); 1395 1382 gen_op_movl_T0_Dshift(ot); … … 1410 1397 tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff); 1411 1398 tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[0]); 1412 tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);1399 gen_helper_out_func(ot, cpu_tmp2_i32, cpu_tmp3_i32); 1413 1400 1414 1401 gen_op_movl_T0_Dshift(ot); … … 1462 1449 GEN_REPZ2(cmps) 1463 1450 1464 static void *helper_fp_arith_ST0_FT0[8] = { 1465 helper_fadd_ST0_FT0, 1466 helper_fmul_ST0_FT0, 1467 helper_fcom_ST0_FT0, 1468 helper_fcom_ST0_FT0, 1469 helper_fsub_ST0_FT0, 1470 helper_fsubr_ST0_FT0, 1471 helper_fdiv_ST0_FT0, 1472 helper_fdivr_ST0_FT0, 1473 }; 1451 static void gen_helper_fp_arith_ST0_FT0(int op) 1452 { 1453 switch (op) { 1454 case 0: gen_helper_fadd_ST0_FT0(); break; 1455 case 1: gen_helper_fmul_ST0_FT0(); break; 1456 case 2: gen_helper_fcom_ST0_FT0(); break; 1457 case 3: gen_helper_fcom_ST0_FT0(); break; 
1458 case 4: gen_helper_fsub_ST0_FT0(); break; 1459 case 5: gen_helper_fsubr_ST0_FT0(); break; 1460 case 6: gen_helper_fdiv_ST0_FT0(); break; 1461 case 7: gen_helper_fdivr_ST0_FT0(); break; 1462 } 1463 } 1474 1464 1475 1465 /* NOTE the exception in "r" op ordering */ 1476 static void *helper_fp_arith_STN_ST0[8] = { 1477 helper_fadd_STN_ST0, 1478 helper_fmul_STN_ST0, 1479 NULL, 1480 NULL, 1481 helper_fsubr_STN_ST0, 1482 helper_fsub_STN_ST0, 1483 helper_fdivr_STN_ST0, 1484 helper_fdiv_STN_ST0, 1485 }; 1466 static void gen_helper_fp_arith_STN_ST0(int op, int opreg) 1467 { 1468 TCGv_i32 tmp = tcg_const_i32(opreg); 1469 switch (op) { 1470 case 0: gen_helper_fadd_STN_ST0(tmp); break; 1471 case 1: gen_helper_fmul_STN_ST0(tmp); break; 1472 case 4: gen_helper_fsubr_STN_ST0(tmp); break; 1473 case 5: gen_helper_fsub_STN_ST0(tmp); break; 1474 case 6: gen_helper_fdivr_STN_ST0(tmp); break; 1475 case 7: gen_helper_fdiv_STN_ST0(tmp); break; 1476 } 1477 } 1486 1478 1487 1479 /* if d == OR_TMP0, it means memory operand (address in A0) */ … … 1653 1645 1654 1646 /* XXX: inefficient */ 1655 t0 = tcg_temp_local_new( TCG_TYPE_TL);1656 t1 = tcg_temp_local_new( TCG_TYPE_TL);1647 t0 = tcg_temp_local_new(); 1648 t1 = tcg_temp_local_new(); 1657 1649 1658 1650 tcg_gen_mov_tl(t0, cpu_T[0]); … … 1744 1736 1745 1737 /* XXX: inefficient, but we must use local temps */ 1746 t0 = tcg_temp_local_new( TCG_TYPE_TL);1747 t1 = tcg_temp_local_new( TCG_TYPE_TL);1748 t2 = tcg_temp_local_new( TCG_TYPE_TL);1749 a0 = tcg_temp_local_new( TCG_TYPE_TL);1738 t0 = tcg_temp_local_new(); 1739 t1 = tcg_temp_local_new(); 1740 t2 = tcg_temp_local_new(); 1741 a0 = tcg_temp_local_new(); 1750 1742 1751 1743 if (ot == OT_QUAD) … … 1832 1824 } 1833 1825 1834 static void *helper_rotc[8] = {1835 helper_rclb,1836 helper_rclw,1837 helper_rcll,1838 X86_64_ONLY(helper_rclq),1839 helper_rcrb,1840 helper_rcrw,1841 helper_rcrl,1842 X86_64_ONLY(helper_rcrq),1843 };1844 1845 1826 /* XXX: add faster immediate = 1 case */ 1846 1827 static void gen_rotc_rm_T1(DisasContext *s, int ot, int op1, … … 1858 1839 gen_op_mov_TN_reg(ot, 0, op1); 1859 1840 1860 tcg_gen_helper_1_2(helper_rotc[ot + (is_right * 4)], 1861 cpu_T[0], cpu_T[0], cpu_T[1]); 1841 if (is_right) { 1842 switch (ot) { 1843 case 0: gen_helper_rcrb(cpu_T[0], cpu_T[0], cpu_T[1]); break; 1844 case 1: gen_helper_rcrw(cpu_T[0], cpu_T[0], cpu_T[1]); break; 1845 case 2: gen_helper_rcrl(cpu_T[0], cpu_T[0], cpu_T[1]); break; 1846 #ifdef TARGET_X86_64 1847 case 3: gen_helper_rcrq(cpu_T[0], cpu_T[0], cpu_T[1]); break; 1848 #endif 1849 } 1850 } else { 1851 switch (ot) { 1852 case 0: gen_helper_rclb(cpu_T[0], cpu_T[0], cpu_T[1]); break; 1853 case 1: gen_helper_rclw(cpu_T[0], cpu_T[0], cpu_T[1]); break; 1854 case 2: gen_helper_rcll(cpu_T[0], cpu_T[0], cpu_T[1]); break; 1855 #ifdef TARGET_X86_64 1856 case 3: gen_helper_rclq(cpu_T[0], cpu_T[0], cpu_T[1]); break; 1857 #endif 1858 } 1859 } 1862 1860 /* store */ 1863 1861 if (op1 == OR_TMP0) … … 1886 1884 TCGv t0, t1, t2, a0; 1887 1885 1888 t0 = tcg_temp_local_new( TCG_TYPE_TL);1889 t1 = tcg_temp_local_new( TCG_TYPE_TL);1890 t2 = tcg_temp_local_new( TCG_TYPE_TL);1891 a0 = tcg_temp_local_new( TCG_TYPE_TL);1886 t0 = tcg_temp_local_new(); 1887 t1 = tcg_temp_local_new(); 1888 t2 = tcg_temp_local_new(); 1889 a0 = tcg_temp_local_new(); 1892 1890 1893 1891 if (ot == OT_QUAD) … … 2449 2447 /* nominal case: we use a jump */ 2450 2448 /* XXX: make it faster by adding new instructions in TCG */ 2451 t0 = tcg_temp_local_new( TCG_TYPE_TL);2449 t0 = tcg_temp_local_new(); 2452 2450 
tcg_gen_movi_tl(t0, 0); 2453 2451 l1 = gen_new_label(); … … 2507 2505 gen_jmp_im(cur_eip); 2508 2506 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]); 2509 tcg_gen_helper_0_2(helper_load_seg,tcg_const_i32(seg_reg), cpu_tmp2_i32);2507 gen_helper_load_seg(tcg_const_i32(seg_reg), cpu_tmp2_i32); 2510 2508 /* abort translation because the addseg value may change or 2511 2509 because ss32 may change. For R_SS, translation must always … … 2536 2534 gen_op_set_cc_op(s->cc_op); 2537 2535 gen_jmp_im(pc_start - s->cs_base); 2538 tcg_gen_helper_0_2(helper_svm_check_intercept_param,2539 tcg_const_i32(type),tcg_const_i64(param));2536 gen_helper_svm_check_intercept_param(tcg_const_i32(type), 2537 tcg_const_i64(param)); 2540 2538 } 2541 2539 … … 2744 2742 if (level) { 2745 2743 /* XXX: must save state */ 2746 tcg_gen_helper_0_3(helper_enter64_level, 2747 tcg_const_i32(level), 2748 tcg_const_i32((ot == OT_QUAD)), 2749 cpu_T[1]); 2744 gen_helper_enter64_level(tcg_const_i32(level), 2745 tcg_const_i32((ot == OT_QUAD)), 2746 cpu_T[1]); 2750 2747 } 2751 2748 gen_op_mov_reg_T1(ot, R_EBP); … … 2770 2767 if (level) { 2771 2768 /* XXX: must save state */ 2772 tcg_gen_helper_0_3(helper_enter_level, 2773 tcg_const_i32(level), 2774 tcg_const_i32(s->dflag), 2775 cpu_T[1]); 2769 gen_helper_enter_level(tcg_const_i32(level), 2770 tcg_const_i32(s->dflag), 2771 cpu_T[1]); 2776 2772 } 2777 2773 gen_op_mov_reg_T1(ot, R_EBP); … … 2786 2782 gen_op_set_cc_op(s->cc_op); 2787 2783 gen_jmp_im(cur_eip); 2788 tcg_gen_helper_0_1(helper_raise_exception,tcg_const_i32(trapno));2784 gen_helper_raise_exception(tcg_const_i32(trapno)); 2789 2785 s->is_jmp = 3; 2790 2786 } … … 2798 2794 gen_op_set_cc_op(s->cc_op); 2799 2795 gen_jmp_im(cur_eip); 2800 tcg_gen_helper_0_2(helper_raise_interrupt, 2801 tcg_const_i32(intno), 2802 tcg_const_i32(next_eip - cur_eip)); 2796 gen_helper_raise_interrupt(tcg_const_i32(intno), 2797 tcg_const_i32(next_eip - cur_eip)); 2803 2798 s->is_jmp = 3; 2804 2799 } … … 2809 2804 gen_op_set_cc_op(s->cc_op); 2810 2805 gen_jmp_im(cur_eip); 2811 tcg_gen_helper_0_0(helper_debug);2806 gen_helper_debug(); 2812 2807 s->is_jmp = 3; 2813 2808 } … … 2820 2815 gen_op_set_cc_op(s->cc_op); 2821 2816 if (s->tb->flags & HF_INHIBIT_IRQ_MASK) { 2822 tcg_gen_helper_0_0(helper_reset_inhibit_irq);2817 gen_helper_reset_inhibit_irq(); 2823 2818 } 2824 2825 #ifdef VBOX2826 gen_check_external_event();2827 #endif /* VBOX */2828 2829 2819 if ( s->singlestep_enabled 2830 2820 #ifdef VBOX … … 2833 2823 #endif 2834 2824 ) { 2835 tcg_gen_helper_0_0(helper_debug);2825 gen_helper_debug(); 2836 2826 } else if (s->tf) { 2837 tcg_gen_helper_0_0(helper_single_step);2827 gen_helper_single_step(); 2838 2828 } else { 2839 2829 tcg_gen_exit_tb(0); … … 2927 2917 #define SSE_DUMMY ((void *)2) 2928 2918 2929 #define MMX_OP2(x) { helper_ ## x ## _mmx,helper_ ## x ## _xmm }2930 #define SSE_FOP(x) { helper_ ## x ## ps,helper_ ## x ## pd, \2931 helper_ ## x ## ss,helper_ ## x ## sd, }2919 #define MMX_OP2(x) { gen_helper_ ## x ## _mmx, gen_helper_ ## x ## _xmm } 2920 #define SSE_FOP(x) { gen_helper_ ## x ## ps, gen_helper_ ## x ## pd, \ 2921 gen_helper_ ## x ## ss, gen_helper_ ## x ## sd, } 2932 2922 2933 2923 static void *sse_op_table1[256][4] = { … … 2940 2930 [0x12] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd, movsldup, movddup */ 2941 2931 [0x13] = { SSE_SPECIAL, SSE_SPECIAL }, /* movlps, movlpd */ 2942 [0x14] = { helper_punpckldq_xmm,helper_punpcklqdq_xmm },2943 [0x15] = { helper_punpckhdq_xmm,helper_punpckhqdq_xmm },2932 [0x14] = { 
gen_helper_punpckldq_xmm, gen_helper_punpcklqdq_xmm }, 2933 [0x15] = { gen_helper_punpckhdq_xmm, gen_helper_punpckhqdq_xmm }, 2944 2934 [0x16] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd, movshdup */ 2945 2935 [0x17] = { SSE_SPECIAL, SSE_SPECIAL }, /* movhps, movhpd */ … … 2951 2941 [0x2c] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvttps2pi, cvttpd2pi, cvttsd2si, cvttss2si */ 2952 2942 [0x2d] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* cvtps2pi, cvtpd2pi, cvtsd2si, cvtss2si */ 2953 [0x2e] = { helper_ucomiss,helper_ucomisd },2954 [0x2f] = { helper_comiss,helper_comisd },2943 [0x2e] = { gen_helper_ucomiss, gen_helper_ucomisd }, 2944 [0x2f] = { gen_helper_comiss, gen_helper_comisd }, 2955 2945 [0x50] = { SSE_SPECIAL, SSE_SPECIAL }, /* movmskps, movmskpd */ 2956 2946 [0x51] = SSE_FOP(sqrt), 2957 [0x52] = { helper_rsqrtps, NULL,helper_rsqrtss, NULL },2958 [0x53] = { helper_rcpps, NULL,helper_rcpss, NULL },2959 [0x54] = { helper_pand_xmm,helper_pand_xmm }, /* andps, andpd */2960 [0x55] = { helper_pandn_xmm,helper_pandn_xmm }, /* andnps, andnpd */2961 [0x56] = { helper_por_xmm,helper_por_xmm }, /* orps, orpd */2962 [0x57] = { helper_pxor_xmm,helper_pxor_xmm }, /* xorps, xorpd */2947 [0x52] = { gen_helper_rsqrtps, NULL, gen_helper_rsqrtss, NULL }, 2948 [0x53] = { gen_helper_rcpps, NULL, gen_helper_rcpss, NULL }, 2949 [0x54] = { gen_helper_pand_xmm, gen_helper_pand_xmm }, /* andps, andpd */ 2950 [0x55] = { gen_helper_pandn_xmm, gen_helper_pandn_xmm }, /* andnps, andnpd */ 2951 [0x56] = { gen_helper_por_xmm, gen_helper_por_xmm }, /* orps, orpd */ 2952 [0x57] = { gen_helper_pxor_xmm, gen_helper_pxor_xmm }, /* xorps, xorpd */ 2963 2953 [0x58] = SSE_FOP(add), 2964 2954 [0x59] = SSE_FOP(mul), 2965 [0x5a] = { helper_cvtps2pd,helper_cvtpd2ps,2966 helper_cvtss2sd,helper_cvtsd2ss },2967 [0x5b] = { helper_cvtdq2ps, helper_cvtps2dq,helper_cvttps2dq },2955 [0x5a] = { gen_helper_cvtps2pd, gen_helper_cvtpd2ps, 2956 gen_helper_cvtss2sd, gen_helper_cvtsd2ss }, 2957 [0x5b] = { gen_helper_cvtdq2ps, gen_helper_cvtps2dq, gen_helper_cvttps2dq }, 2968 2958 [0x5c] = SSE_FOP(sub), 2969 2959 [0x5d] = SSE_FOP(min), … … 2972 2962 2973 2963 [0xc2] = SSE_FOP(cmpeq), 2974 [0xc6] = { helper_shufps,helper_shufpd },2964 [0xc6] = { gen_helper_shufps, gen_helper_shufpd }, 2975 2965 2976 2966 [0x38] = { SSE_SPECIAL, SSE_SPECIAL, NULL, SSE_SPECIAL }, /* SSSE3/SSE4 */ … … 2990 2980 [0x6a] = MMX_OP2(punpckhdq), 2991 2981 [0x6b] = MMX_OP2(packssdw), 2992 [0x6c] = { NULL, helper_punpcklqdq_xmm },2993 [0x6d] = { NULL, helper_punpckhqdq_xmm },2982 [0x6c] = { NULL, gen_helper_punpcklqdq_xmm }, 2983 [0x6d] = { NULL, gen_helper_punpckhqdq_xmm }, 2994 2984 [0x6e] = { SSE_SPECIAL, SSE_SPECIAL }, /* movd mm, ea */ 2995 2985 [0x6f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, , movqdu */ 2996 [0x70] = { helper_pshufw_mmx,2997 helper_pshufd_xmm,2998 helper_pshufhw_xmm,2999 helper_pshuflw_xmm },2986 [0x70] = { gen_helper_pshufw_mmx, 2987 gen_helper_pshufd_xmm, 2988 gen_helper_pshufhw_xmm, 2989 gen_helper_pshuflw_xmm }, 3000 2990 [0x71] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftw */ 3001 2991 [0x72] = { SSE_SPECIAL, SSE_SPECIAL }, /* shiftd */ … … 3005 2995 [0x76] = MMX_OP2(pcmpeql), 3006 2996 [0x77] = { SSE_DUMMY }, /* emms */ 3007 [0x7c] = { NULL, helper_haddpd, NULL,helper_haddps },3008 [0x7d] = { NULL, helper_hsubpd, NULL,helper_hsubps },2997 [0x7c] = { NULL, gen_helper_haddpd, NULL, gen_helper_haddps }, 2998 [0x7d] = { NULL, gen_helper_hsubpd, NULL, gen_helper_hsubps }, 3009 
2999 [0x7e] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movd, movd, , movq */ 3010 3000 [0x7f] = { SSE_SPECIAL, SSE_SPECIAL, SSE_SPECIAL }, /* movq, movdqa, movdqu */ 3011 3001 [0xc4] = { SSE_SPECIAL, SSE_SPECIAL }, /* pinsrw */ 3012 3002 [0xc5] = { SSE_SPECIAL, SSE_SPECIAL }, /* pextrw */ 3013 [0xd0] = { NULL, helper_addsubpd, NULL,helper_addsubps },3003 [0xd0] = { NULL, gen_helper_addsubpd, NULL, gen_helper_addsubps }, 3014 3004 [0xd1] = MMX_OP2(psrlw), 3015 3005 [0xd2] = MMX_OP2(psrld), … … 3033 3023 [0xe4] = MMX_OP2(pmulhuw), 3034 3024 [0xe5] = MMX_OP2(pmulhw), 3035 [0xe6] = { NULL, helper_cvttpd2dq, helper_cvtdq2pd,helper_cvtpd2dq },3025 [0xe6] = { NULL, gen_helper_cvttpd2dq, gen_helper_cvtdq2pd, gen_helper_cvtpd2dq }, 3036 3026 [0xe7] = { SSE_SPECIAL , SSE_SPECIAL }, /* movntq, movntq */ 3037 3027 [0xe8] = MMX_OP2(psubsb), … … 3068 3058 [8 + 6] = MMX_OP2(pslld), 3069 3059 [16 + 2] = MMX_OP2(psrlq), 3070 [16 + 3] = { NULL, helper_psrldq_xmm },3060 [16 + 3] = { NULL, gen_helper_psrldq_xmm }, 3071 3061 [16 + 6] = MMX_OP2(psllq), 3072 [16 + 7] = { NULL, helper_pslldq_xmm },3062 [16 + 7] = { NULL, gen_helper_pslldq_xmm }, 3073 3063 }; 3074 3064 3075 3065 static void *sse_op_table3[4 * 3] = { 3076 helper_cvtsi2ss,3077 helper_cvtsi2sd,3078 X86_64_ONLY( helper_cvtsq2ss),3079 X86_64_ONLY( helper_cvtsq2sd),3080 3081 helper_cvttss2si,3082 helper_cvttsd2si,3083 X86_64_ONLY( helper_cvttss2sq),3084 X86_64_ONLY( helper_cvttsd2sq),3085 3086 helper_cvtss2si,3087 helper_cvtsd2si,3088 X86_64_ONLY( helper_cvtss2sq),3089 X86_64_ONLY( helper_cvtsd2sq),3066 gen_helper_cvtsi2ss, 3067 gen_helper_cvtsi2sd, 3068 X86_64_ONLY(gen_helper_cvtsq2ss), 3069 X86_64_ONLY(gen_helper_cvtsq2sd), 3070 3071 gen_helper_cvttss2si, 3072 gen_helper_cvttsd2si, 3073 X86_64_ONLY(gen_helper_cvttss2sq), 3074 X86_64_ONLY(gen_helper_cvttsd2sq), 3075 3076 gen_helper_cvtss2si, 3077 gen_helper_cvtsd2si, 3078 X86_64_ONLY(gen_helper_cvtss2sq), 3079 X86_64_ONLY(gen_helper_cvtsd2sq), 3090 3080 }; 3091 3081 … … 3102 3092 3103 3093 static void *sse_op_table5[256] = { 3104 [0x0c] = helper_pi2fw,3105 [0x0d] = helper_pi2fd,3106 [0x1c] = helper_pf2iw,3107 [0x1d] = helper_pf2id,3108 [0x8a] = helper_pfnacc,3109 [0x8e] = helper_pfpnacc,3110 [0x90] = helper_pfcmpge,3111 [0x94] = helper_pfmin,3112 [0x96] = helper_pfrcp,3113 [0x97] = helper_pfrsqrt,3114 [0x9a] = helper_pfsub,3115 [0x9e] = helper_pfadd,3116 [0xa0] = helper_pfcmpgt,3117 [0xa4] = helper_pfmax,3118 [0xa6] = helper_movq, /* pfrcpit1; no need to actually increase precision */3119 [0xa7] = helper_movq, /* pfrsqit1 */3120 [0xaa] = helper_pfsubr,3121 [0xae] = helper_pfacc,3122 [0xb0] = helper_pfcmpeq,3123 [0xb4] = helper_pfmul,3124 [0xb6] = helper_movq, /* pfrcpit2 */3125 [0xb7] = helper_pmulhrw_mmx,3126 [0xbb] = helper_pswapd,3127 [0xbf] = helper_pavgb_mmx /* pavgusb */3094 [0x0c] = gen_helper_pi2fw, 3095 [0x0d] = gen_helper_pi2fd, 3096 [0x1c] = gen_helper_pf2iw, 3097 [0x1d] = gen_helper_pf2id, 3098 [0x8a] = gen_helper_pfnacc, 3099 [0x8e] = gen_helper_pfpnacc, 3100 [0x90] = gen_helper_pfcmpge, 3101 [0x94] = gen_helper_pfmin, 3102 [0x96] = gen_helper_pfrcp, 3103 [0x97] = gen_helper_pfrsqrt, 3104 [0x9a] = gen_helper_pfsub, 3105 [0x9e] = gen_helper_pfadd, 3106 [0xa0] = gen_helper_pfcmpgt, 3107 [0xa4] = gen_helper_pfmax, 3108 [0xa6] = gen_helper_movq, /* pfrcpit1; no need to actually increase precision */ 3109 [0xa7] = gen_helper_movq, /* pfrsqit1 */ 3110 [0xaa] = gen_helper_pfsubr, 3111 [0xae] = gen_helper_pfacc, 3112 [0xb0] = gen_helper_pfcmpeq, 3113 [0xb4] = gen_helper_pfmul, 3114 [0xb6] = 
gen_helper_movq, /* pfrcpit2 */ 3115 [0xb7] = gen_helper_pmulhrw_mmx, 3116 [0xbb] = gen_helper_pswapd, 3117 [0xbf] = gen_helper_pavgb_mmx /* pavgusb */ 3128 3118 }; 3129 3119 … … 3132 3122 }; 3133 3123 #define SSSE3_OP(x) { MMX_OP2(x), CPUID_EXT_SSSE3 } 3134 #define SSE41_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE41 }3135 #define SSE42_OP(x) { { NULL, helper_ ## x ## _xmm }, CPUID_EXT_SSE42 }3124 #define SSE41_OP(x) { { NULL, gen_helper_ ## x ## _xmm }, CPUID_EXT_SSE41 } 3125 #define SSE42_OP(x) { { NULL, gen_helper_ ## x ## _xmm }, CPUID_EXT_SSE42 } 3136 3126 #define SSE41_SPECIAL { { NULL, SSE_SPECIAL }, CPUID_EXT_SSE41 } 3137 3127 static struct sse_op_helper_s sse_op_table6[256] = { … … 3254 3244 goto illegal_op; 3255 3245 /* femms */ 3256 tcg_gen_helper_0_0(helper_emms);3246 gen_helper_emms(); 3257 3247 return; 3258 3248 } 3259 3249 if (b == 0x77) { 3260 3250 /* emms */ 3261 tcg_gen_helper_0_0(helper_emms);3251 gen_helper_emms(); 3262 3252 return; 3263 3253 } … … 3265 3255 the static cpu state) */ 3266 3256 if (!is_xmm) { 3267 tcg_gen_helper_0_0(helper_enter_mmx);3257 gen_helper_enter_mmx(); 3268 3258 } 3269 3259 … … 3302 3292 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 3303 3293 offsetof(CPUX86State,fpregs[reg].mmx)); 3304 tcg_gen_helper_0_2(helper_movl_mm_T0_mmx, cpu_ptr0, cpu_T[0]); 3294 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]); 3295 gen_helper_movl_mm_T0_mmx(cpu_ptr0, cpu_tmp2_i32); 3305 3296 } 3306 3297 break; … … 3311 3302 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 3312 3303 offsetof(CPUX86State,xmm_regs[reg])); 3313 tcg_gen_helper_0_2(helper_movq_mm_T0_xmm,cpu_ptr0, cpu_T[0]);3304 gen_helper_movq_mm_T0_xmm(cpu_ptr0, cpu_T[0]); 3314 3305 } else 3315 3306 #endif … … 3319 3310 offsetof(CPUX86State,xmm_regs[reg])); 3320 3311 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]); 3321 tcg_gen_helper_0_2(helper_movl_mm_T0_xmm,cpu_ptr0, cpu_tmp2_i32);3312 gen_helper_movl_mm_T0_xmm(cpu_ptr0, cpu_tmp2_i32); 3322 3313 } 3323 3314 break; … … 3580 3571 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset); 3581 3572 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op1_offset); 3582 tcg_gen_helper_0_2(sse_op2,cpu_ptr0, cpu_ptr1);3573 ((void (*)(TCGv_ptr, TCGv_ptr))sse_op2)(cpu_ptr0, cpu_ptr1); 3583 3574 break; 3584 3575 case 0x050: /* movmskps */ … … 3586 3577 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 3587 3578 offsetof(CPUX86State,xmm_regs[rm])); 3588 tcg_gen_helper_1_1(helper_movmskps,cpu_tmp2_i32, cpu_ptr0);3579 gen_helper_movmskps(cpu_tmp2_i32, cpu_ptr0); 3589 3580 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32); 3590 3581 gen_op_mov_reg_T0(OT_LONG, reg); … … 3594 3585 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, 3595 3586 offsetof(CPUX86State,xmm_regs[rm])); 3596 tcg_gen_helper_1_1(helper_movmskpd,cpu_tmp2_i32, cpu_ptr0);3587 gen_helper_movmskpd(cpu_tmp2_i32, cpu_ptr0); 3597 3588 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32); 3598 3589 gen_op_mov_reg_T0(OT_LONG, reg); … … 3600 3591 case 0x02a: /* cvtpi2ps */ 3601 3592 case 0x12a: /* cvtpi2pd */ 3602 tcg_gen_helper_0_0(helper_enter_mmx);3593 gen_helper_enter_mmx(); 3603 3594 if (mod != 3) { 3604 3595 gen_lea_modrm(s, modrm, ®_addr, &offset_addr); … … 3614 3605 switch(b >> 8) { 3615 3606 case 0x0: 3616 tcg_gen_helper_0_2(helper_cvtpi2ps,cpu_ptr0, cpu_ptr1);3607 gen_helper_cvtpi2ps(cpu_ptr0, cpu_ptr1); 3617 3608 break; 3618 3609 default: 3619 3610 case 0x1: 3620 tcg_gen_helper_0_2(helper_cvtpi2pd,cpu_ptr0, cpu_ptr1);3611 gen_helper_cvtpi2pd(cpu_ptr0, cpu_ptr1); 3621 3612 break; 3622 3613 } … … 3631 3622 if (ot == OT_LONG) { 3632 3623 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]); 3633 
tcg_gen_helper_0_2(sse_op2,cpu_ptr0, cpu_tmp2_i32);3624 ((void (*)(TCGv_ptr, TCGv_i32))sse_op2)(cpu_ptr0, cpu_tmp2_i32); 3634 3625 } else { 3635 tcg_gen_helper_0_2(sse_op2,cpu_ptr0, cpu_T[0]);3626 ((void (*)(TCGv_ptr, TCGv))sse_op2)(cpu_ptr0, cpu_T[0]); 3636 3627 } 3637 3628 break; … … 3640 3631 case 0x02d: /* cvtps2pi */ 3641 3632 case 0x12d: /* cvtpd2pi */ 3642 tcg_gen_helper_0_0(helper_enter_mmx);3633 gen_helper_enter_mmx(); 3643 3634 if (mod != 3) { 3644 3635 gen_lea_modrm(s, modrm, ®_addr, &offset_addr); … … 3654 3645 switch(b) { 3655 3646 case 0x02c: 3656 tcg_gen_helper_0_2(helper_cvttps2pi,cpu_ptr0, cpu_ptr1);3647 gen_helper_cvttps2pi(cpu_ptr0, cpu_ptr1); 3657 3648 break; 3658 3649 case 0x12c: 3659 tcg_gen_helper_0_2(helper_cvttpd2pi,cpu_ptr0, cpu_ptr1);3650 gen_helper_cvttpd2pi(cpu_ptr0, cpu_ptr1); 3660 3651 break; 3661 3652 case 0x02d: 3662 tcg_gen_helper_0_2(helper_cvtps2pi,cpu_ptr0, cpu_ptr1);3653 gen_helper_cvtps2pi(cpu_ptr0, cpu_ptr1); 3663 3654 break; 3664 3655 case 0x12d: 3665 tcg_gen_helper_0_2(helper_cvtpd2pi,cpu_ptr0, cpu_ptr1);3656 gen_helper_cvtpd2pi(cpu_ptr0, cpu_ptr1); 3666 3657 break; 3667 3658 } … … 3689 3680 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op2_offset); 3690 3681 if (ot == OT_LONG) { 3691 tcg_gen_helper_1_1(sse_op2,cpu_tmp2_i32, cpu_ptr0);3682 ((void (*)(TCGv_i32, TCGv_ptr))sse_op2)(cpu_tmp2_i32, cpu_ptr0); 3692 3683 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32); 3693 3684 } else { 3694 tcg_gen_helper_1_1(sse_op2,cpu_T[0], cpu_ptr0);3685 ((void (*)(TCGv, TCGv_ptr))sse_op2)(cpu_T[0], cpu_ptr0); 3695 3686 } 3696 3687 gen_op_mov_reg_T0(ot, reg); … … 3743 3734 break; 3744 3735 case 0x2d6: /* movq2dq */ 3745 tcg_gen_helper_0_0(helper_enter_mmx);3736 gen_helper_enter_mmx(); 3746 3737 rm = (modrm & 7); 3747 3738 gen_op_movq(offsetof(CPUX86State,xmm_regs[reg].XMM_Q(0)), … … 3750 3741 break; 3751 3742 case 0x3d6: /* movdq2q */ 3752 tcg_gen_helper_0_0(helper_enter_mmx);3743 gen_helper_enter_mmx(); 3753 3744 rm = (modrm & 7) | REX_B(s); 3754 3745 gen_op_movq(offsetof(CPUX86State,fpregs[reg & 7].mmx), … … 3762 3753 rm = (modrm & 7) | REX_B(s); 3763 3754 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,xmm_regs[rm])); 3764 tcg_gen_helper_1_1(helper_pmovmskb_xmm,cpu_tmp2_i32, cpu_ptr0);3755 gen_helper_pmovmskb_xmm(cpu_tmp2_i32, cpu_ptr0); 3765 3756 } else { 3766 3757 rm = (modrm & 7); 3767 3758 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, offsetof(CPUX86State,fpregs[rm].mmx)); 3768 tcg_gen_helper_1_1(helper_pmovmskb_mmx,cpu_tmp2_i32, cpu_ptr0);3759 gen_helper_pmovmskb_mmx(cpu_tmp2_i32, cpu_ptr0); 3769 3760 } 3770 3761 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32); … … 3804 3795 case 0x21: case 0x31: /* pmovsxbd, pmovzxbd */ 3805 3796 case 0x24: case 0x34: /* pmovsxwq, pmovzxwq */ 3806 tcg_gen_qemu_ld32u(cpu_tmp 2_i32, cpu_A0,3797 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0, 3807 3798 (s->mem_index >> 2) - 1); 3799 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0); 3808 3800 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, op2_offset + 3809 3801 offsetof(XMMReg, XMM_L(0))); … … 3837 3829 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset); 3838 3830 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset); 3839 tcg_gen_helper_0_2(sse_op2,cpu_ptr0, cpu_ptr1);3831 ((void (*)(TCGv_ptr, TCGv_ptr))sse_op2)(cpu_ptr0, cpu_ptr1); 3840 3832 3841 3833 if (b == 0x17) … … 3866 3858 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]); 3867 3859 gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0); 3868 tcg_gen_helper_1_3(helper_crc32,cpu_T[0], cpu_tmp2_i32,3869 cpu_T[0], tcg_const_i32(8 << ot));3860 gen_helper_crc32(cpu_T[0], cpu_tmp2_i32, 3861 
cpu_T[0], tcg_const_i32(8 << ot)); 3870 3862 3871 3863 ot = (s->dflag == 2) ? OT_QUAD : OT_LONG; … … 3917 3909 offsetof(CPUX86State, 3918 3910 xmm_regs[reg].XMM_L(val & 3))); 3911 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32); 3919 3912 if (mod == 3) 3920 gen_op_mov_reg_v(ot, rm, cpu_ tmp2_i32);3913 gen_op_mov_reg_v(ot, rm, cpu_T[0]); 3921 3914 else 3922 tcg_gen_qemu_st32(cpu_ tmp2_i32, cpu_A0,3915 tcg_gen_qemu_st32(cpu_T[0], cpu_A0, 3923 3916 (s->mem_index >> 2) - 1); 3924 3917 } else { /* pextrq */ 3918 #ifdef TARGET_X86_64 3925 3919 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, 3926 3920 offsetof(CPUX86State, … … 3931 3925 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, 3932 3926 (s->mem_index >> 2) - 1); 3927 #else 3928 goto illegal_op; 3929 #endif 3933 3930 } 3934 3931 break; … … 3946 3943 gen_op_mov_TN_reg(OT_LONG, 0, rm); 3947 3944 else 3948 tcg_gen_qemu_ld8u(cpu_ T[0], cpu_A0,3945 tcg_gen_qemu_ld8u(cpu_tmp0, cpu_A0, 3949 3946 (s->mem_index >> 2) - 1); 3950 tcg_gen_st8_tl(cpu_ T[0], cpu_env, offsetof(CPUX86State,3947 tcg_gen_st8_tl(cpu_tmp0, cpu_env, offsetof(CPUX86State, 3951 3948 xmm_regs[reg].XMM_B(val & 15))); 3952 3949 break; 3953 3950 case 0x21: /* insertps */ 3954 if (mod == 3) 3951 if (mod == 3) { 3955 3952 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, 3956 3953 offsetof(CPUX86State,xmm_regs[rm] 3957 3954 .XMM_L((val >> 6) & 3))); 3958 else3959 tcg_gen_qemu_ld32u(cpu_tmp 2_i32, cpu_A0,3955 } else { 3956 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0, 3960 3957 (s->mem_index >> 2) - 1); 3958 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0); 3959 } 3961 3960 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, 3962 3961 offsetof(CPUX86State,xmm_regs[reg] … … 3982 3981 if (ot == OT_LONG) { /* pinsrd */ 3983 3982 if (mod == 3) 3984 gen_op_mov_v_reg(ot, cpu_tmp 2_i32, rm);3983 gen_op_mov_v_reg(ot, cpu_tmp0, rm); 3985 3984 else 3986 tcg_gen_qemu_ld32u(cpu_tmp 2_i32, cpu_A0,3985 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_A0, 3987 3986 (s->mem_index >> 2) - 1); 3987 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_tmp0); 3988 3988 tcg_gen_st_i32(cpu_tmp2_i32, cpu_env, 3989 3989 offsetof(CPUX86State, 3990 3990 xmm_regs[reg].XMM_L(val & 3))); 3991 3991 } else { /* pinsrq */ 3992 #ifdef TARGET_X86_64 3992 3993 if (mod == 3) 3993 3994 gen_op_mov_v_reg(ot, cpu_tmp1_i64, rm); … … 3998 3999 offsetof(CPUX86State, 3999 4000 xmm_regs[reg].XMM_Q(val & 1))); 4001 #else 4002 goto illegal_op; 4003 #endif 4000 4004 } 4001 4005 break; … … 4035 4039 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset); 4036 4040 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset); 4037 tcg_gen_helper_0_3(sse_op2,cpu_ptr0, cpu_ptr1, tcg_const_i32(val));4041 ((void (*)(TCGv_ptr, TCGv_ptr, TCGv_i32))sse_op2)(cpu_ptr0, cpu_ptr1, tcg_const_i32(val)); 4038 4042 break; 4039 4043 default: … … 4095 4099 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset); 4096 4100 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset); 4097 tcg_gen_helper_0_2(sse_op2,cpu_ptr0, cpu_ptr1);4101 ((void (*)(TCGv_ptr, TCGv_ptr))sse_op2)(cpu_ptr0, cpu_ptr1); 4098 4102 break; 4099 4103 case 0x70: /* pshufx insn */ … … 4102 4106 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset); 4103 4107 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset); 4104 tcg_gen_helper_0_3(sse_op2,cpu_ptr0, cpu_ptr1, tcg_const_i32(val));4108 ((void (*)(TCGv_ptr, TCGv_ptr, TCGv_i32))sse_op2)(cpu_ptr0, cpu_ptr1, tcg_const_i32(val)); 4105 4109 break; 4106 4110 case 0xc2: … … 4112 4116 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset); 4113 4117 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset); 4114 tcg_gen_helper_0_2(sse_op2,cpu_ptr0, cpu_ptr1);4118 ((void (*)(TCGv_ptr, 
TCGv_ptr))sse_op2)(cpu_ptr0, cpu_ptr1); 4115 4119 break; 4116 4120 case 0xf7: … … 4132 4136 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset); 4133 4137 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset); 4134 tcg_gen_helper_0_3(sse_op2,cpu_ptr0, cpu_ptr1, cpu_A0);4138 ((void (*)(TCGv_ptr, TCGv_ptr, TCGv))sse_op2)(cpu_ptr0, cpu_ptr1, cpu_A0); 4135 4139 break; 4136 4140 default: 4137 4141 tcg_gen_addi_ptr(cpu_ptr0, cpu_env, op1_offset); 4138 4142 tcg_gen_addi_ptr(cpu_ptr1, cpu_env, op2_offset); 4139 tcg_gen_helper_0_2(sse_op2,cpu_ptr0, cpu_ptr1);4143 ((void (*)(TCGv_ptr, TCGv_ptr))sse_op2)(cpu_ptr0, cpu_ptr1); 4140 4144 break; 4141 4145 } … … 4298 4302 int rex_w, rex_r; 4299 4303 4300 if (unlikely( loglevel & CPU_LOG_TB_OP))4304 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP))) 4301 4305 tcg_gen_debug_insn_start(pc_start); 4302 4306 s->pc = pc_start; … … 4431 4435 #ifndef VBOX 4432 4436 if (prefixes & PREFIX_LOCK) 4433 tcg_gen_helper_0_0(helper_lock);4437 gen_helper_lock(); 4434 4438 #else /* VBOX */ 4435 4439 if (prefixes & PREFIX_LOCK) { … … 4438 4442 return s->pc; 4439 4443 } 4440 tcg_gen_helper_0_0(helper_lock);4444 gen_helper_lock(); 4441 4445 } 4442 4446 #endif /* VBOX */ … … 4659 4663 #else 4660 4664 { 4661 TCGv t0, t1;4662 t0 = tcg_temp_new (TCG_TYPE_I64);4663 t1 = tcg_temp_new (TCG_TYPE_I64);4665 TCGv_i64 t0, t1; 4666 t0 = tcg_temp_new_i64(); 4667 t1 = tcg_temp_new_i64(); 4664 4668 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX); 4665 4669 tcg_gen_extu_i32_i64(t0, cpu_T[0]); … … 4679 4683 #ifdef TARGET_X86_64 4680 4684 case OT_QUAD: 4681 tcg_gen_helper_0_1(helper_mulq_EAX_T0,cpu_T[0]);4685 gen_helper_mulq_EAX_T0(cpu_T[0]); 4682 4686 s->cc_op = CC_OP_MULQ; 4683 4687 break; … … 4728 4732 #else 4729 4733 { 4730 TCGv t0, t1;4731 t0 = tcg_temp_new (TCG_TYPE_I64);4732 t1 = tcg_temp_new (TCG_TYPE_I64);4734 TCGv_i64 t0, t1; 4735 t0 = tcg_temp_new_i64(); 4736 t1 = tcg_temp_new_i64(); 4733 4737 gen_op_mov_TN_reg(OT_LONG, 1, R_EAX); 4734 4738 tcg_gen_ext_i32_i64(t0, cpu_T[0]); … … 4749 4753 #ifdef TARGET_X86_64 4750 4754 case OT_QUAD: 4751 tcg_gen_helper_0_1(helper_imulq_EAX_T0,cpu_T[0]);4755 gen_helper_imulq_EAX_T0(cpu_T[0]); 4752 4756 s->cc_op = CC_OP_MULQ; 4753 4757 break; … … 4759 4763 case OT_BYTE: 4760 4764 gen_jmp_im(pc_start - s->cs_base); 4761 tcg_gen_helper_0_1(helper_divb_AL,cpu_T[0]);4765 gen_helper_divb_AL(cpu_T[0]); 4762 4766 break; 4763 4767 case OT_WORD: 4764 4768 gen_jmp_im(pc_start - s->cs_base); 4765 tcg_gen_helper_0_1(helper_divw_AX,cpu_T[0]);4769 gen_helper_divw_AX(cpu_T[0]); 4766 4770 break; 4767 4771 default: 4768 4772 case OT_LONG: 4769 4773 gen_jmp_im(pc_start - s->cs_base); 4770 tcg_gen_helper_0_1(helper_divl_EAX,cpu_T[0]);4774 gen_helper_divl_EAX(cpu_T[0]); 4771 4775 break; 4772 4776 #ifdef TARGET_X86_64 4773 4777 case OT_QUAD: 4774 4778 gen_jmp_im(pc_start - s->cs_base); 4775 tcg_gen_helper_0_1(helper_divq_EAX,cpu_T[0]);4779 gen_helper_divq_EAX(cpu_T[0]); 4776 4780 break; 4777 4781 #endif … … 4782 4786 case OT_BYTE: 4783 4787 gen_jmp_im(pc_start - s->cs_base); 4784 tcg_gen_helper_0_1(helper_idivb_AL,cpu_T[0]);4788 gen_helper_idivb_AL(cpu_T[0]); 4785 4789 break; 4786 4790 case OT_WORD: 4787 4791 gen_jmp_im(pc_start - s->cs_base); 4788 tcg_gen_helper_0_1(helper_idivw_AX,cpu_T[0]);4792 gen_helper_idivw_AX(cpu_T[0]); 4789 4793 break; 4790 4794 default: 4791 4795 case OT_LONG: 4792 4796 gen_jmp_im(pc_start - s->cs_base); 4793 tcg_gen_helper_0_1(helper_idivl_EAX,cpu_T[0]);4797 gen_helper_idivl_EAX(cpu_T[0]); 4794 4798 break; 4795 4799 #ifdef TARGET_X86_64 4796 4800 case OT_QUAD: 4797 
4801 gen_jmp_im(pc_start - s->cs_base); 4798 tcg_gen_helper_0_1(helper_idivq_EAX,cpu_T[0]);4802 gen_helper_idivq_EAX(cpu_T[0]); 4799 4803 break; 4800 4804 #endif … … 4880 4884 gen_jmp_im(pc_start - s->cs_base); 4881 4885 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]); 4882 tcg_gen_helper_0_4(helper_lcall_protected, 4883 cpu_tmp2_i32, cpu_T[1], 4884 tcg_const_i32(dflag), 4885 tcg_const_i32(s->pc - pc_start)); 4886 gen_helper_lcall_protected(cpu_tmp2_i32, cpu_T[1], 4887 tcg_const_i32(dflag), 4888 tcg_const_i32(s->pc - pc_start)); 4886 4889 } else { 4887 4890 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]); 4888 tcg_gen_helper_0_4(helper_lcall_real, 4889 cpu_tmp2_i32, cpu_T[1], 4890 tcg_const_i32(dflag), 4891 tcg_const_i32(s->pc - s->cs_base)); 4891 gen_helper_lcall_real(cpu_tmp2_i32, cpu_T[1], 4892 tcg_const_i32(dflag), 4893 tcg_const_i32(s->pc - s->cs_base)); 4892 4894 } 4893 4895 gen_eob(s); … … 4909 4911 gen_jmp_im(pc_start - s->cs_base); 4910 4912 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]); 4911 tcg_gen_helper_0_3(helper_ljmp_protected, 4912 cpu_tmp2_i32, 4913 cpu_T[1], 4914 tcg_const_i32(s->pc - pc_start)); 4913 gen_helper_ljmp_protected(cpu_tmp2_i32, cpu_T[1], 4914 tcg_const_i32(s->pc - pc_start)); 4915 4915 } else { 4916 4916 gen_op_movl_seg_T0_vm(R_CS); … … 5021 5021 #ifdef TARGET_X86_64 5022 5022 if (ot == OT_QUAD) { 5023 tcg_gen_helper_1_2(helper_imulq_T0_T1,cpu_T[0], cpu_T[0], cpu_T[1]);5023 gen_helper_imulq_T0_T1(cpu_T[0], cpu_T[0], cpu_T[1]); 5024 5024 } else 5025 5025 #endif … … 5034 5034 #else 5035 5035 { 5036 TCGv t0, t1;5037 t0 = tcg_temp_new (TCG_TYPE_I64);5038 t1 = tcg_temp_new (TCG_TYPE_I64);5036 TCGv_i64 t0, t1; 5037 t0 = tcg_temp_new_i64(); 5038 t1 = tcg_temp_new_i64(); 5039 5039 tcg_gen_ext_i32_i64(t0, cpu_T[0]); 5040 5040 tcg_gen_ext_i32_i64(t1, cpu_T[1]); … … 5100 5100 reg = ((modrm >> 3) & 7) | rex_r; 5101 5101 mod = (modrm >> 6) & 3; 5102 t0 = tcg_temp_local_new( TCG_TYPE_TL);5103 t1 = tcg_temp_local_new( TCG_TYPE_TL);5104 t2 = tcg_temp_local_new( TCG_TYPE_TL);5105 a0 = tcg_temp_local_new( TCG_TYPE_TL);5102 t0 = tcg_temp_local_new(); 5103 t1 = tcg_temp_local_new(); 5104 t2 = tcg_temp_local_new(); 5105 a0 = tcg_temp_local_new(); 5106 5106 gen_op_mov_v_reg(ot, t1, reg); 5107 5107 if (mod == 3) { … … 5155 5155 gen_op_set_cc_op(s->cc_op); 5156 5156 gen_lea_modrm(s, modrm, ®_addr, &offset_addr); 5157 tcg_gen_helper_0_1(helper_cmpxchg16b,cpu_A0);5157 gen_helper_cmpxchg16b(cpu_A0); 5158 5158 } else 5159 5159 #endif … … 5165 5165 gen_op_set_cc_op(s->cc_op); 5166 5166 gen_lea_modrm(s, modrm, ®_addr, &offset_addr); 5167 tcg_gen_helper_0_1(helper_cmpxchg8b,cpu_A0);5167 gen_helper_cmpxchg8b(cpu_A0); 5168 5168 } 5169 5169 s->cc_op = CC_OP_EFLAGS; … … 5291 5291 _first_ does it */ 5292 5292 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK)) 5293 tcg_gen_helper_0_0(helper_set_inhibit_irq);5293 gen_helper_set_inhibit_irq(); 5294 5294 s->tf = 0; 5295 5295 } … … 5371 5371 _first_ does it */ 5372 5372 if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK)) 5373 tcg_gen_helper_0_0(helper_set_inhibit_irq);5373 gen_helper_set_inhibit_irq(); 5374 5374 s->tf = 0; 5375 5375 } … … 5565 5565 /* for xchg, lock is implicit */ 5566 5566 if (!(prefixes & PREFIX_LOCK)) 5567 tcg_gen_helper_0_0(helper_lock);5567 gen_helper_lock(); 5568 5568 gen_op_ld_T1_A0(ot + s->mem_index); 5569 5569 gen_op_st_T0_A0(ot + s->mem_index); 5570 5570 if (!(prefixes & PREFIX_LOCK)) 5571 tcg_gen_helper_0_0(helper_unlock);5571 gen_helper_unlock(); 5572 5572 gen_op_mov_reg_T1(ot, reg); 5573 5573 } … … 5728 5728 gen_op_ld_T0_A0(OT_LONG + 
s->mem_index); 5729 5729 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]); 5730 tcg_gen_helper_0_1(helper_flds_FT0,cpu_tmp2_i32);5730 gen_helper_flds_FT0(cpu_tmp2_i32); 5731 5731 break; 5732 5732 case 1: 5733 5733 gen_op_ld_T0_A0(OT_LONG + s->mem_index); 5734 5734 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]); 5735 tcg_gen_helper_0_1(helper_fildl_FT0,cpu_tmp2_i32);5735 gen_helper_fildl_FT0(cpu_tmp2_i32); 5736 5736 break; 5737 5737 case 2: 5738 5738 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, 5739 5739 (s->mem_index >> 2) - 1); 5740 tcg_gen_helper_0_1(helper_fldl_FT0,cpu_tmp1_i64);5740 gen_helper_fldl_FT0(cpu_tmp1_i64); 5741 5741 break; 5742 5742 case 3: … … 5744 5744 gen_op_lds_T0_A0(OT_WORD + s->mem_index); 5745 5745 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]); 5746 tcg_gen_helper_0_1(helper_fildl_FT0,cpu_tmp2_i32);5746 gen_helper_fildl_FT0(cpu_tmp2_i32); 5747 5747 break; 5748 5748 } 5749 5749 5750 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);5750 gen_helper_fp_arith_ST0_FT0(op1); 5751 5751 if (op1 == 3) { 5752 5752 /* fcomp needs pop */ 5753 tcg_gen_helper_0_0(helper_fpop);5753 gen_helper_fpop(); 5754 5754 } 5755 5755 } … … 5767 5767 gen_op_ld_T0_A0(OT_LONG + s->mem_index); 5768 5768 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]); 5769 tcg_gen_helper_0_1(helper_flds_ST0,cpu_tmp2_i32);5769 gen_helper_flds_ST0(cpu_tmp2_i32); 5770 5770 break; 5771 5771 case 1: 5772 5772 gen_op_ld_T0_A0(OT_LONG + s->mem_index); 5773 5773 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]); 5774 tcg_gen_helper_0_1(helper_fildl_ST0,cpu_tmp2_i32);5774 gen_helper_fildl_ST0(cpu_tmp2_i32); 5775 5775 break; 5776 5776 case 2: 5777 5777 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, 5778 5778 (s->mem_index >> 2) - 1); 5779 tcg_gen_helper_0_1(helper_fldl_ST0,cpu_tmp1_i64);5779 gen_helper_fldl_ST0(cpu_tmp1_i64); 5780 5780 break; 5781 5781 case 3: … … 5783 5783 gen_op_lds_T0_A0(OT_WORD + s->mem_index); 5784 5784 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]); 5785 tcg_gen_helper_0_1(helper_fildl_ST0,cpu_tmp2_i32);5785 gen_helper_fildl_ST0(cpu_tmp2_i32); 5786 5786 break; 5787 5787 } … … 5791 5791 switch(op >> 4) { 5792 5792 case 1: 5793 tcg_gen_helper_1_0(helper_fisttl_ST0,cpu_tmp2_i32);5793 gen_helper_fisttl_ST0(cpu_tmp2_i32); 5794 5794 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32); 5795 5795 gen_op_st_T0_A0(OT_LONG + s->mem_index); 5796 5796 break; 5797 5797 case 2: 5798 tcg_gen_helper_1_0(helper_fisttll_ST0,cpu_tmp1_i64);5798 gen_helper_fisttll_ST0(cpu_tmp1_i64); 5799 5799 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, 5800 5800 (s->mem_index >> 2) - 1); … … 5802 5802 case 3: 5803 5803 default: 5804 tcg_gen_helper_1_0(helper_fistt_ST0,cpu_tmp2_i32);5804 gen_helper_fistt_ST0(cpu_tmp2_i32); 5805 5805 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32); 5806 5806 gen_op_st_T0_A0(OT_WORD + s->mem_index); 5807 5807 break; 5808 5808 } 5809 tcg_gen_helper_0_0(helper_fpop);5809 gen_helper_fpop(); 5810 5810 break; 5811 5811 default: 5812 5812 switch(op >> 4) { 5813 5813 case 0: 5814 tcg_gen_helper_1_0(helper_fsts_ST0,cpu_tmp2_i32);5814 gen_helper_fsts_ST0(cpu_tmp2_i32); 5815 5815 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32); 5816 5816 gen_op_st_T0_A0(OT_LONG + s->mem_index); 5817 5817 break; 5818 5818 case 1: 5819 tcg_gen_helper_1_0(helper_fistl_ST0,cpu_tmp2_i32);5819 gen_helper_fistl_ST0(cpu_tmp2_i32); 5820 5820 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32); 5821 5821 gen_op_st_T0_A0(OT_LONG + s->mem_index); 5822 5822 break; 5823 5823 case 2: 5824 tcg_gen_helper_1_0(helper_fstl_ST0,cpu_tmp1_i64);5824 gen_helper_fstl_ST0(cpu_tmp1_i64); 5825 5825 
tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, 5826 5826 (s->mem_index >> 2) - 1); … … 5828 5828 case 3: 5829 5829 default: 5830 tcg_gen_helper_1_0(helper_fist_ST0,cpu_tmp2_i32);5830 gen_helper_fist_ST0(cpu_tmp2_i32); 5831 5831 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32); 5832 5832 gen_op_st_T0_A0(OT_WORD + s->mem_index); … … 5834 5834 } 5835 5835 if ((op & 7) == 3) 5836 tcg_gen_helper_0_0(helper_fpop);5836 gen_helper_fpop(); 5837 5837 break; 5838 5838 } … … 5842 5842 gen_op_set_cc_op(s->cc_op); 5843 5843 gen_jmp_im(pc_start - s->cs_base); 5844 tcg_gen_helper_0_2(helper_fldenv,5844 gen_helper_fldenv( 5845 5845 cpu_A0, tcg_const_i32(s->dflag)); 5846 5846 break; … … 5848 5848 gen_op_ld_T0_A0(OT_WORD + s->mem_index); 5849 5849 tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]); 5850 tcg_gen_helper_0_1(helper_fldcw,cpu_tmp2_i32);5850 gen_helper_fldcw(cpu_tmp2_i32); 5851 5851 break; 5852 5852 case 0x0e: /* fnstenv mem */ … … 5854 5854 gen_op_set_cc_op(s->cc_op); 5855 5855 gen_jmp_im(pc_start - s->cs_base); 5856 tcg_gen_helper_0_2(helper_fstenv, 5857 cpu_A0, tcg_const_i32(s->dflag)); 5856 gen_helper_fstenv(cpu_A0, tcg_const_i32(s->dflag)); 5858 5857 break; 5859 5858 case 0x0f: /* fnstcw mem */ 5860 tcg_gen_helper_1_0(helper_fnstcw,cpu_tmp2_i32);5859 gen_helper_fnstcw(cpu_tmp2_i32); 5861 5860 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32); 5862 5861 gen_op_st_T0_A0(OT_WORD + s->mem_index); … … 5866 5865 gen_op_set_cc_op(s->cc_op); 5867 5866 gen_jmp_im(pc_start - s->cs_base); 5868 tcg_gen_helper_0_1(helper_fldt_ST0,cpu_A0);5867 gen_helper_fldt_ST0(cpu_A0); 5869 5868 break; 5870 5869 case 0x1f: /* fstpt mem */ … … 5872 5871 gen_op_set_cc_op(s->cc_op); 5873 5872 gen_jmp_im(pc_start - s->cs_base); 5874 tcg_gen_helper_0_1(helper_fstt_ST0,cpu_A0);5875 tcg_gen_helper_0_0(helper_fpop);5873 gen_helper_fstt_ST0(cpu_A0); 5874 gen_helper_fpop(); 5876 5875 break; 5877 5876 case 0x2c: /* frstor mem */ … … 5879 5878 gen_op_set_cc_op(s->cc_op); 5880 5879 gen_jmp_im(pc_start - s->cs_base); 5881 tcg_gen_helper_0_2(helper_frstor, 5882 cpu_A0, tcg_const_i32(s->dflag)); 5880 gen_helper_frstor(cpu_A0, tcg_const_i32(s->dflag)); 5883 5881 break; 5884 5882 case 0x2e: /* fnsave mem */ … … 5886 5884 gen_op_set_cc_op(s->cc_op); 5887 5885 gen_jmp_im(pc_start - s->cs_base); 5888 tcg_gen_helper_0_2(helper_fsave, 5889 cpu_A0, tcg_const_i32(s->dflag)); 5886 gen_helper_fsave(cpu_A0, tcg_const_i32(s->dflag)); 5890 5887 break; 5891 5888 case 0x2f: /* fnstsw mem */ 5892 tcg_gen_helper_1_0(helper_fnstsw,cpu_tmp2_i32);5889 gen_helper_fnstsw(cpu_tmp2_i32); 5893 5890 tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32); 5894 5891 gen_op_st_T0_A0(OT_WORD + s->mem_index); … … 5898 5895 gen_op_set_cc_op(s->cc_op); 5899 5896 gen_jmp_im(pc_start - s->cs_base); 5900 tcg_gen_helper_0_1(helper_fbld_ST0,cpu_A0);5897 gen_helper_fbld_ST0(cpu_A0); 5901 5898 break; 5902 5899 case 0x3e: /* fbstp */ … … 5904 5901 gen_op_set_cc_op(s->cc_op); 5905 5902 gen_jmp_im(pc_start - s->cs_base); 5906 tcg_gen_helper_0_1(helper_fbst_ST0,cpu_A0);5907 tcg_gen_helper_0_0(helper_fpop);5903 gen_helper_fbst_ST0(cpu_A0); 5904 gen_helper_fpop(); 5908 5905 break; 5909 5906 case 0x3d: /* fildll */ 5910 5907 tcg_gen_qemu_ld64(cpu_tmp1_i64, cpu_A0, 5911 5908 (s->mem_index >> 2) - 1); 5912 tcg_gen_helper_0_1(helper_fildll_ST0,cpu_tmp1_i64);5909 gen_helper_fildll_ST0(cpu_tmp1_i64); 5913 5910 break; 5914 5911 case 0x3f: /* fistpll */ 5915 tcg_gen_helper_1_0(helper_fistll_ST0,cpu_tmp1_i64);5912 gen_helper_fistll_ST0(cpu_tmp1_i64); 5916 5913 tcg_gen_qemu_st64(cpu_tmp1_i64, cpu_A0, 5917 5914 (s->mem_index 
>> 2) - 1); 5918 tcg_gen_helper_0_0(helper_fpop);5915 gen_helper_fpop(); 5919 5916 break; 5920 5917 default: … … 5927 5924 switch(op) { 5928 5925 case 0x08: /* fld sti */ 5929 tcg_gen_helper_0_0(helper_fpush);5930 tcg_gen_helper_0_1(helper_fmov_ST0_STN,tcg_const_i32((opreg + 1) & 7));5926 gen_helper_fpush(); 5927 gen_helper_fmov_ST0_STN(tcg_const_i32((opreg + 1) & 7)); 5931 5928 break; 5932 5929 case 0x09: /* fxchg sti */ 5933 5930 case 0x29: /* fxchg4 sti, undocumented op */ 5934 5931 case 0x39: /* fxchg7 sti, undocumented op */ 5935 tcg_gen_helper_0_1(helper_fxchg_ST0_STN,tcg_const_i32(opreg));5932 gen_helper_fxchg_ST0_STN(tcg_const_i32(opreg)); 5936 5933 break; 5937 5934 case 0x0a: /* grp d9/2 */ … … 5942 5939 gen_op_set_cc_op(s->cc_op); 5943 5940 gen_jmp_im(pc_start - s->cs_base); 5944 tcg_gen_helper_0_0(helper_fwait);5941 gen_helper_fwait(); 5945 5942 break; 5946 5943 default: … … 5951 5948 switch(rm) { 5952 5949 case 0: /* fchs */ 5953 tcg_gen_helper_0_0(helper_fchs_ST0);5950 gen_helper_fchs_ST0(); 5954 5951 break; 5955 5952 case 1: /* fabs */ 5956 tcg_gen_helper_0_0(helper_fabs_ST0);5953 gen_helper_fabs_ST0(); 5957 5954 break; 5958 5955 case 4: /* ftst */ 5959 tcg_gen_helper_0_0(helper_fldz_FT0);5960 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);5956 gen_helper_fldz_FT0(); 5957 gen_helper_fcom_ST0_FT0(); 5961 5958 break; 5962 5959 case 5: /* fxam */ 5963 tcg_gen_helper_0_0(helper_fxam_ST0);5960 gen_helper_fxam_ST0(); 5964 5961 break; 5965 5962 default: … … 5971 5968 switch(rm) { 5972 5969 case 0: 5973 tcg_gen_helper_0_0(helper_fpush);5974 tcg_gen_helper_0_0(helper_fld1_ST0);5970 gen_helper_fpush(); 5971 gen_helper_fld1_ST0(); 5975 5972 break; 5976 5973 case 1: 5977 tcg_gen_helper_0_0(helper_fpush);5978 tcg_gen_helper_0_0(helper_fldl2t_ST0);5974 gen_helper_fpush(); 5975 gen_helper_fldl2t_ST0(); 5979 5976 break; 5980 5977 case 2: 5981 tcg_gen_helper_0_0(helper_fpush);5982 tcg_gen_helper_0_0(helper_fldl2e_ST0);5978 gen_helper_fpush(); 5979 gen_helper_fldl2e_ST0(); 5983 5980 break; 5984 5981 case 3: 5985 tcg_gen_helper_0_0(helper_fpush);5986 tcg_gen_helper_0_0(helper_fldpi_ST0);5982 gen_helper_fpush(); 5983 gen_helper_fldpi_ST0(); 5987 5984 break; 5988 5985 case 4: 5989 tcg_gen_helper_0_0(helper_fpush);5990 tcg_gen_helper_0_0(helper_fldlg2_ST0);5986 gen_helper_fpush(); 5987 gen_helper_fldlg2_ST0(); 5991 5988 break; 5992 5989 case 5: 5993 tcg_gen_helper_0_0(helper_fpush);5994 tcg_gen_helper_0_0(helper_fldln2_ST0);5990 gen_helper_fpush(); 5991 gen_helper_fldln2_ST0(); 5995 5992 break; 5996 5993 case 6: 5997 tcg_gen_helper_0_0(helper_fpush);5998 tcg_gen_helper_0_0(helper_fldz_ST0);5994 gen_helper_fpush(); 5995 gen_helper_fldz_ST0(); 5999 5996 break; 6000 5997 default: … … 6006 6003 switch(rm) { 6007 6004 case 0: /* f2xm1 */ 6008 tcg_gen_helper_0_0(helper_f2xm1);6005 gen_helper_f2xm1(); 6009 6006 break; 6010 6007 case 1: /* fyl2x */ 6011 tcg_gen_helper_0_0(helper_fyl2x);6008 gen_helper_fyl2x(); 6012 6009 break; 6013 6010 case 2: /* fptan */ 6014 tcg_gen_helper_0_0(helper_fptan);6011 gen_helper_fptan(); 6015 6012 break; 6016 6013 case 3: /* fpatan */ 6017 tcg_gen_helper_0_0(helper_fpatan);6014 gen_helper_fpatan(); 6018 6015 break; 6019 6016 case 4: /* fxtract */ 6020 tcg_gen_helper_0_0(helper_fxtract);6017 gen_helper_fxtract(); 6021 6018 break; 6022 6019 case 5: /* fprem1 */ 6023 tcg_gen_helper_0_0(helper_fprem1);6020 gen_helper_fprem1(); 6024 6021 break; 6025 6022 case 6: /* fdecstp */ 6026 tcg_gen_helper_0_0(helper_fdecstp);6023 gen_helper_fdecstp(); 6027 6024 break; 6028 6025 default: 6029 6026 
case 7: /* fincstp */ 6030 tcg_gen_helper_0_0(helper_fincstp);6027 gen_helper_fincstp(); 6031 6028 break; 6032 6029 } … … 6035 6032 switch(rm) { 6036 6033 case 0: /* fprem */ 6037 tcg_gen_helper_0_0(helper_fprem);6034 gen_helper_fprem(); 6038 6035 break; 6039 6036 case 1: /* fyl2xp1 */ 6040 tcg_gen_helper_0_0(helper_fyl2xp1);6037 gen_helper_fyl2xp1(); 6041 6038 break; 6042 6039 case 2: /* fsqrt */ 6043 tcg_gen_helper_0_0(helper_fsqrt);6040 gen_helper_fsqrt(); 6044 6041 break; 6045 6042 case 3: /* fsincos */ 6046 tcg_gen_helper_0_0(helper_fsincos);6043 gen_helper_fsincos(); 6047 6044 break; 6048 6045 case 5: /* fscale */ 6049 tcg_gen_helper_0_0(helper_fscale);6046 gen_helper_fscale(); 6050 6047 break; 6051 6048 case 4: /* frndint */ 6052 tcg_gen_helper_0_0(helper_frndint);6049 gen_helper_frndint(); 6053 6050 break; 6054 6051 case 6: /* fsin */ 6055 tcg_gen_helper_0_0(helper_fsin);6052 gen_helper_fsin(); 6056 6053 break; 6057 6054 default: 6058 6055 case 7: /* fcos */ 6059 tcg_gen_helper_0_0(helper_fcos);6056 gen_helper_fcos(); 6060 6057 break; 6061 6058 } … … 6069 6066 op1 = op & 7; 6070 6067 if (op >= 0x20) { 6071 tcg_gen_helper_0_1(helper_fp_arith_STN_ST0[op1], tcg_const_i32(opreg));6068 gen_helper_fp_arith_STN_ST0(op1, opreg); 6072 6069 if (op >= 0x30) 6073 tcg_gen_helper_0_0(helper_fpop);6070 gen_helper_fpop(); 6074 6071 } else { 6075 tcg_gen_helper_0_1(helper_fmov_FT0_STN,tcg_const_i32(opreg));6076 tcg_gen_helper_0_0(helper_fp_arith_ST0_FT0[op1]);6072 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg)); 6073 gen_helper_fp_arith_ST0_FT0(op1); 6077 6074 } 6078 6075 } … … 6080 6077 case 0x02: /* fcom */ 6081 6078 case 0x22: /* fcom2, undocumented op */ 6082 tcg_gen_helper_0_1(helper_fmov_FT0_STN,tcg_const_i32(opreg));6083 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);6079 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg)); 6080 gen_helper_fcom_ST0_FT0(); 6084 6081 break; 6085 6082 case 0x03: /* fcomp */ 6086 6083 case 0x23: /* fcomp3, undocumented op */ 6087 6084 case 0x32: /* fcomp5, undocumented op */ 6088 tcg_gen_helper_0_1(helper_fmov_FT0_STN,tcg_const_i32(opreg));6089 tcg_gen_helper_0_0(helper_fcom_ST0_FT0);6090 tcg_gen_helper_0_0(helper_fpop);6085 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg)); 6086 gen_helper_fcom_ST0_FT0(); 6087 gen_helper_fpop(); 6091 6088 break; 6092 6089 case 0x15: /* da/5 */ 6093 6090 switch(rm) { 6094 6091 case 1: /* fucompp */ 6095 tcg_gen_helper_0_1(helper_fmov_FT0_STN,tcg_const_i32(1));6096 tcg_gen_helper_0_0(helper_fucom_ST0_FT0);6097 tcg_gen_helper_0_0(helper_fpop);6098 tcg_gen_helper_0_0(helper_fpop);6092 gen_helper_fmov_FT0_STN(tcg_const_i32(1)); 6093 gen_helper_fucom_ST0_FT0(); 6094 gen_helper_fpop(); 6095 gen_helper_fpop(); 6099 6096 break; 6100 6097 default: … … 6109 6106 break; 6110 6107 case 2: /* fclex */ 6111 tcg_gen_helper_0_0(helper_fclex);6108 gen_helper_fclex(); 6112 6109 break; 6113 6110 case 3: /* fninit */ 6114 tcg_gen_helper_0_0(helper_fninit);6111 gen_helper_fninit(); 6115 6112 break; 6116 6113 case 4: /* fsetpm (287 only, just do nop here) */ … … 6123 6120 if (s->cc_op != CC_OP_DYNAMIC) 6124 6121 gen_op_set_cc_op(s->cc_op); 6125 tcg_gen_helper_0_1(helper_fmov_FT0_STN,tcg_const_i32(opreg));6126 tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);6122 gen_helper_fmov_FT0_STN(tcg_const_i32(opreg)); 6123 gen_helper_fucomi_ST0_FT0(); 6127 6124 s->cc_op = CC_OP_EFLAGS; 6128 6125 break; … … 6130 6127 if (s->cc_op != CC_OP_DYNAMIC) 6131 6128 gen_op_set_cc_op(s->cc_op); 6132 tcg_gen_helper_0_1(helper_fmov_FT0_STN,tcg_const_i32(opreg));6133 
-      tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
+6129  gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
+6130  gen_helper_fcomi_ST0_FT0();
 6134 6131  s->cc_op = CC_OP_EFLAGS;
 6135 6132  break;
 6136 6133  case 0x28: /* ffree sti */
-6137  tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
+6134  gen_helper_ffree_STN(tcg_const_i32(opreg));
 6138 6135  break;
 6139 6136  case 0x2a: /* fst sti */
-6140  tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
+6137  gen_helper_fmov_STN_ST0(tcg_const_i32(opreg));
 6141 6138  break;
 6142 6139  case 0x2b: /* fstp sti */
 …    …
 6144 6141  case 0x3a: /* fstp8 sti, undocumented op */
 6145 6142  case 0x3b: /* fstp9 sti, undocumented op */
-6146  tcg_gen_helper_0_1(helper_fmov_STN_ST0, tcg_const_i32(opreg));
-6147  tcg_gen_helper_0_0(helper_fpop);
+6143  gen_helper_fmov_STN_ST0(tcg_const_i32(opreg));
+6144  gen_helper_fpop();
 6148 6145  break;
 6149 6146  case 0x2c: /* fucom st(i) */
-6150  tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
-6151  tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
+6147  gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
+6148  gen_helper_fucom_ST0_FT0();
 6152 6149  break;
 6153 6150  case 0x2d: /* fucomp st(i) */
-6154  tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
-6155  tcg_gen_helper_0_0(helper_fucom_ST0_FT0);
-6156  tcg_gen_helper_0_0(helper_fpop);
+6151  gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
+6152  gen_helper_fucom_ST0_FT0();
+6153  gen_helper_fpop();
 6157 6154  break;
 6158 6155  case 0x33: /* de/3 */
 6159 6156  switch(rm) {
 6160 6157  case 1: /* fcompp */
-6161  tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(1));
-6162  tcg_gen_helper_0_0(helper_fcom_ST0_FT0);
-6163  tcg_gen_helper_0_0(helper_fpop);
-6164  tcg_gen_helper_0_0(helper_fpop);
+6158  gen_helper_fmov_FT0_STN(tcg_const_i32(1));
+6159  gen_helper_fcom_ST0_FT0();
+6160  gen_helper_fpop();
+6161  gen_helper_fpop();
 6165 6162  break;
 6166 6163  default:
 …    …
 6169 6166  break;
 6170 6167  case 0x38: /* ffreep sti, undocumented op */
-6171  tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg));
-6172  tcg_gen_helper_0_0(helper_fpop);
+6168  gen_helper_ffree_STN(tcg_const_i32(opreg));
+6169  gen_helper_fpop();
 6173 6170  break;
 6174 6171  case 0x3c: /* df/4 */
 6175 6172  switch(rm) {
 6176 6173  case 0:
-6177  tcg_gen_helper_1_0(helper_fnstsw, cpu_tmp2_i32);
+6174  gen_helper_fnstsw(cpu_tmp2_i32);
 6178 6175  tcg_gen_extu_i32_tl(cpu_T[0], cpu_tmp2_i32);
 6179 6176  gen_op_mov_reg_T0(OT_WORD, R_EAX);
 …    …
 6186 6183  if (s->cc_op != CC_OP_DYNAMIC)
 6187 6184  gen_op_set_cc_op(s->cc_op);
-6188  tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
-6189  tcg_gen_helper_0_0(helper_fucomi_ST0_FT0);
-6190  tcg_gen_helper_0_0(helper_fpop);
+6185  gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
+6186  gen_helper_fucomi_ST0_FT0();
+6187  gen_helper_fpop();
 6191 6188  s->cc_op = CC_OP_EFLAGS;
 6192 6189  break;
 …    …
 6194 6191  if (s->cc_op != CC_OP_DYNAMIC)
 6195 6192  gen_op_set_cc_op(s->cc_op);
-6196  tcg_gen_helper_0_1(helper_fmov_FT0_STN, tcg_const_i32(opreg));
-6197  tcg_gen_helper_0_0(helper_fcomi_ST0_FT0);
-6198  tcg_gen_helper_0_0(helper_fpop);
+6193  gen_helper_fmov_FT0_STN(tcg_const_i32(opreg));
+6194  gen_helper_fcomi_ST0_FT0();
+6195  gen_helper_fpop();
 6199 6196  s->cc_op = CC_OP_EFLAGS;
 6200 6197  break;
 …    …
 6212 6209  l1 = gen_new_label();
 6213 6210  gen_jcc1(s, s->cc_op, op1, l1);
-6214  tcg_gen_helper_0_1(helper_fmov_ST0_STN, tcg_const_i32(opreg));
+6211  gen_helper_fmov_ST0_STN(tcg_const_i32(opreg));
 6215 6212  gen_set_label(l1);
 6216 6213  }
 …    …
 6349 6346  gen_io_start();
 6350 6347  tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
-6351  tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
+6348  gen_helper_in_func(ot, cpu_T[1], cpu_tmp2_i32);
 6352 6349  gen_op_mov_reg_T1(ot, R_EAX);
 6353 6350  if (use_icount) {
 …    …
 6366 6363  gen_check_io(s, ot, pc_start - s->cs_base,
 6367 6364  svm_is_rep(prefixes));
-6368  #ifdef VBOX /* bird: linux is writing to this port for delaying I/O. */ /** @todo this breaks AIX, remove. */
+6365  #ifdef VBOX /* bird: linux is writing to this port for delaying I/O. */ /** @todo YYY: this breaks AIX, remove. */
 6369 6366  if (val == 0x80)
 6370 6367  break;
 …    …
 6377 6374  tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
 6378 6375  tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
-6379  tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
+6376  gen_helper_out_func(ot, cpu_tmp2_i32, cpu_tmp3_i32);
 6380 6377  if (use_icount) {
 6381 6378  gen_io_end();
 …    …
 6396 6393  gen_io_start();
 6397 6394  tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
-6398  tcg_gen_helper_1_1(helper_in_func[ot], cpu_T[1], cpu_tmp2_i32);
+6395  gen_helper_in_func(ot, cpu_T[1], cpu_tmp2_i32);
 6399 6396  gen_op_mov_reg_T1(ot, R_EAX);
 6400 6397  if (use_icount) {
 …    …
 6420 6417  tcg_gen_andi_i32(cpu_tmp2_i32, cpu_tmp2_i32, 0xffff);
 6421 6418  tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_T[1]);
-6422  tcg_gen_helper_0_2(helper_out_func[ot], cpu_tmp2_i32, cpu_tmp3_i32);
+6419  gen_helper_out_func(ot, cpu_tmp2_i32, cpu_tmp3_i32);
 6423 6420  if (use_icount) {
 6424 6421  gen_io_end();
 …    …
 6457 6454  gen_op_set_cc_op(s->cc_op);
 6458 6455  gen_jmp_im(pc_start - s->cs_base);
-6459  tcg_gen_helper_0_2(helper_lret_protected,
-6460  tcg_const_i32(s->dflag),
-6461  tcg_const_i32(val));
+6456  gen_helper_lret_protected(tcg_const_i32(s->dflag),
+6457  tcg_const_i32(val));
 6462 6458  } else {
 6463 6459  gen_stack_A0(s);
 …    …
 6485 6481  if (!s->pe) {
 6486 6482  /* real mode */
-6487  tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
+6483  gen_helper_iret_real(tcg_const_i32(s->dflag));
 6488 6484  s->cc_op = CC_OP_EFLAGS;
 6489 6485  } else if (s->vm86) {
 …    …
 6495 6491  gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
 6496 6492  } else {
-6497  tcg_gen_helper_0_1(helper_iret_real, tcg_const_i32(s->dflag));
+6493  gen_helper_iret_real(tcg_const_i32(s->dflag));
 6498 6494  s->cc_op = CC_OP_EFLAGS;
 6499 6495  }
 …    …
 6502 6498  gen_op_set_cc_op(s->cc_op);
 6503 6499  gen_jmp_im(pc_start - s->cs_base);
-6504  tcg_gen_helper_0_2(helper_iret_protected,
-6505  tcg_const_i32(s->dflag),
-6506  tcg_const_i32(s->pc - s->cs_base));
+6500  gen_helper_iret_protected(tcg_const_i32(s->dflag),
+6501  tcg_const_i32(s->pc - s->cs_base));
 6507 6502  s->cc_op = CC_OP_EFLAGS;
 6508 6503  }
 …    …
 6519 6514  if (s->dflag == 0)
 6520 6515  tval &= 0xffff;
+6516  #ifdef VBOX /* upstream fix */
 6521 6517  else if (!CODE64(s))
 6522 6518  tval &= 0xffffffff;
+6519  #endif
 6523 6520  gen_movtl_T0_im(next_eip);
 6524 6521  gen_push_T0(s);
 …    …
 6604 6601  reg = ((modrm >> 3) & 7) | rex_r;
 6605 6602  mod = (modrm >> 6) & 3;
-6606  t0 = tcg_temp_local_new(TCG_TYPE_TL);
+6603  t0 = tcg_temp_local_new();
 6607 6604  if (mod != 3) {
 6608 6605  gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
 …    …
 6648 6645  #ifdef VBOX
 6649 6646  if (s->vm86 && s->vme && s->iopl != 3)
-6650  tcg_gen_helper_1_0(helper_read_eflags_vme, cpu_T[0]);
+6647  gen_helper_read_eflags_vme(cpu_T[0]);
 6651 6648  else
 6652 6649  #endif
-6653  tcg_gen_helper_1_0(helper_read_eflags, cpu_T[0]);
+6650  gen_helper_read_eflags(cpu_T[0]);
 6654 6651  gen_push_T0(s);
 6655 6652  }
 …    …
 6667 6664  if (s->cpl == 0) {
 6668 6665  if (s->dflag) {
-6669  tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
+6666  gen_helper_write_eflags(cpu_T[0],
 6670 6667  tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK)));
 6671 6668  } else {
-6672  tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
+6669  gen_helper_write_eflags(cpu_T[0],
 6673 6670  tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK | IOPL_MASK) & 0xffff));
 6674 6671  }
 …    …
 6676 6673  if (s->cpl <= s->iopl) {
 6677 6674  if (s->dflag) {
-6678  tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
+6675  gen_helper_write_eflags(cpu_T[0],
 6679 6676  tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK)));
 6680 6677  } else {
-6681  tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
+6678  gen_helper_write_eflags(cpu_T[0],
 6682 6679  tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK | IF_MASK) & 0xffff));
 6683 6680  }
 6684 6681  } else {
 6685 6682  if (s->dflag) {
-6686  tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
+6683  gen_helper_write_eflags(cpu_T[0],
 6687 6684  tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK)));
 6688 6685  } else {
 6689 6686  #ifdef VBOX
 6690 6687  if (s->vm86 && s->vme)
-6691  tcg_gen_helper_0_1(helper_write_eflags_vme, cpu_T[0]);
+6688  gen_helper_write_eflags_vme(cpu_T[0]);
 6692 6689  else
 6693 6690  #endif
-6694  tcg_gen_helper_0_2(helper_write_eflags, cpu_T[0],
+6691  gen_helper_write_eflags(cpu_T[0],
 6695 6692  tcg_const_i32((TF_MASK | AC_MASK | ID_MASK | NT_MASK) & 0xffff));
 6696 6693  }
 …    …
 6858 6855  label1 = gen_new_label();
 6859 6856  tcg_gen_movi_tl(cpu_cc_dst, 0);
-6860  t0 = tcg_temp_local_new(TCG_TYPE_TL);
+6857  t0 = tcg_temp_local_new();
 6861 6858  tcg_gen_mov_tl(t0, cpu_T[0]);
 6862 6859  tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, label1);
 6863 6860  if (b & 1) {
-6864  tcg_gen_helper_1_1(helper_bsr, cpu_T[0], t0);
+6861  gen_helper_bsr(cpu_T[0], t0);
 6865 6862  } else {
-6866  tcg_gen_helper_1_1(helper_bsf, cpu_T[0], t0);
+6863  gen_helper_bsf(cpu_T[0], t0);
 6867 6864  }
 6868 6865  gen_op_mov_reg_T0(ot, reg);
 …    …
 6881 6878  if (s->cc_op != CC_OP_DYNAMIC)
 6882 6879  gen_op_set_cc_op(s->cc_op);
-6883  tcg_gen_helper_0_0(helper_daa);
+6880  gen_helper_daa();
 6884 6881  s->cc_op = CC_OP_EFLAGS;
 6885 6882  break;
 …    …
 6889 6886  if (s->cc_op != CC_OP_DYNAMIC)
 6890 6887  gen_op_set_cc_op(s->cc_op);
-6891  tcg_gen_helper_0_0(helper_das);
+6888  gen_helper_das();
 6892 6889  s->cc_op = CC_OP_EFLAGS;
 6893 6890  break;
 …    …
 6897 6894  if (s->cc_op != CC_OP_DYNAMIC)
 6898 6895  gen_op_set_cc_op(s->cc_op);
-6899  tcg_gen_helper_0_0(helper_aaa);
+6896  gen_helper_aaa();
 6900 6897  s->cc_op = CC_OP_EFLAGS;
 6901 6898  break;
 …    …
 6905 6902  if (s->cc_op != CC_OP_DYNAMIC)
 6906 6903  gen_op_set_cc_op(s->cc_op);
-6907  tcg_gen_helper_0_0(helper_aas);
+6904  gen_helper_aas();
 6908 6905  s->cc_op = CC_OP_EFLAGS;
 6909 6906  break;
 …    …
 6915 6912  gen_exception(s, EXCP00_DIVZ, pc_start - s->cs_base);
 6916 6913  } else {
-6917  tcg_gen_helper_0_1(helper_aam, tcg_const_i32(val));
+6914  gen_helper_aam(tcg_const_i32(val));
 6918 6915  s->cc_op = CC_OP_LOGICB;
 6919 6916  }
 …    …
 6923 6920  goto illegal_op;
 6924 6921  val = ldub_code(s->pc++);
-6925  tcg_gen_helper_0_1(helper_aad, tcg_const_i32(val));
+6922  gen_helper_aad(tcg_const_i32(val));
 6926 6923  s->cc_op = CC_OP_LOGICB;
 6927 6924  break;
 …    …
 6945 6942  gen_op_set_cc_op(s->cc_op);
 6946 6943  gen_jmp_im(pc_start - s->cs_base);
-6947  tcg_gen_helper_0_0(helper_fwait);
+6944  gen_helper_fwait();
 6948 6945  }
 6949 6946  break;
 …    …
 6974 6971  gen_op_set_cc_op(s->cc_op);
 6975 6972  gen_jmp_im(pc_start - s->cs_base);
-6976  tcg_gen_helper_0_1(helper_into, tcg_const_i32(s->pc - pc_start));
-6977  break;
+6973  gen_helper_into(tcg_const_i32(s->pc - pc_start));
+6974  break;
+6975  #ifdef WANT_ICEBP
 6978 6976  case 0xf1: /* icebp (undocumented, exits to external debugger) */
 6979 6977  gen_svm_check_intercept(s, pc_start, SVM_EXIT_ICEBP);
 …    …
 6986 6984  #endif
 6987 6985  break;
+6986  #endif
 6988 6987  case 0xfa: /* cli */
 6989 6988  if (!s->vm86) {
 6990 6989  if (s->cpl <= s->iopl) {
-6991  tcg_gen_helper_0_0(helper_cli);
+6990  gen_helper_cli();
 6992 6991  } else {
 6993 6992  gen_exception(s, EXCP0D_GPF, pc_start - s->cs_base);
 …    …
 6995 6994  } else {
 6996 6995  if (s->iopl == 3) {
-6997  tcg_gen_helper_0_0(helper_cli);
+6996  gen_helper_cli();
 6998 6997  #ifdef VBOX
 6999 6998  } else if (s->iopl != 3 && s->vme) {
-7000  tcg_gen_helper_0_0(helper_cli_vme);
+6999  gen_helper_cli_vme();
 7001 7000  #endif
 7002 7001  } else {
 …    …
 7009 7008  if (s->cpl <= s->iopl) {
 7010 7009  gen_sti:
-7011  tcg_gen_helper_0_0(helper_sti);
+7010  gen_helper_sti();
 7012 7011  /* interruptions are enabled only the first insn after sti */
 7013 7012  /* If several instructions disable interrupts, only the
 7014 7013  _first_ does it */
 7015 7014  if (!(s->tb->flags & HF_INHIBIT_IRQ_MASK))
-7016  tcg_gen_helper_0_0(helper_set_inhibit_irq);
+7015  gen_helper_set_inhibit_irq();
 7017 7016  /* give a chance to handle pending irqs */
 7018 7017  gen_jmp_im(s->pc - s->cs_base);
 …    …
 7026 7025  #ifdef VBOX
 7027 7026  } else if (s->iopl != 3 && s->vme) {
-7028  tcg_gen_helper_0_0(helper_sti_vme);
+7027  gen_helper_sti_vme();
 7029 7028  /* give a chance to handle pending irqs */
 7030 7029  gen_jmp_im(s->pc - s->cs_base);
 …    …
 7050 7049  tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
 7051 7050  if (ot == OT_WORD)
-7052  tcg_gen_helper_0_2(helper_boundw, cpu_A0, cpu_tmp2_i32);
+7051  gen_helper_boundw(cpu_A0, cpu_tmp2_i32);
 7053 7052  else
-7054  tcg_gen_helper_0_2(helper_boundl, cpu_A0, cpu_tmp2_i32);
+7053  gen_helper_boundl(cpu_A0, cpu_tmp2_i32);
 7055 7054  break;
 7056 7055  case 0x1c8 ... 0x1cf: /* bswap reg */
 …    …
 7063 7062  } else
 7064 7063  {
-7065  TCGv tmp0;
+7064  TCGv_i32 tmp0;
 7066 7065  gen_op_mov_TN_reg(OT_LONG, 0, reg);
 7067 7066
-7068  tmp0 = tcg_temp_new(TCG_TYPE_I32);
+7067  tmp0 = tcg_temp_new_i32();
 7069 7068  tcg_gen_trunc_i64_i32(tmp0, cpu_T[0]);
 7070 7069  tcg_gen_bswap_i32(tmp0, tmp0);
 …    …
 7150 7149  gen_jmp_im(pc_start - s->cs_base);
 7151 7150  if (b & 2) {
-7152  tcg_gen_helper_0_0(helper_rdmsr);
+7151  gen_helper_rdmsr();
 7153 7152  } else {
-7154  tcg_gen_helper_0_0(helper_wrmsr);
+7153  gen_helper_wrmsr();
 7155 7154  }
 7156 7155  }
 …    …
 7162 7161  if (use_icount)
 7163 7162  gen_io_start();
-7164  tcg_gen_helper_0_0(helper_rdtsc);
+7163  gen_helper_rdtsc();
 7165 7164  if (use_icount) {
 7166 7165  gen_io_end();
 …    …
 7172 7171  gen_op_set_cc_op(s->cc_op);
 7173 7172  gen_jmp_im(pc_start - s->cs_base);
-7174  tcg_gen_helper_0_0(helper_rdpmc);
+7173  gen_helper_rdpmc();
 7175 7174  break;
 7176 7175  case 0x134: /* sysenter */
 …    …
 7191 7190  }
 7192 7191  gen_jmp_im(pc_start - s->cs_base);
-7193  tcg_gen_helper_0_0(helper_sysenter);
+7192  gen_helper_sysenter();
 7194 7193  gen_eob(s);
 7195 7194  }
 …    …
 7212 7211  }
 7213 7212  gen_jmp_im(pc_start - s->cs_base);
-7214  tcg_gen_helper_0_1(helper_sysexit, tcg_const_i32(dflag));
+7213  gen_helper_sysexit(tcg_const_i32(dflag));
 7215 7214  gen_eob(s);
 7216 7215  }
 …    …
 7224 7223  }
 7225 7224  gen_jmp_im(pc_start - s->cs_base);
-7226  tcg_gen_helper_0_1(helper_syscall, tcg_const_i32(s->pc - pc_start));
+7225  gen_helper_syscall(tcg_const_i32(s->pc - pc_start));
 7227 7226  gen_eob(s);
 7228 7227  break;
 …    …
 7236 7235  }
 7237 7236  gen_jmp_im(pc_start - s->cs_base);
-7238  tcg_gen_helper_0_1(helper_sysret, tcg_const_i32(s->dflag));
+7237  gen_helper_sysret(tcg_const_i32(s->dflag));
 7239 7238  /* condition codes are modified only in long mode */
 7240 7239  if (s->lma)
 …    …
 7248 7247  gen_op_set_cc_op(s->cc_op);
 7249 7248  gen_jmp_im(pc_start - s->cs_base);
-7250  tcg_gen_helper_0_0(helper_cpuid);
+7249  gen_helper_cpuid();
 7251 7250  break;
 7252 7251  case 0xf4: /* hlt */
 …    …
 7257 7256  gen_op_set_cc_op(s->cc_op);
 7258 7257  gen_jmp_im(pc_start - s->cs_base);
-7259  tcg_gen_helper_0_1(helper_hlt, tcg_const_i32(s->pc - pc_start));
+7258  gen_helper_hlt(tcg_const_i32(s->pc - pc_start));
 7260 7259  s->is_jmp = 3;
 7261 7260  }
 …    …
 7286 7285  gen_jmp_im(pc_start - s->cs_base);
 7287 7286  tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
-7288  tcg_gen_helper_0_1(helper_lldt, cpu_tmp2_i32);
+7287  gen_helper_lldt(cpu_tmp2_i32);
 7289 7288  }
 7290 7289  break;
 …    …
 7309 7308  gen_jmp_im(pc_start - s->cs_base);
 7310 7309  tcg_gen_trunc_tl_i32(cpu_tmp2_i32, cpu_T[0]);
-7311  tcg_gen_helper_0_1(helper_ltr, cpu_tmp2_i32);
+7310  gen_helper_ltr(cpu_tmp2_i32);
 7312 7311  }
 7313 7312  break;
 …    …
 7320 7319  gen_op_set_cc_op(s->cc_op);
 7321 7320  if (op == 4)
-7322  tcg_gen_helper_0_1(helper_verr, cpu_T[0]);
+7321  gen_helper_verr(cpu_T[0]);
 7323 7322  else
-7324  tcg_gen_helper_0_1(helper_verw, cpu_T[0]);
+7323  gen_helper_verw(cpu_T[0]);
 7325 7324  s->cc_op = CC_OP_EFLAGS;
 7326 7325  break;
 …    …
 7341 7340  goto illegal_op;
 7342 7341  gen_jmp_im(pc_start - s->cs_base);
-7343  tcg_gen_helper_0_0(helper_rdtscp);
+7342  gen_helper_rdtscp();
 7344 7343  break;
 7345 7344  }
 …    …
 7380 7379  }
 7381 7380  gen_add_A0_ds_seg(s);
-7382  tcg_gen_helper_0_1(helper_monitor, cpu_A0);
+7381  gen_helper_monitor(cpu_A0);
 7383 7382  break;
 7384 7383  case 1: /* mwait */
 …    …
 7391 7390  }
 7392 7391  gen_jmp_im(pc_start - s->cs_base);
-7393  tcg_gen_helper_0_1(helper_mwait, tcg_const_i32(s->pc - pc_start));
+7392  gen_helper_mwait(tcg_const_i32(s->pc - pc_start));
 7394 7393  gen_eob(s);
 7395 7394  break;
 …    …
 7423 7422  break;
 7424 7423  } else {
-7425  tcg_gen_helper_0_2(helper_vmrun,
-7426  tcg_const_i32(s->aflag),
-7427  tcg_const_i32(s->pc - pc_start));
+7424  gen_helper_vmrun(tcg_const_i32(s->aflag),
+7425  tcg_const_i32(s->pc - pc_start));
 7428 7426  tcg_gen_exit_tb(0);
 7429 7427  s->is_jmp = 3;
 …    …
 7433 7431  if (!(s->flags & HF_SVME_MASK))
 7434 7432  goto illegal_op;
-7435  tcg_gen_helper_0_0(helper_vmmcall);
+7433  gen_helper_vmmcall();
 7436 7434  break;
 7437 7435  case 2: /* VMLOAD */
 …    …
 7442 7440  break;
 7443 7441  } else {
-7444  tcg_gen_helper_0_1(helper_vmload,
-7445  tcg_const_i32(s->aflag));
+7442  gen_helper_vmload(tcg_const_i32(s->aflag));
 7446 7443  }
 7447 7444  break;
 …    …
 7453 7450  break;
 7454 7451  } else {
-7455  tcg_gen_helper_0_1(helper_vmsave,
-7456  tcg_const_i32(s->aflag));
+7452  gen_helper_vmsave(tcg_const_i32(s->aflag));
 7457 7453  }
 7458 7454  break;
 …    …
 7466 7462  break;
 7467 7463  } else {
-7468  tcg_gen_helper_0_0(helper_stgi);
+7464  gen_helper_stgi();
 7469 7465  }
 7470 7466  break;
 …    …
 7476 7472  break;
 7477 7473  } else {
-7478  tcg_gen_helper_0_0(helper_clgi);
+7474  gen_helper_clgi();
 7479 7475  }
 7480 7476  break;
 …    …
 7484 7480  !s->pe)
 7485 7481  goto illegal_op;
-7486  tcg_gen_helper_0_0(helper_skinit);
+7482  gen_helper_skinit();
 7487 7483  break;
 7488 7484  case 7: /* INVLPGA */
 …    …
 7493 7489  break;
 7494 7490  } else {
-7495  tcg_gen_helper_0_1(helper_invlpga,
-7496  tcg_const_i32(s->aflag));
+7491  gen_helper_invlpga(tcg_const_i32(s->aflag));
 7497 7492  }
 7498 7493  break;
 …    …
 7522 7517  case 4: /* smsw */
 7523 7518  gen_svm_check_intercept(s, pc_start, SVM_EXIT_READ_CR0);
+7519  #if defined TARGET_X86_64 && defined WORDS_BIGENDIAN
+7520  tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]) + 4);
+7521  #else
 7524 7522  tcg_gen_ld32u_tl(cpu_T[0], cpu_env, offsetof(CPUX86State,cr[0]));
+7523  #endif
 7525 7524  gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 1);
 7526 7525  break;
 …    …
 7531 7530  gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
 7532 7531  gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
-7533  tcg_gen_helper_0_1(helper_lmsw, cpu_T[0]);
+7532  gen_helper_lmsw(cpu_T[0]);
 7534 7533  gen_jmp_im(s->pc - s->cs_base);
 7535 7534  gen_eob(s);
 …    …
 7558 7557  gen_jmp_im(pc_start - s->cs_base);
 7559 7558  gen_lea_modrm(s, modrm, &reg_addr, &offset_addr);
-7560  tcg_gen_helper_0_1(helper_invlpg, cpu_A0);
+7559  gen_helper_invlpg(cpu_A0);
 7561 7560  gen_jmp_im(s->pc - s->cs_base);
 7562 7561  gen_eob(s);
 …    …
 7615 7614  if (!s->pe || s->vm86)
 7616 7615  goto illegal_op;
-7617  t0 = tcg_temp_local_new(TCG_TYPE_TL);
-7618  t1 = tcg_temp_local_new(TCG_TYPE_TL);
-7619  t2 = tcg_temp_local_new(TCG_TYPE_TL);
+7616  t0 = tcg_temp_local_new();
+7617  t1 = tcg_temp_local_new();
+7618  t2 = tcg_temp_local_new();
 7620 7619  #ifdef VBOX
-7621  a0 = tcg_temp_local_new(TCG_TYPE_TL);
+7620  a0 = tcg_temp_local_new();
 7622 7621  #endif
 7623 7622  ot = OT_WORD;
 …    …
 7680 7679  reg = ((modrm >> 3) & 7) | rex_r;
 7681 7680  gen_ldst_modrm(s, modrm, OT_WORD, OR_TMP0, 0);
-7682  t0 = tcg_temp_local_new(TCG_TYPE_TL);
+7681  t0 = tcg_temp_local_new();
 7683 7682  if (s->cc_op != CC_OP_DYNAMIC)
 7684 7683  gen_op_set_cc_op(s->cc_op);
 7685 7684  if (b == 0x102)
-7686  tcg_gen_helper_1_1(helper_lar, t0, cpu_T[0]);
+7685  gen_helper_lar(t0, cpu_T[0]);
 7687 7686  else
-7688  tcg_gen_helper_1_1(helper_lsl, t0, cpu_T[0]);
+7687  gen_helper_lsl(t0, cpu_T[0]);
 7689 7688  tcg_gen_andi_tl(cpu_tmp0, cpu_cc_src, CC_Z);
 7690 7689  label1 = gen_new_label();
 …    …
 7746 7745  if (b & 2) {
 7747 7746  gen_op_mov_TN_reg(ot, 0, rm);
-7748  tcg_gen_helper_0_2(helper_write_crN,
-7749  tcg_const_i32(reg), cpu_T[0]);
+7747  gen_helper_write_crN(tcg_const_i32(reg), cpu_T[0]);
 7750 7748  gen_jmp_im(s->pc - s->cs_base);
 7751 7749  gen_eob(s);
 7752 7750  } else {
-7753  tcg_gen_helper_1_1(helper_read_crN,
-7754  cpu_T[0], tcg_const_i32(reg));
+7751  gen_helper_read_crN(cpu_T[0], tcg_const_i32(reg));
 7755 7752  gen_op_mov_reg_T0(ot, rm);
 7756 7753  }
 …    …
 7783 7780  gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_DR0 + reg);
 7784 7781  gen_op_mov_TN_reg(ot, 0, rm);
-7785  tcg_gen_helper_0_2(helper_movl_drN_T0,
-7786  tcg_const_i32(reg), cpu_T[0]);
+7782  gen_helper_movl_drN_T0(tcg_const_i32(reg), cpu_T[0]);
 7787 7783  gen_jmp_im(s->pc - s->cs_base);
 7788 7784  gen_eob(s);
 …    …
 7799 7795  } else {
 7800 7796  gen_svm_check_intercept(s, pc_start, SVM_EXIT_WRITE_CR0);
-7801  tcg_gen_helper_0_0(helper_clts);
+7797  gen_helper_clts();
 7802 7798  /* abort block because static cpu state changed */
 7803 7799  gen_jmp_im(s->pc - s->cs_base);
 …    …
 7835 7831  gen_op_set_cc_op(s->cc_op);
 7836 7832  gen_jmp_im(pc_start - s->cs_base);
-7837  tcg_gen_helper_0_2(helper_fxsave,
-7838  cpu_A0, tcg_const_i32((s->dflag == 2)));
+7833  gen_helper_fxsave(cpu_A0, tcg_const_i32((s->dflag == 2)));
 7839 7834  break;
 7840 7835  case 1: /* fxrstor */
 …    …
 7850 7845  gen_op_set_cc_op(s->cc_op);
 7851 7846  gen_jmp_im(pc_start - s->cs_base);
-7852  tcg_gen_helper_0_2(helper_fxrstor,
-7853  cpu_A0, tcg_const_i32((s->dflag == 2)));
+7847  gen_helper_fxrstor(cpu_A0, tcg_const_i32((s->dflag == 2)));
 7854 7848  break;
 7855 7849  case 2: /* ldmxcsr */
 …    …
 7910 7904  }
 7911 7905  gen_jmp_im(s->pc - s->cs_base);
-7912  tcg_gen_helper_0_0(helper_rsm);
+7906  gen_helper_rsm();
 7913 7907  gen_eob(s);
 7914 7908  break;
 …    …
 7931 7925
 7932 7926  gen_ldst_modrm(s, modrm, ot, OR_TMP0, 0);
-7933  tcg_gen_helper_1_2(helper_popcnt,
-7934  cpu_T[0], cpu_T[0], tcg_const_i32(ot));
+7927  gen_helper_popcnt(cpu_T[0], cpu_T[0], tcg_const_i32(ot));
 7935 7928  gen_op_mov_reg_T0(ot, reg);
 …    …
 7955 7948  /* lock generation */
 7956 7949  if (s->prefix & PREFIX_LOCK)
-7957  tcg_gen_helper_0_0(helper_unlock);
+7950  gen_helper_unlock();
 7958 7951  return s->pc;
 7959 7952  illegal_op:
 7960 7953  if (s->prefix & PREFIX_LOCK)
-7961  tcg_gen_helper_0_0(helper_unlock);
+7954  gen_helper_unlock();
 7962 7955  /* XXX: ensure that no lock was generated */
 7963 7956  gen_exception(s, EXCP06_ILLOP, pc_start - s->cs_base);
 …    …
 7972 7965  assert(sizeof(CCTable) == (1 << 4));
 7973 7966  #endif
-7974  cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
-7975  cpu_cc_op = tcg_global_mem_new(TCG_TYPE_I32,
-7976  TCG_AREG0, offsetof(CPUState, cc_op), "cc_op");
-7977  cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
-7978  TCG_AREG0, offsetof(CPUState, cc_src), "cc_src");
-7979  cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
-7980  TCG_AREG0, offsetof(CPUState, cc_dst), "cc_dst");
-7981  cpu_cc_tmp = tcg_global_mem_new(TCG_TYPE_TL,
-7982  TCG_AREG0, offsetof(CPUState, cc_tmp), "cc_tmp");
+7967  cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
+7968  cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0,
+7969  offsetof(CPUState, cc_op), "cc_op");
+7970  cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
+7971  "cc_src");
+7972  cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
+7973  "cc_dst");
+7974  cpu_cc_tmp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_tmp),
+7975  "cc_tmp");
 7983 7976
 7984 7977  /* register helpers */
-7985
-7986  #define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
+7978  #define GEN_HELPER 2
 7987 7979  #include "helper.h"
 7988 7980  }
 …    …
 7998 7990  target_ulong pc_ptr;
 7999 7991  uint16_t *gen_opc_end;
+7992  CPUBreakpoint *bp;
 8000 7993  int j, lj, cflags;
 8001 7994  uint64_t flags;
 …    …
 8029 8022  dc->record_call = 0;
 8030 8023  # endif
-8031  #endif
+8024  #endif /* VBOX */
 8032 8025  dc->cpl = (flags >> HF_CPL_SHIFT) & 3;
 8033 8026  dc->iopl = (flags >> IOPL_SHIFT) & 3;
 …    …
 8067 8060  #endif
 8068 8061
-8069  cpu_T[0] = tcg_temp_new(TCG_TYPE_TL);
-8070  cpu_T[1] = tcg_temp_new(TCG_TYPE_TL);
-8071  cpu_A0 = tcg_temp_new(TCG_TYPE_TL);
-8072  cpu_T3 = tcg_temp_new(TCG_TYPE_TL);
-8073
-8074  cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
-8075  cpu_tmp1_i64 = tcg_temp_new(TCG_TYPE_I64);
-8076  cpu_tmp2_i32 = tcg_temp_new(TCG_TYPE_I32);
-8077  cpu_tmp3_i32 = tcg_temp_new(TCG_TYPE_I32);
-8078  cpu_tmp4 = tcg_temp_new(TCG_TYPE_TL);
-8079  cpu_tmp5 = tcg_temp_new(TCG_TYPE_TL);
-8080  cpu_tmp6 = tcg_temp_new(TCG_TYPE_TL);
-8081  cpu_ptr0 = tcg_temp_new(TCG_TYPE_PTR);
-8082  cpu_ptr1 = tcg_temp_new(TCG_TYPE_PTR);
+8062  cpu_T[0] = tcg_temp_new();
+8063  cpu_T[1] = tcg_temp_new();
+8064  cpu_A0 = tcg_temp_new();
+8065  cpu_T3 = tcg_temp_new();
+8066
+8067  cpu_tmp0 = tcg_temp_new();
+8068  cpu_tmp1_i64 = tcg_temp_new_i64();
+8069  cpu_tmp2_i32 = tcg_temp_new_i32();
+8070  cpu_tmp3_i32 = tcg_temp_new_i32();
+8071  cpu_tmp4 = tcg_temp_new();
+8072  cpu_tmp5 = tcg_temp_new();
+8073  cpu_tmp6 = tcg_temp_new();
+8074  cpu_ptr0 = tcg_temp_new_ptr();
+8075  cpu_ptr1 = tcg_temp_new_ptr();
 8083 8076
 8084 8077  gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
 …    …
 8094 8087  gen_icount_start();
 8095 8088  for(;;) {
-8096  if (env->nb_breakpoints > 0) {
-8097  for(j = 0; j < env->nb_breakpoints; j++) {
-8098  if (env->breakpoints[j] == pc_ptr) {
+8089  if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
+8090  TAILQ_FOREACH(bp, &env->breakpoints, entry) {
+8091  if (bp->pc == pc_ptr) {
 8099 8092  gen_debug(dc, pc_ptr - dc->cs_base);
 8100 8093  break;
 …    …
 8174 8167
 8175 8168  #ifdef DEBUG_DISAS
-8176  if (loglevel & CPU_LOG_TB_CPU) {
-8177  cpu_dump_state(env, logfile, fprintf, X86_DUMP_CCOP);
-8178  }
-8179  if (loglevel & CPU_LOG_TB_IN_ASM) {
+8169  log_cpu_state_mask(CPU_LOG_TB_CPU, env, X86_DUMP_CCOP);
+8170  if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
 8180 8171  int disas_flags;
-8181  fprintf(logfile, "----------------\n");
-8182  fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
+8172  qemu_log("----------------\n");
+8173  qemu_log("IN: %s\n", lookup_symbol(pc_start));
 8183 8174  #ifdef TARGET_X86_64
 8184 8175  if (dc->code64)
 …    …
 8187 8178  #endif
 8188 8179  disas_flags = !dc->code32;
-8189  target_disas(logfile, pc_start, pc_ptr - pc_start, disas_flags);
+8180  log_target_disas(pc_start, pc_ptr - pc_start, disas_flags);
-8190  fprintf(logfile, "\n");
+8181  qemu_log("\n");
 8191 8182  }
 8192 8183  }
 8193 8184  #endif
 …    …
 8213 8204  int cc_op;
 8214 8205  #ifdef DEBUG_DISAS
-8215  if (loglevel & CPU_LOG_TB_OP) {
+8206  if (qemu_loglevel_mask(CPU_LOG_TB_OP)) {
 8216 8207  int i;
-8217  fprintf(logfile, "RESTORE:\n");
+8208  qemu_log("RESTORE:\n");
 8218 8209  for(i = 0;i <= pc_pos; i++) {
 8219 8210  if (gen_opc_instr_start[i]) {
-8220  fprintf(logfile, "0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
+8211  qemu_log("0x%04x: " TARGET_FMT_lx "\n", i, gen_opc_pc[i]);
 8221 8212  }
 8222 8213  }
-8223  fprintf(logfile, "spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
+8214  qemu_log("spc=0x%08lx pc_pos=0x%x eip=" TARGET_FMT_lx " cs_base=%x\n",
 8224 8215  searched_pc, pc_pos, gen_opc_pc[pc_pos] - tb->cs_base,
 8225 8216  (uint32_t)tb->cs_base);
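Most of the translate.c churn in this changeset is the mechanical conversion visible above: every arity-suffixed call such as tcg_gen_helper_0_1(helper_ffree_STN, tcg_const_i32(opreg)) becomes a call to a generated wrapper, gen_helper_ffree_STN(tcg_const_i32(opreg)), and the wrappers come from re-including helper.h with GEN_HELPER defined instead of the old DEF_HELPER/tcg_register_helper registration. The standalone C sketch below only illustrates that wrapper-generation idea; DEF_HELPER_1, helper_foo and emit_call_1 are simplified stand-ins, not the real QEMU/TCG definitions.

    #include <stdio.h>

    /* Stand-in for the generic call emitter that the old, arity-specific
     * tcg_gen_helper_0_1()-style calls named explicitly at every site. */
    static void emit_call_1(void (*fn)(int), int arg)
    {
        fn(arg);
    }

    /* Stand-in for a helper that would normally live in op_helper.c. */
    static void helper_foo(int arg)
    {
        printf("helper_foo(%d)\n", arg);
    }

    /* Expanding the helper list a second time (the "#define GEN_HELPER 2"
     * seen in the diff) emits one thin wrapper per helper, so translate.c
     * can write gen_helper_foo(x) without spelling out the arity. */
    #define DEF_HELPER_1(name) \
        static void gen_helper_##name(int arg) \
        { \
            emit_call_1(helper_##name, arg); \
        }

    DEF_HELPER_1(foo)

    int main(void)
    {
        gen_helper_foo(42); /* new-style call site */
        return 0;
    }

The payoff of the indirection is that each call site names only the helper and its arguments, so the argument-count bookkeeping lives in one generated place instead of being repeated at every call in the translator.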
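A second pattern worth noting is the breakpoint scan near the end of the diff, which stops indexing a fixed env->breakpoints[] array with an nb_breakpoints counter and instead walks a linked list of CPUBreakpoint records with TAILQ_FOREACH. The sketch below shows that access pattern in isolation using the BSD <sys/queue.h> macros (QEMU carries equivalent TAILQ definitions of its own); the Breakpoint struct and list head here are simplified stand-ins, not the real CPUBreakpoint/CPUState layout.

    #include <stdio.h>
    #include <stdlib.h>
    #include <sys/queue.h>

    /* Simplified stand-in for CPUBreakpoint: a guest PC plus the list link. */
    typedef struct Breakpoint {
        unsigned long pc;
        TAILQ_ENTRY(Breakpoint) entry;
    } Breakpoint;

    /* Simplified stand-in for the breakpoint list head kept in the CPU state. */
    static TAILQ_HEAD(, Breakpoint) breakpoints = TAILQ_HEAD_INITIALIZER(breakpoints);

    static void insert_breakpoint(unsigned long pc)
    {
        Breakpoint *bp = malloc(sizeof(*bp));
        if (!bp)
            return;
        bp->pc = pc;
        TAILQ_INSERT_TAIL(&breakpoints, bp, entry);
    }

    int main(void)
    {
        Breakpoint *bp;
        unsigned long pc_ptr = 0x1000; /* guest PC about to be translated */

        insert_breakpoint(0x1000);
        insert_breakpoint(0x2000);

        /* Same shape as the translator loop in the diff: stop if a
         * breakpoint is registered for the current guest PC. */
        if (!TAILQ_EMPTY(&breakpoints)) {
            TAILQ_FOREACH(bp, &breakpoints, entry) {
                if (bp->pc == pc_ptr) {
                    printf("breakpoint hit at 0x%lx\n", bp->pc);
                    break;
                }
            }
        }
        return 0;
    }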