Changeset 36125 in vbox for trunk/src/recompiler
- Timestamp: Mar 1, 2011, 4:49:42 PM (14 years ago)
- svn:sync-xref-src-repo-rev: 70290
- Location: trunk/src/recompiler
- Files: 27 edited
trunk/src/recompiler/bswap.h
r13382 r36125 4 4 #include "config-host.h" 5 5 6 #ifndef _MSC_VER7 6 #include <inttypes.h> 8 #endif9 7 10 8 #ifdef HAVE_BYTESWAP_H 11 9 #include <byteswap.h> 12 #else13 #ifdef _MSC_VER14 static _inline uint16_t bswap_16(register uint16_t x)15 {16 return ((uint16_t)( \17 (((uint16_t)(x) & (uint16_t)0x00ffU) << 8) | \18 (((uint16_t)(x) & (uint16_t)0xff00U) >> 8) )); \19 }20 21 static _inline uint32_t bswap_32(register uint32_t x) \22 { \23 return ((uint32_t)( \24 (((uint32_t)(x) & (uint32_t)0x000000ffUL) << 24) | \25 (((uint32_t)(x) & (uint32_t)0x0000ff00UL) << 8) | \26 (((uint32_t)(x) & (uint32_t)0x00ff0000UL) >> 8) | \27 (((uint32_t)(x) & (uint32_t)0xff000000UL) >> 24) )); \28 }29 30 static _inline uint64_t bswap_64(register uint64_t x) \31 { \32 return ((uint64_t)( \33 (uint64_t)(((uint64_t)(x) & (uint64_t)0x00000000000000ffULL) << 56) | \34 (uint64_t)(((uint64_t)(x) & (uint64_t)0x000000000000ff00ULL) << 40) | \35 (uint64_t)(((uint64_t)(x) & (uint64_t)0x0000000000ff0000ULL) << 24) | \36 (uint64_t)(((uint64_t)(x) & (uint64_t)0x00000000ff000000ULL) << 8) | \37 (uint64_t)(((uint64_t)(x) & (uint64_t)0x000000ff00000000ULL) >> 8) | \38 (uint64_t)(((uint64_t)(x) & (uint64_t)0x0000ff0000000000ULL) >> 24) | \39 (uint64_t)(((uint64_t)(x) & (uint64_t)0x00ff000000000000ULL) >> 40) | \40 (uint64_t)(((uint64_t)(x) & (uint64_t)0xff00000000000000ULL) >> 56) )); \41 }42 43 10 #else 44 11 … … 74 41 (uint64_t)(((uint64_t)(__x) & (uint64_t)0xff00000000000000ULL) >> 56) )); \ 75 42 }) 76 #endif77 43 78 44 #endif /* !HAVE_BYTESWAP_H */ 79 45 80 46 #ifndef bswap16 /* BSD endian.h clash */ 81 #ifndef VBOX82 47 static inline uint16_t bswap16(uint16_t x) 83 #else84 DECLINLINE(uint16_t) bswap16(uint16_t x)85 #endif86 48 { 87 49 return bswap_16(x); … … 90 52 91 53 #ifndef bswap32 /* BSD endian.h clash */ 92 #ifndef VBOX93 54 static inline uint32_t bswap32(uint32_t x) 94 #else95 DECLINLINE(uint32_t) bswap32(uint32_t x)96 #endif97 55 { 98 56 return bswap_32(x); … … 101 59 102 60 #ifndef bswap64 /* BSD endian.h clash. 
*/ 103 #ifndef VBOX104 61 static inline uint64_t bswap64(uint64_t x) 105 #else106 DECLINLINE(uint64_t) bswap64(uint64_t x)107 #endif108 62 { 109 63 return bswap_64(x); … … 111 65 #endif 112 66 113 #ifndef VBOX114 67 static inline void bswap16s(uint16_t *s) 115 #else116 DECLINLINE(void) bswap16s(uint16_t *s)117 #endif118 68 { 119 69 *s = bswap16(*s); 120 70 } 121 71 122 #ifndef VBOX123 72 static inline void bswap32s(uint32_t *s) 124 #else125 DECLINLINE(void) bswap32s(uint32_t *s)126 #endif127 73 { 128 74 *s = bswap32(*s); 129 75 } 130 76 131 #ifndef VBOX132 77 static inline void bswap64s(uint64_t *s) 133 #else134 DECLINLINE(void) bswap64s(uint64_t *s)135 #endif136 78 { 137 79 *s = bswap64(*s); … … 150 92 #endif 151 93 152 #ifndef VBOX153 94 #define CPU_CONVERT(endian, size, type)\ 154 95 static inline type endian ## size ## _to_cpu(type v)\ … … 181 122 *p = cpu_to_ ## endian ## size(v);\ 182 123 } 183 #else /* VBOX */184 #define CPU_CONVERT(endian, size, type)\185 DECLINLINE(type) endian ## size ## _to_cpu(type v)\186 {\187 return endian ## _bswap(v, size);\188 }\189 \190 DECLINLINE(type) cpu_to_ ## endian ## size(type v)\191 {\192 return endian ## _bswap(v, size);\193 }\194 \195 DECLINLINE(void) endian ## size ## _to_cpus(type *p)\196 {\197 endian ## _bswaps(p, size)\198 }\199 \200 DECLINLINE(void) cpu_to_ ## endian ## size ## s(type *p)\201 {\202 endian ## _bswaps(p, size)\203 }\204 \205 DECLINLINE(type) endian ## size ## _to_cpup(const type *p)\206 {\207 return endian ## size ## _to_cpu(*p);\208 }\209 \210 DECLINLINE(void) cpu_to_ ## endian ## size ## w(type *p, type v)\211 {\212 *p = cpu_to_ ## endian ## size(v);\213 }214 #endif /* VBOX */215 124 216 125 CPU_CONVERT(be, 16, uint16_t) -
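The bswap.h hunks above drop the MSC-specific _inline byte-swap helpers and the DECLINLINE wrappers, leaving a single set of plain static inline functions over the generic bswap_16/32/64 primitives. A minimal, self-contained sketch of that remaining shape; the fallback bodies below are illustrative, not copied verbatim from the header:

#include <stdint.h>

/* Portable fallbacks standing in for byteswap.h (or the GCC
 * statement-expression versions the changeset keeps). */
static inline uint16_t bswap_16(uint16_t x)
{
    return (uint16_t)((x << 8) | (x >> 8));
}

static inline uint32_t bswap_32(uint32_t x)
{
    return ((x & 0x000000ffU) << 24) | ((x & 0x0000ff00U) << 8) |
           ((x & 0x00ff0000U) >> 8)  | ((x & 0xff000000U) >> 24);
}

static inline uint64_t bswap_64(uint64_t x)
{
    return ((uint64_t)bswap_32((uint32_t)x) << 32) | bswap_32((uint32_t)(x >> 32));
}

/* The wrappers the header keeps once the VBOX/DECLINLINE branches are gone. */
static inline uint16_t bswap16(uint16_t x) { return bswap_16(x); }
static inline uint32_t bswap32(uint32_t x) { return bswap_32(x); }
static inline uint64_t bswap64(uint64_t x) { return bswap_64(x); }

static inline void bswap16s(uint16_t *s) { *s = bswap16(*s); }
static inline void bswap32s(uint32_t *s) { *s = bswap32(*s); }
static inline void bswap64s(uint64_t *s) { *s = bswap64(*s); }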
trunk/src/recompiler/cpu-all.h
r35346 r36125 96 96 #else 97 97 98 #ifndef VBOX99 98 static inline uint16_t tswap16(uint16_t s) 100 #else101 DECLINLINE(uint16_t) tswap16(uint16_t s)102 #endif103 99 { 104 100 return s; 105 101 } 106 102 107 #ifndef VBOX108 103 static inline uint32_t tswap32(uint32_t s) 109 #else110 DECLINLINE(uint32_t) tswap32(uint32_t s)111 #endif112 104 { 113 105 return s; 114 106 } 115 107 116 #ifndef VBOX117 108 static inline uint64_t tswap64(uint64_t s) 118 #else119 DECLINLINE(uint64_t) tswap64(uint64_t s)120 #endif121 109 { 122 110 return s; 123 111 } 124 112 125 #ifndef VBOX126 113 static inline void tswap16s(uint16_t *s) 127 #else 128 DECLINLINE(void) tswap16s(uint16_t *s) 129 #endif 130 { 131 } 132 133 #ifndef VBOX 114 { 115 } 116 134 117 static inline void tswap32s(uint32_t *s) 135 #else 136 DECLINLINE(void) tswap32s(uint32_t *s) 137 #endif 138 { 139 } 140 141 #ifndef VBOX 118 { 119 } 120 142 121 static inline void tswap64s(uint64_t *s) 143 #else144 DECLINLINE(void) tswap64s(uint64_t *s)145 #endif146 122 { 147 123 } … … 375 351 } 376 352 377 #else /* ! (VBOX && REM_PHYS_ADDR_IN_TLB)*/353 #else /* !VBOX */ 378 354 379 355 static inline int ldub_p(void *ptr) … … 575 551 #if !defined(WORDS_BIGENDIAN) || defined(WORDS_ALIGNED) 576 552 577 #ifndef VBOX578 553 static inline int lduw_be_p(void *ptr) 579 554 { … … 590 565 #endif 591 566 } 592 #else /* VBOX */ 593 DECLINLINE(int) lduw_be_p(void *ptr) 594 { 595 #if defined(__i386__) && !defined(_MSC_VER) 596 int val; 597 asm volatile ("movzwl %1, %0\n" 598 "xchgb %b0, %h0\n" 599 : "=q" (val) 600 : "m" (*(uint16_t *)ptr)); 601 return val; 602 #else 603 uint8_t *b = (uint8_t *) ptr; 604 return ((b[0] << 8) | b[1]); 605 #endif 606 } 607 #endif 608 609 #ifndef VBOX 567 610 568 static inline int ldsw_be_p(void *ptr) 611 569 { … … 622 580 #endif 623 581 } 624 #else 625 DECLINLINE(int) ldsw_be_p(void *ptr) 626 { 627 #if defined(__i386__) && !defined(_MSC_VER) 628 int val; 629 asm volatile ("movzwl %1, %0\n" 630 "xchgb %b0, %h0\n" 631 : "=q" (val) 632 : "m" (*(uint16_t *)ptr)); 633 return (int16_t)val; 634 #else 635 uint8_t *b = (uint8_t *) ptr; 636 return (int16_t)((b[0] << 8) | b[1]); 637 #endif 638 } 639 #endif 640 641 #ifndef VBOX 582 642 583 static inline int ldl_be_p(void *ptr) 643 584 { … … 654 595 #endif 655 596 } 656 #else 657 DECLINLINE(int) ldl_be_p(void *ptr) 658 { 659 #if (defined(__i386__) || defined(__x86_64__)) && !defined(_MSC_VER) 660 int val; 661 asm volatile ("movl %1, %0\n" 662 "bswap %0\n" 663 : "=r" (val) 664 : "m" (*(uint32_t *)ptr)); 665 return val; 666 #else 667 uint8_t *b = (uint8_t *) ptr; 668 return (b[0] << 24) | (b[1] << 16) | (b[2] << 8) | b[3]; 669 #endif 670 } 671 #endif 672 673 #ifndef VBOX 597 674 598 static inline uint64_t ldq_be_p(void *ptr) 675 #else676 DECLINLINE(uint64_t) ldq_be_p(void *ptr)677 #endif678 599 { 679 600 uint32_t a,b; … … 683 604 } 684 605 685 #ifndef VBOX686 606 static inline void stw_be_p(void *ptr, int v) 687 607 { … … 697 617 #endif 698 618 } 699 #else 700 DECLINLINE(void) stw_be_p(void *ptr, int v) 701 { 702 #if defined(__i386__) && !defined(_MSC_VER) 703 asm volatile ("xchgb %b0, %h0\n" 704 "movw %w0, %1\n" 705 : "=q" (v) 706 : "m" (*(uint16_t *)ptr), "0" (v)); 707 #else 708 uint8_t *d = (uint8_t *) ptr; 709 d[0] = v >> 8; 710 d[1] = v; 711 #endif 712 } 713 714 #endif /* VBOX */ 715 716 #ifndef VBOX 619 717 620 static inline void stl_be_p(void *ptr, int v) 718 621 { … … 730 633 #endif 731 634 } 732 #else 733 DECLINLINE(void) stl_be_p(void *ptr, int v) 734 { 735 #if !defined(_MSC_VER) && 
(defined(__i386__) || defined(__x86_64__)) 736 asm volatile ("bswap %0\n" 737 "movl %0, %1\n" 738 : "=r" (v) 739 : "m" (*(uint32_t *)ptr), "0" (v)); 740 #else 741 uint8_t *d = (uint8_t *) ptr; 742 d[0] = v >> 24; 743 d[1] = v >> 16; 744 d[2] = v >> 8; 745 d[3] = v; 746 #endif 747 } 748 #endif /* VBOX */ 749 750 #ifndef VBOX 635 751 636 static inline void stq_be_p(void *ptr, uint64_t v) 752 #else753 DECLINLINE(void) stq_be_p(void *ptr, uint64_t v)754 #endif755 637 { 756 638 stl_be_p(ptr, v >> 32); … … 759 641 760 642 /* float access */ 761 #ifndef VBOX762 643 static inline float32 ldfl_be_p(void *ptr) 763 #else764 DECLINLINE(float32) ldfl_be_p(void *ptr)765 #endif766 644 { 767 645 union { … … 773 651 } 774 652 775 #ifndef VBOX776 653 static inline void stfl_be_p(void *ptr, float32 v) 777 #else778 DECLINLINE(void) stfl_be_p(void *ptr, float32 v)779 #endif780 654 { 781 655 union { … … 787 661 } 788 662 789 #ifndef VBOX790 663 static inline float64 ldfq_be_p(void *ptr) 791 #else792 DECLINLINE(float64) ldfq_be_p(void *ptr)793 #endif794 664 { 795 665 CPU_DoubleU u; … … 799 669 } 800 670 801 #ifndef VBOX802 671 static inline void stfq_be_p(void *ptr, float64 v) 803 #else804 DECLINLINE(void) stfq_be_p(void *ptr, float64 v)805 #endif806 672 { 807 673 CPU_DoubleU u; … … 1194 1060 /* MMIO pages are identified by a combination of an IO device index and 1195 1061 3 flags. The ROMD code stores the page ram offset in iotlb entry, 1196 so only a limited number of ids are avai lable. */1062 so only a limited number of ids are avaiable. */ 1197 1063 1198 1064 #define IO_MEM_SHIFT 3 … … 1237 1103 void cpu_physical_memory_rw(target_phys_addr_t addr, uint8_t *buf, 1238 1104 int len, int is_write); 1239 #ifndef VBOX1240 1105 static inline void cpu_physical_memory_read(target_phys_addr_t addr, 1241 1106 uint8_t *buf, int len) 1242 #else1243 DECLINLINE(void) cpu_physical_memory_read(target_phys_addr_t addr,1244 uint8_t *buf, int len)1245 #endif1246 1107 { 1247 1108 cpu_physical_memory_rw(addr, buf, len, 0); 1248 1109 } 1249 #ifndef VBOX1250 1110 static inline void cpu_physical_memory_write(target_phys_addr_t addr, 1251 1111 const uint8_t *buf, int len) 1252 #else1253 DECLINLINE(void) cpu_physical_memory_write(target_phys_addr_t addr,1254 const uint8_t *buf, int len)1255 #endif1256 1112 { 1257 1113 cpu_physical_memory_rw(addr, (uint8_t *)buf, len, 1); … … 1354 1210 DECLINLINE(int64_t) cpu_get_real_ticks(void) 1355 1211 { 1356 return 1212 return ASMReadTSC(); 1357 1213 } 1358 1214 … … 1472 1328 extern int64_t kqemu_ret_excp_count; 1473 1329 extern int64_t kqemu_ret_intr_count; 1474 1475 1330 #endif 1476 1331 -
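The cpu-all.h hunks remove the DECLINLINE/inline-asm duplicates of the big-endian accessors. The byte-by-byte forms that remain as the generic fallback look roughly as follows; this is a sketch assembled from the fragments visible in the hunk, and the real header additionally carries x86 inline-asm fast paths:

#include <stdint.h>

/* Big-endian 16-bit load: assemble from individual bytes so it works on any
 * host endianness and alignment. */
static inline int lduw_be_p(void *ptr)
{
    uint8_t *b = (uint8_t *)ptr;
    return (b[0] << 8) | b[1];
}

/* Sign-extending variant of the above. */
static inline int ldsw_be_p(void *ptr)
{
    uint8_t *b = (uint8_t *)ptr;
    return (int16_t)((b[0] << 8) | b[1]);
}

/* Big-endian 32-bit load. */
static inline int ldl_be_p(void *ptr)
{
    uint8_t *b = (uint8_t *)ptr;
    return (b[0] << 24) | (b[1] << 16) | (b[2] << 8) | b[3];
}

/* Big-endian 16- and 32-bit stores. */
static inline void stw_be_p(void *ptr, int v)
{
    uint8_t *d = (uint8_t *)ptr;
    d[0] = (uint8_t)(v >> 8);
    d[1] = (uint8_t)v;
}

static inline void stl_be_p(void *ptr, int v)
{
    uint8_t *d = (uint8_t *)ptr;
    d[0] = (uint8_t)(v >> 24);
    d[1] = (uint8_t)(v >> 16);
    d[2] = (uint8_t)(v >> 8);
    d[3] = (uint8_t)v;
}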
trunk/src/recompiler/cpu-exec.c
r36056 r36125 174 174 } 175 175 176 #ifndef VBOX177 176 static inline TranslationBlock *tb_find_fast(void) 178 #else179 DECLINLINE(TranslationBlock *) tb_find_fast(void)180 #endif181 177 { 182 178 TranslationBlock *tb; -
trunk/src/recompiler/exec-all.h
r35346 r36125 112 112 target_phys_addr_t paddr, int prot, 113 113 int mmu_idx, int is_softmmu); 114 #ifndef VBOX115 114 static inline int tlb_set_page(CPUState *env1, target_ulong vaddr, 116 115 target_phys_addr_t paddr, int prot, 117 116 int mmu_idx, int is_softmmu) 118 #else119 DECLINLINE(int) tlb_set_page(CPUState *env1, target_ulong vaddr,120 target_phys_addr_t paddr, int prot,121 int mmu_idx, int is_softmmu)122 #endif123 117 { 124 118 if (prot & PAGE_READ) … … 193 187 }; 194 188 195 #ifndef VBOX196 189 static inline unsigned int tb_jmp_cache_hash_page(target_ulong pc) 197 #else198 DECLINLINE(unsigned int) tb_jmp_cache_hash_page(target_ulong pc)199 #endif200 190 { 201 191 target_ulong tmp; … … 204 194 } 205 195 206 #ifndef VBOX207 196 static inline unsigned int tb_jmp_cache_hash_func(target_ulong pc) 208 #else209 DECLINLINE(unsigned int) tb_jmp_cache_hash_func(target_ulong pc)210 #endif211 212 197 { 213 198 target_ulong tmp; … … 217 202 } 218 203 219 #ifndef VBOX220 204 static inline unsigned int tb_phys_hash_func(unsigned long pc) 221 #else222 DECLINLINE(unsigned int) tb_phys_hash_func(unsigned long pc)223 #endif224 205 { 225 206 return pc & (CODE_GEN_PHYS_HASH_SIZE - 1); … … 281 262 282 263 /* set the jump target */ 283 #ifndef VBOX284 264 static inline void tb_set_jmp_target(TranslationBlock *tb, 285 265 int n, unsigned long addr) 286 #else287 DECLINLINE(void) tb_set_jmp_target(TranslationBlock *tb,288 int n, unsigned long addr)289 #endif290 266 { 291 267 tb->tb_next[n] = addr; … … 294 270 #endif 295 271 296 #ifndef VBOX297 272 static inline void tb_add_jump(TranslationBlock *tb, int n, 298 273 TranslationBlock *tb_next) 299 #else300 DECLINLINE(void) tb_add_jump(TranslationBlock *tb, int n,301 TranslationBlock *tb_next)302 #endif303 274 { 304 275 /* NOTE: this test is only needed for thread safety */ … … 384 355 /* NOTE2: the returned address is not exactly the physical address: it 385 356 is the offset relative to phys_ram_base */ 386 #ifndef VBOX387 357 static inline target_ulong get_phys_addr_code(CPUState *env1, target_ulong addr) 388 #else389 DECLINLINE(target_ulong) get_phys_addr_code(CPUState *env1, target_ulong addr)390 #endif391 358 { 392 359 int mmu_idx, page_index, pd; … … 425 392 /* Deterministic execution requires that IO only be performed on the last 426 393 instruction of a TB so that interrupts take effect immediately. */ 427 #ifndef VBOX428 394 static inline int can_do_io(CPUState *env) 429 #else430 DECLINLINE(int) can_do_io(CPUState *env)431 #endif432 395 { 433 396 if (!use_icount) -
trunk/src/recompiler/exec.c
r35996 r36125 358 358 } 359 359 360 #ifndef VBOX361 360 static inline PageDesc **page_l1_map(target_ulong index) 362 #else363 DECLINLINE(PageDesc **) page_l1_map(target_ulong index)364 #endif365 361 { 366 362 #ifndef VBOX … … 392 388 } 393 389 394 #ifndef VBOX395 390 static inline PageDesc *page_find_alloc(target_ulong index) 396 #else397 DECLINLINE(PageDesc *) page_find_alloc(target_ulong index)398 #endif399 391 { 400 392 PageDesc **lp, *p; … … 427 419 } 428 420 429 #ifndef VBOX430 421 static inline PageDesc *page_find(target_ulong index) 431 #else432 DECLINLINE(PageDesc *) page_find(target_ulong index)433 #endif434 422 { 435 423 PageDesc **lp, *p; … … 497 485 } 498 486 499 #ifndef VBOX500 487 static inline PhysPageDesc *phys_page_find(target_phys_addr_t index) 501 #else502 DECLINLINE(PhysPageDesc *) phys_page_find(target_phys_addr_t index)503 #endif504 488 { 505 489 return phys_page_find_alloc(index, 0); … … 716 700 } 717 701 718 #ifndef VBOX719 702 static inline void invalidate_page_bitmap(PageDesc *p) 720 #else721 DECLINLINE(void) invalidate_page_bitmap(PageDesc *p)722 #endif723 703 { 724 704 if (p->code_bitmap) { … … 852 832 853 833 /* invalidate one TB */ 854 #ifndef VBOX855 834 static inline void tb_remove(TranslationBlock **ptb, TranslationBlock *tb, 856 835 int next_offset) 857 #else858 DECLINLINE(void) tb_remove(TranslationBlock **ptb, TranslationBlock *tb,859 int next_offset)860 #endif861 836 { 862 837 TranslationBlock *tb1; … … 871 846 } 872 847 873 #ifndef VBOX874 848 static inline void tb_page_remove(TranslationBlock **ptb, TranslationBlock *tb) 875 #else876 DECLINLINE(void) tb_page_remove(TranslationBlock **ptb, TranslationBlock *tb)877 #endif878 849 { 879 850 TranslationBlock *tb1; … … 892 863 } 893 864 894 #ifndef VBOX895 865 static inline void tb_jmp_remove(TranslationBlock *tb, int n) 896 #else897 DECLINLINE(void) tb_jmp_remove(TranslationBlock *tb, int n)898 #endif899 866 { 900 867 TranslationBlock *tb1, **ptb; … … 926 893 /* reset the jump entry 'n' of a TB so that it is not chained to 927 894 another TB */ 928 #ifndef VBOX929 895 static inline void tb_reset_jump(TranslationBlock *tb, int n) 930 #else931 DECLINLINE(void) tb_reset_jump(TranslationBlock *tb, int n)932 #endif933 896 { 934 897 tb_set_jmp_target(tb, n, (unsigned long)(tb->tc_ptr + tb->tb_next_offset[n])); … … 1037 1000 #endif /* VBOX */ 1038 1001 1039 #ifndef VBOX1040 1002 static inline void set_bits(uint8_t *tab, int start, int len) 1041 #else1042 DECLINLINE(void) set_bits(uint8_t *tab, int start, int len)1043 #endif1044 1003 { 1045 1004 int end, mask, end1; … … 1254 1213 1255 1214 /* len must be <= 8 and start must be a multiple of len */ 1256 #ifndef VBOX1257 1215 static inline void tb_invalidate_phys_page_fast(target_phys_addr_t start, int len) 1258 #else1259 DECLINLINE(void) tb_invalidate_phys_page_fast(target_phys_addr_t start, int len)1260 #endif1261 1216 { 1262 1217 PageDesc *p; … … 1356 1311 1357 1312 /* add the tb in the target page and protect it if necessary */ 1358 #ifndef VBOX1359 1313 static inline void tb_alloc_page(TranslationBlock *tb, 1360 1314 unsigned int n, target_ulong page_addr) 1361 #else1362 DECLINLINE(void) tb_alloc_page(TranslationBlock *tb,1363 unsigned int n, target_ulong page_addr)1364 #endif1365 1315 { 1366 1316 PageDesc *p; … … 1518 1468 static void tb_reset_jump_recursive(TranslationBlock *tb); 1519 1469 1520 #ifndef VBOX1521 1470 static inline void tb_reset_jump_recursive2(TranslationBlock *tb, int n) 1522 #else1523 DECLINLINE(void) 
tb_reset_jump_recursive2(TranslationBlock *tb, int n)1524 #endif1525 1471 { 1526 1472 TranslationBlock *tb1, *tb_next, **ptb; … … 1899 1845 #if !defined(CONFIG_USER_ONLY) 1900 1846 1901 #ifndef VBOX1902 1847 static inline void tlb_flush_jmp_cache(CPUState *env, target_ulong addr) 1903 #else1904 DECLINLINE(void) tlb_flush_jmp_cache(CPUState *env, target_ulong addr)1905 #endif1906 1848 { 1907 1849 unsigned int i; … … 1964 1906 } 1965 1907 1966 #ifndef VBOX1967 1908 static inline void tlb_flush_entry(CPUTLBEntry *tlb_entry, target_ulong addr) 1968 #else1969 DECLINLINE(void) tlb_flush_entry(CPUTLBEntry *tlb_entry, target_ulong addr)1970 #endif1971 1909 { 1972 1910 if (addr == (tlb_entry->addr_read & … … 2037 1975 } 2038 1976 2039 #ifndef VBOX2040 1977 static inline void tlb_reset_dirty_range(CPUTLBEntry *tlb_entry, 2041 1978 unsigned long start, unsigned long length) 2042 #else2043 DECLINLINE(void) tlb_reset_dirty_range(CPUTLBEntry *tlb_entry,2044 unsigned long start, unsigned long length)2045 #endif2046 1979 { 2047 1980 unsigned long addr; … … 2190 2123 } 2191 2124 2192 #ifndef VBOX2193 2125 static inline void tlb_set_dirty1(CPUTLBEntry *tlb_entry, target_ulong vaddr) 2194 #else2195 DECLINLINE(void) tlb_set_dirty1(CPUTLBEntry *tlb_entry, target_ulong vaddr)2196 #endif2197 2126 { 2198 2127 if (tlb_entry->addr_write == (vaddr | TLB_NOTDIRTY)) … … 2203 2132 /* update the TLB corresponding to virtual page vaddr and phys addr 2204 2133 addr so that it is no longer dirty */ 2205 #ifndef VBOX2206 2134 static inline void tlb_set_dirty(CPUState *env, 2207 2135 unsigned long addr, target_ulong vaddr) 2208 #else2209 DECLINLINE(void) tlb_set_dirty(CPUState *env,2210 unsigned long addr, target_ulong vaddr)2211 #endif2212 2136 { 2213 2137 int i; -
trunk/src/recompiler/fpu/softfloat-native.h
r21292 r36125 64 64 #define islessequal(x, y) ((!unordered(x, y)) && ((x) <= (y))) 65 65 #define isunordered(x,y) unordered(x, y) 66 #elif defined(_MSC_VER)67 #include <float.h>68 #define unordered(x1, x2) ((_fpclass(x1) <= 2) || (_fpclass(x2) <= 2))69 #define isless(x, y) ((!unordered(x, y)) && ((x) < (y)))70 #define islessequal(x, y) ((!unordered(x, y)) && ((x) <= (y)))71 #define isunordered(x,y) unordered(x, y)72 66 #endif 73 67 … … 141 135 float_round_to_zero = 3 142 136 }; 143 #elif defined(_MSC_VER)144 enum {145 float_round_nearest_even = _FpRoundNearest,146 float_round_down = _FpRoundMinusInfinity,147 float_round_up = _FpRoundPlusInfinity,148 float_round_to_zero = _FpRoundChopped149 };150 137 #else 151 138 enum { -
trunk/src/recompiler/gen-icount.h
r17040 r36125 4 4 static int icount_label; 5 5 6 #ifndef VBOX7 6 static inline void gen_icount_start(void) 8 #else /* VBOX */9 DECLINLINE(void) gen_icount_start(void)10 #endif /* VBOX */11 7 { 12 8 TCGv count; … … 45 41 } 46 42 47 #ifndef VBOX48 43 inline static void gen_io_start(void) 49 #else50 DECLINLINE(void) gen_io_start(void)51 #endif52 44 { 53 45 TCGv tmp = tcg_const_i32(1); … … 56 48 } 57 49 58 #ifndef VBOX59 50 static inline void gen_io_end(void) 60 #else /* VBOX */61 DECLINLINE(void) gen_io_end(void)62 #endif /* VBOX */63 51 { 64 52 TCGv tmp = tcg_const_i32(0); -
trunk/src/recompiler/host-utils.h
r26499 r36125 50 50 /* Binary search for leading zeros. */ 51 51 52 #ifndef VBOX53 52 static always_inline int clz32(uint32_t val) 54 #else55 DECLALWAYSINLINE(int) clz32(uint32_t val)56 #endif57 53 { 58 54 #if QEMU_GNUC_PREREQ(3, 4) … … 91 87 } 92 88 93 #ifndef VBOX94 89 static always_inline int clo32(uint32_t val) 95 #else96 DECLALWAYSINLINE(int) clo32(uint32_t val)97 #endif98 90 { 99 91 return clz32(~val); 100 92 } 101 93 102 #ifndef VBOX103 94 static always_inline int clz64(uint64_t val) 104 #else105 DECLALWAYSINLINE(int) clz64(uint64_t val)106 #endif107 95 { 108 96 #if QEMU_GNUC_PREREQ(3, 4) … … 124 112 } 125 113 126 #ifndef VBOX127 114 static always_inline int clo64(uint64_t val) 128 #else129 DECLALWAYSINLINE(int) clo64(uint64_t val)130 #endif131 115 { 132 116 return clz64(~val); 133 117 } 134 118 135 #ifndef VBOX136 119 static always_inline int ctz32 (uint32_t val) 137 #else138 DECLALWAYSINLINE(int) ctz32 (uint32_t val)139 #endif140 120 { 141 121 #if QEMU_GNUC_PREREQ(3, 4) … … 176 156 } 177 157 178 #ifndef VBOX179 158 static always_inline int cto32 (uint32_t val) 180 #else181 DECLALWAYSINLINE(int) cto32 (uint32_t val)182 #endif183 159 { 184 160 return ctz32(~val); 185 161 } 186 162 187 #ifndef VBOX188 163 static always_inline int ctz64 (uint64_t val) 189 #else190 DECLALWAYSINLINE(int) ctz64 (uint64_t val)191 #endif192 164 { 193 165 #if QEMU_GNUC_PREREQ(3, 4) … … 209 181 } 210 182 211 #ifndef VBOX212 183 static always_inline int cto64 (uint64_t val) 213 #else214 DECLALWAYSINLINE(int) cto64 (uint64_t val)215 #endif216 184 { 217 185 return ctz64(~val); 218 186 } 219 187 220 #ifndef VBOX221 188 static always_inline int ctpop8 (uint8_t val) 222 #else223 DECLALWAYSINLINE(int) ctpop8 (uint8_t val)224 #endif225 189 { 226 190 val = (val & 0x55) + ((val >> 1) & 0x55); … … 231 195 } 232 196 233 #ifndef VBOX234 197 static always_inline int ctpop16 (uint16_t val) 235 #else236 DECLALWAYSINLINE(int) ctpop16 (uint16_t val)237 #endif238 198 { 239 199 val = (val & 0x5555) + ((val >> 1) & 0x5555); … … 245 205 } 246 206 247 #ifndef VBOX248 207 static always_inline int ctpop32 (uint32_t val) 249 #else250 DECLALWAYSINLINE(int) ctpop32 (uint32_t val)251 #endif252 208 { 253 209 #if QEMU_GNUC_PREREQ(3, 4) … … 264 220 } 265 221 266 #ifndef VBOX267 222 static always_inline int ctpop64 (uint64_t val) 268 #else269 DECLALWAYSINLINE(int) ctpop64 (uint64_t val)270 #endif271 223 { 272 224 #if QEMU_GNUC_PREREQ(3, 4) -
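host-utils.h now keeps one static always_inline copy of each bit-scan/popcount helper. The recurring pattern, visible in the clz32 hunk, is a compiler-builtin fast path guarded by QEMU_GNUC_PREREQ with a portable binary-search fallback. A hedged sketch of that idea (the guard macro here is plain __GNUC__ for self-containedness):

#include <stdint.h>

/* Count leading zeros of a 32-bit value: use the GCC builtin when available,
 * otherwise narrow the position by halving (binary search), as the header's
 * fallback does. Returns 32 for val == 0. */
static inline int clz32(uint32_t val)
{
#if defined(__GNUC__)
    return val ? __builtin_clz(val) : 32;
#else
    int cnt = 0;
    if (!(val & 0xFFFF0000U)) { cnt += 16; val <<= 16; }
    if (!(val & 0xFF000000U)) { cnt += 8;  val <<= 8;  }
    if (!(val & 0xF0000000U)) { cnt += 4;  val <<= 4;  }
    if (!(val & 0xC0000000U)) { cnt += 2;  val <<= 2;  }
    if (!(val & 0x80000000U)) { cnt += 1;  val <<= 1;  }
    if (!(val & 0x80000000U)) { cnt += 1; }
    return cnt;
#endif
}

/* "Count leading ones" is just clz of the complement, exactly as in the hunk. */
static inline int clo32(uint32_t val)
{
    return clz32(~val);
}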
trunk/src/recompiler/hostregs_helper.h
r33656 r36125 37 37 #if defined(DECLARE_HOST_REGS) 38 38 39 #ifndef VBOX40 39 #define DO_REG(REG) \ 41 40 register host_reg_t reg_AREG##REG asm(AREG##REG); \ 42 41 volatile host_reg_t saved_AREG##REG; 43 #else44 #define DO_REG(REG) \45 REGISTER_BOUND_GLOBAL(host_reg_t, reg_AREG##REG, AREG##REG); \46 volatile host_reg_t saved_AREG##REG;47 #endif48 42 49 43 #elif defined(SAVE_HOST_REGS) 50 44 51 #ifndef VBOX52 45 #define DO_REG(REG) \ 53 46 __asm__ __volatile__ ("" : "=r" (reg_AREG##REG)); \ 54 47 saved_AREG##REG = reg_AREG##REG; 55 #else /* VBOX */56 #define DO_REG(REG) \57 SAVE_GLOBAL_REGISTER(REG, reg_AREG##REG); \58 saved_AREG##REG = reg_AREG##REG;59 #endif /* VBOX */60 48 61 49 #else 62 50 63 #ifndef VBOX64 51 #define DO_REG(REG) \ 65 52 reg_AREG##REG = saved_AREG##REG; \ 66 53 __asm__ __volatile__ ("" : : "r" (reg_AREG##REG)); 67 #else /* VBOX */68 #define DO_REG(REG) \69 reg_AREG##REG = saved_AREG##REG; \70 RESTORE_GLOBAL_REGISTER(REG, reg_AREG##REG);71 #endif72 54 73 55 #endif -
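hostregs_helper.h now carries only the GCC form of the DO_REG macros: a register variable pinned to a fixed host register, saved to a volatile shadow on entry and restored behind an asm barrier on exit. A sketch of one expansion written out by hand; the register name "r14" is an assumption for an x86-64 host, not the project's actual AREG0 assignment:

typedef unsigned long host_reg_t;

static void run_with_pinned_reg(void (*body)(void))
{
    register host_reg_t reg_AREG0 asm("r14");      /* DECLARE_HOST_REGS */
    volatile host_reg_t saved_AREG0;

    __asm__ __volatile__ ("" : "=r" (reg_AREG0));  /* SAVE_HOST_REGS: barrier, */
    saved_AREG0 = reg_AREG0;                       /* then snapshot the value  */

    body();                                        /* translated code would run here */

    reg_AREG0 = saved_AREG0;                       /* restore the pinned register */
    __asm__ __volatile__ ("" : : "r" (reg_AREG0)); /* keep the write from being dropped */
}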
trunk/src/recompiler/osdep.h
r28356 r36125 15 15 #define VBOX_ONLY(x) x 16 16 17 #ifndef _MSC_VER18 17 #define qemu_snprintf(pszBuf, cbBuf, ...) RTStrPrintf((pszBuf), (cbBuf), __VA_ARGS__) 19 #else20 #define qemu_snprintf RTStrPrintf21 #endif22 18 #define qemu_vsnprintf(pszBuf, cbBuf, pszFormat, args) \ 23 19 RTStrPrintfV((pszBuf), (cbBuf), (pszFormat), (args)) … … 130 126 131 127 #ifdef __i386__ 132 #ifdef _MSC_VER133 /** @todo: maybe wrong, or slow */134 #define REGPARM135 #else136 128 #define REGPARM __attribute((regparm(3))) 137 #endif138 129 #else 139 130 #define REGPARM … … 169 160 170 161 #ifdef VBOX 171 #ifdef _MSC_VER 172 #define ALIGNED_MEMBER(type, name, bytes) type name 173 #define ALIGNED_MEMBER_DEF(type, name) type name 174 #define PACKED_STRUCT(name) struct name 175 #define REGISTER_BOUND_GLOBAL(type, var, reg) type var 176 #define SAVE_GLOBAL_REGISTER(reg, var) 177 #define RESTORE_GLOBAL_REGISTER(reg, var) 178 #define DECLALWAYSINLINE(type) DECLINLINE(type) 162 /** @todo why don't we go with dyngen-exec.h here? */ 179 163 #define FORCE_RET() ; 180 #else /* ! _MSC_VER */181 #define ALIGNED_MEMBER(type, name, bytes) type name __attribute__((aligned(bytes)))182 #define ALIGNED_MEMBER_DEF(type, name) type name __attribute__((aligned()))183 #define PACKED_STRUCT(name) struct __attribute__ ((__packed__)) name184 #define REGISTER_BOUND_GLOBAL(type, var, reg) register type var asm(reg)185 #define SAVE_GLOBAL_REGISTER(reg, var) __asm__ __volatile__ ("" : "=r" (var))186 #define RESTORE_GLOBAL_REGISTER(reg, var) __asm__ __volatile__ ("" : : "r" (var))187 #define DECLALWAYSINLINE(type) static always_inline type188 #define FORCE_RET() ;189 #endif /* !_MSC_VER */190 164 #endif /* VBOX */ 191 165 -
trunk/src/recompiler/qemu-common.h
r31179 r36125 6 6 7 7 # include <string.h> 8 # if !defined(_MSC_VER) 9 # include <inttypes.h> 10 # endif 8 # include <inttypes.h> 11 9 12 10 void pstrcpy(char *buf, int buf_size, const char *str); 13 11 char *pstrcat(char *buf, int buf_size, const char *s); 14 12 # define snprintf RTStrPrintf 15 16 # ifdef _MSC_VER17 # define PRId32 "d"18 # define PRIx32 "x"19 # define PRIu32 "u"20 # define PRIo32 "o"21 # ifdef DEBUG_TMP_LOGGING22 # define PRId64 "I64d"23 # define PRIx64 "I64x"24 # define PRIu64 "I64u"25 # define PRIo64 "I64o"26 # else27 # define PRId64 "RI64"28 # define PRIx64 "RX64"29 # define PRIu64 "RU64"30 # endif31 # endif /* _MSC_VER */32 13 33 14 #else /* !VBOX */ -
trunk/src/recompiler/qemu-lock.h
r33656 r36125 61 61 #define SPIN_LOCK_UNLOCKED 0 62 62 63 #ifndef VBOX64 63 static inline void resetlock (spinlock_t *p) 65 #else66 DECLINLINE(void) resetlock (spinlock_t *p)67 #endif68 64 { 69 65 *p = SPIN_LOCK_UNLOCKED; … … 75 71 DECLINLINE(int) testandset (int *p) 76 72 { 77 78 73 return ASMAtomicCmpXchgU32((volatile uint32_t *)p, 1, 0) ? 0 : 1; 79 74 } … … 252 247 } 253 248 #else 254 #ifndef VBOX255 249 static inline void spin_lock(spinlock_t *lock) 256 #else 257 DECLINLINE(void) spin_lock(spinlock_t *lock) 258 #endif 259 { 260 } 261 262 #ifndef VBOX 250 { 251 } 252 263 253 static inline void spin_unlock(spinlock_t *lock) 264 #else 265 DECLINLINE(void) spin_unlock(spinlock_t *lock) 266 #endif 267 { 268 } 269 270 #ifndef VBOX 254 { 255 } 256 271 257 static inline int spin_trylock(spinlock_t *lock) 272 #else273 DECLINLINE(int) spin_trylock(spinlock_t *lock)274 #endif275 258 { 276 259 return 1; -
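qemu-lock.h keeps a single definition each of resetlock, testandset, and the spinlock entry points (the latter compile to no-ops in the non-SMP configuration shown above). In the VBox build, testandset is implemented with an IPRT atomic compare-and-exchange; the sketch below shows how a spinlock can be built on that primitive, assuming C11 atomics instead of the ASMAtomicCmpXchgU32 call:

#include <stdatomic.h>

typedef atomic_int spinlock_t;
#define SPIN_LOCK_UNLOCKED 0

/* Try to flip the lock word from 0 to 1; returns non-zero if it was
 * already taken (mirrors the testandset() contract in the header). */
static inline int testandset(spinlock_t *p)
{
    int expected = 0;
    return atomic_compare_exchange_strong(p, &expected, 1) ? 0 : 1;
}

static inline void resetlock(spinlock_t *p)
{
    atomic_store(p, SPIN_LOCK_UNLOCKED);
}

/* Busy-wait until the lock is acquired. */
static inline void spin_lock(spinlock_t *lock)
{
    while (testandset(lock))
        ;   /* spin */
}

static inline void spin_unlock(spinlock_t *lock)
{
    resetlock(lock);
}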
trunk/src/recompiler/softmmu_defs.h
r17040 r36125 20 20 uint64_t REGPARM __ldq_cmmu(target_ulong addr, int mmu_idx); 21 21 void REGPARM __stq_cmmu(target_ulong addr, uint64_t val, int mmu_idx); 22 #else 22 #else /* VBOX */ 23 23 RTCCUINTREG REGPARM __ldb_mmu(target_ulong addr, int mmu_idx); 24 24 void REGPARM __stb_mmu(target_ulong addr, uint8_t val, int mmu_idx); … … 39 39 void REGPARM __stq_cmmu(target_ulong addr, uint64_t val, int mmu_idx); 40 40 41 # ifdef REM_PHYS_ADDR_IN_TLB41 # ifdef REM_PHYS_ADDR_IN_TLB 42 42 RTCCUINTREG REGPARM __ldb_vbox_phys(RTCCUINTREG addr); 43 43 RTCCUINTREG REGPARM __ldub_vbox_phys(RTCCUINTREG addr); … … 51 51 uint64_t REGPARM __ldq_vbox_phys(RTCCUINTREG addr); 52 52 void REGPARM __stq_vbox_phys(RTCCUINTREG addr, uint64_t val); 53 #endif 53 # endif 54 55 #endif /* VBOX */ 54 56 55 57 #endif 56 57 #endif -
trunk/src/recompiler/softmmu_header.h
r33656 r36125 226 226 /* generic load/store macros */ 227 227 228 #ifndef VBOX229 228 static inline RES_TYPE glue(glue(ld, USUFFIX), MEMSUFFIX)(target_ulong ptr) 230 #else231 DECLINLINE(RES_TYPE) glue(glue(ld, USUFFIX), MEMSUFFIX)(target_ulong ptr)232 #endif233 229 { 234 230 … … 253 249 254 250 #if DATA_SIZE <= 2 255 #ifndef VBOX256 251 static inline int glue(glue(lds, SUFFIX), MEMSUFFIX)(target_ulong ptr) 257 #else258 DECLINLINE(int) glue(glue(lds, SUFFIX), MEMSUFFIX)(target_ulong ptr)259 #endif260 252 { 261 253 int res, page_index; … … 281 273 282 274 /* generic store macro */ 283 #ifndef VBOX284 275 static inline void glue(glue(st, SUFFIX), MEMSUFFIX)(target_ulong ptr, RES_TYPE v) 285 #else286 DECLINLINE(void) glue(glue(st, SUFFIX), MEMSUFFIX)(target_ulong ptr, RES_TYPE v)287 #endif288 276 { 289 277 int page_index; … … 311 299 312 300 #if DATA_SIZE == 8 313 #ifndef VBOX314 301 static inline float64 glue(ldfq, MEMSUFFIX)(target_ulong ptr) 315 #else316 DECLINLINE(float64) glue(ldfq, MEMSUFFIX)(target_ulong ptr)317 #endif318 302 { 319 303 union { … … 325 309 } 326 310 327 #ifndef VBOX328 311 static inline void glue(stfq, MEMSUFFIX)(target_ulong ptr, float64 v) 329 #else330 DECLINLINE(void) glue(stfq, MEMSUFFIX)(target_ulong ptr, float64 v)331 #endif332 312 { 333 313 union { … … 341 321 342 322 #if DATA_SIZE == 4 343 #ifndef VBOX344 323 static inline float32 glue(ldfl, MEMSUFFIX)(target_ulong ptr) 345 #else346 DECLINLINE(float32) glue(ldfl, MEMSUFFIX)(target_ulong ptr)347 #endif348 324 { 349 325 union { … … 355 331 } 356 332 357 #ifndef VBOX358 333 static inline void glue(stfl, MEMSUFFIX)(target_ulong ptr, float32 v) 359 #else360 DECLINLINE(void) glue(stfl, MEMSUFFIX)(target_ulong ptr, float32 v)361 #endif362 334 { 363 335 union { -
trunk/src/recompiler/softmmu_template.h
r33656 r36125 71 71 int mmu_idx, 72 72 void *retaddr); 73 #ifndef VBOX74 73 static inline DATA_TYPE glue(io_read, SUFFIX)(target_phys_addr_t physaddr, 75 74 target_ulong addr, 76 75 void *retaddr) 77 #else78 DECLINLINE(DATA_TYPE) glue(io_read, SUFFIX)(target_phys_addr_t physaddr,79 target_ulong addr,80 void *retaddr)81 #endif82 76 { 83 77 DATA_TYPE res; … … 230 224 void *retaddr); 231 225 232 #ifndef VBOX233 226 static inline void glue(io_write, SUFFIX)(target_phys_addr_t physaddr, 234 227 DATA_TYPE val, 235 228 target_ulong addr, 236 229 void *retaddr) 237 #else238 DECLINLINE(void) glue(io_write, SUFFIX)(target_phys_addr_t physaddr,239 DATA_TYPE val,240 target_ulong addr,241 void *retaddr)242 #endif243 230 { 244 231 int index; -
trunk/src/recompiler/target-i386/cpu.h
r35346 r36125 571 571 union { 572 572 #ifdef USE_X86LDOUBLE 573 #ifndef VBOX574 573 CPU86_LDouble d __attribute__((aligned(16))); 575 #else576 ALIGNED_MEMBER(CPU86_LDouble, d, 16);577 #endif578 574 #else 579 575 CPU86_LDouble d; … … 738 734 union { 739 735 #ifdef USE_X86LDOUBLE 740 #ifndef VBOX741 736 CPU86_LDouble d __attribute__((aligned(16))); 742 #else743 ALIGNED_MEMBER(CPU86_LDouble, d, 16);744 #endif745 737 #else 746 738 CPU86_LDouble d; … … 818 810 /* this function must always be used to load data in the segment 819 811 cache: it synchronizes the hflags with the segment cache values */ 820 #ifndef VBOX821 812 static inline void cpu_x86_load_seg_cache(CPUX86State *env, 822 813 int seg_reg, unsigned int selector, … … 824 815 unsigned int limit, 825 816 unsigned int flags) 826 #else827 DECLINLINE(void) cpu_x86_load_seg_cache(CPUX86State *env,828 int seg_reg, unsigned int selector,829 target_ulong base,830 unsigned int limit,831 unsigned int flags)832 833 #endif834 817 { 835 818 SegmentCache *sc; … … 888 871 889 872 /* wrapper, just in case memory mappings must be changed */ 890 #ifndef VBOX891 873 static inline void cpu_x86_set_cpl(CPUX86State *s, int cpl) 892 #else893 DECLINLINE(void) cpu_x86_set_cpl(CPUX86State *s, int cpl)894 #endif895 874 { 896 875 #if HF_CPL_MASK == 3 … … 982 961 #define MMU_MODE1_SUFFIX _user 983 962 #define MMU_USER_IDX 1 984 #ifndef VBOX985 963 static inline int cpu_mmu_index (CPUState *env) 986 #else987 DECLINLINE(int) cpu_mmu_index (CPUState *env)988 #endif989 964 { 990 965 return (env->hflags & HF_CPL_MASK) == 3 ? 1 : 0; -
trunk/src/recompiler/target-i386/exec.h
r33656 r36125 40 40 #include "cpu-defs.h" 41 41 42 #ifndef VBOX43 42 /* at least 4 register variables are defined */ 44 43 register struct CPUX86State *env asm(AREG0); 45 #else46 REGISTER_BOUND_GLOBAL(struct CPUX86State*, env, AREG0);47 #endif /* VBOX */48 44 49 45 #include "qemu-log.h" … … 116 112 117 113 /* n must be a constant to be efficient */ 118 #ifndef VBOX119 114 static inline target_long lshift(target_long x, int n) 120 #else121 DECLINLINE(target_long) lshift(target_long x, int n)122 #endif123 115 { 124 116 if (n >= 0) … … 130 122 #include "helper.h" 131 123 132 #ifndef VBOX133 124 static inline void svm_check_intercept(uint32_t type) 134 #else135 DECLINLINE(void) svm_check_intercept(uint32_t type)136 #endif137 125 { 138 126 helper_svm_check_intercept_param(type, 0); … … 150 138 #include "softmmu_exec.h" 151 139 152 #ifndef VBOX153 140 static inline double ldfq(target_ulong ptr) 154 #else155 DECLINLINE(double) ldfq(target_ulong ptr)156 #endif157 141 { 158 142 union { … … 164 148 } 165 149 166 #ifndef VBOX167 150 static inline void stfq(target_ulong ptr, double v) 168 #else169 DECLINLINE(void) stfq(target_ulong ptr, double v)170 #endif171 151 { 172 152 union { … … 178 158 } 179 159 180 #ifndef VBOX181 160 static inline float ldfl(target_ulong ptr) 182 #else183 DECLINLINE(float) ldfl(target_ulong ptr)184 #endif185 161 { 186 162 union { … … 192 168 } 193 169 194 #ifndef VBOX195 170 static inline void stfl(target_ulong ptr, float v) 196 #else197 DECLINLINE(void) stfl(target_ulong ptr, float v)198 #endif199 171 { 200 172 union { … … 268 240 269 241 #ifdef VBOX 270 #ifndef _MSC_VER271 242 extern CPU86_LDouble sin(CPU86_LDouble x); 272 243 extern CPU86_LDouble cos(CPU86_LDouble x); … … 278 249 extern CPU86_LDouble floor(CPU86_LDouble x); 279 250 extern CPU86_LDouble ceil(CPU86_LDouble x); 280 #endif /* !_MSC_VER */281 251 #endif /* VBOX */ 282 252 … … 350 320 #endif 351 321 352 #ifndef VBOX353 322 static inline void fpush(void) 354 #else355 DECLINLINE(void) fpush(void)356 #endif357 323 { 358 324 env->fpstt = (env->fpstt - 1) & 7; … … 360 326 } 361 327 362 #ifndef VBOX363 328 static inline void fpop(void) 364 #else365 DECLINLINE(void) fpop(void)366 #endif367 329 { 368 330 env->fptags[env->fpstt] = 1; /* invalidate stack entry */ … … 425 387 /* we use memory access macros */ 426 388 427 #ifndef VBOX428 389 static inline CPU86_LDouble helper_fldt(target_ulong ptr) 429 #else430 DECLINLINE(CPU86_LDouble) helper_fldt(target_ulong ptr)431 #endif432 390 { 433 391 CPU86_LDoubleU temp; … … 438 396 } 439 397 440 #ifndef VBOX441 398 static inline void helper_fstt(CPU86_LDouble f, target_ulong ptr) 442 #else443 DECLINLINE(void) helper_fstt(CPU86_LDouble f, target_ulong ptr)444 #endif445 399 { 446 400 CPU86_LDoubleU temp; … … 477 431 extern const uint8_t rclb_table[32]; 478 432 479 #ifndef VBOX480 433 static inline uint32_t compute_eflags(void) 481 #else482 DECLINLINE(uint32_t) compute_eflags(void)483 #endif484 434 { 485 435 return env->eflags | cc_table[CC_OP].compute_all() | (DF & DF_MASK); … … 487 437 488 438 /* NOTE: CC_OP must be modified manually to CC_OP_EFLAGS */ 489 #ifndef VBOX490 439 static inline void load_eflags(int eflags, int update_mask) 491 #else492 DECLINLINE(void) load_eflags(int eflags, int update_mask)493 #endif494 440 { 495 441 CC_SRC = eflags & (CC_O | CC_S | CC_Z | CC_A | CC_P | CC_C); … … 499 445 } 500 446 501 #ifndef VBOX502 447 static inline void env_to_regs(void) 503 #else504 DECLINLINE(void) env_to_regs(void)505 #endif506 448 { 507 449 #ifdef reg_EAX … … 531 473 } 532 474 
533 #ifndef VBOX534 475 static inline void regs_to_env(void) 535 #else536 DECLINLINE(void) regs_to_env(void)537 #endif538 476 { 539 477 #ifdef reg_EAX … … 563 501 } 564 502 565 #ifndef VBOX566 503 static inline int cpu_halted(CPUState *env) { 567 #else568 DECLINLINE(int) cpu_halted(CPUState *env) {569 #endif570 504 /* handle exit of HALTED state */ 571 505 if (!env->halted) … … 583 517 /* load efer and update the corresponding hflags. XXX: do consistency 584 518 checks with cpuid bits ? */ 585 #ifndef VBOX586 519 static inline void cpu_load_efer(CPUState *env, uint64_t val) 587 #else588 DECLINLINE(void) cpu_load_efer(CPUState *env, uint64_t val)589 #endif590 520 { 591 521 env->efer = val; -
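The exec.h hunks show the x87 stack helpers now defined only once. The stack top lives in env->fpstt and wraps modulo 8, and env->fptags marks empty slots; the push/pop pair visible in the diff boils down to the following sketch, where X87Stack is a pared-down stand-in for CPUX86State with the field names taken from the hunk:

#include <stdint.h>

typedef struct {
    unsigned int fpstt;     /* top-of-stack index, 0..7 */
    uint8_t      fptags[8]; /* 1 = slot empty, 0 = slot valid */
    double       fpregs[8]; /* CPU86_LDouble in the real header */
} X87Stack;

/* Push: move the top index down (mod 8) and mark the new slot as valid. */
static inline void fpush(X87Stack *env)
{
    env->fpstt = (env->fpstt - 1) & 7;
    env->fptags[env->fpstt] = 0;
}

/* Pop: invalidate the current top slot, then move the top index up (mod 8). */
static inline void fpop(X87Stack *env)
{
    env->fptags[env->fpstt] = 1;
    env->fpstt = (env->fpstt + 1) & 7;
}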
trunk/src/recompiler/target-i386/op_helper.c
r36064 r36125 193 193 194 194 /* return non zero if error */ 195 #ifndef VBOX196 195 static inline int load_segment(uint32_t *e1_ptr, uint32_t *e2_ptr, 197 #else /* VBOX */198 DECLINLINE(int) load_segment(uint32_t *e1_ptr, uint32_t *e2_ptr,199 #endif /* VBOX */200 196 int selector) 201 197 { … … 226 222 } 227 223 228 #ifndef VBOX229 224 static inline unsigned int get_seg_limit(uint32_t e1, uint32_t e2) 230 #else /* VBOX */231 DECLINLINE(unsigned int) get_seg_limit(uint32_t e1, uint32_t e2)232 #endif /* VBOX */233 225 { 234 226 unsigned int limit; … … 239 231 } 240 232 241 #ifndef VBOX242 233 static inline uint32_t get_seg_base(uint32_t e1, uint32_t e2) 243 #else /* VBOX */244 DECLINLINE(uint32_t) get_seg_base(uint32_t e1, uint32_t e2)245 #endif /* VBOX */246 234 { 247 235 return ((e1 >> 16) | ((e2 & 0xff) << 16) | (e2 & 0xff000000)); 248 236 } 249 237 250 #ifndef VBOX251 238 static inline void load_seg_cache_raw_dt(SegmentCache *sc, uint32_t e1, uint32_t e2) 252 #else /* VBOX */253 DECLINLINE(void) load_seg_cache_raw_dt(SegmentCache *sc, uint32_t e1, uint32_t e2)254 #endif /* VBOX */255 239 { 256 240 sc->base = get_seg_base(e1, e2); … … 260 244 261 245 /* init the segment cache in vm86 mode. */ 262 #ifndef VBOX263 246 static inline void load_seg_vm(int seg, int selector) 264 #else /* VBOX */265 DECLINLINE(void) load_seg_vm(int seg, int selector)266 #endif /* VBOX */267 247 { 268 248 selector &= 0xffff; … … 280 260 } 281 261 282 #ifndef VBOX283 262 static inline void get_ss_esp_from_tss(uint32_t *ss_ptr, 284 #else /* VBOX */285 DECLINLINE(void) get_ss_esp_from_tss(uint32_t *ss_ptr,286 #endif /* VBOX */287 263 uint32_t *esp_ptr, int dpl) 288 264 { … … 653 629 654 630 /* check if Port I/O is allowed in TSS */ 631 static inline void check_io(int addr, int size) 632 { 655 633 #ifndef VBOX 656 static inline void check_io(int addr, int size)657 {658 634 int io_offset, val, mask; 659 660 #else /* VBOX */ 661 DECLINLINE(void) check_io(int addr, int size) 662 { 635 #else 663 636 int val, mask; 664 637 unsigned int io_offset; … … 752 725 } 753 726 754 #ifndef VBOX755 727 static inline unsigned int get_sp_mask(unsigned int e2) 756 #else /* VBOX */757 DECLINLINE(unsigned int) get_sp_mask(unsigned int e2)758 #endif /* VBOX */759 728 { 760 729 if (e2 & DESC_B_MASK) … … 1037 1006 #endif 1038 1007 } 1008 1039 1009 #ifdef VBOX 1040 1010 … … 1134 1104 env->eflags &= ~IF_MASK; 1135 1105 } 1106 1136 1107 #endif /* VBOX */ 1137 1108 … … 1150 1121 } 1151 1122 1152 #ifndef VBOX1153 1123 static inline target_ulong get_rsp_from_tss(int level) 1154 #else /* VBOX */1155 DECLINLINE(target_ulong) get_rsp_from_tss(int level)1156 #endif /* VBOX */1157 1124 { 1158 1125 int index; … … 3175 3142 } 3176 3143 3177 #ifndef VBOX3178 3144 static inline void validate_seg(int seg_reg, int cpl) 3179 #else /* VBOX */3180 DECLINLINE(void) validate_seg(int seg_reg, int cpl)3181 #endif /* VBOX */3182 3145 { 3183 3146 int dpl; … … 3202 3165 3203 3166 /* protected mode iret */ 3204 #ifndef VBOX3205 3167 static inline void helper_ret_protected(int shift, int is_iret, int addend) 3206 #else /* VBOX */3207 DECLINLINE(void) helper_ret_protected(int shift, int is_iret, int addend)3208 #endif /* VBOX */3209 3168 { 3210 3169 uint32_t new_cs, new_eflags, new_ss; … … 4076 4035 } 4077 4036 4078 #ifndef VBOX4079 4037 static inline CPU86_LDouble helper_fdiv(CPU86_LDouble a, CPU86_LDouble b) 4080 #else /* VBOX */4081 DECLINLINE(CPU86_LDouble) helper_fdiv(CPU86_LDouble a, CPU86_LDouble b)4082 #endif /* VBOX */4083 4038 { 4084 4039 if (b == 0.0) … … 
4690 4645 ST0 = temp.d; 4691 4646 } 4692 4693 #ifdef VBOX4694 #ifdef _MSC_VER4695 /* MSC cannot divide by zero */4696 extern double _Nan;4697 #define NaN _Nan4698 #else4699 #define NaN (0.0 / 0.0)4700 #endif4701 #endif /* VBOX */4702 4647 4703 4648 void helper_fprem1(void) … … 5942 5887 // Needs to be at the bottom of the file (overriding macros) 5943 5888 5944 #ifndef VBOX5945 5889 static inline CPU86_LDouble helper_fldt_raw(uint8_t *ptr) 5946 #else /* VBOX */5947 DECLINLINE(CPU86_LDouble) helper_fldt_raw(uint8_t *ptr)5948 #endif /* VBOX */5949 5890 { 5950 5891 return *(CPU86_LDouble *)ptr; 5951 5892 } 5952 5893 5953 #ifndef VBOX5954 5894 static inline void helper_fstt_raw(CPU86_LDouble f, uint8_t *ptr) 5955 #else /* VBOX */5956 DECLINLINE(void) helper_fstt_raw(CPU86_LDouble f, uint8_t *ptr)5957 #endif /* VBOX */5958 5895 { 5959 5896 *(CPU86_LDouble *)ptr = f; … … 6159 6096 #else 6160 6097 6161 #ifndef VBOX6162 6098 static inline void svm_save_seg(target_phys_addr_t addr, 6163 #else /* VBOX */6164 DECLINLINE(void) svm_save_seg(target_phys_addr_t addr,6165 #endif /* VBOX */6166 6099 const SegmentCache *sc) 6167 6100 { … … 6176 6109 } 6177 6110 6178 #ifndef VBOX6179 6111 static inline void svm_load_seg(target_phys_addr_t addr, SegmentCache *sc) 6180 #else /* VBOX */6181 DECLINLINE(void) svm_load_seg(target_phys_addr_t addr, SegmentCache *sc)6182 #endif /* VBOX */6183 6112 { 6184 6113 unsigned int flags; … … 6191 6120 } 6192 6121 6193 #ifndef VBOX6194 6122 static inline void svm_load_seg_cache(target_phys_addr_t addr, 6195 #else /* VBOX */6196 DECLINLINE(void) svm_load_seg_cache(target_phys_addr_t addr,6197 #endif /* VBOX */6198 6123 CPUState *env, int seg_reg) 6199 6124 { -
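Several of the op_helper.c hunks deal with decoding protected-mode segment descriptors. The two extraction helpers shown in the diff reassemble the base and limit fields that are scattered across the descriptor's two 32-bit words (e1 low word, e2 high word), applying 4K granularity scaling when the G bit is set. A self-contained sketch:

#include <stdint.h>

#define DESC_G_MASK (1u << 23)   /* granularity bit in the high descriptor word */

/* Limit: low 16 bits from e1, bits 16..19 from e2; scale to byte granularity
 * (4K pages) when the G bit is set. Matches get_seg_limit() in the hunk. */
static inline unsigned int get_seg_limit(uint32_t e1, uint32_t e2)
{
    unsigned int limit = (e1 & 0xffff) | (e2 & 0x000f0000);
    if (e2 & DESC_G_MASK)
        limit = (limit << 12) | 0xfff;
    return limit;
}

/* Base: bits 16..31 of e1 give base[0..15], the low byte of e2 gives
 * base[16..23], and the top byte of e2 gives base[24..31].
 * Matches get_seg_base() in the hunk. */
static inline uint32_t get_seg_base(uint32_t e1, uint32_t e2)
{
    return (e1 >> 16) | ((e2 & 0xff) << 16) | (e2 & 0xff000000);
}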
trunk/src/recompiler/target-i386/ops_sse.h
r33656 r36125 304 304 305 305 #if SHIFT == 0 306 #ifndef VBOX307 306 static inline int satub(int x) 308 #else /* VBOX */309 DECLINLINE(int) satub(int x)310 #endif /* VBOX */311 307 { 312 308 if (x < 0) … … 318 314 } 319 315 320 #ifndef VBOX321 316 static inline int satuw(int x) 322 #else /* VBOX */323 DECLINLINE(int) satuw(int x)324 #endif /* VBOX */325 317 { 326 318 if (x < 0) … … 332 324 } 333 325 334 #ifndef VBOX335 326 static inline int satsb(int x) 336 #else /* VBOX */337 DECLINLINE(int) satsb(int x)338 #endif /* VBOX */339 327 { 340 328 if (x < -128) … … 346 334 } 347 335 348 #ifndef VBOX349 336 static inline int satsw(int x) 350 #else /* VBOX */351 DECLINLINE(int) satsw(int x)352 #endif /* VBOX */353 337 { 354 338 if (x < -32768) … … 462 446 463 447 #if SHIFT == 0 464 #ifndef VBOX465 448 static inline int abs1(int a) 466 #else /* VBOX */467 DECLINLINE(int) abs1(int a)468 #endif /* VBOX */469 449 { 470 450 if (a < 0) … … 1817 1797 SSE_HELPER_Q(helper_pcmpgtq, FCMPGTQ) 1818 1798 1819 #ifndef VBOX1820 1799 static inline int pcmp_elen(int reg, uint32_t ctrl) 1821 #else /* VBOX */1822 DECLINLINE(int) pcmp_elen(int reg, uint32_t ctrl)1823 #endif /* VBOX */1824 1800 { 1825 1801 int val; … … 1841 1817 } 1842 1818 1843 #ifndef VBOX1844 1819 static inline int pcmp_ilen(Reg *r, uint8_t ctrl) 1845 #else /* VBOX */1846 DECLINLINE(int) pcmp_ilen(Reg *r, uint8_t ctrl)1847 #endif /* VBOX */1848 1820 { 1849 1821 int val = 0; … … 1859 1831 } 1860 1832 1861 #ifndef VBOX1862 1833 static inline int pcmp_val(Reg *r, uint8_t ctrl, int i) 1863 #else /* VBOX */1864 DECLINLINE(int) pcmp_val(Reg *r, uint8_t ctrl, int i)1865 #endif /* VBOX */1866 1834 { 1867 1835 switch ((ctrl >> 0) & 3) { … … 1878 1846 } 1879 1847 1880 #ifndef VBOX1881 1848 static inline unsigned pcmpxstrx(Reg *d, Reg *s, 1882 #else /* VBOX */1883 DECLINLINE(unsigned) pcmpxstrx(Reg *d, Reg *s,1884 #endif /* VBOX */1885 1849 int8_t ctrl, int valids, int validd) 1886 1850 { … … 1949 1913 } 1950 1914 1951 #ifndef VBOX1952 1915 static inline int rffs1(unsigned int val) 1953 #else /* VBOX */1954 DECLINLINE(int) rffs1(unsigned int val)1955 #endif /* VBOX */1956 1916 { 1957 1917 int ret = 1, hi; … … 1966 1926 } 1967 1927 1968 #ifndef VBOX1969 1928 static inline int ffs1(unsigned int val) 1970 #else /* VBOX */1971 DECLINLINE(int) ffs1(unsigned int val)1972 #endif /* VBOX */1973 1929 { 1974 1930 int ret = 1, hi; -
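The SHIFT == 0 block of ops_sse.h keeps one copy of the saturation helpers used by the packed MMX/SSE pack and add-with-saturation operations. Their bodies are partially elided in the hunk; they clamp to the unsigned and signed 8- and 16-bit ranges, roughly as follows:

/* Clamp to unsigned byte range [0, 255]. */
static inline int satub(int x)
{
    if (x < 0)
        return 0;
    else if (x > 255)
        return 255;
    else
        return x;
}

/* Clamp to unsigned word range [0, 65535]. */
static inline int satuw(int x)
{
    if (x < 0)
        return 0;
    else if (x > 65535)
        return 65535;
    else
        return x;
}

/* Clamp to signed byte range [-128, 127]. */
static inline int satsb(int x)
{
    if (x < -128)
        return -128;
    else if (x > 127)
        return 127;
    else
        return x;
}

/* Clamp to signed word range [-32768, 32767]. */
static inline int satsw(int x)
{
    if (x < -32768)
        return -32768;
    else if (x > 32767)
        return 32767;
    else
        return x;
}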
trunk/src/recompiler/target-i386/svm.h
r17040 r36125 131 131 #define SVM_CR0_SELECTIVE_MASK (1 << 3 | 1) /* TS and MP */ 132 132 133 #ifndef VBOX134 133 struct __attribute__ ((__packed__)) vmcb_control_area { 135 #else136 PACKED_STRUCT(vmcb_control_area) {137 #endif /* VBOX */138 134 uint16_t intercept_cr_read; 139 135 uint16_t intercept_cr_write; … … 167 163 }; 168 164 169 #ifndef VBOX170 165 struct __attribute__ ((__packed__)) vmcb_seg { 171 #else172 PACKED_STRUCT(vmcb_seg) {173 #endif174 166 uint16_t selector; 175 167 uint16_t attrib; … … 178 170 }; 179 171 180 #ifndef VBOX181 172 struct __attribute__ ((__packed__)) vmcb_save_area { 182 #else183 PACKED_STRUCT(vmcb_save_area) {184 #endif185 173 struct vmcb_seg es; 186 174 struct vmcb_seg cs; … … 227 215 }; 228 216 229 #ifndef VBOX230 217 struct __attribute__ ((__packed__)) vmcb { 231 #else232 PACKED_STRUCT(vmcb) {233 #endif234 218 struct vmcb_control_area control; 235 219 struct vmcb_save_area save; -
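svm.h drops the PACKED_STRUCT wrapper macro in favor of writing the GCC packed attribute directly on the VMCB structures. Packing matters because these structures mirror the hardware-defined AMD-V VMCB layout. A tiny sketch of the construct, with the fields abbreviated from the vmcb_seg definition in the hunk:

#include <stdint.h>

/* The packed attribute guarantees no padding is inserted, so the struct maps
 * 1:1 onto the hardware-defined VMCB segment layout regardless of host ABI. */
struct __attribute__ ((__packed__)) vmcb_seg {
    uint16_t selector;
    uint16_t attrib;
    uint32_t limit;
    uint64_t base;
};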
trunk/src/recompiler/target-i386/translate.c
r36056 r36125 232 232 }; 233 233 234 #ifndef VBOX235 234 static inline void gen_op_movl_T0_0(void) 236 #else /* VBOX */237 DECLINLINE(void) gen_op_movl_T0_0(void)238 #endif /* VBOX */239 235 { 240 236 tcg_gen_movi_tl(cpu_T[0], 0); 241 237 } 242 238 243 #ifndef VBOX244 239 static inline void gen_op_movl_T0_im(int32_t val) 245 #else /* VBOX */246 DECLINLINE(void) gen_op_movl_T0_im(int32_t val)247 #endif /* VBOX */248 240 { 249 241 tcg_gen_movi_tl(cpu_T[0], val); 250 242 } 251 243 252 #ifndef VBOX253 244 static inline void gen_op_movl_T0_imu(uint32_t val) 254 #else /* VBOX */255 DECLINLINE(void) gen_op_movl_T0_imu(uint32_t val)256 #endif /* VBOX */257 245 { 258 246 tcg_gen_movi_tl(cpu_T[0], val); 259 247 } 260 248 261 #ifndef VBOX262 249 static inline void gen_op_movl_T1_im(int32_t val) 263 #else /* VBOX */264 DECLINLINE(void) gen_op_movl_T1_im(int32_t val)265 #endif /* VBOX */266 250 { 267 251 tcg_gen_movi_tl(cpu_T[1], val); 268 252 } 269 253 270 #ifndef VBOX271 254 static inline void gen_op_movl_T1_imu(uint32_t val) 272 #else /* VBOX */273 DECLINLINE(void) gen_op_movl_T1_imu(uint32_t val)274 #endif /* VBOX */275 255 { 276 256 tcg_gen_movi_tl(cpu_T[1], val); 277 257 } 278 258 279 #ifndef VBOX280 259 static inline void gen_op_movl_A0_im(uint32_t val) 281 #else /* VBOX */282 DECLINLINE(void) gen_op_movl_A0_im(uint32_t val)283 #endif /* VBOX */284 260 { 285 261 tcg_gen_movi_tl(cpu_A0, val); … … 287 263 288 264 #ifdef TARGET_X86_64 289 #ifndef VBOX290 265 static inline void gen_op_movq_A0_im(int64_t val) 291 #else /* VBOX */292 DECLINLINE(void) gen_op_movq_A0_im(int64_t val)293 #endif /* VBOX */294 266 { 295 267 tcg_gen_movi_tl(cpu_A0, val); … … 297 269 #endif 298 270 299 #ifndef VBOX300 271 static inline void gen_movtl_T0_im(target_ulong val) 301 #else /* VBOX */302 DECLINLINE(void) gen_movtl_T0_im(target_ulong val)303 #endif /* VBOX */304 272 { 305 273 tcg_gen_movi_tl(cpu_T[0], val); 306 274 } 307 275 308 #ifndef VBOX309 276 static inline void gen_movtl_T1_im(target_ulong val) 310 #else /* VBOX */311 DECLINLINE(void) gen_movtl_T1_im(target_ulong val)312 #endif /* VBOX */313 277 { 314 278 tcg_gen_movi_tl(cpu_T[1], val); 315 279 } 316 280 317 #ifndef VBOX318 281 static inline void gen_op_andl_T0_ffff(void) 319 #else /* VBOX */320 DECLINLINE(void) gen_op_andl_T0_ffff(void)321 #endif /* VBOX */322 282 { 323 283 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff); 324 284 } 325 285 326 #ifndef VBOX327 286 static inline void gen_op_andl_T0_im(uint32_t val) 328 #else /* VBOX */329 DECLINLINE(void) gen_op_andl_T0_im(uint32_t val)330 #endif /* VBOX */331 287 { 332 288 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], val); 333 289 } 334 290 335 #ifndef VBOX336 291 static inline void gen_op_movl_T0_T1(void) 337 #else /* VBOX */338 DECLINLINE(void) gen_op_movl_T0_T1(void)339 #endif /* VBOX */340 292 { 341 293 tcg_gen_mov_tl(cpu_T[0], cpu_T[1]); 342 294 } 343 295 344 #ifndef VBOX345 296 static inline void gen_op_andl_A0_ffff(void) 346 #else /* VBOX */347 DECLINLINE(void) gen_op_andl_A0_ffff(void)348 #endif /* VBOX */349 297 { 350 298 tcg_gen_andi_tl(cpu_A0, cpu_A0, 0xffff); … … 375 323 #endif 376 324 377 #ifndef VBOX378 325 static inline void gen_op_mov_reg_v(int ot, int reg, TCGv t0) 379 #else /* VBOX */380 DECLINLINE(void) gen_op_mov_reg_v(int ot, int reg, TCGv t0)381 #endif /* VBOX */382 326 { 383 327 switch(ot) { … … 412 356 } 413 357 414 #ifndef VBOX415 358 static inline void gen_op_mov_reg_T0(int ot, int reg) 416 #else /* VBOX */417 DECLINLINE(void) gen_op_mov_reg_T0(int ot, int reg)418 #endif /* VBOX */419 359 { 420 360 
gen_op_mov_reg_v(ot, reg, cpu_T[0]); 421 361 } 422 362 423 #ifndef VBOX424 363 static inline void gen_op_mov_reg_T1(int ot, int reg) 425 #else /* VBOX */426 DECLINLINE(void) gen_op_mov_reg_T1(int ot, int reg)427 #endif /* VBOX */428 364 { 429 365 gen_op_mov_reg_v(ot, reg, cpu_T[1]); 430 366 } 431 367 432 #ifndef VBOX433 368 static inline void gen_op_mov_reg_A0(int size, int reg) 434 #else /* VBOX */435 DECLINLINE(void) gen_op_mov_reg_A0(int size, int reg)436 #endif /* VBOX */437 369 { 438 370 switch(size) { … … 460 392 } 461 393 462 #ifndef VBOX463 394 static inline void gen_op_mov_v_reg(int ot, TCGv t0, int reg) 464 #else /* VBOX */465 DECLINLINE(void) gen_op_mov_v_reg(int ot, TCGv t0, int reg)466 #endif /* VBOX */467 395 { 468 396 switch(ot) { … … 487 415 } 488 416 489 #ifndef VBOX490 417 static inline void gen_op_mov_TN_reg(int ot, int t_index, int reg) 491 #else /* VBOX */492 DECLINLINE(void) gen_op_mov_TN_reg(int ot, int t_index, int reg)493 #endif /* VBOX */494 418 { 495 419 gen_op_mov_v_reg(ot, cpu_T[t_index], reg); 496 420 } 497 421 498 #ifndef VBOX499 422 static inline void gen_op_movl_A0_reg(int reg) 500 #else /* VBOX */501 DECLINLINE(void) gen_op_movl_A0_reg(int reg)502 #endif /* VBOX */503 423 { 504 424 tcg_gen_ld32u_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg]) + REG_L_OFFSET); 505 425 } 506 426 507 #ifndef VBOX508 427 static inline void gen_op_addl_A0_im(int32_t val) 509 #else /* VBOX */510 DECLINLINE(void) gen_op_addl_A0_im(int32_t val)511 #endif /* VBOX */512 428 { 513 429 tcg_gen_addi_tl(cpu_A0, cpu_A0, val); … … 518 434 519 435 #ifdef TARGET_X86_64 520 #ifndef VBOX521 436 static inline void gen_op_addq_A0_im(int64_t val) 522 #else /* VBOX */523 DECLINLINE(void) gen_op_addq_A0_im(int64_t val)524 #endif /* VBOX */525 437 { 526 438 tcg_gen_addi_tl(cpu_A0, cpu_A0, val); … … 538 450 } 539 451 540 #ifndef VBOX541 452 static inline void gen_op_addl_T0_T1(void) 542 #else /* VBOX */543 DECLINLINE(void) gen_op_addl_T0_T1(void)544 #endif /* VBOX */545 453 { 546 454 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]); 547 455 } 548 456 549 #ifndef VBOX550 457 static inline void gen_op_jmp_T0(void) 551 #else /* VBOX */552 DECLINLINE(void) gen_op_jmp_T0(void)553 #endif /* VBOX */554 458 { 555 459 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUState, eip)); 556 460 } 557 461 558 #ifndef VBOX559 462 static inline void gen_op_add_reg_im(int size, int reg, int32_t val) 560 #else /* VBOX */561 DECLINLINE(void) gen_op_add_reg_im(int size, int reg, int32_t val)562 #endif /* VBOX */563 463 { 564 464 switch(size) { … … 586 486 } 587 487 588 #ifndef VBOX589 488 static inline void gen_op_add_reg_T0(int size, int reg) 590 #else /* VBOX */591 DECLINLINE(void) gen_op_add_reg_T0(int size, int reg)592 #endif /* VBOX */593 489 { 594 490 switch(size) { … … 616 512 } 617 513 618 #ifndef VBOX619 514 static inline void gen_op_set_cc_op(int32_t val) 620 #else /* VBOX */621 DECLINLINE(void) gen_op_set_cc_op(int32_t val)622 #endif /* VBOX */623 515 { 624 516 tcg_gen_movi_i32(cpu_cc_op, val); 625 517 } 626 518 627 #ifndef VBOX628 519 static inline void gen_op_addl_A0_reg_sN(int shift, int reg) 629 #else /* VBOX */630 DECLINLINE(void) gen_op_addl_A0_reg_sN(int shift, int reg)631 #endif /* VBOX */632 520 { 633 521 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg])); … … 639 527 #endif 640 528 } 529 641 530 #ifdef VBOX 642 531 DECLINLINE(void) gen_op_seg_check(int reg, bool keepA0) 643 532 { 644 533 /* It seems segments doesn't get out of sync - if they do in fact - enable below code. 
*/ 645 # ifdef FORCE_SEGMENT_SYNC646 # if 1534 # ifdef FORCE_SEGMENT_SYNC 535 # if 1 647 536 TCGv t0; 648 537 … … 652 541 tcg_gen_helper_0_1(helper_sync_seg, t0); 653 542 tcg_temp_free(t0); 654 # else543 # else 655 544 /* Our segments could be outdated, thus check for newselector field to see if update really needed */ 656 545 int skip_label; … … 689 578 tcg_temp_free(a0); 690 579 } 691 #endif /* 0 */ 692 #endif /* FORCE_SEGMENT_SYNC */ 693 } 694 #endif 695 696 #ifndef VBOX 580 # endif /* 0 */ 581 # endif /* FORCE_SEGMENT_SYNC */ 582 } 583 #endif /* VBOX */ 584 697 585 static inline void gen_op_movl_A0_seg(int reg) 698 #else /* VBOX */699 DECLINLINE(void) gen_op_movl_A0_seg(int reg)700 #endif /* VBOX */701 586 { 702 587 #ifdef VBOX … … 706 591 } 707 592 708 #ifndef VBOX709 593 static inline void gen_op_addl_A0_seg(int reg) 710 #else /* VBOX */711 DECLINLINE(void) gen_op_addl_A0_seg(int reg)712 #endif /* VBOX */713 594 { 714 595 #ifdef VBOX … … 723 604 724 605 #ifdef TARGET_X86_64 725 #ifndef VBOX726 606 static inline void gen_op_movq_A0_seg(int reg) 727 #else /* VBOX */728 DECLINLINE(void) gen_op_movq_A0_seg(int reg)729 #endif /* VBOX */730 607 { 731 608 #ifdef VBOX … … 735 612 } 736 613 737 #ifndef VBOX738 614 static inline void gen_op_addq_A0_seg(int reg) 739 #else /* VBOX */740 DECLINLINE(void) gen_op_addq_A0_seg(int reg)741 #endif /* VBOX */742 615 { 743 616 #ifdef VBOX … … 748 621 } 749 622 750 #ifndef VBOX751 623 static inline void gen_op_movq_A0_reg(int reg) 752 #else /* VBOX */753 DECLINLINE(void) gen_op_movq_A0_reg(int reg)754 #endif /* VBOX */755 624 { 756 625 tcg_gen_ld_tl(cpu_A0, cpu_env, offsetof(CPUState, regs[reg])); 757 626 } 758 627 759 #ifndef VBOX760 628 static inline void gen_op_addq_A0_reg_sN(int shift, int reg) 761 #else /* VBOX */762 DECLINLINE(void) gen_op_addq_A0_reg_sN(int shift, int reg)763 #endif /* VBOX */764 629 { 765 630 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[reg])); … … 770 635 #endif 771 636 772 #ifndef VBOX773 637 static inline void gen_op_lds_T0_A0(int idx) 774 #else /* VBOX */775 DECLINLINE(void) gen_op_lds_T0_A0(int idx)776 #endif /* VBOX */777 638 { 778 639 int mem_index = (idx >> 2) - 1; … … 791 652 } 792 653 793 #ifndef VBOX794 654 static inline void gen_op_ld_v(int idx, TCGv t0, TCGv a0) 795 #else /* VBOX */796 DECLINLINE(void) gen_op_ld_v(int idx, TCGv t0, TCGv a0)797 #endif /* VBOX */798 655 { 799 656 int mem_index = (idx >> 2) - 1; … … 816 673 817 674 /* XXX: always use ldu or lds */ 818 #ifndef VBOX819 675 static inline void gen_op_ld_T0_A0(int idx) 820 #else /* VBOX */821 DECLINLINE(void) gen_op_ld_T0_A0(int idx)822 #endif /* VBOX */823 676 { 824 677 gen_op_ld_v(idx, cpu_T[0], cpu_A0); 825 678 } 826 679 827 #ifndef VBOX828 680 static inline void gen_op_ldu_T0_A0(int idx) 829 #else /* VBOX */830 DECLINLINE(void) gen_op_ldu_T0_A0(int idx)831 #endif /* VBOX */832 681 { 833 682 gen_op_ld_v(idx, cpu_T[0], cpu_A0); 834 683 } 835 684 836 #ifndef VBOX837 685 static inline void gen_op_ld_T1_A0(int idx) 838 #else /* VBOX */839 DECLINLINE(void) gen_op_ld_T1_A0(int idx)840 #endif /* VBOX */841 686 { 842 687 gen_op_ld_v(idx, cpu_T[1], cpu_A0); 843 688 } 844 689 845 #ifndef VBOX846 690 static inline void gen_op_st_v(int idx, TCGv t0, TCGv a0) 847 #else /* VBOX */848 DECLINLINE(void) gen_op_st_v(int idx, TCGv t0, TCGv a0)849 #endif /* VBOX */850 691 { 851 692 int mem_index = (idx >> 2) - 1; … … 867 708 } 868 709 869 #ifndef VBOX870 710 static inline void gen_op_st_T0_A0(int idx) 871 #else /* VBOX */872 DECLINLINE(void) gen_op_st_T0_A0(int 
idx)873 #endif /* VBOX */874 711 { 875 712 gen_op_st_v(idx, cpu_T[0], cpu_A0); 876 713 } 877 714 878 #ifndef VBOX879 715 static inline void gen_op_st_T1_A0(int idx) 880 #else /* VBOX */881 DECLINLINE(void) gen_op_st_T1_A0(int idx)882 #endif /* VBOX */883 716 { 884 717 gen_op_st_v(idx, cpu_T[1], cpu_A0); … … 926 759 #endif 927 760 928 #ifndef VBOX929 761 static inline void gen_jmp_im(target_ulong pc) 930 #else /* VBOX */931 DECLINLINE(void) gen_jmp_im(target_ulong pc)932 #endif /* VBOX */933 762 { 934 763 tcg_gen_movi_tl(cpu_tmp0, pc); … … 940 769 { 941 770 gen_jmp_im(pc); 942 # ifdef VBOX_DUMP_STATE771 # ifdef VBOX_DUMP_STATE 943 772 tcg_gen_helper_0_0(helper_dump_state); 944 #endif 945 } 946 947 #endif 948 949 #ifndef VBOX 773 # endif 774 } 775 #endif /* VBOX */ 776 950 777 static inline void gen_string_movl_A0_ESI(DisasContext *s) 951 #else /* VBOX */952 DECLINLINE(void) gen_string_movl_A0_ESI(DisasContext *s)953 #endif /* VBOX */954 778 { 955 779 int override; … … 986 810 } 987 811 988 #ifndef VBOX989 812 static inline void gen_string_movl_A0_EDI(DisasContext *s) 990 #else /* VBOX */991 DECLINLINE(void) gen_string_movl_A0_EDI(DisasContext *s)992 #endif /* VBOX */993 813 { 994 814 #ifdef TARGET_X86_64 … … 1011 831 } 1012 832 1013 #ifndef VBOX1014 833 static inline void gen_op_movl_T0_Dshift(int ot) 1015 #else /* VBOX */1016 DECLINLINE(void) gen_op_movl_T0_Dshift(int ot)1017 #endif /* VBOX */1018 834 { 1019 835 tcg_gen_ld32s_tl(cpu_T[0], cpu_env, offsetof(CPUState, df)); … … 1055 871 } 1056 872 1057 #ifndef VBOX1058 873 static inline void gen_op_jnz_ecx(int size, int label1) 1059 #else /* VBOX */1060 DECLINLINE(void) gen_op_jnz_ecx(int size, int label1)1061 #endif /* VBOX */1062 874 { 1063 875 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX])); … … 1066 878 } 1067 879 1068 #ifndef VBOX1069 880 static inline void gen_op_jz_ecx(int size, int label1) 1070 #else /* VBOX */1071 DECLINLINE(void) gen_op_jz_ecx(int size, int label1)1072 #endif /* VBOX */1073 881 { 1074 882 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUState, regs[R_ECX])); … … 1128 936 } 1129 937 1130 #ifndef VBOX1131 938 static inline void gen_movs(DisasContext *s, int ot) 1132 #else /* VBOX */1133 DECLINLINE(void) gen_movs(DisasContext *s, int ot)1134 #endif /* VBOX */1135 939 { 1136 940 gen_string_movl_A0_ESI(s); … … 1143 947 } 1144 948 1145 #ifndef VBOX1146 949 static inline void gen_update_cc_op(DisasContext *s) 1147 #else /* VBOX */1148 DECLINLINE(void) gen_update_cc_op(DisasContext *s)1149 #endif /* VBOX */1150 950 { 1151 951 if (s->cc_op != CC_OP_DYNAMIC) { … … 1167 967 } 1168 968 1169 #ifndef VBOX1170 969 static inline void gen_op_cmpl_T0_T1_cc(void) 1171 #else /* VBOX */1172 DECLINLINE(void) gen_op_cmpl_T0_T1_cc(void)1173 #endif /* VBOX */1174 970 { 1175 971 tcg_gen_mov_tl(cpu_cc_src, cpu_T[1]); … … 1177 973 } 1178 974 1179 #ifndef VBOX1180 975 static inline void gen_op_testl_T0_T1_cc(void) 1181 #else /* VBOX */1182 DECLINLINE(void) gen_op_testl_T0_T1_cc(void)1183 #endif /* VBOX */1184 976 { 1185 977 tcg_gen_discard_tl(cpu_cc_src); … … 1237 1029 } 1238 1030 1239 #ifndef VBOX1240 1031 static inline void gen_setcc_slow_T0(DisasContext *s, int jcc_op) 1241 #else /* VBOX */1242 DECLINLINE(void) gen_setcc_slow_T0(DisasContext *s, int jcc_op)1243 #endif /* VBOX */1244 1032 { 1245 1033 if (s->cc_op != CC_OP_DYNAMIC) … … 1348 1136 /* generate a conditional jump to label 'l1' according to jump opcode 1349 1137 value 'b'. In the fast case, T0 is guaranteed not to be used. 
*/ 1350 #ifndef VBOX1351 1138 static inline void gen_jcc1(DisasContext *s, int cc_op, int b, int l1) 1352 #else /* VBOX */1353 DECLINLINE(void) gen_jcc1(DisasContext *s, int cc_op, int b, int l1)1354 #endif /* VBOX */1355 1139 { 1356 1140 int inv, jcc_op, size, cond; … … 1563 1347 } 1564 1348 1565 #ifndef VBOX1566 1349 static inline void gen_stos(DisasContext *s, int ot) 1567 #else /* VBOX */1568 DECLINLINE(void) gen_stos(DisasContext *s, int ot)1569 #endif /* VBOX */1570 1350 { 1571 1351 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX); … … 1576 1356 } 1577 1357 1578 #ifndef VBOX1579 1358 static inline void gen_lods(DisasContext *s, int ot) 1580 #else /* VBOX */1581 DECLINLINE(void) gen_lods(DisasContext *s, int ot)1582 #endif /* VBOX */1583 1359 { 1584 1360 gen_string_movl_A0_ESI(s); … … 1589 1365 } 1590 1366 1591 #ifndef VBOX1592 1367 static inline void gen_scas(DisasContext *s, int ot) 1593 #else /* VBOX */1594 DECLINLINE(void) gen_scas(DisasContext *s, int ot)1595 #endif /* VBOX */1596 1368 { 1597 1369 gen_op_mov_TN_reg(OT_LONG, 0, R_EAX); … … 1603 1375 } 1604 1376 1605 #ifndef VBOX1606 1377 static inline void gen_cmps(DisasContext *s, int ot) 1607 #else /* VBOX */1608 DECLINLINE(void) gen_cmps(DisasContext *s, int ot)1609 #endif /* VBOX */1610 1378 { 1611 1379 gen_string_movl_A0_ESI(s); … … 1619 1387 } 1620 1388 1621 #ifndef VBOX1622 1389 static inline void gen_ins(DisasContext *s, int ot) 1623 #else /* VBOX */1624 DECLINLINE(void) gen_ins(DisasContext *s, int ot)1625 #endif /* VBOX */1626 1390 { 1627 1391 if (use_icount) … … 1643 1407 } 1644 1408 1645 #ifndef VBOX1646 1409 static inline void gen_outs(DisasContext *s, int ot) 1647 #else /* VBOX */1648 DECLINLINE(void) gen_outs(DisasContext *s, int ot)1649 #endif /* VBOX */1650 1410 { 1651 1411 if (use_icount) … … 1668 1428 /* same method as Valgrind : we generate jumps to current or next 1669 1429 instruction */ 1670 #ifndef VBOX1671 1430 #define GEN_REPZ(op) \ 1672 1431 static inline void gen_repz_ ## op(DisasContext *s, int ot, \ … … 1684 1443 gen_jmp(s, cur_eip); \ 1685 1444 } 1686 #else /* VBOX */ 1687 #define GEN_REPZ(op) \ 1688 DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot, \ 1689 target_ulong cur_eip, target_ulong next_eip) \ 1690 { \ 1691 int l2; \ 1692 gen_update_cc_op(s); \ 1693 l2 = gen_jz_ecx_string(s, next_eip); \ 1694 gen_ ## op(s, ot); \ 1695 gen_op_add_reg_im(s->aflag, R_ECX, -1); \ 1696 /* a loop would cause two single step exceptions if ECX = 1 \ 1697 before rep string_insn */ \ 1698 if (!s->jmp_opt) \ 1699 gen_op_jz_ecx(s->aflag, l2); \ 1700 gen_jmp(s, cur_eip); \ 1701 } 1702 #endif /* VBOX */ 1703 1704 #ifndef VBOX 1445 1705 1446 #define GEN_REPZ2(op) \ 1706 1447 static inline void gen_repz_ ## op(DisasContext *s, int ot, \ … … 1720 1461 gen_jmp(s, cur_eip); \ 1721 1462 } 1722 #else /* VBOX */1723 #define GEN_REPZ2(op) \1724 DECLINLINE(void) gen_repz_ ## op(DisasContext *s, int ot, \1725 target_ulong cur_eip, \1726 target_ulong next_eip, \1727 int nz) \1728 { \1729 int l2;\1730 gen_update_cc_op(s); \1731 l2 = gen_jz_ecx_string(s, next_eip); \1732 gen_ ## op(s, ot); \1733 gen_op_add_reg_im(s->aflag, R_ECX, -1); \1734 gen_op_set_cc_op(CC_OP_SUBB + ot); \1735 gen_jcc1(s, CC_OP_SUBB + ot, (JCC_Z << 1) | (nz ^ 1), l2); \1736 if (!s->jmp_opt) \1737 gen_op_jz_ecx(s->aflag, l2); \1738 gen_jmp(s, cur_eip); \1739 }1740 #endif /* VBOX */1741 1463 1742 1464 GEN_REPZ(movs) … … 2013 1735 } 2014 1736 2015 #ifndef VBOX2016 1737 static inline void tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2) 2017 #else /* VBOX */2018 
DECLINLINE(void) tcg_gen_lshift(TCGv ret, TCGv arg1, target_long arg2)2019 #endif /* VBOX */2020 1738 { 2021 1739 if (arg2 >= 0) … … 2642 2360 } 2643 2361 2644 #ifndef VBOX2645 2362 static inline uint32_t insn_get(DisasContext *s, int ot) 2646 #else /* VBOX */2647 DECLINLINE(uint32_t) insn_get(DisasContext *s, int ot)2648 #endif /* VBOX */2649 2363 { 2650 2364 uint32_t ret; … … 2668 2382 } 2669 2383 2670 #ifndef VBOX2671 2384 static inline int insn_const_size(unsigned int ot) 2672 #else /* VBOX */2673 DECLINLINE(int) insn_const_size(unsigned int ot)2674 #endif /* VBOX */2675 2385 { 2676 2386 if (ot <= OT_LONG) … … 2680 2390 } 2681 2391 2682 #ifndef VBOX2683 2392 static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip) 2684 #else /* VBOX */2685 DECLINLINE(void) gen_goto_tb(DisasContext *s, int tb_num, target_ulong eip)2686 #endif /* VBOX */2687 2393 { 2688 2394 TranslationBlock *tb; … … 2708 2414 } 2709 2415 2710 #ifndef VBOX2711 2416 static inline void gen_jcc(DisasContext *s, int b, 2712 #else /* VBOX */2713 DECLINLINE(void) gen_jcc(DisasContext *s, int b,2714 #endif /* VBOX */2715 2417 target_ulong val, target_ulong next_eip) 2716 2418 { … … 2776 2478 } 2777 2479 2778 #ifndef VBOX2779 2480 static inline void gen_op_movl_T0_seg(int seg_reg) 2780 #else /* VBOX */2781 DECLINLINE(void) gen_op_movl_T0_seg(int seg_reg)2782 #endif /* VBOX */2783 2481 { 2784 2482 tcg_gen_ld32u_tl(cpu_T[0], cpu_env, … … 2786 2484 } 2787 2485 2788 #ifndef VBOX2789 2486 static inline void gen_op_movl_seg_T0_vm(int seg_reg) 2790 #else /* VBOX */2791 DECLINLINE(void) gen_op_movl_seg_T0_vm(int seg_reg)2792 #endif /* VBOX */2793 2487 { 2794 2488 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffff); … … 2835 2529 } 2836 2530 2837 #ifndef VBOX2838 2531 static inline int svm_is_rep(int prefixes) 2839 #else /* VBOX */2840 DECLINLINE(int) svm_is_rep(int prefixes)2841 #endif /* VBOX */2842 2532 { 2843 2533 return ((prefixes & (PREFIX_REPZ | PREFIX_REPNZ)) ? 
8 : 0); 2844 2534 } 2845 2535 2846 #ifndef VBOX2847 2536 static inline void 2848 #else /* VBOX */2849 DECLINLINE(void)2850 #endif /* VBOX */2851 2537 gen_svm_check_intercept_param(DisasContext *s, target_ulong pc_start, 2852 2538 uint32_t type, uint64_t param) … … 2862 2548 } 2863 2549 2864 #ifndef VBOX2865 2550 static inline void 2866 #else /* VBOX */2867 DECLINLINE(void)2868 #endif2869 2551 gen_svm_check_intercept(DisasContext *s, target_ulong pc_start, uint64_t type) 2870 2552 { … … 2872 2554 } 2873 2555 2874 #ifndef VBOX2875 2556 static inline void gen_stack_update(DisasContext *s, int addend) 2876 #else /* VBOX */2877 DECLINLINE(void) gen_stack_update(DisasContext *s, int addend)2878 #endif /* VBOX */2879 2557 { 2880 2558 #ifdef TARGET_X86_64 … … 3194 2872 } 3195 2873 3196 #ifndef VBOX3197 2874 static inline void gen_ldq_env_A0(int idx, int offset) 3198 #else /* VBOX */3199 DECLINLINE(void) gen_ldq_env_A0(int idx, int offset)3200 #endif /* VBOX */3201 2875 { 3202 2876 int mem_index = (idx >> 2) - 1; … … 3205 2879 } 3206 2880 3207 #ifndef VBOX3208 2881 static inline void gen_stq_env_A0(int idx, int offset) 3209 #else /* VBOX */3210 DECLINLINE(void) gen_stq_env_A0(int idx, int offset)3211 #endif /* VBOX */3212 2882 { 3213 2883 int mem_index = (idx >> 2) - 1; … … 3216 2886 } 3217 2887 3218 #ifndef VBOX3219 2888 static inline void gen_ldo_env_A0(int idx, int offset) 3220 #else /* VBOX */3221 DECLINLINE(void) gen_ldo_env_A0(int idx, int offset)3222 #endif /* VBOX */3223 2889 { 3224 2890 int mem_index = (idx >> 2) - 1; … … 3230 2896 } 3231 2897 3232 #ifndef VBOX3233 2898 static inline void gen_sto_env_A0(int idx, int offset) 3234 #else /* VBOX */3235 DECLINLINE(void) gen_sto_env_A0(int idx, int offset)3236 #endif /* VBOX */3237 2899 { 3238 2900 int mem_index = (idx >> 2) - 1; … … 3244 2906 } 3245 2907 3246 #ifndef VBOX3247 2908 static inline void gen_op_movo(int d_offset, int s_offset) 3248 #else /* VBOX */3249 DECLINLINE(void) gen_op_movo(int d_offset, int s_offset)3250 #endif /* VBOX */3251 2909 { 3252 2910 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset); … … 3256 2914 } 3257 2915 3258 #ifndef VBOX3259 2916 static inline void gen_op_movq(int d_offset, int s_offset) 3260 #else /* VBOX */3261 DECLINLINE(void) gen_op_movq(int d_offset, int s_offset)3262 #endif /* VBOX */3263 2917 { 3264 2918 tcg_gen_ld_i64(cpu_tmp1_i64, cpu_env, s_offset); … … 3266 2920 } 3267 2921 3268 #ifndef VBOX3269 2922 static inline void gen_op_movl(int d_offset, int s_offset) 3270 #else /* VBOX */3271 DECLINLINE(void) gen_op_movl(int d_offset, int s_offset)3272 #endif /* VBOX */3273 2923 { 3274 2924 tcg_gen_ld_i32(cpu_tmp2_i32, cpu_env, s_offset); … … 3276 2926 } 3277 2927 3278 #ifndef VBOX3279 2928 static inline void gen_op_movq_env_0(int d_offset) 3280 #else /* VBOX */3281 DECLINLINE(void) gen_op_movq_env_0(int d_offset)3282 #endif /* VBOX */3283 2929 { 3284 2930 tcg_gen_movi_i64(cpu_tmp1_i64, 0); … … 8360 8006 basic block 'tb'. If search_pc is TRUE, also generate PC 8361 8007 information for each intermediate instruction. */ 8362 #ifndef VBOX8363 8008 static inline void gen_intermediate_code_internal(CPUState *env, 8364 #else /* VBOX */8365 DECLINLINE(void) gen_intermediate_code_internal(CPUState *env,8366 #endif /* VBOX */8367 8009 TranslationBlock *tb, 8368 8010 int search_pc) -
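Editorial note on the translate.c hunks above (not part of the changeset itself): apart from re-indenting the nested preprocessor directives inside the VBOX-only segment-sync block and dropping the duplicated VBOX variants of the GEN_REPZ/GEN_REPZ2 macros, the hunks apply one mechanical change, removing the #ifndef VBOX / DECLINLINE dual declaration around each inline helper and keeping only the plain QEMU spelling. A minimal sketch of that shape follows; DECLINLINE is assumed here to be the IPRT macro from iprt/cdefs.h that expands to a compiler-appropriate "static inline", and the stand-in definition plus the suffixed second function name exist only so the sketch compiles on its own.

/* Sketch only, not code from r36125. */
#ifndef DECLINLINE
# define DECLINLINE(type) static inline type   /* stand-in; assumption, see note above */
#endif

/* Shape before this changeset: every helper was declared twice. */
#ifndef VBOX
static inline void gen_op_movl_T0_seg(int seg_reg)
#else  /* VBOX */
DECLINLINE(void) gen_op_movl_T0_seg(int seg_reg)
#endif /* VBOX */
{
    (void)seg_reg;                              /* body left untouched by the diff */
}

/* Shape after this changeset: one spelling serves both builds.  The suffix
 * on the name is only here to keep this sketch a single valid translation unit. */
static inline void gen_op_movl_T0_seg_sketch(int seg_reg)
{
    (void)seg_reg;
}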
trunk/src/recompiler/tcg/i386/tcg-target.c
r33540 r36125 84 84 85 85 /* maximum number of register used for input function arguments */ 86 #ifndef VBOX87 86 static inline int tcg_target_get_call_iarg_regs_count(int flags) 88 #else /* VBOX */89 DECLINLINE(int) tcg_target_get_call_iarg_regs_count(int flags)90 #endif /* VBOX */91 87 { 92 88 flags &= TCG_CALL_TYPE_MASK; … … 159 155 160 156 /* test if a constant matches the constraint */ 161 #ifndef VBOX162 157 static inline int tcg_target_const_match(tcg_target_long val, 163 #else /* VBOX */164 DECLINLINE(int) tcg_target_const_match(tcg_target_long val,165 #endif /* VBOX */166 158 const TCGArgConstraint *arg_ct) 167 159 { … … 236 228 #endif 237 229 238 #ifndef VBOX239 230 static inline void tcg_out_opc(TCGContext *s, int opc) 240 #else /* VBOX */241 DECLINLINE(void) tcg_out_opc(TCGContext *s, int opc)242 #endif /* VBOX */243 231 { 244 232 if (opc & P_EXT) … … 247 235 } 248 236 249 #ifndef VBOX250 237 static inline void tcg_out_modrm(TCGContext *s, int opc, int r, int rm) 251 #else /* VBOX */252 DECLINLINE(void) tcg_out_modrm(TCGContext *s, int opc, int r, int rm)253 #endif /* VBOX */254 238 { 255 239 tcg_out_opc(s, opc); … … 258 242 259 243 /* rm == -1 means no register index */ 260 #ifndef VBOX261 244 static inline void tcg_out_modrm_offset(TCGContext *s, int opc, int r, int rm, 262 #else /* VBOX */263 DECLINLINE(void) tcg_out_modrm_offset(TCGContext *s, int opc, int r, int rm,264 #endif /* VBOX */265 245 int32_t offset) 266 246 { … … 295 275 } 296 276 297 #ifndef VBOX298 277 static inline void tcg_out_mov(TCGContext *s, int ret, int arg) 299 #else /* VBOX */300 DECLINLINE(void) tcg_out_mov(TCGContext *s, int ret, int arg)301 #endif /* VBOX */302 278 { 303 279 if (arg != ret) … … 305 281 } 306 282 307 #ifndef VBOX308 283 static inline void tcg_out_movi(TCGContext *s, TCGType type, 309 #else /* VBOX */310 DECLINLINE(void) tcg_out_movi(TCGContext *s, TCGType type,311 #endif /* VBOX */312 284 int ret, int32_t arg) 313 285 { … … 321 293 } 322 294 323 #ifndef VBOX324 295 static inline void tcg_out_push(TCGContext *s, int reg) 325 #else /* VBOX */326 DECLINLINE(void) tcg_out_push(TCGContext *s, int reg)327 #endif /* VBOX */328 296 { 329 297 tcg_out_opc(s, 0x50 + reg); 330 298 } 331 299 332 #ifndef VBOX333 300 static inline void tcg_out_pop(TCGContext *s, int reg) 334 #else /* VBOX */335 DECLINLINE(void) tcg_out_pop(TCGContext *s, int reg)336 #endif /* VBOX */337 301 { 338 302 tcg_out_opc(s, 0x58 + reg); 339 303 } 340 304 341 #ifndef VBOX342 305 static inline void tcg_out_ld(TCGContext *s, TCGType type, int ret, 343 #else /* VBOX */344 DECLINLINE(void) tcg_out_ld(TCGContext *s, TCGType type, int ret,345 #endif /* VBOX */346 306 int arg1, tcg_target_long arg2) 347 307 { … … 350 310 } 351 311 352 #ifndef VBOX353 312 static inline void tcg_out_st(TCGContext *s, TCGType type, int arg, 354 #else /* VBOX */355 DECLINLINE(void) tcg_out_st(TCGContext *s, TCGType type, int arg,356 #endif /* VBOX */357 313 int arg1, tcg_target_long arg2) 358 314 { … … 361 317 } 362 318 363 #ifndef VBOX364 319 static inline void tgen_arithi(TCGContext *s, int c, int r0, int32_t val) 365 #else /* VBOX */366 DECLINLINE(void) tgen_arithi(TCGContext *s, int c, int r0, int32_t val)367 #endif /* VBOX */368 320 { 369 321 if (val == (int8_t)val) { … … 444 396 445 397 #ifdef VBOX 446 DECLINLINE(void) 447 tcg_out_long_call(TCGContext *s, void* dst)398 399 DECLINLINE(void) tcg_out_long_call(TCGContext *s, void* dst) 448 400 { 449 401 intptr_t disp; … … 455 407 tcg_out32(s, disp); /* disp32 */ 456 408 } 457 DECLINLINE(void) 458 
tcg_out_long_jmp(TCGContext *s, void* dst)409 410 DECLINLINE(void) tcg_out_long_jmp(TCGContext *s, void* dst) 459 411 { 460 412 intptr_t disp = (uintptr_t)dst - (uintptr_t)s->code_ptr - 5; … … 462 414 tcg_out32(s, disp); /* disp32 */ 463 415 } 416 464 417 #endif /* VBOX */ 465 466 418 467 419 /* XXX: we implement it at the target level to avoid having to … … 1126 1078 } 1127 1079 1128 #ifndef VBOX1129 1080 static inline void tcg_out_op(TCGContext *s, int opc, 1130 #else /* VBOX */1131 DECLINLINE(void) tcg_out_op(TCGContext *s, int opc,1132 #endif /* VBOX */1133 1081 const TCGArg *args, const int *const_args) 1134 1082 { -
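Side note on the tcg-target.c hunks above: the only VBOX additions kept in this file are tcg_out_long_call and tcg_out_long_jmp, which the changeset merely re-spaces. The arithmetic they rely on is visible in the diff, and the hypothetical helper below restates it on its own: an x86 E8 (call rel32) or E9 (jmp rel32) instruction is five bytes long and its displacement is measured from the end of the instruction, hence the "- 5".

#include <stdint.h>

/* rel32_for() is a hypothetical helper, shown only to restate the
 * displacement rule used by tcg_out_long_call/tcg_out_long_jmp. */
static int32_t rel32_for(const uint8_t *code_ptr, const void *dst)
{
    /* code_ptr: where the E8/E9 opcode byte is about to be emitted. */
    return (int32_t)((uintptr_t)dst - (uintptr_t)code_ptr - 5);
}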
trunk/src/recompiler/tcg/i386/tcg-target.h
r29520 r36125
57 57 #endif
58 58
59 #ifndef VBOX
60 59 static inline void flush_icache_range(unsigned long start, unsigned long stop)
61 #else
62 DECLINLINE(void) flush_icache_range(unsigned long start, unsigned long stop)
63 #endif
64 60 {
65 61 }
-
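Note on the tcg-target.h hunk above: flush_icache_range stays an empty inline because x86 keeps its instruction cache coherent with ordinary stores, so nothing needs flushing after code is written into a buffer. The snippet below is a self-contained illustration of how a backend-neutral caller would still invoke the hook; the caller name is hypothetical.

/* Local copy of the (empty) i386 hook so this sketch stands alone. */
static inline void flush_icache_range(unsigned long start, unsigned long stop)
{
    (void)start;
    (void)stop;                        /* no-op on i386, as in the hunk above */
}

/* Hypothetical backend-neutral caller, for illustration only. */
static void finish_codegen(unsigned char *buf, unsigned long size)
{
    flush_icache_range((unsigned long)buf, (unsigned long)(buf + size));
}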
trunk/src/recompiler/tcg/tcg-op.h
r29520 r36125 32 32 int gen_new_label(void); 33 33 34 #ifndef VBOX35 34 static inline void tcg_gen_op1(int opc, TCGv arg1) 36 #else /* VBOX */37 DECLINLINE(void) tcg_gen_op1(int opc, TCGv arg1)38 #endif /* VBOX */39 35 { 40 36 *gen_opc_ptr++ = opc; … … 42 38 } 43 39 44 #ifndef VBOX45 40 static inline void tcg_gen_op1i(int opc, TCGArg arg1) 46 #else /* VBOX */47 DECLINLINE(void) tcg_gen_op1i(int opc, TCGArg arg1)48 #endif /* VBOX */49 41 { 50 42 *gen_opc_ptr++ = opc; … … 52 44 } 53 45 54 #ifndef VBOX55 46 static inline void tcg_gen_op2(int opc, TCGv arg1, TCGv arg2) 56 #else /* VBOX */57 DECLINLINE(void) tcg_gen_op2(int opc, TCGv arg1, TCGv arg2)58 #endif /* VBOX */59 47 { 60 48 *gen_opc_ptr++ = opc; … … 63 51 } 64 52 65 #ifndef VBOX66 53 static inline void tcg_gen_op2i(int opc, TCGv arg1, TCGArg arg2) 67 #else /* VBOX */68 DECLINLINE(void) tcg_gen_op2i(int opc, TCGv arg1, TCGArg arg2)69 #endif /* VBOX */70 54 { 71 55 *gen_opc_ptr++ = opc; … … 74 58 } 75 59 76 #ifndef VBOX77 60 static inline void tcg_gen_op2ii(int opc, TCGArg arg1, TCGArg arg2) 78 #else /* VBOX */79 DECLINLINE(void) tcg_gen_op2ii(int opc, TCGArg arg1, TCGArg arg2)80 #endif /* VBOX */81 61 { 82 62 *gen_opc_ptr++ = opc; … … 85 65 } 86 66 87 #ifndef VBOX88 67 static inline void tcg_gen_op3(int opc, TCGv arg1, TCGv arg2, TCGv arg3) 89 #else /* VBOX */90 DECLINLINE(void) tcg_gen_op3(int opc, TCGv arg1, TCGv arg2, TCGv arg3)91 #endif /* VBOX */92 68 { 93 69 *gen_opc_ptr++ = opc; … … 97 73 } 98 74 99 #ifndef VBOX100 75 static inline void tcg_gen_op3i(int opc, TCGv arg1, TCGv arg2, TCGArg arg3) 101 #else /* VBOX */102 DECLINLINE(void) tcg_gen_op3i(int opc, TCGv arg1, TCGv arg2, TCGArg arg3)103 #endif /* VBOX */104 76 { 105 77 *gen_opc_ptr++ = opc; … … 109 81 } 110 82 111 #ifndef VBOX112 83 static inline void tcg_gen_op4(int opc, TCGv arg1, TCGv arg2, TCGv arg3, 113 #else /* VBOX */114 DECLINLINE(void) tcg_gen_op4(int opc, TCGv arg1, TCGv arg2, TCGv arg3,115 #endif /* VBOX */116 84 TCGv arg4) 117 85 { … … 123 91 } 124 92 125 #ifndef VBOX126 93 static inline void tcg_gen_op4i(int opc, TCGv arg1, TCGv arg2, TCGv arg3, 127 #else /* VBOX */128 DECLINLINE(void) tcg_gen_op4i(int opc, TCGv arg1, TCGv arg2, TCGv arg3,129 #endif /* VBOX */130 94 TCGArg arg4) 131 95 { … … 137 101 } 138 102 139 #ifndef VBOX140 103 static inline void tcg_gen_op4ii(int opc, TCGv arg1, TCGv arg2, TCGArg arg3, 141 #else /* VBOX */142 DECLINLINE(void) tcg_gen_op4ii(int opc, TCGv arg1, TCGv arg2, TCGArg arg3,143 #endif /* VBOX */144 104 TCGArg arg4) 145 105 { … … 151 111 } 152 112 153 #ifndef VBOX154 113 static inline void tcg_gen_op5(int opc, TCGv arg1, TCGv arg2, 155 #else /* VBOX */156 DECLINLINE(void) tcg_gen_op5(int opc, TCGv arg1, TCGv arg2,157 #endif /* VBOX */158 114 TCGv arg3, TCGv arg4, 159 115 TCGv arg5) … … 167 123 } 168 124 169 #ifndef VBOX170 125 static inline void tcg_gen_op5i(int opc, TCGv arg1, TCGv arg2, 171 #else /* VBOX */172 DECLINLINE(void) tcg_gen_op5i(int opc, TCGv arg1, TCGv arg2,173 #endif /* VBOX */174 126 TCGv arg3, TCGv arg4, 175 127 TCGArg arg5) … … 183 135 } 184 136 185 #ifndef VBOX186 137 static inline void tcg_gen_op6(int opc, TCGv arg1, TCGv arg2, 187 #else /* VBOX */188 DECLINLINE(void) tcg_gen_op6(int opc, TCGv arg1, TCGv arg2,189 #endif /* VBOX */190 138 TCGv arg3, TCGv arg4, 191 139 TCGv arg5, TCGv arg6) … … 200 148 } 201 149 202 #ifndef VBOX203 150 static inline void tcg_gen_op6ii(int opc, TCGv arg1, TCGv arg2, 204 #else /* VBOX */205 DECLINLINE(void) tcg_gen_op6ii(int opc, TCGv arg1, TCGv arg2,206 #endif /* VBOX */207 151 TCGv 
arg3, TCGv arg4, 208 152 TCGArg arg5, TCGArg arg6) … … 217 161 } 218 162 219 #ifndef VBOX220 163 static inline void gen_set_label(int n) 221 #else /* VBOX */222 DECLINLINE(void) gen_set_label(int n)223 #endif /* VBOX */224 164 { 225 165 tcg_gen_op1i(INDEX_op_set_label, n); 226 166 } 227 167 228 #ifndef VBOX229 168 static inline void tcg_gen_br(int label) 230 #else /* VBOX */231 DECLINLINE(void) tcg_gen_br(int label)232 #endif /* VBOX */233 169 { 234 170 tcg_gen_op1i(INDEX_op_br, label); 235 171 } 236 172 237 #ifndef VBOX238 173 static inline void tcg_gen_mov_i32(TCGv ret, TCGv arg) 239 #else /* VBOX */240 DECLINLINE(void) tcg_gen_mov_i32(TCGv ret, TCGv arg)241 #endif /* VBOX */242 174 { 243 175 if (GET_TCGV(ret) != GET_TCGV(arg)) … … 245 177 } 246 178 247 #ifndef VBOX248 179 static inline void tcg_gen_movi_i32(TCGv ret, int32_t arg) 249 #else /* VBOX */250 DECLINLINE(void) tcg_gen_movi_i32(TCGv ret, int32_t arg)251 #endif /* VBOX */252 180 { 253 181 tcg_gen_op2i(INDEX_op_movi_i32, ret, arg); … … 257 185 #define TCG_HELPER_CALL_FLAGS 0 258 186 259 #ifndef VBOX260 187 static inline void tcg_gen_helper_0_0(void *func) 261 #else /* VBOX */262 DECLINLINE(void) tcg_gen_helper_0_0(void *func)263 #endif /* VBOX */264 188 { 265 189 TCGv t0; … … 271 195 } 272 196 273 #ifndef VBOX274 197 static inline void tcg_gen_helper_0_1(void *func, TCGv arg) 275 #else /* VBOX */276 DECLINLINE(void) tcg_gen_helper_0_1(void *func, TCGv arg)277 #endif /* VBOX */278 198 { 279 199 TCGv t0; … … 285 205 } 286 206 287 #ifndef VBOX288 207 static inline void tcg_gen_helper_0_2(void *func, TCGv arg1, TCGv arg2) 289 #else /* VBOX */290 DECLINLINE(void) tcg_gen_helper_0_2(void *func, TCGv arg1, TCGv arg2)291 #endif /* VBOX */292 208 { 293 209 TCGv args[2]; … … 302 218 } 303 219 304 #ifndef VBOX305 220 static inline void tcg_gen_helper_0_3(void *func, 306 #else /* VBOX */307 DECLINLINE(void) tcg_gen_helper_0_3(void *func,308 #endif /* VBOX */309 221 TCGv arg1, TCGv arg2, TCGv arg3) 310 222 { … … 321 233 } 322 234 323 #ifndef VBOX324 235 static inline void tcg_gen_helper_0_4(void *func, TCGv arg1, TCGv arg2, 325 #else /* VBOX */326 DECLINLINE(void) tcg_gen_helper_0_4(void *func, TCGv arg1, TCGv arg2,327 #endif /* VBOX */328 236 TCGv arg3, TCGv arg4) 329 237 { … … 341 249 } 342 250 343 #ifndef VBOX344 251 static inline void tcg_gen_helper_1_0(void *func, TCGv ret) 345 #else /* VBOX */346 DECLINLINE(void) tcg_gen_helper_1_0(void *func, TCGv ret)347 #endif /* VBOX */348 252 { 349 253 TCGv t0; … … 355 259 } 356 260 357 #ifndef VBOX358 261 static inline void tcg_gen_helper_1_1(void *func, TCGv ret, TCGv arg1) 359 #else /* VBOX */360 DECLINLINE(void) tcg_gen_helper_1_1(void *func, TCGv ret, TCGv arg1)361 #endif /* VBOX */362 262 { 363 263 TCGv t0; … … 369 269 } 370 270 371 #ifndef VBOX372 271 static inline void tcg_gen_helper_1_2(void *func, TCGv ret, 373 #else /* VBOX */374 DECLINLINE(void) tcg_gen_helper_1_2(void *func, TCGv ret,375 #endif /* VBOX */376 272 TCGv arg1, TCGv arg2) 377 273 { … … 387 283 } 388 284 389 #ifndef VBOX390 285 static inline void tcg_gen_helper_1_3(void *func, TCGv ret, 391 #else /* VBOX */392 DECLINLINE(void) tcg_gen_helper_1_3(void *func, TCGv ret,393 #endif /* VBOX */394 286 TCGv arg1, TCGv arg2, TCGv arg3) 395 287 { … … 406 298 } 407 299 408 #ifndef VBOX409 300 static inline void tcg_gen_helper_1_4(void *func, TCGv ret, 410 #else /* VBOX */411 DECLINLINE(void) tcg_gen_helper_1_4(void *func, TCGv ret,412 #endif /* VBOX */413 301 TCGv arg1, TCGv arg2, TCGv arg3, 414 302 TCGv arg4) … … 429 317 /* 32 bit ops 
*/ 430 318 431 #ifndef VBOX432 319 static inline void tcg_gen_ld8u_i32(TCGv ret, TCGv arg2, tcg_target_long offset) 433 #else /* VBOX */434 DECLINLINE(void) tcg_gen_ld8u_i32(TCGv ret, TCGv arg2, tcg_target_long offset)435 #endif /* VBOX */436 320 { 437 321 tcg_gen_op3i(INDEX_op_ld8u_i32, ret, arg2, offset); 438 322 } 439 323 440 #ifndef VBOX441 324 static inline void tcg_gen_ld8s_i32(TCGv ret, TCGv arg2, tcg_target_long offset) 442 #else /* VBOX */443 DECLINLINE(void) tcg_gen_ld8s_i32(TCGv ret, TCGv arg2, tcg_target_long offset)444 #endif /* VBOX */445 325 { 446 326 tcg_gen_op3i(INDEX_op_ld8s_i32, ret, arg2, offset); 447 327 } 448 328 449 #ifndef VBOX450 329 static inline void tcg_gen_ld16u_i32(TCGv ret, TCGv arg2, tcg_target_long offset) 451 #else /* VBOX */452 DECLINLINE(void) tcg_gen_ld16u_i32(TCGv ret, TCGv arg2, tcg_target_long offset)453 #endif /* VBOX */454 330 { 455 331 tcg_gen_op3i(INDEX_op_ld16u_i32, ret, arg2, offset); 456 332 } 457 333 458 #ifndef VBOX459 334 static inline void tcg_gen_ld16s_i32(TCGv ret, TCGv arg2, tcg_target_long offset) 460 #else /* VBOX */461 DECLINLINE(void) tcg_gen_ld16s_i32(TCGv ret, TCGv arg2, tcg_target_long offset)462 #endif /* VBOX */463 335 { 464 336 tcg_gen_op3i(INDEX_op_ld16s_i32, ret, arg2, offset); 465 337 } 466 338 467 #ifndef VBOX468 339 static inline void tcg_gen_ld_i32(TCGv ret, TCGv arg2, tcg_target_long offset) 469 #else /* VBOX */470 DECLINLINE(void) tcg_gen_ld_i32(TCGv ret, TCGv arg2, tcg_target_long offset)471 #endif /* VBOX */472 340 { 473 341 tcg_gen_op3i(INDEX_op_ld_i32, ret, arg2, offset); 474 342 } 475 343 476 #ifndef VBOX477 344 static inline void tcg_gen_st8_i32(TCGv arg1, TCGv arg2, tcg_target_long offset) 478 #else /* VBOX */479 DECLINLINE(void) tcg_gen_st8_i32(TCGv arg1, TCGv arg2, tcg_target_long offset)480 #endif /* VBOX */481 345 { 482 346 tcg_gen_op3i(INDEX_op_st8_i32, arg1, arg2, offset); 483 347 } 484 348 485 #ifndef VBOX486 349 static inline void tcg_gen_st16_i32(TCGv arg1, TCGv arg2, tcg_target_long offset) 487 #else /* VBOX */488 DECLINLINE(void) tcg_gen_st16_i32(TCGv arg1, TCGv arg2, tcg_target_long offset)489 #endif /* VBOX */490 350 { 491 351 tcg_gen_op3i(INDEX_op_st16_i32, arg1, arg2, offset); 492 352 } 493 353 494 #ifndef VBOX495 354 static inline void tcg_gen_st_i32(TCGv arg1, TCGv arg2, tcg_target_long offset) 496 #else /* VBOX */497 DECLINLINE(void) tcg_gen_st_i32(TCGv arg1, TCGv arg2, tcg_target_long offset)498 #endif /* VBOX */499 355 { 500 356 tcg_gen_op3i(INDEX_op_st_i32, arg1, arg2, offset); 501 357 } 502 358 503 #ifndef VBOX504 359 static inline void tcg_gen_add_i32(TCGv ret, TCGv arg1, TCGv arg2) 505 #else /* VBOX */506 DECLINLINE(void) tcg_gen_add_i32(TCGv ret, TCGv arg1, TCGv arg2)507 #endif /* VBOX */508 360 { 509 361 tcg_gen_op3(INDEX_op_add_i32, ret, arg1, arg2); 510 362 } 511 363 512 #ifndef VBOX513 364 static inline void tcg_gen_addi_i32(TCGv ret, TCGv arg1, int32_t arg2) 514 #else /* VBOX */515 DECLINLINE(void) tcg_gen_addi_i32(TCGv ret, TCGv arg1, int32_t arg2)516 #endif /* VBOX */517 365 { 518 366 /* some cases can be optimized here */ … … 526 374 } 527 375 528 #ifndef VBOX529 376 static inline void tcg_gen_sub_i32(TCGv ret, TCGv arg1, TCGv arg2) 530 #else /* VBOX */531 DECLINLINE(void) tcg_gen_sub_i32(TCGv ret, TCGv arg1, TCGv arg2)532 #endif /* VBOX */533 377 { 534 378 tcg_gen_op3(INDEX_op_sub_i32, ret, arg1, arg2); 535 379 } 536 380 537 #ifndef VBOX538 381 static inline void tcg_gen_subi_i32(TCGv ret, TCGv arg1, int32_t arg2) 539 #else /* VBOX */540 DECLINLINE(void) tcg_gen_subi_i32(TCGv 
ret, TCGv arg1, int32_t arg2)541 #endif /* VBOX */542 382 { 543 383 /* some cases can be optimized here */ … … 551 391 } 552 392 553 #ifndef VBOX554 393 static inline void tcg_gen_and_i32(TCGv ret, TCGv arg1, TCGv arg2) 555 #else /* VBOX */556 DECLINLINE(void) tcg_gen_and_i32(TCGv ret, TCGv arg1, TCGv arg2)557 #endif /* VBOX */558 394 { 559 395 tcg_gen_op3(INDEX_op_and_i32, ret, arg1, arg2); 560 396 } 561 397 562 #ifndef VBOX563 398 static inline void tcg_gen_andi_i32(TCGv ret, TCGv arg1, int32_t arg2) 564 #else /* VBOX */565 DECLINLINE(void) tcg_gen_andi_i32(TCGv ret, TCGv arg1, int32_t arg2)566 #endif /* VBOX */567 399 { 568 400 /* some cases can be optimized here */ … … 578 410 } 579 411 580 #ifndef VBOX581 412 static inline void tcg_gen_or_i32(TCGv ret, TCGv arg1, TCGv arg2) 582 #else /* VBOX */583 DECLINLINE(void) tcg_gen_or_i32(TCGv ret, TCGv arg1, TCGv arg2)584 #endif /* VBOX */585 413 { 586 414 tcg_gen_op3(INDEX_op_or_i32, ret, arg1, arg2); 587 415 } 588 416 589 #ifndef VBOX590 417 static inline void tcg_gen_ori_i32(TCGv ret, TCGv arg1, int32_t arg2) 591 #else /* VBOX */592 DECLINLINE(void) tcg_gen_ori_i32(TCGv ret, TCGv arg1, int32_t arg2)593 #endif /* VBOX */594 418 { 595 419 /* some cases can be optimized here */ … … 605 429 } 606 430 607 #ifndef VBOX608 431 static inline void tcg_gen_xor_i32(TCGv ret, TCGv arg1, TCGv arg2) 609 #else /* VBOX */610 DECLINLINE(void) tcg_gen_xor_i32(TCGv ret, TCGv arg1, TCGv arg2)611 #endif /* VBOX */612 432 { 613 433 tcg_gen_op3(INDEX_op_xor_i32, ret, arg1, arg2); 614 434 } 615 435 616 #ifndef VBOX617 436 static inline void tcg_gen_xori_i32(TCGv ret, TCGv arg1, int32_t arg2) 618 #else /* VBOX */619 DECLINLINE(void) tcg_gen_xori_i32(TCGv ret, TCGv arg1, int32_t arg2)620 #endif /* VBOX */621 437 { 622 438 /* some cases can be optimized here */ … … 630 446 } 631 447 632 #ifndef VBOX633 448 static inline void tcg_gen_shl_i32(TCGv ret, TCGv arg1, TCGv arg2) 634 #else /* VBOX */635 DECLINLINE(void) tcg_gen_shl_i32(TCGv ret, TCGv arg1, TCGv arg2)636 #endif /* VBOX */637 449 { 638 450 tcg_gen_op3(INDEX_op_shl_i32, ret, arg1, arg2); 639 451 } 640 452 641 #ifndef VBOX642 453 static inline void tcg_gen_shli_i32(TCGv ret, TCGv arg1, int32_t arg2) 643 #else /* VBOX */644 DECLINLINE(void) tcg_gen_shli_i32(TCGv ret, TCGv arg1, int32_t arg2)645 #endif /* VBOX */646 454 { 647 455 if (arg2 == 0) { … … 654 462 } 655 463 656 #ifndef VBOX657 464 static inline void tcg_gen_shr_i32(TCGv ret, TCGv arg1, TCGv arg2) 658 #else /* VBOX */659 DECLINLINE(void) tcg_gen_shr_i32(TCGv ret, TCGv arg1, TCGv arg2)660 #endif /* VBOX */661 465 { 662 466 tcg_gen_op3(INDEX_op_shr_i32, ret, arg1, arg2); 663 467 } 664 468 665 #ifndef VBOX666 469 static inline void tcg_gen_shri_i32(TCGv ret, TCGv arg1, int32_t arg2) 667 #else /* VBOX */668 DECLINLINE(void) tcg_gen_shri_i32(TCGv ret, TCGv arg1, int32_t arg2)669 #endif /* VBOX */670 470 { 671 471 if (arg2 == 0) { … … 678 478 } 679 479 680 #ifndef VBOX681 480 static inline void tcg_gen_sar_i32(TCGv ret, TCGv arg1, TCGv arg2) 682 #else /* VBOX */683 DECLINLINE(void) tcg_gen_sar_i32(TCGv ret, TCGv arg1, TCGv arg2)684 #endif /* VBOX */685 481 { 686 482 tcg_gen_op3(INDEX_op_sar_i32, ret, arg1, arg2); 687 483 } 688 484 689 #ifndef VBOX690 485 static inline void tcg_gen_sari_i32(TCGv ret, TCGv arg1, int32_t arg2) 691 #else /* VBOX */692 DECLINLINE(void) tcg_gen_sari_i32(TCGv ret, TCGv arg1, int32_t arg2)693 #endif /* VBOX */694 486 { 695 487 if (arg2 == 0) { … … 702 494 } 703 495 704 #ifndef VBOX705 496 static inline void tcg_gen_brcond_i32(int 
cond, TCGv arg1, TCGv arg2, 706 #else /* VBOX */707 DECLINLINE(void) tcg_gen_brcond_i32(int cond, TCGv arg1, TCGv arg2,708 #endif /* VBOX */709 497 int label_index) 710 498 { … … 712 500 } 713 501 714 #ifndef VBOX715 502 static inline void tcg_gen_brcondi_i32(int cond, TCGv arg1, int32_t arg2, 716 #else /* VBOX */717 DECLINLINE(void) tcg_gen_brcondi_i32(int cond, TCGv arg1, int32_t arg2,718 #endif /* VBOX */719 503 int label_index) 720 504 { … … 724 508 } 725 509 726 #ifndef VBOX727 510 static inline void tcg_gen_mul_i32(TCGv ret, TCGv arg1, TCGv arg2) 728 #else /* VBOX */729 DECLINLINE(void) tcg_gen_mul_i32(TCGv ret, TCGv arg1, TCGv arg2)730 #endif /* VBOX */731 511 { 732 512 tcg_gen_op3(INDEX_op_mul_i32, ret, arg1, arg2); 733 513 } 734 514 735 #ifndef VBOX736 515 static inline void tcg_gen_muli_i32(TCGv ret, TCGv arg1, int32_t arg2) 737 #else /* VBOX */738 DECLINLINE(void) tcg_gen_muli_i32(TCGv ret, TCGv arg1, int32_t arg2)739 #endif /* VBOX */740 516 { 741 517 TCGv t0 = tcg_const_i32(arg2); … … 745 521 746 522 #ifdef TCG_TARGET_HAS_div_i32 747 #ifndef VBOX748 523 static inline void tcg_gen_div_i32(TCGv ret, TCGv arg1, TCGv arg2) 749 #else /* VBOX */750 DECLINLINE(void) tcg_gen_div_i32(TCGv ret, TCGv arg1, TCGv arg2)751 #endif /* VBOX */752 524 { 753 525 tcg_gen_op3(INDEX_op_div_i32, ret, arg1, arg2); 754 526 } 755 527 756 #ifndef VBOX757 528 static inline void tcg_gen_rem_i32(TCGv ret, TCGv arg1, TCGv arg2) 758 #else /* VBOX */759 DECLINLINE(void) tcg_gen_rem_i32(TCGv ret, TCGv arg1, TCGv arg2)760 #endif /* VBOX */761 529 { 762 530 tcg_gen_op3(INDEX_op_rem_i32, ret, arg1, arg2); 763 531 } 764 532 765 #ifndef VBOX766 533 static inline void tcg_gen_divu_i32(TCGv ret, TCGv arg1, TCGv arg2) 767 #else /* VBOX */768 DECLINLINE(void) tcg_gen_divu_i32(TCGv ret, TCGv arg1, TCGv arg2)769 #endif /* VBOX */770 534 { 771 535 tcg_gen_op3(INDEX_op_divu_i32, ret, arg1, arg2); 772 536 } 773 537 774 #ifndef VBOX775 538 static inline void tcg_gen_remu_i32(TCGv ret, TCGv arg1, TCGv arg2) 776 #else /* VBOX */777 DECLINLINE(void) tcg_gen_remu_i32(TCGv ret, TCGv arg1, TCGv arg2)778 #endif /* VBOX */779 539 { 780 540 tcg_gen_op3(INDEX_op_remu_i32, ret, arg1, arg2); 781 541 } 782 542 #else 783 #ifndef VBOX784 543 static inline void tcg_gen_div_i32(TCGv ret, TCGv arg1, TCGv arg2) 785 #else /* VBOX */786 DECLINLINE(void) tcg_gen_div_i32(TCGv ret, TCGv arg1, TCGv arg2)787 #endif /* VBOX */788 544 { 789 545 TCGv t0; … … 794 550 } 795 551 796 #ifndef VBOX797 552 static inline void tcg_gen_rem_i32(TCGv ret, TCGv arg1, TCGv arg2) 798 #else /* VBOX */799 DECLINLINE(void) tcg_gen_rem_i32(TCGv ret, TCGv arg1, TCGv arg2)800 #endif /* VBOX */801 553 { 802 554 TCGv t0; … … 807 559 } 808 560 809 #ifndef VBOX810 561 static inline void tcg_gen_divu_i32(TCGv ret, TCGv arg1, TCGv arg2) 811 #else /* VBOX */812 DECLINLINE(void) tcg_gen_divu_i32(TCGv ret, TCGv arg1, TCGv arg2)813 #endif /* VBOX */814 562 { 815 563 TCGv t0; … … 820 568 } 821 569 822 #ifndef VBOX823 570 static inline void tcg_gen_remu_i32(TCGv ret, TCGv arg1, TCGv arg2) 824 #else /* VBOX */825 DECLINLINE(void) tcg_gen_remu_i32(TCGv ret, TCGv arg1, TCGv arg2)826 #endif /* VBOX */827 571 { 828 572 TCGv t0; … … 836 580 #if TCG_TARGET_REG_BITS == 32 837 581 838 #ifndef VBOX839 582 static inline void tcg_gen_mov_i64(TCGv ret, TCGv arg) 840 #else /* VBOX */841 DECLINLINE(void) tcg_gen_mov_i64(TCGv ret, TCGv arg)842 #endif /* VBOX */843 583 { 844 584 if (GET_TCGV(ret) != GET_TCGV(arg)) { … … 848 588 } 849 589 850 #ifndef VBOX851 590 static inline void tcg_gen_movi_i64(TCGv 
ret, int64_t arg) 852 #else /* VBOX */853 DECLINLINE(void) tcg_gen_movi_i64(TCGv ret, int64_t arg)854 #endif /* VBOX */855 591 { 856 592 tcg_gen_movi_i32(ret, arg); … … 858 594 } 859 595 860 #ifndef VBOX861 596 static inline void tcg_gen_ld8u_i64(TCGv ret, TCGv arg2, tcg_target_long offset) 862 #else /* VBOX */863 DECLINLINE(void) tcg_gen_ld8u_i64(TCGv ret, TCGv arg2, tcg_target_long offset)864 #endif /* VBOX */865 597 { 866 598 tcg_gen_ld8u_i32(ret, arg2, offset); … … 868 600 } 869 601 870 #ifndef VBOX871 602 static inline void tcg_gen_ld8s_i64(TCGv ret, TCGv arg2, tcg_target_long offset) 872 #else /* VBOX */873 DECLINLINE(void) tcg_gen_ld8s_i64(TCGv ret, TCGv arg2, tcg_target_long offset)874 #endif /* VBOX */875 603 { 876 604 tcg_gen_ld8s_i32(ret, arg2, offset); … … 878 606 } 879 607 880 #ifndef VBOX881 608 static inline void tcg_gen_ld16u_i64(TCGv ret, TCGv arg2, tcg_target_long offset) 882 #else /* VBOX */883 DECLINLINE(void) tcg_gen_ld16u_i64(TCGv ret, TCGv arg2, tcg_target_long offset)884 #endif /* VBOX */885 609 { 886 610 tcg_gen_ld16u_i32(ret, arg2, offset); … … 888 612 } 889 613 890 #ifndef VBOX891 614 static inline void tcg_gen_ld16s_i64(TCGv ret, TCGv arg2, tcg_target_long offset) 892 #else /* VBOX */893 DECLINLINE(void) tcg_gen_ld16s_i64(TCGv ret, TCGv arg2, tcg_target_long offset)894 #endif /* VBOX */895 615 { 896 616 tcg_gen_ld16s_i32(ret, arg2, offset); … … 898 618 } 899 619 900 #ifndef VBOX901 620 static inline void tcg_gen_ld32u_i64(TCGv ret, TCGv arg2, tcg_target_long offset) 902 #else /* VBOX */903 DECLINLINE(void) tcg_gen_ld32u_i64(TCGv ret, TCGv arg2, tcg_target_long offset)904 #endif /* VBOX */905 621 { 906 622 tcg_gen_ld_i32(ret, arg2, offset); … … 908 624 } 909 625 910 #ifndef VBOX911 626 static inline void tcg_gen_ld32s_i64(TCGv ret, TCGv arg2, tcg_target_long offset) 912 #else /* VBOX */913 DECLINLINE(void) tcg_gen_ld32s_i64(TCGv ret, TCGv arg2, tcg_target_long offset)914 #endif /* VBOX */915 627 { 916 628 tcg_gen_ld_i32(ret, arg2, offset); … … 918 630 } 919 631 920 #ifndef VBOX921 632 static inline void tcg_gen_ld_i64(TCGv ret, TCGv arg2, tcg_target_long offset) 922 #else /* VBOX */923 DECLINLINE(void) tcg_gen_ld_i64(TCGv ret, TCGv arg2, tcg_target_long offset)924 #endif /* VBOX */925 633 { 926 634 /* since arg2 and ret have different types, they cannot be the … … 935 643 } 936 644 937 #ifndef VBOX938 645 static inline void tcg_gen_st8_i64(TCGv arg1, TCGv arg2, tcg_target_long offset) 939 #else /* VBOX */940 DECLINLINE(void) tcg_gen_st8_i64(TCGv arg1, TCGv arg2, tcg_target_long offset)941 #endif /* VBOX */942 646 { 943 647 tcg_gen_st8_i32(arg1, arg2, offset); 944 648 } 945 649 946 #ifndef VBOX947 650 static inline void tcg_gen_st16_i64(TCGv arg1, TCGv arg2, tcg_target_long offset) 948 #else /* VBOX */949 DECLINLINE(void) tcg_gen_st16_i64(TCGv arg1, TCGv arg2, tcg_target_long offset)950 #endif /* VBOX */951 651 { 952 652 tcg_gen_st16_i32(arg1, arg2, offset); 953 653 } 954 654 955 #ifndef VBOX956 655 static inline void tcg_gen_st32_i64(TCGv arg1, TCGv arg2, tcg_target_long offset) 957 #else /* VBOX */958 DECLINLINE(void) tcg_gen_st32_i64(TCGv arg1, TCGv arg2, tcg_target_long offset)959 #endif /* VBOX */960 656 { 961 657 tcg_gen_st_i32(arg1, arg2, offset); 962 658 } 963 659 964 #ifndef VBOX965 660 static inline void tcg_gen_st_i64(TCGv arg1, TCGv arg2, tcg_target_long offset) 966 #else /* VBOX */967 DECLINLINE(void) tcg_gen_st_i64(TCGv arg1, TCGv arg2, tcg_target_long offset)968 #endif /* VBOX */969 661 { 970 662 #ifdef TCG_TARGET_WORDS_BIGENDIAN … … 977 669 } 978 
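Aside on the TCG_TARGET_REG_BITS == 32 hunks above: on a 32-bit host a 64-bit TCG value travels as a low/high pair, so tcg_gen_ld_i64 and tcg_gen_st_i64 split into two 32-bit accesses whose offsets swap with TCG_TARGET_WORDS_BIGENDIAN. The helper below is a hypothetical plain-C restatement of that conventional offset choice; store_u64_as_halves and the big_endian flag are illustrative names, with big_endian standing in for the macro.

#include <stdint.h>
#include <string.h>

/* store_u64_as_halves() is illustrative only; it mirrors the offset choice
 * of the st_i64 hunk above for an ordinary memory buffer. */
static void store_u64_as_halves(uint8_t *base, long offset,
                                uint32_t lo, uint32_t hi, int big_endian)
{
    long lo_off = big_endian ? offset + 4 : offset;
    long hi_off = big_endian ? offset     : offset + 4;
    memcpy(base + lo_off, &lo, sizeof(lo));   /* low 32 bits */
    memcpy(base + hi_off, &hi, sizeof(hi));   /* high 32 bits */
}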
670 979 #ifndef VBOX980 671 static inline void tcg_gen_add_i64(TCGv ret, TCGv arg1, TCGv arg2) 981 #else /* VBOX */982 DECLINLINE(void) tcg_gen_add_i64(TCGv ret, TCGv arg1, TCGv arg2)983 #endif /* VBOX */984 672 { 985 673 tcg_gen_op6(INDEX_op_add2_i32, ret, TCGV_HIGH(ret), … … 987 675 } 988 676 989 #ifndef VBOX990 677 static inline void tcg_gen_addi_i64(TCGv ret, TCGv arg1, int64_t arg2) 991 #else /* VBOX */992 DECLINLINE(void) tcg_gen_addi_i64(TCGv ret, TCGv arg1, int64_t arg2)993 #endif /* VBOX */994 678 { 995 679 TCGv t0 = tcg_const_i64(arg2); … … 998 682 } 999 683 1000 #ifndef VBOX1001 684 static inline void tcg_gen_sub_i64(TCGv ret, TCGv arg1, TCGv arg2) 1002 #else /* VBOX */1003 DECLINLINE(void) tcg_gen_sub_i64(TCGv ret, TCGv arg1, TCGv arg2)1004 #endif /* VBOX */1005 685 { 1006 686 tcg_gen_op6(INDEX_op_sub2_i32, ret, TCGV_HIGH(ret), … … 1008 688 } 1009 689 1010 #ifndef VBOX1011 690 static inline void tcg_gen_subi_i64(TCGv ret, TCGv arg1, int64_t arg2) 1012 #else /* VBOX */1013 DECLINLINE(void) tcg_gen_subi_i64(TCGv ret, TCGv arg1, int64_t arg2)1014 #endif /* VBOX */1015 691 { 1016 692 TCGv t0 = tcg_const_i64(arg2); … … 1019 695 } 1020 696 1021 #ifndef VBOX1022 697 static inline void tcg_gen_and_i64(TCGv ret, TCGv arg1, TCGv arg2) 1023 #else /* VBOX */1024 DECLINLINE(void) tcg_gen_and_i64(TCGv ret, TCGv arg1, TCGv arg2)1025 #endif /* VBOX */1026 698 { 1027 699 tcg_gen_and_i32(ret, arg1, arg2); … … 1029 701 } 1030 702 1031 #ifndef VBOX1032 703 static inline void tcg_gen_andi_i64(TCGv ret, TCGv arg1, int64_t arg2) 1033 #else /* VBOX */1034 DECLINLINE(void) tcg_gen_andi_i64(TCGv ret, TCGv arg1, int64_t arg2)1035 #endif /* VBOX */1036 704 { 1037 705 tcg_gen_andi_i32(ret, arg1, arg2); … … 1039 707 } 1040 708 1041 #ifndef VBOX1042 709 static inline void tcg_gen_or_i64(TCGv ret, TCGv arg1, TCGv arg2) 1043 #else /* VBOX */1044 DECLINLINE(void) tcg_gen_or_i64(TCGv ret, TCGv arg1, TCGv arg2)1045 #endif /* VBOX */1046 710 { 1047 711 tcg_gen_or_i32(ret, arg1, arg2); … … 1049 713 } 1050 714 1051 #ifndef VBOX1052 715 static inline void tcg_gen_ori_i64(TCGv ret, TCGv arg1, int64_t arg2) 1053 #else /* VBOX */1054 DECLINLINE(void) tcg_gen_ori_i64(TCGv ret, TCGv arg1, int64_t arg2)1055 #endif /* VBOX */1056 716 { 1057 717 tcg_gen_ori_i32(ret, arg1, arg2); … … 1059 719 } 1060 720 1061 #ifndef VBOX1062 721 static inline void tcg_gen_xor_i64(TCGv ret, TCGv arg1, TCGv arg2) 1063 #else /* VBOX */1064 DECLINLINE(void) tcg_gen_xor_i64(TCGv ret, TCGv arg1, TCGv arg2)1065 #endif /* VBOX */1066 722 { 1067 723 tcg_gen_xor_i32(ret, arg1, arg2); … … 1069 725 } 1070 726 1071 #ifndef VBOX1072 727 static inline void tcg_gen_xori_i64(TCGv ret, TCGv arg1, int64_t arg2) 1073 #else /* VBOX */1074 DECLINLINE(void) tcg_gen_xori_i64(TCGv ret, TCGv arg1, int64_t arg2)1075 #endif /* VBOX */1076 728 { 1077 729 tcg_gen_xori_i32(ret, arg1, arg2); … … 1081 733 /* XXX: use generic code when basic block handling is OK or CPU 1082 734 specific code (x86) */ 1083 #ifndef VBOX1084 735 static inline void tcg_gen_shl_i64(TCGv ret, TCGv arg1, TCGv arg2) 1085 #else /* VBOX */1086 DECLINLINE(void) tcg_gen_shl_i64(TCGv ret, TCGv arg1, TCGv arg2)1087 #endif /* VBOX */1088 736 { 1089 737 tcg_gen_helper_1_2(tcg_helper_shl_i64, ret, arg1, arg2); 1090 738 } 1091 739 1092 #ifndef VBOX1093 740 static inline void tcg_gen_shli_i64(TCGv ret, TCGv arg1, int64_t arg2) 1094 #else /* VBOX */1095 DECLINLINE(void) tcg_gen_shli_i64(TCGv ret, TCGv arg1, int64_t arg2)1096 #endif /* VBOX */1097 741 { 1098 742 tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0); 1099 743 
} 1100 744 1101 #ifndef VBOX1102 745 static inline void tcg_gen_shr_i64(TCGv ret, TCGv arg1, TCGv arg2) 1103 #else /* VBOX */1104 DECLINLINE(void) tcg_gen_shr_i64(TCGv ret, TCGv arg1, TCGv arg2)1105 #endif /* VBOX */1106 746 { 1107 747 tcg_gen_helper_1_2(tcg_helper_shr_i64, ret, arg1, arg2); 1108 748 } 1109 749 1110 #ifndef VBOX1111 750 static inline void tcg_gen_shri_i64(TCGv ret, TCGv arg1, int64_t arg2) 1112 #else /* VBOX */1113 DECLINLINE(void) tcg_gen_shri_i64(TCGv ret, TCGv arg1, int64_t arg2)1114 #endif /* VBOX */1115 751 { 1116 752 tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0); 1117 753 } 1118 754 1119 #ifndef VBOX1120 755 static inline void tcg_gen_sar_i64(TCGv ret, TCGv arg1, TCGv arg2) 1121 #else /* VBOX */1122 DECLINLINE(void) tcg_gen_sar_i64(TCGv ret, TCGv arg1, TCGv arg2)1123 #endif /* VBOX */1124 756 { 1125 757 tcg_gen_helper_1_2(tcg_helper_sar_i64, ret, arg1, arg2); 1126 758 } 1127 759 1128 #ifndef VBOX1129 760 static inline void tcg_gen_sari_i64(TCGv ret, TCGv arg1, int64_t arg2) 1130 #else /* VBOX */1131 DECLINLINE(void) tcg_gen_sari_i64(TCGv ret, TCGv arg1, int64_t arg2)1132 #endif /* VBOX */1133 761 { 1134 762 tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1); 1135 763 } 1136 764 1137 #ifndef VBOX1138 765 static inline void tcg_gen_brcond_i64(int cond, TCGv arg1, TCGv arg2, 1139 #else /* VBOX */1140 DECLINLINE(void) tcg_gen_brcond_i64(int cond, TCGv arg1, TCGv arg2,1141 #endif /* VBOX */1142 766 int label_index) 1143 767 { … … 1147 771 } 1148 772 1149 #ifndef VBOX1150 773 static inline void tcg_gen_mul_i64(TCGv ret, TCGv arg1, TCGv arg2) 1151 #else /* VBOX */1152 DECLINLINE(void) tcg_gen_mul_i64(TCGv ret, TCGv arg1, TCGv arg2)1153 #endif /* VBOX */1154 774 { 1155 775 TCGv t0, t1; … … 1170 790 } 1171 791 1172 #ifndef VBOX1173 792 static inline void tcg_gen_muli_i64(TCGv ret, TCGv arg1, int64_t arg2) 1174 #else /* VBOX */1175 DECLINLINE(void) tcg_gen_muli_i64(TCGv ret, TCGv arg1, int64_t arg2)1176 #endif /* VBOX */1177 793 { 1178 794 TCGv t0 = tcg_const_i64(arg2); … … 1181 797 } 1182 798 1183 #ifndef VBOX1184 799 static inline void tcg_gen_div_i64(TCGv ret, TCGv arg1, TCGv arg2) 1185 #else /* VBOX */1186 DECLINLINE(void) tcg_gen_div_i64(TCGv ret, TCGv arg1, TCGv arg2)1187 #endif /* VBOX */1188 800 { 1189 801 tcg_gen_helper_1_2(tcg_helper_div_i64, ret, arg1, arg2); 1190 802 } 1191 803 1192 #ifndef VBOX1193 804 static inline void tcg_gen_rem_i64(TCGv ret, TCGv arg1, TCGv arg2) 1194 #else /* VBOX */1195 DECLINLINE(void) tcg_gen_rem_i64(TCGv ret, TCGv arg1, TCGv arg2)1196 #endif /* VBOX */1197 805 { 1198 806 tcg_gen_helper_1_2(tcg_helper_rem_i64, ret, arg1, arg2); 1199 807 } 1200 808 1201 #ifndef VBOX1202 809 static inline void tcg_gen_divu_i64(TCGv ret, TCGv arg1, TCGv arg2) 1203 #else /* VBOX */1204 DECLINLINE(void) tcg_gen_divu_i64(TCGv ret, TCGv arg1, TCGv arg2)1205 #endif /* VBOX */1206 810 { 1207 811 tcg_gen_helper_1_2(tcg_helper_divu_i64, ret, arg1, arg2); 1208 812 } 1209 813 1210 #ifndef VBOX1211 814 static inline void tcg_gen_remu_i64(TCGv ret, TCGv arg1, TCGv arg2) 1212 #else /* VBOX */1213 DECLINLINE(void) tcg_gen_remu_i64(TCGv ret, TCGv arg1, TCGv arg2)1214 #endif /* VBOX */1215 815 { 1216 816 tcg_gen_helper_1_2(tcg_helper_remu_i64, ret, arg1, arg2); … … 1219 819 #else 1220 820 1221 #ifndef VBOX1222 821 static inline void tcg_gen_mov_i64(TCGv ret, TCGv arg) 1223 #else /* VBOX */1224 DECLINLINE(void) tcg_gen_mov_i64(TCGv ret, TCGv arg)1225 #endif /* VBOX */1226 822 { 1227 823 if (GET_TCGV(ret) != GET_TCGV(arg)) … … 1229 825 } 1230 826 1231 #ifndef VBOX1232 827 static inline 
void tcg_gen_movi_i64(TCGv ret, int64_t arg) 1233 #else /* VBOX */1234 DECLINLINE(void) tcg_gen_movi_i64(TCGv ret, int64_t arg)1235 #endif /* VBOX */1236 828 { 1237 829 tcg_gen_op2i(INDEX_op_movi_i64, ret, arg); 1238 830 } 1239 831 1240 #ifndef VBOX1241 832 static inline void tcg_gen_ld8u_i64(TCGv ret, TCGv arg2, 1242 #else /* VBOX */1243 DECLINLINE(void) tcg_gen_ld8u_i64(TCGv ret, TCGv arg2,1244 #endif /* VBOX */1245 833 tcg_target_long offset) 1246 834 { … … 1248 836 } 1249 837 1250 #ifndef VBOX1251 838 static inline void tcg_gen_ld8s_i64(TCGv ret, TCGv arg2, 1252 #else /* VBOX */1253 DECLINLINE(void) tcg_gen_ld8s_i64(TCGv ret, TCGv arg2,1254 #endif /* VBOX */1255 839 tcg_target_long offset) 1256 840 { … … 1258 842 } 1259 843 1260 #ifndef VBOX1261 844 static inline void tcg_gen_ld16u_i64(TCGv ret, TCGv arg2, 1262 #else /* VBOX */1263 DECLINLINE(void) tcg_gen_ld16u_i64(TCGv ret, TCGv arg2,1264 #endif /* VBOX */1265 845 tcg_target_long offset) 1266 846 { … … 1268 848 } 1269 849 1270 #ifndef VBOX1271 850 static inline void tcg_gen_ld16s_i64(TCGv ret, TCGv arg2, 1272 #else /* VBOX */1273 DECLINLINE(void) tcg_gen_ld16s_i64(TCGv ret, TCGv arg2,1274 #endif /* VBOX */1275 851 tcg_target_long offset) 1276 852 { … … 1278 854 } 1279 855 1280 #ifndef VBOX1281 856 static inline void tcg_gen_ld32u_i64(TCGv ret, TCGv arg2, 1282 #else /* VBOX */1283 DECLINLINE(void) tcg_gen_ld32u_i64(TCGv ret, TCGv arg2,1284 #endif /* VBOX */1285 857 tcg_target_long offset) 1286 858 { … … 1288 860 } 1289 861 1290 #ifndef VBOX1291 862 static inline void tcg_gen_ld32s_i64(TCGv ret, TCGv arg2, 1292 #else /* VBOX */1293 DECLINLINE(void) tcg_gen_ld32s_i64(TCGv ret, TCGv arg2,1294 #endif /* VBOX */1295 863 tcg_target_long offset) 1296 864 { … … 1298 866 } 1299 867 1300 #ifndef VBOX1301 868 static inline void tcg_gen_ld_i64(TCGv ret, TCGv arg2, tcg_target_long offset) 1302 #else /* VBOX */1303 DECLINLINE(void) tcg_gen_ld_i64(TCGv ret, TCGv arg2, tcg_target_long offset)1304 #endif /* VBOX */1305 869 { 1306 870 tcg_gen_op3i(INDEX_op_ld_i64, ret, arg2, offset); 1307 871 } 1308 872 1309 #ifndef VBOX1310 873 static inline void tcg_gen_st8_i64(TCGv arg1, TCGv arg2, 1311 #else /* VBOX */1312 DECLINLINE(void) tcg_gen_st8_i64(TCGv arg1, TCGv arg2,1313 #endif /* VBOX */1314 874 tcg_target_long offset) 1315 875 { … … 1317 877 } 1318 878 1319 #ifndef VBOX1320 879 static inline void tcg_gen_st16_i64(TCGv arg1, TCGv arg2, 1321 #else /* VBOX */1322 DECLINLINE(void) tcg_gen_st16_i64(TCGv arg1, TCGv arg2,1323 #endif /* VBOX */1324 880 tcg_target_long offset) 1325 881 { … … 1327 883 } 1328 884 1329 #ifndef VBOX1330 885 static inline void tcg_gen_st32_i64(TCGv arg1, TCGv arg2, 1331 #else /* VBOX */1332 DECLINLINE(void) tcg_gen_st32_i64(TCGv arg1, TCGv arg2,1333 #endif /* VBOX */1334 886 tcg_target_long offset) 1335 887 { … … 1337 889 } 1338 890 1339 #ifndef VBOX1340 891 static inline void tcg_gen_st_i64(TCGv arg1, TCGv arg2, tcg_target_long offset) 1341 #else /* VBOX */1342 DECLINLINE(void) tcg_gen_st_i64(TCGv arg1, TCGv arg2, tcg_target_long offset)1343 #endif /* VBOX */1344 892 { 1345 893 tcg_gen_op3i(INDEX_op_st_i64, arg1, arg2, offset); 1346 894 } 1347 895 1348 #ifndef VBOX1349 896 static inline void tcg_gen_add_i64(TCGv ret, TCGv arg1, TCGv arg2) 1350 #else /* VBOX */1351 DECLINLINE(void) tcg_gen_add_i64(TCGv ret, TCGv arg1, TCGv arg2)1352 #endif /* VBOX */1353 897 { 1354 898 tcg_gen_op3(INDEX_op_add_i64, ret, arg1, arg2); 1355 899 } 1356 900 1357 #ifndef VBOX1358 901 static inline void tcg_gen_addi_i64(TCGv ret, TCGv arg1, int64_t arg2) 
1359 #else /* VBOX */1360 DECLINLINE(void) tcg_gen_addi_i64(TCGv ret, TCGv arg1, int64_t arg2)1361 #endif /* VBOX */1362 902 { 1363 903 TCGv t0 = tcg_const_i64(arg2); … … 1366 906 } 1367 907 1368 #ifndef VBOX1369 908 static inline void tcg_gen_sub_i64(TCGv ret, TCGv arg1, TCGv arg2) 1370 #else /* VBOX */1371 DECLINLINE(void) tcg_gen_sub_i64(TCGv ret, TCGv arg1, TCGv arg2)1372 #endif /* VBOX */1373 909 { 1374 910 tcg_gen_op3(INDEX_op_sub_i64, ret, arg1, arg2); 1375 911 } 1376 912 1377 #ifndef VBOX1378 913 static inline void tcg_gen_subi_i64(TCGv ret, TCGv arg1, int64_t arg2) 1379 #else /* VBOX */1380 DECLINLINE(void) tcg_gen_subi_i64(TCGv ret, TCGv arg1, int64_t arg2)1381 #endif /* VBOX */1382 914 { 1383 915 TCGv t0 = tcg_const_i64(arg2); … … 1386 918 } 1387 919 1388 #ifndef VBOX1389 920 static inline void tcg_gen_and_i64(TCGv ret, TCGv arg1, TCGv arg2) 1390 #else /* VBOX */1391 DECLINLINE(void) tcg_gen_and_i64(TCGv ret, TCGv arg1, TCGv arg2)1392 #endif /* VBOX */1393 921 { 1394 922 tcg_gen_op3(INDEX_op_and_i64, ret, arg1, arg2); 1395 923 } 1396 924 1397 #ifndef VBOX1398 925 static inline void tcg_gen_andi_i64(TCGv ret, TCGv arg1, int64_t arg2) 1399 #else /* VBOX */1400 DECLINLINE(void) tcg_gen_andi_i64(TCGv ret, TCGv arg1, int64_t arg2)1401 #endif /* VBOX */1402 926 { 1403 927 TCGv t0 = tcg_const_i64(arg2); … … 1406 930 } 1407 931 1408 #ifndef VBOX1409 932 static inline void tcg_gen_or_i64(TCGv ret, TCGv arg1, TCGv arg2) 1410 #else /* VBOX */1411 DECLINLINE(void) tcg_gen_or_i64(TCGv ret, TCGv arg1, TCGv arg2)1412 #endif /* VBOX */1413 933 { 1414 934 tcg_gen_op3(INDEX_op_or_i64, ret, arg1, arg2); 1415 935 } 1416 936 1417 #ifndef VBOX1418 937 static inline void tcg_gen_ori_i64(TCGv ret, TCGv arg1, int64_t arg2) 1419 #else /* VBOX */1420 DECLINLINE(void) tcg_gen_ori_i64(TCGv ret, TCGv arg1, int64_t arg2)1421 #endif /* VBOX */1422 938 { 1423 939 TCGv t0 = tcg_const_i64(arg2); … … 1426 942 } 1427 943 1428 #ifndef VBOX1429 944 static inline void tcg_gen_xor_i64(TCGv ret, TCGv arg1, TCGv arg2) 1430 #else /* VBOX */1431 DECLINLINE(void) tcg_gen_xor_i64(TCGv ret, TCGv arg1, TCGv arg2)1432 #endif /* VBOX */1433 945 { 1434 946 tcg_gen_op3(INDEX_op_xor_i64, ret, arg1, arg2); 1435 947 } 1436 948 1437 #ifndef VBOX1438 949 static inline void tcg_gen_xori_i64(TCGv ret, TCGv arg1, int64_t arg2) 1439 #else /* VBOX */1440 DECLINLINE(void) tcg_gen_xori_i64(TCGv ret, TCGv arg1, int64_t arg2)1441 #endif /* VBOX */1442 950 { 1443 951 TCGv t0 = tcg_const_i64(arg2); … … 1446 954 } 1447 955 1448 #ifndef VBOX1449 956 static inline void tcg_gen_shl_i64(TCGv ret, TCGv arg1, TCGv arg2) 1450 #else /* VBOX */1451 DECLINLINE(void) tcg_gen_shl_i64(TCGv ret, TCGv arg1, TCGv arg2)1452 #endif /* VBOX */1453 957 { 1454 958 tcg_gen_op3(INDEX_op_shl_i64, ret, arg1, arg2); 1455 959 } 1456 960 1457 #ifndef VBOX1458 961 static inline void tcg_gen_shli_i64(TCGv ret, TCGv arg1, int64_t arg2) 1459 #else /* VBOX */1460 DECLINLINE(void) tcg_gen_shli_i64(TCGv ret, TCGv arg1, int64_t arg2)1461 #endif /* VBOX */1462 962 { 1463 963 if (arg2 == 0) { … … 1470 970 } 1471 971 1472 #ifndef VBOX1473 972 static inline void tcg_gen_shr_i64(TCGv ret, TCGv arg1, TCGv arg2) 1474 #else /* VBOX */1475 DECLINLINE(void) tcg_gen_shr_i64(TCGv ret, TCGv arg1, TCGv arg2)1476 #endif /* VBOX */1477 973 { 1478 974 tcg_gen_op3(INDEX_op_shr_i64, ret, arg1, arg2); 1479 975 } 1480 976 1481 #ifndef VBOX1482 977 static inline void tcg_gen_shri_i64(TCGv ret, TCGv arg1, int64_t arg2) 1483 #else /* VBOX */1484 DECLINLINE(void) tcg_gen_shri_i64(TCGv ret, TCGv arg1, 
int64_t arg2)1485 #endif /* VBOX */1486 978 { 1487 979 if (arg2 == 0) { … … 1494 986 } 1495 987 1496 #ifndef VBOX1497 988 static inline void tcg_gen_sar_i64(TCGv ret, TCGv arg1, TCGv arg2) 1498 #else /* VBOX */1499 DECLINLINE(void) tcg_gen_sar_i64(TCGv ret, TCGv arg1, TCGv arg2)1500 #endif /* VBOX */1501 989 { 1502 990 tcg_gen_op3(INDEX_op_sar_i64, ret, arg1, arg2); 1503 991 } 1504 992 1505 #ifndef VBOX1506 993 static inline void tcg_gen_sari_i64(TCGv ret, TCGv arg1, int64_t arg2) 1507 #else /* VBOX */1508 DECLINLINE(void) tcg_gen_sari_i64(TCGv ret, TCGv arg1, int64_t arg2)1509 #endif /* VBOX */1510 994 { 1511 995 if (arg2 == 0) { … … 1518 1002 } 1519 1003 1520 #ifndef VBOX1521 1004 static inline void tcg_gen_brcond_i64(int cond, TCGv arg1, TCGv arg2, 1522 #else /* VBOX */1523 DECLINLINE(void) tcg_gen_brcond_i64(int cond, TCGv arg1, TCGv arg2,1524 #endif /* VBOX */1525 1005 int label_index) 1526 1006 { … … 1528 1008 } 1529 1009 1530 #ifndef VBOX1531 1010 static inline void tcg_gen_mul_i64(TCGv ret, TCGv arg1, TCGv arg2) 1532 #else /* VBOX */1533 DECLINLINE(void) tcg_gen_mul_i64(TCGv ret, TCGv arg1, TCGv arg2)1534 #endif /* VBOX */1535 1011 { 1536 1012 tcg_gen_op3(INDEX_op_mul_i64, ret, arg1, arg2); 1537 1013 } 1538 1014 1539 #ifndef VBOX1540 1015 static inline void tcg_gen_muli_i64(TCGv ret, TCGv arg1, int64_t arg2) 1541 #else /* VBOX */1542 DECLINLINE(void) tcg_gen_muli_i64(TCGv ret, TCGv arg1, int64_t arg2)1543 #endif /* VBOX */1544 1016 { 1545 1017 TCGv t0 = tcg_const_i64(arg2); … … 1549 1021 1550 1022 #ifdef TCG_TARGET_HAS_div_i64 1551 #ifndef VBOX1552 1023 static inline void tcg_gen_div_i64(TCGv ret, TCGv arg1, TCGv arg2) 1553 #else /* VBOX */1554 DECLINLINE(void) tcg_gen_div_i64(TCGv ret, TCGv arg1, TCGv arg2)1555 #endif /* VBOX */1556 1024 { 1557 1025 tcg_gen_op3(INDEX_op_div_i64, ret, arg1, arg2); 1558 1026 } 1559 1027 1560 #ifndef VBOX1561 1028 static inline void tcg_gen_rem_i64(TCGv ret, TCGv arg1, TCGv arg2) 1562 #else /* VBOX */1563 DECLINLINE(void) tcg_gen_rem_i64(TCGv ret, TCGv arg1, TCGv arg2)1564 #endif /* VBOX */1565 1029 { 1566 1030 tcg_gen_op3(INDEX_op_rem_i64, ret, arg1, arg2); 1567 1031 } 1568 1032 1569 #ifndef VBOX1570 1033 static inline void tcg_gen_divu_i64(TCGv ret, TCGv arg1, TCGv arg2) 1571 #else /* VBOX */1572 DECLINLINE(void) tcg_gen_divu_i64(TCGv ret, TCGv arg1, TCGv arg2)1573 #endif /* VBOX */1574 1034 { 1575 1035 tcg_gen_op3(INDEX_op_divu_i64, ret, arg1, arg2); 1576 1036 } 1577 1037 1578 #ifndef VBOX1579 1038 static inline void tcg_gen_remu_i64(TCGv ret, TCGv arg1, TCGv arg2) 1580 #else /* VBOX */1581 DECLINLINE(void) tcg_gen_remu_i64(TCGv ret, TCGv arg1, TCGv arg2)1582 #endif /* VBOX */1583 1039 { 1584 1040 tcg_gen_op3(INDEX_op_remu_i64, ret, arg1, arg2); 1585 1041 } 1586 1042 #else 1587 #ifndef VBOX1588 1043 static inline void tcg_gen_div_i64(TCGv ret, TCGv arg1, TCGv arg2) 1589 #else /* VBOX */1590 DECLINLINE(void) tcg_gen_div_i64(TCGv ret, TCGv arg1, TCGv arg2)1591 #endif /* VBOX */1592 1044 { 1593 1045 TCGv t0; … … 1598 1050 } 1599 1051 1600 #ifndef VBOX1601 1052 static inline void tcg_gen_rem_i64(TCGv ret, TCGv arg1, TCGv arg2) 1602 #else /* VBOX */1603 DECLINLINE(void) tcg_gen_rem_i64(TCGv ret, TCGv arg1, TCGv arg2)1604 #endif /* VBOX */1605 1053 { 1606 1054 TCGv t0; … … 1611 1059 } 1612 1060 1613 #ifndef VBOX1614 1061 static inline void tcg_gen_divu_i64(TCGv ret, TCGv arg1, TCGv arg2) 1615 #else /* VBOX */1616 DECLINLINE(void) tcg_gen_divu_i64(TCGv ret, TCGv arg1, TCGv arg2)1617 #endif /* VBOX */1618 1062 { 1619 1063 TCGv t0; … … 1624 1068 } 1625 
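Aside on the 64-bit-host branch above: the *_i64 immediate helpers follow two recurring shapes, shift immediates fold arg2 == 0 into a plain move, while the arithmetic/logic immediates materialise the constant in a temporary, reuse the register/register op, and free the temporary. The function below is hypothetical (note the sketch_ prefix) and simply combines both shapes in one place; it uses the real tcg-op.h entry points and assumes the TCGv declarations from tcg.h that the real file includes.

/* sketch_tcg_gen_ori_i64() is not in the tree; it restates the two idioms
 * described above. */
static inline void sketch_tcg_gen_ori_i64(TCGv ret, TCGv arg1, int64_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);        /* identity immediate: just copy */
    } else {
        TCGv t0 = tcg_const_i64(arg2);     /* constant into a fresh temporary */
        tcg_gen_or_i64(ret, arg1, t0);     /* reuse the reg/reg op */
        tcg_temp_free(t0);                 /* release the temporary */
    }
}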
Every hunk in the remainder of this file follows the same pattern: the VBOX-specific duplicate of a declaration is deleted and only the plain QEMU form is kept (the sketch after the list below spells out why the two forms were equivalent). A representative hunk:

-#ifndef VBOX
 static inline void tcg_gen_remu_i64(TCGv ret, TCGv arg1, TCGv arg2)
-#else /* VBOX */
-DECLINLINE(void) tcg_gen_remu_i64(TCGv ret, TCGv arg1, TCGv arg2)
-#endif /* VBOX */
 {
     TCGv t0;
 …

The identical wrapper removal is applied to:
- tcg_gen_remu_i64 and tcg_gen_brcondi_i64;
- the optional i32 operations tcg_gen_ext8s_i32, tcg_gen_ext16s_i32, tcg_gen_ext8u_i32, tcg_gen_ext16u_i32, tcg_gen_bswap16_i32 and tcg_gen_bswap_i32;
- the i64 extension, truncation and byte-swap helpers tcg_gen_ext8s_i64, tcg_gen_ext16s_i64, tcg_gen_ext32s_i64, tcg_gen_ext8u_i64, tcg_gen_ext16u_i64, tcg_gen_ext32u_i64, tcg_gen_trunc_i64_i32, tcg_gen_extu_i32_i64, tcg_gen_ext_i32_i64 and tcg_gen_bswap_i64, in both the TCG_TARGET_REG_BITS == 32 branch and the 64-bit branch;
- tcg_gen_neg_i32, tcg_gen_neg_i64, tcg_gen_not_i32, tcg_gen_not_i64, tcg_gen_discard_i32, both register-width variants of tcg_gen_discard_i64, tcg_gen_concat_i32_i64 and tcg_gen_concat32_i64;
- tcg_gen_debug_insn_start, tcg_gen_exit_tb and tcg_gen_goto_tb;
- the guest memory accessors tcg_gen_qemu_ld8u, ld8s, ld16u, ld16s, ld32u, ld32s, ld64 and tcg_gen_qemu_st8, st16, st32, st64, again in both the TCG_TARGET_REG_BITS == 32 and the 64-bit variants.

The function bodies and the surrounding comments ("optional operations", "Note: we assume the two high bytes are set to zero", "Note: we assume the target supports move between 32 and 64 bit registers. This will probably break MIPS64 targets.", "debug info: write the PC of the corresponding QEMU CPU instruction") are unchanged.
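To make that equivalence concrete: DECLINLINE() is IPRT's portable spelling of "static inline". The macro below is an illustrative approximation assumed for this sketch, not the real definition (which lives in iprt/cdefs.h and covers more compilers).

    /* Illustrative approximation only -- not the actual IPRT definition. */
    #ifdef _MSC_VER
    # define DECLINLINE(type) static __inline type
    #else
    # define DECLINLINE(type) static inline type
    #endif

    /* With such a definition the kept line
     *     static inline void tcg_gen_not_i32(TCGv ret, TCGv arg)
     * and the removed line
     *     DECLINLINE(void) tcg_gen_not_i32(TCGv ret, TCGv arg)
     * declare the same function, so only one copy is needed. */
-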
trunk/src/recompiler/tcg/tcg.c
r33540 r36125

The same wrapper removal is applied in this file: the #ifndef VBOX / #else /* VBOX */ / DECLINLINE(...) / #endif /* VBOX */ blocks are dropped in favour of the plain static inline declarations of the code-buffer emitters tcg_out8, tcg_out16 and tcg_out32, of tcg_temp_alloc, tcg_get_base_type and tcg_set_nop, of the liveness-analysis helpers tcg_la_func_end and tcg_la_bb_end, and of tcg_gen_code_common. The function bodies and the comments around them are unchanged (a small sketch of how these emitters are used follows below).
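For context on the emitters named above, the sketch below shows how tcg_out8/tcg_out32-style helpers typically fill the code buffer. It is a self-contained toy, not code from this changeset: MiniTCGContext and emit_mov_eax_imm are invented names, and only the code_ptr field of the real TCGContext is modelled.

    #include <stdint.h>

    typedef struct {
        uint8_t *code_ptr;              /* next free byte of the generated-code buffer */
    } MiniTCGContext;

    static inline void tcg_out8(MiniTCGContext *s, uint8_t v)
    {
        *s->code_ptr++ = v;             /* emit one byte and advance */
    }

    static inline void tcg_out32(MiniTCGContext *s, uint32_t v)
    {
        *(uint32_t *)s->code_ptr = v;   /* same unaligned-store idiom as the real helper */
        s->code_ptr += 4;
    }

    /* Usage example: emit "mov $imm32, %eax" (opcode 0xb8 followed by a little-endian immediate). */
    static void emit_mov_eax_imm(MiniTCGContext *s, uint32_t imm)
    {
        tcg_out8(s, 0xb8);
        tcg_out32(s, imm);
    }
-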
trunk/src/recompiler/tcg/tcg.h
r33540 r36125

Two changes here. First, the VBOX-specific spelling of the flexible TCGPool data member is dropped in favour of the plain GCC attribute:

     struct TCGPool *next;
     int size;
-#ifndef VBOX
     uint8_t data[0] __attribute__ ((aligned));
-#else
-    ALIGNED_MEMBER_DEF(uint8_t, data[0]);
-#endif
 } TCGPool;

Second, the usual DECLINLINE wrapper removal is applied to tcg_malloc, tcg_temp_new and tcg_temp_local_new; their bodies (starting with TCGContext *s = &tcg_ctx; and the calls to tcg_temp_new_internal(type, 0) and tcg_temp_new_internal(type, 1)) are unchanged.
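tcg_malloc is declared next to the TCGPool block structure and tcg_pool_delete, which suggests it is the fast path of a pool allocator. The sketch below shows the usual shape of such a fast path; it is an illustration under assumed names (MiniCtx, pool_cur, pool_end, pool_malloc_slow), not code taken from the changeset.

    #include <stdint.h>

    typedef struct TCGPool {
        struct TCGPool *next;                       /* chain of allocated blocks */
        int size;
        uint8_t data[0] __attribute__ ((aligned));  /* payload starts here */
    } TCGPool;

    typedef struct {
        uint8_t *pool_cur;                          /* next free byte in the current block */
        uint8_t *pool_end;                          /* one past the last usable byte */
    } MiniCtx;

    void *pool_malloc_slow(MiniCtx *s, int size);   /* would chain in a fresh TCGPool block */

    static inline void *pool_malloc(MiniCtx *s, int size)
    {
        size = (size + sizeof(long) - 1) & ~(sizeof(long) - 1);  /* keep word alignment */
        if (s->pool_cur + size > s->pool_end)
            return pool_malloc_slow(s, size);       /* slow path: allocate a new block */
        void *p = s->pool_cur;                      /* fast path: bump the cursor */
        s->pool_cur += size;
        return p;
    }
-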
trunk/src/recompiler/tcg/x86_64/tcg-target.c
r29520 r36125

The changes in this file are confined to the VBOX-only block that provides the 64-bit long call/jump helpers; the code itself is untouched and only the preprocessor formatting changes:

- a blank line is inserted after the opening #ifdef VBOX, before DECLINLINE(void) tcg_out_pushq(TCGContext *s, tcg_target_long val);
- the nested directives around the two alternative code sequences (the disabled "push imm32 / push imm32 / ret" trick described as "Somewhat tricky, but allows long jump not touching registers", versus the taken branch that loads the destination with tcg_out_movi(s, TCG_TYPE_I64, TCG_REG_RAX, dst) and emits 0xff 0xd0 /* call *%eax */ or 0xff 0xe0 /* jmp *%eax */) are re-indented as "# if 0" / "# else" / "# endif" so they nest visibly inside the VBOX block (a standalone sketch of this long-call idea follows below);
- the block's closing directive becomes "#endif /* VBOX */", preceded by a blank line, just before the unchanged "#if defined(CONFIG_SOFTMMU)".
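The indirect transfers these helpers emit (0xff 0xd0 and 0xff 0xe0 after loading RAX) exist because a direct x86-64 call or jmp only carries a signed 32-bit displacement. The function below is a standalone sketch of that idea under invented names (emit_call, the code_ptr parameter); it is not code from the changeset.

    #include <stdint.h>

    static void emit_call(uint8_t **code_ptr, uintptr_t target)
    {
        uintptr_t next = (uintptr_t)*code_ptr + 5;      /* address right after a "call rel32" */
        intptr_t  disp = (intptr_t)(target - next);

        if (disp == (int32_t)disp) {                    /* destination reachable with rel32 */
            *(*code_ptr)++ = 0xe8;                      /* call rel32 */
            *(int32_t *)*code_ptr = (int32_t)disp;
            *code_ptr += 4;
        } else {                                        /* too far: go through RAX */
            *(*code_ptr)++ = 0x48;                      /* REX.W prefix */
            *(*code_ptr)++ = 0xb8;                      /* mov rax, imm64 */
            *(uint64_t *)*code_ptr = target;
            *code_ptr += 8;
            *(*code_ptr)++ = 0xff;                      /* call *%rax (FF /2, ModRM 0xd0) */
            *(*code_ptr)++ = 0xd0;
        }
    }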