Changeset 15135 in vbox for trunk/src/recompiler_new/tcg/i386
- Timestamp: Dec 9, 2008 4:51:33 AM
- svn:sync-xref-src-repo-rev: 40538
- File: 1 edited
Legend:
- Unmodified (context lines, prefixed with a space)
- Added (prefixed with '+')
- Removed (prefixed with '-')
trunk/src/recompiler_new/tcg/i386/tcg-target.c (r14916 → r15135)
 static uint8_t *tb_ret_addr;
 
 static void patch_reloc(uint8_t *code_ptr, int type,
                         tcg_target_long value, tcg_target_long addend)
 {
…
     }
 }
+
+#ifdef VBOX
+/* emits stack alignment checks for strict builds. */
+DECLINLINE(void) tcg_gen_stack_alignment_check(TCGContext *s)
+{
+# if defined(RT_STRICT) && defined(RT_OS_DARWIN) /** @todo all OSes? */
+    tcg_out8(s, 0xf7); tcg_out8(s, 0xc4);   /* test %esp, 1fh */
+    tcg_out32(s, TCG_TARGET_STACK_ALIGN - 1);
+    tcg_out8(s, 0x74);                      /* jz imm8 */
+    tcg_out8(s, 1);                         /* $+3 (over int3) */
+    tcg_out8(s, 0xcc);                      /* int3 */
+# else
+    NOREF(s);
+# endif
+}
+#endif /* VBOX */
 
 /* maximum number of register used for input function arguments */
…
 /* rm == -1 means no register index */
 #ifndef VBOX
 static inline void tcg_out_modrm_offset(TCGContext *s, int opc, int r, int rm,
 #else /* VBOX */
 DECLINLINE(void) tcg_out_modrm_offset(TCGContext *s, int opc, int r, int rm,
 #endif /* VBOX */
                                        int32_t offset)
…
     int32_t val, val1;
     TCGLabel *l = &s->labels[label_index];
 
     if (l->has_value) {
         val = l->u.value - (tcg_target_long)s->code_ptr;
…
 }
 
 static void tcg_out_brcond(TCGContext *s, int cond,
                            TCGArg arg1, TCGArg arg2, int const_arg2,
                            int label_index)
…
 
 #ifdef VBOX
 DECLINLINE(void)
 tcg_out_long_call(TCGContext *s, void* dst)
 {
-    intptr_t disp = (uintptr_t)dst - (uintptr_t)s->code_ptr - 5;
+    intptr_t disp;
+# ifdef VBOX
+    tcg_gen_stack_alignment_check(s);
+# endif
+    disp = (uintptr_t)dst - (uintptr_t)s->code_ptr - 5;
     tcg_out8(s, 0xe8); /* call disp32 */
     tcg_out32(s, disp); /* disp32 */
 }
 DECLINLINE(void)
 tcg_out_long_jmp(TCGContext *s, void* dst)
 {
     intptr_t disp = (uintptr_t)dst - (uintptr_t)s->code_ptr - 5;
…
 };
 
-#ifdef RT_OS_DARWIN
-
-/* could be any register, not used for argument passing */
-#define BIAS_REG TCG_REG_EDI
-/* Call out stack alignement, must be power of 2 for mask below to be right */
-#define CALLOUT_FRAME_ALIGN 16
-#define CALLOUT_FRAME_MASK ~(CALLOUT_FRAME_ALIGN - 1)
-
-static void tcg_align_frame_pre(TCGContext* s, int args_size)
-{
-    int8_t bias;
-
-    bias = CALLOUT_FRAME_ALIGN - args_size;
-
-    while (bias <= 0)
-        bias += CALLOUT_FRAME_ALIGN;
-
-    tcg_out_push(s, BIAS_REG);
-    tcg_out_opc(s, 0x8d); tcg_out8(s, 0x74 | (BIAS_REG <<3)); tcg_out8(s, 0x20 | TCG_REG_ESP); tcg_out8(s, -args_size-4); /* lea -(args_size+4)(%esp),%bias */
-    tcg_out8(s, 0x83); tcg_out8(s, 0xe0 | BIAS_REG); tcg_out8(s, CALLOUT_FRAME_MASK); /* andl $CALLOUT_FRAME_MASK, %bias */
-    tcg_out_modrm(s, 0x01 | (ARITH_SUB << 3), TCG_REG_ESP, BIAS_REG); /* sub %esp,%bias */
-    tcg_out8(s, 0x8d); tcg_out8(s, 0x64); tcg_out8(s, TCG_REG_ESP |(BIAS_REG << 3)); tcg_out8(s, args_size+4); /* lea args_size+4(%esp,%bias),%esp */
-    tcg_out_push(s, BIAS_REG);
-    /* restore old bias_reg value, so nobody notices */
-    tcg_out8(s, 0xf7); tcg_out8(s, 0xd8 | BIAS_REG); /* neg bias_reg */
-    tcg_out8(s, 0x8b); tcg_out8(s, 0x44 | (BIAS_REG << 3)); tcg_out8(s, TCG_REG_ESP |(BIAS_REG << 3)); tcg_out8(s, -args_size); /* mov -args_size(%esp,%bias),%bias */
-
-    /* how stack is ready for args to be pushed */
-}
-
-static void tcg_align_frame_post(TCGContext* s, int args_size)
-{
-    tcg_out8(s, 0x8b); tcg_out8(s, 0x44 | (BIAS_REG << 3)); tcg_out8(s, TCG_REG_ESP |(TCG_REG_ESP << 3)); tcg_out8(s, args_size); /* mov args_size(%esp),%bias */
-    tcg_out_modrm(s, 0x01 | (ARITH_SUB << 3), BIAS_REG, TCG_REG_ESP); /* sub %bias, %esp */
-    tcg_out_pop(s, BIAS_REG);
-}
-
 static void tcg_out_vbox_phys_read(TCGContext *s, int index,
                                    int addr_reg,
                                    int data_reg, int data_reg2)
 {
     int useReg2 = ((index & 3) == 3);
…
     /** @todo: should we make phys addess accessors fastcalls - probably not a big deal */
     /* out parameter (address), note that phys address is always 64-bit */
     AssertMsg(sizeof(RTGCPHYS) == 8, ("Physical address must be 64-bits, update caller\n"));
-    tcg_align_frame_pre(s, 8);
-
-    /* push arguments */
-    tcg_out8(s, 0x6a); tcg_out8(s, 0x00); /* push $0 */
+# ifdef RT_OS_DARWIN
+    tgen_arithi(s, ARITH_SUB, TCG_REG_ESP, 8);
+# endif
+    tcg_out8(s, 0x6a); tcg_out8(s, 0x00); /* push $0 */
     tcg_out_push(s, addr_reg);
 
     tcg_out_long_call(s, vbox_ld_helpers[index]);
-    tcg_align_frame_post(s, 8);
 
+    /* mov %eax, data_reg */
     tcg_out_mov(s, data_reg, TCG_REG_EAX);
+
     /* returned 64-bit value */
     if (useReg2)
         tcg_out_mov(s, data_reg2, TCG_REG_EDX);
-}
-static void tcg_out_vbox_phys_write(TCGContext *s, int index,
-                                    int addr_reg,
+
+    /* clean stack after us */
+# ifdef RT_OS_DARWIN
+    tcg_out_addi(s, TCG_REG_ESP, 16);
+# else
+    tcg_out_addi(s, TCG_REG_ESP, 8);
+# endif
+}
+
+static void tcg_out_vbox_phys_write(TCGContext *s, int index,
+                                    int addr_reg,
                                     int val_reg, int val_reg2) {
-    int use_reg2 = ((index & 3) == 3);
-    int args_size = 8 + (use_reg2 ? 8 : 4);
-    int temp_val_reg = 0;
-
-    /** @todo: should we make phys addess accessors fastcalls - probably not a big deal */
-    tcg_align_frame_pre(s, args_size);
+    int useReg2 = ((index & 3) == 3);
+
+    /** @todo: should we make phys addess accessors fastcalls - probably not a big deal */
+# ifdef RT_OS_DARWIN
+    if (!useReg2)
+        tgen_arithi(s, ARITH_SUB, TCG_REG_ESP, 4);
+# endif
     /* out parameter (value2) */
-    if (use_reg2)
+    if (useReg2)
         tcg_out_push(s, val_reg2);
     /* out parameter (value) */
…
     /* out parameter (address), note that phys address is always 64-bit */
     AssertMsg(sizeof(RTGCPHYS) == 8, ("Physical address must be 64-bits, update caller\n"));
     tcg_out8(s, 0x6a); tcg_out8(s, 0x00); /* push $0 */
     tcg_out_push(s, addr_reg);
 
     /* call it */
     tcg_out_long_call(s, vbox_st_helpers[index]);
-
-    tcg_align_frame_post(s, args_size);
-}
-
-#else
-static void tcg_out_vbox_phys_read(TCGContext *s, int index,
-                                   int addr_reg,
-                                   int data_reg, int data_reg2)
-{
-    int useReg2 = ((index & 3) == 3);
-
-    /** @todo: should we make phys addess accessors fastcalls - probably not a big deal */
-    /* out parameter (address), note that phys address is always 64-bit */
-    AssertMsg(sizeof(RTGCPHYS) == 8, ("Physical address must be 64-bits, update caller\n"));
-    tcg_out8(s, 0x6a); tcg_out8(s, 0x00); /* push $0 */
-    tcg_out_push(s, addr_reg);
-
-    tcg_out_long_call(s, vbox_ld_helpers[index]);
-
-    /* mov %eax, data_reg */
-    tcg_out_mov(s, data_reg, TCG_REG_EAX);
-
-    /* returned 64-bit value */
-    if (useReg2)
-        tcg_out_mov(s, data_reg2, TCG_REG_EDX);
 
     /* clean stack after us */
-    tcg_out_addi(s, TCG_REG_ESP, 8);
-}
-
-static void tcg_out_vbox_phys_write(TCGContext *s, int index,
-                                    int addr_reg,
-                                    int val_reg, int val_reg2) {
-    int useReg2 = ((index & 3) == 3);
-
-    /** @todo: should we make phys addess accessors fastcalls - probably not a big deal */
-    /* out parameter (value2) */
-    if (useReg2)
-        tcg_out_push(s, val_reg2);
-    /* out parameter (value) */
-    tcg_out_push(s, val_reg);
-    /* out parameter (address), note that phys address is always 64-bit */
-    AssertMsg(sizeof(RTGCPHYS) == 8, ("Physical address must be 64-bits, update caller\n"));
-    tcg_out8(s, 0x6a); tcg_out8(s, 0x00); /* push $0 */
-    tcg_out_push(s, addr_reg);
-
-    /* call it */
-    tcg_out_long_call(s, vbox_st_helpers[index]);
-
-    /* clean stack after us */
+# ifdef RT_OS_DARWIN
+    tcg_out_addi(s, TCG_REG_ESP, 16);
+# else
     tcg_out_addi(s, TCG_REG_ESP, 8 + (useReg2 ? 8 : 4));
-}
-#endif
-
-#endif
+# endif
+}
+
+#endif /* defined(VBOX) && defined(REM_PHYS_ADDR_IN_TLB) */
 
 /* XXX: qemu_ld and qemu_st could be modified to clobber only EDX and
…
 
 #if defined(CONFIG_SOFTMMU)
     tcg_out_mov(s, r1, addr_reg);
 
     tcg_out_mov(s, r0, addr_reg);
 
     tcg_out_modrm(s, 0xc1, 5, r1); /* shr $x, r1 */
     tcg_out8(s, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);
 
     tcg_out_modrm(s, 0x81, 4, r0); /* andl $x, r0 */
     tcg_out32(s, TARGET_PAGE_MASK | ((1 << s_bits) - 1));
 
     tcg_out_modrm(s, 0x81, 4, r1); /* andl $x, r1 */
     tcg_out32(s, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);
…
     /* cmp 0(r1), r0 */
     tcg_out_modrm_offset(s, 0x3b, r0, r1, 0);
 
     tcg_out_mov(s, r0, addr_reg);
 
 #if TARGET_LONG_BITS == 32
     /* je label1 */
…
     label3_ptr = s->code_ptr;
     s->code_ptr++;
 
     /* cmp 4(r1), addr_reg2 */
     tcg_out_modrm_offset(s, 0x3b, addr_reg2, r1, 4);
…
     label1_ptr = s->code_ptr;
     s->code_ptr++;
 
     /* label3: */
     *label3_ptr = s->code_ptr - label3_ptr - 1;
…
     tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_ECX, mem_index);
 #endif
+#ifdef VBOX
+    tcg_gen_stack_alignment_check(s);
+#endif
     tcg_out8(s, 0xe8);
     tcg_out32(s, (tcg_target_long)qemu_ld_helpers[s_bits] -
               (tcg_target_long)s->code_ptr - 4);
…
     label2_ptr = s->code_ptr;
     s->code_ptr++;
 
     /* label1: */
     *label1_ptr = s->code_ptr - label1_ptr - 1;
 
     /* add x(r1), r0 */
     tcg_out_modrm_offset(s, 0x03, r0, r1, offsetof(CPUTLBEntry, addend) -
                          offsetof(CPUTLBEntry, addr_read));
 #else
…
         if (bswap) {
             /* rolw $8, data_reg */
             tcg_out8(s, 0x66);
             tcg_out_modrm(s, 0xc1, 0, data_reg);
             tcg_out8(s, 8);
…
         if (bswap) {
             /* rolw $8, data_reg */
             tcg_out8(s, 0x66);
             tcg_out_modrm(s, 0xc1, 0, data_reg);
             tcg_out8(s, 8);
…
     /* label2: */
     *label2_ptr = s->code_ptr - label2_ptr - 1;
+# ifdef VBOX
+    Assert((unsigned)(s->code_ptr - label2_ptr - 1) <= 127);
+# endif
 #endif
 }
…
 
 #if defined(CONFIG_SOFTMMU)
     tcg_out_mov(s, r1, addr_reg);
 
     tcg_out_mov(s, r0, addr_reg);
 
     tcg_out_modrm(s, 0xc1, 5, r1); /* shr $x, r1 */
     tcg_out8(s, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);
 
     tcg_out_modrm(s, 0x81, 4, r0); /* andl $x, r0 */
     tcg_out32(s, TARGET_PAGE_MASK | ((1 << s_bits) - 1));
 
     tcg_out_modrm(s, 0x81, 4, r1); /* andl $x, r1 */
     tcg_out32(s, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);
…
     /* cmp 0(r1), r0 */
     tcg_out_modrm_offset(s, 0x3b, r0, r1, 0);
 
     tcg_out_mov(s, r0, addr_reg);
 
 #if TARGET_LONG_BITS == 32
     /* je label1 */
…
     label3_ptr = s->code_ptr;
     s->code_ptr++;
 
     /* cmp 4(r1), addr_reg2 */
     tcg_out_modrm_offset(s, 0x3b, addr_reg2, r1, 4);
…
     label1_ptr = s->code_ptr;
     s->code_ptr++;
 
     /* label3: */
     *label3_ptr = s->code_ptr - label3_ptr - 1;
…
         tcg_out8(s, 0x6a); /* push Ib */
         tcg_out8(s, mem_index);
+# ifdef VBOX
+        tcg_gen_stack_alignment_check(s);
+# endif
         tcg_out8(s, 0xe8);
         tcg_out32(s, (tcg_target_long)qemu_st_helpers[s_bits] -
                   (tcg_target_long)s->code_ptr - 4);
         tcg_out_addi(s, TCG_REG_ESP, 4);
…
         }
         tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_ECX, mem_index);
+# ifdef VBOX
+        tcg_gen_stack_alignment_check(s);
+# endif
         tcg_out8(s, 0xe8);
         tcg_out32(s, (tcg_target_long)qemu_st_helpers[s_bits] -
                   (tcg_target_long)s->code_ptr - 4);
     }
…
         tcg_out_opc(s, 0x50 + data_reg2); /* push */
         tcg_out_opc(s, 0x50 + data_reg); /* push */
+# ifdef VBOX
+        tcg_gen_stack_alignment_check(s);
+# endif
         tcg_out8(s, 0xe8);
         tcg_out32(s, (tcg_target_long)qemu_st_helpers[s_bits] -
                   (tcg_target_long)s->code_ptr - 4);
         tcg_out_addi(s, TCG_REG_ESP, 12);
…
             break;
         }
+# if defined(VBOX) && defined(RT_OS_DARWIN)
+        tgen_arithi(s, ARITH_SUB, TCG_REG_ESP, 12); /** @todo FIXME: This is not 100% correct (assumes a bunch of things), but it works around the current issue it seems... */
+# endif
         tcg_out8(s, 0x6a); /* push Ib */
         tcg_out8(s, mem_index);
+# ifdef VBOX
+        tcg_gen_stack_alignment_check(s);
+# endif
+
         tcg_out8(s, 0xe8);
         tcg_out32(s, (tcg_target_long)qemu_st_helpers[s_bits] -
                   (tcg_target_long)s->code_ptr - 4);
+# if defined(VBOX) && defined(RT_OS_DARWIN)
+        tcg_out_addi(s, TCG_REG_ESP, 12+4);
+# else
         tcg_out_addi(s, TCG_REG_ESP, 4);
-    }
-#endif
-
+# endif
+    }
+#endif
+
     /* jmp label2 */
     tcg_out8(s, 0xeb);
     label2_ptr = s->code_ptr;
     s->code_ptr++;
 
     /* label1: */
     *label1_ptr = s->code_ptr - label1_ptr - 1;
 
     /* add x(r1), r0 */
     tcg_out_modrm_offset(s, 0x03, r0, r1, offsetof(CPUTLBEntry, addend) -
                          offsetof(CPUTLBEntry, addr_write));
 #else
…
     /* label2: */
     *label2_ptr = s->code_ptr - label2_ptr - 1;
-#endif
-}
-
-#ifndef VBOX
-static inline void tcg_out_op(TCGContext *s, int opc,
+# ifdef VBOX
+    Assert((unsigned)(s->code_ptr - label2_ptr - 1) <= 127);
+# endif
+#endif
+}
+
+#ifndef VBOX
+static inline void tcg_out_op(TCGContext *s, int opc,
 #else /* VBOX */
 DECLINLINE(void) tcg_out_op(TCGContext *s, int opc,
 #endif /* VBOX */
                              const TCGArg *args, const int *const_args)
 {
     int c;
 
     switch(opc) {
     case INDEX_op_exit_tb:
…
         /* indirect jump method */
         /* jmp Ev */
         tcg_out_modrm_offset(s, 0xff, 4, -1,
                              (tcg_target_long)(s->tb_next + args[0]));
         }
…
         break;
     case INDEX_op_call:
+#ifdef VBOX
+        tcg_gen_stack_alignment_check(s);
+#endif
         if (const_args[0]) {
             tcg_out8(s, 0xe8);
…
         c = SHIFT_SAR;
         goto gen_shift32;
 
     case INDEX_op_add2_i32:
         if (const_args[4])
             tgen_arithi(s, ARITH_ADD, args[0], args[4]);
         else
             tcg_out_modrm(s, 0x01 | (ARITH_ADD << 3), args[4], args[0]);
         if (const_args[5])
             tgen_arithi(s, ARITH_ADC, args[1], args[5]);
         else
…
         break;
     case INDEX_op_sub2_i32:
         if (const_args[4])
             tgen_arithi(s, ARITH_SUB, args[0], args[4]);
         else
             tcg_out_modrm(s, 0x01 | (ARITH_SUB << 3), args[4], args[0]);
         if (const_args[5])
             tgen_arithi(s, ARITH_SBB, args[1], args[5]);
         else
…
         tcg_out_qemu_ld(s, args, 3);
         break;
 
     case INDEX_op_qemu_st8:
         tcg_out_qemu_st(s, args, 0);
…
 #endif
 };
 
 /* Generate global QEMU prologue and epilogue code */
 void tcg_target_qemu_prologue(TCGContext *s)
 {
     int i, frame_size, push_size, stack_addend;
 
     /* TB prologue */
     /* save all callee saved registers */
…
     push_size = 4 + ARRAY_SIZE(tcg_target_callee_save_regs) * 4;
     frame_size = push_size + TCG_STATIC_CALL_ARGS_SIZE;
     frame_size = (frame_size + TCG_TARGET_STACK_ALIGN - 1) &
                  ~(TCG_TARGET_STACK_ALIGN - 1);
     stack_addend = frame_size - push_size;
     tcg_out_addi(s, TCG_REG_ESP, -stack_addend);
+# ifdef VBOX
+    tcg_gen_stack_alignment_check(s);
+# endif
 
     tcg_out_modrm(s, 0xff, 4, TCG_REG_EAX); /* jmp *%eax */
 
     /* TB epilogue */
     tb_ret_addr = s->code_ptr;
…
     tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0, 0xff);
     tcg_regset_set32(tcg_target_call_clobber_regs, 0,
                      (1 << TCG_REG_EAX) |
                      (1 << TCG_REG_EDX) |
                      (1 << TCG_REG_ECX));
 
     tcg_regset_clear(s->reserved_regs);
     tcg_regset_set_reg(s->reserved_regs, TCG_REG_ESP);
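The Darwin-specific ESP adjustments above make more sense with the call-site padding spelled out: Mac OS X expects the stack to be 16-byte aligned at call instructions, so the phys-read path subtracts 8 before pushing 8 bytes of arguments, the 32-bit phys-write path subtracts 4 before pushing 12, and the 64-bit phys-write path pushes exactly 16 and needs no padding. A minimal sketch of that arithmetic follows; the 16-byte alignment figure is an assumption based on the Mac OS X i386 ABI and is not stated in the changeset itself.

#include <stdio.h>

/* Sketch only: reproduces the call-site padding arithmetic the RT_OS_DARWIN
 * branches appear to rely on.  ALIGN = 16 is an assumption; the argument
 * sizes come from the pushes emitted by tcg_out_vbox_phys_read/write. */
#define ALIGN 16

static int pad_before_call(int args_size)
{
    return (ALIGN - args_size % ALIGN) % ALIGN;
}

int main(void)
{
    const int sizes[] = { 8, 12, 16 };  /* phys read, 32-bit write, 64-bit write */
    for (int i = 0; i < 3; i++) {
        int pad = pad_before_call(sizes[i]);
        /* "sub" is the pre-call ESP adjustment, "add" the post-call cleanup */
        printf("args=%2d  sub $%d, %%esp   add $%d, %%esp\n",
               sizes[i], pad, sizes[i] + pad);
    }
    return 0;
}

Running this prints pads of 8, 4 and 0 with a uniform cleanup of 16 bytes, which matches the pre-call tgen_arithi(ARITH_SUB, ...) amounts and the tcg_out_addi(s, TCG_REG_ESP, 16) cleanup in the new Darwin code paths.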