Changeset 86264 in vbox
- Timestamp: Sep 24, 2020 12:04:58 PM (4 years ago)
- File: 1 edited
Legend:
- Unmodified (context lines, no marker)
- Added (+)
- Removed (-)
trunk/src/libs/xpcom18a4/xpcom/reflect/xptcall/src/md/unix/xptcinvoke_amd64_vbox.asm
```diff
--- xptcinvoke_amd64_vbox.asm (r86262)
+++ xptcinvoke_amd64_vbox.asm (r86264)
@@ ... @@
 
         ;
-        ; Check that there aren't unreasonably many parameters
-        ; (we could do ~255, but 64 is more reasonable number).
-        ;
-        cmp     edx, 64
-        je      .too_many_parameters
-
-        ;
-        ; Look up the method address in the vtable and store it in r11.
-        ;
+        ; Move essential input parameters into non-parameter registers.
+        ;
+        mov     rbx, rcx                ; rbx = first / current parameter
+        mov     r12d, edx               ; r12 = parameter count / left
+
+        ; Look up the method address in the vtable and store it in r11 (freeing up rsi).
         mov     r11, [rdi]              ; r11 = vtable
         mov     esi, esi                ; zero extend vtable index.
         mov     r11, [r11 + rsi * 8]    ; r11 = method to call.
 
+%define WITH_OPTIMIZATION
+%ifdef WITH_OPTIMIZATION
+        ;
+        ; If there are 5 or fewer parameters and they are all suitable for GREGs,
+        ; we can try optimize the processing here.
+        ;
+        ; Switch on count, using fall-thought-to-smaller-value logic, default
+        ; case goes to generic (slow) code path.
+        ;
+        cmp     r12d, 1
+        je      .fast_1
+        cmp     r12d, 2
+        je      .fast_2
+        cmp     r12d, 3
+        je      .fast_3
+        cmp     r12d, 4
+        je      .fast_4
+        cmp     r12d, 0
+        je      .fast_0
+        cmp     r12d, 5
+        je      .fast_5
+        jmp     .slow
+        times 0x17 int3                 ; manually align the 'ret' instruction on the last cacheline byte and fast_1 on the first.
+%macro fast_case 4
+%1:
+        test    byte [rbx + nsXPTCVariant_size * %3 + nsXPTCVariant.flags], PTR_IS_DATA
+        mov     %4, [rbx + nsXPTCVariant_size * %3 + nsXPTCVariant.ptr]
+        jnz     %2
+        mov     %4, [rbx + nsXPTCVariant_size * %3 + nsXPTCVariant.val]
+        cmp     byte [rbx + nsXPTCVariant_size * %3 + nsXPTCVariant.flags], T_FLOAT
+        je      .fast_bailout
+        cmp     byte [rbx + nsXPTCVariant_size * %3 + nsXPTCVariant.flags], T_DOUBLE
+        je      .fast_bailout
+%endmacro
+        fast_case .fast_5, .fast_4, 4, r9
+        fast_case .fast_4, .fast_2, 3, r8
+        fast_case .fast_3, .fast_2, 2, rcx
+        fast_case .fast_2, .fast_1, 1, rdx
+        fast_case .fast_1, .fast_0, 0, rsi
+.fast_0:
+        and     rsp, 0ffffffffffffffe0h ; Align the stack on 32 bytes.
+        xor     eax, eax
+.make_just_call:
+        call    r11
+
+.return:
+        lea     rsp, [rbp - 8*3]
+        pop     r13
+        pop     r12
+        pop     rbx
+        leave
+        ret
+
+.fast_bailout:
+        ; int3
+.slow:
+%endif
+        ;
+        ; Check that there aren't unreasonably many parameters
+        ; (we could do ~255, but 64 is more reasonable number).
+        ;
+        cmp     r12d, 64
+        je      .too_many_parameters
+
         ;
         ; For simplicity reserve stack space for all parameters and point r10 at it.
         ;
-        mov     r12d, edx               ; r12d = parameter count / left
-        lea     edx, [edx * 8]
+        lea     edx, [r12d * 8]
         sub     rsp, rdx
-        and     rsp, 0ffffffffffffffe0h ; 32 byte align stack.
+        and     rsp, 0ffffffffffffffe0h ; 32 byte aligned stack.
         mov     r10, rsp                ; r10 = next stack parameter.
 
@@ ... @@
         ; Set up parameter pointer and register distribution counts.
         ;
-        mov     rbx, rcx                ; rbx = current parameter
         mov     eax, 1                  ; al = greg count, ah = fpreg count.
 
@@ ... @@
         ; Anything to do here?
         ;
+%ifndef WITH_OPTIMIZATION
         test    r12d, r12d
         jz      .make_call
+%endif
         jmp     .param_loop
 
@@ ... @@
         ; Pointers are loaded from the 'ptr' rather than the 'val' member.
         ;
-ALIGNCODE(16)
+ALIGNCODE(64)
 .is_ptr:
         inc     al
@@ ... @@
         ; Call the method and return.
         ;
+%ifdef WITH_OPTIMIZATION
+        movzx   eax, ah                 ; AL = number of parameters in XMM registers (variadict only, but easy to do).
+        jmp     .make_just_call
+%else
 .make_call:
         movzx   eax, ah                 ; AL = number of parameters in XMM registers (variadict only, but easy to do).
+.make_just_call:
         call    r11
 
@@ ... @@
         leave
         ret
+%endif
 
 .too_many_parameters:
```
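The registers the prologue shuffles around come straight from the System V AMD64 calling convention: this thunk is the body of the xptcall invoke helper, so its C-level arguments land in rdi, esi, edx and rcx before any assembly runs. The declaration below is a hedged sketch of that entry point using the historical xpcom18a4-style signature; the exact typedefs in the VirtualBox tree may differ.

```cpp
// Sketch of the C-level entry point this assembly implements (types simplified;
// the real tree uses PRUint32/nsresult typedefs). Comments map each argument to
// the register the prologue reads, per the System V AMD64 ABI.
#include <cstdint>

struct nsISupports;                    // opaque interface pointer
struct nsXPTCVariant;                  // element of the parameter descriptor array

extern "C" uint32_t                    // nsresult
XPTC_InvokeByIndex(nsISupports   *that,         // rdi: object; [rdi] is its vtable
                   uint32_t       methodIndex,  // esi: vtable slot, scaled by 8 to fetch r11
                   uint32_t       paramCount,   // edx: saved in r12d as "parameters left"
                   nsXPTCVariant *params);      // rcx: saved in rbx as the current parameter
```

With `this` already occupying rdi, only rsi, rdx, rcx, r8 and r9 remain for parameters, which is why the new fast path caps itself at five of them.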
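The heart of the change is the WITH_OPTIMIZATION block: it switches on the parameter count, falls through from .fast_5 down to .fast_0, and loads each nsXPTCVariant either from its ptr member (when PTR_IS_DATA is set) or from its val member, bailing out to the generic .slow path as soon as it meets a float or double, since those would have to travel in XMM registers. The C++ below is only a model of that eligibility test; the struct layout, field use and constant values are assumptions for illustration, not the real nsXPTCVariant definition.

```cpp
// Illustrative model of the fast path's eligibility test (assumptions only:
// the layout and constants below are NOT the real nsXPTCVariant definition).
#include <cstdint>

enum : uint8_t { T_FLOAT = 6, T_DOUBLE = 7 };   // assumed type tags
enum : uint8_t { PTR_IS_DATA = 0x01 };          // assumed flag bit

struct FakeVariant {                            // stand-in for nsXPTCVariant
    uint64_t val;                               // value payload ('val' union in the real struct)
    void    *ptr;                               // pointer payload ('ptr' member)
    uint8_t  type;                              // type tag
    uint8_t  flags;                             // PTR_IS_DATA etc.
};

// Fill gpr[0..cParams-1] with what would go into rsi, rdx, rcx, r8, r9 and
// return true; return false to fall back to the generic (slow) path.
static bool TryFastPath(const FakeVariant *paParams, unsigned cParams, uint64_t gpr[5])
{
    if (cParams > 5)                            // only five GREGs left after 'this' takes rdi
        return false;
    for (unsigned i = 0; i < cParams; i++) {
        if (paParams[i].flags & PTR_IS_DATA)
            gpr[i] = (uint64_t)(uintptr_t)paParams[i].ptr;   // pass the pointer itself
        else if (paParams[i].type == T_FLOAT || paParams[i].type == T_DOUBLE)
            return false;                       // needs an XMM register: bail out like .fast_bailout
        else
            gpr[i] = paParams[i].val;           // plain integer/pointer-sized value
    }
    return true;
}
```

The assembly gets the same effect without a loop: each fast_case expansion handles one parameter index and falls through to the next smaller one, so the .fast_5 ... .fast_1 cases form a ladder that ends at the shared call site in .fast_0.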
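For the generic path the prologue now reserves stack space from the saved count in r12d instead of edx, then rounds rsp down to a 32-byte boundary. A minimal sketch of that arithmetic, assuming one 8-byte slot per parameter as in the assembly:

```cpp
// Model of the slow path's stack reservation (illustration only; the real code
// operates on rsp directly and then walks the slots via r10).
#include <cstdint>

static inline uint64_t ReserveParamStack(uint64_t rsp, uint32_t cParams)
{
    rsp -= uint64_t(cParams) * 8;      // lea edx, [r12d * 8]; sub rsp, rdx
    rsp &= ~uint64_t(0x1f);            // and rsp, 0ffffffffffffffe0h: round down to 32 bytes
    return rsp;                        // mov r10, rsp: r10 = next stack parameter
}
```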