Changeset 48127 in vbox for trunk/src/VBox

Timestamp: Aug 28, 2013 2:48:16 PM
Location:  trunk/src/VBox/VMM/VMMAll
Files:     2 edited
trunk/src/VBox/VMM/VMMAll/IEMAllAImpl.asm
--- trunk/src/VBox/VMM/VMMAll/IEMAllAImpl.asm (r47548)
+++ trunk/src/VBox/VMM/VMMAll/IEMAllAImpl.asm (r48127)
@@ -355 +355 @@
        EPILOGUE_3_ARGS_EX 8
 ENDPROC iemAImpl_ %+ %1 %+ _u64
-%else ; stub it for now - later, replace with hand coded stuff.
-BEGINPROC_FASTCALL iemAImpl_ %+ %1 %+ _u64, 16
-        int3
-        ret
-ENDPROC iemAImpl_ %+ %1 %+ _u64
-%endif ; !RT_ARCH_AMD64
+%endif ; RT_ARCH_AMD64
 
 %if %2 != 0 ; locked versions requested?
@@ -396 +391 @@
        EPILOGUE_3_ARGS_EX 8
 ENDPROC iemAImpl_ %+ %1 %+ _u64_locked
-%else ; stub it for now - later, replace with hand coded stuff.
-BEGINPROC_FASTCALL iemAImpl_ %+ %1 %+ _u64_locked, 16
-        int3
-        ret 8
-ENDPROC iemAImpl_ %+ %1 %+ _u64_locked
-%endif ; !RT_ARCH_AMD64
+%endif ; RT_ARCH_AMD64
 %endif ; locked
 %endmacro
@@ -458 +448 @@
        EPILOGUE_3_ARGS_EX 8
 ENDPROC iemAImpl_ %+ %1 %+ _u64
-%else ; stub it for now - later, replace with hand coded stuff.
-BEGINPROC_FASTCALL iemAImpl_ %+ %1 %+ _u64, 16
-        int3
-        ret 8
-ENDPROC iemAImpl_ %+ %1 %+ _u64
-%endif ; !RT_ARCH_AMD64
+%endif ; RT_ARCH_AMD64
 
 %if %2 != 0 ; locked versions requested?
@@ -491 +476 @@
        EPILOGUE_3_ARGS_EX 8
 ENDPROC iemAImpl_ %+ %1 %+ _u64_locked
-%else ; stub it for now - later, replace with hand coded stuff.
-BEGINPROC_FASTCALL iemAImpl_ %+ %1 %+ _u64_locked, 16
-        int3
-        ret 8
-ENDPROC iemAImpl_ %+ %1 %+ _u64_locked
-%endif ; !RT_ARCH_AMD64
+%endif ; RT_ARCH_AMD64
 %endif ; locked
 %endmacro
@@ -552 +532 @@
        EPILOGUE_3_ARGS_EX 8
 ENDPROC iemAImpl_ %+ %1 %+ _u64
-%else ; stub it for now - later, replace with hand coded stuff.
-BEGINPROC_FASTCALL iemAImpl_ %+ %1 %+ _u64, 16
-        int3
-        ret 8
-ENDPROC iemAImpl_ %+ %1 %+ _u64
-%endif ; !RT_ARCH_AMD64
+%endif ; RT_ARCH_AMD64
 %endmacro
 IEMIMPL_BIT_OP bsf, (X86_EFL_ZF), (X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF)
@@ -586 +561 @@
 ENDPROC iemAImpl_imul_two_u32
 
+%ifdef RT_ARCH_AMD64
 BEGINPROC_FASTCALL iemAImpl_imul_two_u64, 16
        PROLOGUE_3_ARGS
-%ifdef RT_ARCH_AMD64
        IEM_MAYBE_LOAD_FLAGS A2, (X86_EFL_OF | X86_EFL_CF), (X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF)
        imul    A1, qword [A0]
        mov     [A0], A1
        IEM_SAVE_FLAGS A2, (X86_EFL_OF | X86_EFL_CF), (X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF)
-%else
-        int3    ;; @todo implement me
-%endif
        EPILOGUE_3_ARGS_EX 8
 ENDPROC iemAImpl_imul_two_u64
+%endif ; RT_ARCH_AMD64
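The imul_two_u64 hunk above drops the int3 fallback entirely, so on 32-bit hosts the C file has to provide the 64-bit multiplies. For orientation only, here is a minimal sketch of how a 64x64->128 unsigned multiply is typically composed from 32x32->64 partial products on such hosts (demoMul64 is a hypothetical name, not VirtualBox code):

#include <stdint.h>

/* 64x64 -> 128-bit unsigned multiply from 32-bit halves. The partial
   products are accumulated so no intermediate sum can overflow 64 bits. */
static void demoMul64(uint64_t u64A, uint64_t u64B, uint64_t *pu64Hi, uint64_t *pu64Lo)
{
    uint32_t const aLo = (uint32_t)u64A, aHi = (uint32_t)(u64A >> 32);
    uint32_t const bLo = (uint32_t)u64B, bHi = (uint32_t)(u64B >> 32);

    uint64_t const ll = (uint64_t)aLo * bLo;
    uint64_t const lh = (uint64_t)aLo * bHi;
    uint64_t const hl = (uint64_t)aHi * bLo;
    uint64_t const hh = (uint64_t)aHi * bHi;

    /* Carries out of bit 31 of the low word end up in 'mid' bits 32+. */
    uint64_t const mid = (ll >> 32) + (uint32_t)lh + (uint32_t)hl;
    *pu64Lo = (mid << 32) | (uint32_t)ll;
    *pu64Hi = hh + (lh >> 32) + (hl >> 32) + (mid >> 32);
}

The diff for this file continues below.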
@@ -631 +604 @@
 ENDPROC iemAImpl_xchg_u32
 
+%ifdef RT_ARCH_AMD64
 BEGINPROC_FASTCALL iemAImpl_xchg_u64, 8
-%ifdef RT_ARCH_AMD64
        PROLOGUE_2_ARGS
        mov     T0, [A1]
@@ -638 +611 @@
        mov     [A1], T0
        EPILOGUE_2_ARGS
-%else
-        int3
-        ret 0
+ENDPROC iemAImpl_xchg_u64
 %endif
-ENDPROC iemAImpl_xchg_u64
 
@@ -683 +653 @@
 ENDPROC iemAImpl_xadd_u32
 
+%ifdef RT_ARCH_AMD64
 BEGINPROC_FASTCALL iemAImpl_xadd_u64, 12
-%ifdef RT_ARCH_AMD64
        PROLOGUE_3_ARGS
        IEM_MAYBE_LOAD_FLAGS A2, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF), 0
@@ -692 +662 @@
        IEM_SAVE_FLAGS A2, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF), 0
        EPILOGUE_3_ARGS
-%else
-        int3
-        ret 4
-%endif
 ENDPROC iemAImpl_xadd_u64
+%endif ; RT_ARCH_AMD64
 
 BEGINPROC_FASTCALL iemAImpl_xadd_u8_locked, 12
@@ -728 +695 @@
 ENDPROC iemAImpl_xadd_u32_locked
 
+%ifdef RT_ARCH_AMD64
 BEGINPROC_FASTCALL iemAImpl_xadd_u64_locked, 12
-%ifdef RT_ARCH_AMD64
        PROLOGUE_3_ARGS
        IEM_MAYBE_LOAD_FLAGS A2, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF), 0
@@ -737 +704 @@
        IEM_SAVE_FLAGS A2, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF), 0
        EPILOGUE_3_ARGS
-%else
-        int3
-        ret 4
-%endif
 ENDPROC iemAImpl_xadd_u64_locked
+%endif ; RT_ARCH_AMD64
 
@@ -1017 +981 @@
        EPILOGUE_2_ARGS
 ENDPROC iemAImpl_ %+ %1 %+ _u64_locked
-%else
-; stub them for now.
-BEGINPROC_FASTCALL iemAImpl_ %+ %1 %+ _u64, 8
-        int3
-        ret 0
-ENDPROC iemAImpl_ %+ %1 %+ _u64
-BEGINPROC_FASTCALL iemAImpl_ %+ %1 %+ _u64_locked, 8
-        int3
-        ret 0
-ENDPROC iemAImpl_ %+ %1 %+ _u64_locked
-%endif
+%endif ; RT_ARCH_AMD64
 
 %endmacro
@@ -1141 +1095 @@
        EPILOGUE_3_ARGS
 ENDPROC iemAImpl_ %+ %1 %+ _u64
-%else ; stub it for now - later, replace with hand coded stuff.
-BEGINPROC_FASTCALL iemAImpl_ %+ %1 %+ _u64, 12
-        int3
-        ret 4
-ENDPROC iemAImpl_ %+ %1 %+ _u64
-%endif ; !RT_ARCH_AMD64
+%endif ; RT_ARCH_AMD64
 
 %endmacro
@@ -1221 +1170 @@
        EPILOGUE_4_ARGS_EX 12
 ENDPROC iemAImpl_ %+ %1 %+ _u64
-%else ; stub it for now - later, replace with hand coded stuff.
-BEGINPROC_FASTCALL iemAImpl_ %+ %1 %+ _u64, 20
-        int3
-        ret 12
-ENDPROC iemAImpl_ %+ %1 %+ _u64
-%endif ; !RT_ARCH_AMD64
+%endif ; RT_ARCH_AMD64
 
 %endmacro
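With the int3 stubs gone, every 64-bit helper on 32-bit hosts now lands in IEMAllAImplC.cpp below, where most bodies are still AssertFailed() placeholders. As a hint of the flag bookkeeping those placeholders must eventually do, a hypothetical sketch of a 64-bit ADD with EFLAGS recalculation (demoAdd64 and the EFL_* masks are illustrative; the real code would use the X86_EFL_* constants from x86.h):

#include <stdint.h>

#define EFL_CF  UINT32_C(0x0001)   /* bit 0  */
#define EFL_PF  UINT32_C(0x0004)   /* bit 2  */
#define EFL_AF  UINT32_C(0x0010)   /* bit 4  */
#define EFL_ZF  UINT32_C(0x0040)   /* bit 6  */
#define EFL_SF  UINT32_C(0x0080)   /* bit 7  */
#define EFL_OF  UINT32_C(0x0800)   /* bit 11 */

static void demoAdd64(uint64_t *puDst, uint64_t uSrc, uint32_t *pfEFlags)
{
    uint64_t const uDst    = *puDst;
    uint64_t const uResult = uDst + uSrc;
    *puDst = uResult;

    uint32_t fEfl = *pfEFlags & ~(EFL_CF | EFL_PF | EFL_AF | EFL_ZF | EFL_SF | EFL_OF);
    if (uResult < uDst)                              /* CF: carry out of bit 63 */
        fEfl |= EFL_CF;
    unsigned uPar = (unsigned)(uResult & 0xff);      /* PF: even parity of the low byte */
    uPar ^= uPar >> 4; uPar ^= uPar >> 2; uPar ^= uPar >> 1;
    if (!(uPar & 1))
        fEfl |= EFL_PF;
    if ((uDst ^ uSrc ^ uResult) & 0x10)              /* AF: carry out of bit 3 */
        fEfl |= EFL_AF;
    if (!uResult)
        fEfl |= EFL_ZF;
    if (uResult >> 63)
        fEfl |= EFL_SF;
    if (((uDst ^ uResult) & (uSrc ^ uResult)) >> 63) /* OF: signed overflow */
        fEfl |= EFL_OF;
    *pfEFlags = fEfl;
}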
trunk/src/VBox/VMM/VMMAll/IEMAllAImplC.cpp
--- trunk/src/VBox/VMM/VMMAll/IEMAllAImplC.cpp (r47568)
+++ trunk/src/VBox/VMM/VMMAll/IEMAllAImplC.cpp (r48127)
@@ -26 +26 @@
 #ifdef RT_ARCH_X86
 /*
- * There are a few 64-bit on 32-bit things we'd rather do in C.
+ * There are a few 64-bit on 32-bit things we'd rather do in C.  Actually, doing
+ * it all in C is probably safer atm., optimize what's necessary later, maybe.
  */
 
 
-IEM_DECL_IMPL_DEF(int, iemAImpl_mul_u64,(uint64_t *pu64RAX, uint64_t *pu64RDX, uint64_t u64Factor, uint32_t *pEFlags))
+/* Binary ops */
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_add_u64,(uint64_t *puDst, uint64_t uSrc, uint32_t *pfEFlags))
+{
+    AssertFailed();
+}
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_adc_u64,(uint64_t *puDst, uint64_t uSrc, uint32_t *pfEFlags))
+{
+    AssertFailed();
+}
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_sub_u64,(uint64_t *puDst, uint64_t uSrc, uint32_t *pfEFlags))
+{
+    AssertFailed();
+}
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_sbb_u64,(uint64_t *puDst, uint64_t uSrc, uint32_t *pfEFlags))
+{
+    AssertFailed();
+}
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_or_u64,(uint64_t *puDst, uint64_t uSrc, uint32_t *pfEFlags))
+{
+    AssertFailed();
+}
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_xor_u64,(uint64_t *puDst, uint64_t uSrc, uint32_t *pfEFlags))
+{
+    AssertFailed();
+}
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_and_u64,(uint64_t *puDst, uint64_t uSrc, uint32_t *pfEFlags))
+{
+    AssertFailed();
+}
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_cmp_u64,(uint64_t *puDst, uint64_t uSrc, uint32_t *pfEFlags))
+{
+    AssertFailed();
+}
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_test_u64,(uint64_t *puDst, uint64_t uSrc, uint32_t *pfEFlags))
+{
+    AssertFailed();
+}
+
+
+/** 64-bit locked binary operand operation. */
+# define DO_LOCKED_BIN_OP_U64(a_Mnemonic) \
+    do { \
+        uint64_t uOld = ASMAtomicReadU64(puDst); \
+        uint64_t uTmp; \
+        uint32_t fEflTmp; \
+        do \
+        { \
+            uTmp    = uOld; \
+            fEflTmp = *pfEFlags; \
+            iemAImpl_ ## a_Mnemonic ## _u64(&uTmp, uSrc, &fEflTmp); \
+        } while (!ASMAtomicCmpXchgExU64(puDst, uTmp, uOld, &uOld)); \
+        *pfEFlags = fEflTmp; \
+    } while (0)
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_add_u64_locked,(uint64_t *puDst, uint64_t uSrc, uint32_t *pfEFlags))
+{
+    DO_LOCKED_BIN_OP_U64(add);
+}
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_adc_u64_locked,(uint64_t *puDst, uint64_t uSrc, uint32_t *pfEFlags))
+{
+    DO_LOCKED_BIN_OP_U64(adc);
+}
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_sub_u64_locked,(uint64_t *puDst, uint64_t uSrc, uint32_t *pfEFlags))
+{
+    DO_LOCKED_BIN_OP_U64(sub);
+}
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_sbb_u64_locked,(uint64_t *puDst, uint64_t uSrc, uint32_t *pfEFlags))
+{
+    DO_LOCKED_BIN_OP_U64(sbb);
+}
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_or_u64_locked,(uint64_t *puDst, uint64_t uSrc, uint32_t *pfEFlags))
+{
+    DO_LOCKED_BIN_OP_U64(or);
+}
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_xor_u64_locked,(uint64_t *puDst, uint64_t uSrc, uint32_t *pfEFlags))
+{
+    DO_LOCKED_BIN_OP_U64(xor);
+}
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_and_u64_locked,(uint64_t *puDst, uint64_t uSrc, uint32_t *pfEFlags))
+{
+    DO_LOCKED_BIN_OP_U64(and);
+}
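The DO_LOCKED_BIN_OP_U64 macro above is the classic optimistic compare-and-swap retry loop: apply the plain helper to a private copy of the operand and flags, then publish the result only if the destination is still unchanged, retrying otherwise. The same pattern in standalone form, using C11 atomics in place of IPRT's ASMAtomicCmpXchgExU64 (lockedBinOp64 and PFNBINOP64 are hypothetical names for this sketch):

#include <stdatomic.h>
#include <stdint.h>

typedef void (*PFNBINOP64)(uint64_t *puDst, uint64_t uSrc, uint32_t *pfEFlags);

static void lockedBinOp64(_Atomic uint64_t *pDst, uint64_t uSrc,
                          uint32_t *pfEFlags, PFNBINOP64 pfnOp)
{
    uint64_t uOld = atomic_load(pDst);
    uint64_t uTmp;
    uint32_t fEflTmp;
    do
    {
        uTmp    = uOld;          /* work on a private copy of the value... */
        fEflTmp = *pfEFlags;     /* ...and a private copy of the flags     */
        pfnOp(&uTmp, uSrc, &fEflTmp);
        /* On failure uOld is refreshed with the current value and we retry. */
    } while (!atomic_compare_exchange_weak(pDst, &uOld, uTmp));
    *pfEFlags = fEflTmp;
}

The diff continues with the bit operations.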
+
+
+/* Bit operations (same signature as above). */
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_bt_u64,(uint64_t *puDst, uint64_t uSrc, uint32_t *pfEFlags))
+{
+    /* Note! "undefined" flags: OF, SF, ZF, AF, PF. */
+    Assert(uSrc < 64);
+    if (*puDst & RT_BIT_64(uSrc))
+        *pfEFlags |= X86_EFL_CF;
+    else
+        *pfEFlags &= ~X86_EFL_CF;
+}
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_btc_u64,(uint64_t *puDst, uint64_t uSrc, uint32_t *pfEFlags))
+{
+    /* Note! "undefined" flags: OF, SF, ZF, AF, PF. */
+    Assert(uSrc < 64);
+    uint64_t fMask = RT_BIT_64(uSrc);
+    if (*puDst & fMask)
+    {
+        *puDst &= ~fMask;
+        *pfEFlags |= X86_EFL_CF;
+    }
+    else
+    {
+        *puDst |= fMask;
+        *pfEFlags &= ~X86_EFL_CF;
+    }
+}
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_btr_u64,(uint64_t *puDst, uint64_t uSrc, uint32_t *pfEFlags))
+{
+    /* Note! "undefined" flags: OF, SF, ZF, AF, PF. */
+    uint64_t fMask = RT_BIT_64(uSrc);
+    if (*puDst & fMask)
+        *pfEFlags |= X86_EFL_CF;
+    else
+        *pfEFlags &= ~X86_EFL_CF;
+    *puDst &= ~fMask;
+}
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_bts_u64,(uint64_t *puDst, uint64_t uSrc, uint32_t *pfEFlags))
+{
+    /* Note! "undefined" flags: OF, SF, ZF, AF, PF. */
+    uint64_t fMask = RT_BIT_64(uSrc);
+    if (*puDst & fMask)
+        *pfEFlags |= X86_EFL_CF;
+    else
+        *pfEFlags &= ~X86_EFL_CF;
+    *puDst |= fMask;
+}
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_btc_u64_locked,(uint64_t *puDst, uint64_t uSrc, uint32_t *pfEFlags))
+{
+    DO_LOCKED_BIN_OP_U64(btc);
+}
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_btr_u64_locked,(uint64_t *puDst, uint64_t uSrc, uint32_t *pfEFlags))
+{
+    DO_LOCKED_BIN_OP_U64(btr);
+}
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_bts_u64_locked,(uint64_t *puDst, uint64_t uSrc, uint32_t *pfEFlags))
+{
+    DO_LOCKED_BIN_OP_U64(bts);
+}
+
+
+/* bit scan */
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_bsf_u64,(uint64_t *puDst, uint64_t uSrc, uint32_t *pfEFlags))
+{
+    /* Note! "undefined" flags: OF, SF, AF, PF, CF. */
+    if (uSrc)
+    {
+        uint8_t  iBit;
+        uint32_t u32Src;
+        if (uSrc & UINT32_MAX)
+        {
+            iBit   = 0;
+            u32Src = (uint32_t)uSrc;
+        }
+        else
+        {
+            iBit   = 32;
+            u32Src = (uint32_t)(uSrc >> 32);
+        }
+        if (!(u32Src & UINT16_MAX))
+        {
+            iBit   += 16;
+            u32Src >>= 16;
+        }
+        if (!(u32Src & UINT8_MAX))
+        {
+            iBit   += 8;
+            u32Src >>= 8;
+        }
+        if (!(u32Src & 0xf))
+        {
+            iBit   += 4;
+            u32Src >>= 4;
+        }
+        if (!(u32Src & 0x3))
+        {
+            iBit   += 2;
+            u32Src >>= 2;
+        }
+        if (!(u32Src & 1))
+        {
+            iBit   += 1;
+            Assert(u32Src & 2);
+        }
+
+        *puDst     = iBit;
+        *pfEFlags &= ~X86_EFL_ZF;
+    }
+    else
+        *pfEFlags |= X86_EFL_ZF;
+}
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_bsr_u64,(uint64_t *puDst, uint64_t uSrc, uint32_t *pfEFlags))
+{
+    /* Note! "undefined" flags: OF, SF, AF, PF, CF. */
+    if (uSrc)
+    {
+        uint8_t  iBit;
+        uint32_t u32Src;
+        if (uSrc & UINT64_C(0xffffffff00000000))
+        {
+            iBit   = 63;
+            u32Src = (uint32_t)(uSrc >> 32);
+        }
+        else
+        {
+            iBit   = 31;
+            u32Src = (uint32_t)uSrc;
+        }
+        if (!(u32Src & UINT32_C(0xffff0000)))
+        {
+            iBit   -= 16;
+            u32Src <<= 16;
+        }
+        if (!(u32Src & UINT32_C(0xff000000)))
+        {
+            iBit   -= 8;
+            u32Src <<= 8;
+        }
+        if (!(u32Src & UINT32_C(0xf0000000)))
+        {
+            iBit   -= 4;
+            u32Src <<= 4;
+        }
+        if (!(u32Src & UINT32_C(0xc0000000)))
+        {
+            iBit   -= 2;
+            u32Src <<= 2;
+        }
+        if (!(u32Src & UINT32_C(0x80000000)))
+        {
+            iBit   -= 1;
+            u32Src <<= 1;
+            Assert(u32Src & RT_BIT_32(31));
+        }
+
+        *puDst     = iBit;
+        *pfEFlags &= ~X86_EFL_ZF;
+    }
+    else
+        *pfEFlags |= X86_EFL_ZF;
+}
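Both scans binary-search one 32-bit half instead of looping over all 64 bits. A quick way to validate them is against compiler intrinsics; a hypothetical check, assuming GCC/Clang builtins and that it is compiled together with the helpers above:

#include <assert.h>
#include <stdint.h>

static void checkBitScans(uint64_t uSrc)
{
    uint64_t uDst = 0;
    uint32_t fEfl = 0;
    if (!uSrc)
        return;                       /* ZF-only case, nothing to compare */
    iemAImpl_bsf_u64(&uDst, uSrc, &fEfl);
    assert(uDst == (uint64_t)__builtin_ctzll(uSrc));          /* lowest set bit  */
    iemAImpl_bsr_u64(&uDst, uSrc, &fEfl);
    assert(uDst == 63u - (uint64_t)__builtin_clzll(uSrc));    /* highest set bit */
}

The diff continues with the unary operations.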
+
+
+/* Unary operands. */
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_inc_u64,(uint64_t *puDst, uint32_t *pfEFlags))
+{
+    AssertFailed();
+}
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_dec_u64,(uint64_t *puDst, uint32_t *pfEFlags))
+{
+    AssertFailed();
+}
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_not_u64,(uint64_t *puDst, uint32_t *pfEFlags))
+{
+    AssertFailed();
+}
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_neg_u64,(uint64_t *puDst, uint32_t *pfEFlags))
+{
+    AssertFailed();
+}
+
+
+/** 64-bit locked unary operand operation. */
+# define DO_LOCKED_UNARY_OP_U64(a_Mnemonic) \
+    do { \
+        uint64_t uOld = ASMAtomicReadU64(puDst); \
+        uint64_t uTmp; \
+        uint32_t fEflTmp; \
+        do \
+        { \
+            uTmp    = uOld; \
+            fEflTmp = *pfEFlags; \
+            iemAImpl_ ## a_Mnemonic ## _u64(&uTmp, &fEflTmp); \
+        } while (!ASMAtomicCmpXchgExU64(puDst, uTmp, uOld, &uOld)); \
+        *pfEFlags = fEflTmp; \
+    } while (0)
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_inc_u64_locked,(uint64_t *puDst, uint32_t *pfEFlags))
+{
+    DO_LOCKED_UNARY_OP_U64(inc);
+}
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_dec_u64_locked,(uint64_t *puDst, uint32_t *pfEFlags))
+{
+    DO_LOCKED_UNARY_OP_U64(dec);
+}
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_not_u64_locked,(uint64_t *puDst, uint32_t *pfEFlags))
+{
+    DO_LOCKED_UNARY_OP_U64(not);
+}
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_neg_u64_locked,(uint64_t *puDst, uint32_t *pfEFlags))
+{
+    DO_LOCKED_UNARY_OP_U64(neg);
+}
+
+
+/* Shift and rotate. */
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_rol_u64,(uint64_t *puDst, uint8_t cShift, uint32_t *pfEFlags))
+{
+    AssertFailed();
+}
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_ror_u64,(uint64_t *puDst, uint8_t cShift, uint32_t *pfEFlags))
+{
+    AssertFailed();
+}
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_rcl_u64,(uint64_t *puDst, uint8_t cShift, uint32_t *pfEFlags))
+{
+    AssertFailed();
+}
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_rcr_u64,(uint64_t *puDst, uint8_t cShift, uint32_t *pfEFlags))
+{
+    AssertFailed();
+}
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_shl_u64,(uint64_t *puDst, uint8_t cShift, uint32_t *pfEFlags))
+{
+    AssertFailed();
+}
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_shr_u64,(uint64_t *puDst, uint8_t cShift, uint32_t *pfEFlags))
+{
+    AssertFailed();
+}
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_sar_u64,(uint64_t *puDst, uint8_t cShift, uint32_t *pfEFlags))
+{
+    AssertFailed();
+}
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_shld_u64,(uint64_t *puDst, uint64_t uSrc, uint8_t cShift, uint32_t *pfEFlags))
+{
+    AssertFailed();
+}
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_shrd_u64,(uint64_t *puDst, uint64_t uSrc, uint8_t cShift, uint32_t *pfEFlags))
+{
+    AssertFailed();
+}
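The shift and rotate bodies are all placeholders so far. As a hint of the flag handling they will need, a hypothetical sketch of ROL, assuming the X86_EFL_* constants this file already uses (demoRol64 is illustrative, not VirtualBox code):

static void demoRol64(uint64_t *puDst, uint8_t cShift, uint32_t *pfEFlags)
{
    cShift &= 63;                                  /* the CPU masks the count in 64-bit mode */
    if (cShift)
    {
        uint64_t const uResult = (*puDst << cShift) | (*puDst >> (64 - cShift));
        uint32_t const fCarry  = (uint32_t)(uResult & 1);  /* bit rotated into position 0 */
        uint32_t       fEfl    = *pfEFlags & ~(X86_EFL_CF | X86_EFL_OF);
        *puDst = uResult;
        if (fCarry)
            fEfl |= X86_EFL_CF;
        if (cShift == 1 && fCarry != (uint32_t)(uResult >> 63))
            fEfl |= X86_EFL_OF;                    /* OF is only defined for 1-bit rotates */
        *pfEFlags = fEfl;
    }
}

The diff continues with multiplication and division.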
+
+
+/* multiplication and division */
+
+IEM_DECL_IMPL_DEF(int, iemAImpl_mul_u64,(uint64_t *pu64RAX, uint64_t *pu64RDX, uint64_t u64Factor, uint32_t *pfEFlags))
 {
     AssertFailed();
     return -1;
 }
 
 
-IEM_DECL_IMPL_DEF(int, iemAImpl_imul_u64,(uint64_t *pu64RAX, uint64_t *pu64RDX, uint64_t u64Factor, uint32_t *pEFlags))
+IEM_DECL_IMPL_DEF(int, iemAImpl_imul_u64,(uint64_t *pu64RAX, uint64_t *pu64RDX, uint64_t u64Factor, uint32_t *pfEFlags))
 {
     AssertFailed();
     return -1;
 }
 
 
+IEM_DECL_IMPL_DEF(void, iemAImpl_imul_two_u64,(uint64_t *puDst, uint64_t uSrc, uint32_t *pfEFlags))
+{
+    AssertFailed();
+}
+
+
-IEM_DECL_IMPL_DEF(int, iemAImpl_div_u64,(uint64_t *pu64RAX, uint64_t *pu64RDX, uint64_t u64Divisor, uint32_t *pEFlags))
+IEM_DECL_IMPL_DEF(int, iemAImpl_div_u64,(uint64_t *pu64RAX, uint64_t *pu64RDX, uint64_t u64Divisor, uint32_t *pfEFlags))
 {
     AssertFailed();
     return -1;
 }
 
 
-IEM_DECL_IMPL_DEF(int, iemAImpl_idiv_u64,(uint64_t *pu64RAX, uint64_t *pu64RDX, uint64_t u64Divisor, uint32_t *pEFlags))
+IEM_DECL_IMPL_DEF(int, iemAImpl_idiv_u64,(uint64_t *pu64RAX, uint64_t *pu64RDX, uint64_t u64Divisor, uint32_t *pfEFlags))
 {
     AssertFailed();
     return -1;
 }
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u64,(uint64_t *puMem, uint64_t *puReg))
+{
+    /* XCHG implies LOCK. */
+    uint64_t uOldMem = *puMem;
+    while (!ASMAtomicCmpXchgExU64(puMem, *puReg, uOldMem, &uOldMem))
+        ASMNopPause();
+    *puReg = uOldMem;
+}
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_xadd_u64,(uint64_t *puDst, uint64_t *puReg, uint32_t *pfEFlags))
+{
+    AssertFailed();
+}
+
+
+IEM_DECL_IMPL_DEF(void, iemAImpl_xadd_u64_locked,(uint64_t *puDst, uint64_t *puReg, uint32_t *pfEFlags))
+{
+    uint64_t uOld = ASMAtomicReadU64(puDst);
+    uint64_t uTmpDst;
+    uint64_t uTmpReg;
+    uint32_t fEflTmp;
+    do
+    {
+        uTmpDst = uOld;
+        uTmpReg = *puReg;
+        fEflTmp = *pfEFlags;
+        iemAImpl_xadd_u64(&uTmpDst, &uTmpReg, &fEflTmp);
+    } while (!ASMAtomicCmpXchgExU64(puDst, uTmpDst, uOld, &uOld));
+    *puReg    = uTmpReg;
+    *pfEFlags = fEflTmp;
+}
+
 
 #endif /* RT_ARCH_X86 */
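Note that iemAImpl_xadd_u64 itself is still a placeholder while the locked wrapper above it is already real. XADD must leave the sum in the destination and the old destination value in the register operand; a minimal sketch of those semantics, flags left aside (demoXadd64 is illustrative):

#include <stdint.h>

/* XADD semantics: tmp = *puDst; *puDst += *puReg; *puReg = tmp.
   EFLAGS would be updated exactly as for ADD. */
static void demoXadd64(uint64_t *puDst, uint64_t *puReg)
{
    uint64_t const uTmp = *puDst;
    *puDst = uTmp + *puReg;
    *puReg = uTmp;
}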