Changeset 36170 in vbox for trunk/src/recompiler/tcg/tcg.c
- Timestamp: Mar 4, 2011 12:49:02 PM
- File: trunk/src/recompiler/tcg/tcg.c (1 edited)
Legend:
- Unmodified lines carry no prefix
- Added lines are prefixed with "+"
- Removed lines are prefixed with "-"
trunk/src/recompiler/tcg/tcg.c
--- trunk/src/recompiler/tcg/tcg.c (r36140)
+++ trunk/src/recompiler/tcg/tcg.c (r36170)

 #include <malloc.h>
 #endif
+#ifdef _AIX
+#include <alloca.h>
+#endif
 
 #include "config.h"
 #include "qemu-common.h"
+#include "cache-utils.h"
 
 /* Note: the long term plan is to reduce the dependancies on the QEMU
…
                         tcg_target_long value, tcg_target_long addend);
 
-TCGOpDef tcg_op_defs[] = {
+static TCGOpDef tcg_op_defs[] = {
 #define DEF(s, n, copy_size) { #s, 0, 0, n, n, 0, copy_size },
 #ifndef VBOX
…
 };
 
-TCGRegSet tcg_target_available_regs[2];
-TCGRegSet tcg_target_call_clobber_regs;
+static TCGRegSet tcg_target_available_regs[2];
+static TCGRegSet tcg_target_call_clobber_regs;
 
 /* XXX: move that inside the context */
…
 }
 
-TCGv tcg_global_reg_new(TCGType type, int reg, const char *name)
+static inline int tcg_global_reg_new_internal(TCGType type, int reg,
+                                              const char *name)
 {
     TCGContext *s = &tcg_ctx;
…
     s->nb_globals++;
     tcg_regset_set_reg(s->reserved_regs, reg);
-    return MAKE_TCGV(idx);
-}
-
-#if TCG_TARGET_REG_BITS == 32
-/* temporary hack to avoid register shortage for tcg_qemu_st64() */
-TCGv tcg_global_reg2_new_hack(TCGType type, int reg1, int reg2,
-                              const char *name)
-{
-    TCGContext *s = &tcg_ctx;
-    TCGTemp *ts;
-    int idx;
-    char buf[64];
-
-    if (type != TCG_TYPE_I64)
-        tcg_abort();
-    idx = s->nb_globals;
-    tcg_temp_alloc(s, s->nb_globals + 2);
-    ts = &s->temps[s->nb_globals];
-    ts->base_type = type;
-    ts->type = TCG_TYPE_I32;
-    ts->fixed_reg = 1;
-    ts->reg = reg1;
-    pstrcpy(buf, sizeof(buf), name);
-    pstrcat(buf, sizeof(buf), "_0");
-    ts->name = strdup(buf);
-
-    ts++;
-    ts->base_type = type;
-    ts->type = TCG_TYPE_I32;
-    ts->fixed_reg = 1;
-    ts->reg = reg2;
-    pstrcpy(buf, sizeof(buf), name);
-    pstrcat(buf, sizeof(buf), "_1");
-    ts->name = strdup(buf);
-
-    s->nb_globals += 2;
-    return MAKE_TCGV(idx);
-}
-#endif
-
-TCGv tcg_global_mem_new(TCGType type, int reg, tcg_target_long offset,
-                        const char *name)
+    return idx;
+}
+
+TCGv_i32 tcg_global_reg_new_i32(int reg, const char *name)
+{
+    int idx;
+
+    idx = tcg_global_reg_new_internal(TCG_TYPE_I32, reg, name);
+    return MAKE_TCGV_I32(idx);
+}
+
+TCGv_i64 tcg_global_reg_new_i64(int reg, const char *name)
+{
+    int idx;
+
+    idx = tcg_global_reg_new_internal(TCG_TYPE_I64, reg, name);
+    return MAKE_TCGV_I64(idx);
+}
+
+static inline int tcg_global_mem_new_internal(TCGType type, int reg,
+                                              tcg_target_long offset,
+                                              const char *name)
 {
     TCGContext *s = &tcg_ctx;
…
         s->nb_globals++;
     }
-    return MAKE_TCGV(idx);
-}
-
-TCGv tcg_temp_new_internal(TCGType type, int temp_local)
+    return idx;
+}
+
+TCGv_i32 tcg_global_mem_new_i32(int reg, tcg_target_long offset,
+                                const char *name)
+{
+    int idx;
+
+    idx = tcg_global_mem_new_internal(TCG_TYPE_I32, reg, offset, name);
+    return MAKE_TCGV_I32(idx);
+}
+
+TCGv_i64 tcg_global_mem_new_i64(int reg, tcg_target_long offset,
+                                const char *name)
+{
+    int idx;
+
+    idx = tcg_global_mem_new_internal(TCG_TYPE_I64, reg, offset, name);
+    return MAKE_TCGV_I64(idx);
+}
+
+static inline int tcg_temp_new_internal(TCGType type, int temp_local)
 {
     TCGContext *s = &tcg_ctx;
…
         }
     }
-    return MAKE_TCGV(idx);
-}
-
-void tcg_temp_free(TCGv arg)
+    return idx;
+}
+
+TCGv_i32 tcg_temp_new_internal_i32(int temp_local)
+{
+    int idx;
+
+    idx = tcg_temp_new_internal(TCG_TYPE_I32, temp_local);
+    return MAKE_TCGV_I32(idx);
+}
+
+TCGv_i64 tcg_temp_new_internal_i64(int temp_local)
+{
+    int idx;
+
+    idx = tcg_temp_new_internal(TCG_TYPE_I64, temp_local);
+    return MAKE_TCGV_I64(idx);
+}
+
+static inline void tcg_temp_free_internal(int idx)
 {
     TCGContext *s = &tcg_ctx;
     TCGTemp *ts;
-    int idx = GET_TCGV(arg);
    int k;
…
 }
 
-
-TCGv tcg_const_i32(int32_t val)
-{
-    TCGv t0;
-    t0 = tcg_temp_new(TCG_TYPE_I32);
+void tcg_temp_free_i32(TCGv_i32 arg)
+{
+    tcg_temp_free_internal(GET_TCGV_I32(arg));
+}
+
+void tcg_temp_free_i64(TCGv_i64 arg)
+{
+    tcg_temp_free_internal(GET_TCGV_I64(arg));
+}
+
+TCGv_i32 tcg_const_i32(int32_t val)
+{
+    TCGv_i32 t0;
+    t0 = tcg_temp_new_i32();
     tcg_gen_movi_i32(t0, val);
     return t0;
 }
 
-TCGv tcg_const_i64(int64_t val)
-{
-    TCGv t0;
-    t0 = tcg_temp_new(TCG_TYPE_I64);
+TCGv_i64 tcg_const_i64(int64_t val)
+{
+    TCGv_i64 t0;
+    t0 = tcg_temp_new_i64();
+    tcg_gen_movi_i64(t0, val);
+    return t0;
+}
+
+TCGv_i32 tcg_const_local_i32(int32_t val)
+{
+    TCGv_i32 t0;
+    t0 = tcg_temp_local_new_i32();
+    tcg_gen_movi_i32(t0, val);
+    return t0;
+}
+
+TCGv_i64 tcg_const_local_i64(int64_t val)
+{
+    TCGv_i64 t0;
+    t0 = tcg_temp_local_new_i64();
     tcg_gen_movi_i64(t0, val);
     return t0;
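The hunks above replace the old untyped TCGv allocation entry points with explicitly sized TCGv_i32/TCGv_i64 ones. As a rough sketch of what this buys (my example, not changeset text), frontend code written against the usual tcg.h/tcg-op.h wrappers around the new *_internal helpers now reads like this, and mixing the two widths without an explicit conversion no longer type-checks:

    TCGv_i32 lo   = tcg_temp_new_i32();     /* wraps tcg_temp_new_internal_i32(0) */
    TCGv_i64 wide = tcg_temp_new_i64();     /* wraps tcg_temp_new_internal_i64(0) */

    tcg_gen_movi_i32(lo, 0x1234);           /* i32 ops accept only TCGv_i32 */
    tcg_gen_extu_i32_i64(wide, lo);         /* widening is an explicit op, not a cast */

    tcg_temp_free_i32(lo);                  /* ends up in tcg_temp_free_internal() */
    tcg_temp_free_i64(wide);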
…
 }
 
-static inline TCGType tcg_get_base_type(TCGContext *s, TCGv arg)
-{
-    return s->temps[GET_TCGV(arg)].base_type;
-}
-
-static void tcg_gen_call_internal(TCGContext *s, TCGv func,
-                                  unsigned int flags,
-                                  unsigned int nb_rets, const TCGv *rets,
-                                  unsigned int nb_params, const TCGv *params)
-{
-#ifndef VBOX
-    int i;
-#else
-    unsigned int i;
-#endif
-    *gen_opc_ptr++ = INDEX_op_call;
-    *gen_opparam_ptr++ = (nb_rets << 16) | (nb_params + 1);
-    for(i = 0; i < nb_rets; i++) {
-        *gen_opparam_ptr++ = GET_TCGV(rets[i]);
-    }
-    for(i = 0; i < nb_params; i++) {
-        *gen_opparam_ptr++ = GET_TCGV(params[i]);
-    }
-    *gen_opparam_ptr++ = GET_TCGV(func);
-
-    *gen_opparam_ptr++ = flags;
-    /* total parameters, needed to go backward in the instruction stream */
-    *gen_opparam_ptr++ = 1 + nb_rets + nb_params + 3;
-}
-
-
-#if TCG_TARGET_REG_BITS < 64
 /* Note: we convert the 64 bit args to 32 bit and do some alignment
    and endian swap. Maybe it would be better to do the alignment
    and endian swap in tcg_reg_alloc_call(). */
-void tcg_gen_call(TCGContext *s, TCGv func, unsigned int flags,
-                  unsigned int nb_rets, const TCGv *rets,
-                  unsigned int nb_params, const TCGv *args1)
-{
-    TCGv ret, *args2, rets_2[2], arg;
-    int j, i, call_type;
-
-    if (nb_rets == 1) {
-        ret = rets[0];
-        if (tcg_get_base_type(s, ret) == TCG_TYPE_I64) {
-            nb_rets = 2;
-#ifdef TCG_TARGET_WORDS_BIGENDIAN
-            rets_2[0] = TCGV_HIGH(ret);
-            rets_2[1] = ret;
-#else
-            rets_2[0] = ret;
-            rets_2[1] = TCGV_HIGH(ret);
-#endif
-            rets = rets_2;
-        }
-    }
-    args2 = alloca((nb_params * 3) * sizeof(TCGv));
-    j = 0;
-    call_type = (flags & TCG_CALL_TYPE_MASK);
-    for(i = 0; i < nb_params; i++) {
-        arg = args1[i];
-        if (tcg_get_base_type(s, arg) == TCG_TYPE_I64) {
-#ifdef TCG_TARGET_I386
-            /* REGPARM case: if the third parameter is 64 bit, it is
-               allocated on the stack */
-            if (j == 2 && call_type == TCG_CALL_TYPE_REGPARM) {
-                call_type = TCG_CALL_TYPE_REGPARM_2;
-                flags = (flags & ~TCG_CALL_TYPE_MASK) | call_type;
-            }
-            args2[j++] = arg;
-            args2[j++] = TCGV_HIGH(arg);
-#else
-#ifdef TCG_TARGET_CALL_ALIGN_ARGS
-            /* some targets want aligned 64 bit args */
-            if (j & 1) {
-                args2[j++] = TCG_CALL_DUMMY_ARG;
-            }
-#endif
-#ifdef TCG_TARGET_WORDS_BIGENDIAN
-            args2[j++] = TCGV_HIGH(arg);
-            args2[j++] = arg;
-#else
-            args2[j++] = arg;
-            args2[j++] = TCGV_HIGH(arg);
-#endif
-#endif
-        } else {
-            args2[j++] = arg;
-        }
-    }
-    tcg_gen_call_internal(s, func, flags,
-                          nb_rets, rets, j, args2);
-}
-#else
-void tcg_gen_call(TCGContext *s, TCGv func, unsigned int flags,
-                  unsigned int nb_rets, const TCGv *rets,
-                  unsigned int nb_params, const TCGv *args1)
-{
-    tcg_gen_call_internal(s, func, flags,
-                          nb_rets, rets, nb_params, args1);
-}
-#endif
+void tcg_gen_callN(TCGContext *s, TCGv_ptr func, unsigned int flags,
+                   int sizemask, TCGArg ret, int nargs, TCGArg *args)
+{
+    int call_type;
+    int i;
+    int real_args;
+    int nb_rets;
+    TCGArg *nparam;
+    *gen_opc_ptr++ = INDEX_op_call;
+    nparam = gen_opparam_ptr++;
+    call_type = (flags & TCG_CALL_TYPE_MASK);
+    if (ret != TCG_CALL_DUMMY_ARG) {
+#if TCG_TARGET_REG_BITS < 64
+        if (sizemask & 1) {
+#ifdef TCG_TARGET_WORDS_BIGENDIAN
+            *gen_opparam_ptr++ = ret + 1;
+            *gen_opparam_ptr++ = ret;
+#else
+            *gen_opparam_ptr++ = ret;
+            *gen_opparam_ptr++ = ret + 1;
+#endif
+            nb_rets = 2;
+        } else
+#endif
+        {
+            *gen_opparam_ptr++ = ret;
+            nb_rets = 1;
+        }
+    } else {
+        nb_rets = 0;
+    }
+    real_args = 0;
+    for (i = 0; i < nargs; i++) {
+#if TCG_TARGET_REG_BITS < 64
+        if (sizemask & (2 << i)) {
+#ifdef TCG_TARGET_I386
+            /* REGPARM case: if the third parameter is 64 bit, it is
+               allocated on the stack */
+            if (i == 2 && call_type == TCG_CALL_TYPE_REGPARM) {
+                call_type = TCG_CALL_TYPE_REGPARM_2;
+                flags = (flags & ~TCG_CALL_TYPE_MASK) | call_type;
+            }
+#endif
+#ifdef TCG_TARGET_CALL_ALIGN_ARGS
+            /* some targets want aligned 64 bit args */
+            if (real_args & 1) {
+                *gen_opparam_ptr++ = TCG_CALL_DUMMY_ARG;
+                real_args++;
+            }
+#endif
+#ifdef TCG_TARGET_WORDS_BIGENDIAN
+            *gen_opparam_ptr++ = args[i] + 1;
+            *gen_opparam_ptr++ = args[i];
+#else
+            *gen_opparam_ptr++ = args[i];
+            *gen_opparam_ptr++ = args[i] + 1;
+#endif
+            real_args += 2;
+        } else
+#endif
+        {
+            *gen_opparam_ptr++ = args[i];
+            real_args++;
+        }
+    }
+    *gen_opparam_ptr++ = GET_TCGV_PTR(func);
+
+    *gen_opparam_ptr++ = flags;
+
+    *nparam = (nb_rets << 16) | (real_args + 1);
+
+    /* total parameters, needed to go backward in the instruction stream */
+    *gen_opparam_ptr++ = 1 + nb_rets + real_args + 3;
+}
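For orientation (my sketch, not part of the diff): tcg_gen_callN() above derives all 32-bit-host splitting from its sizemask parameter — the tests "sizemask & 1" and "sizemask & (2 << i)" mean that bit 0 marks a 64-bit return value and bit (i + 1) marks a 64-bit argument i. A caller emitting a call to a helper that returns an i64 and takes an (i64, i32) pair would therefore pass something like:

    int sizemask = 1            /* bit 0: 64-bit return value           */
                 | (2 << 0);    /* bit 1: args[0] is 64 bits wide       */
                                /* args[1] is 32 bits wide, no bit set  */
    tcg_gen_callN(s, func, flags, sizemask, ret, 2, args);

On a 64-bit host the TCG_TARGET_REG_BITS < 64 blocks compile out and the mask is not consulted.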
 
 #if TCG_TARGET_REG_BITS == 32
-void tcg_gen_shifti_i64(TCGv ret, TCGv arg1,
+void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                         int c, int right, int arith)
 {
     if (c == 0) {
-        tcg_gen_mov_i32(ret, arg1);
+        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
         tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
     } else if (c >= 32) {
…
         if (right) {
             if (arith) {
-                tcg_gen_sari_i32(ret, TCGV_HIGH(arg1), c);
+                tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
             } else {
-                tcg_gen_shri_i32(ret, TCGV_HIGH(arg1), c);
+                tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
             }
         } else {
-            tcg_gen_shli_i32(TCGV_HIGH(ret), arg1, c);
-            tcg_gen_movi_i32(ret, 0);
+            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
+            tcg_gen_movi_i32(TCGV_LOW(ret), 0);
         }
     } else {
-        TCGv t0, t1;
-
-        t0 = tcg_temp_new(TCG_TYPE_I32);
-        t1 = tcg_temp_new(TCG_TYPE_I32);
+        TCGv_i32 t0, t1;
+
+        t0 = tcg_temp_new_i32();
+        t1 = tcg_temp_new_i32();
         if (right) {
             tcg_gen_shli_i32(t0, TCGV_HIGH(arg1), 32 - c);
…
             else
                 tcg_gen_shri_i32(t1, TCGV_HIGH(arg1), c);
-            tcg_gen_shri_i32(ret, arg1, c);
-            tcg_gen_or_i32(ret, ret, t0);
+            tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
+            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t0);
             tcg_gen_mov_i32(TCGV_HIGH(ret), t1);
         } else {
-            tcg_gen_shri_i32(t0, arg1, 32 - c);
+            tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
             /* Note: ret can be the same as arg1, so we use t1 */
-            tcg_gen_shli_i32(t1, arg1, c);
+            tcg_gen_shli_i32(t1, TCGV_LOW(arg1), c);
             tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
             tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t0);
-            tcg_gen_mov_i32(ret, t1);
+            tcg_gen_mov_i32(TCGV_LOW(ret), t1);
         }
-        tcg_temp_free(t0);
-        tcg_temp_free(t1);
+        tcg_temp_free_i32(t0);
+        tcg_temp_free_i32(t1);
     }
 }
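A quick worked case (mine, not from the changeset) for the c >= 32, logical-right branch above, assuming the elided context has already reduced c by 32: the low input half is discarded, the high half supplies the new low half, and the new high half is zero — e.g. a right shift by 40 leaves high(arg1) >> 8 in TCGV_LOW(ret) and 0 in TCGV_HIGH(ret). Modelled on the host side:

    #include <stdint.h>

    /* host-side model of what the tcg_gen_shri_i32/tcg_gen_movi_i32 pair emits
       for an original shift count of 32 <= c < 64 */
    static uint64_t shr64_by_32_or_more(uint32_t lo, uint32_t hi, int c)
    {
        (void)lo;                          /* the low input half is dropped        */
        return (uint64_t)(hi >> (c - 32)); /* high half shifted into the low half,
                                              new high half is zero                */
    }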
…
 }
 
-char *tcg_get_arg_str(TCGContext *s, char *buf, int buf_size, TCGv arg)
-{
-    return tcg_get_arg_str_idx(s, buf, buf_size, GET_TCGV(arg));
+char *tcg_get_arg_str_i32(TCGContext *s, char *buf, int buf_size, TCGv_i32 arg)
+{
+    return tcg_get_arg_str_idx(s, buf, buf_size, GET_TCGV_I32(arg));
+}
+
+char *tcg_get_arg_str_i64(TCGContext *s, char *buf, int buf_size, TCGv_i64 arg)
+{
+    return tcg_get_arg_str_idx(s, buf, buf_size, GET_TCGV_I64(arg));
 }
…
                 th = tcg_find_helper(s, val);
                 if (th) {
-                    fprintf(outfile, th->name);
+                    fprintf(outfile, "%s", th->name);
                 } else {
                     if (c == INDEX_op_movi_i32)
…
         /* XXX: optimize by hardcoding common cases (e.g. triadic ops) */
         default:
-            if (op > INDEX_op_end) {
-                args -= def->nb_args;
-                nb_iargs = def->nb_iargs;
-                nb_oargs = def->nb_oargs;
-
-                /* Test if the operation can be removed because all
-                   its outputs are dead. We assume that nb_oargs == 0
-                   implies side effects */
-                if (!(def->flags & TCG_OPF_SIDE_EFFECTS) && nb_oargs != 0) {
-                    for(i = 0; i < nb_oargs; i++) {
-                        arg = args[i];
-                        if (!dead_temps[arg])
-                            goto do_not_remove;
-                    }
-                    tcg_set_nop(s, gen_opc_buf + op_index, args, def->nb_args);
-#ifdef CONFIG_PROFILER
-                    s->del_op_count++;
-#endif
-                } else {
-                do_not_remove:
-
-                    /* output args are dead */
-                    for(i = 0; i < nb_oargs; i++) {
-                        arg = args[i];
-                        dead_temps[arg] = 1;
-                    }
-
-                    /* if end of basic block, update */
-                    if (def->flags & TCG_OPF_BB_END) {
-                        tcg_la_bb_end(s, dead_temps);
-                    } else if (def->flags & TCG_OPF_CALL_CLOBBER) {
-                        /* globals are live */
-                        memset(dead_temps, 0, s->nb_globals);
-                    }
-
-                    /* input args are live */
-                    dead_iargs = 0;
-                    for(i = 0; i < nb_iargs; i++) {
-                        arg = args[i + nb_oargs];
-                        if (dead_temps[arg]) {
-                            dead_iargs |= (1 << i);
-                        }
-                        dead_temps[arg] = 0;
-                    }
-                    s->op_dead_iargs[op_index] = dead_iargs;
-                }
-            } else {
-                /* legacy dyngen operations */
-                args -= def->nb_args;
-                /* mark end of basic block */
-                tcg_la_bb_end(s, dead_temps);
-            }
+            args -= def->nb_args;
+            nb_iargs = def->nb_iargs;
+            nb_oargs = def->nb_oargs;
+
+            /* Test if the operation can be removed because all
+               its outputs are dead. We assume that nb_oargs == 0
+               implies side effects */
+            if (!(def->flags & TCG_OPF_SIDE_EFFECTS) && nb_oargs != 0) {
+                for(i = 0; i < nb_oargs; i++) {
+                    arg = args[i];
+                    if (!dead_temps[arg])
+                        goto do_not_remove;
+                }
+                tcg_set_nop(s, gen_opc_buf + op_index, args, def->nb_args);
+#ifdef CONFIG_PROFILER
+                s->del_op_count++;
+#endif
+            } else {
+            do_not_remove:
+
+                /* output args are dead */
+                for(i = 0; i < nb_oargs; i++) {
+                    arg = args[i];
+                    dead_temps[arg] = 1;
+                }
+
+                /* if end of basic block, update */
+                if (def->flags & TCG_OPF_BB_END) {
+                    tcg_la_bb_end(s, dead_temps);
+                } else if (def->flags & TCG_OPF_CALL_CLOBBER) {
+                    /* globals are live */
+                    memset(dead_temps, 0, s->nb_globals);
+                }
+
+                /* input args are live */
+                dead_iargs = 0;
+                for(i = 0; i < nb_iargs; i++) {
+                    arg = args[i + nb_oargs];
+                    if (dead_temps[arg]) {
+                        dead_iargs |= (1 << i);
+                    }
+                    dead_temps[arg] = 0;
+                }
+                s->op_dead_iargs[op_index] = dead_iargs;
+            }
             break;
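A note in passing (my sketch, not changeset text): op_dead_iargs[], filled in by the loop above, keeps one bit per input operand of each op, bit i being set when input i is never read by any later op. The register allocator later consumes it roughly as follows, which is also why the pass resets dead_temps[arg] to 0 — walking backwards, the argument is live from this op upwards:

    uint16_t dead_iargs = s->op_dead_iargs[op_index];

    if (dead_iargs & (1 << 0)) {
        /* the first input operand dies at this op, so the register holding it
           can be reclaimed (and possibly reused for an output of the same op) */
    }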
…
 #ifdef CONFIG_PROFILER
 
-static int64_t dyngen_table_op_count[NB_OPS];
+static int64_t tcg_table_op_count[NB_OPS];
 
 void dump_op_count(void)
…
     int i;
     FILE *f;
-    f = fopen("/tmp/op1.log", "w");
-    for(i = 0; i < INDEX_op_end; i++) {
-        fprintf(f, "%s %" PRId64 "\n", tcg_op_defs[i].name, dyngen_table_op_count[i]);
-    }
-    fclose(f);
-    f = fopen("/tmp/op2.log", "w");
+    f = fopen("/tmp/op.log", "w");
     for(i = INDEX_op_end; i < NB_OPS; i++) {
-        fprintf(f, "%s %" PRId64 "\n", tcg_op_defs[i].name, dyngen_table_op_count[i]);
+        fprintf(f, "%s %" PRId64 "\n", tcg_op_defs[i].name, tcg_table_op_count[i]);
     }
     fclose(f);
…
 
 #ifdef DEBUG_DISAS
-    if (unlikely(loglevel & CPU_LOG_TB_OP)) {
-        fprintf(logfile, "OP:\n");
+    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP))) {
+        qemu_log("OP:\n");
         tcg_dump_ops(s, logfile);
-        fprintf(logfile, "\n");
+        qemu_log("\n");
     }
 #endif
…
 
 #ifdef DEBUG_DISAS
-    if (unlikely(loglevel & CPU_LOG_TB_OP_OPT)) {
-        fprintf(logfile, "OP after la:\n");
+    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP_OPT))) {
+        qemu_log("OP after la:\n");
         tcg_dump_ops(s, logfile);
-        fprintf(logfile, "\n");
+        qemu_log("\n");
     }
 #endif
…
         opc = gen_opc_buf[op_index];
 #ifdef CONFIG_PROFILER
-        dyngen_table_op_count[opc]++;
+        tcg_table_op_count[opc]++;
 #endif
         def = &tcg_op_defs[opc];
…
         case INDEX_op_end:
             goto the_end;
-
-#ifdef CONFIG_DYNGEN_OP
-        case 0 ... INDEX_op_end - 1:
-            /* legacy dyngen ops */
-#ifdef CONFIG_PROFILER
-            s->old_op_count++;
-#endif
-            tcg_reg_alloc_bb_end(s, s->reserved_regs);
-            if (search_pc >= 0) {
-                s->code_ptr += def->copy_size;
-                args += def->nb_args;
-            } else {
-                args = dyngen_op(s, opc, args);
-            }
-            goto next;
-#endif
         default:
             /* Note: in order to speed up the code, it would be much
…
 }
 
-int dyngen_code(TCGContext *s, uint8_t *gen_code_buf)
+int tcg_gen_code(TCGContext *s, uint8_t *gen_code_buf)
 {
 #ifdef CONFIG_PROFILER
…
    not be changed, though writing the same values is ok.
    Return -1 if not found. */
-int dyngen_code_search_pc(TCGContext *s, uint8_t *gen_code_buf, long offset)
+int tcg_gen_code_search_pc(TCGContext *s, uint8_t *gen_code_buf, long offset)
 {
     return tcg_gen_code_common(s, gen_code_buf, offset);
…
     cpu_fprintf(f, "avg ops/TB %0.1f max=%d\n",
                 s->tb_count ? (double)s->op_count / s->tb_count : 0, s->op_count_max);
-    cpu_fprintf(f, "old ops/total ops %0.1f%%\n",
-                s->op_count ? (double)s->old_op_count / s->op_count * 100.0 : 0);
     cpu_fprintf(f, "deleted ops/TB %0.2f\n",
                 s->tb_count ?