Cross-reference matches for n_basic_blocks under /dflybsd-src/contrib/gcc-4.7/gcc/:
lcm.c
    109  qin = qout = worklist = XNEWVEC (basic_block, n_basic_blocks);  in compute_antinout_edge()
    124  qend = &worklist[n_basic_blocks - NUM_FIXED_BLOCKS];  in compute_antinout_edge()
    125  qlen = n_basic_blocks - NUM_FIXED_BLOCKS;  in compute_antinout_edge()
    262  = XNEWVEC (basic_block, n_basic_blocks);  in compute_laterin()
    298  qend = &worklist[n_basic_blocks - NUM_FIXED_BLOCKS];  in compute_laterin()
    299  qlen = n_basic_blocks - NUM_FIXED_BLOCKS;  in compute_laterin()
    489  XNEWVEC (basic_block, n_basic_blocks - NUM_FIXED_BLOCKS);  in compute_available()
    503  qend = &worklist[n_basic_blocks - NUM_FIXED_BLOCKS];  in compute_available()
    504  qlen = n_basic_blocks - NUM_FIXED_BLOCKS;  in compute_available()
    618  tos = worklist = XNEWVEC (basic_block, n_basic_blocks + 1);  in compute_nearerout()
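The recurring pattern in lcm.c is a worklist allocated with room for every block but treated as a circular queue of at most n_basic_blocks - NUM_FIXED_BLOCKS entries, since the entry and exit blocks are never enqueued. The following is a minimal standalone sketch of that idiom, not GCC code: the block ids, counts, and queue body are invented for illustration; only the sizing arithmetic mirrors the snippets above.

#include <stdio.h>
#include <stdlib.h>

/* Illustration only: the "fixed" blocks (entry and exit) are never queued.  */
#define NUM_FIXED_BLOCKS 2

int
main (void)
{
  int n_basic_blocks = 7;                 /* entry + exit + 5 real blocks  */

  /* Allocate room for every block, as lcm.c line 109 does with XNEWVEC,
     but only ever keep n_basic_blocks - NUM_FIXED_BLOCKS entries queued.  */
  int *worklist = malloc (n_basic_blocks * sizeof *worklist);
  int *qin = worklist, *qout = worklist;
  int *qend = &worklist[n_basic_blocks - NUM_FIXED_BLOCKS];
  int qlen = 0;

  /* Seed the queue with the "real" block ids 2 .. n_basic_blocks - 1.  */
  for (int bb = NUM_FIXED_BLOCKS; bb < n_basic_blocks; bb++)
    {
      *qin++ = bb;
      qlen++;
      if (qin >= qend)
        qin = worklist;                   /* wrap the circular queue       */
    }

  /* Drain it; a real dataflow solver would re-enqueue blocks whose
     in/out sets changed.  */
  while (qlen > 0)
    {
      int bb = *qout++;
      qlen--;
      if (qout >= qend)
        qout = worklist;
      printf ("visit block %d\n", bb);
    }

  free (worklist);
  return 0;
}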
cfganal.c
    178  stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);  in mark_dfs_back_edges()
    289  tos = worklist = XNEWVEC (basic_block, n_basic_blocks);  in find_unreachable_blocks()
    352  block_count = n_basic_blocks;  /* Include the entry and exit blocks. */  in create_edge_list()
    671  stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);  in post_order_compute()
    729  if (delete_unreachable && (count != n_basic_blocks))  in post_order_compute()
    824  stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);  in inverted_post_order_compute()
    958  int rev_post_order_num = n_basic_blocks - 1;  in pre_and_rev_post_order_compute()
    962  stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);  in pre_and_rev_post_order_compute()
   1042  gcc_assert (pre_order_num == n_basic_blocks);  in pre_and_rev_post_order_compute()
   1047  gcc_assert (pre_order_num == n_basic_blocks - NUM_FIXED_BLOCKS);  in pre_and_rev_post_order_compute()
    [all …]
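In pre_and_rev_post_order_compute, rev_post_order_num counts down from n_basic_blocks - 1, and the asserts at 1042/1047 check that the walk visited n_basic_blocks blocks or n_basic_blocks - NUM_FIXED_BLOCKS of them, presumably depending on whether the entry/exit blocks were included. GCC's version is iterative, with an explicit stack of edge iterators sized n_basic_blocks + 1; the recursive sketch below shows only the numbering scheme on a toy adjacency-list CFG whose shape and block count are invented for illustration.

#include <stdio.h>

#define N_BLOCKS 6                /* toy CFG: 0 = entry, 5 = exit          */

/* Successor lists, -1 terminated.  */
static const int succs[N_BLOCKS][3] = {
  {1, -1, -1},                    /* entry -> 1                            */
  {2, 3, -1},                     /* 1 -> 2, 3                             */
  {4, -1, -1},                    /* 2 -> 4                                */
  {4, -1, -1},                    /* 3 -> 4                                */
  {5, -1, -1},                    /* 4 -> exit                             */
  {-1, -1, -1}
};

static int visited[N_BLOCKS];
static int pre_order[N_BLOCKS], rev_post_order[N_BLOCKS];
static int pre_order_num = 0;
static int rev_post_order_num = N_BLOCKS - 1;   /* counts down, as above   */

static void
dfs (int bb)
{
  visited[bb] = 1;
  pre_order[pre_order_num++] = bb;              /* preorder: number on entry  */
  for (int i = 0; succs[bb][i] != -1; i++)
    if (!visited[succs[bb][i]])
      dfs (succs[bb][i]);
  rev_post_order[rev_post_order_num--] = bb;    /* reverse postorder: on exit */
}

int
main (void)
{
  dfs (0);
  printf ("visited %d of %d blocks\n", pre_order_num, N_BLOCKS);
  for (int i = 0; i < N_BLOCKS; i++)
    printf ("rev_post_order[%d] = %d\n", i, rev_post_order[i]);
  return 0;
}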
tree-ssa-loop-ch.c
    153  bbs = XNEWVEC (basic_block, n_basic_blocks);  in copy_loop_headers()
    154  copied_bbs = XNEWVEC (basic_block, n_basic_blocks);  in copy_loop_headers()
    155  bbs_size = n_basic_blocks;  in copy_loop_headers()
cprop.c
   1726  if (n_edges > 20000 + n_basic_blocks * 4)  in is_too_expensive()
   1730  pass, n_basic_blocks, n_edges / n_basic_blocks);  in is_too_expensive()
   1737  if ((n_basic_blocks  in is_too_expensive()
   1743  pass, n_basic_blocks, max_reg_num ());  in is_too_expensive()
   1760  if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1  in one_cprop_pass()
   1870  current_function_name (), n_basic_blocks, bytes_used);  in one_cprop_pass()
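The test at line 1726 is a density heuristic: a CFG normally has on the order of twice as many edges as blocks, so a function whose edge count exceeds 20000 + 4 * n_basic_blocks is treated as too densely connected for global copy propagation to pay off (the same function, with the same thresholds, appears in gcse.c below; the second test at 1737-1743 appears to cap the memory that per-block register bitmaps would need, judging by the max_reg_num () reference). A standalone sketch of the first test only, with the constants taken from the snippet and everything else invented:

#include <stdio.h>
#include <stdbool.h>

/* Sketch of the density check in is_too_expensive (): anything well beyond
   the usual ~2 edges per block is rejected; the 20000 slack term keeps
   small, switch-heavy functions from being punished.  */
static bool
cfg_too_dense (int n_basic_blocks, int n_edges)
{
  if (n_edges > 20000 + n_basic_blocks * 4)
    {
      fprintf (stderr,
               "disabled: %d basic blocks and %d edges/basic block\n",
               n_basic_blocks, n_edges / n_basic_blocks);
      return true;
    }
  return false;
}

int
main (void)
{
  printf ("%d\n", cfg_too_dense (100, 250));      /* typical shape: kept         */
  printf ("%d\n", cfg_too_dense (100, 30000));    /* pathological shape: skipped */
  return 0;
}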
mcf.c
    481  int fnum_vertices_after_transform = 2 * n_basic_blocks;  in create_fixup_graph()
    482  int fnum_edges_after_transform = n_edges + n_basic_blocks;  in create_fixup_graph()
    486  fnum_vertices_after_transform + n_edges + n_basic_blocks + 2;  in create_fixup_graph()
    496  int fmax_num_edges = 8 * (n_basic_blocks + n_edges);  in create_fixup_graph()
    499  fixup_graph->num_vertices = n_basic_blocks;  in create_fixup_graph()
    518  sqrt_avg_vertex_weight = mcf_sqrt (total_vertex_weight / n_basic_blocks);  in create_fixup_graph()
cfgloop.c
    359  root->num_nodes = n_basic_blocks;  in init_loops_structure()
    390  if (n_basic_blocks == NUM_FIXED_BLOCKS)  in flow_loops_find()
    449  dfs_order = XNEWVEC (int, n_basic_blocks);  in flow_loops_find()
    450  rc_order = XNEWVEC (int, n_basic_blocks);  in flow_loops_find()
    455  for (b = 0; b < n_basic_blocks - NUM_FIXED_BLOCKS; b++)  in flow_loops_find()
    822  gcc_assert (loop->num_nodes == (unsigned) n_basic_blocks);  in get_loop_body()
tracer.c
    231  basic_block *trace = XNEWVEC (basic_block, n_basic_blocks);  in tail_duplicate()
    365  if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1)  in tracer()
cfgloopmanip.c
     78  *bbs = XCNEWVEC (basic_block, n_basic_blocks);  in find_path()
     80  n_basic_blocks, e->dest);  in find_path()
    329  bord_bbs = XCNEWVEC (basic_block, n_basic_blocks);  in remove_path()
    432  bbs = XNEWVEC (basic_block, n_basic_blocks);  in add_loop()
    433  n = get_loop_body_with_size (loop, bbs, n_basic_blocks);  in add_loop()
   1688  current_loops->tree_root->num_nodes = n_basic_blocks;  in fix_loop_structure()
gcse.c
   2629  if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1  in one_pre_gcse_pass()
   2675  current_function_name (), n_basic_blocks, bytes_used);  in one_pre_gcse_pass()
   3178  if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1  in one_code_hoisting_pass()
   3214  current_function_name (), n_basic_blocks, bytes_used);  in one_code_hoisting_pass()
   3647  if (n_edges > 20000 + n_basic_blocks * 4)  in is_too_expensive()
   3651  pass, n_basic_blocks, n_edges / n_basic_blocks);  in is_too_expensive()
   3658  if ((n_basic_blocks  in is_too_expensive()
   3664  pass, n_basic_blocks, max_reg_num ());  in is_too_expensive()
domwalk.c
    145  basic_block *worklist = XNEWVEC (basic_block, n_basic_blocks * 2);  in walk_dominator_tree()
cfg.c
    188  gcc_assert (i == n_basic_blocks);  in compact_blocks()
    193  last_basic_block = n_basic_blocks;  in compact_blocks()
    203  n_basic_blocks--;  in expunge_block()
    681  fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);  in dump_flow_info()
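cfg.c shows the bookkeeping side: n_basic_blocks is maintained incrementally (expunge_block decrements it here, while rtl_create_basic_block and create_bb further down increment it), and compact_blocks asserts that renumbering visited exactly n_basic_blocks blocks before resetting last_basic_block. A toy sketch of that invariant follows; everything except the counter names is invented for illustration and is not how GCC represents blocks.

#include <assert.h>
#include <stdio.h>
#include <string.h>

#define MAX_BLOCKS 8

/* Toy stand-ins: a block is just a live/dead flag at some index.  */
static int block_live[MAX_BLOCKS];
static int n_basic_blocks;        /* number of live blocks                 */
static int last_basic_block;      /* one past the highest index in use     */

static void
expunge_block (int bb)
{
  block_live[bb] = 0;
  n_basic_blocks--;               /* keep the count exact, as cfg.c does   */
}

/* Renumber live blocks densely and check the count, mirroring the
   gcc_assert (i == n_basic_blocks) in compact_blocks ().  */
static void
compact_blocks (void)
{
  int i = 0;
  for (int bb = 0; bb < last_basic_block; bb++)
    if (block_live[bb])
      block_live[i++] = 1;
  assert (i == n_basic_blocks);
  memset (&block_live[i], 0, (MAX_BLOCKS - i) * sizeof block_live[0]);
  last_basic_block = n_basic_blocks;
}

int
main (void)
{
  for (int bb = 0; bb < 5; bb++)
    block_live[bb] = 1;
  n_basic_blocks = last_basic_block = 5;

  expunge_block (2);
  compact_blocks ();
  printf ("%d blocks after compaction\n", n_basic_blocks);
  return 0;
}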
graphite.c
    196  || n_basic_blocks > PARAM_VALUE (PARAM_GRAPHITE_MAX_BBS_PER_FUNCTION))  in graphite_initialize()
df-core.c
   1046  n_basic_blocks, n_edges,  in df_worklist_dataflow_doublequeue()
   1047  dcount, dcount / (float)n_basic_blocks);  in df_worklist_dataflow_doublequeue()
   1556  gcc_assert (i == n_basic_blocks);  in df_compact_blocks()
   1664  int size = 2 + (2 * n_basic_blocks);  in df_compute_cfg_image()
dominance.c
    151  unsigned int num = n_basic_blocks;  in init_dom_info()
    238  stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);  in calc_dfs_tree_nonrec()
    395  gcc_assert (di->nodes == (unsigned int) n_basic_blocks - 1);  in calc_dfs_tree()
    653  n_bbs_in_dom_tree[dir_index] = n_basic_blocks;  in calculate_dominance_info()
sched-rgn.c
    792  queue = XNEWVEC (int, n_basic_blocks);  in haifa_find_rgns()
   1152  int nblocks = n_basic_blocks - NUM_FIXED_BLOCKS;  in extend_rgns()
   3026  || n_basic_blocks == NUM_FIXED_BLOCKS + 1  in sched_rgn_init()
   3050  gcc_assert (0 < nr_regions && nr_regions <= n_basic_blocks);  in sched_rgn_init()
   3284  if (n_basic_blocks == NUM_FIXED_BLOCKS)  in schedule_insns()
   3330  rgn_table = XRESIZEVEC (region, rgn_table, n_basic_blocks);  in extend_regions()
   3331  rgn_bb_table = XRESIZEVEC (int, rgn_bb_table, n_basic_blocks);  in extend_regions()
cfgcleanup.c
    465  while (counter < n_basic_blocks)  in try_forward_edges()
    478  counter = n_basic_blocks;  in try_forward_edges()
    523  threaded_edges = XNEWVEC (edge, n_basic_blocks);  in try_forward_edges()
    535  counter = n_basic_blocks;  in try_forward_edges()
    544  gcc_assert (nthreaded_edges < n_basic_blocks - NUM_FIXED_BLOCKS);  in try_forward_edges()
    560  if (counter >= n_basic_blocks)  in try_forward_edges()
   2705  && n_basic_blocks > NUM_FIXED_BLOCKS + 1  in try_optimize_cfg()
profile.c
   1127  total_num_blocks += n_basic_blocks;  in branch_prob()
   1129  fprintf (dump_file, "%d basic blocks\n", n_basic_blocks);  in branch_prob()
   1156  for (i = 0; i != (unsigned) (n_basic_blocks); i++)  in branch_prob()
bb-reorder.c
   1914  if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1)  in reorder_basic_blocks()
   1937  traces = XNEWVEC (struct trace, n_basic_blocks);  in reorder_basic_blocks()
   2011  if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1)  in duplicate_computed_gotos()
   2222  if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1)  in partition_hot_cold_basic_blocks()
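The guard that opens reorder_basic_blocks, duplicate_computed_gotos, and partition_hot_cold_basic_blocks (and the cprop, gcse, and tracer passes above) is the standard "nothing to do" test: with NUM_FIXED_BLOCKS covering the entry and exit blocks, n_basic_blocks <= NUM_FIXED_BLOCKS + 1 means the function has at most one real block, so there is no ordering or duplication decision to make. A minimal sketch of the idiom, with the pass body invented:

#include <stdio.h>

#define NUM_FIXED_BLOCKS 2        /* entry and exit                        */

/* Sketch of the early-exit idiom shared by the passes listed above.  */
static void
reorder_pass (int n_basic_blocks)
{
  /* At most one block besides entry/exit: nothing worth reordering.  */
  if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1)
    {
      printf ("%d blocks: skipping\n", n_basic_blocks);
      return;
    }
  printf ("%d blocks: running the pass\n", n_basic_blocks);
}

int
main (void)
{
  reorder_pass (3);               /* entry + exit + 1 real block: skipped  */
  reorder_pass (12);              /* real work to do                       */
  return 0;
}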
tree-ssa-phiopt.c
    227  n = n_basic_blocks - NUM_FIXED_BLOCKS;  in tree_ssa_phiopt_worker()
    380  basic_block *order = XNEWVEC (basic_block, n_basic_blocks);  in blocks_in_phiopt_order()
    381  unsigned n = n_basic_blocks - NUM_FIXED_BLOCKS;  in blocks_in_phiopt_order()
tree-ssa-tail-merge.c
    714  = htab_create (n_basic_blocks, same_succ_hash, same_succ_equal,  in init_worklist()
    719  worklist = VEC_alloc (same_succ, heap, n_basic_blocks);  in init_worklist()
    950  all_clusters = VEC_alloc (bb_cluster, heap, n_basic_blocks);
tree-ssa-uncprop.c
    196  for (i = 0; i < n_basic_blocks; i++)  in associate_equivalences_with_edges()
store-motion.c
    856  stack = XNEWVEC (edge_iterator, n_basic_blocks);  in remove_reachable_equiv_notes()
   1217  current_function_name (), n_basic_blocks);  in one_store_motion_pass()
cfgrtl.c
    342  n_basic_blocks++;  in rtl_create_basic_block()
    460  return (n_basic_blocks > NUM_FIXED_BLOCKS ?  in entry_of_function()
   2278  if (num_bb_notes != n_basic_blocks - NUM_FIXED_BLOCKS)  in rtl_verify_flow_info()
   2281  num_bb_notes, n_basic_blocks);  in rtl_verify_flow_info()
   3070  if (n_basic_blocks == NUM_FIXED_BLOCKS)  in rtl_flow_call_edges_add()
tree-cfg.c
    198  if (n_basic_blocks == NUM_FIXED_BLOCKS)  in build_gimple_cfg()
    202  if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)  in build_gimple_cfg()
    203  VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);  in build_gimple_cfg()
    456  n_basic_blocks++;  in create_bb()
   2110  n_basic_blocks, n_edges, last_basic_block);  in gimple_dump_cfg()
   2147  size = n_basic_blocks * sizeof (struct basic_block_def);  in dump_cfg_stats()
   2149  fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,  in dump_cfg_stats()
   6604  if (any_var && n_basic_blocks)  in dump_function_to_file()
   6941  if (n_basic_blocks == NUM_FIXED_BLOCKS)  in gimple_flow_call_edges_add()
sched-ebb.c
    632  if (n_basic_blocks == NUM_FIXED_BLOCKS)  in schedule_ebbs()