/netbsd-src/usr.bin/vndcompress/
vndcompress.c
     71  uint32_t n_blocks; /* ceiling(size/blocksize) */  [member]
    161  while (S->blkno < S->n_blocks) {  [in vndcompress()]
    216  assert(S->blkno == S->n_blocks);  [in vndcompress()]
    313  assert(S->n_blocks > 0);  [in info_signal_handler()]
    318  ((uint64_t)S->n_blocks * sizeof(uint64_t)))?  [in info_signal_handler()]
    481  S->n_blocks = HOWMANY(S->size, S->blocksize);  [in compress_init()]
    482  assert(S->n_full_blocks <= S->n_blocks);  [in compress_init()]
    483  assert(S->n_blocks <= MAX_N_BLOCKS);  [in compress_init()]
    491  S->n_offsets = (S->n_blocks + 1);  [in compress_init()]
    603  if (be32toh(header.cl2h_n_blocks) != S->n_blocks) {  [in compress_restart()]
    [all …]

vnduncompress.c
     88  const uint32_t n_blocks = be32toh(header.cl2h_n_blocks);  [in vnduncompress(), local]
    107  if (MAX_N_BLOCKS < n_blocks)  [in vnduncompress()]
    109  n_blocks, (uint32_t)MAX_N_BLOCKS);  [in vnduncompress()]
    114  const uint32_t n_offsets = (n_blocks + 1);  [in vnduncompress()]
    166  for (blkno = 0; blkno < n_blocks; blkno++) {  [in vnduncompress()]
    210  if (((blkno + 1) < n_blocks) && (uncomplen != blocksize))  [in vnduncompress()]

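The vndcompress.c and vnduncompress.c matches above show how n_blocks is defined for the image: HOWMANY(S->size, S->blocksize) is a ceiling division, and the offset table carries one extra entry (n_offsets = n_blocks + 1) so the end of the last block is recorded as well. Below is a minimal standalone sketch of that arithmetic; the helper name and the sample sizes are invented for illustration and are not taken from vndcompress itself.

#include <assert.h>
#include <stdint.h>

/* Ceiling division, the effect HOWMANY(size, blocksize) has above. */
static uint32_t
blocks_for(uint64_t size, uint32_t blocksize)
{
        return (uint32_t)((size + blocksize - 1) / blocksize);
}

int
main(void)
{
        uint64_t size = 1000000;        /* hypothetical image size in bytes */
        uint32_t blocksize = 65536;     /* hypothetical block size */

        uint32_t n_blocks = blocks_for(size, blocksize);
        uint32_t n_offsets = n_blocks + 1;  /* one offset past the final block */

        assert(n_blocks == 16);         /* 15 full blocks plus a short tail block */
        assert(n_offsets == 17);
        return 0;
}
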
/netbsd-src/external/gpl3/gcc.old/dist/gcc/
df-core.c
    705  df->n_blocks = post_order_compute (df->postorder, true, true);  [in rest_of_handle_df_initialize()]
    707  gcc_assert ((unsigned) df->n_blocks == df->postorder_inverted.length ());  [in rest_of_handle_df_initialize()]
   1006  int n_blocks)  [in df_worklist_dataflow_doublequeue(), argument]
   1017  last_visit_age.safe_grow_cleared (n_blocks);  [in df_worklist_dataflow_doublequeue()]
   1018  last_change_age.safe_grow_cleared (n_blocks);  [in df_worklist_dataflow_doublequeue()]
   1081  int n_blocks)  [in df_worklist_dataflow(), argument]
   1109  for (i = 0; i < n_blocks; i++)  [in df_worklist_dataflow()]
   1124  n_blocks);  [in df_worklist_dataflow()]
   1156  int *postorder, int n_blocks)  [in df_analyze_problem(), argument]
   1176  postorder, n_blocks);  [in df_analyze_problem()]
    [all …]

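The df-core.c matches show the pattern the dataflow core relies on: post_order_compute() fills df->postorder and returns how many blocks it reached, and the worklist routines then iterate for i < n_blocks over that array. The following is a small self-contained sketch of the same idea on a toy CFG; the graph, the dfs_postorder helper, and all names are made up for illustration and are not GCC's internal API.

#include <stdio.h>

#define N_BB 4

/* Toy CFG: successor lists for four basic blocks (0 is the entry). */
static const int n_succs[N_BB] = { 2, 1, 1, 0 };
static const int succs[N_BB][2] = { { 1, 2 }, { 3, -1 }, { 3, -1 }, { -1, -1 } };

static int visited[N_BB];

/* Append each block after its successors: a depth-first postorder walk. */
static int
dfs_postorder(int bb, int *postorder, int n)
{
        visited[bb] = 1;
        for (int i = 0; i < n_succs[bb]; i++)
                if (!visited[succs[bb][i]])
                        n = dfs_postorder(succs[bb][i], postorder, n);
        postorder[n++] = bb;
        return n;
}

int
main(void)
{
        int postorder[N_BB];

        /* Like post_order_compute(): fill the array and return the count;
           that count is the n_blocks the dataflow loops bound themselves by. */
        int n_blocks = dfs_postorder(0, postorder, 0);

        for (int i = 0; i < n_blocks; i++)
                printf("visit block %d\n", postorder[i]);
        return 0;
}
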
dce.c
   1080  int n_blocks = df_get_n_blocks (DF_BACKWARD);  [in fast_dce(), local]
   1101  for (i = 0; i < n_blocks; i++)  [in fast_dce()]
   1110  for (i = 0; i < n_blocks; i++)  [in fast_dce()]
   1167  df_analyze_problem (df_word_lr, all_blocks, postorder, n_blocks);  [in fast_dce()]
   1169  df_analyze_problem (df_lr, all_blocks, postorder, n_blocks);  [in fast_dce()]

gcse.c
   1659  alloc_pre_mem (int n_blocks, int n_exprs)  [in alloc_pre_mem(), argument]
   1661  transp = sbitmap_vector_alloc (n_blocks, n_exprs);  [in alloc_pre_mem()]
   1662  comp = sbitmap_vector_alloc (n_blocks, n_exprs);  [in alloc_pre_mem()]
   1663  antloc = sbitmap_vector_alloc (n_blocks, n_exprs);  [in alloc_pre_mem()]
   1669  ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);  [in alloc_pre_mem()]
   2713  alloc_code_hoist_mem (int n_blocks, int n_exprs)  [in alloc_code_hoist_mem(), argument]
   2715  antloc = sbitmap_vector_alloc (n_blocks, n_exprs);  [in alloc_code_hoist_mem()]
   2716  transp = sbitmap_vector_alloc (n_blocks, n_exprs);  [in alloc_code_hoist_mem()]
   2717  comp = sbitmap_vector_alloc (n_blocks, n_exprs);  [in alloc_code_hoist_mem()]
   2719  hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);  [in alloc_code_hoist_mem()]
    [all …]

cprop.c
    555  alloc_cprop_mem (int n_blocks, int n_sets)  [in alloc_cprop_mem(), argument]
    557  cprop_avloc = sbitmap_vector_alloc (n_blocks, n_sets);  [in alloc_cprop_mem()]
    558  cprop_kill = sbitmap_vector_alloc (n_blocks, n_sets);  [in alloc_cprop_mem()]
    560  cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);  [in alloc_cprop_mem()]
    561  cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);  [in alloc_cprop_mem()]

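Both the gcse.c and cprop.c matches allocate their per-block dataflow sets with sbitmap_vector_alloc (n_blocks, n_exprs): one bit row per basic block, one bit per tracked expression (or per set, in cprop's case). A rough sketch of what such an allocation amounts to, using plain calloc instead of GCC's sbitmap type; the function and sizes below are hypothetical.

#include <limits.h>
#include <stdlib.h>

/* Allocate n_blocks rows of n_exprs bits each, all cleared: roughly the
   shape sbitmap_vector_alloc (n_blocks, n_exprs) hands back. */
static unsigned long **
bitmap_vector_alloc(int n_blocks, int n_exprs)
{
        const int bits_per_word = (int)(sizeof(unsigned long) * CHAR_BIT);
        const int words = (n_exprs + bits_per_word - 1) / bits_per_word;
        unsigned long **vec = calloc((size_t)n_blocks, sizeof(*vec));

        if (vec == NULL)
                return NULL;
        for (int bb = 0; bb < n_blocks; bb++)
                vec[bb] = calloc((size_t)words, sizeof(**vec));
        return vec;
}

int
main(void)
{
        /* e.g. 100 blocks, 500 tracked expressions; the rows are deliberately
           leaked here to keep the sketch short. */
        unsigned long **transp = bitmap_vector_alloc(100, 500);

        return transp == NULL;
}

Each dataflow set in the listing (transp, comp, antloc, ae_kill, cprop_avin, and so on) gets its own such vector, so a pass's memory scales with n_blocks times the number of tracked expressions.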
early-remat.c
   1009  unsigned int n_blocks = last_basic_block_for_fn (m_fn);  [in init_block_info(), local]
   1010  m_block_info.safe_grow_cleared (n_blocks);  [in init_block_info()]
   1053  unsigned int n_blocks = last_basic_block_for_fn (m_fn);  [in sort_candidates(), local]
   1056  postorder_index = new unsigned int[n_blocks];  [in sort_candidates()]

tree-dfa.c
     89  renumber_gimple_stmt_uids_in_blocks (basic_block *blocks, int n_blocks)  [in renumber_gimple_stmt_uids_in_blocks(), argument]
     94  for (i = 0; i < n_blocks; i++)  [in renumber_gimple_stmt_uids_in_blocks()]

function.c
   4582  int n_blocks = 0;  [in all_blocks(), local]
   4590  vector[n_blocks] = block;  [in all_blocks()]
   4592  ++n_blocks;  [in all_blocks()]
   4595  n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),  [in all_blocks()]
   4596  vector ? vector + n_blocks : 0);  [in all_blocks()]
   4600  return n_blocks;  [in all_blocks()]
   4628  int n_blocks;  [in number_blocks(), local]
   4639  block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);  [in number_blocks()]
   4642  for (i = 1; i < n_blocks; ++i)  [in number_blocks()]

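The function.c matches sketch the core of all_blocks(): each block is stored at vector[n_blocks], the counter is bumped, and the call recurses into BLOCK_SUBBLOCKS with the counter doubling as the next free slot. The listing only shows fragments, so the following is a guess at the overall shape on a generic sibling-chained tree; the struct blk type and its fields are assumptions, not GCC's tree nodes.

#include <stddef.h>

struct blk {
        struct blk *subblocks;  /* first nested block, or NULL */
        struct blk *chain;      /* next block at the same level, or NULL */
};

/* Count the blocks in a sibling chain, storing them into vector when it is
   non-NULL, and recurse into nested blocks; returns how many were seen. */
static int
all_blocks(const struct blk *block, const struct blk **vector)
{
        int n_blocks = 0;

        for (; block != NULL; block = block->chain) {
                if (vector)
                        vector[n_blocks] = block;
                ++n_blocks;
                n_blocks += all_blocks(block->subblocks,
                    vector ? vector + n_blocks : NULL);
        }
        return n_blocks;
}

int
main(void)
{
        struct blk leaf = { NULL, NULL };
        struct blk top = { &leaf, NULL };
        const struct blk *vec[2];

        return all_blocks(&top, vec) == 2 ? 0 : 1;
}
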
sel-sched-ir.c
   3676  int *postorder, n_blocks;  [in sel_recompute_toporder(), local]
   3679  n_blocks = post_order_compute (postorder, false, false);  [in sel_recompute_toporder()]
   3682  for (n = 0, i = n_blocks - 1; i >= 0; i--)  [in sel_recompute_toporder()]
   4932  int n_blocks, i;  [in recompute_rev_top_order(), local]
   4944  n_blocks = post_order_compute (postorder, true, false);  [in recompute_rev_top_order()]
   4945  gcc_assert (n_basic_blocks_for_fn (cfun) == n_blocks);  [in recompute_rev_top_order()]
   4949  for (i = 0; i < n_blocks; i++)  [in recompute_rev_top_order()]

df.h
    588  int n_blocks;  /* The number of blocks in reverse postorder. */  [variable]

cse.c
   6700  int i, n_blocks;  [in cse_main(), local]
   6737  n_blocks = pre_and_rev_post_order_compute (NULL, rc_order, false);  [in cse_main()]
   6739  while (i < n_blocks)  [in cse_main()]
   6748  && i < n_blocks);  [in cse_main()]

df-problems.c
   1043  df_worklist_dataflow (df_lr, all_blocks, df->postorder, df->n_blocks);  [in df_lr_finalize()]

/netbsd-src/external/gpl3/gcc/dist/gcc/
df-core.cc
    705  df->n_blocks = post_order_compute (df->postorder, true, true);  [in rest_of_handle_df_initialize()]
    707  gcc_assert ((unsigned) df->n_blocks == df->postorder_inverted.length ());  [in rest_of_handle_df_initialize()]
   1006  int n_blocks)  [in df_worklist_dataflow_doublequeue(), argument]
   1017  last_visit_age.safe_grow_cleared (n_blocks, true);  [in df_worklist_dataflow_doublequeue()]
   1018  last_change_age.safe_grow_cleared (n_blocks, true);  [in df_worklist_dataflow_doublequeue()]
   1081  int n_blocks)  [in df_worklist_dataflow(), argument]
   1109  for (i = 0; i < n_blocks; i++)  [in df_worklist_dataflow()]
   1124  n_blocks);  [in df_worklist_dataflow()]
   1156  int *postorder, int n_blocks)  [in df_analyze_problem(), argument]
   1176  postorder, n_blocks);  [in df_analyze_problem()]
    [all …]

dce.cc
   1080  int n_blocks = df_get_n_blocks (DF_BACKWARD);  [in fast_dce(), local]
   1101  for (i = 0; i < n_blocks; i++)  [in fast_dce()]
   1110  for (i = 0; i < n_blocks; i++)  [in fast_dce()]
   1167  df_analyze_problem (df_word_lr, all_blocks, postorder, n_blocks);  [in fast_dce()]
   1169  df_analyze_problem (df_lr, all_blocks, postorder, n_blocks);  [in fast_dce()]

gcse.cc
   1659  alloc_pre_mem (int n_blocks, int n_exprs)  [in alloc_pre_mem(), argument]
   1661  transp = sbitmap_vector_alloc (n_blocks, n_exprs);  [in alloc_pre_mem()]
   1662  comp = sbitmap_vector_alloc (n_blocks, n_exprs);  [in alloc_pre_mem()]
   1663  antloc = sbitmap_vector_alloc (n_blocks, n_exprs);  [in alloc_pre_mem()]
   1669  ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);  [in alloc_pre_mem()]
   2695  alloc_code_hoist_mem (int n_blocks, int n_exprs)  [in alloc_code_hoist_mem(), argument]
   2697  antloc = sbitmap_vector_alloc (n_blocks, n_exprs);  [in alloc_code_hoist_mem()]
   2698  transp = sbitmap_vector_alloc (n_blocks, n_exprs);  [in alloc_code_hoist_mem()]
   2699  comp = sbitmap_vector_alloc (n_blocks, n_exprs);  [in alloc_code_hoist_mem()]
   2701  hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);  [in alloc_code_hoist_mem()]
    [all …]

cprop.cc
    556  alloc_cprop_mem (int n_blocks, int n_sets)  [in alloc_cprop_mem(), argument]
    558  cprop_avloc = sbitmap_vector_alloc (n_blocks, n_sets);  [in alloc_cprop_mem()]
    559  cprop_kill = sbitmap_vector_alloc (n_blocks, n_sets);  [in alloc_cprop_mem()]
    561  cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);  [in alloc_cprop_mem()]
    562  cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);  [in alloc_cprop_mem()]

early-remat.cc
   1009  unsigned int n_blocks = last_basic_block_for_fn (m_fn);  [in init_block_info(), local]
   1010  m_block_info.safe_grow_cleared (n_blocks, true);  [in init_block_info()]
   1053  unsigned int n_blocks = last_basic_block_for_fn (m_fn);  [in sort_candidates(), local]
   1056  postorder_index = new unsigned int[n_blocks];  [in sort_candidates()]

tree-dfa.cc
     90  renumber_gimple_stmt_uids_in_blocks (basic_block *blocks, int n_blocks)  [in renumber_gimple_stmt_uids_in_blocks(), argument]
     95  for (i = 0; i < n_blocks; i++)  [in renumber_gimple_stmt_uids_in_blocks()]

sel-sched-ir.cc
   3676  int *postorder, n_blocks;  [in sel_recompute_toporder(), local]
   3679  n_blocks = post_order_compute (postorder, false, false);  [in sel_recompute_toporder()]
   3682  for (n = 0, i = n_blocks - 1; i >= 0; i--)  [in sel_recompute_toporder()]
   4930  int n_blocks, i;  [in recompute_rev_top_order(), local]
   4942  n_blocks = post_order_compute (postorder, true, false);  [in recompute_rev_top_order()]
   4943  gcc_assert (n_basic_blocks_for_fn (cfun) == n_blocks);  [in recompute_rev_top_order()]
   4947  for (i = 0; i < n_blocks; i++)  [in recompute_rev_top_order()]

df.h
    588  int n_blocks;  /* The number of blocks in reverse postorder. */  [variable]

cse.cc
   6644  int i, n_blocks;  [in cse_main(), local]
   6679  n_blocks = pre_and_rev_post_order_compute (NULL, rc_order, false);  [in cse_main()]
   6681  while (i < n_blocks)  [in cse_main()]
   6690  && i < n_blocks);  [in cse_main()]

df-problems.cc
   1083  df_worklist_dataflow (df_lr, all_blocks, df->postorder, df->n_blocks);  [in df_lr_finalize()]

/netbsd-src/external/gpl3/gcc/dist/gcc/config/aarch64/
cortex-a57-fma-steering.cc
    925  int i, n_blocks, *bb_dfs_preorder;  [in analyze(), local]
    930  n_blocks = pre_and_rev_post_order_compute (bb_dfs_preorder, NULL, false);  [in analyze()]
    934  for (i = 0; i < n_blocks; i++)  [in analyze()]

/netbsd-src/external/gpl3/gcc.old/dist/gcc/config/aarch64/
cortex-a57-fma-steering.c
    925  int i, n_blocks, *bb_dfs_preorder;  [in analyze(), local]
    930  n_blocks = pre_and_rev_post_order_compute (bb_dfs_preorder, NULL, false);  [in analyze()]
    934  for (i = 0; i < n_blocks; i++)  [in analyze()]