use auto_sbitmap in various places
gcc/ChangeLog:

2016-07-26  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>

	* bt-load.c (compute_out): Use auto_sbitmap class.
	(link_btr_uses): Likewise.
	* cfganal.c (mark_dfs_back_edges): Likewise.
	(post_order_compute): Likewise.
	(inverted_post_order_compute): Likewise.
	(pre_and_rev_post_order_compute_fn): Likewise.
	(single_pred_before_succ_order): Likewise.
	* cfgexpand.c (pass_expand::execute): Likewise.
	* cfgloop.c (verify_loop_structure): Likewise.
	* cfgloopmanip.c (fix_bb_placements): Likewise.
	(remove_path): Likewise.
	(update_dominators_in_loop): Likewise.
	* cfgrtl.c (break_superblocks): Likewise.
	* ddg.c (check_sccs): Likewise.
	(create_ddg_all_sccs): Likewise.
	* df-core.c (df_worklist_dataflow): Likewise.
	* dse.c (dse_step3): Likewise.
	* except.c (eh_region_outermost): Likewise.
	* function.c (thread_prologue_and_epilogue_insns): Likewise.
	* gcse.c (prune_expressions): Likewise.
	(prune_insertions_deletions): Likewise.
	* gimple-ssa-backprop.c (backprop::~backprop): Likewise.
	* graph.c (draw_cfg_nodes_no_loops): Likewise.
	* ira-lives.c (remove_some_program_points_and_update_live_ranges):
	Likewise.
	* lcm.c (compute_earliest): Likewise.
	(compute_farthest): Likewise.
	* loop-unroll.c (unroll_loop_constant_iterations): Likewise.
	(unroll_loop_runtime_iterations): Likewise.
	(unroll_loop_stupid): Likewise.
	* lower-subreg.c (decompose_multiword_subregs): Likewise.
	* lra-lives.c: Likewise.
	* lra.c (lra): Likewise.
	* modulo-sched.c (schedule_reg_moves): Likewise.
	(optimize_sc): Likewise.
	(get_sched_window): Likewise.
	(sms_schedule_by_order): Likewise.
	(check_nodes_order): Likewise.
	(order_nodes_of_sccs): Likewise.
	(order_nodes_in_scc): Likewise.
	* recog.c (split_all_insns): Likewise.
	* regcprop.c (pass_cprop_hardreg::execute): Likewise.
	* reload1.c (reload): Likewise.
	* sched-rgn.c (haifa_find_rgns): Likewise.
	(split_edges): Likewise.
	(compute_trg_info): Likewise.
	* sel-sched.c (init_seqno): Likewise.
	* store-motion.c (remove_reachable_equiv_notes): Likewise.
	* tree-into-ssa.c (update_ssa): Likewise.
	* tree-ssa-live.c (live_worklist): Likewise.
	* tree-ssa-loop-im.c (fill_always_executed_in): Likewise.
	* tree-ssa-loop-ivcanon.c (try_unroll_loop_completely): Likewise.
	(try_peel_loop): Likewise.
	* tree-ssa-loop-manip.c (tree_transform_and_unroll_loop): Likewise.
	* tree-ssa-pre.c (compute_antic): Likewise.
	* tree-ssa-reassoc.c (undistribute_ops_list): Likewise.
	* tree-stdarg.c (reachable_at_most_once): Likewise.
	* tree-vect-slp.c (vect_attempt_slp_rearrange_stmts): Likewise.
	* var-tracking.c (vt_find_locations): Likewise.

From-SVN: r238748
parent 62e2078514
commit 7ba9e72de9
38 changed files with 162 additions and 269 deletions
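The whole patch is mechanical: every hunk swaps a manually managed sbitmap (sbitmap_alloc at the top of a scope, sbitmap_free on every exit path) for an auto_sbitmap, the RAII wrapper added to sbitmap.h by the companion commit of the same day ("* sbitmap.h (auto_sbitmap): New class."). The sketch below is not GCC source; it is a minimal illustration of the shape auto_sbitmap needs for these hunks to compile, assuming GCC's existing sbitmap API (sbitmap_alloc, sbitmap_free, bitmap_clear). The names example_before and example_after are made up for illustration.

/* Minimal sketch, not the actual sbitmap.h implementation: an RAII owner
   for an sbitmap, relying on GCC's sbitmap_alloc/sbitmap_free.  */
class auto_sbitmap
{
public:
  explicit auto_sbitmap (unsigned int size)
    : m_bitmap (sbitmap_alloc (size)) {}
  ~auto_sbitmap () { sbitmap_free (m_bitmap); }

  /* Convert implicitly to the raw sbitmap so existing calls such as
     bitmap_clear (visited) or find_many_sub_basic_blocks (blocks)
     keep working unchanged.  */
  operator sbitmap () const { return m_bitmap; }

private:
  /* The wrapper owns the bitmap, so copying is disallowed.  */
  auto_sbitmap (const auto_sbitmap &);
  auto_sbitmap &operator= (const auto_sbitmap &);

  sbitmap m_bitmap;
};

/* Before this patch: every exit path must remember to free.  */
static void
example_before (unsigned int size)
{
  sbitmap visited = sbitmap_alloc (size);
  bitmap_clear (visited);
  /* ... traverse the CFG, possibly returning early ... */
  sbitmap_free (visited);
}

/* After this patch: the bitmap is released when VISITED goes out of
   scope, on every return path.  */
static void
example_after (unsigned int size)
{
  auto_sbitmap visited (size);
  bitmap_clear (visited);
  /* ... traverse the CFG ... */
}

Note that the wrapper only owns the allocation; it does not zero the bits, which is why the bitmap_clear and bitmap_ones calls stay behind in every converted function.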
|
gcc/bt-load.c

@@ -626,7 +626,7 @@ compute_out (sbitmap *bb_out, sbitmap *bb_gen, sbitmap *bb_kill, int max_uid)
      Iterate until the bb_out sets stop growing.  */
   int i;
   int changed;
-  sbitmap bb_in = sbitmap_alloc (max_uid);
+  auto_sbitmap bb_in (max_uid);
 
   for (i = NUM_FIXED_BLOCKS; i < last_basic_block_for_fn (cfun); i++)
     bitmap_copy (bb_out[i], bb_gen[i]);
@@ -642,7 +642,6 @@ compute_out (sbitmap *bb_out, sbitmap *bb_gen, sbitmap *bb_kill, int max_uid)
 					       bb_in, bb_kill[i]);
 	}
     }
-  sbitmap_free (bb_in);
 }
 
 static void
@@ -650,7 +649,7 @@ link_btr_uses (btr_def **def_array, btr_user **use_array, sbitmap *bb_out,
 	       sbitmap *btr_defset, int max_uid)
 {
   int i;
-  sbitmap reaching_defs = sbitmap_alloc (max_uid);
+  auto_sbitmap reaching_defs (max_uid);
 
   /* Link uses to the uses lists of all of their reaching defs.
      Count up the number of reaching defs of each use.  */
@@ -683,7 +682,7 @@ link_btr_uses (btr_def **def_array, btr_user **use_array, sbitmap *bb_out,
 	  if (user != NULL)
 	    {
 	      /* Find all the reaching defs for this use.  */
-	      sbitmap reaching_defs_of_reg = sbitmap_alloc (max_uid);
+	      auto_sbitmap reaching_defs_of_reg (max_uid);
 	      unsigned int uid = 0;
 	      sbitmap_iterator sbi;
 
@@ -738,7 +737,6 @@ link_btr_uses (btr_def **def_array, btr_user **use_array, sbitmap *bb_out,
 		  user->next = def->uses;
 		  def->uses = user;
 		}
-	      sbitmap_free (reaching_defs_of_reg);
 	    }
 
 	  if (CALL_P (insn))
@@ -754,7 +752,6 @@ link_btr_uses (btr_def **def_array, btr_user **use_array, sbitmap *bb_out,
 	    }
 	}
     }
-  sbitmap_free (reaching_defs);
 }
 
 static void
|
||||
|
|
|
@ -67,7 +67,6 @@ mark_dfs_back_edges (void)
|
|||
int sp;
|
||||
int prenum = 1;
|
||||
int postnum = 1;
|
||||
sbitmap visited;
|
||||
bool found = false;
|
||||
|
||||
/* Allocate the preorder and postorder number arrays. */
|
||||
|
@ -79,7 +78,7 @@ mark_dfs_back_edges (void)
|
|||
sp = 0;
|
||||
|
||||
/* Allocate bitmap to track nodes that have been visited. */
|
||||
visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
|
||||
auto_sbitmap visited (last_basic_block_for_fn (cfun));
|
||||
|
||||
/* None of the nodes in the CFG have been visited yet. */
|
||||
bitmap_clear (visited);
|
||||
|
@ -138,7 +137,6 @@ mark_dfs_back_edges (void)
|
|||
free (pre);
|
||||
free (post);
|
||||
free (stack);
|
||||
sbitmap_free (visited);
|
||||
|
||||
return found;
|
||||
}
|
||||
|
@ -642,7 +640,6 @@ post_order_compute (int *post_order, bool include_entry_exit,
|
|||
edge_iterator *stack;
|
||||
int sp;
|
||||
int post_order_num = 0;
|
||||
sbitmap visited;
|
||||
int count;
|
||||
|
||||
if (include_entry_exit)
|
||||
|
@ -653,7 +650,7 @@ post_order_compute (int *post_order, bool include_entry_exit,
|
|||
sp = 0;
|
||||
|
||||
/* Allocate bitmap to track nodes that have been visited. */
|
||||
visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
|
||||
auto_sbitmap visited (last_basic_block_for_fn (cfun));
|
||||
|
||||
/* None of the nodes in the CFG have been visited yet. */
|
||||
bitmap_clear (visited);
|
||||
|
@ -726,7 +723,6 @@ post_order_compute (int *post_order, bool include_entry_exit,
|
|||
}
|
||||
|
||||
free (stack);
|
||||
sbitmap_free (visited);
|
||||
return post_order_num;
|
||||
}
|
||||
|
||||
|
@ -820,7 +816,6 @@ inverted_post_order_compute (int *post_order,
|
|||
edge_iterator *stack;
|
||||
int sp;
|
||||
int post_order_num = 0;
|
||||
sbitmap visited;
|
||||
|
||||
if (flag_checking)
|
||||
verify_no_unreachable_blocks ();
|
||||
|
@ -830,7 +825,7 @@ inverted_post_order_compute (int *post_order,
|
|||
sp = 0;
|
||||
|
||||
/* Allocate bitmap to track nodes that have been visited. */
|
||||
visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
|
||||
auto_sbitmap visited (last_basic_block_for_fn (cfun));
|
||||
|
||||
/* None of the nodes in the CFG have been visited yet. */
|
||||
bitmap_clear (visited);
|
||||
|
@ -956,7 +951,6 @@ inverted_post_order_compute (int *post_order,
|
|||
post_order[post_order_num++] = EXIT_BLOCK;
|
||||
|
||||
free (stack);
|
||||
sbitmap_free (visited);
|
||||
return post_order_num;
|
||||
}
|
||||
|
||||
|
@ -981,7 +975,6 @@ pre_and_rev_post_order_compute_fn (struct function *fn,
|
|||
int sp;
|
||||
int pre_order_num = 0;
|
||||
int rev_post_order_num = n_basic_blocks_for_fn (cfun) - 1;
|
||||
sbitmap visited;
|
||||
|
||||
/* Allocate stack for back-tracking up CFG. */
|
||||
stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
|
||||
|
@ -999,7 +992,7 @@ pre_and_rev_post_order_compute_fn (struct function *fn,
|
|||
rev_post_order_num -= NUM_FIXED_BLOCKS;
|
||||
|
||||
/* Allocate bitmap to track nodes that have been visited. */
|
||||
visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
|
||||
auto_sbitmap visited (last_basic_block_for_fn (cfun));
|
||||
|
||||
/* None of the nodes in the CFG have been visited yet. */
|
||||
bitmap_clear (visited);
|
||||
|
@ -1056,7 +1049,6 @@ pre_and_rev_post_order_compute_fn (struct function *fn,
|
|||
}
|
||||
|
||||
free (stack);
|
||||
sbitmap_free (visited);
|
||||
|
||||
if (include_entry_exit)
|
||||
{
|
||||
|
@ -1569,7 +1561,7 @@ single_pred_before_succ_order (void)
|
|||
basic_block *order = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
|
||||
unsigned n = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;
|
||||
unsigned np, i;
|
||||
sbitmap visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
|
||||
auto_sbitmap visited (last_basic_block_for_fn (cfun));
|
||||
|
||||
#define MARK_VISITED(BB) (bitmap_set_bit (visited, (BB)->index))
|
||||
#define VISITED_P(BB) (bitmap_bit_p (visited, (BB)->index))
|
||||
|
@ -1603,7 +1595,6 @@ single_pred_before_succ_order (void)
|
|||
n -= np;
|
||||
}
|
||||
|
||||
sbitmap_free (visited);
|
||||
gcc_assert (n == 0);
|
||||
return order;
|
||||
|
||||
|
|
|
gcc/cfgexpand.c

@@ -6159,7 +6159,6 @@ unsigned int
 pass_expand::execute (function *fun)
 {
   basic_block bb, init_block;
-  sbitmap blocks;
   edge_iterator ei;
   edge e;
   rtx_insn *var_seq, *var_ret_seq;
@@ -6472,10 +6471,9 @@ pass_expand::execute (function *fun)
 	}
     }
 
-  blocks = sbitmap_alloc (last_basic_block_for_fn (fun));
+  auto_sbitmap blocks (last_basic_block_for_fn (fun));
   bitmap_ones (blocks);
   find_many_sub_basic_blocks (blocks);
-  sbitmap_free (blocks);
   purge_all_dead_edges ();
 
   expand_stack_alignment ();
|
||||
|
|
|
@ -1312,7 +1312,6 @@ DEBUG_FUNCTION void
|
|||
verify_loop_structure (void)
|
||||
{
|
||||
unsigned *sizes, i, j;
|
||||
sbitmap irreds;
|
||||
basic_block bb, *bbs;
|
||||
struct loop *loop;
|
||||
int err = 0;
|
||||
|
@ -1320,7 +1319,6 @@ verify_loop_structure (void)
|
|||
unsigned num = number_of_loops (cfun);
|
||||
struct loop_exit *exit, *mexit;
|
||||
bool dom_available = dom_info_available_p (CDI_DOMINATORS);
|
||||
sbitmap visited;
|
||||
|
||||
if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
|
||||
{
|
||||
|
@ -1366,7 +1364,7 @@ verify_loop_structure (void)
|
|||
}
|
||||
|
||||
/* Check the recorded loop father and sizes of loops. */
|
||||
visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
|
||||
auto_sbitmap visited (last_basic_block_for_fn (cfun));
|
||||
bitmap_clear (visited);
|
||||
bbs = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
|
||||
FOR_EACH_LOOP (loop, LI_FROM_INNERMOST)
|
||||
|
@ -1413,7 +1411,6 @@ verify_loop_structure (void)
|
|||
}
|
||||
}
|
||||
free (bbs);
|
||||
sbitmap_free (visited);
|
||||
|
||||
/* Check headers and latches. */
|
||||
FOR_EACH_LOOP (loop, 0)
|
||||
|
@ -1480,7 +1477,7 @@ verify_loop_structure (void)
|
|||
if (loops_state_satisfies_p (LOOPS_HAVE_MARKED_IRREDUCIBLE_REGIONS))
|
||||
{
|
||||
/* Record old info. */
|
||||
irreds = sbitmap_alloc (last_basic_block_for_fn (cfun));
|
||||
auto_sbitmap irreds (last_basic_block_for_fn (cfun));
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
edge_iterator ei;
|
||||
|
@ -1532,7 +1529,6 @@ verify_loop_structure (void)
|
|||
e->flags &= ~(EDGE_ALL_FLAGS + 1);
|
||||
}
|
||||
}
|
||||
free (irreds);
|
||||
}
|
||||
|
||||
/* Check the recorded loop exits. */
|
||||
|
|
|
@ -181,7 +181,6 @@ fix_bb_placements (basic_block from,
|
|||
bool *irred_invalidated,
|
||||
bitmap loop_closed_ssa_invalidated)
|
||||
{
|
||||
sbitmap in_queue;
|
||||
basic_block *queue, *qtop, *qbeg, *qend;
|
||||
struct loop *base_loop, *target_loop;
|
||||
edge e;
|
||||
|
@ -201,7 +200,7 @@ fix_bb_placements (basic_block from,
|
|||
|| from == base_loop->header)
|
||||
return;
|
||||
|
||||
in_queue = sbitmap_alloc (last_basic_block_for_fn (cfun));
|
||||
auto_sbitmap in_queue (last_basic_block_for_fn (cfun));
|
||||
bitmap_clear (in_queue);
|
||||
bitmap_set_bit (in_queue, from->index);
|
||||
/* Prevent us from going out of the base_loop. */
|
||||
|
@ -292,7 +291,6 @@ fix_bb_placements (basic_block from,
|
|||
bitmap_set_bit (in_queue, pred->index);
|
||||
}
|
||||
}
|
||||
free (in_queue);
|
||||
free (queue);
|
||||
}
|
||||
|
||||
|
@ -306,7 +304,6 @@ remove_path (edge e)
|
|||
basic_block *rem_bbs, *bord_bbs, from, bb;
|
||||
vec<basic_block> dom_bbs;
|
||||
int i, nrem, n_bord_bbs;
|
||||
sbitmap seen;
|
||||
bool irred_invalidated = false;
|
||||
edge_iterator ei;
|
||||
struct loop *l, *f;
|
||||
|
@ -345,7 +342,7 @@ remove_path (edge e)
|
|||
|
||||
n_bord_bbs = 0;
|
||||
bord_bbs = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
|
||||
seen = sbitmap_alloc (last_basic_block_for_fn (cfun));
|
||||
auto_sbitmap seen (last_basic_block_for_fn (cfun));
|
||||
bitmap_clear (seen);
|
||||
|
||||
/* Find "border" hexes -- i.e. those with predecessor in removed path. */
|
||||
|
@ -407,8 +404,6 @@ remove_path (edge e)
|
|||
dom_bbs.safe_push (ldom);
|
||||
}
|
||||
|
||||
free (seen);
|
||||
|
||||
/* Recount dominators. */
|
||||
iterate_fix_dominators (CDI_DOMINATORS, dom_bbs, true);
|
||||
dom_bbs.release ();
|
||||
|
@ -616,11 +611,10 @@ static void
|
|||
update_dominators_in_loop (struct loop *loop)
|
||||
{
|
||||
vec<basic_block> dom_bbs = vNULL;
|
||||
sbitmap seen;
|
||||
basic_block *body;
|
||||
unsigned i;
|
||||
|
||||
seen = sbitmap_alloc (last_basic_block_for_fn (cfun));
|
||||
auto_sbitmap seen (last_basic_block_for_fn (cfun));
|
||||
bitmap_clear (seen);
|
||||
body = get_loop_body (loop);
|
||||
|
||||
|
@ -643,7 +637,6 @@ update_dominators_in_loop (struct loop *loop)
|
|||
|
||||
iterate_fix_dominators (CDI_DOMINATORS, dom_bbs, false);
|
||||
free (body);
|
||||
free (seen);
|
||||
dom_bbs.release ();
|
||||
}
|
||||
|
||||
|
|
|
gcc/cfgrtl.c

@@ -4272,11 +4272,10 @@ cfg_layout_initialize (unsigned int flags)
 void
 break_superblocks (void)
 {
-  sbitmap superblocks;
   bool need = false;
   basic_block bb;
 
-  superblocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
+  auto_sbitmap superblocks (last_basic_block_for_fn (cfun));
   bitmap_clear (superblocks);
 
   FOR_EACH_BB_FN (bb, cfun)
@@ -4292,8 +4291,6 @@ break_superblocks (void)
       rebuild_jump_labels (get_insns ());
       find_many_sub_basic_blocks (superblocks);
     }
-
-  free (superblocks);
 }
 
 /* Finalize the changes: reorder insn list according to the sequence specified
|
||||
|
|
gcc/ddg.c

@@ -997,7 +997,7 @@ static void
 check_sccs (ddg_all_sccs_ptr sccs, int num_nodes)
 {
   int i = 0;
-  sbitmap tmp = sbitmap_alloc (num_nodes);
+  auto_sbitmap tmp (num_nodes);
 
   bitmap_clear (tmp);
   for (i = 0; i < sccs->num_sccs; i++)
@@ -1008,7 +1008,6 @@ check_sccs (ddg_all_sccs_ptr sccs, int num_nodes)
       gcc_assert (!bitmap_intersect_p (tmp, sccs->sccs[i]->nodes));
       bitmap_ior (tmp, tmp, sccs->sccs[i]->nodes);
     }
-  sbitmap_free (tmp);
 }
 
 /* Perform the Strongly Connected Components decomposing algorithm on the
@@ -1018,9 +1017,9 @@ create_ddg_all_sccs (ddg_ptr g)
 {
   int i;
   int num_nodes = g->num_nodes;
-  sbitmap from = sbitmap_alloc (num_nodes);
-  sbitmap to = sbitmap_alloc (num_nodes);
-  sbitmap scc_nodes = sbitmap_alloc (num_nodes);
+  auto_sbitmap from (num_nodes);
+  auto_sbitmap to (num_nodes);
+  auto_sbitmap scc_nodes (num_nodes);
   ddg_all_sccs_ptr sccs = (ddg_all_sccs_ptr)
     xmalloc (sizeof (struct ddg_all_sccs));
 
@@ -1052,9 +1051,6 @@ create_ddg_all_sccs (ddg_ptr g)
 	}
     }
   order_sccs (sccs);
-  sbitmap_free (from);
-  sbitmap_free (to);
-  sbitmap_free (scc_nodes);
 
   if (flag_checking)
     check_sccs (sccs, num_nodes);
|
||||
|
|
|
@ -1083,7 +1083,6 @@ df_worklist_dataflow (struct dataflow *dataflow,
|
|||
int n_blocks)
|
||||
{
|
||||
bitmap pending = BITMAP_ALLOC (&df_bitmap_obstack);
|
||||
sbitmap considered = sbitmap_alloc (last_basic_block_for_fn (cfun));
|
||||
bitmap_iterator bi;
|
||||
unsigned int *bbindex_to_postorder;
|
||||
int i;
|
||||
|
@ -1101,6 +1100,7 @@ df_worklist_dataflow (struct dataflow *dataflow,
|
|||
bbindex_to_postorder[i] = last_basic_block_for_fn (cfun);
|
||||
|
||||
/* Initialize the considered map. */
|
||||
auto_sbitmap considered (last_basic_block_for_fn (cfun));
|
||||
bitmap_clear (considered);
|
||||
EXECUTE_IF_SET_IN_BITMAP (blocks_to_consider, 0, index, bi)
|
||||
{
|
||||
|
@ -1124,7 +1124,6 @@ df_worklist_dataflow (struct dataflow *dataflow,
|
|||
blocks_in_postorder,
|
||||
bbindex_to_postorder,
|
||||
n_blocks);
|
||||
sbitmap_free (considered);
|
||||
free (bbindex_to_postorder);
|
||||
}
|
||||
|
||||
|
|
|
@ -2999,11 +2999,11 @@ static void
|
|||
dse_step3 ()
|
||||
{
|
||||
basic_block bb;
|
||||
sbitmap unreachable_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
|
||||
sbitmap_iterator sbi;
|
||||
bitmap all_ones = NULL;
|
||||
unsigned int i;
|
||||
|
||||
auto_sbitmap unreachable_blocks (last_basic_block_for_fn (cfun));
|
||||
bitmap_ones (unreachable_blocks);
|
||||
|
||||
FOR_ALL_BB_FN (bb, cfun)
|
||||
|
@ -3058,7 +3058,6 @@ dse_step3 ()
|
|||
|
||||
if (all_ones)
|
||||
BITMAP_FREE (all_ones);
|
||||
sbitmap_free (unreachable_blocks);
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -634,12 +634,10 @@ eh_region
|
|||
eh_region_outermost (struct function *ifun, eh_region region_a,
|
||||
eh_region region_b)
|
||||
{
|
||||
sbitmap b_outer;
|
||||
|
||||
gcc_assert (ifun->eh->region_array);
|
||||
gcc_assert (ifun->eh->region_tree);
|
||||
|
||||
b_outer = sbitmap_alloc (ifun->eh->region_array->length ());
|
||||
auto_sbitmap b_outer (ifun->eh->region_array->length ());
|
||||
bitmap_clear (b_outer);
|
||||
|
||||
do
|
||||
|
@ -657,7 +655,6 @@ eh_region_outermost (struct function *ifun, eh_region region_a,
|
|||
}
|
||||
while (region_a);
|
||||
|
||||
sbitmap_free (b_outer);
|
||||
return region_a;
|
||||
}
|
||||
|
||||
|
|
|
@ -6018,12 +6018,11 @@ thread_prologue_and_epilogue_insns (void)
|
|||
commit_edge_insertions ();
|
||||
|
||||
/* Look for basic blocks within the prologue insns. */
|
||||
sbitmap blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
|
||||
auto_sbitmap blocks (last_basic_block_for_fn (cfun));
|
||||
bitmap_clear (blocks);
|
||||
bitmap_set_bit (blocks, entry_edge->dest->index);
|
||||
bitmap_set_bit (blocks, orig_entry_edge->dest->index);
|
||||
find_many_sub_basic_blocks (blocks);
|
||||
sbitmap_free (blocks);
|
||||
}
|
||||
|
||||
default_rtl_profile ();
|
||||
|
|
|
@ -1691,12 +1691,11 @@ free_pre_mem (void)
|
|||
static void
|
||||
prune_expressions (bool pre_p)
|
||||
{
|
||||
sbitmap prune_exprs;
|
||||
struct gcse_expr *expr;
|
||||
unsigned int ui;
|
||||
basic_block bb;
|
||||
|
||||
prune_exprs = sbitmap_alloc (expr_hash_table.n_elems);
|
||||
auto_sbitmap prune_exprs (expr_hash_table.n_elems);
|
||||
bitmap_clear (prune_exprs);
|
||||
for (ui = 0; ui < expr_hash_table.size; ui++)
|
||||
{
|
||||
|
@ -1767,8 +1766,6 @@ prune_expressions (bool pre_p)
|
|||
break;
|
||||
}
|
||||
}
|
||||
|
||||
sbitmap_free (prune_exprs);
|
||||
}
|
||||
|
||||
/* It may be necessary to insert a large number of insns on edges to
|
||||
|
@ -1783,7 +1780,6 @@ static void
|
|||
prune_insertions_deletions (int n_elems)
|
||||
{
|
||||
sbitmap_iterator sbi;
|
||||
sbitmap prune_exprs;
|
||||
|
||||
/* We always use I to iterate over blocks/edges and J to iterate over
|
||||
expressions. */
|
||||
|
@ -1797,7 +1793,7 @@ prune_insertions_deletions (int n_elems)
|
|||
/* Set of expressions which require too many insertions relative to
|
||||
the number of deletions achieved. We will prune these out of the
|
||||
insertion/deletion sets. */
|
||||
prune_exprs = sbitmap_alloc (n_elems);
|
||||
auto_sbitmap prune_exprs (n_elems);
|
||||
bitmap_clear (prune_exprs);
|
||||
|
||||
/* Iterate over the edges counting the number of times each expression
|
||||
|
@ -1835,7 +1831,6 @@ prune_insertions_deletions (int n_elems)
|
|||
bitmap_clear_bit (pre_delete_map[i], j);
|
||||
}
|
||||
|
||||
sbitmap_free (prune_exprs);
|
||||
free (insertions);
|
||||
free (deletions);
|
||||
}
|
||||
|
|
|
gcc/gimple-ssa-backprop.c

@@ -258,7 +258,7 @@ private:
 
   /* A bitmap of blocks that we have finished processing in the initial
      post-order walk.  */
-  sbitmap m_visited_blocks;
+  auto_sbitmap m_visited_blocks;
 
   /* A worklist of SSA names whose definitions need to be reconsidered.  */
   auto_vec <tree, 64> m_worklist;
@@ -272,7 +272,7 @@ private:
 backprop::backprop (function *fn)
   : m_fn (fn),
     m_info_pool ("usage_info"),
-    m_visited_blocks (sbitmap_alloc (last_basic_block_for_fn (m_fn))),
+    m_visited_blocks (last_basic_block_for_fn (m_fn)),
     m_worklist_names (BITMAP_ALLOC (NULL))
 {
   bitmap_clear (m_visited_blocks);
@@ -281,7 +281,6 @@ backprop::backprop (function *fn)
 backprop::~backprop ()
 {
   BITMAP_FREE (m_worklist_names);
-  sbitmap_free (m_visited_blocks);
   m_info_pool.release ();
 }
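The gimple-ssa-backprop.c hunks just above are the one place in the patch where auto_sbitmap is used as a class member rather than a block-scope local: the constructor's initializer list passes the size instead of an sbitmap_alloc result, and the explicit sbitmap_free disappears from the destructor. A hedged sketch of that shape follows; the class and member names are illustrative, not GCC source, and it assumes the auto_sbitmap interface sketched earlier.

/* Illustrative only: a pass-state class that owns a bitmap for its whole
   lifetime, mirroring backprop::m_visited_blocks.  */
class pass_state
{
public:
  explicit pass_state (function *fn)
    : m_fn (fn),
      m_visited_blocks (last_basic_block_for_fn (fn))
  {
    /* auto_sbitmap does not zero the bits, so clear them here, just as
       backprop's constructor still does.  */
    bitmap_clear (m_visited_blocks);
  }

  /* No destructor needed for the bitmap: ~auto_sbitmap frees it.  */

private:
  function *m_fn;
  auto_sbitmap m_visited_blocks;
};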
|
||||
|
||||
|
|
|
@ -156,9 +156,8 @@ draw_cfg_nodes_no_loops (pretty_printer *pp, struct function *fun)
|
|||
{
|
||||
int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fun));
|
||||
int i, n;
|
||||
sbitmap visited;
|
||||
|
||||
visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
|
||||
auto_sbitmap visited (last_basic_block_for_fn (cfun));
|
||||
bitmap_clear (visited);
|
||||
|
||||
n = pre_and_rev_post_order_compute_fn (fun, NULL, rpo, true);
|
||||
|
@ -179,8 +178,6 @@ draw_cfg_nodes_no_loops (pretty_printer *pp, struct function *fun)
|
|||
if (! bitmap_bit_p (visited, bb->index))
|
||||
draw_cfg_node (pp, fun->funcdef_no, bb);
|
||||
}
|
||||
|
||||
sbitmap_free (visited);
|
||||
}
|
||||
|
||||
/* Draw all the basic blocks in LOOP. Print the blocks in breath-first
|
||||
|
|
|
@ -1409,12 +1409,11 @@ remove_some_program_points_and_update_live_ranges (void)
|
|||
ira_object_t obj;
|
||||
ira_object_iterator oi;
|
||||
live_range_t r, prev_r, next_r;
|
||||
sbitmap born_or_dead, born, dead;
|
||||
sbitmap_iterator sbi;
|
||||
bool born_p, dead_p, prev_born_p, prev_dead_p;
|
||||
|
||||
born = sbitmap_alloc (ira_max_point);
|
||||
dead = sbitmap_alloc (ira_max_point);
|
||||
auto_sbitmap born (ira_max_point);
|
||||
auto_sbitmap dead (ira_max_point);
|
||||
bitmap_clear (born);
|
||||
bitmap_clear (dead);
|
||||
FOR_EACH_OBJECT (obj, oi)
|
||||
|
@ -1425,7 +1424,7 @@ remove_some_program_points_and_update_live_ranges (void)
|
|||
bitmap_set_bit (dead, r->finish);
|
||||
}
|
||||
|
||||
born_or_dead = sbitmap_alloc (ira_max_point);
|
||||
auto_sbitmap born_or_dead (ira_max_point);
|
||||
bitmap_ior (born_or_dead, born, dead);
|
||||
map = (int *) ira_allocate (sizeof (int) * ira_max_point);
|
||||
n = -1;
|
||||
|
@ -1442,9 +1441,7 @@ remove_some_program_points_and_update_live_ranges (void)
|
|||
prev_born_p = born_p;
|
||||
prev_dead_p = dead_p;
|
||||
}
|
||||
sbitmap_free (born_or_dead);
|
||||
sbitmap_free (born);
|
||||
sbitmap_free (dead);
|
||||
|
||||
n++;
|
||||
if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
|
||||
fprintf (ira_dump_file, "Compressing live ranges: from %d to %d - %d%%\n",
|
||||
|
|
gcc/lcm.c

@@ -170,15 +170,12 @@ compute_earliest (struct edge_list *edge_list, int n_exprs, sbitmap *antin,
 		  sbitmap *antout, sbitmap *avout, sbitmap *kill,
 		  sbitmap *earliest)
 {
-  sbitmap difference, temp_bitmap;
   int x, num_edges;
   basic_block pred, succ;
 
   num_edges = NUM_EDGES (edge_list);
 
-  difference = sbitmap_alloc (n_exprs);
-  temp_bitmap = sbitmap_alloc (n_exprs);
-
+  auto_sbitmap difference (n_exprs), temp_bitmap (n_exprs);
   for (x = 0; x < num_edges; x++)
     {
       pred = INDEX_EDGE_PRED_BB (edge_list, x);
@@ -199,9 +196,6 @@ compute_earliest (struct edge_list *edge_list, int n_exprs, sbitmap *antin,
 	    }
 	}
     }
-
-  sbitmap_free (temp_bitmap);
-  sbitmap_free (difference);
 }
 
 /* later(p,s) is dependent on the calculation of laterin(p).
@@ -594,15 +588,12 @@ compute_farthest (struct edge_list *edge_list, int n_exprs,
 		  sbitmap *st_avout, sbitmap *st_avin, sbitmap *st_antin,
 		  sbitmap *kill, sbitmap *farthest)
 {
-  sbitmap difference, temp_bitmap;
   int x, num_edges;
   basic_block pred, succ;
 
   num_edges = NUM_EDGES (edge_list);
 
-  difference = sbitmap_alloc (n_exprs);
-  temp_bitmap = sbitmap_alloc (n_exprs);
-
+  auto_sbitmap difference (n_exprs), temp_bitmap (n_exprs);
   for (x = 0; x < num_edges; x++)
     {
       pred = INDEX_EDGE_PRED_BB (edge_list, x);
@@ -623,9 +614,6 @@ compute_farthest (struct edge_list *edge_list, int n_exprs,
 	    }
 	}
     }
-
-  sbitmap_free (temp_bitmap);
-  sbitmap_free (difference);
 }
 
 /* Compute nearer and nearerout vectors for edge based lcm.
|
||||
|
|
|
@ -461,7 +461,6 @@ unroll_loop_constant_iterations (struct loop *loop)
|
|||
{
|
||||
unsigned HOST_WIDE_INT niter;
|
||||
unsigned exit_mod;
|
||||
sbitmap wont_exit;
|
||||
unsigned i;
|
||||
edge e;
|
||||
unsigned max_unroll = loop->lpt_decision.times;
|
||||
|
@ -477,7 +476,7 @@ unroll_loop_constant_iterations (struct loop *loop)
|
|||
|
||||
exit_mod = niter % (max_unroll + 1);
|
||||
|
||||
wont_exit = sbitmap_alloc (max_unroll + 1);
|
||||
auto_sbitmap wont_exit (max_unroll + 1);
|
||||
bitmap_ones (wont_exit);
|
||||
|
||||
auto_vec<edge> remove_edges;
|
||||
|
@ -604,8 +603,6 @@ unroll_loop_constant_iterations (struct loop *loop)
|
|||
free_opt_info (opt_info);
|
||||
}
|
||||
|
||||
free (wont_exit);
|
||||
|
||||
if (exit_at_end)
|
||||
{
|
||||
basic_block exit_block = get_bb_copy (desc->in_edge->src);
|
||||
|
@ -861,7 +858,6 @@ unroll_loop_runtime_iterations (struct loop *loop)
|
|||
rtx_insn *init_code, *branch_code;
|
||||
unsigned i, j, p;
|
||||
basic_block preheader, *body, swtch, ezc_swtch;
|
||||
sbitmap wont_exit;
|
||||
int may_exit_copy;
|
||||
unsigned n_peel;
|
||||
edge e;
|
||||
|
@ -936,7 +932,7 @@ unroll_loop_runtime_iterations (struct loop *loop)
|
|||
|
||||
auto_vec<edge> remove_edges;
|
||||
|
||||
wont_exit = sbitmap_alloc (max_unroll + 2);
|
||||
auto_sbitmap wont_exit (max_unroll + 2);
|
||||
|
||||
/* Peel the first copy of loop body (almost always we must leave exit test
|
||||
here; the only exception is when we have extra zero check and the number
|
||||
|
@ -1035,8 +1031,6 @@ unroll_loop_runtime_iterations (struct loop *loop)
|
|||
free_opt_info (opt_info);
|
||||
}
|
||||
|
||||
free (wont_exit);
|
||||
|
||||
if (exit_at_end)
|
||||
{
|
||||
basic_block exit_block = get_bb_copy (desc->in_edge->src);
|
||||
|
@ -1201,7 +1195,6 @@ decide_unroll_stupid (struct loop *loop, int flags)
|
|||
static void
|
||||
unroll_loop_stupid (struct loop *loop)
|
||||
{
|
||||
sbitmap wont_exit;
|
||||
unsigned nunroll = loop->lpt_decision.times;
|
||||
struct niter_desc *desc = get_simple_loop_desc (loop);
|
||||
struct opt_info *opt_info = NULL;
|
||||
|
@ -1211,8 +1204,7 @@ unroll_loop_stupid (struct loop *loop)
|
|||
|| flag_variable_expansion_in_unroller)
|
||||
opt_info = analyze_insns_in_loop (loop);
|
||||
|
||||
|
||||
wont_exit = sbitmap_alloc (nunroll + 1);
|
||||
auto_sbitmap wont_exit (nunroll + 1);
|
||||
bitmap_clear (wont_exit);
|
||||
opt_info_start_duplication (opt_info);
|
||||
|
||||
|
@ -1231,8 +1223,6 @@ unroll_loop_stupid (struct loop *loop)
|
|||
free_opt_info (opt_info);
|
||||
}
|
||||
|
||||
free (wont_exit);
|
||||
|
||||
if (desc->simple_p)
|
||||
{
|
||||
/* We indeed may get here provided that there are nontrivial assumptions
|
||||
|
|
|
@ -1507,7 +1507,6 @@ decompose_multiword_subregs (bool decompose_copies)
|
|||
bitmap_and_compl_into (decomposable_context, non_decomposable_context);
|
||||
if (!bitmap_empty_p (decomposable_context))
|
||||
{
|
||||
sbitmap sub_blocks;
|
||||
unsigned int i;
|
||||
sbitmap_iterator sbi;
|
||||
bitmap_iterator iter;
|
||||
|
@ -1515,7 +1514,7 @@ decompose_multiword_subregs (bool decompose_copies)
|
|||
|
||||
propagate_pseudo_copies ();
|
||||
|
||||
sub_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
|
||||
auto_sbitmap sub_blocks (last_basic_block_for_fn (cfun));
|
||||
bitmap_clear (sub_blocks);
|
||||
|
||||
EXECUTE_IF_SET_IN_BITMAP (decomposable_context, 0, regno, iter)
|
||||
|
@ -1643,8 +1642,6 @@ decompose_multiword_subregs (bool decompose_copies)
|
|||
insn = NEXT_INSN (insn);
|
||||
}
|
||||
}
|
||||
|
||||
sbitmap_free (sub_blocks);
|
||||
}
|
||||
|
||||
{
|
||||
|
|
|
@ -1015,12 +1015,11 @@ remove_some_program_points_and_update_live_ranges (void)
|
|||
int n, max_regno;
|
||||
int *map;
|
||||
lra_live_range_t r, prev_r, next_r;
|
||||
sbitmap born_or_dead, born, dead;
|
||||
sbitmap_iterator sbi;
|
||||
bool born_p, dead_p, prev_born_p, prev_dead_p;
|
||||
|
||||
born = sbitmap_alloc (lra_live_max_point);
|
||||
dead = sbitmap_alloc (lra_live_max_point);
|
||||
auto_sbitmap born (lra_live_max_point);
|
||||
auto_sbitmap dead (lra_live_max_point);
|
||||
bitmap_clear (born);
|
||||
bitmap_clear (dead);
|
||||
max_regno = max_reg_num ();
|
||||
|
@ -1033,7 +1032,7 @@ remove_some_program_points_and_update_live_ranges (void)
|
|||
bitmap_set_bit (dead, r->finish);
|
||||
}
|
||||
}
|
||||
born_or_dead = sbitmap_alloc (lra_live_max_point);
|
||||
auto_sbitmap born_or_dead (lra_live_max_point);
|
||||
bitmap_ior (born_or_dead, born, dead);
|
||||
map = XCNEWVEC (int, lra_live_max_point);
|
||||
n = -1;
|
||||
|
@ -1056,9 +1055,6 @@ remove_some_program_points_and_update_live_ranges (void)
|
|||
prev_born_p = born_p;
|
||||
prev_dead_p = dead_p;
|
||||
}
|
||||
sbitmap_free (born_or_dead);
|
||||
sbitmap_free (born);
|
||||
sbitmap_free (dead);
|
||||
n++;
|
||||
if (lra_dump_file != NULL)
|
||||
fprintf (lra_dump_file, "Compressing live ranges: from %d to %d - %d%%\n",
|
||||
|
|
|
@ -2413,11 +2413,9 @@ lra (FILE *f)
|
|||
/* We've possibly turned single trapping insn into multiple ones. */
|
||||
if (cfun->can_throw_non_call_exceptions)
|
||||
{
|
||||
sbitmap blocks;
|
||||
blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
|
||||
auto_sbitmap blocks (last_basic_block_for_fn (cfun));
|
||||
bitmap_ones (blocks);
|
||||
find_many_sub_basic_blocks (blocks);
|
||||
sbitmap_free (blocks);
|
||||
}
|
||||
|
||||
if (inserted_p)
|
||||
|
|
|
@ -682,7 +682,6 @@ schedule_reg_moves (partial_schedule_ptr ps)
|
|||
rtx prev_reg, old_reg;
|
||||
int first_move;
|
||||
int distances[2];
|
||||
sbitmap must_follow;
|
||||
sbitmap distance1_uses;
|
||||
rtx set = single_set (u->insn);
|
||||
|
||||
|
@ -792,12 +791,11 @@ schedule_reg_moves (partial_schedule_ptr ps)
|
|||
}
|
||||
}
|
||||
|
||||
must_follow = sbitmap_alloc (first_move + nreg_moves);
|
||||
auto_sbitmap must_follow (first_move + nreg_moves);
|
||||
for (i_reg_move = 0; i_reg_move < nreg_moves; i_reg_move++)
|
||||
if (!schedule_reg_move (ps, first_move + i_reg_move,
|
||||
distance1_uses, must_follow))
|
||||
break;
|
||||
sbitmap_free (must_follow);
|
||||
if (distance1_uses)
|
||||
sbitmap_free (distance1_uses);
|
||||
if (i_reg_move < nreg_moves)
|
||||
|
@ -927,7 +925,6 @@ static bool
|
|||
optimize_sc (partial_schedule_ptr ps, ddg_ptr g)
|
||||
{
|
||||
int amount = PS_MIN_CYCLE (ps);
|
||||
sbitmap sched_nodes = sbitmap_alloc (g->num_nodes);
|
||||
int start, end, step;
|
||||
int ii = ps->ii;
|
||||
bool ok = false;
|
||||
|
@ -944,8 +941,7 @@ optimize_sc (partial_schedule_ptr ps, ddg_ptr g)
|
|||
if (dump_file)
|
||||
fprintf (dump_file, "SMS SC already optimized.\n");
|
||||
|
||||
ok = false;
|
||||
goto clear;
|
||||
return false;
|
||||
}
|
||||
|
||||
if (dump_file)
|
||||
|
@ -967,11 +963,9 @@ optimize_sc (partial_schedule_ptr ps, ddg_ptr g)
|
|||
}
|
||||
|
||||
if (SMODULO (SCHED_TIME (g->closing_branch->cuid), ii) == ii - 1)
|
||||
{
|
||||
ok = true;
|
||||
goto clear;
|
||||
}
|
||||
return true;
|
||||
|
||||
auto_sbitmap sched_nodes (g->num_nodes);
|
||||
bitmap_ones (sched_nodes);
|
||||
|
||||
/* Calculate the new placement of the branch. It should be in row
|
||||
|
@ -984,7 +978,7 @@ optimize_sc (partial_schedule_ptr ps, ddg_ptr g)
|
|||
int branch_cycle = SCHED_TIME (g->closing_branch->cuid);
|
||||
int row = SMODULO (branch_cycle, ps->ii);
|
||||
int num_splits = 0;
|
||||
sbitmap must_precede, must_follow, tmp_precede, tmp_follow;
|
||||
sbitmap tmp_precede, tmp_follow;
|
||||
int min_cycle, c;
|
||||
|
||||
if (dump_file)
|
||||
|
@ -1000,11 +994,10 @@ optimize_sc (partial_schedule_ptr ps, ddg_ptr g)
|
|||
gcc_assert (c >= start);
|
||||
if (c >= end)
|
||||
{
|
||||
ok = false;
|
||||
if (dump_file)
|
||||
fprintf (dump_file,
|
||||
"SMS failed to schedule branch at cycle: %d\n", c);
|
||||
goto clear;
|
||||
return false;
|
||||
}
|
||||
}
|
||||
else
|
||||
|
@ -1017,13 +1010,12 @@ optimize_sc (partial_schedule_ptr ps, ddg_ptr g)
|
|||
if (dump_file)
|
||||
fprintf (dump_file,
|
||||
"SMS failed to schedule branch at cycle: %d\n", c);
|
||||
ok = false;
|
||||
goto clear;
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
must_precede = sbitmap_alloc (g->num_nodes);
|
||||
must_follow = sbitmap_alloc (g->num_nodes);
|
||||
auto_sbitmap must_precede (g->num_nodes);
|
||||
auto_sbitmap must_follow (g->num_nodes);
|
||||
|
||||
/* Try to schedule the branch is it's new cycle. */
|
||||
calculate_must_precede_follow (g->closing_branch, start, end,
|
||||
|
@ -1083,13 +1075,8 @@ optimize_sc (partial_schedule_ptr ps, ddg_ptr g)
|
|||
/* This might have been added to a new first stage. */
|
||||
if (PS_MIN_CYCLE (ps) < min_cycle)
|
||||
reset_sched_times (ps, 0);
|
||||
|
||||
free (must_precede);
|
||||
free (must_follow);
|
||||
}
|
||||
|
||||
clear:
|
||||
free (sched_nodes);
|
||||
return ok;
|
||||
}
|
||||
|
||||
|
@ -1866,8 +1853,8 @@ get_sched_window (partial_schedule_ptr ps, ddg_node_ptr u_node,
|
|||
int start, step, end;
|
||||
int early_start, late_start;
|
||||
ddg_edge_ptr e;
|
||||
sbitmap psp = sbitmap_alloc (ps->g->num_nodes);
|
||||
sbitmap pss = sbitmap_alloc (ps->g->num_nodes);
|
||||
auto_sbitmap psp (ps->g->num_nodes);
|
||||
auto_sbitmap pss (ps->g->num_nodes);
|
||||
sbitmap u_node_preds = NODE_PREDECESSORS (u_node);
|
||||
sbitmap u_node_succs = NODE_SUCCESSORS (u_node);
|
||||
int psp_not_empty;
|
||||
|
@ -1996,8 +1983,6 @@ get_sched_window (partial_schedule_ptr ps, ddg_node_ptr u_node,
|
|||
*start_p = start;
|
||||
*step_p = step;
|
||||
*end_p = end;
|
||||
sbitmap_free (psp);
|
||||
sbitmap_free (pss);
|
||||
|
||||
if ((start >= end && step == 1) || (start <= end && step == -1))
|
||||
{
|
||||
|
@ -2146,10 +2131,10 @@ sms_schedule_by_order (ddg_ptr g, int mii, int maxii, int *nodes_order)
|
|||
int flush_and_start_over = true;
|
||||
int num_nodes = g->num_nodes;
|
||||
int start, end, step; /* Place together into one struct? */
|
||||
sbitmap sched_nodes = sbitmap_alloc (num_nodes);
|
||||
sbitmap must_precede = sbitmap_alloc (num_nodes);
|
||||
sbitmap must_follow = sbitmap_alloc (num_nodes);
|
||||
sbitmap tobe_scheduled = sbitmap_alloc (num_nodes);
|
||||
auto_sbitmap sched_nodes (num_nodes);
|
||||
auto_sbitmap must_precede (num_nodes);
|
||||
auto_sbitmap must_follow (num_nodes);
|
||||
auto_sbitmap tobe_scheduled (num_nodes);
|
||||
|
||||
partial_schedule_ptr ps = create_partial_schedule (ii, g, DFA_HISTORY);
|
||||
|
||||
|
@ -2260,11 +2245,6 @@ sms_schedule_by_order (ddg_ptr g, int mii, int maxii, int *nodes_order)
|
|||
else
|
||||
gcc_assert (bitmap_equal_p (tobe_scheduled, sched_nodes));
|
||||
|
||||
sbitmap_free (sched_nodes);
|
||||
sbitmap_free (must_precede);
|
||||
sbitmap_free (must_follow);
|
||||
sbitmap_free (tobe_scheduled);
|
||||
|
||||
return ps;
|
||||
}
|
||||
|
||||
|
@ -2474,7 +2454,7 @@ static void
|
|||
check_nodes_order (int *node_order, int num_nodes)
|
||||
{
|
||||
int i;
|
||||
sbitmap tmp = sbitmap_alloc (num_nodes);
|
||||
auto_sbitmap tmp (num_nodes);
|
||||
|
||||
bitmap_clear (tmp);
|
||||
|
||||
|
@ -2494,8 +2474,6 @@ check_nodes_order (int *node_order, int num_nodes)
|
|||
|
||||
if (dump_file)
|
||||
fprintf (dump_file, "\n");
|
||||
|
||||
sbitmap_free (tmp);
|
||||
}
|
||||
|
||||
/* Order the nodes of G for scheduling and pass the result in
|
||||
|
@ -2539,10 +2517,10 @@ order_nodes_of_sccs (ddg_all_sccs_ptr all_sccs, int * node_order)
|
|||
int i, pos = 0;
|
||||
ddg_ptr g = all_sccs->ddg;
|
||||
int num_nodes = g->num_nodes;
|
||||
sbitmap prev_sccs = sbitmap_alloc (num_nodes);
|
||||
sbitmap on_path = sbitmap_alloc (num_nodes);
|
||||
sbitmap tmp = sbitmap_alloc (num_nodes);
|
||||
sbitmap ones = sbitmap_alloc (num_nodes);
|
||||
auto_sbitmap prev_sccs (num_nodes);
|
||||
auto_sbitmap on_path (num_nodes);
|
||||
auto_sbitmap tmp (num_nodes);
|
||||
auto_sbitmap ones (num_nodes);
|
||||
|
||||
bitmap_clear (prev_sccs);
|
||||
bitmap_ones (ones);
|
||||
|
@ -2575,10 +2553,6 @@ order_nodes_of_sccs (ddg_all_sccs_ptr all_sccs, int * node_order)
|
|||
bitmap_and_compl (tmp, ones, prev_sccs);
|
||||
pos = order_nodes_in_scc (g, prev_sccs, tmp, node_order, pos);
|
||||
}
|
||||
sbitmap_free (prev_sccs);
|
||||
sbitmap_free (on_path);
|
||||
sbitmap_free (tmp);
|
||||
sbitmap_free (ones);
|
||||
}
|
||||
|
||||
/* MII is needed if we consider backarcs (that do not close recursive cycles). */
|
||||
|
@ -2739,11 +2713,11 @@ order_nodes_in_scc (ddg_ptr g, sbitmap nodes_ordered, sbitmap scc,
|
|||
{
|
||||
enum sms_direction dir;
|
||||
int num_nodes = g->num_nodes;
|
||||
sbitmap workset = sbitmap_alloc (num_nodes);
|
||||
sbitmap tmp = sbitmap_alloc (num_nodes);
|
||||
auto_sbitmap workset (num_nodes);
|
||||
auto_sbitmap tmp (num_nodes);
|
||||
sbitmap zero_bitmap = sbitmap_alloc (num_nodes);
|
||||
sbitmap predecessors = sbitmap_alloc (num_nodes);
|
||||
sbitmap successors = sbitmap_alloc (num_nodes);
|
||||
auto_sbitmap predecessors (num_nodes);
|
||||
auto_sbitmap successors (num_nodes);
|
||||
|
||||
bitmap_clear (predecessors);
|
||||
find_predecessors (predecessors, g, nodes_ordered);
|
||||
|
@ -2823,11 +2797,7 @@ order_nodes_in_scc (ddg_ptr g, sbitmap nodes_ordered, sbitmap scc,
|
|||
bitmap_and (workset, successors, scc);
|
||||
}
|
||||
}
|
||||
sbitmap_free (tmp);
|
||||
sbitmap_free (workset);
|
||||
sbitmap_free (zero_bitmap);
|
||||
sbitmap_free (predecessors);
|
||||
sbitmap_free (successors);
|
||||
return pos;
|
||||
}
|
||||
|
||||
|
|
|
@ -2935,11 +2935,10 @@ split_insn (rtx_insn *insn)
|
|||
void
|
||||
split_all_insns (void)
|
||||
{
|
||||
sbitmap blocks;
|
||||
bool changed;
|
||||
basic_block bb;
|
||||
|
||||
blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
|
||||
auto_sbitmap blocks (last_basic_block_for_fn (cfun));
|
||||
bitmap_clear (blocks);
|
||||
changed = false;
|
||||
|
||||
|
@ -2989,8 +2988,6 @@ split_all_insns (void)
|
|||
find_many_sub_basic_blocks (blocks);
|
||||
|
||||
checking_verify_flow_info ();
|
||||
|
||||
sbitmap_free (blocks);
|
||||
}
|
||||
|
||||
/* Same as split_all_insns, but do not expect CFG to be available.
|
||||
|
|
|
@ -1238,12 +1238,11 @@ pass_cprop_hardreg::execute (function *fun)
|
|||
{
|
||||
struct value_data *all_vd;
|
||||
basic_block bb;
|
||||
sbitmap visited;
|
||||
bool analyze_called = false;
|
||||
|
||||
all_vd = XNEWVEC (struct value_data, last_basic_block_for_fn (fun));
|
||||
|
||||
visited = sbitmap_alloc (last_basic_block_for_fn (fun));
|
||||
auto_sbitmap visited (last_basic_block_for_fn (fun));
|
||||
bitmap_clear (visited);
|
||||
|
||||
FOR_EACH_BB_FN (bb, fun)
|
||||
|
@ -1308,7 +1307,6 @@ pass_cprop_hardreg::execute (function *fun)
|
|||
queued_debug_insn_change_pool.release ();
|
||||
}
|
||||
|
||||
sbitmap_free (visited);
|
||||
free (all_vd);
|
||||
return 0;
|
||||
}
|
||||
|
|
|
@ -1280,11 +1280,9 @@ reload (rtx_insn *first, int global)
|
|||
/* We've possibly turned single trapping insn into multiple ones. */
|
||||
if (cfun->can_throw_non_call_exceptions)
|
||||
{
|
||||
sbitmap blocks;
|
||||
blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
|
||||
auto_sbitmap blocks (last_basic_block_for_fn (cfun));
|
||||
bitmap_ones (blocks);
|
||||
find_many_sub_basic_blocks (blocks);
|
||||
sbitmap_free (blocks);
|
||||
}
|
||||
|
||||
if (inserted)
|
||||
|
|
|
@ -623,18 +623,6 @@ haifa_find_rgns (void)
|
|||
int too_large_failure;
|
||||
basic_block bb;
|
||||
|
||||
/* Note if a block is a natural loop header. */
|
||||
sbitmap header;
|
||||
|
||||
/* Note if a block is a natural inner loop header. */
|
||||
sbitmap inner;
|
||||
|
||||
/* Note if a block is in the block queue. */
|
||||
sbitmap in_queue;
|
||||
|
||||
/* Note if a block is in the block queue. */
|
||||
sbitmap in_stack;
|
||||
|
||||
/* Perform a DFS traversal of the cfg. Identify loop headers, inner loops
|
||||
and a mapping from block to its loop header (if the block is contained
|
||||
in a loop, else -1).
|
||||
|
@ -649,16 +637,20 @@ haifa_find_rgns (void)
|
|||
dfs_nr = XCNEWVEC (int, last_basic_block_for_fn (cfun));
|
||||
stack = XNEWVEC (edge_iterator, n_edges_for_fn (cfun));
|
||||
|
||||
inner = sbitmap_alloc (last_basic_block_for_fn (cfun));
|
||||
/* Note if a block is a natural inner loop header. */
|
||||
auto_sbitmap inner (last_basic_block_for_fn (cfun));
|
||||
bitmap_ones (inner);
|
||||
|
||||
header = sbitmap_alloc (last_basic_block_for_fn (cfun));
|
||||
/* Note if a block is a natural loop header. */
|
||||
auto_sbitmap header (last_basic_block_for_fn (cfun));
|
||||
bitmap_clear (header);
|
||||
|
||||
in_queue = sbitmap_alloc (last_basic_block_for_fn (cfun));
|
||||
/* Note if a block is in the block queue. */
|
||||
auto_sbitmap in_queue (last_basic_block_for_fn (cfun));
|
||||
bitmap_clear (in_queue);
|
||||
|
||||
in_stack = sbitmap_alloc (last_basic_block_for_fn (cfun));
|
||||
/* Note if a block is in the block queue. */
|
||||
auto_sbitmap in_stack (last_basic_block_for_fn (cfun));
|
||||
bitmap_clear (in_stack);
|
||||
|
||||
for (i = 0; i < last_basic_block_for_fn (cfun); i++)
|
||||
|
@ -1070,10 +1062,6 @@ haifa_find_rgns (void)
|
|||
free (max_hdr);
|
||||
free (degree);
|
||||
free (stack);
|
||||
sbitmap_free (header);
|
||||
sbitmap_free (inner);
|
||||
sbitmap_free (in_queue);
|
||||
sbitmap_free (in_stack);
|
||||
}
|
||||
|
||||
|
||||
|
@ -1477,12 +1465,11 @@ compute_dom_prob_ps (int bb)
|
|||
static void
|
||||
split_edges (int bb_src, int bb_trg, edgelst *bl)
|
||||
{
|
||||
sbitmap src = sbitmap_alloc (SBITMAP_SIZE (pot_split[bb_src]));
|
||||
auto_sbitmap src (SBITMAP_SIZE (pot_split[bb_src]));
|
||||
bitmap_copy (src, pot_split[bb_src]);
|
||||
|
||||
bitmap_and_compl (src, src, pot_split[bb_trg]);
|
||||
extract_edgelst (src, bl);
|
||||
sbitmap_free (src);
|
||||
}
|
||||
|
||||
/* Find the valid candidate-source-blocks for the target block TRG, compute
|
||||
|
@ -1496,7 +1483,6 @@ compute_trg_info (int trg)
|
|||
edgelst el = { NULL, 0 };
|
||||
int i, j, k, update_idx;
|
||||
basic_block block;
|
||||
sbitmap visited;
|
||||
edge_iterator ei;
|
||||
edge e;
|
||||
|
||||
|
@ -1519,7 +1505,7 @@ compute_trg_info (int trg)
|
|||
sp->is_speculative = 0;
|
||||
sp->src_prob = REG_BR_PROB_BASE;
|
||||
|
||||
visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
|
||||
auto_sbitmap visited (last_basic_block_for_fn (cfun));
|
||||
|
||||
for (i = trg + 1; i < current_nr_blocks; i++)
|
||||
{
|
||||
|
@ -1595,8 +1581,6 @@ compute_trg_info (int trg)
|
|||
sp->src_prob = 0;
|
||||
}
|
||||
}
|
||||
|
||||
sbitmap_free (visited);
|
||||
}
|
||||
|
||||
/* Free the computed target info. */
|
||||
|
|
|
@ -6764,11 +6764,10 @@ init_seqno_1 (basic_block bb, sbitmap visited_bbs, bitmap blocks_to_reschedule)
|
|||
static int
|
||||
init_seqno (bitmap blocks_to_reschedule, basic_block from)
|
||||
{
|
||||
sbitmap visited_bbs;
|
||||
bitmap_iterator bi;
|
||||
unsigned bbi;
|
||||
|
||||
visited_bbs = sbitmap_alloc (current_nr_blocks);
|
||||
auto_sbitmap visited_bbs (current_nr_blocks);
|
||||
|
||||
if (blocks_to_reschedule)
|
||||
{
|
||||
|
@ -6793,7 +6792,6 @@ init_seqno (bitmap blocks_to_reschedule, basic_block from)
|
|||
removed by the call to purge_empty_blocks in sel_sched_region_1). */
|
||||
gcc_assert (cur_seqno >= 0);
|
||||
|
||||
sbitmap_free (visited_bbs);
|
||||
return sched_max_luid - 1;
|
||||
}
|
||||
|
||||
|
|
|
@ -825,7 +825,7 @@ remove_reachable_equiv_notes (basic_block bb, struct st_expr *smexpr)
|
|||
edge_iterator *stack, ei;
|
||||
int sp;
|
||||
edge act;
|
||||
sbitmap visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
|
||||
auto_sbitmap visited (last_basic_block_for_fn (cfun));
|
||||
rtx note;
|
||||
rtx_insn *insn;
|
||||
rtx mem = smexpr->pattern;
|
||||
|
@ -844,7 +844,6 @@ remove_reachable_equiv_notes (basic_block bb, struct st_expr *smexpr)
|
|||
if (!sp)
|
||||
{
|
||||
free (stack);
|
||||
sbitmap_free (visited);
|
||||
return;
|
||||
}
|
||||
act = ei_edge (stack[--sp]);
|
||||
|
|
|
@ -3322,12 +3322,11 @@ update_ssa (unsigned update_flags)
|
|||
will grow while we are traversing it (but it will not
|
||||
gain any new members). Copy OLD_SSA_NAMES to a temporary
|
||||
for traversal. */
|
||||
sbitmap tmp = sbitmap_alloc (SBITMAP_SIZE (old_ssa_names));
|
||||
auto_sbitmap tmp (SBITMAP_SIZE (old_ssa_names));
|
||||
bitmap_copy (tmp, old_ssa_names);
|
||||
EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, sbi)
|
||||
insert_updated_phi_nodes_for (ssa_name (i), dfs, blocks_to_update,
|
||||
update_flags);
|
||||
sbitmap_free (tmp);
|
||||
}
|
||||
|
||||
symbols_to_rename.qsort (insert_updated_phi_nodes_compare_uids);
|
||||
|
|
|
@ -975,7 +975,7 @@ live_worklist (tree_live_info_p live)
|
|||
{
|
||||
unsigned b;
|
||||
basic_block bb;
|
||||
sbitmap visited = sbitmap_alloc (last_basic_block_for_fn (cfun) + 1);
|
||||
auto_sbitmap visited (last_basic_block_for_fn (cfun) + 1);
|
||||
|
||||
bitmap_clear (visited);
|
||||
|
||||
|
@ -990,8 +990,6 @@ live_worklist (tree_live_info_p live)
|
|||
b = *--(live->stack_top);
|
||||
loe_visit_block (live, BASIC_BLOCK_FOR_FN (cfun, b), visited);
|
||||
}
|
||||
|
||||
sbitmap_free (visited);
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -2409,10 +2409,10 @@ fill_always_executed_in_1 (struct loop *loop, sbitmap contains_call)
|
|||
static void
|
||||
fill_always_executed_in (void)
|
||||
{
|
||||
sbitmap contains_call = sbitmap_alloc (last_basic_block_for_fn (cfun));
|
||||
basic_block bb;
|
||||
struct loop *loop;
|
||||
|
||||
auto_sbitmap contains_call (last_basic_block_for_fn (cfun));
|
||||
bitmap_clear (contains_call);
|
||||
FOR_EACH_BB_FN (bb, cfun)
|
||||
{
|
||||
|
@ -2429,8 +2429,6 @@ fill_always_executed_in (void)
|
|||
|
||||
for (loop = current_loops->tree_root->inner; loop; loop = loop->next)
|
||||
fill_always_executed_in_1 (loop, contains_call);
|
||||
|
||||
sbitmap_free (contains_call);
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -743,7 +743,6 @@ try_unroll_loop_completely (struct loop *loop,
|
|||
|
||||
if (n_unroll)
|
||||
{
|
||||
sbitmap wont_exit;
|
||||
bool large;
|
||||
if (ul == UL_SINGLE_ITER)
|
||||
return false;
|
||||
|
@ -860,7 +859,7 @@ try_unroll_loop_completely (struct loop *loop,
|
|||
"loop turned into non-loop; it never loops.\n");
|
||||
|
||||
initialize_original_copy_tables ();
|
||||
wont_exit = sbitmap_alloc (n_unroll + 1);
|
||||
auto_sbitmap wont_exit (n_unroll + 1);
|
||||
if (exit && niter
|
||||
&& TREE_CODE (niter) == INTEGER_CST
|
||||
&& wi::leu_p (n_unroll, wi::to_widest (niter)))
|
||||
|
@ -883,13 +882,11 @@ try_unroll_loop_completely (struct loop *loop,
|
|||
| DLTHE_FLAG_COMPLETTE_PEEL))
|
||||
{
|
||||
free_original_copy_tables ();
|
||||
free (wont_exit);
|
||||
if (dump_file && (dump_flags & TDF_DETAILS))
|
||||
fprintf (dump_file, "Failed to duplicate the loop\n");
|
||||
return false;
|
||||
}
|
||||
|
||||
free (wont_exit);
|
||||
free_original_copy_tables ();
|
||||
}
|
||||
|
||||
|
@ -967,7 +964,6 @@ try_peel_loop (struct loop *loop,
|
|||
HOST_WIDE_INT npeel;
|
||||
struct loop_size size;
|
||||
int peeled_size;
|
||||
sbitmap wont_exit;
|
||||
|
||||
if (!flag_peel_loops || PARAM_VALUE (PARAM_MAX_PEEL_TIMES) <= 0
|
||||
|| !peeled_loops)
|
||||
|
@ -1042,7 +1038,7 @@ try_peel_loop (struct loop *loop,
|
|||
|
||||
/* Duplicate possibly eliminating the exits. */
|
||||
initialize_original_copy_tables ();
|
||||
wont_exit = sbitmap_alloc (npeel + 1);
|
||||
auto_sbitmap wont_exit (npeel + 1);
|
||||
if (exit && niter
|
||||
&& TREE_CODE (niter) == INTEGER_CST
|
||||
&& wi::leu_p (npeel, wi::to_widest (niter)))
|
||||
|
@ -1061,10 +1057,8 @@ try_peel_loop (struct loop *loop,
|
|||
DLTHE_FLAG_UPDATE_FREQ))
|
||||
{
|
||||
free_original_copy_tables ();
|
||||
free (wont_exit);
|
||||
return false;
|
||||
}
|
||||
free (wont_exit);
|
||||
free_original_copy_tables ();
|
||||
if (dump_file && (dump_flags & TDF_DETAILS))
|
||||
{
|
||||
|
|
|
@ -1173,7 +1173,6 @@ tree_transform_and_unroll_loop (struct loop *loop, unsigned factor,
|
|||
unsigned est_niter, prob_entry, scale_unrolled, scale_rest, freq_e, freq_h;
|
||||
unsigned new_est_niter, i, prob;
|
||||
unsigned irr = loop_preheader_edge (loop)->flags & EDGE_IRREDUCIBLE_LOOP;
|
||||
sbitmap wont_exit;
|
||||
auto_vec<edge> to_remove;
|
||||
|
||||
est_niter = expected_loop_iterations (loop);
|
||||
|
@ -1307,14 +1306,13 @@ tree_transform_and_unroll_loop (struct loop *loop, unsigned factor,
|
|||
|
||||
/* Unroll the loop and remove the exits in all iterations except for the
|
||||
last one. */
|
||||
wont_exit = sbitmap_alloc (factor);
|
||||
auto_sbitmap wont_exit (factor);
|
||||
bitmap_ones (wont_exit);
|
||||
bitmap_clear_bit (wont_exit, factor - 1);
|
||||
|
||||
ok = gimple_duplicate_loop_to_header_edge
|
||||
(loop, loop_latch_edge (loop), factor - 1,
|
||||
wont_exit, new_exit, &to_remove, DLTHE_FLAG_UPDATE_FREQ);
|
||||
free (wont_exit);
|
||||
gcc_assert (ok);
|
||||
|
||||
FOR_EACH_VEC_ELT (to_remove, i, e)
|
||||
|
|
|
@ -2359,7 +2359,7 @@ compute_antic (void)
|
|||
int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
|
||||
int postorder_num = inverted_post_order_compute (postorder);
|
||||
|
||||
sbitmap worklist = sbitmap_alloc (last_basic_block_for_fn (cfun) + 1);
|
||||
auto_sbitmap worklist (last_basic_block_for_fn (cfun) + 1);
|
||||
bitmap_ones (worklist);
|
||||
while (changed)
|
||||
{
|
||||
|
@ -2409,7 +2409,6 @@ compute_antic (void)
|
|||
}
|
||||
|
||||
sbitmap_free (has_abnormal_preds);
|
||||
sbitmap_free (worklist);
|
||||
free (postorder);
|
||||
}
|
||||
|
||||
|
|
|
@ -1458,7 +1458,6 @@ undistribute_ops_list (enum tree_code opcode,
|
|||
unsigned int length = ops->length ();
|
||||
operand_entry *oe1;
|
||||
unsigned i, j;
|
||||
sbitmap candidates, candidates2;
|
||||
unsigned nr_candidates, nr_candidates2;
|
||||
sbitmap_iterator sbi0;
|
||||
vec<operand_entry *> *subops;
|
||||
|
@ -1470,7 +1469,7 @@ undistribute_ops_list (enum tree_code opcode,
|
|||
return false;
|
||||
|
||||
/* Build a list of candidates to process. */
|
||||
candidates = sbitmap_alloc (length);
|
||||
auto_sbitmap candidates (length);
|
||||
bitmap_clear (candidates);
|
||||
nr_candidates = 0;
|
||||
FOR_EACH_VEC_ELT (*ops, i, oe1)
|
||||
|
@ -1494,10 +1493,7 @@ undistribute_ops_list (enum tree_code opcode,
|
|||
}
|
||||
|
||||
if (nr_candidates < 2)
|
||||
{
|
||||
sbitmap_free (candidates);
|
||||
return false;
|
||||
}
|
||||
return false;
|
||||
|
||||
if (dump_file && (dump_flags & TDF_DETAILS))
|
||||
{
|
||||
|
@ -1569,7 +1565,7 @@ undistribute_ops_list (enum tree_code opcode,
|
|||
}
|
||||
|
||||
/* Process the (operand, code) pairs in order of most occurrence. */
|
||||
candidates2 = sbitmap_alloc (length);
|
||||
auto_sbitmap candidates2 (length);
|
||||
while (!cvec.is_empty ())
|
||||
{
|
||||
oecount *c = &cvec.last ();
|
||||
|
@ -1665,8 +1661,6 @@ undistribute_ops_list (enum tree_code opcode,
|
|||
subops[i].release ();
|
||||
free (subops);
|
||||
cvec.release ();
|
||||
sbitmap_free (candidates);
|
||||
sbitmap_free (candidates2);
|
||||
|
||||
return changed;
|
||||
}
|
||||
|
|
|
@ -56,7 +56,6 @@ reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
|
|||
auto_vec<edge, 10> stack;
|
||||
edge e;
|
||||
edge_iterator ei;
|
||||
sbitmap visited;
|
||||
bool ret;
|
||||
|
||||
if (va_arg_bb == va_start_bb)
|
||||
|
@ -65,7 +64,7 @@ reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
|
|||
if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
|
||||
return false;
|
||||
|
||||
visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
|
||||
auto_sbitmap visited (last_basic_block_for_fn (cfun));
|
||||
bitmap_clear (visited);
|
||||
ret = true;
|
||||
|
||||
|
@ -105,7 +104,6 @@ reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
|
|||
}
|
||||
}
|
||||
|
||||
sbitmap_free (visited);
|
||||
return ret;
|
||||
}
|
||||
|
||||
|
|
|
@ -1274,7 +1274,6 @@ vect_attempt_slp_rearrange_stmts (slp_instance slp_instn)
|
|||
{
|
||||
unsigned int group_size = SLP_INSTANCE_GROUP_SIZE (slp_instn);
|
||||
unsigned int i, j;
|
||||
sbitmap load_index;
|
||||
unsigned int lidx;
|
||||
slp_tree node, load;
|
||||
|
||||
|
@ -1294,29 +1293,20 @@ vect_attempt_slp_rearrange_stmts (slp_instance slp_instn)
|
|||
|
||||
/* Check that the loads in the first sequence are different and there
|
||||
are no gaps between them. */
|
||||
load_index = sbitmap_alloc (group_size);
|
||||
auto_sbitmap load_index (group_size);
|
||||
bitmap_clear (load_index);
|
||||
FOR_EACH_VEC_ELT (node->load_permutation, i, lidx)
|
||||
{
|
||||
if (lidx >= group_size)
|
||||
{
|
||||
sbitmap_free (load_index);
|
||||
return false;
|
||||
}
|
||||
return false;
|
||||
if (bitmap_bit_p (load_index, lidx))
|
||||
{
|
||||
sbitmap_free (load_index);
|
||||
return false;
|
||||
}
|
||||
return false;
|
||||
|
||||
bitmap_set_bit (load_index, lidx);
|
||||
}
|
||||
for (i = 0; i < group_size; i++)
|
||||
if (!bitmap_bit_p (load_index, i))
|
||||
{
|
||||
sbitmap_free (load_index);
|
||||
return false;
|
||||
}
|
||||
sbitmap_free (load_index);
|
||||
return false;
|
||||
|
||||
/* This permutation is valid for reduction. Since the order of the
|
||||
statements in the nodes is not important unless they are memory
|
||||
|
|
|
@ -6996,7 +6996,7 @@ vt_find_locations (void)
|
|||
{
|
||||
bb_heap_t *worklist = new bb_heap_t (LONG_MIN);
|
||||
bb_heap_t *pending = new bb_heap_t (LONG_MIN);
|
||||
sbitmap visited, in_worklist, in_pending;
|
||||
sbitmap in_worklist, in_pending;
|
||||
basic_block bb;
|
||||
edge e;
|
||||
int *bb_order;
|
||||
|
@ -7016,7 +7016,7 @@ vt_find_locations (void)
|
|||
bb_order[rc_order[i]] = i;
|
||||
free (rc_order);
|
||||
|
||||
visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
|
||||
auto_sbitmap visited (last_basic_block_for_fn (cfun));
|
||||
in_worklist = sbitmap_alloc (last_basic_block_for_fn (cfun));
|
||||
in_pending = sbitmap_alloc (last_basic_block_for_fn (cfun));
|
||||
bitmap_clear (in_worklist);
|
||||
|
@ -7185,7 +7185,6 @@ vt_find_locations (void)
|
|||
free (bb_order);
|
||||
delete worklist;
|
||||
delete pending;
|
||||
sbitmap_free (visited);
|
||||
sbitmap_free (in_worklist);
|
||||
sbitmap_free (in_pending);
|
||||
|
||||
|
|