ra.h (add_neighbor): Fix -Wc++-compat and/or -Wcast-qual warnings.
* ra.h (add_neighbor): Fix -Wc++-compat and/or -Wcast-qual
warnings.
* recog.c (check_asm_operands, validate_change_1): Likewise.
* reg-stack.c (check_asm_stack_operands, subst_asm_stack_regs,
subst_asm_stack_regs): Likewise.
* regclass.c (regclass, som_hash, som_eq, record_subregs_of_mode,
cannot_change_mode_set_regs, invalid_mode_change_p): Likewise.
* regmove.c (reg_is_remote_constant_p): Likewise.
* regrename.c (regrename_optimize, scan_rtx_reg,
kill_clobbered_value, kill_set_value, kill_autoinc_value):
Likewise.
* regstat.c (regstat_init_n_sets_and_refs, regstat_compute_ri,
regstat_compute_calls_crossed): Likewise.
* reload1.c (init_reload, new_insn_chain,
has_nonexceptional_receiver, reload, copy_reloads,
calculate_needs_all_insns, init_elim_table): Likewise.
* rtl-factoring.c (compute_rtx_cost, fill_hash_bucket): Likewise.
* rtl.c (shallow_copy_rtx_stat): Likewise.
* rtlanal.c (parms_set): Likewise.
* sbitmap.c (sbitmap_alloc, sbitmap_alloc_with_popcount,
sbitmap_resize, sbitmap_vector_alloc): Likewise.
* sched-ebb.c (earliest_block_with_similiar_load,
add_deps_for_risky_insns): Likewise.
* sched-rgn.c (find_rgns, gather_region_statistics, extend_rgns,
schedule_region): Likewise.
* see.c (eq_descriptor_pre_extension,
hash_descriptor_pre_extension, hash_del_pre_extension,
eq_descriptor_properties, hash_descriptor_properties,
hash_del_properties, see_seek_pre_extension_expr,
see_initialize_data_structures, see_print_register_properties,
see_print_pre_extension_expr, see_delete_merged_def_extension,
see_delete_unmerged_def_extension, see_emit_use_extension,
see_pre_delete_extension, see_map_extension, see_commit_changes,
see_analyze_merged_def_local_prop,
see_analyze_merged_def_local_prop,
see_analyze_unmerged_def_local_prop, see_analyze_use_local_prop,
see_set_prop_merged_def, see_set_prop_unmerged_def,
see_set_prop_unmerged_use, see_print_one_extension,
see_merge_one_use_extension, see_merge_one_def_extension,
see_store_reference_and_extension, see_update_uses_relevancy,
see_update_defs_relevancy): Likewise.
* statistics.c (hash_statistics_hash, hash_statistics_eq,
hash_statistics_free, curr_statistics_hash): Likewise.
* stmt.c (parse_output_constraint, decl_overlaps_hard_reg_set_p,
expand_asm_operands, expand_return, case_bit_test_cmp,
expand_case): Likewise.
* stor-layout.c (start_record_layout): Likewise.
* stringpool.c (ggc_alloc_string, gt_pch_n_S,
gt_pch_save_stringpool): Likewise.
* tree-data-ref.c (hash_stmt_vertex_info,
have_similar_memory_accesses_1, ref_base_address_1): Likewise.
* tree-ssa-phiopt.c (name_to_bb_hash): Likewise.

From-SVN: r137128
parent f0bbed4417
commit 1634b18f7b
22 changed files with 197 additions and 130 deletions
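The fixes below all follow one of two patterns. C converts void * to any object pointer implicitly, so p = xmalloc (n) is valid C but ill-formed C++, which is what -Wc++-compat reports; -Wcast-qual reports casts that discard a const qualifier. Most sites either add the explicit cast or switch to the libiberty allocation macros, which bundle the cast with a size computed from the element type. Below is a minimal sketch of the heap-macro idea, with simplified expansions standing in for the real definitions in libiberty.h (which wrap xmalloc/xcalloc/xrealloc, the abort-on-failure allocators) and a made-up element type for illustration:

    #include <stdlib.h>

    /* Simplified stand-ins for the libiberty macros used in this patch.  */
    #define XNEW(T)              ((T *) malloc (sizeof (T)))
    #define XNEWVEC(T, N)        ((T *) malloc (sizeof (T) * (N)))
    #define XCNEWVEC(T, N)       ((T *) calloc ((N), sizeof (T)))
    #define XRESIZEVEC(T, P, N)  ((T *) realloc ((void *) (P), sizeof (T) * (N)))

    struct reg_info_t { int refs, sets; };   /* illustrative element type */

    int
    main (void)
    {
      /* Before: valid C, but -Wc++-compat warns because C++ rejects the
         implicit void* -> struct reg_info_t* conversion:
         struct reg_info_t *v = malloc (10 * sizeof (struct reg_info_t));  */

      /* After: the macro supplies the cast and computes the size from the
         element type, so the two cannot drift apart.  */
      struct reg_info_t *v = XNEWVEC (struct reg_info_t, 10);
      v = XRESIZEVEC (struct reg_info_t, v, 20);
      free (v);
      return 0;
    }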
gcc/ChangeLog
@@ -1,3 +1,58 @@
+2008-06-25  Kaveh R. Ghazi  <ghazi@caip.rutgers.edu>
+
+        * ra.h (add_neighbor): Fix -Wc++-compat and/or -Wcast-qual
+        warnings.
+        * recog.c (check_asm_operands, validate_change_1): Likewise.
+        * reg-stack.c (check_asm_stack_operands, subst_asm_stack_regs,
+        subst_asm_stack_regs): Likewise.
+        * regclass.c (regclass, som_hash, som_eq, record_subregs_of_mode,
+        cannot_change_mode_set_regs, invalid_mode_change_p): Likewise.
+        * regmove.c (reg_is_remote_constant_p): Likewise.
+        * regrename.c (regrename_optimize, scan_rtx_reg,
+        kill_clobbered_value, kill_set_value, kill_autoinc_value):
+        Likewise.
+        * regstat.c (regstat_init_n_sets_and_refs, regstat_compute_ri,
+        regstat_compute_calls_crossed): Likewise.
+        * reload1.c (init_reload, new_insn_chain,
+        has_nonexceptional_receiver, reload, copy_reloads,
+        calculate_needs_all_insns, init_elim_table): Likewise.
+        * rtl-factoring.c (compute_rtx_cost, fill_hash_bucket): Likewise.
+        * rtl.c (shallow_copy_rtx_stat): Likewise.
+        * rtlanal.c (parms_set): Likewise.
+        * sbitmap.c (sbitmap_alloc, sbitmap_alloc_with_popcount,
+        sbitmap_resize, sbitmap_vector_alloc): Likewise.
+        * sched-ebb.c (earliest_block_with_similiar_load,
+        add_deps_for_risky_insns): Likewise.
+        * sched-rgn.c (find_rgns, gather_region_statistics, extend_rgns,
+        schedule_region): Likewise.
+        * see.c (eq_descriptor_pre_extension,
+        hash_descriptor_pre_extension, hash_del_pre_extension,
+        eq_descriptor_properties, hash_descriptor_properties,
+        hash_del_properties, see_seek_pre_extension_expr,
+        see_initialize_data_structures, see_print_register_properties,
+        see_print_pre_extension_expr, see_delete_merged_def_extension,
+        see_delete_unmerged_def_extension, see_emit_use_extension,
+        see_pre_delete_extension, see_map_extension, see_commit_changes,
+        see_analyze_merged_def_local_prop,
+        see_analyze_merged_def_local_prop,
+        see_analyze_unmerged_def_local_prop, see_analyze_use_local_prop,
+        see_set_prop_merged_def, see_set_prop_unmerged_def,
+        see_set_prop_unmerged_use, see_print_one_extension,
+        see_merge_one_use_extension, see_merge_one_def_extension,
+        see_store_reference_and_extension, see_update_uses_relevancy,
+        see_update_defs_relevancy): Likewise.
+        * statistics.c (hash_statistics_hash, hash_statistics_eq,
+        hash_statistics_free, curr_statistics_hash): Likewise.
+        * stmt.c (parse_output_constraint, decl_overlaps_hard_reg_set_p,
+        expand_asm_operands, expand_return, case_bit_test_cmp,
+        expand_case): Likewise.
+        * stor-layout.c (start_record_layout): Likewise.
+        * stringpool.c (ggc_alloc_string, gt_pch_n_S,
+        gt_pch_save_stringpool): Likewise.
+        * tree-data-ref.c (hash_stmt_vertex_info,
+        have_similar_memory_accesses_1, ref_base_address_1): Likewise.
+        * tree-ssa-phiopt.c (name_to_bb_hash): Likewise.
+
 2008-06-25  Uros Bizjak  <ubizjak@gmail.com>
 
         PR target/36627
gcc/ra.h
@@ -144,7 +144,7 @@ add_neighbor (int alloc_no, int neighbor)
   if (adjlist == NULL || adjlist->index == ADJACENCY_VEC_LENGTH)
     {
-      adjacency_t *new = pool_alloc (adjacency_pool);
+      adjacency_t *new = (adjacency_t *) pool_alloc (adjacency_pool);
       new->index = 0;
       new->next = adjlist;
       adjlist = new;
gcc/recog.c
@@ -145,8 +145,8 @@ check_asm_operands (rtx x)
   if (noperands == 0)
     return 1;
 
-  operands = alloca (noperands * sizeof (rtx));
-  constraints = alloca (noperands * sizeof (char *));
+  operands = XALLOCAVEC (rtx, noperands);
+  constraints = XALLOCAVEC (const char *, noperands);
 
   decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);
 
@@ -221,7 +221,7 @@ validate_change_1 (rtx object, rtx *loc, rtx new, bool in_group, bool unshare)
       else
         changes_allocated *= 2;
 
-      changes = xrealloc (changes, sizeof (change_t) * changes_allocated);
+      changes = XRESIZEVEC (change_t, changes, changes_allocated);
     }
 
   changes[num_changes].object = object;
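The alloca calls get the same treatment through XALLOCAVEC, the stack-allocation analogue of XNEWVEC. A rough sketch under the same simplification (the real macro lives in libiberty.h; sum_squares is just an illustrative caller, not from the patch):

    #include <alloca.h>

    /* Simplified form of libiberty's XALLOCAVEC: a typed, counted wrapper
       over alloca, keeping the cast and the size consistent.  It must stay
       a macro -- wrapping alloca in a function would release the memory
       when that function returns.  */
    #define XALLOCAVEC(T, N) ((T *) alloca (sizeof (T) * (N)))

    static int
    sum_squares (int n)
    {
      int *tmp = XALLOCAVEC (int, n);   /* freed automatically on return */
      int i, s = 0;
      for (i = 0; i < n; i++)
        tmp[i] = i * i;
      for (i = 0; i < n; i++)
        s += tmp[i];
      return s;
    }

    int
    main (void)
    {
      return sum_squares (10) == 285 ? 0 : 1;
    }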
gcc/reg-stack.c
@@ -503,7 +503,7 @@ check_asm_stack_operands (rtx insn)
 
   if (GET_CODE (body) == PARALLEL)
     {
-      clobber_reg = alloca (XVECLEN (body, 0) * sizeof (rtx));
+      clobber_reg = XALLOCAVEC (rtx, XVECLEN (body, 0));
 
       for (i = 0; i < XVECLEN (body, 0); i++)
         if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
@@ -2012,9 +2012,9 @@ subst_asm_stack_regs (rtx insn, stack regstack)
   for (i = 0, note = REG_NOTES (insn); note; note = XEXP (note, 1))
     i++;
 
-  note_reg = alloca (i * sizeof (rtx));
-  note_loc = alloca (i * sizeof (rtx *));
-  note_kind = alloca (i * sizeof (enum reg_note));
+  note_reg = XALLOCAVEC (rtx, i);
+  note_loc = XALLOCAVEC (rtx *, i);
+  note_kind = XALLOCAVEC (enum reg_note, i);
 
   n_notes = 0;
   for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
@@ -2045,8 +2045,8 @@ subst_asm_stack_regs (rtx insn, stack regstack)
 
   if (GET_CODE (body) == PARALLEL)
     {
-      clobber_reg = alloca (XVECLEN (body, 0) * sizeof (rtx));
-      clobber_loc = alloca (XVECLEN (body, 0) * sizeof (rtx *));
+      clobber_reg = XALLOCAVEC (rtx, XVECLEN (body, 0));
+      clobber_loc = XALLOCAVEC (rtx *, XVECLEN (body, 0));
 
       for (i = 0; i < XVECLEN (body, 0); i++)
         if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
gcc/regclass.c
@@ -1310,7 +1310,7 @@ regclass (rtx f, int nregs)
 
   init_recog ();
 
-  reg_renumber = xmalloc (max_regno * sizeof (short));
+  reg_renumber = XNEWVEC (short, max_regno);
   reg_pref = XCNEWVEC (struct reg_pref, max_regno);
   memset (reg_renumber, -1, max_regno * sizeof (short));
 
@@ -2500,15 +2500,18 @@ static htab_t subregs_of_mode;
 static hashval_t
 som_hash (const void *x)
 {
-  const struct subregs_of_mode_node *a = x;
+  const struct subregs_of_mode_node *const a =
+    (const struct subregs_of_mode_node *) x;
   return a->block;
 }
 
 static int
 som_eq (const void *x, const void *y)
 {
-  const struct subregs_of_mode_node *a = x;
-  const struct subregs_of_mode_node *b = y;
+  const struct subregs_of_mode_node *const a =
+    (const struct subregs_of_mode_node *) x;
+  const struct subregs_of_mode_node *const b =
+    (const struct subregs_of_mode_node *) y;
   return a->block == b->block;
 }
 
@@ -2533,7 +2536,7 @@ record_subregs_of_mode (rtx subreg)
   dummy.block = regno & -8;
   slot = htab_find_slot_with_hash (subregs_of_mode, &dummy,
                                    dummy.block, INSERT);
-  node = *slot;
+  node = (struct subregs_of_mode_node *) *slot;
   if (node == NULL)
     {
       node = XCNEW (struct subregs_of_mode_node);
@@ -2605,7 +2608,8 @@ cannot_change_mode_set_regs (HARD_REG_SET *used, enum machine_mode from,
 
   gcc_assert (subregs_of_mode);
   dummy.block = regno & -8;
-  node = htab_find_with_hash (subregs_of_mode, &dummy, dummy.block);
+  node = (struct subregs_of_mode_node *)
+    htab_find_with_hash (subregs_of_mode, &dummy, dummy.block);
   if (node == NULL)
     return;
 
@@ -2632,7 +2636,8 @@ invalid_mode_change_p (unsigned int regno,
 
   gcc_assert (subregs_of_mode);
   dummy.block = regno & -8;
-  node = htab_find_with_hash (subregs_of_mode, &dummy, dummy.block);
+  node = (struct subregs_of_mode_node *)
+    htab_find_with_hash (subregs_of_mode, &dummy, dummy.block);
   if (node == NULL)
     return false;
 
gcc/regmove.c
@@ -916,7 +916,7 @@ reg_is_remote_constant_p (rtx reg, rtx insn)
   if (!reg_set_in_bb)
     {
       max_reg_computed = max = max_reg_num ();
-      reg_set_in_bb = xcalloc (max, sizeof (*reg_set_in_bb));
+      reg_set_in_bb = XCNEWVEC (basic_block, max);
 
       FOR_EACH_BB (bb)
        FOR_BB_INSNS (bb, p)
gcc/regrename.c
@@ -189,7 +189,7 @@ regrename_optimize (void)
   memset (tick, 0, sizeof tick);
 
   gcc_obstack_init (&rename_obstack);
-  first_obj = obstack_alloc (&rename_obstack, 0);
+  first_obj = XOBNEWVAR (&rename_obstack, char, 0);
 
   FOR_EACH_BB (bb)
     {
@@ -385,8 +385,7 @@ scan_rtx_reg (rtx insn, rtx *loc, enum reg_class cl,
     {
       if (type == OP_OUT)
         {
-          struct du_chain *this
-            = obstack_alloc (&rename_obstack, sizeof (struct du_chain));
+          struct du_chain *this = XOBNEW (&rename_obstack, struct du_chain);
           this->next_use = 0;
           this->next_chain = open_chains;
           this->loc = loc;
@@ -438,7 +437,7 @@ scan_rtx_reg (rtx insn, rtx *loc, enum reg_class cl,
          be replaced with, terminate the chain.  */
       if (cl != NO_REGS)
         {
-          this = obstack_alloc (&rename_obstack, sizeof (struct du_chain));
+          this = XOBNEW (&rename_obstack, struct du_chain);
          this->next_use = 0;
          this->next_chain = (*p)->next_chain;
          this->loc = loc;
@@ -1165,7 +1164,7 @@ init_value_data (struct value_data *vd)
 static void
 kill_clobbered_value (rtx x, const_rtx set, void *data)
 {
-  struct value_data *vd = data;
+  struct value_data *const vd = (struct value_data *) data;
   if (GET_CODE (set) == CLOBBER)
     kill_value (x, vd);
 }
@@ -1176,7 +1175,7 @@ kill_clobbered_value (rtx x, const_rtx set, void *data)
 static void
 kill_set_value (rtx x, const_rtx set, void *data)
 {
-  struct value_data *vd = data;
+  struct value_data *const vd = (struct value_data *) data;
   if (GET_CODE (set) != CLOBBER)
     {
       kill_value (x, vd);
@@ -1193,7 +1192,7 @@ static int
 kill_autoinc_value (rtx *px, void *data)
 {
   rtx x = *px;
-  struct value_data *vd = data;
+  struct value_data *const vd = (struct value_data *) data;
 
   if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
     {
gcc/regstat.c
@@ -60,7 +60,7 @@ regstat_init_n_sets_and_refs (void)
   df_grow_reg_info ();
   gcc_assert (!regstat_n_sets_and_refs);
 
-  regstat_n_sets_and_refs = xmalloc (max_regno * sizeof (struct regstat_n_sets_and_refs_t));
+  regstat_n_sets_and_refs = XNEWVEC (struct regstat_n_sets_and_refs_t, max_regno);
 
   for (i = 0; i < max_regno; i++)
     {
@@ -344,7 +344,7 @@ regstat_compute_ri (void)
   setjmp_crosses = BITMAP_ALLOC (&df_bitmap_obstack);
   max_regno = max_reg_num ();
   reg_info_p_size = max_regno;
-  reg_info_p = xcalloc (max_regno, sizeof (struct reg_info_t));
+  reg_info_p = XCNEWVEC (struct reg_info_t, max_regno);
 
   FOR_EACH_BB (bb)
     {
@@ -488,7 +488,7 @@ regstat_compute_calls_crossed (void)
   timevar_push (TV_REG_STATS);
   max_regno = max_reg_num ();
   reg_info_p_size = max_regno;
-  reg_info_p = xcalloc (max_regno, sizeof (struct reg_info_t));
+  reg_info_p = XCNEWVEC (struct reg_info_t, max_regno);
 
   FOR_EACH_BB (bb)
     {
gcc/reload1.c
@@ -498,7 +498,7 @@ init_reload (void)
 
   /* Initialize obstack for our rtl allocation.  */
   gcc_obstack_init (&reload_obstack);
-  reload_startobj = obstack_alloc (&reload_obstack, 0);
+  reload_startobj = XOBNEWVAR (&reload_obstack, char, 0);
 
   INIT_REG_SET (&spilled_pseudos);
   INIT_REG_SET (&pseudos_counted);
@@ -515,7 +515,7 @@ new_insn_chain (void)
 
   if (unused_insn_chains == 0)
     {
-      c = obstack_alloc (&reload_obstack, sizeof (struct insn_chain));
+      c = XOBNEW (&reload_obstack, struct insn_chain);
       INIT_REG_SET (&c->live_throughout);
       INIT_REG_SET (&c->dead_or_set);
     }
@@ -633,7 +633,7 @@ has_nonexceptional_receiver (void)
     return true;
 
   /* First determine which blocks can reach exit via normal paths.  */
-  tos = worklist = xmalloc (sizeof (basic_block) * (n_basic_blocks + 1));
+  tos = worklist = XNEWVEC (basic_block, n_basic_blocks + 1);
 
   FOR_EACH_BB (bb)
     bb->flags &= ~BB_REACHABLE;
@@ -710,7 +710,7 @@ reload (rtx first, int global)
 
   failure = 0;
 
-  reload_firstobj = obstack_alloc (&reload_obstack, 0);
+  reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
 
   /* Make sure that the last insn in the chain
      is not something that needs reloading.  */
@@ -1031,7 +1031,7 @@ reload (rtx first, int global)
     {
       save_call_clobbered_regs ();
       /* That might have allocated new insn_chain structures.  */
-      reload_firstobj = obstack_alloc (&reload_obstack, 0);
+      reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
     }
 
   calculate_needs_all_insns (global);
@@ -1500,10 +1500,9 @@ static void
 copy_reloads (struct insn_chain *chain)
 {
   chain->n_reloads = n_reloads;
-  chain->rld = obstack_alloc (&reload_obstack,
-                              n_reloads * sizeof (struct reload));
+  chain->rld = XOBNEWVEC (&reload_obstack, struct reload, n_reloads);
   memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
-  reload_insn_firstobj = obstack_alloc (&reload_obstack, 0);
+  reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
 }
 
 /* Walk the chain of insns, and determine for each whether it needs reloads
@@ -1517,7 +1516,7 @@ calculate_needs_all_insns (int global)
 
   something_needs_elimination = 0;
 
-  reload_insn_firstobj = obstack_alloc (&reload_obstack, 0);
+  reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
   for (chain = reload_insn_chain; chain != 0; chain = next)
     {
       rtx insn = chain->insn;
@@ -3707,7 +3706,7 @@ init_elim_table (void)
 #endif
 
   if (!reg_eliminate)
-    reg_eliminate = xcalloc (sizeof (struct elim_table), NUM_ELIMINABLE_REGS);
+    reg_eliminate = XCNEWVEC (struct elim_table, NUM_ELIMINABLE_REGS);
 
   /* Does this function require a frame pointer?  */
 
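reload1.c and regrename.c allocate from obstacks rather than the heap, so they use the obstack flavors of the same macros: XOBNEW for a single object, XOBNEWVEC for an array, XOBNEWVAR for an explicit byte count. The zero-length XOBNEWVAR calls above only record the current obstack position, so everything allocated afterwards can be released in one obstack_free. A hedged sketch with simplified macro expansions (the insn_chain struct here is an illustrative stand-in, not the real one from reload):

    #include <obstack.h>
    #include <stdlib.h>

    #define obstack_chunk_alloc malloc
    #define obstack_chunk_free  free

    /* Simplified forms of libiberty's obstack macros.  */
    #define XOBNEW(O, T)        ((T *) obstack_alloc ((O), sizeof (T)))
    #define XOBNEWVEC(O, T, N)  ((T *) obstack_alloc ((O), sizeof (T) * (N)))
    #define XOBNEWVAR(O, T, S)  ((T *) obstack_alloc ((O), (S)))

    struct insn_chain { int id; struct insn_chain *next; };

    int
    main (void)
    {
      struct obstack ob;
      char *firstobj;
      struct insn_chain *c;

      obstack_init (&ob);
      /* A zero-length allocation marks the current position ...  */
      firstobj = XOBNEWVAR (&ob, char, 0);
      c = XOBNEW (&ob, struct insn_chain);
      c->id = 0;
      c->next = NULL;
      /* ... so a single obstack_free releases everything allocated
         since that mark.  */
      obstack_free (&ob, firstobj);
      obstack_free (&ob, NULL);   /* release the obstack's memory */
      return 0;
    }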
gcc/rtl-factoring.c
@@ -323,14 +323,14 @@ compute_rtx_cost (rtx insn)
   tmp_bucket.hash = compute_hash (insn);
 
   /* Select the hash group.  */
-  bucket = htab_find (hash_buckets, &tmp_bucket);
+  bucket = (p_hash_bucket) htab_find (hash_buckets, &tmp_bucket);
 
   if (bucket)
     {
       tmp_elem.insn = insn;
 
       /* Select the insn.  */
-      elem = htab_find (bucket->seq_candidates, &tmp_elem);
+      elem = (p_hash_elem) htab_find (bucket->seq_candidates, &tmp_elem);
 
       /* If INSN is parsed the cost will be the cached length.  */
       if (elem)
@@ -1319,7 +1319,7 @@ fill_hash_bucket (void)
       tmp_bucket.hash = compute_hash (insn);
 
       /* Select the hash group.  */
-      bucket = htab_find (hash_buckets, &tmp_bucket);
+      bucket = (p_hash_bucket) htab_find (hash_buckets, &tmp_bucket);
 
       if (!bucket)
         {
gcc/rtl.c
@@ -323,7 +323,7 @@ shallow_copy_rtx_stat (const_rtx orig MEM_STAT_DECL)
 {
   const unsigned int size = rtx_size (orig);
   rtx const copy = (rtx) ggc_alloc_zone_pass_stat (size, &rtl_zone);
-  return memcpy (copy, orig, size);
+  return (rtx) memcpy (copy, orig, size);
 }
 
 /* Nonzero when we are generating CONCATs.  */
gcc/rtlanal.c
@@ -3266,7 +3266,7 @@ struct parms_set_data
 static void
 parms_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
 {
-  struct parms_set_data *d = data;
+  struct parms_set_data *const d = (struct parms_set_data *) data;
   if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
       && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
     {
gcc/sbitmap.c
@@ -79,7 +79,7 @@ sbitmap_alloc (unsigned int n_elms)
   bytes = size * sizeof (SBITMAP_ELT_TYPE);
   amt = (sizeof (struct simple_bitmap_def)
          + bytes - sizeof (SBITMAP_ELT_TYPE));
-  bmap = xmalloc (amt);
+  bmap = (sbitmap) xmalloc (amt);
   bmap->n_bits = n_elms;
   bmap->size = size;
   bmap->popcount = NULL;
@@ -92,7 +92,7 @@ sbitmap
 sbitmap_alloc_with_popcount (unsigned int n_elms)
 {
   sbitmap const bmap = sbitmap_alloc (n_elms);
-  bmap->popcount = xmalloc (bmap->size * sizeof (unsigned char));
+  bmap->popcount = XNEWVEC (unsigned char, bmap->size);
   return bmap;
 }
 
@@ -112,10 +112,9 @@ sbitmap_resize (sbitmap bmap, unsigned int n_elms, int def)
     {
       amt = (sizeof (struct simple_bitmap_def)
              + bytes - sizeof (SBITMAP_ELT_TYPE));
-      bmap = xrealloc (bmap, amt);
+      bmap = (sbitmap) xrealloc (bmap, amt);
       if (bmap->popcount)
-        bmap->popcount = xrealloc (bmap->popcount,
-                                   size * sizeof (unsigned char));
+        bmap->popcount = XRESIZEVEC (unsigned char, bmap->popcount, size);
     }
 
   if (n_elms > bmap->n_bits)
@@ -218,7 +217,7 @@ sbitmap_vector_alloc (unsigned int n_vecs, unsigned int n_elms)
     }
 
   amt = vector_bytes + (n_vecs * elm_bytes);
-  bitmap_vector = xmalloc (amt);
+  bitmap_vector = (sbitmap *) xmalloc (amt);
 
   for (i = 0, offset = vector_bytes; i < n_vecs; i++, offset += elm_bytes)
     {
gcc/sched-ebb.c
@@ -335,7 +335,7 @@ earliest_block_with_similiar_load (basic_block last_block, rtx load_insn)
         /* insn2 not guaranteed to be a 1 base reg load.  */
         continue;
 
-      for (bb = last_block; bb; bb = bb->aux)
+      for (bb = last_block; bb; bb = (basic_block) bb->aux)
         if (insn2_block == bb)
           break;
 
@@ -382,7 +382,7 @@ add_deps_for_risky_insns (rtx head, rtx tail)
           bb = earliest_block_with_similiar_load (last_block, insn);
           if (bb)
             {
-              bb = bb->aux;
+              bb = (basic_block) bb->aux;
               if (!bb)
                 break;
               prev = BB_END (bb);
@@ -436,7 +436,7 @@ add_deps_for_risky_insns (rtx head, rtx tail)
   /* Maintain the invariant that bb->aux is clear after use.  */
   while (last_block)
     {
-      bb = last_block->aux;
+      bb = (basic_block) last_block->aux;
       last_block->aux = NULL;
       last_block = bb;
     }
gcc/sched-rgn.c
@@ -700,7 +700,7 @@ find_rgns (void)
   extend_regions_p = PARAM_VALUE (PARAM_MAX_SCHED_EXTEND_REGIONS_ITERS) > 0;
   if (extend_regions_p)
     {
-      degree1 = xmalloc (last_basic_block * sizeof (int));
+      degree1 = XNEWVEC (int, last_basic_block);
       extended_rgn_header = sbitmap_alloc (last_basic_block);
       sbitmap_zero (extended_rgn_header);
     }
@@ -990,7 +990,7 @@ gather_region_statistics (int **rsp)
 
       if (nr_blocks > a_sz)
         {
-          a = xrealloc (a, nr_blocks * sizeof (*a));
+          a = XRESIZEVEC (int, a, nr_blocks);
          do
            a[a_sz++] = 0;
          while (a_sz != nr_blocks);
@@ -1047,9 +1047,9 @@ extend_rgns (int *degree, int *idxp, sbitmap header, int *loop_hdr)
 
   max_iter = PARAM_VALUE (PARAM_MAX_SCHED_EXTEND_REGIONS_ITERS);
 
-  max_hdr = xmalloc (last_basic_block * sizeof (*max_hdr));
+  max_hdr = XNEWVEC (int, last_basic_block);
 
-  order = xmalloc (last_basic_block * sizeof (*order));
+  order = XNEWVEC (int, last_basic_block);
   post_order_compute (order, false, false);
 
   for (i = nblocks - 1; i >= 0; i--)
@@ -2685,7 +2685,7 @@ schedule_region (int rgn)
   current_blocks = RGN_BLOCKS (rgn);
 
   /* See comments in add_block1, for what reasons we allocate +1 element.  */
-  ebb_head = xrealloc (ebb_head, (current_nr_blocks + 1) * sizeof (*ebb_head));
+  ebb_head = XRESIZEVEC (int, ebb_head, current_nr_blocks + 1);
   for (bb = 0; bb <= current_nr_blocks; bb++)
     ebb_head[bb] = current_blocks + bb;
 
gcc/see.c
@@ -807,8 +807,10 @@ see_gen_normalized_extension (rtx reg, enum rtx_code extension_code,
 static int
 eq_descriptor_pre_extension (const void *p1, const void *p2)
 {
-  const struct see_pre_extension_expr *extension1 = p1;
-  const struct see_pre_extension_expr *extension2 = p2;
+  const struct see_pre_extension_expr *const extension1 =
+    (const struct see_pre_extension_expr *) p1;
+  const struct see_pre_extension_expr *const extension2 =
+    (const struct see_pre_extension_expr *) p2;
   rtx set1 = single_set (extension1->se_insn);
   rtx set2 = single_set (extension2->se_insn);
   rtx rhs1, rhs2;
@@ -828,7 +830,8 @@ eq_descriptor_pre_extension (const void *p1, const void *p2)
 static hashval_t
 hash_descriptor_pre_extension (const void *p)
 {
-  const struct see_pre_extension_expr *extension = p;
+  const struct see_pre_extension_expr *const extension =
+    (const struct see_pre_extension_expr *) p;
   rtx set = single_set (extension->se_insn);
   rtx rhs;
 
@@ -846,7 +849,8 @@ hash_descriptor_pre_extension (const void *p)
 static void
 hash_del_pre_extension (void *p)
 {
-  struct see_pre_extension_expr *extension = p;
+  struct see_pre_extension_expr *const extension =
+    (struct see_pre_extension_expr *) p;
   struct see_occr *curr_occr = extension->antic_occr;
   struct see_occr *next_occr = NULL;
 
@@ -884,8 +888,10 @@ hash_del_pre_extension (void *p)
 static int
 eq_descriptor_properties (const void *p1, const void *p2)
 {
-  const struct see_register_properties *curr_prop1 = p1;
-  const struct see_register_properties *curr_prop2 = p2;
+  const struct see_register_properties *const curr_prop1 =
+    (const struct see_register_properties *) p1;
+  const struct see_register_properties *const curr_prop2 =
+    (const struct see_register_properties *) p2;
 
   return curr_prop1->regno == curr_prop2->regno;
 }
@@ -897,7 +903,8 @@ eq_descriptor_properties (const void *p1, const void *p2)
 static hashval_t
 hash_descriptor_properties (const void *p)
 {
-  const struct see_register_properties *curr_prop = p;
+  const struct see_register_properties *const curr_prop =
+    (const struct see_register_properties *) p;
   return curr_prop->regno;
 }
 
@@ -906,7 +913,8 @@ hash_descriptor_properties (const void *p)
 static void
 hash_del_properties (void *p)
 {
-  struct see_register_properties *curr_prop = p;
+  struct see_register_properties *const curr_prop =
+    (struct see_register_properties *) p;
   free (curr_prop);
 }
 
@@ -1031,7 +1039,7 @@ see_seek_pre_extension_expr (rtx extension, enum extension_type type)
     /* This is the first time this extension instruction is encountered.  Store
        it in the hash.  */
     {
-      (*slot_pre_exp) = xmalloc (sizeof (struct see_pre_extension_expr));
+      (*slot_pre_exp) = XNEW (struct see_pre_extension_expr);
       (*slot_pre_exp)->se_insn = extension;
       (*slot_pre_exp)->bitmap_index =
         (htab_elements (see_pre_extension_hash) - 1);
@@ -1352,16 +1360,16 @@ see_initialize_data_structures (void)
     }
 
   /* Allocate web entries array for the union-find data structure.  */
-  def_entry = xcalloc (defs_num, sizeof (struct web_entry));
-  use_entry = xcalloc (uses_num, sizeof (struct web_entry));
+  def_entry = XCNEWVEC (struct web_entry, defs_num);
+  use_entry = XCNEWVEC (struct web_entry, uses_num);
 
   /* Allocate an array of splay trees.
      One splay tree for each basic block.  */
-  see_bb_splay_ar = xcalloc (last_bb, sizeof (splay_tree));
+  see_bb_splay_ar = XCNEWVEC (splay_tree, last_bb);
 
   /* Allocate an array of hashes.
     One hash for each basic block.  */
-  see_bb_hash_ar = xcalloc (last_bb, sizeof (htab_t));
+  see_bb_hash_ar = XCNEWVEC (htab_t, last_bb);
 
   /* Allocate the extension hash.  It will hold the extensions that we want
      to PRE.  */
@@ -1478,7 +1486,8 @@ see_want_to_be_merged_with_extension (rtx ref, rtx extension,
 static int
 see_print_register_properties (void **slot, void *b ATTRIBUTE_UNUSED)
 {
-  struct see_register_properties *prop = *slot;
+  const struct see_register_properties *const prop =
+    (const struct see_register_properties *) *slot;
 
   gcc_assert (prop);
   fprintf (dump_file, "Property found for register %d\n", prop->regno);
@@ -1495,7 +1504,8 @@ see_print_register_properties (void **slot, void *b ATTRIBUTE_UNUSED)
 static int
 see_print_pre_extension_expr (void **slot, void *b ATTRIBUTE_UNUSED)
 {
-  struct see_pre_extension_expr *pre_extension = *slot;
+  const struct see_pre_extension_expr *const pre_extension =
+    (const struct see_pre_extension_expr *) *slot;
 
   gcc_assert (pre_extension
               && pre_extension->se_insn
@@ -1520,7 +1530,7 @@ see_print_pre_extension_expr (void **slot, void *b ATTRIBUTE_UNUSED)
 static int
 see_delete_merged_def_extension (void **slot, void *b ATTRIBUTE_UNUSED)
 {
-  rtx def_se = *slot;
+  rtx def_se = (rtx) *slot;
 
   if (dump_file)
     {
@@ -1548,7 +1558,7 @@ see_delete_merged_def_extension (void **slot, void *b ATTRIBUTE_UNUSED)
 static int
 see_delete_unmerged_def_extension (void **slot, void *b ATTRIBUTE_UNUSED)
 {
-  rtx def_se = *slot;
+  rtx def_se = (rtx) *slot;
 
   if (dump_file)
     {
@@ -1571,7 +1581,7 @@ see_delete_unmerged_def_extension (void **slot, void *b ATTRIBUTE_UNUSED)
 static int
 see_emit_use_extension (void **slot, void *b)
 {
-  rtx use_se = *slot;
+  rtx use_se = (rtx) *slot;
   struct see_ref_s *curr_ref_s = (struct see_ref_s *) b;
 
   if (INSN_DELETED_P (use_se))
@@ -1737,7 +1747,8 @@ see_pre_insert_extensions (struct see_pre_extension_expr **index_map)
 static int
 see_pre_delete_extension (void **slot, void *b ATTRIBUTE_UNUSED)
 {
-  struct see_pre_extension_expr *expr = *slot;
+  struct see_pre_extension_expr *const expr =
+    (struct see_pre_extension_expr *) *slot;
   struct see_occr *occr;
   int indx = expr->bitmap_index;
 
@@ -1768,8 +1779,9 @@ see_pre_delete_extension (void **slot, void *b ATTRIBUTE_UNUSED)
 static int
 see_map_extension (void **slot, void *b)
 {
-  struct see_pre_extension_expr *expr = *slot;
-  struct see_pre_extension_expr **index_map =
+  struct see_pre_extension_expr *const expr =
+    (struct see_pre_extension_expr *) *slot;
+  struct see_pre_extension_expr **const index_map =
     (struct see_pre_extension_expr **) b;
 
   index_map[expr->bitmap_index] = expr;
@@ -1792,8 +1804,7 @@ see_commit_changes (void)
   bool did_insert = false;
   int i;
 
-  index_map = xcalloc (pre_extension_num,
-                       sizeof (struct see_pre_extension_expr *));
+  index_map = XCNEWVEC (struct see_pre_extension_expr *, pre_extension_num);
 
   if (dump_file)
     fprintf (dump_file,
@@ -1842,7 +1853,7 @@ see_commit_changes (void)
 static int
 see_analyze_merged_def_local_prop (void **slot, void *b)
 {
-  rtx def_se = *slot;
+  rtx def_se = (rtx) *slot;
   struct see_ref_s *curr_ref_s = (struct see_ref_s *) b;
   rtx ref = curr_ref_s->insn;
   struct see_pre_extension_expr *extension_expr;
@@ -1880,7 +1891,7 @@ see_analyze_merged_def_local_prop (void **slot, void *b)
       /* Set the available bit.  */
       SET_BIT (comp[bb_num], indx);
       /* Record the available occurrence.  */
-      curr_occr = xmalloc (sizeof (struct see_occr));
+      curr_occr = XNEW (struct see_occr);
       curr_occr->next = NULL;
       curr_occr->insn = def_se;
       curr_occr->block_num = bb_num;
@@ -1910,7 +1921,7 @@ see_analyze_merged_def_local_prop (void **slot, void *b)
 static int
 see_analyze_unmerged_def_local_prop (void **slot, void *b)
 {
-  rtx def_se = *slot;
+  rtx def_se = (rtx) *slot;
   struct see_ref_s *curr_ref_s = (struct see_ref_s *) b;
   rtx ref = curr_ref_s->insn;
   struct see_pre_extension_expr *extension_expr;
@@ -1958,7 +1969,7 @@ static int
 see_analyze_use_local_prop (void **slot, void *b)
 {
   struct see_ref_s *curr_ref_s = (struct see_ref_s *) b;
-  rtx use_se = *slot;
+  rtx use_se = (rtx) *slot;
   rtx ref = curr_ref_s->insn;
   rtx dest_extension_reg = see_get_extension_reg (use_se, 1);
   struct see_pre_extension_expr *extension_expr;
@@ -1990,7 +2001,7 @@ see_analyze_use_local_prop (void **slot, void *b)
       /* Set the anticipatable bit.  */
       SET_BIT (antloc[bb_num], indx);
       /* Record the anticipatable occurrence.  */
-      curr_occr = xmalloc (sizeof (struct see_occr));
+      curr_occr = XNEW (struct see_occr);
       curr_occr->next = NULL;
       curr_occr->insn = use_se;
       curr_occr->block_num = bb_num;
@@ -2008,7 +2019,7 @@ see_analyze_use_local_prop (void **slot, void *b)
       /* Set the available bit.  */
       SET_BIT (comp[bb_num], indx);
       /* Record the available occurrence.  */
-      curr_occr = xmalloc (sizeof (struct see_occr));
+      curr_occr = XNEW (struct see_occr);
       curr_occr->next = NULL;
       curr_occr->insn = use_se;
       curr_occr->block_num = bb_num;
@@ -2032,7 +2043,7 @@ see_analyze_use_local_prop (void **slot, void *b)
       /* Reset the killed bit.  */
       RESET_BIT (ae_kill[bb_num], indx);
       /* Record the available occurrence.  */
-      curr_occr = xmalloc (sizeof (struct see_occr));
+      curr_occr = XNEW (struct see_occr);
       curr_occr->next = NULL;
       curr_occr->insn = use_se;
       curr_occr->block_num = bb_num;
@@ -2155,7 +2166,7 @@ see_execute_LCM (void)
 static int
 see_set_prop_merged_def (void **slot, void *b)
 {
-  rtx def_se = *slot;
+  rtx def_se = (rtx) *slot;
   struct see_ref_s *curr_ref_s = (struct see_ref_s *) b;
   rtx insn = curr_ref_s->insn;
   rtx dest_extension_reg = see_get_extension_reg (def_se, 1);
@@ -2194,7 +2205,7 @@ see_set_prop_merged_def (void **slot, void *b)
   else
     {
       /* Property doesn't exist yet.  */
-      curr_prop = xmalloc (sizeof (struct see_register_properties));
+      curr_prop = XNEW (struct see_register_properties);
       curr_prop->regno = REGNO (dest_extension_reg);
       curr_prop->last_def = ref_luid;
       curr_prop->first_se_before_any_def = -1;
@@ -2226,7 +2237,7 @@ see_set_prop_merged_def (void **slot, void *b)
 static int
 see_set_prop_unmerged_def (void **slot, void *b)
 {
-  rtx def_se = *slot;
+  rtx def_se = (rtx) *slot;
   struct see_ref_s *curr_ref_s = (struct see_ref_s *) b;
   rtx insn = curr_ref_s->insn;
   rtx dest_extension_reg = see_get_extension_reg (def_se, 1);
@@ -2265,7 +2276,7 @@ see_set_prop_unmerged_def (void **slot, void *b)
   else
     {
      /* Property doesn't exist yet.  */
-      curr_prop = xmalloc (sizeof (struct see_register_properties));
+      curr_prop = XNEW (struct see_register_properties);
       curr_prop->regno = REGNO (dest_extension_reg);
       curr_prop->last_def = ref_luid;
       curr_prop->first_se_before_any_def = -1;
@@ -2299,7 +2310,7 @@ see_set_prop_unmerged_def (void **slot, void *b)
 static int
 see_set_prop_unmerged_use (void **slot, void *b)
 {
-  rtx use_se = *slot;
+  rtx use_se = (rtx) *slot;
   struct see_ref_s *curr_ref_s = (struct see_ref_s *) b;
   rtx insn = curr_ref_s->insn;
   rtx dest_extension_reg = see_get_extension_reg (use_se, 1);
@@ -2359,7 +2370,7 @@ see_set_prop_unmerged_use (void **slot, void *b)
   else
     {
       /* Property doesn't exist yet.  Create a new one.  */
-      curr_prop = xmalloc (sizeof (struct see_register_properties));
+      curr_prop = XNEW (struct see_register_properties);
       curr_prop->regno = REGNO (dest_extension_reg);
       curr_prop->last_def = -1;
       curr_prop->first_se_before_any_def = ref_luid;
@@ -2389,7 +2400,7 @@ see_set_prop_unmerged_use (void **slot, void *b)
 static int
 see_print_one_extension (void **slot, void *b ATTRIBUTE_UNUSED)
 {
-  rtx def_se = *slot;
+  rtx def_se = (rtx) *slot;
 
   gcc_assert (def_se && INSN_P (def_se));
   print_rtl_single (dump_file, def_se);
@@ -2619,7 +2630,7 @@ static int
 see_merge_one_use_extension (void **slot, void *b)
 {
   struct see_ref_s *curr_ref_s = (struct see_ref_s *) b;
-  rtx use_se = *slot;
+  rtx use_se = (rtx) *slot;
   rtx ref = curr_ref_s->merged_insn
             ? curr_ref_s->merged_insn : curr_ref_s->insn;
   rtx merged_ref_next = curr_ref_s->merged_insn
@@ -2780,7 +2791,7 @@ static int
 see_merge_one_def_extension (void **slot, void *b)
 {
   struct see_ref_s *curr_ref_s = (struct see_ref_s *) b;
-  rtx def_se = *slot;
+  rtx def_se = (rtx) *slot;
   /* If the original insn was already merged with an extension before,
      take the merged one.  */
   rtx ref = curr_ref_s->merged_insn
@@ -3160,7 +3171,7 @@ see_store_reference_and_extension (rtx ref_insn, rtx se_insn,
      tree.  */
   if (!stn)
     {
-      ref_s = xmalloc (sizeof (struct see_ref_s));
+      ref_s = XNEW (struct see_ref_s);
       ref_s->luid = DF_INSN_LUID (ref_insn);
       ref_s->insn = ref_insn;
       ref_s->merged_insn = NULL;
@@ -3409,7 +3420,7 @@ see_update_uses_relevancy (rtx insn, struct df_ref *ref,
     }
 
   DF_REF_ID (ref) = index;
-  curr_entry_extra_info = xmalloc (sizeof (struct see_entry_extra_info));
+  curr_entry_extra_info = XNEW (struct see_entry_extra_info);
   curr_entry_extra_info->relevancy = et;
   curr_entry_extra_info->local_relevancy = et;
   use_entry[index].extra_info = curr_entry_extra_info;
@@ -3583,7 +3594,7 @@ see_update_defs_relevancy (rtx insn, struct df_ref *ref,
                            unsigned int index)
 {
   struct see_entry_extra_info *curr_entry_extra_info
-    = xmalloc (sizeof (struct see_entry_extra_info));
+    = XNEW (struct see_entry_extra_info);
   curr_entry_extra_info->relevancy = et;
   curr_entry_extra_info->local_relevancy = et;
 
gcc/statistics.c
@@ -53,7 +53,7 @@ static unsigned nr_statistics_hashes;
 static hashval_t
 hash_statistics_hash (const void *p)
 {
-  statistics_counter_t *c = (statistics_counter_t *)p;
+  const statistics_counter_t *const c = (const statistics_counter_t *)p;
   return htab_hash_string (c->id) + c->val;
 }
 
@@ -62,8 +62,8 @@ hash_statistics_hash (const void *p)
 static int
 hash_statistics_eq (const void *p, const void *q)
 {
-  statistics_counter_t *c1 = (statistics_counter_t *)p;
-  statistics_counter_t *c2 = (statistics_counter_t *)q;
+  const statistics_counter_t *const c1 = (const statistics_counter_t *)p;
+  const statistics_counter_t *const c2 = (const statistics_counter_t *)q;
   return c1->val == c2->val && strcmp (c1->id, c2->id) == 0;
 }
 
@@ -72,7 +72,7 @@ hash_statistics_eq (const void *p, const void *q)
 static void
 hash_statistics_free (void *p)
 {
-  free ((void *)((statistics_counter_t *)p)->id);
+  free (CONST_CAST(char *, ((statistics_counter_t *)p)->id));
   free (p);
 }
 
@@ -90,8 +90,7 @@ curr_statistics_hash (void)
 
   if (idx >= nr_statistics_hashes)
     {
-      statistics_hashes = xrealloc (statistics_hashes,
-                                    (idx + 1) * sizeof (htab_t));
+      statistics_hashes = XRESIZEVEC (struct htab *, statistics_hashes, idx+1);
       memset (statistics_hashes + nr_statistics_hashes, 0,
               (idx + 1 - nr_statistics_hashes) * sizeof (htab_t));
       nr_statistics_hashes = idx + 1;
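The old hash_statistics_free stripped const with a bare (void *) cast, which -Wcast-qual reports; CONST_CAST (and typed variants such as CONST_CAST_TREE, used in tree-data-ref.c below) exists to make intentional const-stripping explicit and greppable. A rough sketch assuming a simplified definition; GCC's real macro in system.h goes through const_cast in C++ and a union in newer compilers precisely so the cast itself does not trigger the warning:

    #include <stdlib.h>
    #include <string.h>

    /* Simplified CONST_CAST; same interface as GCC's, cruder expansion.  */
    #define CONST_CAST(TYPE, X) ((TYPE) (X))

    typedef struct
    {
      const char *id;    /* heap-allocated, but const to all readers */
      unsigned val;
    } statistics_counter_t;

    int
    main (void)
    {
      statistics_counter_t c;
      c.id = strdup ("loop-unroll");   /* illustrative counter name */
      c.val = 1;

      /* The counter owns the string, so it may free it; CONST_CAST marks
         the one place where the const qualifier is intentionally shed.  */
      free (CONST_CAST (char *, c.id));
      return 0;
    }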
gcc/stmt.c
@@ -333,7 +333,7 @@ parse_output_constraint (const char **constraint_p, int operand_num,
              *p, operand_num);
 
   /* Make a copy of the constraint.  */
-  buf = alloca (c_len + 1);
+  buf = XALLOCAVEC (char, c_len + 1);
   strcpy (buf, constraint);
   /* Swap the first character and the `=' or `+'.  */
   buf[p - constraint] = buf[0];
@@ -565,7 +565,7 @@ decl_overlaps_hard_reg_set_p (tree *declp, int *walk_subtrees ATTRIBUTE_UNUSED,
                               void *data)
 {
   tree decl = *declp;
-  const HARD_REG_SET *regs = data;
+  const HARD_REG_SET *const regs = (const HARD_REG_SET *) data;
 
   if (TREE_CODE (decl) == VAR_DECL)
     {
@@ -651,13 +651,11 @@ expand_asm_operands (tree string, tree outputs, tree inputs,
   tree t;
   int i;
   /* Vector of RTX's of evaluated output operands.  */
-  rtx *output_rtx = alloca (noutputs * sizeof (rtx));
-  int *inout_opnum = alloca (noutputs * sizeof (int));
-  rtx *real_output_rtx = alloca (noutputs * sizeof (rtx));
-  enum machine_mode *inout_mode
-    = alloca (noutputs * sizeof (enum machine_mode));
-  const char **constraints
-    = alloca ((noutputs + ninputs) * sizeof (const char *));
+  rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
+  int *inout_opnum = XALLOCAVEC (int, noutputs);
+  rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
+  enum machine_mode *inout_mode = XALLOCAVEC (enum machine_mode, noutputs);
+  const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
   int old_generating_concat_p = generating_concat_p;
 
   /* An ASM with no outputs needs to be treated as volatile, for now.  */
@@ -1616,7 +1614,7 @@ expand_return (tree retval)
       int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
       unsigned int bitsize
         = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
-      rtx *result_pseudos = alloca (sizeof (rtx) * n_regs);
+      rtx *result_pseudos = XALLOCAVEC (rtx, n_regs);
       rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
       rtx result_val = expand_normal (retval_rhs);
       enum machine_mode tmpmode, result_reg_mode;
@@ -2182,8 +2180,8 @@ bool lshift_cheap_p (void)
 static int
 case_bit_test_cmp (const void *p1, const void *p2)
 {
-  const struct case_bit_test *d1 = p1;
-  const struct case_bit_test *d2 = p2;
+  const struct case_bit_test *const d1 = (const struct case_bit_test *) p1;
+  const struct case_bit_test *const d2 = (const struct case_bit_test *) p2;
 
   if (d2->bits != d1->bits)
     return d2->bits - d1->bits;
@@ -2547,7 +2545,7 @@ expand_case (tree exp)
           /* Get table of labels to jump to, in order of case index.  */
 
           ncases = tree_low_cst (range, 0) + 1;
-          labelvec = alloca (ncases * sizeof (rtx));
+          labelvec = XALLOCAVEC (rtx, ncases);
           memset (labelvec, 0, ncases * sizeof (rtx));
 
           for (n = case_list; n; n = n->right)
gcc/stor-layout.c
@@ -506,7 +506,7 @@ relayout_decl (tree decl)
 record_layout_info
 start_record_layout (tree t)
 {
-  record_layout_info rli = xmalloc (sizeof (struct record_layout_info_s));
+  record_layout_info rli = XNEW (struct record_layout_info_s);
 
   rli->t = t;
 
gcc/stringpool.c
@@ -93,7 +93,7 @@ ggc_alloc_string (const char *contents, int length)
   if (length == 1 && ISDIGIT (contents[0]))
     return digit_string (contents[0] - '0');
 
-  result = ggc_alloc (length + 1);
+  result = GGC_NEWVAR (char, length + 1);
   memcpy (result, contents, length + 1);
   return (const char *) result;
 }
@@ -207,8 +207,8 @@ gt_pch_p_S (void *obj ATTRIBUTE_UNUSED, void *x ATTRIBUTE_UNUSED,
 void
 gt_pch_n_S (const void *x)
 {
-  gt_pch_note_object ((void *)x, (void *)x, &gt_pch_p_S,
-                      gt_types_enum_last);
+  gt_pch_note_object (CONST_CAST (void *, x), CONST_CAST (void *, x),
+                      &gt_pch_p_S, gt_types_enum_last);
 }
 
 /* Handle saving and restoring the string pool for PCH.  */
@@ -234,10 +234,10 @@ static GTY(()) struct string_pool_data * spd;
 void
 gt_pch_save_stringpool (void)
 {
-  spd = ggc_alloc (sizeof (*spd));
+  spd = GGC_NEW (struct string_pool_data);
   spd->nslots = ident_hash->nslots;
   spd->nelements = ident_hash->nelements;
-  spd->entries = ggc_alloc (sizeof (spd->entries[0]) * spd->nslots);
+  spd->entries = GGC_NEWVEC (struct ht_identifier *, spd->nslots);
   memcpy (spd->entries, ident_hash->entries,
           spd->nslots * sizeof (spd->entries[0]));
 }
gcc/tree-data-ref.c
@@ -4780,8 +4780,9 @@ known_dependences_p (VEC (ddr_p, heap) *dependence_relations)
 static hashval_t
 hash_stmt_vertex_info (const void *elt)
 {
-  struct rdg_vertex_info *rvi = (struct rdg_vertex_info *) elt;
-  tree stmt = rvi->stmt;
+  const struct rdg_vertex_info *const rvi =
+    (const struct rdg_vertex_info *) elt;
+  const_tree stmt = rvi->stmt;
 
   return htab_hash_pointer (stmt);
 }
@@ -4982,7 +4983,8 @@ have_similar_memory_accesses (tree s1, tree s2)
 static int
 have_similar_memory_accesses_1 (const void *s1, const void *s2)
 {
-  return have_similar_memory_accesses ((tree) s1, (tree) s2);
+  return have_similar_memory_accesses (CONST_CAST_TREE ((const_tree)s1),
+                                       CONST_CAST_TREE ((const_tree)s2));
 }
 
 /* Helper function for the hashtab.  */
@@ -4990,7 +4992,7 @@ have_similar_memory_accesses_1 (const void *s1, const void *s2)
 static hashval_t
 ref_base_address_1 (const void *s)
 {
-  tree stmt = (tree) s;
+  tree stmt = CONST_CAST_TREE((const_tree)s);
   unsigned i;
   VEC (data_ref_loc, heap) *refs;
   data_ref_loc *ref;
gcc/tree-ssa-phiopt.c
@@ -1115,8 +1115,8 @@ static struct pointer_set_t *nontrap_set;
 static hashval_t
 name_to_bb_hash (const void *p)
 {
-  tree n = ((struct name_to_bb *)p)->ssa_name;
-  return htab_hash_pointer (n) ^ ((struct name_to_bb *)p)->store;
+  const_tree n = ((const struct name_to_bb *)p)->ssa_name;
+  return htab_hash_pointer (n) ^ ((const struct name_to_bb *)p)->store;
 }
 
 /* The equality function of *P1 and *P2.  SSA_NAMEs are shared, so