Add operators / and * for profile_{count,probability}.

gcc/ChangeLog:

	* bb-reorder.cc (find_traces_1_round): Add operators / and * and
	use them.
	(better_edge_p): Likewise.
	* cfgloop.cc (find_subloop_latch_edge_by_profile): Likewise.
	* cfgloopmanip.cc (scale_loop_profile): Likewise.
	* cfgrtl.cc (force_nonfallthru_and_redirect): Likewise.
	* cgraph.cc (cgraph_edge::maybe_hot_p): Likewise.
	* config/sh/sh.cc (expand_cbranchdi4): Likewise.
	* dojump.cc (do_compare_rtx_and_jump): Likewise.
	* final.cc (compute_alignments): Likewise.
	* ipa-cp.cc (update_counts_for_self_gen_clones): Likewise.
	(decide_about_value): Likewise.
	* ipa-inline-analysis.cc (do_estimate_edge_time): Likewise.
	* loop-unroll.cc (unroll_loop_runtime_iterations): Likewise.
	* modulo-sched.cc (sms_schedule): Likewise.
	* omp-expand.cc (extract_omp_for_update_vars): Likewise.
	(expand_omp_ordered_sink): Likewise.
	(expand_omp_for_ordered_loops): Likewise.
	(expand_omp_for_static_nochunk): Likewise.
	* predict.cc (maybe_hot_count_p): Likewise.
	(probably_never_executed): Likewise.
	(set_even_probabilities): Likewise.
	(handle_missing_profiles): Likewise.
	(expensive_function_p): Likewise.
	* profile-count.h: Likewise.
	* profile.cc (compute_branch_probabilities): Likewise.
	* stmt.cc (emit_case_dispatch_table): Likewise.
	* symtab-thunks.cc (expand_thunk): Likewise.
	* tree-ssa-loop-manip.cc (tree_transform_and_unroll_loop): Likewise.
	* tree-ssa-sink.cc (select_best_block): Likewise.
	* tree-switch-conversion.cc (switch_decision_tree::analyze_switch_statement): Likewise.
	(switch_decision_tree::balance_case_nodes): Likewise.
	(switch_decision_tree::emit_case_nodes): Likewise.
	* tree-vect-loop.cc (scale_profile_for_vect_loop): Likewise.
Martin Liska 2022-01-25 19:16:06 +01:00
parent 970b03c003
commit 9f55aee9dc
22 changed files with 137 additions and 120 deletions
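The whole patch is a mechanical respelling: x.apply_scale (num, 1) becomes x * num and x.apply_scale (1, den) becomes x / den, for both profile_count and profile_probability. The standalone sketch below illustrates that equivalence with a toy value type; toy_count, its bare uint64_t member and the main () harness are stand-ins for illustration only, and they deliberately ignore the quality bits, saturation and capping that the real GCC classes carry.

#include <cassert>
#include <cstdint>

/* Toy stand-in for a profile value: a bare 64-bit counter with an
   apply_scale primitive.  It mirrors only the shape of the GCC API,
   not its semantics (no initialized/guessed qualities, no capping).  */
struct toy_count
{
  uint64_t m_val;

  /* Scale by num/den; the primitive the new operators delegate to.  */
  toy_count apply_scale (int64_t num, int64_t den) const
  {
    return { m_val * (uint64_t) num / (uint64_t) den };
  }

  /* The new spellings: '*' and '/' are thin wrappers over apply_scale.  */
  toy_count operator* (int64_t num) const { return apply_scale (num, 1); }
  toy_count operator/ (int64_t den) const { return apply_scale (1, den); }
};

int main ()
{
  toy_count c { 1000 };

  /* Old spelling on the left, new spelling on the right.  */
  assert (c.apply_scale (1, 2).m_val == (c / 2).m_val);
  assert (c.apply_scale (3, 1).m_val == (c * 3).m_val);
  return 0;
}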

gcc/bb-reorder.cc

@ -761,7 +761,7 @@ find_traces_1_round (int branch_th, profile_count count_th,
& EDGE_CAN_FALLTHRU)
&& !(single_succ_edge (e->dest)->flags & EDGE_COMPLEX)
&& single_succ (e->dest) == best_edge->dest
&& (e->dest->count.apply_scale (2, 1)
&& (e->dest->count * 2
>= best_edge->count () || for_size))
{
best_edge = e;
@ -944,7 +944,7 @@ better_edge_p (const_basic_block bb, const_edge e, profile_probability prob,
/* The BEST_* values do not have to be best, but can be a bit smaller than
maximum values. */
profile_probability diff_prob = best_prob.apply_scale (1, 10);
profile_probability diff_prob = best_prob / 10;
/* The smaller one is better to keep the original order. */
if (optimize_function_for_size_p (cfun))
@ -966,7 +966,7 @@ better_edge_p (const_basic_block bb, const_edge e, profile_probability prob,
is_better_edge = false;
else
{
profile_count diff_count = best_count.apply_scale (1, 10);
profile_count diff_count = best_count / 10;
if (count < best_count - diff_count
|| (!best_count.initialized_p ()
&& count.nonzero_p ()))

gcc/cfgloop.cc

@ -619,7 +619,7 @@ find_subloop_latch_edge_by_profile (vec<edge> latches)
}
if (!tcount.initialized_p () || !(tcount.ipa () > HEAVY_EDGE_MIN_SAMPLES)
|| (tcount - mcount).apply_scale (HEAVY_EDGE_RATIO, 1) > tcount)
|| (tcount - mcount) * HEAVY_EDGE_RATIO > tcount)
return NULL;
if (dump_file)

gcc/cfgloopmanip.cc

@ -563,8 +563,7 @@ scale_loop_profile (class loop *loop, profile_probability p,
/* Probability of exit must be 1/iterations. */
count_delta = e->count ();
e->probability = profile_probability::always ()
.apply_scale (1, iteration_bound);
e->probability = profile_probability::always () / iteration_bound;
other_e->probability = e->probability.invert ();
/* In code below we only handle the following two updates. */
@ -586,7 +585,7 @@ scale_loop_profile (class loop *loop, profile_probability p,
we look at the actual profile, if it is available. */
p = profile_probability::always ();
count_in = count_in.apply_scale (iteration_bound, 1);
count_in *= iteration_bound;
p = count_in.probability_in (loop->header->count);
if (!(p > profile_probability::never ()))
p = profile_probability::very_unlikely ();
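A small numeric illustration of the comment "Probability of exit must be 1/iterations" in the hunk above, using plain integers and a double rather than GCC's profile types: the variable names mirror the hunk, but the program is a self-contained sketch, not GCC code.

#include <cassert>
#include <cstdint>

/* If a loop is entered ENTRY times and iterates BOUND times per entry,
   the header runs roughly ENTRY * BOUND times while the exit edge is
   taken once per entry, i.e. with probability 1 / BOUND.  */
int main ()
{
  const uint64_t entry = 100;      /* count_in                     */
  const uint64_t bound = 8;        /* iteration_bound              */

  uint64_t header = entry * bound; /* count_in *= iteration_bound  */
  double exit_prob = 1.0 / bound;  /* always () / iteration_bound  */

  /* The exit edge fires ENTRY times out of HEADER header runs.  */
  assert ((double) entry / header == exit_prob);
  return 0;
}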

gcc/cfgrtl.cc

@ -1686,8 +1686,8 @@ force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
add also edge from asm goto bb to target. */
if (asm_goto_edge)
{
new_edge->probability = new_edge->probability.apply_scale (1, 2);
jump_block->count = jump_block->count.apply_scale (1, 2);
new_edge->probability /= 2;
jump_block->count /= 2;
edge new_edge2 = make_edge (new_edge->src, target,
e->flags & ~EDGE_FALLTHRU);
new_edge2->probability = probability - new_edge->probability;

gcc/cgraph.cc

@ -2935,11 +2935,10 @@ cgraph_edge::maybe_hot_p (void)
return false;
if (caller->frequency == NODE_FREQUENCY_EXECUTED_ONCE)
{
if (count.apply_scale (2, 1) < where->count.apply_scale (3, 1))
if (count * 2 < where->count * 3)
return false;
}
else if (count.apply_scale (param_hot_bb_frequency_fraction , 1)
< where->count)
else if (count * param_hot_bb_frequency_fraction < where->count)
return false;
return true;
}
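One thing worth noting in the maybe_hot_p hunk: fractional thresholds are expressed by scaling both sides of the comparison, so count * 2 < where->count * 3 reads as "count is below 1.5 times the caller's count" while staying entirely in integer counter arithmetic. The sketch below checks that reading on plain integers; the values and the name fraction (standing in for param_hot_bb_frequency_fraction) are made up, and overflow is assumed not to occur.

#include <cassert>
#include <cstdint>

int main ()
{
  uint64_t count = 140, where = 100;
  /* count * 2 < where * 3   <=>   count < 1.5 * where  */
  assert ((count * 2 < where * 3) == ((double) count < 1.5 * where));

  const uint64_t fraction = 4;  /* stand-in for param_hot_bb_frequency_fraction */
  count = 20;
  where = 100;
  /* count * fraction < where   <=>   count < where / fraction  */
  assert ((count * fraction < where)
	  == ((double) count < (double) where / fraction));
  return 0;
}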

gcc/config/sh/sh.cc

@ -2178,7 +2178,7 @@ expand_cbranchdi4 (rtx *operands, enum rtx_code comparison)
&& prob.to_reg_br_prob_base () >= (int) (REG_BR_PROB_BASE * 3 / 8U)
&& prob.to_reg_br_prob_base () <= (int) (REG_BR_PROB_BASE * 5 / 8U))
{
msw_taken_prob = prob.apply_scale (1, 2);
msw_taken_prob = prob / 2;
msw_skip_prob = rev_prob.apply_scale (REG_BR_PROB_BASE,
rev_prob.to_reg_br_prob_base ()
+ REG_BR_PROB_BASE);

gcc/dojump.cc

@ -1131,7 +1131,7 @@ do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
profile_probability cprob
= profile_probability::guessed_always ();
if (first_code == UNORDERED)
cprob = cprob.apply_scale (1, 100);
cprob /= 100;
else if (first_code == ORDERED)
cprob = cprob.apply_scale (99, 100);
else

gcc/final.cc

@ -642,8 +642,7 @@ compute_alignments (void)
flow_loops_dump (dump_file, NULL, 1);
}
loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
profile_count count_threshold = cfun->cfg->count_max.apply_scale
(1, param_align_threshold);
profile_count count_threshold = cfun->cfg->count_max / param_align_threshold;
if (dump_file)
{
@ -710,10 +709,9 @@ compute_alignments (void)
if (!has_fallthru
&& (branch_count > count_threshold
|| (bb->count > bb->prev_bb->count.apply_scale (10, 1)
|| (bb->count > bb->prev_bb->count * 10
&& (bb->prev_bb->count
<= ENTRY_BLOCK_PTR_FOR_FN (cfun)
->count.apply_scale (1, 2)))))
<= ENTRY_BLOCK_PTR_FOR_FN (cfun)->count / 2))))
{
align_flags alignment = JUMP_ALIGN (label);
if (dump_file)
@ -727,9 +725,7 @@ compute_alignments (void)
&& single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun))
&& optimize_bb_for_speed_p (bb)
&& branch_count + fallthru_count > count_threshold
&& (branch_count
> fallthru_count.apply_scale
(param_align_loop_iterations, 1)))
&& (branch_count > fallthru_count * param_align_loop_iterations))
{
align_flags alignment = LOOP_ALIGN (label);
if (dump_file)

gcc/ipa-cp.cc

@ -4652,8 +4652,7 @@ update_counts_for_self_gen_clones (cgraph_node *orig_node,
{
profile_count orig_count = n->count;
profile_count new_count
= (redist_sum.apply_scale (1, self_gen_clones.length ())
+ other_edges_count[i]);
= (redist_sum / self_gen_clones.length () + other_edges_count[i]);
new_count = lenient_count_portion_handling (new_count, orig_node);
n->count = new_count;
profile_count::adjust_for_ipa_scaling (&new_count, &orig_count);
@ -4685,7 +4684,7 @@ update_counts_for_self_gen_clones (cgraph_node *orig_node,
for (cgraph_edge *e = cs; e; e = get_next_cgraph_edge_clone (e))
if (e->callee->ultimate_alias_target () == orig_node
&& processed_edges.contains (e))
e->count = e->count.apply_scale (1, den);
e->count /= den;
}
}
@ -4713,8 +4712,7 @@ update_counts_for_self_gen_clones (cgraph_node *orig_node,
&& desc.unproc_orig_rec_edges > 0)
{
desc.count = n->count - desc.count;
desc.count
= desc.count.apply_scale (1, desc.unproc_orig_rec_edges);
desc.count = desc.count /= desc.unproc_orig_rec_edges;
adjust_clone_incoming_counts (n, &desc);
}
else if (dump_file)
@ -6084,7 +6082,7 @@ decide_about_value (struct cgraph_node *node, int index, HOST_WIDE_INT offset,
if (node->count.ipa ().nonzero_p ())
{
unsigned dem = self_gen_clones->length () + 1;
rec_count_sum = node->count.ipa ().apply_scale (1, dem);
rec_count_sum = node->count.ipa () / dem;
}
else
rec_count_sum = profile_count::zero ();

gcc/ipa-inline-analysis.cc

@ -254,7 +254,7 @@ do_estimate_edge_time (struct cgraph_edge *edge, sreal *ret_nonspec_time)
probability that caller will call the callee is low however, since it
may hurt optimization of the caller's hot path. */
if (edge->count.ipa ().initialized_p () && edge->maybe_hot_p ()
&& (edge->count.ipa ().apply_scale (2, 1)
&& (edge->count.ipa () * 2
> (edge->caller->inlined_to
? edge->caller->inlined_to->count.ipa ()
: edge->caller->count.ipa ())))

gcc/loop-unroll.cc

@ -978,7 +978,7 @@ unroll_loop_runtime_iterations (class loop *loop)
/* Compute count increments for each switch block and initialize
innermost switch block. Switch blocks and peeled loop copies are built
from innermost outward. */
iter_count = new_count = swtch->count.apply_scale (1, max_unroll + 1);
iter_count = new_count = swtch->count / (max_unroll + 1);
swtch->count = new_count;
for (i = 0; i < n_peel; i++)
@ -995,7 +995,7 @@ unroll_loop_runtime_iterations (class loop *loop)
/* Create item for switch. */
unsigned j = n_peel - i - (extra_zero_check ? 0 : 1);
p = profile_probability::always ().apply_scale (1, i + 2);
p = profile_probability::always () / (i + 2);
preheader = split_edge (loop_preheader_edge (loop));
/* Add in count of edge from switch block. */
@ -1021,12 +1021,12 @@ unroll_loop_runtime_iterations (class loop *loop)
if (extra_zero_check)
{
/* Add branch for zero iterations. */
p = profile_probability::always ().apply_scale (1, max_unroll + 1);
p = profile_probability::always () / (max_unroll + 1);
swtch = ezc_swtch;
preheader = split_edge (loop_preheader_edge (loop));
/* Recompute count adjustments since initial peel copy may
have exited and reduced those values that were computed above. */
iter_count = swtch->count.apply_scale (1, max_unroll + 1);
iter_count = swtch->count / (max_unroll + 1);
/* Add in count of edge from switch block. */
preheader->count += iter_count;
branch_code = compare_and_jump_seq (copy_rtx (niter), const0_rtx, EQ,

gcc/modulo-sched.cc

@ -1439,10 +1439,10 @@ sms_schedule (void)
/* Perform SMS only on loops that their average count is above threshold. */
if ( latch_edge->count () > profile_count::zero ()
&& (latch_edge->count()
< single_exit (loop)->count ().apply_scale
(param_sms_loop_average_count_threshold, 1)))
if (latch_edge->count () > profile_count::zero ()
&& (latch_edge->count ()
< (single_exit (loop)->count ()
* param_sms_loop_average_count_threshold)))
{
if (dump_file)
{
@ -1464,12 +1464,12 @@ sms_schedule (void)
}
/* Make sure this is a doloop. */
if ( !(count_reg = doloop_register_get (head, tail)))
{
if (dump_file)
fprintf (dump_file, "SMS doloop_register_get failed\n");
continue;
}
if (!(count_reg = doloop_register_get (head, tail)))
{
if (dump_file)
fprintf (dump_file, "SMS doloop_register_get failed\n");
continue;
}
/* Don't handle BBs with calls or barriers
or !single_set with the exception of do-loop control part insns.

gcc/omp-expand.cc

@ -3120,8 +3120,7 @@ extract_omp_for_update_vars (struct omp_for_data *fd, tree *nonrect_bounds,
if (i < fd->collapse - 1)
{
e = make_edge (last_bb, bb, EDGE_FALSE_VALUE);
e->probability
= profile_probability::guessed_always ().apply_scale (1, 8);
e->probability = profile_probability::guessed_always () / 8;
struct omp_for_data_loop *l = &fd->loops[i + 1];
if (l->m1 == NULL_TREE || l->outer != 1)
@ -3240,8 +3239,7 @@ extract_omp_for_update_vars (struct omp_for_data *fd, tree *nonrect_bounds,
if (update_bb == NULL)
update_bb = this_bb;
e = make_edge (this_bb, bb, EDGE_FALSE_VALUE);
e->probability
= profile_probability::guessed_always ().apply_scale (1, 8);
e->probability = profile_probability::guessed_always () / 8;
if (prev_bb == NULL)
set_immediate_dominator (CDI_DOMINATORS, this_bb, bb);
prev_bb = this_bb;
@ -3533,7 +3531,7 @@ expand_omp_ordered_sink (gimple_stmt_iterator *gsi, struct omp_for_data *fd,
GSI_CONTINUE_LINKING);
gsi_insert_after (gsi, gimple_build_cond_empty (cond), GSI_NEW_STMT);
edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
e3->probability = profile_probability::guessed_always ().apply_scale (1, 8);
e3->probability = profile_probability::guessed_always () / 8;
e1->probability = e3->probability.invert ();
e1->flags = EDGE_TRUE_VALUE;
set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
@ -3687,7 +3685,7 @@ expand_omp_for_ordered_loops (struct omp_for_data *fd, tree *counts,
remove_edge (e1);
make_edge (body_bb, new_header, EDGE_FALLTHRU);
e3->flags = EDGE_FALSE_VALUE;
e3->probability = profile_probability::guessed_always ().apply_scale (1, 8);
e3->probability = profile_probability::guessed_always () / 8;
e1 = make_edge (new_header, new_body, EDGE_TRUE_VALUE);
e1->probability = e3->probability.invert ();
@ -5484,16 +5482,14 @@ expand_omp_for_static_nochunk (struct omp_region *region,
ep->probability = profile_probability::guessed_always ().apply_scale (3, 4);
ep = find_edge (entry_bb, second_bb);
ep->flags = EDGE_TRUE_VALUE;
ep->probability = profile_probability::guessed_always ().apply_scale (1, 4);
ep->probability = profile_probability::guessed_always () / 4;
if (fourth_bb)
{
ep = make_edge (third_bb, fifth_bb, EDGE_FALSE_VALUE);
ep->probability
= profile_probability::guessed_always ().apply_scale (1, 2);
ep->probability = profile_probability::guessed_always () / 2;
ep = find_edge (third_bb, fourth_bb);
ep->flags = EDGE_TRUE_VALUE;
ep->probability
= profile_probability::guessed_always ().apply_scale (1, 2);
ep->probability = profile_probability::guessed_always () / 2;
ep = find_edge (fourth_bb, fifth_bb);
redirect_edge_and_branch (ep, sixth_bb);
}
@ -5504,12 +5500,10 @@ expand_omp_for_static_nochunk (struct omp_region *region,
if (exit1_bb)
{
ep = make_edge (exit_bb, exit2_bb, EDGE_FALSE_VALUE);
ep->probability
= profile_probability::guessed_always ().apply_scale (1, 2);
ep->probability = profile_probability::guessed_always () / 2;
ep = find_edge (exit_bb, exit1_bb);
ep->flags = EDGE_TRUE_VALUE;
ep->probability
= profile_probability::guessed_always ().apply_scale (1, 2);
ep->probability = profile_probability::guessed_always () / 2;
ep = find_edge (exit1_bb, exit2_bb);
redirect_edge_and_branch (ep, exit3_bb);
}

gcc/predict.cc

@ -172,7 +172,7 @@ maybe_hot_count_p (struct function *fun, profile_count count)
if (node->frequency == NODE_FREQUENCY_EXECUTED_ONCE
&& count < (ENTRY_BLOCK_PTR_FOR_FN (fun)->count.apply_scale (2, 3)))
return false;
if (count.apply_scale (param_hot_bb_frequency_fraction, 1)
if (count * param_hot_bb_frequency_fraction
< ENTRY_BLOCK_PTR_FOR_FN (fun)->count)
return false;
return true;
@ -219,7 +219,7 @@ probably_never_executed (struct function *fun, profile_count count)
if (count.precise_p () && profile_status_for_fn (fun) == PROFILE_READ)
{
const int unlikely_frac = param_unlikely_bb_count_fraction;
if (count.apply_scale (unlikely_frac, 1) >= profile_info->runs)
if (count * unlikely_frac >= profile_info->runs)
return false;
return true;
}
@ -916,12 +916,12 @@ set_even_probabilities (basic_block bb,
else
{
profile_probability remainder = prob.invert ();
remainder -= profile_probability::very_unlikely ()
.apply_scale (unlikely_count, 1);
remainder -= (profile_probability::very_unlikely ()
* unlikely_count);
int count = nedges - unlikely_count - 1;
gcc_assert (count >= 0);
e->probability = remainder.apply_scale (1, count);
e->probability = remainder / count;
}
}
else
@ -940,7 +940,7 @@ set_even_probabilities (basic_block bb,
if (unlikely_edges != NULL && unlikely_edges->contains (e))
e->probability = profile_probability::very_unlikely ();
else
e->probability = all.apply_scale (1, scale);
e->probability = all / scale;
}
else
e->probability = profile_probability::never ();
@ -3619,7 +3619,7 @@ handle_missing_profiles (void)
if (call_count > 0
&& fn && fn->cfg
&& call_count.apply_scale (unlikely_frac, 1) >= profile_info->runs)
&& call_count * unlikely_frac >= profile_info->runs)
{
drop_profile (node, call_count);
worklist.safe_push (node);
@ -3684,8 +3684,7 @@ expensive_function_p (int threshold)
if (!ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.nonzero_p ())
return true;
profile_count limit = ENTRY_BLOCK_PTR_FOR_FN
(cfun)->count.apply_scale (threshold, 1);
profile_count limit = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count * threshold;
profile_count sum = profile_count::zero ();
FOR_EACH_BB_FN (bb, cfun)
{

gcc/profile-count.h

@ -185,7 +185,7 @@ public:
static profile_probability very_unlikely ()
{
/* Be consistent with PROB_VERY_UNLIKELY in predict.h. */
profile_probability r = guessed_always ().apply_scale (1, 2000);
profile_probability r = guessed_always () / 2000;
r.m_val--;
return r;
}
@ -193,14 +193,14 @@ public:
static profile_probability unlikely ()
{
/* Be consistent with PROB_VERY_LIKELY in predict.h. */
profile_probability r = guessed_always ().apply_scale (1, 5);
profile_probability r = guessed_always () / 5;
r.m_val--;
return r;
}
static profile_probability even ()
{
return guessed_always ().apply_scale (1, 2);
return guessed_always () / 2;
}
static profile_probability very_likely ()
@ -600,6 +600,26 @@ public:
return initialized_p () && other.initialized_p () && m_val >= other.m_val;
}
profile_probability operator* (int64_t num) const
{
return apply_scale (num, 1);
}
profile_probability operator*= (int64_t den) const
{
return *this * den;
}
profile_probability operator/ (int64_t den) const
{
return apply_scale (1, den);
}
profile_probability operator/= (int64_t den) const
{
return *this / den;
}
/* Get the value of the count. */
uint32_t value () const { return m_val; }
@ -992,6 +1012,26 @@ public:
return ipa ().initialized_p () && ipa ().m_val >= (uint64_t) other;
}
profile_count operator* (int64_t num) const
{
return apply_scale (num, 1);
}
profile_count operator*= (int64_t den) const
{
return *this * den;
}
profile_count operator/ (int64_t den) const
{
return apply_scale (1, den);
}
profile_count operator/= (int64_t den) const
{
return *this / den;
}
/* Return true when value is not zero and can be used for scaling.
This is different from *this > 0 because that requires counter to
be IPA. */
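Note the shape of the compound operators added in this hunk: operator*= and operator/= are declared const and simply return the scaled value. For comparison, the toy type below (a stand-in, not GCC's profile_count) spells operator/= in the conventional mutating form, where a statement such as c /= 2; updates c in place.

#include <cassert>
#include <cstdint>

struct toy_count
{
  uint64_t m_val;

  /* Non-mutating division, as in the hunk's operator/.  */
  toy_count operator/ (int64_t den) const { return { m_val / (uint64_t) den }; }

  /* Mutating compound form: updates *this and returns a reference,
     the usual C++ idiom for 'x /= n;' used as a statement.  */
  toy_count &operator/= (int64_t den)
  {
    m_val /= (uint64_t) den;
    return *this;
  }
};

int main ()
{
  toy_count c { 100 };
  c /= 2;                        /* statement form, e.g. 'jump_block->count /= 2;' */
  assert (c.m_val == 50);
  assert ((c / 5).m_val == 10);  /* operator/ leaves c untouched */
  assert (c.m_val == 50);
  return 0;
}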

gcc/profile.cc

@ -716,7 +716,7 @@ compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
FOR_EACH_EDGE (e, ei, bb->succs)
if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
e->probability
= profile_probability::guessed_always ().apply_scale (1, total);
= profile_probability::guessed_always () / total;
else
e->probability = profile_probability::never ();
}
@ -724,8 +724,7 @@ compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
{
total += EDGE_COUNT (bb->succs);
FOR_EACH_EDGE (e, ei, bb->succs)
e->probability
= profile_probability::guessed_always ().apply_scale (1, total);
e->probability = profile_probability::guessed_always () / total;
}
if (bb->index >= NUM_FIXED_BLOCKS
&& block_ends_with_condjump_p (bb)

gcc/stmt.cc

@ -822,9 +822,8 @@ emit_case_dispatch_table (tree index_expr, tree index_type,
through the indirect jump or the direct conditional jump
before that. Split the probability of reaching the
default label among these two jumps. */
new_default_prob
= conditional_probability (default_prob.apply_scale (1, 2), base);
default_prob = default_prob.apply_scale (1, 2);
new_default_prob = conditional_probability (default_prob / 2, base);
default_prob /= 2;
base -= default_prob;
}
else

gcc/symtab-thunks.cc

@ -579,11 +579,11 @@ expand_thunk (cgraph_node *node, bool output_asm_thunks,
adjustment, because that's why we're emitting a
thunk. */
then_bb = create_basic_block (NULL, bb);
then_bb->count = cfg_count - cfg_count.apply_scale (1, 16);
then_bb->count = cfg_count - cfg_count / 16;
return_bb = create_basic_block (NULL, then_bb);
return_bb->count = cfg_count;
else_bb = create_basic_block (NULL, else_bb);
else_bb->count = cfg_count.apply_scale (1, 16);
else_bb->count = cfg_count / 16;
add_bb_to_loop (then_bb, bb->loop_father);
add_bb_to_loop (return_bb, bb->loop_father);
add_bb_to_loop (else_bb, bb->loop_father);
@ -594,11 +594,9 @@ expand_thunk (cgraph_node *node, bool output_asm_thunks,
NULL_TREE, NULL_TREE);
gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
e->probability = profile_probability::guessed_always ()
.apply_scale (1, 16);
e->probability = profile_probability::guessed_always () / 16;
e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
e->probability = profile_probability::guessed_always ()
.apply_scale (1, 16);
e->probability = profile_probability::guessed_always () / 16;
make_single_succ_edge (return_bb,
EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
make_single_succ_edge (then_bb, return_bb, EDGE_FALLTHRU);
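A side note on the arithmetic in the thunk hunk above: the hot branch is written as cfg_count - cfg_count / 16 rather than as a direct 15/16 scale, and one property of that formulation is that the two branch counts sum back to the original exactly even when the division rounds. Whether that was the motivation here is an inference; the toy check below only demonstrates the arithmetic with plain integers standing in for profile_count.

#include <cassert>
#include <cstdint>

int main ()
{
  const uint64_t cfg_count = 1001;                    /* not divisible by 16 */
  uint64_t else_count = cfg_count / 16;               /* 62                  */
  uint64_t then_count = cfg_count - cfg_count / 16;   /* 939                 */

  /* The split is exact by construction, whatever the rounding.  */
  assert (then_count + else_count == cfg_count);
  return 0;
}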

gcc/tree-ssa-loop-manip.cc

@ -1421,8 +1421,8 @@ tree_transform_and_unroll_loop (class loop *loop, unsigned factor,
}
basic_block rest = new_exit->dest;
new_exit->probability = profile_probability::always ()
.apply_scale (1, new_est_niter + 1);
new_exit->probability
= (profile_probability::always () / (new_est_niter + 1));
rest->count += new_exit->count ();
@ -1463,8 +1463,7 @@ tree_transform_and_unroll_loop (class loop *loop, unsigned factor,
&& TREE_CODE (desc->niter) == INTEGER_CST)
{
/* The + 1 converts latch counts to iteration counts. */
profile_count new_header_count
= (in_count.apply_scale (new_est_niter + 1, 1));
profile_count new_header_count = in_count * (new_est_niter + 1);
basic_block *body = get_loop_body (loop);
scale_bbs_frequencies_profile_count (body, loop->num_nodes,
new_header_count,
@ -1502,8 +1501,8 @@ tree_transform_and_unroll_loop (class loop *loop, unsigned factor,
e->dest->count / e->src->count ~= new e->probability
for every outgoing edge e of NEW_EXIT->src. */
profile_probability new_exit_prob = profile_probability::always ()
.apply_scale (1, new_est_niter + 1);
profile_probability new_exit_prob
= profile_probability::always () / (new_est_niter + 1);
change_edge_frequency (new_exit, new_exit_prob);
}

gcc/tree-ssa-sink.cc

@ -230,8 +230,7 @@ select_best_block (basic_block early_bb,
if (bb_loop_depth (best_bb) == bb_loop_depth (early_bb)
/* If result of comparsion is unknown, prefer EARLY_BB.
Thus use !(...>=..) rather than (...<...) */
&& !(best_bb->count.apply_scale (100, 1)
>= early_bb->count.apply_scale (threshold, 1)))
&& !(best_bb->count * 100 >= early_bb->count * threshold))
return best_bb;
/* No better block found, so return EARLY_BB, which happens to be the

gcc/tree-switch-conversion.cc

@ -1782,7 +1782,7 @@ switch_decision_tree::analyze_switch_statement ()
tree high = CASE_HIGH (elt);
profile_probability p
= case_edge->probability.apply_scale (1, (intptr_t) (case_edge->aux));
= case_edge->probability / ((intptr_t) (case_edge->aux));
clusters.quick_push (new simple_cluster (low, high, elt, case_edge->dest,
p));
m_case_bbs.quick_push (case_edge->dest);
@ -2057,7 +2057,7 @@ switch_decision_tree::balance_case_nodes (case_tree_node **head,
/* Split this list if it is long enough for that to help. */
npp = head;
left = *npp;
profile_probability pivot_prob = prob.apply_scale (1, 2);
profile_probability pivot_prob = prob / 2;
/* Find the place in the list that bisects the list's total cost
by probability. */
@ -2259,12 +2259,11 @@ switch_decision_tree::emit_case_nodes (basic_block bb, tree index,
redirect_edge_succ (single_pred_edge (test_bb),
single_succ_edge (bb)->dest);
p = ((node->m_right->m_c->m_subtree_prob
+ default_prob.apply_scale (1, 2))
p = ((node->m_right->m_c->m_subtree_prob + default_prob / 2)
/ (node->m_c->m_subtree_prob + default_prob));
bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_high (),
GT_EXPR, test_bb, p, loc);
default_prob = default_prob.apply_scale (1, 2);
default_prob /= 2;
/* Handle the left-hand subtree. */
bb = emit_case_nodes (bb, index, node->m_left,
@ -2293,11 +2292,11 @@ switch_decision_tree::emit_case_nodes (basic_block bb, tree index,
if (node->m_right->has_child ()
|| !node->m_right->m_c->is_single_value_p ())
{
p = (default_prob.apply_scale (1, 2)
p = ((default_prob / 2)
/ (node->m_c->m_subtree_prob + default_prob));
bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_low (),
LT_EXPR, m_default_bb, p, loc);
default_prob = default_prob.apply_scale (1, 2);
default_prob /= 2;
bb = emit_case_nodes (bb, index, node->m_right, default_prob,
index_type, loc);
@ -2320,11 +2319,11 @@ switch_decision_tree::emit_case_nodes (basic_block bb, tree index,
if (node->m_left->has_child ()
|| !node->m_left->m_c->is_single_value_p ())
{
p = (default_prob.apply_scale (1, 2)
p = ((default_prob / 2)
/ (node->m_c->m_subtree_prob + default_prob));
bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_high (),
GT_EXPR, m_default_bb, p, loc);
default_prob = default_prob.apply_scale (1, 2);
default_prob /= 2;
bb = emit_case_nodes (bb, index, node->m_left, default_prob,
index_type, loc);
@ -2357,29 +2356,29 @@ switch_decision_tree::emit_case_nodes (basic_block bb, tree index,
profile_probability right_prob = profile_probability::never ();
if (node->m_right)
right_prob = node->m_right->m_c->m_subtree_prob;
p = ((right_prob + default_prob.apply_scale (1, 2))
/ (node->m_c->m_subtree_prob + default_prob));
p = ((right_prob + default_prob / 2)
/ (node->m_c->m_subtree_prob + default_prob));
bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_high (),
GT_EXPR, test_bb, p, loc);
default_prob = default_prob.apply_scale (1, 2);
bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_high (),
GT_EXPR, test_bb, p, loc);
default_prob /= 2;
/* Value belongs to this node or to the left-hand subtree. */
p = node->m_c->m_prob / (node->m_c->m_subtree_prob + default_prob);
bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_low (),
GE_EXPR, node->m_c->m_case_bb, p, loc);
/* Value belongs to this node or to the left-hand subtree. */
p = node->m_c->m_prob / (node->m_c->m_subtree_prob + default_prob);
bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_low (),
GE_EXPR, node->m_c->m_case_bb, p, loc);
/* Handle the left-hand subtree. */
bb = emit_case_nodes (bb, index, node->m_left,
default_prob, index_type, loc);
/* Handle the left-hand subtree. */
bb = emit_case_nodes (bb, index, node->m_left, default_prob,
index_type, loc);
/* If the left-hand subtree fell through,
don't let it fall into the right-hand subtree. */
if (bb && m_default_bb)
emit_jump (bb, m_default_bb);
/* If the left-hand subtree fell through,
don't let it fall into the right-hand subtree. */
if (bb && m_default_bb)
emit_jump (bb, m_default_bb);
bb = emit_case_nodes (test_bb, index, node->m_right,
default_prob, index_type, loc);
bb = emit_case_nodes (test_bb, index, node->m_right, default_prob,
index_type, loc);
}
else
{

gcc/tree-vect-loop.cc

@ -9343,13 +9343,12 @@ scale_profile_for_vect_loop (class loop *loop, unsigned vf)
in loop's preheader. */
if (!(freq_e == profile_count::zero ()))
freq_e = freq_e.force_nonzero ();
p = freq_e.apply_scale (new_est_niter + 1, 1).probability_in (freq_h);
p = (freq_e * (new_est_niter + 1)).probability_in (freq_h);
scale_loop_frequencies (loop, p);
}
edge exit_e = single_exit (loop);
exit_e->probability = profile_probability::always ()
.apply_scale (1, new_est_niter + 1);
exit_e->probability = profile_probability::always () / (new_est_niter + 1);
edge exit_l = single_pred_edge (loop->latch);
profile_probability prob = exit_l->probability;