cse.c (enum taken): Remove PATH_AROUND.

* cse.c (enum taken): Remove PATH_AROUND.
	(addr_affects_sp_p, invalidate_skipped_set,
	invalidate_skipped_block): Remove.
	(cse_end_of_basic_block): Remove skip_blocks and related code.
	(cse_main): Don't test for flag_cse_skip_blocks.
	Update cse_end_of_basic_block call.
	(cse_basic_block): Likewise.  Remove PATH_AROUND case.  Remove
	code to lengthen the path if a jump was simplified.

From-SVN: r119079
This commit is contained in:
Steven Bosscher 2006-11-22 00:13:42 +00:00
parent 7d93d98774
commit 7dad9fb498
2 changed files with 25 additions and 208 deletions

View file

@ -1,3 +1,14 @@
2006-11-22 Steven Bosscher <steven@gcc.gnu.org>
* cse.c (enum taken): Remove PATH_AROUND.
(addr_affects_sp_p, invalidate_skipped_set,
invalidate_skipped_block): Remove.
(cse_end_of_basic_block): Remove skip_blocks and related code.
(cse_main): Don't test for flag_cse_skip_blocks.
Update cse_end_of_basic_block call.
(cse_basic_block): Likewise. Remove PATH_AROUND case. Remove
code to lengthen the path if a jump was simplified.
2006-11-22 Zdenek Dvorak <dvorakz@suse.cz>
PR rtl-optimization/29924

222
gcc/cse.c
View file

@ -560,10 +560,8 @@ struct cse_basic_block_data
{
/* The branch insn. */
rtx branch;
/* Whether it should be taken or not. AROUND is the same as taken
except that it is used when the destination label is not preceded
by a BARRIER. */
enum taken {PATH_TAKEN, PATH_NOT_TAKEN, PATH_AROUND} status;
/* Whether it should be taken or not. */
enum taken {PATH_TAKEN, PATH_NOT_TAKEN} status;
} *path;
};
@ -610,12 +608,9 @@ static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
int);
static void cse_insn (rtx, rtx);
static void cse_end_of_basic_block (rtx, struct cse_basic_block_data *,
int, int);
static int addr_affects_sp_p (rtx);
int);
static void invalidate_from_clobbers (rtx);
static rtx cse_process_notes (rtx, rtx);
static void invalidate_skipped_set (rtx, rtx, void *);
static void invalidate_skipped_block (rtx);
static rtx cse_basic_block (rtx, rtx, struct branch_path *);
static void count_reg_usage (rtx, int *, rtx, int);
static int check_for_label_ref (rtx *, void *);
@ -5679,33 +5674,6 @@ invalidate_memory (void)
}
}
/* If ADDR is an address that implicitly affects the stack pointer, return
1 and update the register tables to show the effect. Else, return 0. */
static int
addr_affects_sp_p (rtx addr)
{
if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
&& REG_P (XEXP (addr, 0))
&& REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
{
if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
{
REG_TICK (STACK_POINTER_REGNUM)++;
/* Is it possible to use a subreg of SP? */
SUBREG_TICKED (STACK_POINTER_REGNUM) = -1;
}
/* This should be *very* rare. */
if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
invalidate (stack_pointer_rtx, VOIDmode);
return 1;
}
return 0;
}
/* Perform invalidation on the basis of everything about an insn
except for invalidating the actual places that are SET in it.
This includes the places CLOBBERed, and anything that might
@ -5836,66 +5804,6 @@ cse_process_notes (rtx x, rtx object)
return x;
}
/* Process one SET of an insn that was skipped. We ignore CLOBBERs
since they are done elsewhere. This function is called via note_stores. */
static void
invalidate_skipped_set (rtx dest, rtx set, void *data ATTRIBUTE_UNUSED)
{
enum rtx_code code = GET_CODE (dest);
if (code == MEM
&& ! addr_affects_sp_p (dest) /* If this is not a stack push ... */
/* There are times when an address can appear varying and be a PLUS
during this scan when it would be a fixed address were we to know
the proper equivalences. So invalidate all memory if there is
a BLKmode or nonscalar memory reference or a reference to a
variable address. */
&& (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
|| cse_rtx_varies_p (XEXP (dest, 0), 0)))
{
invalidate_memory ();
return;
}
if (GET_CODE (set) == CLOBBER
|| CC0_P (dest)
|| dest == pc_rtx)
return;
if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
invalidate (XEXP (dest, 0), GET_MODE (dest));
else if (code == REG || code == SUBREG || code == MEM)
invalidate (dest, VOIDmode);
}
/* Invalidate all insns from START up to the end of the function or the
next label. This is called when we wish to CSE around a block that is
conditionally executed. */
static void
invalidate_skipped_block (rtx start)
{
rtx insn;
for (insn = start; insn && !LABEL_P (insn);
insn = NEXT_INSN (insn))
{
if (! INSN_P (insn))
continue;
if (CALL_P (insn))
{
if (! CONST_OR_PURE_CALL_P (insn))
invalidate_memory ();
invalidate_for_call ();
}
invalidate_from_clobbers (PATTERN (insn));
note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
}
}
/* Find the end of INSN's basic block and return its range,
the total number of SETs in all the insns of the block, the last insn of the
block, and the branch path.
@ -5903,7 +5811,7 @@ invalidate_skipped_block (rtx start)
The branch path indicates which branches should be followed. If a nonzero
path size is specified, the block should be rescanned and a different set
of branches will be taken. The branch path is only used if
FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is nonzero.
FLAG_CSE_FOLLOW_JUMPS is nonzero.
DATA is a pointer to a struct cse_basic_block_data, defined below, that is
used to describe the block. It is filled in with the information about
@ -5912,7 +5820,7 @@ invalidate_skipped_block (rtx start)
static void
cse_end_of_basic_block (rtx insn, struct cse_basic_block_data *data,
int follow_jumps, int skip_blocks)
int follow_jumps)
{
rtx p = insn, q;
int nsets = 0;
@ -5943,9 +5851,9 @@ cse_end_of_basic_block (rtx insn, struct cse_basic_block_data *data,
to figure out where to go next. We want to return the next block
in the instruction stream, not some branched-to block somewhere
else. We accomplish this by pretending our caller forbade us to
follow jumps, or skip blocks. */
follow jumps. */
if (GET_MODE (insn) == QImode)
follow_jumps = skip_blocks = 0;
follow_jumps = 0;
/* Scan to end of this basic block. */
while (p && !LABEL_P (p))
@ -5986,14 +5894,9 @@ cse_end_of_basic_block (rtx insn, struct cse_basic_block_data *data,
/* If this is a conditional jump, we can follow it if -fcse-follow-jumps
was specified, we haven't reached our maximum path length, there are
insns following the target of the jump, this is the only use of the
jump label, and the target label is preceded by a BARRIER.
Alternatively, we can follow the jump if it branches around a
block of code and there are no other branches into the block.
In this case invalidate_skipped_block will be called to invalidate any
registers set in the block when following the jump. */
else if ((follow_jumps || skip_blocks) && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH) - 1
jump label, and the target label is preceded by a BARRIER. */
else if (follow_jumps
&& path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH) - 1
&& JUMP_P (p)
&& GET_CODE (PATTERN (p)) == SET
&& GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
@ -6041,42 +5944,6 @@ cse_end_of_basic_block (rtx insn, struct cse_basic_block_data *data,
/* Mark block so we won't scan it again later. */
PUT_MODE (NEXT_INSN (p), QImode);
}
/* Detect a branch around a block of code. */
else if (skip_blocks && q != 0 && !LABEL_P (q))
{
rtx tmp;
if (next_real_insn (q) == next)
{
p = NEXT_INSN (p);
continue;
}
for (i = 0; i < path_entry; i++)
if (data->path[i].branch == p)
break;
if (i != path_entry)
break;
/* This is no_labels_between_p (p, q) with an added check for
reaching the end of a function (in case Q precedes P). */
for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
if (LABEL_P (tmp))
break;
if (tmp == q)
{
data->path[path_entry].branch = p;
data->path[path_entry++].status = PATH_AROUND;
path_size = path_entry;
p = JUMP_LABEL (p);
/* Mark block so we won't scan it again later. */
PUT_MODE (NEXT_INSN (p), QImode);
}
}
}
p = NEXT_INSN (p);
}
@ -6150,8 +6017,7 @@ cse_main (rtx f, int nregs)
while (insn)
{
cse_altered = 0;
cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps,
flag_cse_skip_blocks);
cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps);
/* If this basic block was already processed or has no sets, skip it. */
if (val.nsets == 0 || GET_MODE (insn) == QImode)
@ -6191,8 +6057,7 @@ cse_main (rtx f, int nregs)
us a new branch path to investigate. */
cse_jumps_altered = 0;
temp = cse_basic_block (insn, val.last, val.path);
if (cse_jumps_altered == 0
|| (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
if (cse_jumps_altered == 0 || flag_cse_follow_jumps == 0)
insn = temp;
cse_jumps_altered |= old_cse_jumps_altered;
@ -6264,10 +6129,8 @@ cse_basic_block (rtx from, rtx to, struct branch_path *next_branch)
enum taken status = next_branch++->status;
if (status != PATH_NOT_TAKEN)
{
if (status == PATH_TAKEN)
record_jump_equiv (insn, 1);
else
invalidate_skipped_block (NEXT_INSN (insn));
gcc_assert (status == PATH_TAKEN);
record_jump_equiv (insn, 1);
/* Set the last insn as the jump insn; it doesn't affect cc0.
Then follow this branch. */
@ -6358,63 +6221,6 @@ cse_basic_block (rtx from, rtx to, struct branch_path *next_branch)
insn = PREV_INSN (to);
}
/* See if it is ok to keep on going past the label
which used to end our basic block. Remember that we incremented
the count of that label, so we decrement it here. If we made
a jump unconditional, TO_USAGE will be one; in that case, we don't
want to count the use in that jump. */
if (to != 0 && NEXT_INSN (insn) == to
&& LABEL_P (to) && --LABEL_NUSES (to) == to_usage)
{
struct cse_basic_block_data val;
rtx prev;
insn = NEXT_INSN (to);
/* If TO was the last insn in the function, we are done. */
if (insn == 0)
{
free (qty_table);
return 0;
}
/* If TO was preceded by a BARRIER we are done with this block
because it has no continuation. */
prev = prev_nonnote_insn (to);
if (prev && BARRIER_P (prev))
{
free (qty_table);
return insn;
}
/* Find the end of the following block. Note that we won't be
following branches in this case. */
to_usage = 0;
val.path_size = 0;
val.path = XNEWVEC (struct branch_path, PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
cse_end_of_basic_block (insn, &val, 0, 0);
free (val.path);
/* If the tables we allocated have enough space left
to handle all the SETs in the next basic block,
continue through it. Otherwise, return,
and that block will be scanned individually. */
if (val.nsets * 2 + next_qty > max_qty)
break;
cse_basic_block_start = val.low_cuid;
cse_basic_block_end = val.high_cuid;
to = val.last;
/* Prevent TO from being deleted if it is a label. */
if (to != 0 && LABEL_P (to))
++LABEL_NUSES (to);
/* Back up so we process the first insn in the extension. */
insn = PREV_INSN (insn);
}
}
gcc_assert (next_qty <= max_qty);