combine.c (combine_simplify_rtx <case NOT, [...]): Move simplifications that do not require additional infrastructure...

2005-12-16  Paolo Bonzini  <bonzini@gnu.org>

	* combine.c (combine_simplify_rtx <case NOT, NEG, TRUNCATE,
	FLOAT_TRUNCATE, FLOAT_EXTEND, PLUS, MINUS, AND, IOR, XOR,
	ABS, VEC_SELECT, POPCOUNT, PARITY, FFS, FLOAT>,
	simplify_logical): Move simplifications that do not require
	additional infrastructure...
	* simplify-rtx.c (simplify_unary_operation_1,
	simplify_binary_operation_1): ... here.

From-SVN: r108634
This commit is contained in:
parent 488ce07ba2
commit bd1ef75776

3 changed files with 543 additions and 565 deletions
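For readers skimming the diff below: the NOT-case rules moved by this patch are RTL forms of ordinary Boolean identities (De Morgan's laws, double negation). The following standalone check is an illustration written for this note, not GCC source; the values chosen are arbitrary.

/* Illustration only: Boolean identities behind the moved NOT-case rules.  */
#include <assert.h>
#include <stdint.h>

int main (void)
{
  uint32_t a = 0x12345678u, b = 0x0f0f0f0fu;

  /* De Morgan: ~(a | b) == ~a & ~b, and ~(a & b) == ~a | ~b.  */
  assert (~(a | b) == (~a & ~b));
  assert (~(a & b) == (~a | ~b));

  /* Double negation: ~~a == a.  */
  assert (~~a == a);
  return 0;
}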
gcc/ChangeLog

@@ -1,3 +1,13 @@
2005-12-16  Paolo Bonzini  <bonzini@gnu.org>

	* combine.c (combine_simplify_rtx <case NOT, NEG, TRUNCATE,
	FLOAT_TRUNCATE, FLOAT_EXTEND, PLUS, MINUS, AND, IOR, XOR,
	ABS, VEC_SELECT, POPCOUNT, PARITY, FFS, FLOAT>,
	simplify_logical): Move simplifications that do not require
	additional infrastructure...
	* simplify-rtx.c (simplify_unary_operation_1,
	simplify_binary_operation_1): ... here.

2005-12-16  Andreas Krebbel  <krebbel1@de.ibm.com>

	PR 24823
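One of the NEG-case rules moved out of combine_simplify_rtx is "(neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1". As a sanity check of the underlying arithmetic identity (illustration only, not part of this commit):

/* Illustration only: -(a ^ 1) == a - 1 whenever a is 0 or 1.  */
#include <assert.h>

int main (void)
{
  for (int a = 0; a <= 1; a++)
    assert (-(a ^ 1) == a - 1);
  return 0;
}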
gcc/combine.c: 556 changed lines
@@ -3902,7 +3902,6 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest)
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  rtx temp;
  rtx reversed;
  int i;

  /* If this is a commutative operation, put a constant last and a complex
@@ -4159,60 +4158,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest)

      break;

    case NOT:
      if (GET_CODE (XEXP (x, 0)) == SUBREG
          && subreg_lowpart_p (XEXP (x, 0))
          && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
              < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
          && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
          && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
        {
          enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));

          x = gen_rtx_ROTATE (inner_mode,
                              simplify_gen_unary (NOT, inner_mode, const1_rtx,
                                                  inner_mode),
                              XEXP (SUBREG_REG (XEXP (x, 0)), 1));
          return gen_lowpart (mode, x);
        }

      /* Apply De Morgan's laws to reduce number of patterns for machines
         with negating logical insns (and-not, nand, etc.).  If result has
         only one NOT, put it first, since that is how the patterns are
         coded.  */

      if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
        {
          rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
          enum machine_mode op_mode;

          op_mode = GET_MODE (in1);
          in1 = simplify_gen_unary (NOT, op_mode, in1, op_mode);

          op_mode = GET_MODE (in2);
          if (op_mode == VOIDmode)
            op_mode = mode;
          in2 = simplify_gen_unary (NOT, op_mode, in2, op_mode);

          if (GET_CODE (in2) == NOT && GET_CODE (in1) != NOT)
            {
              rtx tem = in2;
              in2 = in1; in1 = tem;
            }

          return gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
                                 mode, in1, in2);
        }
      break;

    case NEG:
      /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1.  */
      if (GET_CODE (XEXP (x, 0)) == XOR
          && XEXP (XEXP (x, 0), 1) == const1_rtx
          && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
        return simplify_gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
                                    constm1_rtx);

      temp = expand_compound_operation (XEXP (x, 0));

      /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
@@ -4267,131 +4213,16 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest)
|
|||
force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
|
||||
GET_MODE_MASK (mode), 0));
|
||||
|
||||
/* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI. */
|
||||
if ((GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
|
||||
|| GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
|
||||
&& GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
|
||||
return XEXP (XEXP (x, 0), 0);
|
||||
|
||||
/* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is
|
||||
(OP:SI foo:SI) if OP is NEG or ABS. */
|
||||
if ((GET_CODE (XEXP (x, 0)) == ABS
|
||||
|| GET_CODE (XEXP (x, 0)) == NEG)
|
||||
&& (GET_CODE (XEXP (XEXP (x, 0), 0)) == SIGN_EXTEND
|
||||
|| GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND)
|
||||
&& GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
|
||||
return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
|
||||
XEXP (XEXP (XEXP (x, 0), 0), 0), mode);
|
||||
|
||||
/* (truncate:SI (subreg:DI (truncate:SI X) 0)) is
|
||||
(truncate:SI x). */
|
||||
if (GET_CODE (XEXP (x, 0)) == SUBREG
|
||||
&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == TRUNCATE
|
||||
&& subreg_lowpart_p (XEXP (x, 0)))
|
||||
return SUBREG_REG (XEXP (x, 0));
|
||||
|
||||
/* If we know that the value is already truncated, we can
|
||||
replace the TRUNCATE with a SUBREG if TRULY_NOOP_TRUNCATION
|
||||
is nonzero for the corresponding modes. But don't do this
|
||||
for an (LSHIFTRT (MULT ...)) since this will cause problems
|
||||
with the umulXi3_highpart patterns. */
|
||||
if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
|
||||
GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
|
||||
&& num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
|
||||
>= (unsigned int) (GET_MODE_BITSIZE (mode) + 1)
|
||||
&& ! (GET_CODE (XEXP (x, 0)) == LSHIFTRT
|
||||
&& GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT))
|
||||
return gen_lowpart (mode, XEXP (x, 0));
|
||||
|
||||
/* A truncate of a comparison can be replaced with a subreg if
|
||||
STORE_FLAG_VALUE permits. This is like the previous test,
|
||||
but it works even if the comparison is done in a mode larger
|
||||
than HOST_BITS_PER_WIDE_INT. */
|
||||
if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
|
||||
&& COMPARISON_P (XEXP (x, 0))
|
||||
&& ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0)
|
||||
return gen_lowpart (mode, XEXP (x, 0));
|
||||
|
||||
/* Similarly, a truncate of a register whose value is a
|
||||
comparison can be replaced with a subreg if STORE_FLAG_VALUE
|
||||
permits. */
|
||||
/* Similarly to what we do in simplify-rtx.c, a truncate of a register
|
||||
whose value is a comparison can be replaced with a subreg if
|
||||
STORE_FLAG_VALUE permits. */
|
||||
if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
|
||||
&& ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
|
||||
&& (temp = get_last_value (XEXP (x, 0)))
|
||||
&& COMPARISON_P (temp))
|
||||
return gen_lowpart (mode, XEXP (x, 0));
|
||||
|
||||
break;
|
||||
|
||||
case FLOAT_TRUNCATE:
|
||||
/* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
|
||||
if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
|
||||
&& GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
|
||||
return XEXP (XEXP (x, 0), 0);
|
||||
|
||||
/* (float_truncate:SF (float_truncate:DF foo:XF))
|
||||
= (float_truncate:SF foo:XF).
|
||||
This may eliminate double rounding, so it is unsafe.
|
||||
|
||||
(float_truncate:SF (float_extend:XF foo:DF))
|
||||
= (float_truncate:SF foo:DF).
|
||||
|
||||
(float_truncate:DF (float_extend:XF foo:SF))
|
||||
= (float_extend:SF foo:DF). */
|
||||
if ((GET_CODE (XEXP (x, 0)) == FLOAT_TRUNCATE
|
||||
&& flag_unsafe_math_optimizations)
|
||||
|| GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND)
|
||||
return simplify_gen_unary (GET_MODE_SIZE (GET_MODE (XEXP (XEXP (x, 0),
|
||||
0)))
|
||||
> GET_MODE_SIZE (mode)
|
||||
? FLOAT_TRUNCATE : FLOAT_EXTEND,
|
||||
mode,
|
||||
XEXP (XEXP (x, 0), 0), mode);
|
||||
|
||||
/* (float_truncate (float x)) is (float x) */
|
||||
if (GET_CODE (XEXP (x, 0)) == FLOAT
|
||||
&& (flag_unsafe_math_optimizations
|
||||
|| ((unsigned)significand_size (GET_MODE (XEXP (x, 0)))
|
||||
>= (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (x, 0), 0)))
|
||||
- num_sign_bit_copies (XEXP (XEXP (x, 0), 0),
|
||||
GET_MODE (XEXP (XEXP (x, 0), 0)))))))
|
||||
return simplify_gen_unary (FLOAT, mode,
|
||||
XEXP (XEXP (x, 0), 0),
|
||||
GET_MODE (XEXP (XEXP (x, 0), 0)));
|
||||
|
||||
/* (float_truncate:SF (OP:DF (float_extend:DF foo:sf))) is
|
||||
(OP:SF foo:SF) if OP is NEG or ABS. */
|
||||
if ((GET_CODE (XEXP (x, 0)) == ABS
|
||||
|| GET_CODE (XEXP (x, 0)) == NEG)
|
||||
&& GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
|
||||
&& GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
|
||||
return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
|
||||
XEXP (XEXP (XEXP (x, 0), 0), 0), mode);
|
||||
|
||||
/* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
|
||||
is (float_truncate:SF x). */
|
||||
if (GET_CODE (XEXP (x, 0)) == SUBREG
|
||||
&& subreg_lowpart_p (XEXP (x, 0))
|
||||
&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
|
||||
return SUBREG_REG (XEXP (x, 0));
|
||||
break;
|
||||
case FLOAT_EXTEND:
|
||||
/* (float_extend (float_extend x)) is (float_extend x)
|
||||
|
||||
(float_extend (float x)) is (float x) assuming that double
|
||||
rounding can't happen.
|
||||
*/
|
||||
if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
|
||||
|| (GET_CODE (XEXP (x, 0)) == FLOAT
|
||||
&& ((unsigned)significand_size (GET_MODE (XEXP (x, 0)))
|
||||
>= (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (x, 0), 0)))
|
||||
- num_sign_bit_copies (XEXP (XEXP (x, 0), 0),
|
||||
GET_MODE (XEXP (XEXP (x, 0), 0)))))))
|
||||
return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
|
||||
XEXP (XEXP (x, 0), 0),
|
||||
GET_MODE (XEXP (XEXP (x, 0), 0)));
|
||||
|
||||
break;
|
||||
#ifdef HAVE_cc0
|
||||
case COMPARE:
|
||||
/* Convert (compare FOO (const_int 0)) to FOO unless we aren't
|
||||
|
@@ -4430,32 +4261,6 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest)
|
|||
#endif
|
||||
|
||||
case PLUS:
|
||||
/* Canonicalize (plus (mult (neg B) C) A) to (minus A (mult B C)).
|
||||
*/
|
||||
if (GET_CODE (XEXP (x, 0)) == MULT
|
||||
&& GET_CODE (XEXP (XEXP (x, 0), 0)) == NEG)
|
||||
{
|
||||
rtx in1, in2;
|
||||
|
||||
in1 = XEXP (XEXP (XEXP (x, 0), 0), 0);
|
||||
in2 = XEXP (XEXP (x, 0), 1);
|
||||
return simplify_gen_binary (MINUS, mode, XEXP (x, 1),
|
||||
simplify_gen_binary (MULT, mode,
|
||||
in1, in2));
|
||||
}
|
||||
|
||||
/* If we have (plus (plus (A const) B)), associate it so that CONST is
|
||||
outermost. That's because that's the way indexed addresses are
|
||||
supposed to appear. This code used to check many more cases, but
|
||||
they are now checked elsewhere. */
|
||||
if (GET_CODE (XEXP (x, 0)) == PLUS
|
||||
&& CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
|
||||
return simplify_gen_binary (PLUS, mode,
|
||||
simplify_gen_binary (PLUS, mode,
|
||||
XEXP (XEXP (x, 0), 0),
|
||||
XEXP (x, 1)),
|
||||
XEXP (XEXP (x, 0), 1));
|
||||
|
||||
/* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
|
||||
when c is (const_int (pow2 + 1) / 2) is a sign extension of a
|
||||
bit-field and can be replaced by either a sign_extend or a
|
||||
|
@@ -4482,17 +4287,6 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest)
|
|||
GET_MODE_BITSIZE (mode) - (i + 1)),
|
||||
GET_MODE_BITSIZE (mode) - (i + 1));
|
||||
|
||||
/* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
|
||||
C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
|
||||
is 1. This produces better code than the alternative immediately
|
||||
below. */
|
||||
if (COMPARISON_P (XEXP (x, 0))
|
||||
&& ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
|
||||
|| (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx))
|
||||
&& (reversed = reversed_comparison (XEXP (x, 0), mode)))
|
||||
return
|
||||
simplify_gen_unary (NEG, mode, reversed, mode);
|
||||
|
||||
/* If only the low-order bit of X is possibly nonzero, (plus x -1)
|
||||
can become (ashiftrt (ashift (xor x 1) C) C) where C is
|
||||
the bitsize of the mode - 1. This allows simplification of
|
||||
|
@@ -4530,14 +4324,6 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest)
|
|||
break;
|
||||
|
||||
case MINUS:
|
||||
/* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done
|
||||
by reversing the comparison code if valid. */
|
||||
if (STORE_FLAG_VALUE == 1
|
||||
&& XEXP (x, 0) == const1_rtx
|
||||
&& COMPARISON_P (XEXP (x, 1))
|
||||
&& (reversed = reversed_comparison (XEXP (x, 1), mode)))
|
||||
return reversed;
|
||||
|
||||
/* (minus <foo> (and <foo> (const_int -pow2))) becomes
|
||||
(and <foo> (const_int pow2-1)) */
|
||||
if (GET_CODE (XEXP (x, 1)) == AND
|
||||
|
@@ -4546,45 +4332,6 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest)
|
|||
&& rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
|
||||
return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
|
||||
-INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
|
||||
|
||||
/* Canonicalize (minus A (mult (neg B) C)) to (plus (mult B C) A).
|
||||
*/
|
||||
if (GET_CODE (XEXP (x, 1)) == MULT
|
||||
&& GET_CODE (XEXP (XEXP (x, 1), 0)) == NEG)
|
||||
{
|
||||
rtx in1, in2;
|
||||
|
||||
in1 = XEXP (XEXP (XEXP (x, 1), 0), 0);
|
||||
in2 = XEXP (XEXP (x, 1), 1);
|
||||
return simplify_gen_binary (PLUS, mode,
|
||||
simplify_gen_binary (MULT, mode,
|
||||
in1, in2),
|
||||
XEXP (x, 0));
|
||||
}
|
||||
|
||||
/* Canonicalize (minus (neg A) (mult B C)) to
|
||||
(minus (mult (neg B) C) A). */
|
||||
if (GET_CODE (XEXP (x, 1)) == MULT
|
||||
&& GET_CODE (XEXP (x, 0)) == NEG)
|
||||
{
|
||||
rtx in1, in2;
|
||||
|
||||
in1 = simplify_gen_unary (NEG, mode, XEXP (XEXP (x, 1), 0), mode);
|
||||
in2 = XEXP (XEXP (x, 1), 1);
|
||||
return simplify_gen_binary (MINUS, mode,
|
||||
simplify_gen_binary (MULT, mode,
|
||||
in1, in2),
|
||||
XEXP (XEXP (x, 0), 0));
|
||||
}
|
||||
|
||||
/* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
|
||||
integers. */
|
||||
if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
|
||||
return simplify_gen_binary (MINUS, mode,
|
||||
simplify_gen_binary (MINUS, mode,
|
||||
XEXP (x, 0),
|
||||
XEXP (XEXP (x, 1), 0)),
|
||||
XEXP (XEXP (x, 1), 1));
|
||||
break;
|
||||
|
||||
case MULT:
|
||||
|
@@ -4800,55 +4547,8 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest)
|
|||
|
||||
case AND:
|
||||
case IOR:
|
||||
case XOR:
|
||||
return simplify_logical (x);
|
||||
|
||||
case ABS:
|
||||
/* (abs (neg <foo>)) -> (abs <foo>) */
|
||||
if (GET_CODE (XEXP (x, 0)) == NEG)
|
||||
SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
|
||||
|
||||
/* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS),
|
||||
do nothing. */
|
||||
if (GET_MODE (XEXP (x, 0)) == VOIDmode)
|
||||
break;
|
||||
|
||||
/* If operand is something known to be positive, ignore the ABS. */
|
||||
if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
|
||||
|| ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
|
||||
<= HOST_BITS_PER_WIDE_INT)
|
||||
&& ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
|
||||
& ((HOST_WIDE_INT) 1
|
||||
<< (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
|
||||
== 0)))
|
||||
return XEXP (x, 0);
|
||||
|
||||
/* If operand is known to be only -1 or 0, convert ABS to NEG. */
|
||||
if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
|
||||
return gen_rtx_NEG (mode, XEXP (x, 0));
|
||||
|
||||
break;
|
||||
|
||||
case FFS:
|
||||
/* (ffs (*_extend <X>)) = (ffs <X>) */
|
||||
if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
|
||||
|| GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
|
||||
SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
|
||||
break;
|
||||
|
||||
case POPCOUNT:
|
||||
case PARITY:
|
||||
/* (pop* (zero_extend <X>)) = (pop* <X>) */
|
||||
if (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
|
||||
SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
|
||||
break;
|
||||
|
||||
case FLOAT:
|
||||
/* (float (sign_extend <X>)) = (float <X>). */
|
||||
if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
|
||||
SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
|
||||
break;
|
||||
|
||||
case ASHIFT:
|
||||
case LSHIFTRT:
|
||||
case ASHIFTRT:
|
||||
|
@@ -4868,44 +4568,6 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest)
|
|||
0));
|
||||
break;
|
||||
|
||||
case VEC_SELECT:
|
||||
{
|
||||
rtx op0 = XEXP (x, 0);
|
||||
rtx op1 = XEXP (x, 1);
|
||||
int len;
|
||||
|
||||
gcc_assert (GET_CODE (op1) == PARALLEL);
|
||||
len = XVECLEN (op1, 0);
|
||||
if (len == 1
|
||||
&& GET_CODE (XVECEXP (op1, 0, 0)) == CONST_INT
|
||||
&& GET_CODE (op0) == VEC_CONCAT)
|
||||
{
|
||||
int offset = INTVAL (XVECEXP (op1, 0, 0)) * GET_MODE_SIZE (GET_MODE (x));
|
||||
|
||||
/* Try to find the element in the VEC_CONCAT. */
|
||||
for (;;)
|
||||
{
|
||||
if (GET_MODE (op0) == GET_MODE (x))
|
||||
return op0;
|
||||
if (GET_CODE (op0) == VEC_CONCAT)
|
||||
{
|
||||
HOST_WIDE_INT op0_size = GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)));
|
||||
if (offset < op0_size)
|
||||
op0 = XEXP (op0, 0);
|
||||
else
|
||||
{
|
||||
offset -= op0_size;
|
||||
op0 = XEXP (op0, 1);
|
||||
}
|
||||
}
|
||||
else
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
@@ -5598,42 +5260,10 @@ simplify_logical (rtx x)
  enum machine_mode mode = GET_MODE (x);
  rtx op0 = XEXP (x, 0);
  rtx op1 = XEXP (x, 1);
  rtx tmp, reversed;

  switch (GET_CODE (x))
    {
    case AND:
|
||||
/* Convert (A ^ B) & A to A & (~B) since the latter is often a single
|
||||
insn (and may simplify more). */
|
||||
if (GET_CODE (op0) == XOR
|
||||
&& rtx_equal_p (XEXP (op0, 0), op1)
|
||||
&& ! side_effects_p (op1))
|
||||
x = simplify_gen_binary (AND, mode,
|
||||
simplify_gen_unary (NOT, mode,
|
||||
XEXP (op0, 1), mode),
|
||||
op1);
|
||||
|
||||
if (GET_CODE (op0) == XOR
|
||||
&& rtx_equal_p (XEXP (op0, 1), op1)
|
||||
&& ! side_effects_p (op1))
|
||||
x = simplify_gen_binary (AND, mode,
|
||||
simplify_gen_unary (NOT, mode,
|
||||
XEXP (op0, 0), mode),
|
||||
op1);
|
||||
|
||||
/* Similarly for (~(A ^ B)) & A. */
|
||||
if (GET_CODE (op0) == NOT
|
||||
&& GET_CODE (XEXP (op0, 0)) == XOR
|
||||
&& rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
|
||||
&& ! side_effects_p (op1))
|
||||
x = simplify_gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);
|
||||
|
||||
if (GET_CODE (op0) == NOT
|
||||
&& GET_CODE (XEXP (op0, 0)) == XOR
|
||||
&& rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
|
||||
&& ! side_effects_p (op1))
|
||||
x = simplify_gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);
|
||||
|
||||
/* We can call simplify_and_const_int only if we don't lose
|
||||
any (sign) bits when converting INTVAL (op1) to
|
||||
"unsigned HOST_WIDE_INT". */
|
||||
|
@@ -5642,22 +5272,6 @@ simplify_logical (rtx x)
|
|||
|| INTVAL (op1) > 0))
|
||||
{
|
||||
x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
|
||||
|
||||
/* If we have (ior (and (X C1) C2)) and the next restart would be
|
||||
the last, simplify this by making C1 as small as possible
|
||||
and then exit. Only do this if C1 actually changes: for now
|
||||
this only saves memory but, should this transformation be
|
||||
moved to simplify-rtx.c, we'd risk unbounded recursion there. */
|
||||
if (GET_CODE (x) == IOR && GET_CODE (op0) == AND
|
||||
&& GET_CODE (XEXP (op0, 1)) == CONST_INT
|
||||
&& GET_CODE (op1) == CONST_INT
|
||||
&& (INTVAL (XEXP (op0, 1)) & INTVAL (op1)) != 0)
|
||||
return simplify_gen_binary (IOR, mode,
|
||||
simplify_gen_binary
|
||||
(AND, mode, XEXP (op0, 0),
|
||||
GEN_INT (INTVAL (XEXP (op0, 1))
|
||||
& ~INTVAL (op1))), op1);
|
||||
|
||||
if (GET_CODE (x) != AND)
|
||||
return x;
|
||||
|
||||
|
@@ -5665,14 +5279,6 @@ simplify_logical (rtx x)
|
|||
op1 = XEXP (x, 1);
|
||||
}
|
||||
|
||||
/* Convert (A | B) & A to A. */
|
||||
if (GET_CODE (op0) == IOR
|
||||
&& (rtx_equal_p (XEXP (op0, 0), op1)
|
||||
|| rtx_equal_p (XEXP (op0, 1), op1))
|
||||
&& ! side_effects_p (XEXP (op0, 0))
|
||||
&& ! side_effects_p (XEXP (op0, 1)))
|
||||
return op1;
|
||||
|
||||
/* If we have any of (and (ior A B) C) or (and (xor A B) C),
|
||||
apply the distributive law and then the inverse distributive
|
||||
law to see if things simplify. */
|
||||
|
@@ -5691,20 +5297,6 @@ simplify_logical (rtx x)
|
|||
break;
|
||||
|
||||
case IOR:
|
||||
/* (ior A C) is C if all bits of A that might be nonzero are on in C. */
|
||||
if (GET_CODE (op1) == CONST_INT
|
||||
&& GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
|
||||
&& (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
|
||||
return op1;
|
||||
|
||||
/* Convert (A & B) | A to A. */
|
||||
if (GET_CODE (op0) == AND
|
||||
&& (rtx_equal_p (XEXP (op0, 0), op1)
|
||||
|| rtx_equal_p (XEXP (op0, 1), op1))
|
||||
&& ! side_effects_p (XEXP (op0, 0))
|
||||
&& ! side_effects_p (XEXP (op0, 1)))
|
||||
return op1;
|
||||
|
||||
/* If we have (ior (and A B) C), apply the distributive law and then
|
||||
the inverse distributive law to see if things simplify. */
|
||||
|
||||
|
@@ -5721,148 +5313,6 @@ simplify_logical (rtx x)
|
|||
if (result)
|
||||
return result;
|
||||
}
|
||||
|
||||
/* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
|
||||
mode size to (rotate A CX). */
|
||||
|
||||
if (GET_CODE (op1) == ASHIFT
|
||||
|| GET_CODE (op1) == SUBREG)
|
||||
tmp = op1, op1 = op0, op0 = tmp;
|
||||
|
||||
if (GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT
|
||||
&& rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
|
||||
&& GET_CODE (XEXP (op0, 1)) == CONST_INT
|
||||
&& GET_CODE (XEXP (op1, 1)) == CONST_INT
|
||||
&& (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
|
||||
== GET_MODE_BITSIZE (mode)))
|
||||
return gen_rtx_ROTATE (mode, XEXP (op1, 0), XEXP (op0, 1));
|
||||
|
||||
/* Same, but for ashift that has been "simplified" to a wider mode
|
||||
by simplify_shift_const. */
|
||||
|
||||
if (GET_CODE (op0) == SUBREG
|
||||
&& GET_CODE (SUBREG_REG (op0)) == ASHIFT
|
||||
&& GET_CODE (op1) == LSHIFTRT
|
||||
&& GET_CODE (XEXP (op1, 0)) == SUBREG
|
||||
&& GET_MODE (op0) == GET_MODE (XEXP (op1, 0))
|
||||
&& SUBREG_BYTE (op0) == SUBREG_BYTE (XEXP (op1, 0))
|
||||
&& (GET_MODE_SIZE (GET_MODE (op0))
|
||||
< GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))
|
||||
&& rtx_equal_p (XEXP (SUBREG_REG (op0), 0),
|
||||
SUBREG_REG (XEXP (op1, 0)))
|
||||
&& GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
|
||||
&& GET_CODE (XEXP (op1, 1)) == CONST_INT
|
||||
&& (INTVAL (XEXP (SUBREG_REG (op0), 1)) + INTVAL (XEXP (op1, 1))
|
||||
== GET_MODE_BITSIZE (mode)))
|
||||
return gen_rtx_ROTATE (mode, XEXP (op1, 0),
|
||||
XEXP (SUBREG_REG (op0), 1));
|
||||
|
||||
/* If OP0 is (ashiftrt (plus ...) C), it might actually be
|
||||
a (sign_extend (plus ...)). If so, OP1 is a CONST_INT, and the PLUS
|
||||
does not affect any of the bits in OP1, it can really be done
|
||||
as a PLUS and we can associate. We do this by seeing if OP1
|
||||
can be safely shifted left C bits. */
|
||||
if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
|
||||
&& GET_CODE (XEXP (op0, 0)) == PLUS
|
||||
&& GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
|
||||
&& GET_CODE (XEXP (op0, 1)) == CONST_INT
|
||||
&& INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
|
||||
{
|
||||
int count = INTVAL (XEXP (op0, 1));
|
||||
HOST_WIDE_INT mask = INTVAL (op1) << count;
|
||||
|
||||
if (mask >> count == INTVAL (op1)
|
||||
&& (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
|
||||
{
|
||||
SUBST (XEXP (XEXP (op0, 0), 1),
|
||||
GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
|
||||
return op0;
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
||||
case XOR:
|
||||
/* If we are XORing two things that have no bits in common,
|
||||
convert them into an IOR. This helps to detect rotation encoded
|
||||
using those methods and possibly other simplifications. */
|
||||
|
||||
if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
|
||||
&& (nonzero_bits (op0, mode)
|
||||
& nonzero_bits (op1, mode)) == 0)
|
||||
return (simplify_gen_binary (IOR, mode, op0, op1));
|
||||
|
||||
/* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
|
||||
Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
|
||||
(NOT y). */
|
||||
{
|
||||
int num_negated = 0;
|
||||
|
||||
if (GET_CODE (op0) == NOT)
|
||||
num_negated++, op0 = XEXP (op0, 0);
|
||||
if (GET_CODE (op1) == NOT)
|
||||
num_negated++, op1 = XEXP (op1, 0);
|
||||
|
||||
if (num_negated == 2)
|
||||
{
|
||||
SUBST (XEXP (x, 0), op0);
|
||||
SUBST (XEXP (x, 1), op1);
|
||||
}
|
||||
else if (num_negated == 1)
|
||||
return
|
||||
simplify_gen_unary (NOT, mode,
|
||||
simplify_gen_binary (XOR, mode, op0, op1),
|
||||
mode);
|
||||
}
|
||||
|
||||
/* Convert (xor (and A B) B) to (and (not A) B). The latter may
|
||||
correspond to a machine insn or result in further simplifications
|
||||
if B is a constant. */
|
||||
|
||||
if (GET_CODE (op0) == AND
|
||||
&& rtx_equal_p (XEXP (op0, 1), op1)
|
||||
&& ! side_effects_p (op1))
|
||||
return simplify_gen_binary (AND, mode,
|
||||
simplify_gen_unary (NOT, mode,
|
||||
XEXP (op0, 0), mode),
|
||||
op1);
|
||||
|
||||
else if (GET_CODE (op0) == AND
|
||||
&& rtx_equal_p (XEXP (op0, 0), op1)
|
||||
&& ! side_effects_p (op1))
|
||||
return simplify_gen_binary (AND, mode,
|
||||
simplify_gen_unary (NOT, mode,
|
||||
XEXP (op0, 1), mode),
|
||||
op1);
|
||||
|
||||
/* (xor (comparison foo bar) (const_int 1)) can become the reversed
|
||||
comparison if STORE_FLAG_VALUE is 1. */
|
||||
if (STORE_FLAG_VALUE == 1
|
||||
&& op1 == const1_rtx
|
||||
&& COMPARISON_P (op0)
|
||||
&& (reversed = reversed_comparison (op0, mode)))
|
||||
return reversed;
|
||||
|
||||
/* (lshiftrt foo C) where C is the number of bits in FOO minus 1
|
||||
is (lt foo (const_int 0)), so we can perform the above
|
||||
simplification if STORE_FLAG_VALUE is 1. */
|
||||
|
||||
if (STORE_FLAG_VALUE == 1
|
||||
&& op1 == const1_rtx
|
||||
&& GET_CODE (op0) == LSHIFTRT
|
||||
&& GET_CODE (XEXP (op0, 1)) == CONST_INT
|
||||
&& INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
|
||||
return gen_rtx_GE (mode, XEXP (op0, 0), const0_rtx);
|
||||
|
||||
/* (xor (comparison foo bar) (const_int sign-bit))
|
||||
when STORE_FLAG_VALUE is the sign bit. */
|
||||
if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
|
||||
&& ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
|
||||
== (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
|
||||
&& op1 == const_true_rtx
|
||||
&& COMPARISON_P (op0)
|
||||
&& (reversed = reversed_comparison (op0, mode)))
|
||||
return reversed;
|
||||
|
||||
break;
|
||||
|
||||
default:

gcc/simplify-rtx.c

@@ -399,7 +399,8 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op)
      if (GET_CODE (op) == NOT)
        return XEXP (op, 0);

      /* (not (eq X Y)) == (ne X Y), etc.  */
      /* (not (eq X Y)) == (ne X Y), etc. if BImode or the result of the
         comparison is all ones.  */
      if (COMPARISON_P (op)
          && (mode == BImode || STORE_FLAG_VALUE == -1)
          && ((reversed = reversed_comparison_code (op, NULL_RTX)) != UNKNOWN))
@@ -443,18 +444,10 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op)
|
|||
return simplify_gen_binary (ROTATE, mode, temp, XEXP (op, 1));
|
||||
}
|
||||
|
||||
/* If STORE_FLAG_VALUE is -1, (not (comparison X Y)) can be done
|
||||
by reversing the comparison code if valid. */
|
||||
if (STORE_FLAG_VALUE == -1
|
||||
&& COMPARISON_P (op)
|
||||
&& (reversed = reversed_comparison_code (op, NULL_RTX)) != UNKNOWN)
|
||||
return simplify_gen_relational (reversed, mode, VOIDmode,
|
||||
XEXP (op, 0), XEXP (op, 1));
|
||||
|
||||
/* (not (ashiftrt foo C)) where C is the number of bits in FOO
|
||||
minus 1 is (ge foo (const_int 0)) if STORE_FLAG_VALUE is -1,
|
||||
so we can perform the above simplification. */
|
||||
|
||||
|
||||
if (STORE_FLAG_VALUE == -1
|
||||
&& GET_CODE (op) == ASHIFTRT
|
||||
&& GET_CODE (XEXP (op, 1)) == CONST_INT
|
||||
|
@@ -462,6 +455,51 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op)
|
|||
return simplify_gen_relational (GE, mode, VOIDmode,
|
||||
XEXP (op, 0), const0_rtx);
|
||||
|
||||
|
||||
if (GET_CODE (op) == SUBREG
|
||||
&& subreg_lowpart_p (op)
|
||||
&& (GET_MODE_SIZE (GET_MODE (op))
|
||||
< GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
|
||||
&& GET_CODE (SUBREG_REG (op)) == ASHIFT
|
||||
&& XEXP (SUBREG_REG (op), 0) == const1_rtx)
|
||||
{
|
||||
enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op));
|
||||
rtx x;
|
||||
|
||||
x = gen_rtx_ROTATE (inner_mode,
|
||||
simplify_gen_unary (NOT, inner_mode, const1_rtx,
|
||||
inner_mode),
|
||||
XEXP (SUBREG_REG (op), 1));
|
||||
return rtl_hooks.gen_lowpart_no_emit (mode, x);
|
||||
}
|
||||
|
||||
/* Apply De Morgan's laws to reduce number of patterns for machines
|
||||
with negating logical insns (and-not, nand, etc.). If result has
|
||||
only one NOT, put it first, since that is how the patterns are
|
||||
coded. */
|
||||
|
||||
if (GET_CODE (op) == IOR || GET_CODE (op) == AND)
|
||||
{
|
||||
rtx in1 = XEXP (op, 0), in2 = XEXP (op, 1);
|
||||
enum machine_mode op_mode;
|
||||
|
||||
op_mode = GET_MODE (in1);
|
||||
in1 = simplify_gen_unary (NOT, op_mode, in1, op_mode);
|
||||
|
||||
op_mode = GET_MODE (in2);
|
||||
if (op_mode == VOIDmode)
|
||||
op_mode = mode;
|
||||
in2 = simplify_gen_unary (NOT, op_mode, in2, op_mode);
|
||||
|
||||
if (GET_CODE (in2) == NOT && GET_CODE (in1) != NOT)
|
||||
{
|
||||
rtx tem = in2;
|
||||
in2 = in1; in1 = tem;
|
||||
}
|
||||
|
||||
return gen_rtx_fmt_ee (GET_CODE (op) == IOR ? AND : IOR,
|
||||
mode, in1, in2);
|
||||
}
|
||||
break;
|
||||
|
||||
case NEG:
|
||||
|
@@ -541,6 +579,185 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op)
|
|||
return simplify_gen_binary (ASHIFTRT, mode,
|
||||
XEXP (op, 0), XEXP (op, 1));
|
||||
|
||||
/* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
|
||||
if (GET_CODE (op) == XOR
|
||||
&& XEXP (op, 1) == const1_rtx
|
||||
&& nonzero_bits (XEXP (op, 0), mode) == 1)
|
||||
return plus_constant (XEXP (op, 0), -1);
|
||||
break;
|
||||
|
||||
case TRUNCATE:
|
||||
/* We can't handle truncation to a partial integer mode here
|
||||
because we don't know the real bitsize of the partial
|
||||
integer mode. */
|
||||
if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
|
||||
break;
|
||||
|
||||
/* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI. */
|
||||
if ((GET_CODE (op) == SIGN_EXTEND
|
||||
|| GET_CODE (op) == ZERO_EXTEND)
|
||||
&& GET_MODE (XEXP (op, 0)) == mode)
|
||||
return XEXP (op, 0);
|
||||
|
||||
/* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is
|
||||
(OP:SI foo:SI) if OP is NEG or ABS. */
|
||||
if ((GET_CODE (op) == ABS
|
||||
|| GET_CODE (op) == NEG)
|
||||
&& (GET_CODE (XEXP (op, 0)) == SIGN_EXTEND
|
||||
|| GET_CODE (XEXP (op, 0)) == ZERO_EXTEND)
|
||||
&& GET_MODE (XEXP (XEXP (op, 0), 0)) == mode)
|
||||
return simplify_gen_unary (GET_CODE (op), mode,
|
||||
XEXP (XEXP (op, 0), 0), mode);
|
||||
|
||||
/* (truncate:SI (subreg:DI (truncate:SI X) 0)) is
|
||||
(truncate:SI x). */
|
||||
if (GET_CODE (op) == SUBREG
|
||||
&& GET_CODE (SUBREG_REG (op)) == TRUNCATE
|
||||
&& subreg_lowpart_p (op))
|
||||
return SUBREG_REG (op);
|
||||
|
||||
/* If we know that the value is already truncated, we can
|
||||
replace the TRUNCATE with a SUBREG if TRULY_NOOP_TRUNCATION
|
||||
is nonzero for the corresponding modes. But don't do this
|
||||
for an (LSHIFTRT (MULT ...)) since this will cause problems
|
||||
with the umulXi3_highpart patterns. */
|
||||
if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
|
||||
GET_MODE_BITSIZE (GET_MODE (op)))
|
||||
&& num_sign_bit_copies (op, GET_MODE (op))
|
||||
>= (unsigned int) (GET_MODE_BITSIZE (mode) + 1)
|
||||
&& ! (GET_CODE (op) == LSHIFTRT
|
||||
&& GET_CODE (XEXP (op, 0)) == MULT))
|
||||
return rtl_hooks.gen_lowpart_no_emit (mode, op);
|
||||
|
||||
/* A truncate of a comparison can be replaced with a subreg if
|
||||
STORE_FLAG_VALUE permits. This is like the previous test,
|
||||
but it works even if the comparison is done in a mode larger
|
||||
than HOST_BITS_PER_WIDE_INT. */
|
||||
if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
|
||||
&& COMPARISON_P (op)
|
||||
&& ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0)
|
||||
return rtl_hooks.gen_lowpart_no_emit (mode, op);
|
||||
break;
|
||||
|
||||
case FLOAT_TRUNCATE:
|
||||
/* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
|
||||
if (GET_CODE (op) == FLOAT_EXTEND
|
||||
&& GET_MODE (XEXP (op, 0)) == mode)
|
||||
return XEXP (op, 0);
|
||||
|
||||
/* (float_truncate:SF (float_truncate:DF foo:XF))
|
||||
= (float_truncate:SF foo:XF).
|
||||
This may eliminate double rounding, so it is unsafe.
|
||||
|
||||
(float_truncate:SF (float_extend:XF foo:DF))
|
||||
= (float_truncate:SF foo:DF).
|
||||
|
||||
(float_truncate:DF (float_extend:XF foo:SF))
|
||||
= (float_extend:SF foo:DF). */
|
||||
if ((GET_CODE (op) == FLOAT_TRUNCATE
|
||||
&& flag_unsafe_math_optimizations)
|
||||
|| GET_CODE (op) == FLOAT_EXTEND)
|
||||
return simplify_gen_unary (GET_MODE_SIZE (GET_MODE (XEXP (op,
|
||||
0)))
|
||||
> GET_MODE_SIZE (mode)
|
||||
? FLOAT_TRUNCATE : FLOAT_EXTEND,
|
||||
mode,
|
||||
XEXP (op, 0), mode);
|
||||
|
||||
/* (float_truncate (float x)) is (float x) */
|
||||
if (GET_CODE (op) == FLOAT
|
||||
&& (flag_unsafe_math_optimizations
|
||||
|| ((unsigned)significand_size (GET_MODE (op))
|
||||
>= (GET_MODE_BITSIZE (GET_MODE (XEXP (op, 0)))
|
||||
- num_sign_bit_copies (XEXP (op, 0),
|
||||
GET_MODE (XEXP (op, 0)))))))
|
||||
return simplify_gen_unary (FLOAT, mode,
|
||||
XEXP (op, 0),
|
||||
GET_MODE (XEXP (op, 0)));
|
||||
|
||||
/* (float_truncate:SF (OP:DF (float_extend:DF foo:sf))) is
|
||||
(OP:SF foo:SF) if OP is NEG or ABS. */
|
||||
if ((GET_CODE (op) == ABS
|
||||
|| GET_CODE (op) == NEG)
|
||||
&& GET_CODE (XEXP (op, 0)) == FLOAT_EXTEND
|
||||
&& GET_MODE (XEXP (XEXP (op, 0), 0)) == mode)
|
||||
return simplify_gen_unary (GET_CODE (op), mode,
|
||||
XEXP (XEXP (op, 0), 0), mode);
|
||||
|
||||
/* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
|
||||
is (float_truncate:SF x). */
|
||||
if (GET_CODE (op) == SUBREG
|
||||
&& subreg_lowpart_p (op)
|
||||
&& GET_CODE (SUBREG_REG (op)) == FLOAT_TRUNCATE)
|
||||
return SUBREG_REG (op);
|
||||
break;
|
||||
|
||||
case FLOAT_EXTEND:
|
||||
/* (float_extend (float_extend x)) is (float_extend x)
|
||||
|
||||
(float_extend (float x)) is (float x) assuming that double
|
||||
rounding can't happen.
|
||||
*/
|
||||
if (GET_CODE (op) == FLOAT_EXTEND
|
||||
|| (GET_CODE (op) == FLOAT
|
||||
&& ((unsigned)significand_size (GET_MODE (op))
|
||||
>= (GET_MODE_BITSIZE (GET_MODE (XEXP (op, 0)))
|
||||
- num_sign_bit_copies (XEXP (op, 0),
|
||||
GET_MODE (XEXP (op, 0)))))))
|
||||
return simplify_gen_unary (GET_CODE (op), mode,
|
||||
XEXP (op, 0),
|
||||
GET_MODE (XEXP (op, 0)));
|
||||
|
||||
break;
|
||||
|
||||
case ABS:
|
||||
/* (abs (neg <foo>)) -> (abs <foo>) */
|
||||
if (GET_CODE (op) == NEG)
|
||||
return simplify_gen_unary (ABS, mode, XEXP (op, 0),
|
||||
GET_MODE (XEXP (op, 0)));
|
||||
|
||||
/* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS),
|
||||
do nothing. */
|
||||
if (GET_MODE (op) == VOIDmode)
|
||||
break;
|
||||
|
||||
/* If operand is something known to be positive, ignore the ABS. */
|
||||
if (GET_CODE (op) == FFS || GET_CODE (op) == ABS
|
||||
|| ((GET_MODE_BITSIZE (GET_MODE (op))
|
||||
<= HOST_BITS_PER_WIDE_INT)
|
||||
&& ((nonzero_bits (op, GET_MODE (op))
|
||||
& ((HOST_WIDE_INT) 1
|
||||
<< (GET_MODE_BITSIZE (GET_MODE (op)) - 1)))
|
||||
== 0)))
|
||||
return op;
|
||||
|
||||
/* If operand is known to be only -1 or 0, convert ABS to NEG. */
|
||||
if (num_sign_bit_copies (op, mode) == GET_MODE_BITSIZE (mode))
|
||||
return gen_rtx_NEG (mode, op);
|
||||
|
||||
break;
|
||||
|
||||
case FFS:
|
||||
/* (ffs (*_extend <X>)) = (ffs <X>) */
|
||||
if (GET_CODE (op) == SIGN_EXTEND
|
||||
|| GET_CODE (op) == ZERO_EXTEND)
|
||||
return simplify_gen_unary (FFS, mode, XEXP (op, 0),
|
||||
GET_MODE (XEXP (op, 0)));
|
||||
break;
|
||||
|
||||
case POPCOUNT:
|
||||
case PARITY:
|
||||
/* (pop* (zero_extend <X>)) = (pop* <X>) */
|
||||
if (GET_CODE (op) == ZERO_EXTEND)
|
||||
return simplify_gen_unary (code, mode, XEXP (op, 0),
|
||||
GET_MODE (XEXP (op, 0)));
|
||||
break;
|
||||
|
||||
case FLOAT:
|
||||
/* (float (sign_extend <X>)) = (float <X>). */
|
||||
if (GET_CODE (op) == SIGN_EXTEND)
|
||||
return simplify_gen_unary (FLOAT, mode, XEXP (op, 0),
|
||||
GET_MODE (XEXP (op, 0)));
|
||||
break;
|
||||
|
||||
case SIGN_EXTEND:
|
||||
|
@@ -1218,7 +1435,7 @@ static rtx
|
|||
simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
|
||||
rtx op0, rtx op1, rtx trueop0, rtx trueop1)
|
||||
{
|
||||
rtx tem;
|
||||
rtx tem, reversed, opleft, opright;
|
||||
HOST_WIDE_INT val;
|
||||
unsigned int width = GET_MODE_BITSIZE (mode);
|
||||
|
||||
|
@@ -1346,6 +1563,29 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
|
|||
simplify_gen_binary (XOR, mode, op1,
|
||||
XEXP (op0, 1)));
|
||||
|
||||
/* Canonicalize (plus (mult (neg B) C) A) to (minus A (mult B C)). */
|
||||
if (GET_CODE (op0) == MULT
|
||||
&& GET_CODE (XEXP (op0, 0)) == NEG)
|
||||
{
|
||||
rtx in1, in2;
|
||||
|
||||
in1 = XEXP (XEXP (op0, 0), 0);
|
||||
in2 = XEXP (op0, 1);
|
||||
return simplify_gen_binary (MINUS, mode, op1,
|
||||
simplify_gen_binary (MULT, mode,
|
||||
in1, in2));
|
||||
}
|
||||
|
||||
/* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
|
||||
C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
|
||||
is 1. */
|
||||
if (COMPARISON_P (op0)
|
||||
&& ((STORE_FLAG_VALUE == -1 && trueop1 == const1_rtx)
|
||||
|| (STORE_FLAG_VALUE == 1 && trueop1 == constm1_rtx))
|
||||
&& (reversed = reversed_comparison (op0, mode)))
|
||||
return
|
||||
simplify_gen_unary (NEG, mode, reversed, mode);
|
||||
|
||||
/* If one of the operands is a PLUS or a MINUS, see if we can
|
||||
simplify this by the associative law.
|
||||
Don't use the associative law for floating point.
|
||||
|
@@ -1543,6 +1783,43 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
|
|||
}
|
||||
}
|
||||
|
||||
/* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done
|
||||
by reversing the comparison code if valid. */
|
||||
if (STORE_FLAG_VALUE == 1
|
||||
&& trueop0 == const1_rtx
|
||||
&& COMPARISON_P (op1)
|
||||
&& (reversed = reversed_comparison (op1, mode)))
|
||||
return reversed;
|
||||
|
||||
/* Canonicalize (minus A (mult (neg B) C)) to (plus (mult B C) A). */
|
||||
if (GET_CODE (op1) == MULT
|
||||
&& GET_CODE (XEXP (op1, 0)) == NEG)
|
||||
{
|
||||
rtx in1, in2;
|
||||
|
||||
in1 = XEXP (XEXP (op1, 0), 0);
|
||||
in2 = XEXP (op1, 1);
|
||||
return simplify_gen_binary (PLUS, mode,
|
||||
simplify_gen_binary (MULT, mode,
|
||||
in1, in2),
|
||||
op0);
|
||||
}
|
||||
|
||||
/* Canonicalize (minus (neg A) (mult B C)) to
|
||||
(minus (mult (neg B) C) A). */
|
||||
if (GET_CODE (op1) == MULT
|
||||
&& GET_CODE (op0) == NEG)
|
||||
{
|
||||
rtx in1, in2;
|
||||
|
||||
in1 = simplify_gen_unary (NEG, mode, XEXP (op1, 0), mode);
|
||||
in2 = XEXP (op1, 1);
|
||||
return simplify_gen_binary (MINUS, mode,
|
||||
simplify_gen_binary (MULT, mode,
|
||||
in1, in2),
|
||||
XEXP (op0, 0));
|
||||
}
|
||||
|
||||
/* If one of the operands is a PLUS or a MINUS, see if we can
|
||||
simplify this by the associative law. This will, for example,
|
||||
canonicalize (minus A (plus B C)) to (minus (minus A B) C).
|
||||
|
@@ -1639,6 +1916,101 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
|
|||
&& ! side_effects_p (op0)
|
||||
&& SCALAR_INT_MODE_P (mode))
|
||||
return constm1_rtx;
|
||||
|
||||
/* (ior A C) is C if all bits of A that might be nonzero are on in C. */
|
||||
if (GET_CODE (op1) == CONST_INT
|
||||
&& GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
|
||||
&& (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
|
||||
return op1;
|
||||
|
||||
/* Convert (A & B) | A to A. */
|
||||
if (GET_CODE (op0) == AND
|
||||
&& (rtx_equal_p (XEXP (op0, 0), op1)
|
||||
|| rtx_equal_p (XEXP (op0, 1), op1))
|
||||
&& ! side_effects_p (XEXP (op0, 0))
|
||||
&& ! side_effects_p (XEXP (op0, 1)))
|
||||
return op1;
|
||||
|
||||
/* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
|
||||
mode size to (rotate A CX). */
|
||||
|
||||
if (GET_CODE (op1) == ASHIFT
|
||||
|| GET_CODE (op1) == SUBREG)
|
||||
{
|
||||
opleft = op1;
|
||||
opright = op0;
|
||||
}
|
||||
else
|
||||
{
|
||||
opright = op1;
|
||||
opleft = op0;
|
||||
}
|
||||
|
||||
if (GET_CODE (opleft) == ASHIFT && GET_CODE (opright) == LSHIFTRT
|
||||
&& rtx_equal_p (XEXP (opleft, 0), XEXP (opright, 0))
|
||||
&& GET_CODE (XEXP (opleft, 1)) == CONST_INT
|
||||
&& GET_CODE (XEXP (opright, 1)) == CONST_INT
|
||||
&& (INTVAL (XEXP (opleft, 1)) + INTVAL (XEXP (opright, 1))
|
||||
== GET_MODE_BITSIZE (mode)))
|
||||
return gen_rtx_ROTATE (mode, XEXP (opright, 0), XEXP (opleft, 1));
|
||||
|
||||
/* Same, but for ashift that has been "simplified" to a wider mode
|
||||
by simplify_shift_const. */
|
||||
|
||||
if (GET_CODE (opleft) == SUBREG
|
||||
&& GET_CODE (SUBREG_REG (opleft)) == ASHIFT
|
||||
&& GET_CODE (opright) == LSHIFTRT
|
||||
&& GET_CODE (XEXP (opright, 0)) == SUBREG
|
||||
&& GET_MODE (opleft) == GET_MODE (XEXP (opright, 0))
|
||||
&& SUBREG_BYTE (opleft) == SUBREG_BYTE (XEXP (opright, 0))
|
||||
&& (GET_MODE_SIZE (GET_MODE (opleft))
|
||||
< GET_MODE_SIZE (GET_MODE (SUBREG_REG (opleft))))
|
||||
&& rtx_equal_p (XEXP (SUBREG_REG (opleft), 0),
|
||||
SUBREG_REG (XEXP (opright, 0)))
|
||||
&& GET_CODE (XEXP (SUBREG_REG (opleft), 1)) == CONST_INT
|
||||
&& GET_CODE (XEXP (opright, 1)) == CONST_INT
|
||||
&& (INTVAL (XEXP (SUBREG_REG (opleft), 1)) + INTVAL (XEXP (opright, 1))
|
||||
== GET_MODE_BITSIZE (mode)))
|
||||
return gen_rtx_ROTATE (mode, XEXP (opright, 0),
|
||||
XEXP (SUBREG_REG (opright), 1));
|
||||
|
||||
/* If we have (ior (and (X C1) C2)), simplify this by making
|
||||
C1 as small as possible if C1 actually changes. */
|
||||
if (GET_CODE (op1) == CONST_INT
|
||||
&& (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
|
||||
|| INTVAL (op1) > 0)
|
||||
&& GET_CODE (op0) == AND
|
||||
&& GET_CODE (XEXP (op0, 1)) == CONST_INT
|
||||
&& GET_CODE (op1) == CONST_INT
|
||||
&& (INTVAL (XEXP (op0, 1)) & INTVAL (op1)) != 0)
|
||||
return simplify_gen_binary (IOR, mode,
|
||||
simplify_gen_binary
|
||||
(AND, mode, XEXP (op0, 0),
|
||||
GEN_INT (INTVAL (XEXP (op0, 1))
|
||||
& ~INTVAL (op1))),
|
||||
op1);
|
||||
|
||||
/* If OP0 is (ashiftrt (plus ...) C), it might actually be
|
||||
a (sign_extend (plus ...)). Then check if OP1 is a CONST_INT and
|
||||
the PLUS does not affect any of the bits in OP1: then we can do
|
||||
the IOR as a PLUS and we can associate. This is valid if OP1
|
||||
can be safely shifted left C bits. */
|
||||
if (GET_CODE (trueop1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
|
||||
&& GET_CODE (XEXP (op0, 0)) == PLUS
|
||||
&& GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
|
||||
&& GET_CODE (XEXP (op0, 1)) == CONST_INT
|
||||
&& INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
|
||||
{
|
||||
int count = INTVAL (XEXP (op0, 1));
|
||||
HOST_WIDE_INT mask = INTVAL (trueop1) << count;
|
||||
|
||||
if (mask >> count == INTVAL (trueop1)
|
||||
&& (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
|
||||
return simplify_gen_binary (ASHIFTRT, mode,
|
||||
plus_constant (XEXP (op0, 0), mask),
|
||||
XEXP (op0, 1));
|
||||
}
|
||||
|
||||
tem = simplify_associative_operation (code, mode, op0, op1);
|
||||
if (tem)
|
||||
return tem;
|
||||
|
@@ -1671,7 +2043,86 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
|
|||
return simplify_gen_binary (XOR, mode, XEXP (op0, 0),
|
||||
simplify_gen_binary (XOR, mode, op1,
|
||||
XEXP (op0, 1)));
|
||||
|
||||
|
||||
/* If we are XORing two things that have no bits in common,
|
||||
convert them into an IOR. This helps to detect rotation encoded
|
||||
using those methods and possibly other simplifications. */
|
||||
|
||||
if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
|
||||
&& (nonzero_bits (op0, mode)
|
||||
& nonzero_bits (op1, mode)) == 0)
|
||||
return (simplify_gen_binary (IOR, mode, op0, op1));
|
||||
|
||||
/* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
|
||||
Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
|
||||
(NOT y). */
|
||||
{
|
||||
int num_negated = 0;
|
||||
|
||||
if (GET_CODE (op0) == NOT)
|
||||
num_negated++, op0 = XEXP (op0, 0);
|
||||
if (GET_CODE (op1) == NOT)
|
||||
num_negated++, op1 = XEXP (op1, 0);
|
||||
|
||||
if (num_negated == 2)
|
||||
return simplify_gen_binary (XOR, mode, op0, op1);
|
||||
else if (num_negated == 1)
|
||||
return simplify_gen_unary (NOT, mode,
|
||||
simplify_gen_binary (XOR, mode, op0, op1),
|
||||
mode);
|
||||
}
|
||||
|
||||
/* Convert (xor (and A B) B) to (and (not A) B). The latter may
|
||||
correspond to a machine insn or result in further simplifications
|
||||
if B is a constant. */
|
||||
|
||||
if (GET_CODE (op0) == AND
|
||||
&& rtx_equal_p (XEXP (op0, 1), op1)
|
||||
&& ! side_effects_p (op1))
|
||||
return simplify_gen_binary (AND, mode,
|
||||
simplify_gen_unary (NOT, mode,
|
||||
XEXP (op0, 0), mode),
|
||||
op1);
|
||||
|
||||
else if (GET_CODE (op0) == AND
|
||||
&& rtx_equal_p (XEXP (op0, 0), op1)
|
||||
&& ! side_effects_p (op1))
|
||||
return simplify_gen_binary (AND, mode,
|
||||
simplify_gen_unary (NOT, mode,
|
||||
XEXP (op0, 1), mode),
|
||||
op1);
|
||||
|
||||
/* (xor (comparison foo bar) (const_int 1)) can become the reversed
|
||||
comparison if STORE_FLAG_VALUE is 1. */
|
||||
if (STORE_FLAG_VALUE == 1
|
||||
&& trueop1 == const1_rtx
|
||||
&& COMPARISON_P (op0)
|
||||
&& (reversed = reversed_comparison (op0, mode)))
|
||||
return reversed;
|
||||
|
||||
/* (lshiftrt foo C) where C is the number of bits in FOO minus 1
|
||||
is (lt foo (const_int 0)), so we can perform the above
|
||||
simplification if STORE_FLAG_VALUE is 1. */
|
||||
|
||||
if (STORE_FLAG_VALUE == 1
|
||||
&& trueop1 == const1_rtx
|
||||
&& GET_CODE (op0) == LSHIFTRT
|
||||
&& GET_CODE (XEXP (op0, 1)) == CONST_INT
|
||||
&& INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
|
||||
return gen_rtx_GE (mode, XEXP (op0, 0), const0_rtx);
|
||||
|
||||
/* (xor (comparison foo bar) (const_int sign-bit))
|
||||
when STORE_FLAG_VALUE is the sign bit. */
|
||||
if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
|
||||
&& ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
|
||||
== (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
|
||||
&& trueop1 == const_true_rtx
|
||||
&& COMPARISON_P (op0)
|
||||
&& (reversed = reversed_comparison (op0, mode)))
|
||||
return reversed;
|
||||
|
||||
break;
|
||||
|
||||
tem = simplify_associative_operation (code, mode, op0, op1);
|
||||
if (tem)
|
||||
return tem;
|
||||
|
@@ -1712,6 +2163,45 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
|
|||
return simplify_gen_unary (ZERO_EXTEND, mode, tem, imode);
|
||||
}
|
||||
|
||||
/* Convert (A ^ B) & A to A & (~B) since the latter is often a single
|
||||
insn (and may simplify more). */
|
||||
if (GET_CODE (op0) == XOR
|
||||
&& rtx_equal_p (XEXP (op0, 0), op1)
|
||||
&& ! side_effects_p (op1))
|
||||
return simplify_gen_binary (AND, mode,
|
||||
simplify_gen_unary (NOT, mode,
|
||||
XEXP (op0, 1), mode),
|
||||
op1);
|
||||
|
||||
if (GET_CODE (op0) == XOR
|
||||
&& rtx_equal_p (XEXP (op0, 1), op1)
|
||||
&& ! side_effects_p (op1))
|
||||
return simplify_gen_binary (AND, mode,
|
||||
simplify_gen_unary (NOT, mode,
|
||||
XEXP (op0, 0), mode),
|
||||
op1);
|
||||
|
||||
/* Similarly for (~(A ^ B)) & A. */
|
||||
if (GET_CODE (op0) == NOT
|
||||
&& GET_CODE (XEXP (op0, 0)) == XOR
|
||||
&& rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
|
||||
&& ! side_effects_p (op1))
|
||||
return simplify_gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);
|
||||
|
||||
if (GET_CODE (op0) == NOT
|
||||
&& GET_CODE (XEXP (op0, 0)) == XOR
|
||||
&& rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
|
||||
&& ! side_effects_p (op1))
|
||||
return simplify_gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);
|
||||
|
||||
/* Convert (A | B) & A to A. */
|
||||
if (GET_CODE (op0) == IOR
|
||||
&& (rtx_equal_p (XEXP (op0, 0), op1)
|
||||
|| rtx_equal_p (XEXP (op0, 1), op1))
|
||||
&& ! side_effects_p (XEXP (op0, 0))
|
||||
&& ! side_effects_p (XEXP (op0, 1)))
|
||||
return op1;
|
||||
|
||||
/* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
|
||||
((A & N) + B) & M -> (A + B) & M
|
||||
Similarly if (N & M) == 0,
|
||||
|
@@ -1993,6 +2483,33 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
|
|||
return gen_rtx_CONST_VECTOR (mode, v);
|
||||
}
|
||||
}
|
||||
|
||||
if (XVECLEN (trueop1, 0) == 1
|
||||
&& GET_CODE (XVECEXP (trueop1, 0, 0)) == CONST_INT
|
||||
&& GET_CODE (trueop0) == VEC_CONCAT)
|
||||
{
|
||||
rtx vec = trueop0;
|
||||
int offset = INTVAL (XVECEXP (trueop1, 0, 0)) * GET_MODE_SIZE (mode);
|
||||
|
||||
/* Try to find the element in the VEC_CONCAT. */
|
||||
while (GET_MODE (vec) != mode
|
||||
&& GET_CODE (vec) == VEC_CONCAT)
|
||||
{
|
||||
HOST_WIDE_INT vec_size = GET_MODE_SIZE (GET_MODE (XEXP (vec, 0)));
|
||||
if (offset < vec_size)
|
||||
vec = XEXP (vec, 0);
|
||||
else
|
||||
{
|
||||
offset -= vec_size;
|
||||
vec = XEXP (vec, 1);
|
||||
}
|
||||
vec = avoid_constant_pool_reference (vec);
|
||||
}
|
||||
|
||||
if (GET_MODE (vec) == mode)
|
||||
return vec;
|
||||
}
|
||||
|
||||
return 0;
|
||||
case VEC_CONCAT:
|
||||
{
|
||||
|
@@ -4192,3 +4709,4 @@ simplify_rtx (rtx x)
    }
  return NULL;
}
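A closing note on the IOR hunk above: the rule that turns (ior (ashift A CX) (lshiftrt A CY)) into (rotate A CX) when CX + CY equals the mode size is the RTL form of the familiar rotate-by-OR idiom. A minimal standalone check of that identity (illustration only, not GCC source; the helper name rotl32 is invented for this note):

/* Illustration only: (x << c) | (x >> (32 - c)) is a 32-bit left rotate
   when 0 < c < 32, i.e. CX + CY == mode size.  */
#include <assert.h>
#include <stdint.h>

static uint32_t rotl32 (uint32_t x, unsigned c)
{
  /* c must be in 1..31 here so neither shift count reaches 32.  */
  return (x << c) | (x >> (32 - c));
}

int main (void)
{
  assert (rotl32 (0x80000001u, 1) == 0x00000003u);
  assert (rotl32 (0x12345678u, 8) == 0x34567812u);
  return 0;
}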