expr.c (TARGET_MEM_FUNCTIONS): Transform to boolean.

	* expr.c (TARGET_MEM_FUNCTIONS): Transform to boolean.
	(emit_block_move): Split out subroutines.
	(emit_block_move_via_movstr): New.
	(emit_block_move_via_libcall): New.  Emit bcopy via normal call also.
	(emit_block_move_libcall_fn): New.  Construct function prototype for
	bcopy as well.
	(clear_storage): Split out subroutines.
	(clear_storage_via_clrstr): New.
	(clear_storage_via_libcall): New.  Emit bzero as a normal call also.
	(clear_storage_libcall_fn): New.  Construct function prototype for
	bzero as well.
	(emit_push_insn): Use emit_block_move.
	(expand_assignment): Booleanize TARGET_MEM_FUNCTIONS.
	(store_constructor): Likewise.

From-SVN: r56464
parent 81eec873ab
commit 4ca79136cf
2 changed files with 468 additions and 420 deletions
gcc/ChangeLog

@@ -1,3 +1,20 @@
+2002-08-20  Richard Henderson  <rth@redhat.com>
+
+	* expr.c (TARGET_MEM_FUNCTIONS): Transform to boolean.
+	(emit_block_move): Split out subroutines.
+	(emit_block_move_via_movstr): New.
+	(emit_block_move_via_libcall): New.  Emit bcopy via normal call also.
+	(emit_block_move_libcall_fn): New.  Construct function prototype for
+	bcopy as well.
+	(clear_storage): Split out subroutines.
+	(clear_storage_via_clrstr): New.
+	(clear_storage_via_libcall): New.  Emit bzero as a normal call also.
+	(clear_storage_libcall_fn): New.  Construct function prototype for
+	bzero as well.
+	(emit_push_insn): Use emit_block_move.
+	(expand_assignment): Booleanize TARGET_MEM_FUNCTIONS.
+	(store_constructor): Likewise.
+
 2002-08-19  Ziemowit Laski  <zlaski@apple.com>
 
 	* objc/objc-act.c (building_objc_message_expr): Rename to
gcc/expr.c (871 changed lines)
@@ -73,6 +73,15 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
 #define CASE_VECTOR_PC_RELATIVE 0
 #endif
 
+/* Convert defined/undefined to boolean.  */
+#ifdef TARGET_MEM_FUNCTIONS
+#undef TARGET_MEM_FUNCTIONS
+#define TARGET_MEM_FUNCTIONS 1
+#else
+#define TARGET_MEM_FUNCTIONS 0
+#endif
+
+
 /* If this is nonzero, we do not bother generating VOLATILE
    around volatile memory references, and we are willing to
    output indirect addresses.  If cse is to follow, we reject
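The defined/undefined-to-boolean conversion above is what lets later hunks in this commit replace preprocessor conditionals with ordinary C conditionals: both branches stay visible to the compiler and to syntax checking, and the dead branch is still optimized away. A minimal sketch of the idiom outside GCC (HAVE_FAST_MEMSET is a made-up configuration macro, not something from this commit):

/* Illustrative only: the defined/undefined -> boolean idiom.  */
#include <stdio.h>
#include <string.h>

/* Configury may or may not define HAVE_FAST_MEMSET (hypothetical).  */
#ifdef HAVE_FAST_MEMSET
#undef HAVE_FAST_MEMSET
#define HAVE_FAST_MEMSET 1
#else
#define HAVE_FAST_MEMSET 0
#endif

int
main (void)
{
  char buf[16];
  size_t i;

  if (HAVE_FAST_MEMSET)         /* a plain 'if' instead of #ifdef */
    memset (buf, 0, sizeof buf);
  else
    for (i = 0; i < sizeof buf; i++)
      buf[i] = 0;

  printf ("%d\n", buf[0]);
  return 0;
}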
@@ -123,6 +132,9 @@ static unsigned HOST_WIDE_INT move_by_pieces_ninsns
 						unsigned int));
 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
 				      struct move_by_pieces *));
+static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
+static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
+static tree emit_block_move_libcall_fn PARAMS ((int));
 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
 				      enum machine_mode));
 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
@@ -132,6 +144,9 @@ static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
 				       enum machine_mode,
 				       struct store_by_pieces *));
+static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
+static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
+static tree clear_storage_libcall_fn PARAMS ((int));
 static rtx compress_float_constant PARAMS ((rtx, rtx));
 static rtx get_subtarget PARAMS ((rtx));
 static int is_zeros_p PARAMS ((tree));
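The PARAMS wrapper in these declarations is GCC's pre-ISO compatibility macro (from include/ansidecl.h): under an ANSI compiler it expands to the parenthesized prototype, under a K&R compiler to an empty parameter list. A simplified, self-contained sketch of the idiom, not GCC's exact definition:

/* Simplified sketch of the PARAMS idiom from include/ansidecl.h.  */
#ifdef __STDC__
#define PARAMS(args) args       /* ANSI: keep the prototype */
#else
#define PARAMS(args) ()         /* K&R: prototypes unavailable */
#endif

static int add2 PARAMS ((int, int));

/* Old-style definition, accepted by both kinds of compiler.  */
static int
add2 (a, b)
     int a, b;
{
  return a + b;
}

int
main (void)
{
  return add2 (2, 3) == 5 ? 0 : 1;
}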
@@ -1655,33 +1670,26 @@ move_by_pieces_1 (genfun, mode, data)
     }
 }
 
-/* Emit code to move a block Y to a block X.
-   This may be done with string-move instructions,
-   with multiple scalar move instructions, or with a library call.
+/* Emit code to move a block Y to a block X.  This may be done with
+   string-move instructions, with multiple scalar move instructions,
+   or with a library call.
 
-   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
-   with mode BLKmode.
+   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
    SIZE is an rtx that says how long they are.
    ALIGN is the maximum alignment we can assume they have.
 
    Return the address of the new block, if memcpy is called and returns it,
    0 otherwise.  */
 
-static GTY(()) tree block_move_fn;
-
 rtx
 emit_block_move (x, y, size)
-     rtx x, y;
-     rtx size;
+     rtx x, y, size;
 {
   rtx retval = 0;
-#ifdef TARGET_MEM_FUNCTIONS
-  tree call_expr, arg_list;
-#endif
   unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
 
   if (GET_MODE (x) != BLKmode)
     abort ();
 
   if (GET_MODE (y) != BLKmode)
     abort ();
@@ -1698,164 +1706,219 @@ emit_block_move (x, y, size)
 
   if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
     move_by_pieces (x, y, INTVAL (size), align);
+  else if (emit_block_move_via_movstr (x, y, size, align))
+    ;
   else
-    {
-      /* Try the most limited insn first, because there's no point
-	 including more than one in the machine description unless
-	 the more limited one has some advantage.  */
-
-      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
-      enum machine_mode mode;
-
-      /* Since this is a move insn, we don't care about volatility.  */
-      volatile_ok = 1;
-
-      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
-	   mode = GET_MODE_WIDER_MODE (mode))
-	{
-	  enum insn_code code = movstr_optab[(int) mode];
-	  insn_operand_predicate_fn pred;
-
-	  if (code != CODE_FOR_nothing
-	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
-		 here because if SIZE is less than the mode mask, as it is
-		 returned by the macro, it will definitely be less than the
-		 actual mode mask.  */
-	      && ((GET_CODE (size) == CONST_INT
-		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
-		       <= (GET_MODE_MASK (mode) >> 1)))
-		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
-	      && ((pred = insn_data[(int) code].operand[0].predicate) == 0
-		  || (*pred) (x, BLKmode))
-	      && ((pred = insn_data[(int) code].operand[1].predicate) == 0
-		  || (*pred) (y, BLKmode))
-	      && ((pred = insn_data[(int) code].operand[3].predicate) == 0
-		  || (*pred) (opalign, VOIDmode)))
-	    {
-	      rtx op2;
-	      rtx last = get_last_insn ();
-	      rtx pat;
-
-	      op2 = convert_to_mode (mode, size, 1);
-	      pred = insn_data[(int) code].operand[2].predicate;
-	      if (pred != 0 && ! (*pred) (op2, mode))
-		op2 = copy_to_mode_reg (mode, op2);
-
-	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
-	      if (pat)
-		{
-		  emit_insn (pat);
-		  volatile_ok = 0;
-		  return 0;
-		}
-	      else
-		delete_insns_since (last);
-	    }
-	}
-
-      volatile_ok = 0;
-
-      /* X, Y, or SIZE may have been passed through protect_from_queue.
-
-	 It is unsafe to save the value generated by protect_from_queue
-	 and reuse it later.  Consider what happens if emit_queue is
-	 called before the return value from protect_from_queue is used.
-
-	 Expansion of the CALL_EXPR below will call emit_queue before
-	 we are finished emitting RTL for argument setup.  So if we are
-	 not careful we could get the wrong value for an argument.
-
-	 To avoid this problem we go ahead and emit code to copy X, Y &
-	 SIZE into new pseudos.  We can then place those new pseudos
-	 into an RTL_EXPR and use them later, even after a call to
-	 emit_queue.
-
-	 Note this is not strictly needed for library calls since they
-	 do not call emit_queue before loading their arguments.  However,
-	 we may need to have library calls call emit_queue in the future
-	 since failing to do so could cause problems for targets which
-	 define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
-      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
-      y = copy_to_mode_reg (Pmode, XEXP (y, 0));
-
-#ifdef TARGET_MEM_FUNCTIONS
-      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
-#else
-      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
-			      TREE_UNSIGNED (integer_type_node));
-      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
-#endif
-
-#ifdef TARGET_MEM_FUNCTIONS
-      /* It is incorrect to use the libcall calling conventions to call
-	 memcpy in this context.
-
-	 This could be a user call to memcpy and the user may wish to
-	 examine the return value from memcpy.
-
-	 For targets where libcalls and normal calls have different conventions
-	 for returning pointers, we could end up generating incorrect code.
-
-	 So instead of using a libcall sequence we build up a suitable
-	 CALL_EXPR and expand the call in the normal fashion.  */
-      if (block_move_fn == NULL_TREE)
-	{
-	  tree fntype;
-
-	  /* This was copied from except.c, I don't know if all this is
-	     necessary in this context or not.  */
-	  block_move_fn = get_identifier ("memcpy");
-	  fntype = build_pointer_type (void_type_node);
-	  fntype = build_function_type (fntype, NULL_TREE);
-	  block_move_fn = build_decl (FUNCTION_DECL, block_move_fn, fntype);
-	  DECL_EXTERNAL (block_move_fn) = 1;
-	  TREE_PUBLIC (block_move_fn) = 1;
-	  DECL_ARTIFICIAL (block_move_fn) = 1;
-	  TREE_NOTHROW (block_move_fn) = 1;
-	  make_decl_rtl (block_move_fn, NULL);
-	  assemble_external (block_move_fn);
-	}
-
-      /* We need to make an argument list for the function call.
-
-	 memcpy has three arguments, the first two are void * addresses and
-	 the last is a size_t byte count for the copy.  */
-      arg_list
-	= build_tree_list (NULL_TREE,
-			   make_tree (build_pointer_type (void_type_node), x));
-      TREE_CHAIN (arg_list)
-	= build_tree_list (NULL_TREE,
-			   make_tree (build_pointer_type (void_type_node), y));
-      TREE_CHAIN (TREE_CHAIN (arg_list))
-	= build_tree_list (NULL_TREE, make_tree (sizetype, size));
-      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
-
-      /* Now we have to build up the CALL_EXPR itself.  */
-      call_expr = build1 (ADDR_EXPR,
-			  build_pointer_type (TREE_TYPE (block_move_fn)),
-			  block_move_fn);
-      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (block_move_fn)),
-			 call_expr, arg_list, NULL_TREE);
-      TREE_SIDE_EFFECTS (call_expr) = 1;
-
-      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
-#else
-      emit_library_call (bcopy_libfunc, LCT_NORMAL,
-			 VOIDmode, 3, y, Pmode, x, Pmode,
-			 convert_to_mode (TYPE_MODE (integer_type_node), size,
-					  TREE_UNSIGNED (integer_type_node)),
-			 TYPE_MODE (integer_type_node));
-#endif
-
-      /* If we are initializing a readonly value, show the above call
-	 clobbered it.  Otherwise, a load from it may erroneously be hoisted
-	 from a loop.  */
-      if (RTX_UNCHANGING_P (x))
-	emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
-    }
+    retval = emit_block_move_via_libcall (x, y, size);
 
   return retval;
 }
+
+/* A subroutine of emit_block_move.  Expand a movstr pattern;
+   return true if successful.  */
+
+static bool
+emit_block_move_via_movstr (x, y, size, align)
+     rtx x, y, size;
+     unsigned int align;
+{
+  /* Try the most limited insn first, because there's no point
+     including more than one in the machine description unless
+     the more limited one has some advantage.  */
+
+  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
+  enum machine_mode mode;
+
+  /* Since this is a move insn, we don't care about volatility.  */
+  volatile_ok = 1;
+
+  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
+       mode = GET_MODE_WIDER_MODE (mode))
+    {
+      enum insn_code code = movstr_optab[(int) mode];
+      insn_operand_predicate_fn pred;
+
+      if (code != CODE_FOR_nothing
+	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
+	     here because if SIZE is less than the mode mask, as it is
+	     returned by the macro, it will definitely be less than the
+	     actual mode mask.  */
+	  && ((GET_CODE (size) == CONST_INT
+	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
+		   <= (GET_MODE_MASK (mode) >> 1)))
+	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
+	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
+	      || (*pred) (x, BLKmode))
+	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
+	      || (*pred) (y, BLKmode))
+	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
+	      || (*pred) (opalign, VOIDmode)))
+	{
+	  rtx op2;
+	  rtx last = get_last_insn ();
+	  rtx pat;
+
+	  op2 = convert_to_mode (mode, size, 1);
+	  pred = insn_data[(int) code].operand[2].predicate;
+	  if (pred != 0 && ! (*pred) (op2, mode))
+	    op2 = copy_to_mode_reg (mode, op2);
+
+	  /* ??? When called via emit_block_move_for_call, it'd be
+	     nice if there were some way to inform the backend, so
+	     that it doesn't fail the expansion because it thinks
+	     emitting the libcall would be more efficient.  */
+
+	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
+	  if (pat)
+	    {
+	      emit_insn (pat);
+	      volatile_ok = 0;
+	      return true;
+	    }
+	  else
+	    delete_insns_since (last);
+	}
+    }
+
+  volatile_ok = 0;
+  return false;
+}
+
+/* A subroutine of emit_block_move.  Expand a call to memcpy or bcopy.
+   Return the return value from memcpy, 0 otherwise.  */
+
+static rtx
+emit_block_move_via_libcall (dst, src, size)
+     rtx dst, src, size;
+{
+  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
+  enum machine_mode size_mode;
+  rtx retval;
+
+  /* DST, SRC, or SIZE may have been passed through protect_from_queue.
+
+     It is unsafe to save the value generated by protect_from_queue
+     and reuse it later.  Consider what happens if emit_queue is
+     called before the return value from protect_from_queue is used.
+
+     Expansion of the CALL_EXPR below will call emit_queue before
+     we are finished emitting RTL for argument setup.  So if we are
+     not careful we could get the wrong value for an argument.
+
+     To avoid this problem we go ahead and emit code to copy X, Y &
+     SIZE into new pseudos.  We can then place those new pseudos
+     into an RTL_EXPR and use them later, even after a call to
+     emit_queue.
+
+     Note this is not strictly needed for library calls since they
+     do not call emit_queue before loading their arguments.  However,
+     we may need to have library calls call emit_queue in the future
+     since failing to do so could cause problems for targets which
+     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
+
+  dst = copy_to_mode_reg (Pmode, XEXP (dst, 0));
+  src = copy_to_mode_reg (Pmode, XEXP (src, 0));
+
+  if (TARGET_MEM_FUNCTIONS)
+    size_mode = TYPE_MODE (sizetype);
+  else
+    size_mode = TYPE_MODE (unsigned_type_node);
+  size = convert_to_mode (size_mode, size, 1);
+  size = copy_to_mode_reg (size_mode, size);
+
+  /* It is incorrect to use the libcall calling conventions to call
+     memcpy in this context.  This could be a user call to memcpy and
+     the user may wish to examine the return value from memcpy.  For
+     targets where libcalls and normal calls have different conventions
+     for returning pointers, we could end up generating incorrect code.
+
+     For convenience, we generate the call to bcopy this way as well.  */
+
+  dst_tree = make_tree (ptr_type_node, dst);
+  src_tree = make_tree (ptr_type_node, src);
+  if (TARGET_MEM_FUNCTIONS)
+    size_tree = make_tree (sizetype, size);
+  else
+    size_tree = make_tree (unsigned_type_node, size);
+
+  fn = emit_block_move_libcall_fn (true);
+  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
+  if (TARGET_MEM_FUNCTIONS)
+    {
+      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
+      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
+    }
+  else
+    {
+      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
+      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
+    }
+
+  /* Now we have to build up the CALL_EXPR itself.  */
+  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
+  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
+		     call_expr, arg_list, NULL_TREE);
+  TREE_SIDE_EFFECTS (call_expr) = 1;
+
+  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
+
+  /* If we are initializing a readonly value, show the above call
+     clobbered it.  Otherwise, a load from it may erroneously be
+     hoisted from a loop.  */
+  if (RTX_UNCHANGING_P (dst))
+    emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
+
+  return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
+}
+
+/* A subroutine of emit_block_move_via_libcall.  Create the tree node
+   for the function we use for block copies.  The first time FOR_CALL
+   is true, we call assemble_external.  */
+
+static GTY(()) tree block_move_fn;
+
+static tree
+emit_block_move_libcall_fn (for_call)
+     int for_call;
+{
+  static bool emitted_extern;
+  tree fn = block_move_fn, args;
+
+  if (!fn)
+    {
+      if (TARGET_MEM_FUNCTIONS)
+	{
+	  fn = get_identifier ("memcpy");
+	  args = build_function_type_list (ptr_type_node, ptr_type_node,
+					   const_ptr_type_node, sizetype,
+					   NULL_TREE);
+	}
+      else
+	{
+	  fn = get_identifier ("bcopy");
+	  args = build_function_type_list (void_type_node, const_ptr_type_node,
+					   ptr_type_node, unsigned_type_node,
+					   NULL_TREE);
+	}
+
+      fn = build_decl (FUNCTION_DECL, fn, args);
+      DECL_EXTERNAL (fn) = 1;
+      TREE_PUBLIC (fn) = 1;
+      DECL_ARTIFICIAL (fn) = 1;
+      TREE_NOTHROW (fn) = 1;
+
+      block_move_fn = fn;
+    }
+
+  if (for_call && !emitted_extern)
+    {
+      emitted_extern = true;
+      make_decl_rtl (fn, NULL);
+      assemble_external (fn);
+    }
+
+  return fn;
+}
 
 /* Copy all or part of a value X into registers starting at REGNO.
    The number of registers to be filled is NREGS.  */
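The net effect of this hunk is that emit_block_move becomes a three-tier dispatcher: inline move-by-pieces for small constant sizes, a target movstr pattern when one matches, and otherwise a genuine call to memcpy or bcopy. A plain-C analogue of that control flow, with illustrative helper names standing in for the RTL-level routines:

/* Illustrative analogue of the emit_block_move dispatch; the helpers
   mirror the commit's subroutines but operate on plain buffers.  */
#include <stdbool.h>
#include <stddef.h>
#include <string.h>

static bool
move_by_pieces_ok (char *dst, const char *src, size_t n)
{
  size_t i;
  if (n > 16)                   /* worthwhile only for tiny blocks */
    return false;
  for (i = 0; i < n; i++)
    dst[i] = src[i];
  return true;
}

static bool
move_via_special_insn (char *dst, const char *src, size_t n)
{
  (void) dst; (void) src; (void) n;
  return false;                 /* stand-in for a movstr pattern */
}

void *
block_move (char *dst, const char *src, size_t n)
{
  if (move_by_pieces_ok (dst, src, n))
    return NULL;                /* no call, so no value to forward */
  if (move_via_special_insn (dst, src, n))
    return NULL;
  return memcpy (dst, src, n);  /* libcall tier; memcpy returns dst */
}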
@@ -2624,15 +2687,11 @@ store_by_pieces_2 (genfun, mode, data)
 /* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
    its length in bytes.  */
 
-static GTY(()) tree block_clear_fn;
-
 rtx
 clear_storage (object, size)
      rtx object;
      rtx size;
 {
-#ifdef TARGET_MEM_FUNCTIONS
-  tree call_expr, arg_list;
-#endif
   rtx retval = 0;
   unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
 			: GET_MODE_ALIGNMENT (GET_MODE (object)));
@@ -2641,7 +2700,7 @@ clear_storage (object, size)
      just move a zero.  Otherwise, do this a piece at a time.  */
   if (GET_MODE (object) != BLKmode
       && GET_CODE (size) == CONST_INT
-      && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
+      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
     emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
   else
     {
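One effect of the rewritten condition is to avoid a truncation hazard: the old form pushed INTVAL's HOST_WIDE_INT result through (unsigned int), so on hosts where HOST_WIDE_INT is 64 bits a size like 2^32+4 could compare equal to a 4-byte mode. The new form widens the narrow operand instead. A generic illustration with made-up values:

/* Illustrative: widen the narrow operand rather than truncate the
   wide one when comparing values of different widths.  */
#include <stdio.h>

int
main (void)
{
  long long size = 0x100000004LL;   /* stand-in for INTVAL (size) */
  unsigned int mode_size = 4;       /* stand-in for GET_MODE_SIZE */

  /* Old shape: truncation makes this succeed spuriously.  */
  printf ("%d\n", mode_size == (unsigned int) size);   /* prints 1 */

  /* New shape: compare at full width.  */
  printf ("%d\n", (long long) mode_size == size);      /* prints 0 */

  return 0;
}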
@@ -2651,159 +2710,199 @@ clear_storage (object, size)
       if (GET_CODE (size) == CONST_INT
 	  && CLEAR_BY_PIECES_P (INTVAL (size), align))
 	clear_by_pieces (object, INTVAL (size), align);
+      else if (clear_storage_via_clrstr (object, size, align))
+	;
       else
-	{
-	  /* Try the most limited insn first, because there's no point
-	     including more than one in the machine description unless
-	     the more limited one has some advantage.  */
-
-	  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
-	  enum machine_mode mode;
-
-	  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
-	       mode = GET_MODE_WIDER_MODE (mode))
-	    {
-	      enum insn_code code = clrstr_optab[(int) mode];
-	      insn_operand_predicate_fn pred;
-
-	      if (code != CODE_FOR_nothing
-		  /* We don't need MODE to be narrower than
-		     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
-		     the mode mask, as it is returned by the macro, it will
-		     definitely be less than the actual mode mask.  */
-		  && ((GET_CODE (size) == CONST_INT
-		       && ((unsigned HOST_WIDE_INT) INTVAL (size)
-			   <= (GET_MODE_MASK (mode) >> 1)))
-		      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
-		  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
-		      || (*pred) (object, BLKmode))
-		  && ((pred = insn_data[(int) code].operand[2].predicate) == 0
-		      || (*pred) (opalign, VOIDmode)))
-		{
-		  rtx op1;
-		  rtx last = get_last_insn ();
-		  rtx pat;
-
-		  op1 = convert_to_mode (mode, size, 1);
-		  pred = insn_data[(int) code].operand[1].predicate;
-		  if (pred != 0 && ! (*pred) (op1, mode))
-		    op1 = copy_to_mode_reg (mode, op1);
-
-		  pat = GEN_FCN ((int) code) (object, op1, opalign);
-		  if (pat)
-		    {
-		      emit_insn (pat);
-		      return 0;
-		    }
-		  else
-		    delete_insns_since (last);
-		}
-	    }
-
-	  /* OBJECT or SIZE may have been passed through protect_from_queue.
-
-	     It is unsafe to save the value generated by protect_from_queue
-	     and reuse it later.  Consider what happens if emit_queue is
-	     called before the return value from protect_from_queue is used.
-
-	     Expansion of the CALL_EXPR below will call emit_queue before
-	     we are finished emitting RTL for argument setup.  So if we are
-	     not careful we could get the wrong value for an argument.
-
-	     To avoid this problem we go ahead and emit code to copy OBJECT
-	     and SIZE into new pseudos.  We can then place those new pseudos
-	     into an RTL_EXPR and use them later, even after a call to
-	     emit_queue.
-
-	     Note this is not strictly needed for library calls since they
-	     do not call emit_queue before loading their arguments.  However,
-	     we may need to have library calls call emit_queue in the future
-	     since failing to do so could cause problems for targets which
-	     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
-	  object = copy_to_mode_reg (Pmode, XEXP (object, 0));
-
-#ifdef TARGET_MEM_FUNCTIONS
-	  size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
-#else
-	  size = convert_to_mode (TYPE_MODE (integer_type_node), size,
-				  TREE_UNSIGNED (integer_type_node));
-	  size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
-#endif
-
-#ifdef TARGET_MEM_FUNCTIONS
-	  /* It is incorrect to use the libcall calling conventions to call
-	     memset in this context.
-
-	     This could be a user call to memset and the user may wish to
-	     examine the return value from memset.
-
-	     For targets where libcalls and normal calls have different
-	     conventions for returning pointers, we could end up generating
-	     incorrect code.
-
-	     So instead of using a libcall sequence we build up a suitable
-	     CALL_EXPR and expand the call in the normal fashion.  */
-	  if (block_clear_fn == NULL_TREE)
-	    {
-	      tree fntype;
-
-	      /* This was copied from except.c, I don't know if all this is
-		 necessary in this context or not.  */
-	      block_clear_fn = get_identifier ("memset");
-	      fntype = build_pointer_type (void_type_node);
-	      fntype = build_function_type (fntype, NULL_TREE);
-	      block_clear_fn = build_decl (FUNCTION_DECL, block_clear_fn,
-					   fntype);
-	      DECL_EXTERNAL (block_clear_fn) = 1;
-	      TREE_PUBLIC (block_clear_fn) = 1;
-	      DECL_ARTIFICIAL (block_clear_fn) = 1;
-	      TREE_NOTHROW (block_clear_fn) = 1;
-	      make_decl_rtl (block_clear_fn, NULL);
-	      assemble_external (block_clear_fn);
-	    }
-
-	  /* We need to make an argument list for the function call.
-
-	     memset has three arguments, the first is a void * addresses, the
-	     second an integer with the initialization value, the last is a
-	     size_t byte count for the copy.  */
-	  arg_list
-	    = build_tree_list (NULL_TREE,
-			       make_tree (build_pointer_type (void_type_node),
-					  object));
-	  TREE_CHAIN (arg_list)
-	    = build_tree_list (NULL_TREE,
-			       make_tree (integer_type_node, const0_rtx));
-	  TREE_CHAIN (TREE_CHAIN (arg_list))
-	    = build_tree_list (NULL_TREE, make_tree (sizetype, size));
-	  TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
-
-	  /* Now we have to build up the CALL_EXPR itself.  */
-	  call_expr = build1 (ADDR_EXPR,
-			      build_pointer_type (TREE_TYPE (block_clear_fn)),
-			      block_clear_fn);
-	  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (block_clear_fn)),
-			     call_expr, arg_list, NULL_TREE);
-	  TREE_SIDE_EFFECTS (call_expr) = 1;
-
-	  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
-#else
-	  emit_library_call (bzero_libfunc, LCT_NORMAL,
-			     VOIDmode, 2, object, Pmode, size,
-			     TYPE_MODE (integer_type_node));
-#endif
-
-	  /* If we are initializing a readonly value, show the above call
-	     clobbered it.  Otherwise, a load from it may erroneously be
-	     hoisted from a loop.  */
-	  if (RTX_UNCHANGING_P (object))
-	    emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
-	}
+	retval = clear_storage_via_libcall (object, size);
     }
 
   return retval;
 }
+
+/* A subroutine of clear_storage.  Expand a clrstr pattern;
+   return true if successful.  */
+
+static bool
+clear_storage_via_clrstr (object, size, align)
+     rtx object, size;
+     unsigned int align;
+{
+  /* Try the most limited insn first, because there's no point
+     including more than one in the machine description unless
+     the more limited one has some advantage.  */
+
+  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
+  enum machine_mode mode;
+
+  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
+       mode = GET_MODE_WIDER_MODE (mode))
+    {
+      enum insn_code code = clrstr_optab[(int) mode];
+      insn_operand_predicate_fn pred;
+
+      if (code != CODE_FOR_nothing
+	  /* We don't need MODE to be narrower than
+	     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
+	     the mode mask, as it is returned by the macro, it will
+	     definitely be less than the actual mode mask.  */
+	  && ((GET_CODE (size) == CONST_INT
+	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
+		   <= (GET_MODE_MASK (mode) >> 1)))
+	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
+	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
+	      || (*pred) (object, BLKmode))
+	  && ((pred = insn_data[(int) code].operand[2].predicate) == 0
+	      || (*pred) (opalign, VOIDmode)))
+	{
+	  rtx op1;
+	  rtx last = get_last_insn ();
+	  rtx pat;
+
+	  op1 = convert_to_mode (mode, size, 1);
+	  pred = insn_data[(int) code].operand[1].predicate;
+	  if (pred != 0 && ! (*pred) (op1, mode))
+	    op1 = copy_to_mode_reg (mode, op1);
+
+	  pat = GEN_FCN ((int) code) (object, op1, opalign);
+	  if (pat)
+	    {
+	      emit_insn (pat);
+	      return true;
+	    }
+	  else
+	    delete_insns_since (last);
+	}
+    }
+
+  return false;
+}
+
+/* A subroutine of clear_storage.  Expand a call to memset or bzero.
+   Return the return value of memset, 0 otherwise.  */
+
+static rtx
+clear_storage_via_libcall (object, size)
+     rtx object, size;
+{
+  tree call_expr, arg_list, fn, object_tree, size_tree;
+  enum machine_mode size_mode;
+  rtx retval;
+
+  /* OBJECT or SIZE may have been passed through protect_from_queue.
+
+     It is unsafe to save the value generated by protect_from_queue
+     and reuse it later.  Consider what happens if emit_queue is
+     called before the return value from protect_from_queue is used.
+
+     Expansion of the CALL_EXPR below will call emit_queue before
+     we are finished emitting RTL for argument setup.  So if we are
+     not careful we could get the wrong value for an argument.
+
+     To avoid this problem we go ahead and emit code to copy OBJECT
+     and SIZE into new pseudos.  We can then place those new pseudos
+     into an RTL_EXPR and use them later, even after a call to
+     emit_queue.
+
+     Note this is not strictly needed for library calls since they
+     do not call emit_queue before loading their arguments.  However,
+     we may need to have library calls call emit_queue in the future
+     since failing to do so could cause problems for targets which
+     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
+
+  object = copy_to_mode_reg (Pmode, XEXP (object, 0));
+
+  if (TARGET_MEM_FUNCTIONS)
+    size_mode = TYPE_MODE (sizetype);
+  else
+    size_mode = TYPE_MODE (unsigned_type_node);
+  size = convert_to_mode (size_mode, size, 1);
+  size = copy_to_mode_reg (size_mode, size);
+
+  /* It is incorrect to use the libcall calling conventions to call
+     memset in this context.  This could be a user call to memset and
+     the user may wish to examine the return value from memset.  For
+     targets where libcalls and normal calls have different conventions
+     for returning pointers, we could end up generating incorrect code.
+
+     For convenience, we generate the call to bzero this way as well.  */
+
+  object_tree = make_tree (ptr_type_node, object);
+  if (TARGET_MEM_FUNCTIONS)
+    size_tree = make_tree (sizetype, size);
+  else
+    size_tree = make_tree (unsigned_type_node, size);
+
+  fn = clear_storage_libcall_fn (true);
+  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
+  if (TARGET_MEM_FUNCTIONS)
+    arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
+  arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
+
+  /* Now we have to build up the CALL_EXPR itself.  */
+  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
+  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
+		     call_expr, arg_list, NULL_TREE);
+  TREE_SIDE_EFFECTS (call_expr) = 1;
+
+  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
+
+  /* If we are initializing a readonly value, show the above call
+     clobbered it.  Otherwise, a load from it may erroneously be
+     hoisted from a loop.  */
+  if (RTX_UNCHANGING_P (object))
+    emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
+
+  return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
+}
+
+/* A subroutine of clear_storage_via_libcall.  Create the tree node
+   for the function we use for block clears.  The first time FOR_CALL
+   is true, we call assemble_external.  */
+
+static GTY(()) tree block_clear_fn;
+
+static tree
+clear_storage_libcall_fn (for_call)
+     int for_call;
+{
+  static bool emitted_extern;
+  tree fn = block_clear_fn, args;
+
+  if (!fn)
+    {
+      if (TARGET_MEM_FUNCTIONS)
+	{
+	  fn = get_identifier ("memset");
+	  args = build_function_type_list (ptr_type_node, ptr_type_node,
+					   integer_type_node, sizetype,
+					   NULL_TREE);
+	}
+      else
+	{
+	  fn = get_identifier ("bzero");
+	  args = build_function_type_list (void_type_node, ptr_type_node,
+					   unsigned_type_node, NULL_TREE);
+	}
+
+      fn = build_decl (FUNCTION_DECL, fn, args);
+      DECL_EXTERNAL (fn) = 1;
+      TREE_PUBLIC (fn) = 1;
+      DECL_ARTIFICIAL (fn) = 1;
+      TREE_NOTHROW (fn) = 1;
+
+      block_clear_fn = fn;
+    }
+
+  if (for_call && !emitted_extern)
+    {
+      emitted_extern = true;
+      make_decl_rtl (fn, NULL);
+      assemble_external (fn);
+    }
+
+  return fn;
+}
 
 /* Generate code to copy Y into X.
    Both Y and X must have the same mode, except that
    Y can be a constant with VOIDmode.
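clear_storage_via_libcall forwards the call's value only under TARGET_MEM_FUNCTIONS because the two interfaces differ: memset (s, c, n) returns its first argument, while the BSD bzero (s, n) returns nothing. A small illustration of the caller-visible difference (plain C; bzero needs a BSD-style libc):

#include <stdio.h>
#include <string.h>
#include <strings.h>            /* bzero, on BSD-style systems */

int
main (void)
{
  char a[8], b[8];
  char *p;

  p = memset (a, 0, sizeof a);  /* usable as a value: returns a */
  bzero (b, sizeof b);          /* returns void; nothing to forward */

  printf ("%d %d %d\n", p == a, a[0], b[0]);
  return 0;
}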
@@ -3501,6 +3600,18 @@ emit_push_insn (x, mode, type, size, align, partial, reg, extra,
 							args_addr,
 							args_so_far),
 				  skip));
+
+	  if (!ACCUMULATE_OUTGOING_ARGS)
+	    {
+	      /* If the source is referenced relative to the stack pointer,
+		 copy it to another register to stabilize it.  We do not need
+		 to do this if we know that we won't be changing sp.  */
+
+	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
+		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
+		temp = copy_to_reg (temp);
+	    }
+
+	  target = gen_rtx_MEM (BLKmode, temp);
 
 	  if (type != 0)
@@ -3515,86 +3626,12 @@ emit_push_insn (x, mode, type, size, align, partial, reg, extra,
 	  else
 	    set_mem_align (target, align);
 
-	  /* TEMP is the address of the block.  Copy the data there.  */
-	  if (GET_CODE (size) == CONST_INT
-	      && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
-	    {
-	      move_by_pieces (target, xinner, INTVAL (size), align);
-	      goto ret;
-	    }
-	  else
-	    {
-	      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
-	      enum machine_mode mode;
-
-	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
-		   mode != VOIDmode;
-		   mode = GET_MODE_WIDER_MODE (mode))
-		{
-		  enum insn_code code = movstr_optab[(int) mode];
-		  insn_operand_predicate_fn pred;
-
-		  if (code != CODE_FOR_nothing
-		      && ((GET_CODE (size) == CONST_INT
-			   && ((unsigned HOST_WIDE_INT) INTVAL (size)
-			       <= (GET_MODE_MASK (mode) >> 1)))
-			  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
-		      && (!(pred = insn_data[(int) code].operand[0].predicate)
-			  || ((*pred) (target, BLKmode)))
-		      && (!(pred = insn_data[(int) code].operand[1].predicate)
-			  || ((*pred) (xinner, BLKmode)))
-		      && (!(pred = insn_data[(int) code].operand[3].predicate)
-			  || ((*pred) (opalign, VOIDmode))))
-		    {
-		      rtx op2 = convert_to_mode (mode, size, 1);
-		      rtx last = get_last_insn ();
-		      rtx pat;
-
-		      pred = insn_data[(int) code].operand[2].predicate;
-		      if (pred != 0 && ! (*pred) (op2, mode))
-			op2 = copy_to_mode_reg (mode, op2);
-
-		      pat = GEN_FCN ((int) code) (target, xinner,
-						  op2, opalign);
-		      if (pat)
-			{
-			  emit_insn (pat);
-			  goto ret;
-			}
-		      else
-			delete_insns_since (last);
-		    }
-		}
-	    }
-
-	  if (!ACCUMULATE_OUTGOING_ARGS)
-	    {
-	      /* If the source is referenced relative to the stack pointer,
-		 copy it to another register to stabilize it.  We do not need
-		 to do this if we know that we won't be changing sp.  */
-
-	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
-		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
-		temp = copy_to_reg (temp);
-	    }
-
 	  /* Make inhibit_defer_pop nonzero around the library call
 	     to force it to pop the bcopy-arguments right away.  */
 	  NO_DEFER_POP;
-#ifdef TARGET_MEM_FUNCTIONS
-	  emit_library_call (memcpy_libfunc, LCT_NORMAL,
-			     VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
-			     convert_to_mode (TYPE_MODE (sizetype),
-					      size, TREE_UNSIGNED (sizetype)),
-			     TYPE_MODE (sizetype));
-#else
-	  emit_library_call (bcopy_libfunc, LCT_NORMAL,
-			     VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
-			     convert_to_mode (TYPE_MODE (integer_type_node),
-					      size,
-					      TREE_UNSIGNED (integer_type_node)),
-			     TYPE_MODE (integer_type_node));
-#endif
 
+	  emit_block_move (target, xinner, size);
 
 	  OK_DEFER_POP;
 	}
     }
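With the inline copy loop and the raw library calls gone, pushing a BLKmode argument now funnels through emit_block_move, so a by-value aggregate gets the same by-pieces/movstr/libcall treatment as any other block copy. A tiny C example of source that exercises this path (which strategy fires depends on the target and the size):

/* Illustrative: a by-value struct argument is block-copied into the
   outgoing argument area, i.e. emit_push_insn's BLKmode path.  */
#include <stdio.h>

struct big { char bytes[64]; };

static int
first_byte (struct big b)       /* passed by value: caller copies it */
{
  return b.bytes[0];
}

int
main (void)
{
  struct big b = { { 7 } };
  printf ("%d\n", first_byte (b));
  return 0;
}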
@@ -3700,10 +3737,8 @@ emit_push_insn (x, mode, type, size, align, partial, reg, extra,
 
       emit_move_insn (dest, x);
     }
-
     }
-
  ret:
   /* If part should go in registers, copy that part
      into the appropriate registers.  Do this now, at the end,
      since mem-to-mem copies above may do function calls.  */
@@ -3971,21 +4006,21 @@ expand_assignment (to, from, want_value, suggest_reg)
       size = expr_size (from);
       from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
 
-#ifdef TARGET_MEM_FUNCTIONS
-      emit_library_call (memmove_libfunc, LCT_NORMAL,
-			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
-			 XEXP (from_rtx, 0), Pmode,
-			 convert_to_mode (TYPE_MODE (sizetype),
-					  size, TREE_UNSIGNED (sizetype)),
-			 TYPE_MODE (sizetype));
-#else
-      emit_library_call (bcopy_libfunc, LCT_NORMAL,
-			 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
-			 XEXP (to_rtx, 0), Pmode,
-			 convert_to_mode (TYPE_MODE (integer_type_node),
-					  size, TREE_UNSIGNED (integer_type_node)),
-			 TYPE_MODE (integer_type_node));
-#endif
+      if (TARGET_MEM_FUNCTIONS)
+	emit_library_call (memmove_libfunc, LCT_NORMAL,
+			   VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
+			   XEXP (from_rtx, 0), Pmode,
+			   convert_to_mode (TYPE_MODE (sizetype),
+					    size, TREE_UNSIGNED (sizetype)),
+			   TYPE_MODE (sizetype));
+      else
+	emit_library_call (bcopy_libfunc, LCT_NORMAL,
+			   VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
+			   XEXP (to_rtx, 0), Pmode,
+			   convert_to_mode (TYPE_MODE (integer_type_node),
+					    size,
+					    TREE_UNSIGNED (integer_type_node)),
+			   TYPE_MODE (integer_type_node));
 
       preserve_temp_slots (to_rtx);
       free_temp_slots ();
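Note the operand swap between the two branches above: memmove takes (dst, src, size) while bcopy takes (src, dst, size), which is why XEXP (to_rtx, 0) and XEXP (from_rtx, 0) trade places. In plain C:

#include <stdio.h>
#include <string.h>
#include <strings.h>            /* bcopy, on BSD-style systems */

int
main (void)
{
  char src[] = "block";
  char a[sizeof src], b[sizeof src];

  memmove (a, src, sizeof src); /* destination first */
  bcopy (src, b, sizeof src);   /* source first */

  printf ("%s %s\n", a, b);
  return 0;
}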
@@ -5048,9 +5083,7 @@ store_constructor (exp, target, cleared, size)
 	      tree startbit = TREE_PURPOSE (elt);
 	      /* End of range of element, or element value.  */
 	      tree endbit = TREE_VALUE (elt);
-#ifdef TARGET_MEM_FUNCTIONS
 	      HOST_WIDE_INT startb, endb;
-#endif
 	      rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
 
 	      bitlength_rtx = expand_expr (bitlength,
@@ -5091,11 +5124,10 @@ store_constructor (exp, target, cleared, size)
 	      else
 		abort ();
 
-#ifdef TARGET_MEM_FUNCTIONS
-	      /* Optimization:  If startbit and endbit are
-		 constants divisible by BITS_PER_UNIT,
-		 call memset instead.  */
-	      if (TREE_CODE (startbit) == INTEGER_CST
+	      /* Optimization:  If startbit and endbit are constants divisible
+		 by BITS_PER_UNIT, call memset instead.  */
+	      if (TARGET_MEM_FUNCTIONS
+		  && TREE_CODE (startbit) == INTEGER_CST
 		  && TREE_CODE (endbit) == INTEGER_CST
 		  && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
 		  && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
@@ -5110,7 +5142,6 @@ store_constructor (exp, target, cleared, size)
 				     TYPE_MODE (sizetype));
 		}
 	      else
-#endif
 		emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
 				   LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
 				   Pmode, bitlength_rtx, TYPE_MODE (sizetype),