optabs.h (emit_unop_insn, [...]): Change insn code parameter from "int" to "enum insn_code".

gcc/
	* optabs.h (emit_unop_insn, maybe_emit_unop_insn): Change insn code
	parameter from "int" to "enum insn_code".
	(expand_operand_type): New enum.
	(expand_operand): New structure.
	(create_expand_operand): New function.
	(create_fixed_operand, create_output_operand): Likewise.
	(create_input_operand, create_convert_operand_to): Likewise.
	(create_convert_operand_from, create_address_operand): Likewise.
	(create_integer_operand): Likewise.
	(create_convert_operand_from_type, maybe_legitimize_operands): Declare.
	(maybe_gen_insn, maybe_expand_insn, maybe_expand_jump_insn): Likewise.
	(expand_insn, expand_jump_insn): Likewise.
	* builtins.c (expand_builtin_prefetch): Use the new interfaces.
	(expand_builtin_interclass_mathfn, expand_builtin_strlen): Likewise.
	(expand_movstr, expand_builtin___clear_cache): Likewise.
	(expand_builtin_lock_release): Likewise.
	* explow.c (allocate_dynamic_stack_space): Likewise.
	(probe_stack_range): Likewise.  Allow check_stack to FAIL,
	and use the default handling in that case.
	* expmed.c (check_predicate_volatile_ok): Delete.
	(store_bit_field_1, extract_bit_field_1): Use the new interfaces.
	(emit_cstore): Likewise.
	* expr.c (emit_block_move_via_movmem): Likewise.
	(set_storage_via_setmem, expand_assignment): Likewise.
	(emit_storent_insn, try_casesi): Likewise.
	(emit_single_push_insn): Likewise.  Allow the expansion to fail.
	* optabs.c (expand_widen_pattern_expr, expand_ternary_op): Likewise.
	(expand_vec_shift_expr, expand_binop_directly): Likewise.
	(expand_twoval_unop, expand_twoval_binop): Likewise.
	(expand_unop_direct, emit_indirect_jump): Likewise.
	(emit_conditional_move, vector_compare_rtx): Likewise.
	(expand_vec_cond_expr, expand_val_compare_and_swap_1): Likewise.
	(expand_sync_operation, expand_sync_fetch_operation): Likewise.
	(expand_sync_lock_test_and_set): Likewise.
	(maybe_emit_unop_insn): Likewise.  Change icode to an insn_code.
	(emit_unop_insn): Likewise.
	(expand_copysign_absneg): Change icode to an insn_code.
	(create_convert_operand_from_type): New function.
	(maybe_legitimize_operand, maybe_legitimize_operands): Likewise.
	(maybe_gen_insn, maybe_expand_insn, maybe_expand_jump_insn): Likewise.
	(expand_insn, expand_jump_insn): Likewise.
	* config/i386/i386.md (setmem<mode>): Use nonmemory_operand rather
	than const_int_operand for operand 2.

From-SVN: r171341
This commit is contained in:
Richard Sandiford 2011-03-23 09:30:58 +00:00 committed by Richard Sandiford
parent 78fadbabe3
commit a5c7d693b9
8 changed files with 762 additions and 939 deletions

View file

@ -1,3 +1,49 @@
2011-03-23 Richard Sandiford <richard.sandiford@linaro.org>
* optabs.h (emit_unop_insn, maybe_emit_unop_insn): Change insn code
parameter from "int" to "enum insn_code".
(expand_operand_type): New enum.
(expand_operand): New structure.
(create_expand_operand): New function.
(create_fixed_operand, create_output_operand): Likewise.
(create_input_operand, create_convert_operand_to): Likewise.
(create_convert_operand_from, create_address_operand): Likewise.
(create_integer_operand): Likewise.
(create_convert_operand_from_type, maybe_legitimize_operands): Declare.
(maybe_gen_insn, maybe_expand_insn, maybe_expand_jump_insn): Likewise.
(expand_insn, expand_jump_insn): Likewise.
* builtins.c (expand_builtin_prefetch): Use the new interfaces.
(expand_builtin_interclass_mathfn, expand_builtin_strlen): Likewise.
(expand_movstr, expand_builtin___clear_cache): Likewise.
(expand_builtin_lock_release): Likewise.
* explow.c (allocate_dynamic_stack_space): Likewise.
(probe_stack_range): Likewise. Allow check_stack to FAIL,
and use the default handling in that case.
* expmed.c (check_predicate_volatile_ok): Delete.
(store_bit_field_1, extract_bit_field_1): Use the new interfaces.
(emit_cstore): Likewise.
* expr.c (emit_block_move_via_movmem): Likewise.
(set_storage_via_setmem, expand_assignment): Likewise.
(emit_storent_insn, try_casesi): Likewise.
(emit_single_push_insn): Likewise. Allow the expansion to fail.
* optabs.c (expand_widen_pattern_expr, expand_ternary_op): Likewise.
(expand_vec_shift_expr, expand_binop_directly): Likewise.
(expand_twoval_unop, expand_twoval_binop): Likewise.
(expand_unop_direct, emit_indirect_jump): Likewise.
(emit_conditional_move, vector_compare_rtx): Likewise.
(expand_vec_cond_expr, expand_val_compare_and_swap_1): Likewise.
(expand_sync_operation, expand_sync_fetch_operation): Likewise.
(expand_sync_lock_test_and_set): Likewise.
(maybe_emit_unop_insn): Likewise. Change icode to an insn_code.
(emit_unop_insn): Likewise.
(expand_copysign_absneg): Change icode to an insn_code.
(create_convert_operand_from_type): New function.
(maybe_legitimize_operand, maybe_legitimize_operands): Likewise.
(maybe_gen_insn, maybe_expand_insn, maybe_expand_jump_insn): Likewise.
(expand_insn, expand_jump_insn): Likewise.
* config/i386/i386.md (setmem<mode>): Use nonmemory_operand rather
than const_int_operand for operand 2.
2011-03-23 Andreas Krebbel <Andreas.Krebbel@de.ibm.com>
* dwarf2out.c (const_ok_for_output_1): Print the unspec enum name

View file

@ -1143,15 +1143,13 @@ expand_builtin_prefetch (tree exp)
#ifdef HAVE_prefetch
if (HAVE_prefetch)
{
if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
(op0,
insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
|| (GET_MODE (op0) != Pmode))
{
op0 = convert_memory_address (Pmode, op0);
op0 = force_reg (Pmode, op0);
}
emit_insn (gen_prefetch (op0, op1, op2));
struct expand_operand ops[3];
create_address_operand (&ops[0], op0);
create_integer_operand (&ops[1], INTVAL (op1));
create_integer_operand (&ops[2], INTVAL (op2));
if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
return;
}
#endif
@ -2431,16 +2429,9 @@ expand_builtin_interclass_mathfn (tree exp, rtx target)
if (icode != CODE_FOR_nothing)
{
struct expand_operand ops[1];
rtx last = get_last_insn ();
tree orig_arg = arg;
/* Make a suitable register to place result in. */
if (!target
|| GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))
|| !insn_data[icode].operand[0].predicate (target, GET_MODE (target)))
target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
gcc_assert (insn_data[icode].operand[0].predicate
(target, GET_MODE (target)));
/* Wrap the computation of the argument in a SAVE_EXPR, as we may
need to expand the argument again. This way, we will not perform
@ -2452,10 +2443,11 @@ expand_builtin_interclass_mathfn (tree exp, rtx target)
if (mode != GET_MODE (op0))
op0 = convert_to_mode (mode, op0, 0);
/* Compute into TARGET.
Set TARGET to wherever the result comes back. */
if (maybe_emit_unop_insn (icode, target, op0, UNKNOWN))
return target;
create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
if (maybe_legitimize_operands (icode, 0, 1, ops)
&& maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
return ops[0].value;
delete_insns_since (last);
CALL_EXPR_ARG (exp, 0) = orig_arg;
}
@ -3362,11 +3354,12 @@ expand_builtin_strlen (tree exp, rtx target,
return NULL_RTX;
else
{
struct expand_operand ops[4];
rtx pat;
tree len;
tree src = CALL_EXPR_ARG (exp, 0);
rtx result, src_reg, char_rtx, before_strlen;
enum machine_mode insn_mode = target_mode, char_mode;
rtx src_reg, before_strlen;
enum machine_mode insn_mode = target_mode;
enum insn_code icode = CODE_FOR_nothing;
unsigned int align;
@ -3405,14 +3398,6 @@ expand_builtin_strlen (tree exp, rtx target,
if (insn_mode == VOIDmode)
return NULL_RTX;
/* Make a place to write the result of the instruction. */
result = target;
if (! (result != 0
&& REG_P (result)
&& GET_MODE (result) == insn_mode
&& REGNO (result) >= FIRST_PSEUDO_REGISTER))
result = gen_reg_rtx (insn_mode);
/* Make a place to hold the source address. We will not expand
the actual source until we are sure that the expansion will
not fail -- there are trees that cannot be expanded twice. */
@ -3422,17 +3407,12 @@ expand_builtin_strlen (tree exp, rtx target,
source operand later. */
before_strlen = get_last_insn ();
char_rtx = const0_rtx;
char_mode = insn_data[(int) icode].operand[2].mode;
if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
char_mode))
char_rtx = copy_to_mode_reg (char_mode, char_rtx);
pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
char_rtx, GEN_INT (align));
if (! pat)
create_output_operand (&ops[0], target, insn_mode);
create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
create_integer_operand (&ops[2], 0);
create_integer_operand (&ops[3], align);
if (!maybe_expand_insn (icode, 4, ops))
return NULL_RTX;
emit_insn (pat);
/* Now that we are assured of success, expand the source. */
start_sequence ();
@ -3448,12 +3428,12 @@ expand_builtin_strlen (tree exp, rtx target,
emit_insn_before (pat, get_insns ());
/* Return the value in the proper mode for this function. */
if (GET_MODE (result) == target_mode)
target = result;
if (GET_MODE (ops[0].value) == target_mode)
target = ops[0].value;
else if (target != 0)
convert_move (target, result, 0);
convert_move (target, ops[0].value, 0);
else
target = convert_to_mode (target_mode, result, 0);
target = convert_to_mode (target_mode, ops[0].value, 0);
return target;
}
@ -3674,56 +3654,39 @@ expand_builtin_mempcpy_args (tree dest, tree src, tree len,
static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
struct expand_operand ops[3];
rtx end;
rtx dest_mem;
rtx src_mem;
rtx insn;
const struct insn_data_d * data;
if (!HAVE_movstr)
return NULL_RTX;
dest_mem = get_memory_rtx (dest, NULL);
src_mem = get_memory_rtx (src, NULL);
data = insn_data + CODE_FOR_movstr;
if (!endp)
{
target = force_reg (Pmode, XEXP (dest_mem, 0));
dest_mem = replace_equiv_address (dest_mem, target);
end = gen_reg_rtx (Pmode);
}
else
create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
create_fixed_operand (&ops[1], dest_mem);
create_fixed_operand (&ops[2], src_mem);
expand_insn (CODE_FOR_movstr, 3, ops);
if (endp && target != const0_rtx)
{
if (target == 0
|| target == const0_rtx
|| ! (*data->operand[0].predicate) (target, Pmode))
target = ops[0].value;
/* movstr is supposed to set end to the address of the NUL
terminator. If the caller requested a mempcpy-like return value,
adjust it. */
if (endp == 1)
{
end = gen_reg_rtx (Pmode);
if (target != const0_rtx)
target = end;
rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
emit_move_insn (target, force_operand (tem, NULL_RTX));
}
else
end = target;
}
if (data->operand[0].mode != VOIDmode)
end = gen_lowpart (data->operand[0].mode, end);
insn = data->genfun (end, dest_mem, src_mem);
gcc_assert (insn);
emit_insn (insn);
/* movstr is supposed to set end to the address of the NUL
terminator. If the caller requested a mempcpy-like return value,
adjust it. */
if (endp == 1 && target != const0_rtx)
{
rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
emit_move_insn (target, force_operand (tem, NULL_RTX));
}
return target;
}
@ -5223,7 +5186,6 @@ expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
/* We have a "clear_cache" insn, and it will handle everything. */
tree begin, end;
rtx begin_rtx, end_rtx;
enum insn_code icode;
/* We must not expand to a library call. If we did, any
fallback library function in libgcc that might contain a call to
@ -5236,21 +5198,18 @@ expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
if (HAVE_clear_cache)
{
icode = CODE_FOR_clear_cache;
struct expand_operand ops[2];
begin = CALL_EXPR_ARG (exp, 0);
begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
begin_rtx = convert_memory_address (Pmode, begin_rtx);
if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
end = CALL_EXPR_ARG (exp, 1);
end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
end_rtx = convert_memory_address (Pmode, end_rtx);
if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
end_rtx = copy_to_mode_reg (Pmode, end_rtx);
emit_insn (gen_clear_cache (begin_rtx, end_rtx));
create_address_operand (&ops[0], begin_rtx);
create_address_operand (&ops[1], end_rtx);
if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
return const0_rtx;
}
return const0_rtx;
#endif /* HAVE_clear_cache */
@ -5748,9 +5707,9 @@ expand_builtin_synchronize (void)
static void
expand_builtin_lock_release (enum machine_mode mode, tree exp)
{
struct expand_operand ops[2];
enum insn_code icode;
rtx mem, insn;
rtx val = const0_rtx;
rtx mem;
/* Expand the operands. */
mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
@ -5759,21 +5718,16 @@ expand_builtin_lock_release (enum machine_mode mode, tree exp)
icode = direct_optab_handler (sync_lock_release_optab, mode);
if (icode != CODE_FOR_nothing)
{
if (!insn_data[icode].operand[1].predicate (val, mode))
val = force_reg (mode, val);
insn = GEN_FCN (icode) (mem, val);
if (insn)
{
emit_insn (insn);
return;
}
create_fixed_operand (&ops[0], mem);
create_input_operand (&ops[1], const0_rtx, mode);
if (maybe_expand_insn (icode, 2, ops))
return;
}
/* Otherwise we can implement this operation by emitting a barrier
followed by a store of zero. */
expand_builtin_synchronize ();
emit_move_insn (mem, val);
emit_move_insn (mem, const0_rtx);
}
/* Expand an expression EXP that calls a built-in function,

View file

@ -15793,7 +15793,7 @@
(define_expand "setmem<mode>"
[(use (match_operand:BLK 0 "memory_operand" ""))
(use (match_operand:SWI48 1 "nonmemory_operand" ""))
(use (match_operand 2 "const_int_operand" ""))
(use (match_operand:QI 2 "nonmemory_operand" ""))
(use (match_operand 3 "const_int_operand" ""))
(use (match_operand:SI 4 "const_int_operand" ""))
(use (match_operand:SI 5 "const_int_operand" ""))]

View file

@ -1379,21 +1379,13 @@ allocate_dynamic_stack_space (rtx size, unsigned size_align,
#ifdef HAVE_allocate_stack
if (HAVE_allocate_stack)
{
enum machine_mode mode = STACK_SIZE_MODE;
insn_operand_predicate_fn pred;
struct expand_operand ops[2];
/* We don't have to check against the predicate for operand 0 since
TARGET is known to be a pseudo of the proper mode, which must
be valid for the operand. For operand 1, convert to the
proper mode and validate. */
if (mode == VOIDmode)
mode = insn_data[(int) CODE_FOR_allocate_stack].operand[1].mode;
pred = insn_data[(int) CODE_FOR_allocate_stack].operand[1].predicate;
if (pred && ! ((*pred) (size, mode)))
size = copy_to_mode_reg (mode, convert_to_mode (mode, size, 1));
emit_insn (gen_allocate_stack (target, size));
be valid for the operand. */
create_fixed_operand (&ops[0], target);
create_convert_operand_to (&ops[1], size, STACK_SIZE_MODE, true);
expand_insn (CODE_FOR_allocate_stack, 2, ops);
}
else
#endif
@ -1544,22 +1536,22 @@ probe_stack_range (HOST_WIDE_INT first, rtx size)
plus_constant (size, first)));
emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr,
Pmode);
return;
}
/* Next see if we have an insn to check the stack. */
#ifdef HAVE_check_stack
else if (HAVE_check_stack)
if (HAVE_check_stack)
{
struct expand_operand ops[1];
rtx addr = memory_address (Pmode,
gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
stack_pointer_rtx,
plus_constant (size, first)));
insn_operand_predicate_fn pred
= insn_data[(int) CODE_FOR_check_stack].operand[0].predicate;
if (pred && !((*pred) (addr, Pmode)))
addr = copy_to_mode_reg (Pmode, addr);
emit_insn (gen_check_stack (addr));
create_input_operand (&ops[0], addr, Pmode);
if (maybe_expand_insn (CODE_FOR_check_stack, 1, ops))
return;
}
#endif

View file

@ -323,22 +323,6 @@ mode_for_extraction (enum extraction_pattern pattern, int opno)
return word_mode;
return data->operand[opno].mode;
}
/* Return true if X, of mode MODE, matches the predicate for operand
OPNO of instruction ICODE. Allow volatile memories, regardless of
the ambient volatile_ok setting. */
static bool
check_predicate_volatile_ok (enum insn_code icode, int opno,
rtx x, enum machine_mode mode)
{
bool save_volatile_ok, result;
save_volatile_ok = volatile_ok;
result = insn_data[(int) icode].operand[opno].predicate (x, mode);
volatile_ok = save_volatile_ok;
return result;
}
/* A subroutine of store_bit_field, with the same arguments. Return true
if the operation could be implemented.
@ -405,40 +389,17 @@ store_bit_field_1 (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
&& bitsize == GET_MODE_BITSIZE (GET_MODE_INNER (GET_MODE (op0)))
&& !(bitnum % GET_MODE_BITSIZE (GET_MODE_INNER (GET_MODE (op0)))))
{
struct expand_operand ops[3];
enum machine_mode outermode = GET_MODE (op0);
enum machine_mode innermode = GET_MODE_INNER (outermode);
int icode = (int) optab_handler (vec_set_optab, outermode);
enum insn_code icode = optab_handler (vec_set_optab, outermode);
int pos = bitnum / GET_MODE_BITSIZE (innermode);
rtx rtxpos = GEN_INT (pos);
rtx src = value;
rtx dest = op0;
rtx pat, seq;
enum machine_mode mode0 = insn_data[icode].operand[0].mode;
enum machine_mode mode1 = insn_data[icode].operand[1].mode;
enum machine_mode mode2 = insn_data[icode].operand[2].mode;
start_sequence ();
if (! (*insn_data[icode].operand[1].predicate) (src, mode1))
src = copy_to_mode_reg (mode1, src);
if (! (*insn_data[icode].operand[2].predicate) (rtxpos, mode2))
rtxpos = copy_to_mode_reg (mode1, rtxpos);
/* We could handle this, but we should always be called with a pseudo
for our targets and all insns should take them as outputs. */
gcc_assert ((*insn_data[icode].operand[0].predicate) (dest, mode0)
&& (*insn_data[icode].operand[1].predicate) (src, mode1)
&& (*insn_data[icode].operand[2].predicate) (rtxpos, mode2));
pat = GEN_FCN (icode) (dest, src, rtxpos);
seq = get_insns ();
end_sequence ();
if (pat)
{
emit_insn (seq);
emit_insn (pat);
return true;
}
create_fixed_operand (&ops[0], op0);
create_input_operand (&ops[1], value, innermode);
create_integer_operand (&ops[2], pos);
if (maybe_expand_insn (icode, 3, ops))
return true;
}
/* If the target is a register, overwriting the entire object, or storing
@ -515,44 +476,30 @@ store_bit_field_1 (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
&& bitsize == GET_MODE_BITSIZE (fieldmode)
&& optab_handler (movstrict_optab, fieldmode) != CODE_FOR_nothing)
{
int icode = optab_handler (movstrict_optab, fieldmode);
rtx insn;
rtx start = get_last_insn ();
struct expand_operand ops[2];
enum insn_code icode = optab_handler (movstrict_optab, fieldmode);
rtx arg0 = op0;
/* Get appropriate low part of the value being stored. */
if (CONST_INT_P (value) || REG_P (value))
value = gen_lowpart (fieldmode, value);
else if (!(GET_CODE (value) == SYMBOL_REF
|| GET_CODE (value) == LABEL_REF
|| GET_CODE (value) == CONST))
value = convert_to_mode (fieldmode, value, 0);
if (! (*insn_data[icode].operand[1].predicate) (value, fieldmode))
value = copy_to_mode_reg (fieldmode, value);
if (GET_CODE (op0) == SUBREG)
if (GET_CODE (arg0) == SUBREG)
{
/* Else we've got some float mode source being extracted into
a different float mode destination -- this combination of
subregs results in Severe Tire Damage. */
gcc_assert (GET_MODE (SUBREG_REG (op0)) == fieldmode
gcc_assert (GET_MODE (SUBREG_REG (arg0)) == fieldmode
|| GET_MODE_CLASS (fieldmode) == MODE_INT
|| GET_MODE_CLASS (fieldmode) == MODE_PARTIAL_INT);
arg0 = SUBREG_REG (op0);
arg0 = SUBREG_REG (arg0);
}
insn = (GEN_FCN (icode)
(gen_rtx_SUBREG (fieldmode, arg0,
(bitnum % BITS_PER_WORD) / BITS_PER_UNIT
+ (offset * UNITS_PER_WORD)),
value));
if (insn)
{
emit_insn (insn);
return true;
}
delete_insns_since (start);
arg0 = gen_rtx_SUBREG (fieldmode, arg0,
(bitnum % BITS_PER_WORD) / BITS_PER_UNIT
+ (offset * UNITS_PER_WORD));
create_fixed_operand (&ops[0], arg0);
/* Shrink the source operand to FIELDMODE. */
create_convert_operand_to (&ops[1], value, fieldmode, false);
if (maybe_expand_insn (icode, 2, ops))
return true;
}
/* Handle fields bigger than a word. */
@ -653,16 +600,13 @@ store_bit_field_1 (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
&& bitsize > 0
&& GET_MODE_BITSIZE (op_mode) >= bitsize
&& ! ((REG_P (op0) || GET_CODE (op0) == SUBREG)
&& (bitsize + bitpos > GET_MODE_BITSIZE (op_mode)))
&& insn_data[CODE_FOR_insv].operand[1].predicate (GEN_INT (bitsize),
VOIDmode)
&& check_predicate_volatile_ok (CODE_FOR_insv, 0, op0, VOIDmode))
&& (bitsize + bitpos > GET_MODE_BITSIZE (op_mode))))
{
struct expand_operand ops[4];
int xbitpos = bitpos;
rtx value1;
rtx xop0 = op0;
rtx last = get_last_insn ();
rtx pat;
bool copy_back = false;
/* Add OFFSET into OP0's address. */
@ -743,17 +687,12 @@ store_bit_field_1 (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
gcc_assert (CONSTANT_P (value));
}
/* If this machine's insv insists on a register,
get VALUE1 into a register. */
if (! ((*insn_data[(int) CODE_FOR_insv].operand[3].predicate)
(value1, op_mode)))
value1 = force_reg (op_mode, value1);
pat = gen_insv (xop0, GEN_INT (bitsize), GEN_INT (xbitpos), value1);
if (pat)
create_fixed_operand (&ops[0], xop0);
create_integer_operand (&ops[1], bitsize);
create_integer_operand (&ops[2], xbitpos);
create_input_operand (&ops[3], value1, op_mode);
if (maybe_expand_insn (CODE_FOR_insv, 4, ops))
{
emit_insn (pat);
if (copy_back)
convert_move (op0, xop0, true);
return true;
@ -1235,50 +1174,21 @@ extract_bit_field_1 (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
&& ((bitnum + bitsize - 1) / GET_MODE_BITSIZE (GET_MODE_INNER (GET_MODE (op0)))
== bitnum / GET_MODE_BITSIZE (GET_MODE_INNER (GET_MODE (op0)))))
{
struct expand_operand ops[3];
enum machine_mode outermode = GET_MODE (op0);
enum machine_mode innermode = GET_MODE_INNER (outermode);
int icode = (int) optab_handler (vec_extract_optab, outermode);
enum insn_code icode = optab_handler (vec_extract_optab, outermode);
unsigned HOST_WIDE_INT pos = bitnum / GET_MODE_BITSIZE (innermode);
rtx rtxpos = GEN_INT (pos);
rtx src = op0;
rtx dest = NULL, pat, seq;
enum machine_mode mode0 = insn_data[icode].operand[0].mode;
enum machine_mode mode1 = insn_data[icode].operand[1].mode;
enum machine_mode mode2 = insn_data[icode].operand[2].mode;
if (innermode == tmode || innermode == mode)
dest = target;
if (!dest)
dest = gen_reg_rtx (innermode);
start_sequence ();
if (! (*insn_data[icode].operand[0].predicate) (dest, mode0))
dest = copy_to_mode_reg (mode0, dest);
if (! (*insn_data[icode].operand[1].predicate) (src, mode1))
src = copy_to_mode_reg (mode1, src);
if (! (*insn_data[icode].operand[2].predicate) (rtxpos, mode2))
rtxpos = copy_to_mode_reg (mode1, rtxpos);
/* We could handle this, but we should always be called with a pseudo
for our targets and all insns should take them as outputs. */
gcc_assert ((*insn_data[icode].operand[0].predicate) (dest, mode0)
&& (*insn_data[icode].operand[1].predicate) (src, mode1)
&& (*insn_data[icode].operand[2].predicate) (rtxpos, mode2));
pat = GEN_FCN (icode) (dest, src, rtxpos);
seq = get_insns ();
end_sequence ();
if (pat)
create_output_operand (&ops[0], target, innermode);
create_input_operand (&ops[1], op0, outermode);
create_integer_operand (&ops[2], pos);
if (maybe_expand_insn (icode, 3, ops))
{
emit_insn (seq);
emit_insn (pat);
if (mode0 != mode)
return gen_lowpart (tmode, dest);
return dest;
target = ops[0].value;
if (GET_MODE (target) != mode)
return gen_lowpart (tmode, target);
return target;
}
}
@ -1517,17 +1427,14 @@ extract_bit_field_1 (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
acceptable to the format of ext(z)v. */
&& !(GET_CODE (op0) == SUBREG && GET_MODE (op0) != ext_mode)
&& !((REG_P (op0) || GET_CODE (op0) == SUBREG)
&& (bitsize + bitpos > GET_MODE_BITSIZE (ext_mode)))
&& check_predicate_volatile_ok (icode, 1, op0, GET_MODE (op0)))
&& (bitsize + bitpos > GET_MODE_BITSIZE (ext_mode))))
{
struct expand_operand ops[4];
unsigned HOST_WIDE_INT xbitpos = bitpos, xoffset = offset;
rtx bitsize_rtx, bitpos_rtx;
rtx last = get_last_insn ();
rtx xop0 = op0;
rtx xtarget = target;
rtx xspec_target = target;
rtx xspec_target_subreg = 0;
rtx pat;
/* If op0 is a register, we need it in EXT_MODE to make it
acceptable to the format of ext(z)v. */
@ -1570,27 +1477,20 @@ extract_bit_field_1 (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
xtarget = gen_reg_rtx (ext_mode);
}
/* If this machine's ext(z)v insists on a register target,
make sure we have one. */
if (!insn_data[(int) icode].operand[0].predicate (xtarget, ext_mode))
xtarget = gen_reg_rtx (ext_mode);
bitsize_rtx = GEN_INT (bitsize);
bitpos_rtx = GEN_INT (xbitpos);
pat = (unsignedp
? gen_extzv (xtarget, xop0, bitsize_rtx, bitpos_rtx)
: gen_extv (xtarget, xop0, bitsize_rtx, bitpos_rtx));
if (pat)
create_output_operand (&ops[0], xtarget, ext_mode);
create_fixed_operand (&ops[1], xop0);
create_integer_operand (&ops[2], bitsize);
create_integer_operand (&ops[3], xbitpos);
if (maybe_expand_insn (unsignedp ? CODE_FOR_extzv : CODE_FOR_extv,
4, ops))
{
emit_insn (pat);
xtarget = ops[0].value;
if (xtarget == xspec_target)
return xtarget;
if (xtarget == xspec_target_subreg)
return xspec_target;
return convert_extracted_bit_field (xtarget, mode, tmode, unsignedp);
}
delete_insns_since (last);
}
/* If OP0 is a memory, try copying it to a register and seeing if a
@ -5101,19 +5001,14 @@ emit_cstore (rtx target, enum insn_code icode, enum rtx_code code,
int unsignedp, rtx x, rtx y, int normalizep,
enum machine_mode target_mode)
{
rtx op0, last, comparison, subtarget, pattern;
struct expand_operand ops[4];
rtx op0, last, comparison, subtarget;
enum machine_mode result_mode = insn_data[(int) icode].operand[0].mode;
last = get_last_insn ();
x = prepare_operand (icode, x, 2, mode, compare_mode, unsignedp);
y = prepare_operand (icode, y, 3, mode, compare_mode, unsignedp);
comparison = gen_rtx_fmt_ee (code, result_mode, x, y);
if (!x || !y
|| !insn_data[icode].operand[2].predicate
(x, insn_data[icode].operand[2].mode)
|| !insn_data[icode].operand[3].predicate
(y, insn_data[icode].operand[3].mode)
|| !insn_data[icode].operand[1].predicate (comparison, VOIDmode))
if (!x || !y)
{
delete_insns_since (last);
return NULL_RTX;
@ -5124,16 +5019,18 @@ emit_cstore (rtx target, enum insn_code icode, enum rtx_code code,
if (!target)
target = gen_reg_rtx (target_mode);
if (optimize
|| !(insn_data[(int) icode].operand[0].predicate (target, result_mode)))
subtarget = gen_reg_rtx (result_mode);
else
subtarget = target;
comparison = gen_rtx_fmt_ee (code, result_mode, x, y);
pattern = GEN_FCN (icode) (subtarget, comparison, x, y);
if (!pattern)
return NULL_RTX;
emit_insn (pattern);
create_output_operand (&ops[0], optimize ? NULL_RTX : target, result_mode);
create_fixed_operand (&ops[1], comparison);
create_fixed_operand (&ops[2], x);
create_fixed_operand (&ops[3], y);
if (!maybe_expand_insn (icode, 4, ops))
{
delete_insns_since (last);
return NULL_RTX;
}
subtarget = ops[0].value;
/* If we are converting to a wider mode, first convert to
TARGET_MODE, then normalize. This produces better combining

View file

@ -1258,7 +1258,6 @@ static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
unsigned int expected_align, HOST_WIDE_INT expected_size)
{
rtx opalign = GEN_INT (align / BITS_PER_UNIT);
int save_volatile_ok = volatile_ok;
enum machine_mode mode;
@ -1276,7 +1275,6 @@ emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
mode = GET_MODE_WIDER_MODE (mode))
{
enum insn_code code = direct_optab_handler (movmem_optab, mode);
insn_operand_predicate_fn pred;
if (code != CODE_FOR_nothing
/* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
@ -1286,43 +1284,32 @@ emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
&& ((CONST_INT_P (size)
&& ((unsigned HOST_WIDE_INT) INTVAL (size)
<= (GET_MODE_MASK (mode) >> 1)))
|| GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
&& ((pred = insn_data[(int) code].operand[0].predicate) == 0
|| (*pred) (x, BLKmode))
&& ((pred = insn_data[(int) code].operand[1].predicate) == 0
|| (*pred) (y, BLKmode))
&& ((pred = insn_data[(int) code].operand[3].predicate) == 0
|| (*pred) (opalign, VOIDmode)))
|| GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
{
rtx op2;
rtx last = get_last_insn ();
rtx pat;
op2 = convert_to_mode (mode, size, 1);
pred = insn_data[(int) code].operand[2].predicate;
if (pred != 0 && ! (*pred) (op2, mode))
op2 = copy_to_mode_reg (mode, op2);
struct expand_operand ops[6];
unsigned int nops;
/* ??? When called via emit_block_move_for_call, it'd be
nice if there were some way to inform the backend, so
that it doesn't fail the expansion because it thinks
emitting the libcall would be more efficient. */
if (insn_data[(int) code].n_operands == 4)
pat = GEN_FCN ((int) code) (x, y, op2, opalign);
else
pat = GEN_FCN ((int) code) (x, y, op2, opalign,
GEN_INT (expected_align
/ BITS_PER_UNIT),
GEN_INT (expected_size));
if (pat)
nops = insn_data[(int) code].n_operands;
create_fixed_operand (&ops[0], x);
create_fixed_operand (&ops[1], y);
/* The check above guarantees that this size conversion is valid. */
create_convert_operand_to (&ops[2], size, mode, true);
create_integer_operand (&ops[3], align / BITS_PER_UNIT);
if (nops != 4)
{
create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
create_integer_operand (&ops[5], expected_size);
nops = 6;
}
if (maybe_expand_insn (code, nops, ops))
{
emit_insn (pat);
volatile_ok = save_volatile_ok;
return true;
}
else
delete_insns_since (last);
}
}
@ -2705,7 +2692,6 @@ set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
including more than one in the machine description unless
the more limited one has some advantage. */
rtx opalign = GEN_INT (align / BITS_PER_UNIT);
enum machine_mode mode;
if (expected_align < align)
@ -2715,7 +2701,6 @@ set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
mode = GET_MODE_WIDER_MODE (mode))
{
enum insn_code code = direct_optab_handler (setmem_optab, mode);
insn_operand_predicate_fn pred;
if (code != CODE_FOR_nothing
/* We don't need MODE to be narrower than
@ -2725,46 +2710,25 @@ set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
&& ((CONST_INT_P (size)
&& ((unsigned HOST_WIDE_INT) INTVAL (size)
<= (GET_MODE_MASK (mode) >> 1)))
|| GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
&& ((pred = insn_data[(int) code].operand[0].predicate) == 0
|| (*pred) (object, BLKmode))
&& ((pred = insn_data[(int) code].operand[3].predicate) == 0
|| (*pred) (opalign, VOIDmode)))
|| GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
{
rtx opsize, opchar;
enum machine_mode char_mode;
rtx last = get_last_insn ();
rtx pat;
struct expand_operand ops[6];
unsigned int nops;
opsize = convert_to_mode (mode, size, 1);
pred = insn_data[(int) code].operand[1].predicate;
if (pred != 0 && ! (*pred) (opsize, mode))
opsize = copy_to_mode_reg (mode, opsize);
opchar = val;
char_mode = insn_data[(int) code].operand[2].mode;
if (char_mode != VOIDmode)
nops = insn_data[(int) code].n_operands;
create_fixed_operand (&ops[0], object);
/* The check above guarantees that this size conversion is valid. */
create_convert_operand_to (&ops[1], size, mode, true);
create_convert_operand_from (&ops[2], val, byte_mode, true);
create_integer_operand (&ops[3], align / BITS_PER_UNIT);
if (nops != 4)
{
opchar = convert_to_mode (char_mode, opchar, 1);
pred = insn_data[(int) code].operand[2].predicate;
if (pred != 0 && ! (*pred) (opchar, char_mode))
opchar = copy_to_mode_reg (char_mode, opchar);
create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
create_integer_operand (&ops[5], expected_size);
nops = 6;
}
if (insn_data[(int) code].n_operands == 4)
pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
else
pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
GEN_INT (expected_align
/ BITS_PER_UNIT),
GEN_INT (expected_size));
if (pat)
{
emit_insn (pat);
return true;
}
else
delete_insns_since (last);
if (maybe_expand_insn (code, nops, ops))
return true;
}
}
@ -3547,7 +3511,6 @@ emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
rtx dest;
enum insn_code icode;
insn_operand_predicate_fn pred;
stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
/* If there is push pattern, use it. Otherwise try old way of throwing
@@ -3555,11 +3518,11 @@ emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
icode = optab_handler (push_optab, mode);
if (icode != CODE_FOR_nothing)
{
if (((pred = insn_data[(int) icode].operand[0].predicate)
&& !((*pred) (x, mode))))
x = force_reg (mode, x);
emit_insn (GEN_FCN (icode) (x));
return;
struct expand_operand ops[1];
create_input_operand (&ops[0], x, mode);
if (maybe_expand_insn (icode, 1, ops))
return;
}
if (GET_MODE_SIZE (mode) == rounded_size)
dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
@@ -4147,7 +4110,8 @@ expand_assignment (tree to, tree from, bool nontemporal)
rtx to_rtx = 0;
rtx result;
enum machine_mode mode;
int align, icode;
int align;
enum insn_code icode;
/* Don't crash if the lhs of the assignment was erroneous. */
if (TREE_CODE (to) == ERROR_MARK)
@@ -4170,8 +4134,9 @@ expand_assignment (tree to, tree from, bool nontemporal)
&& ((icode = optab_handler (movmisalign_optab, mode))
!= CODE_FOR_nothing))
{
enum machine_mode address_mode, op_mode1;
rtx insn, reg, op0, mem;
struct expand_operand ops[2];
enum machine_mode address_mode;
rtx reg, op0, mem;
reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
reg = force_not_mem (reg);
@@ -4212,16 +4177,11 @@ expand_assignment (tree to, tree from, bool nontemporal)
if (TREE_THIS_VOLATILE (to))
MEM_VOLATILE_P (mem) = 1;
op_mode1 = insn_data[icode].operand[1].mode;
if (! (*insn_data[icode].operand[1].predicate) (reg, op_mode1)
&& op_mode1 != VOIDmode)
reg = copy_to_mode_reg (op_mode1, reg);
insn = GEN_FCN (icode) (mem, reg);
create_fixed_operand (&ops[0], mem);
create_input_operand (&ops[1], reg, mode);
/* The movmisalign<mode> pattern cannot fail, else the assignment would
silently be omitted. */
gcc_assert (insn != NULL_RTX);
emit_insn (insn);
expand_insn (icode, 2, ops);
return;
}
@@ -4483,31 +4443,16 @@
/* Try to emit a non-temporal store of FROM into TO, using the storent
   optab for TO's mode.  Return true if the target provided a suitable
   pattern and the expansion succeeded, false otherwise.

   NOTE(review): the original text contained both the pre-patch and
   post-patch bodies of this function (a mis-applied diff left a duplicate
   declaration of MODE and two independent expansion paths).  This keeps
   the new expand_operand-based version described in the ChangeLog.  */

bool
emit_storent_insn (rtx to, rtx from)
{
  struct expand_operand ops[2];
  enum machine_mode mode = GET_MODE (to);
  enum insn_code code = optab_handler (storent_optab, mode);

  if (code == CODE_FOR_nothing)
    return false;

  /* TO is used as-is (it may be a volatile MEM); FROM may be copied
     by the operand-legitimization machinery to satisfy the pattern's
     input predicate.  */
  create_fixed_operand (&ops[0], to);
  create_input_operand (&ops[1], from, mode);
  return maybe_expand_insn (code, 2, ops);
}
/* Generate code for computing expression EXP,
@@ -10120,10 +10065,10 @@ try_casesi (tree index_type, tree index_expr, tree minval, tree range,
rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
rtx fallback_label ATTRIBUTE_UNUSED)
{
struct expand_operand ops[5];
enum machine_mode index_mode = SImode;
int index_bits = GET_MODE_BITSIZE (index_mode);
rtx op1, op2, index;
enum machine_mode op_mode;
if (! HAVE_casesi)
return 0;
@@ -10158,32 +10103,17 @@ try_casesi (tree index_type, tree index_expr, tree minval, tree range,
do_pending_stack_adjust ();
op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
(index, op_mode))
index = copy_to_mode_reg (op_mode, index);
op1 = expand_normal (minval);
op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
(op1, op_mode))
op1 = copy_to_mode_reg (op_mode, op1);
op2 = expand_normal (range);
op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
op2, TYPE_UNSIGNED (TREE_TYPE (range)));
if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
(op2, op_mode))
op2 = copy_to_mode_reg (op_mode, op2);
emit_jump_insn (gen_casesi (index, op1, op2,
table_label, !default_label
? fallback_label : default_label));
create_input_operand (&ops[0], index, index_mode);
create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
create_fixed_operand (&ops[3], table_label);
create_fixed_operand (&ops[4], (default_label
? default_label
: fallback_label));
expand_jump_insn (CODE_FOR_casesi, 5, ops);
return 1;
}

[File diff suppressed because it is too large; view the file directly to see its changes.]

@@ -791,8 +791,8 @@ extern rtx expand_copysign (rtx, rtx, rtx);
/* Generate an instruction with a given INSN_CODE with an output and
   an input.  The insn code parameter is now "enum insn_code" rather
   than "int" (see the ChangeLog); the stale "int" prototypes that a
   mis-applied diff left alongside these were conflicting redeclarations
   and have been removed.  */
extern void emit_unop_insn (enum insn_code, rtx, rtx, enum rtx_code);
extern bool maybe_emit_unop_insn (enum insn_code, rtx, rtx, enum rtx_code);
/* An extra flag to control optab_for_tree_code's behavior. This is needed to
distinguish between machines with a vector shift that takes a scalar for the
@@ -926,6 +926,148 @@ extern rtx convert_optab_libfunc (convert_optab optab, enum machine_mode mode1,
extern bool insn_operand_matches (enum insn_code icode, unsigned int opno,
rtx operand);
/* Describes the type of an expand_operand.  Each value is associated
   with a create_*_operand function; see the comments above those
   functions for details.  */
enum expand_operand_type {
  EXPAND_FIXED,		/* create_fixed_operand: use the rtx exactly as given.  */
  EXPAND_OUTPUT,	/* create_output_operand: where the result should go.  */
  EXPAND_INPUT,		/* create_input_operand: input that already has the
			   required mode.  */
  EXPAND_CONVERT_TO,	/* create_convert_operand_to: convert the value to the
			   given mode first.  */
  EXPAND_CONVERT_FROM,	/* create_convert_operand_from: value is in the given
			   mode; convert to whatever the pattern wants.  */
  EXPAND_ADDRESS,	/* create_address_operand: a Pmode address.  */
  EXPAND_INTEGER	/* create_integer_operand: integer with no inherent
			   mode.  */
};
/* Information about an operand for instruction expansion.  The bitfields
   below are sized to pack into a single 32-bit word (8 + 1 + 7 + 16).  */
struct expand_operand {
  /* The type of operand.  */
  ENUM_BITFIELD (expand_operand_type) type : 8;

  /* True if any conversion should treat VALUE as being unsigned
     rather than signed.  Only meaningful for certain types.  */
  unsigned int unsigned_p : 1;

  /* Unused; available for future use.  */
  unsigned int unused : 7;

  /* The mode passed to the convert_*_operand function.  It has a
     type-dependent meaning.  */
  ENUM_BITFIELD (machine_mode) mode : 16;

  /* The value of the operand.  */
  rtx value;
};
/* Initialize OP with the given fields.  Initialize the other fields
   to their default values.  */

static inline void
create_expand_operand (struct expand_operand *op,
		       enum expand_operand_type type,
		       rtx value, enum machine_mode mode,
		       bool unsigned_p)
{
  op->type = type;
  op->unsigned_p = unsigned_p;
  /* Clear the spare bits so that the whole structure has a
     well-defined value.  */
  op->unused = 0;
  op->mode = mode;
  op->value = value;
}
/* Make OP describe an operand that must use rtx X, even if X is volatile.
   The operand is recorded as-is, with no requested mode (VOIDmode).  */

static inline void
create_fixed_operand (struct expand_operand *op, rtx x)
{
  create_expand_operand (op, EXPAND_FIXED, x, VOIDmode, false);
}
/* Make OP describe an output operand that must have mode MODE.
   X, if nonnull, is a suggestion for where the output should be stored.
   It is OK for X to be inconsistent with MODE, although it will just
   be ignored in that case.  */

static inline void
create_output_operand (struct expand_operand *op, rtx x,
		       enum machine_mode mode)
{
  create_expand_operand (op, EXPAND_OUTPUT, x, mode, false);
}
/* Make OP describe an input operand that must have mode MODE and
   value VALUE; MODE cannot be VOIDmode.  The backend may request that
   VALUE be copied into a different kind of rtx before being passed
   as an operand.  No mode conversion is performed, so signedness is
   irrelevant here.  */

static inline void
create_input_operand (struct expand_operand *op, rtx value,
		      enum machine_mode mode)
{
  create_expand_operand (op, EXPAND_INPUT, value, mode, false);
}
/* Like create_input_operand, except that VALUE must first be converted
   to mode MODE.  UNSIGNED_P says whether VALUE is unsigned, i.e. whether
   the conversion should treat it as unsigned rather than signed.  */

static inline void
create_convert_operand_to (struct expand_operand *op, rtx value,
			   enum machine_mode mode, bool unsigned_p)
{
  create_expand_operand (op, EXPAND_CONVERT_TO, value, mode, unsigned_p);
}
/* Make OP describe an input operand that should have the same value
   as VALUE, after any mode conversion that the backend might request.
   If VALUE is a CONST_INT, it should be treated as having mode MODE,
   since CONST_INTs themselves carry no mode.  UNSIGNED_P says whether
   VALUE is unsigned.  */

static inline void
create_convert_operand_from (struct expand_operand *op, rtx value,
			     enum machine_mode mode, bool unsigned_p)
{
  create_expand_operand (op, EXPAND_CONVERT_FROM, value, mode, unsigned_p);
}
/* Variant of create_convert_operand_from that derives the mode (and
   presumably the signedness -- confirm against the optabs.c definition)
   from tree TYPE.  Defined out-of-line in optabs.c.  */
extern void create_convert_operand_from_type (struct expand_operand *op,
					      rtx value, tree type);
/* Make OP describe an input Pmode address operand.  VALUE is the value
   of the address, but it may need to be converted to Pmode first.
   Signedness is not meaningful for addresses, so it is left false.  */

static inline void
create_address_operand (struct expand_operand *op, rtx value)
{
  create_expand_operand (op, EXPAND_ADDRESS, value, Pmode, false);
}
/* Make OP describe an input operand that has value INTVAL and that has
   no inherent mode.  This function should only be used for operands that
   are always expand-time constants.  The backend may request that INTVAL
   be copied into a different kind of rtx, but it must specify the mode
   of that rtx if so.  INTVAL is wrapped in a VOIDmode CONST_INT via
   GEN_INT.  */

static inline void
create_integer_operand (struct expand_operand *op, HOST_WIDE_INT intval)
{
  create_expand_operand (op, EXPAND_INTEGER, GEN_INT (intval), VOIDmode, false);
}
/* Entry points for expanding instructions through expand_operand arrays;
   all are defined in optabs.c.  NOTE(review): based on their names and
   return types, the maybe_* variants appear to report failure (NULL rtx
   or false) while expand_insn/expand_jump_insn do not -- confirm against
   the optabs.c definitions, which are outside this view.  */
extern bool maybe_legitimize_operands (enum insn_code icode,
				       unsigned int opno, unsigned int nops,
				       struct expand_operand *ops);
extern rtx maybe_gen_insn (enum insn_code icode, unsigned int nops,
			   struct expand_operand *ops);
extern bool maybe_expand_insn (enum insn_code icode, unsigned int nops,
			       struct expand_operand *ops);
extern bool maybe_expand_jump_insn (enum insn_code icode, unsigned int nops,
				    struct expand_operand *ops);
extern void expand_insn (enum insn_code icode, unsigned int nops,
			 struct expand_operand *ops);
extern void expand_jump_insn (enum insn_code icode, unsigned int nops,
			      struct expand_operand *ops);

extern rtx prepare_operand (enum insn_code, rtx, int, enum machine_mode,
			    enum machine_mode, int);