basic-block.h (basic_block_computed_jump_target): Declare.

        * basic-block.h (basic_block_computed_jump_target): Declare.
        * flags.h: (current_function_has_computed_jump): Declare.
        * flow.c: (basic_block_computed_jump_target): Define.
        (flow_analysis): Allocate it.  Set current_function_has_computed_jump
        to 0.
        (find_basic_blocks): Set current_function_has_computed_jump and
        elements of basic_block_computed_jump_target to 1 as appropriate.
        * function.c: (current_function_has_computed_jump): Define.
        * global.c (global_conflicts): Don't allocate pseudos into stack regs
        at the start of a block that is reachable by a computed jump.
        * reg-stack.c (stack_reg_life_analysis): If must restart, do so
        immediately.
        (subst_stack_regs): Undo change from Sep 4 1997.
        (uses_reg_or_mem): Now unused, deleted.
        * stupid.c (stupid_life_analysis): Compute
        current_function_has_computed_jump.
        (stupid_find_reg): Don't allocate stack regs if the function has a
        computed goto.
        * haifa-sched.c (is_cfg_nonregular): Delete code to determine if
        the current function has a computed jump.  Use the global value
        instead.

Co-Authored-By: Jeffrey A Law <law@cygnus.com>

From-SVN: r18860
commit 4d1d804584
parent 56f28b73e7
9 changed files with 93 additions and 88 deletions
gcc/ChangeLog
@@ -1,3 +1,28 @@
+Sat Mar 28 01:06:12 1998  Bernd Schmidt  <crux@pool.informatik.rwth-aachen.de>
+                          Jeffrey A Law  (law@cygnus.com)
+
+        * basic-block.h (basic_block_computed_jump_target): Declare.
+        * flags.h: (current_function_has_computed_jump): Declare.
+        * flow.c: (basic_block_computed_jump_target): Define.
+        (flow_analysis): Allocate it.  Set current_function_has_computed_jump
+        to 0.
+        (find_basic_blocks): Set current_function_has_computed_jump and
+        elements of basic_block_computed_jump_target to 1 as appropriate.
+        * function.c: (current_function_has_computed_jump): Define.
+        * global.c (global_conflicts): Don't allocate pseudos into stack regs
+        at the start of a block that is reachable by a computed jump.
+        * reg-stack.c (stack_reg_life_analysis): If must restart, do so
+        immediately.
+        (subst_stack_regs): Undo change from Sep 4 1997.
+        (uses_reg_or_mem): Now unused, deleted.
+        * stupid.c (stupid_life_analysis): Compute
+        current_function_has_computed_jump.
+        (stupid_find_reg): Don't allocate stack regs if the function has a
+        computed goto.
+        * haifa-sched.c (is_cfg_nonregular): Delete code to determine if
+        the current function has a computed jump.  Use the global value
+        instead.
+
 Sat Mar 28 00:21:37 1998  Jeffrey A Law  (law@cygnus.com)
 
         * i386/freebsd.h (CPP_PREDEFINES): Remove __386BSD__.
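For reference, the construct tracked by this commit is the GNU C computed goto (the "labels as values" extension), where the jump target is a run-time value rather than a fixed label. The sketch below is illustrative only and is not part of the commit; dispatch, table, and the op_* labels are made-up names. A jump of the form "goto *expr" is what computed_jump_p recognizes, and the blocks headed by the address-taken labels are the ones basic_block_computed_jump_target flags.

/* Illustrative GNU C only, not part of this commit: a small dispatcher
   built on the labels-as-values extension.  "&&label" takes a label's
   address and "goto *expr" is the computed jump; any block that starts
   with one of the address-taken labels is a possible target.  */
#include <stdio.h>

static int
dispatch (int op)
{
  static void *table[] = { &&op_add, &&op_neg, &&op_done };
  int acc = 1;

  goto *table[op];              /* computed jump */

op_add:
  acc += 2;
  goto *table[2];
op_neg:
  acc = -acc;
  goto *table[2];
op_done:
  return acc;
}

int
main (void)
{
  printf ("%d\n", dispatch (0));   /* op_add then op_done: prints 3 */
  return 0;
}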
gcc/basic-block.h
@@ -108,6 +108,11 @@ extern rtx *basic_block_head;
 
 extern rtx *basic_block_end;
 
+/* Index by basic block number, determine whether the block can be reached
+   through a computed jump.  */
+
+extern char *basic_block_computed_jump_target;
+
 /* Index by basic block number, get address of regset
    describing the registers live at the start of that block.  */
 
gcc/flags.h
@@ -457,6 +457,13 @@ extern int current_function_has_nonlocal_label;
 
 extern int current_function_has_nonlocal_goto;
 
+/* Nonzero if this function has a computed goto.
+
+   It is computed during find_basic_blocks or during stupid life
+   analysis.  */
+
+extern int current_function_has_computed_jump;
+
 /* Nonzero if GCC must add code to check memory access (used by Checker).  */
 
 extern int flag_check_memory_usage;
gcc/flow.c
@@ -197,6 +197,11 @@ rtx *basic_block_head;
 
 rtx *basic_block_end;
 
+/* Element N indicates whether basic block N can be reached through a
+   computed jump.  */
+
+char *basic_block_computed_jump_target;
+
 /* Element N is a regset describing the registers live
    at the start of basic block N.
    This info lasts until we finish compiling the function.  */
@@ -354,6 +359,7 @@ find_basic_blocks (f, nregs, file, live_reachable_p)
   basic_block_head = (rtx *) xmalloc (n_basic_blocks * sizeof (rtx));
   basic_block_end = (rtx *) xmalloc (n_basic_blocks * sizeof (rtx));
   basic_block_drops_in = (char *) xmalloc (n_basic_blocks);
+  basic_block_computed_jump_target = (char *) oballoc (n_basic_blocks);
   basic_block_loop_depth = (short *) xmalloc (n_basic_blocks * sizeof (short));
   uid_block_number
     = (int *) xmalloc ((max_uid_for_flow + 1) * sizeof (int));
@@ -403,7 +409,9 @@ find_basic_blocks_1 (f, nonlocal_label_list, live_reachable_p)
   block_live_static = block_live;
   bzero (block_live, n_basic_blocks);
   bzero (block_marked, n_basic_blocks);
+  bzero (basic_block_computed_jump_target, n_basic_blocks);
   bzero (active_eh_handler, (max_uid_for_flow + 1) * sizeof (rtx));
+  current_function_has_computed_jump = 0;
 
   /* Initialize with just block 0 reachable and no blocks marked.  */
   if (n_basic_blocks > 0)
@@ -611,16 +619,25 @@ find_basic_blocks_1 (f, nonlocal_label_list, live_reachable_p)
              and forced_labels list.  */
           if (computed_jump_p (insn))
             {
+              current_function_has_computed_jump = 1;
               for (x = label_value_list; x; x = XEXP (x, 1))
-                mark_label_ref (gen_rtx_LABEL_REF (VOIDmode,
-                                                   XEXP (x, 0)),
-                                insn, 0);
+                {
+                  int b = BLOCK_NUM (XEXP (x, 0));
+                  basic_block_computed_jump_target[b] = 1;
+                  mark_label_ref (gen_rtx_LABEL_REF (VOIDmode,
+                                                     XEXP (x, 0)),
+                                  insn, 0);
+                }
 
               for (x = forced_labels; x; x = XEXP (x, 1))
-                mark_label_ref (gen_rtx_LABEL_REF (VOIDmode,
-                                                   XEXP (x, 0)),
-                                insn, 0);
+                {
+                  int b = BLOCK_NUM (XEXP (x, 0));
+                  basic_block_computed_jump_target[b] = 1;
+                  mark_label_ref (gen_rtx_LABEL_REF (VOIDmode,
+                                                     XEXP (x, 0)),
+                                  insn, 0);
+                }
             }
 
           /* If this is a CALL_INSN, then mark it as reaching
              the active EH handler for this CALL_INSN.  If
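In plain terms, the find_basic_blocks_1 hunk above does two things when it sees a computed jump: it sets the per-function flag, and it flags every block whose head label has had its address taken as a possible target. The standalone sketch below restates that marking idea outside GCC; it uses no GCC types, and note_computed_jump, label_blocks, and the array names are invented for illustration.

/* Standalone sketch only, not GCC code: once a computed jump is seen,
   every block that starts with an address-taken label is a possible
   successor, so each one gets flagged and the per-function flag is set.  */
#include <stdio.h>

#define N_BLOCKS 4

static char computed_jump_target[N_BLOCKS]; /* like basic_block_computed_jump_target */
static int has_computed_jump;               /* like current_function_has_computed_jump */

static void
note_computed_jump (const int *label_blocks, int n_labels)
{
  int i;

  has_computed_jump = 1;
  for (i = 0; i < n_labels; i++)
    computed_jump_target[label_blocks[i]] = 1;
}

int
main (void)
{
  /* Suppose blocks 1 and 3 begin with address-taken labels.  */
  static const int label_blocks[] = { 1, 3 };
  int b;

  note_computed_jump (label_blocks, 2);
  for (b = 0; b < N_BLOCKS; b++)
    printf ("block %d: %d\n", b, computed_jump_target[b]);
  return 0;
}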
gcc/function.c
@@ -126,6 +126,13 @@ int current_function_has_nonlocal_label;
 
 int current_function_has_nonlocal_goto;
 
+/* Nonzero if this function has a computed goto.
+
+   It is computed during find_basic_blocks or during stupid life
+   analysis.  */
+
+int current_function_has_computed_jump;
+
 /* Nonzero if function being compiled contains nested functions.  */
 
 int current_function_contains_functions;
gcc/global.c
@@ -671,6 +671,15 @@ global_conflicts ()
              allocno now live, and with each hard reg now live.  */
 
           record_conflicts (block_start_allocnos, ax);
+
+#ifdef STACK_REGS
+          /* Pseudos can't go in stack regs at the start of a basic block
+             that can be reached through a computed goto, since reg-stack
+             can't handle computed gotos.  */
+          if (basic_block_computed_jump_target[b])
+            for (ax = FIRST_STACK_REG; ax <= LAST_STACK_REG; ax++)
+              record_one_conflict (ax);
+#endif
         }
 
       insn = basic_block_head[b];
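Why that conflict recording matters (illustration only, not from the commit; interp, table, and prog below are made up): a value such as acc is live at the entry of every dispatch target, and any of the computed gotos may land there, so reg-stack would have to reconcile the x87 register-stack layout of every possible predecessor. The global.c hunk above instead records a conflict between each pseudo live at the start of such a block and every register in FIRST_STACK_REG..LAST_STACK_REG, keeping those pseudos out of the stack registers; the stupid.c hunk further down applies the same restriction during stupid register allocation.

/* Illustrative only: 'acc' is live at the start of every dispatch target,
   and each target block can be entered from any of the computed gotos.
   With this patch, pseudos live at such block entries conflict with all
   of FIRST_STACK_REG..LAST_STACK_REG, so they are not assigned to x87
   stack registers at those block boundaries.  */
double
interp (const int *ops, double x)
{
  static void *table[] = { &&op_add, &&op_mul, &&op_done };
  double acc = x;                     /* live across every computed jump */
  int pc = 0;

  goto *table[ops[pc]];

op_add:
  acc += 1.0;
  goto *table[ops[++pc]];
op_mul:
  acc *= 2.0;
  goto *table[ops[++pc]];
op_done:
  return acc;                         /* ops must end with index 2 */
}

int
main (void)
{
  static const int prog[] = { 0, 1, 2 };      /* add, mul, done */
  return interp (prog, 1.0) == 4.0 ? 0 : 1;   /* (1 + 1) * 2 == 4 */
}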
gcc/haifa-sched.c
@@ -1098,6 +1098,11 @@ is_cfg_nonregular ()
   if (forced_labels)
     return 1;
 
+  /* If this function has a computed jump, then we consider the cfg
+     not well structured.  */
+  if (current_function_has_computed_jump)
+    return 1;
+
   /* If we have exception handlers, then we consider the cfg not well
      structured.  ?!?  We should be able to handle this now that flow.c
      computes an accurate cfg for EH.  */
@@ -1124,20 +1129,6 @@ is_cfg_nonregular ()
           break;
       }
 
-  /* If this function has a computed jump, then we consider the cfg
-     not well structured.  */
-  for (b = 0; b < n_basic_blocks; b++)
-    {
-      for (insn = basic_block_head[b];; insn = NEXT_INSN (insn))
-        {
-          if (computed_jump_p (insn))
-            return 1;
-
-          if (insn == basic_block_end[b])
-            break;
-        }
-    }
-
   /* All the tests passed.  Consider the cfg well structured.  */
   return 0;
 }
gcc/reg-stack.c
@@ -249,7 +249,6 @@ static void record_reg_life_pat PROTO((rtx, HARD_REG_SET *,
 static void get_asm_operand_lengths    PROTO((rtx, int, int *, int *));
 static void record_reg_life            PROTO((rtx, int, stack));
 static void find_blocks                PROTO((rtx));
-static int uses_reg_or_mem             PROTO((rtx));
 static rtx stack_result                PROTO((tree));
 static void stack_reg_life_analysis    PROTO((rtx, HARD_REG_SET *));
 static void replace_reg                PROTO((rtx *, int));
@@ -1403,38 +1402,6 @@ find_blocks (first)
     }
 }
 
-/* Return 1 if X contain a REG or MEM that is not in the constant pool.  */
-
-static int
-uses_reg_or_mem (x)
-     rtx x;
-{
-  enum rtx_code code = GET_CODE (x);
-  int i, j;
-  char *fmt;
-
-  if (code == REG
-      || (code == MEM
-          && ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
-                && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))))
-    return 1;
-
-  fmt = GET_RTX_FORMAT (code);
-  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
-    {
-      if (fmt[i] == 'e'
-          && uses_reg_or_mem (XEXP (x, i)))
-        return 1;
-
-      if (fmt[i] == 'E')
-        for (j = 0; j < XVECLEN (x, i); j++)
-          if (uses_reg_or_mem (XVECEXP (x, i, j)))
-            return 1;
-    }
-
-  return 0;
-}
-
 /* If current function returns its result in an fp stack register,
    return the REG.  Otherwise, return 0.  */
 
@@ -1583,6 +1550,7 @@ stack_reg_life_analysis (first, stackentry)
 
               block = jump_block;
               must_restart = 1;
+              break;
 
             win:
               ;
@@ -2730,7 +2698,6 @@ subst_stack_regs (insn, regstack)
 {
   register rtx *note_link, note;
   register int i;
-  rtx head, jump, pat, cipat;
   int n_operands;
 
   if (GET_CODE (insn) == CALL_INSN)
@@ -2802,39 +2769,6 @@ subst_stack_regs (insn, regstack)
   if (GET_CODE (insn) == NOTE)
     return;
 
-  /* If we are reached by a computed goto which sets this same stack register,
-     then pop this stack register, but maintain regstack. */
-
-  pat = single_set (insn);
-  if (pat != 0
-      && INSN_UID (insn) <= max_uid
-      && GET_CODE (block_begin[BLOCK_NUM(insn)]) == CODE_LABEL
-      && GET_CODE (pat) == SET && STACK_REG_P (SET_DEST (pat)))
-    for (head = block_begin[BLOCK_NUM(insn)], jump = LABEL_REFS (head);
-         jump != head;
-         jump = LABEL_NEXTREF (jump))
-      {
-        cipat = single_set (CONTAINING_INSN (jump));
-        if (cipat != 0
-            && GET_CODE (cipat) == SET
-            && SET_DEST (cipat) == pc_rtx
-            && uses_reg_or_mem (SET_SRC (cipat))
-            && INSN_UID (CONTAINING_INSN (jump)) <= max_uid)
-          {
-            int from_block = BLOCK_NUM (CONTAINING_INSN (jump));
-            if (TEST_HARD_REG_BIT (block_out_reg_set[from_block],
-                                   REGNO (SET_DEST (pat))))
-              {
-                struct stack_def old;
-                bcopy (regstack->reg, old.reg, sizeof (old.reg));
-                emit_pop_insn (insn, regstack, SET_DEST (pat), emit_insn_before);
-                regstack->top += 1;
-                bcopy (old.reg, regstack->reg, sizeof (old.reg));
-                SET_HARD_REG_BIT (regstack->reg_set, REGNO (SET_DEST (pat)));
-              }
-          }
-      }
-
   /* If there is a REG_UNUSED note on a stack register on this insn,
      the indicated reg must be popped.  The REG_UNUSED note is removed,
      since the form of the newly emitted pop insn references the reg,
gcc/stupid.c
@@ -130,6 +130,8 @@ stupid_life_analysis (f, nregs, file)
   register rtx last, insn;
   int max_uid, max_suid;
 
+  current_function_has_computed_jump = 0;
+
   bzero (regs_ever_live, sizeof regs_ever_live);
 
   regs_live = (char *) alloca (nregs);
@@ -265,6 +267,8 @@ stupid_life_analysis (f, nregs, file)
              be live if it's also used to pass arguments.  */
           stupid_mark_refs (CALL_INSN_FUNCTION_USAGE (insn), insn);
         }
+      if (GET_CODE (insn) == JUMP_INSN && computed_jump_p (insn))
+        current_function_has_computed_jump = 1;
     }
 
   /* Now decide the order in which to allocate the pseudo registers.  */
@@ -394,6 +398,12 @@ stupid_find_reg (call_preserved, class, mode,
   for (ins = born_insn; ins < dead_insn; ins++)
     IOR_HARD_REG_SET (used, after_insn_hard_regs[ins]);
 
+#ifdef STACK_REGS
+  if (current_function_has_computed_jump)
+    for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
+      SET_HARD_REG_BIT (used, i);
+#endif
+
   IOR_COMPL_HARD_REG_SET (used, reg_class_contents[(int) class]);
 
 #ifdef CLASS_CANNOT_CHANGE_SIZE