alias.c, [...]: Fix comment formatting.

* alias.c, basic-block.h, cgraphunit.c, combine.c, domwalk.h, final.c, gengtype.c, genpreds.c, ggc-page.c, insn-notes.def, lambda-code.c, loop-unroll.c, modulo-sched.c, pointer-set.c, pretty-print.c, ra-colorize.c, sbitmap.c, tree-complex.c, tree-data-ref.c, tree-dfa.c, tree-inline.c, tree-into-ssa.c, tree-scalar-evolution.c, tree-ssa-dom.c, tree-ssa-loop-manip.c, tree-ssa-loop-niter.c, tree-ssa-phiopt.c, tree-ssa-pre.c, tree-ssa-threadupdate.c, tree-vectorizer.c, vec.h: Fix comment formatting.

From-SVN: r89453
parent 969def5539
commit 471854f82a
32 changed files with 105 additions and 93 deletions
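Most of the hunks below change only comment whitespace or punctuation, and the whitespace is not preserved in this rendering, so many removed/added line pairs look identical. As an illustrative sketch of the style being enforced (the "before" line is taken from the cgraphunit.c hunk below; the exact trailing whitespace in the "after" line is an assumption, following the usual GNU/GCC convention of a capitalized sentence ending in a period with two spaces before the closing delimiter):

    /* same as above but indexed by DECL_UID */     /* before */
    /* Same as above but indexed by DECL_UID.  */   /* after (whitespace assumed) */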
gcc/ChangeLog
@@ -1,3 +1,15 @@
+2004-10-22 Kazu Hirata <kazu@cs.umass.edu>
+
+	* alias.c, basic-block.h, cgraphunit.c, combine.c, domwalk.h,
+	final.c, gengtype.c, genpreds.c, ggc-page.c, insn-notes.def,
+	lambda-code.c, loop-unroll.c, modulo-sched.c, pointer-set.c,
+	pretty-print.c, ra-colorize.c, sbitmap.c, tree-complex.c,
+	tree-data-ref.c, tree-dfa.c, tree-inline.c, tree-into-ssa.c,
+	tree-scalar-evolution.c, tree-ssa-dom.c,
+	tree-ssa-loop-manip.c, tree-ssa-loop-niter.c,
+	tree-ssa-phiopt.c, tree-ssa-pre.c, tree-ssa-threadupdate.c,
+	tree-vectorizer.c, vec.h: Fix comment formatting.
+
 2004-10-22 Kazu Hirata <kazu@cs.umass.edu>
 
 	* dwarf.h, gthr-dce.h, gthr-single.h, gthr-solaris.h, gthr.h,

alias.c
@@ -482,7 +482,7 @@ get_alias_set (tree t)
 type, then we would believe that other subsets
 of the pointed-to type (such as fields of that
 type) do not conflict with the type pointed to
-by the restricted pointer. */
+by the restricted pointer. */
 DECL_POINTER_ALIAS_SET (decl)
 = pointed_to_alias_set;
 else

basic-block.h
@@ -567,7 +567,7 @@ ei_start_1 (VEC(edge) **ev)
 }

 /* Return an iterator pointing to the last element of an edge
-vector. */
+vector. */
 static inline edge_iterator
 ei_last_1 (VEC(edge) **ev)
 {

cgraphunit.c
@@ -236,8 +236,8 @@ static FILE *cgraph_dump_file;
 static GTY((param1_is(tree), param2_is(tree)))
 splay_tree static_vars_to_consider_by_tree;

-/* FIXME -- PROFILE-RESTRUCTURE: change comment from DECL_UID to var-ann. */
-/* same as above but indexed by DECL_UID */
+/* FIXME -- PROFILE-RESTRUCTURE: change comment from DECL_UID to var-ann. */
+/* Same as above but indexed by DECL_UID. */
 static GTY((param1_is(int), param2_is(tree)))
 splay_tree static_vars_to_consider_by_uid;

@@ -246,10 +246,10 @@ static GTY((param1_is(int), param2_is(tree)))
 uid. */
 static bitmap module_statics_escape;

-/* FIXME -- PROFILE-RESTRUCTURE: change comment from DECL_UID to var-ann. */
+/* FIXME -- PROFILE-RESTRUCTURE: change comment from DECL_UID to var-ann. */
 /* A bit is set for every module static we are considering and is
 indexed by DECL_UID. This is ored into the local info when asm
-code is found that clobbers all memory. */
+code is found that clobbers all memory. */
 static GTY(()) bitmap all_module_statics;

 /* Holds the value of "memory". */
@@ -344,7 +344,7 @@ print_order (const char * note, struct cgraph_node** order, int count)
 fprintf (cgraph_dump_file, "\n");
 }

-/* FIXME -- PROFILE-RESTRUCTURE: Remove this function, it becomes a nop. */
+/* FIXME -- PROFILE-RESTRUCTURE: Remove this function, it becomes a nop. */
 /* Convert IN_DECL bitmap which is indexed by DECL_UID to IN_ANN, a
 bitmap indexed by var_ann (VAR_DECL)->uid. */

@@ -411,7 +411,7 @@ new_static_vars_info(struct cgraph_node* node,


 /* FIXME -- PROFILE-RESTRUCTURE: Remove this function, it becomes a
-nop. */
+nop. */
 /* The bitmaps used to represent the static global variables are
 indexed by DECL_UID however, this is not used inside of functions
 to index the ssa variables. The denser var_ann (VAR_DECL)->uid is
@@ -1218,7 +1218,7 @@ searchc (struct searchc_env* env, struct cgraph_node *v)
 /* Topsort the call graph by caller relation. Put the result in ORDER.

 The REDUCE flag is true if you want the cycles reduced to single
-nodes. Only consider nodes that have the output bit set. */
+nodes. Only consider nodes that have the output bit set. */

 static int
 cgraph_reduced_inorder (struct cgraph_node **order, bool reduce)
@@ -2018,7 +2018,7 @@ cgraph_inline_p (struct cgraph_edge *e, const char **reason)
 /* FIXME this needs to be enhanced. If we are compiling a single
 module this returns true if the variable is a module level static,
 but if we are doing whole program compilation, this could return
-true if TREE_PUBLIC is true. */
+true if TREE_PUBLIC is true. */
 /* Return true if the variable T is the right kind of static variable to
 perform compilation unit scope escape analysis. */

@@ -2045,7 +2045,7 @@ check_rhs_var (struct cgraph_node *fn, tree t)
 lang_hooks.decl_printable_name (x, 2));

 /* FIXME -- PROFILE-RESTRUCTURE: Change the call from
-DECL_UID to get the uid from the var_ann field. */
+DECL_UID to get the uid from the var_ann field. */
 bitmap_set_bit (module_statics_escape, DECL_UID (x));
 }
 }
@@ -2057,7 +2057,7 @@ check_rhs_var (struct cgraph_node *fn, tree t)
 fprintf (cgraph_dump_file, "\nadding rhs:%s",
 lang_hooks.decl_printable_name (t, 2));
 /* FIXME -- PROFILE-RESTRUCTURE: Change the call from
-DECL_UID to get the uid from the var_ann field. */
+DECL_UID to get the uid from the var_ann field. */
 bitmap_set_bit (fn->static_vars_info->local->statics_read_by_decl_uid,
 DECL_UID (t));
 }
@@ -2080,7 +2080,7 @@ check_lhs_var (struct cgraph_node *fn, tree t)
 lang_hooks.decl_printable_name (t, 2));

 /* FIXME -- PROFILE-RESTRUCTURE: Change the call from
-DECL_UID to get the uid from the var_ann field. */
+DECL_UID to get the uid from the var_ann field. */
 bitmap_set_bit (fn->static_vars_info->local->statics_written_by_decl_uid,
 DECL_UID (t));
 }
@@ -2137,7 +2137,7 @@ get_asm_expr_operands (struct cgraph_node * fn, tree stmt)
 for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
 if (TREE_VALUE (link) == memory_identifier)
 {
-/* Abandon all hope, ye who enter here. */
+/* Abandon all hope, ye who enter here. */
 local_static_vars_info_t l = fn->static_vars_info->local;
 bitmap_a_or_b (l->statics_read_by_decl_uid,
 l->statics_read_by_decl_uid,
@@ -2184,7 +2184,7 @@ process_call_for_static_vars(struct cgraph_node * caller, tree call_expr)
 tree callee_t = get_callee_fndecl (call_expr);
 if (callee_t == NULL)
 {
-/* Indirect call. */
+/* Indirect call. */
 caller->local.calls_read_all = true;
 caller->local.calls_write_all = true;
 }
@@ -2225,10 +2225,10 @@ scan_for_static_refs (tree *tp,
 {
 case MODIFY_EXPR:
 {
-/* First look on the lhs and see what variable is stored to */
+/* First look on the lhs and see what variable is stored to. */
 tree rhs = TREE_OPERAND (t, 1);
 check_lhs_var (fn, TREE_OPERAND (t, 0));
-/* Next check the operands on the rhs to see if they are ok. */
+/* Next check the operands on the rhs to see if they are ok. */
 switch (TREE_CODE_CLASS (TREE_CODE (rhs))) {
 case tcc_binary:
 check_rhs_var (fn, TREE_OPERAND (rhs, 0));
@@ -2302,7 +2302,7 @@ cgraph_characterize_statics_local (struct cgraph_node *fn)
 }

 /* Lookup the tree node for the static variable that has UID and
-conver the name to a string for debugging. */
+conver the name to a string for debugging. */
 static const char *
 cgraph_get_static_name_by_uid (int index)
 {
@@ -2397,7 +2397,7 @@ cgraph_characterize_statics (void)
 struct cgraph_varpool_node *vnode;
 tree global;

-/* get rid of the splay trees from the previous compilation unit. */
+/* Get rid of the splay trees from the previous compilation unit. */

 static_vars_to_consider_by_tree =
 splay_tree_new_ggc (splay_tree_compare_pointers);
@@ -2415,7 +2415,7 @@ cgraph_characterize_statics (void)
 all_module_statics = BITMAP_GGC_ALLOC ();
 }

-/* Find all of the global variables that we wish to analyze. */
+/* Find all of the global variables that we wish to analyze. */
 for (vnode = cgraph_varpool_nodes_queue; vnode; vnode = vnode->next_needed)
 {
 global = vnode->decl;
@@ -2426,7 +2426,7 @@ cgraph_characterize_statics (void)
 (splay_tree_key) global,
 (splay_tree_value) global);
 /* FIXME -- PROFILE-RESTRUCTURE: Change the call from
-DECL_UID to get the uid from the var_ann field. */
+DECL_UID to get the uid from the var_ann field. */
 splay_tree_insert (static_vars_to_consider_by_uid,
 DECL_UID (global), (splay_tree_value)global);

@@ -2434,7 +2434,7 @@ cgraph_characterize_statics (void)
 fprintf (cgraph_dump_file, "\nConsidering global:%s",
 lang_hooks.decl_printable_name (global, 2));
 /* FIXME -- PROFILE-RESTRUCTURE: Change the call from
-DECL_UID to get the uid from the var_ann field. */
+DECL_UID to get the uid from the var_ann field. */
 bitmap_set_bit (all_module_statics, DECL_UID (global));
 }
 }
@@ -2537,7 +2537,7 @@ cgraph_characterize_statics (void)
 write_all = node->local.calls_write_all;

 /* If any node in a cycle is calls_read_all or calls_write_all
-they all are. */
+they all are. */
 w = node->next_cycle;
 while (w)
 {
@@ -2639,7 +2639,7 @@ cgraph_characterize_statics (void)
 }
 }

-/* Cleanup. */
+/* Cleanup. */
 for (i = order_pos - 1; i >= 0; i--)
 {
 static_vars_info_t node_info;

combine.c
@@ -9027,7 +9027,7 @@ simplify_shift_const (rtx x, enum rtx_code code,
 signbit', and attempt to change the PLUS to an XOR and move it to
 the outer operation as is done above in the AND/IOR/XOR case
 leg for shift(logical). See details in logical handling above
-for reasoning in doing so. */
+for reasoning in doing so. */
 if (code == LSHIFTRT
 && GET_CODE (XEXP (varop, 1)) == CONST_INT
 && mode_signbit_p (result_mode, XEXP (varop, 1))

domwalk.h
@@ -103,7 +103,7 @@ struct dom_walk_data
 /* From here below are private data. Please do not use this
 information/data outside domwalk.c. */

-/* Stack of available block local structures. */
+/* Stack of available block local structures. */
 varray_type free_block_data;
 };

final.c
@@ -768,7 +768,7 @@ shorten_branches (rtx first ATTRIBUTE_UNUSED)
 /* Compute maximum UID and allocate label_align / uid_shuid. */
 max_uid = get_max_uid ();

-/* Free uid_shuid before reallocating it. */
+/* Free uid_shuid before reallocating it. */
 free (uid_shuid);

 uid_shuid = xmalloc (max_uid * sizeof *uid_shuid);

gengtype.c
@@ -3006,7 +3006,7 @@ main(int ARG_UNUSED (argc), char ** ARG_UNUSED (argv))
 parse_file (all_files[i]);
 #ifndef USE_MAPPED_LOCATION
 /* temporary kludge - gengtype doesn't handle conditionals.
-Manually add source_locus *after* we've processed input.h. */
+Manually add source_locus *after* we've processed input.h. */
 if (i == 0)
 do_typedef ("source_locus", create_pointer (resolve_typedef ("location_t", &pos)), &pos);
 #endif

genpreds.c
@@ -106,7 +106,7 @@ write_tm_preds_h (void)
 (match_test "basereg_operand_1 (op, mode)")))

 The only wart is that there's no way to insist on a { } string in
-an RTL template, so we have to handle "" strings. */
+an RTL template, so we have to handle "" strings. */


 static void
@@ -288,7 +288,7 @@ add_mode_tests (struct pred_data *p)
 if (test0 && test1)
 /* Must put it on the dependent clause, not the
 controlling expression, or we change the meaning of
-the test. */
+the test. */
 pos = &XEXP (subexp, 1);
 else
 pos = &XEXP (subexp, 2);

ggc-page.c
@@ -459,7 +459,7 @@ static struct globals
 allocation routines. The first page is used, the rest go onto the
 free list. This cannot be larger than HOST_BITS_PER_INT for the
 in_use bitmask for page_group. Hosts that need a different value
-can override this by defining GGC_QUIRE_SIZE explicitly. */
+can override this by defining GGC_QUIRE_SIZE explicitly. */
 #ifndef GGC_QUIRE_SIZE
 # ifdef USING_MMAP
 # define GGC_QUIRE_SIZE 256

insn-notes.def
@@ -90,7 +90,7 @@ INSN_NOTE (BASIC_BLOCK)

 /* Record that the current basic block is unlikely to be executed and
 should be moved to the UNLIKELY_EXECUTED_TEXT_SECTION. FIXME: Make
-this a bit on the basic block structure. */
+this a bit on the basic block structure. */
 INSN_NOTE (UNLIKELY_EXECUTED_CODE)

 #undef INSN_NOTE

lambda-code.c
@@ -357,7 +357,7 @@ print_lambda_loopnest (FILE * outfile, lambda_loopnest nest, char start)
 }

 /* Allocate a new lattice structure of DEPTH x DEPTH, with INVARIANTS number
-of invariants. */
+of invariants. */

 static lambda_lattice
 lambda_lattice_new (int depth, int invariants)
@@ -575,7 +575,7 @@ compute_nest_using_fourier_motzkin (int size,
 else if (A[j][i] > 0)
 {
 /* Any linear expression with a coefficient greater than 0
-becomes part of the new upper bound. */
+becomes part of the new upper bound. */
 expression = lambda_linear_expression_new (depth, invariants);
 for (k = 0; k < i; k++)
 LLE_COEFFICIENTS (expression)[k] = -1 * A[j][k];
@@ -2008,7 +2008,7 @@ stmt_uses_phi_result (tree stmt, tree phi_result)
 use_optype uses = STMT_USE_OPS (stmt);

 /* This is conservatively true, because we only want SIMPLE bumpers
-of the form x +- constant for our pass. */
+of the form x +- constant for our pass. */
 if (NUM_USES (uses) != 1)
 return false;
 if (USE_OP (uses, 0) == phi_result)
@@ -2148,7 +2148,7 @@ replace_uses_of_x_with_y (tree stmt, tree x, tree y)
 }
 }

-/* Return TRUE if STMT uses tree OP in it's uses. */
+/* Return TRUE if STMT uses tree OP in it's uses. */

 static bool
 stmt_uses_op (tree stmt, tree op)
@@ -2206,7 +2206,7 @@ can_convert_to_perfect_nest (struct loop *loop,
 }

 /* If the bb of a statement we care about isn't dominated by
-the header of the inner loop, then we are also screwed. */
+the header of the inner loop, then we are also screwed. */
 if (!dominated_by_p (CDI_DOMINATORS,
 bb_for_stmt (stmt),
 loop->inner->header))

loop-unroll.c
@@ -89,7 +89,7 @@ struct iv_to_split
 struct var_to_expand
 {
 rtx insn; /* The insn in that the variable expansion occurs. */
-rtx reg; /* The accumulator which is expanded. */
+rtx reg; /* The accumulator which is expanded. */
 varray_type var_expansions; /* The copies of the accumulator which is expanded. */
 enum rtx_code op; /* The type of the accumulation - addition, subtraction
 or multiplication. */
@@ -1453,7 +1453,7 @@ ve_info_hash (const void *ves)
 }

 /* Return true if IVTS1 and IVTS2 (which are really both of type
-"var_to_expand *") refer to the same instruction. */
+"var_to_expand *") refer to the same instruction. */

 static int
 ve_info_eq (const void *ivts1, const void *ivts2)
@@ -1464,7 +1464,7 @@ ve_info_eq (const void *ivts1, const void *ivts2)
 return i1->insn == i2->insn;
 }

-/* Returns true if REG is referenced in one insn in LOOP. */
+/* Returns true if REG is referenced in one insn in LOOP. */

 bool
 referenced_in_one_insn_in_loop_p (struct loop *loop, rtx reg)

modulo-sched.c
@@ -1936,7 +1936,7 @@ ps_insn_find_column (partial_schedule_ptr ps, ps_insn_ptr ps_i,

 /* Find the first must follow and the last must precede
 and insert the node immediately after the must precede
-but make sure that it there is no must follow after it. */
+but make sure that it there is no must follow after it. */
 for (next_ps_i = ps->rows[row];
 next_ps_i;
 next_ps_i = next_ps_i->next_in_row)

pointer-set.c
@@ -51,7 +51,7 @@ struct pointer_set_t

 We don't need to do anything special for full-width multiplication
 because we're only interested in the least significant word of the
-product, and unsigned arithmetic in C is modulo the word size. */
+product, and unsigned arithmetic in C is modulo the word size. */

 static inline size_t
 hash1 (const void *p, unsigned long max, unsigned long logmax)
@@ -69,7 +69,7 @@ hash1 (const void *p, unsigned long max, unsigned long logmax)
 return ((A * (unsigned long) p) >> shift) & (max - 1);
 }

-/* Allocate an empty pointer set. */
+/* Allocate an empty pointer set. */
 struct pointer_set_t *
 pointer_set_create (void)
 {
@@ -83,7 +83,7 @@ pointer_set_create (void)
 return result;
 }

-/* Reclaims all memory associated with PSET. */
+/* Reclaims all memory associated with PSET. */
 void pointer_set_destroy (struct pointer_set_t *pset)
 {
 XDELETEVEC (pset->slots);
@@ -94,7 +94,7 @@ void pointer_set_destroy (struct pointer_set_t *pset)

 Collisions are resolved by linear probing. More complicated
 collision management schemes are only useful when the load factor
-significantly exceeds 0.5, and we never let that happen. */
+significantly exceeds 0.5, and we never let that happen. */
 int
 pointer_set_contains (struct pointer_set_t *pset, void *p)
 {
@@ -117,7 +117,7 @@ pointer_set_contains (struct pointer_set_t *pset, void *p)

 /* Subroutine of pointer_set_insert. Inserts P into an empty
 element of SLOTS, an array of length N_SLOTS. Returns nonzero
-if P was already present in N_SLOTS. */
+if P was already present in N_SLOTS. */
 static int
 insert_aux (void *p, void **slots, size_t n_slots, size_t log_slots)
 {
@@ -141,7 +141,7 @@ insert_aux (void *p, void **slots, size_t n_slots, size_t log_slots)
 }

 /* Inserts P into PSET if it wasn't already there. Returns nonzero
-if it was already there. P must be nonnull. */
+if it was already there. P must be nonnull. */
 int
 pointer_set_insert (struct pointer_set_t *pset, void *p)
 {
@@ -149,7 +149,7 @@ pointer_set_insert (struct pointer_set_t *pset, void *p)
 return 1;

 /* We've inserted a new element. Expand the table if necessary to keep
-the load factor small. */
+the load factor small. */
 ++pset->n_elements;
 if (pset->n_elements > pset->n_slots / 4)
 {

pretty-print.c
@@ -607,7 +607,7 @@ pp_base_string (pretty_printer *pp, const char *str)
 pp_maybe_wrap_text (pp, str, str + (str ? strlen (str) : 0));
 }

-/* Maybe print out a whitespace if needed. */
+/* Maybe print out a whitespace if needed. */

 void
 pp_base_maybe_space (pretty_printer *pp)

ra-colorize.c
@@ -247,7 +247,7 @@ reset_lists (void)
 }

 #ifdef ENABLE_CHECKING
-/* Sanity check, that we only have free, initial or precolored webs. */
+/* Sanity check, that we only have free, initial or precolored webs. */
 {
 unsigned int i;

@@ -2493,7 +2493,7 @@ ok_class (struct web *target, struct web *source)
 branch on count transformation (i.e. DoLoop) since the target, which
 prefers the CTR, was being coalesced with a source which preferred
 GENERAL_REGS. If only one web has a preferred class with 1 free reg
-then set it as the preferred color of the other web. */
+then set it as the preferred color of the other web. */
 enum reg_class t_class, s_class;
 t_class = reg_preferred_class (target->regno);
 s_class = reg_preferred_class (source->regno);

sbitmap.c
@@ -103,7 +103,7 @@ sbitmap_resize (sbitmap bmap, unsigned int n_elms, int def)
 return bmap;
 }

-/* Re-allocate a simple bitmap of N_ELMS bits. New storage is uninitialized. */
+/* Re-allocate a simple bitmap of N_ELMS bits. New storage is uninitialized. */

 sbitmap
 sbitmap_realloc (sbitmap src, unsigned int n_elms)

tree-complex.c
@@ -254,7 +254,7 @@ expand_complex_div_wide (block_stmt_iterator *bsi, tree inner_type,
 t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, min, ratio);
 div = gimplify_build2 (bsi, PLUS_EXPR, inner_type, t1, max);

-/* Result is now ((ar + ai*ratio)/div) + i((ai - ar*ratio)/div). */
+/* Result is now ((ar + ai*ratio)/div) + i((ai - ar*ratio)/div). */
 t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, ratio);
 t2 = gimplify_build2 (bsi, PLUS_EXPR, inner_type, ar, t1);
 rr = gimplify_build2 (bsi, code, inner_type, t2, div);

tree-data-ref.c
@@ -1457,7 +1457,7 @@ analyze_subscript_affine_affine (tree chrec_a,
 else
 {
 /* FIXME: For the moment, the upper bound of the
-iteration domain for j is not checked. */
+iteration domain for j is not checked. */
 *overlaps_a = chrec_dont_know;
 *overlaps_b = chrec_dont_know;
 *last_conflicts = chrec_dont_know;
@@ -1467,7 +1467,7 @@ analyze_subscript_affine_affine (tree chrec_a,
 else
 {
 /* FIXME: For the moment, the upper bound of the
-iteration domain for i is not checked. */
+iteration domain for i is not checked. */
 *overlaps_a = chrec_dont_know;
 *overlaps_b = chrec_dont_know;
 *last_conflicts = chrec_dont_know;

tree-dfa.c
@@ -189,7 +189,7 @@ compute_immediate_uses (int flags, bool (*calc_for)(tree))
 }


-/* Invalidates dataflow information for a statement STMT. */
+/* Invalidates dataflow information for a statement STMT. */

 void
 free_df_for_stmt (tree stmt)

tree-inline.c
@@ -889,7 +889,7 @@ declare_return_variable (inline_data *id, tree return_slot_addr,
 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list);

 /* Do not have the rest of GCC warn about this variable as it should
-not be visible to the user. */
+not be visible to the user. */
 TREE_NO_WARNING (var) = 1;

 /* Build the use expr. If the return type of the function was

tree-into-ssa.c
@@ -770,7 +770,7 @@ ssa_rewrite_initialize_block (struct dom_walk_data *walk_data, basic_block bb)
 /* SSA Rewriting Step 3. Visit all the successor blocks of BB looking for
 PHI nodes. For every PHI node found, add a new argument containing the
 current reaching definition for the variable and the edge through which
-that definition is reaching the PHI node. */
+that definition is reaching the PHI node. */

 static void
 rewrite_add_phi_arguments (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,

tree-scalar-evolution.c
@@ -1049,7 +1049,7 @@ get_exit_conditions_rec (struct loop *loop,
 }

 /* Select the candidate loop nests for the analysis. This function
-initializes the EXIT_CONDITIONS array. */
+initializes the EXIT_CONDITIONS array. */

 static void
 select_loops_exit_conditions (struct loops *loops,

tree-ssa-dom.c
@@ -201,7 +201,7 @@ struct vrp_element
 static htab_t vrp_data;

 /* An entry in the VRP_DATA hash table. We record the variable and a
-varray of VRP_ELEMENT records associated with that variable. */
+varray of VRP_ELEMENT records associated with that variable. */

 struct vrp_hash_elt
 {
@@ -404,7 +404,7 @@ tree_ssa_dominator_optimize (void)
 /* And finalize the dominator walker. */
 fini_walk_dominator_tree (&walk_data);

-/* Free nonzero_vars. */
+/* Free nonzero_vars. */
 BITMAP_XFREE (nonzero_vars);
 BITMAP_XFREE (need_eh_cleanup);

@@ -694,7 +694,7 @@ thread_across_edge (struct dom_walk_data *walk_data, edge e)
 /* If we have a known destination for the conditional, then
 we can perform this optimization, which saves at least one
 conditional jump each time it applies since we get to
-bypass the conditional at our original destination. */
+bypass the conditional at our original destination. */
 if (dest)
 {
 update_bb_profile_for_threading (e->dest, EDGE_FREQUENCY (e),
@@ -2312,7 +2312,7 @@ eliminate_redundant_computations (struct dom_walk_data *walk_data,
 def = TREE_OPERAND (stmt, 0);

 /* Certain expressions on the RHS can be optimized away, but can not
-themselves be entered into the hash tables. */
+themselves be entered into the hash tables. */
 if (ann->makes_aliased_stores
 || ! def
 || TREE_CODE (def) != SSA_NAME
@@ -3075,7 +3075,7 @@ get_eq_expr_value (tree if_stmt,
 retval.dst = NULL;

 /* If the conditional is a single variable 'X', return 'X = 1' for
-the true arm and 'X = 0' on the false arm. */
+the true arm and 'X = 0' on the false arm. */
 if (TREE_CODE (cond) == SSA_NAME)
 {
 retval.dst = cond;

tree-ssa-loop-manip.c
@@ -846,7 +846,7 @@ tree_ssa_loop_version (struct loops *loops, struct loop * loop,

 /* At this point condition_bb is loop predheader with two successors,
 first_head and second_head. Make sure that loop predheader has only
-one successor. */
+one successor. */
 loop_split_edge_with (loop_preheader_edge (loop), NULL);
 loop_split_edge_with (loop_preheader_edge (nloop), NULL);

tree-ssa-loop-niter.c
@@ -54,7 +54,7 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
 */

 /* Returns true if ARG is either NULL_TREE or constant zero. Unlike
-integer_zerop, it does not care about overflow flags. */
+integer_zerop, it does not care about overflow flags. */

 bool
 zero_p (tree arg)
@@ -69,7 +69,7 @@ zero_p (tree arg)
 }

 /* Returns true if ARG a nonzero constant. Unlike integer_nonzerop, it does
-not care about overflow flags. */
+not care about overflow flags. */

 static bool
 nonzero_p (tree arg)

tree-ssa-phiopt.c
@@ -280,7 +280,7 @@ replace_phi_with_stmt (block_stmt_iterator bsi, basic_block bb,
 conditional replacement. Return true if the replacement is done.
 Otherwise return false.
 BB is the basic block where the replacement is going to be done on. ARG0
-is argument 0 from PHI. Likewise for ARG1. */
+is argument 0 from PHI. Likewise for ARG1. */

 static bool
 conditional_replacement (basic_block bb, tree phi, tree arg0, tree arg1)
@@ -384,7 +384,7 @@ conditional_replacement (basic_block bb, tree phi, tree arg0, tree arg1)
 return false;

 /* If what we get back is not gimple try to create it as gimple by
-using a temporary variable. */
+using a temporary variable. */
 if (is_gimple_cast (cond)
 && !is_gimple_val (TREE_OPERAND (cond, 0)))
 {
@@ -413,7 +413,7 @@ conditional_replacement (basic_block bb, tree phi, tree arg0, tree arg1)
 replacement. Return true if the replacement is done. Otherwise return
 false.
 BB is the basic block where the replacement is going to be done on. ARG0
-is argument 0 from the PHI. Likewise for ARG1. */
+is argument 0 from the PHI. Likewise for ARG1. */

 static bool
 value_replacement (basic_block bb, tree phi, tree arg0, tree arg1)
@@ -425,7 +425,7 @@ value_replacement (basic_block bb, tree phi, tree arg0, tree arg1)
 edge true_edge, false_edge;

 /* If the type says honor signed zeros we cannot do this
-optimization. */
+optimization. */
 if (HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
 return false;

@@ -497,7 +497,7 @@ value_replacement (basic_block bb, tree phi, tree arg0, tree arg1)
 replacement. Return true if the replacement is done. Otherwise return
 false.
 bb is the basic block where the replacement is going to be done on. arg0
-is argument 0 from the phi. Likewise for arg1. */
+is argument 0 from the phi. Likewise for arg1. */
 static bool
 abs_replacement (basic_block bb, tree phi, tree arg0, tree arg1)
 {
@@ -514,7 +514,7 @@ abs_replacement (basic_block bb, tree phi, tree arg0, tree arg1)
 enum tree_code cond_code;

 /* If the type says honor signed zeros we cannot do this
-optimization. */
+optimization. */
 if (HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
 return false;

@@ -584,7 +584,7 @@ abs_replacement (basic_block bb, tree phi, tree arg0, tree arg1)
 && cond_code != LT_EXPR && cond_code != LE_EXPR)
 return false;

-/* Make sure the conditional is arg[01] OP y. */
+/* Make sure the conditional is arg[01] OP y. */
 if (TREE_OPERAND (cond, 0) != rhs)
 return false;

@@ -617,7 +617,7 @@ abs_replacement (basic_block bb, tree phi, tree arg0, tree arg1)
 else
 lhs = result;

-/* Build the modify expression with abs expression. */
+/* Build the modify expression with abs expression. */
 new = build (MODIFY_EXPR, TREE_TYPE (lhs),
 lhs, build1 (ABS_EXPR, TREE_TYPE (lhs), rhs));

tree-ssa-pre.c
@@ -363,7 +363,7 @@ expr_pred_trans_eq (const void *p1, const void *p2)
 return false;

 /* If they are for the same basic block, determine if the
-expressions are equal. */
+expressions are equal. */
 if (expressions_equal_p (ve1->e, ve2->e))
 return true;

@@ -866,7 +866,7 @@ phi_translate (tree expr, value_set_t set, basic_block pred,
 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
 {
 case tcc_reference:
-/* XXX: Until we have PRE of loads working, none will be ANTIC. */
+/* XXX: Until we have PRE of loads working, none will be ANTIC. */
 return NULL;

 case tcc_binary:

tree-ssa-threadupdate.c
@@ -422,7 +422,7 @@ thread_block (basic_block bb)
 It is the caller's responsibility to fix the dominance information
 and rewrite duplicated SSA_NAMEs back into SSA form.

-Returns true if one or more edges were threaded, false otherwise. */
+Returns true if one or more edges were threaded, false otherwise. */

 bool
 thread_through_all_blocks (void)

tree-vectorizer.c
@@ -616,7 +616,7 @@ make_loop_iterate_ntimes (struct loop *loop, tree niters,

 if (exit_edge->flags & EDGE_TRUE_VALUE) /* 'then' edge exits the loop. */
 cond = build2 (GE_EXPR, boolean_type_node, indx_after_incr, niters);
-else /* 'then' edge loops back. */
+else /* 'then' edge loops back. */
 cond = build2 (LT_EXPR, boolean_type_node, indx_after_incr, niters);

 begin_label = build1 (GOTO_EXPR, void_type_node, begin_label);
@@ -800,7 +800,7 @@ verify_loop_for_duplication (struct loop *loop,
 return false;
 }

-/* Only loops with 1 exit. */
+/* Only loops with 1 exit. */
 if (loop->num_exits != 1)
 {
 if (vect_debug_stats (loop) || vect_debug_details (loop))
@@ -809,7 +809,7 @@ verify_loop_for_duplication (struct loop *loop,
 return false;
 }

-/* Only loops with 1 entry. */
+/* Only loops with 1 entry. */
 if (loop->num_entries != 1)
 {
 if (vect_debug_stats (loop) || vect_debug_details (loop))
@@ -1578,7 +1578,7 @@ vect_create_addr_base_for_vector_ref (tree stmt,
 tree dest, new_stmt;

 /* Only the access function of the last index is relevant (i_n in
-a[i_1][i_2]...[i_n]), the others correspond to loop invariants. */
+a[i_1][i_2]...[i_n]), the others correspond to loop invariants. */
 access_fn = DR_ACCESS_FN (dr, 0);
 ok = vect_is_simple_iv_evolution (loop->num, access_fn, &init_oval, &step,
 true);
@@ -2705,7 +2705,7 @@ vect_generate_tmps_on_preheader (loop_vec_info loop_vinfo, tree *ni_name_p,
 ni_name = vect_build_loop_niters (loop_vinfo);

 /* ratio = ni / vf.
-vf is power of 2; then if ratio = = n >> log2 (vf). */
+vf is power of 2; then if ratio = = n >> log2 (vf). */
 vf = LOOP_VINFO_VECT_FACTOR (loop_vinfo);
 ratio = vect_build_symbol_bound (ni_name, vf, loop);

@@ -2760,7 +2760,7 @@ vect_build_symbol_bound (tree n, int vf, struct loop * loop)

 var_name = make_ssa_name (var, NULL_TREE);

-/* vf is power of 2; then n/vf = n >> log2 (vf). */
+/* vf is power of 2; then n/vf = n >> log2 (vf). */

 i = exact_log2 (vf);
 stmt = build2 (MODIFY_EXPR, void_type_node, var_name,
@@ -2834,7 +2834,7 @@ vect_transform_loop_bound (loop_vec_info loop_vinfo, tree niters)
 if (exit_edge->flags & EDGE_TRUE_VALUE) /* 'then' edge exits the loop. */
 cond = build2 (GE_EXPR, boolean_type_node,
 indx_after_incr, new_loop_bound);
-else /* 'then' edge loops back. */
+else /* 'then' edge loops back. */
 cond = build2 (LT_EXPR, boolean_type_node,
 indx_after_incr, new_loop_bound);

@@ -3818,7 +3818,7 @@ vect_analyze_data_ref_dependence (struct data_reference *dra,
 exist any data dependences between them.

 TODO: dependences which distance is greater than the vectorization factor
-can be ignored. */
+can be ignored. */

 static bool
 vect_analyze_data_ref_dependences (loop_vec_info loop_vinfo)
@@ -4067,7 +4067,7 @@ vect_compute_data_ref_alignment (struct data_reference *dr,
 fprintf (dump_file, " scalar_type: ");
 print_generic_expr (dump_file, scalar_type, TDF_DETAILS);
 }
-/* It is not possible to vectorize this data reference. */
+/* It is not possible to vectorize this data reference. */
 return false;
 }
 gcc_assert (TREE_CODE (ref) == ARRAY_REF || TREE_CODE (ref) == INDIRECT_REF);
@@ -4184,7 +4184,7 @@ vect_compute_array_ref_alignment (struct data_reference *dr,
 tree nbits;

 if (TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
-/* The reference is an array without its last index. */
+/* The reference is an array without its last index. */
 next_ref = vect_compute_array_base_alignment (ref, vectype, &dims,
 &misalign);
 else
@@ -4495,7 +4495,7 @@ vect_analyze_data_ref_access (struct data_reference *dr)
 loop_containing_stmt (DR_STMT (dr))->num))
 {
 /* Evolution part is not NULL in this loop (it is neither constant
-nor invariant). */
+nor invariant). */
 if (vect_debug_details (NULL))
 {
 fprintf (dump_file,
@@ -4579,7 +4579,7 @@ vect_analyze_data_ref_accesses (loop_vec_info loop_vinfo)
 MEMREF - a data-ref in STMT, which is an INDIRECT_REF.

 If the data-ref access is vectorizable, return a data_reference structure
-that represents it (DR). Otherwise - return NULL. */
+that represents it (DR). Otherwise - return NULL. */

 static struct data_reference *
 vect_analyze_pointer_ref_access (tree memref, tree stmt, bool is_read)
@@ -4751,7 +4751,7 @@ vect_get_symbl_and_dr (tree memref, tree stmt, bool is_read,
 /* Store the array base in the stmt info.
 For one dimensional array ref a[i], the base is a,
 for multidimensional a[i1][i2]..[iN], the base is
-a[i1][i2]..[iN-1]. */
+a[i1][i2]..[iN-1]. */
 array_base = TREE_OPERAND (memref, 0);
 STMT_VINFO_VECT_DR_BASE (stmt_info) = array_base;

@@ -4809,7 +4809,7 @@ vect_get_symbl_and_dr (tree memref, tree stmt, bool is_read,

 FORNOW: Handle aligned INDIRECT_REFs and ARRAY_REFs
 which base is really an array (not a pointer) and which alignment
-can be forced. This restriction will be relaxed. */
+can be forced. This restriction will be relaxed. */

 static bool
 vect_analyze_data_refs (loop_vec_info loop_vinfo)

vec.h
@@ -323,7 +323,7 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA

 Find the first position in which VAL could be inserted without
 changing the ordering of V. LESSTHAN is a function that returns
-true if the first argument is strictly less than the second. */
+true if the first argument is strictly less than the second. */

 #define VEC_lower_bound(TDEF,V,O,LT) \
 (VEC_OP(TDEF,lower_bound)(V,O,LT VEC_CHECK_INFO))