mirror of https://gcc.gnu.org/git/gcc.git
synced 2024-12-12 05:13:50 +08:00
caller-save.c: Convert to ISO C90.
	* caller-save.c: Convert to ISO C90.
	* calls.c: Likewise.
	* cfg.c: Likewise.
	* cfganal.c: Likewise.
	* cfgbuild.c: Likewise.
	* cfgcleanup.c: Likewise.
	* cfghooks.c: Likewise.
	* cfglayout.c: Likewise.
	* cfglayout.h: Likewise.
	* cfgloop.c: Likewise.
	* cfgloop.h: Likewise.
	* cfgloopanal.c: Likewise.
	* cfgloopmainip.c: Likewise.
	* cfgrtl.c: Likewise.

From-SVN: r68331
This commit is contained in:
parent 84edd42cd4
commit d329e058f2
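The whole patch applies one mechanical transformation: forward declarations wrapped in the PARAMS portability macro and old-style (K&R) function definitions are rewritten as plain ISO C90 prototypes, and empty parameter lists become (void). Below is a minimal, self-contained sketch of that pattern; the scale/scale_c90 functions and the simplified PARAMS definition are illustrative stand-ins, not code taken from GCC.

/* Simplified stand-in for the PARAMS macro (GCC's real one lives in
   ansidecl.h): on a pre-ISO, K&R-only compiler the argument list of a
   prototype had to be dropped, so declarations were wrapped in a macro
   that expands to "()" when prototypes are unavailable.  */
#include <stdio.h>

#ifdef __STDC__
#define PARAMS(args) args
#else
#define PARAMS(args) ()
#endif

/* Old style, as these files looked before the commit.  */
static int scale PARAMS ((int, int));   /* declaration through PARAMS */

static int
scale (x, factor)       /* K&R definition: only names in the parens ... */
     int x;             /* ... parameter types declared separately      */
     int factor;
{
  return x * factor;
}

/* ISO C90 style, as introduced by the commit.  */
static int scale_c90 (int, int);        /* plain prototype */

static int
scale_c90 (int x, int factor)           /* prototype-style definition */
{
  return x * factor;
}

/* Zero-argument functions change from "name ()" to "name (void)",
   e.g. init_caller_save () becomes init_caller_save (void) below.  */

int
main (void)
{
  printf ("%d %d\n", scale (21, 2), scale_c90 (21, 2));
  return 0;
}

The conversion is purely syntactic; no behavior is intended to change, which is why every entry in the ChangeLog below simply reads "Likewise."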
@@ -1,3 +1,20 @@
+2003-06-22  Andreas Jaeger  <aj@suse.de>
+
+	* caller-save.c: Convert to ISO C90.
+	* calls.c: Likewise.
+	* cfg.c: Likewise.
+	* cfganal.c: Likewise.
+	* cfgbuild.c: Likewise.
+	* cfgcleanup.c: Likewise.
+	* cfghooks.c: Likewise.
+	* cfglayout.c: Likewise.
+	* cfglayout.h: Likewise.
+	* cfgloop.c: Likewise.
+	* cfgloop.h: Likewise.
+	* cfgloopanal.c: Likewise.
+	* cfgloopmainip.c: Likewise.
+	* cfgrtl.c: Likewise.
+
 2003-06-22  Richard Earnshaw  <rearnsha@arm.com>
 
 	* arm.h (BIGGEST_ALIGNMENT): Use TARGET_REALLY_IWMMXT for selecting
@@ -1,6 +1,6 @@
 /* Save and restore call-clobbered registers which are live across a call.
    Copyright (C) 1989, 1992, 1994, 1995, 1997, 1998,
-   1999, 2000, 2001, 2002 Free Software Foundation, Inc.
+   1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
 
 This file is part of GCC.
 
@@ -51,13 +51,13 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
    register because it is live we first try to save in multi-register modes.
    If that is not possible the save is done one register at a time. */
 
-static enum machine_mode 
+static enum machine_mode
   regno_save_mode[FIRST_PSEUDO_REGISTER][MAX_MOVE_MAX / MIN_UNITS_PER_WORD + 1];
 
 /* For each hard register, a place on the stack where it can be saved,
    if needed. */
 
-static rtx 
+static rtx
   regno_save_mem[FIRST_PSEUDO_REGISTER][MAX_MOVE_MAX / MIN_UNITS_PER_WORD + 1];
 
 /* We will only make a register eligible for caller-save if it can be
@@ -68,7 +68,7 @@ static rtx
 
 static int
   reg_save_code[FIRST_PSEUDO_REGISTER][MAX_MACHINE_MODE];
-static int 
+static int
   reg_restore_code[FIRST_PSEUDO_REGISTER][MAX_MACHINE_MODE];
 
 /* Set of hard regs currently residing in save area (during insn scan). */
@@ -88,29 +88,28 @@ static HARD_REG_SET referenced_regs;
 static HARD_REG_SET this_insn_sets;
 
 
-static void mark_set_regs PARAMS ((rtx, rtx, void *));
-static void mark_referenced_regs PARAMS ((rtx));
-static int insert_save PARAMS ((struct insn_chain *, int, int,
-				HARD_REG_SET *,
-				enum machine_mode *));
-static int insert_restore PARAMS ((struct insn_chain *, int, int,
-				   int, enum machine_mode *));
-static struct insn_chain *insert_one_insn PARAMS ((struct insn_chain *, int,
-						   int, rtx));
-static void add_stored_regs PARAMS ((rtx, rtx, void *));
+static void mark_set_regs (rtx, rtx, void *);
+static void mark_referenced_regs (rtx);
+static int insert_save (struct insn_chain *, int, int, HARD_REG_SET *,
+			enum machine_mode *);
+static int insert_restore (struct insn_chain *, int, int, int,
+			   enum machine_mode *);
+static struct insn_chain *insert_one_insn (struct insn_chain *, int, int,
+					   rtx);
+static void add_stored_regs (rtx, rtx, void *);
 
 /* Initialize for caller-save.
 
    Look at all the hard registers that are used by a call and for which
    regclass.c has not already excluded from being used across a call.
 
-   Ensure that we can find a mode to save the register and that there is a 
+   Ensure that we can find a mode to save the register and that there is a
    simple insn to save and restore the register. This latter check avoids
    problems that would occur if we tried to save the MQ register of some
    machines directly into memory. */
 
 void
-init_caller_save ()
+init_caller_save (void)
 {
   rtx addr_reg;
   int offset;
@@ -184,7 +183,7 @@ init_caller_save ()
       address = addr_reg;
 
   /* Next we try to form an insn to save and restore the register. We
-     see if such an insn is recognized and meets its constraints. 
+     see if such an insn is recognized and meets its constraints.
 
      To avoid lots of unnecessary RTL allocation, we construct all the RTL
      once, then modify the memory and register operands in-place. */
@@ -256,7 +255,7 @@ init_caller_save ()
 /* Initialize save areas by showing that we haven't allocated any yet. */
 
 void
-init_save_areas ()
+init_save_areas (void)
 {
   int i, j;
 
@ -274,17 +273,17 @@ init_save_areas ()
|
||||
Future work:
|
||||
|
||||
In the fallback case we should iterate backwards across all possible
|
||||
modes for the save, choosing the largest available one instead of
|
||||
modes for the save, choosing the largest available one instead of
|
||||
falling back to the smallest mode immediately. (eg TF -> DF -> SF).
|
||||
|
||||
We do not try to use "move multiple" instructions that exist
|
||||
on some machines (such as the 68k moveml). It could be a win to try
|
||||
on some machines (such as the 68k moveml). It could be a win to try
|
||||
and use them when possible. The hard part is doing it in a way that is
|
||||
machine independent since they might be saving non-consecutive
|
||||
machine independent since they might be saving non-consecutive
|
||||
registers. (imagine caller-saving d0,d1,a0,a1 on the 68k) */
|
||||
|
||||
void
|
||||
setup_save_areas ()
|
||||
setup_save_areas (void)
|
||||
{
|
||||
int i, j, k;
|
||||
unsigned int r;
|
||||
@ -300,7 +299,7 @@ setup_save_areas ()
|
||||
if (reg_renumber[i] >= 0 && REG_N_CALLS_CROSSED (i) > 0)
|
||||
{
|
||||
unsigned int regno = reg_renumber[i];
|
||||
unsigned int endregno
|
||||
unsigned int endregno
|
||||
= regno + HARD_REGNO_NREGS (regno, GET_MODE (regno_reg_rtx[i]));
|
||||
|
||||
for (r = regno; r < endregno; r++)
|
||||
@ -367,7 +366,7 @@ setup_save_areas ()
|
||||
/* Find the places where hard regs are live across calls and save them. */
|
||||
|
||||
void
|
||||
save_call_clobbered_regs ()
|
||||
save_call_clobbered_regs (void)
|
||||
{
|
||||
struct insn_chain *chain, *next;
|
||||
enum machine_mode save_mode [FIRST_PSEUDO_REGISTER];
|
||||
@ -488,7 +487,7 @@ save_call_clobbered_regs ()
|
||||
regno += insert_restore (chain, GET_CODE (insn) == JUMP_INSN,
|
||||
regno, MOVE_MAX_WORDS, save_mode);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* Here from note_stores when an insn stores a value in a register.
|
||||
@ -496,10 +495,8 @@ save_call_clobbered_regs ()
|
||||
been assigned hard regs have had their register number changed already,
|
||||
so we can ignore pseudos. */
|
||||
static void
|
||||
mark_set_regs (reg, setter, data)
|
||||
rtx reg;
|
||||
rtx setter ATTRIBUTE_UNUSED;
|
||||
void *data ATTRIBUTE_UNUSED;
|
||||
mark_set_regs (rtx reg, rtx setter ATTRIBUTE_UNUSED,
|
||||
void *data ATTRIBUTE_UNUSED)
|
||||
{
|
||||
int regno, endregno, i;
|
||||
enum machine_mode mode = GET_MODE (reg);
|
||||
@ -529,10 +526,7 @@ mark_set_regs (reg, setter, data)
|
||||
been assigned hard regs have had their register number changed already,
|
||||
so we can ignore pseudos. */
|
||||
static void
|
||||
add_stored_regs (reg, setter, data)
|
||||
rtx reg;
|
||||
rtx setter;
|
||||
void *data;
|
||||
add_stored_regs (rtx reg, rtx setter, void *data)
|
||||
{
|
||||
int regno, endregno, i;
|
||||
enum machine_mode mode = GET_MODE (reg);
|
||||
@ -562,8 +556,7 @@ add_stored_regs (reg, setter, data)
|
||||
|
||||
/* Walk X and record all referenced registers in REFERENCED_REGS. */
|
||||
static void
|
||||
mark_referenced_regs (x)
|
||||
rtx x;
|
||||
mark_referenced_regs (rtx x)
|
||||
{
|
||||
enum rtx_code code = GET_CODE (x);
|
||||
const char *fmt;
|
||||
@ -639,12 +632,8 @@ mark_referenced_regs (x)
|
||||
Return the extra number of registers saved. */
|
||||
|
||||
static int
|
||||
insert_restore (chain, before_p, regno, maxrestore, save_mode)
|
||||
struct insn_chain *chain;
|
||||
int before_p;
|
||||
int regno;
|
||||
int maxrestore;
|
||||
enum machine_mode *save_mode;
|
||||
insert_restore (struct insn_chain *chain, int before_p, int regno,
|
||||
int maxrestore, enum machine_mode *save_mode)
|
||||
{
|
||||
int i, k;
|
||||
rtx pat = NULL_RTX;
|
||||
@ -695,7 +684,7 @@ insert_restore (chain, before_p, regno, maxrestore, save_mode)
|
||||
&& numregs == (unsigned int) HARD_REGNO_NREGS (regno, save_mode [regno]))
|
||||
mem = adjust_address (mem, save_mode[regno], 0);
|
||||
pat = gen_rtx_SET (VOIDmode,
|
||||
gen_rtx_REG (GET_MODE (mem),
|
||||
gen_rtx_REG (GET_MODE (mem),
|
||||
regno), mem);
|
||||
code = reg_restore_code[regno][GET_MODE (mem)];
|
||||
new = insert_one_insn (chain, before_p, code, pat);
|
||||
@ -715,12 +704,8 @@ insert_restore (chain, before_p, regno, maxrestore, save_mode)
|
||||
/* Like insert_restore above, but save registers instead. */
|
||||
|
||||
static int
|
||||
insert_save (chain, before_p, regno, to_save, save_mode)
|
||||
struct insn_chain *chain;
|
||||
int before_p;
|
||||
int regno;
|
||||
HARD_REG_SET *to_save;
|
||||
enum machine_mode *save_mode;
|
||||
insert_save (struct insn_chain *chain, int before_p, int regno,
|
||||
HARD_REG_SET (*to_save), enum machine_mode *save_mode)
|
||||
{
|
||||
int i;
|
||||
unsigned int k;
|
||||
@ -742,7 +727,7 @@ insert_save (chain, before_p, regno, to_save, save_mode)
|
||||
|
||||
/* Get the pattern to emit and update our status.
|
||||
|
||||
See if we can save several registers with a single instruction.
|
||||
See if we can save several registers with a single instruction.
|
||||
Work backwards to the single register case. */
|
||||
for (i = MOVE_MAX_WORDS; i > 0; i--)
|
||||
{
|
||||
@ -790,15 +775,11 @@ insert_save (chain, before_p, regno, to_save, save_mode)
|
||||
|
||||
/* Emit a new caller-save insn and set the code. */
|
||||
static struct insn_chain *
|
||||
insert_one_insn (chain, before_p, code, pat)
|
||||
struct insn_chain *chain;
|
||||
int before_p;
|
||||
int code;
|
||||
rtx pat;
|
||||
insert_one_insn (struct insn_chain *chain, int before_p, int code, rtx pat)
|
||||
{
|
||||
rtx insn = chain->insn;
|
||||
struct insn_chain *new;
|
||||
|
||||
|
||||
#ifdef HAVE_cc0
|
||||
/* If INSN references CC0, put our insns in front of the insn that sets
|
||||
CC0. This is always safe, since the only way we could be passed an
|
||||
|
295 gcc/calls.c
@ -140,59 +140,41 @@ static sbitmap stored_args_map;
|
||||
argument list for the constructor call. */
|
||||
int stack_arg_under_construction;
|
||||
|
||||
static int calls_function PARAMS ((tree, int));
|
||||
static int calls_function_1 PARAMS ((tree, int));
|
||||
static int calls_function (tree, int);
|
||||
static int calls_function_1 (tree, int);
|
||||
|
||||
static void emit_call_1 PARAMS ((rtx, tree, tree, HOST_WIDE_INT,
|
||||
HOST_WIDE_INT, HOST_WIDE_INT, rtx,
|
||||
rtx, int, rtx, int,
|
||||
CUMULATIVE_ARGS *));
|
||||
static void precompute_register_parameters PARAMS ((int,
|
||||
struct arg_data *,
|
||||
int *));
|
||||
static int store_one_arg PARAMS ((struct arg_data *, rtx, int, int,
|
||||
int));
|
||||
static void store_unaligned_arguments_into_pseudos PARAMS ((struct arg_data *,
|
||||
int));
|
||||
static int finalize_must_preallocate PARAMS ((int, int,
|
||||
struct arg_data *,
|
||||
struct args_size *));
|
||||
static void precompute_arguments PARAMS ((int, int,
|
||||
struct arg_data *));
|
||||
static int compute_argument_block_size PARAMS ((int,
|
||||
struct args_size *,
|
||||
int));
|
||||
static void initialize_argument_information PARAMS ((int,
|
||||
struct arg_data *,
|
||||
struct args_size *,
|
||||
int, tree, tree,
|
||||
CUMULATIVE_ARGS *,
|
||||
int, rtx *, int *,
|
||||
int *, int *));
|
||||
static void compute_argument_addresses PARAMS ((struct arg_data *,
|
||||
rtx, int));
|
||||
static rtx rtx_for_function_call PARAMS ((tree, tree));
|
||||
static void load_register_parameters PARAMS ((struct arg_data *,
|
||||
int, rtx *, int,
|
||||
int, int *));
|
||||
static rtx emit_library_call_value_1 PARAMS ((int, rtx, rtx,
|
||||
enum libcall_type,
|
||||
enum machine_mode,
|
||||
int, va_list));
|
||||
static int special_function_p PARAMS ((tree, int));
|
||||
static rtx try_to_integrate PARAMS ((tree, tree, rtx,
|
||||
int, tree, rtx));
|
||||
static int check_sibcall_argument_overlap_1 PARAMS ((rtx));
|
||||
static int check_sibcall_argument_overlap PARAMS ((rtx, struct arg_data *,
|
||||
int));
|
||||
static void emit_call_1 (rtx, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
|
||||
HOST_WIDE_INT, rtx, rtx, int, rtx, int,
|
||||
CUMULATIVE_ARGS *);
|
||||
static void precompute_register_parameters (int, struct arg_data *, int *);
|
||||
static int store_one_arg (struct arg_data *, rtx, int, int, int);
|
||||
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
|
||||
static int finalize_must_preallocate (int, int, struct arg_data *,
|
||||
struct args_size *);
|
||||
static void precompute_arguments (int, int, struct arg_data *);
|
||||
static int compute_argument_block_size (int, struct args_size *, int);
|
||||
static void initialize_argument_information (int, struct arg_data *,
|
||||
struct args_size *, int, tree,
|
||||
tree, CUMULATIVE_ARGS *, int,
|
||||
rtx *, int *, int *, int *);
|
||||
static void compute_argument_addresses (struct arg_data *, rtx, int);
|
||||
static rtx rtx_for_function_call (tree, tree);
|
||||
static void load_register_parameters (struct arg_data *, int, rtx *, int,
|
||||
int, int *);
|
||||
static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
|
||||
enum machine_mode, int, va_list);
|
||||
static int special_function_p (tree, int);
|
||||
static rtx try_to_integrate (tree, tree, rtx, int, tree, rtx);
|
||||
static int check_sibcall_argument_overlap_1 (rtx);
|
||||
static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);
|
||||
|
||||
static int combine_pending_stack_adjustment_and_call
|
||||
PARAMS ((int, struct args_size *, int));
|
||||
static tree fix_unsafe_tree PARAMS ((tree));
|
||||
static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
|
||||
int);
|
||||
static tree fix_unsafe_tree (tree);
|
||||
|
||||
#ifdef REG_PARM_STACK_SPACE
|
||||
static rtx save_fixed_argument_area PARAMS ((int, rtx, int *, int *));
|
||||
static void restore_fixed_argument_area PARAMS ((rtx, rtx, int, int));
|
||||
static rtx save_fixed_argument_area (int, rtx, int *, int *);
|
||||
static void restore_fixed_argument_area (rtx, rtx, int, int);
|
||||
#endif
|
||||
|
||||
/* If WHICH is 1, return 1 if EXP contains a call to the built-in function
|
||||
@ -206,9 +188,7 @@ static void restore_fixed_argument_area PARAMS ((rtx, rtx, int, int));
|
||||
static tree calls_function_save_exprs;
|
||||
|
||||
static int
|
||||
calls_function (exp, which)
|
||||
tree exp;
|
||||
int which;
|
||||
calls_function (tree exp, int which)
|
||||
{
|
||||
int val;
|
||||
|
||||
@ -221,9 +201,7 @@ calls_function (exp, which)
|
||||
/* Recursive function to do the work of above function. */
|
||||
|
||||
static int
|
||||
calls_function_1 (exp, which)
|
||||
tree exp;
|
||||
int which;
|
||||
calls_function_1 (tree exp, int which)
|
||||
{
|
||||
int i;
|
||||
enum tree_code code = TREE_CODE (exp);
|
||||
@ -323,12 +301,8 @@ calls_function_1 (exp, which)
|
||||
CALL_INSN_FUNCTION_USAGE information. */
|
||||
|
||||
rtx
|
||||
prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen, sibcallp)
|
||||
rtx funexp;
|
||||
tree fndecl;
|
||||
rtx *call_fusage;
|
||||
int reg_parm_seen;
|
||||
int sibcallp;
|
||||
prepare_call_address (rtx funexp, tree fndecl, rtx *call_fusage,
|
||||
int reg_parm_seen, int sibcallp)
|
||||
{
|
||||
rtx static_chain_value = 0;
|
||||
|
||||
@ -409,21 +383,13 @@ prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen, sibcallp)
|
||||
denote registers used by the called function. */
|
||||
|
||||
static void
|
||||
emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
|
||||
struct_value_size, next_arg_reg, valreg, old_inhibit_defer_pop,
|
||||
call_fusage, ecf_flags, args_so_far)
|
||||
rtx funexp;
|
||||
tree fndecl ATTRIBUTE_UNUSED;
|
||||
tree funtype ATTRIBUTE_UNUSED;
|
||||
HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED;
|
||||
HOST_WIDE_INT rounded_stack_size;
|
||||
HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED;
|
||||
rtx next_arg_reg ATTRIBUTE_UNUSED;
|
||||
rtx valreg;
|
||||
int old_inhibit_defer_pop;
|
||||
rtx call_fusage;
|
||||
int ecf_flags;
|
||||
CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED;
|
||||
emit_call_1 (rtx funexp, tree fndecl, tree funtype,
|
||||
HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
|
||||
HOST_WIDE_INT rounded_stack_size,
|
||||
HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
|
||||
rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
|
||||
int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
|
||||
CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
|
||||
{
|
||||
rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
|
||||
rtx call_insn;
|
||||
@ -437,7 +403,7 @@ emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
|
||||
#ifdef CALL_POPS_ARGS
|
||||
n_popped += CALL_POPS_ARGS (* args_so_far);
|
||||
#endif
|
||||
|
||||
|
||||
/* Ensure address is valid. SYMBOL_REF is already valid, so no need,
|
||||
and we don't want to load it into a register as an optimization,
|
||||
because prepare_call_address already did it if it should be done. */
|
||||
@ -643,9 +609,7 @@ emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
|
||||
space from the stack such as alloca. */
|
||||
|
||||
static int
|
||||
special_function_p (fndecl, flags)
|
||||
tree fndecl;
|
||||
int flags;
|
||||
special_function_p (tree fndecl, int flags)
|
||||
{
|
||||
if (! (flags & ECF_MALLOC)
|
||||
&& fndecl && DECL_NAME (fndecl)
|
||||
@ -725,16 +689,14 @@ special_function_p (fndecl, flags)
|
||||
/* Return nonzero when tree represent call to longjmp. */
|
||||
|
||||
int
|
||||
setjmp_call_p (fndecl)
|
||||
tree fndecl;
|
||||
setjmp_call_p (tree fndecl)
|
||||
{
|
||||
return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
|
||||
}
|
||||
|
||||
/* Return true when exp contains alloca call. */
|
||||
bool
|
||||
alloca_call_p (exp)
|
||||
tree exp;
|
||||
alloca_call_p (tree exp)
|
||||
{
|
||||
if (TREE_CODE (exp) == CALL_EXPR
|
||||
&& TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
|
||||
@ -749,8 +711,7 @@ alloca_call_p (exp)
|
||||
/* Detect flags (function attributes) from the function decl or type node. */
|
||||
|
||||
int
|
||||
flags_from_decl_or_type (exp)
|
||||
tree exp;
|
||||
flags_from_decl_or_type (tree exp)
|
||||
{
|
||||
int flags = 0;
|
||||
tree type = exp;
|
||||
@ -805,10 +766,7 @@ flags_from_decl_or_type (exp)
|
||||
Set REG_PARM_SEEN if we encounter a register parameter. */
|
||||
|
||||
static void
|
||||
precompute_register_parameters (num_actuals, args, reg_parm_seen)
|
||||
int num_actuals;
|
||||
struct arg_data *args;
|
||||
int *reg_parm_seen;
|
||||
precompute_register_parameters (int num_actuals, struct arg_data *args, int *reg_parm_seen)
|
||||
{
|
||||
int i;
|
||||
|
||||
@ -873,12 +831,7 @@ precompute_register_parameters (num_actuals, args, reg_parm_seen)
|
||||
parameters, we must save and restore it. */
|
||||
|
||||
static rtx
|
||||
save_fixed_argument_area (reg_parm_stack_space, argblock,
|
||||
low_to_save, high_to_save)
|
||||
int reg_parm_stack_space;
|
||||
rtx argblock;
|
||||
int *low_to_save;
|
||||
int *high_to_save;
|
||||
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
|
||||
{
|
||||
int low;
|
||||
int high;
|
||||
@ -945,11 +898,7 @@ save_fixed_argument_area (reg_parm_stack_space, argblock,
|
||||
}
|
||||
|
||||
static void
|
||||
restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
|
||||
rtx save_area;
|
||||
rtx argblock;
|
||||
int high_to_save;
|
||||
int low_to_save;
|
||||
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
|
||||
{
|
||||
enum machine_mode save_mode = GET_MODE (save_area);
|
||||
int delta;
|
||||
@ -984,9 +933,7 @@ restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
|
||||
the aligned_regs array if it is nonzero. */
|
||||
|
||||
static void
|
||||
store_unaligned_arguments_into_pseudos (args, num_actuals)
|
||||
struct arg_data *args;
|
||||
int num_actuals;
|
||||
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
|
||||
{
|
||||
int i, j;
|
||||
|
||||
@ -1067,23 +1014,15 @@ store_unaligned_arguments_into_pseudos (args, num_actuals)
|
||||
flags which may may be modified by this routine. */
|
||||
|
||||
static void
|
||||
initialize_argument_information (num_actuals, args, args_size, n_named_args,
|
||||
actparms, fndecl, args_so_far,
|
||||
reg_parm_stack_space, old_stack_level,
|
||||
old_pending_adj, must_preallocate,
|
||||
ecf_flags)
|
||||
int num_actuals ATTRIBUTE_UNUSED;
|
||||
struct arg_data *args;
|
||||
struct args_size *args_size;
|
||||
int n_named_args ATTRIBUTE_UNUSED;
|
||||
tree actparms;
|
||||
tree fndecl;
|
||||
CUMULATIVE_ARGS *args_so_far;
|
||||
int reg_parm_stack_space;
|
||||
rtx *old_stack_level;
|
||||
int *old_pending_adj;
|
||||
int *must_preallocate;
|
||||
int *ecf_flags;
|
||||
initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
|
||||
struct arg_data *args,
|
||||
struct args_size *args_size,
|
||||
int n_named_args ATTRIBUTE_UNUSED,
|
||||
tree actparms, tree fndecl,
|
||||
CUMULATIVE_ARGS *args_so_far,
|
||||
int reg_parm_stack_space,
|
||||
rtx *old_stack_level, int *old_pending_adj,
|
||||
int *must_preallocate, int *ecf_flags)
|
||||
{
|
||||
/* 1 if scanning parms front to back, -1 if scanning back to front. */
|
||||
int inc;
|
||||
@ -1324,11 +1263,9 @@ initialize_argument_information (num_actuals, args, args_size, n_named_args,
|
||||
for arguments passed in registers. */
|
||||
|
||||
static int
|
||||
compute_argument_block_size (reg_parm_stack_space, args_size,
|
||||
preferred_stack_boundary)
|
||||
int reg_parm_stack_space;
|
||||
struct args_size *args_size;
|
||||
int preferred_stack_boundary ATTRIBUTE_UNUSED;
|
||||
compute_argument_block_size (int reg_parm_stack_space,
|
||||
struct args_size *args_size,
|
||||
int preferred_stack_boundary ATTRIBUTE_UNUSED)
|
||||
{
|
||||
int unadjusted_args_size = args_size->constant;
|
||||
|
||||
@ -1411,10 +1348,7 @@ compute_argument_block_size (reg_parm_stack_space, args_size,
|
||||
precomputed argument. */
|
||||
|
||||
static void
|
||||
precompute_arguments (flags, num_actuals, args)
|
||||
int flags;
|
||||
int num_actuals;
|
||||
struct arg_data *args;
|
||||
precompute_arguments (int flags, int num_actuals, struct arg_data *args)
|
||||
{
|
||||
int i;
|
||||
|
||||
@ -1482,11 +1416,7 @@ precompute_arguments (flags, num_actuals, args)
|
||||
compute and return the final value for MUST_PREALLOCATE. */
|
||||
|
||||
static int
|
||||
finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
|
||||
int must_preallocate;
|
||||
int num_actuals;
|
||||
struct arg_data *args;
|
||||
struct args_size *args_size;
|
||||
finalize_must_preallocate (int must_preallocate, int num_actuals, struct arg_data *args, struct args_size *args_size)
|
||||
{
|
||||
/* See if we have or want to preallocate stack space.
|
||||
|
||||
@ -1545,10 +1475,7 @@ finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
|
||||
ARGBLOCK is an rtx for the address of the outgoing arguments. */
|
||||
|
||||
static void
|
||||
compute_argument_addresses (args, argblock, num_actuals)
|
||||
struct arg_data *args;
|
||||
rtx argblock;
|
||||
int num_actuals;
|
||||
compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
|
||||
{
|
||||
if (argblock)
|
||||
{
|
||||
@ -1609,9 +1536,7 @@ compute_argument_addresses (args, argblock, num_actuals)
|
||||
ADDR is the operand 0 of CALL_EXPR for this call. */
|
||||
|
||||
static rtx
|
||||
rtx_for_function_call (fndecl, addr)
|
||||
tree fndecl;
|
||||
tree addr;
|
||||
rtx_for_function_call (tree fndecl, tree addr)
|
||||
{
|
||||
rtx funexp;
|
||||
|
||||
@ -1645,20 +1570,15 @@ rtx_for_function_call (fndecl, addr)
|
||||
expressions were already evaluated.
|
||||
|
||||
Mark all register-parms as living through the call, putting these USE
|
||||
insns in the CALL_INSN_FUNCTION_USAGE field.
|
||||
|
||||
insns in the CALL_INSN_FUNCTION_USAGE field.
|
||||
|
||||
When IS_SIBCALL, perform the check_sibcall_overlap_argument_overlap
|
||||
checking, setting *SIBCALL_FAILURE if appropriate. */
|
||||
|
||||
static void
|
||||
load_register_parameters (args, num_actuals, call_fusage, flags,
|
||||
is_sibcall, sibcall_failure)
|
||||
struct arg_data *args;
|
||||
int num_actuals;
|
||||
rtx *call_fusage;
|
||||
int flags;
|
||||
int is_sibcall;
|
||||
int *sibcall_failure;
|
||||
load_register_parameters (struct arg_data *args, int num_actuals,
|
||||
rtx *call_fusage, int flags, int is_sibcall,
|
||||
int *sibcall_failure)
|
||||
{
|
||||
int i, j;
|
||||
|
||||
@ -1736,13 +1656,8 @@ load_register_parameters (args, num_actuals, call_fusage, flags,
|
||||
about the parameters. */
|
||||
|
||||
static rtx
|
||||
try_to_integrate (fndecl, actparms, target, ignore, type, structure_value_addr)
|
||||
tree fndecl;
|
||||
tree actparms;
|
||||
rtx target;
|
||||
int ignore;
|
||||
tree type;
|
||||
rtx structure_value_addr;
|
||||
try_to_integrate (tree fndecl, tree actparms, rtx target, int ignore,
|
||||
tree type, rtx structure_value_addr)
|
||||
{
|
||||
rtx temp;
|
||||
rtx before_call;
|
||||
@ -1864,12 +1779,9 @@ try_to_integrate (fndecl, actparms, target, ignore, type, structure_value_addr)
|
||||
be popped after the call. Returns the adjustment. */
|
||||
|
||||
static int
|
||||
combine_pending_stack_adjustment_and_call (unadjusted_args_size,
|
||||
args_size,
|
||||
preferred_unit_stack_boundary)
|
||||
int unadjusted_args_size;
|
||||
struct args_size *args_size;
|
||||
int preferred_unit_stack_boundary;
|
||||
combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
|
||||
struct args_size *args_size,
|
||||
int preferred_unit_stack_boundary)
|
||||
{
|
||||
/* The number of bytes to pop so that the stack will be
|
||||
under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
|
||||
@ -1920,8 +1832,7 @@ combine_pending_stack_adjustment_and_call (unadjusted_args_size,
|
||||
zero otherwise. */
|
||||
|
||||
static int
|
||||
check_sibcall_argument_overlap_1 (x)
|
||||
rtx x;
|
||||
check_sibcall_argument_overlap_1 (rtx x)
|
||||
{
|
||||
RTX_CODE code;
|
||||
int i, j;
|
||||
@ -1984,10 +1895,7 @@ check_sibcall_argument_overlap_1 (x)
|
||||
slots, zero otherwise. */
|
||||
|
||||
static int
|
||||
check_sibcall_argument_overlap (insn, arg, mark_stored_args_map)
|
||||
rtx insn;
|
||||
struct arg_data *arg;
|
||||
int mark_stored_args_map;
|
||||
check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
|
||||
{
|
||||
int low, high;
|
||||
|
||||
@ -2016,8 +1924,7 @@ check_sibcall_argument_overlap (insn, arg, mark_stored_args_map)
|
||||
}
|
||||
|
||||
static tree
|
||||
fix_unsafe_tree (t)
|
||||
tree t;
|
||||
fix_unsafe_tree (tree t)
|
||||
{
|
||||
switch (unsafe_for_reeval (t))
|
||||
{
|
||||
@ -2051,10 +1958,7 @@ fix_unsafe_tree (t)
|
||||
If IGNORE is nonzero, then we ignore the value of the function call. */
|
||||
|
||||
rtx
|
||||
expand_call (exp, target, ignore)
|
||||
tree exp;
|
||||
rtx target;
|
||||
int ignore;
|
||||
expand_call (tree exp, rtx target, int ignore)
|
||||
{
|
||||
/* Nonzero if we are currently expanding a call. */
|
||||
static int currently_expanding_call = 0;
|
||||
@ -2931,7 +2835,7 @@ expand_call (exp, target, ignore)
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
compute_argument_addresses (args, argblock, num_actuals);
|
||||
|
||||
/* If we push args individually in reverse order, perform stack alignment
|
||||
@ -3385,13 +3289,13 @@ expand_call (exp, target, ignore)
|
||||
by at least the delay slot scheduling pass. We do this now instead of
|
||||
adding to call_fusage before the call to emit_call_1 because TARGET
|
||||
may be modified in the meantime. */
|
||||
if (structure_value_addr != 0 && target != 0
|
||||
if (structure_value_addr != 0 && target != 0
|
||||
&& GET_CODE (target) == MEM && RTX_UNCHANGING_P (target))
|
||||
add_function_usage_to
|
||||
(last_call_insn (),
|
||||
gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_CLOBBER (VOIDmode, target),
|
||||
NULL_RTX));
|
||||
|
||||
|
||||
insns = get_insns ();
|
||||
end_sequence ();
|
||||
|
||||
@ -3572,14 +3476,9 @@ split_complex_types (tree types)
|
||||
parameters are documented in the emit_library_call function below. */
|
||||
|
||||
static rtx
|
||||
emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
|
||||
int retval;
|
||||
rtx orgfun;
|
||||
rtx value;
|
||||
enum libcall_type fn_type;
|
||||
enum machine_mode outmode;
|
||||
int nargs;
|
||||
va_list p;
|
||||
emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
|
||||
enum libcall_type fn_type,
|
||||
enum machine_mode outmode, int nargs, va_list p)
|
||||
{
|
||||
/* Total size in bytes of all the stack-parms scanned so far. */
|
||||
struct args_size args_size;
|
||||
@ -3799,7 +3698,7 @@ emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
|
||||
{
|
||||
rtx slot;
|
||||
int must_copy = 1
|
||||
#ifdef FUNCTION_ARG_CALLEE_COPIES
|
||||
#ifdef FUNCTION_ARG_CALLEE_COPIES
|
||||
&& ! FUNCTION_ARG_CALLEE_COPIES (args_so_far, mode,
|
||||
NULL_TREE, 1)
|
||||
#endif
|
||||
@ -4298,7 +4197,7 @@ emit_library_call (rtx orgfun, enum libcall_type fn_type,
|
||||
enum machine_mode outmode, int nargs, ...)
|
||||
{
|
||||
va_list p;
|
||||
|
||||
|
||||
va_start (p, nargs);
|
||||
emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
|
||||
va_end (p);
|
||||
@ -4319,7 +4218,7 @@ emit_library_call_value (rtx orgfun, rtx value,
|
||||
{
|
||||
rtx result;
|
||||
va_list p;
|
||||
|
||||
|
||||
va_start (p, nargs);
|
||||
result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
|
||||
nargs, p);
|
||||
@ -4348,12 +4247,8 @@ emit_library_call_value (rtx orgfun, rtx value,
|
||||
zero otherwise. */
|
||||
|
||||
static int
|
||||
store_one_arg (arg, argblock, flags, variable_size, reg_parm_stack_space)
|
||||
struct arg_data *arg;
|
||||
rtx argblock;
|
||||
int flags;
|
||||
int variable_size ATTRIBUTE_UNUSED;
|
||||
int reg_parm_stack_space;
|
||||
store_one_arg (struct arg_data *arg, rtx argblock, int flags,
|
||||
int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
|
||||
{
|
||||
tree pval = arg->tree_value;
|
||||
rtx reg = 0;
|
||||
@ -4533,7 +4428,7 @@ store_one_arg (arg, argblock, flags, variable_size, reg_parm_stack_space)
|
||||
|
||||
/* This isn't already where we want it on the stack, so put it there.
|
||||
This can either be done with push or copy insns. */
|
||||
emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
|
||||
emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
|
||||
PARM_BOUNDARY, partial, reg, used - size, argblock,
|
||||
ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
|
||||
ARGS_SIZE_RTX (arg->locate.alignment_pad));
|
||||
@ -4682,9 +4577,7 @@ store_one_arg (arg, argblock, flags, variable_size, reg_parm_stack_space)
|
||||
For a little-endian machine, the reverse is true. */
|
||||
|
||||
bool
|
||||
default_must_pass_in_stack (mode, type)
|
||||
enum machine_mode mode;
|
||||
tree type;
|
||||
default_must_pass_in_stack (enum machine_mode mode, tree type)
|
||||
{
|
||||
if (!type)
|
||||
return false;
|
||||
|
114 gcc/cfg.c
@ -1,6 +1,6 @@
|
||||
/* Control flow graph manipulation code for GNU compiler.
|
||||
Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
|
||||
1999, 2000, 2001, 2002 Free Software Foundation, Inc.
|
||||
1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
|
||||
|
||||
This file is part of GCC.
|
||||
|
||||
@ -138,13 +138,13 @@ struct basic_block_def entry_exit_blocks[2]
|
||||
}
|
||||
};
|
||||
|
||||
void debug_flow_info PARAMS ((void));
|
||||
static void free_edge PARAMS ((edge));
|
||||
void debug_flow_info (void);
|
||||
static void free_edge (edge);
|
||||
|
||||
/* Called once at initialization time. */
|
||||
|
||||
void
|
||||
init_flow ()
|
||||
init_flow (void)
|
||||
{
|
||||
static int initialized;
|
||||
|
||||
@ -163,7 +163,7 @@ init_flow ()
|
||||
obstack_free (&flow_obstack, flow_firstobj);
|
||||
flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);
|
||||
}
|
||||
bb_pool = create_alloc_pool ("Basic block pool",
|
||||
bb_pool = create_alloc_pool ("Basic block pool",
|
||||
sizeof (struct basic_block_def), 100);
|
||||
edge_pool = create_alloc_pool ("Edge pool",
|
||||
sizeof (struct edge_def), 100);
|
||||
@ -173,8 +173,7 @@ init_flow ()
|
||||
without actually unlinking it from the pred/succ lists. */
|
||||
|
||||
static void
|
||||
free_edge (e)
|
||||
edge e;
|
||||
free_edge (edge e)
|
||||
{
|
||||
n_edges--;
|
||||
pool_free (edge_pool, e);
|
||||
@ -183,7 +182,7 @@ free_edge (e)
|
||||
/* Free the memory associated with the edge structures. */
|
||||
|
||||
void
|
||||
clear_edges ()
|
||||
clear_edges (void)
|
||||
{
|
||||
basic_block bb;
|
||||
edge e;
|
||||
@ -223,7 +222,7 @@ clear_edges ()
|
||||
/* Allocate memory for basic_block. */
|
||||
|
||||
basic_block
|
||||
alloc_block ()
|
||||
alloc_block (void)
|
||||
{
|
||||
basic_block bb;
|
||||
bb = pool_alloc (bb_pool);
|
||||
@ -233,8 +232,7 @@ alloc_block ()
|
||||
|
||||
/* Link block B to chain after AFTER. */
|
||||
void
|
||||
link_block (b, after)
|
||||
basic_block b, after;
|
||||
link_block (basic_block b, basic_block after)
|
||||
{
|
||||
b->next_bb = after->next_bb;
|
||||
b->prev_bb = after;
|
||||
@ -244,8 +242,7 @@ link_block (b, after)
|
||||
|
||||
/* Unlink block B from chain. */
|
||||
void
|
||||
unlink_block (b)
|
||||
basic_block b;
|
||||
unlink_block (basic_block b)
|
||||
{
|
||||
b->next_bb->prev_bb = b->prev_bb;
|
||||
b->prev_bb->next_bb = b->next_bb;
|
||||
@ -253,11 +250,11 @@ unlink_block (b)
|
||||
|
||||
/* Sequentially order blocks and compact the arrays. */
|
||||
void
|
||||
compact_blocks ()
|
||||
compact_blocks (void)
|
||||
{
|
||||
int i;
|
||||
basic_block bb;
|
||||
|
||||
|
||||
i = 0;
|
||||
FOR_EACH_BB (bb)
|
||||
{
|
||||
@ -275,8 +272,7 @@ compact_blocks ()
|
||||
/* Remove block B from the basic block array. */
|
||||
|
||||
void
|
||||
expunge_block (b)
|
||||
basic_block b;
|
||||
expunge_block (basic_block b)
|
||||
{
|
||||
unlink_block (b);
|
||||
BASIC_BLOCK (b->index) = NULL;
|
||||
@ -289,9 +285,7 @@ expunge_block (b)
|
||||
possibly already exist. */
|
||||
|
||||
edge
|
||||
unchecked_make_edge (src, dst, flags)
|
||||
basic_block src, dst;
|
||||
int flags;
|
||||
unchecked_make_edge (basic_block src, basic_block dst, int flags)
|
||||
{
|
||||
edge e;
|
||||
e = pool_alloc (edge_pool);
|
||||
@ -314,10 +308,7 @@ unchecked_make_edge (src, dst, flags)
|
||||
edge cache CACHE. Return the new edge, NULL if already exist. */
|
||||
|
||||
edge
|
||||
cached_make_edge (edge_cache, src, dst, flags)
|
||||
sbitmap *edge_cache;
|
||||
basic_block src, dst;
|
||||
int flags;
|
||||
cached_make_edge (sbitmap *edge_cache, basic_block src, basic_block dst, int flags)
|
||||
{
|
||||
int use_edge_cache;
|
||||
edge e;
|
||||
@ -349,7 +340,7 @@ cached_make_edge (edge_cache, src, dst, flags)
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
|
||||
e = unchecked_make_edge (src, dst, flags);
|
||||
|
||||
if (use_edge_cache)
|
||||
@ -362,9 +353,7 @@ cached_make_edge (edge_cache, src, dst, flags)
|
||||
created edge or NULL if already exist. */
|
||||
|
||||
edge
|
||||
make_edge (src, dest, flags)
|
||||
basic_block src, dest;
|
||||
int flags;
|
||||
make_edge (basic_block src, basic_block dest, int flags)
|
||||
{
|
||||
return cached_make_edge (NULL, src, dest, flags);
|
||||
}
|
||||
@ -373,9 +362,7 @@ make_edge (src, dest, flags)
|
||||
that it is the single edge leaving SRC. */
|
||||
|
||||
edge
|
||||
make_single_succ_edge (src, dest, flags)
|
||||
basic_block src, dest;
|
||||
int flags;
|
||||
make_single_succ_edge (basic_block src, basic_block dest, int flags)
|
||||
{
|
||||
edge e = make_edge (src, dest, flags);
|
||||
|
||||
@ -387,8 +374,7 @@ make_single_succ_edge (src, dest, flags)
|
||||
/* This function will remove an edge from the flow graph. */
|
||||
|
||||
void
|
||||
remove_edge (e)
|
||||
edge e;
|
||||
remove_edge (edge e)
|
||||
{
|
||||
edge last_pred = NULL;
|
||||
edge last_succ = NULL;
|
||||
@ -423,9 +409,7 @@ remove_edge (e)
|
||||
/* Redirect an edge's successor from one block to another. */
|
||||
|
||||
void
|
||||
redirect_edge_succ (e, new_succ)
|
||||
edge e;
|
||||
basic_block new_succ;
|
||||
redirect_edge_succ (edge e, basic_block new_succ)
|
||||
{
|
||||
edge *pe;
|
||||
|
||||
@ -443,9 +427,7 @@ redirect_edge_succ (e, new_succ)
|
||||
/* Like previous but avoid possible duplicate edge. */
|
||||
|
||||
edge
|
||||
redirect_edge_succ_nodup (e, new_succ)
|
||||
edge e;
|
||||
basic_block new_succ;
|
||||
redirect_edge_succ_nodup (edge e, basic_block new_succ)
|
||||
{
|
||||
edge s;
|
||||
|
||||
@ -473,9 +455,7 @@ redirect_edge_succ_nodup (e, new_succ)
|
||||
/* Redirect an edge's predecessor from one block to another. */
|
||||
|
||||
void
|
||||
redirect_edge_pred (e, new_pred)
|
||||
edge e;
|
||||
basic_block new_pred;
|
||||
redirect_edge_pred (edge e, basic_block new_pred)
|
||||
{
|
||||
edge *pe;
|
||||
|
||||
@ -492,7 +472,7 @@ redirect_edge_pred (e, new_pred)
|
||||
}
|
||||
|
||||
void
|
||||
clear_bb_flags ()
|
||||
clear_bb_flags (void)
|
||||
{
|
||||
basic_block bb;
|
||||
|
||||
@ -501,8 +481,7 @@ clear_bb_flags ()
|
||||
}
|
||||
|
||||
void
|
||||
dump_flow_info (file)
|
||||
FILE *file;
|
||||
dump_flow_info (FILE *file)
|
||||
{
|
||||
int i;
|
||||
int max_regno = max_reg_num ();
|
||||
@ -625,16 +604,13 @@ dump_flow_info (file)
|
||||
}
|
||||
|
||||
void
|
||||
debug_flow_info ()
|
||||
debug_flow_info (void)
|
||||
{
|
||||
dump_flow_info (stderr);
|
||||
}
|
||||
|
||||
void
|
||||
dump_edge_info (file, e, do_succ)
|
||||
FILE *file;
|
||||
edge e;
|
||||
int do_succ;
|
||||
dump_edge_info (FILE *file, edge e, int do_succ)
|
||||
{
|
||||
basic_block side = (do_succ ? e->dest : e->src);
|
||||
|
||||
@ -693,9 +669,7 @@ static void *first_edge_aux_obj = 0;
|
||||
be first initialized by alloc_aux_for_blocks. */
|
||||
|
||||
inline void
|
||||
alloc_aux_for_block (bb, size)
|
||||
basic_block bb;
|
||||
int size;
|
||||
alloc_aux_for_block (basic_block bb, int size)
|
||||
{
|
||||
/* Verify that aux field is clear. */
|
||||
if (bb->aux || !first_block_aux_obj)
|
||||
@ -708,8 +682,7 @@ alloc_aux_for_block (bb, size)
|
||||
alloc_aux_for_block for each basic block. */
|
||||
|
||||
void
|
||||
alloc_aux_for_blocks (size)
|
||||
int size;
|
||||
alloc_aux_for_blocks (int size)
|
||||
{
|
||||
static int initialized;
|
||||
|
||||
@ -735,7 +708,7 @@ alloc_aux_for_blocks (size)
|
||||
/* Clear AUX pointers of all blocks. */
|
||||
|
||||
void
|
||||
clear_aux_for_blocks ()
|
||||
clear_aux_for_blocks (void)
|
||||
{
|
||||
basic_block bb;
|
||||
|
||||
@ -747,7 +720,7 @@ clear_aux_for_blocks ()
|
||||
of all blocks. */
|
||||
|
||||
void
|
||||
free_aux_for_blocks ()
|
||||
free_aux_for_blocks (void)
|
||||
{
|
||||
if (!first_block_aux_obj)
|
||||
abort ();
|
||||
@ -761,9 +734,7 @@ free_aux_for_blocks ()
|
||||
be first initialized by alloc_aux_for_edges. */
|
||||
|
||||
inline void
|
||||
alloc_aux_for_edge (e, size)
|
||||
edge e;
|
||||
int size;
|
||||
alloc_aux_for_edge (edge e, int size)
|
||||
{
|
||||
/* Verify that aux field is clear. */
|
||||
if (e->aux || !first_edge_aux_obj)
|
||||
@ -776,8 +747,7 @@ alloc_aux_for_edge (e, size)
|
||||
alloc_aux_for_edge for each basic edge. */
|
||||
|
||||
void
|
||||
alloc_aux_for_edges (size)
|
||||
int size;
|
||||
alloc_aux_for_edges (int size)
|
||||
{
|
||||
static int initialized;
|
||||
|
||||
@ -809,7 +779,7 @@ alloc_aux_for_edges (size)
|
||||
/* Clear AUX pointers of all edges. */
|
||||
|
||||
void
|
||||
clear_aux_for_edges ()
|
||||
clear_aux_for_edges (void)
|
||||
{
|
||||
basic_block bb;
|
||||
edge e;
|
||||
@ -825,7 +795,7 @@ clear_aux_for_edges ()
|
||||
of all edges. */
|
||||
|
||||
void
|
||||
free_aux_for_edges ()
|
||||
free_aux_for_edges (void)
|
||||
{
|
||||
if (!first_edge_aux_obj)
|
||||
abort ();
|
||||
@ -835,12 +805,12 @@ free_aux_for_edges ()
|
||||
clear_aux_for_edges ();
|
||||
}
|
||||
|
||||
/* Verify the CFG consistency.
|
||||
|
||||
/* Verify the CFG consistency.
|
||||
|
||||
Currently it does following checks edge and basic block list correctness
|
||||
and calls into IL dependent checking then. */
|
||||
void
|
||||
verify_flow_info ()
|
||||
verify_flow_info (void)
|
||||
{
|
||||
size_t *edge_checksum;
|
||||
int num_bb_notes, err = 0;
|
||||
@ -984,9 +954,7 @@ verify_flow_info ()
|
||||
/* Print out one basic block with live information at start and end. */
|
||||
|
||||
void
|
||||
dump_bb (bb, outf)
|
||||
basic_block bb;
|
||||
FILE *outf;
|
||||
dump_bb (basic_block bb, FILE *outf)
|
||||
{
|
||||
edge e;
|
||||
|
||||
@ -1004,15 +972,13 @@ dump_bb (bb, outf)
|
||||
}
|
||||
|
||||
void
|
||||
debug_bb (bb)
|
||||
basic_block bb;
|
||||
debug_bb (basic_block bb)
|
||||
{
|
||||
dump_bb (bb, stderr);
|
||||
}
|
||||
|
||||
basic_block
|
||||
debug_bb_n (n)
|
||||
int n;
|
||||
debug_bb_n (int n)
|
||||
{
|
||||
basic_block bb = BASIC_BLOCK (n);
|
||||
dump_bb (bb, stderr);
|
||||
|
112 gcc/cfganal.c
@ -46,29 +46,25 @@ struct depth_first_search_dsS {
|
||||
};
|
||||
typedef struct depth_first_search_dsS *depth_first_search_ds;
|
||||
|
||||
static void flow_dfs_compute_reverse_init
|
||||
PARAMS ((depth_first_search_ds));
|
||||
static void flow_dfs_compute_reverse_add_bb
|
||||
PARAMS ((depth_first_search_ds, basic_block));
|
||||
static basic_block flow_dfs_compute_reverse_execute
|
||||
PARAMS ((depth_first_search_ds));
|
||||
static void flow_dfs_compute_reverse_finish
|
||||
PARAMS ((depth_first_search_ds));
|
||||
static void remove_fake_successors PARAMS ((basic_block));
|
||||
static bool need_fake_edge_p PARAMS ((rtx));
|
||||
static bool flow_active_insn_p PARAMS ((rtx));
|
||||
static void flow_dfs_compute_reverse_init (depth_first_search_ds);
|
||||
static void flow_dfs_compute_reverse_add_bb (depth_first_search_ds,
|
||||
basic_block);
|
||||
static basic_block flow_dfs_compute_reverse_execute (depth_first_search_ds);
|
||||
static void flow_dfs_compute_reverse_finish (depth_first_search_ds);
|
||||
static void remove_fake_successors (basic_block);
|
||||
static bool need_fake_edge_p (rtx);
|
||||
static bool flow_active_insn_p (rtx);
|
||||
|
||||
/* Like active_insn_p, except keep the return value clobber around
|
||||
even after reload. */
|
||||
|
||||
static bool
|
||||
flow_active_insn_p (insn)
|
||||
rtx insn;
|
||||
flow_active_insn_p (rtx insn)
|
||||
{
|
||||
if (active_insn_p (insn))
|
||||
return true;
|
||||
|
||||
/* A clobber of the function return value exists for buggy
|
||||
/* A clobber of the function return value exists for buggy
|
||||
programs that fail to return a value. Its effect is to
|
||||
keep the return value from being live across the entire
|
||||
function. If we allow it to be skipped, we introduce the
|
||||
@ -85,8 +81,7 @@ flow_active_insn_p (insn)
|
||||
its single destination. */
|
||||
|
||||
bool
|
||||
forwarder_block_p (bb)
|
||||
basic_block bb;
|
||||
forwarder_block_p (basic_block bb)
|
||||
{
|
||||
rtx insn;
|
||||
|
||||
@ -106,8 +101,7 @@ forwarder_block_p (bb)
|
||||
/* Return nonzero if we can reach target from src by falling through. */
|
||||
|
||||
bool
|
||||
can_fallthru (src, target)
|
||||
basic_block src, target;
|
||||
can_fallthru (basic_block src, basic_block target)
|
||||
{
|
||||
rtx insn = src->end;
|
||||
rtx insn2 = target->head;
|
||||
@ -133,7 +127,7 @@ can_fallthru (src, target)
|
||||
and heavily borrowed from flow_depth_first_order_compute. */
|
||||
|
||||
bool
|
||||
mark_dfs_back_edges ()
|
||||
mark_dfs_back_edges (void)
|
||||
{
|
||||
edge *stack;
|
||||
int *pre;
|
||||
@ -217,7 +211,7 @@ mark_dfs_back_edges ()
|
||||
/* Set the flag EDGE_CAN_FALLTHRU for edges that can be fallthru. */
|
||||
|
||||
void
|
||||
set_edge_can_fallthru_flag ()
|
||||
set_edge_can_fallthru_flag (void)
|
||||
{
|
||||
basic_block bb;
|
||||
|
||||
@ -252,8 +246,7 @@ set_edge_can_fallthru_flag ()
|
||||
Helper function for the flow_call_edges_add. */
|
||||
|
||||
static bool
|
||||
need_fake_edge_p (insn)
|
||||
rtx insn;
|
||||
need_fake_edge_p (rtx insn)
|
||||
{
|
||||
if (!INSN_P (insn))
|
||||
return false;
|
||||
@ -282,8 +275,7 @@ need_fake_edge_p (insn)
|
||||
that all subsequent instructions must be executed. */
|
||||
|
||||
int
|
||||
flow_call_edges_add (blocks)
|
||||
sbitmap blocks;
|
||||
flow_call_edges_add (sbitmap blocks)
|
||||
{
|
||||
int i;
|
||||
int blocks_split = 0;
|
||||
@ -405,7 +397,7 @@ flow_call_edges_add (blocks)
|
||||
block is reachable. */
|
||||
|
||||
void
|
||||
find_unreachable_blocks ()
|
||||
find_unreachable_blocks (void)
|
||||
{
|
||||
edge e;
|
||||
basic_block *tos, *worklist, bb;
|
||||
@ -461,7 +453,7 @@ find_unreachable_blocks ()
|
||||
and the data structure is filled in. */
|
||||
|
||||
struct edge_list *
|
||||
create_edge_list ()
|
||||
create_edge_list (void)
|
||||
{
|
||||
struct edge_list *elist;
|
||||
edge e;
|
||||
@ -499,8 +491,7 @@ create_edge_list ()
|
||||
/* This function free's memory associated with an edge list. */
|
||||
|
||||
void
|
||||
free_edge_list (elist)
|
||||
struct edge_list *elist;
|
||||
free_edge_list (struct edge_list *elist)
|
||||
{
|
||||
if (elist)
|
||||
{
|
||||
@ -512,9 +503,7 @@ free_edge_list (elist)
|
||||
/* This function provides debug output showing an edge list. */
|
||||
|
||||
void
|
||||
print_edge_list (f, elist)
|
||||
FILE *f;
|
||||
struct edge_list *elist;
|
||||
print_edge_list (FILE *f, struct edge_list *elist)
|
||||
{
|
||||
int x;
|
||||
|
||||
@ -541,9 +530,7 @@ print_edge_list (f, elist)
|
||||
extra edges. */
|
||||
|
||||
void
|
||||
verify_edge_list (f, elist)
|
||||
FILE *f;
|
||||
struct edge_list *elist;
|
||||
verify_edge_list (FILE *f, struct edge_list *elist)
|
||||
{
|
||||
int pred, succ, index;
|
||||
edge e;
|
||||
@ -608,9 +595,7 @@ verify_edge_list (f, elist)
|
||||
a specified predecessor and successor. */
|
||||
|
||||
int
|
||||
find_edge_index (edge_list, pred, succ)
|
||||
struct edge_list *edge_list;
|
||||
basic_block pred, succ;
|
||||
find_edge_index (struct edge_list *edge_list, basic_block pred, basic_block succ)
|
||||
{
|
||||
int x;
|
||||
|
||||
@ -625,10 +610,7 @@ find_edge_index (edge_list, pred, succ)
|
||||
/* Dump the list of basic blocks in the bitmap NODES. */
|
||||
|
||||
void
|
||||
flow_nodes_print (str, nodes, file)
|
||||
const char *str;
|
||||
const sbitmap nodes;
|
||||
FILE *file;
|
||||
flow_nodes_print (const char *str, const sbitmap nodes, FILE *file)
|
||||
{
|
||||
int node;
|
||||
|
||||
@ -643,11 +625,7 @@ flow_nodes_print (str, nodes, file)
|
||||
/* Dump the list of edges in the array EDGE_LIST. */
|
||||
|
||||
void
|
||||
flow_edge_list_print (str, edge_list, num_edges, file)
|
||||
const char *str;
|
||||
const edge *edge_list;
|
||||
int num_edges;
|
||||
FILE *file;
|
||||
flow_edge_list_print (const char *str, const edge *edge_list, int num_edges, FILE *file)
|
||||
{
|
||||
int i;
|
||||
|
||||
@ -668,8 +646,7 @@ flow_edge_list_print (str, edge_list, num_edges, file)
|
||||
list it is in. */
|
||||
|
||||
static void
|
||||
remove_fake_successors (bb)
|
||||
basic_block bb;
|
||||
remove_fake_successors (basic_block bb)
|
||||
{
|
||||
edge e;
|
||||
|
||||
@ -688,7 +665,7 @@ remove_fake_successors (bb)
|
||||
fake predecessors. */
|
||||
|
||||
void
|
||||
remove_fake_edges ()
|
||||
remove_fake_edges (void)
|
||||
{
|
||||
basic_block bb;
|
||||
|
||||
@ -701,7 +678,7 @@ remove_fake_edges ()
|
||||
edges to exist. */
|
||||
|
||||
void
|
||||
add_noreturn_fake_exit_edges ()
|
||||
add_noreturn_fake_exit_edges (void)
|
||||
{
|
||||
basic_block bb;
|
||||
|
||||
@ -722,7 +699,7 @@ add_noreturn_fake_exit_edges ()
|
||||
nodes not reachable from the exit block. */
|
||||
|
||||
void
|
||||
connect_infinite_loops_to_exit ()
|
||||
connect_infinite_loops_to_exit (void)
|
||||
{
|
||||
basic_block unvisited_block;
|
||||
struct depth_first_search_dsS dfs_ds;
|
||||
@ -750,8 +727,7 @@ connect_infinite_loops_to_exit ()
|
||||
/* Compute reverse top sort order */
|
||||
|
||||
void
|
||||
flow_reverse_top_sort_order_compute (rts_order)
|
||||
int *rts_order;
|
||||
flow_reverse_top_sort_order_compute (int *rts_order)
|
||||
{
|
||||
edge *stack;
|
||||
int sp;
|
||||
@ -819,9 +795,7 @@ flow_reverse_top_sort_order_compute (rts_order)
|
||||
possible. */
|
||||
|
||||
int
|
||||
flow_depth_first_order_compute (dfs_order, rc_order)
|
||||
int *dfs_order;
|
||||
int *rc_order;
|
||||
flow_depth_first_order_compute (int *dfs_order, int *rc_order)
|
||||
{
|
||||
edge *stack;
|
||||
int sp;
|
||||
@ -922,8 +896,7 @@ struct dfst_node
|
||||
2) Walking the resulting tree from right to left. */
|
||||
|
||||
void
|
||||
flow_preorder_transversal_compute (pot_order)
|
||||
int *pot_order;
|
||||
flow_preorder_transversal_compute (int *pot_order)
|
||||
{
|
||||
edge e;
|
||||
edge *stack;
|
||||
@ -1062,8 +1035,7 @@ flow_preorder_transversal_compute (pot_order)
|
||||
element on the stack. */
|
||||
|
||||
static void
|
||||
flow_dfs_compute_reverse_init (data)
|
||||
depth_first_search_ds data;
|
||||
flow_dfs_compute_reverse_init (depth_first_search_ds data)
|
||||
{
|
||||
/* Allocate stack for back-tracking up CFG. */
|
||||
data->stack = (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
|
||||
@ -1084,9 +1056,7 @@ flow_dfs_compute_reverse_init (data)
|
||||
block. */
|
||||
|
||||
static void
|
||||
flow_dfs_compute_reverse_add_bb (data, bb)
|
||||
depth_first_search_ds data;
|
||||
basic_block bb;
|
||||
flow_dfs_compute_reverse_add_bb (depth_first_search_ds data, basic_block bb)
|
||||
{
|
||||
data->stack[data->sp++] = bb;
|
||||
SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
|
||||
@ -1098,8 +1068,7 @@ flow_dfs_compute_reverse_add_bb (data, bb)
|
||||
available. */
|
||||
|
||||
static basic_block
|
||||
flow_dfs_compute_reverse_execute (data)
|
||||
depth_first_search_ds data;
|
||||
flow_dfs_compute_reverse_execute (depth_first_search_ds data)
|
||||
{
|
||||
basic_block bb;
|
||||
edge e;
|
||||
@ -1127,8 +1096,7 @@ flow_dfs_compute_reverse_execute (data)
|
||||
reverse graph. */
|
||||
|
||||
static void
|
||||
flow_dfs_compute_reverse_finish (data)
|
||||
depth_first_search_ds data;
|
||||
flow_dfs_compute_reverse_finish (depth_first_search_ds data)
|
||||
{
|
||||
free (data->stack);
|
||||
sbitmap_free (data->visited_blocks);
|
||||
@ -1138,13 +1106,9 @@ flow_dfs_compute_reverse_finish (data)
|
||||
if REVERSE, go against direction of edges. Returns number of blocks
|
||||
found and their list in RSLT. RSLT can contain at most RSLT_MAX items. */
|
||||
int
|
||||
dfs_enumerate_from (bb, reverse, predicate, rslt, rslt_max, data)
|
||||
basic_block bb;
|
||||
int reverse;
|
||||
bool (*predicate) PARAMS ((basic_block, void *));
|
||||
basic_block *rslt;
|
||||
int rslt_max;
|
||||
void *data;
|
||||
dfs_enumerate_from (basic_block bb, int reverse,
|
||||
bool (*predicate) (basic_block, void *),
|
||||
basic_block *rslt, int rslt_max, void *data)
|
||||
{
|
||||
basic_block *st, lbb;
|
||||
int sp = 0, tv = 0;
|
||||
|
@ -48,23 +48,20 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
|
||||
#include "toplev.h"
|
||||
#include "timevar.h"
|
||||
|
||||
static int count_basic_blocks PARAMS ((rtx));
|
||||
static void find_basic_blocks_1 PARAMS ((rtx));
|
||||
static rtx find_label_refs PARAMS ((rtx, rtx));
|
||||
static void make_edges PARAMS ((rtx, basic_block,
|
||||
basic_block, int));
|
||||
static void make_label_edge PARAMS ((sbitmap *, basic_block,
|
||||
rtx, int));
|
||||
static void make_eh_edge PARAMS ((sbitmap *, basic_block, rtx));
|
||||
static void find_bb_boundaries PARAMS ((basic_block));
|
||||
static void compute_outgoing_frequencies PARAMS ((basic_block));
|
||||
static int count_basic_blocks (rtx);
|
||||
static void find_basic_blocks_1 (rtx);
|
||||
static rtx find_label_refs (rtx, rtx);
|
||||
static void make_edges (rtx, basic_block, basic_block, int);
|
||||
static void make_label_edge (sbitmap *, basic_block, rtx, int);
|
||||
static void make_eh_edge (sbitmap *, basic_block, rtx);
|
||||
static void find_bb_boundaries (basic_block);
|
||||
static void compute_outgoing_frequencies (basic_block);
|
||||
|
||||
/* Return true if insn is something that should be contained inside basic
|
||||
block. */
|
||||
|
||||
bool
|
||||
inside_basic_block_p (insn)
|
||||
rtx insn;
|
||||
inside_basic_block_p (rtx insn)
|
||||
{
|
||||
switch (GET_CODE (insn))
|
||||
{
|
||||
@ -96,8 +93,7 @@ inside_basic_block_p (insn)
|
||||
the basic block. */
|
||||
|
||||
bool
|
||||
control_flow_insn_p (insn)
|
||||
rtx insn;
|
||||
control_flow_insn_p (rtx insn)
|
||||
{
|
||||
rtx note;
|
||||
|
||||
@ -138,8 +134,7 @@ control_flow_insn_p (insn)
|
||||
/* Count the basic blocks of the function. */
|
||||
|
||||
static int
|
||||
count_basic_blocks (f)
|
||||
rtx f;
|
||||
count_basic_blocks (rtx f)
|
||||
{
|
||||
int count = 0;
|
||||
bool saw_insn = false;
|
||||
@ -180,9 +175,7 @@ count_basic_blocks (f)
|
||||
This is used to scan the alternatives of a call placeholder. */
|
||||
|
||||
static rtx
|
||||
find_label_refs (f, lvl)
|
||||
rtx f;
|
||||
rtx lvl;
|
||||
find_label_refs (rtx f, rtx lvl)
|
||||
{
|
||||
rtx insn;
|
||||
|
||||
@ -229,11 +222,7 @@ find_label_refs (f, lvl)
|
||||
/* Create an edge from a basic block to a label. */
|
||||
|
||||
static void
|
||||
make_label_edge (edge_cache, src, label, flags)
|
||||
sbitmap *edge_cache;
|
||||
basic_block src;
|
||||
rtx label;
|
||||
int flags;
|
||||
make_label_edge (sbitmap *edge_cache, basic_block src, rtx label, int flags)
|
||||
{
|
||||
if (GET_CODE (label) != CODE_LABEL)
|
||||
abort ();
|
||||
@ -252,10 +241,7 @@ make_label_edge (edge_cache, src, label, flags)
|
||||
/* Create the edges generated by INSN in REGION. */
|
||||
|
||||
static void
|
||||
make_eh_edge (edge_cache, src, insn)
|
||||
sbitmap *edge_cache;
|
||||
basic_block src;
|
||||
rtx insn;
|
||||
make_eh_edge (sbitmap *edge_cache, basic_block src, rtx insn)
|
||||
{
|
||||
int is_call = GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0;
|
||||
rtx handlers, i;
|
||||
@ -278,10 +264,7 @@ make_eh_edge (edge_cache, src, insn)
|
||||
the list of exception regions active at the end of the basic block. */
|
||||
|
||||
static void
|
||||
make_edges (label_value_list, min, max, update_p)
|
||||
rtx label_value_list;
|
||||
basic_block min, max;
|
||||
int update_p;
|
||||
make_edges (rtx label_value_list, basic_block min, basic_block max, int update_p)
|
||||
{
|
||||
basic_block bb;
|
||||
sbitmap *edge_cache = NULL;
|
||||
@ -464,8 +447,7 @@ make_edges (label_value_list, min, max, update_p)
|
||||
will be used in make_edges for use with computed gotos. */
|
||||
|
||||
static void
|
||||
find_basic_blocks_1 (f)
|
||||
rtx f;
|
||||
find_basic_blocks_1 (rtx f)
|
||||
{
|
||||
rtx insn, next;
|
||||
rtx bb_note = NULL_RTX;
|
||||
@ -604,10 +586,8 @@ find_basic_blocks_1 (f)
|
||||
numbers in use. */
|
||||
|
||||
void
|
||||
find_basic_blocks (f, nregs, file)
|
||||
rtx f;
|
||||
int nregs ATTRIBUTE_UNUSED;
|
||||
FILE *file ATTRIBUTE_UNUSED;
|
||||
find_basic_blocks (rtx f, int nregs ATTRIBUTE_UNUSED,
|
||||
FILE *file ATTRIBUTE_UNUSED)
|
||||
{
|
||||
basic_block bb;
|
||||
|
||||
@ -622,7 +602,7 @@ find_basic_blocks (f, nregs, file)
|
||||
tag for reuse during create_basic_block, just in case some pass
|
||||
copies around basic block notes improperly. */
|
||||
FOR_EACH_BB (bb)
|
||||
bb->aux = NULL;
|
||||
bb->aux = NULL;
|
||||
|
||||
VARRAY_FREE (basic_block_info);
|
||||
}
|
||||
@ -667,8 +647,7 @@ enum state {BLOCK_NEW = 0, BLOCK_ORIGINAL, BLOCK_TO_SPLIT};
|
||||
and create new basic blocks in the progress. */
|
||||
|
||||
static void
|
||||
find_bb_boundaries (bb)
|
||||
basic_block bb;
|
||||
find_bb_boundaries (basic_block bb)
|
||||
{
|
||||
rtx insn = bb->head;
|
||||
rtx end = bb->end;
|
||||
@ -734,8 +713,7 @@ find_bb_boundaries (bb)
|
||||
and probabilities of outgoing edges. */
|
||||
|
||||
static void
|
||||
compute_outgoing_frequencies (b)
|
||||
basic_block b;
|
||||
compute_outgoing_frequencies (basic_block b)
|
||||
{
|
||||
edge e, f;
|
||||
|
||||
@ -771,8 +749,7 @@ compute_outgoing_frequencies (b)
|
||||
basic block. Update the data structure. */
|
||||
|
||||
void
|
||||
find_many_sub_basic_blocks (blocks)
|
||||
sbitmap blocks;
|
||||
find_many_sub_basic_blocks (sbitmap blocks)
|
||||
{
|
||||
basic_block bb, min, max;
|
||||
|
||||
@ -826,8 +803,7 @@ find_many_sub_basic_blocks (blocks)
|
||||
/* Like above but for single basic block only. */
|
||||
|
||||
void
|
||||
find_sub_basic_blocks (bb)
|
||||
basic_block bb;
|
||||
find_sub_basic_blocks (basic_block bb)
|
||||
{
|
||||
basic_block min, max, b;
|
||||
basic_block next = bb->next_bb;
|
||||
|
117
gcc/cfgcleanup.c
117
gcc/cfgcleanup.c
@ -1,6 +1,6 @@
|
||||
/* Control flow optimization code for GNU compiler.
|
||||
Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
|
||||
1999, 2000, 2001, 2002 Free Software Foundation, Inc.
|
||||
1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
|
||||
|
||||
This file is part of GCC.
|
||||
|
||||
@ -67,36 +67,30 @@ enum bb_flags
|
||||
|
||||
#define FORWARDER_BLOCK_P(BB) (BB_FLAGS (BB) & BB_FORWARDER_BLOCK)
|
||||
|
||||
static bool try_crossjump_to_edge PARAMS ((int, edge, edge));
|
||||
static bool try_crossjump_bb PARAMS ((int, basic_block));
|
||||
static bool outgoing_edges_match PARAMS ((int,
|
||||
basic_block, basic_block));
|
||||
static int flow_find_cross_jump PARAMS ((int, basic_block, basic_block,
|
||||
rtx *, rtx *));
|
||||
static bool insns_match_p PARAMS ((int, rtx, rtx));
|
||||
static bool try_crossjump_to_edge (int, edge, edge);
|
||||
static bool try_crossjump_bb (int, basic_block);
|
||||
static bool outgoing_edges_match (int, basic_block, basic_block);
|
||||
static int flow_find_cross_jump (int, basic_block, basic_block, rtx *, rtx *);
|
||||
static bool insns_match_p (int, rtx, rtx);
|
||||
|
||||
static bool label_is_jump_target_p PARAMS ((rtx, rtx));
|
||||
static bool tail_recursion_label_p PARAMS ((rtx));
|
||||
static void merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
|
||||
basic_block));
|
||||
static void merge_blocks_move_successor_nojumps PARAMS ((basic_block,
|
||||
basic_block));
|
||||
static basic_block merge_blocks PARAMS ((edge,basic_block,basic_block,
|
||||
int));
|
||||
static bool try_optimize_cfg PARAMS ((int));
|
||||
static bool try_simplify_condjump PARAMS ((basic_block));
|
||||
static bool try_forward_edges PARAMS ((int, basic_block));
|
||||
static edge thread_jump PARAMS ((int, edge, basic_block));
|
||||
static bool mark_effect PARAMS ((rtx, bitmap));
|
||||
static void notice_new_block PARAMS ((basic_block));
|
||||
static void update_forwarder_flag PARAMS ((basic_block));
|
||||
static int mentions_nonequal_regs PARAMS ((rtx *, void *));
|
||||
static bool label_is_jump_target_p (rtx, rtx);
|
||||
static bool tail_recursion_label_p (rtx);
|
||||
static void merge_blocks_move_predecessor_nojumps (basic_block, basic_block);
|
||||
static void merge_blocks_move_successor_nojumps (basic_block, basic_block);
|
||||
static basic_block merge_blocks (edge,basic_block,basic_block, int);
|
||||
static bool try_optimize_cfg (int);
|
||||
static bool try_simplify_condjump (basic_block);
|
||||
static bool try_forward_edges (int, basic_block);
|
||||
static edge thread_jump (int, edge, basic_block);
|
||||
static bool mark_effect (rtx, bitmap);
|
||||
static void notice_new_block (basic_block);
|
||||
static void update_forwarder_flag (basic_block);
|
||||
static int mentions_nonequal_regs (rtx *, void *);
|
||||
|
||||
/* Set flags for newly created block. */
|
||||
|
||||
static void
|
||||
notice_new_block (bb)
|
||||
basic_block bb;
|
||||
notice_new_block (basic_block bb)
|
||||
{
|
||||
if (!bb)
|
||||
return;
|
||||
@ -108,8 +102,7 @@ notice_new_block (bb)
|
||||
/* Recompute forwarder flag after block has been modified. */
|
||||
|
||||
static void
|
||||
update_forwarder_flag (bb)
|
||||
basic_block bb;
|
||||
update_forwarder_flag (basic_block bb)
|
||||
{
|
||||
if (forwarder_block_p (bb))
|
||||
BB_SET_FLAG (bb, BB_FORWARDER_BLOCK);
|
||||
@ -121,8 +114,7 @@ update_forwarder_flag (bb)
|
||||
Return true if something changed. */
|
||||
|
||||
static bool
|
||||
try_simplify_condjump (cbranch_block)
|
||||
basic_block cbranch_block;
|
||||
try_simplify_condjump (basic_block cbranch_block)
|
||||
{
|
||||
basic_block jump_block, jump_dest_block, cbranch_dest_block;
|
||||
edge cbranch_jump_edge, cbranch_fallthru_edge;
|
||||
@ -190,9 +182,7 @@ try_simplify_condjump (cbranch_block)
|
||||
on register. Used by jump threading. */
|
||||
|
||||
static bool
|
||||
mark_effect (exp, nonequal)
|
||||
rtx exp;
|
||||
regset nonequal;
|
||||
mark_effect (rtx exp, regset nonequal)
|
||||
{
|
||||
int regno;
|
||||
rtx dest;
|
||||
@ -241,9 +231,7 @@ mark_effect (exp, nonequal)
|
||||
/* Return nonzero if X is an register set in regset DATA.
|
||||
Called via for_each_rtx. */
|
||||
static int
|
||||
mentions_nonequal_regs (x, data)
|
||||
rtx *x;
|
||||
void *data;
|
||||
mentions_nonequal_regs (rtx *x, void *data)
|
||||
{
|
||||
regset nonequal = (regset) data;
|
||||
if (REG_P (*x))
|
||||
@ -268,10 +256,7 @@ mentions_nonequal_regs (x, data)
|
||||
if exist, NULL otherwise. */
|
||||
|
||||
static edge
|
||||
thread_jump (mode, e, b)
|
||||
int mode;
|
||||
edge e;
|
||||
basic_block b;
|
||||
thread_jump (int mode, edge e, basic_block b)
|
||||
{
|
||||
rtx set1, set2, cond1, cond2, insn;
|
||||
enum rtx_code code1, code2, reversed_code2;
|
||||
@ -413,9 +398,7 @@ failed_exit:
|
||||
Return true if successful. */
|
||||
|
||||
static bool
|
||||
try_forward_edges (mode, b)
|
||||
basic_block b;
|
||||
int mode;
|
||||
try_forward_edges (int mode, basic_block b)
|
||||
{
|
||||
bool changed = false;
|
||||
edge e, next, *threaded_edges = NULL;
|
||||
@ -648,8 +631,7 @@ try_forward_edges (mode, b)
|
||||
not apply to the fallthru case of a conditional jump. */
|
||||
|
||||
static bool
|
||||
label_is_jump_target_p (label, jump_insn)
|
||||
rtx label, jump_insn;
|
||||
label_is_jump_target_p (rtx label, rtx jump_insn)
|
||||
{
|
||||
rtx tmp = JUMP_LABEL (jump_insn);
|
||||
|
||||
@ -672,8 +654,7 @@ label_is_jump_target_p (label, jump_insn)
|
||||
/* Return true if LABEL is used for tail recursion. */
|
||||
|
||||
static bool
|
||||
tail_recursion_label_p (label)
|
||||
rtx label;
|
||||
tail_recursion_label_p (rtx label)
|
||||
{
|
||||
rtx x;
|
||||
|
||||
@ -689,8 +670,7 @@ tail_recursion_label_p (label)
|
||||
any jumps (aside from the jump from A to B). */
|
||||
|
||||
static void
|
||||
merge_blocks_move_predecessor_nojumps (a, b)
|
||||
basic_block a, b;
|
||||
merge_blocks_move_predecessor_nojumps (basic_block a, basic_block b)
|
||||
{
|
||||
rtx barrier;
|
||||
|
||||
@ -732,8 +712,7 @@ merge_blocks_move_predecessor_nojumps (a, b)
|
||||
any jumps (aside from the jump from A to B). */
|
||||
|
||||
static void
|
||||
merge_blocks_move_successor_nojumps (a, b)
|
||||
basic_block a, b;
|
||||
merge_blocks_move_successor_nojumps (basic_block a, basic_block b)
|
||||
{
|
||||
rtx barrier, real_b_end;
|
||||
|
||||
@ -795,10 +774,7 @@ merge_blocks_move_successor_nojumps (a, b)
|
||||
relative ordering of these two. Hopefully it is not too common. */
|
||||
|
||||
static basic_block
|
||||
merge_blocks (e, b, c, mode)
|
||||
edge e;
|
||||
basic_block b, c;
|
||||
int mode;
|
||||
merge_blocks (edge e, basic_block b, basic_block c, int mode)
|
||||
{
|
||||
basic_block next;
|
||||
/* If C has a tail recursion label, do not merge. There is no
|
||||
@ -895,9 +871,7 @@ merge_blocks (e, b, c, mode)
|
||||
/* Return true if I1 and I2 are equivalent and thus can be crossjumped. */
|
||||
|
||||
static bool
|
||||
insns_match_p (mode, i1, i2)
|
||||
int mode ATTRIBUTE_UNUSED;
|
||||
rtx i1, i2;
|
||||
insns_match_p (int mode ATTRIBUTE_UNUSED, rtx i1, rtx i2)
|
||||
{
|
||||
rtx p1, p2;
|
||||
|
||||
@ -1010,10 +984,8 @@ insns_match_p (mode, i1, i2)
|
||||
store the head of the blocks in *F1 and *F2. */
|
||||
|
||||
static int
|
||||
flow_find_cross_jump (mode, bb1, bb2, f1, f2)
|
||||
int mode ATTRIBUTE_UNUSED;
|
||||
basic_block bb1, bb2;
|
||||
rtx *f1, *f2;
|
||||
flow_find_cross_jump (int mode ATTRIBUTE_UNUSED, basic_block bb1,
|
||||
basic_block bb2, rtx *f1, rtx *f2)
|
||||
{
|
||||
rtx i1, i2, last1, last2, afterlast1, afterlast2;
|
||||
int ninsns = 0;
|
||||
@ -1122,10 +1094,7 @@ flow_find_cross_jump (mode, bb1, bb2, f1, f2)
|
||||
We may assume that there exists one edge with a common destination. */
|
||||
|
||||
static bool
|
||||
outgoing_edges_match (mode, bb1, bb2)
|
||||
int mode;
|
||||
basic_block bb1;
|
||||
basic_block bb2;
|
||||
outgoing_edges_match (int mode, basic_block bb1, basic_block bb2)
|
||||
{
|
||||
int nehedges1 = 0, nehedges2 = 0;
|
||||
edge fallthru1 = 0, fallthru2 = 0;
|
||||
@ -1394,9 +1363,7 @@ outgoing_edges_match (mode, bb1, bb2)
|
||||
(maybe the middle of) E1->SRC to (maybe the middle of) E2->SRC. */
|
||||
|
||||
static bool
|
||||
try_crossjump_to_edge (mode, e1, e2)
|
||||
int mode;
|
||||
edge e1, e2;
|
||||
try_crossjump_to_edge (int mode, edge e1, edge e2)
|
||||
{
|
||||
int nmatch;
|
||||
basic_block src1 = e1->src, src2 = e2->src;
|
||||
@ -1580,9 +1547,7 @@ try_crossjump_to_edge (mode, e1, e2)
|
||||
any changes made. */
|
||||
|
||||
static bool
|
||||
try_crossjump_bb (mode, bb)
|
||||
int mode;
|
||||
basic_block bb;
|
||||
try_crossjump_bb (int mode, basic_block bb)
|
||||
{
|
||||
edge e, e2, nexte2, nexte, fallthru;
|
||||
bool changed;
|
||||
@ -1675,8 +1640,7 @@ try_crossjump_bb (mode, bb)
|
||||
instructions etc. Return nonzero if changes were made. */
|
||||
|
||||
static bool
|
||||
try_optimize_cfg (mode)
|
||||
int mode;
|
||||
try_optimize_cfg (int mode)
|
||||
{
|
||||
bool changed_overall = false;
|
||||
bool changed;
|
||||
@ -1856,7 +1820,7 @@ try_optimize_cfg (mode)
|
||||
/* Delete all unreachable basic blocks. */
|
||||
|
||||
bool
|
||||
delete_unreachable_blocks ()
|
||||
delete_unreachable_blocks (void)
|
||||
{
|
||||
bool changed = false;
|
||||
basic_block b, next_bb;
|
||||
@ -1884,8 +1848,7 @@ delete_unreachable_blocks ()
|
||||
/* Tidy the CFG by deleting unreachable code and whatnot. */
|
||||
|
||||
bool
|
||||
cleanup_cfg (mode)
|
||||
int mode;
|
||||
cleanup_cfg (int mode)
|
||||
{
|
||||
bool changed = false;
|
||||
|
||||
|
@ -34,15 +34,15 @@ extern struct cfg_hooks cfg_layout_rtl_cfg_hooks;
|
||||
struct cfg_hooks *cfg_hooks;
|
||||
|
||||
/* Initialization of functions specific to the rtl IR. */
|
||||
void
|
||||
rtl_register_cfg_hooks ()
|
||||
void
|
||||
rtl_register_cfg_hooks (void)
|
||||
{
|
||||
cfg_hooks = &rtl_cfg_hooks;
|
||||
}
|
||||
|
||||
/* Initialization of functions specific to the rtl IR. */
|
||||
void
|
||||
cfg_layout_rtl_register_cfg_hooks ()
|
||||
void
|
||||
cfg_layout_rtl_register_cfg_hooks (void)
|
||||
{
|
||||
cfg_hooks = &cfg_layout_rtl_cfg_hooks;
|
||||
}
|
||||
|
@ -26,30 +26,30 @@ struct cfg_hooks
|
||||
{
|
||||
/* Debugging. Do not use macros to hook these so they can be called from
|
||||
debugger! */
|
||||
int (*cfgh_verify_flow_info) PARAMS ((void));
|
||||
void (*dump_bb) PARAMS ((basic_block, FILE *));
|
||||
int (*cfgh_verify_flow_info) (void);
|
||||
void (*dump_bb) (basic_block, FILE *);
|
||||
|
||||
/* Basic CFG manipulation. */
|
||||
|
||||
/* Redirect edge E to the given basic block B and update underlying program
|
||||
representation. Returns false when edge is not easilly redirectable for
|
||||
whatever reason. */
|
||||
bool (*redirect_edge_and_branch) PARAMS ((edge e, basic_block b));
|
||||
bool (*redirect_edge_and_branch) (edge e, basic_block b);
|
||||
|
||||
/* Same as the above but allows redirecting of fallthru edges. In that case
|
||||
newly created forwarder basic block is returned. It aborts when called
|
||||
on abnormal edge. */
|
||||
basic_block (*redirect_edge_and_branch_force)PARAMS ((edge, basic_block));
|
||||
basic_block (*redirect_edge_and_branch_force) (edge, basic_block);
|
||||
|
||||
/* Remove given basic block and all edges possibly pointing into it. */
|
||||
void (*delete_block)PARAMS ((basic_block));
|
||||
void (*delete_block) (basic_block);
|
||||
|
||||
/* Split basic block B after specified instruction I. */
|
||||
edge (*split_block) PARAMS ((basic_block b, void * i));
|
||||
edge (*split_block) (basic_block b, void * i);
|
||||
|
||||
/* Higher level functions representable by primitive operations above if
|
||||
we didn't have some oddities in RTL and Tree representations. */
|
||||
basic_block (*cfgh_split_edge) PARAMS ((edge));
|
||||
basic_block (*cfgh_split_edge) (edge);
|
||||
};
|
||||
|
||||
#define redirect_edge_and_branch(e,b) cfg_hooks->redirect_edge_and_branch (e,b)
|
||||
@ -65,7 +65,7 @@ extern struct cfg_hooks rtl_cfg_hooks;
|
||||
extern struct cfg_hooks *cfg_hooks;
|
||||
|
||||
/* Declarations. */
|
||||
extern void rtl_register_cfg_hooks PARAMS ((void));
|
||||
extern void cfg_layout_rtl_register_cfg_hooks PARAMS ((void));
|
||||
extern void rtl_register_cfg_hooks (void);
|
||||
extern void cfg_layout_rtl_register_cfg_hooks (void);
|
||||
|
||||
#endif /* GCC_CFGHOOKS_H */
|
||||
|
@ -1,5 +1,5 @@
|
||||
/* Basic block reordering routines for the GNU compiler.
|
||||
Copyright (C) 2000, 2001 Free Software Foundation, Inc.
|
||||
Copyright (C) 2000, 2001, 2003 Free Software Foundation, Inc.
|
||||
|
||||
This file is part of GCC.
|
||||
|
||||
@ -42,25 +42,23 @@ extern struct obstack flow_obstack;
|
||||
/* Holds the interesting trailing notes for the function. */
|
||||
rtx cfg_layout_function_footer;
|
||||
|
||||
static rtx skip_insns_after_block PARAMS ((basic_block));
|
||||
static void record_effective_endpoints PARAMS ((void));
|
||||
static rtx label_for_bb PARAMS ((basic_block));
|
||||
static void fixup_reorder_chain PARAMS ((void));
|
||||
static rtx skip_insns_after_block (basic_block);
|
||||
static void record_effective_endpoints (void);
|
||||
static rtx label_for_bb (basic_block);
|
||||
static void fixup_reorder_chain (void);
|
||||
|
||||
static void set_block_levels PARAMS ((tree, int));
|
||||
static void change_scope PARAMS ((rtx, tree, tree));
|
||||
static void set_block_levels (tree, int);
|
||||
static void change_scope (rtx, tree, tree);
|
||||
|
||||
void verify_insn_chain PARAMS ((void));
|
||||
static void cleanup_unconditional_jumps PARAMS ((struct loops *));
|
||||
static void fixup_fallthru_exit_predecessor PARAMS ((void));
|
||||
static rtx duplicate_insn_chain PARAMS ((rtx, rtx));
|
||||
static void break_superblocks PARAMS ((void));
|
||||
static tree insn_scope PARAMS ((rtx));
|
||||
void verify_insn_chain (void);
|
||||
static void cleanup_unconditional_jumps (struct loops *);
|
||||
static void fixup_fallthru_exit_predecessor (void);
|
||||
static rtx duplicate_insn_chain (rtx, rtx);
|
||||
static void break_superblocks (void);
|
||||
static tree insn_scope (rtx);
|
||||
|
||||
rtx
|
||||
unlink_insn_chain (first, last)
|
||||
rtx first;
|
||||
rtx last;
|
||||
unlink_insn_chain (rtx first, rtx last)
|
||||
{
|
||||
rtx prevfirst = PREV_INSN (first);
|
||||
rtx nextlast = NEXT_INSN (last);
|
||||
@ -83,8 +81,7 @@ unlink_insn_chain (first, last)
|
||||
we return the last one. Otherwise, we return the end of BB. */
|
||||
|
||||
static rtx
|
||||
skip_insns_after_block (bb)
|
||||
basic_block bb;
|
||||
skip_insns_after_block (basic_block bb)
|
||||
{
|
||||
rtx insn, last_insn, next_head, prev;
|
||||
|
||||
@ -171,8 +168,7 @@ skip_insns_after_block (bb)
|
||||
/* Locate or create a label for a given basic block. */
|
||||
|
||||
static rtx
|
||||
label_for_bb (bb)
|
||||
basic_block bb;
|
||||
label_for_bb (basic_block bb)
|
||||
{
|
||||
rtx label = bb->head;
|
||||
|
||||
@ -191,7 +187,7 @@ label_for_bb (bb)
|
||||
block, as defined by skip_insns_after_block above. */
|
||||
|
||||
static void
|
||||
record_effective_endpoints ()
|
||||
record_effective_endpoints (void)
|
||||
{
|
||||
rtx next_insn = get_insns ();
|
||||
basic_block bb;
|
||||
@ -235,7 +231,7 @@ int epilogue_locator;
|
||||
INSN_LOCATORs. */
|
||||
|
||||
void
|
||||
insn_locators_initialize ()
|
||||
insn_locators_initialize (void)
|
||||
{
|
||||
tree block = NULL;
|
||||
tree last_block = NULL;
|
||||
@ -325,9 +321,7 @@ insn_locators_initialize ()
|
||||
found in the block tree. */
|
||||
|
||||
static void
|
||||
set_block_levels (block, level)
|
||||
tree block;
|
||||
int level;
|
||||
set_block_levels (tree block, int level)
|
||||
{
|
||||
while (block)
|
||||
{
|
||||
@ -339,8 +333,7 @@ set_block_levels (block, level)
|
||||
|
||||
/* Return sope resulting from combination of S1 and S2. */
|
||||
tree
|
||||
choose_inner_scope (s1, s2)
|
||||
tree s1, s2;
|
||||
choose_inner_scope (tree s1, tree s2)
|
||||
{
|
||||
if (!s1)
|
||||
return s2;
|
||||
@ -354,9 +347,7 @@ choose_inner_scope (s1, s2)
|
||||
/* Emit lexical block notes needed to change scope from S1 to S2. */
|
||||
|
||||
static void
|
||||
change_scope (orig_insn, s1, s2)
|
||||
rtx orig_insn;
|
||||
tree s1, s2;
|
||||
change_scope (rtx orig_insn, tree s1, tree s2)
|
||||
{
|
||||
rtx insn = orig_insn;
|
||||
tree com = NULL_TREE;
|
||||
@ -400,8 +391,7 @@ change_scope (orig_insn, s1, s2)
|
||||
|
||||
/* Return lexical scope block insn belong to. */
|
||||
static tree
|
||||
insn_scope (insn)
|
||||
rtx insn;
|
||||
insn_scope (rtx insn)
|
||||
{
|
||||
int max = VARRAY_ACTIVE_SIZE (block_locators_locs);
|
||||
int min = 0;
|
||||
@ -429,8 +419,7 @@ insn_scope (insn)
|
||||
|
||||
/* Return line number of the statement that produced this insn. */
|
||||
int
|
||||
insn_line (insn)
|
||||
rtx insn;
|
||||
insn_line (rtx insn)
|
||||
{
|
||||
int max = VARRAY_ACTIVE_SIZE (line_locators_locs);
|
||||
int min = 0;
|
||||
@ -458,8 +447,7 @@ insn_line (insn)
|
||||
|
||||
/* Return source file of the statement that produced this insn. */
|
||||
const char *
|
||||
insn_file (insn)
|
||||
rtx insn;
|
||||
insn_file (rtx insn)
|
||||
{
|
||||
int max = VARRAY_ACTIVE_SIZE (file_locators_locs);
|
||||
int min = 0;
|
||||
@ -489,7 +477,7 @@ insn_file (insn)
|
||||
on the scope tree and the newly reordered instructions. */
|
||||
|
||||
void
|
||||
reemit_insn_block_notes ()
|
||||
reemit_insn_block_notes (void)
|
||||
{
|
||||
tree cur_block = DECL_INITIAL (cfun->decl);
|
||||
rtx insn, note;
|
||||
@ -512,7 +500,7 @@ reemit_insn_block_notes ()
|
||||
this_block = NULL;
|
||||
for (i = 0; i < XVECLEN (body, 0); i++)
|
||||
this_block = choose_inner_scope (this_block,
|
||||
insn_scope (XVECEXP (body, 0, i)));
|
||||
insn_scope (XVECEXP (body, 0, i)));
|
||||
}
|
||||
if (! this_block)
|
||||
continue;
|
||||
@ -535,7 +523,7 @@ reemit_insn_block_notes ()
|
||||
/* Given a reorder chain, rearrange the code to match. */
|
||||
|
||||
static void
|
||||
fixup_reorder_chain ()
|
||||
fixup_reorder_chain (void)
|
||||
{
|
||||
basic_block bb, prev_bb;
|
||||
int index;
|
||||
@ -623,7 +611,7 @@ fixup_reorder_chain ()
|
||||
|
||||
/* The degenerated case of conditional jump jumping to the next
|
||||
instruction can happen on target having jumps with side
|
||||
effects.
|
||||
effects.
|
||||
|
||||
Create temporarily the duplicated edge representing branch.
|
||||
It will get unidentified by force_nonfallthru_and_redirect
|
||||
@ -776,7 +764,7 @@ fixup_reorder_chain ()
|
||||
3. Check that get_last_insn () returns the actual end of chain. */
|
||||
|
||||
void
|
||||
verify_insn_chain ()
|
||||
verify_insn_chain (void)
|
||||
{
|
||||
rtx x, prevx, nextx;
|
||||
int insn_cnt1, insn_cnt2;
|
||||
@ -807,8 +795,7 @@ verify_insn_chain ()
|
||||
dominators. */
|
||||
|
||||
static void
|
||||
cleanup_unconditional_jumps (loops)
|
||||
struct loops *loops;
|
||||
cleanup_unconditional_jumps (struct loops *loops)
|
||||
{
|
||||
basic_block bb;
|
||||
|
||||
@ -885,7 +872,7 @@ cleanup_unconditional_jumps (loops)
|
||||
/* The block falling through to exit must be the last one in the
|
||||
reordered chain. Ensure that this condition is met. */
|
||||
static void
|
||||
fixup_fallthru_exit_predecessor ()
|
||||
fixup_fallthru_exit_predecessor (void)
|
||||
{
|
||||
edge e;
|
||||
basic_block bb = NULL;
|
||||
@ -913,8 +900,7 @@ fixup_fallthru_exit_predecessor ()
|
||||
/* Return true in case it is possible to duplicate the basic block BB. */
|
||||
|
||||
bool
|
||||
cfg_layout_can_duplicate_bb_p (bb)
|
||||
basic_block bb;
|
||||
cfg_layout_can_duplicate_bb_p (basic_block bb)
|
||||
{
|
||||
edge s;
|
||||
|
||||
@ -951,8 +937,7 @@ cfg_layout_can_duplicate_bb_p (bb)
|
||||
}
|
||||
|
||||
static rtx
|
||||
duplicate_insn_chain (from, to)
|
||||
rtx from, to;
|
||||
duplicate_insn_chain (rtx from, rtx to)
|
||||
{
|
||||
rtx insn, last;
|
||||
|
||||
@ -1047,9 +1032,7 @@ duplicate_insn_chain (from, to)
|
||||
/* Create a duplicate of the basic block BB and redirect edge E into it. */
|
||||
|
||||
basic_block
|
||||
cfg_layout_duplicate_bb (bb, e)
|
||||
basic_block bb;
|
||||
edge e;
|
||||
cfg_layout_duplicate_bb (basic_block bb, edge e)
|
||||
{
|
||||
rtx insn;
|
||||
edge s, n;
|
||||
@ -1141,8 +1124,7 @@ cfg_layout_duplicate_bb (bb, e)
|
||||
CFG layout changes. It keeps LOOPS up-to-date if not null. */
|
||||
|
||||
void
|
||||
cfg_layout_initialize (loops)
|
||||
struct loops *loops;
|
||||
cfg_layout_initialize (struct loops *loops)
|
||||
{
|
||||
/* Our algorithm depends on fact that there are now dead jumptables
|
||||
around the code. */
|
||||
@ -1156,7 +1138,7 @@ cfg_layout_initialize (loops)
|
||||
|
||||
/* Splits superblocks. */
|
||||
static void
|
||||
break_superblocks ()
|
||||
break_superblocks (void)
|
||||
{
|
||||
sbitmap superblocks;
|
||||
int i, need;
|
||||
@ -1187,7 +1169,7 @@ break_superblocks ()
|
||||
compensation code, rebuild scope forest. */
|
||||
|
||||
void
|
||||
cfg_layout_finalize ()
|
||||
cfg_layout_finalize (void)
|
||||
{
|
||||
#ifdef ENABLE_CHECKING
|
||||
verify_flow_info ();
|
||||
|
@ -1,5 +1,5 @@
|
||||
/* Basic block reordering routines for the GNU compiler.
|
||||
Copyright (C) 2000 Free Software Foundation, Inc.
|
||||
Copyright (C) 2000, 2003 Free Software Foundation, Inc.
|
||||
|
||||
This file is part of GCC.
|
||||
|
||||
@ -37,9 +37,9 @@ typedef struct reorder_block_def
|
||||
|
||||
extern rtx cfg_layout_function_footer;
|
||||
|
||||
extern void cfg_layout_initialize PARAMS ((struct loops *));
|
||||
extern void cfg_layout_finalize PARAMS ((void));
|
||||
extern bool cfg_layout_can_duplicate_bb_p PARAMS ((basic_block));
|
||||
extern basic_block cfg_layout_duplicate_bb PARAMS ((basic_block, edge));
|
||||
extern void insn_locators_initialize PARAMS ((void));
|
||||
extern void reemit_insn_block_notes PARAMS ((void));
|
||||
extern void cfg_layout_initialize (struct loops *);
|
||||
extern void cfg_layout_finalize (void);
|
||||
extern bool cfg_layout_can_duplicate_bb_p (basic_block);
|
||||
extern basic_block cfg_layout_duplicate_bb (basic_block, edge);
|
||||
extern void insn_locators_initialize (void);
|
||||
extern void reemit_insn_block_notes (void);
|
||||
|
191
gcc/cfgloop.c
191
gcc/cfgloop.c
@ -1,5 +1,5 @@
|
||||
/* Natural loop discovery code for GNU compiler.
|
||||
Copyright (C) 2000, 2001 Free Software Foundation, Inc.
|
||||
Copyright (C) 2000, 2001, 2003 Free Software Foundation, Inc.
|
||||
|
||||
This file is part of GCC.
|
||||
|
||||
@ -33,29 +33,24 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
|
||||
considered to belong to inner loop with same header. */
|
||||
#define HEAVY_EDGE_RATIO 8
|
||||
|
||||
static void flow_loops_cfg_dump PARAMS ((const struct loops *,
|
||||
FILE *));
|
||||
static void flow_loop_entry_edges_find PARAMS ((struct loop *));
|
||||
static void flow_loop_exit_edges_find PARAMS ((struct loop *));
|
||||
static int flow_loop_nodes_find PARAMS ((basic_block, struct loop *));
|
||||
static void flow_loop_pre_header_scan PARAMS ((struct loop *));
|
||||
static basic_block flow_loop_pre_header_find PARAMS ((basic_block,
|
||||
dominance_info));
|
||||
static int flow_loop_level_compute PARAMS ((struct loop *));
|
||||
static int flow_loops_level_compute PARAMS ((struct loops *));
|
||||
static void establish_preds PARAMS ((struct loop *));
|
||||
static basic_block make_forwarder_block PARAMS ((basic_block, int, int,
|
||||
edge, int));
|
||||
static void canonicalize_loop_headers PARAMS ((void));
|
||||
static bool glb_enum_p PARAMS ((basic_block, void *));
|
||||
static void redirect_edge_with_latch_update PARAMS ((edge, basic_block));
|
||||
static void flow_loops_cfg_dump (const struct loops *, FILE *);
|
||||
static void flow_loop_entry_edges_find (struct loop *);
|
||||
static void flow_loop_exit_edges_find (struct loop *);
|
||||
static int flow_loop_nodes_find (basic_block, struct loop *);
|
||||
static void flow_loop_pre_header_scan (struct loop *);
|
||||
static basic_block flow_loop_pre_header_find (basic_block, dominance_info);
|
||||
static int flow_loop_level_compute (struct loop *);
|
||||
static int flow_loops_level_compute (struct loops *);
|
||||
static void establish_preds (struct loop *);
|
||||
static basic_block make_forwarder_block (basic_block, int, int, edge, int);
|
||||
static void canonicalize_loop_headers (void);
|
||||
static bool glb_enum_p (basic_block, void *);
|
||||
static void redirect_edge_with_latch_update (edge, basic_block);
|
||||
|
||||
/* Dump loop related CFG information. */
|
||||
|
||||
static void
|
||||
flow_loops_cfg_dump (loops, file)
|
||||
const struct loops *loops;
|
||||
FILE *file;
|
||||
flow_loops_cfg_dump (const struct loops *loops, FILE *file)
|
||||
{
|
||||
int i;
|
||||
basic_block bb;
|
||||
@ -97,9 +92,7 @@ flow_loops_cfg_dump (loops, file)
|
||||
/* Return nonzero if the nodes of LOOP are a subset of OUTER. */
|
||||
|
||||
bool
|
||||
flow_loop_nested_p (outer, loop)
|
||||
const struct loop *outer;
|
||||
const struct loop *loop;
|
||||
flow_loop_nested_p (const struct loop *outer, const struct loop *loop)
|
||||
{
|
||||
return loop->depth > outer->depth
|
||||
&& loop->pred[outer->depth] == outer;
|
||||
@ -109,11 +102,9 @@ flow_loop_nested_p (outer, loop)
|
||||
using auxiliary dump callback function LOOP_DUMP_AUX if non null. */
|
||||
|
||||
void
|
||||
flow_loop_dump (loop, file, loop_dump_aux, verbose)
|
||||
const struct loop *loop;
|
||||
FILE *file;
|
||||
void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
|
||||
int verbose;
|
||||
flow_loop_dump (const struct loop *loop, FILE *file,
|
||||
void (*loop_dump_aux) (const struct loop *, FILE *, int),
|
||||
int verbose)
|
||||
{
|
||||
basic_block *bbs;
|
||||
unsigned i;
|
||||
@ -154,11 +145,7 @@ flow_loop_dump (loop, file, loop_dump_aux, verbose)
|
||||
using auxiliary dump callback function LOOP_DUMP_AUX if non null. */
|
||||
|
||||
void
|
||||
flow_loops_dump (loops, file, loop_dump_aux, verbose)
|
||||
const struct loops *loops;
|
||||
FILE *file;
|
||||
void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
|
||||
int verbose;
|
||||
flow_loops_dump (const struct loops *loops, FILE *file, void (*loop_dump_aux) (const struct loop *, FILE *, int), int verbose)
|
||||
{
|
||||
int i;
|
||||
int num_loops;
|
||||
@ -186,8 +173,7 @@ flow_loops_dump (loops, file, loop_dump_aux, verbose)
|
||||
|
||||
/* Free data allocated for LOOP. */
|
||||
void
|
||||
flow_loop_free (loop)
|
||||
struct loop *loop;
|
||||
flow_loop_free (struct loop *loop)
|
||||
{
|
||||
if (loop->pre_header_edges)
|
||||
free (loop->pre_header_edges);
|
||||
@ -203,8 +189,7 @@ flow_loop_free (loop)
|
||||
/* Free all the memory allocated for LOOPS. */
|
||||
|
||||
void
|
||||
flow_loops_free (loops)
|
||||
struct loops *loops;
|
||||
flow_loops_free (struct loops *loops)
|
||||
{
|
||||
if (loops->parray)
|
||||
{
|
||||
@ -240,9 +225,8 @@ flow_loops_free (loops)
|
||||
|
||||
/* Find the entry edges into the LOOP. */
|
||||
|
||||
static void
|
||||
flow_loop_entry_edges_find (loop)
|
||||
struct loop *loop;
|
||||
static void
|
||||
flow_loop_entry_edges_find (struct loop *loop)
|
||||
{
|
||||
edge e;
|
||||
int num_entries;
|
||||
@ -272,8 +256,7 @@ flow_loop_entry_edges_find (loop)
|
||||
/* Find the exit edges from the LOOP. */
|
||||
|
||||
static void
|
||||
flow_loop_exit_edges_find (loop)
|
||||
struct loop *loop;
|
||||
flow_loop_exit_edges_find (struct loop *loop)
|
||||
{
|
||||
edge e;
|
||||
basic_block node, *bbs;
|
||||
@ -328,9 +311,7 @@ flow_loop_exit_edges_find (loop)
|
||||
Return the number of nodes within the loop. */
|
||||
|
||||
static int
|
||||
flow_loop_nodes_find (header, loop)
|
||||
basic_block header;
|
||||
struct loop *loop;
|
||||
flow_loop_nodes_find (basic_block header, struct loop *loop)
|
||||
{
|
||||
basic_block *stack;
|
||||
int sp;
|
||||
@ -347,14 +328,14 @@ flow_loop_nodes_find (header, loop)
|
||||
stack[sp++] = loop->latch;
|
||||
loop->latch->loop_father = loop;
|
||||
loop->latch->loop_depth = loop->depth;
|
||||
|
||||
|
||||
while (sp)
|
||||
{
|
||||
basic_block node;
|
||||
edge e;
|
||||
|
||||
node = stack[--sp];
|
||||
|
||||
|
||||
for (e = node->pred; e; e = e->pred_next)
|
||||
{
|
||||
basic_block ancestor = e->src;
|
||||
@ -378,8 +359,7 @@ flow_loop_nodes_find (header, loop)
|
||||
the edges along the trace from the root node to the loop header. */
|
||||
|
||||
static void
|
||||
flow_loop_pre_header_scan (loop)
|
||||
struct loop *loop;
|
||||
flow_loop_pre_header_scan (struct loop *loop)
|
||||
{
|
||||
int num;
|
||||
basic_block ebb;
|
||||
@ -415,9 +395,7 @@ flow_loop_pre_header_scan (loop)
|
||||
there is no pre-header. */
|
||||
|
||||
static basic_block
|
||||
flow_loop_pre_header_find (header, dom)
|
||||
basic_block header;
|
||||
dominance_info dom;
|
||||
flow_loop_pre_header_find (basic_block header, dominance_info dom)
|
||||
{
|
||||
basic_block pre_header;
|
||||
edge e;
|
||||
@ -448,8 +426,7 @@ flow_loop_pre_header_find (header, dom)
|
||||
}
|
||||
|
||||
static void
|
||||
establish_preds (loop)
|
||||
struct loop *loop;
|
||||
establish_preds (struct loop *loop)
|
||||
{
|
||||
struct loop *ploop, *father = loop->outer;
|
||||
|
||||
@ -469,9 +446,7 @@ establish_preds (loop)
|
||||
pred field will be initialized correctly. */
|
||||
|
||||
void
|
||||
flow_loop_tree_node_add (father, loop)
|
||||
struct loop *father;
|
||||
struct loop *loop;
|
||||
flow_loop_tree_node_add (struct loop *father, struct loop *loop)
|
||||
{
|
||||
loop->next = father->inner;
|
||||
father->inner = loop;
|
||||
@ -483,8 +458,7 @@ flow_loop_tree_node_add (father, loop)
|
||||
/* Remove LOOP from the loop hierarchy tree. */
|
||||
|
||||
void
|
||||
flow_loop_tree_node_remove (loop)
|
||||
struct loop *loop;
|
||||
flow_loop_tree_node_remove (struct loop *loop)
|
||||
{
|
||||
struct loop *prev, *father;
|
||||
|
||||
@ -509,8 +483,7 @@ flow_loop_tree_node_remove (loop)
|
||||
for the natural loop specified by LOOP. Returns the loop level. */
|
||||
|
||||
static int
|
||||
flow_loop_level_compute (loop)
|
||||
struct loop *loop;
|
||||
flow_loop_level_compute (struct loop *loop)
|
||||
{
|
||||
struct loop *inner;
|
||||
int level = 1;
|
||||
@ -540,8 +513,7 @@ flow_loop_level_compute (loop)
|
||||
level. */
|
||||
|
||||
static int
|
||||
flow_loops_level_compute (loops)
|
||||
struct loops *loops;
|
||||
flow_loops_level_compute (struct loops *loops)
|
||||
{
|
||||
return flow_loop_level_compute (loops->tree_root);
|
||||
}
|
||||
@ -550,10 +522,7 @@ flow_loops_level_compute (loops)
|
||||
about it specified by FLAGS. */
|
||||
|
||||
int
|
||||
flow_loop_scan (loops, loop, flags)
|
||||
struct loops *loops;
|
||||
struct loop *loop;
|
||||
int flags;
|
||||
flow_loop_scan (struct loops *loops, struct loop *loop, int flags)
|
||||
{
|
||||
if (flags & LOOP_ENTRY_EDGES)
|
||||
{
|
||||
@ -588,9 +557,7 @@ flow_loop_scan (loops, loop, flags)
|
||||
|
||||
/* Redirect edge and update latch and header info. */
|
||||
static void
|
||||
redirect_edge_with_latch_update (e, to)
|
||||
edge e;
|
||||
basic_block to;
|
||||
redirect_edge_with_latch_update (edge e, basic_block to)
|
||||
{
|
||||
basic_block jump;
|
||||
|
||||
@ -612,13 +579,7 @@ redirect_edge_with_latch_update (e, to)
|
||||
part. */
|
||||
|
||||
static basic_block
|
||||
make_forwarder_block (bb, redirect_latch, redirect_nonlatch, except,
|
||||
conn_latch)
|
||||
basic_block bb;
|
||||
int redirect_latch;
|
||||
int redirect_nonlatch;
|
||||
edge except;
|
||||
int conn_latch;
|
||||
make_forwarder_block (basic_block bb, int redirect_latch, int redirect_nonlatch, edge except, int conn_latch)
|
||||
{
|
||||
edge e, next_e, fallthru;
|
||||
basic_block dummy;
|
||||
@ -664,12 +625,12 @@ make_forwarder_block (bb, redirect_latch, redirect_nonlatch, except,
|
||||
|
||||
/* Takes care of merging natural loops with shared headers. */
|
||||
static void
|
||||
canonicalize_loop_headers ()
|
||||
canonicalize_loop_headers (void)
|
||||
{
|
||||
dominance_info dom;
|
||||
basic_block header;
|
||||
edge e;
|
||||
|
||||
|
||||
/* Compute the dominators. */
|
||||
dom = calculate_dominance_info (CDI_DOMINATORS);
|
||||
|
||||
@ -709,7 +670,7 @@ canonicalize_loop_headers ()
|
||||
/* We could not redirect edges freely here. On the other hand,
|
||||
we can simply split the edge from entry block. */
|
||||
bb = split_edge (ENTRY_BLOCK_PTR->succ);
|
||||
|
||||
|
||||
alloc_aux_for_edge (bb->succ, sizeof (int));
|
||||
LATCH_EDGE (bb->succ) = 0;
|
||||
alloc_aux_for_block (bb, sizeof (int));
|
||||
@ -776,9 +737,7 @@ canonicalize_loop_headers ()
|
||||
loops found. */
|
||||
|
||||
int
|
||||
flow_loops_find (loops, flags)
|
||||
struct loops *loops;
|
||||
int flags;
|
||||
flow_loops_find (struct loops *loops, int flags)
|
||||
{
|
||||
int i;
|
||||
int b;
|
||||
@ -822,7 +781,7 @@ flow_loops_find (loops, flags)
|
||||
FOR_EACH_BB (header)
|
||||
{
|
||||
int more_latches = 0;
|
||||
|
||||
|
||||
header->loop_depth = 0;
|
||||
|
||||
/* If we have an abnormal predecessor, do not consider the
|
||||
@ -906,7 +865,7 @@ flow_loops_find (loops, flags)
|
||||
continue;
|
||||
|
||||
header = BASIC_BLOCK (rc_order[b]);
|
||||
|
||||
|
||||
loop = loops->parray[num_loops] = xcalloc (1, sizeof (struct loop));
|
||||
|
||||
loop->header = header;
|
||||
@ -961,9 +920,7 @@ flow_loops_find (loops, flags)
|
||||
specified by LOOPS. */
|
||||
|
||||
int
|
||||
flow_loops_update (loops, flags)
|
||||
struct loops *loops;
|
||||
int flags;
|
||||
flow_loops_update (struct loops *loops, int flags)
|
||||
{
|
||||
/* One day we may want to update the current loop data. For now
|
||||
throw away the old stuff and rebuild what we need. */
|
||||
@ -975,9 +932,7 @@ flow_loops_update (loops, flags)
|
||||
|
||||
/* Return nonzero if basic block BB belongs to LOOP. */
|
||||
bool
|
||||
flow_bb_inside_loop_p (loop, bb)
|
||||
const struct loop *loop;
|
||||
const basic_block bb;
|
||||
flow_bb_inside_loop_p (const struct loop *loop, const basic_block bb)
|
||||
{
|
||||
struct loop *source_loop;
|
||||
|
||||
@ -991,9 +946,7 @@ flow_bb_inside_loop_p (loop, bb)
|
||||
/* Return nonzero if edge E enters header of LOOP from outside of LOOP. */
|
||||
|
||||
bool
|
||||
flow_loop_outside_edge_p (loop, e)
|
||||
const struct loop *loop;
|
||||
edge e;
|
||||
flow_loop_outside_edge_p (const struct loop *loop, edge e)
|
||||
{
|
||||
if (e->dest != loop->header)
|
||||
abort ();
|
||||
@ -1002,17 +955,14 @@ flow_loop_outside_edge_p (loop, e)
|
||||
|
||||
/* Enumeration predicate for get_loop_body. */
|
||||
static bool
|
||||
glb_enum_p (bb, glb_header)
|
||||
basic_block bb;
|
||||
void *glb_header;
|
||||
glb_enum_p (basic_block bb, void *glb_header)
|
||||
{
|
||||
return bb != (basic_block) glb_header;
|
||||
}
|
||||
|
||||
/* Gets basic blocks of a loop. */
|
||||
basic_block *
|
||||
get_loop_body (loop)
|
||||
const struct loop *loop;
|
||||
get_loop_body (const struct loop *loop)
|
||||
{
|
||||
basic_block *tovisit, bb;
|
||||
unsigned tv = 0;
|
||||
@ -1046,9 +996,7 @@ get_loop_body (loop)
|
||||
|
||||
/* Gets exit edges of a LOOP, returning their number in N_EDGES. */
|
||||
edge *
|
||||
get_loop_exit_edges (loop, n_edges)
|
||||
const struct loop *loop;
|
||||
unsigned *n_edges;
|
||||
get_loop_exit_edges (const struct loop *loop, unsigned int *n_edges)
|
||||
{
|
||||
edge *edges, e;
|
||||
unsigned i, n;
|
||||
@ -1077,12 +1025,10 @@ get_loop_exit_edges (loop, n_edges)
|
||||
|
||||
/* Adds basic block BB to LOOP. */
|
||||
void
|
||||
add_bb_to_loop (bb, loop)
|
||||
basic_block bb;
|
||||
struct loop *loop;
|
||||
{
|
||||
add_bb_to_loop (basic_block bb, struct loop *loop)
|
||||
{
|
||||
int i;
|
||||
|
||||
|
||||
bb->loop_father = loop;
|
||||
bb->loop_depth = loop->depth;
|
||||
loop->num_nodes++;
|
||||
@ -1092,9 +1038,8 @@ add_bb_to_loop (bb, loop)
|
||||
|
||||
/* Remove basic block BB from loops. */
|
||||
void
|
||||
remove_bb_from_loops (bb)
|
||||
basic_block bb;
|
||||
{
|
||||
remove_bb_from_loops (basic_block bb)
|
||||
{
|
||||
int i;
|
||||
struct loop *loop = bb->loop_father;
|
||||
|
||||
@ -1107,13 +1052,11 @@ remove_bb_from_loops (bb)
|
||||
|
||||
/* Finds nearest common ancestor in loop tree for given loops. */
|
||||
struct loop *
|
||||
find_common_loop (loop_s, loop_d)
|
||||
struct loop *loop_s;
|
||||
struct loop *loop_d;
|
||||
find_common_loop (struct loop *loop_s, struct loop *loop_d)
|
||||
{
|
||||
if (!loop_s) return loop_d;
|
||||
if (!loop_d) return loop_s;
|
||||
|
||||
|
||||
if (loop_s->depth < loop_d->depth)
|
||||
loop_d = loop_d->pred[loop_s->depth];
|
||||
else if (loop_s->depth > loop_d->depth)
|
||||
@ -1129,9 +1072,7 @@ find_common_loop (loop_s, loop_d)
|
||||
|
||||
/* Cancels the LOOP; it must be innermost one. */
|
||||
void
|
||||
cancel_loop (loops, loop)
|
||||
struct loops *loops;
|
||||
struct loop *loop;
|
||||
cancel_loop (struct loops *loops, struct loop *loop)
|
||||
{
|
||||
basic_block *bbs;
|
||||
unsigned i;
|
||||
@ -1156,9 +1097,7 @@ cancel_loop (loops, loop)
|
||||
|
||||
/* Cancels LOOP and all its subloops. */
|
||||
void
|
||||
cancel_loop_tree (loops, loop)
|
||||
struct loops *loops;
|
||||
struct loop *loop;
|
||||
cancel_loop_tree (struct loops *loops, struct loop *loop)
|
||||
{
|
||||
while (loop->inner)
|
||||
cancel_loop_tree (loops, loop->inner);
|
||||
@ -1173,8 +1112,7 @@ cancel_loop_tree (loops, loop)
|
||||
-- irreducible loops are correctly marked
|
||||
*/
|
||||
void
|
||||
verify_loop_structure (loops)
|
||||
struct loops *loops;
|
||||
verify_loop_structure (struct loops *loops)
|
||||
{
|
||||
unsigned *sizes, i, j;
|
||||
sbitmap irreds;
|
||||
@ -1283,7 +1221,7 @@ verify_loop_structure (loops)
|
||||
RESET_BIT (irreds, bb->index);
|
||||
for (e = bb->succ; e; e = e->succ_next)
|
||||
if (e->flags & EDGE_IRREDUCIBLE_LOOP)
|
||||
e->flags |= EDGE_ALL_FLAGS + 1;
|
||||
e->flags |= EDGE_ALL_FLAGS + 1;
|
||||
}
|
||||
|
||||
/* Recount it. */
|
||||
@ -1332,8 +1270,7 @@ verify_loop_structure (loops)
|
||||
|
||||
/* Returns latch edge of LOOP. */
|
||||
edge
|
||||
loop_latch_edge (loop)
|
||||
const struct loop *loop;
|
||||
loop_latch_edge (const struct loop *loop)
|
||||
{
|
||||
edge e;
|
||||
|
||||
@ -1345,8 +1282,7 @@ loop_latch_edge (loop)
|
||||
|
||||
/* Returns preheader edge of LOOP. */
|
||||
edge
|
||||
loop_preheader_edge (loop)
|
||||
const struct loop *loop;
|
||||
loop_preheader_edge (const struct loop *loop)
|
||||
{
|
||||
edge e;
|
||||
|
||||
@ -1355,4 +1291,3 @@ loop_preheader_edge (loop)
|
||||
|
||||
return e;
|
||||
}
|
||||
|
||||
|
105
gcc/cfgloop.h
105
gcc/cfgloop.h
@ -255,47 +255,42 @@ struct loops
|
||||
#define LOOP_ALL 15 /* All of the above */
|
||||
|
||||
/* Loop recognition. */
|
||||
extern int flow_loops_find PARAMS ((struct loops *, int flags));
|
||||
extern int flow_loops_update PARAMS ((struct loops *, int flags));
|
||||
extern void flow_loops_free PARAMS ((struct loops *));
|
||||
extern void flow_loops_dump PARAMS ((const struct loops *, FILE *,
|
||||
void (*)(const struct loop *,
|
||||
FILE *, int), int));
|
||||
extern void flow_loop_dump PARAMS ((const struct loop *, FILE *,
|
||||
void (*)(const struct loop *,
|
||||
FILE *, int), int));
|
||||
extern int flow_loop_scan PARAMS ((struct loops *,
|
||||
struct loop *, int));
|
||||
extern void flow_loop_free PARAMS ((struct loop *));
|
||||
void mark_irreducible_loops PARAMS ((struct loops *));
|
||||
extern int flow_loops_find (struct loops *, int flags);
|
||||
extern int flow_loops_update (struct loops *, int flags);
|
||||
extern void flow_loops_free (struct loops *);
|
||||
extern void flow_loops_dump (const struct loops *, FILE *,
|
||||
void (*)(const struct loop *, FILE *, int), int);
|
||||
extern void flow_loop_dump (const struct loop *, FILE *,
|
||||
void (*)(const struct loop *, FILE *, int), int);
|
||||
extern int flow_loop_scan (struct loops *, struct loop *, int);
|
||||
extern void flow_loop_free (struct loop *);
|
||||
void mark_irreducible_loops (struct loops *);
|
||||
|
||||
/* Loop datastructure manipulation/querying. */
|
||||
extern void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
|
||||
extern void flow_loop_tree_node_remove PARAMS ((struct loop *));
|
||||
extern bool flow_loop_outside_edge_p PARAMS ((const struct loop *, edge));
|
||||
extern bool flow_loop_nested_p PARAMS ((const struct loop *,
|
||||
const struct loop *));
|
||||
extern bool flow_bb_inside_loop_p PARAMS ((const struct loop *,
|
||||
const basic_block));
|
||||
extern struct loop * find_common_loop PARAMS ((struct loop *, struct loop *));
|
||||
extern int num_loop_insns PARAMS ((struct loop *));
|
||||
extern int average_num_loop_insns PARAMS ((struct loop *));
|
||||
extern void flow_loop_tree_node_add (struct loop *, struct loop *);
|
||||
extern void flow_loop_tree_node_remove (struct loop *);
|
||||
extern bool flow_loop_outside_edge_p (const struct loop *, edge);
|
||||
extern bool flow_loop_nested_p (const struct loop *, const struct loop *);
|
||||
extern bool flow_bb_inside_loop_p (const struct loop *, const basic_block);
|
||||
extern struct loop * find_common_loop (struct loop *, struct loop *);
|
||||
extern int num_loop_insns (struct loop *);
|
||||
extern int average_num_loop_insns (struct loop *);
|
||||
|
||||
/* Loops & cfg manipulation. */
|
||||
extern basic_block *get_loop_body PARAMS ((const struct loop *));
|
||||
extern edge *get_loop_exit_edges PARAMS ((const struct loop *, unsigned *));
|
||||
extern basic_block *get_loop_body (const struct loop *);
|
||||
extern edge *get_loop_exit_edges (const struct loop *, unsigned *);
|
||||
|
||||
extern edge loop_preheader_edge PARAMS ((const struct loop *));
|
||||
extern edge loop_latch_edge PARAMS ((const struct loop *));
|
||||
extern edge loop_preheader_edge (const struct loop *);
|
||||
extern edge loop_latch_edge (const struct loop *);
|
||||
|
||||
extern void add_bb_to_loop PARAMS ((basic_block, struct loop *));
|
||||
extern void remove_bb_from_loops PARAMS ((basic_block));
|
||||
extern void add_bb_to_loop (basic_block, struct loop *);
|
||||
extern void remove_bb_from_loops (basic_block);
|
||||
|
||||
extern void cancel_loop PARAMS ((struct loops *, struct loop *));
|
||||
extern void cancel_loop_tree PARAMS ((struct loops *, struct loop *));
|
||||
extern void cancel_loop (struct loops *, struct loop *);
|
||||
extern void cancel_loop_tree (struct loops *, struct loop *);
|
||||
|
||||
extern basic_block loop_split_edge_with PARAMS ((edge, rtx, struct loops *));
|
||||
extern int fix_loop_placement PARAMS ((struct loop *));
|
||||
extern basic_block loop_split_edge_with (edge, rtx, struct loops *);
|
||||
extern int fix_loop_placement (struct loop *);
|
||||
|
||||
enum
|
||||
{
|
||||
@ -303,42 +298,38 @@ enum
|
||||
CP_INSIDE_CFGLAYOUT = 2
|
||||
};
|
||||
|
||||
extern void create_preheaders PARAMS ((struct loops *, int));
|
||||
extern void force_single_succ_latches PARAMS ((struct loops *));
|
||||
extern void create_preheaders (struct loops *, int);
|
||||
extern void force_single_succ_latches (struct loops *);
|
||||
|
||||
extern void verify_loop_structure PARAMS ((struct loops *));
|
||||
extern void verify_loop_structure (struct loops *);
|
||||
|
||||
/* Loop analysis. */
|
||||
extern bool simple_loop_p PARAMS ((struct loops *, struct loop *,
|
||||
struct loop_desc *));
|
||||
extern rtx count_loop_iterations PARAMS ((struct loop_desc *, rtx, rtx));
|
||||
extern bool just_once_each_iteration_p PARAMS ((struct loops *,struct loop *,
|
||||
basic_block));
|
||||
extern unsigned expected_loop_iterations PARAMS ((const struct loop *));
|
||||
extern bool simple_loop_p (struct loops *, struct loop *, struct loop_desc *);
|
||||
extern rtx count_loop_iterations (struct loop_desc *, rtx, rtx);
|
||||
extern bool just_once_each_iteration_p (struct loops *,struct loop *,
|
||||
basic_block);
|
||||
extern unsigned expected_loop_iterations (const struct loop *);
|
||||
|
||||
/* Loop manipulation. */
|
||||
extern bool can_duplicate_loop_p PARAMS ((struct loop *loop));
|
||||
extern bool can_duplicate_loop_p (struct loop *loop);
|
||||
|
||||
#define DLTHE_FLAG_UPDATE_FREQ 1 /* Update frequencies in
|
||||
duplicate_loop_to_header_edge. */
|
||||
|
||||
extern int duplicate_loop_to_header_edge PARAMS ((struct loop *, edge,
|
||||
struct loops *, unsigned,
|
||||
sbitmap, edge, edge *,
|
||||
unsigned *, int));
|
||||
extern struct loop *loopify PARAMS ((struct loops *, edge,
|
||||
edge, basic_block));
|
||||
extern void unloop PARAMS ((struct loops *, struct loop *));
|
||||
extern bool remove_path PARAMS ((struct loops *, edge));
|
||||
extern edge split_loop_bb PARAMS ((struct loops *, basic_block,
|
||||
rtx));
|
||||
extern int duplicate_loop_to_header_edge (struct loop *, edge, struct loops *,
|
||||
unsigned, sbitmap, edge, edge *,
|
||||
unsigned *, int);
|
||||
extern struct loop *loopify (struct loops *, edge, edge, basic_block);
|
||||
extern void unloop (struct loops *, struct loop *);
|
||||
extern bool remove_path (struct loops *, edge);
|
||||
extern edge split_loop_bb (struct loops *, basic_block, rtx);
|
||||
|
||||
/* Loop optimizer initialization. */
|
||||
extern struct loops *loop_optimizer_init PARAMS ((FILE *));
|
||||
extern void loop_optimizer_finalize PARAMS ((struct loops *, FILE *));
|
||||
extern struct loops *loop_optimizer_init (FILE *);
|
||||
extern void loop_optimizer_finalize (struct loops *, FILE *);
|
||||
|
||||
/* Optimization passes. */
|
||||
extern void unswitch_loops PARAMS ((struct loops *));
|
||||
extern void unswitch_loops (struct loops *);
|
||||
|
||||
enum
|
||||
{
|
||||
@ -347,4 +338,4 @@ enum
|
||||
UAP_UNROLL_ALL = 4 /* Enables peeling of all loops. */
|
||||
};
|
||||
|
||||
extern void unroll_and_peel_loops PARAMS ((struct loops *, int));
|
||||
extern void unroll_and_peel_loops (struct loops *, int);
|
||||
|
@ -1,5 +1,5 @@
|
||||
/* Natural loop analysis code for GNU compiler.
|
||||
Copyright (C) 2002 Free Software Foundation, Inc.
|
||||
Copyright (C) 2002, 2003 Free Software Foundation, Inc.
|
||||
|
||||
This file is part of GCC.
|
||||
|
||||
@ -30,33 +30,27 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
|
||||
#include "output.h"
|
||||
|
||||
struct unmark_altered_insn_data;
|
||||
static void unmark_altered PARAMS ((rtx, rtx, regset));
|
||||
static void blocks_invariant_registers PARAMS ((basic_block *, int, regset));
|
||||
static void unmark_altered_insn PARAMS ((rtx, rtx, struct unmark_altered_insn_data *));
|
||||
static void blocks_single_set_registers PARAMS ((basic_block *, int, rtx *));
|
||||
static int invariant_rtx_wrto_regs_p_helper PARAMS ((rtx *, regset));
|
||||
static bool invariant_rtx_wrto_regs_p PARAMS ((rtx, regset));
|
||||
static rtx test_for_iteration PARAMS ((struct loop_desc *desc,
|
||||
unsigned HOST_WIDE_INT));
|
||||
static bool constant_iterations PARAMS ((struct loop_desc *,
|
||||
unsigned HOST_WIDE_INT *,
|
||||
bool *));
|
||||
static bool simple_loop_exit_p PARAMS ((struct loops *, struct loop *,
|
||||
edge, regset, rtx *,
|
||||
struct loop_desc *));
|
||||
static rtx variable_initial_value PARAMS ((rtx, regset, rtx, rtx *));
|
||||
static rtx variable_initial_values PARAMS ((edge, rtx));
|
||||
static bool simple_condition_p PARAMS ((struct loop *, rtx,
|
||||
regset, struct loop_desc *));
|
||||
static basic_block simple_increment PARAMS ((struct loops *, struct loop *,
|
||||
rtx *, struct loop_desc *));
|
||||
static void unmark_altered (rtx, rtx, regset);
|
||||
static void blocks_invariant_registers (basic_block *, int, regset);
|
||||
static void unmark_altered_insn (rtx, rtx, struct unmark_altered_insn_data *);
|
||||
static void blocks_single_set_registers (basic_block *, int, rtx *);
|
||||
static int invariant_rtx_wrto_regs_p_helper (rtx *, regset);
|
||||
static bool invariant_rtx_wrto_regs_p (rtx, regset);
|
||||
static rtx test_for_iteration (struct loop_desc *desc, unsigned HOST_WIDE_INT);
|
||||
static bool constant_iterations (struct loop_desc *, unsigned HOST_WIDE_INT *,
|
||||
bool *);
|
||||
static bool simple_loop_exit_p (struct loops *, struct loop *, edge, regset,
|
||||
rtx *, struct loop_desc *);
|
||||
static rtx variable_initial_value (rtx, regset, rtx, rtx *);
|
||||
static rtx variable_initial_values (edge, rtx);
|
||||
static bool simple_condition_p (struct loop *, rtx, regset,
|
||||
struct loop_desc *);
|
||||
static basic_block simple_increment (struct loops *, struct loop *, rtx *,
|
||||
struct loop_desc *);
|
||||
|
||||
/* Checks whether BB is executed exactly once in each LOOP iteration. */
|
||||
bool
|
||||
just_once_each_iteration_p (loops, loop, bb)
|
||||
struct loops *loops;
|
||||
struct loop *loop;
|
||||
basic_block bb;
|
||||
just_once_each_iteration_p (struct loops *loops, struct loop *loop, basic_block bb)
|
||||
{
|
||||
/* It must be executed at least once each iteration. */
|
||||
if (!dominated_by_p (loops->cfg.dom, loop->latch, bb))
|
||||
@ -76,10 +70,7 @@ just_once_each_iteration_p (loops, loop, bb)
|
||||
|
||||
/* Unmarks modified registers; helper to blocks_invariant_registers. */
|
||||
static void
|
||||
unmark_altered (what, by, regs)
|
||||
rtx what;
|
||||
rtx by ATTRIBUTE_UNUSED;
|
||||
regset regs;
|
||||
unmark_altered (rtx what, rtx by ATTRIBUTE_UNUSED, regset regs)
|
||||
{
|
||||
if (GET_CODE (what) == SUBREG)
|
||||
what = SUBREG_REG (what);
|
||||
@ -90,10 +81,7 @@ unmark_altered (what, by, regs)
|
||||
|
||||
/* Marks registers that are invariant inside blocks BBS. */
|
||||
static void
|
||||
blocks_invariant_registers (bbs, nbbs, regs)
|
||||
basic_block *bbs;
|
||||
int nbbs;
|
||||
regset regs;
|
||||
blocks_invariant_registers (basic_block *bbs, int nbbs, regset regs)
|
||||
{
|
||||
rtx insn;
|
||||
int i;
|
||||
@ -118,10 +106,8 @@ struct unmark_altered_insn_data
|
||||
};
|
||||
|
||||
static void
|
||||
unmark_altered_insn (what, by, data)
|
||||
rtx what;
|
||||
rtx by ATTRIBUTE_UNUSED;
|
||||
struct unmark_altered_insn_data *data;
|
||||
unmark_altered_insn (rtx what, rtx by ATTRIBUTE_UNUSED,
|
||||
struct unmark_altered_insn_data *data)
|
||||
{
|
||||
int rn;
|
||||
|
||||
@ -138,10 +124,7 @@ unmark_altered_insn (what, by, data)
|
||||
/* Marks registers that have just single simple set in BBS; the relevant
|
||||
insn is returned in REGS. */
|
||||
static void
|
||||
blocks_single_set_registers (bbs, nbbs, regs)
|
||||
basic_block *bbs;
|
||||
int nbbs;
|
||||
rtx *regs;
|
||||
blocks_single_set_registers (basic_block *bbs, int nbbs, rtx *regs)
|
||||
{
|
||||
rtx insn;
|
||||
int i;
|
||||
@ -180,9 +163,7 @@ blocks_single_set_registers (bbs, nbbs, regs)
|
||||
|
||||
/* Helper for invariant_rtx_wrto_regs_p. */
|
||||
static int
|
||||
invariant_rtx_wrto_regs_p_helper (expr, invariant_regs)
|
||||
rtx *expr;
|
||||
regset invariant_regs;
|
||||
invariant_rtx_wrto_regs_p_helper (rtx *expr, regset invariant_regs)
|
||||
{
|
||||
switch (GET_CODE (*expr))
|
||||
{
|
||||
@ -216,9 +197,7 @@ invariant_rtx_wrto_regs_p_helper (expr, invariant_regs)
|
||||
|
||||
/* Checks that EXPR is invariant provided that INVARIANT_REGS are invariant. */
|
||||
static bool
|
||||
invariant_rtx_wrto_regs_p (expr, invariant_regs)
|
||||
rtx expr;
|
||||
regset invariant_regs;
|
||||
invariant_rtx_wrto_regs_p (rtx expr, regset invariant_regs)
|
||||
{
|
||||
return !for_each_rtx (&expr, (rtx_function) invariant_rtx_wrto_regs_p_helper,
|
||||
invariant_regs);
|
||||
@ -228,11 +207,8 @@ invariant_rtx_wrto_regs_p (expr, invariant_regs)
|
||||
is register and the other one is invariant in the LOOP. Fills var, lim
|
and cond fields in DESC. */
static bool
simple_condition_p (loop, condition, invariant_regs, desc)
struct loop *loop ATTRIBUTE_UNUSED;
rtx condition;
regset invariant_regs;
struct loop_desc *desc;
simple_condition_p (struct loop *loop ATTRIBUTE_UNUSED, rtx condition,
regset invariant_regs, struct loop_desc *desc)
{
rtx op0, op1;

@@ -262,7 +238,7 @@ simple_condition_p (loop, condition, invariant_regs, desc)
/* One of operands must be a simple register. */
op0 = XEXP (condition, 0);
op1 = XEXP (condition, 1);

/* One of operands must be invariant. */
if (invariant_rtx_wrto_regs_p (op0, invariant_regs))
{

@@ -296,11 +272,8 @@ simple_condition_p (loop, condition, invariant_regs, desc)
iteration. Fills in DESC->stride and returns block in that DESC->var is
modified. */
static basic_block
simple_increment (loops, loop, simple_increment_regs, desc)
struct loops *loops;
struct loop *loop;
rtx *simple_increment_regs;
struct loop_desc *desc;
simple_increment (struct loops *loops, struct loop *loop,
rtx *simple_increment_regs, struct loop_desc *desc)
{
rtx mod_insn, set, set_src, set_add;
basic_block mod_bb;

@@ -344,11 +317,7 @@ simple_increment (loops, loop, simple_increment_regs, desc)
wrto INVARIANT_REGS. If SET_INSN is not NULL, insn in that var is set is
placed here. */
static rtx
variable_initial_value (insn, invariant_regs, var, set_insn)
rtx insn;
regset invariant_regs;
rtx var;
rtx *set_insn;
variable_initial_value (rtx insn, regset invariant_regs, rtx var, rtx *set_insn)
{
basic_block bb;
rtx set;

@@ -373,7 +342,7 @@ variable_initial_value (insn, invariant_regs, var, set_insn)
rtx set_dest;
rtx val;
rtx note;

set = single_set (insn);
if (!set)
return NULL;

@@ -407,9 +376,7 @@ variable_initial_value (insn, invariant_regs, var, set_insn)

/* Returns list of definitions of initial value of VAR at Edge. */
static rtx
variable_initial_values (e, var)
edge e;
rtx var;
variable_initial_values (edge e, rtx var)
{
rtx set_insn, list;
regset invariant_regs;

@@ -437,10 +404,8 @@ variable_initial_values (e, var)
/* Counts constant number of iterations of the loop described by DESC;
returns false if impossible. */
static bool
constant_iterations (desc, niter, may_be_zero)
struct loop_desc *desc;
unsigned HOST_WIDE_INT *niter;
bool *may_be_zero;
constant_iterations (struct loop_desc *desc, unsigned HOST_WIDE_INT *niter,
bool *may_be_zero)
{
rtx test, expr;
rtx ainit, alim;

@@ -485,21 +450,18 @@ constant_iterations (desc, niter, may_be_zero)

/* Return RTX expression representing number of iterations of loop as bounded
by test described by DESC (in the case loop really has multiple exit
edges, fewer iterations may happen in the practice).
edges, fewer iterations may happen in the practice).

Return NULL if it is unknown. Additionally the value may be invalid for
paradoxical loop (lets define paradoxical loops as loops whose test is
failing at -1th iteration, for instance "for (i=5;i<1;i++);").

These cases needs to be either cared by copying the loop test in the front
of loop or keeping the test in first iteration of loop.

When INIT/LIM are set, they are used instead of var/lim of DESC. */
rtx
count_loop_iterations (desc, init, lim)
struct loop_desc *desc;
rtx init;
rtx lim;
count_loop_iterations (struct loop_desc *desc, rtx init, rtx lim)
{
enum rtx_code cond = desc->cond;
rtx stride = desc->stride;

@@ -618,9 +580,7 @@ count_loop_iterations (desc, init, lim)
described of DESC at given iteration of loop. */

static rtx
test_for_iteration (desc, iter)
struct loop_desc *desc;
unsigned HOST_WIDE_INT iter;
test_for_iteration (struct loop_desc *desc, unsigned HOST_WIDE_INT iter)
{
enum rtx_code cond = desc->cond;
rtx exp = XEXP (desc->var_alts, 0);

@@ -661,13 +621,9 @@ test_for_iteration (desc, iter)
description joined to it in in DESC. INVARIANT_REGS and SINGLE_SET_REGS
are results of blocks_{invariant,single_set}_regs over BODY. */
static bool
simple_loop_exit_p (loops, loop, exit_edge, invariant_regs, single_set_regs, desc)
struct loops *loops;
struct loop *loop;
edge exit_edge;
struct loop_desc *desc;
regset invariant_regs;
rtx *single_set_regs;
simple_loop_exit_p (struct loops *loops, struct loop *loop, edge exit_edge,
regset invariant_regs, rtx *single_set_regs,
struct loop_desc *desc)
{
basic_block mod_bb, exit_bb;
int fallthru_out;

@@ -729,10 +685,7 @@ simple_loop_exit_p (loops, loop, exit_edge, invariant_regs, single_set_regs, des
/* Tests whether LOOP is simple for loop. Returns simple loop description
in DESC. */
bool
simple_loop_p (loops, loop, desc)
struct loops *loops;
struct loop *loop;
struct loop_desc *desc;
simple_loop_p (struct loops *loops, struct loop *loop, struct loop_desc *desc)
{
unsigned i;
basic_block *body;

@@ -743,7 +696,7 @@ simple_loop_p (loops, loop, desc)
regset_head invariant_regs_head;
rtx *single_set_regs;
int n_branches;

body = get_loop_body (loop);

invariant_regs = INITIALIZE_REG_SET (invariant_regs_head);

@@ -825,8 +778,7 @@ simple_loop_p (loops, loop, desc)
each cycle, we want to mark blocks that belong directly to innermost
loop containing the whole cycle. */
void
mark_irreducible_loops (loops)
struct loops *loops;
mark_irreducible_loops (struct loops *loops)
{
int *dfs_in, *closed, *mr, *mri, *n_edges, *stack;
unsigned i;

@@ -1019,8 +971,7 @@ next:;

/* Counts number of insns inside LOOP. */
int
num_loop_insns (loop)
struct loop *loop;
num_loop_insns (struct loop *loop)
{
basic_block *bbs, bb;
unsigned i, ninsns = 0;

@@ -1036,14 +987,13 @@ num_loop_insns (loop)
ninsns++;
}
free(bbs);

return ninsns;
}

/* Counts number of insns executed on average per iteration LOOP. */
int
average_num_loop_insns (loop)
struct loop *loop;
average_num_loop_insns (struct loop *loop)
{
basic_block *bbs, bb;
unsigned i, binsns, ninsns, ratio;

@@ -1066,7 +1016,7 @@ average_num_loop_insns (loop)
ninsns += binsns * ratio;
}
free(bbs);

ninsns /= BB_FREQ_MAX;
if (!ninsns)
ninsns = 1; /* To avoid division by zero. */

@@ -1078,8 +1028,7 @@ average_num_loop_insns (loop)
Compute upper bound on number of iterations in case they do not fit integer
to help loop peeling heuristics. Use exact counts if at all possible. */
unsigned
expected_loop_iterations (loop)
const struct loop *loop;
expected_loop_iterations (const struct loop *loop)
{
edge e;

@@ -29,44 +29,35 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
#include "cfglayout.h"
#include "output.h"

static struct loop * duplicate_loop PARAMS ((struct loops *,
struct loop *, struct loop *));
static void duplicate_subloops PARAMS ((struct loops *, struct loop *,
struct loop *));
static void copy_loops_to PARAMS ((struct loops *, struct loop **,
int, struct loop *));
static void loop_redirect_edge PARAMS ((edge, basic_block));
static bool loop_delete_branch_edge PARAMS ((edge, int));
static void copy_bbs PARAMS ((basic_block *, int, edge,
edge, basic_block **,
struct loops *, edge *,
edge *, int));
static void remove_bbs PARAMS ((dominance_info, basic_block *,
int));
static bool rpe_enum_p PARAMS ((basic_block, void *));
static int find_path PARAMS ((edge, dominance_info,
basic_block **));
static bool alp_enum_p PARAMS ((basic_block, void *));
static void add_loop PARAMS ((struct loops *, struct loop *));
static void fix_loop_placements PARAMS ((struct loop *));
static bool fix_bb_placement PARAMS ((struct loops *, basic_block));
static void fix_bb_placements PARAMS ((struct loops *, basic_block));
static void place_new_loop PARAMS ((struct loops *, struct loop *));
static void scale_loop_frequencies PARAMS ((struct loop *, int, int));
static void scale_bbs_frequencies PARAMS ((basic_block *, int, int, int));
static void record_exit_edges PARAMS ((edge, basic_block *, int,
edge *, unsigned *, int));
static basic_block create_preheader PARAMS ((struct loop *, dominance_info,
int));
static void fix_irreducible_loops PARAMS ((basic_block));
static struct loop * duplicate_loop (struct loops *, struct loop *,
struct loop *);
static void duplicate_subloops (struct loops *, struct loop *, struct loop *);
static void copy_loops_to (struct loops *, struct loop **, int,
struct loop *);
static void loop_redirect_edge (edge, basic_block);
static bool loop_delete_branch_edge (edge, int);
static void copy_bbs (basic_block *, int, edge, edge, basic_block **,
struct loops *, edge *, edge *, int);
static void remove_bbs (dominance_info, basic_block *, int);
static bool rpe_enum_p (basic_block, void *);
static int find_path (edge, dominance_info, basic_block **);
static bool alp_enum_p (basic_block, void *);
static void add_loop (struct loops *, struct loop *);
static void fix_loop_placements (struct loop *);
static bool fix_bb_placement (struct loops *, basic_block);
static void fix_bb_placements (struct loops *, basic_block);
static void place_new_loop (struct loops *, struct loop *);
static void scale_loop_frequencies (struct loop *, int, int);
static void scale_bbs_frequencies (basic_block *, int, int, int);
static void record_exit_edges (edge, basic_block *, int, edge *, unsigned *,
int);
static basic_block create_preheader (struct loop *, dominance_info, int);
static void fix_irreducible_loops (basic_block);

/* Splits basic block BB after INSN, returns created edge. Updates loops
and dominators. */
edge
split_loop_bb (loops, bb, insn)
struct loops *loops;
basic_block bb;
rtx insn;
split_loop_bb (struct loops *loops, basic_block bb, rtx insn)
{
edge e;
basic_block *dom_bbs;

@@ -98,9 +89,7 @@ struct rpe_data
};

static bool
rpe_enum_p (bb, data)
basic_block bb;
void *data;
rpe_enum_p (basic_block bb, void *data)
{
struct rpe_data *rpe = data;
return dominated_by_p (rpe->doms, bb, rpe->dom);

@@ -109,10 +98,7 @@ rpe_enum_p (bb, data)
/* Remove basic blocks BBS from loop structure and dominance info,
and delete them afterwards. */
static void
remove_bbs (dom, bbs, nbbs)
dominance_info dom;
basic_block *bbs;
int nbbs;
remove_bbs (dominance_info dom, basic_block *bbs, int nbbs)
{
int i;

@@ -131,10 +117,7 @@ remove_bbs (dom, bbs, nbbs)
alter anything by this function). The number of basic blocks in the
path is returned. */
static int
find_path (e, doms, bbs)
edge e;
dominance_info doms;
basic_block **bbs;
find_path (edge e, dominance_info doms, basic_block **bbs)
{
struct rpe_data rpe;

@@ -157,9 +140,7 @@ find_path (e, doms, bbs)
false if the placement of BB was already correct (provided that placements
of its successors are correct). */
static bool
fix_bb_placement (loops, bb)
struct loops *loops;
basic_block bb;
fix_bb_placement (struct loops *loops, basic_block bb)
{
edge e;
struct loop *loop = loops->tree_root, *act;

@@ -195,9 +176,7 @@ fix_bb_placement (loops, bb)
to this change; the condition for them is simmilar, except that instead of
successors we consider edges coming out of the loops. */
static void
fix_bb_placements (loops, from)
struct loops *loops;
basic_block from;
fix_bb_placements (struct loops *loops, basic_block from)
{
sbitmap in_queue;
basic_block *queue, *qtop, *qbeg, *qend;

@@ -257,7 +236,7 @@ fix_bb_placements (loops, from)
if (TEST_BIT (in_queue, pred->index))
continue;

/* If it is subloop, then it either was not moved, or
/* If it is subloop, then it either was not moved, or
the path up the loop tree from base_loop do not contain
it. */
nca = find_common_loop (pred->loop_father, base_loop);

@@ -290,8 +269,7 @@ fix_bb_placements (loops, from)
mo longer be part irreducible loop. Fix it and proceed recursively
for its successors if needed. */
static void
fix_irreducible_loops (from)
basic_block from;
fix_irreducible_loops (basic_block from)
{
basic_block bb;
basic_block *stack;

@@ -334,7 +312,7 @@ fix_irreducible_loops (from)
for (e = bb->succ; e; e = e->succ_next)
edges[n_edges++] = e;
}

for (i = 0; i < n_edges; i++)
if (e->flags & EDGE_IRREDUCIBLE_LOOP)
{

@@ -343,10 +321,10 @@ fix_irreducible_loops (from)

e->flags &= ~EDGE_IRREDUCIBLE_LOOP;
if (TEST_BIT (on_stack, e->dest->index))
continue;
continue;

SET_BIT (on_stack, e->dest->index);
stack[stack_top++] = e->dest;
stack[stack_top++] = e->dest;
}
free (edges);
}

@@ -360,9 +338,7 @@ fix_irreducible_loops (from)
we were able to remove the path, false otherwise (and nothing is affected
then). */
bool
remove_path (loops, e)
struct loops *loops;
edge e;
remove_path (struct loops *loops, edge e)
{
edge ae;
basic_block *rem_bbs, *bord_bbs, *dom_bbs, from, bb;

@@ -387,7 +363,7 @@ remove_path (loops, e)
&& dominated_by_p (loops->cfg.dom,
e->src->loop_father->latch, e->dest))
unloop (loops, e->src->loop_father);

/* Identify the path. */
nrem = find_path (e, loops->cfg.dom, &rem_bbs);

@@ -466,9 +442,7 @@ remove_path (loops, e)

/* Predicate for enumeration in add_loop. */
static bool
alp_enum_p (bb, alp_header)
basic_block bb;
void *alp_header;
alp_enum_p (basic_block bb, void *alp_header)
{
return bb != (basic_block) alp_header;
}

@@ -476,13 +450,11 @@ alp_enum_p (bb, alp_header)
/* Given LOOP structure with filled header and latch, find the body of the
corresponding loop and add it to LOOPS tree. */
static void
add_loop (loops, loop)
struct loops *loops;
struct loop *loop;
add_loop (struct loops *loops, struct loop *loop)
{
basic_block *bbs;
int i, n;

/* Add it to loop structure. */
place_new_loop (loops, loop);
loop->level = 1;

@@ -502,11 +474,7 @@ add_loop (loops, loop)
/* Multiply all frequencies of basic blocks in array BBS of lenght NBBS
by NUM/DEN. */
static void
scale_bbs_frequencies (bbs, nbbs, num, den)
basic_block *bbs;
int nbbs;
int num;
int den;
scale_bbs_frequencies (basic_block *bbs, int nbbs, int num, int den)
{
int i;
edge e;

@@ -522,10 +490,7 @@ scale_bbs_frequencies (bbs, nbbs, num, den)

/* Multiply all frequencies in LOOP by NUM/DEN. */
static void
scale_loop_frequencies (loop, num, den)
struct loop *loop;
int num;
int den;
scale_loop_frequencies (struct loop *loop, int num, int den)
{
basic_block *bbs;

@@ -543,11 +508,7 @@ scale_loop_frequencies (loop, num, den)
SWITCH_BB->succ->succ_next to original destination of HEADER_EDGE.
Returns newly created loop. */
struct loop *
loopify (loops, latch_edge, header_edge, switch_bb)
struct loops *loops;
edge latch_edge;
edge header_edge;
basic_block switch_bb;
loopify (struct loops *loops, edge latch_edge, edge header_edge, basic_block switch_bb)
{
basic_block succ_bb = latch_edge->dest;
basic_block pred_bb = header_edge->src;

@@ -634,9 +595,7 @@ loopify (loops, latch_edge, header_edge, switch_bb)
the LOOP was removed. After this function, original loop latch will
have no successor, which caller is expected to fix somehow. */
void
unloop (loops, loop)
struct loops *loops;
struct loop *loop;
unloop (struct loops *loops, struct loop *loop)
{
basic_block *body;
struct loop *ploop;

@@ -694,8 +653,7 @@ unloop (loops, loop)
FATHER, and set it as outer loop of LOOP. Return 1 if placement of
LOOP changed. */
int
fix_loop_placement (loop)
struct loop *loop;
fix_loop_placement (struct loop *loop)
{
basic_block *body;
unsigned i;

@@ -729,8 +687,7 @@ fix_loop_placement (loop)
It is used in case when we removed some edges coming out of LOOP, which
may cause the right placement of LOOP inside loop tree to change. */
static void
fix_loop_placements (loop)
struct loop *loop;
fix_loop_placements (struct loop *loop)
{
struct loop *outer;

@@ -745,9 +702,7 @@ fix_loop_placements (loop)

/* Creates place for a new LOOP in LOOPS structure. */
static void
place_new_loop (loops, loop)
struct loops *loops;
struct loop *loop;
place_new_loop (struct loops *loops, struct loop *loop)
{
loops->parray =
xrealloc (loops->parray, (loops->num + 1) * sizeof (struct loop *));

@@ -759,10 +714,7 @@ place_new_loop (loops, loop)
/* Copies copy of LOOP as subloop of TARGET loop, placing newly
created loop into LOOPS structure. */
static struct loop *
duplicate_loop (loops, loop, target)
struct loops *loops;
struct loop *loop;
struct loop *target;
duplicate_loop (struct loops *loops, struct loop *loop, struct loop *target)
{
struct loop *cloop;
cloop = xcalloc (1, sizeof (struct loop));

@@ -782,11 +734,8 @@ duplicate_loop (loops, loop, target)

/* Copies structure of subloops of LOOP into TARGET loop, placing
newly created loops into loop tree stored in LOOPS. */
static void
duplicate_subloops (loops, loop, target)
struct loops *loops;
struct loop *loop;
struct loop *target;
static void
duplicate_subloops (struct loops *loops, struct loop *loop, struct loop *target)
{
struct loop *aloop, *cloop;

@@ -799,12 +748,8 @@ duplicate_subloops (loops, loop, target)

/* Copies structure of subloops of N loops, stored in array COPIED_LOOPS,
into TARGET loop, placing newly created loops into loop tree LOOPS. */
static void
copy_loops_to (loops, copied_loops, n, target)
struct loops *loops;
struct loop **copied_loops;
int n;
struct loop *target;
static void
copy_loops_to (struct loops *loops, struct loop **copied_loops, int n, struct loop *target)
{
struct loop *aloop;
int i;

@@ -818,9 +763,7 @@ copy_loops_to (loops, copied_loops, n, target)

/* Redirects edge E to basic block DEST. */
static void
loop_redirect_edge (e, dest)
edge e;
basic_block dest;
loop_redirect_edge (edge e, basic_block dest)
{
if (e->dest == dest)
return;

@@ -831,9 +774,7 @@ loop_redirect_edge (e, dest)
/* Deletes edge E from a branch if possible. Unless REALLY_DELETE is set,
just test whether it is possible to remove the edge. */
static bool
loop_delete_branch_edge (e, really_delete)
edge e;
int really_delete;
loop_delete_branch_edge (edge e, int really_delete)
{
basic_block src = e->src;
int irr;

@@ -861,7 +802,7 @@ loop_delete_branch_edge (e, really_delete)

/* Redirecting behaves wrongly wrto this flag. */
irr = snd->flags & EDGE_IRREDUCIBLE_LOOP;

if (!redirect_edge_and_branch (e, newdest))
return false;
src->succ->flags &= ~EDGE_IRREDUCIBLE_LOOP;

@@ -904,16 +845,9 @@ loop_delete_branch_edge (e, really_delete)
original entry block to first block in peeled copy.
*/
static void
copy_bbs (bbs, n, entry, latch_edge, new_bbs, loops, header_edge, copy_header_edge, add_irreducible_flag)
basic_block *bbs;
int n;
edge entry;
edge latch_edge;
basic_block **new_bbs;
struct loops *loops;
edge *header_edge;
edge *copy_header_edge;
int add_irreducible_flag;
copy_bbs (basic_block *bbs, int n, edge entry, edge latch_edge,
basic_block **new_bbs, struct loops *loops, edge *header_edge,
edge *copy_header_edge, int add_irreducible_flag)
{
int i;
basic_block bb, new_bb, header = entry->dest, dom_bb;

@@ -975,7 +909,7 @@ copy_bbs (bbs, n, entry, latch_edge, new_bbs, loops, header_edge, copy_header_ed
basic_block src = e->src;

e_pred = e->pred_next;

if (!RBI (src)->duplicated)
continue;

@@ -1007,8 +941,7 @@ copy_bbs (bbs, n, entry, latch_edge, new_bbs, loops, header_edge, copy_header_ed

/* Check whether LOOP's body can be duplicated. */
bool
can_duplicate_loop_p (loop)
struct loop *loop;
can_duplicate_loop_p (struct loop *loop)
{
basic_block *bbs;
unsigned i;

@@ -1047,13 +980,8 @@ can_duplicate_loop_p (loop)
into TO_REMOVE array that must be large enough to hold them all; their
number is returned in N_TO_REMOVE. */
static void
record_exit_edges (orig, bbs, nbbs, to_remove, n_to_remove, is_orig)
edge orig;
basic_block *bbs;
int nbbs;
edge *to_remove;
unsigned *n_to_remove;
int is_orig;
record_exit_edges (edge orig, basic_block *bbs, int nbbs, edge *to_remove,
unsigned int *n_to_remove, int is_orig)
{
sbitmap my_blocks;
int i;

@@ -1106,17 +1034,10 @@ record_exit_edges (orig, bbs, nbbs, to_remove, n_to_remove, is_orig)
other copies are numbered in order given by control flow through them)
into TO_REMOVE array. Returns false if duplication is impossible. */
int
duplicate_loop_to_header_edge (loop, e, loops, ndupl, wont_exit, orig,
to_remove, n_to_remove, flags)
struct loop *loop;
edge e;
struct loops *loops;
unsigned ndupl;
sbitmap wont_exit;
edge orig;
edge *to_remove;
unsigned *n_to_remove;
int flags;
duplicate_loop_to_header_edge (struct loop *loop, edge e, struct loops *loops,
unsigned int ndupl, sbitmap wont_exit,
edge orig, edge *to_remove,
unsigned int *n_to_remove, int flags)
{
struct loop *target, *aloop;
struct loop **orig_loops;

@@ -1184,7 +1105,7 @@ duplicate_loop_to_header_edge (loop, e, loops, ndupl, wont_exit, orig,
scale_step = xmalloc (ndupl * sizeof (int));

for (i = 1; i <= ndupl; i++)
scale_step[i - 1] = TEST_BIT (wont_exit, i)
scale_step[i - 1] = TEST_BIT (wont_exit, i)
? prob_pass_wont_exit
: prob_pass_thru;

@@ -1230,7 +1151,7 @@ duplicate_loop_to_header_edge (loop, e, loops, ndupl, wont_exit, orig,
orig_loops[i] = aloop;

loop->copy = target;

/* Original basic blocks. */
n = loop->num_nodes;

@@ -1244,7 +1165,7 @@ duplicate_loop_to_header_edge (loop, e, loops, ndupl, wont_exit, orig,
/* Record exit edges in original loop body. */
if (TEST_BIT (wont_exit, 0))
record_exit_edges (orig, bbs, n, to_remove, n_to_remove, true);

for (j = 0; j < ndupl; j++)
{
/* Copy loops. */

@@ -1259,7 +1180,7 @@ duplicate_loop_to_header_edge (loop, e, loops, ndupl, wont_exit, orig,
/* Record exit edges in this copy. */
if (TEST_BIT (wont_exit, j + 1))
record_exit_edges (orig, new_bbs, n, to_remove, n_to_remove, false);

/* Set counts and frequencies. */
for (i = 0; i < n; i++)
{

@@ -1270,7 +1191,7 @@ duplicate_loop_to_header_edge (loop, e, loops, ndupl, wont_exit, orig,
{
new_bb->count = RDIV (scale_act * bb->count, REG_BR_PROB_BASE);
new_bb->frequency = RDIV (scale_act * bb->frequency,
REG_BR_PROB_BASE);
REG_BR_PROB_BASE);
}
else
{

@@ -1279,7 +1200,7 @@ duplicate_loop_to_header_edge (loop, e, loops, ndupl, wont_exit, orig,
}

for (ae = new_bb->succ; ae; ae = ae->succ_next)
ae->count = RDIV (new_bb->count * ae->probability,
ae->count = RDIV (new_bb->count * ae->probability,
REG_BR_PROB_BASE);
}
if (flags & DLTHE_FLAG_UPDATE_FREQ)

@@ -1290,7 +1211,7 @@ duplicate_loop_to_header_edge (loop, e, loops, ndupl, wont_exit, orig,
memcpy (first_active, new_bbs, n * sizeof (basic_block));
first_active_latch = RBI (latch)->copy;
}

free (new_bbs);

/* Original loop header is dominated by latch copy

@@ -1306,7 +1227,7 @@ duplicate_loop_to_header_edge (loop, e, loops, ndupl, wont_exit, orig,
}
}
/* Now handle original loop. */

/* Update edge counts. */
if (flags & DLTHE_FLAG_UPDATE_FREQ)
{

@@ -1353,10 +1274,7 @@ duplicate_loop_to_header_edge (loop, e, loops, ndupl, wont_exit, orig,
entry; otherwise we also force preheader block to have only one successor.
The function also updates dominators stored in DOM. */
static basic_block
create_preheader (loop, dom, flags)
struct loop *loop;
dominance_info dom;
int flags;
create_preheader (struct loop *loop, dominance_info dom, int flags)
{
edge e, fallthru;
basic_block dummy;

@@ -1404,7 +1322,7 @@ create_preheader (loop, dom, flags)
ploop->latch = fallthru->dest;

add_to_dominance_info (dom, fallthru->dest);

/* Redirect edges. */
for (e = dummy->pred; e; e = e->pred_next)
{

@@ -1442,9 +1360,7 @@ create_preheader (loop, dom, flags)
/* Create preheaders for each loop from loop tree stored in LOOPS; for meaning
of FLAGS see create_preheader. */
void
create_preheaders (loops, flags)
struct loops *loops;
int flags;
create_preheaders (struct loops *loops, int flags)
{
unsigned i;
for (i = 1; i < loops->num; i++)

@@ -1455,8 +1371,7 @@ create_preheaders (loops, flags)
/* Forces all loop latches of loops from loop tree LOOPS to have only single
successor. */
void
force_single_succ_latches (loops)
struct loops *loops;
force_single_succ_latches (struct loops *loops)
{
unsigned i;
struct loop *loop;

@@ -1467,7 +1382,7 @@ force_single_succ_latches (loops)
loop = loops->parray[i];
if (!loop->latch->succ->succ_next)
continue;

for (e = loop->header->pred; e->src != loop->latch; e = e->pred_next)
continue;

@@ -1481,15 +1396,12 @@ force_single_succ_latches (loops)
be ok after this function. The created block is placed on correct place
in LOOPS structure and its dominator is set. */
basic_block
loop_split_edge_with (e, insns, loops)
edge e;
rtx insns;
struct loops *loops;
loop_split_edge_with (edge e, rtx insns, struct loops *loops)
{
basic_block src, dest, new_bb;
struct loop *loop_c;
edge new_e;

src = e->src;
dest = e->dest;

@@ -1531,6 +1443,6 @@ loop_split_edge_with (e, insns, loops)

if (dest->loop_father->latch == src)
dest->loop_father->latch = new_bb;

return new_bb;
}
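Every hunk in this commit applies the same mechanical change: old-style (K&R) definitions and PARAMS-wrapped prototypes become ISO C90 prototypes. A minimal sketch of the pattern, using a hypothetical function rather than code from this commit (PARAMS is the macro GCC previously took from ansidecl.h):

/* Before: PARAMS-wrapped prototype and K&R definition.  */
static int foo PARAMS ((int, int));

static int
foo (a, b)
     int a;
     int b;
{
  return a + b;
}

/* After: ISO C90 prototype and definition; PARAMS is no longer needed.  */
static int foo (int, int);

static int
foo (int a, int b)
{
  return a + b;
}

The same pattern continues in gcc/cfgrtl.c below.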
gcc/cfgrtl.c
@@ -1,6 +1,6 @@
/* Control flow graph manipulation code for GNU compiler.
Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
1999, 2000, 2001, 2002 Free Software Foundation, Inc.
1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

@@ -69,32 +69,31 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
rtx label_value_list;
rtx tail_recursion_label_list;

static int can_delete_note_p PARAMS ((rtx));
static int can_delete_label_p PARAMS ((rtx));
static void commit_one_edge_insertion PARAMS ((edge, int));
static bool try_redirect_by_replacing_jump PARAMS ((edge, basic_block));
static rtx last_loop_beg_note PARAMS ((rtx));
static bool back_edge_of_syntactic_loop_p PARAMS ((basic_block, basic_block));
basic_block force_nonfallthru_and_redirect PARAMS ((edge, basic_block));
static basic_block rtl_split_edge PARAMS ((edge));
static int rtl_verify_flow_info PARAMS ((void));
static edge cfg_layout_split_block PARAMS ((basic_block, void *));
static bool cfg_layout_redirect_edge_and_branch PARAMS ((edge, basic_block));
static basic_block cfg_layout_redirect_edge_and_branch_force PARAMS ((edge, basic_block));
static void cfg_layout_delete_block PARAMS ((basic_block));
static void rtl_delete_block PARAMS ((basic_block));
static basic_block rtl_redirect_edge_and_branch_force PARAMS ((edge, basic_block));
static bool rtl_redirect_edge_and_branch PARAMS ((edge, basic_block));
static edge rtl_split_block PARAMS ((basic_block, void *));
static void rtl_dump_bb PARAMS ((basic_block, FILE *));
static int rtl_verify_flow_info_1 PARAMS ((void));
static int can_delete_note_p (rtx);
static int can_delete_label_p (rtx);
static void commit_one_edge_insertion (edge, int);
static bool try_redirect_by_replacing_jump (edge, basic_block);
static rtx last_loop_beg_note (rtx);
static bool back_edge_of_syntactic_loop_p (basic_block, basic_block);
basic_block force_nonfallthru_and_redirect (edge, basic_block);
static basic_block rtl_split_edge (edge);
static int rtl_verify_flow_info (void);
static edge cfg_layout_split_block (basic_block, void *);
static bool cfg_layout_redirect_edge_and_branch (edge, basic_block);
static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
static void cfg_layout_delete_block (basic_block);
static void rtl_delete_block (basic_block);
static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
static bool rtl_redirect_edge_and_branch (edge, basic_block);
static edge rtl_split_block (basic_block, void *);
static void rtl_dump_bb (basic_block, FILE *);
static int rtl_verify_flow_info_1 (void);

/* Return true if NOTE is not one of the ones that must be kept paired,
so that we may simply delete it. */

static int
can_delete_note_p (note)
rtx note;
can_delete_note_p (rtx note)
{
return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
|| NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK

@@ -104,8 +103,7 @@ can_delete_note_p (note)
/* True if a given label can be deleted. */

static int
can_delete_label_p (label)
rtx label;
can_delete_label_p (rtx label)
{
return (!LABEL_PRESERVE_P (label)
/* User declared labels must be preserved. */

@@ -117,8 +115,7 @@ can_delete_label_p (label)
/* Delete INSN by patching it out. Return the next insn. */

rtx
delete_insn (insn)
rtx insn;
delete_insn (rtx insn)
{
rtx next = NEXT_INSN (insn);
rtx note;

@@ -189,8 +186,7 @@ delete_insn (insn)

/* Like delete_insn but also purge dead edges from BB. */
rtx
delete_insn_and_edges (insn)
rtx insn;
delete_insn_and_edges (rtx insn)
{
rtx x;
bool purge = false;

@@ -209,8 +205,7 @@ delete_insn_and_edges (insn)
that must be paired. */

void
delete_insn_chain (start, finish)
rtx start, finish;
delete_insn_chain (rtx start, rtx finish)
{
rtx next;

@@ -233,8 +228,7 @@ delete_insn_chain (start, finish)

/* Like delete_insn but also purge dead edges from BB. */
void
delete_insn_chain_and_edges (first, last)
rtx first, last;
delete_insn_chain_and_edges (rtx first, rtx last)
{
bool purge = false;

@@ -256,9 +250,7 @@ delete_insn_chain_and_edges (first, last)
AFTER is the basic block we should be put after. */

basic_block
create_basic_block_structure (head, end, bb_note, after)
rtx head, end, bb_note;
basic_block after;
create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
{
basic_block bb;

@@ -333,9 +325,7 @@ create_basic_block_structure (head, end, bb_note, after)
create basic block at the end of INSN chain. */

basic_block
create_basic_block (head, end, after)
rtx head, end;
basic_block after;
create_basic_block (rtx head, rtx end, basic_block after)
{
basic_block bb;

@@ -358,8 +348,7 @@ create_basic_block (head, end, after)
to post-process the stream to remove empty blocks, loops, ranges, etc. */

static void
rtl_delete_block (b)
basic_block b;
rtl_delete_block (basic_block b)
{
rtx insn, end, tmp;

@@ -420,7 +409,7 @@ rtl_delete_block (b)
/* Records the basic block struct in BLOCK_FOR_INSN for every insn. */

void
compute_bb_for_insn ()
compute_bb_for_insn (void)
{
basic_block bb;

@@ -441,7 +430,7 @@ compute_bb_for_insn ()
/* Release the basic_block_for_insn array. */

void
free_bb_for_insn ()
free_bb_for_insn (void)
{
rtx insn;
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))

@@ -452,8 +441,7 @@ free_bb_for_insn ()
/* Update insns block within BB. */

void
update_bb_for_insn (bb)
basic_block bb;
update_bb_for_insn (basic_block bb)
{
rtx insn;

@@ -472,9 +460,7 @@ update_bb_for_insn (bb)
one has a number one greater than the block split. */

static edge
rtl_split_block (bb, insnp)
basic_block bb;
void *insnp;
rtl_split_block (basic_block bb, void *insnp)
{
basic_block new_bb;
edge new_edge;

@@ -533,8 +519,7 @@ rtl_split_block (bb, insnp)
are already contiguous, hence `nomove'. */

void
merge_blocks_nomove (a, b)
basic_block a, b;
merge_blocks_nomove (basic_block a, basic_block b)
{
rtx b_head = b->head, b_end = b->end, a_end = a->end;
rtx del_first = NULL_RTX, del_last = NULL_RTX;

@@ -641,8 +626,7 @@ merge_blocks_nomove (a, b)
exist. */

rtx
block_label (block)
basic_block block;
block_label (basic_block block)
{
if (block == EXIT_BLOCK_PTR)
return NULL_RTX;

@@ -661,9 +645,7 @@ block_label (block)
return values are equivalent to redirect_edge_and_branch. */

static bool
try_redirect_by_replacing_jump (e, target)
edge e;
basic_block target;
try_redirect_by_replacing_jump (edge e, basic_block target)
{
basic_block src = e->src;
rtx insn = src->end, kill_from;

@@ -785,8 +767,7 @@ try_redirect_by_replacing_jump (e, target)
test. */

static rtx
last_loop_beg_note (insn)
rtx insn;
last_loop_beg_note (rtx insn)
{
rtx last = insn;

@@ -810,9 +791,7 @@ last_loop_beg_note (insn)
stream. */

static bool
rtl_redirect_edge_and_branch (e, target)
edge e;
basic_block target;
rtl_redirect_edge_and_branch (edge e, basic_block target)
{
rtx tmp;
rtx old_label = e->dest->head;

@@ -911,9 +890,7 @@ rtl_redirect_edge_and_branch (e, target)
Used by redirect_edge_and_branch_force. */

basic_block
force_nonfallthru_and_redirect (e, target)
edge e;
basic_block target;
force_nonfallthru_and_redirect (edge e, basic_block target)
{
basic_block jump_block, new_bb = NULL, src = e->src;
rtx note;

@@ -956,7 +933,7 @@ force_nonfallthru_and_redirect (e, target)
/* Irritating special case - fallthru edge to the same block as abnormal
edge.
We can't redirect abnormal edge, but we still can split the fallthru
one and create separate abnormal edge to original destination.
one and create separate abnormal edge to original destination.
This allows bb-reorder to make such edge non-fallthru. */
if (e->dest != target)
abort ();

@@ -1064,8 +1041,7 @@ force_nonfallthru_and_redirect (e, target)
Return newly created BB or NULL if none. */

basic_block
force_nonfallthru (e)
edge e;
force_nonfallthru (edge e)
{
return force_nonfallthru_and_redirect (e, e->dest);
}

@@ -1075,9 +1051,7 @@ force_nonfallthru (e)
Abort if conversion is impossible. */

static basic_block
rtl_redirect_edge_and_branch_force (e, target)
edge e;
basic_block target;
rtl_redirect_edge_and_branch_force (edge e, basic_block target)
{
if (redirect_edge_and_branch (e, target)
|| e->dest == target)

@@ -1092,9 +1066,7 @@ rtl_redirect_edge_and_branch_force (e, target)
fact true, delete the jump and barriers that are in the way. */

void
tidy_fallthru_edge (e, b, c)
edge e;
basic_block b, c;
tidy_fallthru_edge (edge e, basic_block b, basic_block c)
{
rtx q;

@@ -1149,7 +1121,7 @@ tidy_fallthru_edge (e, b, c)
is how find_basic_blocks created them. */

void
tidy_fallthru_edges ()
tidy_fallthru_edges (void)
{
basic_block b, c;

@@ -1189,8 +1161,7 @@ tidy_fallthru_edges ()
is back edge of syntactic loop. */

static bool
back_edge_of_syntactic_loop_p (bb1, bb2)
basic_block bb1, bb2;
back_edge_of_syntactic_loop_p (basic_block bb1, basic_block bb2)
{
rtx insn;
int count = 0;

@@ -1228,8 +1199,7 @@ back_edge_of_syntactic_loop_p (bb1, bb2)
block with multiple predecessors is not handled optimally. */

basic_block
rtl_split_edge (edge_in)
edge edge_in;
rtl_split_edge (edge edge_in)
{
basic_block bb;
rtx before;

@@ -1317,9 +1287,7 @@ rtl_split_edge (edge_in)
CFG until commit_edge_insertions is called. */

void
insert_insn_on_edge (pattern, e)
rtx pattern;
edge e;
insert_insn_on_edge (rtx pattern, edge e)
{
/* We cannot insert instructions on an abnormal critical edge.
It will be easier to find the culprit if we die now. */

@@ -1340,9 +1308,7 @@ insert_insn_on_edge (pattern, e)
/* Update the CFG for the instructions queued on edge E. */

static void
commit_one_edge_insertion (e, watch_calls)
edge e;
int watch_calls;
commit_one_edge_insertion (edge e, int watch_calls)
{
rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
basic_block bb = NULL;

@@ -1467,7 +1433,7 @@ commit_one_edge_insertion (e, watch_calls)
/* Update the CFG for all queued instructions. */

void
commit_edge_insertions ()
commit_edge_insertions (void)
{
basic_block bb;
sbitmap blocks;

@@ -1515,7 +1481,7 @@ commit_edge_insertions ()
code on edges between call and storing its return value. */

void
commit_edge_insertions_watch_calls ()
commit_edge_insertions_watch_calls (void)
{
basic_block bb;
sbitmap blocks;

@@ -1562,9 +1528,7 @@ commit_edge_insertions_watch_calls ()
/* Print out one basic block with live information at start and end. */

static void
rtl_dump_bb (bb, outf)
basic_block bb;
FILE *outf;
rtl_dump_bb (basic_block bb, FILE *outf)
{
rtx insn;
rtx last;

@@ -1586,9 +1550,7 @@ rtl_dump_bb (bb, outf)
basic block. */

void
print_rtl_with_bb (outf, rtx_first)
FILE *outf;
rtx rtx_first;
print_rtl_with_bb (FILE *outf, rtx rtx_first)
{
rtx tmp_rtx;

@@ -1674,8 +1636,7 @@ print_rtl_with_bb (outf, rtx_first)
}

void
update_br_prob_note (bb)
basic_block bb;
update_br_prob_note (basic_block bb)
{
rtx note;
if (GET_CODE (bb->end) != JUMP_INSN)

@@ -1701,7 +1662,7 @@ update_br_prob_note (bb)
In future it can be extended check a lot of other stuff as well
(reachability of basic blocks, life information, etc. etc.). */
static int
rtl_verify_flow_info_1 ()
rtl_verify_flow_info_1 (void)
{
const int max_uid = get_max_uid ();
rtx last_head = get_last_insn ();

@@ -1920,7 +1881,7 @@ rtl_verify_flow_info_1 ()
- check that all returns are followed by barriers
- check that all fallthru edge points to the adjacent blocks. */
static int
rtl_verify_flow_info ()
rtl_verify_flow_info (void)
{
basic_block bb;
int err = rtl_verify_flow_info_1 ();

@@ -2044,8 +2005,7 @@ rtl_verify_flow_info ()
Return true if any edges are eliminated. */

bool
purge_dead_edges (bb)
basic_block bb;
purge_dead_edges (basic_block bb)
{
edge e, next;
rtx insn = bb->end, note;

@@ -2227,8 +2187,7 @@ purge_dead_edges (bb)
true if some edge has been eliminated. */

bool
purge_all_dead_edges (update_life_p)
int update_life_p;
purge_all_dead_edges (int update_life_p)
{
int purged = false;
sbitmap blocks = 0;

@@ -2261,9 +2220,7 @@ purge_all_dead_edges (update_life_p)

/* Same as split_block but update cfg_layout structures. */
static edge
cfg_layout_split_block (bb, insnp)
basic_block bb;
void *insnp;
cfg_layout_split_block (basic_block bb, void *insnp)
{
rtx insn = insnp;

@@ -2278,9 +2235,7 @@ cfg_layout_split_block (bb, insnp)

/* Redirect Edge to DEST. */
static bool
cfg_layout_redirect_edge_and_branch (e, dest)
edge e;
basic_block dest;
cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
{
basic_block src = e->src;
basic_block old_next_bb = src->next_bb;

@@ -2332,9 +2287,7 @@ cfg_layout_redirect_edge_and_branch (e, dest)

/* Simple wrapper as we always can redirect fallthru edges. */
static basic_block
cfg_layout_redirect_edge_and_branch_force (e, dest)
edge e;
basic_block dest;
cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
{
if (!cfg_layout_redirect_edge_and_branch (e, dest))
abort ();

@@ -2343,8 +2296,7 @@ cfg_layout_redirect_edge_and_branch_force (e, dest)

/* Same as flow_delete_block but update cfg_layout structures. */
static void
cfg_layout_delete_block (bb)
basic_block bb;
cfg_layout_delete_block (basic_block bb)
{
rtx insn, next, prev = PREV_INSN (bb->head), *to, remaints;

@@ -2384,11 +2336,11 @@ cfg_layout_delete_block (bb)

if (prev)
prev = NEXT_INSN (prev);
else
else
prev = get_insns ();
if (next)
next = PREV_INSN (next);
else
else
next = get_last_insn ();

if (next && NEXT_INSN (next) != prev)