summaryrefslogtreecommitdiff
path: root/gcc/calls.c
diff options
context:
space:
mode:
authordnovillo <dnovillo@138bc75d-0d04-0410-961f-82ee72b054a4>2004-05-13 06:41:07 +0000
committerdnovillo <dnovillo@138bc75d-0d04-0410-961f-82ee72b054a4>2004-05-13 06:41:07 +0000
commit4ee9c6840ad3fc92a9034343278a1e476ad6872a (patch)
treea2568888a519c077427b133de9ece5879a8484a5 /gcc/calls.c
parentebb338380ab170c91e64d38038e6b5ce930d69a1 (diff)
downloadgcc-4ee9c6840ad3fc92a9034343278a1e476ad6872a.tar.gz
Merge tree-ssa-20020619-branch into mainline.
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@81764 138bc75d-0d04-0410-961f-82ee72b054a4
Diffstat (limited to 'gcc/calls.c')
-rw-r--r--gcc/calls.c475
1 files changed, 163 insertions, 312 deletions
diff --git a/gcc/calls.c b/gcc/calls.c
index 49be7436ae1..e15a96b9c37 100644
--- a/gcc/calls.c
+++ b/gcc/calls.c
@@ -120,7 +120,7 @@ int stack_arg_under_construction;
static int calls_function (tree, int);
static int calls_function_1 (tree, int);
-static void emit_call_1 (rtx, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
+static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
HOST_WIDE_INT, rtx, rtx, int, rtx, int,
CUMULATIVE_ARGS *);
static void precompute_register_parameters (int, struct arg_data *, int *);
@@ -134,7 +134,7 @@ static void initialize_argument_information (int, struct arg_data *,
struct args_size *, int, tree,
tree, CUMULATIVE_ARGS *, int,
rtx *, int *, int *, int *,
- bool);
+ bool *, bool);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
@@ -142,7 +142,6 @@ static void load_register_parameters (struct arg_data *, int, rtx *, int,
static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
enum machine_mode, int, va_list);
static int special_function_p (tree, int);
-static rtx try_to_integrate (tree, tree, rtx, int, tree, rtx);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);
@@ -280,17 +279,11 @@ calls_function_1 (tree exp, int which)
CALL_INSN_FUNCTION_USAGE information. */
rtx
-prepare_call_address (rtx funexp, tree fndecl, rtx *call_fusage,
- int reg_parm_seen, int sibcallp)
+prepare_call_address (rtx funexp, rtx static_chain_value,
+ rtx *call_fusage, int reg_parm_seen, int sibcallp)
{
- rtx static_chain_value = 0;
-
funexp = protect_from_queue (funexp, 0);
- if (fndecl != 0)
- /* Get possible static chain value for nested function in C. */
- static_chain_value = lookup_static_chain (fndecl);
-
/* Make a valid memory address and copy constants through pseudo-regs,
but not for a constant address if -fno-function-cse. */
if (GET_CODE (funexp) != SYMBOL_REF)
@@ -362,7 +355,8 @@ prepare_call_address (rtx funexp, tree fndecl, rtx *call_fusage,
denote registers used by the called function. */
static void
-emit_call_1 (rtx funexp, tree fndecl ATTRIBUTE_UNUSED, tree funtype ATTRIBUTE_UNUSED,
+emit_call_1 (rtx funexp, tree fntree, tree fndecl ATTRIBUTE_UNUSED,
+ tree funtype ATTRIBUTE_UNUSED,
HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
HOST_WIDE_INT rounded_stack_size,
HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
@@ -506,7 +500,16 @@ emit_call_1 (rtx funexp, tree fndecl ATTRIBUTE_UNUSED, tree funtype ATTRIBUTE_UN
REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
REG_NOTES (call_insn));
else
- note_eh_region_may_contain_throw ();
+ {
+ int rn = lookup_stmt_eh_region (fntree);
+
+ /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't
+ throw, which we already took care of. */
+ if (rn > 0)
+ REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
+ REG_NOTES (call_insn));
+ note_current_region_may_contain_throw ();
+ }
if (ecf_flags & ECF_NORETURN)
REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
@@ -590,8 +593,7 @@ emit_call_1 (rtx funexp, tree fndecl ATTRIBUTE_UNUSED, tree funtype ATTRIBUTE_UN
static int
special_function_p (tree fndecl, int flags)
{
- if (! (flags & ECF_MALLOC)
- && fndecl && DECL_NAME (fndecl)
+ if (fndecl && DECL_NAME (fndecl)
&& IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
/* Exclude functions not at the file scope, or not `extern',
since they are not the magic functions we would otherwise
@@ -714,6 +716,8 @@ flags_from_decl_or_type (tree exp)
if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
flags |= ECF_LIBCALL_BLOCK | ECF_CONST;
+
+ flags = special_function_p (exp, flags);
}
else if (TYPE_P (exp) && TYPE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
flags |= ECF_CONST;
@@ -1012,6 +1016,9 @@ store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
flags which may be modified by this routine.
+ MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
+ that requires allocation of stack space.
+
CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
the thunked-to function. */
@@ -1025,7 +1032,7 @@ initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
int reg_parm_stack_space,
rtx *old_stack_level, int *old_pending_adj,
int *must_preallocate, int *ecf_flags,
- bool call_from_thunk_p)
+ bool *may_tailcall, bool call_from_thunk_p)
{
/* 1 if scanning parms front to back, -1 if scanning back to front. */
int inc;
@@ -1138,6 +1145,7 @@ initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
build_pointer_type (type),
args[i].tree_value);
type = build_pointer_type (type);
+ *may_tailcall = false;
}
else
{
@@ -1177,6 +1185,7 @@ initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
build_pointer_type (type),
make_tree (type, copy));
type = build_pointer_type (type);
+ *may_tailcall = false;
}
}
@@ -1705,120 +1714,6 @@ load_register_parameters (struct arg_data *args, int num_actuals,
}
}
-/* Try to integrate function. See expand_inline_function for documentation
- about the parameters. */
-
-static rtx
-try_to_integrate (tree fndecl, tree actparms, rtx target, int ignore,
- tree type, rtx structure_value_addr)
-{
- rtx temp;
- rtx before_call;
- int i;
- rtx old_stack_level = 0;
- int reg_parm_stack_space = 0;
-
-#ifdef REG_PARM_STACK_SPACE
- reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
-#endif
-
- before_call = get_last_insn ();
-
- timevar_push (TV_INTEGRATION);
-
- temp = expand_inline_function (fndecl, actparms, target,
- ignore, type,
- structure_value_addr);
-
- timevar_pop (TV_INTEGRATION);
-
- /* If inlining succeeded, return. */
- if (temp != (rtx) (size_t) - 1)
- {
- if (ACCUMULATE_OUTGOING_ARGS)
- {
- /* If the outgoing argument list must be preserved, push
- the stack before executing the inlined function if it
- makes any calls. */
-
- i = reg_parm_stack_space;
- if (i > highest_outgoing_arg_in_use)
- i = highest_outgoing_arg_in_use;
- while (--i >= 0 && stack_usage_map[i] == 0)
- ;
-
- if (stack_arg_under_construction || i >= 0)
- {
- rtx first_insn
- = before_call ? NEXT_INSN (before_call) : get_insns ();
- rtx insn = NULL_RTX, seq;
-
- /* Look for a call in the inline function code.
- If DECL_STRUCT_FUNCTION (fndecl)->outgoing_args_size is
- nonzero then there is a call and it is not necessary
- to scan the insns. */
-
- if (DECL_STRUCT_FUNCTION (fndecl)->outgoing_args_size == 0)
- for (insn = first_insn; insn; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == CALL_INSN)
- break;
-
- if (insn)
- {
- /* Reserve enough stack space so that the largest
- argument list of any function call in the inline
- function does not overlap the argument list being
- evaluated. This is usually an overestimate because
- allocate_dynamic_stack_space reserves space for an
- outgoing argument list in addition to the requested
- space, but there is no way to ask for stack space such
- that an argument list of a certain length can be
- safely constructed.
-
- Add the stack space reserved for register arguments, if
- any, in the inline function. What is really needed is the
- largest value of reg_parm_stack_space in the inline
- function, but that is not available. Using the current
- value of reg_parm_stack_space is wrong, but gives
- correct results on all supported machines. */
-
- int adjust =
- (DECL_STRUCT_FUNCTION (fndecl)->outgoing_args_size
- + reg_parm_stack_space);
-
- start_sequence ();
- emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
- allocate_dynamic_stack_space (GEN_INT (adjust),
- NULL_RTX, BITS_PER_UNIT);
- seq = get_insns ();
- end_sequence ();
- emit_insn_before (seq, first_insn);
- emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
- }
- }
- }
-
- /* If the result is equivalent to TARGET, return TARGET to simplify
- checks in store_expr. They can be equivalent but not equal in the
- case of a function that returns BLKmode. */
- if (temp != target && rtx_equal_p (temp, target))
- return target;
- return temp;
- }
-
- /* If inlining failed, mark FNDECL as needing to be compiled
- separately after all. If function was declared inline,
- give a warning. */
- if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
- && optimize > 0 && !TREE_ADDRESSABLE (fndecl))
- {
- warning ("%Jinlining failed in call to '%F'", fndecl, fndecl);
- warning ("called from here");
- }
- lang_hooks.mark_addressable (fndecl);
- return (rtx) (size_t) - 1;
-}
-
/* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
bytes, then we would need to push some additional bytes to pad the
@@ -2029,6 +1924,69 @@ shift_returned_value (tree type, rtx *value)
return false;
}
+/* Remove all REG_EQUIV notes found in the insn chain. */
+
+static void
+purge_reg_equiv_notes (void)
+{
+ rtx insn;
+
+ for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
+ {
+ while (1)
+ {
+ rtx note = find_reg_note (insn, REG_EQUIV, 0);
+ if (note)
+ {
+ /* Remove the note and keep looking at the notes for
+ this insn. */
+ remove_note (insn, note);
+ continue;
+ }
+ break;
+ }
+ }
+}
+
+/* Clear RTX_UNCHANGING_P flag of incoming argument MEMs. */
+
+static void
+purge_mem_unchanging_flag (rtx x)
+{
+ RTX_CODE code;
+ int i, j;
+ const char *fmt;
+
+ if (x == NULL_RTX)
+ return;
+
+ code = GET_CODE (x);
+
+ if (code == MEM)
+ {
+ if (RTX_UNCHANGING_P (x)
+ && (XEXP (x, 0) == current_function_internal_arg_pointer
+ || (GET_CODE (XEXP (x, 0)) == PLUS
+ && XEXP (XEXP (x, 0), 0) ==
+ current_function_internal_arg_pointer
+ && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
+ RTX_UNCHANGING_P (x) = 0;
+ return;
+ }
+
+ /* Scan all subexpressions. */
+ fmt = GET_RTX_FORMAT (code);
+ for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
+ {
+ if (*fmt == 'e')
+ purge_mem_unchanging_flag (XEXP (x, i));
+ else if (*fmt == 'E')
+ for (j = 0; j < XVECLEN (x, i); j++)
+ purge_mem_unchanging_flag (XVECEXP (x, i, j));
+ }
+}
+
+
/* Generate all the code for a function call
and return an rtx for its value.
Store the value in TARGET (specified as an rtx) if convenient.
@@ -2045,11 +2003,9 @@ expand_call (tree exp, rtx target, int ignore)
tree actparms = TREE_OPERAND (exp, 1);
/* RTX for the function to be called. */
rtx funexp;
- /* Sequence of insns to perform a tail recursive "call". */
- rtx tail_recursion_insns = NULL_RTX;
/* Sequence of insns to perform a normal "call". */
rtx normal_call_insns = NULL_RTX;
- /* Sequence of insns to perform a tail recursive "call". */
+ /* Sequence of insns to perform a tail "call". */
rtx tail_call_insns = NULL_RTX;
/* Data type of the function. */
tree funtype;
@@ -2059,9 +2015,7 @@ expand_call (tree exp, rtx target, int ignore)
tree fndecl = 0;
/* The type of the function being called. */
tree fntype;
- rtx insn;
- int try_tail_call = 1;
- int try_tail_recursion = 1;
+ bool try_tail_call = CALL_EXPR_TAILCALL (exp);
int pass;
/* Register in which non-BLKmode value will be returned,
@@ -2122,8 +2076,6 @@ expand_call (tree exp, rtx target, int ignore)
/* Mask of ECF_ flags. */
int flags = 0;
- /* Nonzero if this is a call to an inline function. */
- int is_integrable = 0;
#ifdef REG_PARM_STACK_SPACE
/* Define the boundary of the register parm stack space that needs to be
saved, if any. */
@@ -2132,7 +2084,6 @@ expand_call (tree exp, rtx target, int ignore)
#endif
int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
- rtx temp_target = 0;
char *initial_stack_usage_map = stack_usage_map;
int old_stack_allocated;
@@ -2156,58 +2107,23 @@ expand_call (tree exp, rtx target, int ignore)
HOST_WIDE_INT preferred_stack_boundary;
/* The alignment of the stack, in bytes. */
HOST_WIDE_INT preferred_unit_stack_boundary;
-
+ /* The static chain value to use for this call. */
+ rtx static_chain_value;
/* See if this is "nothrow" function call. */
if (TREE_NOTHROW (exp))
flags |= ECF_NOTHROW;
- /* See if we can find a DECL-node for the actual function.
- As a result, decide whether this is a call to an integrable function. */
-
+ /* See if we can find a DECL-node for the actual function, and get the
+ function attributes (flags) from the function decl or type node. */
fndecl = get_callee_fndecl (exp);
if (fndecl)
{
fntype = TREE_TYPE (fndecl);
- if (!flag_no_inline
- && fndecl != current_function_decl
- && DECL_INLINE (fndecl)
- && DECL_STRUCT_FUNCTION (fndecl)
- && DECL_STRUCT_FUNCTION (fndecl)->inlinable)
- is_integrable = 1;
- else if (! TREE_ADDRESSABLE (fndecl))
- {
- /* In case this function later becomes inlinable,
- record that there was already a non-inline call to it.
-
- Use abstraction instead of setting TREE_ADDRESSABLE
- directly. */
- if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
- && optimize > 0)
- {
- warning ("%Jcan't inline call to '%F'", fndecl, fndecl);
- warning ("called from here");
- }
- lang_hooks.mark_addressable (fndecl);
- }
-
- if (ignore
- && lookup_attribute ("warn_unused_result",
- TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
- warning ("ignoring return value of `%D', "
- "declared with attribute warn_unused_result", fndecl);
-
flags |= flags_from_decl_or_type (fndecl);
}
-
- /* If we don't have specific function to call, see if we have a
- attributes set in the type. */
else
{
fntype = TREE_TYPE (TREE_TYPE (p));
- if (ignore
- && lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (fntype)))
- warning ("ignoring return value of function "
- "declared with attribute warn_unused_result");
flags |= flags_from_decl_or_type (fntype);
}
@@ -2264,15 +2180,6 @@ expand_call (tree exp, rtx target, int ignore)
#ifdef PCC_STATIC_STRUCT_RETURN
{
pcc_struct_value = 1;
- /* Easier than making that case work right. */
- if (is_integrable)
- {
- /* In case this is a static function, note that it has been
- used. */
- if (! TREE_ADDRESSABLE (fndecl))
- lang_hooks.mark_addressable (fndecl);
- is_integrable = 0;
- }
}
#else /* not PCC_STATIC_STRUCT_RETURN */
{
@@ -2305,17 +2212,6 @@ expand_call (tree exp, rtx target, int ignore)
#endif /* not PCC_STATIC_STRUCT_RETURN */
}
- /* If called function is inline, try to integrate it. */
-
- if (is_integrable)
- {
- rtx temp = try_to_integrate (fndecl, actparms, target,
- ignore, TREE_TYPE (exp),
- structure_value_addr);
- if (temp != (rtx) (size_t) - 1)
- return temp;
- }
-
/* Figure out the amount to which the stack should be aligned. */
preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
if (fndecl)
@@ -2341,10 +2237,6 @@ expand_call (tree exp, rtx target, int ignore)
else
type_arg_types = TYPE_ARG_TYPES (funtype);
- /* See if this is a call to a function that can return more than once
- or a call to longjmp or malloc. */
- flags |= special_function_p (fndecl, flags);
-
if (flags & ECF_MAY_BE_ALLOCA)
current_function_calls_alloca = 1;
@@ -2427,7 +2319,7 @@ expand_call (tree exp, rtx target, int ignore)
&args_so_far, reg_parm_stack_space,
&old_stack_level, &old_pending_adj,
&must_preallocate, &flags,
- CALL_FROM_THUNK_P (exp));
+ &try_tail_call, CALL_FROM_THUNK_P (exp));
if (args_size.var)
{
@@ -2476,14 +2368,9 @@ expand_call (tree exp, rtx target, int ignore)
|| !rtx_equal_function_value_matters
|| current_nesting_level () == 0
|| any_pending_cleanups ()
- || args_size.var)
- try_tail_call = try_tail_recursion = 0;
-
- /* Tail recursion fails, when we are not dealing with recursive calls. */
- if (!try_tail_recursion
- || TREE_CODE (addr) != ADDR_EXPR
- || TREE_OPERAND (addr, 0) != current_function_decl)
- try_tail_recursion = 0;
+ || args_size.var
+ || lookup_stmt_eh_region (exp) >= 0)
+ try_tail_call = 0;
/* Rest of purposes for tail call optimizations to fail. */
if (
@@ -2521,7 +2408,7 @@ expand_call (tree exp, rtx target, int ignore)
|| !lang_hooks.decls.ok_for_sibcall (fndecl))
try_tail_call = 0;
- if (try_tail_call || try_tail_recursion)
+ if (try_tail_call)
{
int end, inc;
actparms = NULL_TREE;
@@ -2556,11 +2443,6 @@ expand_call (tree exp, rtx target, int ignore)
for (; i != end; i += inc)
{
args[i].tree_value = fix_unsafe_tree (args[i].tree_value);
- /* We need to build actparms for optimize_tail_recursion. We can
- safely trash away TREE_PURPOSE, since it is unused by this
- function. */
- if (try_tail_recursion)
- actparms = tree_cons (NULL_TREE, args[i].tree_value, actparms);
}
/* Do the same for the function address if it is an expression. */
if (!fndecl)
@@ -2568,50 +2450,9 @@ expand_call (tree exp, rtx target, int ignore)
/* Expanding one of those dangerous arguments could have added
cleanups, but otherwise give it a whirl. */
if (any_pending_cleanups ())
- try_tail_call = try_tail_recursion = 0;
+ try_tail_call = 0;
}
- /* Generate a tail recursion sequence when calling ourselves. */
-
- if (try_tail_recursion)
- {
- /* We want to emit any pending stack adjustments before the tail
- recursion "call". That way we know any adjustment after the tail
- recursion call can be ignored if we indeed use the tail recursion
- call expansion. */
- int save_pending_stack_adjust = pending_stack_adjust;
- int save_stack_pointer_delta = stack_pointer_delta;
-
- /* Emit any queued insns now; otherwise they would end up in
- only one of the alternates. */
- emit_queue ();
-
- /* Use a new sequence to hold any RTL we generate. We do not even
- know if we will use this RTL yet. The final decision can not be
- made until after RTL generation for the entire function is
- complete. */
- start_sequence ();
- /* If expanding any of the arguments creates cleanups, we can't
- do a tailcall. So, we'll need to pop the pending cleanups
- list. If, however, all goes well, and there are no cleanups
- then the call to expand_start_target_temps will have no
- effect. */
- expand_start_target_temps ();
- if (optimize_tail_recursion (actparms, get_last_insn ()))
- {
- if (any_pending_cleanups ())
- try_tail_call = try_tail_recursion = 0;
- else
- tail_recursion_insns = get_insns ();
- }
- expand_end_target_temps ();
- end_sequence ();
-
- /* Restore the original pending stack adjustment for the sibling and
- normal call cases below. */
- pending_stack_adjust = save_pending_stack_adjust;
- stack_pointer_delta = save_stack_pointer_delta;
- }
/* Ensure current function's preferred stack boundary is at least
what we need. We don't have to increase alignment for recursive
@@ -2634,7 +2475,7 @@ expand_call (tree exp, rtx target, int ignore)
int sibcall_failure = 0;
/* We want to emit any pending stack adjustments before the tail
recursion "call". That way we know any adjustment after the tail
- recursion call can be ignored if we indeed use the tail recursion
+ recursion call can be ignored if we indeed use the tail
call expansion. */
int save_pending_stack_adjust = 0;
int save_stack_pointer_delta = 0;
@@ -2966,6 +2807,12 @@ expand_call (tree exp, rtx target, int ignore)
once we have started filling any specific hard regs. */
precompute_register_parameters (num_actuals, args, &reg_parm_seen);
+ if (TREE_OPERAND (exp, 2))
+ static_chain_value = expand_expr (TREE_OPERAND (exp, 2),
+ NULL_RTX, VOIDmode, 0);
+ else
+ static_chain_value = 0;
+
#ifdef REG_PARM_STACK_SPACE
/* Save the fixed argument area if it's part of the caller's frame and
is clobbered by argument setup for this call. */
@@ -3056,8 +2903,8 @@ expand_call (tree exp, rtx target, int ignore)
use_reg (&call_fusage, struct_value);
}
- funexp = prepare_call_address (funexp, fndecl, &call_fusage,
- reg_parm_seen, pass == 0);
+ funexp = prepare_call_address (funexp, static_chain_value,
+ &call_fusage, reg_parm_seen, pass == 0);
load_register_parameters (args, num_actuals, &call_fusage, flags,
pass == 0, &sibcall_failure);
@@ -3088,7 +2935,7 @@ expand_call (tree exp, rtx target, int ignore)
abort ();
/* Generate the actual call instruction. */
- emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
+ emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
adjusted_args_size.constant, struct_value_size,
next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
flags, & args_so_far);
@@ -3255,11 +3102,7 @@ expand_call (tree exp, rtx target, int ignore)
The Irix 6 ABI has examples of this. */
else if (GET_CODE (valreg) == PARALLEL)
{
- /* Second condition is added because "target" is freed at the
- the end of "pass0" for -O2 when call is made to
- expand_end_target_temps (). Its "in_use" flag has been set
- to false, so allocate a new temp. */
- if (target == 0 || (pass == 1 && target == temp_target))
+ if (target == 0)
{
/* This will only be assigned once, so it can be readonly. */
tree nt = build_qualified_type (TREE_TYPE (exp),
@@ -3267,7 +3110,6 @@ expand_call (tree exp, rtx target, int ignore)
| TYPE_QUAL_CONST));
target = assign_temp (nt, 0, 1, 1);
- temp_target = target;
preserve_temp_slots (target);
}
@@ -3392,8 +3234,8 @@ expand_call (tree exp, rtx target, int ignore)
Check for the handler slots since we might not have a save area
for non-local gotos. */
- if ((flags & ECF_MAY_BE_ALLOCA) && nonlocal_goto_handler_slots != 0)
- emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
+ if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
+ update_nonlocal_goto_save_area ();
/* Free up storage we no longer need. */
for (i = 0; i < num_actuals; ++i)
@@ -3462,48 +3304,16 @@ expand_call (tree exp, rtx target, int ignore)
zero out the sequence. */
if (sibcall_failure)
tail_call_insns = NULL_RTX;
+ else
+ break;
}
- /* The function optimize_sibling_and_tail_recursive_calls doesn't
- handle CALL_PLACEHOLDERs inside other CALL_PLACEHOLDERs. This
- can happen if the arguments to this function call an inline
- function who's expansion contains another CALL_PLACEHOLDER.
-
- If there are any C_Ps in any of these sequences, replace them
- with their normal call. */
-
- for (insn = normal_call_insns; insn; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == CALL_INSN
- && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
- replace_call_placeholder (insn, sibcall_use_normal);
-
- for (insn = tail_call_insns; insn; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == CALL_INSN
- && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
- replace_call_placeholder (insn, sibcall_use_normal);
-
- for (insn = tail_recursion_insns; insn; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == CALL_INSN
- && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
- replace_call_placeholder (insn, sibcall_use_normal);
-
- /* If this was a potential tail recursion site, then emit a
- CALL_PLACEHOLDER with the normal and the tail recursion streams.
- One of them will be selected later. */
- if (tail_recursion_insns || tail_call_insns)
+ /* If tail call production succeeded, we need to remove REG_EQUIV notes on
+ arguments too, as argument area is now clobbered by the call. */
+ if (tail_call_insns)
{
- /* The tail recursion label must be kept around. We could expose
- its use in the CALL_PLACEHOLDER, but that creates unwanted edges
- and makes determining true tail recursion sites difficult.
-
- So we set LABEL_PRESERVE_P here, then clear it when we select
- one of the call sequences after rtl generation is complete. */
- if (tail_recursion_insns)
- LABEL_PRESERVE_P (tail_recursion_label) = 1;
- emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode, normal_call_insns,
- tail_call_insns,
- tail_recursion_insns,
- tail_recursion_label));
+ emit_insn (tail_call_insns);
+ cfun->tail_call_emit = true;
}
else
emit_insn (normal_call_insns);
@@ -3524,6 +3334,47 @@ expand_call (tree exp, rtx target, int ignore)
return target;
}
+/* A sibling call sequence invalidates any REG_EQUIV notes made for
+ this function's incoming arguments.
+
+ At the start of RTL generation we know the only REG_EQUIV notes
+ in the rtl chain are those for incoming arguments, so we can safely
+ flush any REG_EQUIV note.
+
+ This is (slight) overkill. We could keep track of the highest
+ argument we clobber and be more selective in removing notes, but it
+ does not seem to be worth the effort. */
+void
+fixup_tail_calls (void)
+{
+ rtx insn;
+ tree arg;
+
+ purge_reg_equiv_notes ();
+
+ /* A sibling call sequence also may invalidate RTX_UNCHANGING_P
+ flag of some incoming arguments MEM RTLs, because it can write into
+ those slots. We clear all those bits now.
+
+ This is (slight) overkill, we could keep track of which arguments
+ we actually write into. */
+ for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
+ {
+ if (INSN_P (insn))
+ purge_mem_unchanging_flag (PATTERN (insn));
+ }
+
+ /* Similarly, invalidate RTX_UNCHANGING_P for any incoming
+ arguments passed in registers. */
+ for (arg = DECL_ARGUMENTS (current_function_decl);
+ arg;
+ arg = TREE_CHAIN (arg))
+ {
+ if (REG_P (DECL_RTL (arg)))
+ RTX_UNCHANGING_P (DECL_RTL (arg)) = false;
+ }
+}
+
/* Traverse an argument list in VALUES and expand all complex
arguments into their components. */
tree
@@ -4135,7 +3986,7 @@ emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
else
argnum = 0;
- fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0, 0);
+ fun = prepare_call_address (fun, NULL, &call_fusage, 0, 0);
/* Now load any reg parms into their regs. */
@@ -4197,7 +4048,7 @@ emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
always signed. We also assume that the list of arguments passed has
no impact, so we pretend it is unknown. */
- emit_call_1 (fun,
+ emit_call_1 (fun, NULL,
get_identifier (XSTR (orgfun, 0)),
build_function_type (tfom, NULL_TREE),
original_args_size.constant, args_size.constant,