| /* Convert function calls to rtl insns, for GNU C compiler. |
| Copyright (C) 1989, 92, 93, 94, 95, 96, 1997 Free Software Foundation, Inc. |
| |
| This file is part of GNU CC. |
| |
| GNU CC is free software; you can redistribute it and/or modify |
| it under the terms of the GNU General Public License as published by |
| the Free Software Foundation; either version 2, or (at your option) |
| any later version. |
| |
| GNU CC is distributed in the hope that it will be useful, |
| but WITHOUT ANY WARRANTY; without even the implied warranty of |
| MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
| GNU General Public License for more details. |
| |
| You should have received a copy of the GNU General Public License |
| along with GNU CC; see the file COPYING. If not, write to |
| the Free Software Foundation, 59 Temple Place - Suite 330, |
| Boston, MA 02111-1307, USA. */ |
| |
| #include "config.h" |
| #include "rtl.h" |
| #include "tree.h" |
| #include "flags.h" |
| #include "expr.h" |
| #ifdef __STDC__ |
| #include <stdarg.h> |
| #else |
| #include <varargs.h> |
| #endif |
| #include "insn-flags.h" |
| |
| /* Decide whether a function's arguments should be processed |
| from first to last or from last to first. |
| |
| They should if the stack and args grow in opposite directions, but |
| only if we have push insns. */ |
| |
| #ifdef PUSH_ROUNDING |
| |
| #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD) |
| #define PUSH_ARGS_REVERSED /* If it's last to first */ |
| #endif |
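/* (The `!=' above compares the results of the two `defined' tests, each
   0 or 1, so it acts as an exclusive or: the order is reversed exactly
   when the stack and the argument area grow in opposite directions.)  */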
| |
| #endif |
| |
| /* Like STACK_BOUNDARY but in units of bytes, not bits. */ |
| #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT) |
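/* For example, with a 64-bit STACK_BOUNDARY and 8-bit units,
   STACK_BYTES is 8.  */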
| |
| /* Data structure and subroutines used within expand_call. */ |
| |
| struct arg_data |
| { |
| /* Tree node for this argument. */ |
| tree tree_value; |
| /* Mode for value; TYPE_MODE unless promoted. */ |
| enum machine_mode mode; |
| /* Current RTL value for argument, or 0 if it isn't precomputed. */ |
| rtx value; |
  /* Initially-computed RTL value for argument; only for const functions.  */
| rtx initial_value; |
  /* Register to pass this argument in, 0 if passed on stack, or a
| PARALLEL if the arg is to be copied into multiple non-contiguous |
| registers. */ |
| rtx reg; |
| /* If REG was promoted from the actual mode of the argument expression, |
| indicates whether the promotion is sign- or zero-extended. */ |
| int unsignedp; |
| /* Number of registers to use. 0 means put the whole arg in registers. |
| Also 0 if not passed in registers. */ |
| int partial; |
| /* Non-zero if argument must be passed on stack. |
| Note that some arguments may be passed on the stack |
| even though pass_on_stack is zero, just because FUNCTION_ARG says so. |
| pass_on_stack identifies arguments that *cannot* go in registers. */ |
| int pass_on_stack; |
| /* Offset of this argument from beginning of stack-args. */ |
| struct args_size offset; |
| /* Similar, but offset to the start of the stack slot. Different from |
| OFFSET if this arg pads downward. */ |
| struct args_size slot_offset; |
  /* Size of this argument on the stack, rounded up for any padding it gets;
     parts of the argument passed in registers do not count.
| If REG_PARM_STACK_SPACE is defined, then register parms |
| are counted here as well. */ |
| struct args_size size; |
| /* Location on the stack at which parameter should be stored. The store |
| has already been done if STACK == VALUE. */ |
| rtx stack; |
| /* Location on the stack of the start of this argument slot. This can |
| differ from STACK if this arg pads downward. This location is known |
| to be aligned to FUNCTION_ARG_BOUNDARY. */ |
| rtx stack_slot; |
| #ifdef ACCUMULATE_OUTGOING_ARGS |
  /* Place where this stack area has been saved, if needed.  */
| rtx save_area; |
| #endif |
| /* If an argument's alignment does not permit direct copying into registers, |
| copy in smaller-sized pieces into pseudos. These are stored in a |
| block pointed to by this field. The next field says how many |
| word-sized pseudos we made. */ |
| rtx *aligned_regs; |
| int n_aligned_regs; |
| }; |
| |
| #ifdef ACCUMULATE_OUTGOING_ARGS |
/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
| This vector is used to prevent a function call within an argument from |
| clobbering any stack already set up. */ |
| static char *stack_usage_map; |
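/* For example, once a 4-byte argument has been stored at offset 8,
   bytes 8 through 11 of this map are nonzero; a nested call that needs
   those bytes must first save them (see store_one_arg).  */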
| |
| /* Size of STACK_USAGE_MAP. */ |
| static int highest_outgoing_arg_in_use; |
| |
| /* stack_arg_under_construction is nonzero when an argument may be |
| initialized with a constructor call (including a C function that |
| returns a BLKmode struct) and expand_call must take special action |
| to make sure the object being constructed does not overlap the |
| argument list for the constructor call. */ |
| int stack_arg_under_construction; |
| #endif |
| |
| static int calls_function PROTO((tree, int)); |
| static int calls_function_1 PROTO((tree, int)); |
| static void emit_call_1 PROTO((rtx, tree, tree, int, int, rtx, rtx, |
| int, rtx, int)); |
| static void store_one_arg PROTO ((struct arg_data *, rtx, int, int, |
| tree, int)); |
| |
| /* If WHICH is 1, return 1 if EXP contains a call to the built-in function |
| `alloca'. |
| |
| If WHICH is 0, return 1 if EXP contains a call to any function. |
   Actually, we need only return 1 if evaluating EXP would require pushing
| arguments on the stack, but that is too difficult to compute, so we just |
| assume any function call might require the stack. */ |
| |
| static tree calls_function_save_exprs; |
| |
| static int |
| calls_function (exp, which) |
| tree exp; |
| int which; |
| { |
| int val; |
| calls_function_save_exprs = 0; |
| val = calls_function_1 (exp, which); |
| calls_function_save_exprs = 0; |
| return val; |
| } |
| |
| static int |
| calls_function_1 (exp, which) |
| tree exp; |
| int which; |
| { |
| register int i; |
| enum tree_code code = TREE_CODE (exp); |
| int type = TREE_CODE_CLASS (code); |
| int length = tree_code_length[(int) code]; |
| |
| /* If this code is language-specific, we don't know what it will do. */ |
| if ((int) code >= NUM_TREE_CODES) |
| return 1; |
| |
| /* Only expressions and references can contain calls. */ |
| if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r' |
| && type != 'b') |
| return 0; |
| |
| switch (code) |
| { |
| case CALL_EXPR: |
| if (which == 0) |
| return 1; |
| else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR |
| && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) |
| == FUNCTION_DECL)) |
| { |
| tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0); |
| |
| if ((DECL_BUILT_IN (fndecl) |
| && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA) |
| || (DECL_SAVED_INSNS (fndecl) |
| && (FUNCTION_FLAGS (DECL_SAVED_INSNS (fndecl)) |
| & FUNCTION_FLAGS_CALLS_ALLOCA))) |
| return 1; |
| } |
| |
| /* Third operand is RTL. */ |
| length = 2; |
| break; |
| |
| case SAVE_EXPR: |
| if (SAVE_EXPR_RTL (exp) != 0) |
| return 0; |
| if (value_member (exp, calls_function_save_exprs)) |
| return 0; |
| calls_function_save_exprs = tree_cons (NULL_TREE, exp, |
| calls_function_save_exprs); |
| return (TREE_OPERAND (exp, 0) != 0 |
| && calls_function_1 (TREE_OPERAND (exp, 0), which)); |
| |
| case BLOCK: |
| { |
| register tree local; |
| |
| for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local)) |
| if (DECL_INITIAL (local) != 0 |
| && calls_function_1 (DECL_INITIAL (local), which)) |
| return 1; |
| } |
| { |
| register tree subblock; |
| |
| for (subblock = BLOCK_SUBBLOCKS (exp); |
| subblock; |
| subblock = TREE_CHAIN (subblock)) |
| if (calls_function_1 (subblock, which)) |
| return 1; |
| } |
| return 0; |
| |
| case METHOD_CALL_EXPR: |
| length = 3; |
| break; |
| |
| case WITH_CLEANUP_EXPR: |
| length = 1; |
| break; |
| |
| case RTL_EXPR: |
| return 0; |
| } |
| |
| for (i = 0; i < length; i++) |
| if (TREE_OPERAND (exp, i) != 0 |
| && calls_function_1 (TREE_OPERAND (exp, i), which)) |
| return 1; |
| |
| return 0; |
| } |
| |
| /* Force FUNEXP into a form suitable for the address of a CALL, |
| and return that as an rtx. Also load the static chain register |
| if FNDECL is a nested function. |
| |
| CALL_FUSAGE points to a variable holding the prospective |
| CALL_INSN_FUNCTION_USAGE information. */ |
| |
| rtx |
| prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen) |
| rtx funexp; |
| tree fndecl; |
| rtx *call_fusage; |
| int reg_parm_seen; |
| { |
| rtx static_chain_value = 0; |
| |
| funexp = protect_from_queue (funexp, 0); |
| |
| if (fndecl != 0) |
| /* Get possible static chain value for nested function in C. */ |
| static_chain_value = lookup_static_chain (fndecl); |
| |
| /* Make a valid memory address and copy constants thru pseudo-regs, |
| but not for a constant address if -fno-function-cse. */ |
| if (GET_CODE (funexp) != SYMBOL_REF) |
| funexp = |
| #ifdef SMALL_REGISTER_CLASSES |
| /* If we are using registers for parameters, force the |
| function address into a register now. */ |
| (SMALL_REGISTER_CLASSES && reg_parm_seen) |
| ? force_not_mem (memory_address (FUNCTION_MODE, funexp)) |
| : |
| #endif |
| memory_address (FUNCTION_MODE, funexp); |
| else |
| { |
| #ifndef NO_FUNCTION_CSE |
| if (optimize && ! flag_no_function_cse) |
| #ifdef NO_RECURSIVE_FUNCTION_CSE |
| if (fndecl != current_function_decl) |
| #endif |
| funexp = force_reg (Pmode, funexp); |
| #endif |
| } |
| |
| if (static_chain_value != 0) |
| { |
| emit_move_insn (static_chain_rtx, static_chain_value); |
| |
| if (GET_CODE (static_chain_rtx) == REG) |
| use_reg (call_fusage, static_chain_rtx); |
| } |
| |
| return funexp; |
| } |
| |
| /* Generate instructions to call function FUNEXP, |
| and optionally pop the results. |
| The CALL_INSN is the first insn generated. |
| |
| FNDECL is the declaration node of the function. This is given to the |
| macro RETURN_POPS_ARGS to determine whether this function pops its own args. |
| |
| FUNTYPE is the data type of the function. This is given to the macro |
| RETURN_POPS_ARGS to determine whether this function pops its own args. |
| We used to allow an identifier for library functions, but that doesn't |
| work when the return type is an aggregate type and the calling convention |
| says that the pointer to this aggregate is to be popped by the callee. |
| |
| STACK_SIZE is the number of bytes of arguments on the stack, |
| rounded up to STACK_BOUNDARY; zero if the size is variable. |
| This is both to put into the call insn and |
| to generate explicit popping code if necessary. |
| |
| STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value. |
| It is zero if this call doesn't want a structure value. |
| |
| NEXT_ARG_REG is the rtx that results from executing |
| FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1) |
| just after all the args have had their registers assigned. |
| This could be whatever you like, but normally it is the first |
| arg-register beyond those used for args in this call, |
| or 0 if all the arg-registers are used in this call. |
| It is passed on to `gen_call' so you can put this info in the call insn. |
| |
| VALREG is a hard register in which a value is returned, |
| or 0 if the call does not return a value. |
| |
| OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before |
| the args to this call were processed. |
| We restore `inhibit_defer_pop' to that value. |
| |
| CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that |
| denote registers used by the called function. |
| |
| IS_CONST is true if this is a `const' call. */ |
| |
| static void |
| emit_call_1 (funexp, fndecl, funtype, stack_size, struct_value_size, |
| next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage, |
| is_const) |
| rtx funexp; |
| tree fndecl; |
| tree funtype; |
| int stack_size; |
| int struct_value_size; |
| rtx next_arg_reg; |
| rtx valreg; |
| int old_inhibit_defer_pop; |
| rtx call_fusage; |
| int is_const; |
| { |
| rtx stack_size_rtx = GEN_INT (stack_size); |
| rtx struct_value_size_rtx = GEN_INT (struct_value_size); |
| rtx call_insn; |
| int already_popped = 0; |
| |
  /* Ensure address is valid.  A SYMBOL_REF is already valid, so nothing
     need be done; nor do we want to load it into a register here, because
     prepare_call_address already did so if that was appropriate.  */
| if (GET_CODE (funexp) != SYMBOL_REF) |
| funexp = memory_address (FUNCTION_MODE, funexp); |
| |
| #ifndef ACCUMULATE_OUTGOING_ARGS |
| #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop) |
| if (HAVE_call_pop && HAVE_call_value_pop |
| && (RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0 |
| || stack_size == 0)) |
| { |
| rtx n_pop = GEN_INT (RETURN_POPS_ARGS (fndecl, funtype, stack_size)); |
| rtx pat; |
| |
| /* If this subroutine pops its own args, record that in the call insn |
| if possible, for the sake of frame pointer elimination. */ |
| |
| if (valreg) |
| pat = gen_call_value_pop (valreg, |
| gen_rtx (MEM, FUNCTION_MODE, funexp), |
| stack_size_rtx, next_arg_reg, n_pop); |
| else |
| pat = gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, funexp), |
| stack_size_rtx, next_arg_reg, n_pop); |
| |
| emit_call_insn (pat); |
| already_popped = 1; |
| } |
| else |
| #endif |
| #endif |
| |
| #if defined (HAVE_call) && defined (HAVE_call_value) |
| if (HAVE_call && HAVE_call_value) |
| { |
| if (valreg) |
| emit_call_insn (gen_call_value (valreg, |
| gen_rtx (MEM, FUNCTION_MODE, funexp), |
| stack_size_rtx, next_arg_reg, |
| NULL_RTX)); |
| else |
| emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, funexp), |
| stack_size_rtx, next_arg_reg, |
| struct_value_size_rtx)); |
| } |
| else |
| #endif |
| abort (); |
| |
| /* Find the CALL insn we just emitted. */ |
| for (call_insn = get_last_insn (); |
| call_insn && GET_CODE (call_insn) != CALL_INSN; |
| call_insn = PREV_INSN (call_insn)) |
| ; |
| |
| if (! call_insn) |
| abort (); |
| |
| /* Put the register usage information on the CALL. If there is already |
| some usage information, put ours at the end. */ |
| if (CALL_INSN_FUNCTION_USAGE (call_insn)) |
| { |
| rtx link; |
| |
| for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0; |
| link = XEXP (link, 1)) |
| ; |
| |
| XEXP (link, 1) = call_fusage; |
| } |
| else |
| CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage; |
| |
| /* If this is a const call, then set the insn's unchanging bit. */ |
| if (is_const) |
| CONST_CALL_P (call_insn) = 1; |
| |
| /* Restore this now, so that we do defer pops for this call's args |
| if the context of the call as a whole permits. */ |
| inhibit_defer_pop = old_inhibit_defer_pop; |
| |
| #ifndef ACCUMULATE_OUTGOING_ARGS |
| /* If returning from the subroutine does not automatically pop the args, |
| we need an instruction to pop them sooner or later. |
| Perhaps do it now; perhaps just record how much space to pop later. |
| |
| If returning from the subroutine does pop the args, indicate that the |
| stack pointer will be changed. */ |
| |
| if (stack_size != 0 && RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0) |
| { |
| if (!already_popped) |
| CALL_INSN_FUNCTION_USAGE (call_insn) |
| = gen_rtx (EXPR_LIST, VOIDmode, |
| gen_rtx (CLOBBER, VOIDmode, stack_pointer_rtx), |
| CALL_INSN_FUNCTION_USAGE (call_insn)); |
| stack_size -= RETURN_POPS_ARGS (fndecl, funtype, stack_size); |
| stack_size_rtx = GEN_INT (stack_size); |
| } |
| |
| if (stack_size != 0) |
| { |
| if (flag_defer_pop && inhibit_defer_pop == 0 && !is_const) |
| pending_stack_adjust += stack_size; |
| else |
| adjust_stack (stack_size_rtx); |
| } |
| #endif |
| } |
| |
| /* Generate all the code for a function call |
| and return an rtx for its value. |
| Store the value in TARGET (specified as an rtx) if convenient. |
| If the value is stored in TARGET then TARGET is returned. |
| If IGNORE is nonzero, then we ignore the value of the function call. */ |
| |
| rtx |
| expand_call (exp, target, ignore) |
| tree exp; |
| rtx target; |
| int ignore; |
| { |
| /* List of actual parameters. */ |
| tree actparms = TREE_OPERAND (exp, 1); |
| /* RTX for the function to be called. */ |
| rtx funexp; |
| /* Tree node for the function to be called (not the address!). */ |
| tree funtree; |
| /* Data type of the function. */ |
| tree funtype; |
| /* Declaration of the function being called, |
| or 0 if the function is computed (not known by name). */ |
| tree fndecl = 0; |
| char *name = 0; |
| |
| /* Register in which non-BLKmode value will be returned, |
| or 0 if no value or if value is BLKmode. */ |
| rtx valreg; |
| /* Address where we should return a BLKmode value; |
| 0 if value not BLKmode. */ |
| rtx structure_value_addr = 0; |
| /* Nonzero if that address is being passed by treating it as |
| an extra, implicit first parameter. Otherwise, |
| it is passed by being copied directly into struct_value_rtx. */ |
| int structure_value_addr_parm = 0; |
| /* Size of aggregate value wanted, or zero if none wanted |
| or if we are using the non-reentrant PCC calling convention |
| or expecting the value in registers. */ |
| int struct_value_size = 0; |
| /* Nonzero if called function returns an aggregate in memory PCC style, |
| by returning the address of where to find it. */ |
| int pcc_struct_value = 0; |
| |
| /* Number of actual parameters in this call, including struct value addr. */ |
| int num_actuals; |
| /* Number of named args. Args after this are anonymous ones |
| and they must all go on the stack. */ |
| int n_named_args; |
| /* Count arg position in order args appear. */ |
| int argpos; |
| |
| /* Vector of information about each argument. |
| Arguments are numbered in the order they will be pushed, |
| not the order they are written. */ |
| struct arg_data *args; |
| |
| /* Total size in bytes of all the stack-parms scanned so far. */ |
| struct args_size args_size; |
| /* Size of arguments before any adjustments (such as rounding). */ |
| struct args_size original_args_size; |
| /* Data on reg parms scanned so far. */ |
| CUMULATIVE_ARGS args_so_far; |
| /* Nonzero if a reg parm has been scanned. */ |
| int reg_parm_seen; |
| |
| /* Nonzero if we must avoid push-insns in the args for this call. |
| If stack space is allocated for register parameters, but not by the |
| caller, then it is preallocated in the fixed part of the stack frame. |
| So the entire argument block must then be preallocated (i.e., we |
| ignore PUSH_ROUNDING in that case). */ |
| |
| #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE) |
| int must_preallocate = 1; |
| #else |
| #ifdef PUSH_ROUNDING |
| int must_preallocate = 0; |
| #else |
| int must_preallocate = 1; |
| #endif |
| #endif |
| |
| /* Size of the stack reserved for parameter registers. */ |
| int reg_parm_stack_space = 0; |
| |
| /* 1 if scanning parms front to back, -1 if scanning back to front. */ |
| int inc; |
| /* Address of space preallocated for stack parms |
| (on machines that lack push insns), or 0 if space not preallocated. */ |
| rtx argblock = 0; |
| |
| /* Nonzero if it is plausible that this is a call to alloca. */ |
| int may_be_alloca; |
| /* Nonzero if this is a call to setjmp or a related function. */ |
| int returns_twice; |
| /* Nonzero if this is a call to `longjmp'. */ |
| int is_longjmp; |
| /* Nonzero if this is a call to an inline function. */ |
| int is_integrable = 0; |
| /* Nonzero if this is a call to a `const' function. |
| Note that only explicitly named functions are handled as `const' here. */ |
| int is_const = 0; |
| /* Nonzero if this is a call to a `volatile' function. */ |
| int is_volatile = 0; |
| #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE) |
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
  int low_to_save = -1, high_to_save;
  rtx save_area = 0;		/* Place where it is saved.  */
| #endif |
| |
| #ifdef ACCUMULATE_OUTGOING_ARGS |
| int initial_highest_arg_in_use = highest_outgoing_arg_in_use; |
| char *initial_stack_usage_map = stack_usage_map; |
| #endif |
| |
| rtx old_stack_level = 0; |
| int old_pending_adj = 0; |
| int old_stack_arg_under_construction; |
| int old_inhibit_defer_pop = inhibit_defer_pop; |
| rtx call_fusage = 0; |
| register tree p; |
| register int i, j; |
| |
| /* The value of the function call can be put in a hard register. But |
| if -fcheck-memory-usage, code which invokes functions (and thus |
| damages some hard registers) can be inserted before using the value. |
| So, target is always a pseudo-register in that case. */ |
| if (flag_check_memory_usage) |
| target = 0; |
| |
| /* See if we can find a DECL-node for the actual function. |
| As a result, decide whether this is a call to an integrable function. */ |
| |
| p = TREE_OPERAND (exp, 0); |
| if (TREE_CODE (p) == ADDR_EXPR) |
| { |
| fndecl = TREE_OPERAND (p, 0); |
| if (TREE_CODE (fndecl) != FUNCTION_DECL) |
| fndecl = 0; |
| else |
| { |
| if (!flag_no_inline |
| && fndecl != current_function_decl |
| && DECL_INLINE (fndecl) |
| && DECL_SAVED_INSNS (fndecl) |
| && RTX_INTEGRATED_P (DECL_SAVED_INSNS (fndecl))) |
| is_integrable = 1; |
| else if (! TREE_ADDRESSABLE (fndecl)) |
| { |
| /* In case this function later becomes inlinable, |
| record that there was already a non-inline call to it. |
| |
| Use abstraction instead of setting TREE_ADDRESSABLE |
| directly. */ |
| if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline |
| && optimize > 0) |
| { |
| warning_with_decl (fndecl, "can't inline call to `%s'"); |
| warning ("called from here"); |
| } |
| mark_addressable (fndecl); |
| } |
| |
| if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl) |
| && TYPE_MODE (TREE_TYPE (exp)) != VOIDmode) |
| is_const = 1; |
| |
| if (TREE_THIS_VOLATILE (fndecl)) |
| is_volatile = 1; |
| } |
| } |
| |
  /* If we don't have a specific function to call, see if we can deduce a
     constant or `noreturn' function from the type.  */
| if (fndecl == 0) |
| { |
| is_const = TREE_READONLY (TREE_TYPE (TREE_TYPE (p))); |
| is_volatile = TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (p))); |
| } |
| |
| #ifdef REG_PARM_STACK_SPACE |
| #ifdef MAYBE_REG_PARM_STACK_SPACE |
| reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE; |
| #else |
| reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl); |
| #endif |
| #endif |
| |
| /* Warn if this value is an aggregate type, |
| regardless of which calling convention we are using for it. */ |
| if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp))) |
| warning ("function call has aggregate value"); |
| |
| /* Set up a place to return a structure. */ |
| |
| /* Cater to broken compilers. */ |
| if (aggregate_value_p (exp)) |
| { |
| /* This call returns a big structure. */ |
| is_const = 0; |
| |
| #ifdef PCC_STATIC_STRUCT_RETURN |
| { |
| pcc_struct_value = 1; |
| /* Easier than making that case work right. */ |
| if (is_integrable) |
| { |
| /* In case this is a static function, note that it has been |
| used. */ |
| if (! TREE_ADDRESSABLE (fndecl)) |
| mark_addressable (fndecl); |
| is_integrable = 0; |
| } |
| } |
| #else /* not PCC_STATIC_STRUCT_RETURN */ |
| { |
| struct_value_size = int_size_in_bytes (TREE_TYPE (exp)); |
| |
| if (target && GET_CODE (target) == MEM) |
| structure_value_addr = XEXP (target, 0); |
| else |
| { |
| /* Assign a temporary on the stack to hold the value. */ |
| |
| /* For variable-sized objects, we must be called with a target |
| specified. If we were to allocate space on the stack here, |
| we would have no way of knowing when to free it. */ |
| |
| if (struct_value_size < 0) |
| abort (); |
| |
| structure_value_addr |
| = XEXP (assign_stack_temp (BLKmode, struct_value_size, 1), 0); |
| MEM_IN_STRUCT_P (structure_value_addr) |
| = AGGREGATE_TYPE_P (TREE_TYPE (exp)); |
| target = 0; |
| } |
| } |
| #endif /* not PCC_STATIC_STRUCT_RETURN */ |
| } |
| |
| /* If called function is inline, try to integrate it. */ |
| |
| if (is_integrable) |
| { |
| rtx temp; |
| rtx before_call = get_last_insn (); |
| |
| temp = expand_inline_function (fndecl, actparms, target, |
| ignore, TREE_TYPE (exp), |
| structure_value_addr); |
| |
| /* If inlining succeeded, return. */ |
| if ((HOST_WIDE_INT) temp != -1) |
| { |
| #ifdef ACCUMULATE_OUTGOING_ARGS |
| /* If the outgoing argument list must be preserved, push |
| the stack before executing the inlined function if it |
| makes any calls. */ |
| |
| for (i = reg_parm_stack_space - 1; i >= 0; i--) |
| if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0) |
| break; |
| |
| if (stack_arg_under_construction || i >= 0) |
| { |
| rtx first_insn |
| = before_call ? NEXT_INSN (before_call) : get_insns (); |
	  rtx insn = NULL_RTX, seq;
| |
| /* Look for a call in the inline function code. |
| If OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) is |
| nonzero then there is a call and it is not necessary |
| to scan the insns. */ |
| |
	  if (OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) != 0)
	    insn = first_insn;
	  else
	    for (insn = first_insn; insn; insn = NEXT_INSN (insn))
	      if (GET_CODE (insn) == CALL_INSN)
		break;
| |
| if (insn) |
| { |
| /* Reserve enough stack space so that the largest |
| argument list of any function call in the inline |
| function does not overlap the argument list being |
| evaluated. This is usually an overestimate because |
| allocate_dynamic_stack_space reserves space for an |
| outgoing argument list in addition to the requested |
| space, but there is no way to ask for stack space such |
| that an argument list of a certain length can be |
| safely constructed. */ |
| |
| int adjust = OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)); |
| #ifdef REG_PARM_STACK_SPACE |
| /* Add the stack space reserved for register arguments |
| in the inline function. What is really needed is the |
| largest value of reg_parm_stack_space in the inline |
| function, but that is not available. Using the current |
| value of reg_parm_stack_space is wrong, but gives |
| correct results on all supported machines. */ |
| adjust += reg_parm_stack_space; |
| #endif |
| start_sequence (); |
| emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX); |
| allocate_dynamic_stack_space (GEN_INT (adjust), |
| NULL_RTX, BITS_PER_UNIT); |
| seq = get_insns (); |
| end_sequence (); |
| emit_insns_before (seq, first_insn); |
| emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX); |
| } |
| } |
| #endif |
| |
| /* If the result is equivalent to TARGET, return TARGET to simplify |
| checks in store_expr. They can be equivalent but not equal in the |
| case of a function that returns BLKmode. */ |
| if (temp != target && rtx_equal_p (temp, target)) |
| return target; |
| return temp; |
| } |
| |
| /* If inlining failed, mark FNDECL as needing to be compiled |
| separately after all. If function was declared inline, |
| give a warning. */ |
| if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline |
| && optimize > 0 && ! TREE_ADDRESSABLE (fndecl)) |
| { |
| warning_with_decl (fndecl, "inlining failed in call to `%s'"); |
| warning ("called from here"); |
| } |
| mark_addressable (fndecl); |
| } |
| |
| /* When calling a const function, we must pop the stack args right away, |
| so that the pop is deleted or moved with the call. */ |
| if (is_const) |
| NO_DEFER_POP; |
| |
| function_call_count++; |
| |
| if (fndecl && DECL_NAME (fndecl)) |
| name = IDENTIFIER_POINTER (DECL_NAME (fndecl)); |
| |
| #if 0 |
| /* Unless it's a call to a specific function that isn't alloca, |
| if it has one argument, we must assume it might be alloca. */ |
| |
| may_be_alloca |
| = (!(fndecl != 0 && strcmp (name, "alloca")) |
| && actparms != 0 |
| && TREE_CHAIN (actparms) == 0); |
| #else |
| /* We assume that alloca will always be called by name. It |
| makes no sense to pass it as a pointer-to-function to |
| anything that does not understand its behavior. */ |
| may_be_alloca |
| = (name && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6 |
| && name[0] == 'a' |
| && ! strcmp (name, "alloca")) |
| || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16 |
| && name[0] == '_' |
| && ! strcmp (name, "__builtin_alloca")))); |
| #endif |
| |
| /* See if this is a call to a function that can return more than once |
| or a call to longjmp. */ |
| |
| returns_twice = 0; |
| is_longjmp = 0; |
| |
| if (name != 0 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 15) |
| { |
| char *tname = name; |
| |
| /* Disregard prefix _, __ or __x. */ |
| if (name[0] == '_') |
| { |
| if (name[1] == '_' && name[2] == 'x') |
| tname += 3; |
| else if (name[1] == '_') |
| tname += 2; |
| else |
| tname += 1; |
| } |
| |
| if (tname[0] == 's') |
| { |
| returns_twice |
| = ((tname[1] == 'e' |
| && (! strcmp (tname, "setjmp") |
| || ! strcmp (tname, "setjmp_syscall"))) |
| || (tname[1] == 'i' |
| && ! strcmp (tname, "sigsetjmp")) |
| || (tname[1] == 'a' |
| && ! strcmp (tname, "savectx"))); |
| if (tname[1] == 'i' |
| && ! strcmp (tname, "siglongjmp")) |
| is_longjmp = 1; |
| } |
| else if ((tname[0] == 'q' && tname[1] == 's' |
| && ! strcmp (tname, "qsetjmp")) |
| || (tname[0] == 'v' && tname[1] == 'f' |
| && ! strcmp (tname, "vfork"))) |
| returns_twice = 1; |
| |
| else if (tname[0] == 'l' && tname[1] == 'o' |
| && ! strcmp (tname, "longjmp")) |
| is_longjmp = 1; |
| } |
| |
| if (may_be_alloca) |
| current_function_calls_alloca = 1; |
| |
| /* Don't let pending stack adjusts add up to too much. |
| Also, do all pending adjustments now |
| if there is any chance this might be a call to alloca. */ |
| |
| if (pending_stack_adjust >= 32 |
| || (pending_stack_adjust > 0 && may_be_alloca)) |
| do_pending_stack_adjust (); |
| |
| /* Operand 0 is a pointer-to-function; get the type of the function. */ |
| funtype = TREE_TYPE (TREE_OPERAND (exp, 0)); |
| if (TREE_CODE (funtype) != POINTER_TYPE) |
| abort (); |
| funtype = TREE_TYPE (funtype); |
| |
| /* Push the temporary stack slot level so that we can free any temporaries |
| we make. */ |
| push_temp_slots (); |
| |
| /* Start updating where the next arg would go. |
| |
| On some machines (such as the PA) indirect calls have a different |
| calling convention than normal calls. The last argument in |
| INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call |
| or not. */ |
| INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0)); |
| |
| /* If struct_value_rtx is 0, it means pass the address |
| as if it were an extra parameter. */ |
| if (structure_value_addr && struct_value_rtx == 0) |
| { |
| /* If structure_value_addr is a REG other than |
	 virtual_outgoing_args_rtx, we can always use it.  If it
| is not a REG, we must always copy it into a register. |
| If it is virtual_outgoing_args_rtx, we must copy it to another |
| register in some cases. */ |
| rtx temp = (GET_CODE (structure_value_addr) != REG |
| #ifdef ACCUMULATE_OUTGOING_ARGS |
| || (stack_arg_under_construction |
| && structure_value_addr == virtual_outgoing_args_rtx) |
| #endif |
| ? copy_addr_to_reg (structure_value_addr) |
| : structure_value_addr); |
| |
| actparms |
| = tree_cons (error_mark_node, |
| make_tree (build_pointer_type (TREE_TYPE (funtype)), |
| temp), |
| actparms); |
| structure_value_addr_parm = 1; |
| } |
| |
| /* Count the arguments and set NUM_ACTUALS. */ |
| for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++; |
| num_actuals = i; |
| |
| /* Compute number of named args. |
| Normally, don't include the last named arg if anonymous args follow. |
| We do include the last named arg if STRICT_ARGUMENT_NAMING is defined. |
| (If no anonymous args follow, the result of list_length is actually |
| one too large. This is harmless.) |
| |
| If SETUP_INCOMING_VARARGS is defined and STRICT_ARGUMENT_NAMING is not, |
| this machine will be able to place unnamed args that were passed in |
| registers into the stack. So treat all args as named. This allows the |
     insns emitted for a specific argument list to be independent of the
| function declaration. |
| |
| If SETUP_INCOMING_VARARGS is not defined, we do not have any reliable |
| way to pass unnamed args in registers, so we must force them into |
| memory. */ |
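/* As an illustration: for `int f (int, double)' the TYPE_ARG_TYPES chain
   ends with void_type_node, so list_length returns 3 and n_named_args
   is 2; for `int f (int, ...)' the chain has no void terminator,
   list_length returns 1, and n_named_args is 0, i.e. the last named arg
   is treated as unnamed.  */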
| #if !defined(SETUP_INCOMING_VARARGS) || defined(STRICT_ARGUMENT_NAMING) |
| if (TYPE_ARG_TYPES (funtype) != 0) |
| n_named_args |
| = (list_length (TYPE_ARG_TYPES (funtype)) |
| #ifndef STRICT_ARGUMENT_NAMING |
| /* Don't include the last named arg. */ |
| - 1 |
| #endif |
| /* Count the struct value address, if it is passed as a parm. */ |
| + structure_value_addr_parm); |
| else |
| #endif |
| /* If we know nothing, treat all args as named. */ |
| n_named_args = num_actuals; |
| |
| /* Make a vector to hold all the information about each arg. */ |
| args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data)); |
| bzero ((char *) args, num_actuals * sizeof (struct arg_data)); |
| |
| args_size.constant = 0; |
| args_size.var = 0; |
| |
| /* In this loop, we consider args in the order they are written. |
| We fill up ARGS from the front or from the back if necessary |
| so that in any case the first arg to be pushed ends up at the front. */ |
| |
| #ifdef PUSH_ARGS_REVERSED |
| i = num_actuals - 1, inc = -1; |
| /* In this case, must reverse order of args |
| so that we compute and push the last arg first. */ |
| #else |
| i = 0, inc = 1; |
| #endif |
| |
| /* I counts args in order (to be) pushed; ARGPOS counts in order written. */ |
| for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++) |
| { |
| tree type = TREE_TYPE (TREE_VALUE (p)); |
| int unsignedp; |
| enum machine_mode mode; |
| |
| args[i].tree_value = TREE_VALUE (p); |
| |
| /* Replace erroneous argument with constant zero. */ |
| if (type == error_mark_node || TYPE_SIZE (type) == 0) |
| args[i].tree_value = integer_zero_node, type = integer_type_node; |
| |
| /* If TYPE is a transparent union, pass things the way we would |
| pass the first field of the union. We have already verified that |
| the modes are the same. */ |
| if (TYPE_TRANSPARENT_UNION (type)) |
| type = TREE_TYPE (TYPE_FIELDS (type)); |
| |
| /* Decide where to pass this arg. |
| |
| args[i].reg is nonzero if all or part is passed in registers. |
| |
| args[i].partial is nonzero if part but not all is passed in registers, |
| and the exact value says how many words are passed in registers. |
| |
| args[i].pass_on_stack is nonzero if the argument must at least be |
| computed on the stack. It may then be loaded back into registers |
| if args[i].reg is nonzero. |
| |
| These decisions are driven by the FUNCTION_... macros and must agree |
| with those made by function.c. */ |
| |
| /* See if this argument should be passed by invisible reference. */ |
| if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST |
| && contains_placeholder_p (TYPE_SIZE (type))) |
| || TREE_ADDRESSABLE (type) |
| #ifdef FUNCTION_ARG_PASS_BY_REFERENCE |
| || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, TYPE_MODE (type), |
| type, argpos < n_named_args) |
| #endif |
| ) |
| { |
| /* If we're compiling a thunk, pass through invisible |
| references instead of making a copy. */ |
| if (current_function_is_thunk |
| #ifdef FUNCTION_ARG_CALLEE_COPIES |
| || (FUNCTION_ARG_CALLEE_COPIES (args_so_far, TYPE_MODE (type), |
| type, argpos < n_named_args) |
| /* If it's in a register, we must make a copy of it too. */ |
| /* ??? Is this a sufficient test? Is there a better one? */ |
| && !(TREE_CODE (args[i].tree_value) == VAR_DECL |
| && REG_P (DECL_RTL (args[i].tree_value))) |
| && ! TREE_ADDRESSABLE (type)) |
| #endif |
| ) |
| { |
| args[i].tree_value = build1 (ADDR_EXPR, |
| build_pointer_type (type), |
| args[i].tree_value); |
| type = build_pointer_type (type); |
| } |
| else |
| { |
| /* We make a copy of the object and pass the address to the |
| function being called. */ |
| rtx copy; |
| |
| if (TYPE_SIZE (type) == 0 |
| || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST |
| || (flag_stack_check && ! STACK_CHECK_BUILTIN |
| && (TREE_INT_CST_HIGH (TYPE_SIZE (type)) != 0 |
| || (TREE_INT_CST_LOW (TYPE_SIZE (type)) |
| > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT)))) |
| { |
| /* This is a variable-sized object. Make space on the stack |
| for it. */ |
| rtx size_rtx = expr_size (TREE_VALUE (p)); |
| |
| if (old_stack_level == 0) |
| { |
| emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX); |
| old_pending_adj = pending_stack_adjust; |
| pending_stack_adjust = 0; |
| } |
| |
| copy = gen_rtx (MEM, BLKmode, |
| allocate_dynamic_stack_space (size_rtx, |
| NULL_RTX, |
| TYPE_ALIGN (type))); |
| } |
| else |
| { |
| int size = int_size_in_bytes (type); |
| copy = assign_stack_temp (TYPE_MODE (type), size, 0); |
| } |
| |
| MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type); |
| |
| store_expr (args[i].tree_value, copy, 0); |
| is_const = 0; |
| |
| args[i].tree_value = build1 (ADDR_EXPR, |
| build_pointer_type (type), |
| make_tree (type, copy)); |
| type = build_pointer_type (type); |
| } |
| } |
| |
| mode = TYPE_MODE (type); |
| unsignedp = TREE_UNSIGNED (type); |
| |
| #ifdef PROMOTE_FUNCTION_ARGS |
| mode = promote_mode (type, mode, &unsignedp, 1); |
| #endif |
| |
| args[i].unsignedp = unsignedp; |
| args[i].mode = mode; |
| args[i].reg = FUNCTION_ARG (args_so_far, mode, type, |
| argpos < n_named_args); |
| #ifdef FUNCTION_ARG_PARTIAL_NREGS |
| if (args[i].reg) |
| args[i].partial |
| = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, type, |
| argpos < n_named_args); |
| #endif |
| |
| args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type); |
| |
| /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]), |
| it means that we are to pass this arg in the register(s) designated |
	 by the PARALLEL, but also to pass it on the stack.  */
| if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL |
| && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0) |
| args[i].pass_on_stack = 1; |
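      /* Schematically, such a value might look like

	     (parallel [(expr_list (nil) (const_int 0))
			(expr_list (reg:DI 3) (const_int 0))])

	 which says: pass the argument on the stack, and also in register 3
	 starting at byte offset 0.  (The exact registers, modes and offsets
	 come from the target's FUNCTION_ARG.)  */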
| |
| /* If this is an addressable type, we must preallocate the stack |
| since we must evaluate the object into its final location. |
| |
| If this is to be passed in both registers and the stack, it is simpler |
| to preallocate. */ |
| if (TREE_ADDRESSABLE (type) |
| || (args[i].pass_on_stack && args[i].reg != 0)) |
| must_preallocate = 1; |
| |
| /* If this is an addressable type, we cannot pre-evaluate it. Thus, |
| we cannot consider this function call constant. */ |
| if (TREE_ADDRESSABLE (type)) |
| is_const = 0; |
| |
| /* Compute the stack-size of this argument. */ |
| if (args[i].reg == 0 || args[i].partial != 0 |
| #ifdef REG_PARM_STACK_SPACE |
| || reg_parm_stack_space > 0 |
| #endif |
| || args[i].pass_on_stack) |
| locate_and_pad_parm (mode, type, |
| #ifdef STACK_PARMS_IN_REG_PARM_AREA |
| 1, |
| #else |
| args[i].reg != 0, |
| #endif |
| fndecl, &args_size, &args[i].offset, |
| &args[i].size); |
| |
| #ifndef ARGS_GROW_DOWNWARD |
| args[i].slot_offset = args_size; |
| #endif |
| |
| #ifndef REG_PARM_STACK_SPACE |
| /* If a part of the arg was put into registers, |
| don't include that part in the amount pushed. */ |
| if (! args[i].pass_on_stack) |
| args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD) |
| / (PARM_BOUNDARY / BITS_PER_UNIT) |
| * (PARM_BOUNDARY / BITS_PER_UNIT)); |
| #endif |
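      /* For instance, with 4-byte words and a 32-bit PARM_BOUNDARY, an
	 argument whose first two words go in registers has 8 bytes
	 removed from its stack size here.  */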
| |
| /* Update ARGS_SIZE, the total stack space for args so far. */ |
| |
| args_size.constant += args[i].size.constant; |
| if (args[i].size.var) |
| { |
| ADD_PARM_SIZE (args_size, args[i].size.var); |
| } |
| |
| /* Since the slot offset points to the bottom of the slot, |
| we must record it after incrementing if the args grow down. */ |
| #ifdef ARGS_GROW_DOWNWARD |
| args[i].slot_offset = args_size; |
| |
| args[i].slot_offset.constant = -args_size.constant; |
| if (args_size.var) |
| { |
| SUB_PARM_SIZE (args[i].slot_offset, args_size.var); |
| } |
| #endif |
| |
| /* Increment ARGS_SO_FAR, which has info about which arg-registers |
| have been used, etc. */ |
| |
| FUNCTION_ARG_ADVANCE (args_so_far, TYPE_MODE (type), type, |
| argpos < n_named_args); |
| } |
| |
| #ifdef FINAL_REG_PARM_STACK_SPACE |
| reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant, |
| args_size.var); |
| #endif |
| |
| /* Compute the actual size of the argument block required. The variable |
| and constant sizes must be combined, the size may have to be rounded, |
| and there may be a minimum required size. */ |
| |
| original_args_size = args_size; |
| if (args_size.var) |
| { |
| /* If this function requires a variable-sized argument list, don't try to |
| make a cse'able block for this call. We may be able to do this |
| eventually, but it is too complicated to keep track of what insns go |
| in the cse'able block and which don't. */ |
| |
| is_const = 0; |
| must_preallocate = 1; |
| |
| args_size.var = ARGS_SIZE_TREE (args_size); |
| args_size.constant = 0; |
| |
| #ifdef STACK_BOUNDARY |
| if (STACK_BOUNDARY != BITS_PER_UNIT) |
| args_size.var = round_up (args_size.var, STACK_BYTES); |
| #endif |
| |
| #ifdef REG_PARM_STACK_SPACE |
| if (reg_parm_stack_space > 0) |
| { |
| args_size.var |
| = size_binop (MAX_EXPR, args_size.var, |
| size_int (REG_PARM_STACK_SPACE (fndecl))); |
| |
| #ifndef OUTGOING_REG_PARM_STACK_SPACE |
| /* The area corresponding to register parameters is not to count in |
| the size of the block we need. So make the adjustment. */ |
| args_size.var |
| = size_binop (MINUS_EXPR, args_size.var, |
| size_int (reg_parm_stack_space)); |
| #endif |
| } |
| #endif |
| } |
| else |
| { |
| #ifdef STACK_BOUNDARY |
| args_size.constant = (((args_size.constant + (STACK_BYTES - 1)) |
| / STACK_BYTES) * STACK_BYTES); |
| #endif |
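      /* E.g., with STACK_BYTES == 8, a 20-byte argument block is rounded
	 up to 24 bytes here.  */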
| |
| #ifdef REG_PARM_STACK_SPACE |
| args_size.constant = MAX (args_size.constant, |
| reg_parm_stack_space); |
| #ifdef MAYBE_REG_PARM_STACK_SPACE |
| if (reg_parm_stack_space == 0) |
| args_size.constant = 0; |
| #endif |
| #ifndef OUTGOING_REG_PARM_STACK_SPACE |
| args_size.constant -= reg_parm_stack_space; |
| #endif |
| #endif |
| } |
| |
| /* See if we have or want to preallocate stack space. |
| |
| If we would have to push a partially-in-regs parm |
| before other stack parms, preallocate stack space instead. |
| |
| If the size of some parm is not a multiple of the required stack |
| alignment, we must preallocate. |
| |
| If the total size of arguments that would otherwise create a copy in |
| a temporary (such as a CALL) is more than half the total argument list |
| size, preallocation is faster. |
| |
| Another reason to preallocate is if we have a machine (like the m88k) |
| where stack alignment is required to be maintained between every |
| pair of insns, not just when the call is made. However, we assume here |
| that such machines either do not have push insns (and hence preallocation |
| would occur anyway) or the problem is taken care of with |
| PUSH_ROUNDING. */ |
| |
| if (! must_preallocate) |
| { |
| int partial_seen = 0; |
| int copy_to_evaluate_size = 0; |
| |
| for (i = 0; i < num_actuals && ! must_preallocate; i++) |
| { |
| if (args[i].partial > 0 && ! args[i].pass_on_stack) |
| partial_seen = 1; |
| else if (partial_seen && args[i].reg == 0) |
| must_preallocate = 1; |
| |
| if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode |
| && (TREE_CODE (args[i].tree_value) == CALL_EXPR |
| || TREE_CODE (args[i].tree_value) == TARGET_EXPR |
| || TREE_CODE (args[i].tree_value) == COND_EXPR |
| || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))) |
| copy_to_evaluate_size |
| += int_size_in_bytes (TREE_TYPE (args[i].tree_value)); |
| } |
| |
| if (copy_to_evaluate_size * 2 >= args_size.constant |
| && args_size.constant > 0) |
| must_preallocate = 1; |
| } |
| |
| /* If the structure value address will reference the stack pointer, we must |
| stabilize it. We don't need to do this if we know that we are not going |
| to adjust the stack pointer in processing this call. */ |
| |
| if (structure_value_addr |
| && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr) |
| || reg_mentioned_p (virtual_outgoing_args_rtx, structure_value_addr)) |
| && (args_size.var |
| #ifndef ACCUMULATE_OUTGOING_ARGS |
| || args_size.constant |
| #endif |
| )) |
| structure_value_addr = copy_to_reg (structure_value_addr); |
| |
| /* If this function call is cse'able, precompute all the parameters. |
| Note that if the parameter is constructed into a temporary, this will |
| cause an additional copy because the parameter will be constructed |
| into a temporary location and then copied into the outgoing arguments. |
| If a parameter contains a call to alloca and this function uses the |
| stack, precompute the parameter. */ |
| |
| /* If we preallocated the stack space, and some arguments must be passed |
| on the stack, then we must precompute any parameter which contains a |
| function call which will store arguments on the stack. |
| Otherwise, evaluating the parameter may clobber previous parameters |
| which have already been stored into the stack. */ |
| |
| for (i = 0; i < num_actuals; i++) |
| if (is_const |
| || ((args_size.var != 0 || args_size.constant != 0) |
| && calls_function (args[i].tree_value, 1)) |
| || (must_preallocate && (args_size.var != 0 || args_size.constant != 0) |
| && calls_function (args[i].tree_value, 0))) |
| { |
| /* If this is an addressable type, we cannot pre-evaluate it. */ |
| if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))) |
| abort (); |
| |
| push_temp_slots (); |
| |
| args[i].initial_value = args[i].value |
| = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0); |
| |
| preserve_temp_slots (args[i].value); |
| pop_temp_slots (); |
| |
| /* ANSI doesn't require a sequence point here, |
| but PCC has one, so this will avoid some problems. */ |
| emit_queue (); |
| |
| args[i].initial_value = args[i].value |
| = protect_from_queue (args[i].initial_value, 0); |
| |
| if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode) |
| args[i].value |
| = convert_modes (args[i].mode, |
| TYPE_MODE (TREE_TYPE (args[i].tree_value)), |
| args[i].value, args[i].unsignedp); |
| } |
| |
| /* Now we are about to start emitting insns that can be deleted |
| if a libcall is deleted. */ |
| if (is_const) |
| start_sequence (); |
| |
| /* If we have no actual push instructions, or shouldn't use them, |
| make space for all args right now. */ |
| |
| if (args_size.var != 0) |
| { |
| if (old_stack_level == 0) |
| { |
| emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX); |
| old_pending_adj = pending_stack_adjust; |
| pending_stack_adjust = 0; |
| #ifdef ACCUMULATE_OUTGOING_ARGS |
| /* stack_arg_under_construction says whether a stack arg is |
| being constructed at the old stack level. Pushing the stack |
| gets a clean outgoing argument block. */ |
| old_stack_arg_under_construction = stack_arg_under_construction; |
| stack_arg_under_construction = 0; |
| #endif |
| } |
| argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0); |
| } |
| else |
| { |
| /* Note that we must go through the motions of allocating an argument |
| block even if the size is zero because we may be storing args |
| in the area reserved for register arguments, which may be part of |
| the stack frame. */ |
| |
| int needed = args_size.constant; |
| |
| /* Store the maximum argument space used. It will be pushed by |
| the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow |
| checking). */ |
| |
| if (needed > current_function_outgoing_args_size) |
| current_function_outgoing_args_size = needed; |
| |
| if (must_preallocate) |
| { |
| #ifdef ACCUMULATE_OUTGOING_ARGS |
| /* Since the stack pointer will never be pushed, it is possible for |
| the evaluation of a parm to clobber something we have already |
| written to the stack. Since most function calls on RISC machines |
| do not use the stack, this is uncommon, but must work correctly. |
| |
| Therefore, we save any area of the stack that was already written |
| and that we are using. Here we set up to do this by making a new |
| stack usage map from the old one. The actual save will be done |
| by store_one_arg. |
| |
| Another approach might be to try to reorder the argument |
| evaluations to avoid this conflicting stack usage. */ |
| |
| #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE) |
| /* Since we will be writing into the entire argument area, the |
| map must be allocated for its entire size, not just the part that |
| is the responsibility of the caller. */ |
| needed += reg_parm_stack_space; |
| #endif |
| |
| #ifdef ARGS_GROW_DOWNWARD |
| highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, |
| needed + 1); |
| #else |
| highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, |
| needed); |
| #endif |
| stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use); |
| |
| if (initial_highest_arg_in_use) |
| bcopy (initial_stack_usage_map, stack_usage_map, |
| initial_highest_arg_in_use); |
| |
| if (initial_highest_arg_in_use != highest_outgoing_arg_in_use) |
| bzero (&stack_usage_map[initial_highest_arg_in_use], |
| highest_outgoing_arg_in_use - initial_highest_arg_in_use); |
| needed = 0; |
| |
	  /* The address of the outgoing argument list must not be copied to a
	     register here, because argblock would be left pointing to the
	     wrong place after the call to allocate_dynamic_stack_space below.  */
| |
| argblock = virtual_outgoing_args_rtx; |
| |
| #else /* not ACCUMULATE_OUTGOING_ARGS */ |
| if (inhibit_defer_pop == 0) |
| { |
| /* Try to reuse some or all of the pending_stack_adjust |
| to get this space. Maybe we can avoid any pushing. */ |
| if (needed > pending_stack_adjust) |
| { |
| needed -= pending_stack_adjust; |
| pending_stack_adjust = 0; |
| } |
| else |
| { |
| pending_stack_adjust -= needed; |
| needed = 0; |
| } |
| } |
| /* Special case this because overhead of `push_block' in this |
| case is non-trivial. */ |
| if (needed == 0) |
| argblock = virtual_outgoing_args_rtx; |
| else |
| argblock = push_block (GEN_INT (needed), 0, 0); |
| |
| /* We only really need to call `copy_to_reg' in the case where push |
| insns are going to be used to pass ARGBLOCK to a function |
| call in ARGS. In that case, the stack pointer changes value |
| from the allocation point to the call point, and hence |
| the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well. |
| But might as well always do it. */ |
| argblock = copy_to_reg (argblock); |
| #endif /* not ACCUMULATE_OUTGOING_ARGS */ |
| } |
| } |
| |
| #ifdef ACCUMULATE_OUTGOING_ARGS |
| /* The save/restore code in store_one_arg handles all cases except one: |
| a constructor call (including a C function returning a BLKmode struct) |
| to initialize an argument. */ |
| if (stack_arg_under_construction) |
| { |
| #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE) |
| rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant); |
| #else |
| rtx push_size = GEN_INT (args_size.constant); |
| #endif |
| if (old_stack_level == 0) |
| { |
| emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX); |
| old_pending_adj = pending_stack_adjust; |
| pending_stack_adjust = 0; |
| /* stack_arg_under_construction says whether a stack arg is |
| being constructed at the old stack level. Pushing the stack |
| gets a clean outgoing argument block. */ |
| old_stack_arg_under_construction = stack_arg_under_construction; |
| stack_arg_under_construction = 0; |
| /* Make a new map for the new argument list. */ |
| stack_usage_map = (char *)alloca (highest_outgoing_arg_in_use); |
| bzero (stack_usage_map, highest_outgoing_arg_in_use); |
| highest_outgoing_arg_in_use = 0; |
| } |
| allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT); |
| } |
| /* If argument evaluation might modify the stack pointer, copy the |
| address of the argument list to a register. */ |
| for (i = 0; i < num_actuals; i++) |
| if (args[i].pass_on_stack) |
| { |
| argblock = copy_addr_to_reg (argblock); |
| break; |
| } |
| #endif |
| |
| |
| /* If we preallocated stack space, compute the address of each argument. |
| We need not ensure it is a valid memory address here; it will be |
| validized when it is used. */ |
| if (argblock) |
| { |
| rtx arg_reg = argblock; |
| int arg_offset = 0; |
| |
| if (GET_CODE (argblock) == PLUS) |
| arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1)); |
| |
| for (i = 0; i < num_actuals; i++) |
| { |
| rtx offset = ARGS_SIZE_RTX (args[i].offset); |
| rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset); |
| rtx addr; |
| |
| /* Skip this parm if it will not be passed on the stack. */ |
| if (! args[i].pass_on_stack && args[i].reg != 0) |
| continue; |
| |
| if (GET_CODE (offset) == CONST_INT) |
| addr = plus_constant (arg_reg, INTVAL (offset)); |
| else |
| addr = gen_rtx (PLUS, Pmode, arg_reg, offset); |
| |
| addr = plus_constant (addr, arg_offset); |
| args[i].stack = gen_rtx (MEM, args[i].mode, addr); |
| MEM_IN_STRUCT_P (args[i].stack) |
| = AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value)); |
| |
| if (GET_CODE (slot_offset) == CONST_INT) |
| addr = plus_constant (arg_reg, INTVAL (slot_offset)); |
| else |
| addr = gen_rtx (PLUS, Pmode, arg_reg, slot_offset); |
| |
| addr = plus_constant (addr, arg_offset); |
| args[i].stack_slot = gen_rtx (MEM, args[i].mode, addr); |
| } |
| } |
| |
| #ifdef PUSH_ARGS_REVERSED |
| #ifdef STACK_BOUNDARY |
| /* If we push args individually in reverse order, perform stack alignment |
| before the first push (the last arg). */ |
| if (argblock == 0) |
| anti_adjust_stack (GEN_INT (args_size.constant |
| - original_args_size.constant)); |
| #endif |
| #endif |
| |
| /* Don't try to defer pops if preallocating, not even from the first arg, |
| since ARGBLOCK probably refers to the SP. */ |
| if (argblock) |
| NO_DEFER_POP; |
| |
| /* Get the function to call, in the form of RTL. */ |
| if (fndecl) |
| { |
| /* If this is the first use of the function, see if we need to |
| make an external definition for it. */ |
| if (! TREE_USED (fndecl)) |
| { |
| assemble_external (fndecl); |
| TREE_USED (fndecl) = 1; |
| } |
| |
| /* Get a SYMBOL_REF rtx for the function address. */ |
| funexp = XEXP (DECL_RTL (fndecl), 0); |
| } |
| else |
| /* Generate an rtx (probably a pseudo-register) for the address. */ |
| { |
| push_temp_slots (); |
| funexp = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0); |
| pop_temp_slots (); /* FUNEXP can't be BLKmode */ |
| |
      /* Check that the function is executable.  */
| if (flag_check_memory_usage) |
| emit_library_call (chkr_check_exec_libfunc, 1, |
| VOIDmode, 1, |
| funexp, ptr_mode); |
| emit_queue (); |
| } |
| |
| /* Figure out the register where the value, if any, will come back. */ |
| valreg = 0; |
| if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode |
| && ! structure_value_addr) |
| { |
| if (pcc_struct_value) |
| valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)), |
| fndecl); |
| else |
| valreg = hard_function_value (TREE_TYPE (exp), fndecl); |
| } |
| |
| /* Precompute all register parameters. It isn't safe to compute anything |
| once we have started filling any specific hard regs. */ |
| reg_parm_seen = 0; |
| for (i = 0; i < num_actuals; i++) |
| if (args[i].reg != 0 && ! args[i].pass_on_stack) |
| { |
| reg_parm_seen = 1; |
| |
| if (args[i].value == 0) |
| { |
| push_temp_slots (); |
| args[i].value = expand_expr (args[i].tree_value, NULL_RTX, |
| VOIDmode, 0); |
| preserve_temp_slots (args[i].value); |
| pop_temp_slots (); |
| |
| /* ANSI doesn't require a sequence point here, |
| but PCC has one, so this will avoid some problems. */ |
| emit_queue (); |
| } |
| |
| /* If we are to promote the function arg to a wider mode, |
| do it now. */ |
| |
| if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value))) |
| args[i].value |
| = convert_modes (args[i].mode, |
| TYPE_MODE (TREE_TYPE (args[i].tree_value)), |
| args[i].value, args[i].unsignedp); |
| |
| /* If the value is expensive, and we are inside an appropriately |
| short loop, put the value into a pseudo and then put the pseudo |
| into the hard reg. |
| |
| For small register classes, also do this if this call uses |
| register parameters. This is to avoid reload conflicts while |
	 loading the parameter registers.  */
| |
| if ((! (GET_CODE (args[i].value) == REG |
| || (GET_CODE (args[i].value) == SUBREG |
| && GET_CODE (SUBREG_REG (args[i].value)) == REG))) |
| && args[i].mode != BLKmode |
| && rtx_cost (args[i].value, SET) > 2 |
| #ifdef SMALL_REGISTER_CLASSES |
| && ((SMALL_REGISTER_CLASSES && reg_parm_seen) |
| || preserve_subexpressions_p ()) |
| #else |
| && preserve_subexpressions_p () |
| #endif |
| ) |
| args[i].value = copy_to_mode_reg (args[i].mode, args[i].value); |
| } |
| |
| #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE) |
| /* The argument list is the property of the called routine and it |
| may clobber it. If the fixed area has been used for previous |
| parameters, we must save and restore it. |
| |
     Here we compute the boundary of the area that needs to be saved, if any.  */
| |
| #ifdef ARGS_GROW_DOWNWARD |
| for (i = 0; i < reg_parm_stack_space + 1; i++) |
| #else |
| for (i = 0; i < reg_parm_stack_space; i++) |
| #endif |
| { |
| if (i >= highest_outgoing_arg_in_use |
| || stack_usage_map[i] == 0) |
| continue; |
| |
| if (low_to_save == -1) |
| low_to_save = i; |
| |
| high_to_save = i; |
| } |
| |
| if (low_to_save >= 0) |
| { |
| int num_to_save = high_to_save - low_to_save + 1; |
| enum machine_mode save_mode |
| = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1); |
| rtx stack_area; |
| |
      /* If we don't have the required alignment, we must do this in
	 BLKmode.  */
| if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode), |
| BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1))) |
| save_mode = BLKmode; |
| |
| stack_area = gen_rtx (MEM, save_mode, |
| memory_address (save_mode, |
| |
| #ifdef ARGS_GROW_DOWNWARD |
| plus_constant (argblock, |
| - high_to_save) |
| #else |
| plus_constant (argblock, |
| low_to_save) |
| #endif |
| )); |
| if (save_mode == BLKmode) |
| { |
| save_area = assign_stack_temp (BLKmode, num_to_save, 0); |
| MEM_IN_STRUCT_P (save_area) = 0; |
| emit_block_move (validize_mem (save_area), stack_area, |
| GEN_INT (num_to_save), |
| PARM_BOUNDARY / BITS_PER_UNIT); |
| } |
| else |
| { |
| save_area = gen_reg_rtx (save_mode); |
| emit_move_insn (save_area, stack_area); |
| } |
| } |
| #endif |
| |
| |
| /* Now store (and compute if necessary) all non-register parms. |
| These come before register parms, since they can require block-moves, |
| which could clobber the registers used for register parms. |
| Parms which have partial registers are not stored here, |
| but we do preallocate space here if they want that. */ |
| |
| for (i = 0; i < num_actuals; i++) |
| if (args[i].reg == 0 || args[i].pass_on_stack) |
| store_one_arg (&args[i], argblock, may_be_alloca, |
| args_size.var != 0, fndecl, reg_parm_stack_space); |
| |
| /* If we have a parm that is passed in registers but not in memory |
| and whose alignment does not permit a direct copy into registers, |
| make a group of pseudos that correspond to each register that we |
| will later fill. */ |
| |
| if (STRICT_ALIGNMENT) |
| for (i = 0; i < num_actuals; i++) |
| if (args[i].reg != 0 && ! args[i].pass_on_stack |
| && args[i].mode == BLKmode |
| && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value)) |
| < MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD))) |
| { |
| int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value)); |
| int big_endian_correction = 0; |
| |
| args[i].n_aligned_regs |
| = args[i].partial ? args[i].partial |
| : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD; |
| |
| args[i].aligned_regs = (rtx *) alloca (sizeof (rtx) |
| * args[i].n_aligned_regs); |
| |
| /* Structures smaller than a word are aligned to the least |
| significant byte (to the right). On a BYTES_BIG_ENDIAN machine, |
| this means we must skip the empty high order bytes when |
| calculating the bit offset. */ |
| if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD) |
| big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT)); |
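
	  /* For example, a 3-byte structure on a 32-bit big-endian target
	     gives big_endian_correction == 32 - 24 == 8: the data occupies
	     the low-order three bytes of the word.  (Illustrative numbers
	     only.)  */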
| |
| for (j = 0; j < args[i].n_aligned_regs; j++) |
| { |
| rtx reg = gen_reg_rtx (word_mode); |
| rtx word = operand_subword_force (args[i].value, j, BLKmode); |
| int bitsize = TYPE_ALIGN (TREE_TYPE (args[i].tree_value)); |
| int bitpos; |
| |
| args[i].aligned_regs[j] = reg; |
| |
| /* Clobber REG and move each partword into it. Ensure we don't |
| go past the end of the structure. Note that the loop below |
| works because we've already verified that padding |
| and endianness are compatible. |
| |
	     We used to emit a clobber here, but that doesn't let later
	     passes optimize the instructions we emit.  By storing 0 into
	     the register, later passes know that the first AND to zero
	     out the bitfield being set in the register is unnecessary.
	     The store of 0 will be deleted, as will at least the first
	     AND.  */
| |
| emit_move_insn (reg, const0_rtx); |
| |
| for (bitpos = 0; |
| bitpos < BITS_PER_WORD && bytes > 0; |
| bitpos += bitsize, bytes -= bitsize / BITS_PER_UNIT) |
| { |
| int xbitpos = bitpos + big_endian_correction; |
| |
| store_bit_field (reg, bitsize, xbitpos, word_mode, |
| extract_bit_field (word, bitsize, bitpos, 1, |
| NULL_RTX, word_mode, |
| word_mode, |
| bitsize / BITS_PER_UNIT, |
| BITS_PER_WORD), |
| bitsize / BITS_PER_UNIT, BITS_PER_WORD); |
| } |
| } |
| } |
| |
| /* Now store any partially-in-registers parm. |
| This is the last place a block-move can happen. */ |
| if (reg_parm_seen) |
| for (i = 0; i < num_actuals; i++) |
| if (args[i].partial != 0 && ! args[i].pass_on_stack) |
| store_one_arg (&args[i], argblock, may_be_alloca, |
| args_size.var != 0, fndecl, reg_parm_stack_space); |
| |
| #ifndef PUSH_ARGS_REVERSED |
| #ifdef STACK_BOUNDARY |
| /* If we pushed args in forward order, perform stack alignment |
| after pushing the last arg. */ |
| if (argblock == 0) |
| anti_adjust_stack (GEN_INT (args_size.constant |
| - original_args_size.constant)); |
| #endif |
| #endif |
| |
| /* If register arguments require space on the stack and stack space |
| was not preallocated, allocate stack space here for arguments |
| passed in registers. */ |
| #if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE) |
| if (must_preallocate == 0 && reg_parm_stack_space > 0) |
| anti_adjust_stack (GEN_INT (reg_parm_stack_space)); |
| #endif |
| |
| /* Pass the function the address in which to return a structure value. */ |
| if (structure_value_addr && ! structure_value_addr_parm) |
| { |
| emit_move_insn (struct_value_rtx, |
| force_reg (Pmode, |
| force_operand (structure_value_addr, |
| NULL_RTX))); |
| |
| /* Mark the memory for the aggregate as write-only. */ |
| if (flag_check_memory_usage) |
| emit_library_call (chkr_set_right_libfunc, 1, |
| VOIDmode, 3, |
| structure_value_addr, ptr_mode, |
| GEN_INT (struct_value_size), TYPE_MODE (sizetype), |
| GEN_INT (MEMORY_USE_WO), QImode); |
| |
| if (GET_CODE (struct_value_rtx) == REG) |
| use_reg (&call_fusage, struct_value_rtx); |
| } |
| |
| funexp = prepare_call_address (funexp, fndecl, &call_fusage, reg_parm_seen); |
| |
| /* Now do the register loads required for any wholly-register parms or any |
| parms which are passed both on the stack and in a register. Their |
| expressions were already evaluated. |
| |
| Mark all register-parms as living through the call, putting these USE |
| insns in the CALL_INSN_FUNCTION_USAGE field. */ |
| |
| for (i = 0; i < num_actuals; i++) |
| { |
| rtx reg = args[i].reg; |
| int partial = args[i].partial; |
| int nregs; |
| |
| if (reg) |
| { |
	  /* Set to non-negative if we must move a word at a time, even if
	     just one word (e.g., partial == 1 && mode == DFmode).  Set to
	     -1 if we just use a normal move insn.  This value can be zero
	     if the argument is a zero size structure with no fields.  */
| nregs = (partial ? partial |
| : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode |
| ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value)) |
| + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD) |
| : -1)); |
| |
| /* Handle calls that pass values in multiple non-contiguous |
| locations. The Irix 6 ABI has examples of this. */ |
| |
| if (GET_CODE (reg) == PARALLEL) |
| emit_group_load (reg, args[i].value); |
| |
	  /* In the simple case, just do the move.  If this is a normal
	     partial arg, store_one_arg has already loaded the register
	     for us.  In all other cases, load the register(s) from
	     memory.  */
| |
| else if (nregs == -1) |
| emit_move_insn (reg, args[i].value); |
| |
| /* If we have pre-computed the values to put in the registers in |
| the case of non-aligned structures, copy them in now. */ |
| |
| else if (args[i].n_aligned_regs != 0) |
| for (j = 0; j < args[i].n_aligned_regs; j++) |
| emit_move_insn (gen_rtx (REG, word_mode, REGNO (reg) + j), |
| args[i].aligned_regs[j]); |
| |
| else if (partial == 0 || args[i].pass_on_stack) |
| move_block_to_reg (REGNO (reg), |
| validize_mem (args[i].value), nregs, |
| args[i].mode); |
| |
| /* Handle calls that pass values in multiple non-contiguous |
| locations. The Irix 6 ABI has examples of this. */ |
| if (GET_CODE (reg) == PARALLEL) |
| use_group_regs (&call_fusage, reg); |
| else if (nregs == -1) |
| use_reg (&call_fusage, reg); |
| else |
| use_regs (&call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs); |
| } |
| } |
| |
| /* Perform postincrements before actually calling the function. */ |
| emit_queue (); |
| |
| /* All arguments and registers used for the call must be set up by now! */ |
| |
| /* Generate the actual call instruction. */ |
| emit_call_1 (funexp, fndecl, funtype, args_size.constant, struct_value_size, |
| FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1), |
| valreg, old_inhibit_defer_pop, call_fusage, is_const); |
| |
| /* If call is cse'able, make appropriate pair of reg-notes around it. |
| Test valreg so we don't crash; may safely ignore `const' |
| if return type is void. Disable for PARALLEL return values, because |
| we have no way to move such values into a pseudo register. */ |
| if (is_const && valreg != 0 && GET_CODE (valreg) != PARALLEL) |
| { |
| rtx note = 0; |
| rtx temp = gen_reg_rtx (GET_MODE (valreg)); |
| rtx insns; |
| |
| /* Construct an "equal form" for the value which mentions all the |
| arguments in order as well as the function name. */ |
| #ifdef PUSH_ARGS_REVERSED |
| for (i = 0; i < num_actuals; i++) |
| note = gen_rtx (EXPR_LIST, VOIDmode, args[i].initial_value, note); |
| #else |
| for (i = num_actuals - 1; i >= 0; i--) |
| note = gen_rtx (EXPR_LIST, VOIDmode, args[i].initial_value, note); |
| #endif |
| note = gen_rtx (EXPR_LIST, VOIDmode, funexp, note); |
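
      /* The note built above is a chain of EXPR_LISTs with FUNEXP outermost
	 and one entry per argument, e.g. for two arguments

	     (expr_list FUNEXP (expr_list ARG (expr_list ARG (nil))))

	 emit_libcall_block below records it as a REG_EQUAL note so that
	 CSE can recognize repeated calls with the same operands.  */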
| |
| insns = get_insns (); |
| end_sequence (); |
| |
| emit_libcall_block (insns, temp, valreg, note); |
| |
| valreg = temp; |
| } |
| else if (is_const) |
| { |
| /* Otherwise, just write out the sequence without a note. */ |
| rtx insns = get_insns (); |
| |
| end_sequence (); |
| emit_insns (insns); |
| } |
| |
| /* For calls to `setjmp', etc., inform flow.c it should complain |
| if nonvolatile values are live. */ |
| |
| if (returns_twice) |
| { |
| emit_note (name, NOTE_INSN_SETJMP); |
| current_function_calls_setjmp = 1; |
| } |
| |
| if (is_longjmp) |
| current_function_calls_longjmp = 1; |
| |
| /* Notice functions that cannot return. |
| If optimizing, insns emitted below will be dead. |
| If not optimizing, they will exist, which is useful |
| if the user uses the `return' command in the debugger. */ |
| |
| if (is_volatile || is_longjmp) |
| emit_barrier (); |
| |
| /* If value type not void, return an rtx for the value. */ |
| |
| /* If there are cleanups to be called, don't use a hard reg as target. |
| We need to double check this and see if it matters anymore. */ |
| if (any_pending_cleanups () |
| && target && REG_P (target) |
| && REGNO (target) < FIRST_PSEUDO_REGISTER) |
| target = 0; |
| |
| if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode |
| || ignore) |
| { |
| target = const0_rtx; |
| } |
| else if (structure_value_addr) |
| { |
| if (target == 0 || GET_CODE (target) != MEM) |
| { |
| target = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (exp)), |
| memory_address (TYPE_MODE (TREE_TYPE (exp)), |
| structure_value_addr)); |
| MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp)); |
| } |
| } |
| else if (pcc_struct_value) |
| { |
| if (target == 0) |
| { |
	  /* We used to leave the value in the location that it is
| returned in, but that causes problems if it is used more |
| than once in one expression. Rather than trying to track |
| when a copy is required, we always copy when TARGET is |
| not specified. This calling sequence is only used on |
| a few machines and TARGET is usually nonzero. */ |
| if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode) |
| { |
| target = assign_stack_temp (BLKmode, |
| int_size_in_bytes (TREE_TYPE (exp)), |
| 0); |
| |
| MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp)); |
| |
| /* Save this temp slot around the pop below. */ |
| preserve_temp_slots (target); |
| } |
| else |
| target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp))); |
| } |
| |
| if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode) |
| emit_move_insn (target, gen_rtx (MEM, TYPE_MODE (TREE_TYPE (exp)), |
| copy_to_reg (valreg))); |
| else |
| emit_block_move (target, gen_rtx (MEM, BLKmode, copy_to_reg (valreg)), |
| expr_size (exp), |
| TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT); |
| } |
| /* Handle calls that return values in multiple non-contiguous locations. |
| The Irix 6 ABI has examples of this. */ |
| else if (GET_CODE (valreg) == PARALLEL) |
| { |
| if (target == 0) |
| { |
| int bytes = int_size_in_bytes (TREE_TYPE (exp)); |
| target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)), bytes, 0); |
| MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp)); |
| preserve_temp_slots (target); |
| } |
| |
| emit_group_store (target, valreg); |
| } |
| else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp)) |
| && GET_MODE (target) == GET_MODE (valreg)) |
| /* TARGET and VALREG cannot be equal at this point because the latter |
| would not have REG_FUNCTION_VALUE_P true, while the former would if |
| it were referring to the same register. |
| |
| If they refer to the same register, this move will be a no-op, except |
| when function inlining is being done. */ |
| emit_move_insn (target, valreg); |
| else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode) |
| { |
| /* Some machines (the PA for example) want to return all small |
| structures in registers regardless of the structure's alignment. |
| |
| Deal with them explicitly by copying from the return registers |
| into the target MEM locations. */ |
| int bytes = int_size_in_bytes (TREE_TYPE (exp)); |
| int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD; |
| int i; |
| enum machine_mode tmpmode; |
| rtx src, dst; |
| int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (exp)), BITS_PER_WORD); |
| int bitpos, xbitpos, big_endian_correction = 0; |
| |
| if (target == 0) |
| { |
| target = assign_stack_temp (BLKmode, bytes, 0); |
| MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp)); |
| preserve_temp_slots (target); |
| } |
| |
| /* This code assumes valreg is at least a full word. If it isn't, |
| copy it into a new pseudo which is a full word. */ |
| if (GET_MODE (valreg) != BLKmode |
| && GET_MODE_SIZE (GET_MODE (valreg)) < UNITS_PER_WORD) |
| valreg = convert_to_mode (word_mode, valreg, |
| TREE_UNSIGNED (TREE_TYPE (exp))); |
| |
| /* Structures whose size is not a multiple of a word are aligned |
| to the least significant byte (to the right). On a BYTES_BIG_ENDIAN |
| machine, this means we must skip the empty high order bytes when |
| calculating the bit offset. */ |
| if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD) |
| big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) |
| * BITS_PER_UNIT)); |
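
      /* For example, a 6-byte structure on a 32-bit big-endian target has
	 bytes % UNITS_PER_WORD == 2, so big_endian_correction == 32 - 16
	 == 16 and the second word's data sits in its low-order half.
	 (Illustrative numbers only.)  */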
| |
      /* Copy the structure BITSIZE bits at a time.
| |
| We could probably emit more efficient code for machines |
| which do not use strict alignment, but it doesn't seem |
| worth the effort at the current time. */ |
| for (bitpos = 0, xbitpos = big_endian_correction; |
| bitpos < bytes * BITS_PER_UNIT; |
| bitpos += bitsize, xbitpos += bitsize) |
| { |
| |
| /* We need a new source operand each time xbitpos is on a |
| word boundary and when xbitpos == big_endian_correction |
| (the first time through). */ |
| if (xbitpos % BITS_PER_WORD == 0 |
| || xbitpos == big_endian_correction) |
| src = operand_subword_force (valreg, |
| xbitpos / BITS_PER_WORD, |
| BLKmode); |
| |
| /* We need a new destination operand each time bitpos is on |
| a word boundary. */ |
| if (bitpos % BITS_PER_WORD == 0) |
| dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, BLKmode); |
| |
	  /* Use xbitpos for the source extraction (right justified) and
	     bitpos for the destination store (left justified).  */
| store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode, |
| extract_bit_field (src, bitsize, |
| xbitpos % BITS_PER_WORD, 1, |
| NULL_RTX, word_mode, |
| word_mode, |
| bitsize / BITS_PER_UNIT, |
| BITS_PER_WORD), |
| bitsize / BITS_PER_UNIT, BITS_PER_WORD); |
| } |
| } |
| else |
| target = copy_to_reg (valreg); |
| |
| #ifdef PROMOTE_FUNCTION_RETURN |
| /* If we promoted this return value, make the proper SUBREG. TARGET |
| might be const0_rtx here, so be careful. */ |
| if (GET_CODE (target) == REG |
| && TYPE_MODE (TREE_TYPE (exp)) != BLKmode |
| && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))) |
| { |
| tree type = TREE_TYPE (exp); |
| int unsignedp = TREE_UNSIGNED (type); |
| |
| /* If we don't promote as expected, something is wrong. */ |
| if (GET_MODE (target) |
| != promote_mode (type, TYPE_MODE (type), &unsignedp, 1)) |
| abort (); |
| |
| target = gen_rtx (SUBREG, TYPE_MODE (type), target, 0); |
| SUBREG_PROMOTED_VAR_P (target) = 1; |
| SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp; |
| } |
| #endif |
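
  /* For example, if the target promotes a `short' return value to SImode,
     the code above wraps TARGET in (subreg:HI (reg:SI ...) 0) and marks it
     promoted, letting later code omit redundant sign- or zero-extensions.
     (Illustrative modes only.)  */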
| |
| /* If size of args is variable or this was a constructor call for a stack |
| argument, restore saved stack-pointer value. */ |
| |
| if (old_stack_level) |
| { |
| emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX); |
| pending_stack_adjust = old_pending_adj; |
| #ifdef ACCUMULATE_OUTGOING_ARGS |
| stack_arg_under_construction = old_stack_arg_under_construction; |
| highest_outgoing_arg_in_use = initial_highest_arg_in_use; |
| stack_usage_map = initial_stack_usage_map; |
| #endif |
| } |
| #ifdef ACCUMULATE_OUTGOING_ARGS |
| else |
| { |
| #ifdef REG_PARM_STACK_SPACE |
| if (save_area) |
| { |
| enum machine_mode save_mode = GET_MODE (save_area); |
| rtx stack_area |
| = gen_rtx (MEM, save_mode, |
| memory_address (save_mode, |
| #ifdef ARGS_GROW_DOWNWARD |
| plus_constant (argblock, - high_to_save) |
| #else |
| plus_constant (argblock, low_to_save) |
| #endif |
| )); |
| |
| if (save_mode != BLKmode) |
| emit_move_insn (stack_area, save_area); |
| else |
| emit_block_move (stack_area, validize_mem (save_area), |
| GEN_INT (high_to_save - low_to_save + 1), |
| PARM_BOUNDARY / BITS_PER_UNIT); |
| } |
| #endif |
| |
| /* If we saved any argument areas, restore them. */ |
| for (i = 0; i < num_actuals; i++) |
| if (args[i].save_area) |
| { |
| enum machine_mode save_mode = GET_MODE (args[i].save_area); |
| rtx stack_area |
| = gen_rtx (MEM, save_mode, |
| memory_address (save_mode, |
| XEXP (args[i].stack_slot, 0))); |
| |
| if (save_mode != BLKmode) |
| emit_move_insn (stack_area, args[i].save_area); |
| else |
| emit_block_move (stack_area, validize_mem (args[i].save_area), |
| GEN_INT (args[i].size.constant), |
| PARM_BOUNDARY / BITS_PER_UNIT); |
| } |
| |
| highest_outgoing_arg_in_use = initial_highest_arg_in_use; |
| stack_usage_map = initial_stack_usage_map; |
| } |
| #endif |
| |
| /* If this was alloca, record the new stack level for nonlocal gotos. |
| Check for the handler slots since we might not have a save area |
| for non-local gotos. */ |
| |
| if (may_be_alloca && nonlocal_goto_handler_slot != 0) |
| emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX); |
| |
| pop_temp_slots (); |
| |
| return target; |
| } |
| |
| /* Output a library call to function FUN (a SYMBOL_REF rtx) |
| (emitting the queue unless NO_QUEUE is nonzero), |
| for a value of mode OUTMODE, |
| with NARGS different arguments, passed as alternating rtx values |
| and machine_modes to convert them to. |
| The rtx values should have been passed through protect_from_queue already. |
| |
| NO_QUEUE will be true if and only if the library call is a `const' call |
| which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent |
| to the variable is_const in expand_call. |
| |
| NO_QUEUE must be true for const calls, because if it isn't, then |
| any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes, |
| and will be lost if the libcall sequence is optimized away. |
| |
| NO_QUEUE must be false for non-const calls, because if it isn't, the |
| call insn will have its CONST_CALL_P bit set, and it will be incorrectly |
| optimized. For instance, the instruction scheduler may incorrectly |
| move memory references across the non-const call. */ |
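
/* As an illustrative sketch (the operand names are hypothetical): code
   such as the block-move expander (when TARGET_MEM_FUNCTIONS is defined)
   emits a three-argument libcall roughly like this, passing each rtx
   value followed by its machine mode:

	emit_library_call (memcpy_libfunc, 0, VOIDmode, 3,
			   XEXP (x, 0), Pmode, XEXP (y, 0), Pmode,
			   size, TYPE_MODE (sizetype));  */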
| |
| void |
| emit_library_call VPROTO((rtx orgfun, int no_queue, enum machine_mode outmode, |
| int nargs, ...)) |
| { |
| #ifndef __STDC__ |
| rtx orgfun; |
| int no_queue; |
| enum machine_mode outmode; |
| int nargs; |
| #endif |
| va_list p; |
| /* Total size in bytes of all the stack-parms scanned so far. */ |
| struct args_size args_size; |
| /* Size of arguments before any adjustments (such as rounding). */ |
| struct args_size original_args_size; |
| register int argnum; |
| rtx fun; |
| int inc; |
| int count; |
| rtx argblock = 0; |
| CUMULATIVE_ARGS args_so_far; |
| struct arg { rtx value; enum machine_mode mode; rtx reg; int partial; |
| struct args_size offset; struct args_size size; rtx save_area; }; |
| struct arg *argvec; |
| int old_inhibit_defer_pop = inhibit_defer_pop; |
| rtx call_fusage = 0; |
| /* Size of the stack reserved for parameter registers. */ |
| int reg_parm_stack_space = 0; |
| #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE) |
| /* Define the boundary of the register parm stack space that needs to be |
   saved, if any.  */
| int low_to_save = -1, high_to_save; |
| rtx save_area = 0; /* Place that it is saved */ |
| #endif |
| |
| #ifdef ACCUMULATE_OUTGOING_ARGS |
| int initial_highest_arg_in_use = highest_outgoing_arg_in_use; |
| char *initial_stack_usage_map = stack_usage_map; |
| int needed; |
| #endif |
| |
| #ifdef REG_PARM_STACK_SPACE |
| #ifdef MAYBE_REG_PARM_STACK_SPACE |
| reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE; |
| #else |
  /* FNDECL is not available in this context; pass a null tree instead
     (typical REG_PARM_STACK_SPACE definitions ignore their argument).  */
  reg_parm_stack_space = REG_PARM_STACK_SPACE (NULL_TREE);
| #endif |
| #endif |
| |
| VA_START (p, nargs); |
| |
| #ifndef __STDC__ |
| orgfun = va_arg (p, rtx); |
| no_queue = va_arg (p, int); |
| outmode = va_arg (p, enum machine_mode); |
| nargs = va_arg (p, int); |
| #endif |
| |
| fun = orgfun; |
| |
| /* Copy all the libcall-arguments out of the varargs data |
| and into a vector ARGVEC. |
| |
| Compute how to pass each argument. We only support a very small subset |
| of the full argument passing conventions to limit complexity here since |
| library functions shouldn't have many args. */ |
| |
| argvec = (struct arg *) alloca (nargs * sizeof (struct arg)); |
| bzero ((char *) argvec, nargs * sizeof (struct arg)); |
| |
| |
| INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0); |
| |
| args_size.constant = 0; |
| args_size.var = 0; |
| |
| push_temp_slots (); |
| |
| for (count = 0; count < nargs; count++) |
| { |
| rtx val = va_arg (p, rtx); |
| enum machine_mode mode = va_arg (p, enum machine_mode); |
| |
| /* We cannot convert the arg value to the mode the library wants here; |
| must do it earlier where we know the signedness of the arg. */ |
| if (mode == BLKmode |
| || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)) |
| abort (); |
| |
| /* On some machines, there's no way to pass a float to a library fcn. |
| Pass it as a double instead. */ |
| #ifdef LIBGCC_NEEDS_DOUBLE |
| if (LIBGCC_NEEDS_DOUBLE && mode == SFmode) |
| val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode; |
| #endif |
| |
| /* There's no need to call protect_from_queue, because |
| either emit_move_insn or emit_push_insn will do that. */ |
| |
| /* Make sure it is a reasonable operand for a move or push insn. */ |
| if (GET_CODE (val) != REG && GET_CODE (val) != MEM |
| && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val))) |
| val = force_operand (val, NULL_RTX); |
| |
| #ifdef FUNCTION_ARG_PASS_BY_REFERENCE |
| if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1)) |
| { |
| /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can |
| be viewed as just an efficiency improvement. */ |
| rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0); |
| emit_move_insn (slot, val); |
| val = force_operand (XEXP (slot, 0), NULL_RTX); |
| mode = Pmode; |
| } |
| #endif |
| |
| argvec[count].value = val; |
| argvec[count].mode = mode; |
| |
| argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1); |
| if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL) |
| abort (); |
| #ifdef FUNCTION_ARG_PARTIAL_NREGS |
| argvec[count].partial |
| = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1); |
| #else |
| argvec[count].partial = 0; |
| #endif |
| |
| locate_and_pad_parm (mode, NULL_TREE, |
| argvec[count].reg && argvec[count].partial == 0, |
| NULL_TREE, &args_size, &argvec[count].offset, |
| &argvec[count].size); |
| |
| if (argvec[count].size.var) |
| abort (); |
| |
| #ifndef REG_PARM_STACK_SPACE |
| if (argvec[count].partial) |
| argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD; |
| #endif |
| |
| if (argvec[count].reg == 0 || argvec[count].partial != 0 |
| #ifdef REG_PARM_STACK_SPACE |
| || 1 |
| #endif |
| ) |
| args_size.constant += argvec[count].size.constant; |
| |
| FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1); |
| } |
| va_end (p); |
| |
| #ifdef FINAL_REG_PARM_STACK_SPACE |
| reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant, |
| args_size.var); |
| #endif |
| |
| /* If this machine requires an external definition for library |
| functions, write one out. */ |
| assemble_external_libcall (fun); |
| |
| original_args_size = args_size; |
| #ifdef STACK_BOUNDARY |
| args_size.constant = (((args_size.constant + (STACK_BYTES - 1)) |
| / STACK_BYTES) * STACK_BYTES); |
| #endif |
| |
| #ifdef REG_PARM_STACK_SPACE |
| args_size.constant = MAX (args_size.constant, |
| reg_parm_stack_space); |
| #ifndef OUTGOING_REG_PARM_STACK_SPACE |
| args_size.constant -= reg_parm_stack_space; |
| #endif |
| #endif |
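
  /* For example, with REG_PARM_STACK_SPACE == 16 and 8 bytes of stack
     arguments, args_size.constant is first raised to 16; if
     OUTGOING_REG_PARM_STACK_SPACE is not defined, the caller does not
     allocate that area itself, so the 16 bytes are subtracted again.
     (Illustrative numbers only.)  */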
| |
| if (args_size.constant > current_function_outgoing_args_size) |
| current_function_outgoing_args_size = args_size.constant; |
| |
| #ifdef ACCUMULATE_OUTGOING_ARGS |
| /* Since the stack pointer will never be pushed, it is possible for |
| the evaluation of a parm to clobber something we have already |
| written to the stack. Since most function calls on RISC machines |
| do not use the stack, this is uncommon, but must work correctly. |
| |
| Therefore, we save any area of the stack that was already written |
| and that we are using. Here we set up to do this by making a new |
| stack usage map from the old one. |
| |
| Another approach might be to try to reorder the argument |
| evaluations to avoid this conflicting stack usage. */ |
| |
| needed = args_size.constant; |
| #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE) |
| /* Since we will be writing into the entire argument area, the |
| map must be allocated for its entire size, not just the part that |
| is the responsibility of the caller. */ |
| needed += reg_parm_stack_space; |
| #endif |
| |
| #ifdef ARGS_GROW_DOWNWARD |
| highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, |
| needed + 1); |
| #else |
| highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, |
| needed); |
| #endif |
| stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use); |
| |
| if (initial_highest_arg_in_use) |
| bcopy (initial_stack_usage_map, stack_usage_map, |
| initial_highest_arg_in_use); |
| |
| if (initial_highest_arg_in_use != highest_outgoing_arg_in_use) |
| bzero (&stack_usage_map[initial_highest_arg_in_use], |
| highest_outgoing_arg_in_use - initial_highest_arg_in_use); |
| needed = 0; |
| |
| /* The address of the outgoing argument list must not be copied to a |
| register here, because argblock would be left pointing to the |
     wrong place after the call to allocate_dynamic_stack_space below.  */
| |
| argblock = virtual_outgoing_args_rtx; |
| #else /* not ACCUMULATE_OUTGOING_ARGS */ |
| #ifndef PUSH_ROUNDING |
| argblock = push_block (GEN_INT (args_size.constant), 0, 0); |
| #endif |
| #endif |
| |
| #ifdef PUSH_ARGS_REVERSED |
| #ifdef STACK_BOUNDARY |
| /* If we push args individually in reverse order, perform stack alignment |
| before the first push (the last arg). */ |
| if (argblock == 0) |
| anti_adjust_stack (GEN_INT (args_size.constant |
| - original_args_size.constant)); |
| #endif |
| #endif |
| |
| #ifdef PUSH_ARGS_REVERSED |
| inc = -1; |
| argnum = nargs - 1; |
| #else |
| inc = 1; |
| argnum = 0; |
| #endif |
| |
| #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE) |
| /* The argument list is the property of the called routine and it |
| may clobber it. If the fixed area has been used for previous |
| parameters, we must save and restore it. |
| |
     Here we compute the boundary of the area that needs to be saved, if any.  */
| |
| #ifdef ARGS_GROW_DOWNWARD |
| for (count = 0; count < reg_parm_stack_space + 1; count++) |
| #else |
| for (count = 0; count < reg_parm_stack_space; count++) |
| #endif |
| { |
| if (count >= highest_outgoing_arg_in_use |
| || stack_usage_map[count] == 0) |
| continue; |
| |
| if (low_to_save == -1) |
| low_to_save = count; |
| |
| high_to_save = count; |
| } |
| |
| if (low_to_save >= 0) |
| { |
| int num_to_save = high_to_save - low_to_save + 1; |
| enum machine_mode save_mode |
| = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1); |
| rtx stack_area; |
| |
      /* If we don't have the required alignment, we must do this in
	 BLKmode.  */
| if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode), |
| BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1))) |
| save_mode = BLKmode; |
| |
| stack_area = gen_rtx (MEM, save_mode, |
| memory_address (save_mode, |
| |
| #ifdef ARGS_GROW_DOWNWARD |
| plus_constant (argblock, |
| - high_to_save) |
| #else |
| plus_constant (argblock, |
| low_to_save) |
| #endif |
| )); |
| if (save_mode == BLKmode) |
| { |
| save_area = assign_stack_temp (BLKmode, num_to_save, 0); |
| MEM_IN_STRUCT_P (save_area) = 0; |
| emit_block_move (validize_mem (save_area), stack_area, |
| GEN_INT (num_to_save), |
| PARM_BOUNDARY / BITS_PER_UNIT); |
| } |
| else |
| { |
| save_area = gen_reg_rtx (save_mode); |
| emit_move_insn (save_area, stack_area); |
| } |
| } |
| #endif |
| |
| /* Push the args that need to be pushed. */ |
| |
| /* ARGNUM indexes the ARGVEC array in the order in which the arguments |
| are to be pushed. */ |
| for (count = 0; count < nargs; count++, argnum += inc) |
| { |
| register enum machine_mode mode = argvec[argnum].mode; |
| register rtx val = argvec[argnum].value; |
| rtx reg = argvec[argnum].reg; |
| int partial = argvec[argnum].partial; |
| int lower_bound, upper_bound, i; |
| |
| if (! (reg != 0 && partial == 0)) |
| { |
| #ifdef ACCUMULATE_OUTGOING_ARGS |
| /* If this is being stored into a pre-allocated, fixed-size, stack |
| area, save any previous data at that location. */ |
| |
| #ifdef ARGS_GROW_DOWNWARD |
	  /* The argument's stack offset is negative, but we want to index
	     stack_usage_map with positive values.  */
| upper_bound = -argvec[argnum].offset.constant + 1; |
| lower_bound = upper_bound - argvec[argnum].size.constant; |
| #else |
| lower_bound = argvec[argnum].offset.constant; |
| upper_bound = lower_bound + argvec[argnum].size.constant; |
| #endif |
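
	  /* For example, with ARGS_GROW_DOWNWARD, an argument at offset -8
	     of size 4 maps to upper_bound == 9 and lower_bound == 5, giving
	     positive indices into stack_usage_map.  (Illustrative numbers
	     only.)  */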
| |
| for (i = lower_bound; i < upper_bound; i++) |
| if (stack_usage_map[i] |
| #ifdef REG_PARM_STACK_SPACE |
| /* Don't store things in the fixed argument area at this point; |
| it has already been saved. */ |
| && i > reg_parm_stack_space |
| #endif |
| ) |
| break; |
| |
| if (i != upper_bound) |
| { |
| /* We need to make a save area. See what mode we can make it. */ |
| enum machine_mode save_mode |
| = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT, |
| MODE_INT, 1); |
| rtx stack_area |
| = gen_rtx (MEM, save_mode, |
| memory_address (save_mode, plus_constant (argblock, |
| argvec[argnum].offset.constant))); |
| argvec[argnum].save_area = gen_reg_rtx (save_mode); |
| emit_move_insn (argvec[argnum].save_area, stack_area); |
| } |
| #endif |
| emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0, |
| argblock, GEN_INT (argvec[argnum].offset.constant)); |
| |
| #ifdef ACCUMULATE_OUTGOING_ARGS |
| /* Now mark the segment we just used. */ |
| for (i = lower_bound; i < upper_bound; i++) |
| stack_usage_map[i] = 1; |
| #endif |
| |
| NO_DEFER_POP; |
| } |
| } |
| |
| #ifndef PUSH_ARGS_REVERSED |
| #ifdef STACK_BOUNDARY |
| /* If we pushed args in forward order, perform stack alignment |
| after pushing the last arg. */ |
| if (argblock == 0) |
| anti_adjust_stack (GEN_INT (args_size.constant |
| - original_args_size.constant)); |
| #endif |
| #endif |
| |
| #ifdef PUSH_ARGS_REVERSED |
| argnum = nargs - 1; |
| #else |
| argnum = 0; |
| #endif |
| |
| fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0); |
| |
| /* Now load any reg parms into their regs. */ |
| |
| /* ARGNUM indexes the ARGVEC array in the order in which the arguments |
| are to be pushed. */ |
| for (count = 0; count < nargs; count++, argnum += inc) |
| { |
| register enum machine_mode mode = argvec[argnum].mode; |
| register rtx val = argvec[argnum].value; |
| rtx reg = argvec[argnum].reg; |
| int partial = argvec[argnum].partial; |
| |
| if (reg != 0 && partial == 0) |
| emit_move_insn (reg, val); |
| NO_DEFER_POP; |
| } |
| |
| /* For version 1.37, try deleting this entirely. */ |
| if (! no_queue) |
| emit_queue (); |
| |
| /* Any regs containing parms remain in use through the call. */ |
| for (count = 0; count < nargs; count++) |
| if (argvec[count].reg != 0) |
| use_reg (&call_fusage, argvec[count].reg); |
| |
| /* Don't allow popping to be deferred, since then |
| cse'ing of library calls could delete a call and leave the pop. */ |
| NO_DEFER_POP; |
| |
| /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which |
| will set inhibit_defer_pop to that value. */ |
| |
| /* The return type is needed to decide how many bytes the function pops. |
| Signedness plays no role in that, so for simplicity, we pretend it's |
| always signed. We also assume that the list of arguments passed has |
| no impact, so we pretend it is unknown. */ |
| |
| emit_call_1 (fun, |
| get_identifier (XSTR (orgfun, 0)), |
| build_function_type (outmode == VOIDmode ? void_type_node |
| : type_for_mode (outmode, 0), NULL_TREE), |
| args_size.constant, 0, |
| FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1), |
| outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX, |
| old_inhibit_defer_pop + 1, call_fusage, no_queue); |
| |
| pop_temp_slots (); |
| |
| /* Now restore inhibit_defer_pop to its actual original value. */ |
| OK_DEFER_POP; |
| |
| #ifdef ACCUMULATE_OUTGOING_ARGS |
| #ifdef REG_PARM_STACK_SPACE |
| if (save_area) |
| { |
| enum machine_mode save_mode = GET_MODE (save_area); |
| rtx stack_area |
| = gen_rtx (MEM, save_mode, |
| memory_address (save_mode, |
| #ifdef ARGS_GROW_DOWNWARD |
| plus_constant (argblock, - high_to_save) |
| #else |
| plus_constant (argblock, low_to_save) |
| #endif |
| )); |
| |
| if (save_mode != BLKmode) |
| emit_move_insn (stack_area, save_area); |
| else |
| emit_block_move (stack_area, validize_mem (save_area), |
| GEN_INT (high_to_save - low_to_save + 1), |
| PARM_BOUNDARY / BITS_PER_UNIT); |
| } |
| #endif |
| |
| /* If we saved any argument areas, restore them. */ |
| for (count = 0; count < nargs; count++) |
| if (argvec[count].save_area) |
| { |
| enum machine_mode save_mode = GET_MODE (argvec[count].save_area); |
| rtx stack_area |
| = gen_rtx (MEM, save_mode, |
| memory_address (save_mode, plus_constant (argblock, |
| argvec[count].offset.constant))); |
| |
| emit_move_insn (stack_area, argvec[count].save_area); |
| } |
| |
| highest_outgoing_arg_in_use = initial_highest_arg_in_use; |
| stack_usage_map = initial_stack_usage_map; |
| #endif |
| |
| } |
| |
| /* Like emit_library_call except that an extra argument, VALUE, |
| comes second and says where to store the result. |
| (If VALUE is zero, this function chooses a convenient way |
   to return the value.)
| |
| This function returns an rtx for where the value is to be found. |
| If VALUE is nonzero, VALUE is returned. */ |
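
/* As an illustrative sketch (LIBFUNC, OP0, and OP1 are hypothetical
   rtxes): a two-operand arithmetic libcall can be emitted and its
   result fetched with

	result = emit_library_call_value (libfunc, NULL_RTX, 1, SImode,
					  2, op0, SImode, op1, SImode);

   where the nonzero third argument marks the call as const.  */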
| |
| rtx |
| emit_library_call_value VPROTO((rtx orgfun, rtx value, int no_queue, |
| enum machine_mode outmode, int nargs, ...)) |
| { |
| #ifndef __STDC__ |
| rtx orgfun; |
| rtx value; |
| int no_queue; |
| enum machine_mode outmode; |
| int nargs; |
| #endif |
| va_list p; |
| /* Total size in bytes of all the stack-parms scanned so far. */ |
| struct args_size args_size; |
| /* Size of arguments before any adjustments (such as rounding). */ |
| struct args_size original_args_size; |
| register int argnum; |
| rtx fun; |
| int inc; |
| int count; |
| rtx argblock = 0; |
| CUMULATIVE_ARGS args_so_far; |
| struct arg { rtx value; enum machine_mode mode; rtx reg; int partial; |
| struct args_size offset; struct args_size size; rtx save_area; }; |
| struct arg *argvec; |
| int old_inhibit_defer_pop = inhibit_defer_pop; |
| rtx call_fusage = 0; |
| /* Size of the stack reserved for parameter registers. */ |
| int reg_parm_stack_space = 0; |
| rtx mem_value = 0; |
| int pcc_struct_value = 0; |
| int struct_value_size = 0; |
| int is_const; |
| int needed; |
| |
| #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE) |
| /* Define the boundary of the register parm stack space that needs to be |
   saved, if any.  */
| int low_to_save = -1, high_to_save; |
| rtx save_area = 0; /* Place that it is saved */ |
| #endif |
| |
| #ifdef ACCUMULATE_OUTGOING_ARGS |
| int initial_highest_arg_in_use = highest_outgoing_arg_in_use; |
| char *initial_stack_usage_map = stack_usage_map; |
| #endif |
| |
| #ifdef REG_PARM_STACK_SPACE |
| #ifdef MAYBE_REG_PARM_STACK_SPACE |
| reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE; |
| #else |
  /* FNDECL is not available in this context; pass a null tree instead
     (typical REG_PARM_STACK_SPACE definitions ignore their argument).  */
  reg_parm_stack_space = REG_PARM_STACK_SPACE (NULL_TREE);
| #endif |
| #endif |
| |
| VA_START (p, nargs); |
| |
| #ifndef __STDC__ |
| orgfun = va_arg (p, rtx); |
| value = va_arg (p, rtx); |
| no_queue = va_arg (p, int); |
| outmode = va_arg (p, enum machine_mode); |
| nargs = va_arg (p, int); |
| #endif |
| |
| is_const = no_queue; |
| fun = orgfun; |
| |
| /* If this kind of value comes back in memory, |
| decide where in memory it should come back. */ |
| if (aggregate_value_p (type_for_mode (outmode, 0))) |
| { |
| #ifdef PCC_STATIC_STRUCT_RETURN |
| rtx pointer_reg |
| = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)), |
| 0); |
| mem_value = gen_rtx (MEM, outmode, pointer_reg); |
| pcc_struct_value = 1; |
| if (value == 0) |
| value = gen_reg_rtx (outmode); |
| #else /* not PCC_STATIC_STRUCT_RETURN */ |
| struct_value_size = GET_MODE_SIZE (outmode); |
| if (value != 0 && GET_CODE (value) == MEM) |
| mem_value = value; |
| else |
| mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0); |
| #endif |
| |
| /* This call returns a big structure. */ |
| is_const = 0; |
| } |
| |
| /* ??? Unfinished: must pass the memory address as an argument. */ |
| |
| /* Copy all the libcall-arguments out of the varargs data |
| and into a vector ARGVEC. |
| |
| Compute how to pass each argument. We only support a very small subset |
| of the full argument passing conventions to limit complexity here since |
| library functions shouldn't have many args. */ |
| |
| argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg)); |
| bzero ((char *) argvec, (nargs + 1) * sizeof (struct arg)); |
| |
| INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0); |
| |
| args_size.constant = 0; |
| args_size.var = 0; |
| |
| count = 0; |
| |
| push_temp_slots (); |
| |
| /* If there's a structure value address to be passed, |
| either pass it in the special place, or pass it as an extra argument. */ |
| if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value) |
| { |
| rtx addr = XEXP (mem_value, 0); |
| nargs++; |
| |
| /* Make sure it is a reasonable operand for a move or push insn. */ |
| if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM |
| && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr))) |
| addr = force_operand (addr, NULL_RTX); |
| |
| argvec[count].value = addr; |
| argvec[count].mode = Pmode; |
| argvec[count].partial = 0; |
| |
| argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1); |
| #ifdef FUNCTION_ARG_PARTIAL_NREGS |
| if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1)) |
| abort (); |
| #endif |
| |
| locate_and_pad_parm (Pmode, NULL_TREE, |
| argvec[count].reg && argvec[count].partial == 0, |
| NULL_TREE, &args_size, &argvec[count].offset, |
| &argvec[count].size); |
| |
| |
| if (argvec[count].reg == 0 || argvec[count].partial != 0 |
| #ifdef REG_PARM_STACK_SPACE |
| || 1 |
| #endif |
| ) |
| args_size.constant += argvec[count].size.constant; |
| |
| FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1); |
| |
| count++; |
| } |
| |
| for (; count < nargs; count++) |
| { |
| rtx val = va_arg (p, rtx); |
| enum machine_mode mode = va_arg (p, enum machine_mode); |
| |
| /* We cannot convert the arg value to the mode the library wants here; |
| must do it earlier where we know the signedness of the arg. */ |
| if (mode == BLKmode |
| || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)) |
| abort (); |
| |
| /* On some machines, there's no way to pass a float to a library fcn. |
| Pass it as a double instead. */ |
| #ifdef LIBGCC_NEEDS_DOUBLE |
| if (LIBGCC_NEEDS_DOUBLE && mode == SFmode) |
| val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode; |
| #endif |
| |
| /* There's no need to call protect_from_queue, because |
| either emit_move_insn or emit_push_insn will do that. */ |
| |
| /* Make sure it is a reasonable operand for a move or push insn. */ |
| if (GET_CODE (val) != REG && GET_CODE (val) != MEM |
| && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val))) |
| val = force_operand (val, NULL_RTX); |
| |
| #ifdef FUNCTION_ARG_PASS_BY_REFERENCE |
| if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1)) |
| { |
| /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can |
| be viewed as just an efficiency improvement. */ |
| rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0); |
| emit_move_insn (slot, val); |
| val = XEXP (slot, 0); |
| mode = Pmode; |
| } |
| #endif |
| |
| argvec[count].value = val; |
| argvec[count].mode = mode; |
| |
| argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1); |
| if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL) |
| abort (); |
| #ifdef FUNCTION_ARG_PARTIAL_NREGS |
| argvec[count].partial |
| = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1); |
| #else |
| argvec[count].partial = 0; |
| #endif |
| |
| locate_and_pad_parm (mode, NULL_TREE, |
| argvec[count].reg && argvec[count].partial == 0, |
| NULL_TREE, &args_size, &argvec[count].offset, |
| &argvec[count].size); |
| |
| if (argvec[count].size.var) |
| abort (); |
| |
| #ifndef REG_PARM_STACK_SPACE |
| if (argvec[count].partial) |
| argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD; |
| #endif |
| |
| if (argvec[count].reg == 0 || argvec[count].partial != 0 |
| #ifdef REG_PARM_STACK_SPACE |
| || 1 |
| #endif |
| ) |
| args_size.constant += argvec[count].size.constant; |
| |
| FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1); |
| } |
| va_end (p); |
| |
| #ifdef FINAL_REG_PARM_STACK_SPACE |
| reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant, |
| args_size.var); |
| #endif |
| /* If this machine requires an external definition for library |
| functions, write one out. */ |
| assemble_external_libcall (fun); |
| |
| original_args_size = args_size; |
| #ifdef STACK_BOUNDARY |
| args_size.constant = (((args_size.constant + (STACK_BYTES - 1)) |
| / STACK_BYTES) * STACK_BYTES); |
| #endif |
| |
| #ifdef REG_PARM_STACK_SPACE |
| args_size.constant = MAX (args_size.constant, |
| reg_parm_stack_space); |
| #ifndef OUTGOING_REG_PARM_STACK_SPACE |
| args_size.constant -= reg_parm_stack_space; |
| #endif |
| #endif |
| |
| if (args_size.constant > current_function_outgoing_args_size) |
| current_function_outgoing_args_size = args_size.constant; |
| |
| #ifdef ACCUMULATE_OUTGOING_ARGS |
| /* Since the stack pointer will never be pushed, it is possible for |
| the evaluation of a parm to clobber something we have already |
| written to the stack. Since most function calls on RISC machines |
| do not use the stack, this is uncommon, but must work correctly. |
| |
| Therefore, we save any area of the stack that was already written |
| and that we are using. Here we set up to do this by making a new |
| stack usage map from the old one. |
| |
| Another approach might be to try to reorder the argument |
| evaluations to avoid this conflicting stack usage. */ |
| |
| needed = args_size.constant; |
| #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE) |
| /* Since we will be writing into the entire argument area, the |
| map must be allocated for its entire size, not just the part that |
| is the responsibility of the caller. */ |
| needed += reg_parm_stack_space; |
| #endif |
| |
| #ifdef ARGS_GROW_DOWNWARD |
| highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, |
| needed + 1); |
| #else |
| highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, |
| needed); |
| #endif |
| stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use); |
| |
| if (initial_highest_arg_in_use) |
| bcopy (initial_stack_usage_map, stack_usage_map, |
| initial_highest_arg_in_use); |
| |
| if (initial_highest_arg_in_use != highest_outgoing_arg_in_use) |
| bzero (&stack_usage_map[initial_highest_arg_in_use], |
| highest_outgoing_arg_in_use - initial_highest_arg_in_use); |
| needed = 0; |
| |
| /* The address of the outgoing argument list must not be copied to a |
| register here, because argblock would be left pointing to the |
     wrong place after the call to allocate_dynamic_stack_space below.  */
| |
| argblock = virtual_outgoing_args_rtx; |
| #else /* not ACCUMULATE_OUTGOING_ARGS */ |
| #ifndef PUSH_ROUNDING |
| argblock = push_block (GEN_INT (args_size.constant), 0, 0); |
| #endif |
| #endif |
| |
| #ifdef PUSH_ARGS_REVERSED |
| #ifdef STACK_BOUNDARY |
| /* If we push args individually in reverse order, perform stack alignment |
| before the first push (the last arg). */ |
| if (argblock == 0) |
| anti_adjust_stack (GEN_INT (args_size.constant |
| - original_args_size.constant)); |
| #endif |
| #endif |
| |
| #ifdef PUSH_ARGS_REVERSED |
| inc = -1; |
| argnum = nargs - 1; |
| #else |
| inc = 1; |
| argnum = 0; |
| #endif |
| |
| #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE) |
| /* The argument list is the property of the called routine and it |
| may clobber it. If the fixed area has been used for previous |
| parameters, we must save and restore it. |
| |
     Here we compute the boundary of the area that needs to be saved, if any.  */
| |
| #ifdef ARGS_GROW_DOWNWARD |
| for (count = 0; count < reg_parm_stack_space + 1; count++) |
| #else |
| for (count = 0; count < reg_parm_stack_space; count++) |
| #endif |
| { |
| if (count >= highest_outgoing_arg_in_use |
| || stack_usage_map[count] == 0) |
| continue; |
| |
| if (low_to_save == -1) |
| low_to_save = count; |
| |
| high_to_save = count; |
| } |
| |
| if (low_to_save >= 0) |
| { |
| int num_to_save = high_to_save - low_to_save + 1; |
| enum machine_mode save_mode |
| = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1); |
| rtx stack_area; |
| |
      /* If we don't have the required alignment, we must do this in
	 BLKmode.  */
| if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode), |
| BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1))) |
| save_mode = BLKmode; |
| |
| stack_area = gen_rtx (MEM, save_mode, |
| memory_address (save_mode, |
| |
| #ifdef ARGS_GROW_DOWNWARD |
| plus_constant (argblock, |
| - high_to_save) |
| #else |
| plus_constant (argblock, |
| low_to_save) |
| #endif |
| )); |
| if (save_mode == BLKmode) |
| { |
| save_area = assign_stack_temp (BLKmode, num_to_save, 0); |
| MEM_IN_STRUCT_P (save_area) = 0; |
| emit_block_move (validize_mem (save_area), stack_area, |
| GEN_INT (num_to_save), |
| PARM_BOUNDARY / BITS_PER_UNIT); |
| } |
| else |
| { |
| save_area = gen_reg_rtx (save_mode); |
| emit_move_insn (save_area, stack_area); |
| } |
| } |
| #endif |
| |
| /* Push the args that need to be pushed. */ |
| |
| /* ARGNUM indexes the ARGVEC array in the order in which the arguments |
| are to be pushed. */ |
| for (count = 0; count < nargs; count++, argnum += inc) |
| { |
| register enum machine_mode mode = argvec[argnum].mode; |
| register rtx val = argvec[argnum].value; |
| rtx reg = argvec[argnum].reg; |
| int partial = argvec[argnum].partial; |
| int lower_bound, upper_bound, i; |
| |
| if (! (reg != 0 && partial == 0)) |
| { |
| #ifdef ACCUMULATE_OUTGOING_ARGS |
| /* If this is being stored into a pre-allocated, fixed-size, stack |
| area, save any previous data at that location. */ |
| |
| #ifdef ARGS_GROW_DOWNWARD |
	  /* The argument's stack offset is negative, but we want to index
	     stack_usage_map with positive values.  */
| upper_bound = -argvec[argnum].offset.constant + 1; |
| lower_bound = upper_bound - argvec[argnum].size.constant; |
| #else |
| lower_bound = argvec[argnum].offset.constant; |
| upper_bound = lower_bound + argvec[argnum].size.constant; |
| #endif |
| |
| for (i = lower_bound; i < upper_bound; i++) |
| if (stack_usage_map[i] |
| #ifdef REG_PARM_STACK_SPACE |
| /* Don't store things in the fixed argument area at this point; |
| it has already been saved. */ |
| && i > reg_parm_stack_space |
| #endif |
| ) |
| break; |
| |
| if (i != upper_bound) |
| { |
| /* We need to make a save area. See what mode we can make it. */ |
| enum machine_mode save_mode |
| = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT, |
| MODE_INT, 1); |
| rtx stack_area |
| = gen_rtx (MEM, save_mode, |
| memory_address (save_mode, plus_constant (argblock, |
| argvec[argnum].offset.constant))); |
| argvec[argnum].save_area = gen_reg_rtx (save_mode); |
| emit_move_insn (argvec[argnum].save_area, stack_area); |
| } |
| #endif |
| emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0, |
| argblock, GEN_INT (argvec[argnum].offset.constant)); |
| |
| #ifdef ACCUMULATE_OUTGOING_ARGS |
| /* Now mark the segment we just used. */ |
| for (i = lower_bound; i < upper_bound; i++) |
| stack_usage_map[i] = 1; |
| #endif |
| |
| NO_DEFER_POP; |
| } |
| } |
|