| /* Expands front end tree to back end RTL for GNU C-Compiler |
| Copyright (C) 1987, 88, 89, 91-96, 1997 Free Software Foundation, Inc. |
| |
| This file is part of GNU CC. |
| |
| GNU CC is free software; you can redistribute it and/or modify |
| it under the terms of the GNU General Public License as published by |
| the Free Software Foundation; either version 2, or (at your option) |
| any later version. |
| |
| GNU CC is distributed in the hope that it will be useful, |
| but WITHOUT ANY WARRANTY; without even the implied warranty of |
| MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
| GNU General Public License for more details. |
| |
| You should have received a copy of the GNU General Public License |
| along with GNU CC; see the file COPYING. If not, write to |
| the Free Software Foundation, 59 Temple Place - Suite 330, |
| Boston, MA 02111-1307, USA. */ |
| |
| |
| /* This file handles the generation of rtl code from tree structure |
| at the level of the function as a whole. |
| It creates the rtl expressions for parameters and auto variables |
| and has full responsibility for allocating stack slots. |
| |
| `expand_function_start' is called at the beginning of a function, |
| before the function body is parsed, and `expand_function_end' is |
| called after parsing the body. |
| |
| Call `assign_stack_local' to allocate a stack slot for a local variable. |
| This is usually done during the RTL generation for the function body, |
| but it can also be done in the reload pass when a pseudo-register does |
| not get a hard register. |
| |
| Call `put_var_into_stack' when you learn, belatedly, that a variable |
| previously given a pseudo-register must in fact go in the stack. |
| This function changes the DECL_RTL to be a stack slot instead of a reg |
| then scans all the RTL instructions so far generated to correct them. */ |
| |
| #include "config.h" |
| #include <stdio.h> |
| #include "rtl.h" |
| #include "tree.h" |
| #include "flags.h" |
| #include "except.h" |
| #include "function.h" |
| #include "insn-flags.h" |
| #include "expr.h" |
| #include "insn-codes.h" |
| #include "regs.h" |
| #include "hard-reg-set.h" |
| #include "insn-config.h" |
| #include "recog.h" |
| #include "output.h" |
| #include "basic-block.h" |
| #include "obstack.h" |
| #include "bytecode.h" |
| #include "bc-emit.h" |
| |
| #ifndef TRAMPOLINE_ALIGNMENT |
| #define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY |
| #endif |
| |
| /* Some systems use __main in a way incompatible with its use in gcc; in |
| these cases use the macros NAME__MAIN to give a quoted symbol and |
| SYMBOL__MAIN to give the same symbol without quotes for an alternative |
| entry point. You must define both, or neither. */ |
| #ifndef NAME__MAIN |
| #define NAME__MAIN "__main" |
| #define SYMBOL__MAIN __main |
| #endif |
| |
| /* Round a value down to the largest multiple of the required alignment |
| that does not exceed it. Avoid using division in case the value is |
| negative. Assume the alignment is a power of two. */ |
| #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1)) |
| |
| /* Similar, but round up to the smallest such multiple that is not less |
| than the value. */ |
| #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1)) |
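| |
| /* For instance, with ALIGN == 16: FLOOR_ROUND (37, 16) == 32, |
| CEIL_ROUND (37, 16) == 48, and FLOOR_ROUND (-20, 16) == -32. The |
| masking rounds negative values toward minus infinity, which C's |
| truncating division would not. */ |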
| |
| /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp |
| during rtl generation. If they are different register numbers, this is |
| always true. It may also be true if |
| FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl |
| generation. See fix_lexical_addr for details. */ |
| |
| #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM |
| #define NEED_SEPARATE_AP |
| #endif |
| |
| /* Number of bytes of args popped by function being compiled on its return. |
| Zero if no bytes are to be popped. |
| May affect compilation of return insn or of function epilogue. */ |
| |
| int current_function_pops_args; |
| |
| /* Nonzero if function being compiled needs to be given an address |
| where its return value should be stored. */ |
| |
| int current_function_returns_struct; |
| |
| /* Nonzero if function being compiled needs to |
| return the address of where it has put a structure value. */ |
| |
| int current_function_returns_pcc_struct; |
| |
| /* Nonzero if function being compiled needs to be passed a static chain. */ |
| |
| int current_function_needs_context; |
| |
| /* Nonzero if function being compiled can call setjmp. */ |
| |
| int current_function_calls_setjmp; |
| |
| /* Nonzero if function being compiled can call longjmp. */ |
| |
| int current_function_calls_longjmp; |
| |
| /* Nonzero if function being compiled receives nonlocal gotos |
| from nested functions. */ |
| |
| int current_function_has_nonlocal_label; |
| |
| /* Nonzero if function being compiled has nonlocal gotos to parent |
| function. */ |
| |
| int current_function_has_nonlocal_goto; |
| |
| /* Nonzero if function being compiled contains nested functions. */ |
| |
| int current_function_contains_functions; |
| |
| /* Nonzero if the current function is a thunk (a lightweight function that |
| just adjusts one of its arguments and forwards to another function), so |
| we should try to cut corners where we can. */ |
| int current_function_is_thunk; |
| |
| /* Nonzero if function being compiled can call alloca, |
| either as a subroutine or builtin. */ |
| |
| int current_function_calls_alloca; |
| |
| /* Nonzero if the current function returns a pointer type. */ |
| |
| int current_function_returns_pointer; |
| |
| /* If some insns can be deferred to the delay slots of the epilogue, the |
| delay list for them is recorded here. */ |
| |
| rtx current_function_epilogue_delay_list; |
| |
| /* If function's args have a fixed size, this is that size, in bytes. |
| Otherwise, it is -1. |
| May affect compilation of return insn or of function epilogue. */ |
| |
| int current_function_args_size; |
| |
| /* # bytes the prologue should push and pretend that the caller pushed them. |
| The prologue must do this, but only if parms can be passed in registers. */ |
| |
| int current_function_pretend_args_size; |
| |
| /* # of bytes of outgoing arguments. If ACCUMULATE_OUTGOING_ARGS is |
| defined, the needed space is pushed by the prologue. */ |
| |
| int current_function_outgoing_args_size; |
| |
| /* This is the offset from the arg pointer to the place where the first |
| anonymous arg can be found, if there is one. */ |
| |
| rtx current_function_arg_offset_rtx; |
| |
| /* Nonzero if current function uses varargs.h or equivalent. |
| Zero for functions that use stdarg.h. */ |
| |
| int current_function_varargs; |
| |
| /* Nonzero if current function uses stdarg.h or equivalent. |
| Zero for functions that use varargs.h. */ |
| |
| int current_function_stdarg; |
| |
| /* Quantities of various kinds of registers |
| used for the current function's args. */ |
| |
| CUMULATIVE_ARGS current_function_args_info; |
| |
| /* Name of function now being compiled. */ |
| |
| char *current_function_name; |
| |
| /* If non-zero, an RTL expression for the location at which the current |
| function returns its result. If the current function returns its |
| result in a register, current_function_return_rtx will always be |
| the hard register containing the result. */ |
| |
| rtx current_function_return_rtx; |
| |
| /* Nonzero if the current function uses the constant pool. */ |
| |
| int current_function_uses_const_pool; |
| |
| /* Nonzero if the current function uses pic_offset_table_rtx. */ |
| int current_function_uses_pic_offset_table; |
| |
| /* The arg pointer hard register, or the pseudo into which it was copied. */ |
| rtx current_function_internal_arg_pointer; |
| |
| /* The FUNCTION_DECL for an inline function currently being expanded. */ |
| tree inline_function_decl; |
| |
| /* Number of function calls seen so far in current function. */ |
| |
| int function_call_count; |
| |
| /* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels |
| (labels to which there can be nonlocal gotos from nested functions) |
| in this function. */ |
| |
| tree nonlocal_labels; |
| |
| /* RTX for stack slot that holds the current handler for nonlocal gotos. |
| Zero when function does not have nonlocal labels. */ |
| |
| rtx nonlocal_goto_handler_slot; |
| |
| /* RTX for stack slot that holds the stack pointer value to restore |
| for a nonlocal goto. |
| Zero when function does not have nonlocal labels. */ |
| |
| rtx nonlocal_goto_stack_level; |
| |
| /* Label that will go on parm cleanup code, if any. |
| Jumping to this label runs cleanup code for parameters, if |
| such code must be run. Following this code is the logical return label. */ |
| |
| rtx cleanup_label; |
| |
| /* Label that will go on function epilogue. |
| Jumping to this label serves as a "return" instruction |
| on machines which require execution of the epilogue on all returns. */ |
| |
| rtx return_label; |
| |
| /* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs. |
| So we can mark them all live at the end of the function, if not optimizing. */ |
| rtx save_expr_regs; |
| |
| /* List (chain of EXPR_LISTs) of all stack slots in this function. |
| Made for the sake of unshare_all_rtl. */ |
| rtx stack_slot_list; |
| |
| /* Chain of all RTL_EXPRs that have insns in them. */ |
| tree rtl_expr_chain; |
| |
| /* Label to jump back to for tail recursion, or 0 if we have |
| not yet needed one for this function. */ |
| rtx tail_recursion_label; |
| |
| /* Place after which to insert the tail_recursion_label if we need one. */ |
| rtx tail_recursion_reentry; |
| |
| /* Location at which to save the argument pointer if it will need to be |
| referenced. There are two cases where this is done: if nonlocal gotos |
| exist, or if vars stored at an offset from the argument pointer will be |
| needed by inner routines. */ |
| |
| rtx arg_pointer_save_area; |
| |
| /* Offset to end of allocated area of stack frame. |
| If stack grows down, this is the address of the last stack slot allocated. |
| If stack grows up, this is the address for the next slot. */ |
| HOST_WIDE_INT frame_offset; |
| |
| /* List (chain of TREE_LISTs) of static chains for containing functions. |
| Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx |
| in an RTL_EXPR in the TREE_VALUE. */ |
| static tree context_display; |
| |
| /* List (chain of TREE_LISTs) of trampolines for nested functions. |
| The trampoline sets up the static chain and jumps to the function. |
| We supply the trampoline's address when the function's address is requested. |
| |
| Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx |
| in an RTL_EXPR in the TREE_VALUE. */ |
| static tree trampoline_list; |
| |
| /* Insn after which register parms and SAVE_EXPRs are born, if not optimizing. */ |
| static rtx parm_birth_insn; |
| |
| #if 0 |
| /* Nonzero if a stack slot has been generated whose address is not |
| actually valid. It means that the generated rtl must all be scanned |
| to detect and correct the invalid addresses where they occur. */ |
| static int invalid_stack_slot; |
| #endif |
| |
| /* Last insn of those whose job was to put parms into their nominal homes. */ |
| static rtx last_parm_insn; |
| |
| /* 1 + last pseudo register number used for loading a copy |
| of a parameter of this function. */ |
| static int max_parm_reg; |
| |
| /* Vector indexed by REGNO, containing location on stack in which |
| to put the parm which is nominally in pseudo register REGNO, |
| if we discover that that parm must go in the stack. */ |
| static rtx *parm_reg_stack_loc; |
| |
| /* Nonzero once virtual register instantiation has been done. |
| assign_stack_local uses frame_pointer_rtx when this is nonzero. */ |
| static int virtuals_instantiated; |
| |
| /* These variables hold pointers to functions to |
| save and restore machine-specific data, |
| in push_function_context and pop_function_context. */ |
| void (*save_machine_status) PROTO((struct function *)); |
| void (*restore_machine_status) PROTO((struct function *)); |
| |
| /* Nonzero if we need to distinguish between the return value of this function |
| and the return value of a function called by this function. This helps |
| integrate.c. */ |
| |
| extern int rtx_equal_function_value_matters; |
| extern tree sequence_rtl_expr; |
| |
| /* In order to evaluate some expressions, such as function calls returning |
| structures in memory, we need to temporarily allocate stack locations. |
| We record each allocated temporary in the following structure. |
| |
| Associated with each temporary slot is a nesting level. When we pop up |
| one level, all temporaries associated with the previous level are freed. |
| Normally, all temporaries are freed after the execution of the statement |
| in which they were created. However, if we are inside a ({...}) grouping, |
| the result may be in a temporary and hence must be preserved. If the |
| result could be in a temporary, we preserve it if we can determine which |
| one it is in. If we cannot determine which temporary may contain the |
| result, all temporaries are preserved. A temporary is preserved by |
| pretending it was allocated at the previous nesting level. |
| |
| Automatic variables are also assigned temporary slots, at the nesting |
| level where they are defined. They are marked as "kept" so that |
| free_temp_slots will not free them. */ |
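| |
| /* As a sketch of the preservation rule: for a grouping such as |
| |
| result = ({ struct S tmp = make_s (); tmp; }); |
| |
| where make_s is some hypothetical function returning a struct, the |
| value of the grouping may sit in a temporary allocated inside the |
| braces; it survives by being treated as if it had been allocated at |
| the enclosing nesting level. */ |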
| |
| struct temp_slot |
| { |
| /* Points to next temporary slot. */ |
| struct temp_slot *next; |
| /* The rtx used to reference the slot. */ |
| rtx slot; |
| /* The rtx used to represent the address if not the address of the |
| slot above. May be an EXPR_LIST if multiple addresses exist. */ |
| rtx address; |
| /* The size, in units, of the slot. */ |
| int size; |
| /* The value of `sequence_rtl_expr' when this temporary is allocated. */ |
| tree rtl_expr; |
| /* Non-zero if this temporary is currently in use. */ |
| char in_use; |
| /* Non-zero if this temporary has its address taken. */ |
| char addr_taken; |
| /* Nesting level at which this slot is being used. */ |
| int level; |
| /* Non-zero if this should survive a call to free_temp_slots. */ |
| int keep; |
| /* The offset of the slot from the frame_pointer, including extra space |
| for alignment. This info is for combine_temp_slots. */ |
| int base_offset; |
| /* The size of the slot, including extra space for alignment. This |
| info is for combine_temp_slots. */ |
| int full_size; |
| }; |
| |
| /* List of all temporaries allocated, both available and in use. */ |
| |
| struct temp_slot *temp_slots; |
| |
| /* Current nesting level for temporaries. */ |
| |
| int temp_slot_level; |
| |
| /* The FUNCTION_DECL node for the current function. */ |
| static tree this_function_decl; |
| |
| /* Callinfo pointer for the current function. */ |
| static rtx this_function_callinfo; |
| |
| /* The label in the bytecode file of this function's actual bytecode. |
| Not an rtx. */ |
| static char *this_function_bytecode; |
| |
| /* The call description vector for the current function. */ |
| static rtx this_function_calldesc; |
| |
| /* Size of the local variables allocated for the current function. */ |
| int local_vars_size; |
| |
| /* Current depth of the bytecode evaluation stack. */ |
| int stack_depth; |
| |
| /* Maximum depth of the evaluation stack in this function. */ |
| int max_stack_depth; |
| |
| /* Current depth in statement expressions. */ |
| static int stmt_expr_depth; |
| |
| /* This structure is used to record MEMs or pseudos used to replace VAR, any |
| SUBREGs of VAR, and any MEMs containing VAR as an address. We need to |
| maintain this list in case two operands of an insn were required to match; |
| in that case we must ensure we use the same replacement. */ |
| |
| struct fixup_replacement |
| { |
| rtx old; |
| rtx new; |
| struct fixup_replacement *next; |
| }; |
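| |
| /* For example, if an insn's constraints require two operands to match |
| and both operands were the pseudo being moved to the stack, both |
| occurrences must be rewritten with the one replacement recorded in |
| this list, not with two distinct pseudos. */ |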
| |
| /* Forward declarations. */ |
| |
| static struct temp_slot *find_temp_slot_from_address PROTO((rtx)); |
| static void put_reg_into_stack PROTO((struct function *, rtx, tree, |
| enum machine_mode, enum machine_mode, |
| int)); |
| static void fixup_var_refs PROTO((rtx, enum machine_mode, int)); |
| static struct fixup_replacement |
| *find_fixup_replacement PROTO((struct fixup_replacement **, rtx)); |
| static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int, |
| rtx, int)); |
| static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx, |
| struct fixup_replacement **)); |
| static rtx fixup_memory_subreg PROTO((rtx, rtx, int)); |
| static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int)); |
| static rtx fixup_stack_1 PROTO((rtx, rtx)); |
| static void optimize_bit_field PROTO((rtx, rtx, rtx *)); |
| static void instantiate_decls PROTO((tree, int)); |
| static void instantiate_decls_1 PROTO((tree, int)); |
| static void instantiate_decl PROTO((rtx, int, int)); |
| static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int)); |
| static void delete_handlers PROTO((void)); |
| static void pad_to_arg_alignment PROTO((struct args_size *, int)); |
| static void pad_below PROTO((struct args_size *, enum machine_mode, |
| tree)); |
| static tree round_down PROTO((tree, int)); |
| static rtx round_trampoline_addr PROTO((rtx)); |
| static tree blocks_nreverse PROTO((tree)); |
| static int all_blocks PROTO((tree, tree *)); |
| static int *record_insns PROTO((rtx)); |
| static int contains PROTO((rtx, int *)); |
| |
| /* Pointer to chain of `struct function' for containing functions. */ |
| struct function *outer_function_chain; |
| |
| /* Given a function decl for a containing function, |
| return the `struct function' for it. */ |
| |
| struct function * |
| find_function_data (decl) |
| tree decl; |
| { |
| struct function *p; |
| for (p = outer_function_chain; p; p = p->next) |
| if (p->decl == decl) |
| return p; |
| abort (); |
| } |
| |
| /* Save the current context for compilation of a nested function. |
| This is called from language-specific code. |
| The caller is responsible for saving any language-specific status, |
| since this function knows only about language-independent variables. */ |
| |
| void |
| push_function_context_to (context) |
| tree context; |
| { |
| struct function *p = (struct function *) xmalloc (sizeof (struct function)); |
| |
| p->next = outer_function_chain; |
| outer_function_chain = p; |
| |
| p->name = current_function_name; |
| p->decl = current_function_decl; |
| p->pops_args = current_function_pops_args; |
| p->returns_struct = current_function_returns_struct; |
| p->returns_pcc_struct = current_function_returns_pcc_struct; |
| p->returns_pointer = current_function_returns_pointer; |
| p->needs_context = current_function_needs_context; |
| p->calls_setjmp = current_function_calls_setjmp; |
| p->calls_longjmp = current_function_calls_longjmp; |
| p->calls_alloca = current_function_calls_alloca; |
| p->has_nonlocal_label = current_function_has_nonlocal_label; |
| p->has_nonlocal_goto = current_function_has_nonlocal_goto; |
| p->contains_functions = current_function_contains_functions; |
| p->is_thunk = current_function_is_thunk; |
| p->args_size = current_function_args_size; |
| p->pretend_args_size = current_function_pretend_args_size; |
| p->arg_offset_rtx = current_function_arg_offset_rtx; |
| p->varargs = current_function_varargs; |
| p->stdarg = current_function_stdarg; |
| p->uses_const_pool = current_function_uses_const_pool; |
| p->uses_pic_offset_table = current_function_uses_pic_offset_table; |
| p->internal_arg_pointer = current_function_internal_arg_pointer; |
| p->max_parm_reg = max_parm_reg; |
| p->parm_reg_stack_loc = parm_reg_stack_loc; |
| p->outgoing_args_size = current_function_outgoing_args_size; |
| p->return_rtx = current_function_return_rtx; |
| p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot; |
| p->nonlocal_goto_stack_level = nonlocal_goto_stack_level; |
| p->nonlocal_labels = nonlocal_labels; |
| p->cleanup_label = cleanup_label; |
| p->return_label = return_label; |
| p->save_expr_regs = save_expr_regs; |
| p->stack_slot_list = stack_slot_list; |
| p->parm_birth_insn = parm_birth_insn; |
| p->frame_offset = frame_offset; |
| p->tail_recursion_label = tail_recursion_label; |
| p->tail_recursion_reentry = tail_recursion_reentry; |
| p->arg_pointer_save_area = arg_pointer_save_area; |
| p->rtl_expr_chain = rtl_expr_chain; |
| p->last_parm_insn = last_parm_insn; |
| p->context_display = context_display; |
| p->trampoline_list = trampoline_list; |
| p->function_call_count = function_call_count; |
| p->temp_slots = temp_slots; |
| p->temp_slot_level = temp_slot_level; |
| p->fixup_var_refs_queue = 0; |
| p->epilogue_delay_list = current_function_epilogue_delay_list; |
| p->args_info = current_function_args_info; |
| |
| save_tree_status (p, context); |
| save_storage_status (p); |
| save_emit_status (p); |
| init_emit (); |
| save_expr_status (p); |
| save_stmt_status (p); |
| save_varasm_status (p); |
| |
| if (save_machine_status) |
| (*save_machine_status) (p); |
| } |
| |
| void |
| push_function_context () |
| { |
| push_function_context_to (current_function_decl); |
| } |
| |
| /* Restore the last saved context, at the end of a nested function. |
| This function is called from language-specific code. */ |
| |
| void |
| pop_function_context_from (context) |
| tree context; |
| { |
| struct function *p = outer_function_chain; |
| |
| outer_function_chain = p->next; |
| |
| current_function_contains_functions |
| = p->contains_functions || p->inline_obstacks |
| || context == current_function_decl; |
| current_function_name = p->name; |
| current_function_decl = p->decl; |
| current_function_pops_args = p->pops_args; |
| current_function_returns_struct = p->returns_struct; |
| current_function_returns_pcc_struct = p->returns_pcc_struct; |
| current_function_returns_pointer = p->returns_pointer; |
| current_function_needs_context = p->needs_context; |
| current_function_calls_setjmp = p->calls_setjmp; |
| current_function_calls_longjmp = p->calls_longjmp; |
| current_function_calls_alloca = p->calls_alloca; |
| current_function_has_nonlocal_label = p->has_nonlocal_label; |
| current_function_has_nonlocal_goto = p->has_nonlocal_goto; |
| current_function_is_thunk = p->is_thunk; |
| current_function_args_size = p->args_size; |
| current_function_pretend_args_size = p->pretend_args_size; |
| current_function_arg_offset_rtx = p->arg_offset_rtx; |
| current_function_varargs = p->varargs; |
| current_function_stdarg = p->stdarg; |
| current_function_uses_const_pool = p->uses_const_pool; |
| current_function_uses_pic_offset_table = p->uses_pic_offset_table; |
| current_function_internal_arg_pointer = p->internal_arg_pointer; |
| max_parm_reg = p->max_parm_reg; |
| parm_reg_stack_loc = p->parm_reg_stack_loc; |
| current_function_outgoing_args_size = p->outgoing_args_size; |
| current_function_return_rtx = p->return_rtx; |
| nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot; |
| nonlocal_goto_stack_level = p->nonlocal_goto_stack_level; |
| nonlocal_labels = p->nonlocal_labels; |
| cleanup_label = p->cleanup_label; |
| return_label = p->return_label; |
| save_expr_regs = p->save_expr_regs; |
| stack_slot_list = p->stack_slot_list; |
| parm_birth_insn = p->parm_birth_insn; |
| frame_offset = p->frame_offset; |
| tail_recursion_label = p->tail_recursion_label; |
| tail_recursion_reentry = p->tail_recursion_reentry; |
| arg_pointer_save_area = p->arg_pointer_save_area; |
| rtl_expr_chain = p->rtl_expr_chain; |
| last_parm_insn = p->last_parm_insn; |
| context_display = p->context_display; |
| trampoline_list = p->trampoline_list; |
| function_call_count = p->function_call_count; |
| temp_slots = p->temp_slots; |
| temp_slot_level = p->temp_slot_level; |
| current_function_epilogue_delay_list = p->epilogue_delay_list; |
| reg_renumber = 0; |
| current_function_args_info = p->args_info; |
| |
| restore_tree_status (p, context); |
| restore_storage_status (p); |
| restore_expr_status (p); |
| restore_emit_status (p); |
| restore_stmt_status (p); |
| restore_varasm_status (p); |
| |
| if (restore_machine_status) |
| (*restore_machine_status) (p); |
| |
| /* Finish doing put_var_into_stack for any of our variables |
| which became addressable during the nested function. */ |
| { |
| struct var_refs_queue *queue = p->fixup_var_refs_queue; |
| for (; queue; queue = queue->next) |
| fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp); |
| } |
| |
| free (p); |
| |
| /* Reset variables that have known state during rtx generation. */ |
| rtx_equal_function_value_matters = 1; |
| virtuals_instantiated = 0; |
| } |
| |
| void |
| pop_function_context () |
| { |
| pop_function_context_from (current_function_decl); |
| } |
| |
| /* Allocate fixed slots in the stack frame of the current function. */ |
| |
| /* Return size needed for stack frame based on slots so far allocated. |
| This size counts from zero. It is not rounded to STACK_BOUNDARY; |
| the caller may have to do that. */ |
| |
| HOST_WIDE_INT |
| get_frame_size () |
| { |
| #ifdef FRAME_GROWS_DOWNWARD |
| return -frame_offset; |
| #else |
| return frame_offset; |
| #endif |
| } |
| |
| /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it |
| with machine mode MODE. |
| |
| ALIGN controls the amount of alignment for the address of the slot: |
| 0 means according to MODE, |
| -1 means use BIGGEST_ALIGNMENT and round size to multiple of that, |
| positive specifies alignment boundary in bits. |
| |
| We do not round to stack_boundary here. */ |
| |
| rtx |
| assign_stack_local (mode, size, align) |
| enum machine_mode mode; |
| int size; |
| int align; |
| { |
| register rtx x, addr; |
| int bigend_correction = 0; |
| int alignment; |
| |
| if (align == 0) |
| { |
| alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; |
| if (mode == BLKmode) |
| alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT; |
| } |
| else if (align == -1) |
| { |
| alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT; |
| size = CEIL_ROUND (size, alignment); |
| } |
| else |
| alignment = align / BITS_PER_UNIT; |
| |
| /* Round frame offset to that alignment. |
| We must be careful here, since FRAME_OFFSET might be negative and |
| division with a negative dividend isn't as well defined as we might |
| like. So we instead assume that ALIGNMENT is a power of two and |
| use logical operations which are unambiguous. */ |
| #ifdef FRAME_GROWS_DOWNWARD |
| frame_offset = FLOOR_ROUND (frame_offset, alignment); |
| #else |
| frame_offset = CEIL_ROUND (frame_offset, alignment); |
| #endif |
| |
| /* On a big-endian machine, if we are allocating more space than we will use, |
| use the least significant bytes of those that are allocated. */ |
| if (BYTES_BIG_ENDIAN && mode != BLKmode) |
| bigend_correction = size - GET_MODE_SIZE (mode); |
| |
| #ifdef FRAME_GROWS_DOWNWARD |
| frame_offset -= size; |
| #endif |
| |
| /* If we have already instantiated virtual registers, return the actual |
| address relative to the frame pointer. */ |
| if (virtuals_instantiated) |
| addr = plus_constant (frame_pointer_rtx, |
| (frame_offset + bigend_correction |
| + STARTING_FRAME_OFFSET)); |
| else |
| addr = plus_constant (virtual_stack_vars_rtx, |
| frame_offset + bigend_correction); |
| |
| #ifndef FRAME_GROWS_DOWNWARD |
| frame_offset += size; |
| #endif |
| |
| x = gen_rtx (MEM, mode, addr); |
| |
| stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, x, stack_slot_list); |
| |
| return x; |
| } |
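| |
| /* For example, a caller wanting a naturally aligned slot for a DImode |
| value might write |
| |
| rtx slot = assign_stack_local (DImode, GET_MODE_SIZE (DImode), 0); |
| |
| passing 0 for ALIGN so that the mode determines the alignment. */ |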
| |
| /* Assign a stack slot in a containing function. |
| First three arguments are same as in preceding function. |
| The last argument specifies the function to allocate in. */ |
| |
| rtx |
| assign_outer_stack_local (mode, size, align, function) |
| enum machine_mode mode; |
| int size; |
| int align; |
| struct function *function; |
| { |
| register rtx x, addr; |
| int bigend_correction = 0; |
| int alignment; |
| |
| /* Allocate in the memory associated with the function in whose frame |
| we are assigning. */ |
| push_obstacks (function->function_obstack, |
| function->function_maybepermanent_obstack); |
| |
| if (align == 0) |
| { |
| alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; |
| if (mode == BLKmode) |
| alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT; |
| } |
| else if (align == -1) |
| { |
| alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT; |
| size = CEIL_ROUND (size, alignment); |
| } |
| else |
| alignment = align / BITS_PER_UNIT; |
| |
| /* Round frame offset to that alignment. */ |
| #ifdef FRAME_GROWS_DOWNWARD |
| function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment); |
| #else |
| function->frame_offset = CEIL_ROUND (function->frame_offset, alignment); |
| #endif |
| |
| /* On a big-endian machine, if we are allocating more space than we will use, |
| use the least significant bytes of those that are allocated. */ |
| if (BYTES_BIG_ENDIAN && mode != BLKmode) |
| bigend_correction = size - GET_MODE_SIZE (mode); |
| |
| #ifdef FRAME_GROWS_DOWNWARD |
| function->frame_offset -= size; |
| #endif |
| addr = plus_constant (virtual_stack_vars_rtx, |
| function->frame_offset + bigend_correction); |
| #ifndef FRAME_GROWS_DOWNWARD |
| function->frame_offset += size; |
| #endif |
| |
| x = gen_rtx (MEM, mode, addr); |
| |
| function->stack_slot_list |
| = gen_rtx (EXPR_LIST, VOIDmode, x, function->stack_slot_list); |
| |
| pop_obstacks (); |
| |
| return x; |
| } |
| |
| /* Allocate a temporary stack slot and record it for possible later |
| reuse. |
| |
| MODE is the machine mode to be given to the returned rtx. |
| |
| SIZE is the size in units of the space required. We do no rounding here |
| since assign_stack_local will do any required rounding. |
| |
| KEEP is 1 if this slot is to be retained after a call to |
| free_temp_slots. Automatic variables for a block are allocated |
| with this flag. KEEP is 2, if we allocate a longer term temporary, |
| whose lifetime is controlled by CLEANUP_POINT_EXPRs. */ |
| |
| rtx |
| assign_stack_temp (mode, size, keep) |
| enum machine_mode mode; |
| int size; |
| int keep; |
| { |
| struct temp_slot *p, *best_p = 0; |
| |
| /* If SIZE is -1 it means that somebody tried to allocate a temporary |
| of a variable size. */ |
| if (size == -1) |
| abort (); |
| |
| /* First try to find an available, already-allocated temporary that is the |
| exact size we require. */ |
| for (p = temp_slots; p; p = p->next) |
| if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use) |
| break; |
| |
| /* If we didn't find one, try one that is larger than what we want. We |
| find the smallest such. */ |
| if (p == 0) |
| for (p = temp_slots; p; p = p->next) |
| if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use |
| && (best_p == 0 || best_p->size > p->size)) |
| best_p = p; |
| |
| /* Make our best, if any, the one to use. */ |
| if (best_p) |
| { |
| /* If there are enough aligned bytes left over, make them into a new |
| temp_slot so that the extra bytes don't get wasted. Do this only |
| for BLKmode slots, so that we can be sure of the alignment. */ |
| if (GET_MODE (best_p->slot) == BLKmode) |
| { |
| int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT; |
| int rounded_size = CEIL_ROUND (size, alignment); |
| |
| if (best_p->size - rounded_size >= alignment) |
| { |
| p = (struct temp_slot *) oballoc (sizeof (struct temp_slot)); |
| p->in_use = p->addr_taken = 0; |
| p->size = best_p->size - rounded_size; |
| p->base_offset = best_p->base_offset + rounded_size; |
| p->full_size = best_p->full_size - rounded_size; |
| p->slot = gen_rtx (MEM, BLKmode, |
| plus_constant (XEXP (best_p->slot, 0), |
| rounded_size)); |
| p->address = 0; |
| p->rtl_expr = 0; |
| p->next = temp_slots; |
| temp_slots = p; |
| |
| stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, p->slot, |
| stack_slot_list); |
| |
| best_p->size = rounded_size; |
| best_p->full_size = rounded_size; |
| } |
| } |
| |
| p = best_p; |
| } |
| |
| /* If we still didn't find one, make a new temporary. */ |
| if (p == 0) |
| { |
| int frame_offset_old = frame_offset; |
| p = (struct temp_slot *) oballoc (sizeof (struct temp_slot)); |
| /* If the temp slot mode doesn't indicate the alignment, |
| use the largest possible, so no one will be disappointed. */ |
| p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0); |
| /* The following slot size computation is necessary because we don't |
| know the actual size of the temporary slot until assign_stack_local |
| has performed all the frame alignment and size rounding for the |
| requested temporary. Note that extra space added for alignment |
| can be either above or below this stack slot depending on which |
| way the frame grows. We include the extra space if and only if it |
| is above this slot. */ |
| #ifdef FRAME_GROWS_DOWNWARD |
| p->size = frame_offset_old - frame_offset; |
| #else |
| p->size = size; |
| #endif |
| /* Now define the fields used by combine_temp_slots. */ |
| #ifdef FRAME_GROWS_DOWNWARD |
| p->base_offset = frame_offset; |
| p->full_size = frame_offset_old - frame_offset; |
| #else |
| p->base_offset = frame_offset_old; |
| p->full_size = frame_offset - frame_offset_old; |
| #endif |
| p->address = 0; |
| p->next = temp_slots; |
| temp_slots = p; |
| } |
| |
| p->in_use = 1; |
| p->addr_taken = 0; |
| p->rtl_expr = sequence_rtl_expr; |
| |
| if (keep == 2) |
| { |
| p->level = target_temp_slot_level; |
| p->keep = 0; |
| } |
| else |
| { |
| p->level = temp_slot_level; |
| p->keep = keep; |
| } |
| |
| /* We may be reusing an old slot, so clear any MEM flags that may have been |
| set from before. */ |
| RTX_UNCHANGING_P (p->slot) = 0; |
| MEM_IN_STRUCT_P (p->slot) = 0; |
| return p->slot; |
| } |
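| |
| /* For example, scratch memory for a SImode value that need not outlive |
| the current statement could be obtained with |
| |
| rtx temp = assign_stack_temp (SImode, GET_MODE_SIZE (SImode), 0); |
| |
| and released implicitly by the free_temp_slots call made at the end |
| of the statement. */ |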
| |
| /* Assign a temporary of given TYPE. |
| KEEP is as for assign_stack_temp. |
| MEMORY_REQUIRED is 1 if the result must be addressable stack memory; |
| it is 0 if a register is OK. |
| DONT_PROMOTE is 1 if we should not promote values in register |
| to wider modes. */ |
| |
| rtx |
| assign_temp (type, keep, memory_required, dont_promote) |
| tree type; |
| int keep; |
| int memory_required; |
| int dont_promote; |
| { |
| enum machine_mode mode = TYPE_MODE (type); |
| int unsignedp = TREE_UNSIGNED (type); |
| |
| if (mode == BLKmode || memory_required) |
| { |
| int size = int_size_in_bytes (type); |
| rtx tmp; |
| |
| /* Unfortunately, we don't yet know how to allocate variable-sized |
| temporaries. However, sometimes we have a fixed upper limit on |
| the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that |
| instead. This is the case for Chill variable-sized strings. */ |
| if (size == -1 && TREE_CODE (type) == ARRAY_TYPE |
| && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE |
| && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST) |
| size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type)); |
| |
| tmp = assign_stack_temp (mode, size, keep); |
| MEM_IN_STRUCT_P (tmp) = AGGREGATE_TYPE_P (type); |
| return tmp; |
| } |
| |
| #ifndef PROMOTE_FOR_CALL_ONLY |
| if (! dont_promote) |
| mode = promote_mode (type, mode, &unsignedp, 0); |
| #endif |
| |
| return gen_reg_rtx (mode); |
| } |
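| |
| /* For example, a temporary for a value of tree type TYPE that must be |
| addressable memory could be requested as |
| |
| rtx t = assign_temp (type, 0, 1, 0); |
| |
| whereas passing MEMORY_REQUIRED as 0 may yield a pseudo register |
| instead. */ |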
| |
| /* Combine temporary stack slots which are adjacent on the stack. |
| |
| This allows for better use of already allocated stack space. This is only |
| done for BLKmode slots because we can be sure that we won't have alignment |
| problems in this case. */ |
| |
| void |
| combine_temp_slots () |
| { |
| struct temp_slot *p, *q; |
| struct temp_slot *prev_p, *prev_q; |
| /* Determine where to free back to after this function. */ |
| rtx free_pointer = rtx_alloc (CONST_INT); |
| |
| for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots) |
| { |
| int delete_p = 0; |
| if (! p->in_use && GET_MODE (p->slot) == BLKmode) |
| for (q = p->next, prev_q = p; q; q = prev_q->next) |
| { |
| int delete_q = 0; |
| if (! q->in_use && GET_MODE (q->slot) == BLKmode) |
| { |
| if (p->base_offset + p->full_size == q->base_offset) |
| { |
| /* Q comes after P; combine Q into P. */ |
| p->size += q->size; |
| p->full_size += q->full_size; |
| delete_q = 1; |
| } |
| else if (q->base_offset + q->full_size == p->base_offset) |
| { |
| /* P comes after Q; combine P into Q. */ |
| q->size += p->size; |
| q->full_size += p->full_size; |
| delete_p = 1; |
| break; |
| } |
| } |
| /* Either delete Q or advance past it. */ |
| if (delete_q) |
| prev_q->next = q->next; |
| else |
| prev_q = q; |
| } |
| /* Either delete P or advance past it. */ |
| if (delete_p) |
| { |
| if (prev_p) |
| prev_p->next = p->next; |
| else |
| temp_slots = p->next; |
| } |
| else |
| prev_p = p; |
| } |
| |
| /* Free all the RTL made by plus_constant. */ |
| rtx_free (free_pointer); |
| } |
| |
| /* Find the temp slot corresponding to the object at address X. */ |
| |
| static struct temp_slot * |
| find_temp_slot_from_address (x) |
| rtx x; |
| { |
| struct temp_slot *p; |
| rtx next; |
| |
| for (p = temp_slots; p; p = p->next) |
| { |
| if (! p->in_use) |
| continue; |
| else if (XEXP (p->slot, 0) == x |
| || p->address == x |
| || (GET_CODE (x) == PLUS |
| && XEXP (x, 0) == virtual_stack_vars_rtx |
| && GET_CODE (XEXP (x, 1)) == CONST_INT |
| && INTVAL (XEXP (x, 1)) >= p->base_offset |
| && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)) |
| return p; |
| |
| else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST) |
| for (next = p->address; next; next = XEXP (next, 1)) |
| if (XEXP (next, 0) == x) |
| return p; |
| } |
| |
| return 0; |
| } |
| |
| /* Indicate that NEW is an alternate way of referring to the temp slot |
| that was previously known by OLD. */ |
| |
| void |
| update_temp_slot_address (old, new) |
| rtx old, new; |
| { |
| struct temp_slot *p = find_temp_slot_from_address (old); |
| |
| /* If none, return. Else add NEW as an alias. */ |
| if (p == 0) |
| return; |
| else if (p->address == 0) |
| p->address = new; |
| else |
| { |
| if (GET_CODE (p->address) != EXPR_LIST) |
| p->address = gen_rtx (EXPR_LIST, VOIDmode, p->address, NULL_RTX); |
| |
| p->address = gen_rtx (EXPR_LIST, VOIDmode, new, p->address); |
| } |
| } |
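| |
| /* For example, if the address of a slot is copied into a new pseudo R, |
| calling update_temp_slot_address with the old address and R lets |
| find_temp_slot_from_address recognize R as naming the same slot. */ |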
| |
| /* If X could be a reference to a temporary slot, mark the fact that its |
| address was taken. */ |
| |
| void |
| mark_temp_addr_taken (x) |
| rtx x; |
| { |
| struct temp_slot *p; |
| |
| if (x == 0) |
| return; |
| |
| /* If X is not in memory or is at a constant address, it cannot be in |
| a temporary slot. */ |
| if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))) |
| return; |
| |
| p = find_temp_slot_from_address (XEXP (x, 0)); |
| if (p != 0) |
| p->addr_taken = 1; |
| } |
| |
| /* If X could be a reference to a temporary slot, mark that slot as |
| belonging to one level higher than the current level. If X |
| matched one of our slots, just mark that one. Otherwise, we can't |
| easily predict which it is, so upgrade all of them. Kept slots |
| need not be touched. |
| |
| This is called when an ({...}) construct occurs and a statement |
| returns a value in memory. */ |
| |
| void |
| preserve_temp_slots (x) |
| rtx x; |
| { |
| struct temp_slot *p = 0; |
| |
| /* If there is no result, we still might have some objects whose address |
| were taken, so we need to make sure they stay around. */ |
| if (x == 0) |
| { |
| for (p = temp_slots; p; p = p->next) |
| if (p->in_use && p->level == temp_slot_level && p->addr_taken) |
| p->level--; |
| |
| return; |
| } |
| |
| /* If X is a register that is being used as a pointer, see if we have |
| a temporary slot we know it points to. To be consistent with |
| the code below, we really should preserve all non-kept slots |
| if we can't find a match, but that seems to be much too costly. */ |
| if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x))) |
| p = find_temp_slot_from_address (x); |
| |
| /* If X is not in memory or is at a constant address, it cannot be in |
| a temporary slot, but it can contain something whose address was |
| taken. */ |
| if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))) |
| { |
| for (p = temp_slots; p; p = p->next) |
| if (p->in_use && p->level == temp_slot_level && p->addr_taken) |
| p->level--; |
| |
| return; |
| } |
| |
| /* First see if we can find a match. */ |
| if (p == 0) |
| p = find_temp_slot_from_address (XEXP (x, 0)); |
| |
| if (p != 0) |
| { |
| /* Move everything at our level whose address was taken to our new |
| level in case we used its address. */ |
| struct temp_slot *q; |
| |
| if (p->level == temp_slot_level) |
| { |
| for (q = temp_slots; q; q = q->next) |
| if (q != p && q->addr_taken && q->level == p->level) |
| q->level--; |
| |
| p->level--; |
| p->addr_taken = 0; |
| } |
| return; |
| } |
| |
| /* Otherwise, preserve all non-kept slots at this level. */ |
| for (p = temp_slots; p; p = p->next) |
| if (p->in_use && p->level == temp_slot_level && ! p->keep) |
| p->level--; |
| } |
| |
| /* X is the result of an RTL_EXPR. If it is a temporary slot associated |
| with that RTL_EXPR, promote it into a temporary slot at the present |
| level so it will not be freed when we free slots made in the |
| RTL_EXPR. */ |
| |
| void |
| preserve_rtl_expr_result (x) |
| rtx x; |
| { |
| struct temp_slot *p; |
| |
| /* If X is not in memory or is at a constant address, it cannot be in |
| a temporary slot. */ |
| if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))) |
| return; |
| |
| /* If we can find a match, move it to our level unless it is already at |
| an upper level. */ |
| p = find_temp_slot_from_address (XEXP (x, 0)); |
| if (p != 0) |
| { |
| p->level = MIN (p->level, temp_slot_level); |
| p->rtl_expr = 0; |
| } |
| |
| return; |
| } |
| |
| /* Free all temporaries used so far. This is normally called at the end |
| of generating code for a statement. Don't free any temporaries |
| currently in use for an RTL_EXPR that hasn't yet been emitted. |
| We could eventually do better than this, since a slot could be reused |
| while generating the same RTL_EXPR, but this is complex and probably not |
| worthwhile. */ |
| |
| void |
| free_temp_slots () |
| { |
| struct temp_slot *p; |
| |
| for (p = temp_slots; p; p = p->next) |
| if (p->in_use && p->level == temp_slot_level && ! p->keep |
| && p->rtl_expr == 0) |
| p->in_use = 0; |
| |
| combine_temp_slots (); |
| } |
| |
| /* Free all temporary slots used in T, an RTL_EXPR node. */ |
| |
| void |
| free_temps_for_rtl_expr (t) |
| tree t; |
| { |
| struct temp_slot *p; |
| |
| for (p = temp_slots; p; p = p->next) |
| if (p->rtl_expr == t) |
| p->in_use = 0; |
| |
| combine_temp_slots (); |
| } |
| |
| /* Mark all temporaries ever allocated in this function as not suitable |
| for reuse until the current level is exited. */ |
| |
| void |
| mark_all_temps_used () |
| { |
| struct temp_slot *p; |
| |
| for (p = temp_slots; p; p = p->next) |
| { |
| p->in_use = p->keep = 1; |
| p->level = MIN (p->level, temp_slot_level); |
| } |
| } |
| |
| /* Push deeper into the nesting level for stack temporaries. */ |
| |
| void |
| push_temp_slots () |
| { |
| temp_slot_level++; |
| } |
| |
| /* Pop a temporary nesting level. All slots in use in the current level |
| are freed. */ |
| |
| void |
| pop_temp_slots () |
| { |
| struct temp_slot *p; |
| |
| for (p = temp_slots; p; p = p->next) |
| if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0) |
| p->in_use = 0; |
| |
| combine_temp_slots (); |
| |
| temp_slot_level--; |
| } |
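| |
| /* A typical pairing might look like |
| |
| push_temp_slots (); |
| ... expand an expression, perhaps yielding a result RESULT ... |
| preserve_temp_slots (RESULT); |
| pop_temp_slots (); |
| |
| so that temporaries die at the pop unless explicitly preserved. */ |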
| |
| /* Initialize temporary slots. */ |
| |
| void |
| init_temp_slots () |
| { |
| /* We have not allocated any temporaries yet. */ |
| temp_slots = 0; |
| temp_slot_level = 0; |
| target_temp_slot_level = 0; |
| } |
| |
| /* Retroactively move an auto variable from a register to a stack slot. |
| This is done when an address-reference to the variable is seen. */ |
| |
| void |
| put_var_into_stack (decl) |
| tree decl; |
| { |
| register rtx reg; |
| enum machine_mode promoted_mode, decl_mode; |
| struct function *function = 0; |
| tree context; |
| |
| if (output_bytecode) |
| return; |
| |
| context = decl_function_context (decl); |
| |
| /* Get the current rtl used for this object and its original mode. */ |
| reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl); |
| |
| /* No need to do anything if decl has no rtx yet |
| since in that case caller is setting TREE_ADDRESSABLE |
| and a stack slot will be assigned when the rtl is made. */ |
| if (reg == 0) |
| return; |
| |
| /* Get the declared mode for this object. */ |
| decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl)) |
| : DECL_MODE (decl)); |
| /* Get the mode it's actually stored in. */ |
| promoted_mode = GET_MODE (reg); |
| |
| /* If this variable comes from an outer function, |
| find that function's saved context. */ |
| if (context != current_function_decl && context != inline_function_decl) |
| for (function = outer_function_chain; function; function = function->next) |
| if (function->decl == context) |
| break; |
| |
| /* If this is a variable-size object with a pseudo to address it, |
| put that pseudo into the stack, if the var is nonlocal. */ |
| if (DECL_NONLOCAL (decl) |
| && GET_CODE (reg) == MEM |
| && GET_CODE (XEXP (reg, 0)) == REG |
| && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER) |
| { |
| reg = XEXP (reg, 0); |
| decl_mode = promoted_mode = GET_MODE (reg); |
| } |
| |
| /* Now we should have a value that resides in one or more pseudo regs. */ |
| |
| if (GET_CODE (reg) == REG) |
| put_reg_into_stack (function, reg, TREE_TYPE (decl), |
| promoted_mode, decl_mode, TREE_SIDE_EFFECTS (decl)); |
| else if (GET_CODE (reg) == CONCAT) |
| { |
| /* A CONCAT contains two pseudos; put them both in the stack. |
| We do it so they end up consecutive. */ |
| enum machine_mode part_mode = GET_MODE (XEXP (reg, 0)); |
| tree part_type = TREE_TYPE (TREE_TYPE (decl)); |
| #ifdef FRAME_GROWS_DOWNWARD |
| /* Since part 0 should have a lower address, do it second. */ |
| put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode, |
| part_mode, TREE_SIDE_EFFECTS (decl)); |
| put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode, |
| part_mode, TREE_SIDE_EFFECTS (decl)); |
| #else |
| put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode, |
| part_mode, TREE_SIDE_EFFECTS (decl)); |
| put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode, |
| part_mode, TREE_SIDE_EFFECTS (decl)); |
| #endif |
| |
| /* Change the CONCAT into a combined MEM for both parts. */ |
| PUT_CODE (reg, MEM); |
| MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0)); |
| |
| /* The two parts are in memory order already. |
| Use the lower part's address as ours. */ |
| XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0); |
| /* Prevent sharing of rtl that might lose. */ |
| if (GET_CODE (XEXP (reg, 0)) == PLUS) |
| XEXP (reg, 0) = copy_rtx (XEXP (reg, 0)); |
| } |
| else |
| return; |
| |
| if (flag_check_memory_usage) |
| emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3, |
| XEXP (reg, 0), ptr_mode, |
| GEN_INT (GET_MODE_SIZE (GET_MODE (reg))), |
| TYPE_MODE (sizetype), |
| GEN_INT (MEMORY_USE_RW), QImode); |
| } |
| |
| /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG |
| into the stack frame of FUNCTION (0 means the current function). |
| DECL_MODE is the machine mode of the user-level data type. |
| PROMOTED_MODE is the machine mode of the register. |
| VOLATILE_P is nonzero if this is for a "volatile" decl. */ |
| |
| static void |
| put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p) |
| struct function *function; |
| rtx reg; |
| tree type; |
| enum machine_mode promoted_mode, decl_mode; |
| int volatile_p; |
| { |
| rtx new = 0; |
| |
| if (function) |
| { |
| if (REGNO (reg) < function->max_parm_reg) |
| new = function->parm_reg_stack_loc[REGNO (reg)]; |
| if (new == 0) |
| new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), |
| 0, function); |
| } |
| else |
| { |
| if (REGNO (reg) < max_parm_reg) |
| new = parm_reg_stack_loc[REGNO (reg)]; |
| if (new == 0) |
| new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0); |
| } |
| |
| PUT_MODE (reg, decl_mode); |
| XEXP (reg, 0) = XEXP (new, 0); |
| /* `volatil' bit means one thing for MEMs, another entirely for REGs. */ |
| MEM_VOLATILE_P (reg) = volatile_p; |
| PUT_CODE (reg, MEM); |
| |
| /* If this is a memory ref that contains aggregate components, |
| mark it as such for cse and loop optimize. */ |
| MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type); |
| |
| /* Now make sure that all refs to the variable, previously made |
| when it was a register, are fixed up to be valid again. */ |
| if (function) |
| { |
| struct var_refs_queue *temp; |
| |
| /* Variable is inherited; fix it up when we get back to its function. */ |
| push_obstacks (function->function_obstack, |
| function->function_maybepermanent_obstack); |
| |
| /* See comment in restore_tree_status in tree.c for why this needs to be |
| on saveable obstack. */ |
| temp |
| = (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue)); |
| temp->modified = reg; |
| temp->promoted_mode = promoted_mode; |
| temp->unsignedp = TREE_UNSIGNED (type); |
| temp->next = function->fixup_var_refs_queue; |
| function->fixup_var_refs_queue = temp; |
| pop_obstacks (); |
| } |
| else |
| /* Variable is local; fix it up now. */ |
| fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type)); |
| } |
| |
| static void |
| fixup_var_refs (var, promoted_mode, unsignedp) |
| rtx var; |
| enum machine_mode promoted_mode; |
| int unsignedp; |
| { |
| tree pending; |
| rtx first_insn = get_insns (); |
| struct sequence_stack *stack = sequence_stack; |
| tree rtl_exps = rtl_expr_chain; |
| |
| /* Must scan all insns for stack-refs that exceed the limit. */ |
| fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0); |
| |
| /* Scan all pending sequences too. */ |
| for (; stack; stack = stack->next) |
| { |
| push_to_sequence (stack->first); |
| fixup_var_refs_insns (var, promoted_mode, unsignedp, |
| stack->first, stack->next != 0); |
| /* Update remembered end of sequence |
| in case we added an insn at the end. */ |
| stack->last = get_last_insn (); |
| end_sequence (); |
| } |
| |
| /* Scan all waiting RTL_EXPRs too. */ |
| for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending)) |
| { |
| rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending)); |
| if (seq != const0_rtx && seq != 0) |
| { |
| push_to_sequence (seq); |
| fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0); |
| end_sequence (); |
| } |
| } |
| } |
| |
| /* REPLACEMENTS is a pointer to a list of struct fixup_replacement entries, and X is |
| some part of an insn. Return a struct fixup_replacement whose OLD |
| value is equal to X. Allocate a new structure if no such entry exists. */ |
| |
| static struct fixup_replacement * |
| find_fixup_replacement (replacements, x) |
| struct fixup_replacement **replacements; |
| rtx x; |
| { |
| struct fixup_replacement *p; |
| |
| /* See if we have already replaced this. */ |
| for (p = *replacements; p && p->old != x; p = p->next) |
| ; |
| |
| if (p == 0) |
| { |
| p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement)); |
| p->old = x; |
| p->new = 0; |
| p->next = *replacements; |
| *replacements = p; |
| } |
| |
| return p; |
| } |
| |
| /* Scan the insn-chain starting with INSN for refs to VAR |
| and fix them up. TOPLEVEL is nonzero if this chain is the |
| main chain of insns for the current function. */ |
| |
| static void |
| fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel) |
| rtx var; |
| enum machine_mode promoted_mode; |
| int unsignedp; |
| rtx insn; |
| int toplevel; |
| { |
| rtx call_dest = 0; |
| |
| while (insn) |
| { |
| rtx next = NEXT_INSN (insn); |
| rtx note; |
| if (GET_RTX_CLASS (GET_CODE (insn)) == 'i') |
| { |
| /* If this is a CLOBBER of VAR, delete it. |
| |
| If it has a REG_LIBCALL note, delete the REG_LIBCALL |
| and REG_RETVAL notes too. */ |
| if (GET_CODE (PATTERN (insn)) == CLOBBER |
| && XEXP (PATTERN (insn), 0) == var) |
| { |
| if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0) |
| /* The REG_LIBCALL note will go away since we are going to |
| turn INSN into a NOTE, so just delete the |
| corresponding REG_RETVAL note. */ |
| remove_note (XEXP (note, 0), |
| find_reg_note (XEXP (note, 0), REG_RETVAL, |
| NULL_RTX)); |
| |
| /* In unoptimized compilation, we shouldn't call delete_insn |
| except in jump.c doing warnings. */ |
| PUT_CODE (insn, NOTE); |
| NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED; |
| NOTE_SOURCE_FILE (insn) = 0; |
| } |
| |
| /* The insn to load VAR from a home in the arglist |
| is now a no-op. When we see it, just delete it. */ |
| else if (toplevel |
| && GET_CODE (PATTERN (insn)) == SET |
| && SET_DEST (PATTERN (insn)) == var |
| /* If this represents the result of an insn group, |
| don't delete the insn. */ |
| && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0 |
| && rtx_equal_p (SET_SRC (PATTERN (insn)), var)) |
| { |
| /* In unoptimized compilation, we shouldn't call delete_insn |
| except in jump.c doing warnings. */ |
| PUT_CODE (insn, NOTE); |
| NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED; |
| NOTE_SOURCE_FILE (insn) = 0; |
| if (insn == last_parm_insn) |
| last_parm_insn = PREV_INSN (next); |
| } |
| else |
| { |
| struct fixup_replacement *replacements = 0; |
| rtx next_insn = NEXT_INSN (insn); |
| |
| #ifdef SMALL_REGISTER_CLASSES |
| /* If the insn that copies the results of a CALL_INSN |
| into a pseudo now references VAR, we have to use an |
| intermediate pseudo since we want the life of the |
| return value register to be only a single insn. |
| |
| If we don't use an intermediate pseudo, such things as |
| address computations to make the address of VAR valid |
| if it is not can be placed between the CALL_INSN and INSN. |
| |
| To make sure this doesn't happen, we record the destination |
| of the CALL_INSN and see if the next insn uses both that |
| and VAR. */ |
| |
| if (SMALL_REGISTER_CLASSES) |
| { |
| if (call_dest != 0 && GET_CODE (insn) == INSN |
| && reg_mentioned_p (var, PATTERN (insn)) |
| && reg_mentioned_p (call_dest, PATTERN (insn))) |
| { |
| rtx temp = gen_reg_rtx (GET_MODE (call_dest)); |
| |
| emit_insn_before (gen_move_insn (temp, call_dest), insn); |
| |
| PATTERN (insn) = replace_rtx (PATTERN (insn), |
| call_dest, temp); |
| } |
| |
| if (GET_CODE (insn) == CALL_INSN |
| && GET_CODE (PATTERN (insn)) == SET) |
| call_dest = SET_DEST (PATTERN (insn)); |
| else if (GET_CODE (insn) == CALL_INSN |
| && GET_CODE (PATTERN (insn)) == PARALLEL |
| && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET) |
| call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0)); |
| else |
| call_dest = 0; |
| } |
| #endif |
| |
| /* See if we have to do anything to INSN now that VAR is in |
| memory. If it needs to be loaded into a pseudo, use a single |
| pseudo for the entire insn in case there is a MATCH_DUP |
| between two operands. We pass a pointer to the head of |
| a list of struct fixup_replacements. If fixup_var_refs_1 |
| needs to allocate pseudos or replacement MEMs (for SUBREGs), |
| it will record them in this list. |
| |
| If it allocated a pseudo for any replacement, we copy into |
| it here. */ |
| |
| fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn, |
| &replacements); |
| |
| /* If this is last_parm_insn, and any instructions were output |
| after it to fix it up, then we must set last_parm_insn to |
| the last such instruction emitted. */ |
| if (insn == last_parm_insn) |
| last_parm_insn = PREV_INSN (next_insn); |
| |
| while (replacements) |
| { |
| if (GET_CODE (replacements->new) == REG) |
| { |
| rtx insert_before; |
| rtx seq; |
| |
| /* OLD might be a (subreg (mem)). */ |
| if (GET_CODE (replacements->old) == SUBREG) |
| replacements->old |
| = fixup_memory_subreg (replacements->old, insn, 0); |
| else |
| replacements->old |
| = fixup_stack_1 (replacements->old, insn); |
| |
| insert_before = insn; |
| |
| /* If we are changing the mode, do a conversion. |
| This might be wasteful, but combine.c will |
| eliminate much of the waste. */ |
| |
| if (GET_MODE (replacements->new) |
| != GET_MODE (replacements->old)) |
| { |
| start_sequence (); |
| convert_move (replacements->new, |
| replacements->old, unsignedp); |
| seq = gen_sequence (); |
| end_sequence (); |
| } |
| else |
| seq = gen_move_insn (replacements->new, |
| replacements->old); |
| |
| emit_insn_before (seq, insert_before); |
| } |
| |
| replacements = replacements->next; |
| } |
| } |
| |
| /* Also fix up any invalid exprs in the REG_NOTES of this insn. |
| But don't touch other insns referred to by reg-notes; |
| we will get them elsewhere. */ |
| for (note = REG_NOTES (insn); note; note = XEXP (note, 1)) |
| if (GET_CODE (note) != INSN_LIST) |
| XEXP (note, 0) |
| = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1); |
| } |
| insn = next; |
| } |
| } |
| |
| /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE. |
| See if the rtx expression at *LOC in INSN needs to be changed. |
| |
| REPLACEMENTS is a pointer to a list head that starts out zero, but may |
| contain a list of original rtx's and replacements. If we find that we need |
| to modify this insn by replacing a memory reference with a pseudo or by |
| making a new MEM to implement a SUBREG, we consult that list to see if |
| we have already chosen a replacement. If none has already been allocated, |
| we allocate it and update the list. fixup_var_refs_insns will copy VAR |
| or the SUBREG, as appropriate, to the pseudo. */ |
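| |
| /* For instance (hypothetical RTL): if VAR is now a MEM and INSN is |
| (set (reg:SI 100) (mult:SI VAR VAR)), both occurrences of VAR must |
| be rewritten to the same new pseudo in case the insn pattern uses a |
| MATCH_DUP; the replacement list is what enforces that sharing. */ |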
| |
| static void |
| fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements) |
| register rtx var; |
| enum machine_mode promoted_mode; |
| register rtx *loc; |
| rtx insn; |
| struct fixup_replacement **replacements; |
| { |
| register int i; |
| register rtx x = *loc; |
| RTX_CODE code = GET_CODE (x); |
| register char *fmt; |
| register rtx tem, tem1; |
| struct fixup_replacement *replacement; |
| |
| switch (code) |
| { |
| case MEM: |
| if (var == x) |
| { |
| /* If we already have a replacement, use it. Otherwise, |
| try to fix up this address in case it is invalid. */ |
| |
| replacement = find_fixup_replacement (replacements, var); |
| if (replacement->new) |
| { |
| *loc = replacement->new; |
| return; |
| } |
| |
| *loc = replacement->new = x = fixup_stack_1 (x, insn); |
| |
| /* Unless we are forcing memory to register or we changed the mode, |
| we can leave things the way they are if the insn is valid. */ |
| |
| INSN_CODE (insn) = -1; |
| if (! flag_force_mem && GET_MODE (x) == promoted_mode |
| && recog_memoized (insn) >= 0) |
| return; |
| |
| *loc = replacement->new = gen_reg_rtx (promoted_mode); |
| return; |
| } |
| |
| /* If X contains VAR, we need to unshare it here so that we update |
| each occurrence separately. But all identical MEMs in one insn |
| must be replaced with the same rtx because of the possibility of |
| MATCH_DUPs. */ |
| |
| if (reg_mentioned_p (var, x)) |
| { |
| replacement = find_fixup_replacement (replacements, x); |
| if (replacement->new == 0) |
| replacement->new = copy_most_rtx (x, var); |
| |
| *loc = x = replacement->new; |
| } |
| break; |
| |
| case REG: |
| case CC0: |
| case PC: |
| case CONST_INT: |
| case CONST: |
| case SYMBOL_REF: |
| case LABEL_REF: |
| case CONST_DOUBLE: |
| return; |
| |
| case SIGN_EXTRACT: |
| case ZERO_EXTRACT: |
| /* Note that in some cases those types of expressions are altered |
| by optimize_bit_field, and do not survive to get here. */ |
| if (XEXP (x, 0) == var |
| || (GET_CODE (XEXP (x, 0)) == SUBREG |
| && SUBREG_REG (XEXP (x, 0)) == var)) |
| { |
| /* Get TEM as a valid MEM in the mode presently in the insn. |
| |
| We don't worry about the possibility of MATCH_DUP here; it |
| is highly unlikely and would be tricky to handle. */ |
| |
| tem = XEXP (x, 0); |
| if (GET_CODE (tem) == SUBREG) |
| { |
| if (GET_MODE_BITSIZE (GET_MODE (tem)) |
| > GET_MODE_BITSIZE (GET_MODE (var))) |
| { |
| replacement = find_fixup_replacement (replacements, var); |
| if (replacement->new == 0) |
| replacement->new = gen_reg_rtx (GET_MODE (var)); |
| SUBREG_REG (tem) = replacement->new; |
| } |
| else |
| tem = fixup_memory_subreg (tem, insn, 0); |
| } |
| else |
| tem = fixup_stack_1 (tem, insn); |
| |
| /* Unless we want to load from memory, get TEM into the proper mode |
| for an extract from memory. This can only be done if the |
| extract is at a constant position and length. */ |
| |
| if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT |
| && GET_CODE (XEXP (x, 2)) == CONST_INT |
| && ! mode_dependent_address_p (XEXP (tem, 0)) |
| && ! MEM_VOLATILE_P (tem)) |
| { |
| enum machine_mode wanted_mode = VOIDmode; |
| enum machine_mode is_mode = GET_MODE (tem); |
| int width = INTVAL (XEXP (x, 1)); |
| int pos = INTVAL (XEXP (x, 2)); |
| |
| #ifdef HAVE_extzv |
| if (GET_CODE (x) == ZERO_EXTRACT) |
| wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1]; |
| #endif |
| #ifdef HAVE_extv |
| if (GET_CODE (x) == SIGN_EXTRACT) |
| wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1]; |
| #endif |
| /* If we have a narrower mode, we can do something. */ |
| if (wanted_mode != VOIDmode |
| && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode)) |
| { |
| int offset = pos / BITS_PER_UNIT; |
| rtx old_pos = XEXP (x, 2); |
| rtx newmem; |
| |
| /* If the bytes and bits are counted differently, we |
| must adjust the offset. */ |
| if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN) |
| offset = (GET_MODE_SIZE (is_mode) |
| - GET_MODE_SIZE (wanted_mode) - offset); |
| |
| pos %= GET_MODE_BITSIZE (wanted_mode); |
| |
| newmem = gen_rtx (MEM, wanted_mode, |
| plus_constant (XEXP (tem, 0), offset)); |
| RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem); |
| MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem); |
| MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem); |
| |
| /* Make the change and see if the insn remains valid. */ |
| INSN_CODE (insn) = -1; |
| XEXP (x, 0) = newmem; |
| XEXP (x, 2) = GEN_INT (pos); |
| |
| if (recog_memoized (insn) >= 0) |
| return; |
| |
| /* Otherwise, restore old position. XEXP (x, 0) will be |
| restored later. */ |
| XEXP (x, 2) = old_pos; |
| } |
| } |
| |
| /* If we get here, the bitfield extract insn can't accept a memory |
| reference. Copy the input into a register. */ |
| |
| tem1 = gen_reg_rtx (GET_MODE (tem)); |
| emit_insn_before (gen_move_insn (tem1, tem), insn); |
| XEXP (x, 0) = tem1; |
| return; |
| } |
| break; |
| |
| case SUBREG: |
| if (SUBREG_REG (x) == var) |
| { |
| /* If this is a special SUBREG made because VAR was promoted |
| from a wider mode, replace it with VAR and call ourself |
| recursively, this time saying that the object previously |
| had its current mode (by virtue of the SUBREG). */ |
| |
| if (SUBREG_PROMOTED_VAR_P (x)) |
| { |
| *loc = var; |
| fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements); |
| return; |
| } |
| |
| /* If this SUBREG makes VAR wider, it has become a paradoxical |
| SUBREG with VAR in memory, but these aren't allowed at this |
| stage of the compilation. So load VAR into a pseudo and take |
| a SUBREG of that pseudo. */ |
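| /* E.g., (subreg:DI (mem:SI ...) 0), with DImode wider than the |
| mode of VAR: we substitute a fresh pseudo in VAR's mode under the |
| SUBREG, and our caller later copies VAR into that pseudo. */ |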
| if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var))) |
| { |
| replacement = find_fixup_replacement (replacements, var); |
| if (replacement->new == 0) |
| replacement->new = gen_reg_rtx (GET_MODE (var)); |
| SUBREG_REG (x) = replacement->new; |
| return; |
| } |
| |
| /* See if we have already found a replacement for this SUBREG. |
| If so, use it. Otherwise, make a MEM and see if the insn |
| is recognized. If not, or if we should force MEM into a register, |
| make a pseudo for this SUBREG. */ |
| replacement = find_fixup_replacement (replacements, x); |
| if (replacement->new) |
| { |
| *loc = replacement->new; |
| return; |
| } |
| |
| replacement->new = *loc = fixup_memory_subreg (x, insn, 0); |
| |
| INSN_CODE (insn) = -1; |
| if (! flag_force_mem && recog_memoized (insn) >= 0) |
| return; |
| |
| *loc = replacement->new = gen_reg_rtx (GET_MODE (x)); |
| return; |
| } |
| break; |
| |
| case SET: |
| /* First do special simplification of bit-field references. */ |
| if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT |
| || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT) |
| optimize_bit_field (x, insn, 0); |
| if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT |
| || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT) |
| optimize_bit_field (x, insn, NULL_PTR); |
| |
| /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object |
| into a register and then store it back out. */ |
| if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT |
| && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG |
| && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var |
| && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0))) |
| > GET_MODE_SIZE (GET_MODE (var)))) |
| { |
| replacement = find_fixup_replacement (replacements, var); |
| if (replacement->new == 0) |
| replacement->new = gen_reg_rtx (GET_MODE (var)); |
| |
| SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new; |
| emit_insn_after (gen_move_insn (var, replacement->new), insn); |
| } |
| |
| /* If SET_DEST is now a paradoxical SUBREG, put the result of this |
| insn into a pseudo and store the low part of the pseudo into VAR. */ |
| if (GET_CODE (SET_DEST (x)) == SUBREG |
| && SUBREG_REG (SET_DEST (x)) == var |
| && (GET_MODE_SIZE (GET_MODE (SET_DEST (x))) |
| > GET_MODE_SIZE (GET_MODE (var)))) |
| { |
| SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x))); |
| emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var), |
| tem)), |
| insn); |
| break; |
| } |
| |
| { |
| rtx dest = SET_DEST (x); |
| rtx src = SET_SRC (x); |
| rtx outerdest = dest; |
| |
| while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART |
| || GET_CODE (dest) == SIGN_EXTRACT |
| || GET_CODE (dest) == ZERO_EXTRACT) |
| dest = XEXP (dest, 0); |
| |
| if (GET_CODE (src) == SUBREG) |
| src = XEXP (src, 0); |
| |
| /* If VAR does not appear at the top level of the SET |
| just scan the lower levels of the tree. */ |
| |
| if (src != var && dest != var) |
| break; |
| |
| /* We will need to rerecognize this insn. */ |
| INSN_CODE (insn) = -1; |
| |
| #ifdef HAVE_insv |
| if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var) |
| { |
| /* Since this case will return, ensure we fixup all the |
| operands here. */ |
| fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1), |
| insn, replacements); |
| fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2), |
| insn, replacements); |
| fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x), |
| insn, replacements); |
| |
| tem = XEXP (outerdest, 0); |
| |
| /* Clean up (SUBREG:SI (MEM:mode ...) 0) |
| that may appear inside a ZERO_EXTRACT. |
| This was legitimate when the MEM was a REG. */ |
| if (GET_CODE (tem) == SUBREG |
| && SUBREG_REG (tem) == var) |
| tem = fixup_memory_subreg (tem, insn, 0); |
| else |
| tem = fixup_stack_1 (tem, insn); |
| |
| if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT |
| && GET_CODE (XEXP (outerdest, 2)) == CONST_INT |
| && ! mode_dependent_address_p (XEXP (tem, 0)) |
| && ! MEM_VOLATILE_P (tem)) |
| { |
| enum machine_mode wanted_mode |
| = insn_operand_mode[(int) CODE_FOR_insv][0]; |
| enum machine_mode is_mode = GET_MODE (tem); |
| int width = INTVAL (XEXP (outerdest, 1)); |
| int pos = INTVAL (XEXP (outerdest, 2)); |
| |
| /* If we have a narrower mode, we can do something. */ |
| if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode)) |
| { |
| int offset = pos / BITS_PER_UNIT; |
| rtx old_pos = XEXP (outerdest, 2); |
| rtx newmem; |
| |
| if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN) |
| offset = (GET_MODE_SIZE (is_mode) |
| - GET_MODE_SIZE (wanted_mode) - offset); |
| |
| pos %= GET_MODE_BITSIZE (wanted_mode); |
| |
| newmem = gen_rtx (MEM, wanted_mode, |
| plus_constant (XEXP (tem, 0), offset)); |
| RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem); |
| MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem); |
| MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem); |
| |
| /* Make the change and see if the insn remains valid. */ |
| INSN_CODE (insn) = -1; |
| XEXP (outerdest, 0) = newmem; |
| XEXP (outerdest, 2) = GEN_INT (pos); |
| |
| if (recog_memoized (insn) >= 0) |
| return; |
| |
| /* Otherwise, restore old position. XEXP (outerdest, 0) will be |
| restored later. */ |
| XEXP (outerdest, 2) = old_pos; |
| } |
| } |
| |
| /* If we get here, the bit-field store doesn't allow memory |
| or isn't located at a constant position. Load the value into |
| a register, do the store, and put it back into memory. */ |
| |
| tem1 = gen_reg_rtx (GET_MODE (tem)); |
| emit_insn_before (gen_move_insn (tem1, tem), insn); |
| emit_insn_after (gen_move_insn (tem, tem1), insn); |
| XEXP (outerdest, 0) = tem1; |
| return; |
| } |
| #endif |
| |
| /* STRICT_LOW_PART is a no-op on memory references |
| and it can cause combinations to be unrecognizable, |
| so eliminate it. */ |
| |
| if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART) |
| SET_DEST (x) = XEXP (SET_DEST (x), 0); |
| |
| /* A valid insn to copy VAR into or out of a register |
| must be left alone, to avoid an infinite loop here. |
| If the reference to VAR is by a subreg, fix that up, |
| since SUBREG is not valid for a memref. |
| Also fix up the address of the stack slot. |
| |
| Note that we must not try to recognize the insn until |
| after we know that we have valid addresses and no |
| (subreg (mem ...) ...) constructs, since these interfere |
| with determining the validity of the insn. */ |
| |
| if ((SET_SRC (x) == var |
| || (GET_CODE (SET_SRC (x)) == SUBREG |
| && SUBREG_REG (SET_SRC (x)) == var)) |
| && (GET_CODE (SET_DEST (x)) == REG |
| || (GET_CODE (SET_DEST (x)) == SUBREG |
| && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG)) |
| && GET_MODE (var) == promoted_mode |
| && x == single_set (insn)) |
| { |
| rtx pat; |
| |
| replacement = find_fixup_replacement (replacements, SET_SRC (x)); |
| if (replacement->new) |
| SET_SRC (x) = replacement->new; |
| else if (GET_CODE (SET_SRC (x)) == SUBREG) |
| SET_SRC (x) = replacement->new |
| = fixup_memory_subreg (SET_SRC (x), insn, 0); |
| else |
| SET_SRC (x) = replacement->new |
| = fixup_stack_1 (SET_SRC (x), insn); |
| |
| if (recog_memoized (insn) >= 0) |
| return; |
| |
| /* INSN is not valid, but we know that we want to |
| copy SET_SRC (x) to SET_DEST (x) in some way. So |
| we generate the move and see whether it requires more |
| than one insn. If it does, we emit those insns and |
| delete INSN. Otherwise, we can just replace the pattern |
| of INSN; we have already verified above that INSN has |
| no other function than to do X. */ |
| |
| pat = gen_move_insn (SET_DEST (x), SET_SRC (x)); |
| if (GET_CODE (pat) == SEQUENCE) |
| { |
| emit_insn_after (pat, insn); |
| PUT_CODE (insn, NOTE); |
| NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED; |
| NOTE_SOURCE_FILE (insn) = 0; |
| } |
| else |
| PATTERN (insn) = pat; |
| |
| return; |
| } |
| |
| if ((SET_DEST (x) == var |
| || (GET_CODE (SET_DEST (x)) == SUBREG |
| && SUBREG_REG (SET_DEST (x)) == var)) |
| && (GET_CODE (SET_SRC (x)) == REG |
| || (GET_CODE (SET_SRC (x)) == SUBREG |
| && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG)) |
| && GET_MODE (var) == promoted_mode |
| && x == single_set (insn)) |
| { |
| rtx pat; |
| |
| if (GET_CODE (SET_DEST (x)) == SUBREG) |
| SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0); |
| else |
| SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn); |
| |
| if (recog_memoized (insn) >= 0) |
| return; |
| |
| pat = gen_move_insn (SET_DEST (x), SET_SRC (x)); |
| if (GET_CODE (pat) == SEQUENCE) |
| { |
| emit_insn_after (pat, insn); |
| PUT_CODE (insn, NOTE); |
| NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED; |
| NOTE_SOURCE_FILE (insn) = 0; |
| } |
| else |
| PATTERN (insn) = pat; |
| |
| return; |
| } |
| |
| /* Otherwise, storing into VAR must be handled specially |
| by storing into a temporary and copying that into VAR |
| with a new insn after this one. Note that this case |
| will be used when storing into a promoted scalar since |
| the insn will now have different modes on the input |
| and output and hence will be invalid (except for the case |
| of setting it to a constant, which does not need any |
| change if it is valid). We generate extra code in that case, |
| but combine.c will eliminate it. */ |
| |
| if (dest == var) |
| { |
| rtx temp; |
| rtx fixeddest = SET_DEST (x); |
| |
| /* A STRICT_LOW_PART around a MEM can be discarded. */ |
| if (GET_CODE (fixeddest) == STRICT_LOW_PART) |
| fixeddest = XEXP (fixeddest, 0); |
| /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */ |
| if (GET_CODE (fixeddest) == SUBREG) |
| { |
| fixeddest = fixup_memory_subreg (fixeddest, insn, 0); |
| promoted_mode = GET_MODE (fixeddest); |
| } |
| else |
| fixeddest = fixup_stack_1 (fixeddest, insn); |
| |
| temp = gen_reg_rtx (promoted_mode); |
| |
| emit_insn_after (gen_move_insn (fixeddest, |
| gen_lowpart (GET_MODE (fixeddest), |
| temp)), |
| insn); |
| |
| SET_DEST (x) = temp; |
| } |
| } |
| } |
| |
| /* Nothing special about this RTX; fix its operands. */ |
| |
| fmt = GET_RTX_FORMAT (code); |
| for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
| { |
| if (fmt[i] == 'e') |
| fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements); |
| if (fmt[i] == 'E') |
| { |
| register int j; |
| for (j = 0; j < XVECLEN (x, i); j++) |
| fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j), |
| insn, replacements); |
| } |
| } |
| } |
| |
| /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)), |
| return an rtx (MEM:m1 newaddr) which is equivalent. |
| If any insns must be emitted to compute NEWADDR, put them before INSN. |
| |
| UNCRITICAL nonzero means accept paradoxical subregs. |
| This is used for subregs found inside REG_NOTES. */ |
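| |
| /* For example, (subreg:QI (mem:SI addr) 0) on a hypothetical machine |
| with 4-byte words becomes (mem:QI addr) when little-endian, but |
| (mem:QI (plus addr 3)) when big-endian, since the low-order byte |
| lives at opposite ends of the word. */ |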
| |
| static rtx |
| fixup_memory_subreg (x, insn, uncritical) |
| rtx x; |
| rtx insn; |
| int uncritical; |
| { |
| int offset = SUBREG_WORD (x) * UNITS_PER_WORD; |
| rtx addr = XEXP (SUBREG_REG (x), 0); |
| enum machine_mode mode = GET_MODE (x); |
| rtx saved, result; |
| |
| /* Paradoxical SUBREGs are usually invalid during RTL generation. */ |
| if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) |
| && ! uncritical) |
| abort (); |
| |
| if (BYTES_BIG_ENDIAN) |
| offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))) |
| - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))); |
| addr = plus_constant (addr, offset); |
| if (!flag_force_addr && memory_address_p (mode, addr)) |
| /* Shortcut if no insns need be emitted. */ |
| return change_address (SUBREG_REG (x), mode, addr); |
| start_sequence (); |
| result = change_address (SUBREG_REG (x), mode, addr); |
| emit_insn_before (gen_sequence (), insn); |
| end_sequence (); |
| return result; |
| } |
| |
| /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X. |
| Replace subexpressions of X in place. |
| If X itself is a (SUBREG (MEM ...) ...), return the replacement expression. |
| Otherwise return X, with its contents possibly altered. |
| |
| If any insns must be emitted to compute NEWADDR, put them before INSN. |
| |
| UNCRITICAL is as in fixup_memory_subreg. */ |
| |
| static rtx |
| walk_fixup_memory_subreg (x, insn, uncritical) |
| register rtx x; |
| rtx insn; |
| int uncritical; |
| { |
| register enum rtx_code code; |
| register char *fmt; |
| register int i; |
| |
| if (x == 0) |
| return 0; |
| |
| code = GET_CODE (x); |
| |
| if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM) |
| return fixup_memory_subreg (x, insn, uncritical); |
| |
| /* Nothing special about this RTX; fix its operands. */ |
| |
| fmt = GET_RTX_FORMAT (code); |
| for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
| { |
| if (fmt[i] == 'e') |
| XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical); |
| if (fmt[i] == 'E') |
| { |
| register int j; |
| for (j = 0; j < XVECLEN (x, i); j++) |
| XVECEXP (x, i, j) |
| = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical); |
| } |
| } |
| return x; |
| } |
| |
| /* For each memory ref within X, if it refers to a stack slot |
| with an out of range displacement, put the address in a temp register |
| (emitting new insns before INSN to load these registers) |
| and alter the memory ref to use that register. |
| Replace each such MEM rtx with a copy, to avoid clobberage. */ |
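| |
| /* E.g., if (mem:SI (plus (reg virtual-stack-vars) (const_int 40000))) |
| has a displacement too large to be a valid address, we emit a move |
| of the PLUS into a temporary register before INSN and rewrite the |
| reference as (mem:SI (reg temp)). (The offset is hypothetical.) */ |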
| |
| static rtx |
| fixup_stack_1 (x, insn) |
| rtx x; |
| rtx insn; |
| { |
| register int i; |
| register RTX_CODE code = GET_CODE (x); |
| register char *fmt; |
| |
| if (code == MEM) |
| { |
| register rtx ad = XEXP (x, 0); |
| /* If we have the address of a stack slot but it's not valid |
| (the displacement is too large), compute the sum in a register. */ |
| if (GET_CODE (ad) == PLUS |
| && GET_CODE (XEXP (ad, 0)) == REG |
| && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER |
| && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER) |
| || XEXP (ad, 0) == current_function_internal_arg_pointer) |
| && GET_CODE (XEXP (ad, 1)) == CONST_INT) |
| { |
| rtx temp, seq; |
| if (memory_address_p (GET_MODE (x), ad)) |
| return x; |
| |
| start_sequence (); |
| temp = copy_to_reg (ad); |
| seq = gen_sequence (); |
| end_sequence (); |
| emit_insn_before (seq, insn); |
| return change_address (x, VOIDmode, temp); |
| } |
| return x; |
| } |
| |
| fmt = GET_RTX_FORMAT (code); |
| for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
| { |
| if (fmt[i] == 'e') |
| XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn); |
| if (fmt[i] == 'E') |
| { |
| register int j; |
| for (j = 0; j < XVECLEN (x, i); j++) |
| XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn); |
| } |
| } |
| return x; |
| } |
| |
| /* Optimization: a bit-field instruction whose field |
| happens to be a byte or halfword in memory |
| can be changed to a move instruction. |
| |
| We call here when INSN is an insn to examine or store into a bit-field. |
| BODY is the SET-rtx to be altered. |
| |
| EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0. |
| (Currently this is called only from function.c, and EQUIV_MEM |
| is always 0.) */ |
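| |
| /* For example (hypothetical RTL), a byte-aligned 8-bit extract |
| |
| (set (reg:SI 103) |
| (zero_extract:SI (mem:SI addr) (const_int 8) (const_int 8))) |
| |
| can instead reference (mem:QI (plus addr 1)) directly (assuming a |
| little-endian layout), with a separate zero-extension to SImode |
| emitted when the destination mode differs. */ |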
| |
| static void |
| optimize_bit_field (body, insn, equiv_mem) |
| rtx body; |
| rtx insn; |
| rtx *equiv_mem; |
| { |
| register rtx bitfield; |
| int destflag; |
| rtx seq = 0; |
| enum machine_mode mode; |
| |
| if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT |
| || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT) |
| bitfield = SET_DEST (body), destflag = 1; |
| else |
| bitfield = SET_SRC (body), destflag = 0; |
| |
| /* First check that the field being stored has constant size and position |
| and is in fact a byte or halfword suitably aligned. */ |
| |
| if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT |
| && GET_CODE (XEXP (bitfield, 2)) == CONST_INT |
| && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1)) |
| != BLKmode) |
| && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0) |
| { |
| register rtx memref = 0; |
| |
| /* Now check that the containing word is memory, not a register, |
| and that it is safe to change the machine mode. */ |
| |
| if (GET_CODE (XEXP (bitfield, 0)) == MEM) |
| memref = XEXP (bitfield, 0); |
| else if (GET_CODE (XEXP (bitfield, 0)) == REG |
| && equiv_mem != 0) |
| memref = equiv_mem[REGNO (XEXP (bitfield, 0))]; |
| else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG |
| && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM) |
| memref = SUBREG_REG (XEXP (bitfield, 0)); |
| else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG |
| && equiv_mem != 0 |
| && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG) |
| memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))]; |
| |
| if (memref |
| && ! mode_dependent_address_p (XEXP (memref, 0)) |
| && ! MEM_VOLATILE_P (memref)) |
| { |
| /* Now adjust the address, first for any subreg'ing |
| that we are now getting rid of, |
| and then for which byte of the word is wanted. */ |
| |
| register int offset = INTVAL (XEXP (bitfield, 2)); |
| rtx insns; |
| |
| /* Adjust OFFSET to count bits from low-address byte. */ |
| if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN) |
| offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0))) |
| - offset - INTVAL (XEXP (bitfield, 1))); |
| |
| /* Adjust OFFSET to count bytes from low-address byte. */ |
| offset /= BITS_PER_UNIT; |
| if (GET_CODE (XEXP (bitfield, 0)) == SUBREG) |
| { |
| offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD; |
| if (BYTES_BIG_ENDIAN) |
| offset -= (MIN (UNITS_PER_WORD, |
| GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0)))) |
| - MIN (UNITS_PER_WORD, |
| GET_MODE_SIZE (GET_MODE (memref)))); |
| } |
| |
| start_sequence (); |
| memref = change_address (memref, mode, |
| plus_constant (XEXP (memref, 0), offset)); |
| insns = get_insns (); |
| end_sequence (); |
| emit_insns_before (insns, insn); |
| |
| /* Store this memory reference where |
| we found the bit field reference. */ |
| |
| if (destflag) |
| { |
| validate_change (insn, &SET_DEST (body), memref, 1); |
| if (! CONSTANT_ADDRESS_P (SET_SRC (body))) |
| { |
| rtx src = SET_SRC (body); |
| while (GET_CODE (src) == SUBREG |
| && SUBREG_WORD (src) == 0) |
| src = SUBREG_REG (src); |
| if (GET_MODE (src) != GET_MODE (memref)) |
| src = gen_lowpart (GET_MODE (memref), SET_SRC (body)); |
| validate_change (insn, &SET_SRC (body), src, 1); |
| } |
| else if (GET_MODE (SET_SRC (body)) != VOIDmode |
| && GET_MODE (SET_SRC (body)) != GET_MODE (memref)) |
| /* This shouldn't happen because anything that didn't have |
| one of these modes should have got converted explicitly |
| and then referenced through a subreg. |
| This is so because the original bit-field was |
| handled by agg_mode and so its tree structure had |
| the same mode that memref now has. */ |
| abort (); |
| } |
| else |
| { |
| rtx dest = SET_DEST (body); |
| |
| while (GET_CODE (dest) == SUBREG |
| && SUBREG_WORD (dest) == 0 |
| && (GET_MODE_CLASS (GET_MODE (dest)) |
| == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))) |
| dest = SUBREG_REG (dest); |
| |
| validate_change (insn, &SET_DEST (body), dest, 1); |
| |
| if (GET_MODE (dest) == GET_MODE (memref)) |
| validate_change (insn, &SET_SRC (body), memref, 1); |
| else |
| { |
| /* Convert the mem ref to the destination mode. */ |
| rtx newreg = gen_reg_rtx (GET_MODE (dest)); |
| |
| start_sequence (); |
| convert_move (newreg, memref, |
| GET_CODE (SET_SRC (body)) == ZERO_EXTRACT); |
| seq = get_insns (); |
| end_sequence (); |
| |
| validate_change (insn, &SET_SRC (body), newreg, 1); |
| } |
| } |
| |
| /* See if we can convert this extraction or insertion into |
| a simple move insn. We might not be able to do so if this |
| was, for example, part of a PARALLEL. |
| |
| If we succeed, write out any needed conversions. If we fail, |
| it is hard to guess why we failed, so don't do anything |
| special; just let the optimization be suppressed. */ |
| |
| if (apply_change_group () && seq) |
| emit_insns_before (seq, insn); |
| } |
| } |
| } |
| |
| /* These routines are responsible for converting virtual register references |
| to the actual hard register references once RTL generation is complete. |
| |
| The following four variables are used for communication between the |
| routines. They contain the offsets of the virtual registers from their |
| respective hard registers. */ |
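| |
| /* Thus a reference through virtual_stack_vars_rtx is rewritten as |
| frame_pointer_rtx plus var_offset, one through |
| virtual_outgoing_args_rtx as stack_pointer_rtx plus out_arg_offset, |
| and likewise for the other two pairs. */ |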
| |
| static int in_arg_offset; |
| static int var_offset; |
| static int dynamic_offset; |
| static int out_arg_offset; |
| |
| /* In most machines, the stack pointer register is equivalent to the bottom |
| of the stack. */ |
| |
| #ifndef STACK_POINTER_OFFSET |
| #define STACK_POINTER_OFFSET 0 |
| #endif |
| |
| /* If not defined, pick an appropriate default for the offset of dynamically |
| allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS, |
| REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */ |
| |
| #ifndef STACK_DYNAMIC_OFFSET |
| |
| #ifdef ACCUMULATE_OUTGOING_ARGS |
| /* The bottom of the stack points to the actual arguments. If |
| REG_PARM_STACK_SPACE is defined, this includes the space for the register |
| parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined, |
| stack space for register parameters is not pushed by the caller, but |
| is rather part of the fixed stack areas and hence not included in |
| `current_function_outgoing_args_size'. Nevertheless, we must allow |
| for it when allocating dynamic stack objects. */ |
| |
| #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE) |
| #define STACK_DYNAMIC_OFFSET(FNDECL) \ |
| (current_function_outgoing_args_size \ |
| + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET)) |
| |
| #else |
| #define STACK_DYNAMIC_OFFSET(FNDECL) \ |
| (current_function_outgoing_args_size + (STACK_POINTER_OFFSET)) |
| #endif |
| |
| #else |
| #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET |
| #endif |
| #endif |
| |
| /* Pass through the INSNS of function FNDECL and convert virtual register |
| references to hard register references. */ |
| |
| void |
| instantiate_virtual_regs (fndecl, insns) |
| tree fndecl; |
| rtx insns; |
| { |
| rtx insn; |
| |
| /* Compute the offsets to use for this function. */ |
| in_arg_offset = FIRST_PARM_OFFSET (fndecl); |
| var_offset = STARTING_FRAME_OFFSET; |
| dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl); |
| out_arg_offset = STACK_POINTER_OFFSET; |
| |
| /* Scan all variables and parameters of this function. For each that is |
| in memory, instantiate all virtual registers if the result is a valid |
| address. If not, we do it later. That will handle most uses of virtual |
| regs on many machines. */ |
| instantiate_decls (fndecl, 1); |
| |
| /* Initialize recognition, indicating that volatile is OK. */ |
| init_recog (); |
| |
| /* Scan through all the insns, instantiating every virtual register still |
| present. */ |
| for (insn = insns; insn; insn = NEXT_INSN (insn)) |
| if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN |
| || GET_CODE (insn) == CALL_INSN) |
| { |
| instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1); |
| instantiate_virtual_regs_1 (®_NOTES (insn), NULL_RTX, 0); |
| } |
| |
| /* Now instantiate the remaining register equivalences for debugging info. |
| These will not be valid addresses. */ |
| instantiate_decls (fndecl, 0); |
| |
| /* Indicate that, from now on, assign_stack_local should use |
| frame_pointer_rtx. */ |
| virtuals_instantiated = 1; |
| } |
| |
| /* Scan all decls in FNDECL (both variables and parameters) and instantiate |
| all virtual registers in their DECL_RTL's. |
| |
| If VALID_ONLY, do this only if the resulting address is still valid. |
| Otherwise, always do it. */ |
| |
| static void |
| instantiate_decls (fndecl, valid_only) |
| tree fndecl; |
| int valid_only; |
| { |
| tree decl; |
| |
| if (DECL_SAVED_INSNS (fndecl)) |
| /* When compiling an inline function, the obstack used for |
| rtl allocation is the maybepermanent_obstack. Calling |
| `resume_temporary_allocation' switches us back to that |
| obstack while we process this function's parameters. */ |
| resume_temporary_allocation (); |
| |
| /* Process all parameters of the function. */ |
| for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl)) |
| { |
| int size = int_size_in_bytes (TREE_TYPE (decl)); |
| instantiate_decl (DECL_RTL (decl), size, valid_only); |
| |
| /* If the parameter was promoted, then the incoming RTL mode may be |
| larger than the declared type size. We must use the larger of |
| the two sizes. */ |
| size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size); |
| instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only); |
| } |
| |
| /* Now process all variables defined in the function or its subblocks. */ |
| instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only); |
| |
| if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl)) |
| { |
| /* Save all rtl allocated for this function by raising the |
| high-water mark on the maybepermanent_obstack. */ |
| preserve_data (); |
| /* All further rtl allocation is now done in the current_obstack. */ |
| rtl_in_current_obstack (); |
| } |
| } |
| |
| /* Subroutine of instantiate_decls: Process all decls in the given |
| BLOCK node and all its subblocks. */ |
| |
| static void |
| instantiate_decls_1 (let, valid_only) |
| tree let; |
| int valid_only; |
| { |
| tree t; |
| |
| for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t)) |
| instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)), |
| valid_only); |
| |
| /* Process all subblocks. */ |
| for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t)) |
| instantiate_decls_1 (t, valid_only); |
| } |
| |
| /* Subroutine of the preceding procedures: Given RTL representing a |
| decl and the size of the object, do any instantiation required. |
| |
| If VALID_ONLY is non-zero, it means that the RTL should only be |
| changed if the new address is valid. */ |
| |
| static void |
| instantiate_decl (x, size, valid_only) |
| rtx x; |
| int size; |
| int valid_only; |
| { |
| enum machine_mode mode; |
| rtx addr; |
| |
| /* If this is not a MEM, no need to do anything. Similarly if the |
| address is a constant or a register that is not a virtual register. */ |
| |
| if (x == 0 || GET_CODE (x) != MEM) |
| return; |
| |
| addr = XEXP (x, 0); |
| if (CONSTANT_P (addr) |
| || (GET_CODE (addr) == REG |
| && (REGNO (addr) < FIRST_VIRTUAL_REGISTER |
| || REGNO (addr) > LAST_VIRTUAL_REGISTER))) |
| return; |
| |
| /* If we should only do this if the address is valid, copy the address. |
| We need to do this so we can undo any changes that might make the |
| address invalid. This copy is unfortunate, but probably can't be |
| avoided. */ |
| |
| if (valid_only) |
| addr = copy_rtx (addr); |
| |
| instantiate_virtual_regs_1 (&addr, NULL_RTX, 0); |
| |
| if (valid_only) |
| { |
| /* Now verify that the resulting address is valid for every integer or |
| floating-point mode up to and including SIZE bytes long. We do this |
| since the object might be accessed in any mode and frame addresses |
| are shared. */ |
| |
| for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); |
| mode != VOIDmode && GET_MODE_SIZE (mode) <= size; |
| mode = GET_MODE_WIDER_MODE (mode)) |
| if (! memory_address_p (mode, addr)) |
| return; |
| |
| for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); |
| mode != VOIDmode && GET_MODE_SIZE (mode) <= size; |
| mode = GET_MODE_WIDER_MODE (mode)) |
| if (! memory_address_p (mode, addr)) |
| return; |
| } |
| |
| /* Put back the address now that we have updated it and we either know |
| it is valid or we don't care whether it is valid. */ |
| |
| XEXP (x, 0) = addr; |
| } |
| |
| /* Given a pointer to a piece of rtx and an optional pointer to the |
| containing object, instantiate any virtual registers present in it. |
| |
| If EXTRA_INSNS is nonzero, we always do the replacement and generate |
| any extra insns before OBJECT. If it is zero, we do nothing if the |
| replacement is not valid. |
| |
| Return 1 if we either had nothing to do or if we were able to do the |
| needed replacement. Return 0 otherwise; we only return zero if |
| EXTRA_INSNS is zero. |
| |
| We first try some simple transformations to avoid the creation of extra |
| pseudos. */ |
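| |
| /* For example, with a var_offset of 16 (a hypothetical value), |
| (plus:SI (reg virtual-stack-vars) (const_int 4)) is rewritten in |
| place as (plus:SI (reg fp) (const_int 20)); only when that fails to |
| make a valid insn do we fall back on new pseudos and extra insns. */ |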
| |
| static int |
| instantiate_virtual_regs_1 (loc, object, extra_insns) |
| rtx *loc; |
| rtx object; |
| int extra_insns; |
| { |
| rtx x; |
| RTX_CODE code; |
| rtx new = 0; |
| int offset; |
| rtx temp; |
| rtx seq; |
| int i, j; |
| char *fmt; |
| |
| /* Re-start here to avoid recursion in common cases. */ |
| restart: |
| |
| x = *loc; |
| if (x == 0) |
| return 1; |
| |
| code = GET_CODE (x); |
| |
| /* Check for some special cases. */ |
| switch (code) |
| { |
| case CONST_INT: |
| case CONST_DOUBLE: |
| case CONST: |
| case SYMBOL_REF: |
| case CODE_LABEL: |
| case PC: |
| case CC0: |
| case ASM_INPUT: |
| case ADDR_VEC: |
| case ADDR_DIFF_VEC: |
| case RETURN: |
| return 1; |
| |
| case SET: |
| /* We are allowed to set the virtual registers. This means |
| that the actual register should receive the source minus the |
| appropriate offset. This is used, for example, in the handling |
| of non-local gotos. */ |
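| /* E.g., (set (reg virtual-stack-vars) (reg:SI 104)) becomes a set |
| of the frame pointer to (reg:SI 104) minus var_offset, so that a |
| later use of the virtual register, which adds var_offset back in, |
| recovers the stored value. (Pseudo 104 is hypothetical.) */ |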
| if (SET_DEST (x) == virtual_incoming_args_rtx) |
| new = arg_pointer_rtx, offset = - in_arg_offset; |
| else if (SET_DEST (x) == virtual_stack_vars_rtx) |
| new = frame_pointer_rtx, offset = - var_offset; |
| else if (SET_DEST (x) == virtual_stack_dynamic_rtx) |
| new = stack_pointer_rtx, offset = - dynamic_offset; |
| else if (SET_DEST (x) == virtual_outgoing_args_rtx) |
| new = stack_pointer_rtx, offset = - out_arg_offset; |
| |
| if (new) |
| { |
| /* The only valid sources here are PLUS or REG. Just do |
| the simplest possible thing to handle them. */ |
| if (GET_CODE (SET_SRC (x)) != REG |
| && GET_CODE (SET_SRC (x)) != PLUS) |
| abort (); |
| |
| start_sequence (); |
| if (GET_CODE (SET_SRC (x)) != REG) |
| temp = force_operand (SET_SRC (x), NULL_RTX); |
| else |
| temp = SET_SRC (x); |
| temp = force_operand (plus_constant (temp, offset), NULL_RTX); |
| seq = get_insns (); |
| end_sequence (); |
| |
| emit_insns_before (seq, object); |
| SET_DEST (x) = new; |
| |
| if (!validate_change (object, &SET_SRC (x), temp, 0) |
| || ! extra_insns) |
| abort (); |
| |
| return 1; |
| } |
| |
| instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns); |
| loc = &SET_SRC (x); |
| goto restart; |
| |
| case PLUS: |
| /* Handle special case of virtual register plus constant. */ |
| if (CONSTANT_P (XEXP (x, 1))) |
| { |
| rtx old, new_offset; |
| |
| /* Check for (plus (plus VIRT foo) (const_int)) first. */ |
| if (GET_CODE (XEXP (x, 0)) == PLUS) |
| { |
| rtx inner = XEXP (XEXP (x, 0), 0); |
| |
| if (inner == virtual_incoming_args_rtx) |
| new = arg_pointer_rtx, offset = in_arg_offset; |
| else if (inner == virtual_stack_vars_rtx) |
| new = frame_pointer_rtx, offset = var_offset; |
| else if (inner == virtual_stack_dynamic_rtx) |
| new = stack_pointer_rtx, offset = dynamic_offset; |
| else if (inner == virtual_outgoing_args_rtx) |
| new = stack_pointer_rtx, offset = out_arg_offset; |
| else |
| { |
| loc = &XEXP (x, 0); |
| goto restart; |
| } |
| |
| instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object, |
| extra_insns); |
| new = gen_rtx (PLUS, Pmode, new, XEXP (XEXP (x, 0), 1)); |
| } |
| |
| else if (XEXP (x, 0) == virtual_incoming_args_rtx) |
| new = arg_pointer_rtx, offset = in_arg_offset; |
| else if (XEXP (x, 0) == virtual_stack_vars_rtx) |
| new = frame_pointer_rtx, offset = var_offset; |
| else if (XEXP (x, 0) == virtual_stack_dynamic_rtx) |
| new = stack_pointer_rtx, offset = dynamic_offset; |
| else if (XEXP (x, 0) == virtual_outgoing_args_rtx) |
| new = stack_pointer_rtx, offset = out_arg_offset; |
| else |
| { |
| /* We know the second operand is a constant. Unless the |
| first operand is a REG (which has already been checked), |
| it needs to be checked. */ |
| if (GET_CODE (XEXP (x, 0)) != REG) |
| { |
| loc = &XEXP (x, 0); |
| goto restart; |
| } |
| return 1; |
| } |
| |
| new_offset = plus_constant (XEXP (x, 1), offset); |
| |
| /* If the new constant is zero, try to replace the sum with just |
| the register. */ |
| if (new_offset == const0_rtx |
| && validate_change (object, loc, new, 0)) |
| return 1; |
| |
| /* Next try to replace the register and new offset. |
| There are two changes to validate here and we can't assume that |
| when the old offset equals the new one, just changing the register |
| will yield a valid insn. In the interests of a little efficiency, |
| however, we only call validate_change once (we don't queue up the |
| changes and then call apply_change_group). */ |
| |
| old = XEXP (x, 0); |
| if (offset == 0 |
| ? ! validate_change (object, &XEXP (x, 0), new, 0) |
| : (XEXP (x, 0) = new, |
| ! validate_change (object, &XEXP (x, 1), new_offset, 0))) |
| { |
| if (! extra_insns) |
| { |
| XEXP (x, 0) = old; |
| return 0; |
| } |
| |
| /* Otherwise copy the new constant into a register and replace |
| the constant with that register. */ |
| temp = gen_reg_rtx (Pmode); |
| XEXP (x, 0) = new; |
| if (validate_change (object, &XEXP (x, 1), temp, 0)) |
| emit_insn_before (gen_move_insn (temp, new_offset), object); |
| else |
| { |
| /* If that didn't work, replace this expression with a |
| register containing the sum. */ |
| |
| XEXP (x, 0) = old; |
| new = gen_rtx (PLUS, Pmode, new, new_offset); |
| |
| start_sequence (); |
| temp = force_operand (new, NULL_RTX); |
| seq = get_insns (); |
| end_sequence (); |
| |
| emit_insns_before (seq, object); |
| if (! validate_change (object, loc, temp, 0) |
| && ! validate_replace_rtx (x, temp, object)) |
| abort (); |
| } |
| } |
| |
| return 1; |
| } |
| |
| /* Fall through to generic two-operand expression case. */ |
| case EXPR_LIST: |
| case CALL: |
| case COMPARE: |
| case MINUS: |
| case MULT: |
| case DIV: case UDIV: |
| case MOD: case UMOD: |
| case AND: case IOR: case XOR: |
| case ROTATERT: case ROTATE: |
| case ASHIFTRT: case LSHIFTRT: case ASHIFT: |
| case NE: case EQ: |
| case GE: case GT: case GEU: case GTU: |
| case LE: case LT: case LEU: case LTU: |
| if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1))) |
| instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns); |
| loc = &XEXP (x, 0); |
| goto restart; |
| |
| case MEM: |
| /* Most cases of MEM that convert to valid addresses have already been |
| handled by our scan of decls. The only special handling we |
| need here is to make a copy of the rtx to ensure it isn't being |
| shared if we have to change it to a pseudo. |
| |
| If the rtx is a simple reference to an address via a virtual register, |
| it can potentially be shared. In such cases, first try to make it |
| a valid address, which can also be shared. Otherwise, copy it and |
| proceed normally. |
| |
| First check for common cases that need no processing. These are |
| usually due to instantiation already being done on a previous instance |
| of a shared rtx. */ |
| |
| temp = XEXP (x, 0); |
| if (CONSTANT_ADDRESS_P (temp) |
| #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM |
| || temp == arg_pointer_rtx |
| #endif |
| #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM |
| || temp == hard_frame_pointer_rtx |
| #endif |
| || temp == frame_pointer_rtx) |
| return 1; |
| |
| if (GET_CODE (temp) == PLUS |
| && CONSTANT_ADDRESS_P (XEXP (temp, 1)) |
| && (XEXP (temp, 0) == frame_pointer_rtx |
| #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM |
| || XEXP (temp, 0) == hard_frame_pointer_rtx |
| #endif |
| #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM |
| || XEXP (temp, 0) == arg_pointer_rtx |
| #endif |
| )) |
| return 1; |
| |
| if (temp == virtual_stack_vars_rtx |
| || temp == virtual_incoming_args_rtx |
| || (GET_CODE (temp) == PLUS |
| && CONSTANT_ADDRESS_P (XEXP (temp, 1)) |
| && (XEXP (temp, 0) == virtual_stack_vars_rtx |
| || XEXP (temp, 0) == virtual_incoming_args_rtx))) |
| { |
| /* This MEM may be shared. If the substitution can be done without |
| the need to generate new pseudos, we want to do it in place |
| so all copies of the shared rtx benefit. The call below will |
| only make substitutions if the resulting address is still |
| valid. |
| |
| Note that we cannot pass X as the object in the recursive call |
| since the insn being processed may not allow all valid |
| addresses. However, if we were not passed an object, we can |
| only modify X without copying it if X will have a valid |
| address. |
| |
| ??? Also note that this can still lose if OBJECT is an insn that |
| has fewer restrictions on an address than some other insn. |
| In that case, we will modify the shared address. This case |
| doesn't seem very likely, though. One case where this could |
| happen is in the case of a USE or CLOBBER reference, but we |
| take care of that below. */ |
| |
| if (instantiate_virtual_regs_1 (&XEXP (x, 0), |
| object ? object : x, 0)) |
| return 1; |
| |
| /* Otherwise make a copy and process that copy. We copy the entire |
| RTL expression since it might be a PLUS which could also be |
| shared. */ |
| *loc = x = copy_rtx (x); |
| } |
| |
| /* Fall through to generic unary operation case. */ |
| case SUBREG: |
| case STRICT_LOW_PART: |
| case NEG: case NOT: |
| case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC: |
| case SIGN_EXTEND: case ZERO_EXTEND: |
| case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE: |
| case FLOAT: case FIX: |
| case UNSIGNED_FIX: case UNSIGNED_FLOAT: |
| case ABS: |
| case SQRT: |
| case FFS: |
| /* These cases either have just one operand or we know that we need not |
| check the rest of the operands. */ |
| loc = &XEXP (x, 0); |
| goto restart; |
| |
| case USE: |
| case CLOBBER: |
| /* If the operand is a MEM, see if the change yields a valid MEM. If |
| not, go ahead and make the invalid change, but apply it to a copy. |
| For a REG, just make the recursive call, since there's no chance of |
| a problem. */ |
| |
| if ((GET_CODE (XEXP (x, 0)) == MEM |
| && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0), |
| 0)) |
| || (GET_CODE (XEXP (x, 0)) == REG |
| && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0))) |
| return 1; |
| |
| XEXP (x, 0) = copy_rtx (XEXP (x, 0)); |
| loc = &XEXP (x, 0); |
| goto restart; |
| |
| case REG: |
| /* Try to replace with a PLUS. If that doesn't work, compute the sum |
| in front of this insn and substitute the temporary. */ |
| if (x == virtual_incoming_args_rtx) |
| new = arg_pointer_rtx, offset = in_arg_offset; |
| else if (x == virtual_stack_vars_rtx) |
| new = frame_pointer_rtx, offset = var_offset; |
| else if (x == virtual_stack_dynamic_rtx) |
| new = stack_pointer_rtx, offset = dynamic_offset; |
| else if (x == virtual_outgoing_args_rtx) |
| new = stack_pointer_rtx, offset = out_arg_offset; |
| |
| if (new) |
| { |
| temp = plus_constant (new, offset); |
| if (!validate_change (object, loc, temp, 0)) |
| { |
| if (! extra_insns) |
| return 0; |
| |
| start_sequence (); |
| temp = force_operand (temp, NULL_RTX); |
| seq = get_insns (); |
| end_sequence (); |
| |
| emit_insns_before (seq, object); |
| if (! validate_change (object, loc, temp, 0) |
| && ! validate_replace_rtx (x, temp, object)) |
| abort (); |
| } |
| } |
| |
| return 1; |
| } |
| |
| /* Scan all subexpressions. */ |
| fmt = GET_RTX_FORMAT (code); |
| for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++) |
| if (*fmt == 'e') |
| { |
| if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns)) |
| return 0; |
| } |
| else if (*fmt == 'E') |
| for (j = 0; j < XVECLEN (x, i); j++) |
| if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object, |
| extra_insns)) |
| return 0; |
| |
| return 1; |
| } |
| |
| /* Optimization: assuming this function does not receive nonlocal gotos, |
| delete the handlers for such, as well as the insns to establish |
| and disestablish them. */ |
| |
| static void |
| delete_handlers () |
| { |
| rtx insn; |
| for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) |
| { |
| /* Delete the handler by turning off the flag that would |
| prevent jump_optimize from deleting it. |
| Also permit deletion of the nonlocal labels themselves |
| if nothing local refers to them. */ |
| if (GET_CODE (insn) == CODE_LABEL) |
| { |
| tree t, last_t; |
| |
| LABEL_PRESERVE_P (insn) = 0; |
| |
| /* Remove it from the nonlocal_label list, to avoid confusing |
| flow. */ |
| for (t = nonlocal_labels, last_t = 0; t; |
| last_t = t, t = TREE_CHAIN (t)) |
| if (DECL_RTL (TREE_VALUE (t)) == insn) |
| break; |
| if (t) |
| { |
| if (! last_t) |
| nonlocal_labels = TREE_CHAIN (nonlocal_labels); |
| else |
| TREE_CHAIN (last_t) = TREE_CHAIN (t); |
| } |
| } |
| if (GET_CODE (insn) == INSN |
| && ((nonlocal_goto_handler_slot != 0 |
| && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn))) |
| || (nonlocal_goto_stack_level != 0 |
| && reg_mentioned_p (nonlocal_goto_stack_level, |
| PATTERN (insn))))) |
| delete_insn (insn); |
| } |
| } |
| |
| /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels |
| of the current function. */ |
| |
| rtx |
| nonlocal_label_rtx_list () |
|