| /* Reload pseudo regs into hard regs for insns that require hard regs. |
| Copyright (C) 1987, 88, 89, 92-6, 1997 Free Software Foundation, Inc. |
| |
| This file is part of GNU CC. |
| |
| GNU CC is free software; you can redistribute it and/or modify |
| it under the terms of the GNU General Public License as published by |
| the Free Software Foundation; either version 2, or (at your option) |
| any later version. |
| |
| GNU CC is distributed in the hope that it will be useful, |
| but WITHOUT ANY WARRANTY; without even the implied warranty of |
| MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
| GNU General Public License for more details. |
| |
| You should have received a copy of the GNU General Public License |
| along with GNU CC; see the file COPYING. If not, write to |
| the Free Software Foundation, 59 Temple Place - Suite 330, |
| Boston, MA 02111-1307, USA. */ |
| |
| |
| #include <stdio.h> |
| #include "config.h" |
| #include "rtl.h" |
| #include "obstack.h" |
| #include "insn-config.h" |
| #include "insn-flags.h" |
| #include "insn-codes.h" |
| #include "flags.h" |
| #include "expr.h" |
| #include "regs.h" |
| #include "hard-reg-set.h" |
| #include "reload.h" |
| #include "recog.h" |
| #include "basic-block.h" |
| #include "output.h" |
| #include "real.h" |
| |
| /* This file contains the reload pass of the compiler, which is |
| run after register allocation has been done. It checks that |
| each insn is valid (operands required to be in registers really |
| are in registers of the proper class) and fixes up invalid ones |
| by copying values temporarily into registers for the insns |
| that need them. |
| |
| The results of register allocation are described by the vector |
| reg_renumber; the insns still contain pseudo regs, but reg_renumber |
| can be used to find which hard reg, if any, a pseudo reg is in. |
| |
| The technique we always use is to free up a few hard regs that are |
| called ``reload regs'', and for each place where a pseudo reg |
| must be in a hard reg, copy it temporarily into one of the reload regs. |
| |
| All the pseudos that were formerly allocated to the hard regs that |
| are now in use as reload regs must be ``spilled''. This means |
| that they go to other hard regs, or to stack slots if no other |
| available hard regs can be found. Spilling can invalidate more |
| insns, requiring additional reloads, so we must keep checking |
| until the process stabilizes. |
| |
| For machines with different classes of registers, we must keep track |
| of the register class needed for each reload, and make sure that |
| we allocate enough reload registers of each class. |
| |
| The file reload.c contains the code that checks one insn for |
| validity and reports the reloads that it needs. This file |
| is in charge of scanning the entire rtl code, accumulating the |
| reload needs, spilling, assigning reload registers to use for |
| fixing up each insn, and generating the new insns to copy values |
| into the reload registers. */ |
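| |
| /* As a rough illustration (the register numbers here are made up): if |
| pseudo 100 was spilled to a stack slot, pseudo 101 got hard reg 1, and |
| hard reg 0 is free to serve as a reload reg, then an insn such as |
| (set (reg 100) (plus (reg 100) (reg 101))) |
| is fixed up, schematically, as |
| (set (reg 0) (mem (plus (reg fp) (const_int -4)))) ; input reload |
| (set (reg 0) (plus (reg 0) (reg 1))) |
| (set (mem (plus (reg fp) (const_int -4))) (reg 0)) ; output reload |
| with the stack-slot offset shown here chosen arbitrarily. */ |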
| |
| |
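| /* Fallback cost estimates, used when the target does not define these |
| macros itself: a register-register move is assumed to cost 2 and a |
| memory reference 4. */ |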
| #ifndef REGISTER_MOVE_COST |
| #define REGISTER_MOVE_COST(x, y) 2 |
| #endif |
| |
| #ifndef MEMORY_MOVE_COST |
| #define MEMORY_MOVE_COST(x) 4 |
| #endif |
| |
| /* During reload_as_needed, element N contains a REG rtx for the hard reg |
| into which reg N has been reloaded (perhaps for a previous insn). */ |
| static rtx *reg_last_reload_reg; |
| |
| /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn |
| for an output reload that stores into reg N. */ |
| static char *reg_has_output_reload; |
| |
| /* Indicates which hard regs are reload-registers for an output reload |
| in the current insn. */ |
| static HARD_REG_SET reg_is_output_reload; |
| |
| /* Element N is the constant value to which pseudo reg N is equivalent, |
| or zero if pseudo reg N is not equivalent to a constant. |
| find_reloads looks at this in order to replace pseudo reg N |
| with the constant it stands for. */ |
| rtx *reg_equiv_constant; |
| |
| /* Element N is a memory location to which pseudo reg N is equivalent, |
| prior to any register elimination (such as frame pointer to stack |
| pointer). Depending on whether or not it is a valid address, this value |
| is transferred to either reg_equiv_address or reg_equiv_mem. */ |
| rtx *reg_equiv_memory_loc; |
| |
| /* Element N is the address of the stack slot to which pseudo reg N is |
| equivalent. This is used when the address is not valid as a memory |
| address (because its displacement is too big for the machine). */ |
| rtx *reg_equiv_address; |
| |
| /* Element N is the memory slot to which pseudo reg N is equivalent, |
| or zero if pseudo reg N is not equivalent to a memory slot. */ |
| rtx *reg_equiv_mem; |
| |
| /* Widest width in which each pseudo reg is referred to (via subreg). */ |
| static int *reg_max_ref_width; |
| |
| /* Element N is the insn that initialized reg N from its equivalent |
| constant or memory slot. */ |
| static rtx *reg_equiv_init; |
| |
| /* During reload_as_needed, element N contains the last pseudo regno |
| reloaded into the Nth reload register. This vector is in parallel |
| with spill_regs. If that pseudo reg occupied more than one register, |
| reg_reloaded_contents points to that pseudo for each spill register in |
| use; all of these must remain set for an inheritance to occur. */ |
| static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER]; |
| |
| /* During reload_as_needed, element N contains the insn for which |
| the Nth reload register was last used. This vector is in parallel |
| with spill_regs, and its contents are significant only when |
| reg_reloaded_contents is significant. */ |
| static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER]; |
| |
| /* Number of spill-regs so far; number of valid elements of spill_regs. */ |
| static int n_spills; |
| |
| /* In parallel with spill_regs, contains REG rtx's for those regs. |
| Holds the last rtx used for any given reg, or 0 if it has never |
| been used for spilling yet. This rtx is reused, provided it has |
| the proper mode. */ |
| static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER]; |
| |
| /* In parallel with spill_regs, contains nonzero for a spill reg |
| that was stored after the last time it was used. |
| The precise value is the insn generated to do the store. */ |
| static rtx spill_reg_store[FIRST_PSEUDO_REGISTER]; |
| |
| /* This table is the inverse mapping of spill_regs: |
| indexed by hard reg number, |
| it contains the position of that reg in spill_regs, |
| or -1 for something that is not in spill_regs. */ |
| static short spill_reg_order[FIRST_PSEUDO_REGISTER]; |
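| /* (Hence spill_reg_order[spill_regs[I]] == I for 0 <= I < n_spills.) */ |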
| |
| /* This reg set indicates registers that may not be used for retrying global |
| allocation. The registers that may not be used include all spill registers |
| and the frame pointer (if we are using one). */ |
| HARD_REG_SET forbidden_regs; |
| |
| /* This reg set indicates registers that are not good for spill registers. |
| They will not be used to complete groups of spill registers. This includes |
| all fixed registers, registers that may be eliminated, and, if |
| SMALL_REGISTER_CLASSES is zero, registers explicitly used in the rtl. |
| |
| (spill_reg_order prevents these registers from being used to start a |
| group.) */ |
| static HARD_REG_SET bad_spill_regs; |
| |
| /* Describes order of use of registers for reloading |
| of spilled pseudo-registers. `n_spills' is the number of |
| elements that are actually valid; new ones are added at the end. */ |
| static short spill_regs[FIRST_PSEUDO_REGISTER]; |
| |
| /* This reg set indicates those registers that have been used as spill |
| registers. This information is used in reorg.c to help figure out |
| what registers are live at any point. It is assumed that all spill_regs |
| are dead at every CODE_LABEL. */ |
| |
| HARD_REG_SET used_spill_regs; |
| |
| /* Index of last register assigned as a spill register. We allocate in |
| a round-robin fashion. */ |
| |
| static int last_spill_reg; |
| |
| /* Describes order of preference for putting regs into spill_regs. |
| Contains the numbers of all the hard regs, in order most preferred first. |
| This order is different for each function. |
| It is set up by order_regs_for_reload. |
| Empty elements at the end contain -1. */ |
| static short potential_reload_regs[FIRST_PSEUDO_REGISTER]; |
| |
| /* 1 for a hard register that appears explicitly in the rtl |
| (for example, function value registers, special registers |
| used by insns, structure value pointer registers). */ |
| static char regs_explicitly_used[FIRST_PSEUDO_REGISTER]; |
| |
| /* Indicates if a register was counted against the need for |
| groups. 0 means it can count against max_nongroups instead. |
| static HARD_REG_SET counted_for_groups; |
| |
| /* Indicates if a register was counted against the need for |
| non-groups. 0 means it can become part of a new group. |
| During choose_reload_regs, 1 here means don't use this reg |
| as part of a group, even if it seems to be otherwise ok. */ |
| static HARD_REG_SET counted_for_nongroups; |
| |
| /* Indexed by pseudo reg number N, |
| nonzero means we may not delete stores into the real (memory) home of |
| pseudo N. This is set if we already substituted a memory equivalent |
| in some uses, which happens when we have to eliminate the frame |
| pointer from it. */ |
| static char *cannot_omit_stores; |
| |
| /* Nonzero if indirect addressing is supported on the machine; this means |
| that spilling (REG n) does not require reloading it into a register in |
| order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The |
| value indicates the level of indirect addressing supported, e.g., two |
| means that (MEM (MEM (REG n))) is also valid if (REG n) does not get |
| a hard register. */ |
| |
| static char spill_indirect_levels; |
| |
| /* Nonzero if indirect addressing is supported when the innermost MEM is |
| of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to |
| which these are valid is the same as spill_indirect_levels, above. */ |
| |
| char indirect_symref_ok; |
| |
| /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */ |
| |
| char double_reg_address_ok; |
| |
| /* Record the stack slot for each spilled hard register. */ |
| |
| static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER]; |
| |
| /* Width allocated so far for that stack slot. */ |
| |
| static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER]; |
| |
| /* Indexed by register class and basic block number, nonzero if there is |
| any need for a spill register of that class in that basic block. |
| The pointer is 0 if we did stupid allocation and don't know |
| the structure of basic blocks. */ |
| |
| char *basic_block_needs[N_REG_CLASSES]; |
| |
| /* First uid used by insns created by reload in this function. |
| Used in find_equiv_reg. */ |
| int reload_first_uid; |
| |
| /* Flag set by local-alloc or global-alloc if anything is live in |
| a call-clobbered reg across calls. */ |
| |
| int caller_save_needed; |
| |
| /* The register class to use for a base register when reloading an |
| address. This is normally BASE_REG_CLASS, but it may be different |
| when using SMALL_REGISTER_CLASSES and passing parameters in |
| registers. */ |
| enum reg_class reload_address_base_reg_class; |
| |
| /* The register class to use for an index register when reloading an |
| address. This is normally INDEX_REG_CLASS, but it may be different |
| when using SMALL_REGISTER_CLASSES and passing parameters in |
| registers. */ |
| enum reg_class reload_address_index_reg_class; |
| |
| /* Set to 1 while reload_as_needed is operating. |
| Required by some machines to handle any generated moves differently. */ |
| |
| int reload_in_progress = 0; |
| |
| /* These arrays record the insn_code of insns that may be needed to |
| perform input and output reloads of special objects. They provide a |
| place to pass a scratch register. */ |
| |
| enum insn_code reload_in_optab[NUM_MACHINE_MODES]; |
| enum insn_code reload_out_optab[NUM_MACHINE_MODES]; |
| |
| /* This obstack is used for allocation of rtl during register elimination. |
| The allocated storage can be freed once find_reloads has processed the |
| insn. */ |
| |
| struct obstack reload_obstack; |
| char *reload_firstobj; |
| |
| #define obstack_chunk_alloc xmalloc |
| #define obstack_chunk_free free |
| |
| /* List of labels that must never be deleted. */ |
| extern rtx forced_labels; |
| |
| /* Allocation number table from global register allocation. */ |
| extern int *reg_allocno; |
| |
| /* This structure is used to record information about register eliminations. |
| Each array entry describes one possible way of eliminating a register |
| in favor of another. If there is more than one way of eliminating a |
| particular register, the most preferred should be specified first. */ |
| |
| static struct elim_table |
| { |
| int from; /* Register number to be eliminated. */ |
| int to; /* Register number used as replacement. */ |
| int initial_offset; /* Initial difference between values. */ |
| int can_eliminate; /* Non-zero if this elimination can be done. */ |
| int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over |
| insns made by reload. */ |
| int offset; /* Current offset between the two regs. */ |
| int max_offset; /* Maximum offset between the two regs. */ |
| int previous_offset; /* Offset at end of previous insn. */ |
| int ref_outside_mem; /* "to" has been referenced outside a MEM. */ |
| rtx from_rtx; /* REG rtx for the register to be eliminated. |
| We cannot simply compare the number since |
| we might then spuriously replace a hard |
| register corresponding to a pseudo |
| assigned to the reg to be eliminated. */ |
| rtx to_rtx; /* REG rtx for the replacement. */ |
| } reg_eliminate[] = |
| |
| /* If a set of eliminable registers was specified, define the table from it. |
| Otherwise, default to the normal case of the frame pointer being |
| replaced by the stack pointer. */ |
| |
| #ifdef ELIMINABLE_REGS |
| ELIMINABLE_REGS; |
| #else |
| {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}}; |
| #endif |
| |
| #define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0]) |
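| |
| /* For example, a target that can eliminate the argument pointer and the |
| frame pointer might define ELIMINABLE_REGS as something like |
| #define ELIMINABLE_REGS \ |
| {{ ARG_POINTER_REGNUM, STACK_POINTER_REGNUM}, \ |
| { ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM}, \ |
| { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}} |
| listing the most preferred elimination for each register first. */ |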
| |
| /* Record the number of pending eliminations that have an offset not equal |
| to their initial offset. If non-zero, we use a new copy of each |
| replacement result in any insns encountered. */ |
| static int num_not_at_initial_offset; |
| |
| /* Count the number of registers that we may be able to eliminate. */ |
| static int num_eliminable; |
| |
| /* For each label, we record the offset of each elimination. If we reach |
| a label by more than one path and an offset differs, we cannot do the |
| elimination. This information is indexed by the number of the label. |
| The first table is an array of flags that records whether we have yet |
| encountered a label and the second table is an array of arrays, one |
| entry in the latter array for each elimination. */ |
| |
| static char *offsets_known_at; |
| static int (*offsets_at)[NUM_ELIMINABLE_REGS]; |
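| /* (That is, offsets_known_at[L] is nonzero once label number L has been |
| seen, and offsets_at[L][E] is then the offset of elimination E at it.) */ |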
| |
| /* Number of labels in the current function. */ |
| |
| static int num_labels; |
| |
| struct hard_reg_n_uses { int regno; int uses; }; |
| |
| static int possible_group_p PROTO((int, int *)); |
| static void count_possible_groups PROTO((int *, enum machine_mode *, |
| int *, int)); |
| static int modes_equiv_for_class_p PROTO((enum machine_mode, |
| enum machine_mode, |
| enum reg_class)); |
| static void spill_failure PROTO((rtx)); |
| static int new_spill_reg PROTO((int, int, int *, int *, int, |
| FILE *)); |
| static void delete_dead_insn PROTO((rtx)); |
| static void alter_reg PROTO((int, int)); |
| static void mark_scratch_live PROTO((rtx)); |
| static void set_label_offsets PROTO((rtx, rtx, int)); |
| static int eliminate_regs_in_insn PROTO((rtx, int)); |
| static void mark_not_eliminable PROTO((rtx, rtx)); |
| static int spill_hard_reg PROTO((int, int, FILE *, int)); |
| static void scan_paradoxical_subregs PROTO((rtx)); |
| static int hard_reg_use_compare PROTO((const GENERIC_PTR, const GENERIC_PTR)); |
| static void order_regs_for_reload PROTO((int)); |
| static int compare_spill_regs PROTO((const GENERIC_PTR, const GENERIC_PTR)); |
| static void reload_as_needed PROTO((rtx, int)); |
| static void forget_old_reloads_1 PROTO((rtx, rtx)); |
| static int reload_reg_class_lower PROTO((const GENERIC_PTR, const GENERIC_PTR)); |
| static void mark_reload_reg_in_use PROTO((int, int, enum reload_type, |
| enum machine_mode)); |
| static void clear_reload_reg_in_use PROTO((int, int, enum reload_type, |
| enum machine_mode)); |
| static int reload_reg_free_p PROTO((int, int, enum reload_type)); |
| static int reload_reg_free_before_p PROTO((int, int, enum reload_type)); |
| static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type)); |
| static int reloads_conflict PROTO((int, int)); |
| static int allocate_reload_reg PROTO((int, rtx, int, int)); |
| static void choose_reload_regs PROTO((rtx, rtx)); |
| static void merge_assigned_reloads PROTO((rtx)); |
| static void emit_reload_insns PROTO((rtx)); |
| static void delete_output_reload PROTO((rtx, int, rtx)); |
| static void inc_for_reload PROTO((rtx, rtx, int)); |
| static int constraint_accepts_reg_p PROTO((char *, rtx)); |
| static int count_occurrences PROTO((rtx, rtx)); |
| static void reload_cse_invalidate_regno PROTO((int, enum machine_mode, int)); |
| static int reload_cse_mem_conflict_p PROTO((rtx, rtx)); |
| static void reload_cse_invalidate_mem PROTO((rtx)); |
| static void reload_cse_invalidate_rtx PROTO((rtx, rtx)); |
| static int reload_cse_regno_equal_p PROTO((int, rtx, enum machine_mode)); |
| static int reload_cse_noop_set_p PROTO((rtx, rtx)); |
| static void reload_cse_simplify_set PROTO((rtx, rtx)); |
| static void reload_cse_check_clobber PROTO((rtx, rtx)); |
| static void reload_cse_record_set PROTO((rtx, rtx)); |
| |
| /* Initialize the reload pass once per compilation. */ |
| |
| void |
| init_reload () |
| { |
| register int i; |
| |
| /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack. |
| Set spill_indirect_levels to the number of levels such addressing is |
| permitted, zero if it is not permitted at all. */ |
| |
| register rtx tem |
| = gen_rtx (MEM, Pmode, |
| gen_rtx (PLUS, Pmode, |
| gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1), |
| GEN_INT (4))); |
| spill_indirect_levels = 0; |
| |
| while (memory_address_p (QImode, tem)) |
| { |
| spill_indirect_levels++; |
| tem = gen_rtx (MEM, Pmode, tem); |
| } |
| |
| /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */ |
| |
| tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo")); |
| indirect_symref_ok = memory_address_p (QImode, tem); |
| |
| /* See if reg+reg is a valid (and offsettable) address. */ |
| |
| for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
| { |
| tem = gen_rtx (PLUS, Pmode, |
| gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM), |
| gen_rtx (REG, Pmode, i)); |
| /* This way, we make sure that reg+reg is an offsettable address. */ |
| tem = plus_constant (tem, 4); |
| |
| if (memory_address_p (QImode, tem)) |
| { |
| double_reg_address_ok = 1; |
| break; |
| } |
| } |
| |
| /* Initialize obstack for our rtl allocation. */ |
| gcc_obstack_init (&reload_obstack); |
| reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0); |
| |
| /* Decide which register class should be used when reloading |
| addresses. If we are using SMALL_REGISTER_CLASSES, and any |
| parameters are passed in registers, then we do not want to use |
| those registers when reloading an address. Otherwise, if a |
| function argument needs a reload, we may wind up clobbering |
| another argument to the function which was already computed. If |
| we find a subset class which simply avoids those registers, we |
| use it instead. ??? It would be better to only use the |
| restricted class when we actually are loading function arguments, |
| but that is hard to determine. */ |
| reload_address_base_reg_class = BASE_REG_CLASS; |
| reload_address_index_reg_class = INDEX_REG_CLASS; |
| #ifdef SMALL_REGISTER_CLASSES |
| if (SMALL_REGISTER_CLASSES) |
| { |
| int regno; |
| HARD_REG_SET base, index; |
| enum reg_class *p; |
| |
| COPY_HARD_REG_SET (base, reg_class_contents[BASE_REG_CLASS]); |
| COPY_HARD_REG_SET (index, reg_class_contents[INDEX_REG_CLASS]); |
| for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) |
| { |
| if (FUNCTION_ARG_REGNO_P (regno)) |
| { |
| CLEAR_HARD_REG_BIT (base, regno); |
| CLEAR_HARD_REG_BIT (index, regno); |
| } |
| } |
| |
| GO_IF_HARD_REG_EQUAL (base, reg_class_contents[BASE_REG_CLASS], |
| baseok); |
| for (p = reg_class_subclasses[BASE_REG_CLASS]; |
| *p != LIM_REG_CLASSES; |
| p++) |
| { |
| GO_IF_HARD_REG_EQUAL (base, reg_class_contents[*p], usebase); |
| continue; |
| usebase: |
| reload_address_base_reg_class = *p; |
| break; |
| } |
| baseok:; |
| |
| GO_IF_HARD_REG_EQUAL (index, reg_class_contents[INDEX_REG_CLASS], |
| indexok); |
| for (p = reg_class_subclasses[INDEX_REG_CLASS]; |
| *p != LIM_REG_CLASSES; |
| p++) |
| { |
| GO_IF_HARD_REG_EQUAL (index, reg_class_contents[*p], useindex); |
| continue; |
| useindex: |
| reload_address_index_reg_class = *p; |
| break; |
| } |
| indexok:; |
| } |
| #endif /* SMALL_REGISTER_CLASSES */ |
| } |
| |
| /* Main entry point for the reload pass. |
| |
| FIRST is the first insn of the function being compiled. |
| |
| GLOBAL nonzero means we were called from global_alloc |
| and should attempt to reallocate any pseudoregs that we |
| displace from hard regs we will use for reloads. |
| If GLOBAL is zero, we do not have enough information to do that, |
| so any pseudo reg that is spilled must go to the stack. |
| |
| DUMPFILE is the global-reg debugging dump file stream, or 0. |
| If it is nonzero, messages are written to it to describe |
| which registers are seized as reload regs, which pseudo regs |
| are spilled from them, and where the pseudo regs are reallocated to. |
| |
| Return value is nonzero if reload failed |
| and we must not do any more for this function. */ |
| |
| int |
| reload (first, global, dumpfile) |
| rtx first; |
| int global; |
| FILE *dumpfile; |
| { |
| register int class; |
| register int i, j, k; |
| register rtx insn; |
| register struct elim_table *ep; |
| |
| /* The two pointers used to track the true location of the memory used |
| for label offsets. */ |
| char *real_known_ptr = NULL_PTR; |
| int (*real_at_ptr)[NUM_ELIMINABLE_REGS]; |
| |
| int something_changed; |
| int something_needs_reloads; |
| int something_needs_elimination; |
| int new_basic_block_needs; |
| enum reg_class caller_save_spill_class = NO_REGS; |
| int caller_save_group_size = 1; |
| |
| /* Nonzero means we couldn't get enough spill regs. */ |
| int failure = 0; |
| |
| /* The basic block number currently being processed for INSN. */ |
| int this_block; |
| |
| /* Make sure even insns with volatile mem refs are recognizable. */ |
| init_recog (); |
| |
| /* Enable find_equiv_reg to distinguish insns made by reload. */ |
| reload_first_uid = get_max_uid (); |
| |
| for (i = 0; i < N_REG_CLASSES; i++) |
| basic_block_needs[i] = 0; |
| |
| #ifdef SECONDARY_MEMORY_NEEDED |
| /* Initialize the secondary memory table. */ |
| clear_secondary_mem (); |
| #endif |
| |
| /* Remember which hard regs appear explicitly |
| before we merge into `regs_ever_live' the ones in which |
| pseudo regs have been allocated. */ |
| bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live); |
| |
| /* We don't have a stack slot for any spill reg yet. */ |
| bzero ((char *) spill_stack_slot, sizeof spill_stack_slot); |
| bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width); |
| |
| /* Initialize the save area information for caller-save, in case some |
| are needed. */ |
| init_save_areas (); |
| |
| /* Compute which hard registers are now in use |
| as homes for pseudo registers. |
| This is done here rather than (eg) in global_alloc |
| because this point is reached even if not optimizing. */ |
| for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++) |
| mark_home_live (i); |
| |
| /* A function that receives a nonlocal goto must save all call-saved |
| registers. */ |
| if (current_function_has_nonlocal_label) |
| for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
| { |
| if (! call_used_regs[i] && ! fixed_regs[i]) |
| regs_ever_live[i] = 1; |
| } |
| |
| for (i = 0; i < scratch_list_length; i++) |
| if (scratch_list[i]) |
| mark_scratch_live (scratch_list[i]); |
| |
| /* Make sure that the last insn in the chain |
| is not something that needs reloading. */ |
| emit_note (NULL_PTR, NOTE_INSN_DELETED); |
| |
| /* Find all the pseudo registers that didn't get hard regs |
| but do have known equivalent constants or memory slots. |
| These include parameters (known equivalent to parameter slots) |
| and cse'd or loop-moved constant memory addresses. |
| |
| Record constant equivalents in reg_equiv_constant |
| so they will be substituted by find_reloads. |
| Record memory equivalents in reg_equiv_memory_loc so they can |
| be substituted eventually by altering the REG-rtx's. */ |
| |
| reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx)); |
| bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx)); |
| reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx)); |
| bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx)); |
| reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx)); |
| bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx)); |
| reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx)); |
| bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx)); |
| reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx)); |
| bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx)); |
| reg_max_ref_width = (int *) alloca (max_regno * sizeof (int)); |
| bzero ((char *) reg_max_ref_width, max_regno * sizeof (int)); |
| cannot_omit_stores = (char *) alloca (max_regno); |
| bzero (cannot_omit_stores, max_regno); |
| |
| #ifdef SMALL_REGISTER_CLASSES |
| if (SMALL_REGISTER_CLASSES) |
| CLEAR_HARD_REG_SET (forbidden_regs); |
| #endif |
| |
| /* Look for REG_EQUIV notes; record what each pseudo is equivalent to. |
| Also find all paradoxical subregs and find largest such for each pseudo. |
| On machines with small register classes, record hard registers that |
| are used for user variables. These can never be used for spills. |
| Also look for a "constant" NOTE_INSN_SETJMP. This means that all |
| call-saved registers must be marked live. */ |
| |
| for (insn = first; insn; insn = NEXT_INSN (insn)) |
| { |
| rtx set = single_set (insn); |
| |
| if (GET_CODE (insn) == NOTE && CONST_CALL_P (insn) |
| && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP) |
| for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
| if (! call_used_regs[i]) |
| regs_ever_live[i] = 1; |
| |
| if (set != 0 && GET_CODE (SET_DEST (set)) == REG) |
| { |
| rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX); |
| if (note |
| #ifdef LEGITIMATE_PIC_OPERAND_P |
| && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic |
| || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0))) |
| #endif |
| ) |
| { |
| rtx x = XEXP (note, 0); |
| i = REGNO (SET_DEST (set)); |
| if (i > LAST_VIRTUAL_REGISTER) |
| { |
| if (GET_CODE (x) == MEM) |
| reg_equiv_memory_loc[i] = x; |
| else if (CONSTANT_P (x)) |
| { |
| if (LEGITIMATE_CONSTANT_P (x)) |
| reg_equiv_constant[i] = x; |
| else |
| reg_equiv_memory_loc[i] |
| = force_const_mem (GET_MODE (SET_DEST (set)), x); |
| } |
| else |
| continue; |
| |
| /* If this register is being made equivalent to a MEM |
| and the MEM is not SET_SRC, the equivalencing insn |
| is one with the MEM as a SET_DEST and it occurs later. |
| So don't mark this insn now. */ |
| if (GET_CODE (x) != MEM |
| || rtx_equal_p (SET_SRC (set), x)) |
| reg_equiv_init[i] = insn; |
| } |
| } |
| } |
| |
| /* If this insn is setting a MEM from a register equivalent to it, |
| this is the equivalencing insn. */ |
| else if (set && GET_CODE (SET_DEST (set)) == MEM |
| && GET_CODE (SET_SRC (set)) == REG |
| && reg_equiv_memory_loc[REGNO (SET_SRC (set))] |
| && rtx_equal_p (SET_DEST (set), |
| reg_equiv_memory_loc[REGNO (SET_SRC (set))])) |
| reg_equiv_init[REGNO (SET_SRC (set))] = insn; |
| |
| if (GET_RTX_CLASS (GET_CODE (insn)) == 'i') |
| scan_paradoxical_subregs (PATTERN (insn)); |
| } |
| |
| /* Does this function require a frame pointer? */ |
| |
| frame_pointer_needed = (! flag_omit_frame_pointer |
| #ifdef EXIT_IGNORE_STACK |
| /* ?? If EXIT_IGNORE_STACK is set, we will not save |
| and restore sp for alloca. So we can't eliminate |
| the frame pointer in that case. At some point, |
| we should improve this by emitting the |
| sp-adjusting insns for this case. */ |
| || (current_function_calls_alloca |
| && EXIT_IGNORE_STACK) |
| #endif |
| || FRAME_POINTER_REQUIRED); |
| |
| num_eliminable = 0; |
| |
| /* Initialize the table of registers to eliminate. The way we do this |
| depends on how the eliminable registers were defined. */ |
| #ifdef ELIMINABLE_REGS |
| for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++) |
| { |
| ep->can_eliminate = ep->can_eliminate_previous |
| = (CAN_ELIMINATE (ep->from, ep->to) |
| && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed)); |
| } |
| #else |
| reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous |
| = ! frame_pointer_needed; |
| #endif |
| |
| /* Count the number of eliminable registers and build the FROM and TO |
| REG rtx's. Note that code in gen_rtx will cause, e.g., |
| gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx. |
| We depend on this. */ |
| for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++) |
| { |
| num_eliminable += ep->can_eliminate; |
| ep->from_rtx = gen_rtx (REG, Pmode, ep->from); |
| ep->to_rtx = gen_rtx (REG, Pmode, ep->to); |
| } |
| |
| num_labels = max_label_num () - get_first_label_num (); |
| |
| /* Allocate the tables used to store offset information at labels. */ |
| /* We used to use alloca here, but the size of what it would try to |
| allocate would occasionally cause it to exceed the stack limit and |
| cause a core dump. */ |
| real_known_ptr = xmalloc (num_labels); |
| real_at_ptr |
| = (int (*)[NUM_ELIMINABLE_REGS]) |
| xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (int)); |
| |
| offsets_known_at = real_known_ptr - get_first_label_num (); |
| offsets_at |
| = (int (*)[NUM_ELIMINABLE_REGS]) (real_at_ptr - get_first_label_num ()); |
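| |
| /* The two pointers above are biased by get_first_label_num () so that |
| they can be indexed directly by a label's number, which need not |
| start at zero. */ |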
| |
| /* Alter each pseudo-reg rtx to contain its hard reg number. |
| Assign stack slots to the pseudos that lack hard regs or equivalents. |
| Do not touch virtual registers. */ |
| |
| for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++) |
| alter_reg (i, -1); |
| |
| /* If we have some registers we think can be eliminated, scan all insns to |
| see if there is an insn that sets one of these registers to something |
| other than itself plus a constant. If so, the register cannot be |
| eliminated. Doing this scan here eliminates an extra pass through the |
| main reload loop in the most common case where register elimination |
| cannot be done. */ |
| for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn)) |
| if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN |
| || GET_CODE (insn) == CALL_INSN) |
| note_stores (PATTERN (insn), mark_not_eliminable); |
| |
| #ifndef REGISTER_CONSTRAINTS |
| /* If all the pseudo regs have hard regs, |
| except for those that are never referenced, |
| we know that no reloads are needed. */ |
| /* But that is not true if there are register constraints, since |
| in that case some pseudos might be in the wrong kind of hard reg. */ |
| |
| for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++) |
| if (reg_renumber[i] == -1 && REG_N_REFS (i) != 0) |
| break; |
| |
| if (i == max_regno && num_eliminable == 0 && ! caller_save_needed) |
| { |
| free (real_known_ptr); |
| free (real_at_ptr); |
| return 0; |
| } |
| #endif |
| |
| /* Compute the order of preference for hard registers to spill. |
| Store them by decreasing preference in potential_reload_regs. */ |
| |
| order_regs_for_reload (global); |
| |
| /* So far, no hard regs have been spilled. */ |
| n_spills = 0; |
| for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
| spill_reg_order[i] = -1; |
| |
| /* Initialize to -1, which means take the first spill register. */ |
| last_spill_reg = -1; |
| |
| /* On most machines, we can't use any register explicitly used in the |
| rtl as a spill register. But on some, we have to. Those will have |
| taken care to keep the life of hard regs as short as possible. */ |
| |
| #ifdef SMALL_REGISTER_CLASSES |
| if (! SMALL_REGISTER_CLASSES) |
| #endif |
| COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs); |
| |
| /* Spill any hard regs that we know we can't eliminate. */ |
| for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++) |
| if (! ep->can_eliminate) |
| spill_hard_reg (ep->from, global, dumpfile, 1); |
| |
| #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM |
| if (frame_pointer_needed) |
| spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1); |
| #endif |
| |
| if (global) |
| for (i = 0; i < N_REG_CLASSES; i++) |
| { |
| basic_block_needs[i] = (char *) alloca (n_basic_blocks); |
| bzero (basic_block_needs[i], n_basic_blocks); |
| } |
| |
| /* From now on, we need to emit any moves without making new pseudos. */ |
| reload_in_progress = 1; |
| |
| /* This loop scans the entire function each go-round |
| and repeats until one repetition spills no additional hard regs. */ |
| |
| /* This flag is set when a pseudo reg is spilled, |
| to require another pass. Note that getting an additional reload |
| reg does not necessarily imply any pseudo reg was spilled; |
| sometimes we find a reload reg that no pseudo reg was allocated in. */ |
| something_changed = 1; |
| /* This flag is set if there are any insns that require reloading. */ |
| something_needs_reloads = 0; |
| /* This flag is set if there are any insns that require register |
| eliminations. */ |
| something_needs_elimination = 0; |
| while (something_changed) |
| { |
| rtx after_call = 0; |
| |
| /* For each class, number of reload regs needed in that class. |
| This is the maximum over all insns of the needs in that class |
| of the individual insn. */ |
| int max_needs[N_REG_CLASSES]; |
| /* For each class, size of group of consecutive regs |
| that is needed for the reloads of this class. */ |
| int group_size[N_REG_CLASSES]; |
| /* For each class, max number of consecutive groups needed. |
| (Each group contains group_size[CLASS] consecutive registers.) */ |
| int max_groups[N_REG_CLASSES]; |
| /* For each class, max number needed of regs that don't belong |
| to any of the groups. */ |
| int max_nongroups[N_REG_CLASSES]; |
| /* For each class, the machine mode which requires consecutive |
| groups of regs of that class. |
| If two different modes ever require groups of one class, |
| they must be the same size and equally restrictive for that class, |
| otherwise we can't handle the complexity. */ |
| enum machine_mode group_mode[N_REG_CLASSES]; |
| /* Record the insn where each maximum need is first found. */ |
| rtx max_needs_insn[N_REG_CLASSES]; |
| rtx max_groups_insn[N_REG_CLASSES]; |
| rtx max_nongroups_insn[N_REG_CLASSES]; |
| rtx x; |
| HOST_WIDE_INT starting_frame_size; |
| int previous_frame_pointer_needed = frame_pointer_needed; |
| static char *reg_class_names[] = REG_CLASS_NAMES; |
| |
| something_changed = 0; |
| bzero ((char *) max_needs, sizeof max_needs); |
| bzero ((char *) max_groups, sizeof max_groups); |
| bzero ((char *) max_nongroups, sizeof max_nongroups); |
| bzero ((char *) max_needs_insn, sizeof max_needs_insn); |
| bzero ((char *) max_groups_insn, sizeof max_groups_insn); |
| bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn); |
| bzero ((char *) group_size, sizeof group_size); |
| for (i = 0; i < N_REG_CLASSES; i++) |
| group_mode[i] = VOIDmode; |
| |
| /* Keep track of which basic blocks need the reloads. */ |
| this_block = 0; |
| |
| /* Remember whether any element of basic_block_needs |
| changes from 0 to 1 in this pass. */ |
| new_basic_block_needs = 0; |
| |
| /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done |
| here because the stack size may be a part of the offset computation |
| for register elimination, and there might have been new stack slots |
| created in the last iteration of this loop. */ |
| assign_stack_local (BLKmode, 0, 0); |
| |
| starting_frame_size = get_frame_size (); |
| |
| /* Reset all offsets on eliminable registers to their initial values. */ |
| #ifdef ELIMINABLE_REGS |
| for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++) |
| { |
| INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset); |
| ep->previous_offset = ep->offset |
| = ep->max_offset = ep->initial_offset; |
| } |
| #else |
| #ifdef INITIAL_FRAME_POINTER_OFFSET |
| INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset); |
| #else |
| if (!FRAME_POINTER_REQUIRED) |
| abort (); |
| reg_eliminate[0].initial_offset = 0; |
| #endif |
| reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset |
| = reg_eliminate[0].offset = reg_eliminate[0].initial_offset; |
| #endif |
| |
| num_not_at_initial_offset = 0; |
| |
| bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels); |
| |
| /* Set a known offset for each forced label to be at the initial offset |
| of each elimination. We do this because we assume that all |
| computed jumps occur from a location where each elimination is |
| at its initial offset. */ |
| |
| for (x = forced_labels; x; x = XEXP (x, 1)) |
| if (XEXP (x, 0)) |
| set_label_offsets (XEXP (x, 0), NULL_RTX, 1); |
| |
| /* For each pseudo register that has an equivalent location defined, |
| try to eliminate any eliminable registers (such as the frame pointer) |
| assuming initial offsets for the replacement register, which |
| is the normal case. |
| |
| If the resulting location is directly addressable, substitute |
| the MEM we just got directly for the old REG. |
| |
| If it is not addressable but is a constant or the sum of a hard reg |
| and constant, it is probably not addressable because the constant is |
| out of range; in that case, record the address. We will generate |
| hairy code to compute the address in a register each time it is |
| needed. Similarly if it is a hard register, but one that is not |
| valid as an address register. |
| |
| If the location is not addressable, but does not have one of the |
| above forms, assign a stack slot. We have to do this to avoid the |
| potential of producing lots of reloads if, e.g., a location involves |
| a pseudo that didn't get a hard register and has an equivalent memory |
| location that also involves a pseudo that didn't get a hard register. |
| |
| Perhaps at some point we will improve reload_when_needed handling |
| so this problem goes away. But that's very hairy. */ |
| |
| for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++) |
| if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i]) |
| { |
| rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX, 0); |
| |
| if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]), |
| XEXP (x, 0))) |
| reg_equiv_mem[i] = x, reg_equiv_address[i] = 0; |
| else if (CONSTANT_P (XEXP (x, 0)) |
| || (GET_CODE (XEXP (x, 0)) == REG |
| && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER) |
| || (GET_CODE (XEXP (x, 0)) == PLUS |
| && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG |
| && (REGNO (XEXP (XEXP (x, 0), 0)) |
| < FIRST_PSEUDO_REGISTER) |
| && CONSTANT_P (XEXP (XEXP (x, 0), 1)))) |
| reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0; |
| else |
| { |
| /* Make a new stack slot. Then indicate that something |
| changed so we go back and recompute offsets for |
| eliminable registers because the allocation of memory |
| below might change some offset. reg_equiv_{mem,address} |
| will be set up for this pseudo on the next pass around |
| the loop. */ |
| reg_equiv_memory_loc[i] = 0; |
| reg_equiv_init[i] = 0; |
| alter_reg (i, -1); |
| something_changed = 1; |
| } |
| } |
| |
| /* If we allocated another pseudo to the stack, redo elimination |
| bookkeeping. */ |
| if (something_changed) |
| continue; |
| |
| /* If caller-saves needs a group, initialize the group to include |
| the size and mode required for caller-saves. */ |
| |
| if (caller_save_group_size > 1) |
| { |
| group_mode[(int) caller_save_spill_class] = Pmode; |
| group_size[(int) caller_save_spill_class] = caller_save_group_size; |
| } |
| |
| /* Compute the most additional registers needed by any instruction. |
| Collect information separately for each class of regs. */ |
| |
| for (insn = first; insn; insn = NEXT_INSN (insn)) |
| { |
| if (global && this_block + 1 < n_basic_blocks |
| && insn == basic_block_head[this_block+1]) |
| ++this_block; |
| |
| /* If this is a label, a JUMP_INSN, or has REG_NOTES (which |
| might include REG_LABEL), we need to see what effects this |
| has on the known offsets at labels. */ |
| |
| if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN |
| || (GET_RTX_CLASS (GET_CODE (insn)) == 'i' |
| && REG_NOTES (insn) != 0)) |
| set_label_offsets (insn, insn, 0); |
| |
| if (GET_RTX_CLASS (GET_CODE (insn)) == 'i') |
| { |
| /* Nonzero means don't use a reload reg that overlaps |
| the place where a function value can be returned. */ |
| rtx avoid_return_reg = 0; |
| |
| rtx old_body = PATTERN (insn); |
| int old_code = INSN_CODE (insn); |
| rtx old_notes = REG_NOTES (insn); |
| int did_elimination = 0; |
| |
| /* To compute the number of reload registers of each class |
| needed for an insn, we must simulate what choose_reload_regs |
| can do. We do this by splitting an insn into an "input" and |
| an "output" part. RELOAD_OTHER reloads are used in both. |
| The input part uses those reloads, RELOAD_FOR_INPUT reloads, |
| which must be live over the entire input section of reloads, |
| and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and |
| RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the |
| inputs. |
| |
| The registers needed for output are RELOAD_OTHER and |
| RELOAD_FOR_OUTPUT, which are live for the entire output |
| portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS |
| reloads for each operand. |
| |
| The total number of registers needed is the maximum of the |
| inputs and outputs. */ |
| |
| struct needs |
| { |
| /* [0] is normal, [1] is nongroup. */ |
| int regs[2][N_REG_CLASSES]; |
| int groups[N_REG_CLASSES]; |
| }; |
| |
| /* Each `struct needs' corresponds to one RELOAD_... type. */ |
| struct { |
| struct needs other; |
| struct needs input; |
| struct needs output; |
| struct needs insn; |
| struct needs other_addr; |
| struct needs op_addr; |
| struct needs op_addr_reload; |
| struct needs in_addr[MAX_RECOG_OPERANDS]; |
| struct needs in_addr_addr[MAX_RECOG_OPERANDS]; |
| struct needs out_addr[MAX_RECOG_OPERANDS]; |
| struct needs out_addr_addr[MAX_RECOG_OPERANDS]; |
| } insn_needs; |
| |
| /* If needed, eliminate any eliminable registers. */ |
| if (num_eliminable) |
| did_elimination = eliminate_regs_in_insn (insn, 0); |
| |
| #ifdef SMALL_REGISTER_CLASSES |
| /* Set avoid_return_reg if this is an insn |
| that might use the value of a function call. */ |
| if (SMALL_REGISTER_CLASSES && GET_CODE (insn) == CALL_INSN) |
| { |
| if (GET_CODE (PATTERN (insn)) == SET) |
| after_call = SET_DEST (PATTERN (insn)); |
| else if (GET_CODE (PATTERN (insn)) == PARALLEL |
| && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET) |
| after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0)); |
| else |
| after_call = 0; |
| } |
| else if (SMALL_REGISTER_CLASSES |
| && after_call != 0 |
| && !(GET_CODE (PATTERN (insn)) == SET |
| && SET_DEST (PATTERN (insn)) == stack_pointer_rtx)) |
| { |
| if (reg_referenced_p (after_call, PATTERN (insn))) |
| avoid_return_reg = after_call; |
| after_call = 0; |
| } |
| #endif /* SMALL_REGISTER_CLASSES */ |
| |
| /* Analyze the instruction. */ |
| find_reloads (insn, 0, spill_indirect_levels, global, |
| spill_reg_order); |
| |
| /* Remember for later shortcuts which insns had any reloads or |
| register eliminations. |
| |
| One might think that it would be worthwhile to mark insns |
| that need register replacements but not reloads, but this is |
| not safe because find_reloads may do some manipulation of |
| the insn (such as swapping commutative operands), which would |
| be lost when we restore the old pattern after register |
| replacement. So the actions of find_reloads must be redone in |
| subsequent passes or in reload_as_needed. |
| |
| However, it is safe to mark insns that need reloads |
| but not register replacement. */ |
| |
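| /* The insn's machine mode is commandeered as scratch storage here: |
| QImode marks an insn that needs register elimination, HImode one that |
| needs reloads but no elimination, and VOIDmode one that needs neither; |
| an insn already marked DImode keeps that mark if it needs neither. */ |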
| PUT_MODE (insn, (did_elimination ? QImode |
| : n_reloads ? HImode |
| : GET_MODE (insn) == DImode ? DImode |
| : VOIDmode)); |
| |
| /* Discard any register replacements done. */ |
| if (did_elimination) |
| { |
| obstack_free (&reload_obstack, reload_firstobj); |
| PATTERN (insn) = old_body; |
| INSN_CODE (insn) = old_code; |
| REG_NOTES (insn) = old_notes; |
| something_needs_elimination = 1; |
| } |
| |
| /* If this insn has no reloads, we need not do anything except |
| in the case of a CALL_INSN when we have caller-saves and |
| caller-save needs reloads. */ |
| |
| if (n_reloads == 0 |
| && ! (GET_CODE (insn) == CALL_INSN |
| && caller_save_spill_class != NO_REGS)) |
| continue; |
| |
| something_needs_reloads = 1; |
| bzero ((char *) &insn_needs, sizeof insn_needs); |
| |
| /* Count each reload once in every class |
| containing the reload's own class. */ |
| |
| for (i = 0; i < n_reloads; i++) |
| { |
| register enum reg_class *p; |
| enum reg_class class = reload_reg_class[i]; |
| int size; |
| enum machine_mode mode; |
| int nongroup_need; |
| struct needs *this_needs; |
| |
| /* Don't count the dummy reloads, for which one of the |
| regs mentioned in the insn can be used for reloading. |
| Don't count optional reloads. |
| Don't count reloads that got combined with others. */ |
| if (reload_reg_rtx[i] != 0 |
| || reload_optional[i] != 0 |
| || (reload_out[i] == 0 && reload_in[i] == 0 |
| && ! reload_secondary_p[i])) |
| continue; |
| |
| /* Show that a reload register of this class is needed |
| in this basic block. We do not use insn_needs and |
| insn_groups because they are overly conservative for |
| this purpose. */ |
| if (global && ! basic_block_needs[(int) class][this_block]) |
| { |
| basic_block_needs[(int) class][this_block] = 1; |
| new_basic_block_needs = 1; |
| } |
| |
| mode = reload_inmode[i]; |
| if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode)) |
| mode = reload_outmode[i]; |
| size = CLASS_MAX_NREGS (class, mode); |
| |
| /* If this class doesn't want a group, determine if we have |
| a nongroup need or a regular need. We have a nongroup |
| need if this reload conflicts with a group reload whose |
| class intersects with this reload's class. */ |
| |
| nongroup_need = 0; |
| if (size == 1) |
| for (j = 0; j < n_reloads; j++) |
| if ((CLASS_MAX_NREGS (reload_reg_class[j], |
| (GET_MODE_SIZE (reload_outmode[j]) |
| > GET_MODE_SIZE (reload_inmode[j])) |
| ? reload_outmode[j] |
| : reload_inmode[j]) |
| > 1) |
| && (!reload_optional[j]) |
| && (reload_in[j] != 0 || reload_out[j] != 0 |
| || reload_secondary_p[j]) |
| && reloads_conflict (i, j) |
| && reg_classes_intersect_p (class, |
| reload_reg_class[j])) |
| { |
| nongroup_need = 1; |
| break; |
| } |
| |
| /* Decide which time-of-use to count this reload for. */ |
| switch (reload_when_needed[i]) |
| { |
| case RELOAD_OTHER: |
| this_needs = &insn_needs.other; |
| break; |
| case RELOAD_FOR_INPUT: |
| this_needs = &insn_needs.input; |
| break; |
| case RELOAD_FOR_OUTPUT: |
| this_needs = &insn_needs.output; |
| break; |
| case RELOAD_FOR_INSN: |
| this_needs = &insn_needs.insn; |
| break; |
| case RELOAD_FOR_OTHER_ADDRESS: |
| this_needs = &insn_needs.other_addr; |
| break; |
| case RELOAD_FOR_INPUT_ADDRESS: |
| this_needs = &insn_needs.in_addr[reload_opnum[i]]; |
| break; |
| case RELOAD_FOR_INPADDR_ADDRESS: |
| this_needs = &insn_needs.in_addr_addr[reload_opnum[i]]; |
| break; |
| case RELOAD_FOR_OUTPUT_ADDRESS: |
| this_needs = &insn_needs.out_addr[reload_opnum[i]]; |
| break; |
| case RELOAD_FOR_OUTADDR_ADDRESS: |
| this_needs = &insn_needs.out_addr_addr[reload_opnum[i]]; |
| break; |
| case RELOAD_FOR_OPERAND_ADDRESS: |
| this_needs = &insn_needs.op_addr; |
| break; |
| case RELOAD_FOR_OPADDR_ADDR: |
| this_needs = &insn_needs.op_addr_reload; |
| break; |
| } |
| |
| if (size > 1) |
| { |
| enum machine_mode other_mode, allocate_mode; |
| |
| /* Count number of groups needed separately from |
| number of individual regs needed. */ |
| this_needs->groups[(int) class]++; |
| p = reg_class_superclasses[(int) class]; |
| while (*p != LIM_REG_CLASSES) |
| this_needs->groups[(int) *p++]++; |
| |
| /* Record size and mode of a group of this class. */ |
| /* If more than one size group is needed, |
| make all groups the largest needed size. */ |
| if (group_size[(int) class] < size) |
| { |
| other_mode = group_mode[(int) class]; |
| allocate_mode = mode; |
| |
| group_size[(int) class] = size; |
| group_mode[(int) class] = mode; |
| } |
| else |
| { |
| other_mode = mode; |
| allocate_mode = group_mode[(int) class]; |
| } |
| |
| /* Crash if two dissimilar machine modes both need |
| groups of consecutive regs of the same class. */ |
| |
| if (other_mode != VOIDmode && other_mode != allocate_mode |
| && ! modes_equiv_for_class_p (allocate_mode, |
| other_mode, class)) |
| fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class", |
| insn); |
| } |
| else if (size == 1) |
| { |
| this_needs->regs[nongroup_need][(int) class] += 1; |
| p = reg_class_superclasses[(int) class]; |
| while (*p != LIM_REG_CLASSES) |
| this_needs->regs[nongroup_need][(int) *p++] += 1; |
| } |
| else |
| abort (); |
| } |
| |
| /* All reloads have been counted for this insn; |
| now merge the various times of use. |
| This sets insn_needs, etc., to the maximum total number |
| of registers needed at any point in this insn. */ |
| |
| for (i = 0; i < N_REG_CLASSES; i++) |
| { |
| int in_max, out_max; |
| |
| /* Compute normal and nongroup needs. */ |
| for (j = 0; j <= 1; j++) |
| { |
| for (in_max = 0, out_max = 0, k = 0; |
| k < reload_n_operands; k++) |
| { |
| in_max |
| = MAX (in_max, |
| (insn_needs.in_addr[k].regs[j][i] |
| + insn_needs.in_addr_addr[k].regs[j][i])); |
| out_max |
| = MAX (out_max, insn_needs.out_addr[k].regs[j][i]); |
| out_max |
| = MAX (out_max, |
| insn_needs.out_addr_addr[k].regs[j][i]); |
| } |
| |
| /* RELOAD_FOR_INSN reloads conflict with inputs, outputs, |
| and operand addresses but not things used to reload |
| them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads |
| don't conflict with things needed to reload inputs or |
| outputs. */ |
| |
| in_max = MAX (MAX (insn_needs.op_addr.regs[j][i], |
| insn_needs.op_addr_reload.regs[j][i]), |
| in_max); |
| |
| out_max = MAX (out_max, insn_needs.insn.regs[j][i]); |
| |
| insn_needs.input.regs[j][i] |
| = MAX (insn_needs.input.regs[j][i] |
| + insn_needs.op_addr.regs[j][i] |
| + insn_needs.insn.regs[j][i], |
| in_max + insn_needs.input.regs[j][i]); |
| |
| insn_needs.output.regs[j][i] += out_max; |
| insn_needs.other.regs[j][i] |
| += MAX (MAX (insn_needs.input.regs[j][i], |
| insn_needs.output.regs[j][i]), |
| insn_needs.other_addr.regs[j][i]); |
| |
| } |
| |
| /* Now compute group needs. */ |
| for (in_max = 0, out_max = 0, j = 0; |
| j < reload_n_operands; j++) |
| { |
| in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]); |
| in_max = MAX (in_max, |
| insn_needs.in_addr_addr[j].groups[i]); |
| out_max |
| = MAX (out_max, insn_needs.out_addr[j].groups[i]); |
| out_max |
| = MAX (out_max, insn_needs.out_addr_addr[j].groups[i]); |
| } |
| |
| in_max = MAX (MAX (insn_needs.op_addr.groups[i], |
| insn_needs.op_addr_reload.groups[i]), |
| in_max); |
| out_max = MAX (out_max, insn_needs.insn.groups[i]); |
| |
| insn_needs.input.groups[i] |
| = MAX (insn_needs.input.groups[i] |
| + insn_needs.op_addr.groups[i] |
| + insn_needs.insn.groups[i], |
| in_max + insn_needs.input.groups[i]); |
| |
| insn_needs.output.groups[i] += out_max; |
| insn_needs.other.groups[i] |
| += MAX (MAX (insn_needs.input.groups[i], |
| insn_needs.output.groups[i]), |
| insn_needs.other_addr.groups[i]); |
| } |
| |
| /* If this is a CALL_INSN and caller-saves will need |
| a spill register, act as if the spill register is |
| needed for this insn. However, the spill register |
| can be used by any reload of this insn, so we only |
| need do something if no need for that class has |
| been recorded. |
| |
| The assumption that every CALL_INSN will trigger a |
| caller-save is highly conservative; however, the number |
| of cases where caller-saves will need a spill register but |
| a block containing a CALL_INSN won't need a spill register |
| of that class should be quite rare. |
| |
| If a group is needed, the size and mode of the group will |
| have been set up at the beginning of this loop. */ |
| |
| if (GET_CODE (insn) == CALL_INSN |
| && caller_save_spill_class != NO_REGS) |
| { |
| /* See if this register would conflict with any reload |
| that needs a group. */ |
| int nongroup_need = 0; |
| int *caller_save_needs; |
| |
| for (j = 0; j < n_reloads; j++) |
| if ((CLASS_MAX_NREGS (reload_reg_class[j], |
| (GET_MODE_SIZE (reload_outmode[j]) |
| > GET_MODE_SIZE (reload_inmode[j])) |
| ? reload_outmode[j] |
| : reload_inmode[j]) |
| > 1) |
| && reg_classes_intersect_p (caller_save_spill_class, |
| reload_reg_class[j])) |
| { |
| nongroup_need = 1; |
| break; |
| } |
| |
| caller_save_needs |
| = (caller_save_group_size > 1 |
| ? insn_needs.other.groups |
| : insn_needs.other.regs[nongroup_need]); |
| |
| if (caller_save_needs[(int) caller_save_spill_class] == 0) |
| { |
| register enum reg_class *p |
| = reg_class_superclasses[(int) caller_save_spill_class]; |
| |
| caller_save_needs[(int) caller_save_spill_class]++; |
| |
| while (*p != LIM_REG_CLASSES) |
| caller_save_needs[(int) *p++] += 1; |
| } |
| |
| /* Show that this basic block will need a register of |
| this class. */ |
| |
| if (global |
| && ! (basic_block_needs[(int) caller_save_spill_class] |
| [this_block])) |
| { |
| basic_block_needs[(int) caller_save_spill_class] |
| [this_block] = 1; |
| new_basic_block_needs = 1; |
| } |
| } |
| |
| #ifdef SMALL_REGISTER_CLASSES |
| /* If this insn stores the value of a function call, |
| and that value is in a register that has been spilled, |
| and if the insn needs a reload in a class |
| that might use that register as the reload register, |
| then add an extra need in that class. |
| This makes sure we have a register available that does |
| not overlap the return value. */ |
| |
| if (SMALL_REGISTER_CLASSES && avoid_return_reg) |
| { |
| int regno = REGNO (avoid_return_reg); |
| int nregs |
| = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg)); |
| int r; |
| int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES]; |
| |
| /* First compute the "basic needs", which counts a |
| need only in the smallest class in which it |
| is required. */ |
| |
| bcopy ((char *) insn_needs.other.regs[0], |
| (char *) basic_needs, sizeof basic_needs); |
| bcopy ((char *) insn_needs.other.groups, |
| (char *) basic_groups, sizeof basic_groups); |
| |
| for (i = 0; i < N_REG_CLASSES; i++) |
| { |
| enum reg_class *p; |
| |
| if (basic_needs[i] >= 0) |
| for (p = reg_class_superclasses[i]; |
| *p != LIM_REG_CLASSES; p++) |
| basic_needs[(int) *p] -= basic_needs[i]; |
| |
| if (basic_groups[i] >= 0) |
| for (p = reg_class_superclasses[i]; |
| *p != LIM_REG_CLASSES; p++) |
| basic_groups[(int) *p] -= basic_groups[i]; |
| } |
| |
| /* Now count extra regs if there might be a conflict with |
| the return value register. */ |
| |
| for (r = regno; r < regno + nregs; r++) |
| if (spill_reg_order[r] >= 0) |
| for (i = 0; i < N_REG_CLASSES; i++) |
| if (TEST_HARD_REG_BIT (reg_class_contents[i], r)) |
| { |
| if (basic_needs[i] > 0) |
| { |
| enum reg_class *p; |
| |
| insn_needs.other.regs[0][i]++; |
| p = reg_class_superclasses[i]; |
| while (*p != LIM_REG_CLASSES) |
| insn_needs.other.regs[0][(int) *p++]++; |
| } |
| if (basic_groups[i] > 0) |
| { |
| enum reg_class *p; |
| |
| insn_needs.other.groups[i]++; |
| p = reg_class_superclasses[i]; |
| while (*p != LIM_REG_CLASSES) |
| insn_needs.other.groups[(int) *p++]++; |
| } |
| } |
| } |
| #endif /* SMALL_REGISTER_CLASSES */ |
| |
| /* For each class, collect maximum need of any insn. */ |
| |
| for (i = 0; i < N_REG_CLASSES; i++) |
| { |
| if (max_needs[i] < insn_needs.other.regs[0][i]) |
| { |
| max_needs[i] = insn_needs.other.regs[0][i]; |
| max_needs_insn[i] = insn; |
| } |
| if (max_groups[i] < insn_needs.other.groups[i]) |
| { |
| max_groups[i] = insn_needs.other.groups[i]; |
| max_groups_insn[i] = insn; |
| } |
| if (max_nongroups[i] < insn_needs.other.regs[1][i]) |
| { |
| max_nongroups[i] = insn_needs.other.regs[1][i]; |
| max_nongroups_insn[i] = insn; |
| } |
| } |
| } |
| /* Note that there is a continue statement above. */ |
| } |
| |
| /* If we allocated any new memory locations, make another pass |
| since it might have changed elimination offsets. */ |
| if (starting_frame_size != get_frame_size ()) |
| something_changed = 1; |
| |
| if (dumpfile) |
| for (i = 0; i < N_REG_CLASSES; i++) |
| { |
| if (max_needs[i] > 0) |
| fprintf (dumpfile, |
| ";; Need %d reg%s of class %s (for insn %d).\n", |
| max_needs[i], max_needs[i] == 1 ? "" : "s", |
| reg_class_names[i], INSN_UID (max_needs_insn[i])); |
| if (max_nongroups[i] > 0) |
| fprintf (dumpfile, |
| ";; Need %d nongroup reg%s of class %s (for insn %d).\n", |
| max_nongroups[i], max_nongroups[i] == 1 ? "" : "s", |
| reg_class_names[i], INSN_UID (max_nongroups_insn[i])); |
| if (max_groups[i] > 0) |
| fprintf (dumpfile, |
| ";; Need %d group%s (%smode) of class %s (for insn %d).\n", |
| max_groups[i], max_groups[i] == 1 ? "" : "s", |
| mode_name[(int) group_mode[i]], |
| reg_class_names[i], INSN_UID (max_groups_insn[i])); |
| } |
| |
| /* If we have caller-saves, set up the save areas and see if caller-save |
| will need a spill register. */ |
| |
| if (caller_save_needed) |
| { |
| /* Set the offsets for setup_save_areas. */ |
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
| ep++) |
| ep->previous_offset = ep->max_offset; |
| |
| if ( ! setup_save_areas (&something_changed) |
| && caller_save_spill_class == NO_REGS) |
| { |
| /* The class we will need depends on whether the machine |
| supports the sum of two registers for an address; see |
| find_address_reloads for details. */ |
| |
| caller_save_spill_class |
| = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS; |
| caller_save_group_size |
| = CLASS_MAX_NREGS (caller_save_spill_class, Pmode); |
| something_changed = 1; |
| } |
| } |
| |
| /* See if anything that happened changes which eliminations are valid. |
| For example, on the Sparc, whether or not the frame pointer can |
| be eliminated can depend on what registers have been used. We need |
| not check some conditions again (such as flag_omit_frame_pointer) |
| since they can't have changed. */ |
| |
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
| if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED) |
| #ifdef ELIMINABLE_REGS |
| || ! CAN_ELIMINATE (ep->from, ep->to) |
| #endif |
| ) |
| ep->can_eliminate = 0; |
| |
| /* Look for the case where we have discovered that we can't replace |
| register A with register B and that means that we will now be |
| trying to replace register A with register C. This means we can |
| no longer replace register C with register B and we need to disable |
| such an elimination, if it exists. This occurs often with A == ap, |
| B == sp, and C == fp. */ |
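
      /* Concretely: if ap -> sp has just become invalid, ap will now be
	 eliminated via ap -> fp instead, so fp must stay around and the
	 fp -> sp elimination must be disabled as well.  */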
| |
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
| { |
| struct elim_table *op; |
| register int new_to = -1; |
| |
| if (! ep->can_eliminate && ep->can_eliminate_previous) |
| { |
| /* Find the current elimination for ep->from, if there is a |
| new one. */ |
| for (op = reg_eliminate; |
		   op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
| if (op->from == ep->from && op->can_eliminate) |
| { |
| new_to = op->to; |
| break; |
| } |
| |
| /* See if there is an elimination of NEW_TO -> EP->TO. If so, |
| disable it. */ |
| for (op = reg_eliminate; |
		   op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
| if (op->from == new_to && op->to == ep->to) |
| op->can_eliminate = 0; |
| } |
| } |
| |
| /* See if any registers that we thought we could eliminate the previous |
| time are no longer eliminable. If so, something has changed and we |
| must spill the register. Also, recompute the number of eliminable |
| registers and see if the frame pointer is needed; it is if there is |
| no elimination of the frame pointer that we can perform. */ |
| |
| frame_pointer_needed = 1; |
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
| { |
| if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM |
| && ep->to != HARD_FRAME_POINTER_REGNUM) |
| frame_pointer_needed = 0; |
| |
| if (! ep->can_eliminate && ep->can_eliminate_previous) |
| { |
| ep->can_eliminate_previous = 0; |
| spill_hard_reg (ep->from, global, dumpfile, 1); |
| something_changed = 1; |
| num_eliminable--; |
| } |
| } |
| |
| #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM |
| /* If we didn't need a frame pointer last time, but we do now, spill |
| the hard frame pointer. */ |
| if (frame_pointer_needed && ! previous_frame_pointer_needed) |
| { |
| spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1); |
| something_changed = 1; |
| } |
| #endif |
| |
| /* If all needs are met, we win. */ |
| |
| for (i = 0; i < N_REG_CLASSES; i++) |
| if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0) |
| break; |
| if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed) |
| break; |
| |
| /* Not all needs are met; must spill some hard regs. */ |
| |
| /* Put all registers spilled so far back in potential_reload_regs, but |
| put them at the front, since we've already spilled most of the |
| pseudos in them (we might have left some pseudos unspilled if they |
| were in a block that didn't need any spill registers of a conflicting |
	 class).  We used to try to mark off the need for those registers,
| but doing so properly is very complex and reallocating them is the |
| simpler approach. First, "pack" potential_reload_regs by pushing |
| any nonnegative entries towards the end. That will leave room |
| for the registers we already spilled. |
| |
| Also, undo the marking of the spill registers from the last time |
	 around in FORBIDDEN_REGS since we will probably be allocating
| them again below. |
| |
| ??? It is theoretically possible that we might end up not using one |
| of our previously-spilled registers in this allocation, even though |
| they are at the head of the list. It's not clear what to do about |
| this, but it was no better before, when we marked off the needs met |
| by the previously-spilled registers. With the current code, globals |
| can be allocated into these registers, but locals cannot. */ |
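
      /* For example, if potential_reload_regs held {3, -1, 5, -1} after
	 two regs were spilled, the pack below slides 3 and 5 toward the
	 end (keeping their order), and the two freed leading slots are
	 then refilled from spill_regs.  */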
| |
| if (n_spills) |
| { |
| for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--) |
| if (potential_reload_regs[i] != -1) |
| potential_reload_regs[j--] = potential_reload_regs[i]; |
| |
| for (i = 0; i < n_spills; i++) |
| { |
| potential_reload_regs[i] = spill_regs[i]; |
| spill_reg_order[spill_regs[i]] = -1; |
| CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]); |
| } |
| |
| n_spills = 0; |
| } |
| |
      /* Now find more reload regs to satisfy the remaining need.
| Do it by ascending class number, since otherwise a reg |
| might be spilled for a big class and might fail to count |
| for a smaller class even though it belongs to that class. |
| |
| Count spilled regs in `spills', and add entries to |
| `spill_regs' and `spill_reg_order'. |
| |
| ??? Note there is a problem here. |
| When there is a need for a group in a high-numbered class, |
| and also need for non-group regs that come from a lower class, |
| the non-group regs are chosen first. If there aren't many regs, |
| they might leave no room for a group. |
| |
| This was happening on the 386. To fix it, we added the code |
| that calls possible_group_p, so that the lower class won't |
| break up the last possible group. |
| |
| Really fixing the problem would require changes above |
| in counting the regs already spilled, and in choose_reload_regs. |
| It might be hard to avoid introducing bugs there. */ |
| |
| CLEAR_HARD_REG_SET (counted_for_groups); |
| CLEAR_HARD_REG_SET (counted_for_nongroups); |
| |
| for (class = 0; class < N_REG_CLASSES; class++) |
| { |
| /* First get the groups of registers. |
| If we got single registers first, we might fragment |
| possible groups. */ |
| while (max_groups[class] > 0) |
| { |
| /* If any single spilled regs happen to form groups, |
| count them now. Maybe we don't really need |
| to spill another group. */ |
| count_possible_groups (group_size, group_mode, max_groups, |
| class); |
| |
| if (max_groups[class] <= 0) |
| break; |
| |
| /* Groups of size 2 (the only groups used on most machines) |
| are treated specially. */ |
| if (group_size[class] == 2) |
| { |
| /* First, look for a register that will complete a group. */ |
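		  /* OTHER is the neighbor (J-1 or J+1) that is already a
		     spill reg; J completes the pair when both regs are in
		     CLASS, the lower-numbered reg of the pair is valid for
		     the group mode, and OTHER is not already reserved for
		     another group or for a nongroup need.  */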
| for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
| { |
| int other; |
| |
| j = potential_reload_regs[i]; |
| if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j) |
| && |
| ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0) |
| && TEST_HARD_REG_BIT (reg_class_contents[class], j) |
| && TEST_HARD_REG_BIT (reg_class_contents[class], other) |
| && HARD_REGNO_MODE_OK (other, group_mode[class]) |
| && ! TEST_HARD_REG_BIT (counted_for_nongroups, |
| other) |
| /* We don't want one part of another group. |
| We could get "two groups" that overlap! */ |
| && ! TEST_HARD_REG_BIT (counted_for_groups, other)) |
| || |
| (j < FIRST_PSEUDO_REGISTER - 1 |
| && (other = j + 1, spill_reg_order[other] >= 0) |
| && TEST_HARD_REG_BIT (reg_class_contents[class], j) |
| && TEST_HARD_REG_BIT (reg_class_contents[class], other) |
| && HARD_REGNO_MODE_OK (j, group_mode[class]) |
| && ! TEST_HARD_REG_BIT (counted_for_nongroups, |
| other) |
| && ! TEST_HARD_REG_BIT (counted_for_groups, |
| other)))) |
| { |
| register enum reg_class *p; |
| |
| /* We have found one that will complete a group, |
| so count off one group as provided. */ |
| max_groups[class]--; |
| p = reg_class_superclasses[class]; |
| while (*p != LIM_REG_CLASSES) |
| { |
| if (group_size [(int) *p] <= group_size [class]) |
| max_groups[(int) *p]--; |
| p++; |
| } |
| |
| /* Indicate both these regs are part of a group. */ |
| SET_HARD_REG_BIT (counted_for_groups, j); |
| SET_HARD_REG_BIT (counted_for_groups, other); |
| break; |
| } |
| } |
| /* We can't complete a group, so start one. */ |
| #ifdef SMALL_REGISTER_CLASSES |
| /* Look for a pair neither of which is explicitly used. */ |
| if (SMALL_REGISTER_CLASSES && i == FIRST_PSEUDO_REGISTER) |
| for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
| { |
| int k; |
| j = potential_reload_regs[i]; |
| /* Verify that J+1 is a potential reload reg. */ |
| for (k = 0; k < FIRST_PSEUDO_REGISTER; k++) |
| if (potential_reload_regs[k] == j + 1) |
| break; |
| if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER |
| && k < FIRST_PSEUDO_REGISTER |
| && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0 |
| && TEST_HARD_REG_BIT (reg_class_contents[class], j) |
| && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1) |
| && HARD_REGNO_MODE_OK (j, group_mode[class]) |
| && ! TEST_HARD_REG_BIT (counted_for_nongroups, |
| j + 1) |
| && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1) |
| /* Reject J at this stage |
| if J+1 was explicitly used. */ |
| && ! regs_explicitly_used[j + 1]) |
| break; |
| } |
| #endif |
| /* Now try any group at all |
| whose registers are not in bad_spill_regs. */ |
| if (i == FIRST_PSEUDO_REGISTER) |
| for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
| { |
| int k; |
| j = potential_reload_regs[i]; |
| /* Verify that J+1 is a potential reload reg. */ |
| for (k = 0; k < FIRST_PSEUDO_REGISTER; k++) |
| if (potential_reload_regs[k] == j + 1) |
| break; |
| if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER |
| && k < FIRST_PSEUDO_REGISTER |
| && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0 |
| && TEST_HARD_REG_BIT (reg_class_contents[class], j) |
| && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1) |
| && HARD_REGNO_MODE_OK (j, group_mode[class]) |
| && ! TEST_HARD_REG_BIT (counted_for_nongroups, |
| j + 1) |
| && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)) |
| break; |
| } |
| |
| /* I should be the index in potential_reload_regs |
| of the new reload reg we have found. */ |
| |
| if (i >= FIRST_PSEUDO_REGISTER) |
| { |
| /* There are no groups left to spill. */ |
| spill_failure (max_groups_insn[class]); |
| failure = 1; |
| goto failed; |
| } |
| else |
| something_changed |
| |= new_spill_reg (i, class, max_needs, NULL_PTR, |
| global, dumpfile); |
| } |
| else |
| { |
| /* For groups of more than 2 registers, |
| look for a sufficient sequence of unspilled registers, |
| and spill them all at once. */ |
| for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
| { |
| int k; |
| |
| j = potential_reload_regs[i]; |
| if (j >= 0 |
| && j + group_size[class] <= FIRST_PSEUDO_REGISTER |
| && HARD_REGNO_MODE_OK (j, group_mode[class])) |
| { |
| /* Check each reg in the sequence. */ |
| for (k = 0; k < group_size[class]; k++) |
| if (! (spill_reg_order[j + k] < 0 |
| && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k) |
| && TEST_HARD_REG_BIT (reg_class_contents[class], j + k))) |
| break; |
| /* We got a full sequence, so spill them all. */ |
| if (k == group_size[class]) |
| { |
| register enum reg_class *p; |
| for (k = 0; k < group_size[class]; k++) |
| { |
| int idx; |
| SET_HARD_REG_BIT (counted_for_groups, j + k); |
| for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++) |
| if (potential_reload_regs[idx] == j + k) |
| break; |
| something_changed |
| |= new_spill_reg (idx, class, |
| max_needs, NULL_PTR, |
| global, dumpfile); |
| } |
| |
| /* We have found one that will complete a group, |
| so count off one group as provided. */ |
| max_groups[class]--; |
| p = reg_class_superclasses[class]; |
| while (*p != LIM_REG_CLASSES) |
| { |
| if (group_size [(int) *p] |
| <= group_size [class]) |
| max_groups[(int) *p]--; |
| p++; |
| } |
| break; |
| } |
| } |
| } |
| /* We couldn't find any registers for this reload. |
| Avoid going into an infinite loop. */ |
| if (i >= FIRST_PSEUDO_REGISTER) |
| { |
| /* There are no groups left. */ |
| spill_failure (max_groups_insn[class]); |
| failure = 1; |
| goto failed; |
| } |
| } |
| } |
| |
| /* Now similarly satisfy all need for single registers. */ |
| |
| while (max_needs[class] > 0 || max_nongroups[class] > 0) |
| { |
| /* If we spilled enough regs, but they weren't counted |
| against the non-group need, see if we can count them now. |
| If so, we can avoid some actual spilling. */ |
| if (max_needs[class] <= 0 && max_nongroups[class] > 0) |
| for (i = 0; i < n_spills; i++) |
| if (TEST_HARD_REG_BIT (reg_class_contents[class], |
| spill_regs[i]) |
| && !TEST_HARD_REG_BIT (counted_for_groups, |
| spill_regs[i]) |
| && !TEST_HARD_REG_BIT (counted_for_nongroups, |
| spill_regs[i]) |
| && max_nongroups[class] > 0) |
| { |
| register enum reg_class *p; |
| |
| SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]); |
| max_nongroups[class]--; |
| p = reg_class_superclasses[class]; |
| while (*p != LIM_REG_CLASSES) |
| max_nongroups[(int) *p++]--; |
| } |
| if (max_needs[class] <= 0 && max_nongroups[class] <= 0) |
| break; |
| |
| /* Consider the potential reload regs that aren't |
| yet in use as reload regs, in order of preference. |
| Find the most preferred one that's in this class. */ |
| |
| for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
| if (potential_reload_regs[i] >= 0 |
| && TEST_HARD_REG_BIT (reg_class_contents[class], |
| potential_reload_regs[i]) |
| /* If this reg will not be available for groups, |
| pick one that does not foreclose possible groups. |
| This is a kludge, and not very general, |
| but it should be sufficient to make the 386 work, |
| and the problem should not occur on machines with |
| more registers. */ |
| && (max_nongroups[class] == 0 |
| || possible_group_p (potential_reload_regs[i], max_groups))) |
| break; |
| |
| /* If we couldn't get a register, try to get one even if we |
| might foreclose possible groups. This may cause problems |
| later, but that's better than aborting now, since it is |
| possible that we will, in fact, be able to form the needed |
| group even with this allocation. */ |
| |
| if (i >= FIRST_PSEUDO_REGISTER |
| && (asm_noperands (max_needs[class] > 0 |
| ? max_needs_insn[class] |
| : max_nongroups_insn[class]) |
| < 0)) |
| for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
| if (potential_reload_regs[i] >= 0 |
| && TEST_HARD_REG_BIT (reg_class_contents[class], |
| potential_reload_regs[i])) |
| break; |
| |
| /* I should be the index in potential_reload_regs |
| of the new reload reg we have found. */ |
| |
| if (i >= FIRST_PSEUDO_REGISTER) |
| { |
| /* There are no possible registers left to spill. */ |
| spill_failure (max_needs[class] > 0 ? max_needs_insn[class] |
| : max_nongroups_insn[class]); |
| failure = 1; |
| goto failed; |
| } |
| else |
| something_changed |
| |= new_spill_reg (i, class, max_needs, max_nongroups, |
| global, dumpfile); |
| } |
| } |
| } |
| |
| /* If global-alloc was run, notify it of any register eliminations we have |
| done. */ |
| if (global) |
    for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
| if (ep->can_eliminate) |
| mark_elimination (ep->from, ep->to); |
| |
| /* Insert code to save and restore call-clobbered hard regs |
     around calls.  Tell it what mode to use so that we will process
| those insns in reload_as_needed if we have to. */ |
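
  /* The mode passed to save_call_clobbered_regs is apparently just a
     marker put on the save/restore insns it emits: it lets
     reload_as_needed know whether those insns may still need elimination
     processing (QImode) or a spill reg of caller_save_spill_class
     (HImode).  */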
| |
| if (caller_save_needed) |
| save_call_clobbered_regs (num_eliminable ? QImode |
| : caller_save_spill_class != NO_REGS ? HImode |
| : VOIDmode); |
| |
| /* If a pseudo has no hard reg, delete the insns that made the equivalence. |
| If that insn didn't set the register (i.e., it copied the register to |
| memory), just delete that insn instead of the equivalencing insn plus |
| anything now dead. If we call delete_dead_insn on that insn, we may |
     delete the insn that actually sets the register if the register dies
     there, which is incorrect.  */
| |
| for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++) |
| if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0 |
| && GET_CODE (reg_equiv_init[i]) != NOTE) |
| { |
| if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i]))) |
| delete_dead_insn (reg_equiv_init[i]); |
| else |
| { |
| PUT_CODE (reg_equiv_init[i], NOTE); |
| NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0; |
| NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED; |
| } |
| } |
| |
| /* Use the reload registers where necessary |
| by generating move instructions to move the must-be-register |
| values into or out of the reload registers. */ |
| |
| if (something_needs_reloads || something_needs_elimination |
| || (caller_save_needed && num_eliminable) |
| || caller_save_spill_class != NO_REGS) |
| reload_as_needed (first, global); |
| |
| /* If we were able to eliminate the frame pointer, show that it is no |
     longer live at the start of any basic block.  If it is live by
| virtue of being in a pseudo, that pseudo will be marked live |
| and hence the frame pointer will be known to be live via that |
| pseudo. */ |
| |
| if (! frame_pointer_needed) |
| for (i = 0; i < n_basic_blocks; i++) |
| CLEAR_REGNO_REG_SET (basic_block_live_at_start[i], |
| HARD_FRAME_POINTER_REGNUM); |
| |
| /* Come here (with failure set nonzero) if we can't get enough spill regs |
     and we decide not to abort because of it.  */
| failed: |
| |
| reload_in_progress = 0; |
| |
| /* Now eliminate all pseudo regs by modifying them into |
| their equivalent memory references. |
| The REG-rtx's for the pseudos are modified in place, |
| so all insns that used to refer to them now refer to memory. |
| |
| For a reg that has a reg_equiv_address, all those insns |
| were changed by reloading so that no insns refer to it any longer; |
| but the DECL_RTL of a variable decl may refer to it, |
| and if so this causes the debugging info to mention the variable. */ |
| |
| for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++) |
| { |
| rtx addr = 0; |
| int in_struct = 0; |
| if (reg_equiv_mem[i]) |
| { |
| addr = XEXP (reg_equiv_mem[i], 0); |
| in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]); |
| } |
| if (reg_equiv_address[i]) |
| addr = reg_equiv_address[i]; |
| if (addr) |
| { |
| if (reg_renumber[i] < 0) |
| { |
| rtx reg = regno_reg_rtx[i]; |
| XEXP (reg, 0) = addr; |
| REG_USERVAR_P (reg) = 0; |
| MEM_IN_STRUCT_P (reg) = in_struct; |
| PUT_CODE (reg, MEM); |
| } |
| else if (reg_equiv_mem[i]) |
| XEXP (reg_equiv_mem[i], 0) = addr; |
| } |
| } |
| |
| #ifdef PRESERVE_DEATH_INFO_REGNO_P |
| /* Make a pass over all the insns and remove death notes for things that |
| are no longer registers or no longer die in the insn (e.g., an input |
| and output pseudo being tied). */ |
| |
| for (insn = first; insn; insn = NEXT_INSN (insn)) |
| if (GET_RTX_CLASS (GET_CODE (insn)) == 'i') |
| { |
| rtx note, next; |
| |
| for (note = REG_NOTES (insn); note; note = next) |
| { |
| next = XEXP (note, 1); |
| if (REG_NOTE_KIND (note) == REG_DEAD |
| && (GET_CODE (XEXP (note, 0)) != REG |
| || reg_set_p (XEXP (note, 0), PATTERN (insn)))) |
| remove_note (insn, note); |
| } |
| } |
| #endif |
| |
| /* If we are doing stack checking, give a warning if this function's |
| frame size is larger than we expect. */ |
| if (flag_stack_check && ! STACK_CHECK_BUILTIN) |
| { |
| HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE; |
| |
| for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
| if (regs_ever_live[i] && ! fixed_regs[i] && call_used_regs[i]) |
| size += UNITS_PER_WORD; |
| |
| if (size > STACK_CHECK_MAX_FRAME_SIZE) |
| warning ("frame size too large for reliable stack checking"); |
| } |
| |
| /* Indicate that we no longer have known memory locations or constants. */ |
| reg_equiv_constant = 0; |
| reg_equiv_memory_loc = 0; |
| |
| if (real_known_ptr) |
| free (real_known_ptr); |
| if (real_at_ptr) |
| free (real_at_ptr); |
| |
| if (scratch_list) |
| free (scratch_list); |
| scratch_list = 0; |
| if (scratch_block) |
| free (scratch_block); |
| scratch_block = 0; |
| |
| CLEAR_HARD_REG_SET (used_spill_regs); |
| for (i = 0; i < n_spills; i++) |
| SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]); |
| |
| return failure; |
| } |
| |
| /* Nonzero if, after spilling reg REGNO for non-groups, |
| it will still be possible to find a group if we still need one. */ |
| |
| static int |
| possible_group_p (regno, max_groups) |
| int regno; |
| int *max_groups; |
| { |
| int i; |
| int class = (int) NO_REGS; |
| |
| for (i = 0; i < (int) N_REG_CLASSES; i++) |
| if (max_groups[i] > 0) |
| { |
| class = i; |
| break; |
| } |
| |
| if (class == (int) NO_REGS) |
| return 1; |
| |
| /* Consider each pair of consecutive registers. */ |
| for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++) |
| { |
| /* Ignore pairs that include reg REGNO. */ |
| if (i == regno || i + 1 == regno) |
| continue; |
| |
| /* Ignore pairs that are outside the class that needs the group. |
| ??? Here we fail to handle the case where two different classes |
| independently need groups. But this never happens with our |
| current machine descriptions. */ |
| if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i) |
| && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1))) |
| continue; |
| |
| /* A pair of consecutive regs we can still spill does the trick. */ |
| if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0 |
| && ! TEST_HARD_REG_BIT (bad_spill_regs, i) |
| && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)) |
| return 1; |
| |
| /* A pair of one already spilled and one we can spill does it |
| provided the one already spilled is not otherwise reserved. */ |
| if (spill_reg_order[i] < 0 |
| && ! TEST_HARD_REG_BIT (bad_spill_regs, i) |
| && spill_reg_order[i + 1] >= 0 |
| && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1) |
| && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1)) |
| return 1; |
| if (spill_reg_order[i + 1] < 0 |
| && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1) |
| && spill_reg_order[i] >= 0 |
| && ! TEST_HARD_REG_BIT (counted_for_groups, i) |
| && ! TEST_HARD_REG_BIT (counted_for_nongroups, i)) |
| return 1; |
| } |
| |
| return 0; |
| } |
| |
| /* Count any groups of CLASS that can be formed from the registers recently |
| spilled. */ |
| |
| static void |
| count_possible_groups (group_size, group_mode, max_groups, class) |
| int *group_size; |
| enum machine_mode *group_mode; |
| int *max_groups; |
| int class; |
| { |
| HARD_REG_SET new; |
| int i, j; |
| |
| /* Now find all consecutive groups of spilled registers |
| and mark each group off against the need for such groups. |
| But don't count them against ordinary need, yet. */ |
| |
| if (group_size[class] == 0) |
| return; |
| |
| CLEAR_HARD_REG_SET (new); |
| |
  /* Make a mask of all the regs that are spill regs in class CLASS.  */
| for (i = 0; i < n_spills; i++) |
| if (TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i]) |
| && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i]) |
| && ! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i])) |
| SET_HARD_REG_BIT (new, spill_regs[i]); |
| |
| /* Find each consecutive group of them. */ |
| for (i = 0; i < FIRST_PSEUDO_REGISTER && max_groups[class] > 0; i++) |
| if (TEST_HARD_REG_BIT (new, i) |
| && i + group_size[class] <= FIRST_PSEUDO_REGISTER |
| && HARD_REGNO_MODE_OK (i, group_mode[class])) |
| { |
| for (j = 1; j < group_size[class]; j++) |
| if (! TEST_HARD_REG_BIT (new, i + j)) |
| break; |
| |
| if (j == group_size[class]) |
| { |
| /* We found a group. Mark it off against this class's need for |
| groups, and against each superclass too. */ |
| register enum reg_class *p; |
| |
| max_groups[class]--; |
| p = reg_class_superclasses[class]; |
| while (*p != LIM_REG_CLASSES) |
| { |
| if (group_size [(int) *p] <= group_size [class]) |
| max_groups[(int) *p]--; |
| p++; |
| } |
| |
| /* Don't count these registers again. */ |
| for (j = 0; j < group_size[class]; j++) |
| SET_HARD_REG_BIT (counted_for_groups, i + j); |
| } |
| |
| /* Skip to the last reg in this group. When i is incremented above, |
| it will then point to the first reg of the next possible group. */ |
| i += j - 1; |
| } |
| } |
| |
| /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is |
| another mode that needs to be reloaded for the same register class CLASS. |
| If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail. |
| ALLOCATE_MODE will never be smaller than OTHER_MODE. |
| |
| This code used to also fail if any reg in CLASS allows OTHER_MODE but not |
| ALLOCATE_MODE. This test is unnecessary, because we will never try to put |
| something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this |
| causes unnecessary failures on machines requiring alignment of register |
| groups when the two modes are different sizes, because the larger mode has |
| more strict alignment rules than the smaller mode. */ |
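
/* For example, on a hypothetical machine where DImode must live in an
   even/odd register pair, every reg that allows DImode also allows
   SImode, so the test below passes; the reverse test would wrongly
   fail because odd regs allow SImode but not DImode.  */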
| |
| static int |
| modes_equiv_for_class_p (allocate_mode, other_mode, class) |
| enum machine_mode allocate_mode, other_mode; |
| enum reg_class class; |
| { |
| register int regno; |
| for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) |
| { |
| if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno) |
| && HARD_REGNO_MODE_OK (regno, allocate_mode) |
| && ! HARD_REGNO_MODE_OK (regno, other_mode)) |
| return 0; |
| } |
| return 1; |
| } |
| |
| /* Handle the failure to find a register to spill. |
| INSN should be one of the insns which needed this particular spill reg. */ |
| |
| static void |
| spill_failure (insn) |
| rtx insn; |
| { |
| if (asm_noperands (PATTERN (insn)) >= 0) |
| error_for_asm (insn, "`asm' needs too many reloads"); |
| else |
| fatal_insn ("Unable to find a register to spill.", insn); |
| } |
| |
| /* Add a new register to the tables of available spill-registers |
| (as well as spilling all pseudos allocated to the register). |
| I is the index of this register in potential_reload_regs. |
| CLASS is the regclass whose need is being satisfied. |
| MAX_NEEDS and MAX_NONGROUPS are the vectors of needs, |
| so that this register can count off against them. |
| MAX_NONGROUPS is 0 if this register is part of a group. |
| GLOBAL and DUMPFILE are the same as the args that `reload' got. */ |
| |
| static int |
| new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile) |
| int i; |
| int class; |
| int *max_needs; |
| int *max_nongroups; |
| int global; |
| FILE *dumpfile; |
| { |
| register enum reg_class *p; |
| int val; |
| int regno = potential_reload_regs[i]; |
| |
| if (i >= FIRST_PSEUDO_REGISTER) |
| abort (); /* Caller failed to find any register. */ |
| |
| if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno)) |
| fatal ("fixed or forbidden register was spilled.\n\ |
| This may be due to a compiler bug or to impossible asm\n\ |
| statements or clauses."); |
| |
| /* Make reg REGNO an additional reload reg. */ |
| |
| potential_reload_regs[i] = -1; |
| spill_regs[n_spills] = regno; |
| spill_reg_order[regno] = n_spills; |
| if (dumpfile) |
| fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]); |
| |
| /* Clear off the needs we just satisfied. */ |
| |
| max_needs[class]--; |
| p = reg_class_superclasses[class]; |
| while (*p != LIM_REG_CLASSES) |
| max_needs[(int) *p++]--; |
| |
| if (max_nongroups && max_nongroups[class] > 0) |
| { |
| SET_HARD_REG_BIT (counted_for_nongroups, regno); |
| max_nongroups[class]--; |
| p = reg_class_superclasses[class]; |
| while (*p != LIM_REG_CLASSES) |
| max_nongroups[(int) *p++]--; |
| } |
| |
| /* Spill every pseudo reg that was allocated to this reg |
| or to something that overlaps this reg. */ |
| |
| val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0); |
| |
| /* If there are some registers still to eliminate and this register |
| wasn't ever used before, additional stack space may have to be |
| allocated to store this register. Thus, we may have changed the offset |
| between the stack and frame pointers, so mark that something has changed. |
| (If new pseudos were spilled, thus requiring more space, VAL would have |
| been set non-zero by the call to spill_hard_reg above since additional |
   reloads may be needed in that case.)
| |
| One might think that we need only set VAL to 1 if this is a call-used |
| register. However, the set of registers that must be saved by the |
| prologue is not identical to the call-used set. For example, the |
| register used by the call insn for the return PC is a call-used register, |
| but must be saved by the prologue. */ |
| if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]]) |
| val = 1; |
| |
| regs_ever_live[spill_regs[n_spills]] = 1; |
| n_spills++; |
| |
| return val; |
| } |
| |
/* Delete an unneeded INSN and any previous insns whose sole purpose is loading
| data that is dead in INSN. */ |
| |
| static void |
| delete_dead_insn (insn) |
| rtx insn; |
| { |
| rtx prev = prev_real_insn (insn); |
| rtx prev_dest; |
| |
| /* If the previous insn sets a register that dies in our insn, delete it |
| too. */ |
| if (prev && GET_CODE (PATTERN (prev)) == SET |
| && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG) |
| && reg_mentioned_p (prev_dest, PATTERN (insn)) |
| && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))) |
| delete_dead_insn (prev); |
| |
| PUT_CODE (insn, NOTE); |
| NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED; |
| NOTE_SOURCE_FILE (insn) = 0; |
| } |
| |
| /* Modify the home of pseudo-reg I. |
| The new home is present in reg_renumber[I]. |
| |
| FROM_REG may be the hard reg that the pseudo-reg is being spilled from; |
| or it may be -1, meaning there is none or it is not relevant. |
| This is used so that all pseudos spilled from a given hard reg |
| can share one stack slot. */ |
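
/* Sharing is safe because pseudos spilled from the same hard reg were
   all allocated to that reg, so no two of them are live at the same
   time; hence one stack slot can hold them all.  */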
| |
| static void |
| alter_reg (i, from_reg) |
| register int i; |
| int from_reg; |
| { |
| /* When outputting an inline function, this can happen |
| for a reg that isn't actually used. */ |
| if (regno_reg_rtx[i] == 0) |
| return; |
| |
| /* If the reg got changed to a MEM at rtl-generation time, |
| ignore it. */ |
| if (GET_CODE (regno_reg_rtx[i]) != REG) |
| return; |
| |
| /* Modify the reg-rtx to contain the new hard reg |
| number or else to contain its pseudo reg number. */ |
| REGNO (regno_reg_rtx[i]) |
| = reg_renumber[i] >= 0 ? reg_renumber[i] : i; |
| |
| /* If we have a pseudo that is needed but has no hard reg or equivalent, |
| allocate a stack slot for it. */ |
| |
| if (reg_renumber[i] < 0 |
| && REG_N_REFS (i) > 0 |
| && reg_equiv_constant[i] == 0 |
| && reg_equiv_memory_loc[i] == 0) |
| { |
| register rtx x; |
| int inherent_size = PSEUDO_REGNO_BYTES (i); |
| int total_size = MAX (inherent_size, reg_max_ref_width[i]); |
| int adjust = 0; |
| |
| /* Each pseudo reg has an inherent size which comes from its own mode, |
| and a total size which provides room for paradoxical subregs |
| which refer to the pseudo reg in wider modes. |
| |
| We can use a slot already allocated if it provides both |
| enough inherent space and enough total space. |
| Otherwise, we allocate a new slot, making sure that it has no less |
	 inherent space, and no less total space than the previous slot.  */
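      /* For example, a QImode pseudo (inherent size 1) that is also
	 referenced as (subreg:SI (reg:QI N) 0) has, on a 32-bit target,
	 a total size of 4, so its slot must provide 4 bytes.  */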
| if (from_reg == -1) |
| { |
| /* No known place to spill from => no slot to reuse. */ |
| x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, |
| inherent_size == total_size ? 0 : -1); |
| if (BYTES_BIG_ENDIAN) |
| /* Cancel the big-endian correction done in assign_stack_local. |
| Get the address of the beginning of the slot. |
| This is so we can do a big-endian correction unconditionally |
| below. */ |
| adjust = inherent_size - total_size; |
| |
| RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]); |
| } |
| /* Reuse a stack slot if possible. */ |
| else if (spill_stack_slot[from_reg] != 0 |
| && spill_stack_slot_width[from_reg] >= total_size |
| && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg])) |
| >= inherent_size)) |
| x = spill_stack_slot[from_reg]; |
| /* Allocate a bigger slot. */ |
| else |
| { |
| /* Compute maximum size needed, both for inherent size |
| and for total size. */ |
| enum machine_mode mode = GET_MODE (regno_reg_rtx[i]); |
| rtx stack_slot; |
| if (spill_stack_slot[from_reg]) |
| { |
| if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg])) |
| > inherent_size) |
| mode = GET_MODE (spill_stack_slot[from_reg]); |
| if (spill_stack_slot_width[from_reg] > total_size) |
| total_size = spill_stack_slot_width[from_reg]; |
| } |
| /* Make a slot with that size. */ |
| x = assign_stack_local (mode, total_size, |
| inherent_size == total_size ? 0 : -1); |
| stack_slot = x; |
| if (BYTES_BIG_ENDIAN) |
| { |
| /* Cancel the big-endian correction done in assign_stack_local. |
| Get the address of the beginning of the slot. |
| This is so we can do a big-endian correction unconditionally |
| below. */ |
| adjust = GET_MODE_SIZE (mode) - total_size; |
| if (adjust) |
| stack_slot = gen_rtx (MEM, mode_for_size (total_size |
| * BITS_PER_UNIT, |
| MODE_INT, 1), |
| plus_constant (XEXP (x, 0), adjust)); |
| } |
| spill_stack_slot[from_reg] = stack_slot; |
| spill_stack_slot_width[from_reg] = total_size; |
| } |
| |
| /* On a big endian machine, the "address" of the slot |
| is the address of the low part that fits its inherent mode. */ |
| if (BYTES_BIG_ENDIAN && inherent_size < total_size) |
| adjust += (total_size - inherent_size); |
| |
| /* If we have any adjustment to make, or if the stack slot is the |
| wrong mode, make a new stack slot. */ |
| if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i])) |
| { |
| x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]), |
| plus_constant (XEXP (x, 0), adjust)); |
| RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]); |
| } |
| |
| /* Save the stack slot for later. */ |
| reg_equiv_memory_loc[i] = x; |
| } |
| } |
| |
| /* Mark the slots in regs_ever_live for the hard regs |
| used by pseudo-reg number REGNO. */ |
| |
| void |
| mark_home_live (regno) |
| int regno; |
| { |
| register int i, lim; |
| i = reg_renumber[regno]; |
| if (i < 0) |
| return; |
| lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno)); |
| while (i < lim) |
| regs_ever_live[i++] = 1; |
| } |
| |
| /* Mark the registers used in SCRATCH as being live. */ |
| |
| static void |
| mark_scratch_live (scratch) |
| rtx scratch; |
| { |
| register int i; |
| int regno = REGNO (scratch); |
| int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch)); |
| |
| for (i = regno; i < lim; i++) |
| regs_ever_live[i] = 1; |
| } |
| |
| /* This function handles the tracking of elimination offsets around branches. |
| |
| X is a piece of RTL being scanned. |
| |
| INSN is the insn that it came from, if any. |
| |
| INITIAL_P is non-zero if we are to set the offset to be the initial |
| offset and zero if we are setting the offset of the label to be the |
| current offset. */ |
| |
| static void |
| set_label_offsets (x, insn, initial_p) |
| rtx x; |
| rtx insn; |
| int initial_p; |
| { |
| enum rtx_code code = GET_CODE (x); |
| rtx tem; |
| int i; |
| struct elim_table *p; |
| |
| switch (code) |
| { |
| case LABEL_REF: |
| if (LABEL_REF_NONLOCAL_P (x)) |
| return; |
| |
| x = XEXP (x, 0); |
| |
| /* ... fall through ... */ |
| |
| case CODE_LABEL: |
| /* If we know nothing about this label, set the desired offsets. Note |
| that this sets the offset at a label to be the offset before a label |
| if we don't know anything about the label. This is not correct for |
| the label after a BARRIER, but is the best guess we can make. If |
| we guessed wrong, we will suppress an elimination that might have |
| been possible had we been able to guess correctly. */ |
| |
| if (! offsets_known_at[CODE_LABEL_NUMBER (x)]) |
| { |
| for (i = 0; i < NUM_ELIMINABLE_REGS; i++) |
| offsets_at[CODE_LABEL_NUMBER (x)][i] |
| = (initial_p ? reg_eliminate[i].initial_offset |
| : reg_eliminate[i].offset); |
| offsets_known_at[CODE_LABEL_NUMBER (x)] = 1; |
| } |
| |
| /* Otherwise, if this is the definition of a label and it is |
| preceded by a BARRIER, set our offsets to the known offset of |
| that label. */ |
| |
| else if (x == insn |
| && (tem = prev_nonnote_insn (insn)) != 0 |
| && GET_CODE (tem) == BARRIER) |
| { |
| num_not_at_initial_offset = 0; |
| for (i = 0; i < NUM_ELIMINABLE_REGS; i++) |
| { |
| reg_eliminate[i].offset = reg_eliminate[i].previous_offset |
| = offsets_at[CODE_LABEL_NUMBER (x)][i]; |
| if (reg_eliminate[i].can_eliminate |
| && (reg_eliminate[i].offset |
| != reg_eliminate[i].initial_offset)) |
| num_not_at_initial_offset++; |
| } |
| } |
| |
| else |
| /* If neither of the above cases is true, compare each offset |
| with those previously recorded and suppress any eliminations |
| where the offsets disagree. */ |
| |
| for (i = 0; i < NUM_ELIMINABLE_REGS; i++) |
| if (offsets_at[CODE_LABEL_NUMBER (x)][i] |
| != (initial_p ? reg_eliminate[i].initial_offset |
| : reg_eliminate[i].offset)) |
| reg_eliminate[i].can_eliminate = 0; |
| |
| return; |
| |
| case JUMP_INSN: |
| set_label_offsets (PATTERN (insn), insn, initial_p); |
| |
| /* ... fall through ... */ |
| |
| case INSN: |
| case CALL_INSN: |
| /* Any labels mentioned in REG_LABEL notes can be branched to indirectly |
| and hence must have all eliminations at their initial offsets. */ |
| for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1)) |
| if (REG_NOTE_KIND (tem) == REG_LABEL) |
| set_label_offsets (XEXP (tem, 0), insn, 1); |
| return; |
| |
| case ADDR_VEC: |
| case ADDR_DIFF_VEC: |
| /* Each of the labels in the address vector must be at their initial |
     offsets.  We want the first field for ADDR_VEC and the second
| field for ADDR_DIFF_VEC. */ |
| |
| for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++) |
| set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i), |
| insn, initial_p); |
| return; |
| |
| case SET: |
| /* We only care about setting PC. If the source is not RETURN, |
| IF_THEN_ELSE, or a label, disable any eliminations not at |
| their initial offsets. Similarly if any arm of the IF_THEN_ELSE |
| isn't one of those possibilities. For branches to a label, |
| call ourselves recursively. |
| |
| Note that this can disable elimination unnecessarily when we have |
| a non-local goto since it will look like a non-constant jump to |
| someplace in the current function. This isn't a significant |
| problem since such jumps will normally be when all elimination |
| pairs are back to their initial offsets. */ |
| |
| if (SET_DEST (x) != pc_rtx) |
| return; |
| |
| switch (GET_CODE (SET_SRC (x))) |
| { |
| case PC: |
| case RETURN: |
| return; |
| |
| case LABEL_REF: |
| set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p); |
| return; |
| |
| case IF_THEN_ELSE: |
| tem = XEXP (SET_SRC (x), 1); |
| if (GET_CODE (tem) == LABEL_REF) |
| set_label_offsets (XEXP (tem, 0), insn, initial_p); |
| else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN) |
| break; |
| |
| tem = XEXP (SET_SRC (x), 2); |
| if (GET_CODE (tem) == LABEL_REF) |
| set_label_offsets (XEXP (tem, 0), insn, initial_p); |
| else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN) |
| break; |
| return; |
| } |
| |
| /* If we reach here, all eliminations must be at their initial |
| offset because we are doing a jump to a variable address. */ |
      for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
| if (p->offset != p->initial_offset) |
| p->can_eliminate = 0; |
| } |
| } |
| |
/* Used for communication between the next two functions to properly share
| the vector for an ASM_OPERANDS. */ |
| |
| static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec; |
| |
| /* Scan X and replace any eliminable registers (such as fp) with a |
| replacement (such as sp), plus an offset. |
| |
| MEM_MODE is the mode of an enclosing MEM. We need this to know how |
| much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a |
| MEM, we are allowed to replace a sum of a register and the constant zero |
| with the register, which we cannot do outside a MEM. In addition, we need |
| to record the fact that a register is referenced outside a MEM. |
| |
| If INSN is an insn, it is the insn containing X. If we replace a REG |
| in a SET_DEST with an equivalent MEM and INSN is non-zero, write a |
   CLOBBER of the pseudo after INSN so find_equiv_regs will know
| that the REG is being modified. |
| |
| Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST). |
| That's used when we eliminate in expressions stored in notes. |
   This means we do not set ref_outside_mem even if the reference
| is outside of MEMs. |
| |
| If we see a modification to a register we know about, take the |
| appropriate action (see case SET, below). |
| |
   REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
| replacements done assuming all offsets are at their initial values. If |
| they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we |
| encounter, return the actual location so that find_reloads will do |
| the proper thing. */ |
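
/* A sketch of the common case: with an fp -> sp elimination whose
   previous_offset is 12, (mem:SI (plus (reg fp) (const_int 4))) is
   rewritten here as (mem:SI (plus (reg sp) (const_int 16))).  */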
| |
| rtx |
| eliminate_regs (x, mem_mode, insn, storing) |
| rtx x; |
| enum machine_mode mem_mode; |
| rtx insn; |
| int storing; |
| { |
| enum rtx_code code = GET_CODE (x); |
| struct elim_table *ep; |
| int regno; |
| rtx new; |
| int i, j; |
| char *fmt; |
| int copied = 0; |
| |
| switch (code) |
| { |
| case CONST_INT: |
| case CONST_DOUBLE: |
| case CONST: |
| case SYMBOL_REF: |
| case CODE_LABEL: |
| case PC: |
| case CC0: |
| case ASM_INPUT: |
| case ADDR_VEC: |
| case ADDR_DIFF_VEC: |
| case RETURN: |
| return x; |
| |
| case REG: |
| regno = REGNO (x); |
| |
| /* First handle the case where we encounter a bare register that |
| is eliminable. Replace it with a PLUS. */ |
| if (regno < FIRST_PSEUDO_REGISTER) |
| { |
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
| ep++) |
| if (ep->from_rtx == x && ep->can_eliminate) |
| { |
| if (! mem_mode |
| /* Refs inside notes don't count for this purpose. */ |
| && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST |
| || GET_CODE (insn) == INSN_LIST))) |
| ep->ref_outside_mem = 1; |
| return plus_constant (ep->to_rtx, ep->previous_offset); |
| } |
| |
| } |
| else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno] |
| && (reg_equiv_address[regno] || num_not_at_initial_offset)) |
| { |
| /* In this case, find_reloads would attempt to either use an |
| incorrect address (if something is not at its initial offset) |
	     or substitute a replaced address into an insn (which loses
| if the offset is changed by some later action). So we simply |
| return the replaced stack slot (assuming it is changed by |
| elimination) and ignore the fact that this is actually a |
| reference to the pseudo. Ensure we make a copy of the |
| address in case it is shared. */ |
| new = eliminate_regs (reg_equiv_memory_loc[regno], |
| mem_mode, insn, 0); |
| if (new != reg_equiv_memory_loc[regno]) |
| { |
| cannot_omit_stores[regno] = 1; |
| return copy_rtx (new); |
| } |
| } |
| return x; |
| |
| case PLUS: |
| /* If this is the sum of an eliminable register and a constant, rework |
| the sum. */ |
| if (GET_CODE (XEXP (x, 0)) == REG |
| && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER |
| && CONSTANT_P (XEXP (x, 1))) |
| { |
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
| ep++) |
| if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate) |
| { |
| if (! mem_mode |
| /* Refs inside notes don't count for this purpose. */ |
| && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST |
| || GET_CODE (insn) == INSN_LIST))) |
| ep->ref_outside_mem = 1; |
| |
| /* The only time we want to replace a PLUS with a REG (this |
| occurs when the constant operand of the PLUS is the negative |
| of the offset) is when we are inside a MEM. We won't want |
| to do so at other times because that would change the |
| structure of the insn in a way that reload can't handle. |
| We special-case the commonest situation in |
| eliminate_regs_in_insn, so just replace a PLUS with a |
| PLUS here, unless inside a MEM. */ |
| if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT |
| && INTVAL (XEXP (x, 1)) == - ep->previous_offset) |
| return ep->to_rtx; |
| else |
| return gen_rtx (PLUS, Pmode, ep->to_rtx, |
| plus_constant (XEXP (x, 1), |
| ep->previous_offset)); |
| } |
| |
| /* If the register is not eliminable, we are done since the other |
| operand is a constant. */ |
| return x; |
| } |
| |
| /* If this is part of an address, we want to bring any constant to the |
| outermost PLUS. We will do this by doing register replacement in |
| our operands and seeing if a constant shows up in one of them. |
| |
| We assume here this is part of an address (or a "load address" insn) |
| since an eliminable register is not likely to appear in any other |
| context. |
| |
| If we have (plus (eliminable) (reg)), we want to produce |
	 (plus (plus (replacement) (reg)) (const)).  If this was part of a
| normal add insn, (plus (replacement) (reg)) will be pushed as a |
| reload. This is the desired action. */ |
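
      /* For instance, with fp -> sp at previous_offset 8,
	 (plus (reg fp) (reg 3)) becomes, via form_sum,
	 (plus (plus (reg sp) (reg 3)) (const_int 8)).  */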
| |
| { |
| rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0); |
| rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn, 0); |
| |
| if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)) |
| { |
| /* If one side is a PLUS and the other side is a pseudo that |
| didn't get a hard register but has a reg_equiv_constant, |
| we must replace the constant here since it may no longer |
| be in the position of any operand. */ |
| if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG |
| && REGNO (new1) >= FIRST_PSEUDO_REGISTER |
| && reg_renumber[REGNO (new1)] < 0 |
| && reg_equiv_constant != 0 |
| && reg_equiv_constant[REGNO (new1)] != 0) |
| new1 = reg_equiv_constant[REGNO (new1)]; |
| else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG |
| && REGNO (new0) >= FIRST_PSEUDO_REGISTER |
| && reg_renumber[REGNO (new0)] < 0 |
| && reg_equiv_constant[REGNO (new0)] != 0) |
| new0 = reg_equiv_constant[REGNO (new0)]; |
| |
| new = form_sum (new0, new1); |
| |
| /* As above, if we are not inside a MEM we do not want to |
| turn a PLUS into something else. We might try to do so here |
| for an addition of 0 if we aren't optimizing. */ |
| if (! mem_mode && GET_CODE (new) != PLUS) |
| return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx); |
| else |
| return new; |
| } |
| } |
| return x; |
| |
| case MULT: |
| /* If this is the product of an eliminable register and a |
| constant, apply the distribute law and move the constant out |
| so that we have (plus (mult ..) ..). This is needed in order |
| to keep load-address insns valid. This case is pathological. |
| We ignore the possibility of overflow here. */ |
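      /* E.g., with fp -> sp at previous_offset 8,
	 (mult (reg fp) (const_int 4)) becomes
	 (plus (mult (reg sp) (const_int 4)) (const_int 32)).  */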
| if (GET_CODE (XEXP (x, 0)) == REG |
| && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER |
| && GET_CODE (XEXP (x, 1)) == CONST_INT) |
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
| ep++) |
| if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate) |
| { |
| if (! mem_mode |
| /* Refs inside notes don't count for this purpose. */ |
| && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST |
| || GET_CODE (insn) == INSN_LIST))) |
| ep->ref_outside_mem = 1; |
| |
| return |
| plus_constant (gen_rtx (MULT, Pmode, ep->to_rtx, XEXP (x, 1)), |
| ep->previous_offset * INTVAL (XEXP (x, 1))); |
| } |
| |
| /* ... fall through ... */ |
| |
| case CALL: |
| case COMPARE: |
| case MINUS: |
| case DIV: case UDIV: |
| case MOD: case UMOD: |
| case AND: case IOR: case XOR: |
| case ROTATERT: case ROTATE: |
| case ASHIFTRT: case LSHIFTRT: case ASHIFT: |
| case NE: case EQ: |
| case GE: case GT: case GEU: case GTU: |
| case LE: case LT: case LEU: case LTU: |
| { |
| rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0); |
| rtx new1 |
| = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn, 0) : 0; |
| |
| if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)) |
| return gen_rtx (code, GET_MODE (x), new0, new1); |
| } |
| return x; |
| |
| case EXPR_LIST: |
| /* If we have something in XEXP (x, 0), the usual case, eliminate it. */ |
| if (XEXP (x, 0)) |
| { |
| new = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0); |
| if (new != XEXP (x, 0)) |
| x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1)); |
| } |
| |
| /* ... fall through ... */ |
| |
| case INSN_LIST: |
| /* Now do eliminations in the rest of the chain. If this was |
| an EXPR_LIST, this might result in allocating more memory than is |
| strictly needed, but it simplifies the code. */ |
| if (XEXP (x, 1)) |
| { |
| new = eliminate_regs (XEXP (x, 1), mem_mode, insn, 0); |
| if (new != XEXP (x, 1)) |
| return gen_rtx (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new); |
| } |
| return x; |
| |
| case PRE_INC: |
| case POST_INC: |
| case PRE_DEC: |
| case POST_DEC: |
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
| if (ep->to_rtx == XEXP (x, 0)) |
| { |
| int size = GET_MODE_SIZE (mem_mode); |
| |
| /* If more bytes than MEM_MODE are pushed, account for them. */ |
| #ifdef PUSH_ROUNDING |
| if (ep->to_rtx == stack_pointer_rtx) |
| size = PUSH_ROUNDING (size); |
| #endif |
| if (code == PRE_DEC || code == POST_DEC) |
| ep->offset += size; |
| else |
| ep->offset -= size; |
| } |
| |
| /* Fall through to generic unary operation case. */ |
| case STRICT_LOW_PART: |
| case NEG: case NOT: |
| case SIGN_EXTEND: case ZERO_EXTEND: |
| case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE: |
| case FLOAT: case FIX: |
| case UNSIGNED_FIX: case UNSIGNED_FLOAT: |
| case ABS: |
| case SQRT: |
| case FFS: |
| new = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0); |
| if (new != XEXP (x, 0)) |
| return gen_rtx (code, GET_MODE (x), new); |
| return x; |
| |
| case SUBREG: |
| /* Similar to above processing, but preserve SUBREG_WORD. |
| Convert (subreg (mem)) to (mem) if not paradoxical. |
| Also, if we have a non-paradoxical (subreg (pseudo)) and the |
| pseudo didn't get a hard reg, we must replace this with the |
| eliminated version of the memory location because push_reloads |
| may do the replacement in certain circumstances. */ |
| if (GET_CODE (SUBREG_REG (x)) == REG |
| && (GET_MODE_SIZE (GET_MODE (x)) |
| <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))) |
| && reg_equiv_memory_loc != 0 |
| && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0) |
| { |
| new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))], |
| mem_mode, insn, 0); |
| |
| /* If we didn't change anything, we must retain the pseudo. */ |
| if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))]) |
| new = SUBREG_REG (x); |
| else |
| { |
|