| /* Reload pseudo regs into hard regs for insns that require hard regs. |
| Copyright (C) 1987-2022 Free Software Foundation, Inc. |
| |
| This file is part of GCC. |
| |
| GCC is free software; you can redistribute it and/or modify it under |
| the terms of the GNU General Public License as published by the Free |
| Software Foundation; either version 3, or (at your option) any later |
| version. |
| |
| GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
| WARRANTY; without even the implied warranty of MERCHANTABILITY or |
| FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
| for more details. |
| |
| You should have received a copy of the GNU General Public License |
| along with GCC; see the file COPYING3. If not see |
| <http://www.gnu.org/licenses/>. */ |
| |
| #include "config.h" |
| #include "system.h" |
| #include "coretypes.h" |
| #include "backend.h" |
| #include "target.h" |
| #include "rtl.h" |
| #include "tree.h" |
| #include "predict.h" |
| #include "df.h" |
| #include "memmodel.h" |
| #include "tm_p.h" |
| #include "optabs.h" |
| #include "regs.h" |
| #include "ira.h" |
| #include "recog.h" |
| |
| #include "rtl-error.h" |
| #include "expr.h" |
| #include "addresses.h" |
| #include "cfgrtl.h" |
| #include "cfgbuild.h" |
| #include "reload.h" |
| #include "except.h" |
| #include "dumpfile.h" |
| #include "rtl-iter.h" |
| #include "function-abi.h" |
| |
| /* This file contains the reload pass of the compiler, which is |
| run after register allocation has been done. It checks that |
| each insn is valid (operands required to be in registers really |
| are in registers of the proper class) and fixes up invalid ones |
| by copying values temporarily into registers for the insns |
| that need them. |
| |
| The results of register allocation are described by the vector |
| reg_renumber; the insns still contain pseudo regs, but reg_renumber |
| can be used to find which hard reg, if any, a pseudo reg is in. |
| |
| The technique we always use is to free up a few hard regs that are |
| called ``reload regs'', and for each place where a pseudo reg |
| must be in a hard reg, copy it temporarily into one of the reload regs. |
| |
| Reload regs are allocated locally for every instruction that needs |
| reloads. When there are pseudos which are allocated to a register that |
| has been chosen as a reload reg, such pseudos must be ``spilled''. |
| This means that they go to other hard regs, or to stack slots if no other |
| available hard regs can be found. Spilling can invalidate more |
| insns, requiring additional need for reloads, so we must keep checking |
| until the process stabilizes. |
| |
| For machines with different classes of registers, we must keep track |
| of the register class needed for each reload, and make sure that |
| we allocate enough reload registers of each class. |
| |
| The file reload.cc contains the code that checks one insn for |
| validity and reports the reloads that it needs. This file |
| is in charge of scanning the entire rtl code, accumulating the |
| reload needs, spilling, assigning reload registers to use for |
| fixing up each insn, and generating the new insns to copy values |
| into the reload registers. */ |
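| |
| /* As an illustration (a sketch only: the register numbers, stack slot |
| and frame-pointer offset below are invented for the example), suppose |
| pseudo 100 received no hard reg and lives in a stack slot, yet appears |
| in an insn whose constraints require a register operand: |
| |
| (set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 4))) |
| |
| Reload picks a reload reg, say hard reg 0, and rewrites this as |
| |
| (set (reg:SI 0) (mem:SI (plus:SI (reg:SI FP) (const_int -8)))) |
| (set (reg:SI 0) (plus:SI (reg:SI 0) (const_int 4))) |
| (set (mem:SI (plus:SI (reg:SI FP) (const_int -8))) (reg:SI 0)) |
| |
| If hard reg 0 was allocated to some pseudo, that pseudo is spilled as |
| described above. */ |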
| |
| struct target_reload default_target_reload; |
| #if SWITCHABLE_TARGET |
| struct target_reload *this_target_reload = &default_target_reload; |
| #endif |
| |
| #define spill_indirect_levels \ |
| (this_target_reload->x_spill_indirect_levels) |
| |
| /* During reload_as_needed, element N contains a REG rtx for the hard reg |
| into which reg N has been reloaded (perhaps for a previous insn). */ |
| static rtx *reg_last_reload_reg; |
| |
| /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn |
| for an output reload that stores into reg N. */ |
| static regset_head reg_has_output_reload; |
| |
| /* Indicates which hard regs are reload-registers for an output reload |
| in the current insn. */ |
| static HARD_REG_SET reg_is_output_reload; |
| |
| /* Widest mode in which each pseudo reg is referred to (via subreg). */ |
| static machine_mode *reg_max_ref_mode; |
| |
| /* Vector to remember old contents of reg_renumber before spilling. */ |
| static short *reg_old_renumber; |
| |
| /* During reload_as_needed, element N contains the last pseudo regno reloaded |
| into hard register N. If that pseudo reg occupied more than one register, |
| reg_reloaded_contents points to that pseudo for each spill register in |
| use; all of these must remain set for an inheritance to occur. */ |
| static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER]; |
| |
| /* During reload_as_needed, element N contains the insn for which |
| hard register N was last used. Its contents are significant only |
| when reg_reloaded_valid is set for this register. */ |
| static rtx_insn *reg_reloaded_insn[FIRST_PSEUDO_REGISTER]; |
| |
| /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid. */ |
| static HARD_REG_SET reg_reloaded_valid; |
| /* Indicate if the register was dead at the end of the reload. |
| This is only valid if reg_reloaded_contents is set and valid. */ |
| static HARD_REG_SET reg_reloaded_dead; |
| |
| /* Number of spill-regs so far; number of valid elements of spill_regs. */ |
| static int n_spills; |
| |
| /* In parallel with spill_regs, contains REG rtx's for those regs. |
| Holds the last rtx used for any given reg, or 0 if it has never |
| been used for spilling yet. This rtx is reused, provided it has |
| the proper mode. */ |
| static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER]; |
| |
| /* In parallel with spill_regs, contains nonzero for a spill reg |
| that was stored after the last time it was used. |
| The precise value is the insn generated to do the store. */ |
| static rtx_insn *spill_reg_store[FIRST_PSEUDO_REGISTER]; |
| |
| /* This is the register that was stored with spill_reg_store. This is a |
| copy of reload_out / reload_out_reg when the value was stored; if |
| reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg. */ |
| static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER]; |
| |
| /* This table is the inverse mapping of spill_regs: |
| indexed by hard reg number, |
| it contains the position of that reg in spill_regs, |
| or -1 for something that is not in spill_regs. |
| |
| ?!? This is no longer accurate. */ |
| static short spill_reg_order[FIRST_PSEUDO_REGISTER]; |
| |
| /* This reg set indicates registers that can't be used as spill registers for |
| the currently processed insn. These are the hard registers which are live |
| during the insn, but not allocated to pseudos, as well as fixed |
| registers. */ |
| static HARD_REG_SET bad_spill_regs; |
| |
| /* These are the hard registers that can't be used as spill register for any |
| insn. This includes registers used for user variables and registers that |
| we can't eliminate. A register that appears in this set also can't be used |
| to retry register allocation. */ |
| static HARD_REG_SET bad_spill_regs_global; |
| |
| /* Describes order of use of registers for reloading |
| of spilled pseudo-registers. `n_spills' is the number of |
| elements that are actually valid; new ones are added at the end. |
| |
| Both spill_regs and spill_reg_order are used on two occasions: |
| once during find_reload_regs, where they keep track of the spill registers |
| for a single insn, but also during reload_as_needed where they show all |
| the registers ever used by reload. For the latter case, the information |
| is calculated during finish_spills. */ |
| static short spill_regs[FIRST_PSEUDO_REGISTER]; |
| |
| /* This vector of reg sets indicates, for each pseudo, which hard registers |
| may not be used for retrying global allocation because the register was |
| formerly spilled from one of them. If we allowed reallocating a pseudo to |
| a register that it was already allocated to, reload might not |
| terminate. */ |
| static HARD_REG_SET *pseudo_previous_regs; |
| |
| /* This vector of reg sets indicates, for each pseudo, which hard |
| registers may not be used for retrying global allocation because they |
| are used as spill registers during one of the insns in which the |
| pseudo is live. */ |
| static HARD_REG_SET *pseudo_forbidden_regs; |
| |
| /* All hard regs that have been used as spill registers for any insn are |
| marked in this set. */ |
| static HARD_REG_SET used_spill_regs; |
| |
| /* Index of last register assigned as a spill register. We allocate in |
| a round-robin fashion. */ |
| static int last_spill_reg; |
| |
| /* Record the stack slot for each spilled hard register. */ |
| static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER]; |
| |
| /* Width allocated so far for that stack slot. */ |
| static poly_uint64_pod spill_stack_slot_width[FIRST_PSEUDO_REGISTER]; |
| |
| /* Record which pseudos needed to be spilled. */ |
| static regset_head spilled_pseudos; |
| |
| /* Record which pseudos changed their allocation in finish_spills. */ |
| static regset_head changed_allocation_pseudos; |
| |
| /* Used for communication between order_regs_for_reload and count_pseudo. |
| Used to avoid counting one pseudo twice. */ |
| static regset_head pseudos_counted; |
| |
| /* First uid used by insns created by reload in this function. |
| Used in find_equiv_reg. */ |
| int reload_first_uid; |
| |
| /* Flag set by local-alloc or global-alloc if anything is live in |
| a call-clobbered reg across calls. */ |
| int caller_save_needed; |
| |
| /* Set to 1 while reload_as_needed is operating. |
| Required by some machines to handle any generated moves differently. */ |
| int reload_in_progress = 0; |
| |
| /* This obstack is used for allocation of rtl during register elimination. |
| The allocated storage can be freed once find_reloads has processed the |
| insn. */ |
| static struct obstack reload_obstack; |
| |
| /* Points to the beginning of the reload_obstack. All insn_chain structures |
| are allocated first. */ |
| static char *reload_startobj; |
| |
| /* The point after all insn_chain structures. Used to quickly deallocate |
| memory allocated in copy_reloads during calculate_needs_all_insns. */ |
| static char *reload_firstobj; |
| |
| /* This points before all local rtl generated by register elimination. |
| Used to quickly free all memory after processing one insn. */ |
| static char *reload_insn_firstobj; |
| |
| /* List of insn_chain structures, one for every insn that reload needs to |
| examine. */ |
| class insn_chain *reload_insn_chain; |
| |
| /* TRUE if we potentially left dead insns in the insn stream and want to |
| run DCE immediately after reload, FALSE otherwise. */ |
| static bool need_dce; |
| |
| /* List of all insns needing reloads. */ |
| static class insn_chain *insns_need_reload; |
| |
| /* This structure is used to record information about register eliminations. |
| Each array entry describes one possible way of eliminating a register |
| in favor of another. If there is more than one way of eliminating a |
| particular register, the most preferred should be specified first. */ |
| |
| struct elim_table |
| { |
| int from; /* Register number to be eliminated. */ |
| int to; /* Register number used as replacement. */ |
| poly_int64_pod initial_offset; /* Initial difference between values. */ |
| int can_eliminate; /* Nonzero if this elimination can be done. */ |
| int can_eliminate_previous; /* Value returned by TARGET_CAN_ELIMINATE |
| target hook in previous scan over insns |
| made by reload. */ |
| poly_int64_pod offset; /* Current offset between the two regs. */ |
| poly_int64_pod previous_offset; /* Offset at end of previous insn. */ |
| int ref_outside_mem; /* "to" has been referenced outside a MEM. */ |
| rtx from_rtx; /* REG rtx for the register to be eliminated. |
| We cannot simply compare the number since |
| we might then spuriously replace a hard |
| register corresponding to a pseudo |
| assigned to the reg to be eliminated. */ |
| rtx to_rtx; /* REG rtx for the replacement. */ |
| }; |
| |
| static struct elim_table *reg_eliminate = 0; |
| |
| /* This is an intermediate structure to initialize the table. It has |
| exactly the members provided by ELIMINABLE_REGS. */ |
| static const struct elim_table_1 |
| { |
| const int from; |
| const int to; |
| } reg_eliminate_1[] = |
| |
| ELIMINABLE_REGS; |
| |
| #define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1) |
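| |
| /* For reference, a typical target defines ELIMINABLE_REGS along these |
| lines (a sketch, not any particular target's actual definition): |
| |
| #define ELIMINABLE_REGS \ |
| {{ ARG_POINTER_REGNUM, STACK_POINTER_REGNUM }, \ |
| { ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM }, \ |
| { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM }} |
| |
| Each pair supplies the FROM and TO members above; when several entries |
| share a FROM register, the most preferred elimination comes first. */ |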
| |
| /* Record the number of pending eliminations that have an offset not equal |
| to their initial offset. If nonzero, we use a new copy of each |
| replacement result in any insns encountered. */ |
| int num_not_at_initial_offset; |
| |
| /* Count the number of registers that we may be able to eliminate. */ |
| static int num_eliminable; |
| /* And the number of registers that are equivalent to a constant that |
| can be eliminated to frame_pointer / arg_pointer + constant. */ |
| static int num_eliminable_invariants; |
| |
| /* For each label, we record the offset of each elimination. If we reach |
| a label by more than one path and an offset differs, we cannot do the |
| elimination. This information is indexed by the difference of the |
| number of the label and the first label number. We can't offset the |
| pointer itself as this can cause problems on machines with segmented |
| memory. The first table is an array of flags that records whether we |
| have yet encountered a label and the second table is an array of arrays, |
| one entry in the latter array for each elimination. */ |
| |
| static int first_label_num; |
| static char *offsets_known_at; |
| static poly_int64_pod (*offsets_at)[NUM_ELIMINABLE_REGS]; |
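| |
| /* A sketch of the lookup this indexing scheme implies, for a |
| hypothetical CODE_LABEL rtx LABEL (compare set_offsets_for_label): |
| |
| int idx = CODE_LABEL_NUMBER (label) - first_label_num; |
| if (offsets_known_at[idx]) |
| for (unsigned i = 0; i < NUM_ELIMINABLE_REGS; i++) |
| ... use offsets_at[idx][i] ... */ |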
| |
| vec<reg_equivs_t, va_gc> *reg_equivs; |
| |
| /* Stack of addresses where an rtx has been changed. We can undo the |
| changes by popping items off the stack and restoring the original |
| value at each location. |
| |
| We use this simplistic undo capability rather than copy_rtx as copy_rtx |
| will not make a deep copy of a normally sharable rtx, such as |
| (const (plus (symbol_ref) (const_int))). If such an expression appears |
| as R1 in gen_reload_chain_without_interm_reg_p, then a shared |
| rtx expression would be changed. See PR 42431. */ |
| |
| typedef rtx *rtx_p; |
| static vec<rtx_p> substitute_stack; |
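| |
| /* The intended protocol, as a sketch (LOC, NEW_RTX and OLD_RTX are |
| placeholder names; compare substitute and |
| gen_reload_chain_without_interm_reg_p): |
| |
| substitute_stack.safe_push (loc); record the changed address |
| *loc = new_rtx; make the change |
| ... |
| *substitute_stack.pop () = old_rtx; later, undo it |
| |
| Only the addresses are recorded, so the caller must know the original |
| value each location held. */ |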
| |
| /* Number of labels in the current function. */ |
| |
| static int num_labels; |
| |
| static void replace_pseudos_in (rtx *, machine_mode, rtx); |
| static void maybe_fix_stack_asms (void); |
| static void copy_reloads (class insn_chain *); |
| static void calculate_needs_all_insns (int); |
| static int find_reg (class insn_chain *, int); |
| static void find_reload_regs (class insn_chain *); |
| static void select_reload_regs (void); |
| static void delete_caller_save_insns (void); |
| |
| static void spill_failure (rtx_insn *, enum reg_class); |
| static void count_spilled_pseudo (int, int, int); |
| static void delete_dead_insn (rtx_insn *); |
| static void alter_reg (int, int, bool); |
| static void set_label_offsets (rtx, rtx_insn *, int); |
| static void check_eliminable_occurrences (rtx); |
| static void elimination_effects (rtx, machine_mode); |
| static rtx eliminate_regs_1 (rtx, machine_mode, rtx, bool, bool); |
| static int eliminate_regs_in_insn (rtx_insn *, int); |
| static void update_eliminable_offsets (void); |
| static void mark_not_eliminable (rtx, const_rtx, void *); |
| static void set_initial_elim_offsets (void); |
| static bool verify_initial_elim_offsets (void); |
| static void set_initial_label_offsets (void); |
| static void set_offsets_for_label (rtx_insn *); |
| static void init_eliminable_invariants (rtx_insn *, bool); |
| static void init_elim_table (void); |
| static void free_reg_equiv (void); |
| static void update_eliminables (HARD_REG_SET *); |
| static bool update_eliminables_and_spill (void); |
| static void elimination_costs_in_insn (rtx_insn *); |
| static void spill_hard_reg (unsigned int, int); |
| static int finish_spills (int); |
| static void scan_paradoxical_subregs (rtx); |
| static void count_pseudo (int); |
| static void order_regs_for_reload (class insn_chain *); |
| static void reload_as_needed (int); |
| static void forget_old_reloads_1 (rtx, const_rtx, void *); |
| static void forget_marked_reloads (regset); |
| static int reload_reg_class_lower (const void *, const void *); |
| static void mark_reload_reg_in_use (unsigned int, int, enum reload_type, |
| machine_mode); |
| static void clear_reload_reg_in_use (unsigned int, int, enum reload_type, |
| machine_mode); |
| static int reload_reg_free_p (unsigned int, int, enum reload_type); |
| static int reload_reg_free_for_value_p (int, int, int, enum reload_type, |
| rtx, rtx, int, int); |
| static int free_for_value_p (int, machine_mode, int, enum reload_type, |
| rtx, rtx, int, int); |
| static int allocate_reload_reg (class insn_chain *, int, int); |
| static int conflicts_with_override (rtx); |
| static void failed_reload (rtx_insn *, int); |
| static int set_reload_reg (int, int); |
| static void choose_reload_regs_init (class insn_chain *, rtx *); |
| static void choose_reload_regs (class insn_chain *); |
| static void emit_input_reload_insns (class insn_chain *, struct reload *, |
| rtx, int); |
| static void emit_output_reload_insns (class insn_chain *, struct reload *, |
| int); |
| static void do_input_reload (class insn_chain *, struct reload *, int); |
| static void do_output_reload (class insn_chain *, struct reload *, int); |
| static void emit_reload_insns (class insn_chain *); |
| static void delete_output_reload (rtx_insn *, int, int, rtx); |
| static void delete_address_reloads (rtx_insn *, rtx_insn *); |
| static void delete_address_reloads_1 (rtx_insn *, rtx, rtx_insn *); |
| static void inc_for_reload (rtx, rtx, rtx, poly_int64); |
| static void substitute (rtx *, const_rtx, rtx); |
| static bool gen_reload_chain_without_interm_reg_p (int, int); |
| static int reloads_conflict (int, int); |
| static rtx_insn *gen_reload (rtx, rtx, int, enum reload_type); |
| static rtx_insn *emit_insn_if_valid_for_reload (rtx); |
| |
| /* Initialize the reload pass. This is called at the beginning of compilation |
| and may be called again if the target is reinitialized. */ |
| |
| void |
| init_reload (void) |
| { |
| int i; |
| |
| /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack. |
| Set spill_indirect_levels to the number of levels such addressing is |
| permitted, zero if it is not permitted at all. */ |
| |
| rtx tem |
| = gen_rtx_MEM (Pmode, |
| gen_rtx_PLUS (Pmode, |
| gen_rtx_REG (Pmode, |
| LAST_VIRTUAL_REGISTER + 1), |
| gen_int_mode (4, Pmode))); |
| spill_indirect_levels = 0; |
| |
| while (memory_address_p (QImode, tem)) |
| { |
| spill_indirect_levels++; |
| tem = gen_rtx_MEM (Pmode, tem); |
| } |
| |
| /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */ |
| |
| tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo")); |
| indirect_symref_ok = memory_address_p (QImode, tem); |
| |
| /* See if reg+reg is a valid (and offsettable) address. */ |
| |
| for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
| { |
| tem = gen_rtx_PLUS (Pmode, |
| gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM), |
| gen_rtx_REG (Pmode, i)); |
| |
| /* This way, we make sure that reg+reg is an offsettable address. */ |
| tem = plus_constant (Pmode, tem, 4); |
| |
| for (int mode = 0; mode < MAX_MACHINE_MODE; mode++) |
| if (!double_reg_address_ok[mode] |
| && memory_address_p ((enum machine_mode)mode, tem)) |
| double_reg_address_ok[mode] = 1; |
| } |
| |
| /* Initialize obstack for our rtl allocation. */ |
| if (reload_startobj == NULL) |
| { |
| gcc_obstack_init (&reload_obstack); |
| reload_startobj = XOBNEWVAR (&reload_obstack, char, 0); |
| } |
| |
| INIT_REG_SET (&spilled_pseudos); |
| INIT_REG_SET (&changed_allocation_pseudos); |
| INIT_REG_SET (&pseudos_counted); |
| } |
| |
| /* List of insn chains that are currently unused. */ |
| static class insn_chain *unused_insn_chains = 0; |
| |
| /* Allocate an empty insn_chain structure. */ |
| class insn_chain * |
| new_insn_chain (void) |
| { |
| class insn_chain *c; |
| |
| if (unused_insn_chains == 0) |
| { |
| c = XOBNEW (&reload_obstack, class insn_chain); |
| INIT_REG_SET (&c->live_throughout); |
| INIT_REG_SET (&c->dead_or_set); |
| } |
| else |
| { |
| c = unused_insn_chains; |
| unused_insn_chains = c->next; |
| } |
| c->is_caller_save_insn = 0; |
| c->need_operand_change = 0; |
| c->need_reload = 0; |
| c->need_elim = 0; |
| return c; |
| } |
| |
| /* Small utility function to set all regs in hard reg set TO which are |
| allocated to pseudos in regset FROM. */ |
| |
| void |
| compute_use_by_pseudos (HARD_REG_SET *to, regset from) |
| { |
| unsigned int regno; |
| reg_set_iterator rsi; |
| |
| EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi) |
| { |
| int r = reg_renumber[regno]; |
| |
| if (r < 0) |
| { |
| /* reload_combine uses the information from DF_LIVE_IN, |
| which might still contain registers that have not |
| actually been allocated since they have an |
| equivalence. */ |
| gcc_assert (ira_conflicts_p || reload_completed); |
| } |
| else |
| add_to_hard_reg_set (to, PSEUDO_REGNO_MODE (regno), r); |
| } |
| } |
| |
| /* Replace all pseudos found in LOC with their corresponding |
| equivalences. */ |
| |
| static void |
| replace_pseudos_in (rtx *loc, machine_mode mem_mode, rtx usage) |
| { |
| rtx x = *loc; |
| enum rtx_code code; |
| const char *fmt; |
| int i, j; |
| |
| if (! x) |
| return; |
| |
| code = GET_CODE (x); |
| if (code == REG) |
| { |
| unsigned int regno = REGNO (x); |
| |
| if (regno < FIRST_PSEUDO_REGISTER) |
| return; |
| |
| x = eliminate_regs_1 (x, mem_mode, usage, true, false); |
| if (x != *loc) |
| { |
| *loc = x; |
| replace_pseudos_in (loc, mem_mode, usage); |
| return; |
| } |
| |
| if (reg_equiv_constant (regno)) |
| *loc = reg_equiv_constant (regno); |
| else if (reg_equiv_invariant (regno)) |
| *loc = reg_equiv_invariant (regno); |
| else if (reg_equiv_mem (regno)) |
| *loc = reg_equiv_mem (regno); |
| else if (reg_equiv_address (regno)) |
| *loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address (regno)); |
| else |
| { |
| gcc_assert (!REG_P (regno_reg_rtx[regno]) |
| || REGNO (regno_reg_rtx[regno]) != regno); |
| *loc = regno_reg_rtx[regno]; |
| } |
| |
| return; |
| } |
| else if (code == MEM) |
| { |
| replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage); |
| return; |
| } |
| |
| /* Process each of our operands recursively. */ |
| fmt = GET_RTX_FORMAT (code); |
| for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++) |
| if (*fmt == 'e') |
| replace_pseudos_in (&XEXP (x, i), mem_mode, usage); |
| else if (*fmt == 'E') |
| for (j = 0; j < XVECLEN (x, i); j++) |
| replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage); |
| } |
| |
| /* Determine if the current function has an exception receiver block |
| that reaches the exit block via non-exceptional edges. */ |
| |
| static bool |
| has_nonexceptional_receiver (void) |
| { |
| edge e; |
| edge_iterator ei; |
| basic_block *tos, *worklist, bb; |
| |
| /* If we're not optimizing, then just err on the safe side. */ |
| if (!optimize) |
| return true; |
| |
| /* First determine which blocks can reach exit via normal paths. */ |
| tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) + 1); |
| |
| FOR_EACH_BB_FN (bb, cfun) |
| bb->flags &= ~BB_REACHABLE; |
| |
| /* Place the exit block on our worklist. */ |
| EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_REACHABLE; |
| *tos++ = EXIT_BLOCK_PTR_FOR_FN (cfun); |
| |
| /* Iterate: find everything reachable from what we've already seen. */ |
| while (tos != worklist) |
| { |
| bb = *--tos; |
| |
| FOR_EACH_EDGE (e, ei, bb->preds) |
| if (!(e->flags & EDGE_ABNORMAL)) |
| { |
| basic_block src = e->src; |
| |
| if (!(src->flags & BB_REACHABLE)) |
| { |
| src->flags |= BB_REACHABLE; |
| *tos++ = src; |
| } |
| } |
| } |
| free (worklist); |
| |
| /* Now see if there's a reachable block with an exceptional incoming |
| edge. */ |
| FOR_EACH_BB_FN (bb, cfun) |
| if (bb->flags & BB_REACHABLE && bb_has_abnormal_pred (bb)) |
| return true; |
| |
| /* No exceptional block reached exit unexceptionally. */ |
| return false; |
| } |
| |
| /* Grow (or allocate) the REG_EQUIVS array from its current size (which may be |
| zero elements) to MAX_REG_NUM elements. |
| |
| Initialize all new entries to zero. */ |
| void |
| grow_reg_equivs (void) |
| { |
| int old_size = vec_safe_length (reg_equivs); |
| int max_regno = max_reg_num (); |
| int i; |
| reg_equivs_t ze; |
| |
| memset (&ze, 0, sizeof (reg_equivs_t)); |
| vec_safe_reserve (reg_equivs, max_regno); |
| for (i = old_size; i < max_regno; i++) |
| reg_equivs->quick_insert (i, ze); |
| } |
| |
| |
| /* Global variables used by reload and its subroutines. */ |
| |
| /* The current basic block while in calculate_elim_costs_all_insns. */ |
| static basic_block elim_bb; |
| |
| /* Set during calculate_needs if an insn needs register elimination. */ |
| static int something_needs_elimination; |
| /* Set during calculate_needs if an insn needs an operand changed. */ |
| static int something_needs_operands_changed; |
| /* Set by alter_regs if we spilled a register to the stack. */ |
| static bool something_was_spilled; |
| |
| /* Nonzero means we couldn't get enough spill regs. */ |
| static int failure; |
| |
| /* Temporary array of pseudo-register numbers. */ |
| static int *temp_pseudo_reg_arr; |
| |
| /* If a pseudo has no hard reg, delete the insns that made the equivalence. |
| If that insn didn't set the register (i.e., it copied the register to |
| memory), just delete that insn instead of the equivalencing insn plus |
| anything now dead. If we call delete_dead_insn on that insn, we may |
| delete the insn that actually sets the register if the register dies |
| there and that is incorrect. */ |
| static void |
| remove_init_insns () |
| { |
| for (int i = FIRST_PSEUDO_REGISTER; i < max_regno; i++) |
| { |
| if (reg_renumber[i] < 0 && reg_equiv_init (i) != 0) |
| { |
| rtx list; |
| for (list = reg_equiv_init (i); list; list = XEXP (list, 1)) |
| { |
| rtx_insn *equiv_insn = as_a <rtx_insn *> (XEXP (list, 0)); |
| |
| /* If we already deleted the insn or if it may trap, we can't |
| delete it. The latter case shouldn't happen, but can |
| if an insn has a variable address, gets a REG_EH_REGION |
| note added to it, and then gets converted into a load |
| from a constant address. */ |
| if (NOTE_P (equiv_insn) |
| || can_throw_internal (equiv_insn)) |
| ; |
| else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn))) |
| delete_dead_insn (equiv_insn); |
| else |
| SET_INSN_DELETED (equiv_insn); |
| } |
| } |
| } |
| } |
| |
| /* Return true if remove_init_insns will delete INSN. */ |
| static bool |
| will_delete_init_insn_p (rtx_insn *insn) |
| { |
| rtx set = single_set (insn); |
| if (!set || !REG_P (SET_DEST (set))) |
| return false; |
| unsigned regno = REGNO (SET_DEST (set)); |
| |
| if (can_throw_internal (insn)) |
| return false; |
| |
| if (regno < FIRST_PSEUDO_REGISTER || reg_renumber[regno] >= 0) |
| return false; |
| |
| for (rtx list = reg_equiv_init (regno); list; list = XEXP (list, 1)) |
| { |
| rtx equiv_insn = XEXP (list, 0); |
| if (equiv_insn == insn) |
| return true; |
| } |
| return false; |
| } |
| |
| /* Main entry point for the reload pass. |
| |
| FIRST is the first insn of the function being compiled. |
| |
| GLOBAL nonzero means we were called from global_alloc |
| and should attempt to reallocate any pseudoregs that we |
| displace from hard regs we will use for reloads. |
| If GLOBAL is zero, we do not have enough information to do that, |
| so any pseudo reg that is spilled must go to the stack. |
| |
| Return TRUE if reload likely left dead insns in the stream, in which |
| case a DCE pass should be run to eliminate them; otherwise return |
| FALSE. */ |
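| |
| /* For reference, reload is entered from ira.cc's do_reload roughly as |
| follows (a sketch; the exact call site may differ): |
| |
| need_dce = reload (get_insns (), ira_conflicts_p); */ |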
| |
| bool |
| reload (rtx_insn *first, int global) |
| { |
| int i, n; |
| rtx_insn *insn; |
| struct elim_table *ep; |
| basic_block bb; |
| bool inserted; |
| |
| /* Make sure even insns with volatile mem refs are recognizable. */ |
| init_recog (); |
| |
| failure = 0; |
| |
| reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0); |
| |
| /* Make sure that the last insn in the chain |
| is not something that needs reloading. */ |
| emit_note (NOTE_INSN_DELETED); |
| |
| /* Enable find_equiv_reg to distinguish insns made by reload. */ |
| reload_first_uid = get_max_uid (); |
| |
| /* Initialize the secondary memory table. */ |
| clear_secondary_mem (); |
| |
| /* We don't have a stack slot for any spill reg yet. */ |
| memset (spill_stack_slot, 0, sizeof spill_stack_slot); |
| memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width); |
| |
| /* Initialize the save area information for caller-save, in case some |
| are needed. */ |
| init_save_areas (); |
| |
| /* Compute which hard registers are now in use |
| as homes for pseudo registers. |
| This is done here rather than (eg) in global_alloc |
| because this point is reached even if not optimizing. */ |
| for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++) |
| mark_home_live (i); |
| |
| /* A function that has a nonlocal label that can reach the exit |
| block via non-exceptional paths must save all call-saved |
| registers. */ |
| if (cfun->has_nonlocal_label |
| && has_nonexceptional_receiver ()) |
| crtl->saves_all_registers = 1; |
| |
| if (crtl->saves_all_registers) |
| for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
| if (! crtl->abi->clobbers_full_reg_p (i) |
| && ! fixed_regs[i] |
| && ! LOCAL_REGNO (i)) |
| df_set_regs_ever_live (i, true); |
| |
| /* Find all the pseudo registers that didn't get hard regs |
| but do have known equivalent constants or memory slots. |
| These include parameters (known equivalent to parameter slots) |
| and cse'd or loop-moved constant memory addresses. |
| |
| Record constant equivalents in reg_equiv_constant |
| so they will be substituted by find_reloads. |
| Record memory equivalents in reg_equiv_memory_loc so they can |
| be substituted eventually by altering the REG-rtx's. */ |
| |
| grow_reg_equivs (); |
| reg_old_renumber = XCNEWVEC (short, max_regno); |
| memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short)); |
| pseudo_forbidden_regs = XNEWVEC (HARD_REG_SET, max_regno); |
| pseudo_previous_regs = XCNEWVEC (HARD_REG_SET, max_regno); |
| |
| CLEAR_HARD_REG_SET (bad_spill_regs_global); |
| |
| init_eliminable_invariants (first, true); |
| init_elim_table (); |
| |
| /* Alter each pseudo-reg rtx to contain its hard reg number. Assign |
| stack slots to the pseudos that lack hard regs or equivalents. |
| Do not touch virtual registers. */ |
| |
| temp_pseudo_reg_arr = XNEWVEC (int, max_regno - LAST_VIRTUAL_REGISTER - 1); |
| for (n = 0, i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++) |
| temp_pseudo_reg_arr[n++] = i; |
| |
| if (ira_conflicts_p) |
| /* Ask IRA to order pseudo-registers for better stack slot |
| sharing. */ |
| ira_sort_regnos_for_alter_reg (temp_pseudo_reg_arr, n, reg_max_ref_mode); |
| |
| for (i = 0; i < n; i++) |
| alter_reg (temp_pseudo_reg_arr[i], -1, false); |
| |
| /* If we have some registers we think can be eliminated, scan all insns to |
| see if there is an insn that sets one of these registers to something |
| other than itself plus a constant. If so, the register cannot be |
| eliminated. Doing this scan here eliminates an extra pass through the |
| main reload loop in the most common case where register elimination |
| cannot be done. */ |
| for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn)) |
| if (INSN_P (insn)) |
| note_pattern_stores (PATTERN (insn), mark_not_eliminable, NULL); |
| |
| maybe_fix_stack_asms (); |
| |
| insns_need_reload = 0; |
| something_needs_elimination = 0; |
| |
| /* Initialize to -1, which means take the first spill register. */ |
| last_spill_reg = -1; |
| |
| /* Spill any hard regs that we know we can't eliminate. */ |
| CLEAR_HARD_REG_SET (used_spill_regs); |
| /* There can be multiple ways to eliminate a register; |
| they should be listed adjacently. |
| Elimination for any register fails only if all possible ways fail. */ |
| for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ) |
| { |
| int from = ep->from; |
| int can_eliminate = 0; |
| do |
| { |
| can_eliminate |= ep->can_eliminate; |
| ep++; |
| } |
| while (ep < &reg_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from); |
| if (! can_eliminate) |
| spill_hard_reg (from, 1); |
| } |
| |
| if (!HARD_FRAME_POINTER_IS_FRAME_POINTER && frame_pointer_needed) |
| spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1); |
| |
| finish_spills (global); |
| |
| /* From now on, we may need to generate moves differently. We may also |
| allow modifications of insns which cause them to not be recognized. |
| Any such modifications will be cleaned up during reload itself. */ |
| reload_in_progress = 1; |
| |
| /* This loop scans the entire function each go-round |
| and repeats until one repetition spills no additional hard regs. */ |
| for (;;) |
| { |
| int something_changed; |
| poly_int64 starting_frame_size; |
| |
| starting_frame_size = get_frame_size (); |
| something_was_spilled = false; |
| |
| set_initial_elim_offsets (); |
| set_initial_label_offsets (); |
| |
| /* For each pseudo register that has an equivalent location defined, |
| try to eliminate any eliminable registers (such as the frame pointer) |
| assuming initial offsets for the replacement register, which |
| is the normal case. |
| |
| If the resulting location is directly addressable, substitute |
| the MEM we just got directly for the old REG. |
| |
| If it is not addressable but is a constant or the sum of a hard reg |
| and constant, it is probably not addressable because the constant is |
| out of range, in that case record the address; we will generate |
| hairy code to compute the address in a register each time it is |
| needed. Similarly if it is a hard register, but one that is not |
| valid as an address register. |
| |
| If the location is not addressable, but does not have one of the |
| above forms, assign a stack slot. We have to do this to avoid the |
| potential of producing lots of reloads if, e.g., a location involves |
| a pseudo that didn't get a hard register and has an equivalent memory |
| location that also involves a pseudo that didn't get a hard register. |
| |
| Perhaps at some point we will improve reload_when_needed handling |
| so this problem goes away. But that's very hairy. */ |
| |
| for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++) |
| if (reg_renumber[i] < 0 && reg_equiv_memory_loc (i)) |
| { |
| rtx x = eliminate_regs (reg_equiv_memory_loc (i), VOIDmode, |
| NULL_RTX); |
| |
| if (strict_memory_address_addr_space_p |
| (GET_MODE (regno_reg_rtx[i]), XEXP (x, 0), |
| MEM_ADDR_SPACE (x))) |
| reg_equiv_mem (i) = x, reg_equiv_address (i) = 0; |
| else if (CONSTANT_P (XEXP (x, 0)) |
| || (REG_P (XEXP (x, 0)) |
| && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER) |
| || (GET_CODE (XEXP (x, 0)) == PLUS |
| && REG_P (XEXP (XEXP (x, 0), 0)) |
| && (REGNO (XEXP (XEXP (x, 0), 0)) |
| < FIRST_PSEUDO_REGISTER) |
| && CONSTANT_P (XEXP (XEXP (x, 0), 1)))) |
| reg_equiv_address (i) = XEXP (x, 0), reg_equiv_mem (i) = 0; |
| else |
| { |
| /* Make a new stack slot. Then indicate that something |
| changed so we go back and recompute offsets for |
| eliminable registers because the allocation of memory |
| below might change some offset. reg_equiv_{mem,address} |
| will be set up for this pseudo on the next pass around |
| the loop. */ |
| reg_equiv_memory_loc (i) = 0; |
| reg_equiv_init (i) = 0; |
| alter_reg (i, -1, true); |
| } |
| } |
| |
| if (caller_save_needed) |
| setup_save_areas (); |
| |
| if (maybe_ne (starting_frame_size, 0) && crtl->stack_alignment_needed) |
| { |
| /* If we have a stack frame, we must align it now. The |
| stack size may be a part of the offset computation for |
| register elimination. So if this changes the stack size, |
| then repeat the elimination bookkeeping. We don't |
| realign when there is no stack, as that would create a stack frame |
| when none is needed, should TARGET_STARTING_FRAME_OFFSET not already |
| be aligned to STACK_BOUNDARY. */ |
| assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed); |
| } |
| /* If we allocated another stack slot, redo elimination bookkeeping. */ |
| if (something_was_spilled |
| || maybe_ne (starting_frame_size, get_frame_size ())) |
| { |
| if (update_eliminables_and_spill ()) |
| finish_spills (0); |
| continue; |
| } |
| |
| if (caller_save_needed) |
| { |
| save_call_clobbered_regs (); |
| /* That might have allocated new insn_chain structures. */ |
| reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0); |
| } |
| |
| calculate_needs_all_insns (global); |
| |
| if (! ira_conflicts_p) |
| /* Don't do it for IRA. We need this info because we don't |
| change live_throughout and dead_or_set for chains when IRA |
| is used. */ |
| CLEAR_REG_SET (&spilled_pseudos); |
| |
| something_changed = 0; |
| |
| /* If we allocated any new memory locations, make another pass |
| since it might have changed elimination offsets. */ |
| if (something_was_spilled |
| || maybe_ne (starting_frame_size, get_frame_size ())) |
| something_changed = 1; |
| |
| /* Even if the frame size remained the same, we might still have |
| changed elimination offsets, e.g. if find_reloads called |
| force_const_mem requiring the back end to allocate a constant |
| pool base register that needs to be saved on the stack. */ |
| else if (!verify_initial_elim_offsets ()) |
| something_changed = 1; |
| |
| if (update_eliminables_and_spill ()) |
| { |
| finish_spills (0); |
| something_changed = 1; |
| } |
| else |
| { |
| select_reload_regs (); |
| if (failure) |
| goto failed; |
| if (insns_need_reload) |
| something_changed |= finish_spills (global); |
| } |
| |
| if (! something_changed) |
| break; |
| |
| if (caller_save_needed) |
| delete_caller_save_insns (); |
| |
| obstack_free (&reload_obstack, reload_firstobj); |
| } |
| |
| /* If global-alloc was run, notify it of any register eliminations we have |
| done. */ |
| if (global) |
| for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) |
| if (ep->can_eliminate) |
| mark_elimination (ep->from, ep->to); |
| |
| remove_init_insns (); |
| |
| /* Use the reload registers where necessary |
| by generating move instructions to move the must-be-register |
| values into or out of the reload registers. */ |
| |
| if (insns_need_reload != 0 || something_needs_elimination |
| || something_needs_operands_changed) |
| { |
| poly_int64 old_frame_size = get_frame_size (); |
| |
| reload_as_needed (global); |
| |
| gcc_assert (known_eq (old_frame_size, get_frame_size ())); |
| |
| gcc_assert (verify_initial_elim_offsets ()); |
| } |
| |
| /* If we were able to eliminate the frame pointer, show that it is no |
| longer live at the start of any basic block. If it is live by |
| virtue of being in a pseudo, that pseudo will be marked live |
| and hence the frame pointer will be known to be live via that |
| pseudo. */ |
| |
| if (! frame_pointer_needed) |
| FOR_EACH_BB_FN (bb, cfun) |
| bitmap_clear_bit (df_get_live_in (bb), HARD_FRAME_POINTER_REGNUM); |
| |
| /* Come here (with failure set nonzero) if we can't get enough spill |
| regs. */ |
| failed: |
| |
| CLEAR_REG_SET (&changed_allocation_pseudos); |
| CLEAR_REG_SET (&spilled_pseudos); |
| reload_in_progress = 0; |
| |
| /* Now eliminate all pseudo regs by modifying them into |
| their equivalent memory references. |
| The REG-rtx's for the pseudos are modified in place, |
| so all insns that used to refer to them now refer to memory. |
| |
| For a reg that has a reg_equiv_address, all those insns |
| were changed by reloading so that no insns refer to it any longer; |
| but the DECL_RTL of a variable decl may refer to it, |
| and if so this causes the debugging info to mention the variable. */ |
| |
| for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++) |
| { |
| rtx addr = 0; |
| |
| if (reg_equiv_mem (i)) |
| addr = XEXP (reg_equiv_mem (i), 0); |
| |
| if (reg_equiv_address (i)) |
| addr = reg_equiv_address (i); |
| |
| if (addr) |
| { |
| if (reg_renumber[i] < 0) |
| { |
| rtx reg = regno_reg_rtx[i]; |
| |
| REG_USERVAR_P (reg) = 0; |
| PUT_CODE (reg, MEM); |
| XEXP (reg, 0) = addr; |
| if (reg_equiv_memory_loc (i)) |
| MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc (i)); |
| else |
| MEM_ATTRS (reg) = 0; |
| MEM_NOTRAP_P (reg) = 1; |
| } |
| else if (reg_equiv_mem (i)) |
| XEXP (reg_equiv_mem (i), 0) = addr; |
| } |
| |
| /* We don't want complex addressing modes in debug insns |
| if simpler ones will do, so delegitimize equivalences |
| in debug insns. */ |
| if (MAY_HAVE_DEBUG_BIND_INSNS && reg_renumber[i] < 0) |
| { |
| rtx reg = regno_reg_rtx[i]; |
| rtx equiv = 0; |
| df_ref use, next; |
| |
| if (reg_equiv_constant (i)) |
| equiv = reg_equiv_constant (i); |
| else if (reg_equiv_invariant (i)) |
| equiv = reg_equiv_invariant (i); |
| else if (reg && MEM_P (reg)) |
| equiv = targetm.delegitimize_address (reg); |
| else if (reg && REG_P (reg) && (int)REGNO (reg) != i) |
| equiv = reg; |
| |
| if (equiv == reg) |
| continue; |
| |
| for (use = DF_REG_USE_CHAIN (i); use; use = next) |
| { |
| insn = DF_REF_INSN (use); |
| |
| /* Make sure the next ref is for a different instruction, |
| so that we're not affected by the rescan. */ |
| next = DF_REF_NEXT_REG (use); |
| while (next && DF_REF_INSN (next) == insn) |
| next = DF_REF_NEXT_REG (next); |
| |
| if (DEBUG_BIND_INSN_P (insn)) |
| { |
| if (!equiv) |
| { |
| INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC (); |
| df_insn_rescan_debug_internal (insn); |
| } |
| else |
| INSN_VAR_LOCATION_LOC (insn) |
| = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn), |
| reg, equiv); |
| } |
| } |
| } |
| } |
| |
| /* We must set reload_completed now since the cleanup_subreg_operands call |
| below will re-recognize each insn and reload may have generated insns |
| which are only valid during and after reload. */ |
| reload_completed = 1; |
| |
| /* Make a pass over all the insns and delete all USEs which we inserted |
| only to tag a REG_EQUAL note on them. Remove all REG_DEAD and REG_UNUSED |
| notes. Delete all CLOBBER insns, except those that refer to the return |
| value and the special mem:BLK CLOBBERs added to prevent the scheduler |
| from misarranging variable-array code, and simplify (subreg (reg)) |
| operands. Strip and regenerate REG_INC notes that may have been moved |
| around. */ |
| |
| for (insn = first; insn; insn = NEXT_INSN (insn)) |
| if (INSN_P (insn)) |
| { |
| rtx *pnote; |
| |
| if (CALL_P (insn)) |
| replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn), |
| VOIDmode, CALL_INSN_FUNCTION_USAGE (insn)); |
| |
| if ((GET_CODE (PATTERN (insn)) == USE |
| /* USEs introduced by reload itself are marked with QImode. */ |
| && (GET_MODE (insn) == QImode |
| || find_reg_note (insn, REG_EQUAL, NULL_RTX))) |
| || (GET_CODE (PATTERN (insn)) == CLOBBER |
| && (!MEM_P (XEXP (PATTERN (insn), 0)) |
| || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode |
| || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH |
| && XEXP (XEXP (PATTERN (insn), 0), 0) |
| != stack_pointer_rtx)) |
| && (!REG_P (XEXP (PATTERN (insn), 0)) |
| || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0))))) |
| { |
| delete_insn (insn); |
| continue; |
| } |
| |
| /* Some CLOBBERs may survive until here and still reference unassigned |
| pseudos with a const equivalent, which may in turn cause an ICE in later |
| passes if the reference remains in place. */ |
| if (GET_CODE (PATTERN (insn)) == CLOBBER) |
| replace_pseudos_in (& XEXP (PATTERN (insn), 0), |
| VOIDmode, PATTERN (insn)); |
| |
| /* Discard obvious no-ops, even without -O. This optimization |
| is fast and doesn't interfere with debugging. */ |
| if (NONJUMP_INSN_P (insn) |
| && GET_CODE (PATTERN (insn)) == SET |
| && REG_P (SET_SRC (PATTERN (insn))) |
| && REG_P (SET_DEST (PATTERN (insn))) |
| && (REGNO (SET_SRC (PATTERN (insn))) |
| == REGNO (SET_DEST (PATTERN (insn))))) |
| { |
| delete_insn (insn); |
| continue; |
| } |
| |
| pnote = &REG_NOTES (insn); |
| while (*pnote != 0) |
| { |
| if (REG_NOTE_KIND (*pnote) == REG_DEAD |
| || REG_NOTE_KIND (*pnote) == REG_UNUSED |
| || REG_NOTE_KIND (*pnote) == REG_INC) |
| *pnote = XEXP (*pnote, 1); |
| else |
| pnote = &XEXP (*pnote, 1); |
| } |
| |
| if (AUTO_INC_DEC) |
| add_auto_inc_notes (insn, PATTERN (insn)); |
| |
| /* Simplify (subreg (reg)) if it appears as an operand. */ |
| cleanup_subreg_operands (insn); |
| |
| /* Clean up invalid ASMs so that they don't confuse later passes. |
| See PR 21299. */ |
| if (asm_noperands (PATTERN (insn)) >= 0) |
| { |
| extract_insn (insn); |
| if (!constrain_operands (1, get_enabled_alternatives (insn))) |
| { |
| error_for_asm (insn, |
| "%<asm%> operand has impossible constraints"); |
| delete_insn (insn); |
| continue; |
| } |
| } |
| } |
| |
| free (temp_pseudo_reg_arr); |
| |
| /* Indicate that we no longer have known memory locations or constants. */ |
| free_reg_equiv (); |
| |
| free (reg_max_ref_mode); |
| free (reg_old_renumber); |
| free (pseudo_previous_regs); |
| free (pseudo_forbidden_regs); |
| |
| CLEAR_HARD_REG_SET (used_spill_regs); |
| for (i = 0; i < n_spills; i++) |
| SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]); |
| |
| /* Free all the insn_chain structures at once. */ |
| obstack_free (&reload_obstack, reload_startobj); |
| unused_insn_chains = 0; |
| |
| inserted = fixup_abnormal_edges (); |
| |
| /* We've possibly turned a single trapping insn into multiple ones. */ |
| if (cfun->can_throw_non_call_exceptions) |
| { |
| auto_sbitmap blocks (last_basic_block_for_fn (cfun)); |
| bitmap_ones (blocks); |
| find_many_sub_basic_blocks (blocks); |
| } |
| |
| if (inserted) |
| commit_edge_insertions (); |
| |
| /* Replacing pseudos with their memory equivalents might have |
| created shared rtx. Subsequent passes would get confused |
| by this, so unshare everything here. */ |
| unshare_all_rtl_again (first); |
| |
| #ifdef STACK_BOUNDARY |
| /* init_emit has set the alignment of the hard frame pointer |
| to STACK_BOUNDARY. It is very likely no longer valid if |
| the hard frame pointer was used for register allocation. */ |
| if (!frame_pointer_needed) |
| REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT; |
| #endif |
| |
| substitute_stack.release (); |
| |
| gcc_assert (bitmap_empty_p (&spilled_pseudos)); |
| |
| reload_completed = !failure; |
| |
| return need_dce; |
| } |
| |
| /* Yet another special case. Unfortunately, reg-stack forces people to |
| write incorrect clobbers in asm statements. These clobbers must not |
| cause the register to appear in bad_spill_regs, otherwise we'll call |
| fatal_insn later. We clear the corresponding regnos in the live |
| register sets to avoid this. |
| The whole thing is rather sick, I'm afraid. */ |
| |
| static void |
| maybe_fix_stack_asms (void) |
| { |
| #ifdef STACK_REGS |
| const char *constraints[MAX_RECOG_OPERANDS]; |
| machine_mode operand_mode[MAX_RECOG_OPERANDS]; |
| class insn_chain *chain; |
| |
| for (chain = reload_insn_chain; chain != 0; chain = chain->next) |
| { |
| int i, noperands; |
| HARD_REG_SET clobbered, allowed; |
| rtx pat; |
| |
| if (! INSN_P (chain->insn) |
| || (noperands = asm_noperands (PATTERN (chain->insn))) < 0) |
| continue; |
| pat = PATTERN (chain->insn); |
| if (GET_CODE (pat) != PARALLEL) |
| continue; |
| |
| CLEAR_HARD_REG_SET (clobbered); |
| CLEAR_HARD_REG_SET (allowed); |
| |
| /* First, make a mask of all stack regs that are clobbered. */ |
| for (i = 0; i < XVECLEN (pat, 0); i++) |
| { |
| rtx t = XVECEXP (pat, 0, i); |
| if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0))) |
| SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0))); |
| } |
| |
| /* Get the operand values and constraints out of the insn. */ |
| decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc, |
| constraints, operand_mode, NULL); |
| |
| /* For every operand, see what registers are allowed. */ |
| for (i = 0; i < noperands; i++) |
| { |
| const char *p = constraints[i]; |
| /* For every alternative, we compute the class of registers allowed |
| for reloading in CLS, and merge its contents into the reg set |
| ALLOWED. */ |
| int cls = (int) NO_REGS; |
| |
| for (;;) |
| { |
| char c = *p; |
| |
| if (c == '\0' || c == ',' || c == '#') |
| { |
| /* End of one alternative - mark the regs in the current |
| class, and reset the class. */ |
| allowed |= reg_class_contents[cls]; |
| cls = NO_REGS; |
| p++; |
| if (c == '#') |
| do { |
| c = *p++; |
| } while (c != '\0' && c != ','); |
| if (c == '\0') |
| break; |
| continue; |
| } |
| |
| switch (c) |
| { |
| case 'g': |
| cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS]; |
| break; |
| |
| default: |
| enum constraint_num cn = lookup_constraint (p); |
| if (insn_extra_address_constraint (cn)) |
| cls = (int) reg_class_subunion[cls] |
| [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC, |
| ADDRESS, SCRATCH)]; |
| else |
| cls = (int) reg_class_subunion[cls] |
| [reg_class_for_constraint (cn)]; |
| break; |
| } |
| p += CONSTRAINT_LEN (c, p); |
| } |
| } |
| /* Those of the registers which are clobbered, but allowed by the |
| constraints, must be usable as reload registers. So clear them |
| out of the life information. */ |
| allowed &= clobbered; |
| for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
| if (TEST_HARD_REG_BIT (allowed, i)) |
| { |
| CLEAR_REGNO_REG_SET (&chain->live_throughout, i); |
| CLEAR_REGNO_REG_SET (&chain->dead_or_set, i); |
| } |
| } |
| |
| #endif |
| } |
| |
| /* Copy the global variables n_reloads and rld into the corresponding elts |
| of CHAIN. */ |
| static void |
| copy_reloads (class insn_chain *chain) |
| { |
| chain->n_reloads = n_reloads; |
| chain->rld = XOBNEWVEC (&reload_obstack, struct reload, n_reloads); |
| memcpy (chain->rld, rld, n_reloads * sizeof (struct reload)); |
| reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0); |
| } |
| |
| /* Walk the chain of insns, and determine for each whether it needs reloads |
| and/or eliminations. Build the corresponding insns_need_reload list, and |
| set something_needs_elimination as appropriate. */ |
| static void |
| calculate_needs_all_insns (int global) |
| { |
| class insn_chain **pprev_reload = &insns_need_reload; |
| class insn_chain *chain, *next = 0; |
| |
| something_needs_elimination = 0; |
| |
| reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0); |
| for (chain = reload_insn_chain; chain != 0; chain = next) |
| { |
| rtx_insn *insn = chain->insn; |
| |
| next = chain->next; |
| |
| /* Clear out the shortcuts. */ |
| chain->n_reloads = 0; |
| chain->need_elim = 0; |
| chain->need_reload = 0; |
| chain->need_operand_change = 0; |
| |
| /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might |
| include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see |
| what effects this has on the known offsets at labels. */ |
| |
| if (LABEL_P (insn) || JUMP_P (insn) || JUMP_TABLE_DATA_P (insn) |
| || (INSN_P (insn) && REG_NOTES (insn) != 0)) |
| set_label_offsets (insn, insn, 0); |
| |
| if (INSN_P (insn)) |
| { |
| rtx old_body = PATTERN (insn); |
| int old_code = INSN_CODE (insn); |
| rtx old_notes = REG_NOTES (insn); |
| int did_elimination = 0; |
| int operands_changed = 0; |
| |
| /* Skip insns that only set an equivalence. */ |
| if (will_delete_init_insn_p (insn)) |
| continue; |
| |
| /* If needed, eliminate any eliminable registers. */ |
| if (num_eliminable || num_eliminable_invariants) |
| did_elimination = eliminate_regs_in_insn (insn, 0); |
| |
| /* Analyze the instruction. */ |
| operands_changed = find_reloads (insn, 0, spill_indirect_levels, |
| global, spill_reg_order); |
| |
| /* If a no-op set needs more than one reload, this is likely |
| to be something that needs input address reloads. We |
| can't get rid of this cleanly later, and it is of no use |
| anyway, so discard it now. |
| We only do this when expensive_optimizations is enabled, |
| since this complements reload inheritance / output |
| reload deletion, and it can make debugging harder. */ |
| if (flag_expensive_optimizations && n_reloads > 1) |
| { |
| rtx set = single_set (insn); |
| if (set |
| && |
| ((SET_SRC (set) == SET_DEST (set) |
| && REG_P (SET_SRC (set)) |
| && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER) |
| || (REG_P (SET_SRC (set)) && REG_P (SET_DEST (set)) |
| && reg_renumber[REGNO (SET_SRC (set))] < 0 |
| && reg_renumber[REGNO (SET_DEST (set))] < 0 |
| && reg_equiv_memory_loc (REGNO (SET_SRC (set))) != NULL |
| && reg_equiv_memory_loc (REGNO (SET_DEST (set))) != NULL |
| && rtx_equal_p (reg_equiv_memory_loc (REGNO (SET_SRC (set))), |
| reg_equiv_memory_loc (REGNO (SET_DEST (set))))))) |
| { |
| if (ira_conflicts_p) |
| /* Inform IRA about the insn deletion. */ |
| ira_mark_memory_move_deletion (REGNO (SET_DEST (set)), |
| REGNO (SET_SRC (set))); |
| delete_insn (insn); |
| /* Delete it from the reload chain. */ |
| if (chain->prev) |
| chain->prev->next = next; |
| else |
| reload_insn_chain = next; |
| if (next) |
| next->prev = chain->prev; |
| chain->next = unused_insn_chains; |
| unused_insn_chains = chain; |
| continue; |
| } |
| } |
| if (num_eliminable) |
| update_eliminable_offsets (); |
| |
| /* Remember for later shortcuts which insns had any reloads or |
| register eliminations. */ |
| chain->need_elim = did_elimination; |
| chain->need_reload = n_reloads > 0; |
| chain->need_operand_change = operands_changed; |
| |
| /* Discard any register replacements done. */ |
| if (did_elimination) |
| { |
| obstack_free (&reload_obstack, reload_insn_firstobj); |
| PATTERN (insn) = old_body; |
| INSN_CODE (insn) = old_code; |
| REG_NOTES (insn) = old_notes; |
| something_needs_elimination = 1; |
| } |
| |
| something_needs_operands_changed |= operands_changed; |
| |
| if (n_reloads != 0) |
| { |
| copy_reloads (chain); |
| *pprev_reload = chain; |
| pprev_reload = &chain->next_need_reload; |
| } |
| } |
| } |
| *pprev_reload = 0; |
| } |
| |
| /* This function is called from the register allocator to set up estimates |
| for the cost of eliminating pseudos which have REG_EQUIV equivalences to |
| an invariant. The structure is similar to calculate_needs_all_insns. */ |
| |
| void |
| calculate_elim_costs_all_insns (void) |
| { |
| int *reg_equiv_init_cost; |
| basic_block bb; |
| int i; |
| |
| reg_equiv_init_cost = XCNEWVEC (int, max_regno); |
| init_elim_table (); |
| init_eliminable_invariants (get_insns (), false); |
| |
| set_initial_elim_offsets (); |
| set_initial_label_offsets (); |
| |
| FOR_EACH_BB_FN (bb, cfun) |
| { |
| rtx_insn *insn; |
| elim_bb = bb; |
| |
| FOR_BB_INSNS (bb, insn) |
| { |
| /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might |
| include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see |
| what effects this has on the known offsets at labels. */ |
| |
| if (LABEL_P (insn) || JUMP_P (insn) || JUMP_TABLE_DATA_P (insn) |
| || (INSN_P (insn) && REG_NOTES (insn) != 0)) |
| set_label_offsets (insn, insn, 0); |
| |
| if (INSN_P (insn)) |
| { |
| rtx set = single_set (insn); |
| |
| /* Skip insns that only set an equivalence. */ |
| if (set && REG_P (SET_DEST (set)) |
| && reg_renumber[REGNO (SET_DEST (set))] < 0 |
| && (reg_equiv_constant (REGNO (SET_DEST (set))) |
| || reg_equiv_invariant (REGNO (SET_DEST (set))))) |
| { |
| unsigned regno = REGNO (SET_DEST (set)); |
| rtx_insn_list *init = reg_equiv_init (regno); |
| if (init) |
| { |
| rtx t = eliminate_regs_1 (SET_SRC (set), VOIDmode, insn, |
| false, true); |
| machine_mode mode = GET_MODE (SET_DEST (set)); |
| int cost = set_src_cost (t, mode, |
| optimize_bb_for_speed_p (bb)); |
| int freq = REG_FREQ_FROM_BB (bb); |
| |
| reg_equiv_init_cost[regno] = cost * freq; |
| continue; |
| } |
| } |
| /* If needed, eliminate any eliminable registers. */ |
| if (num_eliminable || num_eliminable_invariants) |
| elimination_costs_in_insn (insn); |
| |
| if (num_eliminable) |
| update_eliminable_offsets (); |
| } |
| } |
| } |
| for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++) |
| { |
| if (reg_equiv_invariant (i)) |
| { |
| if (reg_equiv_init (i)) |
| { |
| int cost = reg_equiv_init_cost[i]; |
| if (dump_file) |
| fprintf (dump_file, |
| "Reg %d has equivalence, initial gains %d\n", i, cost); |
| if (cost != 0) |
| ira_adjust_equiv_reg_cost (i, cost); |
| } |
| else |
| { |
| if (dump_file) |
| fprintf (dump_file, |
| "Reg %d had equivalence, but can't be eliminated\n", |
| i); |
| ira_adjust_equiv_reg_cost (i, 0); |
| } |
| } |
| } |
| |
| free (reg_equiv_init_cost); |
| free (offsets_known_at); |
| free (offsets_at); |
| offsets_at = NULL; |
| offsets_known_at = NULL; |
| } |
| |
| /* Comparison function for qsort to decide which of two reloads |
| should be handled first. *P1 and *P2 are the reload numbers. */ |
| |
| static int |
| reload_reg_class_lower (const void *r1p, const void *r2p) |
| { |
| int r1 = *(const short *) r1p, r2 = *(const short *) r2p; |
| int t; |
| |
| /* Consider required reloads before optional ones. */ |
| t = rld[r1].optional - rld[r2].optional; |
| if (t != 0) |
| return t; |
| |
| /* Count all solitary classes before non-solitary ones. */ |
| t = ((reg_class_size[(int) rld[r2].rclass] == 1) |
| - (reg_class_size[(int) rld[r1].rclass] == 1)); |
| if (t != 0) |
| return t; |
| |
| /* Aside from solitaires, consider all multi-reg groups first. */ |
| t = rld[r2].nregs - rld[r1].nregs; |
| if (t != 0) |
| return t; |
| |
| /* Consider reloads in order of increasing reg-class number. */ |
| t = (int) rld[r1].rclass - (int) rld[r2].rclass; |
| if (t != 0) |
| return t; |
| |
| /* If reloads are equally urgent, sort by reload number, |
| so that the results of qsort leave nothing to chance. */ |
| return r1 - r2; |
| } |
| |
| /* The cost of spilling each hard reg. */ |
| static int spill_cost[FIRST_PSEUDO_REGISTER]; |
| |
/* When spilling multiple hard registers, we use SPILL_COST for the first
   spilled hard reg and SPILL_ADD_COST for subsequent regs.  SPILL_ADD_COST
   is incremented only for the first hard reg of a multi-reg pseudo.  */
| static int spill_add_cost[FIRST_PSEUDO_REGISTER]; |
| |
| /* Map of hard regno to pseudo regno currently occupying the hard |
| reg. */ |
| static int hard_regno_to_pseudo_regno[FIRST_PSEUDO_REGISTER]; |
| |
| /* Update the spill cost arrays, considering that pseudo REG is live. */ |
| |
| static void |
| count_pseudo (int reg) |
| { |
| int freq = REG_FREQ (reg); |
| int r = reg_renumber[reg]; |
| int nregs; |
| |
  /* Ignore spilled pseudo-registers, which can occur here only when IRA
     is used.  */
| if (ira_conflicts_p && r < 0) |
| return; |
| |
| if (REGNO_REG_SET_P (&pseudos_counted, reg) |
| || REGNO_REG_SET_P (&spilled_pseudos, reg)) |
| return; |
| |
| SET_REGNO_REG_SET (&pseudos_counted, reg); |
| |
| gcc_assert (r >= 0); |
| |
| spill_add_cost[r] += freq; |
| nregs = hard_regno_nregs (r, PSEUDO_REGNO_MODE (reg)); |
| while (nregs-- > 0) |
| { |
| hard_regno_to_pseudo_regno[r + nregs] = reg; |
| spill_cost[r + nregs] += freq; |
| } |
| } |
| |
| /* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the |
| contents of BAD_SPILL_REGS for the insn described by CHAIN. */ |
| |
| static void |
| order_regs_for_reload (class insn_chain *chain) |
| { |
| unsigned i; |
| HARD_REG_SET used_by_pseudos; |
| HARD_REG_SET used_by_pseudos2; |
| reg_set_iterator rsi; |
| |
| bad_spill_regs = fixed_reg_set; |
| |
| memset (spill_cost, 0, sizeof spill_cost); |
| memset (spill_add_cost, 0, sizeof spill_add_cost); |
| for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
| hard_regno_to_pseudo_regno[i] = -1; |
| |
| /* Count number of uses of each hard reg by pseudo regs allocated to it |
| and then order them by decreasing use. First exclude hard registers |
| that are live in or across this insn. */ |
| |
| REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout); |
| REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set); |
| bad_spill_regs |= used_by_pseudos; |
| bad_spill_regs |= used_by_pseudos2; |
| |
  /* Now find out which pseudos are allocated to these hard regs, and
     update the spill cost arrays.  */
| CLEAR_REG_SET (&pseudos_counted); |
| |
| EXECUTE_IF_SET_IN_REG_SET |
| (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi) |
| { |
| count_pseudo (i); |
| } |
| EXECUTE_IF_SET_IN_REG_SET |
| (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi) |
| { |
| count_pseudo (i); |
| } |
| CLEAR_REG_SET (&pseudos_counted); |
| } |
| |
| /* Vector of reload-numbers showing the order in which the reloads should |
| be processed. */ |
| static short reload_order[MAX_RELOADS]; |
| |
| /* This is used to keep track of the spill regs used in one insn. */ |
| static HARD_REG_SET used_spill_regs_local; |
| |
| /* We decided to spill hard register SPILLED, which has a size of |
| SPILLED_NREGS. Determine how pseudo REG, which is live during the insn, |
| is affected. We will add it to SPILLED_PSEUDOS if necessary, and we will |
| update SPILL_COST/SPILL_ADD_COST. */ |
| |
| static void |
| count_spilled_pseudo (int spilled, int spilled_nregs, int reg) |
| { |
| int freq = REG_FREQ (reg); |
| int r = reg_renumber[reg]; |
| int nregs; |
| |
  /* Ignore spilled pseudo-registers, which can occur here only when IRA
     is used.  */
| if (ira_conflicts_p && r < 0) |
| return; |
| |
| gcc_assert (r >= 0); |
| |
| nregs = hard_regno_nregs (r, PSEUDO_REGNO_MODE (reg)); |
| |
| if (REGNO_REG_SET_P (&spilled_pseudos, reg) |
| || spilled + spilled_nregs <= r || r + nregs <= spilled) |
| return; |
| |
| SET_REGNO_REG_SET (&spilled_pseudos, reg); |
| |
| spill_add_cost[r] -= freq; |
| while (nregs-- > 0) |
| { |
| hard_regno_to_pseudo_regno[r + nregs] = -1; |
| spill_cost[r + nregs] -= freq; |
| } |
| } |
| |
/* Find the reload register to use for the reload with number
   reload_order[ORDER] in CHAIN.  Return 1 on success, 0 if no suitable
   register could be found.  */
| |
| static int |
| find_reg (class insn_chain *chain, int order) |
| { |
| int rnum = reload_order[order]; |
| struct reload *rl = rld + rnum; |
| int best_cost = INT_MAX; |
| int best_reg = -1; |
| unsigned int i, j, n; |
| int k; |
| HARD_REG_SET not_usable; |
| HARD_REG_SET used_by_other_reload; |
| reg_set_iterator rsi; |
| static int regno_pseudo_regs[FIRST_PSEUDO_REGISTER]; |
| static int best_regno_pseudo_regs[FIRST_PSEUDO_REGISTER]; |
| |
| not_usable = (bad_spill_regs |
| | bad_spill_regs_global |
| | ~reg_class_contents[rl->rclass]); |
| |
| CLEAR_HARD_REG_SET (used_by_other_reload); |
| for (k = 0; k < order; k++) |
| { |
| int other = reload_order[k]; |
| |
| if (rld[other].regno >= 0 && reloads_conflict (other, rnum)) |
| for (j = 0; j < rld[other].nregs; j++) |
| SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j); |
| } |
| |
| for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
| { |
| #ifdef REG_ALLOC_ORDER |
| unsigned int regno = reg_alloc_order[i]; |
| #else |
| unsigned int regno = i; |
| #endif |
| |
| if (! TEST_HARD_REG_BIT (not_usable, regno) |
| && ! TEST_HARD_REG_BIT (used_by_other_reload, regno) |
| && targetm.hard_regno_mode_ok (regno, rl->mode)) |
| { |
| int this_cost = spill_cost[regno]; |
| int ok = 1; |
| unsigned int this_nregs = hard_regno_nregs (regno, rl->mode); |
| |
| for (j = 1; j < this_nregs; j++) |
| { |
| this_cost += spill_add_cost[regno + j]; |
| if ((TEST_HARD_REG_BIT (not_usable, regno + j)) |
| || TEST_HARD_REG_BIT (used_by_other_reload, regno + j)) |
| ok = 0; |
| } |
| if (! ok) |
| continue; |
| |
| if (ira_conflicts_p) |
| { |
| /* Ask IRA to find a better pseudo-register for |
| spilling. */ |
| for (n = j = 0; j < this_nregs; j++) |
| { |
| int r = hard_regno_to_pseudo_regno[regno + j]; |
| |
| if (r < 0) |
| continue; |
| if (n == 0 || regno_pseudo_regs[n - 1] != r) |
| regno_pseudo_regs[n++] = r; |
| } |
| regno_pseudo_regs[n++] = -1; |
| if (best_reg < 0 |
| || ira_better_spill_reload_regno_p (regno_pseudo_regs, |
| best_regno_pseudo_regs, |
| rl->in, rl->out, |
| chain->insn)) |
| { |
| best_reg = regno; |
| for (j = 0;; j++) |
| { |
| best_regno_pseudo_regs[j] = regno_pseudo_regs[j]; |
| if (regno_pseudo_regs[j] < 0) |
| break; |
| } |
| } |
| continue; |
| } |
| |
| if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno) |
| this_cost--; |
| if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno) |
| this_cost--; |
| if (this_cost < best_cost |
| /* Among registers with equal cost, prefer caller-saved ones, or |
| use REG_ALLOC_ORDER if it is defined. */ |
| || (this_cost == best_cost |
| #ifdef REG_ALLOC_ORDER |
| && (inv_reg_alloc_order[regno] |
| < inv_reg_alloc_order[best_reg]) |
| #else |
| && crtl->abi->clobbers_full_reg_p (regno) |
| && !crtl->abi->clobbers_full_reg_p (best_reg) |
| #endif |
| )) |
| { |
| best_reg = regno; |
| best_cost = this_cost; |
| } |
| } |
| } |
| if (best_reg == -1) |
| return 0; |
| |
| if (dump_file) |
| fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum); |
| |
| rl->nregs = hard_regno_nregs (best_reg, rl->mode); |
| rl->regno = best_reg; |
| |
| EXECUTE_IF_SET_IN_REG_SET |
| (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j, rsi) |
| { |
| count_spilled_pseudo (best_reg, rl->nregs, j); |
| } |
| |
| EXECUTE_IF_SET_IN_REG_SET |
| (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j, rsi) |
| { |
| count_spilled_pseudo (best_reg, rl->nregs, j); |
| } |
| |
| for (i = 0; i < rl->nregs; i++) |
| { |
| gcc_assert (spill_cost[best_reg + i] == 0); |
| gcc_assert (spill_add_cost[best_reg + i] == 0); |
| gcc_assert (hard_regno_to_pseudo_regno[best_reg + i] == -1); |
| SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i); |
| } |
| return 1; |
| } |
| |
| /* Find more reload regs to satisfy the remaining need of an insn, which |
| is given by CHAIN. |
| Do it by ascending class number, since otherwise a reg |
| might be spilled for a big class and might fail to count |
| for a smaller class even though it belongs to that class. */ |
| |
| static void |
| find_reload_regs (class insn_chain *chain) |
| { |
| int i; |
| |
| /* In order to be certain of getting the registers we need, |
| we must sort the reloads into order of increasing register class. |
| Then our grabbing of reload registers will parallel the process |
| that provided the reload registers. */ |
| for (i = 0; i < chain->n_reloads; i++) |
| { |
| /* Show whether this reload already has a hard reg. */ |
| if (chain->rld[i].reg_rtx) |
| { |
| chain->rld[i].regno = REGNO (chain->rld[i].reg_rtx); |
| chain->rld[i].nregs = REG_NREGS (chain->rld[i].reg_rtx); |
| } |
| else |
| chain->rld[i].regno = -1; |
| reload_order[i] = i; |
| } |
| |
| n_reloads = chain->n_reloads; |
| memcpy (rld, chain->rld, n_reloads * sizeof (struct reload)); |
| |
| CLEAR_HARD_REG_SET (used_spill_regs_local); |
| |
| if (dump_file) |
| fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn)); |
| |
| qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower); |
| |
| /* Compute the order of preference for hard registers to spill. */ |
| |
| order_regs_for_reload (chain); |
| |
| for (i = 0; i < n_reloads; i++) |
| { |
| int r = reload_order[i]; |
| |
| /* Ignore reloads that got marked inoperative. */ |
| if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p) |
| && ! rld[r].optional |
| && rld[r].regno == -1) |
| if (! find_reg (chain, i)) |
| { |
| if (dump_file) |
| fprintf (dump_file, "reload failure for reload %d\n", r); |
| spill_failure (chain->insn, rld[r].rclass); |
| failure = 1; |
| return; |
| } |
| } |
| |
| chain->used_spill_regs = used_spill_regs_local; |
| used_spill_regs |= used_spill_regs_local; |
| |
| memcpy (chain->rld, rld, n_reloads * sizeof (struct reload)); |
| } |
| |
| static void |
| select_reload_regs (void) |
| { |
| class insn_chain *chain; |
| |
| /* Try to satisfy the needs for each insn. */ |
| for (chain = insns_need_reload; chain != 0; |
| chain = chain->next_need_reload) |
| find_reload_regs (chain); |
| } |
| |
| /* Delete all insns that were inserted by emit_caller_save_insns during |
| this iteration. */ |
| static void |
| delete_caller_save_insns (void) |
| { |
| class insn_chain *c = reload_insn_chain; |
| |
| while (c != 0) |
| { |
| while (c != 0 && c->is_caller_save_insn) |
| { |
| class insn_chain *next = c->next; |
| rtx_insn *insn = c->insn; |
| |
| if (c == reload_insn_chain) |
| reload_insn_chain = next; |
| delete_insn (insn); |
| |
| if (next) |
| next->prev = c->prev; |
| if (c->prev) |
| c->prev->next = next; |
| c->next = unused_insn_chains; |
| unused_insn_chains = c; |
| c = next; |
| } |
| if (c != 0) |
| c = c->next; |
| } |
| } |
| |
| /* Handle the failure to find a register to spill. |
| INSN should be one of the insns which needed this particular spill reg. */ |
| |
| static void |
| spill_failure (rtx_insn *insn, enum reg_class rclass) |
| { |
| if (asm_noperands (PATTERN (insn)) >= 0) |
| error_for_asm (insn, "cannot find a register in class %qs while " |
| "reloading %<asm%>", |
| reg_class_names[rclass]); |
| else |
| { |
| error ("unable to find a register to spill in class %qs", |
| reg_class_names[rclass]); |
| |
| if (dump_file) |
| { |
| fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn)); |
| debug_reload_to_stream (dump_file); |
| } |
| fatal_insn ("this is the insn:", insn); |
| } |
| } |
| |
/* Delete an unneeded INSN and any previous insns whose sole purpose is
   loading data that is dead in INSN.  */
| |
| static void |
| delete_dead_insn (rtx_insn *insn) |
| { |
| rtx_insn *prev = prev_active_insn (insn); |
| rtx prev_dest; |
| |
| /* If the previous insn sets a register that dies in our insn make |
| a note that we want to run DCE immediately after reload. |
| |
| We used to delete the previous insn & recurse, but that's wrong for |
| block local equivalences. Instead of trying to figure out the exact |
| circumstances where we can delete the potentially dead insns, just |
| let DCE do the job. */ |
| if (prev && BLOCK_FOR_INSN (prev) == BLOCK_FOR_INSN (insn) |
| && GET_CODE (PATTERN (prev)) == SET |
| && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest)) |
| && reg_mentioned_p (prev_dest, PATTERN (insn)) |
| && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)) |
| && ! side_effects_p (SET_SRC (PATTERN (prev)))) |
| need_dce = 1; |
| |
| SET_INSN_DELETED (insn); |
| } |
| |
/* Modify the home of pseudo-reg I.
   The new home is present in reg_renumber[I].

   FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
   or it may be -1, meaning there is none or it is not relevant.
   This is used so that all pseudos spilled from a given hard reg
   can share one stack slot.

   If DONT_SHARE_P is true, IRA's stack-slot sharing machinery is not
   consulted, so the pseudo does not reuse or share a slot chosen by
   IRA.  */
| |
| static void |
| alter_reg (int i, int from_reg, bool dont_share_p) |
| { |
| /* When outputting an inline function, this can happen |
| for a reg that isn't actually used. */ |
| if (regno_reg_rtx[i] == 0) |
| return; |
| |
| /* If the reg got changed to a MEM at rtl-generation time, |
| ignore it. */ |
| if (!REG_P (regno_reg_rtx[i])) |
| return; |
| |
| /* Modify the reg-rtx to contain the new hard reg |
| number or else to contain its pseudo reg number. */ |
| SET_REGNO (regno_reg_rtx[i], |
| reg_renumber[i] >= 0 ? reg_renumber[i] : i); |
| |
| /* If we have a pseudo that is needed but has no hard reg or equivalent, |
| allocate a stack slot for it. */ |
| |
| if (reg_renumber[i] < 0 |
| && REG_N_REFS (i) > 0 |
| && reg_equiv_constant (i) == 0 |
| && (reg_equiv_invariant (i) == 0 |
| || reg_equiv_init (i) == 0) |
| && reg_equiv_memory_loc (i) == 0) |
| { |
| rtx x = NULL_RTX; |
| machine_mode mode = GET_MODE (regno_reg_rtx[i]); |
| poly_uint64 inherent_size = GET_MODE_SIZE (mode); |
| unsigned int inherent_align = GET_MODE_ALIGNMENT (mode); |
| machine_mode wider_mode = wider_subreg_mode (mode, reg_max_ref_mode[i]); |
| poly_uint64 total_size = GET_MODE_SIZE (wider_mode); |
| /* ??? Seems strange to derive the minimum alignment from the size, |
| but that's the traditional behavior. For polynomial-size modes, |
| the natural extension is to use the minimum possible size. */ |
| unsigned int min_align |
| = constant_lower_bound (GET_MODE_BITSIZE (reg_max_ref_mode[i])); |
| poly_int64 adjust = 0; |
| |
| something_was_spilled = true; |
| |
| if (ira_conflicts_p) |
| { |
| /* Mark the spill for IRA. */ |
| SET_REGNO_REG_SET (&spilled_pseudos, i); |
| if (!dont_share_p) |
| x = ira_reuse_stack_slot (i, inherent_size, total_size); |
| } |
| |
| if (x) |
| ; |
| |
| /* Each pseudo reg has an inherent size which comes from its own mode, |
| and a total size which provides room for paradoxical subregs |
| which refer to the pseudo reg in wider modes. |
| |
| We can use a slot already allocated if it provides both |
| enough inherent space and enough total space. |
| Otherwise, we allocate a new slot, making sure that it has no less |
         inherent space, and no less total space, than the previous slot.  */
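
      /* Illustrative example, assuming a target where SImode is 4 bytes
         and DImode 8: an SImode pseudo also referenced through a
         paradoxical DImode subreg has inherent_size 4 but total_size 8,
         so a 4-byte slot cannot be reused and any new slot must be at
         least 8 bytes wide.  */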
| else if (from_reg == -1 || (!dont_share_p && ira_conflicts_p)) |
| { |
| rtx stack_slot; |
| |
| /* The sizes are taken from a subreg operation, which guarantees |
| that they're ordered. */ |
| gcc_checking_assert (ordered_p (total_size, inherent_size)); |
| |
| /* No known place to spill from => no slot to reuse. */ |
| x = assign_stack_local (mode, total_size, |
| min_align > inherent_align |
| || maybe_gt (total_size, inherent_size) |
| ? -1 : 0); |
| |
| stack_slot = x; |
| |
| /* Cancel the big-endian correction done in assign_stack_local. |
| Get the address of the beginning of the slot. This is so we |
| can do a big-endian correction unconditionally below. */ |
| if (BYTES_BIG_ENDIAN) |
| { |
| adjust = inherent_size - total_size; |
| if (maybe_ne (adjust, 0)) |
| { |
| poly_uint64 total_bits = total_size * BITS_PER_UNIT; |
| machine_mode mem_mode |
| = int_mode_for_size (total_bits, 1).else_blk (); |
| stack_slot = adjust_address_nv (x, mem_mode, adjust); |
| } |
| } |
| |
| if (! dont_share_p && ira_conflicts_p) |
	    /* Inform IRA about the allocation of a new stack slot.  */
| ira_mark_new_stack_slot (stack_slot, i, total_size); |
| } |
| |
| /* Reuse a stack slot if possible. */ |
| else if (spill_stack_slot[from_reg] != 0 |
| && known_ge (spill_stack_slot_width[from_reg], total_size) |
| && known_ge (GET_MODE_SIZE |
| (GET_MODE (spill_stack_slot[from_reg])), |
| inherent_size) |
| && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align) |
| x = spill_stack_slot[from_reg]; |
| |
| /* Allocate a bigger slot. */ |
| else |
| { |
| /* Compute maximum size needed, both for inherent size |
| and for total size. */ |
| rtx stack_slot; |
| |
| if (spill_stack_slot[from_reg]) |
| { |
| if (partial_subreg_p (mode, |
| GET_MODE (spill_stack_slot[from_reg]))) |
| mode = GET_MODE (spill_stack_slot[from_reg]); |
| total_size = ordered_max (total_size, |
| spill_stack_slot_width[from_reg]); |
| if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align) |
| min_align = MEM_ALIGN (spill_stack_slot[from_reg]); |
| } |
| |
| /* The sizes are taken from a subreg operation, which guarantees |
| that they're ordered. */ |
| gcc_checking_assert (ordered_p (total_size, inherent_size)); |
| |
| /* Make a slot with that size. */ |
| x = assign_stack_local (mode, total_size, |
| min_align > inherent_align |
| || maybe_gt (total_size, inherent_size) |
| ? -1 : 0); |
| stack_slot = x; |
| |
| /* Cancel the big-endian correction done in assign_stack_local. |
| Get the address of the beginning of the slot. This is so we |
| can do a big-endian correction unconditionally below. */ |
| if (BYTES_BIG_ENDIAN) |
| { |
| adjust = GET_MODE_SIZE (mode) - total_size; |
| if (maybe_ne (adjust, 0)) |
| { |
| poly_uint64 total_bits = total_size * BITS_PER_UNIT; |
| machine_mode mem_mode |
| = int_mode_for_size (total_bits, 1).else_blk (); |
| stack_slot = adjust_address_nv (x, mem_mode, adjust); |
| } |
| } |
| |
| spill_stack_slot[from_reg] = stack_slot; |
| spill_stack_slot_width[from_reg] = total_size; |
| } |
| |
| /* On a big endian machine, the "address" of the slot |
| is the address of the low part that fits its inherent mode. */ |
| adjust += subreg_size_lowpart_offset (inherent_size, total_size); |
| |
| /* If we have any adjustment to make, or if the stack slot is the |
| wrong mode, make a new stack slot. */ |
| x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust); |
| |
| /* Set all of the memory attributes as appropriate for a spill. */ |
| set_mem_attrs_for_spill (x); |
| |
| /* Save the stack slot for later. */ |
| reg_equiv_memory_loc (i) = x; |
| } |
| } |
| |
| /* Mark the slots in regs_ever_live for the hard regs used by |
| pseudo-reg number REGNO, accessed in MODE. */ |
| |
| static void |
| mark_home_live_1 (int regno, machine_mode mode) |
| { |
| int i, lim; |
| |
| i = reg_renumber[regno]; |
| if (i < 0) |
| return; |
| lim = end_hard_regno (mode, i); |
| while (i < lim) |
| df_set_regs_ever_live (i++, true); |
| } |
| |
| /* Mark the slots in regs_ever_live for the hard regs |
| used by pseudo-reg number REGNO. */ |
| |
| void |
| mark_home_live (int regno) |
| { |
| if (reg_renumber[regno] >= 0) |
| mark_home_live_1 (regno, PSEUDO_REGNO_MODE (regno)); |
| } |
| |
| /* This function handles the tracking of elimination offsets around branches. |
| |
| X is a piece of RTL being scanned. |
| |
| INSN is the insn that it came from, if any. |
| |
| INITIAL_P is nonzero if we are to set the offset to be the initial |
| offset and zero if we are setting the offset of the label to be the |
| current offset. */ |
| |
| static void |
| set_label_offsets (rtx x, rtx_insn *insn, int initial_p) |
| { |
| enum rtx_code code = GET_CODE (x); |
| rtx tem; |
| unsigned int i; |
| struct elim_table *p; |
| |
| switch (code) |
| { |
| case LABEL_REF: |
| if (LABEL_REF_NONLOCAL_P (x)) |
| return; |
| |
| x = label_ref_label (x); |
| |
| /* fall through */ |
| |
| case CODE_LABEL: |
| /* If we know nothing about this label, set the desired offsets. Note |
| that this sets the offset at a label to be the offset before a label |
| if we don't know anything about the label. This is not correct for |
| the label after a BARRIER, but is the best guess we can make. If |
| we guessed wrong, we will suppress an elimination that might have |
| been possible had we been able to guess correctly. */ |
| |
| if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num]) |
| { |
| for (i = 0; i < NUM_ELIMINABLE_REGS; i++) |
| offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i] |
| = (initial_p ? reg_eliminate[i].initial_offset |
| : reg_eliminate[i].offset); |
| offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1; |
| } |
| |
| /* Otherwise, if this is the definition of a label and it is |
| preceded by a BARRIER, set our offsets to the known offset of |
| that label. */ |
| |
| else if (x == insn |
| && (tem = prev_nonnote_insn (insn)) != 0 |
| && BARRIER_P (tem)) |
| set_offsets_for_label (insn); |
| else |
| /* If neither of the above cases is true, compare each offset |
| with those previously recorded and suppress any eliminations |
| where the offsets disagree. */ |
| |
| for (i = 0; i < NUM_ELIMINABLE_REGS; i++) |
| if (maybe_ne (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i], |
| (initial_p ? reg_eliminate[i].initial_offset |
| : reg_eliminate[i].offset))) |
| reg_eliminate[i].can_eliminate = 0; |
| |
| return; |
| |
| case JUMP_TABLE_DATA: |
| set_label_offsets (PATTERN (insn), insn, initial_p); |
| return; |
| |
| case JUMP_INSN: |
| set_label_offsets (PATTERN (insn), insn, initial_p); |
| |
| /* fall through */ |
| |
| case INSN: |
| case CALL_INSN: |
| /* Any labels mentioned in REG_LABEL_OPERAND notes can be branched |
| to indirectly and hence must have all eliminations at their |
| initial offsets. */ |
| for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1)) |
| if (REG_NOTE_KIND (tem) == REG_LABEL_OPERAND) |
| set_label_offsets (XEXP (tem, 0), insn, 1); |
| return; |
| |
| case PARALLEL: |
| case ADDR_VEC: |
| case ADDR_DIFF_VEC: |
| /* Each of the labels in the parallel or address vector must be |
| at their initial offsets. We want the first field for PARALLEL |
| and ADDR_VEC and the second field for ADDR_DIFF_VEC. */ |
| |
| for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++) |
| set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i), |
| insn, initial_p); |
| return; |
| |
| case SET: |
| /* We only care about setting PC. If the source is not RETURN, |
| IF_THEN_ELSE, or a label, disable any eliminations not at |
| their initial offsets. Similarly if any arm of the IF_THEN_ELSE |
| isn't one of those possibilities. For branches to a label, |
| call ourselves recursively. |
| |
| Note that this can disable elimination unnecessarily when we have |
| a non-local goto since it will look like a non-constant jump to |
| someplace in the current function. This isn't a significant |
| problem since such jumps will normally be when all elimination |
| pairs are back to their initial offsets. */ |
| |
| if (SET_DEST (x) != pc_rtx) |
| return; |
| |
| switch (GET_CODE (SET_SRC (x))) |
| { |
| case PC: |
| case RETURN: |
| return; |
| |
| case LABEL_REF: |
| set_label_offsets (SET_SRC (x), insn, initial_p); |
| return; |
| |
| case IF_THEN_ELSE: |
| tem = XEXP (SET_SRC (x), 1); |
| if (GET_CODE (tem) == LABEL_REF) |
| set_label_offsets (label_ref_label (tem), insn, initial_p); |
| else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN) |
| break; |
| |
| tem = XEXP (SET_SRC (x), 2); |
| if (GET_CODE (tem) == LABEL_REF) |
| set_label_offsets (label_ref_label (tem), insn, initial_p); |
| else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN) |
| break; |
| return; |
| |
| default: |
| break; |
| } |
| |
| /* If we reach here, all eliminations must be at their initial |
| offset because we are doing a jump to a variable address. */ |
      for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
| if (maybe_ne (p->offset, p->initial_offset)) |
| p->can_eliminate = 0; |
| break; |
| |
| default: |
| break; |
| } |
| } |
| |
| /* This function examines every reg that occurs in X and adjusts the |
| costs for its elimination which are gathered by IRA. INSN is the |
| insn in which X occurs. We do not recurse into MEM expressions. */ |
| |
| static void |
| note_reg_elim_costly (const_rtx x, rtx insn) |
| { |
| subrtx_iterator::array_type array; |
| FOR_EACH_SUBRTX (iter, array, x, NONCONST) |
| { |
| const_rtx x = *iter; |
| if (MEM_P (x)) |
| iter.skip_subrtxes (); |
| else if (REG_P (x) |
| && REGNO (x) >= FIRST_PSEUDO_REGISTER |
| && reg_equiv_init (REGNO (x)) |
| && reg_equiv_invariant (REGNO (x))) |
| { |
| rtx t = reg_equiv_invariant (REGNO (x)); |
| rtx new_rtx = eliminate_regs_1 (t, Pmode, insn, true, true); |
| int cost = set_src_cost (new_rtx, Pmode, |
| optimize_bb_for_speed_p (elim_bb)); |
| int freq = REG_FREQ_FROM_BB (elim_bb); |
| |
| if (cost != 0) |
| ira_adjust_equiv_reg_cost (REGNO (x), -cost * freq); |
| } |
| } |
| } |
| |
| /* Scan X and replace any eliminable registers (such as fp) with a |
| replacement (such as sp), plus an offset. |
| |
| MEM_MODE is the mode of an enclosing MEM. We need this to know how |
| much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a |
| MEM, we are allowed to replace a sum of a register and the constant zero |
| with the register, which we cannot do outside a MEM. In addition, we need |
| to record the fact that a register is referenced outside a MEM. |
| |
| If INSN is an insn, it is the insn containing X. If we replace a REG |
| in a SET_DEST with an equivalent MEM and INSN is nonzero, write a |
| CLOBBER of the pseudo after INSN so find_equiv_regs will know that |
| the REG is being modified. |
| |
| Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST). |
| That's used when we eliminate in expressions stored in notes. |
| This means, do not set ref_outside_mem even if the reference |
| is outside of MEMs. |
| |
| If FOR_COSTS is true, we are being called before reload in order to |
| estimate the costs of keeping registers with an equivalence unallocated. |
| |
   REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
| replacements done assuming all offsets are at their initial values. If |
| they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we |
| encounter, return the actual location so that find_reloads will do |
| the proper thing. */ |
| |
| static rtx |
| eliminate_regs_1 (rtx x, machine_mode mem_mode, rtx insn, |
| bool may_use_invariant, bool for_costs) |
| { |
| enum rtx_code code = GET_CODE (x); |
| struct elim_table *ep; |
| int regno; |
| rtx new_rtx; |
| int i, j; |
| const char *fmt; |
| int copied = 0; |
| |
| if (! current_function_decl) |
| return x; |
| |
| switch (code) |
| { |
| CASE_CONST_ANY: |
| case CONST: |
| case SYMBOL_REF: |
| case CODE_LABEL: |
| case PC: |
| case ASM_INPUT: |
| case ADDR_VEC: |
| case ADDR_DIFF_VEC: |
| case RETURN: |
| return x; |
| |
| case REG: |
| regno = REGNO (x); |
| |
| /* First handle the case where we encounter a bare register that |
| is eliminable. Replace it with a PLUS. */ |
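      /* Illustrative example: when eliminating the frame pointer in
         favor of the stack pointer and the current offset is C, a bare
         (reg frame_pointer) becomes
         (plus (reg stack_pointer) (const_int C)).  */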
| if (regno < FIRST_PSEUDO_REGISTER) |
| { |
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
| ep++) |
| if (ep->from_rtx == x && ep->can_eliminate) |
| return plus_constant (Pmode, ep->to_rtx, ep->previous_offset); |
| |
| } |
| else if (reg_renumber && reg_renumber[regno] < 0 |
| && reg_equivs |
| && reg_equiv_invariant (regno)) |
| { |
| if (may_use_invariant || (insn && DEBUG_INSN_P (insn))) |
| return eliminate_regs_1 (copy_rtx (reg_equiv_invariant (regno)), |
| mem_mode, insn, true, for_costs); |
| /* There exists at least one use of REGNO that cannot be |
| eliminated. Prevent the defining insn from being deleted. */ |
| reg_equiv_init (regno) = NULL; |
| if (!for_costs) |
| alter_reg (regno, -1, true); |
| } |
| return x; |
| |
    /* You might think handling MINUS in a manner similar to PLUS is a
       good idea.  It is not.  It has been tried multiple times and every
       time the change has had to be reverted.

       Other parts of reload know a PLUS is special (gen_reload for example)
       and require special code to handle a reloaded PLUS operand.
| |
| Also consider backends where the flags register is clobbered by a |
| MINUS, but we can emit a PLUS that does not clobber flags (IA-32, |
| lea instruction comes to mind). If we try to reload a MINUS, we |
| may kill the flags register that was holding a useful value. |
| |
| So, please before trying to handle MINUS, consider reload as a |
| whole instead of this little section as well as the backend issues. */ |
| case PLUS: |
| /* If this is the sum of an eliminable register and a constant, rework |
| the sum. */ |
| if (REG_P (XEXP (x, 0)) |
| && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER |
| && CONSTANT_P (XEXP (x, 1))) |
| { |
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
| ep++) |
| if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate) |
| { |
| /* The only time we want to replace a PLUS with a REG (this |
| occurs when the constant operand of the PLUS is the negative |
| of the offset) is when we are inside a MEM. We won't want |
| to do so at other times because that would change the |
| structure of the insn in a way that reload can't handle. |
| We special-case the commonest situation in |
| eliminate_regs_in_insn, so just replace a PLUS with a |
| PLUS here, unless inside a MEM. In DEBUG_INSNs, it is |
| always ok to replace a PLUS with just a REG. */ |
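                /* Illustrative example: with the elimination fp = sp + C,
                   (plus (reg fp) (const_int -C)) inside a MEM collapses to
                   (reg sp), while (plus (reg fp) (const_int 4)) becomes
                   (plus (reg sp) (const_int C + 4)).  */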
| if ((mem_mode != 0 || (insn && DEBUG_INSN_P (insn))) |
| && CONST_INT_P (XEXP (x, 1)) |
| && known_eq (INTVAL (XEXP (x, 1)), -ep->previous_offset)) |
| return ep->to_rtx; |
| else |
| return gen_rtx_PLUS (Pmode, ep->to_rtx, |
| plus_constant (Pmode, XEXP (x, 1), |
| ep->previous_offset)); |
| } |
| |
| /* If the register is not eliminable, we are done since the other |
| operand is a constant. */ |
| return x; |
| } |
| |
| /* If this is part of an address, we want to bring any constant to the |
| outermost PLUS. We will do this by doing register replacement in |
| our operands and seeing if a constant shows up in one of them. |
| |
| Note that there is no risk of modifying the structure of the insn, |
| since we only get called for its operands, thus we are either |
| modifying the address inside a MEM, or something like an address |
| operand of a load-address insn. */ |
| |
| { |
| rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true, |
| for_costs); |
| rtx new1 = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true, |
| for_costs); |
| |
| if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))) |
| { |
| /* If one side is a PLUS and the other side is a pseudo that |
| didn't get a hard register but has a reg_equiv_constant, |
| we must replace the constant here since it may no longer |
| be in the position of any operand. */ |
| if (GET_CODE (new0) == PLUS && REG_P (new1) |
| && REGNO (new1) >= FIRST_PSEUDO_REGISTER |
| && reg_renumber[REGNO (new1)] < 0 |
| && reg_equivs |
| && reg_equiv_constant (REGNO (new1)) != 0) |
| new1 = reg_equiv_constant (REGNO (new1)); |
| else if (GET_CODE (new1) == PLUS && REG_P (new0) |
| && REGNO (new0) >= FIRST_PSEUDO_REGISTER |
| && reg_renumber[REGNO (new0)] < 0 |
| && reg_equiv_constant (REGNO (new0)) != 0) |
| new0 = reg_equiv_constant (REGNO (new0)); |
| |
| new_rtx = form_sum (GET_MODE (x), new0, new1); |
| |
| /* As above, if we are not inside a MEM we do not want to |
| turn a PLUS into something else. We might try to do so here |
| for an addition of 0 if we aren't optimizing. */ |
| if (! mem_mode && GET_CODE (new_rtx) != PLUS) |
| return gen_rtx_PLUS (GET_MODE (x), new_rtx, const0_rtx); |
| else |
| return new_rtx; |
| } |
| } |
| return x; |
| |
| case MULT: |
| /* If this is the product of an eliminable register and a |
       constant, apply the distributive law and move the constant out
| so that we have (plus (mult ..) ..). This is needed in order |
| to keep load-address insns valid. This case is pathological. |
| We ignore the possibility of overflow here. */ |
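      /* Illustrative example: with the elimination fp = sp + C,
         (mult (reg fp) (const_int 4)) becomes
         (plus (mult (reg sp) (const_int 4)) (const_int 4*C)).  */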
| if (REG_P (XEXP (x, 0)) |
| && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER |
| && CONST_INT_P (XEXP (x, 1))) |
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
| ep++) |
| if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate) |
| { |
| if (! mem_mode |
| /* Refs inside notes or in DEBUG_INSNs don't count for |
| this purpose. */ |
| && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST |
| || GET_CODE (insn) == INSN_LIST |
| || DEBUG_INSN_P (insn)))) |
| ep->ref_outside_mem = 1; |
| |
| return |
| plus_constant (Pmode, |
| gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)), |
| ep->previous_offset * INTVAL (XEXP (x, 1))); |
| } |
| |
| /* fall through */ |
| |
| case CALL: |
| case COMPARE: |
| /* See comments before PLUS about handling MINUS. */ |
| case MINUS: |
| case DIV: case UDIV: |
| case MOD: case UMOD: |
| case AND: case IOR: case XOR: |
| case ROTATERT: case ROTATE: |
| case ASHIFTRT: case LSHIFTRT: case ASHIFT: |
| case NE: case EQ: |
| case GE: case GT: case GEU: case GTU: |
| case LE: case LT: case LEU: case LTU: |
| { |
| rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false, |
| for_costs); |
| rtx new1 = XEXP (x, 1) |
| ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, false, |
| for_costs) : 0; |
| |
| if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)) |
| return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1); |
| } |
| return x; |
| |
| case EXPR_LIST: |
| /* If we have something in XEXP (x, 0), the usual case, eliminate it. */ |
| if (XEXP (x, 0)) |
| { |
| new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true, |
| for_costs); |
| if (new_rtx != XEXP (x, 0)) |
| { |
| /* If this is a REG_DEAD note, it is not valid anymore. |
| Using the eliminated version could result in creating a |
| REG_DEAD note for the stack or frame pointer. */ |
| if (REG_NOTE_KIND (x) == REG_DEAD) |
| return (XEXP (x, 1) |
| ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true, |
| for_costs) |
| : NULL_RTX); |
| |
| x = alloc_reg_note (REG_NOTE_KIND (x), new_rtx, XEXP (x, 1)); |
| } |
| } |
| |
| /* fall through */ |
| |
| case INSN_LIST: |
| case INT_LIST: |
| /* Now do eliminations in the rest of the chain. If this was |
| an EXPR_LIST, this might result in allocating more memory than is |
| strictly needed, but it simplifies the code. */ |
| if (XEXP (x, 1)) |
| { |
| new_rtx = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true, |
| for_costs); |
| if (new_rtx != XEXP (x, 1)) |
| return |
| gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new_rtx); |
| } |
| return x; |
| |
| case PRE_INC: |
| case POST_INC: |
| case PRE_DEC: |
| case POST_DEC: |
| /* We do not support elimination of a register that is modified. |
	 elimination_effects has already made sure that this does not
| happen. */ |
| return x; |
| |
| case PRE_MODIFY: |
| case POST_MODIFY: |
| /* We do not support elimination of a register that is modified. |
	 elimination_effects has already made sure that this does not
| happen. The only remaining case we need to consider here is |
| that the increment value may be an eliminable register. */ |
| if (GET_CODE (XEXP (x, 1)) == PLUS |
| && XEXP (XEXP (x, 1), 0) == XEXP (x, 0)) |
| { |
| rtx new_rtx = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode, |
| insn, true, for_costs); |
| |
| if (new_rtx != XEXP (XEXP (x, 1), 1)) |
| return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0), |
| gen_rtx_PLUS (GET_MODE (x), |
| XEXP (x, 0), new_rtx)); |
| } |
| return x; |
| |
| case STRICT_LOW_PART: |
| case NEG: case NOT: |
| case SIGN_EXTEND: case ZERO_EXTEND: |
| case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE: |
| case FLOAT: case FIX: |
| case UNSIGNED_FIX: case UNSIGNED_FLOAT: |
| case ABS: |
| case SQRT: |
| case FFS: |
| case CLZ: |
| case CTZ: |
| case POPCOUNT: |
| case PARITY: |
| case BSWAP: |
| new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false, |
| for_costs); |
| if (new_rtx != XEXP (x, 0)) |
| return gen_rtx_fmt_e (code, GET_MODE (x), new_rtx); |
| return x; |
| |
| case SUBREG: |
| /* Similar to above processing, but preserve SUBREG_BYTE. |
| Convert (subreg (mem)) to (mem) if not paradoxical. |
| Also, if we have a non-paradoxical (subreg (pseudo)) and the |
| pseudo didn't get a hard reg, we must replace this with the |
| eliminated version of the memory location because push_reload |
| may do the replacement in certain circumstances. */ |
| if (REG_P (SUBREG_REG (x)) |
| && !paradoxical_subreg_p (x) |
| && reg_equivs |
| && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0) |
| { |
| new_rtx = SUBREG_REG (x); |
| } |
| else |
        new_rtx = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false,
                                    for_costs);
| |
| if (new_rtx != SUBREG_REG (x)) |
| { |
| poly_int64 x_size = GET_MODE_SIZE (GET_MODE (x)); |
| poly_int64 new_size = GET_MODE_SIZE (GET_MODE (new_rtx)); |
| |
| if (MEM_P (new_rtx) |
| && ((partial_subreg_p (GET_MODE (x), GET_MODE (new_rtx)) |
| /* On RISC machines, combine can create rtl of the form |
| (set (subreg:m1 (reg:m2 R) 0) ...) |
| where m1 < m2, and expects something interesting to |
| happen to the entire word. Moreover, it will use the |
| (reg:m2 R) later, expecting all bits to be preserved. |
| So if the number of words is the same, preserve the |
| subreg so that push_reload can see it. */ |
| && !(WORD_REGISTER_OPERATIONS |
| && known_equal_after_align_down (x_size - 1, |
| new_size - 1, |
| UNITS_PER_WORD))) |
| || known_eq (x_size, new_size)) |
| ) |
| return adjust_address_nv (new_rtx, GET_MODE (x), SUBREG_BYTE (x)); |
| else if (insn && GET_CODE (insn) == DEBUG_INSN) |
| return gen_rtx_raw_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x)); |
| else |
| return gen_rtx_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x)); |
| } |
| |
| return x; |
| |
| case MEM: |
| /* Our only special processing is to pass the mode of the MEM to our |
| recursive call and copy the flags. While we are here, handle this |
| case more efficiently. */ |
| |
| new_rtx = eliminate_regs_1 (XEXP (x, 0), GET_MODE (x), insn, true, |
| for_costs); |
| if (for_costs |
| && memory_address_p (GET_MODE (x), XEXP (x, 0)) |
| && !memory_address_p (GET_MODE (x), new_rtx)) |
| note_reg_elim_costly (XEXP (x, 0), insn); |
| |
| return replace_equiv_address_nv (x, new_rtx); |
| |
| case USE: |
      /* Handle an insn_list USE that a call to a pure function may
	 generate.  */
| new_rtx = eliminate_regs_1 (XEXP (x, 0), VOIDmode, insn, false, |
| for_costs); |
| if (new_rtx != XEXP (x, 0)) |
| return gen_rtx_USE (GET_MODE (x), new_rtx); |
| return x; |
| |
| case CLOBBER: |
| case ASM_OPERANDS: |
| gcc_assert (insn && DEBUG_INSN_P (insn)); |
| break; |
| |
| case SET: |
| gcc_unreachable (); |
| |
| default: |
| break; |
| } |
| |
| /* Process each of our operands recursively. If any have changed, make a |
| copy of the rtx. */ |
| fmt = GET_RTX_FORMAT (code); |
| for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++) |
| { |
| if (*fmt == 'e') |
| { |
| new_rtx = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false, |
| for_costs); |
| if (new_rtx != XEXP (x, i) && ! copied) |
| { |
| x = shallow_copy_rtx (x); |
| copied = 1; |
| } |
| XEXP (x, i) = new_rtx; |
| } |
| else if (*fmt == 'E') |
| { |
| int copied_vec = 0; |
| for (j = 0; j < XVECLEN (x, i); j++) |
| { |
| new_rtx = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false, |
| for_costs); |
| if (new_rtx != XVECEXP (x, i, j) && ! copied_vec) |
| { |
| rtvec new_v = gen_rtvec_v (XVECLEN (x, i), |
| XVEC (x, i)->elem); |
| if (! copied) |
| { |
| x = shallow_copy_rtx (x); |
| copied = 1; |
| } |
| XVEC (x, i) = new_v; |
| copied_vec = 1; |
| } |
| XVECEXP (x, i, j) = new_rtx; |
| } |
| } |
| } |
| |
| return x; |
| } |
| |
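/* Wrapper around eliminate_regs_1 for use by code outside this pass;
   it behaves as eliminate_regs_1 with MAY_USE_INVARIANT and FOR_COSTS
   both false.  When the target does no register allocation, there are
   no eliminations to apply and X is returned unchanged.  */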
| rtx |
| eliminate_regs (rtx x, machine_mode mem_mode, rtx insn) |
| { |
| if (reg_eliminate == NULL) |
| { |
| gcc_assert (targetm.no_register_allocation); |
| return x; |
| } |
| return eliminate_regs_1 (x, mem_mode, insn, false, false); |
| } |
| |
| /* Scan rtx X for modifications of elimination target registers. Update |
| the table of eliminables to reflect the changed state. MEM_MODE is |
| the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM. */ |
| |
| static void |
| elimination_effects (rtx x, machine_mode mem_mode) |
| { |
| enum rtx_code code = GET_CODE (x); |
| struct elim_table *ep; |
| int regno; |
| int i, j; |
| const char *fmt; |
| |
| switch (code) |
| { |
| CASE_CONST_ANY: |
| case CONST: |
| case SYMBOL_REF: |
| case CODE_LABEL: |
| case PC: |
| case ASM_INPUT: |
| case ADDR_VEC: |
| case ADDR_DIFF_VEC: |
| case RETURN: |
| return; |
| |
| case REG: |
| regno = REGNO (x); |
| |
| /* First handle the case where we encounter a bare register that |
| is eliminable. Replace it with a PLUS. */ |
| if (regno < FIRST_PSEUDO_REGISTER) |
| { |
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
| ep++) |
| if (ep->from_rtx == x && ep->can_eliminate) |
| { |
| if (! mem_mode) |
| ep->ref_outside_mem = 1; |
| return; |
| } |
| |
| } |
| else if (reg_renumber[regno] < 0 |
| && reg_equivs |
| && reg_equiv_constant (regno) |
| && ! function_invariant_p (reg_equiv_constant (regno))) |
| elimination_effects (reg_equiv_constant (regno), mem_mode); |
| return; |
| |
| case PRE_INC: |
| case POST_INC: |
| case PRE_DEC: |
| case POST_DEC: |
| case POST_MODIFY: |
| case PRE_MODIFY: |
| /* If we modify the source of an elimination rule, disable it. */ |
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
| if (ep->from_rtx == XEXP (x, 0)) |
| ep->can_eliminate = 0; |
| |
| /* If we modify the target of an elimination rule by adding a constant, |
| update its offset. If we modify the target in any other way, we'll |
| have to disable the rule as well. */ |
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
| if (ep->to_rtx == XEXP (x, 0)) |
| { |
| poly_int64 size = GET_MODE_SIZE (mem_mode); |
| |
| /* If more bytes than MEM_MODE are pushed, account for them. */ |
| #ifdef PUSH_ROUNDING |
| if (ep->to_rtx == stack_pointer_rtx) |
| size = PUSH_ROUNDING (size); |
| #endif |
| if (code == PRE_DEC || code == POST_DEC) |
| ep->offset += size; |
| else if (code == PRE_INC || code == POST_INC) |
| ep->offset -= size; |
| else if (code == PRE_MODIFY || code == POST_MODIFY) |
| { |
| if (GET_CODE (XEXP (x, 1)) == PLUS |
| && XEXP (x, 0) == XEXP (XEXP (x, 1), 0) |
| && CONST_INT_P (XEXP (XEXP (x, 1), 1))) |
| ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1)); |
| else |
| ep->can_eliminate = 0; |
| } |
| } |
| |
| /* These two aren't unary operators. */ |
| if (code == POST_MODIFY || code == PRE_MODIFY) |
| break; |
| |
| /* Fall through to generic unary operation case. */ |
| gcc_fallthrough (); |
| case STRICT_LOW_PART: |
| case NEG: case NOT: |
| case SIGN_EXTEND: case ZERO_EXTEND: |
| case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE: |
| case FLOAT: case FIX: |
| case UNSIGNED_FIX: case UNSIGNED_FLOAT: |
| case ABS: |
| case SQRT: |
| case FFS: |
| case CLZ: |
| case CTZ: |
| case POPCOUNT: |
| case PARITY: |
| case BSWAP: |
| elimination_effects (XEXP (x, 0), mem_mode); |
| return; |
| |
| case SUBREG: |
| if (REG_P (SUBREG_REG (x)) |
| && !paradoxical_subreg_p (x) |
| && reg_equivs |
| && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0) |
| return; |
| |
| elimination_effects (SUBREG_REG (x), mem_mode); |
| return; |
| |
| case USE: |
      /* If using a register that is the source of an elimination we still
	 think can be performed, note that it cannot be performed since we
	 don't know how this register is used.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
| if (ep->from_rtx == XEXP (x, 0)) |
| ep->can_eliminate = 0; |
| |
| elimination_effects (XEXP (x, 0), mem_mode); |
| return; |
| |
| case CLOBBER: |
| /* If clobbering a register that is the replacement register for an |
| elimination we still think can be performed, note that it cannot |
| be performed. Otherwise, we need not be concerned about it. */ |
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
| if (ep->to_rtx == XEXP (x, 0)) |
| ep->can_eliminate = 0; |
| |
| elimination_effects (XEXP (x, 0), mem_mode); |
| return; |
| |
| case SET: |
| /* Check for setting a register that we know about. */ |
| if (REG_P (SET_DEST (x))) |
| { |
| /* See if this is setting the replacement register for an |
| elimination. |
| |
| If DEST is the hard frame pointer, we do nothing because we |
| assume that all assignments to the frame pointer are for |
| non-local gotos and are being done at a time when they are valid |
| and do not disturb anything else. Some machines want to |
| eliminate a fake argument pointer (or even a fake frame pointer) |
| with either the real frame or the stack pointer. Assignments to |
| the hard frame pointer must not prevent this elimination. */ |
| |
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
| ep++) |
| if (ep->to_rtx == SET_DEST (x) |
| && SET_DEST (x) != hard_frame_pointer_rtx) |
| { |
| /* If it is being incremented, adjust the offset. Otherwise, |
| this elimination can't be done. */ |
| rtx src = SET_SRC (x); |
| |
| if (GET_CODE (src) == PLUS |
| && XEXP (src, 0) == SET_DEST (x) |
| && CONST_INT_P (XEXP (src, 1))) |
| ep->offset -= INTVAL (XEXP (src, 1)); |
| else |
| ep->can_eliminate = 0; |
| } |
| } |
| |
| elimination_effects (SET_DEST (x), VOIDmode); |
| elimination_effects (SET_SRC (x), VOIDmode); |
| return; |
| |
| case MEM: |
| /* Our only special processing is to pass the mode of the MEM to our |
| recursive call. */ |
| elimination_effects (XEXP (x, 0), GET_MODE (x)); |
| return; |
| |
| default: |
| break; |
| } |
| |
| fmt = GET_RTX_FORMAT (code); |
| for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++) |
| { |
| if (*fmt == 'e') |
| elimination_effects (XEXP (x, i), mem_mode); |
| else if (*fmt == 'E') |
| for (j = 0; j < XVECLEN (x, i); j++) |
| elimination_effects (XVECEXP (x, i, j), mem_mode); |
| } |
| } |
| |
| /* Descend through rtx X and verify that no references to eliminable registers |
| remain. If any do remain, mark the involved register as not |
| eliminable. */ |
| |
| static void |
| check_eliminable_occurrences (rtx x) |
| { |
| const char *fmt; |
|