| /* Common subexpression elimination for GNU compiler. |
| Copyright (C) 1987-2019 Free Software Foundation, Inc. |
| |
| This file is part of GCC. |
| |
| GCC is free software; you can redistribute it and/or modify it under |
| the terms of the GNU General Public License as published by the Free |
| Software Foundation; either version 3, or (at your option) any later |
| version. |
| |
| GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
| WARRANTY; without even the implied warranty of MERCHANTABILITY or |
| FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
| for more details. |
| |
| You should have received a copy of the GNU General Public License |
| along with GCC; see the file COPYING3. If not see |
| <http://www.gnu.org/licenses/>. */ |
| |
| #include "config.h" |
| #include "system.h" |
| #include "coretypes.h" |
| #include "backend.h" |
| #include "target.h" |
| #include "rtl.h" |
| #include "tree.h" |
| #include "cfghooks.h" |
| #include "df.h" |
| #include "memmodel.h" |
| #include "tm_p.h" |
| #include "insn-config.h" |
| #include "regs.h" |
| #include "emit-rtl.h" |
| #include "recog.h" |
| #include "cfgrtl.h" |
| #include "cfganal.h" |
| #include "cfgcleanup.h" |
| #include "alias.h" |
| #include "toplev.h" |
| #include "params.h" |
| #include "rtlhooks-def.h" |
| #include "tree-pass.h" |
| #include "dbgcnt.h" |
| #include "rtl-iter.h" |
| |
| /* The basic idea of common subexpression elimination is to go |
| through the code, keeping a record of expressions that would |
| have the same value at the current scan point, and replacing |
| expressions encountered with the cheapest equivalent expression. |
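| 
| For instance, given
| 
| (set (reg:SI 100) (plus:SI (reg:SI 101) (reg:SI 102)))
| ...
| (set (reg:SI 103) (plus:SI (reg:SI 101) (reg:SI 102)))
| 
| the second PLUS is found in the hash table and, provided none of the
| registers involved has been modified in between, its source can be
| replaced by the cheaper equivalent (reg:SI 100).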
| |
| It is too complicated to keep track of the different possibilities |
| when control paths merge in this code; so, at each label, we forget all |
| that is known and start fresh. This can be described as processing each |
| extended basic block separately. We have a separate pass to perform |
| global CSE. |
| |
| Note CSE can turn a conditional or computed jump into a nop or |
| an unconditional jump. When this occurs we arrange to run the jump |
| optimizer after CSE to delete the unreachable code. |
| |
| We use two data structures to record the equivalent expressions: |
| a hash table for most expressions, and a vector of "quantity |
| numbers" to record equivalent (pseudo) registers. |
| |
| The use of the special data structure for registers is desirable |
| because it is faster. It is possible because register references
| contain a fairly small number, the register number, taken from |
| a contiguously allocated series, and two register references are |
| identical if they have the same number. General expressions |
| do not have any such thing, so the only way to retrieve the |
| information recorded on an expression other than a register |
| is to keep it in a hash table. |
| |
| Registers and "quantity numbers": |
| |
| At the start of each basic block, all of the (hardware and pseudo) |
| registers used in the function are given distinct quantity |
| numbers to indicate their contents. During scan, when the code |
| copies one register into another, we copy the quantity number. |
| When a register is loaded in any other way, we allocate a new |
| quantity number to describe the value generated by this operation. |
| `REG_QTY (N)' records what quantity register N is currently thought |
| of as containing. |
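| 
| For instance, after scanning (set (reg:SI 101) (reg:SI 100)), both
| registers share one quantity number, so REG_QTY (101) == REG_QTY (100);
| a later load of register 101 from memory would give it a fresh
| quantity number.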
| |
| All real quantity numbers are greater than or equal to zero. |
| If register N has not been assigned a quantity, `REG_QTY (N)' will |
| equal -N - 1, which is always negative. |
| |
| Quantity numbers below zero do not exist and none of the `qty_table' |
| entries should be referenced with a negative index. |
| |
| We also maintain a bidirectional chain of registers for each |
| quantity number. The `qty_table` members `first_reg' and `last_reg', |
| and `reg_eqv_table' members `next' and `prev' hold these chains. |
| |
| The first register in a chain is the one whose lifespan is least local. |
| Among equals, it is the one that was seen first. |
| We replace any equivalent register with that one. |
| |
| If two registers have the same quantity number, then REG expressions
| with the qty_table `mode' must be in the hash table for both registers
| and must be in the same class.
| |
| The converse is not true. Since hard registers may be referenced in |
| any mode, two REG expressions might be equivalent in the hash table |
| but not have the same quantity number if the mode of the quantity of
| one of the registers differs from the mode of those expressions.
| |
| Constants and quantity numbers |
| |
| When a quantity has a known constant value, that value is stored |
| in the appropriate qty_table `const_rtx'. This is in addition to |
| putting the constant in the hash table as is usual for non-regs. |
| |
| Whether a reg or a constant is preferred is determined by the configuration |
| macro CONST_COSTS and will often depend on the constant value. In any |
| event, expressions containing constants can be simplified, by fold_rtx. |
| |
| When a quantity has a known nearly constant value (such as an address |
| of a stack slot), that value is stored in the appropriate qty_table |
| `const_rtx'. |
| |
| Integer constants don't have a machine mode. However, cse |
| determines the intended machine mode from the destination |
| of the instruction that moves the constant. The machine mode |
| is recorded in the hash table along with the actual RTL |
| constant expression so that different modes are kept separate. |
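| 
| For instance, when scanning (set (reg:HI 100) (const_int 7)), the
| constant 7 is recorded with mode HImode, separately from any entry
| for the same constant used in SImode.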
| |
| Other expressions: |
| |
| To record known equivalences among expressions in general |
| we use a hash table called `table'. It has a fixed number of buckets |
| that contain chains of `struct table_elt' elements for expressions. |
| These chains connect the elements whose expressions have the same |
| hash codes. |
| |
| Other chains through the same elements connect the elements which |
| currently have equivalent values. |
| |
| Register references in an expression are canonicalized before hashing |
| the expression. This is done using `reg_qty' and qty_table `first_reg'. |
| The hash code of a register reference is computed using the quantity |
| number, not the register number. |
| |
| When the value of an expression changes, it is necessary to remove from the |
| hash table not just that expression but all expressions whose values |
| could be different as a result. |
| |
| 1. If the value changing is in memory, except in special cases |
| ANYTHING referring to memory could be changed. That is because |
| nobody knows where a pointer does not point. |
| The function `invalidate_memory' removes what is necessary. |
| |
| The special cases are when the address is constant or is |
| a constant plus a fixed register such as the frame pointer |
| or a static chain pointer. When such addresses are stored in, |
| we can tell exactly which other such addresses must be invalidated |
| due to overlap. `invalidate' does this. |
| All expressions that refer to non-constant |
| memory addresses are also invalidated. `invalidate_memory' does this. |
| |
| 2. If the value changing is a register, all expressions |
| containing references to that register, and only those, |
| must be removed. |
| |
| Because searching the entire hash table for expressions that contain |
| a register is very slow, we try to figure out when it isn't necessary. |
| Precisely, this is necessary only when expressions have been |
| entered in the hash table using this register, and then the value has |
| changed, and then another expression wants to be added to refer to |
| the register's new value. This sequence of circumstances is rare |
| within any one basic block. |
| |
| `REG_TICK' and `REG_IN_TABLE', accessors for members of |
| cse_reg_info, are used to detect this case. REG_TICK (i) is |
| incremented whenever a value is stored in register i. |
| REG_IN_TABLE (i) holds -1 if no references to register i have been |
| entered in the table; otherwise, it contains the value REG_TICK (i) |
| had when the references were entered. If we want to enter a |
| reference and REG_IN_TABLE (i) != REG_TICK (i), we must scan and |
| remove old references. Until we want to enter a new entry, the |
| mere fact that the two vectors don't match causes the entries to be
| ignored if anyone tries to match them.
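| 
| For instance, suppose an expression mentioning register 6 is entered
| while REG_TICK (6) == 2, making REG_IN_TABLE (6) == 2. A later store
| to register 6 bumps REG_TICK (6) to 3; the stale entries are only
| scanned for and removed when a new reference to register 6 is about
| to be entered.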
| |
| Registers themselves are entered in the hash table as well as in |
| the equivalent-register chains. However, `REG_TICK' and |
| `REG_IN_TABLE' do not apply to expressions which are simple |
| register references. These expressions are removed from the table |
| immediately when they become invalid, and this can be done even if |
| we do not immediately search for all the expressions that refer to |
| the register. |
| |
| A CLOBBER rtx in an instruction invalidates its operand for further |
| reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK |
| invalidates everything that resides in memory. |
| |
| Related expressions: |
| |
| Constant expressions that differ only by an additive integer |
| are called related. When a constant expression is put in |
| the table, the related expression with no constant term |
| is also entered. These are made to point at each other |
| so that it is possible to find out if there exists any |
| register equivalent to an expression related to a given expression. */ |
| |
| /* Length of qty_table vector. We know in advance we will not need |
| a quantity number this big. */ |
| |
| static int max_qty; |
| |
| /* Next quantity number to be allocated. |
| This is 1 + the largest number needed so far. */ |
| |
| static int next_qty; |
| |
| /* Per-qty information tracking. |
| |
| `first_reg' and `last_reg' track the head and tail of the |
| chain of registers which currently contain this quantity. |
| |
| `mode' contains the machine mode of this quantity. |
| |
| `const_rtx' holds the rtx of the constant value of this |
| quantity, if known. A sum of the frame/arg pointer
| and a constant can also be entered here. When this holds |
| a known value, `const_insn' is the insn which stored the |
| constant value. |
| |
| `comparison_{code,const,qty}' are used to track when a |
| comparison between a quantity and some constant or register has |
| been passed. In such a case, we know the results of the comparison |
| in case we see it again. These members record a comparison that |
| is known to be true. `comparison_code' holds the rtx code of such |
| a comparison, else it is set to UNKNOWN and the other two |
| comparison members are undefined. `comparison_const' holds |
| the constant being compared against, or zero if the comparison |
| is not against a constant. `comparison_qty' holds the quantity |
| being compared against when the result is known. If the comparison |
| is not with a register, `comparison_qty' is -1. */ |
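| 
| /* For instance, if we have seen that (ltu (reg:SI 100) (reg:SI 101))
| is true, the quantity for register 100 can record
| comparison_code == LTU, comparison_const == 0 and
| comparison_qty == REG_QTY (101). */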
| |
| struct qty_table_elem |
| { |
| rtx const_rtx; |
| rtx_insn *const_insn; |
| rtx comparison_const; |
| int comparison_qty; |
| unsigned int first_reg, last_reg; |
| /* The sizes of these fields should match the sizes of the |
| code and mode fields of struct rtx_def (see rtl.h). */ |
| ENUM_BITFIELD(rtx_code) comparison_code : 16; |
| ENUM_BITFIELD(machine_mode) mode : 8; |
| }; |
| |
| /* The table of all qtys, indexed by qty number. */ |
| static struct qty_table_elem *qty_table; |
| |
| /* For machines that have a CC0, we do not record its value in the hash |
| table since its use is guaranteed to be the insn immediately following |
| its definition and any other insn is presumed to invalidate it. |
| |
| Instead, we store below the current and last value assigned to CC0. |
| If it should happen to be a constant, it is stored in preference |
| to the actual assigned value. In case it is a constant, we store |
| the mode in which the constant should be interpreted. */ |
| |
| static rtx this_insn_cc0, prev_insn_cc0; |
| static machine_mode this_insn_cc0_mode, prev_insn_cc0_mode; |
| |
| /* Insn being scanned. */ |
| |
| static rtx_insn *this_insn; |
| static bool optimize_this_for_speed_p; |
| |
| /* Indexed by register number, gives the number of the next (or
| previous) register in the chain of registers sharing the same |
| value. |
| |
| Or -1 if this register is at the end of the chain. |
| |
| If REG_QTY (N) == -N - 1, reg_eqv_table[N].next is undefined. */ |
| |
| /* Per-register equivalence chain. */ |
| struct reg_eqv_elem |
| { |
| int next, prev; |
| }; |
| |
| /* The table of all register equivalence chains. */ |
| static struct reg_eqv_elem *reg_eqv_table; |
| |
| struct cse_reg_info |
| { |
| /* The timestamp at which this register is initialized. */ |
| unsigned int timestamp; |
| |
| /* The quantity number of the register's current contents. */ |
| int reg_qty; |
| |
| /* The number of times the register has been altered in the current |
| basic block. */ |
| int reg_tick; |
| |
| /* The REG_TICK value at which rtx's containing this register are |
| valid in the hash table. If this does not equal the current |
| reg_tick value, such expressions existing in the hash table are |
| invalid. */ |
| int reg_in_table; |
| |
| /* The SUBREG that was set when REG_TICK was last incremented. Set |
| to -1 if the last store was to the whole register, not a subreg. */ |
| unsigned int subreg_ticked; |
| }; |
| |
| /* A table of cse_reg_info indexed by register numbers. */ |
| static struct cse_reg_info *cse_reg_info_table; |
| |
| /* The size of the above table. */ |
| static unsigned int cse_reg_info_table_size; |
| |
| /* The index of the first entry that has not been initialized. */ |
| static unsigned int cse_reg_info_table_first_uninitialized; |
| |
| /* The timestamp at the beginning of the current run of |
| cse_extended_basic_block. We increment this variable at the beginning of |
| the current run of cse_extended_basic_block. The timestamp field of a |
| cse_reg_info entry matches the value of this variable if and only |
| if the entry has been initialized during the current run of |
| cse_extended_basic_block. */ |
| static unsigned int cse_reg_info_timestamp; |
| |
| /* A HARD_REG_SET containing all the hard registers for which there is |
| currently a REG expression in the hash table. Note the difference |
| from the above variables, which indicate if the REG is mentioned in some |
| expression in the table. */ |
| |
| static HARD_REG_SET hard_regs_in_table; |
| |
| /* True if CSE has altered the CFG. */ |
| static bool cse_cfg_altered; |
| |
| /* True if CSE has altered conditional jump insns in such a way |
| that jump optimization should be redone. */ |
| static bool cse_jumps_altered; |
| |
| /* True if we put a LABEL_REF into the hash table for an INSN
| without a REG_LABEL_OPERAND; if so, we have to rerun jump after CSE
| to put in the note. */
| static bool recorded_label_ref; |
| |
| /* canon_hash stores 1 in do_not_record |
| if it notices a reference to CC0, PC, or some other volatile |
| subexpression. */ |
| |
| static int do_not_record; |
| |
| /* canon_hash stores 1 in hash_arg_in_memory |
| if it notices a reference to memory within the expression being hashed. */ |
| |
| static int hash_arg_in_memory; |
| |
| /* The hash table contains buckets which are chains of `struct table_elt's, |
| each recording one expression's information. |
| That expression is in the `exp' field. |
| |
| The canon_exp field contains a canonical (from the point of view of |
| alias analysis) version of the `exp' field. |
| |
| Those elements with the same hash code are chained in both directions |
| through the `next_same_hash' and `prev_same_hash' fields. |
| |
| Each set of expressions with equivalent values |
| are on a two-way chain through the `next_same_value' |
| and `prev_same_value' fields, and all point with |
| the `first_same_value' field at the first element in |
| that chain. The chain is in order of increasing cost. |
| Each element's cost value is in its `cost' field. |
| |
| The `in_memory' field is nonzero for elements that |
| involve any reference to memory. These elements are removed |
| whenever a write is done to an unidentified location in memory. |
| To be safe, we assume that a memory address is unidentified unless |
| the address is either a symbol constant or a constant plus |
| the frame pointer or argument pointer. |
| |
| The `related_value' field is used to connect related expressions |
| (that differ by adding an integer). |
| The related expressions are chained in a circular fashion. |
| `related_value' is zero for expressions for which this |
| chain is not useful. |
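| 
| For instance, entering (const (plus (symbol_ref "x") (const_int 8)))
| also enters the related (symbol_ref "x"), and the two table elements
| are linked through their `related_value' fields.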
| |
| The `cost' field stores the cost of this element's expression. |
| The `regcost' field stores the value returned by approx_reg_cost for |
| this element's expression. |
| |
| The `is_const' flag is set if the element is a constant (including |
| a fixed address). |
| |
| The `flag' field is used as a temporary during some search routines. |
| |
| The `mode' field is usually the same as GET_MODE (`exp'), but |
| if `exp' is a CONST_INT and has no machine mode then the `mode' |
| field is the mode it was being used as. Each constant is |
| recorded separately for each mode it is used with. */ |
| |
| struct table_elt |
| { |
| rtx exp; |
| rtx canon_exp; |
| struct table_elt *next_same_hash; |
| struct table_elt *prev_same_hash; |
| struct table_elt *next_same_value; |
| struct table_elt *prev_same_value; |
| struct table_elt *first_same_value; |
| struct table_elt *related_value; |
| int cost; |
| int regcost; |
| /* The size of this field should match the size |
| of the mode field of struct rtx_def (see rtl.h). */ |
| ENUM_BITFIELD(machine_mode) mode : 8; |
| char in_memory; |
| char is_const; |
| char flag; |
| }; |
| |
| /* We don't want a lot of buckets, because we rarely have very many |
| things stored in the hash table, and a lot of buckets slows |
| down a lot of loops that happen frequently. */ |
| #define HASH_SHIFT 5 |
| #define HASH_SIZE (1 << HASH_SHIFT) |
| #define HASH_MASK (HASH_SIZE - 1) |
| |
| /* Compute hash code of X in mode M. Special-case the case where X is a pseudo
| register (hard registers may require `do_not_record' to be set). */ |
| |
| #define HASH(X, M) \ |
| ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \ |
| ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \ |
| : canon_hash (X, M)) & HASH_MASK) |
| |
| /* Like HASH, but without side-effects. */ |
| #define SAFE_HASH(X, M) \ |
| ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \ |
| ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \ |
| : safe_hash (X, M)) & HASH_MASK) |
| |
| /* Determine whether register number N is considered a fixed register for the |
| purpose of approximating register costs. |
| It is desirable to replace other regs with fixed regs, to reduce need for |
| non-fixed hard regs. |
| A reg wins if it is either the frame pointer or designated as fixed. */ |
| #define FIXED_REGNO_P(N) \ |
| ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \ |
| || fixed_regs[N] || global_regs[N]) |
| |
| /* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed |
| hard registers and pointers into the frame are the cheapest with a cost |
| of 0. Next come pseudos with a cost of one and other hard registers with |
| a cost of 2. Aside from these special cases, call `rtx_cost'. */ |
| |
| #define CHEAP_REGNO(N) \ |
| (REGNO_PTR_FRAME_P (N) \ |
| || (HARD_REGISTER_NUM_P (N) \ |
| && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS)) |
| |
| #define COST(X, MODE) \ |
| (REG_P (X) ? 0 : notreg_cost (X, MODE, SET, 1)) |
| #define COST_IN(X, MODE, OUTER, OPNO) \ |
| (REG_P (X) ? 0 : notreg_cost (X, MODE, OUTER, OPNO)) |
| |
| /* Get the number of times this register has been updated in this |
| basic block. */ |
| |
| #define REG_TICK(N) (get_cse_reg_info (N)->reg_tick) |
| |
| /* Get the point at which REG was recorded in the table. */ |
| |
| #define REG_IN_TABLE(N) (get_cse_reg_info (N)->reg_in_table) |
| |
| /* Get the SUBREG set at the last increment to REG_TICK (-1 if not a |
| SUBREG). */ |
| |
| #define SUBREG_TICKED(N) (get_cse_reg_info (N)->subreg_ticked) |
| |
| /* Get the quantity number for REG. */ |
| |
| #define REG_QTY(N) (get_cse_reg_info (N)->reg_qty) |
| |
| /* Determine if the quantity number for register X represents a valid index |
| into the qty_table. */ |
| |
| #define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0) |
| |
| /* Compare table_elt X and Y and return true iff X is cheaper than Y. */ |
| |
| #define CHEAPER(X, Y) \ |
| (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0) |
| |
| static struct table_elt *table[HASH_SIZE]; |
| |
| /* Chain of `struct table_elt's made so far for this function |
| but currently removed from the table. */ |
| |
| static struct table_elt *free_element_chain; |
| |
| /* Set to the cost of a constant pool reference if one was found for a |
| symbolic constant. If this was found, it means we should try to |
| convert constants into constant pool entries if they don't fit in |
| the insn. */ |
| |
| static int constant_pool_entries_cost; |
| static int constant_pool_entries_regcost; |
| |
| /* Trace a path through the CFG. */
| |
| struct branch_path |
| { |
| /* The basic block for this path entry. */ |
| basic_block bb; |
| }; |
| |
| /* This data describes a block that will be processed by |
| cse_extended_basic_block. */ |
| |
| struct cse_basic_block_data |
| { |
| /* Total number of SETs in block. */ |
| int nsets; |
| /* Size of current branch path, if any. */ |
| int path_size; |
| /* Current path, indicating which basic_blocks will be processed. */ |
| struct branch_path *path; |
| }; |
| |
| |
| /* Pointers to the live in/live out bitmaps for the boundaries of the |
| current EBB. */ |
| static bitmap cse_ebb_live_in, cse_ebb_live_out; |
| |
| /* A simple bitmap to track which basic blocks have been visited |
| already as part of an already processed extended basic block. */ |
| static sbitmap cse_visited_basic_blocks; |
| |
| static bool fixed_base_plus_p (rtx x); |
| static int notreg_cost (rtx, machine_mode, enum rtx_code, int); |
| static int preferable (int, int, int, int); |
| static void new_basic_block (void); |
| static void make_new_qty (unsigned int, machine_mode); |
| static void make_regs_eqv (unsigned int, unsigned int); |
| static void delete_reg_equiv (unsigned int); |
| static int mention_regs (rtx); |
| static int insert_regs (rtx, struct table_elt *, int); |
| static void remove_from_table (struct table_elt *, unsigned); |
| static void remove_pseudo_from_table (rtx, unsigned); |
| static struct table_elt *lookup (rtx, unsigned, machine_mode); |
| static struct table_elt *lookup_for_remove (rtx, unsigned, machine_mode); |
| static rtx lookup_as_function (rtx, enum rtx_code); |
| static struct table_elt *insert_with_costs (rtx, struct table_elt *, unsigned, |
| machine_mode, int, int); |
| static struct table_elt *insert (rtx, struct table_elt *, unsigned, |
| machine_mode); |
| static void merge_equiv_classes (struct table_elt *, struct table_elt *); |
| static void invalidate_reg (rtx, bool); |
| static void invalidate (rtx, machine_mode); |
| static void remove_invalid_refs (unsigned int); |
| static void remove_invalid_subreg_refs (unsigned int, poly_uint64, |
| machine_mode); |
| static void rehash_using_reg (rtx); |
| static void invalidate_memory (void); |
| static void invalidate_for_call (void); |
| static rtx use_related_value (rtx, struct table_elt *); |
| |
| static inline unsigned canon_hash (rtx, machine_mode); |
| static inline unsigned safe_hash (rtx, machine_mode); |
| static inline unsigned hash_rtx_string (const char *); |
| |
| static rtx canon_reg (rtx, rtx_insn *); |
| static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *, |
| machine_mode *, |
| machine_mode *); |
| static rtx fold_rtx (rtx, rtx_insn *); |
| static rtx equiv_constant (rtx); |
| static void record_jump_equiv (rtx_insn *, bool); |
| static void record_jump_cond (enum rtx_code, machine_mode, rtx, rtx, |
| int); |
| static void cse_insn (rtx_insn *); |
| static void cse_prescan_path (struct cse_basic_block_data *); |
| static void invalidate_from_clobbers (rtx_insn *); |
| static void invalidate_from_sets_and_clobbers (rtx_insn *); |
| static rtx cse_process_notes (rtx, rtx, bool *); |
| static void cse_extended_basic_block (struct cse_basic_block_data *); |
| extern void dump_class (struct table_elt*); |
| static void get_cse_reg_info_1 (unsigned int regno); |
| static struct cse_reg_info * get_cse_reg_info (unsigned int regno); |
| |
| static void flush_hash_table (void); |
| static bool insn_live_p (rtx_insn *, int *); |
| static bool set_live_p (rtx, rtx_insn *, int *); |
| static void cse_change_cc_mode_insn (rtx_insn *, rtx); |
| static void cse_change_cc_mode_insns (rtx_insn *, rtx_insn *, rtx); |
| static machine_mode cse_cc_succs (basic_block, basic_block, rtx, rtx, |
| bool); |
| |
| |
| #undef RTL_HOOKS_GEN_LOWPART |
| #define RTL_HOOKS_GEN_LOWPART gen_lowpart_if_possible |
| |
| static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER; |
| |
| /* Nonzero if X has the form (PLUS frame-pointer integer). */ |
| |
| static bool |
| fixed_base_plus_p (rtx x) |
| { |
| switch (GET_CODE (x)) |
| { |
| case REG: |
| if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx) |
| return true; |
| if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]) |
| return true; |
| return false; |
| |
| case PLUS: |
| if (!CONST_INT_P (XEXP (x, 1))) |
| return false; |
| return fixed_base_plus_p (XEXP (x, 0)); |
| |
| default: |
| return false; |
| } |
| } |
| |
| /* Dump the expressions in the equivalence class indicated by CLASSP. |
| This function is used only for debugging. */ |
| DEBUG_FUNCTION void |
| dump_class (struct table_elt *classp) |
| { |
| struct table_elt *elt; |
| |
| fprintf (stderr, "Equivalence chain for "); |
| print_rtl (stderr, classp->exp); |
| fprintf (stderr, ": \n"); |
| |
| for (elt = classp->first_same_value; elt; elt = elt->next_same_value) |
| { |
| print_rtl (stderr, elt->exp); |
| fprintf (stderr, "\n"); |
| } |
| } |
| |
| /* Return an estimate of the cost of the registers used in an rtx. |
| This is mostly the number of different REG expressions in the rtx; |
| however for some exceptions like fixed registers we use a cost of |
| 0. If any other hard register reference occurs, return MAX_COST. */ |
| |
| static int |
| approx_reg_cost (const_rtx x) |
| { |
| int cost = 0; |
| subrtx_iterator::array_type array; |
| FOR_EACH_SUBRTX (iter, array, x, NONCONST) |
| { |
| const_rtx x = *iter; |
| if (REG_P (x)) |
| { |
| unsigned int regno = REGNO (x); |
| if (!CHEAP_REGNO (regno)) |
| { |
| if (regno < FIRST_PSEUDO_REGISTER) |
| { |
| if (targetm.small_register_classes_for_mode_p (GET_MODE (x))) |
| return MAX_COST; |
| cost += 2; |
| } |
| else |
| cost += 1; |
| } |
| } |
| } |
| return cost; |
| } |
| |
| /* Return a negative value if an rtx A, whose costs are given by COST_A
| and REGCOST_A, is more desirable than an rtx B whose costs are given
| by COST_B and REGCOST_B.
| Return a positive value if A is less desirable, or 0 if the two are
| equally good. */
| static int |
| preferable (int cost_a, int regcost_a, int cost_b, int regcost_b) |
| { |
| /* First, get rid of cases involving expressions that are entirely |
| unwanted. */ |
| if (cost_a != cost_b) |
| { |
| if (cost_a == MAX_COST) |
| return 1; |
| if (cost_b == MAX_COST) |
| return -1; |
| } |
| |
| /* Avoid extending lifetimes of hardregs. */ |
| if (regcost_a != regcost_b) |
| { |
| if (regcost_a == MAX_COST) |
| return 1; |
| if (regcost_b == MAX_COST) |
| return -1; |
| } |
| |
| /* Normal operation costs take precedence. */ |
| if (cost_a != cost_b) |
| return cost_a - cost_b; |
| /* Only if these are identical consider effects on register pressure. */ |
| if (regcost_a != regcost_b) |
| return regcost_a - regcost_b; |
| return 0; |
| } |
| |
| /* Internal function, to compute cost when X is not a register; called |
| from COST macro to keep it simple. */ |
| |
| static int |
| notreg_cost (rtx x, machine_mode mode, enum rtx_code outer, int opno) |
| { |
| scalar_int_mode int_mode, inner_mode; |
| return ((GET_CODE (x) == SUBREG |
| && REG_P (SUBREG_REG (x)) |
| && is_int_mode (mode, &int_mode) |
| && is_int_mode (GET_MODE (SUBREG_REG (x)), &inner_mode) |
| && GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (inner_mode) |
| && subreg_lowpart_p (x) |
| && TRULY_NOOP_TRUNCATION_MODES_P (int_mode, inner_mode)) |
| ? 0 |
| : rtx_cost (x, mode, outer, opno, optimize_this_for_speed_p) * 2); |
| } |
| |
| |
| /* Initialize CSE_REG_INFO_TABLE. */ |
| |
| static void |
| init_cse_reg_info (unsigned int nregs) |
| { |
| /* Do we need to grow the table? */ |
| if (nregs > cse_reg_info_table_size) |
| { |
| unsigned int new_size; |
| |
| if (cse_reg_info_table_size < 2048) |
| { |
| /* Compute a new size that is a power of 2 and no smaller |
| than the larger of NREGS and 64. */
| new_size = (cse_reg_info_table_size |
| ? cse_reg_info_table_size : 64); |
| |
| while (new_size < nregs) |
| new_size *= 2; |
| } |
| else |
| { |
| /* If we need a big table, allocate just enough to hold |
| NREGS registers. */ |
| new_size = nregs; |
| } |
| |
| /* Reallocate the table with NEW_SIZE entries. */ |
| free (cse_reg_info_table); |
| cse_reg_info_table = XNEWVEC (struct cse_reg_info, new_size); |
| cse_reg_info_table_size = new_size; |
| cse_reg_info_table_first_uninitialized = 0; |
| } |
| |
| /* Do we have all of the first NREGS entries initialized? */ |
| if (cse_reg_info_table_first_uninitialized < nregs) |
| { |
| unsigned int old_timestamp = cse_reg_info_timestamp - 1; |
| unsigned int i; |
| |
| /* Put the old timestamp on newly allocated entries so that they |
| will all be considered out of date. We do not touch those |
| entries beyond the first NREGS entries to be nice to the |
| virtual memory. */ |
| for (i = cse_reg_info_table_first_uninitialized; i < nregs; i++) |
| cse_reg_info_table[i].timestamp = old_timestamp; |
| |
| cse_reg_info_table_first_uninitialized = nregs; |
| } |
| } |
| |
| /* Given REGNO, initialize the cse_reg_info entry for REGNO. */ |
| |
| static void |
| get_cse_reg_info_1 (unsigned int regno) |
| { |
| /* Set TIMESTAMP field to CSE_REG_INFO_TIMESTAMP so that this |
| entry will be considered to have been initialized. */ |
| cse_reg_info_table[regno].timestamp = cse_reg_info_timestamp; |
| |
| /* Initialize the rest of the entry. */ |
| cse_reg_info_table[regno].reg_tick = 1; |
| cse_reg_info_table[regno].reg_in_table = -1; |
| cse_reg_info_table[regno].subreg_ticked = -1; |
| cse_reg_info_table[regno].reg_qty = -regno - 1; |
| } |
| |
| /* Find a cse_reg_info entry for REGNO. */ |
| |
| static inline struct cse_reg_info * |
| get_cse_reg_info (unsigned int regno) |
| { |
| struct cse_reg_info *p = &cse_reg_info_table[regno]; |
| |
| /* If this entry has not been initialized, go ahead and initialize |
| it. */ |
| if (p->timestamp != cse_reg_info_timestamp) |
| get_cse_reg_info_1 (regno); |
| |
| return p; |
| } |
| |
| /* Clear the hash table and initialize each register with its own quantity, |
| for a new basic block. */ |
| |
| static void |
| new_basic_block (void) |
| { |
| int i; |
| |
| next_qty = 0; |
| |
| /* Invalidate cse_reg_info_table. */ |
| cse_reg_info_timestamp++; |
| |
| /* Clear out hash table state for this pass. */ |
| CLEAR_HARD_REG_SET (hard_regs_in_table); |
| |
| /* The per-quantity values used to be initialized here, but it is |
| much faster to initialize each as it is made in `make_new_qty'. */ |
| |
| for (i = 0; i < HASH_SIZE; i++) |
| { |
| struct table_elt *first; |
| |
| first = table[i]; |
| if (first != NULL) |
| { |
| struct table_elt *last = first; |
| |
| table[i] = NULL; |
| |
| while (last->next_same_hash != NULL) |
| last = last->next_same_hash; |
| |
| /* Now relink this entire hash chain into
| the free element list. */
| |
| last->next_same_hash = free_element_chain; |
| free_element_chain = first; |
| } |
| } |
| |
| prev_insn_cc0 = 0; |
| } |
| |
| /* Say that register REG contains a quantity in mode MODE not in any |
| register before and initialize that quantity. */ |
| |
| static void |
| make_new_qty (unsigned int reg, machine_mode mode) |
| { |
| int q; |
| struct qty_table_elem *ent; |
| struct reg_eqv_elem *eqv; |
| |
| gcc_assert (next_qty < max_qty); |
| |
| q = REG_QTY (reg) = next_qty++; |
| ent = &qty_table[q]; |
| ent->first_reg = reg; |
| ent->last_reg = reg; |
| ent->mode = mode; |
| ent->const_rtx = ent->const_insn = NULL; |
| ent->comparison_code = UNKNOWN; |
| |
| eqv = ®_eqv_table[reg]; |
| eqv->next = eqv->prev = -1; |
| } |
| |
| /* Make reg NEW equivalent to reg OLD. |
| OLD is not changing; NEW is. */ |
| |
| static void |
| make_regs_eqv (unsigned int new_reg, unsigned int old_reg) |
| { |
| unsigned int lastr, firstr; |
| int q = REG_QTY (old_reg); |
| struct qty_table_elem *ent; |
| |
| ent = &qty_table[q]; |
| |
| /* Nothing should become eqv until it has a "non-invalid" qty number. */ |
| gcc_assert (REGNO_QTY_VALID_P (old_reg)); |
| |
| REG_QTY (new_reg) = q; |
| firstr = ent->first_reg; |
| lastr = ent->last_reg; |
| |
| /* Prefer fixed hard registers to anything. Prefer pseudo regs to other |
| hard regs. Among pseudos, if NEW will live longer than any other reg |
| of the same qty, and that is beyond the current basic block, |
| make it the new canonical replacement for this qty. */ |
| if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr)) |
| /* Certain fixed registers might be of the class NO_REGS. This means |
| that not only can they not be allocated by the compiler, but |
| they cannot be used in substitutions or canonicalizations |
| either. */ |
| && (new_reg >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new_reg) != NO_REGS) |
| && ((new_reg < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new_reg)) |
| || (new_reg >= FIRST_PSEUDO_REGISTER |
| && (firstr < FIRST_PSEUDO_REGISTER |
| || (bitmap_bit_p (cse_ebb_live_out, new_reg) |
| && !bitmap_bit_p (cse_ebb_live_out, firstr)) |
| || (bitmap_bit_p (cse_ebb_live_in, new_reg) |
| && !bitmap_bit_p (cse_ebb_live_in, firstr)))))) |
| { |
| reg_eqv_table[firstr].prev = new_reg; |
| reg_eqv_table[new_reg].next = firstr; |
| reg_eqv_table[new_reg].prev = -1; |
| ent->first_reg = new_reg; |
| } |
| else |
| { |
| /* If NEW is a hard reg (known to be non-fixed), insert at end. |
| Otherwise, insert before any non-fixed hard regs that are at the |
| end. Registers of class NO_REGS cannot be used as an |
| equivalent for anything. */ |
| while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0 |
| && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr)) |
| && new_reg >= FIRST_PSEUDO_REGISTER) |
| lastr = reg_eqv_table[lastr].prev; |
| reg_eqv_table[new_reg].next = reg_eqv_table[lastr].next; |
| if (reg_eqv_table[lastr].next >= 0) |
| reg_eqv_table[reg_eqv_table[lastr].next].prev = new_reg; |
| else |
| qty_table[q].last_reg = new_reg; |
| reg_eqv_table[lastr].next = new_reg; |
| reg_eqv_table[new_reg].prev = lastr; |
| } |
| } |
| |
| /* Remove REG from its equivalence class. */ |
| |
| static void |
| delete_reg_equiv (unsigned int reg) |
| { |
| struct qty_table_elem *ent; |
| int q = REG_QTY (reg); |
| int p, n; |
| |
| /* If invalid, do nothing. */ |
| if (! REGNO_QTY_VALID_P (reg)) |
| return; |
| |
| ent = &qty_table[q]; |
| |
| p = reg_eqv_table[reg].prev; |
| n = reg_eqv_table[reg].next; |
| |
| if (n != -1) |
| reg_eqv_table[n].prev = p; |
| else |
| ent->last_reg = p; |
| if (p != -1) |
| reg_eqv_table[p].next = n; |
| else |
| ent->first_reg = n; |
| |
| REG_QTY (reg) = -reg - 1; |
| } |
| |
| /* Remove any invalid expressions from the hash table |
| that refer to any of the registers contained in expression X. |
| |
| Make sure that newly inserted references to those registers |
| as subexpressions will be considered valid. |
| |
| mention_regs is not called when a register itself |
| is being stored in the table. |
| |
| Return 1 if we have done something that may have changed the hash code |
| of X. */ |
| |
| static int |
| mention_regs (rtx x) |
| { |
| enum rtx_code code; |
| int i, j; |
| const char *fmt; |
| int changed = 0; |
| |
| if (x == 0) |
| return 0; |
| |
| code = GET_CODE (x); |
| if (code == REG) |
| { |
| unsigned int regno = REGNO (x); |
| unsigned int endregno = END_REGNO (x); |
| unsigned int i; |
| |
| for (i = regno; i < endregno; i++) |
| { |
| if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i)) |
| remove_invalid_refs (i); |
| |
| REG_IN_TABLE (i) = REG_TICK (i); |
| SUBREG_TICKED (i) = -1; |
| } |
| |
| return 0; |
| } |
| |
| /* If this is a SUBREG, we don't want to discard other SUBREGs of the same |
| pseudo if they don't use overlapping words. We handle only pseudos |
| here for simplicity. */ |
| if (code == SUBREG && REG_P (SUBREG_REG (x)) |
| && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER) |
| { |
| unsigned int i = REGNO (SUBREG_REG (x)); |
| |
| if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i)) |
| { |
| /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and |
| the last store to this register really stored into this |
| subreg, then remove the memory of this subreg. |
| Otherwise, remove any memory of the entire register and |
| all its subregs from the table. */ |
| if (REG_TICK (i) - REG_IN_TABLE (i) > 1 |
| || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x))) |
| remove_invalid_refs (i); |
| else |
| remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x)); |
| } |
| |
| REG_IN_TABLE (i) = REG_TICK (i); |
| SUBREG_TICKED (i) = REGNO (SUBREG_REG (x)); |
| return 0; |
| } |
| |
| /* If X is a comparison or a COMPARE and either operand is a register |
| that does not have a quantity, give it one. This is so that a later |
| call to record_jump_equiv won't cause X to be assigned a different |
| hash code and not found in the table after that call. |
| |
| It is not necessary to do this here, since rehash_using_reg can |
| fix up the table later, but doing this here eliminates the need to |
| call that expensive function in the most common case where the only |
| use of the register is in the comparison. */ |
| |
| if (code == COMPARE || COMPARISON_P (x)) |
| { |
| if (REG_P (XEXP (x, 0)) |
| && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))) |
| if (insert_regs (XEXP (x, 0), NULL, 0)) |
| { |
| rehash_using_reg (XEXP (x, 0)); |
| changed = 1; |
| } |
| |
| if (REG_P (XEXP (x, 1)) |
| && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1)))) |
| if (insert_regs (XEXP (x, 1), NULL, 0)) |
| { |
| rehash_using_reg (XEXP (x, 1)); |
| changed = 1; |
| } |
| } |
| |
| fmt = GET_RTX_FORMAT (code); |
| for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
| if (fmt[i] == 'e') |
| changed |= mention_regs (XEXP (x, i)); |
| else if (fmt[i] == 'E') |
| for (j = 0; j < XVECLEN (x, i); j++) |
| changed |= mention_regs (XVECEXP (x, i, j)); |
| |
| return changed; |
| } |
| |
| /* Update the register quantities for inserting X into the hash table |
| with a value equivalent to CLASSP. |
| (If the class does not contain a REG, it is irrelevant.) |
| If MODIFIED is nonzero, X is a destination; it is being modified. |
| Note that delete_reg_equiv should be called on a register |
| before insert_regs is done on that register with MODIFIED != 0. |
| |
| Nonzero value means that elements of reg_qty have changed |
| so X's hash code may be different. */ |
| |
| static int |
| insert_regs (rtx x, struct table_elt *classp, int modified) |
| { |
| if (REG_P (x)) |
| { |
| unsigned int regno = REGNO (x); |
| int qty_valid; |
| |
| /* If REGNO is in the equivalence table already but is of the |
| wrong mode for that equivalence, don't do anything here. */ |
| |
| qty_valid = REGNO_QTY_VALID_P (regno); |
| if (qty_valid) |
| { |
| struct qty_table_elem *ent = &qty_table[REG_QTY (regno)]; |
| |
| if (ent->mode != GET_MODE (x)) |
| return 0; |
| } |
| |
| if (modified || ! qty_valid) |
| { |
| if (classp) |
| for (classp = classp->first_same_value; |
| classp != 0; |
| classp = classp->next_same_value) |
| if (REG_P (classp->exp) |
| && GET_MODE (classp->exp) == GET_MODE (x)) |
| { |
| unsigned c_regno = REGNO (classp->exp); |
| |
| gcc_assert (REGNO_QTY_VALID_P (c_regno)); |
| |
| /* Suppose that 5 is hard reg and 100 and 101 are |
| pseudos. Consider |
| |
| (set (reg:si 100) (reg:si 5)) |
| (set (reg:si 5) (reg:si 100)) |
| (set (reg:di 101) (reg:di 5)) |
| |
| We would now set REG_QTY (101) = REG_QTY (5), but the |
| entry for 5 is in SImode. When we use this later in |
| copy propagation, we get the register in wrong mode. */ |
| if (qty_table[REG_QTY (c_regno)].mode != GET_MODE (x)) |
| continue; |
| |
| make_regs_eqv (regno, c_regno); |
| return 1; |
| } |
| |
| /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger |
| than REG_IN_TABLE to find out if there was only a single preceding |
| invalidation - for the SUBREG - or another one, which would be |
| for the full register. However, if we find here that REG_TICK |
| indicates that the register is invalid, it means that it has |
| been invalidated in a separate operation. The SUBREG might be used |
| now (then this is a recursive call), or we might use the full REG |
| now and a SUBREG of it later. So bump up REG_TICK so that |
| mention_regs will do the right thing. */ |
| if (! modified |
| && REG_IN_TABLE (regno) >= 0 |
| && REG_TICK (regno) == REG_IN_TABLE (regno) + 1) |
| REG_TICK (regno)++; |
| make_new_qty (regno, GET_MODE (x)); |
| return 1; |
| } |
| |
| return 0; |
| } |
| |
| /* If X is a SUBREG, we will likely be inserting the inner register in the |
| table. If that register doesn't have an assigned quantity number at |
| this point but does later, the insertion that we will be doing now will |
| not be accessible because its hash code will have changed. So assign |
| a quantity number now. */ |
| |
| else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x)) |
| && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x)))) |
| { |
| insert_regs (SUBREG_REG (x), NULL, 0); |
| mention_regs (x); |
| return 1; |
| } |
| else |
| return mention_regs (x); |
| } |
| |
| |
| /* Compute upper and lower anchors for CST. Also compute the offset of CST |
| from these anchors/bases such that *_BASE + *_OFFS = CST. Return false iff |
| CST is equal to an anchor. */ |
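| 
| /* For instance, with targetm.const_anchor == 16 (a value chosen purely
| for illustration) and CST == (const_int 19), the anchors are 16 and 32,
| giving *LOWER_OFFS == 3 and *UPPER_OFFS == -13. */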
| |
| static bool |
| compute_const_anchors (rtx cst, |
| HOST_WIDE_INT *lower_base, HOST_WIDE_INT *lower_offs, |
| HOST_WIDE_INT *upper_base, HOST_WIDE_INT *upper_offs) |
| { |
| HOST_WIDE_INT n = INTVAL (cst); |
| |
| *lower_base = n & ~(targetm.const_anchor - 1); |
| if (*lower_base == n) |
| return false; |
| |
| *upper_base = |
| (n + (targetm.const_anchor - 1)) & ~(targetm.const_anchor - 1); |
| *upper_offs = n - *upper_base; |
| *lower_offs = n - *lower_base; |
| return true; |
| } |
| |
| /* Insert the equivalence between ANCHOR and (REG + OFF) in mode MODE. */ |
| |
| static void |
| insert_const_anchor (HOST_WIDE_INT anchor, rtx reg, HOST_WIDE_INT offs, |
| machine_mode mode) |
| { |
| struct table_elt *elt; |
| unsigned hash; |
| rtx anchor_exp; |
| rtx exp; |
| |
| anchor_exp = GEN_INT (anchor); |
| hash = HASH (anchor_exp, mode); |
| elt = lookup (anchor_exp, hash, mode); |
| if (!elt) |
| elt = insert (anchor_exp, NULL, hash, mode); |
| |
| exp = plus_constant (mode, reg, offs); |
| /* REG has just been inserted and the hash codes recomputed. */ |
| mention_regs (exp); |
| hash = HASH (exp, mode); |
| |
| /* Use the cost of the register rather than the whole expression. When |
| looking up constant anchors we will further offset the corresponding |
| expression therefore it does not make sense to prefer REGs over |
| reg-immediate additions. Prefer instead the oldest expression. Also |
| don't prefer pseudos over hard regs so that we derive constants in |
| argument registers from other argument registers rather than from the |
| original pseudo that was used to synthesize the constant. */ |
| insert_with_costs (exp, elt, hash, mode, COST (reg, mode), 1); |
| } |
| |
| /* The constant CST is equivalent to the register REG. Create |
| equivalences between the two anchors of CST and the corresponding |
| register-offset expressions using REG. */ |
| |
| static void |
| insert_const_anchors (rtx reg, rtx cst, machine_mode mode) |
| { |
| HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs; |
| |
| if (!compute_const_anchors (cst, &lower_base, &lower_offs, |
| &upper_base, &upper_offs)) |
| return; |
| |
| /* Ignore anchors of value 0. Constants accessible from zero are |
| simple. */ |
| if (lower_base != 0) |
| insert_const_anchor (lower_base, reg, -lower_offs, mode); |
| |
| if (upper_base != 0) |
| insert_const_anchor (upper_base, reg, -upper_offs, mode); |
| } |
| |
| /* We need to express ANCHOR_ELT->exp + OFFS. Walk the equivalence list of |
| ANCHOR_ELT and see if offsetting any of the entries by OFFS would create a |
| valid expression. Return the cheapest and oldest of such expressions. In |
| *OLD, return how old the resulting expression is compared to the other |
| equivalent expressions. */ |
| |
| static rtx |
| find_reg_offset_for_const (struct table_elt *anchor_elt, HOST_WIDE_INT offs, |
| unsigned *old) |
| { |
| struct table_elt *elt; |
| unsigned idx; |
| struct table_elt *match_elt; |
| rtx match; |
| |
| /* Find the cheapest and *oldest* expression to maximize the chance of |
| reusing the same pseudo. */ |
| |
| match_elt = NULL; |
| match = NULL_RTX; |
| for (elt = anchor_elt->first_same_value, idx = 0; |
| elt; |
| elt = elt->next_same_value, idx++) |
| { |
| if (match_elt && CHEAPER (match_elt, elt)) |
| return match; |
| |
| if (REG_P (elt->exp) |
| || (GET_CODE (elt->exp) == PLUS |
| && REG_P (XEXP (elt->exp, 0)) |
| && GET_CODE (XEXP (elt->exp, 1)) == CONST_INT)) |
| { |
| rtx x; |
| |
| /* Ignore expressions that are no longer valid. */ |
| if (!REG_P (elt->exp) && !exp_equiv_p (elt->exp, elt->exp, 1, false)) |
| continue; |
| |
| x = plus_constant (GET_MODE (elt->exp), elt->exp, offs); |
| if (REG_P (x) |
| || (GET_CODE (x) == PLUS |
| && IN_RANGE (INTVAL (XEXP (x, 1)), |
| -targetm.const_anchor, |
| targetm.const_anchor - 1))) |
| { |
| match = x; |
| match_elt = elt; |
| *old = idx; |
| } |
| } |
| } |
| |
| return match; |
| } |
| |
| /* Try to express the constant SRC_CONST using a register+offset expression |
| derived from a constant anchor. Return it if successful or NULL_RTX, |
| otherwise. */ |
| |
| static rtx |
| try_const_anchors (rtx src_const, machine_mode mode) |
| { |
| struct table_elt *lower_elt, *upper_elt; |
| HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs; |
| rtx lower_anchor_rtx, upper_anchor_rtx; |
| rtx lower_exp = NULL_RTX, upper_exp = NULL_RTX; |
| unsigned lower_old, upper_old; |
| |
| /* CONST_INT is used for CC modes, but we should leave those alone. */ |
| if (GET_MODE_CLASS (mode) == MODE_CC) |
| return NULL_RTX; |
| |
| gcc_assert (SCALAR_INT_MODE_P (mode)); |
| if (!compute_const_anchors (src_const, &lower_base, &lower_offs, |
| &upper_base, &upper_offs)) |
| return NULL_RTX; |
| |
| lower_anchor_rtx = GEN_INT (lower_base); |
| upper_anchor_rtx = GEN_INT (upper_base); |
| lower_elt = lookup (lower_anchor_rtx, HASH (lower_anchor_rtx, mode), mode); |
| upper_elt = lookup (upper_anchor_rtx, HASH (upper_anchor_rtx, mode), mode); |
| |
| if (lower_elt) |
| lower_exp = find_reg_offset_for_const (lower_elt, lower_offs, &lower_old); |
| if (upper_elt) |
| upper_exp = find_reg_offset_for_const (upper_elt, upper_offs, &upper_old); |
| |
| if (!lower_exp) |
| return upper_exp; |
| if (!upper_exp) |
| return lower_exp; |
| |
| /* Return the older expression. */ |
| return (upper_old > lower_old ? upper_exp : lower_exp); |
| } |
| |
| /* Look in or update the hash table. */ |
| |
| /* Remove table element ELT from use in the table. |
| HASH is its hash code, made using the HASH macro. |
| It's an argument because often that is known in advance |
| and we save much time not recomputing it. */ |
| |
| static void |
| remove_from_table (struct table_elt *elt, unsigned int hash) |
| { |
| if (elt == 0) |
| return; |
| |
| /* Mark this element as removed. See cse_insn. */ |
| elt->first_same_value = 0; |
| |
| /* Remove the table element from its equivalence class. */ |
| |
| { |
| struct table_elt *prev = elt->prev_same_value; |
| struct table_elt *next = elt->next_same_value; |
| |
| if (next) |
| next->prev_same_value = prev; |
| |
| if (prev) |
| prev->next_same_value = next; |
| else |
| { |
| struct table_elt *newfirst = next; |
| while (next) |
| { |
| next->first_same_value = newfirst; |
| next = next->next_same_value; |
| } |
| } |
| } |
| |
| /* Remove the table element from its hash bucket. */ |
| |
| { |
| struct table_elt *prev = elt->prev_same_hash; |
| struct table_elt *next = elt->next_same_hash; |
| |
| if (next) |
| next->prev_same_hash = prev; |
| |
| if (prev) |
| prev->next_same_hash = next; |
| else if (table[hash] == elt) |
| table[hash] = next; |
| else |
| { |
| /* This entry is not in the proper hash bucket. This can happen |
| when two classes were merged by `merge_equiv_classes'. Search |
| for the hash bucket that it heads. This happens only very |
| rarely, so the cost is acceptable. */ |
| for (hash = 0; hash < HASH_SIZE; hash++) |
| if (table[hash] == elt) |
| table[hash] = next; |
| } |
| } |
| |
| /* Remove the table element from its related-value circular chain. */ |
| |
| if (elt->related_value != 0 && elt->related_value != elt) |
| { |
| struct table_elt *p = elt->related_value; |
| |
| while (p->related_value != elt) |
| p = p->related_value; |
| p->related_value = elt->related_value; |
| if (p->related_value == p) |
| p->related_value = 0; |
| } |
| |
| /* Now add it to the free element chain. */ |
| elt->next_same_hash = free_element_chain; |
| free_element_chain = elt; |
| } |
| |
| /* Same as above, but X is a pseudo-register. */ |
| |
| static void |
| remove_pseudo_from_table (rtx x, unsigned int hash) |
| { |
| struct table_elt *elt; |
| |
| /* Because a pseudo-register can be referenced in more than one |
| mode, we might have to remove more than one table entry. */ |
| while ((elt = lookup_for_remove (x, hash, VOIDmode))) |
| remove_from_table (elt, hash); |
| } |
| |
| /* Look up X in the hash table and return its table element, |
| or 0 if X is not in the table. |
| |
| MODE is the machine-mode of X, or if X is an integer constant |
| with VOIDmode then MODE is the mode with which X will be used. |
| |
| Here we are satisfied to find an expression whose tree structure |
| looks like X. */ |
| |
| static struct table_elt * |
| lookup (rtx x, unsigned int hash, machine_mode mode) |
| { |
| struct table_elt *p; |
| |
| for (p = table[hash]; p; p = p->next_same_hash) |
| if (mode == p->mode && ((x == p->exp && REG_P (x)) |
| || exp_equiv_p (x, p->exp, !REG_P (x), false))) |
| return p; |
| |
| return 0; |
| } |
| |
| /* Like `lookup' but don't care whether the table element uses invalid regs. |
| Also ignore discrepancies in the machine mode of a register. */ |
| |
| static struct table_elt * |
| lookup_for_remove (rtx x, unsigned int hash, machine_mode mode) |
| { |
| struct table_elt *p; |
| |
| if (REG_P (x)) |
| { |
| unsigned int regno = REGNO (x); |
| |
| /* Don't check the machine mode when comparing registers; |
| invalidating (REG:SI 0) also invalidates (REG:DF 0). */ |
| for (p = table[hash]; p; p = p->next_same_hash) |
| if (REG_P (p->exp) |
| && REGNO (p->exp) == regno) |
| return p; |
| } |
| else |
| { |
| for (p = table[hash]; p; p = p->next_same_hash) |
| if (mode == p->mode |
| && (x == p->exp || exp_equiv_p (x, p->exp, 0, false))) |
| return p; |
| } |
| |
| return 0; |
| } |
| |
| /* Look for an expression equivalent to X and with code CODE. |
| If one is found, return that expression. */ |
| |
| static rtx |
| lookup_as_function (rtx x, enum rtx_code code) |
| { |
| struct table_elt *p |
| = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x)); |
| |
| if (p == 0) |
| return 0; |
| |
| for (p = p->first_same_value; p; p = p->next_same_value) |
| if (GET_CODE (p->exp) == code |
| /* Make sure this is a valid entry in the table. */ |
| && exp_equiv_p (p->exp, p->exp, 1, false)) |
| return p->exp; |
| |
| return 0; |
| } |
| |
| /* Insert X in the hash table, assuming HASH is its hash code and |
| CLASSP is an element of the class it should go in (or 0 if a new |
| class should be made). COST is the cost of X and reg_cost is the
| cost of registers in X. It is inserted at the proper position to |
| keep the class in the order cheapest first. |
| |
| MODE is the machine-mode of X, or if X is an integer constant |
| with VOIDmode then MODE is the mode with which X will be used. |
| |
| For elements of equal cheapness, the most recent one |
| goes in front, except that the first element in the list |
| remains first unless a cheaper element is added. The order of |
| pseudo-registers does not matter, as canon_reg will be called to |
| find the cheapest when a register is retrieved from the table. |
| |
| The in_memory field in the hash table element is set to 0. |
| The caller must set it nonzero if appropriate. |
| |
| You should call insert_regs (X, CLASSP, MODIFY) before calling here, |
| and if insert_regs returns a nonzero value |
| you must then recompute its hash code before calling here. |
| |
| If necessary, update table showing constant values of quantities. */ |
| |
| static struct table_elt * |
| insert_with_costs (rtx x, struct table_elt *classp, unsigned int hash, |
| machine_mode mode, int cost, int reg_cost) |
| { |
| struct table_elt *elt; |
| |
| /* If X is a register and we haven't made a quantity for it, |
| something is wrong. */ |
| gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x))); |
| |
| /* If X is a hard register, show it is being put in the table. */ |
| if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER) |
| add_to_hard_reg_set (&hard_regs_in_table, GET_MODE (x), REGNO (x)); |
| |
| /* Put an element for X into the right hash bucket. */ |
| |
| elt = free_element_chain; |
| if (elt) |
| free_element_chain = elt->next_same_hash; |
| else |
| elt = XNEW (struct table_elt); |
| |
| elt->exp = x; |
| elt->canon_exp = NULL_RTX; |
| elt->cost = cost; |
| elt->regcost = reg_cost; |
| elt->next_same_value = 0; |
| elt->prev_same_value = 0; |
| elt->next_same_hash = table[hash]; |
| elt->prev_same_hash = 0; |
| elt->related_value = 0; |
| elt->in_memory = 0; |
| elt->mode = mode; |
| elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x)); |
| |
| if (table[hash]) |
| table[hash]->prev_same_hash = elt; |
| table[hash] = elt; |
| |
| /* Put it into the proper value-class. */ |
| if (classp) |
| { |
| classp = classp->first_same_value; |
| if (CHEAPER (elt, classp)) |
| /* Insert at the head of the class. */ |
| { |
| struct table_elt *p; |
| elt->next_same_value = classp; |
| classp->prev_same_value = elt; |
| elt->first_same_value = elt; |
| |
| for (p = classp; p; p = p->next_same_value) |
| p->first_same_value = elt; |
| } |
| else |
| { |
| /* Insert not at head of the class. */ |
| /* Put it after the last element cheaper than X. */ |
| struct table_elt *p, *next; |
| |
| for (p = classp; |
| (next = p->next_same_value) && CHEAPER (next, elt); |
| p = next) |
| ; |
| |
| /* Put it after P and before NEXT. */ |
| elt->next_same_value = next; |
| if (next) |
| next->prev_same_value = elt; |
| |
| elt->prev_same_value = p; |
| p->next_same_value = elt; |
| elt->first_same_value = classp; |
| } |
| } |
| else |
| elt->first_same_value = elt; |
| |
| /* If this is a constant being set equivalent to a register or a register |
| being set equivalent to a constant, note the constant equivalence. |
| |
| If this is a constant, it cannot be equivalent to a different constant, |
| and a constant is the only thing that can be cheaper than a register. So |
| we know the register is the head of the class (before the constant was |
| inserted). |
| |
| If this is a register that is not already known equivalent to a |
| constant, we must check the entire class. |
| |
| If this is a register that is already known equivalent to an insn, |
| update the qtys `const_insn' to show that `this_insn' is the latest |
| insn making that quantity equivalent to the constant. */ |
| |
| if (elt->is_const && classp && REG_P (classp->exp) |
| && !REG_P (x)) |
| { |
| int exp_q = REG_QTY (REGNO (classp->exp)); |
| struct qty_table_elem *exp_ent = &qty_table[exp_q]; |
| |
| exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x); |
| exp_ent->const_insn = this_insn; |
| } |
| |
| else if (REG_P (x) |
| && classp |
| && ! qty_table[REG_QTY (REGNO (x))].const_rtx |
| && ! elt->is_const) |
| { |
| struct table_elt *p; |
| |
| for (p = classp; p != 0; p = p->next_same_value) |
| { |
| if (p->is_const && !REG_P (p->exp)) |
| { |
| int x_q = REG_QTY (REGNO (x)); |
| struct qty_table_elem *x_ent = &qty_table[x_q]; |
| |
| x_ent->const_rtx |
| = gen_lowpart (GET_MODE (x), p->exp); |
| x_ent->const_insn = this_insn; |
| break; |
| } |
| } |
| } |
| |
| else if (REG_P (x) |
| && qty_table[REG_QTY (REGNO (x))].const_rtx |
| && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode) |
| qty_table[REG_QTY (REGNO (x))].const_insn = this_insn; |
| |
| /* If this is a constant with symbolic value, |
| and it has a term with an explicit integer value, |
| link it up with related expressions. */ |
| if (GET_CODE (x) == CONST) |
| { |
| rtx subexp = get_related_value (x); |
| unsigned subhash; |
| struct table_elt *subelt, *subelt_prev; |
| |
| if (subexp != 0) |
| { |
| /* Get the integer-free subexpression in the hash table. */ |
| subhash = SAFE_HASH (subexp, mode); |
| subelt = lookup (subexp, subhash, mode); |
| if (subelt == 0) |
| subelt = insert (subexp, NULL, subhash, mode); |
| /* Initialize SUBELT's circular chain if it has none. */ |
| if (subelt->related_value == 0) |
| subelt->related_value = subelt; |
| /* Find the element in the circular chain that precedes SUBELT. */ |
| subelt_prev = subelt; |
| while (subelt_prev->related_value != subelt) |
| subelt_prev = subelt_prev->related_value; |
| /* Put new ELT into SUBELT's circular chain just before SUBELT. |
| This way the element that follows SUBELT is the oldest one. */ |
| elt->related_value = subelt_prev->related_value; |
| subelt_prev->related_value = elt; |
| } |
| } |
| |
| return elt; |
| } |
| |
| /* Wrap insert_with_costs by passing the default costs. */ |
| |
| static struct table_elt * |
| insert (rtx x, struct table_elt *classp, unsigned int hash, |
| machine_mode mode) |
| { |
| return insert_with_costs (x, classp, hash, mode, |
| COST (x, mode), approx_reg_cost (x)); |
| } |
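| |
| /* Illustrative sketch, not part of the pass itself: callers in this |
| file typically hash an expression, look it up, and insert it only |
| when it is missing, letting the wrapper above supply the default |
| costs, e.g. |
| |
| hash = SAFE_HASH (exp, mode); |
| elt = lookup (exp, hash, mode); |
| if (elt == 0) |
| elt = insert (exp, NULL, hash, mode); |
| |
| insert_with_costs is used directly only when the default COST and |
| approx_reg_cost values are not appropriate. */ |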
| |
| |
| /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from |
| CLASS2 into CLASS1. This is done when we have reached an insn which makes |
| the two classes equivalent. |
| |
| CLASS1 will be the surviving class; CLASS2 should not be used after this |
| call. |
| |
| Any invalid entries in CLASS2 will not be copied. */ |
| |
| static void |
| merge_equiv_classes (struct table_elt *class1, struct table_elt *class2) |
| { |
| struct table_elt *elt, *next, *new_elt; |
| |
| /* Ensure we start with the head of the classes. */ |
| class1 = class1->first_same_value; |
| class2 = class2->first_same_value; |
| |
| /* If they were already equal, forget it. */ |
| if (class1 == class2) |
| return; |
| |
| for (elt = class2; elt; elt = next) |
| { |
| unsigned int hash; |
| rtx exp = elt->exp; |
| machine_mode mode = elt->mode; |
| |
| next = elt->next_same_value; |
| |
| /* Remove old entry, make a new one in CLASS1's class. |
| Don't do this for invalid entries as we cannot find their |
| hash code (it also isn't necessary). */ |
| if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false)) |
| { |
| bool need_rehash = false; |
| |
| hash_arg_in_memory = 0; |
| hash = HASH (exp, mode); |
| |
| if (REG_P (exp)) |
| { |
| need_rehash = REGNO_QTY_VALID_P (REGNO (exp)); |
| delete_reg_equiv (REGNO (exp)); |
| } |
| |
| if (REG_P (exp) && REGNO (exp) >= FIRST_PSEUDO_REGISTER) |
| remove_pseudo_from_table (exp, hash); |
| else |
| remove_from_table (elt, hash); |
| |
| if (insert_regs (exp, class1, 0) || need_rehash) |
| { |
| rehash_using_reg (exp); |
| hash = HASH (exp, mode); |
| } |
| new_elt = insert (exp, class1, hash, mode); |
| new_elt->in_memory = hash_arg_in_memory; |
| if (GET_CODE (exp) == ASM_OPERANDS && elt->cost == MAX_COST) |
| new_elt->cost = MAX_COST; |
| } |
| } |
| } |
| |
| /* Flush the entire hash table. */ |
| |
| static void |
| flush_hash_table (void) |
| { |
| int i; |
| struct table_elt *p; |
| |
| for (i = 0; i < HASH_SIZE; i++) |
| for (p = table[i]; p; p = table[i]) |
| { |
| /* Note that invalidate can remove elements |
| after P in the current hash chain. */ |
| if (REG_P (p->exp)) |
| invalidate (p->exp, VOIDmode); |
| else |
| remove_from_table (p, i); |
| } |
| } |
| |
| /* Check whether an anti dependence exists between X and EXP. MODE and |
| ADDR are as for canon_anti_dependence. */ |
| |
| static bool |
| check_dependence (const_rtx x, rtx exp, machine_mode mode, rtx addr) |
| { |
| subrtx_iterator::array_type array; |
| FOR_EACH_SUBRTX (iter, array, x, NONCONST) |
| { |
| const_rtx x = *iter; |
| if (MEM_P (x) && canon_anti_dependence (x, true, exp, mode, addr)) |
| return true; |
| } |
| return false; |
| } |
| |
| /* Remove from the hash table, or mark as invalid, all expressions whose |
| values could be altered by storing in register X. |
| |
| CLOBBER_HIGH is set if X was part of a CLOBBER_HIGH expression. */ |
| |
| static void |
| invalidate_reg (rtx x, bool clobber_high) |
| { |
| gcc_assert (GET_CODE (x) == REG); |
| |
| /* If X is a register, dependencies on its contents are recorded |
| through the qty number mechanism. Just change the qty number of |
| the register, mark it as invalid for expressions that refer to it, |
| and remove it itself. */ |
| unsigned int regno = REGNO (x); |
| unsigned int hash = HASH (x, GET_MODE (x)); |
| |
| /* Remove REGNO from any quantity list it might be on and indicate |
| that its value might have changed. If it is a pseudo, remove its |
| entry from the hash table. |
| |
| For a hard register, we do the first two actions above for any |
| additional hard registers corresponding to X. Then, if any of these |
| registers are in the table, we must remove any REG entries that |
| overlap these registers. */ |
| |
| delete_reg_equiv (regno); |
| REG_TICK (regno)++; |
| SUBREG_TICKED (regno) = -1; |
| |
| if (regno >= FIRST_PSEUDO_REGISTER) |
| { |
| gcc_assert (!clobber_high); |
| remove_pseudo_from_table (x, hash); |
| } |
| else |
| { |
| HOST_WIDE_INT in_table = TEST_HARD_REG_BIT (hard_regs_in_table, regno); |
| unsigned int endregno = END_REGNO (x); |
| unsigned int rn; |
| struct table_elt *p, *next; |
| |
| CLEAR_HARD_REG_BIT (hard_regs_in_table, regno); |
| |
| for (rn = regno + 1; rn < endregno; rn++) |
| { |
| in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn); |
| CLEAR_HARD_REG_BIT (hard_regs_in_table, rn); |
| delete_reg_equiv (rn); |
| REG_TICK (rn)++; |
| SUBREG_TICKED (rn) = -1; |
| } |
| |
| if (in_table) |
| for (hash = 0; hash < HASH_SIZE; hash++) |
| for (p = table[hash]; p; p = next) |
| { |
| next = p->next_same_hash; |
| |
| if (!REG_P (p->exp) || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER) |
| continue; |
| |
| if (clobber_high) |
| { |
| if (reg_is_clobbered_by_clobber_high (p->exp, x)) |
| remove_from_table (p, hash); |
| } |
| else |
| { |
| unsigned int tregno = REGNO (p->exp); |
| unsigned int tendregno = END_REGNO (p->exp); |
| if (tendregno > regno && tregno < endregno) |
| remove_from_table (p, hash); |
| } |
| } |
| } |
| } |
| |
| /* Remove from the hash table, or mark as invalid, all expressions whose |
| values could be altered by storing in X. X is a register, a subreg, or |
| a memory reference with nonvarying address (because, when a memory |
| reference with a varying address is stored in, all memory references are |
| removed by invalidate_memory so specific invalidation is superfluous). |
| FULL_MODE, if not VOIDmode, indicates that this much should be |
| invalidated instead of just the amount indicated by the mode of X. This |
| is only used for bitfield stores into memory. |
| |
| A nonvarying address may be just a register or just a symbol reference, |
| or it may be either of those plus a numeric offset. */ |
| |
| static void |
| invalidate (rtx x, machine_mode full_mode) |
| { |
| int i; |
| struct table_elt *p; |
| rtx addr; |
| |
| switch (GET_CODE (x)) |
| { |
| case REG: |
| invalidate_reg (x, false); |
| return; |
| |
| case SUBREG: |
| invalidate (SUBREG_REG (x), VOIDmode); |
| return; |
| |
| case PARALLEL: |
| for (i = XVECLEN (x, 0) - 1; i >= 0; --i) |
| invalidate (XVECEXP (x, 0, i), VOIDmode); |
| return; |
| |
| case EXPR_LIST: |
| /* This is part of a disjoint return value; extract the location in |
| question ignoring the offset. */ |
| invalidate (XEXP (x, 0), VOIDmode); |
| return; |
| |
| case MEM: |
| addr = canon_rtx (get_addr (XEXP (x, 0))); |
| /* Calculate the canonical version of X here so that |
| true_dependence doesn't generate new RTL for X on each call. */ |
| x = canon_rtx (x); |
| |
| /* Remove all hash table elements that refer to overlapping pieces of |
| memory. */ |
| if (full_mode == VOIDmode) |
| full_mode = GET_MODE (x); |
| |
| for (i = 0; i < HASH_SIZE; i++) |
| { |
| struct table_elt *next; |
| |
| for (p = table[i]; p; p = next) |
| { |
| next = p->next_same_hash; |
| if (p->in_memory) |
| { |
| /* Just canonicalize the expression once; |
| otherwise each time we call invalidate |
| true_dependence will canonicalize the |
| expression again. */ |
| if (!p->canon_exp) |
| p->canon_exp = canon_rtx (p->exp); |
| if (check_dependence (p->canon_exp, x, full_mode, addr)) |
| remove_from_table (p, i); |
| } |
| } |
| } |
| return; |
| |
| default: |
| gcc_unreachable (); |
| } |
| } |
| |
| /* Invalidate DEST. Used when DEST is not going to be added |
| into the hash table for some reason, e.g. do_not_record |
| flagged on it. */ |
| |
| static void |
| invalidate_dest (rtx dest) |
| { |
| if (REG_P (dest) |
| || GET_CODE (dest) == SUBREG |
| || MEM_P (dest)) |
| invalidate (dest, VOIDmode); |
| else if (GET_CODE (dest) == STRICT_LOW_PART |
| || GET_CODE (dest) == ZERO_EXTRACT) |
| invalidate (XEXP (dest, 0), GET_MODE (dest)); |
| } |
| |
| /* Remove all expressions that refer to register REGNO, |
| since they are already invalid, and we are about to |
| mark that register valid again and don't want the old |
| expressions to reappear as valid. */ |
| |
| static void |
| remove_invalid_refs (unsigned int regno) |
| { |
| unsigned int i; |
| struct table_elt *p, *next; |
| |
| for (i = 0; i < HASH_SIZE; i++) |
| for (p = table[i]; p; p = next) |
| { |
| next = p->next_same_hash; |
| if (!REG_P (p->exp) && refers_to_regno_p (regno, p->exp)) |
| remove_from_table (p, i); |
| } |
| } |
| |
| /* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET, |
| and mode MODE. */ |
| static void |
| remove_invalid_subreg_refs (unsigned int regno, poly_uint64 offset, |
| machine_mode mode) |
| { |
| unsigned int i; |
| struct table_elt *p, *next; |
| |
| for (i = 0; i < HASH_SIZE; i++) |
| for (p = table[i]; p; p = next) |
| { |
| rtx exp = p->exp; |
| next = p->next_same_hash; |
| |
| if (!REG_P (exp) |
| && (GET_CODE (exp) != SUBREG |
| || !REG_P (SUBREG_REG (exp)) |
| || REGNO (SUBREG_REG (exp)) != regno |
| || ranges_maybe_overlap_p (SUBREG_BYTE (exp), |
| GET_MODE_SIZE (GET_MODE (exp)), |
| offset, GET_MODE_SIZE (mode))) |
| && refers_to_regno_p (regno, p->exp)) |
| remove_from_table (p, i); |
| } |
| } |
| |
| /* Recompute the hash codes of any valid entries in the hash table that |
| reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG. |
| |
| This is called when we make a jump equivalence. */ |
| |
| static void |
| rehash_using_reg (rtx x) |
| { |
| unsigned int i; |
| struct table_elt *p, *next; |
| unsigned hash; |
| |
| if (GET_CODE (x) == SUBREG) |
| x = SUBREG_REG (x); |
| |
| /* If X is not a register or if the register is known not to be in any |
| valid entries in the table, we have no work to do. */ |
| |
| if (!REG_P (x) |
| || REG_IN_TABLE (REGNO (x)) < 0 |
| || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x))) |
| return; |
| |
| /* Scan all hash chains looking for valid entries that mention X. |
| If we find one and it is in the wrong hash chain, move it. */ |
| |
| for (i = 0; i < HASH_SIZE; i++) |
| for (p = table[i]; p; p = next) |
| { |
| next = p->next_same_hash; |
| if (reg_mentioned_p (x, p->exp) |
| && exp_equiv_p (p->exp, p->exp, 1, false) |
| && i != (hash = SAFE_HASH (p->exp, p->mode))) |
| { |
| if (p->next_same_hash) |
| p->next_same_hash->prev_same_hash = p->prev_same_hash; |
| |
| if (p->prev_same_hash) |
| p->prev_same_hash->next_same_hash = p->next_same_hash; |
| else |
| table[i] = p->next_same_hash; |
| |
| p->next_same_hash = table[hash]; |
| p->prev_same_hash = 0; |
| if (table[hash]) |
| table[hash]->prev_same_hash = p; |
| table[hash] = p; |
| } |
| } |
| } |
| |
| /* Remove from the hash table any expression that is a call-clobbered |
| register. Also update their TICK values. */ |
| |
| static void |
| invalidate_for_call (void) |
| { |
| unsigned int regno, endregno; |
| unsigned int i; |
| unsigned hash; |
| struct table_elt *p, *next; |
| int in_table = 0; |
| hard_reg_set_iterator hrsi; |
| |
| /* Go through all the hard registers. For each that is clobbered in |
| a CALL_INSN, remove the register from quantity chains and update |
| reg_tick if defined. Also see if any of these registers is currently |
| in the table. */ |
| EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, regno, hrsi) |
| { |
| delete_reg_equiv (regno); |
| if (REG_TICK (regno) >= 0) |
| { |
| REG_TICK (regno)++; |
| SUBREG_TICKED (regno) = -1; |
| } |
| in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0); |
| } |
| |
| /* In the case where we have no call-clobbered hard registers in the |
| table, we are done. Otherwise, scan the table and remove any |
| entry that overlaps a call-clobbered register. */ |
| |
| if (in_table) |
| for (hash = 0; hash < HASH_SIZE; hash++) |
| for (p = table[hash]; p; p = next) |
| { |
| next = p->next_same_hash; |
| |
| if (!REG_P (p->exp) |
| || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER) |
| continue; |
| |
| regno = REGNO (p->exp); |
| endregno = END_REGNO (p->exp); |
| |
| for (i = regno; i < endregno; i++) |
| if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i)) |
| { |
| remove_from_table (p, hash); |
| break; |
| } |
| } |
| } |
| |
| /* Given an expression X of type CONST, |
| and ELT which is its table entry (or 0 if it |
| is not in the hash table), |
| return an alternate expression for X as a register plus integer. |
| If none can be found, return 0. */ |
| |
| static rtx |
| use_related_value (rtx x, struct table_elt *elt) |
| { |
| struct table_elt *relt = 0; |
| struct table_elt *p, *q; |
| HOST_WIDE_INT offset; |
| |
| /* First, is there anything related known? |
| If we have a table element, we can tell from that. |
| Otherwise, must look it up. */ |
| |
| if (elt != 0 && elt->related_value != 0) |
| relt = elt; |
| else if (elt == 0 && GET_CODE (x) == CONST) |
| { |
| rtx subexp = get_related_value (x); |
| if (subexp != 0) |
| relt = lookup (subexp, |
| SAFE_HASH (subexp, GET_MODE (subexp)), |
| GET_MODE (subexp)); |
| } |
| |
| if (relt == 0) |
| return 0; |
| |
| /* Search all related table entries for one that has an |
| equivalent register. */ |
| |
| p = relt; |
| while (1) |
| { |
| /* This loop is strange in that it is executed in two different cases. |
| The first is when X is already in the table. Then it is searching |
| the RELATED_VALUE list of X's class (RELT). The second case is when |
| X is not in the table. Then RELT points to a class for the related |
| value. |
| |
| Ensure that, whichever case we are in, we ignore classes that have |
| the same value as X. */ |
| |
| if (rtx_equal_p (x, p->exp)) |
| q = 0; |
| else |
| for (q = p->first_same_value; q; q = q->next_same_value) |
| if (REG_P (q->exp)) |
| break; |
| |
| if (q) |
| break; |
| |
| p = p->related_value; |
| |
| /* We went all the way around, so there is nothing to be found. |
| Alternatively, perhaps RELT was in the table for some other reason |
| and it has no related values recorded. */ |
| if (p == relt || p == 0) |
| break; |
| } |
| |
| if (q == 0) |
| return 0; |
| |
| offset = (get_integer_term (x) - get_integer_term (p->exp)); |
| /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */ |
| return plus_constant (q->mode, q->exp, offset); |
| } |
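| |
| /* Worked example (illustrative, hypothetical RTL): if X is |
| (const (plus (symbol_ref "foo") (const_int 8))) and the table already |
| records that (reg 100) is equivalent to |
| (const (plus (symbol_ref "foo") (const_int 4))), both constants sit |
| on the related-value chain of (symbol_ref "foo"), OFFSET is 8 - 4 = 4, |
| and the result is (plus (reg 100) (const_int 4)). */ |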
| |
| |
| /* Hash a string. Just add its bytes up. */ |
| static inline unsigned |
| hash_rtx_string (const char *ps) |
| { |
| unsigned hash = 0; |
| const unsigned char *p = (const unsigned char *) ps; |
| |
| if (p) |
| while (*p) |
| hash += *p++; |
| |
| return hash; |
| } |
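| |
| /* Worked example: hash_rtx_string ("ab") is 'a' + 'b' = 97 + 98 = 195. |
| The hash is deliberately simple; strings are rare in hashed RTL |
| (mainly ASM_OPERANDS templates and constraints). */ |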
| |
| /* Same as hash_rtx, but call CB on each rtx if it is not NULL. |
| When the callback returns true, we continue with the new rtx. */ |
| |
| unsigned |
| hash_rtx_cb (const_rtx x, machine_mode mode, |
| int *do_not_record_p, int *hash_arg_in_memory_p, |
| bool have_reg_qty, hash_rtx_callback_function cb) |
| { |
| int i, j; |
| unsigned hash = 0; |
| enum rtx_code code; |
| const char *fmt; |
| machine_mode newmode; |
| rtx newx; |
| |
| /* Used to turn recursion into iteration. We can't rely on GCC's |
| tail-recursion elimination since we need to keep accumulating values |
| in HASH. */ |
| repeat: |
| if (x == 0) |
| return hash; |
| |
| /* Invoke the callback first. */ |
| if (cb != NULL |
| && ((*cb) (x, mode, &newx, &newmode))) |
| { |
| hash += hash_rtx_cb (newx, newmode, do_not_record_p, |
| hash_arg_in_memory_p, have_reg_qty, cb); |
| return hash; |
| } |
| |
| code = GET_CODE (x); |
| switch (code) |
| { |
| case REG: |
| { |
| unsigned int regno = REGNO (x); |
| |
| if (do_not_record_p && !reload_completed) |
| { |
| /* On some machines, we can't record any non-fixed hard register, |
| because extending its life will cause reload problems. We |
| consider ap, fp, sp, gp to be fixed for this purpose. |
| |
| We also consider CCmode registers to be fixed for this purpose; |
| failure to do so leads to failure to simplify 0<100 type of |
| conditionals. |
| |
| On all machines, we can't record any global registers. |
| Nor should we record any register that is in a small |
| class, as defined by TARGET_CLASS_LIKELY_SPILLED_P. */ |
| bool record; |
| |
| if (regno >= FIRST_PSEUDO_REGISTER) |
| record = true; |
| else if (x == frame_pointer_rtx |
| || x == hard_frame_pointer_rtx |
| || x == arg_pointer_rtx |
| || x == stack_pointer_rtx |
| || x == pic_offset_table_rtx) |
| record = true; |
| else if (global_regs[regno]) |
| record = false; |
| else if (fixed_regs[regno]) |
| record = true; |
| else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC) |
| record = true; |
| else if (targetm.small_register_classes_for_mode_p (GET_MODE (x))) |
| record = false; |
| else if (targetm.class_likely_spilled_p (REGNO_REG_CLASS (regno))) |
| record = false; |
| else |
| record = true; |
| |
| if (!record) |
| { |
| *do_not_record_p = 1; |
| return 0; |
| } |
| } |
| |
| hash += ((unsigned int) REG << 7); |
| hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno); |
| return hash; |
| } |
| |
| /* We handle SUBREG of a REG specially because the underlying |
| reg changes its hash value with every value change; we don't |
| want to have to forget unrelated subregs when one subreg changes. */ |
| case SUBREG: |
| { |
| if (REG_P (SUBREG_REG (x))) |
| { |
| hash += (((unsigned int) SUBREG << 7) |
| + REGNO (SUBREG_REG (x)) |
| + (constant_lower_bound (SUBREG_BYTE (x)) |
| / UNITS_PER_WORD)); |
| return hash; |
| } |
| break; |
| } |
| |
| case CONST_INT: |
| hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode |
| + (unsigned int) INTVAL (x)); |
| return hash; |
| |
| case CONST_WIDE_INT: |
| for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++) |
| hash += CONST_WIDE_INT_ELT (x, i); |
| return hash; |
| |
| case CONST_POLY_INT: |
| { |
| inchash::hash h; |
| h.add_int (hash); |
| for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i) |
| h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]); |
| return h.end (); |
| } |
| |
| case CONST_DOUBLE: |
| /* This is like the general case, except that it only counts |
| the integers representing the constant. */ |
| hash += (unsigned int) code + (unsigned int) GET_MODE (x); |
| if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (x) == VOIDmode) |
| hash += ((unsigned int) CONST_DOUBLE_LOW (x) |
| + (unsigned int) CONST_DOUBLE_HIGH (x)); |
| else |
| hash += real_hash (CONST_DOUBLE_REAL_VALUE (x)); |
| return hash; |
| |
| case CONST_FIXED: |
| hash += (unsigned int) code + (unsigned int) GET_MODE (x); |
| hash += fixed_hash (CONST_FIXED_VALUE (x)); |
| return hash; |
| |
| case CONST_VECTOR: |
| { |
| int units; |
| rtx elt; |
| |
| units = const_vector_encoded_nelts (x); |
| |
| for (i = 0; i < units; ++i) |
| { |
| elt = CONST_VECTOR_ENCODED_ELT (x, i); |
| hash += hash_rtx_cb (elt, GET_MODE (elt), |
| do_not_record_p, hash_arg_in_memory_p, |
| have_reg_qty, cb); |
| } |
| |
| return hash; |
| } |
| |
| /* Assume there is only one rtx object for any given label. */ |
| case LABEL_REF: |
| /* We don't hash on the address of the CODE_LABEL to avoid bootstrap |
| differences and differences between each stage's debugging dumps. */ |
| hash += (((unsigned int) LABEL_REF << 7) |
| + CODE_LABEL_NUMBER (label_ref_label (x))); |
| return hash; |
| |
| case SYMBOL_REF: |
| { |
| /* Don't hash on the symbol's address to avoid bootstrap differences. |
| Different hash values may cause expressions to be recorded in |
| different orders and thus different registers to be used in the |
| final assembler. This also avoids differences in the dump files |
| between various stages. */ |
| unsigned int h = 0; |
| const unsigned char *p = (const unsigned char *) XSTR (x, 0); |
| |
| while (*p) |
| h += (h << 7) + *p++; /* ??? revisit */ |
| |
| hash += ((unsigned int) SYMBOL_REF << 7) + h; |
| return hash; |
| } |
| |
| case MEM: |
| /* We don't record if marked volatile or if BLKmode since we don't |
| know the size of the move. */ |
| if (do_not_record_p && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)) |
| { |
| *do_not_record_p = 1; |
| return 0; |
| } |
| if (hash_arg_in_memory_p && !MEM_READONLY_P (x)) |
| *hash_arg_in_memory_p = 1; |
| |
| /* Now that we have already found this special case, |
| might as well speed it up as much as possible. */ |
| hash += (unsigned) MEM; |
| x = XEXP (x, 0); |
| goto repeat; |
| |
| case USE: |
| /* A USE that mentions non-volatile memory needs special |
| handling since the MEM may be BLKmode which normally |
| prevents an entry from being made. Pure calls are |
| marked by a USE which mentions BLKmode memory. |
| See calls.c:emit_call_1. */ |
| if (MEM_P (XEXP (x, 0)) |
| && ! MEM_VOLATILE_P (XEXP (x, 0))) |
| { |
| hash += (unsigned) USE; |
| x = XEXP (x, 0); |
| |
| if (hash_arg_in_memory_p && !MEM_READONLY_P (x)) |
| *hash_arg_in_memory_p = 1; |
| |
| /* Now that we have already found this special case, |
| might as well speed it up as much as possible. */ |
| hash += (unsigned) MEM; |
| x = XEXP (x, 0); |
| goto repeat; |
| } |
| break; |
| |
| case PRE_DEC: |
| case PRE_INC: |
| case POST_DEC: |
| case POST_INC: |
| case PRE_MODIFY: |
| case POST_MODIFY: |
| case PC: |
| case CC0: |
| case CALL: |
| case UNSPEC_VOLATILE: |
| if (do_not_record_p) |
| { |
| *do_not_record_p = 1; |
| return 0; |
| } |
| else |
| return hash; |
| break; |
| |
| case ASM_OPERANDS: |
| if (do_not_record_p && MEM_VOLATILE_P (x)) |
| { |
| *do_not_record_p = 1; |
| return 0; |
| } |
| else |
| { |
| /* We don't want to take the filename and line into account. */ |
| hash += (unsigned) code + (unsigned) GET_MODE (x) |
| + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x)) |
| + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x)) |
| + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x); |
| |
| if (ASM_OPERANDS_INPUT_LENGTH (x)) |
| { |
| for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++) |
| { |
| hash += (hash_rtx_cb (ASM_OPERANDS_INPUT (x, i), |
| GET_MODE (ASM_OPERANDS_INPUT (x, i)), |
| do_not_record_p, hash_arg_in_memory_p, |
| have_reg_qty, cb) |
| + hash_rtx_string |
| (ASM_OPERANDS_INPUT_CONSTRAINT (x, i))); |
| } |
| |
| hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0)); |
| x = ASM_OPERANDS_INPUT (x, 0); |
| mode = GET_MODE (x); |
| goto repeat; |
| } |
| |
| return hash; |
| } |
| break; |
| |
| default: |
| break; |
| } |
| |
| i = GET_RTX_LENGTH (code) - 1; |
| hash += (unsigned) code + (unsigned) GET_MODE (x); |
| fmt = GET_RTX_FORMAT (code); |
| for (; i >= 0; i--) |
| { |
| switch (fmt[i]) |
| { |
| case 'e': |
| /* If we are about to do the last recursive call |
| needed at this level, change it into iteration. |
| This function is called enough to be worth it. */ |
| if (i == 0) |
| { |
| x = XEXP (x, i); |
| goto repeat; |
| } |
| |
| hash += hash_rtx_cb (XEXP (x, i), VOIDmode, do_not_record_p, |
| hash_arg_in_memory_p, |
| have_reg_qty, cb); |
| break; |
| |
| case 'E': |
| for (j = 0; j < XVECLEN (x, i); j++) |
| hash += hash_rtx_cb (XVECEXP (x, i, j), VOIDmode, do_not_record_p, |
| hash_arg_in_memory_p, |
| have_reg_qty, cb); |
| break; |
| |
| case 's': |
| hash += hash_rtx_string (XSTR (x, i)); |
| break; |
| |
| case 'i': |
| hash += (unsigned int) XINT (x, i); |
| break; |
| |
| case 'p': |
| hash += constant_lower_bound (SUBREG_BYTE (x)); |
| break; |
| |
| case '0': case 't': |
| /* Unused. */ |
| break; |
| |
| default: |
| gcc_unreachable (); |
| } |
| } |
| |
| return hash; |
| } |
| |
| /* Hash an rtx. We are careful to make sure the value is never negative. |
| Equivalent registers hash identically. |
| MODE is used in hashing for CONST_INTs only; |
| otherwise the mode of X is used. |
| |
| Store 1 in DO_NOT_RECORD_P if any subexpression is volatile. |
| |
| If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains |
| a MEM rtx which does not have the MEM_READONLY_P flag set. |
| |
| Note that cse_insn knows that the hash code of a MEM expression |
| is just (int) MEM plus the hash code of the address. */ |
| |
| unsigned |
| hash_rtx (const_rtx x, machine_mode mode, int *do_not_record_p, |
| int *hash_arg_in_memory_p, bool have_reg_qty) |
| { |
| return hash_rtx_cb (x, mode, do_not_record_p, |
| hash_arg_in_memory_p, have_reg_qty, NULL); |
| } |
| |
| /* Hash an rtx X for cse via hash_rtx. |
| Stores 1 in do_not_record if any subexpression is volatile. |
| Stores 1 in hash_arg_in_memory if X contains a mem rtx which |
| does not have the MEM_READONLY_P flag set. */ |
| |
| static inline unsigned |
| canon_hash (rtx x, machine_mode mode) |
| { |
| return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true); |
| } |
| |
| /* Like canon_hash but with no side effects, i.e. do_not_record |
| and hash_arg_in_memory are not changed. */ |
| |
| static inline unsigned |
| safe_hash (rtx x, machine_mode mode) |
| { |
| int dummy_do_not_record; |
| return hash_rtx (x, mode, &dummy_do_not_record, NULL, true); |
| } |
| |
| /* Return 1 iff X and Y would canonicalize into the same thing, |
| without actually constructing the canonicalization of either one. |
| If VALIDATE is nonzero, |
| we assume X is an expression being processed from the rtl |
| and Y was found in the hash table. We check register refs |
| in Y for being marked as valid. |
| |
| If FOR_GCSE is true, we compare X and Y for equivalence for GCSE. */ |
| |
| int |
| exp_equiv_p (const_rtx x, const_rtx y, int validate, bool for_gcse) |
| { |
| int i, j; |
| enum rtx_code code; |
| const char *fmt; |
| |
| /* Note: it is incorrect to assume an expression is equivalent to itself |
| if VALIDATE is nonzero. */ |
| if (x == y && !validate) |
| return 1; |
| |
| if (x == 0 || y == 0) |
| return x == y; |
| |
| code = GET_CODE (x); |
| if (code != GET_CODE (y)) |
| return 0; |
| |
| /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */ |
| if (GET_MODE (x) != GET_MODE (y)) |
| return 0; |
| |
| /* MEMs referring to different address space are not equivalent. */ |
| if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y)) |
| return 0; |
| |
| switch (code) |
| { |
| case PC: |
| case CC0: |
| CASE_CONST_UNIQUE: |
| return x == y; |
| |
| case CONST_VECTOR: |
| if (!same_vector_encodings_p (x, y)) |
| return false; |
| break; |
| |
| case LABEL_REF: |
| return label_ref_label (x) == label_ref_label (y); |
| |
| case SYMBOL_REF: |
| return XSTR (x, 0) == XSTR (y, 0); |
| |
| case REG: |
| if (for_gcse) |
| return REGNO (x) == REGNO (y); |
| else |
| { |
| unsigned int regno = REGNO (y); |
| unsigned int i; |
| unsigned int endregno = END_REGNO (y); |
| |
| /* If the quantities are not the same, the expressions are not |
| equivalent. If the quantities are the same and we are not validating, |
| the expressions are equivalent. Otherwise, ensure all regs are |
| up-to-date. */ |
| |
| if (REG_QTY (REGNO (x)) != REG_QTY (regno)) |
| return 0; |
| |
| if (! validate) |
| return 1; |
| |
| for (i = regno; i < endregno; i++) |
| if (REG_IN_TABLE (i) != REG_TICK (i)) |
| return 0; |
| |
| return 1; |
| } |
| |
| case MEM: |
| if (for_gcse) |
| { |
| /* A volatile mem should not be considered equivalent to any |
| other. */ |
| if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y)) |
| return 0; |
| |
| /* Can't merge two expressions in different alias sets, since we |
| can decide that the expression is transparent in a block when |
| it isn't, due to it being set with the different alias set. |
| |
| Also, can't merge two expressions with different MEM_ATTRS. |
| They could e.g. be two different entities allocated into the |
| same space on the stack (see e.g. PR25130). In that case, the |
| MEM addresses can be the same, even though the two MEMs are |
| absolutely not equivalent. |
| |
| But because really all MEM attributes should be the same for |
| equivalent MEMs, we just use the invariant that MEMs that have |
| the same attributes share the same mem_attrs data structure. */ |
| if (!mem_attrs_eq_p (MEM_ATTRS (x), MEM_ATTRS (y))) |
| return 0; |
| |
| /* If we are handling exceptions, we cannot consider two expressions |
| with different trapping status as equivalent, because simple_mem |
| might accept one and reject the other. */ |
| if (cfun->can_throw_non_call_exceptions |
| && (MEM_NOTRAP_P (x) != MEM_NOTRAP_P (y))) |
| return 0; |
| } |
| break; |
| |
| /* For commutative operations, check both orders. */ |
| case PLUS: |
| case MULT: |
| case AND: |
| case IOR: |
| case XOR: |
| case NE: |
| case EQ: |
| return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), |
| validate, for_gcse) |
| && exp_equiv_p (XEXP (x, 1), XEXP (y, 1), |
| validate, for_gcse)) |
| || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1), |
| validate, for_gcse) |
| && exp_equiv_p (XEXP (x, 1), XEXP (y, 0), |
| validate, for_gcse))); |
| |
| case ASM_OPERANDS: |
| /* We don't use the generic code below because we want to |
| disregard filename and line numbers. */ |
| |
| /* A volatile asm isn't equivalent to any other. */ |
| if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y)) |
| return 0; |
| |
| if (GET_MODE (x) != GET_MODE (y) |
| || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y)) |
| || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x), |
| ASM_OPERANDS_OUTPUT_CONSTRAINT (y)) |
| || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y) |
| || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y)) |
| return 0; |
| |
| if (ASM_OPERANDS_INPUT_LENGTH (x)) |
| { |
| for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--) |
| if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i), |
| ASM_OPERANDS_INPUT (y, i), |
| validate, for_gcse) |
| || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i), |
| ASM_OPERANDS_INPUT_CONSTRAINT (y, i))) |
| return 0; |
| } |
| |
| return 1; |
| |
| default: |
| break; |
| } |
| |
| /* Compare the elements. If any pair of corresponding elements |
| fail to match, return 0 for the whole thing. */ |
| |
| fmt = GET_RTX_FORMAT (code); |
| for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
| { |
| switch (fmt[i]) |
| { |
| case 'e': |
| if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), |
| validate, for_gcse)) |
| return 0; |
| break; |
| |
| case 'E': |
| if (XVECLEN (x, i) != XVECLEN (y, i)) |
| return 0; |
| for (j = 0; j < XVECLEN (x, i); j++) |
| if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j), |
| validate, for_gcse)) |
| return 0; |
| break; |
| |
| case 's': |
| if (strcmp (XSTR (x, i), XSTR (y, i))) |
| return 0; |
| break; |
| |
| case 'i': |
| if (XINT (x, i) != XINT (y, i)) |
| return 0; |
| break; |
| |
| case 'w': |
| if (XWINT (x, i) != XWINT (y, i)) |
| return 0; |
| break; |
| |
| case 'p': |
| if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y))) |
| return 0; |
| break; |
| |
| case '0': |
| case 't': |
| break; |
| |
| default: |
| gcc_unreachable (); |
| } |
| } |
| |
| return 1; |
| } |
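| |
| /* Illustrative examples (hypothetical RTL): since PLUS is commutative, |
| (plus (reg 100) (const_int 4)) and (plus (const_int 4) (reg 100)) |
| are considered equivalent. For REGs with FOR_GCSE false, equivalence |
| means the registers currently share a quantity; with VALIDATE nonzero, |
| every register covered by Y must also have REG_IN_TABLE equal to |
| REG_TICK, so a stale table entry never matches fresh RTL. */ |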
| |
| /* Subroutine of canon_reg. Pass *XLOC through canon_reg, and validate |
| the result if necessary. INSN is as for canon_reg. */ |
| |
| static void |
| validate_canon_reg (rtx *xloc, rtx_insn *insn) |
| { |
| if (*xloc) |
| { |
| rtx new_rtx = canon_reg (*xloc, insn); |
| |
| /* If replacing pseudo with hard reg or vice versa, ensure the |
| insn remains valid. Likewise if the insn has MATCH_DUPs. */ |
| gcc_assert (insn && new_rtx); |
| validate_change (insn, xloc, new_rtx, 1); |
| } |
| } |
| |
| /* Canonicalize an expression: |
| replace each register reference inside it |
| with the "oldest" equivalent register. |
| |
| If INSN is nonzero validate_change is used to ensure that INSN remains valid |
| after we make our substitution. The calls are made with IN_GROUP nonzero |
| so apply_change_group must be called upon the outermost return from this |
| function (unless INSN is zero). The result of apply_change_group can |
| generally be discarded since the changes we are making are optional. */ |
| |
| static rtx |
| canon_reg (rtx x, rtx_insn *insn) |
| { |
| int i; |
| enum rtx_code code; |
| const char *fmt; |
| |
| if (x == 0) |
| return x; |
| |
| code = GET_CODE (x); |
| switch (code) |
| { |
| case PC: |
| case CC0: |
| case CONST: |
| CASE_CONST_ANY: |
| case SYMBOL_REF: |
| case LABEL_REF: |
| case ADDR_VEC: |
| case ADDR_DIFF_VEC: |
| return x; |
| |
| case REG: |
| { |
| int first; |
| int q; |
| struct qty_table_elem *ent; |
| |
| /* Never replace a hard reg, because hard regs can appear |
| in more than one machine mode, and we must preserve the mode |
| of each occurrence. Also, some hard regs appear in |
| MEMs that are shared and mustn't be altered. Don't try to |
| replace any reg that maps to a reg of class NO_REGS. */ |
| if (REGNO (x) < FIRST_PSEUDO_REGISTER |
| || ! REGNO_QTY_VALID_P (REGNO (x))) |
| return x; |
| |
| q = REG_QTY (REGNO (x)); |
| ent = &qty_table[q]; |
| first = ent->first_reg; |
| return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first] |
| : REGNO_REG_CLASS (first) == NO_REGS ? x |
| : gen_rtx_REG (ent->mode, first)); |
| } |
| |
| default: |
| break; |
| } |
| |
| fmt = GET_RTX_FORMAT (code); |
| for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
| { |
| int j; |
| |
| if (fmt[i] == 'e') |
| validate_canon_reg (&XEXP (x, i), insn); |
| else if (fmt[i] == 'E') |
| for (j = 0; j < XVECLEN (x, i); j++) |
| validate_canon_reg (&XVECEXP (x, i, j), insn); |
| } |
| |
| return x; |
| } |
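| |
| /* Illustrative example (hypothetical register numbers): if (reg 101) |
| was copied from (reg 100) earlier in the extended basic block, the two |
| share a quantity whose first_reg is 100, so canon_reg rewrites later |
| uses of (reg 101) into (reg 100). Hard registers are never replaced, |
| nor is a register whose oldest equivalent is a hard register of class |
| NO_REGS, as noted above. */ |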
| |
| /* Given an operation (CODE, *PARG1, *PARG2), where CODE is a comparison |
| operation (EQ, NE, GT, etc.), follow it back through the hash table to |
| find what values are actually being compared. |
| |
| *PARG1 and *PARG2 are updated to contain the rtx representing the values |
| actually being compared. For example, if *PARG1 was (cc0) and *PARG2 |
| was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were |
| compared to produce cc0. |
| |
| The return value is the comparison operator: CODE itself if no useful |
| equivalence is found, otherwise the code of the comparison that *PARG1 |
| was found equivalent to, or the code corresponding to the inverse of |
| that comparison. */ |
| |
| static enum rtx_code |
| find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2, |
| machine_mode *pmode1, machine_mode *pmode2) |
| { |
| rtx arg1, arg2; |
| hash_set<rtx> *visited = NULL; |
| /* Set nonzero when we find something of interest. */ |
| rtx x = NULL; |
| |
| arg1 = *parg1, arg2 = *parg2; |
| |
| /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */ |
| |
| while (arg2 == CONST0_RTX (GET_MODE (arg1))) |
| { |
| int reverse_code = 0; |
| struct table_elt *p = 0; |
| |
| /* Remember state from previous iteration. */ |
| if (x) |
| { |
| if (!visited) |
| visited = new hash_set<rtx>; |
| visited->add (x); |
| x = 0; |
| } |
| |
| /* If arg1 is a COMPARE, extract the comparison arguments from it. |
| On machines with CC0, this is the only case that can occur, since |
| fold_rtx will return the COMPARE or item being compared with zero |
| when given CC0. */ |
| |
| if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx) |
| x = arg1; |
| |
| /* If ARG1 is a comparison operator and CODE is testing for |
| STORE_FLAG_VALUE, get the inner arguments. */ |
| |
| else if (COMPARISON_P (arg1)) |
| { |
| #ifdef FLOAT_STORE_FLAG_VALUE |
| REAL_VALUE_TYPE fsfv; |
| #endif |
| |
| if (code == NE |
| || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT |
| && code == LT && STORE_FLAG_VALUE == -1) |
| #ifdef FLOAT_STORE_FLAG_VALUE |
| || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1)) |
| && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)), |
| REAL_VALUE_NEGATIVE (fsfv))) |
| #endif |
| ) |
| x = arg1; |
| else if (code == EQ |
| || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT |
| && code == GE && STORE_FLAG_VALUE == -1) |
| #ifdef FLOAT_STORE_FLAG_VALUE |
| || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1)) |
| && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)), |
| REAL_VALUE_NEGATIVE (fsfv))) |
| #endif |
| ) |
| x = arg1, reverse_code = 1; |
| } |
| |
| /* ??? We could also check for |
| |
| (ne (and (eq (...) (const_int 1))) (const_int 0)) |
| |
| and related forms, but let's wait until we see them occurring. */ |
| |
| if (x == 0) |
| /* Look up ARG1 in the hash table and see if it has an equivalence |
| that lets us see what is being compared. */ |
| p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1)); |
| if (p) |
| { |
| p = p->first_same_value; |
| |
| /* If what we compare is already known to be constant, that is as |
| good as it gets. |
| We need to break the loop in this case, because otherwise we |
| can have an infinite loop when looking at a reg that is known |
| to be a constant which is the same as a comparison of a reg |
| against zero which appears later in the insn stream, which in |
| turn is constant and the same as the comparison of the first reg |
| against zero... */ |
| if (p->is_const) |
| break; |
| } |
| |
| for (; p; p = p->next_same_value) |
| { |
| machine_mode inner_mode = GET_MODE (p->exp); |
| #ifdef FLOAT_STORE_FLAG_VALUE |
| REAL_VALUE_TYPE fsfv; |
| #endif |
| |
| /* If the entry isn't valid, skip it. */ |
| if (! exp_equiv_p (p->exp, p->exp, 1, false)) |
| continue; |
| |
| /* If it's a comparison we've used before, skip it. */ |
| if (visited && visited->contains (p->exp)) |
| continue; |
| |
| if (GET_CODE (p->exp) == COMPARE |
| /* Another possibility is that this machine has a compare insn |
| that includes the comparison code. In that case, ARG1 would |
| be equivalent to a comparison operation that would set ARG1 to |
| either STORE_FLAG_VALUE or zero. If this is an NE operation, |
| ORIG_CODE is the actual comparison being done; if it is an EQ, |
| we must reverse ORIG_CODE. On machines with a negative value |
| for STORE_FLAG_VALUE, also look at LT and GE operations. */ |
| || ((code == NE |
| || (code == LT |
| && val_signbit_known_set_p (inner_mode, |
| STORE_FLAG_VALUE)) |
| #ifdef FLOAT_STORE_FLAG_VALUE |
| || (code == LT |
| && SCALAR_FLOAT_MODE_P (inner_mode) |
| && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)), |
| REAL_VALUE_NEGATIVE (fsfv))) |
| #endif |
| ) |
| && COMPARISON_P (p->exp))) |
| { |
| x = p->exp; |
| break; |
| } |
| else if ((code == EQ |
| || (code == GE |
| && val_signbit_known_set_p (inner_mode, |
| STORE_FLAG_VALUE)) |
| #ifdef FLOAT_STORE_FLAG_VALUE |
| || (code == GE |
| && SCALAR_FLOAT_MODE_P (inner_mode) |
| && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)), |
| REAL_VALUE_NEGATIVE (fsfv))) |
| #endif |
| ) |
| && COMPARISON_P (p->exp)) |
| { |
| reverse_code = 1; |
| x = p->exp; |
| break; |
| } |
| |
| /* If this is a non-trapping address, e.g. fp + constant, the |
| equivalent is a better operand since it may let us predict |
| the value of the comparison. */ |
| else if (!rtx_addr_can_trap_p (p->exp)) |
| { |
| arg1 = p->exp; |
| continue; |
| } |
| } |
| |
| /* If we didn't find a useful equivalence for ARG1, we are done. |
| Otherwise, set up for the next iteration. */ |
| if (x == 0) |
| break; |
| |
| /* If we need to reverse the comparison, make sure that is |
| possible -- we can't necessarily infer the value of GE from LT |
| with floating-point operands. */ |
| if (reverse_code) |
| { |
| enum rtx_code reversed = reversed_comparison_code (x, NULL); |
| if (reversed == UNKNOWN) |
| break; |
| else |
| code = reversed; |
| } |
| else if (COMPARISON_P (x)) |
| code = GET_CODE (x); |
| arg1 = XEXP (x, 0), arg2 = XEXP (x, 1); |
| } |
| |
| /* Return our results. Return the modes from before fold_rtx |
| because fold_rtx might produce const_int, and then it's too late. */ |
| *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2); |
| *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0); |
| |
| if (visited) |
| delete visited; |
| return code; |
| } |
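| |
| /* Illustrative example (hypothetical RTL): with *PARG1 = (reg 100), |
| *PARG2 = (const_int 0) and CODE = NE, if the table records that |
| (reg 100) is equivalent to (gt (reg 101) (reg 102)), the loop steps |
| through that equivalence and the function returns GT with |
| *PARG1 = (reg 101) and *PARG2 = (reg 102). Had CODE been EQ, the |
| result would instead be the reversed comparison (LE here), obtained |
| via reversed_comparison_code. */ |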
| |
| /* If X is a nontrivial arithmetic operation on an argument for which |
| a constant value can be determined, return the result of operating |
| on that value, as a constant. Otherwise, return X, possibly with |
| one or more operands changed to a forward-propagated constant. |
| |
| If X is a register whose contents are known, we do NOT return |
| those contents here; equiv_constant is called to perform that task. |
| For SUBREGs and MEMs, we do that both here and in equiv_constant. |
| |
| INSN is the insn that we may be modifying. If it is 0, make a copy |
| of X before modifying it. */ |
| |
| static rtx |
| fold_rtx (rtx x, rtx_insn *insn) |
| { |
| enum rtx_code code; |
| machine_mode mode; |
| const char *fmt; |
| int i; |
| rtx new_rtx = 0; |
| int changed = 0; |
| poly_int64 xval; |
| |
| /* Operands of X. */ |
| /* Workaround -Wmaybe-uninitialized false positive during |
| profiledbootstrap by initializing them. */ |
| rtx folded_arg0 = NULL_RTX; |
| rtx folded_arg1 = NULL_RTX; |
| |
| /* Constant equivalents of first three operands of X; |
| 0 when no such equivalent is known. */ |
| rtx const_arg0; |
| rtx const_arg1; |
| rtx const_arg2; |
| |
| /* The mode of the first operand of X. We need this for sign and zero |
| extends. */ |
| machine_mode mode_arg0; |
| |
| if (x == 0) |
| return x; |
| |
| /* Try to perform some initial simplifications on X. */ |
| code = GET_CODE (x); |
| switch (code) |
| { |
| case MEM: |
| case SUBREG: |
| /* The first operand of a SIGN/ZERO_EXTRACT has a different meaning |
| than it would in other contexts. Basically its mode does not |
| signify the size of the object read. That information is carried |
| by the size operand. If we happen to have a MEM of the appropriate |
| mode in our tables with a constant value we could simplify the |
| extraction incorrectly if we allowed substitution of that value |
| for the MEM. */ |
| case ZERO_EXTRACT: |
| case SIGN_EXTRACT: |
| if ((new_rtx = equiv_constant (x)) != NULL_RTX) |
| return new_rtx; |
| return x; |
| |
| case CONST: |
| CASE_CONST_ANY: |
| case SYMBOL_REF: |
| case LABEL_REF: |
| case REG: |
| case PC: |
| /* No use simplifying an EXPR_LIST |
| since they are used only for lists of args |
| in a function call's REG_EQUAL note. */ |
| case EXPR_LIST: |
| return x; |
| |
| case CC0: |
| return prev_insn_cc0; |
| |
| case ASM_OPERANDS: |
| if (insn) |
| { |
| for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--) |
| validate_change (insn, &ASM_OPERANDS_INPUT (x, i), |
| fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0); |
| } |
| return x; |
| |
| case CALL: |
| if (NO_FUNCTION_CSE && CONSTANT_P (XEXP (XEXP (x, 0), 0))) |
| return x; |
| break; |
| |
| /* Anything else goes through the loop below. */ |
| default: |
| break; |
| } |
| |
| mode = GET_MODE (x); |
| const_arg0 = 0; |
| const_arg1 = 0; |
| const_arg2 = 0; |
| mode_arg0 = VOIDmode; |
| |
| /* Try folding our operands. |
| Then see which ones have constant values known. */ |
| |
| fmt = GET_RTX_FORMAT (code); |
| for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
| if (fmt[i] == 'e') |
| { |
| rtx folded_arg = XEXP (x, i), const_arg; |
| machine_mode mode_arg = GET_MODE (folded_arg); |
| |
| switch (GET_CODE (folded_arg)) |
| { |
| case MEM: |
| case REG: |
| case SUBREG: |
| const_arg = equiv_constant (folded_arg); |
| break; |
| |
| case CONST: |
| CASE_CONST_ANY: |
| case SYMBOL_REF: |
| case LABEL_REF: |
| const_arg = folded_arg; |
| break; |
| |
| case CC0: |
| /* The cc0-user and cc0-setter may be in different blocks if |
| the cc0-setter potentially traps. In that case PREV_INSN_CC0 |
| will have been cleared as we exited the block with the |
| setter. |
| |
| While we could potentially track cc0 in this case, it just |
| doesn't seem to be worth it given that cc0 targets are not |
| terribly common or important these days and trapping math |
| is rarely used. The combination of those two conditions |
| necessary to trip this situation is exceedingly rare in the |
| real world. */ |
| if (!prev_
|