| /* SCC value numbering for trees |
| Copyright (C) 2006-2015 Free Software Foundation, Inc. |
| Contributed by Daniel Berlin <dan@dberlin.org> |
| |
| This file is part of GCC. |
| |
| GCC is free software; you can redistribute it and/or modify |
| it under the terms of the GNU General Public License as published by |
| the Free Software Foundation; either version 3, or (at your option) |
| any later version. |
| |
| GCC is distributed in the hope that it will be useful, |
| but WITHOUT ANY WARRANTY; without even the implied warranty of |
| MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
| GNU General Public License for more details. |
| |
| You should have received a copy of the GNU General Public License |
| along with GCC; see the file COPYING3. If not see |
| <http://www.gnu.org/licenses/>. */ |
| |
| #include "config.h" |
| #include "system.h" |
| #include "coretypes.h" |
| #include "tm.h" |
| #include "hash-set.h" |
| #include "machmode.h" |
| #include "vec.h" |
| #include "double-int.h" |
| #include "input.h" |
| #include "alias.h" |
| #include "symtab.h" |
| #include "wide-int.h" |
| #include "inchash.h" |
| #include "tree.h" |
| #include "fold-const.h" |
| #include "stor-layout.h" |
| #include "predict.h" |
| #include "hard-reg-set.h" |
| #include "function.h" |
| #include "dominance.h" |
| #include "cfg.h" |
| #include "cfganal.h" |
| #include "basic-block.h" |
| #include "gimple-pretty-print.h" |
| #include "tree-inline.h" |
| #include "hash-table.h" |
| #include "tree-ssa-alias.h" |
| #include "internal-fn.h" |
| #include "gimple-fold.h" |
| #include "tree-eh.h" |
| #include "gimple-expr.h" |
| #include "is-a.h" |
| #include "gimple.h" |
| #include "gimplify.h" |
| #include "gimple-ssa.h" |
| #include "tree-phinodes.h" |
| #include "ssa-iterators.h" |
| #include "stringpool.h" |
| #include "tree-ssanames.h" |
| #include "hashtab.h" |
| #include "rtl.h" |
| #include "flags.h" |
| #include "statistics.h" |
| #include "real.h" |
| #include "fixed-value.h" |
| #include "insn-config.h" |
| #include "expmed.h" |
| #include "dojump.h" |
| #include "explow.h" |
| #include "calls.h" |
| #include "emit-rtl.h" |
| #include "varasm.h" |
| #include "stmt.h" |
| #include "expr.h" |
| #include "tree-dfa.h" |
| #include "tree-ssa.h" |
| #include "dumpfile.h" |
| #include "alloc-pool.h" |
| #include "cfgloop.h" |
| #include "params.h" |
| #include "tree-ssa-propagate.h" |
| #include "tree-ssa-sccvn.h" |
| #include "tree-cfg.h" |
| #include "domwalk.h" |
| #include "ipa-ref.h" |
| #include "plugin-api.h" |
| #include "cgraph.h" |
| |
| /* This algorithm is based on the SCC algorithm presented by Keith |
| Cooper and L. Taylor Simpson in "SCC-Based Value Numbering" |
| (http://citeseer.ist.psu.edu/41805.html). In |
| straight-line code, it is equivalent to a regular hash-based value |
| numbering performed in reverse postorder. |
| |
| For code with cycles, there are two alternatives, both of which |
| require keeping the hashtables separate from the actual list of |
| value numbers for SSA names. |
| |
| 1. Iterate value numbering in an RPO walk of the blocks, removing |
| all the entries from the hashtable after each iteration (but |
| keeping the SSA name->value number mapping between iterations). |
| Iterate until it does not change. |
| |
| 2. Perform value numbering as part of an SCC walk on the SSA graph, |
| iterating only the cycles in the SSA graph until they do not change |
| (using a separate, optimistic hashtable for value numbering the SCC |
| operands). |
| |
| The second is not just faster in practice (because most SSA graph |
| cycles do not involve all the variables in the graph), it also has |
| some nice properties. |
| |
| One of these nice properties is that when we pop an SCC off the |
| stack, we are guaranteed to have processed all the operands coming from |
| *outside of that SCC*, so we do not need to do anything special to |
| ensure they have value numbers. |
| |
| Another nice property is that the SCC walk is done as part of a DFS |
| of the SSA graph, which makes it easy to perform combining and |
| simplifying operations at the same time. |
| |
| The code below is deliberately written in a way that makes it easy |
| to separate the SCC walk from the other work it does. |
| |
| In order to propagate constants through the code, we track which |
| expressions contain constants, and use those while folding. In |
| theory, we could also track expressions whose value numbers are |
| replaced, in case we end up folding based on expression |
| identities. |
| |
| In order to value number memory, we assign value numbers to vuses. |
| This enables us to note that, for example, stores to the same |
| address of the same value from the same starting memory states are |
| equivalent. |
| TODO: |
| |
| 1. We can iterate only the changing portions of the SCCs, but |
| I have not seen an SCC big enough for this to be a win. |
| 2. If you differentiate between phi nodes for loops and phi nodes |
| for if-then-else, you can properly consider phi nodes in different |
| blocks for equivalence. |
| 3. We could value number vuses in more cases, particularly, whole |
| structure copies. |
| */ |
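| |
| /* As a minimal illustrative sketch (hypothetical SSA names, not taken |
| from any particular testcase), consider the cycle |
| |
| # i_1 = PHI <0(entry), i_3(latch)> |
| i_3 = i_1 + 0; |
| |
| Visiting the SCC {i_1, i_3} optimistically, the back-edge argument |
| i_3 still has the value VN_TOP, so the PHI value-numbers to 0; i_3 |
| then simplifies to 0 + 0 == 0. A second iteration with PHI <0, 0> |
| changes nothing, so the SCC is stable and its value numbers are |
| carried over from the optimistic table into the valid table. */ |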
| |
| |
| /* vn_nary_op hashtable helpers. */ |
| |
| struct vn_nary_op_hasher : typed_noop_remove <vn_nary_op_s> |
| { |
| typedef vn_nary_op_s value_type; |
| typedef vn_nary_op_s compare_type; |
| static inline hashval_t hash (const value_type *); |
| static inline bool equal (const value_type *, const compare_type *); |
| }; |
| |
| /* Return the computed hashcode for nary operation VNO1. */ |
| |
| inline hashval_t |
| vn_nary_op_hasher::hash (const value_type *vno1) |
| { |
| return vno1->hashcode; |
| } |
| |
| /* Compare nary operations VNO1 and VNO2 and return true if they are |
| equivalent. */ |
| |
| inline bool |
| vn_nary_op_hasher::equal (const value_type *vno1, const compare_type *vno2) |
| { |
| return vn_nary_op_eq (vno1, vno2); |
| } |
| |
| typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type; |
| typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type; |
| |
| |
| /* vn_phi hashtable helpers. */ |
| |
| static int |
| vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2); |
| |
| struct vn_phi_hasher |
| { |
| typedef vn_phi_s value_type; |
| typedef vn_phi_s compare_type; |
| static inline hashval_t hash (const value_type *); |
| static inline bool equal (const value_type *, const compare_type *); |
| static inline void remove (value_type *); |
| }; |
| |
| /* Return the computed hashcode for phi operation VP1. */ |
| |
| inline hashval_t |
| vn_phi_hasher::hash (const value_type *vp1) |
| { |
| return vp1->hashcode; |
| } |
| |
| /* Compare two phi entries for equality, ignoring VN_TOP arguments. */ |
| |
| inline bool |
| vn_phi_hasher::equal (const value_type *vp1, const compare_type *vp2) |
| { |
| return vn_phi_eq (vp1, vp2); |
| } |
| |
| /* Free a phi operation structure PHI. */ |
| |
| inline void |
| vn_phi_hasher::remove (value_type *phi) |
| { |
| phi->phiargs.release (); |
| } |
| |
| typedef hash_table<vn_phi_hasher> vn_phi_table_type; |
| typedef vn_phi_table_type::iterator vn_phi_iterator_type; |
| |
| |
| /* Compare two reference operands P1 and P2 for equality. Return true if |
| they are equal, and false otherwise. */ |
| |
| static int |
| vn_reference_op_eq (const void *p1, const void *p2) |
| { |
| const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1; |
| const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2; |
| |
| return (vro1->opcode == vro2->opcode |
| /* We do not care about differences in type qualification. */ |
| && (vro1->type == vro2->type |
| || (vro1->type && vro2->type |
| && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type), |
| TYPE_MAIN_VARIANT (vro2->type)))) |
| && expressions_equal_p (vro1->op0, vro2->op0) |
| && expressions_equal_p (vro1->op1, vro2->op1) |
| && expressions_equal_p (vro1->op2, vro2->op2)); |
| } |
| |
| /* Free a reference operation structure VR. */ |
| |
| static inline void |
| free_reference (vn_reference_s *vr) |
| { |
| vr->operands.release (); |
| } |
| |
| |
| /* vn_reference hashtable helpers. */ |
| |
| struct vn_reference_hasher |
| { |
| typedef vn_reference_s value_type; |
| typedef vn_reference_s compare_type; |
| static inline hashval_t hash (const value_type *); |
| static inline bool equal (const value_type *, const compare_type *); |
| static inline void remove (value_type *); |
| }; |
| |
| /* Return the hashcode for a given reference operation VR1. */ |
| |
| inline hashval_t |
| vn_reference_hasher::hash (const value_type *vr1) |
| { |
| return vr1->hashcode; |
| } |
| |
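| /* Return true if reference operations V and C are equal. */ |
| |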
| inline bool |
| vn_reference_hasher::equal (const value_type *v, const compare_type *c) |
| { |
| return vn_reference_eq (v, c); |
| } |
| |
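| /* Release the operands vector of the reference operation V. */ |
| |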
| inline void |
| vn_reference_hasher::remove (value_type *v) |
| { |
| free_reference (v); |
| } |
| |
| typedef hash_table<vn_reference_hasher> vn_reference_table_type; |
| typedef vn_reference_table_type::iterator vn_reference_iterator_type; |
| |
| |
| /* The set of hashtables and alloc_pool's for their items. */ |
| |
| typedef struct vn_tables_s |
| { |
| vn_nary_op_table_type *nary; |
| vn_phi_table_type *phis; |
| vn_reference_table_type *references; |
| struct obstack nary_obstack; |
| alloc_pool phis_pool; |
| alloc_pool references_pool; |
| } *vn_tables_t; |
| |
| |
| /* vn_constant hashtable helpers. */ |
| |
| struct vn_constant_hasher : typed_free_remove <vn_constant_s> |
| { |
| typedef vn_constant_s value_type; |
| typedef vn_constant_s compare_type; |
| static inline hashval_t hash (const value_type *); |
| static inline bool equal (const value_type *, const compare_type *); |
| }; |
| |
| /* Hash table hash function for vn_constant_t. */ |
| |
| inline hashval_t |
| vn_constant_hasher::hash (const value_type *vc1) |
| { |
| return vc1->hashcode; |
| } |
| |
| /* Hash table equality function for vn_constant_t. */ |
| |
| inline bool |
| vn_constant_hasher::equal (const value_type *vc1, const compare_type *vc2) |
| { |
| if (vc1->hashcode != vc2->hashcode) |
| return false; |
| |
| return vn_constant_eq_with_type (vc1->constant, vc2->constant); |
| } |
| |
| static hash_table<vn_constant_hasher> *constant_to_value_id; |
| static bitmap constant_value_ids; |
| |
| |
| /* Valid hashtables storing information we have proven to be |
| correct. */ |
| |
| static vn_tables_t valid_info; |
| |
| /* Optimistic hashtables storing information we are making assumptions about |
| during iterations. */ |
| |
| static vn_tables_t optimistic_info; |
| |
| /* Pointer to the set of hashtables that is currently being used. |
| Should always point to either the optimistic_info, or the |
| valid_info. */ |
| |
| static vn_tables_t current_info; |
| |
| |
| /* Reverse post order index for each basic block. */ |
| |
| static int *rpo_numbers; |
| |
| #define SSA_VAL(x) (VN_INFO ((x))->valnum) |
| |
| /* Return the SSA value of the VUSE x, supporting released VDEFs |
| during elimination which will value-number the VDEF to the |
| associated VUSE (but not substitute in the whole lattice). */ |
| |
| static inline tree |
| vuse_ssa_val (tree x) |
| { |
| if (!x) |
| return NULL_TREE; |
| |
| do |
| { |
| x = SSA_VAL (x); |
| } |
| while (SSA_NAME_IN_FREE_LIST (x)); |
| |
| return x; |
| } |
| |
| /* This represents the top of the VN lattice, which is the universal |
| value. */ |
| |
| tree VN_TOP; |
| |
| /* Unique counter for our value ids. */ |
| |
| static unsigned int next_value_id; |
| |
| /* Next DFS number and the stack for strongly connected component |
| detection. */ |
| |
| static unsigned int next_dfs_num; |
| static vec<tree> sccstack; |
| |
| |
| |
| /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects |
| are allocated on an obstack for locality reasons, and to free them |
| without looping over the vec. */ |
| |
| static vec<vn_ssa_aux_t> vn_ssa_aux_table; |
| static struct obstack vn_ssa_aux_obstack; |
| |
| /* Return the value numbering information for a given SSA name. */ |
| |
| vn_ssa_aux_t |
| VN_INFO (tree name) |
| { |
| vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)]; |
| gcc_checking_assert (res); |
| return res; |
| } |
| |
| /* Set the value numbering info for a given SSA name to a given |
| value. */ |
| |
| static inline void |
| VN_INFO_SET (tree name, vn_ssa_aux_t value) |
| { |
| vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value; |
| } |
| |
| /* Initialize the value numbering info for a given SSA name. |
| This should be called just once for every SSA name. */ |
| |
| vn_ssa_aux_t |
| VN_INFO_GET (tree name) |
| { |
| vn_ssa_aux_t newinfo; |
| |
| newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux); |
| memset (newinfo, 0, sizeof (struct vn_ssa_aux)); |
| if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()) |
| vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1); |
| vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo; |
| return newinfo; |
| } |
| |
| |
| /* Get the representative expression for the SSA_NAME NAME. Returns |
| the representative SSA_NAME if there is no expression associated with it. */ |
| |
| tree |
| vn_get_expr_for (tree name) |
| { |
| vn_ssa_aux_t vn = VN_INFO (name); |
| gimple def_stmt; |
| tree expr = NULL_TREE; |
| enum tree_code code; |
| |
| if (vn->valnum == VN_TOP) |
| return name; |
| |
| /* If the value-number is a constant it is the representative |
| expression. */ |
| if (TREE_CODE (vn->valnum) != SSA_NAME) |
| return vn->valnum; |
| |
| /* Get to the information of the value of this SSA_NAME. */ |
| vn = VN_INFO (vn->valnum); |
| |
| /* If the value-number is a constant it is the representative |
| expression. */ |
| if (TREE_CODE (vn->valnum) != SSA_NAME) |
| return vn->valnum; |
| |
| /* Else if we have an expression, return it. */ |
| if (vn->expr != NULL_TREE) |
| return vn->expr; |
| |
| /* Otherwise use the defining statement to build the expression. */ |
| def_stmt = SSA_NAME_DEF_STMT (vn->valnum); |
| |
| /* If the defining statement of the value number is not an assignment, |
| use the value number directly. */ |
| if (!is_gimple_assign (def_stmt)) |
| return vn->valnum; |
| |
| /* Note that we can valueize here because we clear the cached |
| simplified expressions after each optimistic iteration. */ |
| code = gimple_assign_rhs_code (def_stmt); |
| switch (TREE_CODE_CLASS (code)) |
| { |
| case tcc_reference: |
| if ((code == REALPART_EXPR |
| || code == IMAGPART_EXPR |
| || code == VIEW_CONVERT_EXPR) |
| && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt), |
| 0)) == SSA_NAME) |
| expr = fold_build1 (code, |
| gimple_expr_type (def_stmt), |
| vn_valueize (TREE_OPERAND |
| (gimple_assign_rhs1 (def_stmt), 0))); |
| break; |
| |
| case tcc_unary: |
| expr = fold_build1 (code, |
| gimple_expr_type (def_stmt), |
| vn_valueize (gimple_assign_rhs1 (def_stmt))); |
| break; |
| |
| case tcc_binary: |
| expr = fold_build2 (code, |
| gimple_expr_type (def_stmt), |
| vn_valueize (gimple_assign_rhs1 (def_stmt)), |
| vn_valueize (gimple_assign_rhs2 (def_stmt))); |
| break; |
| |
| case tcc_exceptional: |
| if (code == CONSTRUCTOR |
| && TREE_CODE |
| (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE) |
| expr = gimple_assign_rhs1 (def_stmt); |
| break; |
| |
| default:; |
| } |
| if (expr == NULL_TREE) |
| return vn->valnum; |
| |
| /* Cache the expression. */ |
| vn->expr = expr; |
| |
| return expr; |
| } |
| |
| /* Return the vn_kind the expression computed by the stmt should be |
| associated with. */ |
| |
| enum vn_kind |
| vn_get_stmt_kind (gimple stmt) |
| { |
| switch (gimple_code (stmt)) |
| { |
| case GIMPLE_CALL: |
| return VN_REFERENCE; |
| case GIMPLE_PHI: |
| return VN_PHI; |
| case GIMPLE_ASSIGN: |
| { |
| enum tree_code code = gimple_assign_rhs_code (stmt); |
| tree rhs1 = gimple_assign_rhs1 (stmt); |
| switch (get_gimple_rhs_class (code)) |
| { |
| case GIMPLE_UNARY_RHS: |
| case GIMPLE_BINARY_RHS: |
| case GIMPLE_TERNARY_RHS: |
| return VN_NARY; |
| case GIMPLE_SINGLE_RHS: |
| switch (TREE_CODE_CLASS (code)) |
| { |
| case tcc_reference: |
| /* VOP-less references can go through the unary case. */ |
| if ((code == REALPART_EXPR |
| || code == IMAGPART_EXPR |
| || code == VIEW_CONVERT_EXPR |
| || code == BIT_FIELD_REF) |
| && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME) |
| return VN_NARY; |
| |
| /* Fallthrough. */ |
| case tcc_declaration: |
| return VN_REFERENCE; |
| |
| case tcc_constant: |
| return VN_CONSTANT; |
| |
| default: |
| if (code == ADDR_EXPR) |
| return (is_gimple_min_invariant (rhs1) |
| ? VN_CONSTANT : VN_REFERENCE); |
| else if (code == CONSTRUCTOR) |
| return VN_NARY; |
| return VN_NONE; |
| } |
| default: |
| return VN_NONE; |
| } |
| } |
| default: |
| return VN_NONE; |
| } |
| } |
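| |
| /* For illustration (hypothetical SSA names, not from any particular |
| dump), some statements and the kind they are classified as: |
| x_1 = y_2 + 1;          -> VN_NARY |
| x_1 = a.b;              -> VN_REFERENCE |
| x_1 = PHI <y_2, z_3>;   -> VN_PHI |
| x_1 = foo (y_2);        -> VN_REFERENCE |
| x_1 = 42;               -> VN_CONSTANT */ |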
| |
| /* Lookup a value id for CONSTANT and return it. If it does not |
| exist, return 0. */ |
| |
| unsigned int |
| get_constant_value_id (tree constant) |
| { |
| vn_constant_s **slot; |
| struct vn_constant_s vc; |
| |
| vc.hashcode = vn_hash_constant_with_type (constant); |
| vc.constant = constant; |
| slot = constant_to_value_id->find_slot (&vc, NO_INSERT); |
| if (slot) |
| return (*slot)->value_id; |
| return 0; |
| } |
| |
| /* Lookup a value id for CONSTANT. If it does not exist, create a |
| new one and return it; otherwise return the existing id. */ |
| |
| unsigned int |
| get_or_alloc_constant_value_id (tree constant) |
| { |
| vn_constant_s **slot; |
| struct vn_constant_s vc; |
| vn_constant_t vcp; |
| |
| vc.hashcode = vn_hash_constant_with_type (constant); |
| vc.constant = constant; |
| slot = constant_to_value_id->find_slot (&vc, INSERT); |
| if (*slot) |
| return (*slot)->value_id; |
| |
| vcp = XNEW (struct vn_constant_s); |
| vcp->hashcode = vc.hashcode; |
| vcp->constant = constant; |
| vcp->value_id = get_next_value_id (); |
| *slot = vcp; |
| bitmap_set_bit (constant_value_ids, vcp->value_id); |
| return vcp->value_id; |
| } |
| |
| /* Return true if V is a value id for a constant. */ |
| |
| bool |
| value_id_constant_p (unsigned int v) |
| { |
| return bitmap_bit_p (constant_value_ids, v); |
| } |
| |
| /* Compute the hash for a reference operand VRO1. */ |
| |
| static void |
| vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate) |
| { |
| hstate.add_int (vro1->opcode); |
| if (vro1->op0) |
| inchash::add_expr (vro1->op0, hstate); |
| if (vro1->op1) |
| inchash::add_expr (vro1->op1, hstate); |
| if (vro1->op2) |
| inchash::add_expr (vro1->op2, hstate); |
| } |
| |
| /* Compute a hash for the reference operation VR1 and return it. */ |
| |
| static hashval_t |
| vn_reference_compute_hash (const vn_reference_t vr1) |
| { |
| inchash::hash hstate; |
| hashval_t result; |
| int i; |
| vn_reference_op_t vro; |
| HOST_WIDE_INT off = -1; |
| bool deref = false; |
| |
| FOR_EACH_VEC_ELT (vr1->operands, i, vro) |
| { |
| if (vro->opcode == MEM_REF) |
| deref = true; |
| else if (vro->opcode != ADDR_EXPR) |
| deref = false; |
| if (vro->off != -1) |
| { |
| if (off == -1) |
| off = 0; |
| off += vro->off; |
| } |
| else |
| { |
| if (off != -1 |
| && off != 0) |
| hstate.add_int (off); |
| off = -1; |
| if (deref |
| && vro->opcode == ADDR_EXPR) |
| { |
| if (vro->op0) |
| { |
| tree op = TREE_OPERAND (vro->op0, 0); |
| hstate.add_int (TREE_CODE (op)); |
| inchash::add_expr (op, hstate); |
| } |
| } |
| else |
| vn_reference_op_compute_hash (vro, hstate); |
| } |
| } |
| result = hstate.end (); |
| /* ??? We would ICE later if we hash instead of adding that in. */ |
| if (vr1->vuse) |
| result += SSA_NAME_VERSION (vr1->vuse); |
| |
| return result; |
| } |
| |
| /* Return true if reference operations VR1 and VR2 are equivalent. This |
| means they have the same set of operands and vuses. */ |
| |
| bool |
| vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2) |
| { |
| unsigned i, j; |
| |
| /* Early out if this is not a hash collision. */ |
| if (vr1->hashcode != vr2->hashcode) |
| return false; |
| |
| /* The VOP needs to be the same. */ |
| if (vr1->vuse != vr2->vuse) |
| return false; |
| |
| /* If the operands are the same we are done. */ |
| if (vr1->operands == vr2->operands) |
| return true; |
| |
| if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type))) |
| return false; |
| |
| if (INTEGRAL_TYPE_P (vr1->type) |
| && INTEGRAL_TYPE_P (vr2->type)) |
| { |
| if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type)) |
| return false; |
| } |
| else if (INTEGRAL_TYPE_P (vr1->type) |
| && (TYPE_PRECISION (vr1->type) |
| != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type)))) |
| return false; |
| else if (INTEGRAL_TYPE_P (vr2->type) |
| && (TYPE_PRECISION (vr2->type) |
| != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type)))) |
| return false; |
| |
| i = 0; |
| j = 0; |
| do |
| { |
| HOST_WIDE_INT off1 = 0, off2 = 0; |
| vn_reference_op_t vro1, vro2; |
| vn_reference_op_s tem1, tem2; |
| bool deref1 = false, deref2 = false; |
| for (; vr1->operands.iterate (i, &vro1); i++) |
| { |
| if (vro1->opcode == MEM_REF) |
| deref1 = true; |
| if (vro1->off == -1) |
| break; |
| off1 += vro1->off; |
| } |
| for (; vr2->operands.iterate (j, &vro2); j++) |
| { |
| if (vro2->opcode == MEM_REF) |
| deref2 = true; |
| if (vro2->off == -1) |
| break; |
| off2 += vro2->off; |
| } |
| if (off1 != off2) |
| return false; |
| if (deref1 && vro1->opcode == ADDR_EXPR) |
| { |
| memset (&tem1, 0, sizeof (tem1)); |
| tem1.op0 = TREE_OPERAND (vro1->op0, 0); |
| tem1.type = TREE_TYPE (tem1.op0); |
| tem1.opcode = TREE_CODE (tem1.op0); |
| vro1 = &tem1; |
| deref1 = false; |
| } |
| if (deref2 && vro2->opcode == ADDR_EXPR) |
| { |
| memset (&tem2, 0, sizeof (tem2)); |
| tem2.op0 = TREE_OPERAND (vro2->op0, 0); |
| tem2.type = TREE_TYPE (tem2.op0); |
| tem2.opcode = TREE_CODE (tem2.op0); |
| vro2 = &tem2; |
| deref2 = false; |
| } |
| if (deref1 != deref2) |
| return false; |
| if (!vn_reference_op_eq (vro1, vro2)) |
| return false; |
| ++j; |
| ++i; |
| } |
| while (vr1->operands.length () != i |
| || vr2->operands.length () != j); |
| |
| return true; |
| } |
| |
| /* Copy the operations present in load/store REF into RESULT, a vector of |
| vn_reference_op_s's. */ |
| |
| static void |
| copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result) |
| { |
| if (TREE_CODE (ref) == TARGET_MEM_REF) |
| { |
| vn_reference_op_s temp; |
| |
| result->reserve (3); |
| |
| memset (&temp, 0, sizeof (temp)); |
| temp.type = TREE_TYPE (ref); |
| temp.opcode = TREE_CODE (ref); |
| temp.op0 = TMR_INDEX (ref); |
| temp.op1 = TMR_STEP (ref); |
| temp.op2 = TMR_OFFSET (ref); |
| temp.off = -1; |
| result->quick_push (temp); |
| |
| memset (&temp, 0, sizeof (temp)); |
| temp.type = NULL_TREE; |
| temp.opcode = ERROR_MARK; |
| temp.op0 = TMR_INDEX2 (ref); |
| temp.off = -1; |
| result->quick_push (temp); |
| |
| memset (&temp, 0, sizeof (temp)); |
| temp.type = NULL_TREE; |
| temp.opcode = TREE_CODE (TMR_BASE (ref)); |
| temp.op0 = TMR_BASE (ref); |
| temp.off = -1; |
| result->quick_push (temp); |
| return; |
| } |
| |
| /* For non-calls, store the information that makes up the address. */ |
| tree orig = ref; |
| while (ref) |
| { |
| vn_reference_op_s temp; |
| |
| memset (&temp, 0, sizeof (temp)); |
| temp.type = TREE_TYPE (ref); |
| temp.opcode = TREE_CODE (ref); |
| temp.off = -1; |
| |
| switch (temp.opcode) |
| { |
| case MODIFY_EXPR: |
| temp.op0 = TREE_OPERAND (ref, 1); |
| break; |
| case WITH_SIZE_EXPR: |
| temp.op0 = TREE_OPERAND (ref, 1); |
| temp.off = 0; |
| break; |
| case MEM_REF: |
| /* The base address gets its own vn_reference_op_s structure. */ |
| temp.op0 = TREE_OPERAND (ref, 1); |
| { |
| offset_int off = mem_ref_offset (ref); |
| if (wi::fits_shwi_p (off)) |
| temp.off = off.to_shwi (); |
| } |
| break; |
| case BIT_FIELD_REF: |
| /* Record bits and position. */ |
| temp.op0 = TREE_OPERAND (ref, 1); |
| temp.op1 = TREE_OPERAND (ref, 2); |
| break; |
| case COMPONENT_REF: |
| /* The field decl is enough to unambiguously specify the field; |
| a matching type is not necessary, and a mismatching type |
| is always a spurious difference. */ |
| temp.type = NULL_TREE; |
| temp.op0 = TREE_OPERAND (ref, 1); |
| temp.op1 = TREE_OPERAND (ref, 2); |
| { |
| tree this_offset = component_ref_field_offset (ref); |
| if (this_offset |
| && TREE_CODE (this_offset) == INTEGER_CST) |
| { |
| tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1)); |
| if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0) |
| { |
| offset_int off |
| = (wi::to_offset (this_offset) |
| + wi::lrshift (wi::to_offset (bit_offset), |
| LOG2_BITS_PER_UNIT)); |
| if (wi::fits_shwi_p (off) |
| /* Prohibit value-numbering zero offset components |
| of addresses the same before the pass folding |
| __builtin_object_size had a chance to run |
| (checking cfun->after_inlining does the |
| trick here). */ |
| && (TREE_CODE (orig) != ADDR_EXPR |
| || off != 0 |
| || cfun->after_inlining)) |
| temp.off = off.to_shwi (); |
| } |
| } |
| } |
| break; |
| case ARRAY_RANGE_REF: |
| case ARRAY_REF: |
| /* Record index as operand. */ |
| temp.op0 = TREE_OPERAND (ref, 1); |
| /* Always record lower bounds and element size. */ |
| temp.op1 = array_ref_low_bound (ref); |
| temp.op2 = array_ref_element_size (ref); |
| if (TREE_CODE (temp.op0) == INTEGER_CST |
| && TREE_CODE (temp.op1) == INTEGER_CST |
| && TREE_CODE (temp.op2) == INTEGER_CST) |
| { |
| offset_int off = ((wi::to_offset (temp.op0) |
| - wi::to_offset (temp.op1)) |
| * wi::to_offset (temp.op2)); |
| if (wi::fits_shwi_p (off)) |
| temp.off = off.to_shwi(); |
| } |
| break; |
| case VAR_DECL: |
| if (DECL_HARD_REGISTER (ref)) |
| { |
| temp.op0 = ref; |
| break; |
| } |
| /* Fallthru. */ |
| case PARM_DECL: |
| case CONST_DECL: |
| case RESULT_DECL: |
| /* Canonicalize decls to MEM[&decl] which is what we end up with |
| when valueizing MEM[ptr] with ptr = &decl. */ |
| temp.opcode = MEM_REF; |
| temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0); |
| temp.off = 0; |
| result->safe_push (temp); |
| temp.opcode = ADDR_EXPR; |
| temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref); |
| temp.type = TREE_TYPE (temp.op0); |
| temp.off = -1; |
| break; |
| case STRING_CST: |
| case INTEGER_CST: |
| case COMPLEX_CST: |
| case VECTOR_CST: |
| case REAL_CST: |
| case FIXED_CST: |
| case CONSTRUCTOR: |
| case SSA_NAME: |
| temp.op0 = ref; |
| break; |
| case ADDR_EXPR: |
| if (is_gimple_min_invariant (ref)) |
| { |
| temp.op0 = ref; |
| break; |
| } |
| break; |
| /* These are only interesting for their operands, their |
| existence, and their type. They will never be the last |
| ref in the chain of references (i.e. they require an |
| operand), so we don't have to put anything |
| for op* as it will be handled by the iteration. */ |
| case REALPART_EXPR: |
| case VIEW_CONVERT_EXPR: |
| temp.off = 0; |
| break; |
| case IMAGPART_EXPR: |
| /* This is only interesting for its constant offset. */ |
| temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref))); |
| break; |
| default: |
| gcc_unreachable (); |
| } |
| result->safe_push (temp); |
| |
| if (REFERENCE_CLASS_P (ref) |
| || TREE_CODE (ref) == MODIFY_EXPR |
| || TREE_CODE (ref) == WITH_SIZE_EXPR |
| || (TREE_CODE (ref) == ADDR_EXPR |
| && !is_gimple_min_invariant (ref))) |
| ref = TREE_OPERAND (ref, 0); |
| else |
| ref = NULL_TREE; |
| } |
| } |
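| |
| /* As a minimal sketch (assuming a hypothetical variable a of a struct |
| type whose field b is an int array), the reference a.b[i_1] is |
| decomposed outermost-first into roughly |
| ARRAY_REF     (op0 = i_1, op1 = low bound, op2 = element size) |
| COMPONENT_REF (op0 = FIELD_DECL b) |
| MEM_REF       (op0 = zero pointer constant, off = 0) |
| ADDR_EXPR     (op0 = &a, off = -1) |
| where the trailing MEM_REF/ADDR_EXPR pair is the canonical form the |
| VAR_DECL case above produces for the base decl. */ |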
| |
| /* Build an alias-oracle reference abstraction in *REF from the vn_reference |
| operands in OPS, the reference alias set SET and the reference type TYPE. |
| Return true if something useful was produced. */ |
| |
| bool |
| ao_ref_init_from_vn_reference (ao_ref *ref, |
| alias_set_type set, tree type, |
| vec<vn_reference_op_s> ops) |
| { |
| vn_reference_op_t op; |
| unsigned i; |
| tree base = NULL_TREE; |
| tree *op0_p = &base; |
| HOST_WIDE_INT offset = 0; |
| HOST_WIDE_INT max_size; |
| HOST_WIDE_INT size = -1; |
| tree size_tree = NULL_TREE; |
| alias_set_type base_alias_set = -1; |
| |
| /* First get the final access size from just the outermost expression. */ |
| op = &ops[0]; |
| if (op->opcode == COMPONENT_REF) |
| size_tree = DECL_SIZE (op->op0); |
| else if (op->opcode == BIT_FIELD_REF) |
| size_tree = op->op0; |
| else |
| { |
| machine_mode mode = TYPE_MODE (type); |
| if (mode == BLKmode) |
| size_tree = TYPE_SIZE (type); |
| else |
| size = GET_MODE_BITSIZE (mode); |
| } |
| if (size_tree != NULL_TREE) |
| { |
| if (!tree_fits_uhwi_p (size_tree)) |
| size = -1; |
| else |
| size = tree_to_uhwi (size_tree); |
| } |
| |
| /* Initially, maxsize is the same as the accessed element size. |
| In the following it will only grow (or become -1). */ |
| max_size = size; |
| |
| /* Compute cumulative bit-offset for nested component-refs and array-refs, |
| and find the ultimate containing object. */ |
| FOR_EACH_VEC_ELT (ops, i, op) |
| { |
| switch (op->opcode) |
| { |
| /* These may be in the reference ops, but we cannot do anything |
| sensible with them here. */ |
| case ADDR_EXPR: |
| /* Apart from ADDR_EXPR arguments to MEM_REF. */ |
| if (base != NULL_TREE |
| && TREE_CODE (base) == MEM_REF |
| && op->op0 |
| && DECL_P (TREE_OPERAND (op->op0, 0))) |
| { |
| vn_reference_op_t pop = &ops[i-1]; |
| base = TREE_OPERAND (op->op0, 0); |
| if (pop->off == -1) |
| { |
| max_size = -1; |
| offset = 0; |
| } |
| else |
| offset += pop->off * BITS_PER_UNIT; |
| op0_p = NULL; |
| break; |
| } |
| /* Fallthru. */ |
| case CALL_EXPR: |
| return false; |
| |
| /* Record the base objects. */ |
| case MEM_REF: |
| base_alias_set = get_deref_alias_set (op->op0); |
| *op0_p = build2 (MEM_REF, op->type, |
| NULL_TREE, op->op0); |
| op0_p = &TREE_OPERAND (*op0_p, 0); |
| break; |
| |
| case VAR_DECL: |
| case PARM_DECL: |
| case RESULT_DECL: |
| case SSA_NAME: |
| *op0_p = op->op0; |
| op0_p = NULL; |
| break; |
| |
| /* And now the usual component-reference style ops. */ |
| case BIT_FIELD_REF: |
| offset += tree_to_shwi (op->op1); |
| break; |
| |
| case COMPONENT_REF: |
| { |
| tree field = op->op0; |
| /* We do not have a complete COMPONENT_REF tree here so we |
| cannot use component_ref_field_offset. Do the interesting |
| parts manually. */ |
| |
| if (op->op1 |
| || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))) |
| max_size = -1; |
| else |
| { |
| offset += (tree_to_uhwi (DECL_FIELD_OFFSET (field)) |
| * BITS_PER_UNIT); |
| offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field)); |
| } |
| break; |
| } |
| |
| case ARRAY_RANGE_REF: |
| case ARRAY_REF: |
| /* We recorded the lower bound and the element size. */ |
| if (!tree_fits_shwi_p (op->op0) |
| || !tree_fits_shwi_p (op->op1) |
| || !tree_fits_shwi_p (op->op2)) |
| max_size = -1; |
| else |
| { |
| HOST_WIDE_INT hindex = tree_to_shwi (op->op0); |
| hindex -= tree_to_shwi (op->op1); |
| hindex *= tree_to_shwi (op->op2); |
| hindex *= BITS_PER_UNIT; |
| offset += hindex; |
| } |
| break; |
| |
| case REALPART_EXPR: |
| break; |
| |
| case IMAGPART_EXPR: |
| offset += size; |
| break; |
| |
| case VIEW_CONVERT_EXPR: |
| break; |
| |
| case STRING_CST: |
| case INTEGER_CST: |
| case COMPLEX_CST: |
| case VECTOR_CST: |
| case REAL_CST: |
| case CONSTRUCTOR: |
| case CONST_DECL: |
| return false; |
| |
| default: |
| return false; |
| } |
| } |
| |
| if (base == NULL_TREE) |
| return false; |
| |
| ref->ref = NULL_TREE; |
| ref->base = base; |
| ref->offset = offset; |
| ref->size = size; |
| ref->max_size = max_size; |
| ref->ref_alias_set = set; |
| if (base_alias_set != -1) |
| ref->base_alias_set = base_alias_set; |
| else |
| ref->base_alias_set = get_alias_set (base); |
| /* We discount volatiles from value-numbering elsewhere. */ |
| ref->volatile_p = false; |
| |
| return true; |
| } |
| |
| /* Copy the operations that make up the call statement CALL into RESULT, |
| a vector of vn_reference_op_s's. */ |
| |
| static void |
| copy_reference_ops_from_call (gcall *call, |
| vec<vn_reference_op_s> *result) |
| { |
| vn_reference_op_s temp; |
| unsigned i; |
| tree lhs = gimple_call_lhs (call); |
| int lr; |
| |
| /* If two calls have a different non-SSA lhs, vdef value numbers should be |
| different. By adding the lhs here in the vector, we ensure that the |
| hashcode is different, guaranteeing a different value number. */ |
| if (lhs && TREE_CODE (lhs) != SSA_NAME) |
| { |
| memset (&temp, 0, sizeof (temp)); |
| temp.opcode = MODIFY_EXPR; |
| temp.type = TREE_TYPE (lhs); |
| temp.op0 = lhs; |
| temp.off = -1; |
| result->safe_push (temp); |
| } |
| |
| /* Copy the type, opcode, function, static chain and EH region, if any. */ |
| memset (&temp, 0, sizeof (temp)); |
| temp.type = gimple_call_return_type (call); |
| temp.opcode = CALL_EXPR; |
| temp.op0 = gimple_call_fn (call); |
| temp.op1 = gimple_call_chain (call); |
| if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0) |
| temp.op2 = size_int (lr); |
| temp.off = -1; |
| if (gimple_call_with_bounds_p (call)) |
| temp.with_bounds = 1; |
| result->safe_push (temp); |
| |
| /* Copy the call arguments. As they can be references as well, |
| just chain them together. */ |
| for (i = 0; i < gimple_call_num_args (call); ++i) |
| { |
| tree callarg = gimple_call_arg (call, i); |
| copy_reference_ops_from_ref (callarg, result); |
| } |
| } |
| |
| /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates |
| *I_P to point to the last element of the replacement. */ |
| void |
| vn_reference_fold_indirect (vec<vn_reference_op_s> *ops, |
| unsigned int *i_p) |
| { |
| unsigned int i = *i_p; |
| vn_reference_op_t op = &(*ops)[i]; |
| vn_reference_op_t mem_op = &(*ops)[i - 1]; |
| tree addr_base; |
| HOST_WIDE_INT addr_offset = 0; |
| |
| /* All we have to do is, for an address &OBJ.foo.bar, add the offset |
| of .foo.bar to the preceding MEM_REF offset and replace the |
| address with &OBJ. */ |
| addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0), |
| &addr_offset); |
| gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF); |
| if (addr_base != TREE_OPERAND (op->op0, 0)) |
| { |
| offset_int off = offset_int::from (mem_op->op0, SIGNED); |
| off += addr_offset; |
| mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off); |
| op->op0 = build_fold_addr_expr (addr_base); |
| if (tree_fits_shwi_p (mem_op->op0)) |
| mem_op->off = tree_to_shwi (mem_op->op0); |
| else |
| mem_op->off = -1; |
| } |
| } |
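| |
| /* For example (a minimal sketch, assuming field b of a sits at byte |
| offset 4), an operand pair |
| MEM_REF (op0 = 8), ADDR_EXPR (&a.b) |
| is rewritten by the function above into |
| MEM_REF (op0 = 12), ADDR_EXPR (&a). */ |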
| |
| /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates |
| *I_P to point to the last element of the replacement. */ |
| static void |
| vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops, |
| unsigned int *i_p) |
| { |
| unsigned int i = *i_p; |
| vn_reference_op_t op = &(*ops)[i]; |
| vn_reference_op_t mem_op = &(*ops)[i - 1]; |
| gimple def_stmt; |
| enum tree_code code; |
| offset_int off; |
| |
| def_stmt = SSA_NAME_DEF_STMT (op->op0); |
| if (!is_gimple_assign (def_stmt)) |
| return; |
| |
| code = gimple_assign_rhs_code (def_stmt); |
| if (code != ADDR_EXPR |
| && code != POINTER_PLUS_EXPR) |
| return; |
| |
| off = offset_int::from (mem_op->op0, SIGNED); |
| |
| /* All we have to do is, for an address &OBJ.foo.bar, add the offset |
| of .foo.bar to the preceding MEM_REF offset and replace the |
| address with &OBJ. */ |
| if (code == ADDR_EXPR) |
| { |
| tree addr, addr_base; |
| HOST_WIDE_INT addr_offset; |
| |
| addr = gimple_assign_rhs1 (def_stmt); |
| addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0), |
| &addr_offset); |
| if (!addr_base |
| || TREE_CODE (addr_base) != MEM_REF) |
| return; |
| |
| off += addr_offset; |
| off += mem_ref_offset (addr_base); |
| op->op0 = TREE_OPERAND (addr_base, 0); |
| } |
| else |
| { |
| tree ptr, ptroff; |
| ptr = gimple_assign_rhs1 (def_stmt); |
| ptroff = gimple_assign_rhs2 (def_stmt); |
| if (TREE_CODE (ptr) != SSA_NAME |
| || TREE_CODE (ptroff) != INTEGER_CST) |
| return; |
| |
| off += wi::to_offset (ptroff); |
| op->op0 = ptr; |
| } |
| |
| mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off); |
| if (tree_fits_shwi_p (mem_op->op0)) |
| mem_op->off = tree_to_shwi (mem_op->op0); |
| else |
| mem_op->off = -1; |
| if (TREE_CODE (op->op0) == SSA_NAME) |
| op->op0 = SSA_VAL (op->op0); |
| if (TREE_CODE (op->op0) != SSA_NAME) |
| op->opcode = TREE_CODE (op->op0); |
| |
| /* And recurse. */ |
| if (TREE_CODE (op->op0) == SSA_NAME) |
| vn_reference_maybe_forwprop_address (ops, i_p); |
| else if (TREE_CODE (op->op0) == ADDR_EXPR) |
| vn_reference_fold_indirect (ops, i_p); |
| } |
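| |
| /* For example (hypothetical names), given the definition |
| ptr_1 = ptr_2 + 4; |
| an operand pair MEM_REF (op0 = 8), ptr_1 is rewritten by the function |
| above into MEM_REF (op0 = 12), ptr_2 (valueized), and the routine then |
| recurses in case ptr_2 is itself defined by an address. */ |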
| |
| /* Optimize the reference REF to a constant if possible or return |
| NULL_TREE if not. */ |
| |
| tree |
| fully_constant_vn_reference_p (vn_reference_t ref) |
| { |
| vec<vn_reference_op_s> operands = ref->operands; |
| vn_reference_op_t op; |
| |
| /* Try to simplify the translated expression if it is |
| a call to a builtin function with at most two arguments. */ |
| op = &operands[0]; |
| if (op->opcode == CALL_EXPR |
| && TREE_CODE (op->op0) == ADDR_EXPR |
| && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL |
| && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0)) |
| && operands.length () >= 2 |
| && operands.length () <= 3) |
| { |
| vn_reference_op_t arg0, arg1 = NULL; |
| bool anyconst = false; |
| arg0 = &operands[1]; |
| if (operands.length () > 2) |
| arg1 = &operands[2]; |
| if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant |
| || (arg0->opcode == ADDR_EXPR |
| && is_gimple_min_invariant (arg0->op0))) |
| anyconst = true; |
| if (arg1 |
| && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant |
| || (arg1->opcode == ADDR_EXPR |
| && is_gimple_min_invariant (arg1->op0)))) |
| anyconst = true; |
| if (anyconst) |
| { |
| tree folded = build_call_expr (TREE_OPERAND (op->op0, 0), |
| arg1 ? 2 : 1, |
| arg0->op0, |
| arg1 ? arg1->op0 : NULL); |
| if (folded |
| && TREE_CODE (folded) == NOP_EXPR) |
| folded = TREE_OPERAND (folded, 0); |
| if (folded |
| && is_gimple_min_invariant (folded)) |
| return folded; |
| } |
| } |
| |
| /* Simplify reads from constants or constant initializers. */ |
| else if (BITS_PER_UNIT == 8 |
| && is_gimple_reg_type (ref->type) |
| && (!INTEGRAL_TYPE_P (ref->type) |
| || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0)) |
| { |
| HOST_WIDE_INT off = 0; |
| HOST_WIDE_INT size; |
| if (INTEGRAL_TYPE_P (ref->type)) |
| size = TYPE_PRECISION (ref->type); |
| else |
| size = tree_to_shwi (TYPE_SIZE (ref->type)); |
| if (size % BITS_PER_UNIT != 0 |
| || size > MAX_BITSIZE_MODE_ANY_MODE) |
| return NULL_TREE; |
| size /= BITS_PER_UNIT; |
| unsigned i; |
| for (i = 0; i < operands.length (); ++i) |
| { |
| if (operands[i].off == -1) |
| return NULL_TREE; |
| off += operands[i].off; |
| if (operands[i].opcode == MEM_REF) |
| { |
| ++i; |
| break; |
| } |
| } |
| vn_reference_op_t base = &operands[--i]; |
| tree ctor = error_mark_node; |
| tree decl = NULL_TREE; |
| if (TREE_CODE_CLASS (base->opcode) == tcc_constant) |
| ctor = base->op0; |
| else if (base->opcode == MEM_REF |
| && base[1].opcode == ADDR_EXPR |
| && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL |
| || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL)) |
| { |
| decl = TREE_OPERAND (base[1].op0, 0); |
| ctor = ctor_for_folding (decl); |
| } |
| if (ctor == NULL_TREE) |
| return build_zero_cst (ref->type); |
| else if (ctor != error_mark_node) |
| { |
| if (decl) |
| { |
| tree res = fold_ctor_reference (ref->type, ctor, |
| off * BITS_PER_UNIT, |
| size * BITS_PER_UNIT, decl); |
| if (res) |
| { |
| STRIP_USELESS_TYPE_CONVERSION (res); |
| if (is_gimple_min_invariant (res)) |
| return res; |
| } |
| } |
| else |
| { |
| unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT]; |
| if (native_encode_expr (ctor, buf, size, off) > 0) |
| return native_interpret_expr (ref->type, buf, size); |
| } |
| } |
| } |
| |
| return NULL_TREE; |
| } |
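| |
| /* As a minimal sketch (hypothetical declaration, assuming 4-byte int), |
| a read of tbl[1] from |
| static const int tbl[2] = { 1, 2 }; |
| has operands ARRAY_REF (off = 4), MEM_REF (off = 0), ADDR_EXPR (&tbl); |
| the function above finds the initializer via ctor_for_folding and |
| folds the read to the constant 2. */ |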
| |
| /* Transform any SSA_NAMEs in a vector of vn_reference_op_s |
| structures into their value numbers. This is done in-place, and |
| the vector passed in is returned. *VALUEIZED_ANYTHING will specify |
| whether any operands were valueized. */ |
| |
| static vec<vn_reference_op_s> |
| valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything) |
| { |
| vn_reference_op_t vro; |
| unsigned int i; |
| |
| *valueized_anything = false; |
| |
| FOR_EACH_VEC_ELT (orig, i, vro) |
| { |
| if (vro->opcode == SSA_NAME |
| || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)) |
| { |
| tree tem = SSA_VAL (vro->op0); |
| if (tem != vro->op0) |
| { |
| *valueized_anything = true; |
| vro->op0 = tem; |
| } |
| /* If it transforms from an SSA_NAME to a constant, update |
| the opcode. */ |
| if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME) |
| vro->opcode = TREE_CODE (vro->op0); |
| } |
| if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME) |
| { |
| tree tem = SSA_VAL (vro->op1); |
| if (tem != vro->op1) |
| { |
| *valueized_anything = true; |
| vro->op1 = tem; |
| } |
| } |
| if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME) |
| { |
| tree tem = SSA_VAL (vro->op2); |
| if (tem != vro->op2) |
| { |
| *valueized_anything = true; |
| vro->op2 = tem; |
| } |
| } |
| /* If it transforms from an SSA_NAME to an address, fold with |
| a preceding indirect reference. */ |
| if (i > 0 |
| && vro->op0 |
| && TREE_CODE (vro->op0) == ADDR_EXPR |
| && orig[i - 1].opcode == MEM_REF) |
| vn_reference_fold_indirect (&orig, &i); |
| else if (i > 0 |
| && vro->opcode == SSA_NAME |
| && orig[i - 1].opcode == MEM_REF) |
| vn_reference_maybe_forwprop_address (&orig, &i); |
| /* If it transforms a non-constant ARRAY_REF into a constant |
| one, adjust the constant offset. */ |
| else if (vro->opcode == ARRAY_REF |
| && vro->off == -1 |
| && TREE_CODE (vro->op0) == INTEGER_CST |
| && TREE_CODE (vro->op1) == INTEGER_CST |
| && TREE_CODE (vro->op2) == INTEGER_CST) |
| { |
| offset_int off = ((wi::to_offset (vro->op0) |
| - wi::to_offset (vro->op1)) |
| * wi::to_offset (vro->op2)); |
| if (wi::fits_shwi_p (off)) |
| vro->off = off.to_shwi (); |
| } |
| } |
| |
| return orig; |
| } |
| |
| static vec<vn_reference_op_s> |
| valueize_refs (vec<vn_reference_op_s> orig) |
| { |
| bool tem; |
| return valueize_refs_1 (orig, &tem); |
| } |
| |
| static vec<vn_reference_op_s> shared_lookup_references; |
| |
| /* Create a vector of vn_reference_op_s structures from REF, a |
| REFERENCE_CLASS_P tree. The vector is shared among all callers of |
| this function. *VALUEIZED_ANYTHING will specify whether any |
| operands were valueized. */ |
| |
| static vec<vn_reference_op_s> |
| valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything) |
| { |
| if (!ref) |
| return vNULL; |
| shared_lookup_references.truncate (0); |
| copy_reference_ops_from_ref (ref, &shared_lookup_references); |
| shared_lookup_references = valueize_refs_1 (shared_lookup_references, |
| valueized_anything); |
| return shared_lookup_references; |
| } |
| |
| /* Create a vector of vn_reference_op_s structures from CALL, a |
| call statement. The vector is shared among all callers of |
| this function. */ |
| |
| static vec<vn_reference_op_s> |
| valueize_shared_reference_ops_from_call (gcall *call) |
| { |
| if (!call) |
| return vNULL; |
| shared_lookup_references.truncate (0); |
| copy_reference_ops_from_call (call, &shared_lookup_references); |
| shared_lookup_references = valueize_refs (shared_lookup_references); |
| return shared_lookup_references; |
| } |
| |
| /* Lookup a SCCVN reference operation VR in the current hash table. |
| Returns the resulting value number if it exists in the hash table, |
| NULL_TREE otherwise. VNRESULT will be filled in with the actual |
| vn_reference_t stored in the hashtable if something is found. */ |
| |
| static tree |
| vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult) |
| { |
| vn_reference_s **slot; |
| hashval_t hash; |
| |
| hash = vr->hashcode; |
| slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT); |
| if (!slot && current_info == optimistic_info) |
| slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT); |
| if (slot) |
| { |
| if (vnresult) |
| *vnresult = (vn_reference_t)*slot; |
| return ((vn_reference_t)*slot)->result; |
| } |
| |
| return NULL_TREE; |
| } |
| |
| static tree *last_vuse_ptr; |
| static vn_lookup_kind vn_walk_kind; |
| static vn_lookup_kind default_vn_walk_kind; |
| |
| /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_ |
| with the current VUSE and performs the expression lookup. */ |
| |
| static void * |
| vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, |
| unsigned int cnt, void *vr_) |
| { |
| vn_reference_t vr = (vn_reference_t)vr_; |
| vn_reference_s **slot; |
| hashval_t hash; |
| |
| /* This bounds the stmt walks we perform on reference lookups |
| to O(1) instead of O(N) where N is the number of dominating |
| stores. */ |
| if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS)) |
| return (void *)-1; |
| |
| if (last_vuse_ptr) |
| *last_vuse_ptr = vuse; |
| |
| /* Fixup vuse and hash. */ |
| if (vr->vuse) |
| vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse); |
| vr->vuse = vuse_ssa_val (vuse); |
| if (vr->vuse) |
| vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse); |
| |
| hash = vr->hashcode; |
| slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT); |
| if (!slot && current_info == optimistic_info) |
| slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT); |
| if (slot) |
| return *slot; |
| |
| return NULL; |
| } |
| |
| /* Lookup an existing or insert a new vn_reference entry into the |
| value table for the VUSE, SET, TYPE, OPERANDS reference which |
| has the value VALUE which is either a constant or an SSA name. */ |
| |
| static vn_reference_t |
| vn_reference_lookup_or_insert_for_pieces (tree vuse, |
| alias_set_type set, |
| tree type, |
| vec<vn_reference_op_s, |
| va_heap> operands, |
| tree value) |
| { |
| vn_reference_s vr1; |
| vn_reference_t result; |
| unsigned value_id; |
| vr1.vuse = vuse; |
| vr1.operands = operands; |
| vr1.type = type; |
| vr1.set = set; |
| vr1.hashcode = vn_reference_compute_hash (&vr1); |
| if (vn_reference_lookup_1 (&vr1, &result)) |
| return result; |
| if (TREE_CODE (value) == SSA_NAME) |
| value_id = VN_INFO (value)->value_id; |
| else |
| value_id = get_or_alloc_constant_value_id (value); |
| return vn_reference_insert_pieces (vuse, set, type, |
| operands.copy (), value, value_id); |
| } |
| |
| /* Callback for walk_non_aliased_vuses. Tries to perform a lookup |
| from the statement defining VUSE and if not successful tries to |
| translate *REF and VR_ through an aggregate copy at the definition |
| of VUSE. */ |
| |
| static void * |
| vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_, |
| bool disambiguate_only) |
| { |
| vn_reference_t vr = (vn_reference_t)vr_; |
| gimple def_stmt = SSA_NAME_DEF_STMT (vuse); |
| tree base; |
| HOST_WIDE_INT offset, maxsize; |
| static vec<vn_reference_op_s> |
| lhs_ops = vNULL; |
| ao_ref lhs_ref; |
| bool lhs_ref_ok = false; |
| |
| /* First try to disambiguate after value-replacing in the definition's LHS. */ |
| if (is_gimple_assign (def_stmt)) |
| { |
| vec<vn_reference_op_s> tem; |
| tree lhs = gimple_assign_lhs (def_stmt); |
| bool valueized_anything = false; |
| /* Avoid re-allocation overhead. */ |
| lhs_ops.truncate (0); |
| copy_reference_ops_from_ref (lhs, &lhs_ops); |
| tem = lhs_ops; |
| lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything); |
| gcc_assert (lhs_ops == tem); |
| if (valueized_anything) |
| { |
| lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref, |
| get_alias_set (lhs), |
| TREE_TYPE (lhs), lhs_ops); |
| if (lhs_ref_ok |
| && !refs_may_alias_p_1 (ref, &lhs_ref, true)) |
| return NULL; |
| } |
| else |
| { |
| ao_ref_init (&lhs_ref, lhs); |
| lhs_ref_ok = true; |
| } |
| } |
| else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL) |
| && gimple_call_num_args (def_stmt) <= 4) |
| { |
| /* For builtin calls valueize their arguments and call the |
| alias oracle again. Valueization may improve points-to |
| info of pointers and constify size and position arguments. |
| Originally this was motivated by PR61034 which has |
| conditional calls to free falsely clobbering ref because |
| of imprecise points-to info of the argument. */ |
| tree oldargs[4]; |
| bool valueized_anything = false; |
| for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i) |
| { |
| oldargs[i] = gimple_call_arg (def_stmt, i); |
| if (TREE_CODE (oldargs[i]) == SSA_NAME |
| && VN_INFO (oldargs[i])->valnum != oldargs[i]) |
| { |
| gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum); |
| valueized_anything = true; |
| } |
| } |
| if (valueized_anything) |
| { |
| bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt), |
| ref); |
| for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i) |
| gimple_call_set_arg (def_stmt, i, oldargs[i]); |
| if (!res) |
| return NULL; |
| } |
| } |
| |
| if (disambiguate_only) |
| return (void *)-1; |
| |
| base = ao_ref_base (ref); |
| offset = ref->offset; |
| maxsize = ref->max_size; |
| |
| /* If we cannot constrain the size of the reference we cannot |
| test if anything kills it. */ |
| if (maxsize == -1) |
| return (void *)-1; |
| |
| /* We can't deduce anything useful from clobbers. */ |
| if (gimple_clobber_p (def_stmt)) |
| return (void *)-1; |
| |
| /* def_stmt may-defs *ref. See if we can derive a value for *ref |
| from that definition. |
| 1) Memset. */ |
| if (is_gimple_reg_type (vr->type) |
| && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET) |
| && integer_zerop (gimple_call_arg (def_stmt, 1)) |
| && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)) |
| && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR) |
| { |
| tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0); |
| tree base2; |
| HOST_WIDE_INT offset2, size2, maxsize2; |
| base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2); |
| size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8; |
| if ((unsigned HOST_WIDE_INT)size2 / 8 |
| == tree_to_uhwi (gimple_call_arg (def_stmt, 2)) |
| && maxsize2 != -1 |
| && operand_equal_p (base, base2, 0) |
| && offset2 <= offset |
| && offset2 + size2 >= offset + maxsize) |
| { |
| tree val = build_zero_cst (vr->type); |
| return vn_reference_lookup_or_insert_for_pieces |
| (vuse, vr->set, vr->type, vr->operands, val); |
| } |
| } |
| |
| /* 2) Assignment from an empty CONSTRUCTOR. */ |
| else if (is_gimple_reg_type (vr->type) |
| && gimple_assign_single_p (def_stmt) |
| && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR |
| && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0) |
| { |
| tree base2; |
| HOST_WIDE_INT offset2, size2, maxsize2; |
| base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt), |
| &offset2, &size2, &maxsize2); |
| if (maxsize2 != -1 |
| && operand_equal_p (base, base2, 0) |
| && offset2 <= offset |
| && offset2 + size2 >= offset + maxsize) |
| { |
| tree val = build_zero_cst (vr->type); |
| return vn_reference_lookup_or_insert_for_pieces |
| (vuse, vr->set, vr->type, vr->operands, val); |
| } |
| } |
| |
| /* 3) Assignment from a constant. We can use fold's native encode/interpret |
| routines to extract the assigned bits. */ |
| else if (vn_walk_kind == VN_WALKREWRITE |
| && CHAR_BIT == 8 && BITS_PER_UNIT == 8 |
| && ref->size == maxsize |
| && maxsize % BITS_PER_UNIT == 0 |
| && offset % BITS_PER_UNIT == 0 |
| && is_gimple_reg_type (vr->type) |
| && gimple_assign_single_p (def_stmt) |
| && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))) |
| { |
| tree base2; |
| HOST_WIDE_INT offset2, size2, maxsize2; |
| base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt), |
| &offset2, &size2, &maxsize2); |
| if (maxsize2 != -1 |
| && maxsize2 == size2 |
| && size2 % BITS_PER_UNIT == 0 |
| && offset2 % BITS_PER_UNIT == 0 |
| && operand_equal_p (base, base2, 0) |
| && offset2 <= offset |
| && offset2 + size2 >= offset + maxsize) |
| { |
| /* We support up to 512-bit values (for V8DFmode). */ |
| unsigned char buffer[64]; |
| int len; |
| |
| len = native_encode_expr (gimple_assign_rhs1 (def_stmt), |
| buffer, sizeof (buffer)); |
| if (len > 0) |
| { |
| tree val = native_interpret_expr (vr->type, |
| buffer |
| + ((offset - offset2) |
| / BITS_PER_UNIT), |
| ref->size / BITS_PER_UNIT); |
| if (val) |
| return vn_reference_lookup_or_insert_for_pieces |
| (vuse, vr->set, vr->type, vr->operands, val); |
| } |
| } |
| } |
| |
| /* 4) Assignment from an SSA name whose definition we may be able |
| to access pieces from. */ |
| else if (ref->size == maxsize |
| && is_gimple_reg_type (vr->type) |
| && gimple_assign_single_p (def_stmt) |
| && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME) |
| { |
| tree rhs1 = gimple_assign_rhs1 (def_stmt); |
| gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1); |
| if (is_gimple_assign (def_stmt2) |
| && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR |
| || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR) |
| && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1)))) |
| { |
| tree base2; |
| HOST_WIDE_INT offset2, size2, maxsize2, off; |
| base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt), |
| &offset2, &size2, &maxsize2); |
| off = offset - offset2; |
| if (maxsize2 != -1 |
| && maxsize2 == size2 |
| && operand_equal_p (base, base2, 0) |
| && offset2 <= offset |
| && offset2 + size2 >= offset + maxsize) |
| { |
| tree val = NULL_TREE; |
| HOST_WIDE_INT elsz |
| = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1)))); |
| if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR) |
| { |
| if (off == 0) |
| val = gimple_assign_rhs1 (def_stmt2); |
| else if (off == elsz) |
| val = gimple_assign_rhs2 (def_stmt2); |
| } |
| else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR |
| && off % elsz == 0) |
| { |
| tree ctor = gimple_assign_rhs1 (def_stmt2); |
| unsigned i = off / elsz; |
| if (i < CONSTRUCTOR_NELTS (ctor)) |
| { |
| constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i); |
| if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE) |
| { |
| if (TREE_CODE (TREE_TYPE (elt->value)) |
| != VECTOR_TYPE) |
| val = elt->value; |
| } |
| } |
| } |
| if (val) |
| return vn_reference_lookup_or_insert_for_pieces |
| (vuse, vr->set, vr->type, vr->operands, val); |
| } |
| } |
| } |
| |
| /* 5) For aggregate copies translate the reference through them if |
| the copy kills ref. */ |
| else if (vn_walk_kind == VN_WALKREWRITE |
| && gimple_assign_single_p (def_stmt) |
| && (DECL_P (gimple_assign_rhs1 (def_stmt)) |
| || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF |
| || handled_component_p (gimple_assign_rhs1 (def_stmt)))) |
| { |
| tree base2; |
| HOST_WIDE_INT offset2, size2, maxsize2; |
| int i, j; |
| auto_vec<vn_reference_op_s> rhs; |
| vn_reference_op_t vro; |
| ao_ref r; |
| |
| if (!lhs_ref_ok) |
| return (void *)-1; |
| |
| /* See if the assignment kills REF. */ |
| base2 = ao_ref_base (&lhs_ref); |
| offset2 = lhs_ref.offset; |
| size2 = lhs_ref.size; |
| maxsize2 = lhs_ref.max_size; |
| if (maxsize2 == -1 |
| || (base != base2 && !operand_equal_p (base, base2, 0)) |
| || offset2 > offset |
| || offset2 + size2 < offset + maxsize) |
| return (void *)-1; |
| |
| /* Find the common base of ref and the lhs. lhs_ops already |
| contains valueized operands for the lhs. */ |
| i = vr->operands.length () - 1; |
| j = lhs_ops.length () - 1; |
| while (j >= 0 && i >= 0 |
| && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j])) |
| { |
| i--; |
| j--; |
| } |
| |
| /* ??? The innermost op should always be a MEM_REF and we already |
| checked that the assignment to the lhs kills vr. Thus for |
| aggregate copies using char[] types the vn_reference_op_eq |
| may fail when comparing types for compatibility. But we really |
| don't care here - further lookups with the rewritten operands |
| will simply fail if we messed up types too badly. */ |
| HOST_WIDE_INT extra_off = 0; |
| if (j == 0 && i >= 0 |
| && lhs_ops[0].opcode == MEM_REF |
| && lhs_ops[0].off != -1) |
| { |
| if (lhs_ops[0].off == vr->operands[i].off) |
| i--, j--; |
| else if (vr->operands[i].opcode == MEM_REF |
| && vr->operands[i].off != -1) |
| { |
| extra_off = vr->operands[i].off - lhs_ops[0].off; |
| i--, j--; |
| } |
| } |
| |
| /* i now points to the first additional op. |
| ??? LHS may not be completely contained in VR, one or more |
| VIEW_CONVERT_EXPRs could be in its way. We could at least |
| try handling outermost VIEW_CONVERT_EXPRs. */ |
| if (j != -1) |
| return (void *)-1; |
| |
| /* Now re-write REF to be based on the rhs of the assignment. */ |
| copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs); |
| |
| /* Apply an extra offset to the inner MEM_REF of the RHS. */ |
| if (extra_off != 0) |
| { |
| if (rhs.length () < 2 |
| || rhs[0].opcode != MEM_REF |
| || rhs[0].off == -1) |
| return (void *)-1; |
| rhs[0].off += extra_off; |
| rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0, |
| build_int_cst (TREE_TYPE (rhs[0].op0), |
| extra_off)); |
| } |
| |
| /* We need to prepend vr->operands[0..i] to rhs. */ |
| vec<vn_reference_op_s> old = vr->operands; |
| if (i + 1 + rhs.length () > vr->operands.length ()) |
| { |
| vr->operands.safe_grow (i + 1 + rhs.length ()); |
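| /* Growing may have reallocated the vector; if it was the shared |
| lookup vector, keep that pointing at the new storage. */ |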
| if (old == shared_lookup_references) |
| shared_lookup_references = vr->operands; |
| } |
| else |
| vr->operands.truncate (i + 1 + rhs.length ()); |
| FOR_EACH_VEC_ELT (rhs, j, vro) |
| vr->operands[i + 1 + j] = *vro; |
| vr->operands = valueize_refs (vr->operands); |
| if (old == shared_lookup_references) |
| shared_lookup_references = vr->operands; |
| vr->hashcode = vn_reference_compute_hash (vr); |
| |
| /* Try folding the new reference to a constant. */ |
| tree val = fully_constant_vn_reference_p (vr); |
| if (val) |
| return vn_reference_lookup_or_insert_for_pieces |
| (vuse, vr->set, vr->type, vr->operands, val); |
| |
| /* Adjust *ref from the new operands. */ |
| if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands)) |
| return (void *)-1; |
| /* This can happen with bitfields. */ |
| if (ref->size != r.size) |
| return (void *)-1; |
| *ref = r; |
| |
| /* Do not update last seen VUSE after translating. */ |
| last_vuse_ptr = NULL; |
| |
| /* Keep looking for the adjusted *REF / VR pair. */ |
| return NULL; |
| } |
| |
| /* 6) For memcpy copies translate the reference through them if |
| the copy kills ref. */ |
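| /* For example a load from d.x following |
| memcpy (&d, &s, sizeof (d)); |
| continues as a lookup of the corresponding bytes of s. */ |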
| else if (vn_walk_kind == VN_WALKREWRITE |
| && is_gimple_reg_type (vr->type) |
| /* ??? Handle BCOPY as well. */ |
| && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY) |
| || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY) |
| || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE)) |
| && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR |
| || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME) |
| && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR |
| || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME) |
| && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))) |
| { |
| tree lhs, rhs; |
| ao_ref r; |
| HOST_WIDE_INT rhs_offset, copy_size, lhs_offset; |
| vn_reference_op_s op; |
| HOST_WIDE_INT at; |
| |
| |
| /* Only handle non-variable, addressable refs. */ |
| if (ref->size != maxsize |
| || offset % BITS_PER_UNIT != 0 |
| || ref->size % BITS_PER_UNIT != 0) |
| return (void *)-1; |
| |
| /* Extract a pointer base and an offset for the destination. */ |
| lhs = gimple_call_arg (def_stmt, 0); |
| lhs_offset = 0; |
| if (TREE_CODE (lhs) == SSA_NAME) |
| lhs = SSA_VAL (lhs); |
| if (TREE_CODE (lhs) == ADDR_EXPR) |
| { |
| tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0), |
| &lhs_offset); |
| if (!tem) |
| return (void *)-1; |
| if (TREE_CODE (tem) == MEM_REF |
| && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))) |
| { |
| lhs = TREE_OPERAND (tem, 0); |
| lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1)); |
| } |
| else if (DECL_P (tem)) |
| lhs = build_fold_addr_expr (tem); |
| else |
| return (void *)-1; |
| } |
| if (TREE_CODE (lhs) != SSA_NAME |
| && TREE_CODE (lhs) != ADDR_EXPR) |
| return (void *)-1; |
| |
| /* Extract a pointer base and an offset for the source. */ |
| rhs = gimple_call_arg (def_stmt, 1); |
| rhs_offset = 0; |
| if (TREE_CODE (rhs) == SSA_NAME) |
| rhs = SSA_VAL (rhs); |
| if (TREE_CODE (rhs) == ADDR_EXPR) |
| { |
| tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0), |
| &rhs_offset); |
| if (!tem) |
| return (void *)-1; |
| if (TREE_CODE (tem) == MEM_REF |
| && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))) |
| { |
| rhs = TREE_OPERAND (tem, 0); |
| rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1)); |
| } |
| else if (DECL_P (tem)) |
| rhs = build_fold_addr_expr (tem); |
| else |
| return (void *)-1; |
| } |
| if (TREE_CODE (rhs) != SSA_NAME |
| && TREE_CODE (rhs) != ADDR_EXPR) |
| return (void *)-1; |
| |
| copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2)); |
| |
| /* The bases of the destination and the reference have to agree. */ |
| if ((TREE_CODE (base) != MEM_REF |
| && !DECL_P (base)) |
| || (TREE_CODE (base) == MEM_REF |
| && (TREE_OPERAND (base, 0) != lhs |
| || !tree_fits_uhwi_p (TREE_OPERAND (base, 1)))) |
| || (DECL_P (base) |
| && (TREE_CODE (lhs) != ADDR_EXPR |
| || TREE_OPERAND (lhs, 0) != base))) |
| return (void *)-1; |
| |
| /* And the access has to be contained within the memcpy destination. */ |
| at = offset / BITS_PER_UNIT; |
| if (TREE_CODE (base) == MEM_REF) |
| at += tree_to_uhwi (TREE_OPERAND (base, 1)); |
| if (lhs_offset > at |
| || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT) |
| return (void *)-1; |
| |
| /* Make room for 2 operands in the new reference. */ |
| if (vr->operands.length () < 2) |
| { |
| vec<vn_reference_op_s> old = vr->operands; |
| vr->operands.safe_grow_cleared (2); |
| if (old == shared_lookup_references |
| && vr->operands != old) |
| shared_lookup_references = vr->operands; |
| } |
| else |
| vr->operands.truncate (2); |
| |
| /* The looked-through reference is a simple MEM_REF. */ |
| memset (&op, 0, sizeof (op)); |
| op.type = vr->type; |
| op.opcode = MEM_REF; |
| op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset); |
| op.off = at - lhs_offset + rhs_offset; |
| vr->operands[0] = op; |
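| /* Its base is the valueized source pointer of the copy, either |
| an SSA_NAME or an ADDR_EXPR. */ |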
| op.type = TREE_TYPE (rhs); |
| op.opcode = TREE_CODE (rhs); |
| op.op0 = rhs; |
| op.off = -1; |
| vr->operands[1] = op; |
| vr->hashcode = vn_reference_compute_hash (vr); |
| |
| /* Adjust *ref from the new operands. */ |
| if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands)) |
| return (void *)-1; |
| /* This can happen with bitfields. */ |
| if (ref->size != r.size) |
| return (void *)-1; |
| *ref = r; |
| |
| /* Do not update last seen VUSE after translating. */ |
| last_vuse_ptr = NULL; |
| |
| /* Keep looking for the adjusted *REF / VR pair. */ |
| return NULL; |
| } |
| |
| /* Bail out and stop walking. */ |
| return (void *)-1; |
| } |
| |
| /* Lookup a reference operation by its parts in the current hash table. |
| Returns the resulting value number if it exists in the hash table, |
| NULL_TREE otherwise. VNRESULT will be filled in with the actual |
| vn_reference_t stored in the hashtable if something is found. */ |
| |
| tree |
| vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type, |
| vec<vn_reference_op_s> operands, |
| vn_reference_t *vnresult, vn_lookup_kind kind) |
| { |
| struct vn_reference_s vr1; |
| vn_reference_t tmp; |
| tree cst; |
| |
| if (!vnresult) |
| vnresult = &tmp; |
| *vnresult = NULL; |
| |
| vr1.vuse = vuse_ssa_val (vuse); |
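| /* Work on a copy of the operands in the shared lookup vector so |
| that valueizing them does not modify the caller's OPERANDS. */ |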
| shared_lookup_references.truncate (0); |
| shared_lookup_references.safe_grow (operands.length ()); |
| memcpy (shared_lookup_references.address (), |
| operands.address (), |
| sizeof (vn_reference_op_s) |
| * operands.length ()); |
| vr1.operands = operands = shared_lookup_references |
| = valueize_refs (shared_lookup_references); |
| vr1.type = type; |
| vr1.set = set; |
| vr1.hashcode = vn_reference_compute_hash (&vr1); |
| if ((cst = fully_constant_vn_reference_p (&vr1))) |
| return cst; |
| |
| vn_reference_lookup_1 (&vr1, vnresult); |
| if (!*vnresult |
| && kind != VN_NOWALK |
| && vr1.vuse) |
| { |
| ao_ref r; |
| vn_walk_kind = kind; |
| if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands)) |
| *vnresult = |
| (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse, |
| vn_reference_lookup_2, |
| vn_reference_lookup_3, |
| vuse_ssa_val, &vr1); |
| gcc_checking_assert (vr1.operands == shared_lookup_references); |
| } |
| |
| if (*vnresult) |
| return (*vnresult)->result; |
| |
| return NULL_TREE; |
| } |
| |
| /* Lookup OP in the current hash table, and return the resulting value |
| number if it exists in the hash table. Return NULL_TREE if it does |
| not exist in the hash table or if the result field of the structure |
| was NULL. VNRESULT will be filled in with the vn_reference_t |
| stored in the hashtable if one exists. When TBAA_P is false assume |
| we are looking up a store and treat it as having alias-set zero. */ |
| |
| tree |
| vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind, |
| vn_reference_t *vnresult, bool tbaa_p) |
| { |
| vec<vn_reference_op_s> operands; |
| struct vn_reference_s vr1; |
| tree cst; |
| bool valueized_anything; |
| |
| if (vnresult) |
| *vnresult = NULL; |
| |
| vr1.vuse = vuse_ssa_val (vuse); |
| vr1.operands = operands |
| = valueize_shared_reference_ops_from_ref (op, &valueized_anything); |
| vr1.type = TREE_TYPE (op); |
| vr1.set = tbaa_p ? get_alias_set (op) : 0; |
| vr1.hashcode = vn_reference_compute_hash (&vr1); |
| if ((cst = fully_constant_vn_reference_p (&vr1))) |
| return cst; |
| |
| if (kind != VN_NOWALK |
| && vr1.vuse) |
| { |
| vn_reference_t wvnresult; |
| ao_ref r; |
| /* Make sure to use a valueized reference if we valueized anything. |
| Otherwise preserve the full reference for advanced TBAA. */ |
| if (!valueized_anything |
| || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type, |
| vr1.operands)) |
| ao_ref_init (&r, op); |
| if (! tbaa_p) |
| r.ref_alias_set = r.base_alias_set = 0; |
| vn_walk_kind = kind; |
| wvnresult = |
| (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse, |
| vn_reference_lookup_2, |
| vn_reference_lookup_3, |
| vuse_ssa_val, &vr1); |
| gcc_checking_assert (vr1.operands == shared_lookup_references); |
| if (wvnresult) |
| { |
| if (vnresult) |
| *vnresult = wvnresult; |
| return wvnresult->result; |
| } |
| |
| return NULL_TREE; |
| } |
| |
| return vn_reference_lookup_1 (&vr1, vnresult); |
| } |
| |
| /* Lookup CALL in the current hash table and return the entry in |
| *VNRESULT if found. Populates *VR for the hashtable lookup. */ |
| |
| void |
| vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult, |
| vn_reference_t vr) |
| { |
| if (vnresult) |
| *vnresult = NULL; |
| |
| tree vuse = gimple_vuse (call); |
| |
| vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE; |
| vr->operands = valueize_shared_reference_ops_from_call (call); |
| vr->type = gimple_expr_type (call); |
| vr->set = 0; |
| vr->hashcode = vn_reference_compute_hash (vr); |
| vn_reference_lookup_1 (vr, vnresult); |
| } |
| |
| /* Insert OP into the current hash table with a value number of |
| RESULT, and return the resulting reference structure we created. */ |
| |
| static vn_reference_t |
| vn_reference_insert (tree op, tree result, tree vuse, tree vdef) |
| { |
| vn_reference_s **slot; |
| vn_reference_t vr1; |
| bool tem; |
| |
| vr1 = (vn_reference_t) pool_alloc (current_info->references_pool); |
| if (TREE_CODE (result) == SSA_NAME) |
| vr1->value_id = VN_INFO (result)->value_id; |
| else |
| vr1->value_id = get_or_alloc_constant_value_id (result); |
| vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE; |
| vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy (); |
| vr1->type = TREE_TYPE (op); |
| vr1->set = get_alias_set (op); |
| vr1->hashcode = vn_reference_compute_hash (vr1); |
| vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result; |
| vr1->result_vdef = vdef; |
| |
| slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode, |
| INSERT); |
| |
| /* Because we lookup stores using vuses, and value number failures |
| using the vdefs (see visit_reference_op_store for how and why), |
| it's possible that on failure we may try to insert an already |
| inserted store. This is not wrong, there is no ssa name for a |
| store that we could use as a differentiator anyway. Thus, unlike |
| the other lookup functions, you cannot gcc_assert (!*slot) |
| here. */ |
| |
| /* But free the old slot in case of a collision. */ |
| if (*slot) |
| free_reference (*slot); |
| |
| *slot = vr1; |
| return vr1; |
| } |
| |
| /* Insert a reference by its pieces into the current hash table with |
| a value number of RESULT. Return the resulting reference |
| structure we created. */ |
| |
| vn_reference_t |
| vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type, |
| vec<vn_reference_op_s> operands, |
| tree result, unsigned int value_id) |
| |
| { |
| vn_reference_s **slot; |
| vn_reference_t vr1; |
| |
| vr1 = (vn_reference_t) pool_alloc (current_info->references_pool); |
| vr1->value_id = value_id; |
| vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE; |
| vr1->operands = valueize_refs (operands); |
| vr1->type = type; |
| vr1->set = set; |
| vr1->hashcode = vn_reference_compute_hash (vr1); |
| if (result && TREE_CODE (result) == SSA_NAME) |
| result = SSA_VAL (result); |
| vr1->result = result; |
| |
| slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode, |
| INSERT); |
| |
| /* At this point we should have all the things inserted that we have |
| seen before, and we should never try inserting something that |
| already exists. */ |
| gcc_assert (!*slot); |
| if (*slot) |
| free_reference (*slot); |
| |
| *slot = vr1; |
| return vr1; |
| } |
| |
| /* Compute and return the hash value for nary operation VNO1. */ |
| |
| static hashval_t |
| vn_nary_op_compute_hash (const vn_nary_op_t vno1) |
| { |
| inchash::hash hstate; |
| unsigned i; |
| |
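| /* Valueize SSA name operands first so that operands with the same |
| value number hash identically. */ |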
| for (i = 0; i < vno1->length; ++i) |
| if (TREE_CODE (vno1->op[i]) == SSA_NAME) |
| vno1->op[i] = SSA_VAL (vno1->op[i]); |
| |
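| /* Canonicalize the operand order of commutative operations so |
| that swapped operands do not change the hash. */ |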
| if (vno1->length == 2 |
| && commutative_tree_code (vno1->opcode) |
| && tree_swap_operands_p (vno1->op[0], vno1->op[1], false)) |
| { |
| tree temp = vno1->op[0]; |
| vno1->op[0] = vno1->op[1]; |
| vno1->op[1] = temp; |
| } |
| |
| hstate.add_int (vno1->opcode); |
| for (i = 0; i < vno1->length; ++i) |
| inchash::add_expr (vno1->op[i], hstate); |
| |
| return hstate.end (); |
| } |
| |
| /* Compare nary operations VNO1 and VNO2 and return true if they are |
| equivalent. */ |
| |
| bool |
| vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2) |
| { |
| unsigned i; |
| |
| if (vno1->hashcode != vno2->hashcode) |
| return false; |
| |
| if (vno1->length != vno2->length) |
| return false; |
| |
| if (vno1->opcode != vno2->opcode |
| || !types_compatible_p (vno1->type, vno2->type)) |
| return false; |
| |
| for (i = 0; i < vno1->length; ++i) |
| if (!expressions_equal_p (vno1->op[i], vno2->op[i])) |
| return false; |
| |
| return true; |
| } |
| |
| /* Initialize VNO from the pieces provided. */ |
| |
| static void |
| init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length, |
| enum tree_code code, tree type, tree *ops) |
| { |
| vno->opcode = code; |
| vno->length = length; |
| vno->type = type; |
| memcpy (&vno->op[0], ops, sizeof (tree) * length); |
| } |
| |
| /* Initialize VNO from OP. */ |
| |
| static void |
| init_vn_nary_op_from_op (vn_nary_op_t vno, tree op) |
| { |
| unsigned i; |
| |
| vno->opcode = TREE_CODE (op); |
| vno->length = TREE_CODE_LENGTH (TREE_CODE (op)); |
| vno->type = TREE_TYPE (op); |
| for (i = 0; i < vno->length; ++i) |
| vno->op[i] = TREE_OPERAND (op, i); |
| } |
| |
| /* Return the number of operands for a vn_nary ops structure from STMT. */ |
| |
| static unsigned int |
| vn_nary_length_from_stmt (gimple stmt) |
| { |
| switch (gimple_assign_rhs_code (stmt)) |
| { |
| case REALPART_EXPR: |
| case IMAGPART_EXPR: |
| case VIEW_CONVERT_EXPR: |
| return 1; |
| |
| case BIT_FIELD_REF: |
| return 3; |
| |
| case CONSTRUCTOR: |
| return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt)); |
| |
| default: |
| return gimple_num_ops (stmt) - 1; |
| } |
| } |
| |
| /* Initialize VNO from STMT. */ |
| |
| static void |
| init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt) |
| { |
| unsigned i; |
| |
| vno->opcode = gimple_assign_rhs_code (stmt); |
| vno->type = gimple_expr_type (stmt); |
| switch (vno->opcode) |
| { |
| case REALPART_EXPR: |
| case IMAGPART_EXPR: |
| case VIEW_CONVERT_EXPR: |
| vno->length = 1; |
| vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0); |
| break; |
| |
| case BIT_FIELD_REF: |
| vno->length = 3; |
| vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0); |
| vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1); |
| vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2); |
| break; |
| |
| case CONSTRUCTOR: |
| vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt)); |
| for (i = 0; i < vno->length; ++i) |
| vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value; |
| break; |
| |
| default: |
| gcc_checking_assert (!gimple_assign_single_p (stmt)); |
| vno->length = gimple_num_ops (stmt) - 1; |
| for (i = 0; i < vno->length; ++i) |
| vno->op[i] = gimple_op (stmt, i + 1); |
| } |
| } |
| |
| /* Compute the hashcode for VNO and look for it in the hash table; |
| return the resulting value number if it exists in the hash table. |
| Return NULL_TREE if it does not exist in the hash table or if the |
| result field of the operation is NULL. VNRESULT will contain the |
| vn_nary_op_t from the hashtable if it exists. */ |
| |
| static tree |
| vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult) |
| { |
| vn_nary_op_s **slot; |
| |
| if (vnresult) |
| *vnresult = NULL; |
| |
| vno->hashcode = vn_nary_op_compute_hash (vno); |
| slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode, |
| NO_INSERT); |
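| /* When using the optimistic table fall back to the valid table, |
| which holds the results of already finished SCCs. */ |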
| if (!slot && current_info == optimistic_info) |
| slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, |
| NO_INSERT); |
| if (!slot) |
| return NULL_TREE; |
| if (vnresult) |
| *vnresult = *slot; |
| return (*slot)->result; |
| } |
| |
| /* Lookup an n-ary operation by its pieces and return the resulting value |
| number if it exists in the hash table. Return NULL_TREE if it does |
| not exist in the hash table or if the result field of the operation |
| is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable |
| if it exists. */ |
| |
| tree |
| vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code, |
| tree type, tree *ops, vn_nary_op_t *vnresult) |
| { |
| vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s, |
| sizeof_vn_nary_op (length)); |
| init_vn_nary_op_from_pieces (vno1, length, code, type, ops); |
| return vn_nary_op_lookup_1 (vno1, vnresult); |
| } |
| |
| /* Lookup OP in the current hash table, and return the resulting value |
| number if it exists in the hash table. Return NULL_TREE if it does |
| not exist in the hash table or if the result field of the operation |
| is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable |
| if it exists. */ |
| |
| tree |
| vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult) |
| { |
| vn_nary_op_t vno1 |
| = XALLOCAVAR (struct vn_nary_op_s, |
| sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op)))); |
| init_vn_nary_op_from_op (vno1, op); |
| return vn_nary_op_lookup_1 (vno1, vnresult); |
| } |
| |
| /* Lookup the rhs of STMT in the current hash table, and return the resulting |
| value number if it exists in the hash table. Return NULL_TREE if |
| it does not exist in the hash table. VNRESULT will contain the |
| vn_nary_op_t from the hashtable if it exists. */ |
| |
| tree |
| vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult) |
| { |
| vn_nary_op_t vno1 |
| = XALLOCAVAR (struct vn_nary_op_s, |
| sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt))); |
| init_vn_nary_op_from_stmt (vno1, stmt); |
| return vn_nary_op_lookup_1 (vno1, vnresult); |
| } |
| |
| /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */ |
| |
| static vn_nary_op_t |
| alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack) |
| { |
| return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length)); |
| } |
| |
| /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's |
| obstack. */ |
| |
| static vn_nary_op_t |
| alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id) |
| { |
| vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, |
| &current_info->nary_obstack); |
| |
| vno1->value_id = value_id; |
| vno1->length = length; |
| vno1->result = result; |
| |
| return vno1; |
| } |
| |
| /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute |
| VNO->HASHCODE first. */ |
| |
| static vn_nary_op_t |
| vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table, |
| bool compute_hash) |
| { |
| vn_nary_op_s **slot; |
| |
| if (compute_hash) |
| vno->hashcode = vn_nary_op_compute_hash (vno); |
| |
| slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT); |
| gcc_assert (!*slot); |
| |
| *slot = vno; |
| return vno; |
| } |
| |
| /* Insert an n-ary operation into the current hash table using its |
| pieces. Return the vn_nary_op_t structure we created and put in |
| the hashtable. */ |
| |
| vn_nary_op_t |
| vn_nary_op_insert_pieces (unsigned int length, enum tree_code code, |
| tree type, tree *ops, |
| tree result, unsigned int value_id) |
| { |
| vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id); |
| init_vn_nary_op_from_pieces (vno1, length, code, type, ops); |
| return vn_nary_op_insert_into (vno1, current_info->nary, true); |
| } |
| |
| /* Insert OP into the current hash table with a value number of |
| RESULT. Return the vn_nary_op_t structure we created and put in |
| the hashtable. */ |
| |
| vn_nary_op_t |
| vn_nary_op_insert (tree op, tree result) |
| { |
| unsigned length = TREE_CODE_LENGTH (TREE_CODE (op)); |
| vn_nary_op_t vno1; |
| |
| vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id); |
| init_vn_nary_op_from_op (vno1, op); |
| return vn_nary_op_insert_into (vno1, current_info->nary, true); |
| } |
| |
| /* Insert the rhs of STMT into the current hash table with a value number of |
| RESULT. */ |
| |
| vn_nary_op_t |
| vn_nary_op_insert_stmt (gimple stmt, tree result) |
| { |
| vn_nary_op_t vno1 |
| = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt), |
| result, VN_INFO (result)->value_id); |
| init_vn_nary_op_from_stmt (vno1, stmt); |
| return vn_nary_op_insert_into (vno1, current_info->nary, true); |
| } |
| |
| /* Compute a hashcode for PHI operation VP1 and return it. */ |
| |
| static inline hashval_t |
| vn_phi_compute_hash (vn_phi_t vp1) |
| { |
| inchash::hash hstate (vp1->block->index); |
| int i; |
| tree phi1op; |
| tree type; |
| |
| /* If all PHI arguments are constants we need to distinguish |
| the PHI node via its type. */ |
| type = vp1->type; |
| hstate.merge_hash (vn_hash_type (type)); |
| |
| FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op) |
| { |
| if (phi1op == VN_TOP) |
| continue; |
| inchash::add_expr (phi1op, hstate); |
| } |
| |
| return hstate.end (); |
| } |
| |
| /* Compare two phi entries for equality, ignoring VN_TOP arguments. */ |
| |
| static int |
| vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2) |
| { |
| if (vp1->hashcode != vp2->hashcode) |
| return false; |
| |
| if (vp1->block == vp2->block) |
| { |
| int i; |
| tree phi1op; |
| |
| /* If the PHI nodes do not have compatible types |
| they are not the same. */ |
| if (!types_compatible_p (vp1->type, vp2->type)) |
| return false; |
| |
| /* Any phi in the same block will have its arguments in the |
| same edge order, because of how we store phi nodes. */ |
| FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op) |
| { |
| tree phi2op = vp2->phiargs[i]; |
| if (phi1op == VN_TOP || phi2op == VN_TOP) |
| continue; |
| if (!expressions_equal_p (phi1op, phi2op)) |
| return false; |
| } |
| return true; |
| } |
| return false; |
| } |
| |
| static vec<tree> shared_lookup_phiargs; |
| |
| /* Lookup PHI in the current hash table, and return the resulting |
| value number if it exists in the hash table. Return NULL_TREE if |
| it does not exist in the hash table. */ |
| |
| static tree |
| vn_phi_lookup (gimple phi) |
| { |
| vn_phi_s **slot; |
| struct vn_phi_s vp1; |
| unsigned i; |
| |
| shared_lookup_phiargs.truncate (0); |
| |
| /* Canonicalize the SSA_NAME's to their value number. */ |
| for (i = 0; i < gimple_phi_num_args (phi); i++) |
| { |
| tree def = PHI_ARG_DEF (phi, i); |
| def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def; |
| shared_lookup_phiargs.safe_push (def); |
| } |
| vp1.type = TREE_TYPE (gimple_phi_result (phi)); |
| vp1.phiargs = shared_lookup_phiargs; |
| vp1.block = gimple_bb (phi); |
| vp1.hashcode = vn_phi_compute_hash (&vp1); |
| slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode, |
| NO_INSERT); |
| if (!slot && current_info == optimistic_info) |
| slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode, |
| NO_INSERT); |
| if (!slot) |
| return NULL_TREE; |
| return (*slot)->result; |
| } |
| |
| /* Insert PHI into the current hash table with a value number of |
| RESULT. */ |
| |
| static vn_phi_t |
| vn_phi_insert (gimple phi, tree result) |
| { |
| vn_phi_s **slot; |
| vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool); |
| unsigned i; |
| vec<tree> args = vNULL; |
| |
| /* Canonicalize the SSA_NAME's to their value number. */ |
| for (i = 0; i < gimple_phi_num_args (phi); i++) |
| { |
| tree def = PHI_ARG_DEF (phi, i); |
| def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def; |
| args.safe_push (def); |
| } |
| vp1->value_id = VN_INFO (result)->value_id; |
| vp1->type = TREE_TYPE (gimple_phi_result (phi)); |
| vp1->phiargs = args; |
| vp1->block = gimple_bb (phi); |
| vp1->result = result; |
| vp1->hashcode = vn_phi_compute_hash (vp1); |
| |
| slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT); |
| |
| /* Because we iterate over phi operations more than once, it's |
| possible the slot might already exist here, hence no assert. */ |
| *slot = vp1; |
| return vp1; |
| } |
| |
| |
| /* Print set of components in strongly connected component SCC to OUT. */ |
| |
| static void |
| print_scc (FILE *out, vec<tree> scc) |
| { |
| tree var; |
| unsigned int i; |
| |
| fprintf (out, "SCC consists of:"); |
| FOR_EACH_VEC_ELT (scc, i, var) |
| { |
| fprintf (out, " "); |
| print_generic_expr (out, var, 0); |
| } |
| fprintf (out, "\n"); |
| } |
| |
| /* Set the value number of FROM to TO, return true if it has changed |
| as a result. */ |
| |
| static inline bool |
| set_ssa_val_to (tree from, tree to) |
| { |
| tree currval = SSA_VAL (from); |
| HOST_WIDE_INT toff, coff; |
| |
| /* The only thing we allow as value numbers are ssa_names |
| and invariants. So assert that here. We don't allow VN_TOP |
| as visiting a stmt should produce a value-number other than |
| that. |
| ??? Still VN_TOP can happen for unreachable code, so force |
| it to varying in that case. Not all code is prepared to |
| get VN_TOP on valueization. */ |
| if (to == VN_TOP) |
| { |
| if (dump_file && (dump_flags & TDF_DETAILS)) |
| fprintf (dump_file, "Forcing value number to varying on " |
| "receiving VN_TOP\n"); |
| to = from; |
| } |
| |
| gcc_assert (to != NULL_TREE |
| && ((TREE_CODE (to) == SSA_NAME |
| && (to == from || SSA_VAL (to) == to)) |
| || is_gimple_min_invariant (to))); |
| |
| if (from != to) |
| { |
| if (currval == from) |
| { |
| if (dump_file && (dump_flags & TDF_DETAILS)) |
| { |
| fprintf (dump_file, "Not changing value number of "); |
| print_generic_expr (dump_file, from, 0); |
| fprintf (dump_file, " from VARYING to "); |
| print_generic_expr (dump_file, to, 0); |
| fprintf (dump_file, "\n"); |
| } |
| return false; |
| } |
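| /* Do not value-number to an SSA name that occurs in an abnormal |
| PHI; use FROM itself as the value number instead. */ |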
| else if (TREE_CODE (to) == SSA_NAME |
| && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to)) |
| to = from; |
| } |
| |
| if (dump_file && (dump_flags & TDF_DETAILS)) |
| { |
| fprintf (dump_file, "Setting value number of "); |
| print_generic_expr (dump_file, from, 0); |
| fprintf (dump_file, " to "); |
| print_generic_expr (dump_file, to, 0); |
| } |
| |
| if (currval != to |
| && !operand_equal_p (currval, to, 0) |
| /* ??? For addresses involving volatile objects or types operand_equal_p |
| does not reliably detect ADDR_EXPRs as equal. We know we are only |
| getting invariant gimple addresses here, so can use |
| get_addr_base_and_unit_offset to do this comparison. */ |
| && !(TREE_CODE (currval) == ADDR_EXPR |
| && TREE_CODE (to) == ADDR_EXPR |
| && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff) |
| == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff)) |
| && coff == toff)) |
| { |
| VN_INFO (from)->valnum = to; |
| if (dump_file && (dump_flags & TDF_DETAILS)) |
| fprintf (dump_file, " (changed)\n"); |
| return true; |
| } |
| if (dump_file && (dump_flags & TDF_DETAILS)) |
| fprintf (dump_file, "\n"); |
| return false; |
| } |
| |
| /* Mark as processed all the definitions in the defining stmt of USE, or |
| the USE itself. */ |
| |
| static void |
| mark_use_processed (tree use) |
| { |
| ssa_op_iter iter; |
| def_operand_p defp; |
| gimple stmt = SSA_NAME_DEF_STMT (use); |
| |
| if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI) |
| { |
| VN_INFO (use)->use_processed = true; |
| return; |
| } |
| |
| FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS) |
| { |
| tree def = DEF_FROM_PTR (defp); |
| |
| VN_INFO (def)->use_processed = true; |
| } |
| } |
| |
| /* Value number all definitions in STMT to themselves. |
| Return true if a value number changed. */ |
| |
| static bool |
| defs_to_varying (gimple stmt) |
| { |
| bool changed = false; |
| ssa_op_iter iter; |
| def_operand_p defp; |
| |
| FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS) |
| { |
| tree def = DEF_FROM_PTR (defp); |
| changed |= set_ssa_val_to (def, def); |
| } |
| return changed; |
| } |
| |
| static bool expr_has_constants (tree expr); |
| |
| /* Visit a copy between LHS and RHS, return true if the value number |
| changed. */ |
| |
| static bool |
| visit_copy (tree lhs, tree rhs) |
| { |
| /* The copied-from name may have a more interesting constant-filled |
| expression than we do (our RHS is just an SSA name), so inherit it. */ |
| VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants; |
| VN_INFO (lhs)->expr = VN_INFO (rhs)->expr; |
| |
| /* And finally valueize. */ |
| rhs = SSA_VAL (rhs); |
| |
| return set_ssa_val_to (lhs, rhs); |
| } |
| |
| /* Visit a nary operator RHS, value number it, and return true if the |
| value number of LHS has changed as a result. */ |
| |
| static bool |
| visit_nary_op (tree lhs, gimple stmt) |
| { |
| bool changed = false; |
| tree result = vn_nary_op_lookup_stmt (stmt, NULL); |
| |
| if (result) |
| changed = set_ssa_val_to (lhs, result); |
| else |
| { |
| changed = set_ssa_val_to (lhs, lhs); |
| vn_nary_op_insert_stmt (stmt, lhs); |
| } |
| |
| return changed; |
| } |
| |
| /* Visit a call STMT storing into LHS. Return true if the value number |
| of the LHS has changed as a result. */ |
| |
| static bool |
| visit_reference_op_call (tree lhs, gcall *stmt) |
| { |
| bool changed = false; |
| struct vn_reference_s vr1; |
| vn_reference_t vnresult = NULL; |
| tree vdef = gimple_vdef (stmt); |
| |
| /* Non-ssa lhs is handled in copy_reference_ops_from_call. */ |
| if (lhs && TREE_CODE (lhs) != SSA_NAME) |
| lhs = NULL_TREE; |
| |
| vn_reference_lookup_call (stmt, &vnresult, &vr1); |
| if (vnresult) |
| { |
| if (vnresult->result_vdef && vdef) |
| changed |= set_ssa_val_to (vdef, vnresult->result_vdef); |
| else if (vdef) |
| /* If the call was discovered to be pure or const reflect |
| that as far as possible. */ |
| changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt))); |
| |
| if (!vnresult->result && lhs) |
| vnresult->result = lhs; |
| |
| if (vnresult->result && lhs) |
| { |
| changed |= set_ssa_val_to (lhs, vnresult->result); |
| |
| if (VN_INFO (vnresult->result)->has_constants) |
| VN_INFO (lhs)->has_constants = true; |
| } |
| } |
| else |
| { |
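| /* Otherwise value number the defs to themselves and record the |
| call in the hashtable for later lookups. */ |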
| vn_reference_t vr2; |
| vn_reference_s **slot; |
| if (vdef) |
| changed |= set_ssa_val_to (vdef, vdef); |
| if (lhs) |
| changed |= set_ssa_val_to (lhs, lhs); |
| vr2 = (vn_reference_t) pool_alloc (current_info->references_pool); |
| vr2->vuse = vr1.vuse; |
| /* As we are not walking the virtual operand chain we know the |
| shared_lookup_references are still original so we can re-use |
| them here. */ |
| vr2->operands = vr1.operands.copy (); |
| vr2->type = vr1.type; |
| vr2->set = vr1.set; |
| vr2->hashcode = vr1.hashcode; |
| vr2->result = lhs; |
| vr2->result_vdef = vdef; |
| slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode, |
| INSERT); |
| gcc_assert (!*slot); |
| *slot = vr2; |
| } |
| |
| return changed; |
| } |
| |
| /* Visit a load from a reference operator RHS, part of STMT, value number it, |
| and return true if the value number of the LHS has changed as a result. */ |
| |
| static bool |
| visit_reference_op_load (tree lhs, tree op, gimple stmt) |
| { |
| bool changed = false; |
| tree last_vuse; |
| tree result; |
| |
| last_vuse = gimple_vuse (stmt); |
| last_vuse_ptr = &last_vuse; |
| result = vn_reference_lookup (op, gimple_vuse (stmt), |
| default_vn_walk_kind, NULL, true); |
| last_vuse_ptr = NULL; |
| |
| /* We handle type-punning through unions by value-numbering based |
| on offset and size of the access. Be prepared to handle a |
| type-mismatch here via creating a VIEW_CONVERT_EXPR. */ |
| if (result |
| && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op))) |
| { |
| /* We will be setting the value number of lhs to the value number |
| of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result). |
| So first simplify and lookup this expression to see if it |
| is already available. */ |
| tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result); |
| if ((CONVERT_EXPR_P (val) |
| || TREE_CODE (val) == VIEW_CONVERT_EXPR) |
| && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME) |
| { |
| tree tem = vn_get_expr_for (TREE_OPERAND (val, 0)); |
| if ((CONVERT_EXPR_P (tem) |
| || TREE_CODE (tem) == VIEW_CONVERT_EXPR) |
| && (tem = fold_unary_ignore_overflow (TREE_CODE (val), |
| TREE_TYPE (val), tem))) |
| val = tem; |
| } |
| result = val; |
| if (!is_gimple_min_invariant (val) |
| && TREE_CODE (val) != SSA_NAME) |
| result = vn_nary_op_lookup (val, NULL); |
| /* If the expression is not yet available, value-number lhs to |
| a new SSA_NAME we create. */ |
| if (!result) |
| { |
| result = make_temp_ssa_name (TREE_TYPE (lhs), gimple_build_nop (), |
| "vntemp"); |
| /* Initialize value-number information properly. */ |
| VN_INFO_GET (result)->valnum = result; |
| VN_INFO (result)->value_id = get_next_value_id (); |
| VN_INFO (result)->expr = val; |
| VN_INFO (result)->has_constants = expr_has_constants (val); |
| VN_INFO (result)->needs_insertion = true; |
| /* As all "inserted" statements are singleton SCCs, insert |
| to the valid table. This is strictly needed to |
| avoid re-generating new value SSA_NAMEs for the same |
| expression during SCC iteration over and over (the |
| optimistic table gets cleared after each iteration). |
| We do not need to insert into the optimistic table, as |
| lookups there will fall back to the valid table. */ |
| if (current_info == optimistic_info) |
| { |
| current_info = valid_info; |
| vn_nary_op_insert (val, result); |
| current_info = optimistic_info; |
| } |
| else |
| vn_nary_op_insert (val, result); |
| if (dump_file && (dump_flags & TDF_DETAILS)) |
| { |
| fprintf (dump_file, "Inserting name "); |
| print_generic_expr (dump_file, result, 0); |
| fprintf (dump_file, " for expression "); |
| print_generic_expr (dump_file, val, 0); |
| fprintf (dump_file, "\n"); |
| |