| /* Statement simplification on GIMPLE. |
| Copyright (C) 2010-2020 Free Software Foundation, Inc. |
| Split out from tree-ssa-ccp.c. |
| |
| This file is part of GCC. |
| |
| GCC is free software; you can redistribute it and/or modify it |
| under the terms of the GNU General Public License as published by the |
| Free Software Foundation; either version 3, or (at your option) any |
| later version. |
| |
| GCC is distributed in the hope that it will be useful, but WITHOUT |
| ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or |
| FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
| for more details. |
| |
| You should have received a copy of the GNU General Public License |
| along with GCC; see the file COPYING3. If not see |
| <http://www.gnu.org/licenses/>. */ |
| |
| #include "config.h" |
| #include "system.h" |
| #include "coretypes.h" |
| #include "backend.h" |
| #include "target.h" |
| #include "rtl.h" |
| #include "tree.h" |
| #include "gimple.h" |
| #include "predict.h" |
| #include "ssa.h" |
| #include "cgraph.h" |
| #include "gimple-pretty-print.h" |
| #include "gimple-ssa-warn-restrict.h" |
| #include "fold-const.h" |
| #include "stmt.h" |
| #include "expr.h" |
| #include "stor-layout.h" |
| #include "dumpfile.h" |
| #include "gimple-fold.h" |
| #include "gimplify.h" |
| #include "gimple-iterator.h" |
| #include "tree-into-ssa.h" |
| #include "tree-dfa.h" |
| #include "tree-object-size.h" |
| #include "tree-ssa.h" |
| #include "tree-ssa-propagate.h" |
| #include "ipa-utils.h" |
| #include "tree-ssa-address.h" |
| #include "langhooks.h" |
| #include "gimplify-me.h" |
| #include "dbgcnt.h" |
| #include "builtins.h" |
| #include "tree-eh.h" |
| #include "gimple-match.h" |
| #include "gomp-constants.h" |
| #include "optabs-query.h" |
| #include "omp-general.h" |
| #include "tree-cfg.h" |
| #include "fold-const-call.h" |
| #include "stringpool.h" |
| #include "attribs.h" |
| #include "asan.h" |
| #include "diagnostic-core.h" |
| #include "intl.h" |
| #include "calls.h" |
| #include "tree-vector-builder.h" |
| #include "tree-ssa-strlen.h" |
| #include "varasm.h" |
| |
| enum strlen_range_kind { |
| /* Compute the exact constant string length. */ |
| SRK_STRLEN, |
| /* Compute the maximum constant string length. */ |
| SRK_STRLENMAX, |
| /* Compute a range of string lengths bounded by object sizes. When |
| the length of a string cannot be determined, consider as the upper |
| bound the size of the enclosing object the string may be a member |
| or element of. Also determine the size of the largest character |
| array the string may refer to. */ |
| SRK_LENRANGE, |
| /* Determine the integer value of the argument (not string length). */ |
| SRK_INT_VALUE |
| }; |
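| /* An illustrative sketch of the difference between the kinds above: |
| for a 'char buf[8]' with unknown contents, SRK_STRLEN cannot compute |
| an exact length and fails, while SRK_LENRANGE falls back on the size |
| of the enclosing array and yields the range [0, 7] (7 being the |
| longest string that fits, leaving room for the terminating nul). */ |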
| |
| static bool |
| get_range_strlen (tree, bitmap *, strlen_range_kind, c_strlen_data *, unsigned); |
| |
| /* Return true when DECL can be referenced from the current unit. |
| FROM_DECL (if non-null) specifies the constructor of the variable that |
| DECL was taken from. |
| We can get declarations that are not possible to reference for various |
| reasons: |
| |
| 1) When analyzing C++ virtual tables. |
| C++ virtual tables do have known constructors even |
| when they are keyed to another compilation unit. |
| Those tables can contain pointers to methods and vars |
| in other units. Those methods have both STATIC and EXTERNAL |
| set. |
| 2) In WHOPR mode devirtualization might lead to a reference |
| to a method that was partitioned elsewhere. |
| In this case we have static VAR_DECL or FUNCTION_DECL |
| that has no corresponding callgraph/varpool node |
| declaring the body. |
| 3) COMDAT functions referred to by external vtables that |
| we devirtualize only during final compilation stage. |
| At this time we already decided that we will not output |
| the function body and thus we can't reference the symbol |
| directly. */ |
| |
| static bool |
| can_refer_decl_in_current_unit_p (tree decl, tree from_decl) |
| { |
| varpool_node *vnode; |
| struct cgraph_node *node; |
| symtab_node *snode; |
| |
| if (DECL_ABSTRACT_P (decl)) |
| return false; |
| |
| /* We are concerned only about static/external vars and functions. */ |
| if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl)) |
| || !VAR_OR_FUNCTION_DECL_P (decl)) |
| return true; |
| |
| /* Static objects can be referred only if they are defined and not optimized |
| out yet. */ |
| if (!TREE_PUBLIC (decl)) |
| { |
| if (DECL_EXTERNAL (decl)) |
| return false; |
| /* Before we start optimizing unreachable code we can be sure all |
| static objects are defined. */ |
| if (symtab->function_flags_ready) |
| return true; |
| snode = symtab_node::get (decl); |
| if (!snode || !snode->definition) |
| return false; |
| node = dyn_cast <cgraph_node *> (snode); |
| return !node || !node->inlined_to; |
| } |
| |
| /* We will later output the initializer, so we can refer to it. |
| So we are concerned only when DECL comes from the initializer of |
| an external var or a var that has been optimized out. */ |
| if (!from_decl |
| || !VAR_P (from_decl) |
| || (!DECL_EXTERNAL (from_decl) |
| && (vnode = varpool_node::get (from_decl)) != NULL |
| && vnode->definition) |
| || (flag_ltrans |
| && (vnode = varpool_node::get (from_decl)) != NULL |
| && vnode->in_other_partition)) |
| return true; |
| /* We are folding a reference from an external vtable. The vtable may refer |
| to a symbol keyed to another compilation unit. The other compilation |
| unit may be in a separate DSO and the symbol may be hidden. */ |
| if (DECL_VISIBILITY_SPECIFIED (decl) |
| && DECL_EXTERNAL (decl) |
| && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT |
| && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition)) |
| return false; |
| /* When the function is public, we can always introduce a new reference. |
| The exception is COMDAT functions, where introducing a direct reference |
| implies the need to include the function body in the current unit. */ |
| if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl)) |
| return true; |
| /* We have a COMDAT. We are going to check whether we still have a |
| definition or whether the definition is going to be output in another |
| partition. Bypass this when gimplifying; all needed functions will be |
| produced. |
| |
| As observed in PR20991 for already optimized out comdat virtual functions, |
| it may be tempting not to give up, because the copy will be output |
| elsewhere when the corresponding vtable is output. |
| This is however not possible - the ABI specifies that COMDATs are output |
| in the units where they are used, and when the other unit was compiled |
| with LTO it is possible that the vtable was kept public while the |
| function itself was privatized. */ |
| if (!symtab->function_flags_ready) |
| return true; |
| |
| snode = symtab_node::get (decl); |
| if (!snode |
| || ((!snode->definition || DECL_EXTERNAL (decl)) |
| && (!snode->in_other_partition |
| || (!snode->forced_by_abi && !snode->force_output)))) |
| return false; |
| node = dyn_cast <cgraph_node *> (snode); |
| return !node || !node->inlined_to; |
| } |
| |
| /* Create a temporary of type TYPE for a statement STMT. If the current |
| function is in SSA form, an SSA name is created. Otherwise a temporary |
| register is made. */ |
| |
| tree |
| create_tmp_reg_or_ssa_name (tree type, gimple *stmt) |
| { |
| if (gimple_in_ssa_p (cfun)) |
| return make_ssa_name (type, stmt); |
| else |
| return create_tmp_reg (type); |
| } |
| |
| /* CVAL is a value taken from the DECL_INITIAL of a variable. Try to |
| transform it into a form acceptable to is_gimple_min_invariant. |
| FROM_DECL (if non-NULL) specifies the variable whose constructor contains |
| CVAL. */ |
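| /* For example (a sketch), an initializer value of the form |
| (char *) &buf + 4 |
| (a POINTER_PLUS_EXPR with a constant offset) is rewritten below into |
| &MEM[(char *) &buf + 4B] |
| i.e. an ADDR_EXPR of a MEM_REF, a form is_gimple_min_invariant accepts. */ |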
| |
| tree |
| canonicalize_constructor_val (tree cval, tree from_decl) |
| { |
| if (CONSTANT_CLASS_P (cval)) |
| return cval; |
| |
| tree orig_cval = cval; |
| STRIP_NOPS (cval); |
| if (TREE_CODE (cval) == POINTER_PLUS_EXPR |
| && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST) |
| { |
| tree ptr = TREE_OPERAND (cval, 0); |
| if (is_gimple_min_invariant (ptr)) |
| cval = build1_loc (EXPR_LOCATION (cval), |
| ADDR_EXPR, TREE_TYPE (ptr), |
| fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)), |
| ptr, |
| fold_convert (ptr_type_node, |
| TREE_OPERAND (cval, 1)))); |
| } |
| if (TREE_CODE (cval) == ADDR_EXPR) |
| { |
| tree base = NULL_TREE; |
| if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR) |
| { |
| base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0)); |
| if (base) |
| TREE_OPERAND (cval, 0) = base; |
| } |
| else |
| base = get_base_address (TREE_OPERAND (cval, 0)); |
| if (!base) |
| return NULL_TREE; |
| |
| if (VAR_OR_FUNCTION_DECL_P (base) |
| && !can_refer_decl_in_current_unit_p (base, from_decl)) |
| return NULL_TREE; |
| if (TREE_TYPE (base) == error_mark_node) |
| return NULL_TREE; |
| if (VAR_P (base)) |
| TREE_ADDRESSABLE (base) = 1; |
| else if (TREE_CODE (base) == FUNCTION_DECL) |
| { |
| /* Make sure we create a cgraph node for functions we'll reference. |
| They can be non-existent if the reference comes from an entry |
| of an external vtable for example. */ |
| cgraph_node::get_create (base); |
| } |
| /* Fixup types in global initializers. */ |
| if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0))) |
| cval = build_fold_addr_expr (TREE_OPERAND (cval, 0)); |
| |
| if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval))) |
| cval = fold_convert (TREE_TYPE (orig_cval), cval); |
| return cval; |
| } |
| /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0. */ |
| if (TREE_CODE (cval) == INTEGER_CST) |
| { |
| if (TREE_OVERFLOW_P (cval)) |
| cval = drop_tree_overflow (cval); |
| if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval))) |
| cval = fold_convert (TREE_TYPE (orig_cval), cval); |
| return cval; |
| } |
| return orig_cval; |
| } |
| |
| /* If SYM is a constant variable with known value, return the value. |
| NULL_TREE is returned otherwise. */ |
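| /* For example (a sketch): given |
| static const int answer = 42; |
| a load of ANSWER folds to the constant 42, and a 'const' register-type |
| variable without an initializer that cannot be overridden at link or |
| run time folds to zero. */ |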
| |
| tree |
| get_symbol_constant_value (tree sym) |
| { |
| tree val = ctor_for_folding (sym); |
| if (val != error_mark_node) |
| { |
| if (val) |
| { |
| val = canonicalize_constructor_val (unshare_expr (val), sym); |
| if (val |
| && is_gimple_min_invariant (val) |
| && useless_type_conversion_p (TREE_TYPE (sym), TREE_TYPE (val))) |
| return val; |
| else |
| return NULL_TREE; |
| } |
| /* Variables declared 'const' without an initializer |
| have zero as the initializer if they may not be |
| overridden at link or run time. */ |
| if (!val |
| && is_gimple_reg_type (TREE_TYPE (sym))) |
| return build_zero_cst (TREE_TYPE (sym)); |
| } |
| |
| return NULL_TREE; |
| } |
| |
| |
| |
| /* Subroutine of fold_stmt. We perform several simplifications of the |
| memory reference tree EXPR and make sure to re-gimplify them properly |
| after propagation of constant addresses. IS_LHS is true if the |
| reference is supposed to be an lvalue. */ |
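| /* For example (a sketch): __real__ applied to a COMPLEX_CST folds to |
| its real component, and a non-lvalue load such as a[2] from a variable |
| with a known constant initializer folds via fold_const_aggregate_ref |
| to the corresponding element. */ |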
| |
| static tree |
| maybe_fold_reference (tree expr, bool is_lhs) |
| { |
| tree result; |
| |
| if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR |
| || TREE_CODE (expr) == REALPART_EXPR |
| || TREE_CODE (expr) == IMAGPART_EXPR) |
| && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0))) |
| return fold_unary_loc (EXPR_LOCATION (expr), |
| TREE_CODE (expr), |
| TREE_TYPE (expr), |
| TREE_OPERAND (expr, 0)); |
| else if (TREE_CODE (expr) == BIT_FIELD_REF |
| && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0))) |
| return fold_ternary_loc (EXPR_LOCATION (expr), |
| TREE_CODE (expr), |
| TREE_TYPE (expr), |
| TREE_OPERAND (expr, 0), |
| TREE_OPERAND (expr, 1), |
| TREE_OPERAND (expr, 2)); |
| |
| if (!is_lhs |
| && (result = fold_const_aggregate_ref (expr)) |
| && is_gimple_min_invariant (result)) |
| return result; |
| |
| return NULL_TREE; |
| } |
| |
| |
| /* Attempt to fold an assignment statement pointed to by SI. Returns a |
| replacement rhs for the statement or NULL_TREE if no simplification |
| could be made. It is assumed that the operands have been previously |
| folded. */ |
| |
| static tree |
| fold_gimple_assign (gimple_stmt_iterator *si) |
| { |
| gimple *stmt = gsi_stmt (*si); |
| enum tree_code subcode = gimple_assign_rhs_code (stmt); |
| location_t loc = gimple_location (stmt); |
| |
| tree result = NULL_TREE; |
| |
| switch (get_gimple_rhs_class (subcode)) |
| { |
| case GIMPLE_SINGLE_RHS: |
| { |
| tree rhs = gimple_assign_rhs1 (stmt); |
| |
| if (TREE_CLOBBER_P (rhs)) |
| return NULL_TREE; |
| |
| if (REFERENCE_CLASS_P (rhs)) |
| return maybe_fold_reference (rhs, false); |
| |
| else if (TREE_CODE (rhs) == OBJ_TYPE_REF) |
| { |
| tree val = OBJ_TYPE_REF_EXPR (rhs); |
| if (is_gimple_min_invariant (val)) |
| return val; |
| else if (flag_devirtualize && virtual_method_call_p (rhs)) |
| { |
| bool final; |
| vec <cgraph_node *>targets |
| = possible_polymorphic_call_targets (rhs, stmt, &final); |
| if (final && targets.length () <= 1 && dbg_cnt (devirt)) |
| { |
| if (dump_enabled_p ()) |
| { |
| dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt, |
| "resolving virtual function address " |
| "reference to function %s\n", |
| targets.length () == 1 |
| ? targets[0]->name () |
| : "NULL"); |
| } |
| if (targets.length () == 1) |
| { |
| val = fold_convert (TREE_TYPE (val), |
| build_fold_addr_expr_loc |
| (loc, targets[0]->decl)); |
| STRIP_USELESS_TYPE_CONVERSION (val); |
| } |
| else |
| /* We cannot use __builtin_unreachable here because it |
| cannot have its address taken. */ |
| val = build_int_cst (TREE_TYPE (val), 0); |
| return val; |
| } |
| } |
| } |
| |
| else if (TREE_CODE (rhs) == ADDR_EXPR) |
| { |
| tree ref = TREE_OPERAND (rhs, 0); |
| tree tem = maybe_fold_reference (ref, true); |
| if (tem |
| && TREE_CODE (tem) == MEM_REF |
| && integer_zerop (TREE_OPERAND (tem, 1))) |
| result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0)); |
| else if (tem) |
| result = fold_convert (TREE_TYPE (rhs), |
| build_fold_addr_expr_loc (loc, tem)); |
| else if (TREE_CODE (ref) == MEM_REF |
| && integer_zerop (TREE_OPERAND (ref, 1))) |
| result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0)); |
| |
| if (result) |
| { |
| /* Strip away useless type conversions. Both the |
| NON_LVALUE_EXPR that may have been added by fold, and |
| "useless" type conversions that might now be apparent |
| due to propagation. */ |
| STRIP_USELESS_TYPE_CONVERSION (result); |
| |
| if (result != rhs && valid_gimple_rhs_p (result)) |
| return result; |
| } |
| } |
| |
| else if (TREE_CODE (rhs) == CONSTRUCTOR |
| && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE) |
| { |
| /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */ |
| unsigned i; |
| tree val; |
| |
| FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val) |
| if (! CONSTANT_CLASS_P (val)) |
| return NULL_TREE; |
| |
| return build_vector_from_ctor (TREE_TYPE (rhs), |
| CONSTRUCTOR_ELTS (rhs)); |
| } |
| |
| else if (DECL_P (rhs) |
| && is_gimple_reg_type (TREE_TYPE (rhs))) |
| return get_symbol_constant_value (rhs); |
| } |
| break; |
| |
| case GIMPLE_UNARY_RHS: |
| break; |
| |
| case GIMPLE_BINARY_RHS: |
| break; |
| |
| case GIMPLE_TERNARY_RHS: |
| result = fold_ternary_loc (loc, subcode, |
| TREE_TYPE (gimple_assign_lhs (stmt)), |
| gimple_assign_rhs1 (stmt), |
| gimple_assign_rhs2 (stmt), |
| gimple_assign_rhs3 (stmt)); |
| |
| if (result) |
| { |
| STRIP_USELESS_TYPE_CONVERSION (result); |
| if (valid_gimple_rhs_p (result)) |
| return result; |
| } |
| break; |
| |
| case GIMPLE_INVALID_RHS: |
| gcc_unreachable (); |
| } |
| |
| return NULL_TREE; |
| } |
| |
| |
| /* Replace a statement at *SI_P with a sequence of statements in STMTS, |
| adjusting the replacement stmts' location and virtual operands. |
| If the statement has an lhs, the last stmt in the sequence is expected |
| to assign to that lhs. */ |
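| /* A sketch of the virtual operand rewiring done below: if the original |
| statement carried |
| # .MEM_5 = VDEF <.MEM_4> |
| and STMTS contains two stores, the last store inherits the original |
| virtual definition .MEM_5, the earlier store gets a fresh virtual |
| definition, and the VUSEs are chained forward starting from .MEM_4. */ |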
| |
| static void |
| gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts) |
| { |
| gimple *stmt = gsi_stmt (*si_p); |
| |
| if (gimple_has_location (stmt)) |
| annotate_all_with_location (stmts, gimple_location (stmt)); |
| |
| /* First iterate over the replacement statements backward, assigning |
| virtual operands to their defining statements. */ |
| gimple *laststore = NULL; |
| for (gimple_stmt_iterator i = gsi_last (stmts); |
| !gsi_end_p (i); gsi_prev (&i)) |
| { |
| gimple *new_stmt = gsi_stmt (i); |
| if ((gimple_assign_single_p (new_stmt) |
| && !is_gimple_reg (gimple_assign_lhs (new_stmt))) |
| || (is_gimple_call (new_stmt) |
| && (gimple_call_flags (new_stmt) |
| & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0)) |
| { |
| tree vdef; |
| if (!laststore) |
| vdef = gimple_vdef (stmt); |
| else |
| vdef = make_ssa_name (gimple_vop (cfun), new_stmt); |
| gimple_set_vdef (new_stmt, vdef); |
| if (vdef && TREE_CODE (vdef) == SSA_NAME) |
| SSA_NAME_DEF_STMT (vdef) = new_stmt; |
| laststore = new_stmt; |
| } |
| } |
| |
| /* Second iterate over the statements forward, assigning virtual |
| operands to their uses. */ |
| tree reaching_vuse = gimple_vuse (stmt); |
| for (gimple_stmt_iterator i = gsi_start (stmts); |
| !gsi_end_p (i); gsi_next (&i)) |
| { |
| gimple *new_stmt = gsi_stmt (i); |
| /* If the new statement possibly has a VUSE, update it with the exact |
| SSA name we know will reach this one. */ |
| if (gimple_has_mem_ops (new_stmt)) |
| gimple_set_vuse (new_stmt, reaching_vuse); |
| gimple_set_modified (new_stmt, true); |
| if (gimple_vdef (new_stmt)) |
| reaching_vuse = gimple_vdef (new_stmt); |
| } |
| |
| /* If the new sequence does not do a store, release the virtual |
| definition of the original statement. */ |
| if (reaching_vuse |
| && reaching_vuse == gimple_vuse (stmt)) |
| { |
| tree vdef = gimple_vdef (stmt); |
| if (vdef |
| && TREE_CODE (vdef) == SSA_NAME) |
| { |
| unlink_stmt_vdef (stmt); |
| release_ssa_name (vdef); |
| } |
| } |
| |
| /* Finally replace the original statement with the sequence. */ |
| gsi_replace_with_seq (si_p, stmts, false); |
| } |
| |
| /* Convert EXPR into a GIMPLE value suitable for substitution on the |
| RHS of an assignment. Insert the necessary statements before |
| iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL, |
| is replaced. If the call is expected to produce a result, then it |
| is replaced by an assignment of the new RHS to the result variable. |
| If the result is to be ignored, then the call is replaced by a |
| GIMPLE_NOP. A proper VDEF chain is retained by making the first |
| VUSE and the last VDEF of the whole sequence be the same as the replaced |
| statement and using new SSA names for stores in between. */ |
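| /* For example (a sketch; FOO is just an illustrative name): replacing |
| the call in |
| res_1 = foo (...); |
| with EXPR 'a_2 + 3' gimplifies EXPR into a sequence and appends |
| res_1 = a_2 + 3; |
| as the final statement of the inserted sequence. */ |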
| |
| void |
| gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr) |
| { |
| tree lhs; |
| gimple *stmt, *new_stmt; |
| gimple_stmt_iterator i; |
| gimple_seq stmts = NULL; |
| |
| stmt = gsi_stmt (*si_p); |
| |
| gcc_assert (is_gimple_call (stmt)); |
| |
| push_gimplify_context (gimple_in_ssa_p (cfun)); |
| |
| lhs = gimple_call_lhs (stmt); |
| if (lhs == NULL_TREE) |
| { |
| gimplify_and_add (expr, &stmts); |
| /* We can end up folding a memcpy of an empty class assignment, |
| which gets optimized away by C++ gimplification. */ |
| if (gimple_seq_empty_p (stmts)) |
| { |
| pop_gimplify_context (NULL); |
| if (gimple_in_ssa_p (cfun)) |
| { |
| unlink_stmt_vdef (stmt); |
| release_defs (stmt); |
| } |
| gsi_replace (si_p, gimple_build_nop (), false); |
| return; |
| } |
| } |
| else |
| { |
| tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE); |
| new_stmt = gimple_build_assign (lhs, tmp); |
| i = gsi_last (stmts); |
| gsi_insert_after_without_update (&i, new_stmt, |
| GSI_CONTINUE_LINKING); |
| } |
| |
| pop_gimplify_context (NULL); |
| |
| gsi_replace_with_seq_vops (si_p, stmts); |
| } |
| |
| |
| /* Replace the call at *GSI with the gimple value VAL. */ |
| |
| void |
| replace_call_with_value (gimple_stmt_iterator *gsi, tree val) |
| { |
| gimple *stmt = gsi_stmt (*gsi); |
| tree lhs = gimple_call_lhs (stmt); |
| gimple *repl; |
| if (lhs) |
| { |
| if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val))) |
| val = fold_convert (TREE_TYPE (lhs), val); |
| repl = gimple_build_assign (lhs, val); |
| } |
| else |
| repl = gimple_build_nop (); |
| tree vdef = gimple_vdef (stmt); |
| if (vdef && TREE_CODE (vdef) == SSA_NAME) |
| { |
| unlink_stmt_vdef (stmt); |
| release_ssa_name (vdef); |
| } |
| gsi_replace (gsi, repl, false); |
| } |
| |
| /* Replace the call at *GSI with the new call REPL and fold that |
| again. */ |
| |
| static void |
| replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl) |
| { |
| gimple *stmt = gsi_stmt (*gsi); |
| gimple_call_set_lhs (repl, gimple_call_lhs (stmt)); |
| gimple_set_location (repl, gimple_location (stmt)); |
| gimple_move_vops (repl, stmt); |
| gsi_replace (gsi, repl, false); |
| fold_stmt (gsi); |
| } |
| |
| /* Return true if VAR is a VAR_DECL or a component thereof. */ |
| |
| static bool |
| var_decl_component_p (tree var) |
| { |
| tree inner = var; |
| while (handled_component_p (inner)) |
| inner = TREE_OPERAND (inner, 0); |
| return (DECL_P (inner) |
| || (TREE_CODE (inner) == MEM_REF |
| && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR)); |
| } |
| |
| /* Return TRUE if the SIZE argument, representing the size of an |
| object, is in a range of values of which exactly zero is valid. */ |
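| /* For example (a sketch): if the recorded value range of SIZE only |
| allows zero or values greater than SSIZE_MAX (never valid object |
| sizes), its intersection with [0, SSIZE_MAX] computed below contains |
| only zero and the function returns true. */ |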
| |
| static bool |
| size_must_be_zero_p (tree size) |
| { |
| if (integer_zerop (size)) |
| return true; |
| |
| if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size))) |
| return false; |
| |
| tree type = TREE_TYPE (size); |
| int prec = TYPE_PRECISION (type); |
| |
| /* Compute the value of SSIZE_MAX, the largest positive value that |
| can be stored in ssize_t, the signed counterpart of size_t. */ |
| wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1; |
| value_range valid_range (build_int_cst (type, 0), |
| wide_int_to_tree (type, ssize_max)); |
| value_range vr; |
| get_range_info (size, vr); |
| vr.intersect (&valid_range); |
| return vr.zero_p (); |
| } |
| |
| /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and |
| diagnose (otherwise undefined) overlapping copies without preventing |
| folding. When folded, GCC guarantees that overlapping memcpy has |
| the same semantics as memmove. A call to the library memcpy need not |
| provide the same guarantee. Return false if no simplification can |
| be made. */ |
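| /* For example (a sketch): a call such as |
| memcpy (&d, &s, 4); |
| on sufficiently aligned 4-byte objects is folded into a single |
| load/store pair of a 4-byte integer type, and a memmove whose |
| arguments provably cannot overlap is turned into a memcpy call. */ |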
| |
| static bool |
| gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi, |
| tree dest, tree src, enum built_in_function code) |
| { |
| gimple *stmt = gsi_stmt (*gsi); |
| tree lhs = gimple_call_lhs (stmt); |
| tree len = gimple_call_arg (stmt, 2); |
| tree destvar, srcvar; |
| location_t loc = gimple_location (stmt); |
| |
| /* If the LEN parameter is a constant zero or in a range where |
| the only valid value is zero, return DEST. */ |
| if (size_must_be_zero_p (len)) |
| { |
| gimple *repl; |
| if (gimple_call_lhs (stmt)) |
| repl = gimple_build_assign (gimple_call_lhs (stmt), dest); |
| else |
| repl = gimple_build_nop (); |
| tree vdef = gimple_vdef (stmt); |
| if (vdef && TREE_CODE (vdef) == SSA_NAME) |
| { |
| unlink_stmt_vdef (stmt); |
| release_ssa_name (vdef); |
| } |
| gsi_replace (gsi, repl, false); |
| return true; |
| } |
| |
| /* If SRC and DEST are the same (and not volatile), return |
| DEST{,+LEN,+LEN-1}. */ |
| if (operand_equal_p (src, dest, 0)) |
| { |
| /* Avoid diagnosing exact overlap in calls to __builtin_memcpy. |
| It's safe and may even be emitted by GCC itself (see bug |
| 32667). */ |
| unlink_stmt_vdef (stmt); |
| if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME) |
| release_ssa_name (gimple_vdef (stmt)); |
| if (!lhs) |
| { |
| gsi_replace (gsi, gimple_build_nop (), false); |
| return true; |
| } |
| goto done; |
| } |
| else |
| { |
| tree srctype, desttype; |
| unsigned int src_align, dest_align; |
| tree off0; |
| const char *tmp_str; |
| unsigned HOST_WIDE_INT tmp_len; |
| |
| /* Build accesses at offset zero with a ref-all character type. */ |
| off0 = build_int_cst (build_pointer_type_for_mode (char_type_node, |
| ptr_mode, true), 0); |
| |
| /* If we can perform the copy efficiently by first doing all loads |
| and then all stores, inline it that way. Currently, "efficiently" |
| means that we can load all the memory into a single integer |
| register, which is what MOVE_MAX gives us. */ |
| src_align = get_pointer_alignment (src); |
| dest_align = get_pointer_alignment (dest); |
| if (tree_fits_uhwi_p (len) |
| && compare_tree_int (len, MOVE_MAX) <= 0 |
| /* FIXME: Don't transform copies from strings with known length. |
| Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c |
| from being handled, and the case was XFAILed for that reason. |
| Now that it is handled and the XFAIL removed, as soon as other |
| strlenopt tests that rely on it for passing are adjusted, this |
| hack can be removed. */ |
| && !c_strlen (src, 1) |
| && !((tmp_str = c_getstr (src, &tmp_len)) != NULL |
| && memchr (tmp_str, 0, tmp_len) == NULL)) |
| { |
| unsigned ilen = tree_to_uhwi (len); |
| if (pow2p_hwi (ilen)) |
| { |
| /* Detect out-of-bounds accesses without issuing warnings. |
| Avoid folding out-of-bounds copies, but to avoid false |
| positives for unreachable code defer the warning until after |
| DCE has worked its magic. |
| -Wrestrict is still diagnosed. */ |
| if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt), |
| dest, src, len, len, |
| false, false)) |
| if (warning != OPT_Wrestrict) |
| return false; |
| |
| scalar_int_mode mode; |
| tree type = lang_hooks.types.type_for_size (ilen * 8, 1); |
| if (type |
| && is_a <scalar_int_mode> (TYPE_MODE (type), &mode) |
| && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8 |
| /* If the destination pointer is not aligned, we must be able |
| to emit an unaligned store. */ |
| && (dest_align >= GET_MODE_ALIGNMENT (mode) |
| || !targetm.slow_unaligned_access (mode, dest_align) |
| || (optab_handler (movmisalign_optab, mode) |
| != CODE_FOR_nothing))) |
| { |
| tree srctype = type; |
| tree desttype = type; |
| if (src_align < GET_MODE_ALIGNMENT (mode)) |
| srctype = build_aligned_type (type, src_align); |
| tree srcmem = fold_build2 (MEM_REF, srctype, src, off0); |
| tree tem = fold_const_aggregate_ref (srcmem); |
| if (tem) |
| srcmem = tem; |
| else if (src_align < GET_MODE_ALIGNMENT (mode) |
| && targetm.slow_unaligned_access (mode, src_align) |
| && (optab_handler (movmisalign_optab, mode) |
| == CODE_FOR_nothing)) |
| srcmem = NULL_TREE; |
| if (srcmem) |
| { |
| gimple *new_stmt; |
| if (is_gimple_reg_type (TREE_TYPE (srcmem))) |
| { |
| new_stmt = gimple_build_assign (NULL_TREE, srcmem); |
| srcmem |
| = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem), |
| new_stmt); |
| gimple_assign_set_lhs (new_stmt, srcmem); |
| gimple_set_vuse (new_stmt, gimple_vuse (stmt)); |
| gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT); |
| } |
| if (dest_align < GET_MODE_ALIGNMENT (mode)) |
| desttype = build_aligned_type (type, dest_align); |
| new_stmt |
| = gimple_build_assign (fold_build2 (MEM_REF, desttype, |
| dest, off0), |
| srcmem); |
| gimple_move_vops (new_stmt, stmt); |
| if (!lhs) |
| { |
| gsi_replace (gsi, new_stmt, false); |
| return true; |
| } |
| gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT); |
| goto done; |
| } |
| } |
| } |
| } |
| |
| if (code == BUILT_IN_MEMMOVE) |
| { |
| /* Both DEST and SRC must be pointer types. |
| ??? This is what old code did. Is the testing for pointer types |
| really mandatory? |
| |
| If either SRC is readonly or length is 1, we can use memcpy. */ |
| if (!dest_align || !src_align) |
| return false; |
| if (readonly_data_expr (src) |
| || (tree_fits_uhwi_p (len) |
| && (MIN (src_align, dest_align) / BITS_PER_UNIT |
| >= tree_to_uhwi (len)))) |
| { |
| tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY); |
| if (!fn) |
| return false; |
| gimple_call_set_fndecl (stmt, fn); |
| gimple_call_set_arg (stmt, 0, dest); |
| gimple_call_set_arg (stmt, 1, src); |
| fold_stmt (gsi); |
| return true; |
| } |
| |
| /* If *src and *dest can't overlap, optimize into memcpy as well. */ |
| if (TREE_CODE (src) == ADDR_EXPR |
| && TREE_CODE (dest) == ADDR_EXPR) |
| { |
| tree src_base, dest_base, fn; |
| poly_int64 src_offset = 0, dest_offset = 0; |
| poly_uint64 maxsize; |
| |
| srcvar = TREE_OPERAND (src, 0); |
| src_base = get_addr_base_and_unit_offset (srcvar, &src_offset); |
| if (src_base == NULL) |
| src_base = srcvar; |
| destvar = TREE_OPERAND (dest, 0); |
| dest_base = get_addr_base_and_unit_offset (destvar, |
| &dest_offset); |
| if (dest_base == NULL) |
| dest_base = destvar; |
| if (!poly_int_tree_p (len, &maxsize)) |
| maxsize = -1; |
| if (SSA_VAR_P (src_base) |
| && SSA_VAR_P (dest_base)) |
| { |
| if (operand_equal_p (src_base, dest_base, 0) |
| && ranges_maybe_overlap_p (src_offset, maxsize, |
| dest_offset, maxsize)) |
| return false; |
| } |
| else if (TREE_CODE (src_base) == MEM_REF |
| && TREE_CODE (dest_base) == MEM_REF) |
| { |
| if (! operand_equal_p (TREE_OPERAND (src_base, 0), |
| TREE_OPERAND (dest_base, 0), 0)) |
| return false; |
| poly_offset_int full_src_offset |
| = mem_ref_offset (src_base) + src_offset; |
| poly_offset_int full_dest_offset |
| = mem_ref_offset (dest_base) + dest_offset; |
| if (ranges_maybe_overlap_p (full_src_offset, maxsize, |
| full_dest_offset, maxsize)) |
| return false; |
| } |
| else |
| return false; |
| |
| fn = builtin_decl_implicit (BUILT_IN_MEMCPY); |
| if (!fn) |
| return false; |
| gimple_call_set_fndecl (stmt, fn); |
| gimple_call_set_arg (stmt, 0, dest); |
| gimple_call_set_arg (stmt, 1, src); |
| fold_stmt (gsi); |
| return true; |
| } |
| |
| /* If the destination and source do not alias, optimize into |
| memcpy as well. */ |
| if ((is_gimple_min_invariant (dest) |
| || TREE_CODE (dest) == SSA_NAME) |
| && (is_gimple_min_invariant (src) |
| || TREE_CODE (src) == SSA_NAME)) |
| { |
| ao_ref destr, srcr; |
| ao_ref_init_from_ptr_and_size (&destr, dest, len); |
| ao_ref_init_from_ptr_and_size (&srcr, src, len); |
| if (!refs_may_alias_p_1 (&destr, &srcr, false)) |
| { |
| tree fn; |
| fn = builtin_decl_implicit (BUILT_IN_MEMCPY); |
| if (!fn) |
| return false; |
| gimple_call_set_fndecl (stmt, fn); |
| gimple_call_set_arg (stmt, 0, dest); |
| gimple_call_set_arg (stmt, 1, src); |
| fold_stmt (gsi); |
| return true; |
| } |
| } |
| |
| return false; |
| } |
| |
| if (!tree_fits_shwi_p (len)) |
| return false; |
| if (!POINTER_TYPE_P (TREE_TYPE (src)) |
| || !POINTER_TYPE_P (TREE_TYPE (dest))) |
| return false; |
| /* In the following try to find a type that is most natural to be |
| used for the memcpy source and destination and that allows |
| the most optimization when memcpy is turned into a plain assignment |
| using that type. In theory we could always use a char[len] type |
| but that only gains us that the destination and source possibly |
| will no longer have their address taken. */ |
| srctype = TREE_TYPE (TREE_TYPE (src)); |
| if (TREE_CODE (srctype) == ARRAY_TYPE |
| && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)) |
| srctype = TREE_TYPE (srctype); |
| desttype = TREE_TYPE (TREE_TYPE (dest)); |
| if (TREE_CODE (desttype) == ARRAY_TYPE |
| && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)) |
| desttype = TREE_TYPE (desttype); |
| if (TREE_ADDRESSABLE (srctype) |
| || TREE_ADDRESSABLE (desttype)) |
| return false; |
| |
| /* Make sure we are not copying using a floating-point mode or |
| a type whose size possibly does not match its precision. */ |
| if (FLOAT_MODE_P (TYPE_MODE (desttype)) |
| || TREE_CODE (desttype) == BOOLEAN_TYPE |
| || TREE_CODE (desttype) == ENUMERAL_TYPE) |
| desttype = bitwise_type_for_mode (TYPE_MODE (desttype)); |
| if (FLOAT_MODE_P (TYPE_MODE (srctype)) |
| || TREE_CODE (srctype) == BOOLEAN_TYPE |
| || TREE_CODE (srctype) == ENUMERAL_TYPE) |
| srctype = bitwise_type_for_mode (TYPE_MODE (srctype)); |
| if (!srctype) |
| srctype = desttype; |
| if (!desttype) |
| desttype = srctype; |
| if (!srctype) |
| return false; |
| |
| src_align = get_pointer_alignment (src); |
| dest_align = get_pointer_alignment (dest); |
| |
| /* Choose between src and destination type for the access based |
| on alignment, whether the access constitutes a register access |
| and whether it may actually expose a declaration for SSA rewrite |
| or SRA decomposition. */ |
| destvar = NULL_TREE; |
| srcvar = NULL_TREE; |
| if (TREE_CODE (dest) == ADDR_EXPR |
| && var_decl_component_p (TREE_OPERAND (dest, 0)) |
| && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len) |
| && dest_align >= TYPE_ALIGN (desttype) |
| && (is_gimple_reg_type (desttype) |
| || src_align >= TYPE_ALIGN (desttype))) |
| destvar = fold_build2 (MEM_REF, desttype, dest, off0); |
| else if (TREE_CODE (src) == ADDR_EXPR |
| && var_decl_component_p (TREE_OPERAND (src, 0)) |
| && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len) |
| && src_align >= TYPE_ALIGN (srctype) |
| && (is_gimple_reg_type (srctype) |
| || dest_align >= TYPE_ALIGN (srctype))) |
| srcvar = fold_build2 (MEM_REF, srctype, src, off0); |
| if (srcvar == NULL_TREE && destvar == NULL_TREE) |
| return false; |
| |
| /* Now that we have chosen an access type, express the other side in |
| terms of it if the target allows that with respect to alignment |
| constraints. */ |
| if (srcvar == NULL_TREE) |
| { |
| if (src_align >= TYPE_ALIGN (desttype)) |
| srcvar = fold_build2 (MEM_REF, desttype, src, off0); |
| else |
| { |
| if (STRICT_ALIGNMENT) |
| return false; |
| srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype), |
| src_align); |
| srcvar = fold_build2 (MEM_REF, srctype, src, off0); |
| } |
| } |
| else if (destvar == NULL_TREE) |
| { |
| if (dest_align >= TYPE_ALIGN (srctype)) |
| destvar = fold_build2 (MEM_REF, srctype, dest, off0); |
| else |
| { |
| if (STRICT_ALIGNMENT) |
| return false; |
| desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype), |
| dest_align); |
| destvar = fold_build2 (MEM_REF, desttype, dest, off0); |
| } |
| } |
| |
| /* Same as above, detect out-of-bounds accesses without issuing |
| warnings. Avoid folding out-of-bounds copies, but to avoid |
| false positives for unreachable code defer the warning until |
| after DCE has worked its magic. |
| -Wrestrict is still diagnosed. */ |
| if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt), |
| dest, src, len, len, |
| false, false)) |
| if (warning != OPT_Wrestrict) |
| return false; |
| |
| gimple *new_stmt; |
| if (is_gimple_reg_type (TREE_TYPE (srcvar))) |
| { |
| tree tem = fold_const_aggregate_ref (srcvar); |
| if (tem) |
| srcvar = tem; |
| if (! is_gimple_min_invariant (srcvar)) |
| { |
| new_stmt = gimple_build_assign (NULL_TREE, srcvar); |
| srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar), |
| new_stmt); |
| gimple_assign_set_lhs (new_stmt, srcvar); |
| gimple_set_vuse (new_stmt, gimple_vuse (stmt)); |
| gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT); |
| } |
| new_stmt = gimple_build_assign (destvar, srcvar); |
| goto set_vop_and_replace; |
| } |
| |
| /* We get an aggregate copy. Use an unsigned char[] type to |
| perform the copying to preserve padding and to avoid any issues |
| with TREE_ADDRESSABLE types or float modes behavior on copying. */ |
| desttype = build_array_type_nelts (unsigned_char_type_node, |
| tree_to_uhwi (len)); |
| srctype = desttype; |
| if (src_align > TYPE_ALIGN (srctype)) |
| srctype = build_aligned_type (srctype, src_align); |
| if (dest_align > TYPE_ALIGN (desttype)) |
| desttype = build_aligned_type (desttype, dest_align); |
| new_stmt |
| = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0), |
| fold_build2 (MEM_REF, srctype, src, off0)); |
| set_vop_and_replace: |
| gimple_move_vops (new_stmt, stmt); |
| if (!lhs) |
| { |
| gsi_replace (gsi, new_stmt, false); |
| return true; |
| } |
| gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT); |
| } |
| |
| done: |
| gimple_seq stmts = NULL; |
| if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE) |
| len = NULL_TREE; |
| else if (code == BUILT_IN_MEMPCPY) |
| { |
| len = gimple_convert_to_ptrofftype (&stmts, loc, len); |
| dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR, |
| TREE_TYPE (dest), dest, len); |
| } |
| else |
| gcc_unreachable (); |
| |
| gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT); |
| gimple *repl = gimple_build_assign (lhs, dest); |
| gsi_replace (gsi, repl, false); |
| return true; |
| } |
| |
| /* Transform a call to built-in bcmp(a, b, len) at *GSI into one |
| to built-in memcmp (a, b, len). */ |
| |
| static bool |
| gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi) |
| { |
| tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP); |
| |
| if (!fn) |
| return false; |
| |
| /* Transform bcmp (a, b, len) into memcmp (a, b, len). */ |
| |
| gimple *stmt = gsi_stmt (*gsi); |
| tree a = gimple_call_arg (stmt, 0); |
| tree b = gimple_call_arg (stmt, 1); |
| tree len = gimple_call_arg (stmt, 2); |
| |
| gimple *repl = gimple_build_call (fn, 3, a, b, len); |
| replace_call_with_call_and_fold (gsi, repl); |
| |
| return true; |
| } |
| |
| /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one |
| to built-in memmove (dest, src, len). */ |
| |
| static bool |
| gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi) |
| { |
| tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE); |
| |
| if (!fn) |
| return false; |
| |
| /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies |
| that it is equivalent to memmove (not memcpy). Transform bcopy (src, |
| dest, len) into memmove (dest, src, len). */ |
| |
| gimple *stmt = gsi_stmt (*gsi); |
| tree src = gimple_call_arg (stmt, 0); |
| tree dest = gimple_call_arg (stmt, 1); |
| tree len = gimple_call_arg (stmt, 2); |
| |
| gimple *repl = gimple_build_call (fn, 3, dest, src, len); |
| gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn)); |
| replace_call_with_call_and_fold (gsi, repl); |
| |
| return true; |
| } |
| |
| /* Transform a call to built-in bzero (dest, len) at *GSI into one |
| to built-in memset (dest, 0, len). */ |
| |
| static bool |
| gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi) |
| { |
| tree fn = builtin_decl_implicit (BUILT_IN_MEMSET); |
| |
| if (!fn) |
| return false; |
| |
| /* Transform bzero (dest, len) into memset (dest, 0, len). */ |
| |
| gimple *stmt = gsi_stmt (*gsi); |
| tree dest = gimple_call_arg (stmt, 0); |
| tree len = gimple_call_arg (stmt, 1); |
| |
| gimple_seq seq = NULL; |
| gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len); |
| gimple_seq_add_stmt_without_update (&seq, repl); |
| gsi_replace_with_seq_vops (gsi, seq); |
| fold_stmt (gsi); |
| |
| return true; |
| } |
| |
| /* Fold a function call to builtin memset or bzero at *GSI setting the |
| memory of size LEN to the value C. Return whether a simplification |
| was made. */ |
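| /* For example (a sketch, assuming 8-bit bytes): |
| memset (&i, 0xab, sizeof (int)); |
| where I is a non-volatile 4-byte int is folded into the plain store |
| i = 0xabababab, with the byte value replicated as done below. */ |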
| |
| static bool |
| gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len) |
| { |
| gimple *stmt = gsi_stmt (*gsi); |
| tree etype; |
| unsigned HOST_WIDE_INT length, cval; |
| |
| /* If the LEN parameter is zero, return DEST. */ |
| if (integer_zerop (len)) |
| { |
| replace_call_with_value (gsi, gimple_call_arg (stmt, 0)); |
| return true; |
| } |
| |
| if (! tree_fits_uhwi_p (len)) |
| return false; |
| |
| if (TREE_CODE (c) != INTEGER_CST) |
| return false; |
| |
| tree dest = gimple_call_arg (stmt, 0); |
| tree var = dest; |
| if (TREE_CODE (var) != ADDR_EXPR) |
| return false; |
| |
| var = TREE_OPERAND (var, 0); |
| if (TREE_THIS_VOLATILE (var)) |
| return false; |
| |
| etype = TREE_TYPE (var); |
| if (TREE_CODE (etype) == ARRAY_TYPE) |
| etype = TREE_TYPE (etype); |
| |
| if (!INTEGRAL_TYPE_P (etype) |
| && !POINTER_TYPE_P (etype)) |
| return false; |
| |
| if (! var_decl_component_p (var)) |
| return false; |
| |
| length = tree_to_uhwi (len); |
| if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length |
| || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype)) |
| != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype))) |
| || get_pointer_alignment (dest) / BITS_PER_UNIT < length) |
| return false; |
| |
| if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) |
| return false; |
| |
| if (!type_has_mode_precision_p (etype)) |
| etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype), |
| TYPE_UNSIGNED (etype)); |
| |
| if (integer_zerop (c)) |
| cval = 0; |
| else |
| { |
| if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64) |
| return false; |
| |
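| /* Replicate the single byte value C across every byte of the value |
| to be stored, e.g. 0xab becomes 0xabab...ab (assuming 8-bit bytes). */ |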
| cval = TREE_INT_CST_LOW (c); |
| cval &= 0xff; |
| cval |= cval << 8; |
| cval |= cval << 16; |
| cval |= (cval << 31) << 1; |
| } |
| |
| var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0)); |
| gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval)); |
| gimple_move_vops (store, stmt); |
| gsi_insert_before (gsi, store, GSI_SAME_STMT); |
| if (gimple_call_lhs (stmt)) |
| { |
| gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest); |
| gsi_replace (gsi, asgn, false); |
| } |
| else |
| { |
| gimple_stmt_iterator gsi2 = *gsi; |
| gsi_prev (gsi); |
| gsi_remove (&gsi2, true); |
| } |
| |
| return true; |
| } |
| |
| /* Helper of get_range_strlen for ARG that is not an SSA_NAME. */ |
| |
| static bool |
| get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind, |
| c_strlen_data *pdata, unsigned eltsize) |
| { |
| gcc_assert (TREE_CODE (arg) != SSA_NAME); |
| |
| /* The length computed by this invocation of the function. */ |
| tree val = NULL_TREE; |
| |
| /* True if VAL is an optimistic (tight) bound determined from |
| the size of the character array in which the string may be |
| stored. In that case, the computed VAL is used to set |
| PDATA->MAXBOUND. */ |
| bool tight_bound = false; |
| |
| /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */ |
| if (TREE_CODE (arg) == ADDR_EXPR |
| && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF) |
| { |
| tree op = TREE_OPERAND (arg, 0); |
| if (integer_zerop (TREE_OPERAND (op, 1))) |
| { |
| tree aop0 = TREE_OPERAND (op, 0); |
| if (TREE_CODE (aop0) == INDIRECT_REF |
| && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME) |
| return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind, |
| pdata, eltsize); |
| } |
| else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF |
| && rkind == SRK_LENRANGE) |
| { |
| /* Fail if an array is the last member of a struct object |
| since it could be treated as a (fake) flexible array |
| member. */ |
| tree idx = TREE_OPERAND (op, 1); |
| |
| arg = TREE_OPERAND (op, 0); |
| tree optype = TREE_TYPE (arg); |
| if (tree dom = TYPE_DOMAIN (optype)) |
| if (tree bound = TYPE_MAX_VALUE (dom)) |
| if (TREE_CODE (bound) == INTEGER_CST |
| && TREE_CODE (idx) == INTEGER_CST |
| && tree_int_cst_lt (bound, idx)) |
| return false; |
| } |
| } |
| |
| if (rkind == SRK_INT_VALUE) |
| { |
| /* We are computing the maximum value (not string length). */ |
| val = arg; |
| if (TREE_CODE (val) != INTEGER_CST |
| || tree_int_cst_sgn (val) < 0) |
| return false; |
| } |
| else |
| { |
| c_strlen_data lendata = { }; |
| val = c_strlen (arg, 1, &lendata, eltsize); |
| |
| if (!val && lendata.decl) |
| { |
| /* ARG refers to the unterminated const character array |
| DATA.DECL with size DATA.LEN. */ |
| val = lendata.minlen; |
| pdata->decl = lendata.decl; |
| } |
| } |
| |
| /* Set if VAL represents the maximum length based on array size (set |
| when exact length cannot be determined). */ |
| bool maxbound = false; |
| |
| if (!val && rkind == SRK_LENRANGE) |
| { |
| if (TREE_CODE (arg) == ADDR_EXPR) |
| return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind, |
| pdata, eltsize); |
| |
| if (TREE_CODE (arg) == ARRAY_REF) |
| { |
| tree optype = TREE_TYPE (TREE_OPERAND (arg, 0)); |
| |
| /* Determine the "innermost" array type. */ |
| while (TREE_CODE (optype) == ARRAY_TYPE |
| && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE) |
| optype = TREE_TYPE (optype); |
| |
| /* Avoid arrays of pointers. */ |
| tree eltype = TREE_TYPE (optype); |
| if (TREE_CODE (optype) != ARRAY_TYPE |
| || !INTEGRAL_TYPE_P (eltype)) |
| return false; |
| |
| /* Fail when the array bound is unknown or zero. */ |
| val = TYPE_SIZE_UNIT (optype); |
| if (!val |
| || TREE_CODE (val) != INTEGER_CST |
| || integer_zerop (val)) |
| return false; |
| |
| val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val, |
| integer_one_node); |
| |
| /* Set the minimum size to zero since the string in |
| the array could have zero length. */ |
| pdata->minlen = ssize_int (0); |
| |
| tight_bound = true; |
| } |
| else if (TREE_CODE (arg) == COMPONENT_REF |
| && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1))) |
| == ARRAY_TYPE)) |
| { |
| /* Use the type of the member array to determine the upper |
| bound on the length of the array. This may be overly |
| optimistic if the array itself isn't NUL-terminated and |
| the caller relies on the subsequent member to contain |
| the NUL but that would only be considered valid if |
| the array were the last member of a struct. */ |
| |
| tree fld = TREE_OPERAND (arg, 1); |
| |
| tree optype = TREE_TYPE (fld); |
| |
| /* Determine the "innermost" array type. */ |
| while (TREE_CODE (optype) == ARRAY_TYPE |
| && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE) |
| optype = TREE_TYPE (optype); |
| |
| /* Fail when the array bound is unknown or zero. */ |
| val = TYPE_SIZE_UNIT (optype); |
| if (!val |
| || TREE_CODE (val) != INTEGER_CST |
| || integer_zerop (val)) |
| return false; |
| val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val, |
| integer_one_node); |
| |
| /* Set the minimum size to zero since the string in |
| the array could have zero length. */ |
| pdata->minlen = ssize_int (0); |
| |
| /* The array size determined above is an optimistic bound |
| on the length. If the array isn't nul-terminated the |
| length computed by the library function would be greater. |
| Even though using strlen to cross the subobject boundary |
| is undefined, avoid drawing conclusions from the member |
| type about the length here. */ |
| tight_bound = true; |
| } |
| else if (TREE_CODE (arg) == MEM_REF |
| && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE |
| && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE |
| && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR) |
| { |
| /* Handle a MEM_REF into a DECL accessing an array of integers, |
| being conservative about references to extern structures with |
| flexible array members that can be initialized to arbitrary |
| numbers of elements as an extension (static structs are okay). |
| FIXME: Make this less conservative -- see |
| component_ref_size in tree.c. */ |
| tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0); |
| if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref)) |
| && (decl_binds_to_current_def_p (ref) |
| || !array_at_struct_end_p (arg))) |
| { |
| /* Fail if the offset is out of bounds. Such accesses |
| should be diagnosed at some point. */ |
| val = DECL_SIZE_UNIT (ref); |
| if (!val |
| || TREE_CODE (val) != INTEGER_CST |
| || integer_zerop (val)) |
| return false; |
| |
| poly_offset_int psiz = wi::to_offset (val); |
| poly_offset_int poff = mem_ref_offset (arg); |
| if (known_le (psiz, poff)) |
| return false; |
| |
| pdata->minlen = ssize_int (0); |
| |
| /* Subtract the offset and one for the terminating nul. */ |
| psiz -= poff; |
| psiz -= 1; |
| val = wide_int_to_tree (TREE_TYPE (val), psiz); |
| /* Since VAL reflects the size of a declared object |
| rather than the type of the access, it is not a tight bound. */ |
| } |
| } |
| else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg)) |
| { |
| /* Avoid handling pointers to arrays. GCC might misuse |
| a pointer to an array of one bound to point to an array |
| object of a greater bound. */ |
| tree argtype = TREE_TYPE (arg); |
| if (TREE_CODE (argtype) == ARRAY_TYPE) |
| { |
| val = TYPE_SIZE_UNIT (argtype); |
| if (!val |
| || TREE_CODE (val) != INTEGER_CST |
| || integer_zerop (val)) |
| return false; |
| val = wide_int_to_tree (TREE_TYPE (val), |
| wi::sub (wi::to_wide (val), 1)); |
| |
| /* Set the minimum size to zero since the string in |
| the array could have zero length. */ |
| pdata->minlen = ssize_int (0); |
| } |
| } |
| maxbound = true; |
| } |
| |
| if (!val) |
| return false; |
| |
| /* Adjust the lower bound on the string length as necessary. */ |
| if (!pdata->minlen |
| || (rkind != SRK_STRLEN |
| && TREE_CODE (pdata->minlen) == INTEGER_CST |
| && TREE_CODE (val) == INTEGER_CST |
| && tree_int_cst_lt (val, pdata->minlen))) |
| pdata->minlen = val; |
| |
| if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST) |
| { |
| /* Adjust the tighter (more optimistic) string length bound |
| if necessary and proceed to adjust the more conservative |
| bound. */ |
| if (TREE_CODE (val) == INTEGER_CST) |
| { |
| if (tree_int_cst_lt (pdata->maxbound, val)) |
| pdata->maxbound = val; |
| } |
| else |
| pdata->maxbound = val; |
| } |
| else if (pdata->maxbound || maxbound) |
| /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or |
| VAL corresponds to the maximum length determined based |
| on the type of the object. */ |
| pdata->maxbound = val; |
| |
| if (tight_bound) |
| { |
| /* VAL computed above represents an optimistically tight bound |
| on the length of the string based on the referenced object's |
| or subobject's type. Determine the conservative upper bound |
| based on the enclosing object's size if possible. */ |
| if (rkind == SRK_LENRANGE) |
| { |
| poly_int64 offset; |
| tree base = get_addr_base_and_unit_offset (arg, &offset); |
| if (!base) |
| { |
| /* When the call above fails due to a non-constant offset |
| assume the offset is zero and use the size of the whole |
| enclosing object instead. */ |
| base = get_base_address (arg); |
| offset = 0; |
| } |
| /* If the base object is a pointer, no upper bound on the length |
| can be determined. Otherwise the maximum length is equal to |
| the size of the enclosing object minus the offset of |
| the referenced subobject minus 1 (for the terminating nul). */ |
| tree type = TREE_TYPE (base); |
| if (TREE_CODE (type) == POINTER_TYPE |
| || (TREE_CODE (base) != PARM_DECL && !VAR_P (base)) |
| || !(val = DECL_SIZE_UNIT (base))) |
| val = build_all_ones_cst (size_type_node); |
| else |
| { |
| val = DECL_SIZE_UNIT (base); |
| val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val, |
| size_int (offset + 1)); |
| } |
| } |
| else |
| return false; |
| } |
| |
| if (pdata->maxlen) |
| { |
| /* Adjust the more conservative bound if possible/necessary |
| and fail otherwise. */ |
| if (rkind != SRK_STRLEN) |
| { |
| if (TREE_CODE (pdata->maxlen) != INTEGER_CST |
| || TREE_CODE (val) != INTEGER_CST) |
| return false; |
| |
| if (tree_int_cst_lt (pdata->maxlen, val)) |
| pdata->maxlen = val; |
| return true; |
| } |
| else if (simple_cst_equal (val, pdata->maxlen) != 1) |
| { |
| /* Fail if the length of this ARG is different from that |
| previously determined from another ARG. */ |
| return false; |
| } |
| } |
| |
| pdata->maxlen = val; |
| return rkind == SRK_LENRANGE || !integer_all_onesp (val); |
| } |
| |
| /* For an ARG referencing one or more strings, try to obtain the range |
| of their lengths, or the size of the largest array ARG refers to if |
| the range of lengths cannot be determined, and store all in *PDATA. |
| For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine |
| the maximum constant value. |
| If ARG is an SSA_NAME, follow its use-def chains. When RKIND == |
| SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined |
| length or if we are unable to determine the length, return false. |
| VISITED is a bitmap of visited variables. |
| RKIND determines the kind of value or range to obtain (see |
| strlen_range_kind). |
| Set PDATA->DECL if ARG refers to an unterminated constant array. |
| On input, set ELTSIZE to 1 for normal single-byte character strings, |
| and either 2 or 4 for wide character strings (the size of wchar_t). |
| Return true if *PDATA was successfully populated and false otherwise. */ |
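| /* For example (a sketch): for an SSA name defined by the PHI |
| s_3 = PHI <&"foo", &"barbaz"> |
| the recursion below visits both arguments; with SRK_LENRANGE this |
| yields the length range [3, 6], while with SRK_STRLEN it fails |
| because the two constant lengths differ. */ |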
| |
| static bool |
| get_range_strlen (tree arg, bitmap *visited, |
| strlen_range_kind rkind, |
| c_strlen_data *pdata, unsigned eltsize) |
| { |
| |
| if (TREE_CODE (arg) != SSA_NAME) |
| return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize); |
| |
| /* If ARG is registered for SSA update we cannot look at its defining |
| statement. */ |
| if (name_registered_for_update_p (arg)) |
| return false; |
| |
| /* If we were already here, break the infinite cycle. */ |
| if (!*visited) |
| *visited = BITMAP_ALLOC (NULL); |
| if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg))) |
| return true; |
| |
| tree var = arg; |
| gimple *def_stmt = SSA_NAME_DEF_STMT (var); |
| |
| switch (gimple_code (def_stmt)) |
| { |
| case GIMPLE_ASSIGN: |
| /* The RHS of the statement defining VAR must either have a |
| constant length or come from another SSA_NAME with a constant |
| length. */ |
| if (gimple_assign_single_p (def_stmt) |
| || gimple_assign_unary_nop_p (def_stmt)) |
| { |
| tree rhs = gimple_assign_rhs1 (def_stmt); |
| return get_range_strlen (rhs, visited, rkind, pdata, eltsize); |
| } |
| else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR) |
| { |
| tree ops[2] = { gimple_assign_rhs2 (def_stmt), |
| gimple_assign_rhs3 (def_stmt) }; |
| |
| for (unsigned int i = 0; i < 2; i++) |
| if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize)) |
| { |
| if (rkind != SRK_LENRANGE) |
| return false; |
| /* Set the upper bound to the maximum to prevent |
| it from being adjusted in the next iteration but |
| leave MINLEN and the more conservative MAXBOUND |
| determined so far alone (or leave them null if |
| they haven't been set yet). That the MINLEN is |
| in fact zero can be determined from MAXLEN being |
| unbounded but the discovered minimum is used for |
| diagnostics. */ |
| pdata->maxlen = build_all_ones_cst (size_type_node); |
| } |
| return true; |
| } |
| return false; |
| |
| case GIMPLE_PHI: |
| /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node |
| must have a constant length. */ |
| for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++) |
| { |
| tree arg = gimple_phi_arg (def_stmt, i)->def; |
| |
| /* If this PHI has itself as an argument, we cannot |
| determine the string length of this argument. However, |
| if we can find a constant string length for the other |
| PHI args then we can still be sure that this is a |
| constant string length. So be optimistic and just |
| continue with the next argument. */ |
| if (arg == gimple_phi_result (def_stmt)) |
| continue; |
| |
| if (!get_range_strlen (arg, visited, rkind, pdata, eltsize)) |
| { |
| if (rkind != SRK_LENRANGE) |
| return false; |
| /* Set the upper bound to the maximum to prevent |
| it from being adjusted in the next iteration but |
| leave MINLEN and the more conservative MAXBOUND |
| determined so far alone (or leave them null if |
| they haven't been set yet). That the MINLEN is |
| in fact zero can be determined from MAXLEN being |
| unbounded but the discovered minimum is used for |
| diagnostics. */ |
| pdata->maxlen = build_all_ones_cst (size_type_node); |
| } |
| } |
| return true; |
| |
| default: |
| return false; |
| } |
| } |
| |
| /* Try to obtain the range of the lengths of the string(s) referenced |
| by ARG, or the size of the largest array ARG refers to if the range |
| of lengths cannot be determined, and store all in *PDATA which must |
| be zero-initialized on input except PDATA->MAXBOUND may be set to |
| a non-null tree node other than INTEGER_CST to request to have it |
| set to the length of the longest string in a PHI. ELTSIZE is |
| the expected size of the string element in bytes: 1 for char and |
| some power of 2 for wide characters. |
| Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable |
| for optimization. Returning false means that a nonzero PDATA->MINLEN |
| doesn't reflect the true lower bound of the range when PDATA->MAXLEN |
| is -1 (in that case, the actual range is indeterminate, i.e., |
| [0, PTRDIFF_MAX - 2]). */ |
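| /* A minimal usage sketch (ELTSIZE of 1 for plain char strings): |
| c_strlen_data data = { }; |
| if (get_range_strlen (arg, &data, 1)) |
| ... use data.minlen and data.maxlen as bounds ... |
| On failure DATA.MAXLEN is set to the all-ones (unbounded) value. */ |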
| |
| bool |
| get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize) |
| { |
| bitmap visited = NULL; |
| tree maxbound = pdata->maxbound; |
| |
| if (!get_range_strlen (arg, &visited, SRK_LENRANGE, pdata, eltsize)) |
| { |
| /* On failure extend the length range to an impossible maximum |
| (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other |
| members can stay unchanged regardless. */ |
| pdata->minlen = ssize_int (0); |
| pdata->maxlen = build_all_ones_cst (size_type_node); |
| } |
| else if (!pdata->minlen) |
| pdata->minlen = ssize_int (0); |
| |
| /* If it's unchanged from its initial non-null value, set the conservative |
| MAXBOUND to SIZE_MAX. Otherwise leave it null (if it is null). */ |
| if (maxbound && pdata->maxbound == maxbound) |
| pdata->maxbound = build_all_ones_cst (size_type_node); |
| |
| if (visited) |
| BITMAP_FREE (visited); |
| |
| return !integer_all_onesp (pdata->maxlen); |
| } |
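| |
| /* Illustrative sketch of what the wrapper above computes (assumed |
| values, not verbatim results): for a conditional string such as |
| |
| const char *s = b ? "ab" : "wxyz"; |
| |
| the PHI handling yields roughly PDATA->MINLEN == 2 and |
| PDATA->MAXLEN == 4, whereas an argument whose length cannot be |
| determined leaves MAXLEN at the all-ones "unbounded" value and makes |
| the wrapper return false. */ |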
| |
| /* Return the maximum value for ARG given RKIND (see strlen_range_kind). |
| For ARG of pointer types, NONSTR indicates if the caller is prepared |
| to handle unterminated strings. For integer ARG and when RKIND == |
| SRK_INT_VALUE, NONSTR must be null. |
| |
| If an unterminated array is discovered and our caller handles |
| unterminated arrays, then bubble up the offending DECL and |
| return the maximum size. Otherwise return NULL. */ |
| |
| static tree |
| get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL) |
| { |
| /* A non-null NONSTR is meaningless when determining the maximum |
| value of an integer ARG. */ |
| gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL); |
| /* ARG must have an integral type when RKIND says so. */ |
| gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg))); |
| |
| bitmap visited = NULL; |
| |
| /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN |
| is unbounded. */ |
| c_strlen_data lendata = { }; |
| if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */1)) |
| lendata.maxlen = NULL_TREE; |
| else if (lendata.maxlen && integer_all_onesp (lendata.maxlen)) |
| lendata.maxlen = NULL_TREE; |
| |
| if (visited) |
| BITMAP_FREE (visited); |
| |
| if (nonstr) |
| { |
| /* For callers prepared to handle unterminated arrays set |
| *NONSTR to point to the declaration of the array and return |
| the maximum length/size. */ |
| *nonstr = lendata.decl; |
| return lendata.maxlen; |
| } |
| |
| /* Fail if the constant array isn't nul-terminated. */ |
| return lendata.decl ? NULL_TREE : lendata.maxlen; |
| } |
| |
| |
| /* Fold a call to the strcpy builtin with arguments DEST and SRC at |
| the statement GSI points to. Return true if the call was simplified |
| and false if no simplification can be made. */ |
| |
| static bool |
| gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi, |
| tree dest, tree src) |
| { |
| gimple *stmt = gsi_stmt (*gsi); |
| location_t loc = gimple_location (stmt); |
| tree fn; |
| |
| /* If SRC and DEST are the same (and not volatile), return DEST. */ |
| if (operand_equal_p (src, dest, 0)) |
| { |
| /* Issue -Wrestrict unless the pointers are null (those do |
| not point to objects and so do not indicate an overlap; |
| such calls could be the result of sanitization and jump |
| threading). */ |
| if (!integer_zerop (dest) && !gimple_no_warning_p (stmt)) |
| { |
| tree func = gimple_call_fndecl (stmt); |
| |
| warning_at (loc, OPT_Wrestrict, |
| "%qD source argument is the same as destination", |
| func); |
| } |
| |
| replace_call_with_value (gsi, dest); |
| return true; |
| } |
| |
| if (optimize_function_for_size_p (cfun)) |
| return false; |
| |
| fn = builtin_decl_implicit (BUILT_IN_MEMCPY); |
| if (!fn) |
| return false; |
| |
| /* Set to non-null if SRC refers to an unterminated array. */ |
| tree nonstr = NULL; |
| tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr); |
| |
| if (nonstr) |
| { |
| /* Avoid folding calls with unterminated arrays. */ |
| if (!gimple_no_warning_p (stmt)) |
| warn_string_no_nul (loc, "strcpy", src, nonstr); |
| gimple_set_no_warning (stmt, true); |
| return false; |
| } |
| |
| if (!len) |
| return false; |
| |
| len = fold_convert_loc (loc, size_type_node, len); |
| len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1)); |
| len = force_gimple_operand_gsi (gsi, len, true, |
| NULL_TREE, true, GSI_SAME_STMT); |
| gimple *repl = gimple_build_call (fn, 3, dest, src, len); |
| replace_call_with_call_and_fold (gsi, repl); |
| return true; |
| } |
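| |
| /* A hedged sketch of the fold above (illustrative source forms, not |
| actual GIMPLE): with a constant nul-terminated source, |
| |
| strcpy (d, "hello"); |
| |
| is effectively rewritten as |
| |
| memcpy (d, "hello", 6); |
| |
| where 6 is strlen ("hello") + 1, while strcpy (d, d) folds to d |
| (with a -Wrestrict warning for non-null pointers). */ |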
| |
| /* Fold a call to the strncpy builtin with arguments DEST, SRC, and LEN. |
| Return true if the call was simplified and false if no simplification |
| can be made. */ |
| |
| static bool |
| gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi, |
| tree dest, tree src, tree len) |
| { |
| gimple *stmt = gsi_stmt (*gsi); |
| location_t loc = gimple_location (stmt); |
| bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE; |
| |
| /* If the LEN parameter is zero, return DEST. */ |
| if (integer_zerop (len)) |
| { |
| /* Avoid warning if the destination refers to an array/pointer |
| decorated with attribute nonstring. */ |
| if (!nonstring) |
| { |
| tree fndecl = gimple_call_fndecl (stmt); |
| |
| /* Warn about the lack of nul termination: the result is not |
| a (nul-terminated) string. */ |
| tree slen = get_maxval_strlen (src, SRK_STRLEN); |
| if (slen && !integer_zerop (slen)) |
| warning_at (loc, OPT_Wstringop_truncation, |
| "%G%qD destination unchanged after copying no bytes " |
| "from a string of length %E", |
| stmt, fndecl, slen); |
| else |
| warning_at (loc, OPT_Wstringop_truncation, |
| "%G%qD destination unchanged after copying no bytes", |
| stmt, fndecl); |
| } |
| |
| replace_call_with_value (gsi, dest); |
| return true; |
| } |
| |
| /* We can't compare slen with len as constants below if len is not a |
| constant. */ |
| if (TREE_CODE (len) != INTEGER_CST) |
| return false; |
| |
| /* Now, we must be passed a constant src ptr parameter. */ |
| tree slen = get_maxval_strlen (src, SRK_STRLEN); |
| if (!slen || TREE_CODE (slen) != INTEGER_CST) |
| return false; |
| |
| /* The size of the source string including the terminating nul. */ |
| tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1)); |
| |
| /* We do not support simplification of this case, though we do |
| support it when expanding trees into RTL. */ |
| /* FIXME: generate a call to __builtin_memset. */ |
| if (tree_int_cst_lt (ssize, len)) |
| return false; |
| |
| /* Diagnose truncation that leaves the copy unterminated. */ |
| maybe_diag_stxncpy_trunc (*gsi, src, len); |
| |
| /* OK, transform into builtin memcpy. */ |
| tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY); |
| if (!fn) |
| return false; |
| |
| len = fold_convert_loc (loc, size_type_node, len); |
| len = force_gimple_operand_gsi (gsi, len, true, |
| NULL_TREE, true, GSI_SAME_STMT); |
| gimple *repl = gimple_build_call (fn, 3, dest, src, len); |
| replace_call_with_call_and_fold (gsi, repl); |
| |
| return true; |
| } |
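| |
| /* Sketch of the strncpy fold above (illustrative only): when the bound |
| is a constant no larger than the source length plus the nul, e.g. |
| |
| strncpy (d, "abc", 3); |
| |
| the call becomes |
| |
| memcpy (d, "abc", 3); |
| |
| while strncpy (d, s, 0) folds to d, typically with a |
| -Wstringop-truncation warning unless the destination is declared |
| with attribute nonstring. */ |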
| |
| /* Fold function call to builtin strchr or strrchr. |
| If both arguments are constant, evaluate and fold the result, |
| otherwise simplify str(r)chr (str, 0) into str + strlen (str). |
| In general strlen is significantly faster than strchr |
| due to being a simpler operation. */ |
| static bool |
| gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr) |
| { |
| gimple *stmt = gsi_stmt (*gsi); |
| tree str = gimple_call_arg (stmt, 0); |
| tree c = gimple_call_arg (stmt, 1); |
| location_t loc = gimple_location (stmt); |
| const char *p; |
| char ch; |
| |
| if (!gimple_call_lhs (stmt)) |
| return false; |
| |
| /* Avoid folding if the first argument is not a nul-terminated array. |
| Defer warning until later. */ |
| if (!check_nul_terminated_array (NULL_TREE, str)) |
| return false; |
| |
| if ((p = c_getstr (str)) && target_char_cst_p (c, &ch)) |
| { |
| const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch); |
| |
| if (p1 == NULL) |
| { |
| replace_call_with_value (gsi, integer_zero_node); |
| return true; |
| } |
| |
| tree len = build_int_cst (size_type_node, p1 - p); |
| gimple_seq stmts = NULL; |
| gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt), |
| POINTER_PLUS_EXPR, str, len); |
| gimple_seq_add_stmt_without_update (&stmts, new_stmt); |
| gsi_replace_with_seq_vops (gsi, stmts); |
| return true; |
| } |
| |
| if (!integer_zerop (c)) |
| return false; |
| |
| /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */ |
| if (is_strrchr && optimize_function_for_size_p (cfun)) |
| { |
| tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR); |
| |
| if (strchr_fn) |
| { |
| gimple *repl = gimple_build_call (strchr_fn, 2, str, c); |
| replace_call_with_call_and_fold (gsi, repl); |
| return true; |
| } |
| |
| return false; |
| } |
| |
| tree len; |
| tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN); |
| |
| if (!strlen_fn) |
| return false; |
| |
| /* Create newstr = strlen (str). */ |
| gimple_seq stmts = NULL; |
| gimple *new_stmt = gimple_build_call (strlen_fn, 1, str); |
| gimple_set_location (new_stmt, loc); |
| len = create_tmp_reg_or_ssa_name (size_type_node); |
| gimple_call_set_lhs (new_stmt, len); |
| gimple_seq_add_stmt_without_update (&stmts, new_stmt); |
| |
| /* Create (str p+ strlen (str)). */ |
| new_stmt = gimple_build_assign (gimple_call_lhs (stmt), |
| POINTER_PLUS_EXPR, str, len); |
| gimple_seq_add_stmt_without_update (&stmts, new_stmt); |
| gsi_replace_with_seq_vops (gsi, stmts); |
| /* gsi now points at the assignment to the lhs, get a |
| stmt iterator to the strlen. |
| ??? We can't use gsi_for_stmt as that doesn't work when the |
| CFG isn't built yet. */ |
| gimple_stmt_iterator gsi2 = *gsi; |
| gsi_prev (&gsi2); |
| fold_stmt (&gsi2); |
| return true; |
| } |
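| |
| /* Illustrative examples of the str(r)chr folds above (assumed source |
| shapes, not verbatim GIMPLE): |
| |
| p = strchr ("hello", 'l'); => p = "hello" + 2; |
| p = strchr ("hello", 'z'); => p = 0; |
| p = strchr (s, 0); => t = strlen (s); p = s + t; |
| |
| and strrchr (s, 0) is turned into strchr (s, 0) when optimizing for |
| size. */ |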
| |
| /* Fold function call to builtin strstr. |
| If both arguments are constant, evaluate and fold the result, |
| additionally fold strstr (x, "") into x and strstr (x, "c") |
| into strchr (x, 'c'). */ |
| static bool |
| gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi) |
| { |
| gimple *stmt = gsi_stmt (*gsi); |
| if (!gimple_call_lhs (stmt)) |
| return false; |
| |
| tree haystack = gimple_call_arg (stmt, 0); |
| tree needle = gimple_call_arg (stmt, 1); |
| |
| /* Avoid folding if either argument is not a nul-terminated array. |
| Defer warning until later. */ |
| if (!check_nul_terminated_array (NULL_TREE, haystack) |
| || !check_nul_terminated_array (NULL_TREE, needle)) |
| return false; |
| |
| const char *q = c_getstr (needle); |
| if (q == NULL) |
| return false; |
| |
| if (const char *p = c_getstr (haystack)) |
| { |
| const char *r = strstr (p, q); |
| |
| if (r == NULL) |
| { |
| replace_call_with_value (gsi, integer_zero_node); |
| return true; |
| } |
| |
| tree len = build_int_cst (size_type_node, r - p); |
| gimple_seq stmts = NULL; |
| gimple *new_stmt |
| = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR, |
| haystack, len); |
| gimple_seq_add_stmt_without_update (&stmts, new_stmt); |
| gsi_replace_with_seq_vops (gsi, stmts); |
| return true; |
| } |
| |
| /* For strstr (x, "") return x. */ |
| if (q[0] == '\0') |
| { |
| replace_call_with_value (gsi, haystack); |
| return true; |
| } |
| |
| /* Transform strstr (x, "c") into strchr (x, 'c'). */ |
| if (q[1] == '\0') |
| { |
| tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR); |
| if (strchr_fn) |
| { |
| tree c = build_int_cst (integer_type_node, q[0]); |
| gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c); |
| replace_call_with_call_and_fold (gsi, repl); |
| return true; |
| } |
| } |
| |
| return false; |
| } |
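| |
| /* Illustrative examples of the strstr folds above (assumed shapes): |
| |
| p = strstr ("hayneedle", "need"); => p = "hayneedle" + 3; |
| p = strstr (x, ""); => p = x; |
| p = strstr (x, "c"); => p = strchr (x, 'c'); */ |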
| |
| /* Simplify a call to the strcat builtin. DST and SRC are the arguments |
| to the call. |
| |
| Return false if no simplification was possible. Otherwise replace |
| the call with its destination when SRC is the empty string, or with |
| an equivalent strlen/memcpy sequence when the source length is known, |
| and return true. */ |
| |
| static bool |
| gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src) |
| { |
| gimple *stmt = gsi_stmt (*gsi); |
| location_t loc = gimple_location (stmt); |
| |
| const char *p = c_getstr (src); |
| |
| /* If the string length is zero, return the dst parameter. */ |
| if (p && *p == '\0') |
| { |
| replace_call_with_value (gsi, dst); |
| return true; |
| } |
| |
| if (!optimize_bb_for_speed_p (gimple_bb (stmt))) |
| return false; |
| |
| /* See if we can store by pieces into (dst + strlen(dst)). */ |
| tree newdst; |
| tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN); |
| tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY); |
| |
| if (!strlen_fn || !memcpy_fn) |
| return false; |
| |
| /* If the length of the source string isn't computable don't |
| split strcat into strlen and memcpy. */ |
| tree len = get_maxval_strlen (src, SRK_STRLEN); |
| if (! len) |
| return false; |
| |
| /* Create strlen (dst). */ |
| gimple_seq stmts = NULL, stmts2; |
| gimple *repl = gimple_build_call (strlen_fn, 1, dst); |
| gimple_set_location (repl, loc); |
| newdst = create_tmp_reg_or_ssa_name (size_type_node); |
| gimple_call_set_lhs (repl, newdst); |
| gimple_seq_add_stmt_without_update (&stmts, repl); |
| |
| /* Create (dst p+ strlen (dst)). */ |
| newdst = fold_build_pointer_plus_loc (loc, dst, newdst); |
| newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE); |
| gimple_seq_add_seq_without_update (&stmts, stmts2); |
| |
| len = fold_convert_loc (loc, size_type_node, len); |
| len = size_binop_loc (loc, PLUS_EXPR, len, |
| build_int_cst (size_type_node, 1)); |
| len = force_gimple_operand (len, &stmts2, true, NULL_TREE); |
| gimple_seq_add_seq_without_update (&stmts, stmts2); |
| |
| repl = gimple_build_call (memcpy_fn, 3, newdst, src, len); |
| gimple_seq_add_stmt_without_update (&stmts, repl); |
| if (gimple_call_lhs (stmt)) |
| { |
| repl = gimple_build_assign (gimple_call_lhs (stmt), dst); |
| gimple_seq_add_stmt_without_update (&stmts, repl); |
| gsi_replace_with_seq_vops (gsi, stmts); |
| /* gsi now points at the assignment to the lhs, get a |
| stmt iterator to the memcpy call. |
| ??? We can't use gsi_for_stmt as that doesn't work when the |
| CFG isn't built yet. */ |
| gimple_stmt_iterator gsi2 = *gsi; |
| gsi_prev (&gsi2); |
| fold_stmt (&gsi2); |
| } |
| else |
| { |
| gsi_replace_with_seq_vops (gsi, stmts); |
| fold_stmt (gsi); |
| } |
| return true; |
| } |
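| |
| /* A hedged sketch of the strcat fold above: with a source of known |
| length, and when optimizing the block for speed, |
| |
| strcat (d, "ab"); |
| |
| is split into roughly |
| |
| t = strlen (d); |
| memcpy (d + t, "ab", 3); |
| |
| while strcat (d, "") simply folds to d. */ |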
| |
| /* Fold a call to the __strcat_chk builtin. DEST, SRC, and SIZE |
| are the arguments to the call. */ |
| |
| static bool |
| gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi) |
| { |
| gimple *stmt = gsi_stmt (*gsi); |
| tree dest = gimple_call_arg (stmt, 0); |
| tree src = gimple_call_arg (stmt, 1); |
| tree size = gimple_call_arg (stmt, 2); |
| tree fn; |
| const char *p; |
| |
| |
| p = c_getstr (src); |
| /* If the SRC parameter is "", return DEST. */ |
| if (p && *p == '\0') |
| { |
| replace_call_with_value (gsi, dest); |
| return true; |
| } |
| |
| if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size)) |
| return false; |
| |
| /* If __builtin_strcat_chk is used, assume strcat is available. */ |
| fn = builtin_decl_explicit (BUILT_IN_STRCAT); |
| if (!fn) |
| return false; |
| |
| gimple *repl = gimple_build_call (fn, 2, dest, src); |
| replace_call_with_call_and_fold (gsi, repl); |
| return true; |
| } |
| |
| /* Simplify a call to the strncat builtin. */ |
| |
| static bool |
| gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi) |
| { |
| gimple *stmt = gsi_stmt (*gsi); |
| tree dst = gimple_call_arg (stmt, 0); |
| tree src = gimple_call_arg (stmt, 1); |
| tree len = gimple_call_arg (stmt, 2); |
| |
| const char *p = c_getstr (src); |
| |
| /* If the requested length is zero, or the src parameter string |
| length is zero, return the dst parameter. */ |
| if (integer_zerop (len) || (p && *p == '\0')) |
| { |
| replace_call_with_value (gsi, dst); |
| return true; |
| } |
| |
| if (TREE_CODE (len) != INTEGER_CST || !p) |
| return false; |
| |
| unsigned srclen = strlen (p); |
| |
| int cmpsrc = compare_tree_int (len, srclen); |
| |
| /* Return early if the requested len is less than the string length. |
| Warnings will be issued elsewhere later. */ |
| if (cmpsrc < 0) |
| return false; |
| |
| unsigned HOST_WIDE_INT dstsize; |
| |
| bool nowarn = gimple_no_warning_p (stmt); |
| |
| if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize)) |
| { |
| int cmpdst = compare_tree_int (len, dstsize); |
| |
| if (cmpdst >= 0) |
| { |
| tree fndecl = gimple_call_fndecl (stmt); |
| |
| /* Strncat copies (at most) LEN bytes and always appends |
| the terminating NUL so the specified bound should never |
| be equal to (or greater than) the size of the destination. |
| If it is, the copy could overflow. */ |
| location_t loc = gimple_location (stmt); |
| nowarn = warning_at (loc, OPT_Wstringop_overflow_, |
| cmpdst == 0 |
| ? G_("%G%qD specified bound %E equals " |
| "destination size") |
| : G_("%G%qD specified bound %E exceeds " |
| "destination size %wu"), |
| stmt, fndecl, len, dstsize); |
| if (nowarn) |
| gimple_set_no_warning (stmt, true); |
| } |
| } |
| |
| if (!nowarn && cmpsrc == 0) |
| { |
| tree fndecl = gimple_call_fndecl (stmt); |
| location_t loc = gimple_location (stmt); |
| |
| /* To avoid possible overflow the specified bound should also |
| not be equal to the length of the source, even when the size |
| of the destination is unknown (it's not an uncommon mistake |
| to specify as the bound the length of the source). */ |
| if (warning_at (loc, OPT_Wstringop_overflow_, |
| "%G%qD specified bound %E equals source length", |
| stmt, fndecl, len)) |
| gimple_set_no_warning (stmt, true); |
| } |
| |
| tree fn = builtin_decl_implicit (BUILT_IN_STRCAT); |
| |
| /* If the replacement _DECL isn't initialized, don't do the |
| transformation. */ |
| if (!fn) |
| return false; |
| |
| /* Otherwise, emit a call to strcat. */ |
| gcall *repl = gimple_build_call (fn, 2, dst, src); |
| replace_call_with_call_and_fold (gsi, repl); |
| return true; |
| } |
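| |
| /* Sketch of the strncat fold above (illustrative): when the bound is a |
| constant at least as large as the constant source length, e.g. |
| |
| strncat (d, "abc", 8); |
| |
| the call is emitted as |
| |
| strcat (d, "abc"); |
| |
| after the -Wstringop-overflow checks against the destination size and |
| the source length have been applied. */ |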
| |
| /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC, |
| LEN, and SIZE. */ |
| |
| static bool |
| gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi) |
| { |
| gimple *stmt = gsi_stmt (*gsi); |
| tree dest = gimple_call_arg (stmt, 0); |
| tree src = gimple_call_arg (stmt, 1); |
| tree len = gimple_call_arg (stmt, 2); |
| tree size = gimple_call_arg (stmt, 3); |
| tree fn; |
| const char *p; |
| |
| p = c_getstr (src); |
| /* If the SRC parameter is "" or if LEN is 0, return DEST. */ |
| if ((p && *p == '\0') |
| || integer_zerop (len)) |
| { |
| replace_call_with_value (gsi, dest); |
| return true; |
| } |
| |
| if (! tree_fits_uhwi_p (size)) |
| return false; |
| |
| if (! integer_all_onesp (size)) |
| { |
| tree src_len = c_strlen (src, 1); |
| if (src_len |
| && tree_fits_uhwi_p (src_len) |
| && tree_fits_uhwi_p (len) |
| && ! tree_int_cst_lt (len, src_len)) |
| { |
| /* If LEN >= strlen (SRC), optimize into __strcat_chk. */ |
| fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK); |
| if (!fn) |
| return false; |
| |
| gimple *repl = gimple_build_call (fn, 3, dest, src, size); |
| replace_call_with_call_and_fold (gsi, repl); |
| return true; |
| } |
| return false; |
| } |
| |
| /* If __builtin_strncat_chk is used, assume strncat is available. */ |
| fn = builtin_decl_explicit (BUILT_IN_STRNCAT); |
| if (!fn) |
| return false; |
| |
| gimple *repl = gimple_build_call (fn, 3, dest, src, len); |
| replace_call_with_call_and_fold (gsi, repl); |
| return true; |
| } |
| |
| /* Build and append to STMTS the gimple statements that load the first |
| character of the memory location identified by STR. LOC is the |
| location of the statement. */ |
| |
| static tree |
| gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts) |
| { |
| tree var; |
| |
| tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0); |
| tree cst_uchar_ptr_node |
| = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true); |
| tree off0 = build_int_cst (cst_uchar_ptr_node, 0); |
| |
| tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0); |
| gassign *stmt = gimple_build_assign (NULL_TREE, temp); |
| var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt); |
| |
| gimple_assign_set_lhs (stmt, var); |
| gimple_seq_add_stmt_without_update (stmts, stmt); |
| |
| return var; |
| } |
| |
| /* Fold a call to the str{n}{case}cmp builtin pointed to by the GSI |
| iterator. */ |
| |
| static bool |
| gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi) |
| { |
| gimple *stmt = gsi_stmt (*gsi); |
| tree callee = gimple_call_fndecl (stmt); |
| enum built_in_function fcode = DECL_FUNCTION_CODE (callee); |
| |
| tree type = integer_type_node; |
| tree str1 = gimple_call_arg (stmt, 0); |
| tree str2 = gimple_call_arg (stmt, 1); |
| tree lhs = gimple_call_lhs (stmt); |
| |
| tree bound_node = NULL_TREE; |
| unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U; |
| |
| /* Handle strncmp and strncasecmp functions. */ |
| if (gimple_call_num_args (stmt) == 3) |
| { |
| bound_node = gimple_call_arg (stmt, 2); |
| if (tree_fits_uhwi_p (bound_node)) |
| bound = tree_to_uhwi (bound_node); |
| } |
| |
| /* If the BOUND parameter is zero, return zero. */ |
| if (bound == 0) |
| { |
| replace_call_with_value (gsi, integer_zero_node); |
| return true; |
| } |
| |
| /* If ARG1 and ARG2 are the same (and not volatile), return zero. */ |
| if (operand_equal_p (str1, str2, 0)) |
| { |
| replace_call_with_value (gsi, integer_zero_node); |
| return true; |
| } |
| |
| /* Initially set to the number of characters, including the terminating |
| nul if each array has one. LENx == strnlen (Sx, LENx) implies that |
| the array Sx is not terminated by a nul. |
| For nul-terminated strings the lengths are then adjusted so that |
| LENx == NULPOSx holds. */ |
| unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1; |
| const char *p1 = c_getstr (str1, &len1); |
| const char *p2 = c_getstr (str2, &len2); |
| |
| /* The position of the terminating nul character if one exists, otherwise |
| a value greater than LENx. */ |
| unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1; |
| |
| if (p1) |
| { |
| size_t n = strnlen (p1, len1); |
| if (n < len1) |
| len1 = nulpos1 = n; |
| } |
| |
| if (p2) |
| { |
| size_t n = strnlen (p2, len2); |
| if (n < len2) |
| len2 = nulpos2 = n; |
| } |
| |
| /* For known strings, return an immediate value. */ |
| if (p1 && p2) |
| { |
| int r = 0; |
| bool known_result = false; |
| |
| switch (fcode) |
| { |
| case BUILT_IN_STRCMP: |
| case BUILT_IN_STRCMP_EQ: |
| if (len1 != nulpos1 || len2 != nulpos2) |
| break; |
| |
| r = strcmp (p1, p2); |
| known_result = true; |
| break; |
| |
| case BUILT_IN_STRNCMP: |
| case BUILT_IN_STRNCMP_EQ: |
| { |
| if (bound == HOST_WIDE_INT_M1U) |
| break; |
| |
| /* Reduce the bound to be no more than the length |
| of the shorter of the two strings, or the sizes |
| of the unterminated arrays. */ |
| unsigned HOST_WIDE_INT n = bound; |
| |
| if (len1 == nulpos1 && len1 < n) |
| n = len1 + 1; |
| if (len2 == nulpos2 && len2 < n) |
| n = len2 + 1; |
| |
| if (MIN (nulpos1, nulpos2) + 1 < n) |
| break; |
| |
| r = strncmp (p1, p2, n); |
| known_result = true; |
| break; |
| } |
| /* The only situation we can handle is when the strings are equal |
| (result 0), which is already handled by the operand_equal_p case |
| above. */ |
| case BUILT_IN_STRCASECMP: |
| break; |
| case BUILT_IN_STRNCASECMP: |
| { |
| if (bound == HOST_WIDE_INT_M1U) |
| break; |
| r = strncmp (p1, p2, bound); |
| if (r == 0) |
| known_result = true; |
| break; |
| } |
| default: |
| gcc_unreachable (); |
| } |
| |
| if (known_result) |
| { |
| replace_call_with_value (gsi, build_cmp_result (type, r)); |
| return true; |
| } |
| } |
| |
| bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U) |
| || fcode == BUILT_IN_STRCMP |
| || fcode == BUILT_IN_STRCMP_EQ |
| || fcode == BUILT_IN_STRCASECMP; |
| |
| location_t loc = gimple_location (stmt); |
| |
| /* If the second arg is "", return *(const unsigned char*)arg1. */ |
| if (p2 && *p2 == '\0' && nonzero_bound) |
| { |
| gimple_seq stmts = NULL; |
| tree var = gimple_load_first_char (loc, str1, &stmts); |
| if (lhs) |
| { |
| stmt = gimple_build_assign (lhs, NOP_EXPR, var); |
| gimple_seq_add_stmt_without_update (&stmts, stmt); |
| } |
| |
| gsi_replace_with_seq_vops (gsi, stmts); |
| return true; |
| } |
| |
| /* If the first arg is "", return -*(const unsigned char*)arg2. */ |
| if (p1 && *p1 == '\0' && nonzero_bound) |
| { |
| gimple_seq stmts = NULL; |
| tree var = gimple_load_first_char (loc, str2, &stmts); |
| |
| if (lhs) |
| { |
| tree c = create_tmp_reg_or_ssa_name (integer_type_node); |
| stmt = gimple_build_assign (c, NOP_EXPR, var); |
| gimple_seq_add_stmt_without_update (&stmts, stmt); |
| |
| stmt = gimple_build_assign (lhs, NEGATE_EXPR, c); |
| gimple_seq_add_stmt_without_update (&stmts, stmt); |
| } |
| |
| gsi_replace_with_seq_vops (gsi, stmts); |
| return true; |
| } |
| |
| /* If BOUND is one, return an expression corresponding to |
| (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */ |
| if (fcode == BUILT_IN_STRNCMP && bound == 1) |
| { |
| gimple_seq stmts = NULL; |
| tree temp1 = gimple_load_first_char (loc, str1, &stmts); |
| tree temp2 = gimple_load_first_char (loc, str2, &stmts); |
| |
| if (lhs) |
| { |
| tree c1 = create_tmp_reg_or_ssa_name (integer_type_node); |
| gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1); |
| gimple_seq_add_stmt_without_update (&stmts, convert1); |
| |
| tree c2 = create_tmp_reg_or_ssa_name (integer_type_node); |
| gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2); |
| gimple_seq_add_stmt_without_update (&stmts, convert2); |
| |
| stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2); |
| gimple_seq_add_stmt_without_update (&stmts, stmt); |
| } |
| |
| gsi_replace_with_seq_vops (gsi, stmts); |
| return true; |
| } |
| |
| /* If BOUND is greater than the length of one constant string, |
| and the other argument is also a nul-terminated string, replace |
| strncmp with strcmp. */ |
| if (fcode == BUILT_IN_STRNCMP |
| && bound > 0 && bound < HOST_WIDE_INT_M1U |
| && ((p2 && len2 < bound && len2 == nulpos2) |
| || (p1 && len1 < bound && len1 == nulpos1))) |
| { |
| tree fn = builtin_decl_implicit (BUILT_IN_STRCMP); |
| if (!fn) |
| return false; |
| gimple *repl = gimple_build_call (fn, 2, str1, str2); |
| replace_call_with_call_and_fold (gsi, repl); |
| return true; |
| } |
| |
| return false; |
| } |
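| |
| /* Illustrative examples of the string comparison folds above (assumed |
| result shapes): |
| |
| strcmp ("abc", "abc") => 0 |
| strncmp (s, "", n) => (int) *(const unsigned char *) s |
| strncmp (s1, s2, 1) => (int) *(const unsigned char *) s1 |
| - (int) *(const unsigned char *) s2 |
| strncmp (s, "abc", 8) => strcmp (s, "abc") |
| |
| where the last form requires the constant argument to be |
| nul-terminated and shorter than the bound. */ |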
| |
| /* Fold a call to the memchr builtin pointed to by the GSI iterator. */ |
| |
| static bool |
| gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi) |
| { |
| gimple *stmt = gsi_stmt (*gsi); |
| tree lhs = gimple_call_lhs (stmt); |
| tree arg1 = gimple_call_arg (stmt, 0); |
| tree arg2 = gimple_call_arg (stmt, 1); |
| tree len = gimple_call_arg (stmt, 2); |
| |
| /* If the LEN parameter is zero, return zero. */ |
| if (integer_zerop (len)) |
| { |
| replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0)); |
| return true; |
| } |
| |
| char c; |
| if (TREE_CODE (arg2) != INTEGER_CST |
| || !tree_fits_uhwi_p (len) |
| || !target_char_cst_p (arg2, &c)) |
| return false; |
| |
| unsigned HOST_WIDE_INT length = tree_to_uhwi (len); |
| unsigned HOST_WIDE_INT string_length; |
| const char *p1 = c_getstr (arg1, &string_length); |
| |
| if (p1) |
| { |
| const char *r = (const char *)memchr (p1, c, MIN (length, string_length)); |
| if (r == NULL) |
| { |
| tree mem_size, offset_node; |
| string_constant (arg1, &offset_node, &mem_size, NULL); |
| unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE) |
| ? 0 : tree_to_uhwi (offset_node); |
| /* MEM_SIZE is the size of the array the string literal |
| is stored in. */ |
| unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset; |
| gcc_checking_assert (string_length <= string_size); |
| if (length <= string_size) |
| { |
| replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0)); |
| return true; |
| } |
| } |
| else |
| { |
| unsigned HOST_WIDE_INT offset = r - p1; |
| gimple_seq stmts = NULL; |
| if (lhs != NULL_TREE) |
| { |
| tree offset_cst = build_int_cst (TREE_TYPE (len), offset); |
| gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR, |
| arg1, offset_cst); |
| gimple_seq_add_stmt_without_update (&stmts, stmt); |
| } |
| else |
| gimple_seq_add_stmt_without_update (&stmts, |
| gimple_build_nop ()); |
| |
| gsi_replace_with_seq_vops (gsi, stmts); |
| return true; |
| } |
| } |
| |
| return false; |
| } |
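| |
| /* Illustrative examples of the memchr fold above (assumed shapes): |
| |
| p = memchr ("aXbc", 'b', 4); => p = "aXbc" + 2; |
| p = memchr ("aXbc", 'q', 4); => p = 0; |
| p = memchr (s, c, 0); => p = 0; |
| |
| the not-found case is only folded when the bound does not exceed the |
| size of the underlying array. */ |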
| |
| /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments |
| to the call. UNLOCKED is true if this is actually a call to |
| fputs_unlocked. Return true if the call was simplified and false |
| if no simplification was possible. */ |
| |
| static bool |
| gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi, |
| tree arg0, tree arg1, |
| bool unlocked) |
| { |
| gimple *stmt = gsi_stmt (*gsi); |
| |
| /* If we're using an unlocked function, assume the other unlocked |
| functions exist explicitly. */ |
| tree const fn_fputc = (unlocked |
| ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED) |
| : builtin_decl_implicit (BUILT_IN_FPUTC)); |
| tree const fn_fwrite = (unlocked |
| ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED) |
| : builtin_decl_implicit (BUILT_IN_FWRITE)); |
| |
| /* If the return value is used, don't do the transformation. */ |
| if (gimple_call_lhs (stmt)) |
| return false; |
| |
| /* Get the length of the string passed to fputs. If the length |
| can't be determined, punt. */ |
| tree len = get_maxval_strlen (arg0, SRK_STRLEN); |
| if (!len |
| || TREE_CODE (len) != INTEGER_CST) |
| return false; |
| |
| switch (compare_tree_int (len, 1)) |
| { |
| case -1: /* length is 0, delete the call entirely. */ |
| replace_call_with_value (gsi, integer_zero_node); |
| return true; |
| |
| case 0: /* length is 1, call fputc. */ |
| { |
| const char *p = c_getstr (arg0); |
| if (p != NULL) |
| { |
| if (!fn_fputc) |
| return false; |
| |
| gimple *repl = gimple_build_call (fn_fputc, 2, |
| build_int_cst |
| (integer_type_node, p[0]), arg1); |
| replace_call_with_call_and_fold (gsi, repl); |
| return true; |
| } |
| } |
| /* FALLTHROUGH */ |
| case 1: /* length is greater than 1, call fwrite. */ |
| { |
| /* If optimizing for size keep fputs. */ |
| if (optimize_function_for_size_p (cfun)) |
| return false; |
| /* New argument list transforming fputs(string, stream) to |
| fwrite(string, 1, len, stream). */ |
| if (!fn_fwrite) |
| return false; |
| |
| gimple *repl = gimple_build_call (fn_fwrite, 4, arg0, |
| size_one_node, len, arg1); |
| replace_call_with_call_and_fold (gsi, repl); |
| return true; |
| } |
| default: |
| gcc_unreachable (); |
| } |
| return false; |
| } |
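| |
| /* Sketch of the fputs fold above (illustrative): depending on the known |
| length of the string argument, |
| |
| fputs ("", f); => (call removed) |
| fputs ("a", f); => fputc ('a', f); |
| fputs ("abc", f); => fwrite ("abc", 1, 3, f); |
| |
| with the fwrite form skipped when optimizing for size and no fold at |
| all when the call's return value is used. */ |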
| |
| /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin. |
| DEST, SRC, LEN, and SIZE are the arguments to the call. |
| FCODE is the BUILT_IN_* code of the builtin. Return true if |
| the call was simplified and false otherwise. */ |
| |
| static bool |
| gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi, |
| tree dest, tree src, tree len, tree size, |
| enum built_in_function fcode) |
| { |
| gimple *stmt = gsi_stmt (*gsi); |
| location_t loc = gimple_location (stmt); |
| bool ignore = gimple_call_lhs (stmt) == NULL_TREE; |
| tree fn; |
| |
| /* If SRC and DEST are the same (and not volatile), return DEST |
| (resp. DEST+LEN for __mempcpy_chk). */ |
| if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0)) |
| { |
| if (fcode != BUILT_IN_MEMPCPY_CHK) |
| { |
| replace_call_with_value (gsi, dest); |
| return true; |
| } |
| else |
| { |
| gimple_seq stmts = NULL; |
| len = gimple_convert_to_ptrofftype (&stmts, loc, len); |
| tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR, |
| TREE_TYPE (dest), dest, len); |
| gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT); |
| replace_call_with_value (gsi, temp); |
| return true; |
| } |
| } |
| |
| if (! tree_fits_uhwi_p (size)) |
| return false; |
| |
| tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE); |
| if (! integer_all_onesp (size)) |
| { |
| if (! tree_fits_uhwi_p (len)) |
| { |
| /* If LEN is not constant, try MAXLEN too. |
| For MAXLEN only allow optimizing into non-_ocs function |
| if SIZE is >= MAXLEN, never convert to __ocs_fail (). */ |
| if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen)) |
| { |
| if (fcode == BUILT_IN_MEMPCPY_CHK && ignore) |
| { |
| /* (void) __mempcpy_chk () can be optimized into |
| (void) __memcpy_chk (). */ |
| fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK); |
| if (!fn) |
| return false; |
| |
| gimple *repl = gimple_build_call (fn, 4, dest, src, len, size); |
| replace_call_with_call_and_fold (gsi, repl); |
| return true; |
| } |
| return false; |
| } |
| } |
| else |
| maxlen = len; |
| |
| if (tree_int_cst_lt (size, maxlen)) |
| return false; |
| } |
| |
| fn = NULL_TREE; |
| /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume |
| mem{cpy,pcpy,move,set} is available. */ |
| switch (fcode) |
| { |
| case BUILT_IN_MEMCPY_CHK: |
| fn = builtin_decl_explicit (BUILT_IN_MEMCPY); |
| break; |
| case BUILT_IN_MEMPCPY_CHK: |
| fn = builtin_decl_explicit (BUILT_IN_MEMPCPY); |
| break; |
| case BUILT_IN_MEMMOVE_CHK: |
| fn = builtin_decl_explicit (BUILT_IN_MEMMOVE); |
| break; |
| case BUILT_IN_MEMSET_CHK: |
| fn = builtin_decl_explicit (BUILT_IN_MEMSET); |
| break; |
| default: |
| break; |
| } |
| |
| if (!fn) |
| return false; |
| |
| gimple *repl = gimple_build_call (fn, 3, dest, src, len); |
| replace_call_with_call_and_fold (gsi, repl); |
| return true; |
| } |
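| |
| /* A hedged sketch of the _chk fold above: when the object size is |
| unknown (all ones) or provably at least the length, e.g. |
| |
| __builtin___memcpy_chk (d, s, n, -1); |
| |
| the checking variant is replaced by the plain call |
| |
| memcpy (d, s, n); |
| |
| and __mempcpy_chk with identical source and destination folds to |
| d + n. */ |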
| |
| /* Fold a call to the __st[rp]cpy_chk builtin. |
| DEST, SRC, and SIZE are the arguments to the call. |
| FCODE is the BUILT_IN_* code of the builtin. Return true if |
| the call was simplified and false otherwise. */ |
| |
| static bool |
| gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi, |
| tree dest, |
| tree src, tree size, |
| enum built_in_function fcode) |
| { |
| gimple *stmt = gsi_stmt (*gsi); |
| location_t loc = gimple_location (stmt); |
| bool ignore = gimple_call_lhs (stmt) == NULL_TREE; |
| tree len, fn; |
| |
| /* If SRC and DEST are the same (and not volatile), return DEST. */ |
| if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0)) |
| { |
| /* Issue -Wrestrict unless the pointers are null (those do |
| not point to objects and so do not indicate an overlap; |
| such calls could be the result of sanitization and jump |
| threading). */ |
| if (!integer_zerop (dest) && !gimple_no_warning_p (stmt)) |
| { |
| tree func = gimple_call_fndecl (stmt); |
| |
| warning_at (loc, OPT_Wrestrict, |
| "%qD source argument is the same as destination", |
| func); |
| } |
| |
| replace_call_with_value (gsi, dest); |
| return true; |
| } |
| |
| if (! tree_fits_uhwi_p (size)) |
| return false; |
| |
| tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX); |
| if (! integer_all_onesp (size)) |
| { |
| len = c_strlen (src, 1); |
| if (! len || ! tree_fits_uhwi_p (len)) |
| { |
| /* If LEN is not constant, try MAXLEN too. |
| For MAXLEN only allow optimizing into non-_ocs function |
| if SIZE is >= MAXLEN, never convert to __ocs_fail (). */ |
| if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen)) |
| { |
| if (fcode == BUILT_IN_STPCPY_CHK) |
| { |
| if (! ignore) |
| return false; |
| |
| /* If return value of __stpcpy_chk is ignored, |
| optimize into __strcpy_chk. */ |
| fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK); |
| if (!fn) |
| return false; |
| |
| gimple *repl = gimple_build_call (fn, 3, dest, src, size); |
| replace_call_with_call_and_fold (gsi, repl); |
| return true; |
| } |
| |
| if (! len || TREE_SIDE_EFFECTS (len)) |
| return false; |
| |
| /* If c_strlen returned something, but not a constant, |
| transform __strcpy_chk into __memcpy_chk. */ |
| fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK); |
| if (!fn) |
| return false; |
| |
| gimple_seq stmts = NULL; |
| len = force_gimple_operand (len, &stmts, true, NULL_TREE); |
| len = gimple_convert (&stmts, loc, size_type_node, len); |
| len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len, |
| build_int_cst (size_type_node, 1)); |
| gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT); |
| gimple *repl = gimple_build_call (fn, 4, dest, src, len, size); |
| replace_call_with_call_and_fold (gsi, repl); |
| return true; |
| } |
| } |
| else |
| maxlen = len; |
| |
| if (! tree_int_cst_lt (maxlen, size)) |
| return false; |
| } |
| |
| /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */ |
| fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK |
| ? BUILT_IN_STPCPY : BUILT_IN_STRCPY); |
| if (!fn) |
| return false; |
| |
| gimple *repl = gimple_build_call (fn, 2, dest, src); |
| replace_call_with_call_and_fold (gsi, repl); |
| return true; |
| } |
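| |
| /* Illustrative sketch of the __st{r,p}cpy_chk fold above: when the |
| source length is known to fit, e.g. |
| |
| __builtin___strcpy_chk (d, "abc", 16); |
| |
| the call becomes |
| |
| strcpy (d, "abc"); |
| |
| while a known but non-constant length can instead yield a |
| __memcpy_chk of length + 1. */ |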
| |
| /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and |
| SIZE are the arguments to the call. FCODE is the BUILT_IN_* code of |
| the builtin. Return true if the call was simplified and false |
| otherwise. */ |
| |
| static bool |
| gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi, |
| tree dest, tree src, |
| tree len, tree size, |
| enum built_in_function fcode) |
| { |
| gimple *stmt = gsi_stmt (*gsi); |
| bool ignore = gimple_call_lhs (stmt) == NULL_TREE; |
| tree fn; |
| |
| if (fcode == BUILT_IN_STPNCPY_CHK && ignore) |
| { |
| /* If return value of __stpncpy_chk is ignored, |
| optimize into __strncpy_chk. */ |
| fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK); |
| if (fn) |
| { |
| gimple *repl = gimple_build_call (fn, 4, dest, src, len, size); |
| replace_call_with_call_and_fold (gsi, repl); |
| return true; |
| } |
| } |
| |
| if (! tree_fits_uhwi_p (size)) |
| return false; |
| |
| tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE); |
| if (! integer_all_onesp (size)) |
| { |
| if (! tree_fits_uhwi_p (len)) |
| { |
| /* If LEN is not constant, try MAXLEN too. |
| For MAXLEN only allow optimizing into non-_ocs function |
| if SIZE is >= MAXLEN, never convert to __ocs_fail (). */ |
| if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen)) |
| return false; |
| } |
| else |
| maxlen = len; |
| |
| if (tree_int_cst_lt (size, maxlen)) |
| return false; |
| } |
| |
| /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */ |
| fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK |
| ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY); |
| if (!fn) |
| return false; |
| |
| gimple *repl = gimple_build_call (fn, 3, dest, src, len); |
| replace_call_with_call_and_fold (gsi, repl); |
| return true; |
| } |
| |
| /* Fold a call to the stpcpy builtin with arguments DEST and SRC. |
| Return true if the call was simplified and false if no |
| simplification can be made. */ |
| |
| static bool |
| gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi) |
| { |
| gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi)); |
| location_t loc = gimple_location (stmt); |
| tree dest = gimple_call_arg (stmt, 0); |
| tree src = gimple_call_arg (stmt, 1); |
| tree fn, lenp1; |
| |
| /* If the result is unused, replace stpcpy with strcpy. */ |
| if (gimple_call_lhs (stmt) == NULL_TREE) |
| { |
| tree fn = builtin_decl_implicit (BUILT_IN_STRCPY); |
| if (!fn) |
| return false; |
| gimple_call_set_fndecl (stmt, fn); |
| fold_stmt (gsi); |
| return true; |
| } |
| |
| /* DATA.DECL is set to non-null if SRC refers to an unterminated array. */ |
| c_strlen_data data = { }; |
| tree len = c_strlen (src, 1, &data, 1); |
| if (!len |
| || TREE_CODE (len) != INTEGER_CST) |
| { |
| data.decl = unterminated_array (src); |
| if (!data.decl) |
| return false; |
| } |
| |
| if (data.decl) |
| { |
| /* Avoid folding calls with unterminated arrays. */ |
| if (!gimple_no_warning_p (stmt)) |
| warn_string_no_nul (loc, "stpcpy", src, data.decl); |
| gimple_set_no_warning (stmt, true); |
| return false; |
| } |
| |
| if (optimize_function_for_size_p (cfun) |
| /* If length is zero it's small enough. */ |
| && !integer_zerop (len)) |
| return false; |
| |
| /* If the source has a known length replace stpcpy with memcpy. */ |
| fn = builtin_decl_implicit (BUILT_IN_MEMCPY); |
| if (!fn) |
| return false; |
| |
| gimple_seq stmts = NULL; |
| tree tem = gimple_convert (&stmts, loc, size_type_node, len); |
| lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, |
| tem, build_int_cst (size_type_node, 1)); |
| gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT); |
| gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1); |
| gimple_move_vops (repl, stmt); |
| gsi_insert_before (gsi, repl, GSI_SAME_STMT); |
| /* Replace the result with dest + len. */ |
| stmts = NULL; |
| tem = gimple_convert (&stmts, loc, sizetype, len); |
| gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT); |
| gassign *ret = gimple_build_assign (gimple_call_lhs (stmt), |
| POINTER_PLUS_EXPR, dest, tem); |
| gsi_replace (gsi, ret, false); |
| /* Finally fold the memcpy call. */ |
| gimple_stmt_iterator gsi2 = *gsi; |
| gsi_prev (&gsi2); |
| fold_stmt (&gsi2); |
| return true; |
| } |
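| |
| /* A hedged sketch of the stpcpy fold above: with a known source length, |
| |
| p = stpcpy (d, "abc"); |
| |
| is effectively rewritten as |
| |
| memcpy (d, "abc", 4); |
| p = d + 3; |
| |
| and a call whose result is unused becomes a plain strcpy. */ |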
| |
| /* Fold a call to the __{,v}snprintf_chk builtin at the statement GSI |
| points to. FCODE is either BUILT_IN_SNPRINTF_CHK or |
| BUILT_IN_VSNPRINTF_CHK. Return true if the call was simplified and |
| false if a normal call should be emitted instead. */ |
| |
| static bool |
| gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi, |
| enum built_in_function fcode) |
| { |
| gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi)); |
| tree dest, size, len, fn, fmt, flag; |
| const char *fmt_str; |
| |
| /* Verify the required arguments in the original call. */ |
| if (gimple_call_num_args (stmt) < 5) |
| return false; |
| |
| dest = gimple_call_arg (stmt, 0); |
| len = gimple_call_arg (stmt, 1); |
| flag = gimple_call_arg (stmt, 2); |
| size = gimple_call_arg (stmt, 3); |
| fmt = gimple_call_arg (stmt, 4); |
| |
| if (! tree_fits_uhwi_p (size)) |
| return false; |
| |
| if (! integer_all_onesp (size)) |
| { |
| tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE); |
| if (! tree_fits_uhwi_p (len)) |
| { |
| /* If LEN is not constant, try MAXLEN too. |
| For MAXLEN only allow optimizing into non-_ocs function |
| if SIZE is >= MAXLEN, never convert to __ocs_fail (). */ |
| if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen)) |
| return false; |
| } |
| else |
| maxlen = len; |
| |
| if (tree_int_cst_lt (size, maxlen)) |
| return false; |
| } |
| |
| if (!init_target_chars ()) |
| return false; |
|