| /* Tree lowering pass. This pass converts the GENERIC functions-as-trees |
| tree representation into the GIMPLE form. |
| Copyright (C) 2002-2021 Free Software Foundation, Inc. |
| Major work done by Sebastian Pop <s.pop@laposte.net>, |
| Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>. |
| |
| This file is part of GCC. |
| |
| GCC is free software; you can redistribute it and/or modify it under |
| the terms of the GNU General Public License as published by the Free |
| Software Foundation; either version 3, or (at your option) any later |
| version. |
| |
| GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
| WARRANTY; without even the implied warranty of MERCHANTABILITY or |
| FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
| for more details. |
| |
| You should have received a copy of the GNU General Public License |
| along with GCC; see the file COPYING3. If not see |
| <http://www.gnu.org/licenses/>. */ |
| |
| #include "config.h" |
| #include "system.h" |
| #include "coretypes.h" |
| #include "backend.h" |
| #include "target.h" |
| #include "rtl.h" |
| #include "tree.h" |
| #include "memmodel.h" |
| #include "tm_p.h" |
| #include "gimple.h" |
| #include "gimple-predict.h" |
| #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */ |
| #include "ssa.h" |
| #include "cgraph.h" |
| #include "tree-pretty-print.h" |
| #include "diagnostic-core.h" |
| #include "alias.h" |
| #include "fold-const.h" |
| #include "calls.h" |
| #include "varasm.h" |
| #include "stmt.h" |
| #include "expr.h" |
| #include "gimple-fold.h" |
| #include "tree-eh.h" |
| #include "gimplify.h" |
| #include "gimple-iterator.h" |
| #include "stor-layout.h" |
| #include "print-tree.h" |
| #include "tree-iterator.h" |
| #include "tree-inline.h" |
| #include "langhooks.h" |
| #include "tree-cfg.h" |
| #include "tree-ssa.h" |
| #include "omp-general.h" |
| #include "omp-low.h" |
| #include "gimple-low.h" |
| #include "gomp-constants.h" |
| #include "splay-tree.h" |
| #include "gimple-walk.h" |
| #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */ |
| #include "builtins.h" |
| #include "stringpool.h" |
| #include "attribs.h" |
| #include "asan.h" |
| #include "dbgcnt.h" |
| #include "omp-offload.h" |
| #include "context.h" |
| #include "tree-nested.h" |
| |
/* Hash set of poisoned variables in a bind expr.  NULL when no
   poisoning is being tracked.  */
static hash_set<tree> *asan_poisoned_variables = NULL;
| |
/* Per-variable data-sharing flags recorded while gimplifying OpenMP/OpenACC
   regions.  The low bits classify the sharing of a variable; the high bits
   are modifier flags for particular classes.  Each value must be a distinct
   bit (or a mask built from them) so they can be OR-ed together.  */

enum gimplify_omp_var_data
{
  GOVD_SEEN = 0x000001,
  GOVD_EXPLICIT = 0x000002,
  GOVD_SHARED = 0x000004,
  GOVD_PRIVATE = 0x000008,
  GOVD_FIRSTPRIVATE = 0x000010,
  GOVD_LASTPRIVATE = 0x000020,
  GOVD_REDUCTION = 0x000040,
  GOVD_LOCAL = 0x000080,
  GOVD_MAP = 0x000100,
  GOVD_DEBUG_PRIVATE = 0x000200,
  GOVD_PRIVATE_OUTER_REF = 0x000400,
  GOVD_LINEAR = 0x000800,
  GOVD_ALIGNED = 0x001000,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 0x002000,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,

  GOVD_MAP_0LEN_ARRAY = 0x008000,

  /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 0x010000,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 0x020000,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 0x040000,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 0x080000,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 0x100000,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 0x200000,

  GOVD_NONTEMPORAL = 0x400000,

  /* Flag for GOVD_LASTPRIVATE: conditional modifier.  */
  GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,

  GOVD_CONDTEMP = 0x1000000,

  /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause.  */
  GOVD_REDUCTION_INSCAN = 0x2000000,

  /* Flag for GOVD_MAP: (struct) vars that have pointer attachments for
     fields.  Must not share a bit with any other flag: the previous
     value 8388608 (0x800000) collided with GOVD_LASTPRIVATE_CONDITIONAL.  */
  GOVD_MAP_HAS_ATTACHMENTS = 0x4000000,

  /* Mask selecting the data-sharing classification bits.  */
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
| |
| |
/* Kind of the OpenMP/OpenACC region currently being gimplified.  The
   values are bit-encoded: each construct family has its own base bit,
   and low bits encode variants of that family (e.g. "combined").  */

enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  /* ORT_PARALLEL with the low "combined" variant bit set.  */
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,
  ORT_IMPLICIT_TARGET = ORT_TARGET | 2,

  /* OpenACC variants.  */
  ORT_ACC = 0x100,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,  /* Kernels construct.  */
  ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4,  /* Serial construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};
| |
/* Gimplify hashtable helper: hash/equality traits over elt_t entries
   (the formal-temporary table of lookup_tmp_var).  Entries are freed
   with free () by free_ptr_hash.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};
| |
/* State for one gimplification context; contexts are kept on a stack
   linked through prev_context (see push/pop_gimplify_context).  */

struct gimplify_ctx
{
  /* Enclosing context, or NULL for the outermost one.  */
  struct gimplify_ctx *prev_context;

  /* Stack of GIMPLE_BIND statements currently open.  */
  vec<gbind *> bind_expr_stack;
  /* Chain of temporaries created in this context.  */
  tree temps;
  /* Cleanups seen while inside a conditional context; flushed when the
     last condition is popped (see gimple_pop_condition).  */
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  /* CASE_LABEL_EXPRs collected for the innermost switch.  */
  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  /* Nesting depth of COND_EXPRs; > 0 means conditional context.  */
  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};
| |
/* Variable categories for the OpenMP defaultmap clause; used as indices
   into gimplify_omp_ctx::defaultmap.  */

enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};
| |
/* Per-region state while gimplifying an OpenMP/OpenACC construct.
   Regions nest through outer_context.  */

struct gimplify_omp_ctx
{
  /* Enclosing OMP region, or NULL.  */
  struct gimplify_omp_ctx *outer_context;
  /* Map from DECL to gimplify_omp_var_data flags, keyed by DECL_UID.  */
  splay_tree variables;
  hash_set<tree> *privatized_types;
  tree clauses;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  enum tree_code code;
  bool combined_loop;
  bool distribute;
  bool target_firstprivatize_array_bases;
  bool add_safelen1;
  bool order_concurrent;
  bool has_depend;
  bool in_for_exprs;
  /* defaultmap flags, indexed by gimplify_defaultmap_kind.  */
  int defaultmap[4];
};
| |
/* Innermost gimplification context, or NULL when none is pushed.  */
static struct gimplify_ctx *gimplify_ctxp;
/* Innermost OMP region context, or NULL outside OMP constructs.  */
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
static bool in_omp_construct;

/* Forward declarations.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);
| |
/* Shorter alias name for gimple_seq_add_stmt_without_update, for use in
   gimplify.c only.  Appends statement GS to *SEQ_P without scanning its
   operands (def/use info does not exist yet during gimplification).  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}
| |
| /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is |
| NULL, a new sequence is allocated. This function is |
| similar to gimple_seq_add_seq, but does not scan the operands. |
| During gimplification, we need to manipulate statement sequences |
| before the def/use vectors have been constructed. */ |
| |
| static void |
| gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src) |
| { |
| gimple_stmt_iterator si; |
| |
| if (src == NULL) |
| return; |
| |
| si = gsi_last (*dst_p); |
| gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT); |
| } |
| |
| |
| /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing |
| and popping gimplify contexts. */ |
| |
| static struct gimplify_ctx *ctx_pool = NULL; |
| |
| /* Return a gimplify context struct from the pool. */ |
| |
| static inline struct gimplify_ctx * |
| ctx_alloc (void) |
| { |
| struct gimplify_ctx * c = ctx_pool; |
| |
| if (c) |
| ctx_pool = c->prev_context; |
| else |
| c = XNEW (struct gimplify_ctx); |
| |
| memset (c, '\0', sizeof (*c)); |
| return c; |
| } |
| |
/* Put gimplify context C back onto the head of the free list for reuse
   by ctx_alloc.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}
| |
| /* Free allocated ctx stack memory. */ |
| |
| void |
| free_gimplify_stack (void) |
| { |
| struct gimplify_ctx *c; |
| |
| while ((c = ctx_pool)) |
| { |
| ctx_pool = c->prev_context; |
| free (c); |
| } |
| } |
| |
| |
| /* Set up a context for the gimplifier. */ |
| |
| void |
| push_gimplify_context (bool in_ssa, bool rhs_cond_ok) |
| { |
| struct gimplify_ctx *c = ctx_alloc (); |
| |
| c->prev_context = gimplify_ctxp; |
| gimplify_ctxp = c; |
| gimplify_ctxp->into_ssa = in_ssa; |
| gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok; |
| } |
| |
| /* Tear down a context for the gimplifier. If BODY is non-null, then |
| put the temporaries into the outer BIND_EXPR. Otherwise, put them |
| in the local_decls. |
| |
| BODY is not a sequence, but the first tuple in a sequence. */ |
| |
| void |
| pop_gimplify_context (gimple *body) |
| { |
| struct gimplify_ctx *c = gimplify_ctxp; |
| |
| gcc_assert (c |
| && (!c->bind_expr_stack.exists () |
| || c->bind_expr_stack.is_empty ())); |
| c->bind_expr_stack.release (); |
| gimplify_ctxp = c->prev_context; |
| |
| if (body) |
| declare_vars (c->temps, body, false); |
| else |
| record_vars (c->temps); |
| |
| delete c->temp_htab; |
| c->temp_htab = NULL; |
| ctx_free (c); |
| } |
| |
/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  /* Reserve a small initial capacity; most functions nest shallowly.  */
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the first element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the first element of the stack of bindings, i.e. the innermost
   currently open GIMPLE_BIND.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}
| |
/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  /* On entry to the outermost condition no conditional cleanups may be
     pending; they are flushed by gimple_pop_condition.  */
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}
| |
| /* Note that we've left a COND_EXPR. If we're back at unconditional scope |
| now, add any conditional cleanups we've seen to the prequeue. */ |
| |
| static void |
| gimple_pop_condition (gimple_seq *pre_p) |
| { |
| int conds = --(gimplify_ctxp->conditions); |
| |
| gcc_assert (conds >= 0); |
| if (conds == 0) |
| { |
| gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups); |
| gimplify_ctxp->conditional_cleanups = NULL; |
| } |
| } |
| |
| /* A stable comparison routine for use with splay trees and DECLs. */ |
| |
| static int |
| splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb) |
| { |
| tree a = (tree) xa; |
| tree b = (tree) xb; |
| |
| return DECL_UID (a) - DECL_UID (b); |
| } |
| |
| /* Create a new omp construct that deals with variable remapping. */ |
| |
| static struct gimplify_omp_ctx * |
| new_omp_context (enum omp_region_type region_type) |
| { |
| struct gimplify_omp_ctx *c; |
| |
| c = XCNEW (struct gimplify_omp_ctx); |
| c->outer_context = gimplify_omp_ctxp; |
| c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0); |
| c->privatized_types = new hash_set<tree>; |
| c->location = input_location; |
| c->region_type = region_type; |
| if ((region_type & ORT_TASK) == 0) |
| c->default_kind = OMP_CLAUSE_DEFAULT_SHARED; |
| else |
| c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED; |
| c->defaultmap[GDMK_SCALAR] = GOVD_MAP; |
| c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP; |
| c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP; |
| c->defaultmap[GDMK_POINTER] = GOVD_MAP; |
| |
| return c; |
| } |
| |
| /* Destroy an omp construct that deals with variable remapping. */ |
| |
| static void |
| delete_omp_context (struct gimplify_omp_ctx *c) |
| { |
| splay_tree_delete (c->variables); |
| delete c->privatized_types; |
| c->loop_iter_var.release (); |
| XDELETE (c); |
| } |
| |
/* Forward declarations for the OMP variable tracking helpers defined
   later in this file.  */
static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
| |
/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}
| |
| /* Gimplify statement T into sequence *SEQ_P, and return the first |
| tuple in the sequence of generated tuples for this statement. |
| Return NULL if gimplifying T produced no tuples. */ |
| |
| static gimple * |
| gimplify_and_return_first (tree t, gimple_seq *seq_p) |
| { |
| gimple_stmt_iterator last = gsi_last (*seq_p); |
| |
| gimplify_and_add (t, seq_p); |
| |
| if (!gsi_end_p (last)) |
| { |
| gsi_next (&last); |
| return gsi_stmt (last); |
| } |
| else |
| return gimple_seq_first_stmt (*seq_p); |
| } |
| |
| /* Returns true iff T is a valid RHS for an assignment to an un-renamed |
| LHS, or for a call argument. */ |
| |
| static bool |
| is_gimple_mem_rhs (tree t) |
| { |
| /* If we're dealing with a renamable type, either source or dest must be |
| a renamed variable. */ |
| if (is_gimple_reg_type (TREE_TYPE (t))) |
| return is_gimple_val (t); |
| else |
| return is_gimple_val (t) || is_gimple_lvalue (t); |
| } |
| |
| /* Return true if T is a CALL_EXPR or an expression that can be |
| assigned to a temporary. Note that this predicate should only be |
| used during gimplification. See the rationale for this in |
| gimplify_modify_expr. */ |
| |
| static bool |
| is_gimple_reg_rhs_or_call (tree t) |
| { |
| return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS |
| || TREE_CODE (t) == CALL_EXPR); |
| } |
| |
| /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that |
| this predicate should only be used during gimplification. See the |
| rationale for this in gimplify_modify_expr. */ |
| |
| static bool |
| is_gimple_mem_rhs_or_call (tree t) |
| { |
| /* If we're dealing with a renamable type, either source or dest must be |
| a renamed variable. */ |
| if (is_gimple_reg_type (TREE_TYPE (t))) |
| return is_gimple_val (t); |
| else |
| return (is_gimple_val (t) |
| || is_gimple_lvalue (t) |
| || TREE_CLOBBER_P (t) |
| || TREE_CODE (t) == CALL_EXPR); |
| } |
| |
| /* Create a temporary with a name derived from VAL. Subroutine of |
| lookup_tmp_var; nobody else should call this function. */ |
| |
| static inline tree |
| create_tmp_from_val (tree val) |
| { |
| /* Drop all qualifiers and address-space information from the value type. */ |
| tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val)); |
| tree var = create_tmp_var (type, get_name (val)); |
| return var; |
| } |
| |
| /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse |
| an existing expression temporary. */ |
| |
| static tree |
| lookup_tmp_var (tree val, bool is_formal) |
| { |
| tree ret; |
| |
| /* If not optimizing, never really reuse a temporary. local-alloc |
| won't allocate any variable that is used in more than one basic |
| block, which means it will go into memory, causing much extra |
| work in reload and final and poorer code generation, outweighing |
| the extra memory allocation here. */ |
| if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val)) |
| ret = create_tmp_from_val (val); |
| else |
| { |
| elt_t elt, *elt_p; |
| elt_t **slot; |
| |
| elt.val = val; |
| if (!gimplify_ctxp->temp_htab) |
| gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000); |
| slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT); |
| if (*slot == NULL) |
| { |
| elt_p = XNEW (elt_t); |
| elt_p->val = val; |
| elt_p->temp = ret = create_tmp_from_val (val); |
| *slot = elt_p; |
| } |
| else |
| { |
| elt_p = *slot; |
| ret = elt_p->temp; |
| } |
| } |
| |
| return ret; |
| } |
| |
/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  Gimplify
   VAL into a simple rvalue, create a temporary T for it (an SSA name
   when ALLOW_SSA and the context gimplifies into SSA form and the type
   is register-like, otherwise a VAR_DECL via lookup_tmp_var), emit
   T = VAL into *PRE_P, and return T.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  /* Not in SSA form yet: give the SSA name a debug-friendly
	     identifier derived from VAL, if one is available.  */
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  /* MOD was fully lowered into *PRE_P above, so the tree node itself can
     be returned to the GC immediately.  */
  ggc_free (mod);

  return t;
}
| |
/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
   change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true);
}
| |
/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  Unlike get_formal_tmp_var, the temporary is
   never reused for another expression.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p,
			 gimple_seq *post_p /* = NULL */,
			 bool allow_ssa /* = true */)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
}
| |
/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  VARS is a DECL_CHAIN
   list built in reverse order of declaration; GS must be a GIMPLE_BIND.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      /* After nreverse, TEMPS is the chain in declaration order and
	 LAST is its final element.  */
      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  /* Prepend TEMPS to the bind's existing variables.  */
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}
| |
| /* For VAR a VAR_DECL of variable size, try to find a constant upper bound |
| for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if |
| no such upper bound can be obtained. */ |
| |
| static void |
| force_constant_size (tree var) |
| { |
| /* The only attempt we make is by querying the maximum size of objects |
| of the variable's type. */ |
| |
| HOST_WIDE_INT max_size; |
| |
| gcc_assert (VAR_P (var)); |
| |
| max_size = max_int_size_in_bytes (TREE_TYPE (var)); |
| |
| gcc_assert (max_size >= 0); |
| |
| DECL_SIZE_UNIT (var) |
| = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size); |
| DECL_SIZE (var) |
| = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT); |
| } |
| |
/* Push the temporary variable TMP into function FN's local declarations.
   TMP must not be chained anywhere or already seen in a bind expr.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}
| |
/* Push the temporary variable TMP into the current binding.  Depending
   on context this records TMP in the gimplify context's temps chain (and
   notifies any enclosing OMP region), in the current function's
   local_decls, or directly in the body of a nested function.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  int flag = GOVD_LOCAL | GOVD_SEEN;
	  /* Walk outward past region kinds that don't own privatized
	     variables themselves.  */
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_TASKGROUP
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (tmp)
		  && !TREE_STATIC (tmp))
		{
		  /* Addressable non-static temporaries in a SIMD region
		     are privatized instead of made local.  */
		  if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
		    ctx->add_safelen1 = true;
		  else if (ctx->in_for_exprs)
		    flag = GOVD_PRIVATE;
		  else
		    flag = GOVD_PRIVATE | GOVD_SEEN;
		  break;
		}
	      ctx = ctx->outer_context;
	    }
	  if (ctx)
	    omp_add_variable (ctx, tmp, flag);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}
| |
| |
| |
| /* This page contains routines to unshare tree nodes, i.e. to duplicate tree |
| nodes that are referenced more than once in GENERIC functions. This is |
| necessary because gimplification (translation into GIMPLE) is performed |
by modifying tree nodes in-place, so gimplification of a shared node in a
| first context could generate an invalid GIMPLE form in a second context. |
| |
| This is achieved with a simple mark/copy/unmark algorithm that walks the |
| GENERIC representation top-down, marks nodes with TREE_VISITED the first |
| time it encounters them, duplicates them if they already have TREE_VISITED |
| set, and finally removes the TREE_VISITED marks it has set. |
| |
| The algorithm works only at the function level, i.e. it generates a GENERIC |
| representation of a function with no nodes shared within the function when |
| passed a GENERIC function (except for nodes that are allowed to be shared). |
| |
| At the global level, it is also necessary to unshare tree nodes that are |
| referenced in more than one function, for the same aforementioned reason. |
| This requires some cooperation from the front-end. There are 2 strategies: |
| |
| 1. Manual unsharing. The front-end needs to call unshare_expr on every |
| expression that might end up being shared across functions. |
| |
| 2. Deep unsharing. This is an extension of regular unsharing. Instead |
| of calling unshare_expr on expressions that might be shared across |
| functions, the front-end pre-marks them with TREE_VISITED. This will |
| ensure that they are unshared on the first reference within functions |
| when the regular unsharing algorithm runs. The counterpart is that |
| this algorithm must look deeper than for manual unsharing, which is |
| specified by LANG_HOOKS_DEEP_UNSHARING. |
| |
| If there are only few specific cases of node sharing across functions, it is |
| probably easier for a front-end to unshare the expressions manually. On the |
| contrary, if the expressions generated at the global level are as widespread |
| as expressions generated within functions, deep unsharing is very likely the |
| way to go. */ |
| |
/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      /* hash_set::add returns true if T was already present; only the
	 first visit descends into the subtrees.  */
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}
| |
/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      /* mostly_copy_tree_r copies the whole subtree, so there is nothing
	 left for this walk to descend into.  */
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}
| |
/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}
| |
/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  /* The sizes of the RESULT_DECL may contain shared trees too.  */
  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  /* Recurse into any functions nested inside FNDECL.  */
  if (cgn)
    for (cgn = first_nested_function (cgn); cgn;
	 cgn = next_nested_function (cgn))
      unshare_body (cgn->decl);
}
| |
| /* Callback for walk_tree to unmark the visited trees rooted at *TP. |
| Subtrees are walked until the first unvisited node is encountered. */ |
| |
| static tree |
| unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED) |
| { |
| tree t = *tp; |
| |
| /* If this node has been visited, unmark it and keep looking. */ |
| if (TREE_VISITED (t)) |
| TREE_VISITED (t) = 0; |
| |
| /* Otherwise, don't look any deeper. */ |
| else |
| *walk_subtrees = 0; |
| |
| return NULL_TREE; |
| } |
| |
/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}
| |
/* Likewise, but mark all trees as not visited: clear TREE_VISITED on the
   body (and result sizes) of FNDECL and of any nested functions.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = first_nested_function (cgn);
	 cgn; cgn = next_nested_function (cgn))
      unvisit_body (cgn->decl);
}
| |
/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}
| |
| /* Worker for unshare_expr_without_location. */ |
| |
| static tree |
| prune_expr_location (tree *tp, int *walk_subtrees, void *) |
| { |
| if (EXPR_P (*tp)) |
| SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION); |
| else |
| *walk_subtrees = 0; |
| return NULL_TREE; |
| } |
| |
/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}
| |
/* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
   one, OR_ELSE otherwise.  The location of a STATEMENT_LISTs
   comprising at least one DEBUG_BEGIN_STMT followed by exactly one
   EXPR is the location of the EXPR.  */

static location_t
rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
{
  if (!expr)
    return or_else;

  if (EXPR_HAS_LOCATION (expr))
    return EXPR_LOCATION (expr);

  /* Only a STATEMENT_LIST can carry a "recursive" location; anything
     else without a direct location gets the fallback.  */
  if (TREE_CODE (expr) != STATEMENT_LIST)
    return or_else;

  tree_stmt_iterator i = tsi_start (expr);

  /* Skip over the leading DEBUG_BEGIN_STMTs, requiring at least one.  */
  bool found = false;
  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    {
      found = true;
      tsi_next (&i);
    }

  /* Exactly one statement must remain after the debug markers.  */
  if (!found || !tsi_one_before_end_p (i))
    return or_else;

  /* That remaining statement may itself be a qualifying list.  */
  return rexpr_location (tsi_stmt (i), or_else);
}
| |
/* Return TRUE iff EXPR (maybe recursively) has a location; see
   rexpr_location for the potential recursion.  */

static inline bool
rexpr_has_location (tree expr)
{
  return rexpr_location (expr) != UNKNOWN_LOCATION;
}
| |
| |
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.

   TEMP, if non-NULL, is an INIT_EXPR/MODIFY_EXPR whose RHS is WRAPPER;
   the assignment is pushed down to the innermost value-producing
   statement instead of creating a new temporary.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  Each wrapper visited is marked
	 as having side effects and voided on the way down.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		/* The value of a statement list is its last statement.  */
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  /* No assignment supplied: capture the value in a fresh
	     temporary initialized from the innermost statement.  */
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
| |
| /* Prepare calls to builtins to SAVE and RESTORE the stack as well as |
| a temporary through which they communicate. */ |
| |
| static void |
| build_stack_save_restore (gcall **save, gcall **restore) |
| { |
| tree tmp_var; |
| |
| *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0); |
| tmp_var = create_tmp_var (ptr_type_node, "saved_stack"); |
| gimple_call_set_lhs (*save, tmp_var); |
| |
| *restore |
| = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE), |
| 1, tmp_var); |
| } |
| |
/* Generate IFN_ASAN_MARK call expression that poisons the shadow memory
   of DECL.  Returns NULL_TREE for zero-sized variables, which need no
   poisoning.  */

static tree
build_asan_poison_call_expr (tree decl)
{
  /* Do not poison variables that have size equal to zero.  */
  tree unit_size = DECL_SIZE_UNIT (decl);
  if (zerop (unit_size))
    return NULL_TREE;

  tree base = build_fold_addr_expr (decl);

  /* IFN_ASAN_MARK (POISON, &decl, size_in_bytes).  */
  return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
				       void_type_node, 3,
				       build_int_cst (integer_type_node,
						      ASAN_MARK_POISON),
				       base, unit_size);
}
| |
/* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
   on POISON flag, shadow memory of a DECL variable.  The call will be
   put on location identified by IT iterator, where BEFORE flag drives
   position where the stmt will be put.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
		      bool before)
{
  tree unit_size = DECL_SIZE_UNIT (decl);
  tree base = build_fold_addr_expr (decl);

  /* Do not poison variables that have size equal to zero.  */
  if (zerop (unit_size))
    return;

  /* It's necessary to have all stack variables aligned to ASAN granularity
     bytes.  Bump DECL's alignment up to the sanitizer's granule size so
     its shadow bytes describe it exactly.  */
  gcc_assert (!hwasan_sanitize_p () || hwasan_sanitize_stack_p ());
  unsigned shadow_granularity
    = hwasan_sanitize_p () ? HWASAN_TAG_GRANULE_SIZE : ASAN_SHADOW_GRANULARITY;
  if (DECL_ALIGN_UNIT (decl) <= shadow_granularity)
    SET_DECL_ALIGN (decl, BITS_PER_UNIT * shadow_granularity);

  HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;

  /* IFN_ASAN_MARK (flags, &decl, size_in_bytes).  */
  gimple *g
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node, flags),
				  base, unit_size);

  if (before)
    gsi_insert_before (it, g, GSI_NEW_STMT);
  else
    gsi_insert_after (it, g, GSI_NEW_STMT);
}
| |
| /* Generate IFN_ASAN_MARK internal call that depending on POISON flag |
| either poisons or unpoisons a DECL. Created statement is appended |
| to SEQ_P gimple sequence. */ |
| |
| static void |
| asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p) |
| { |
| gimple_stmt_iterator it = gsi_last (*seq_p); |
| bool before = false; |
| |
| if (gsi_end_p (it)) |
| before = true; |
| |
| asan_poison_variable (decl, poison, &it, before); |
| } |
| |
| /* Sort pair of VAR_DECLs A and B by DECL_UID. */ |
| |
| static int |
| sort_by_decl_uid (const void *a, const void *b) |
| { |
| const tree *t1 = (const tree *)a; |
| const tree *t2 = (const tree *)b; |
| |
| int uid1 = DECL_UID (*t1); |
| int uid2 = DECL_UID (*t2); |
| |
| if (uid1 < uid2) |
| return -1; |
| else if (uid1 > uid2) |
| return 1; |
| else |
| return 0; |
| } |
| |
/* Generate IFN_ASAN_MARK internal call for all VARIABLES
   depending on POISON flag.  Created statement is appended
   to SEQ_P gimple sequence.  */

static void
asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
{
  unsigned c = variables->elements ();
  if (c == 0)
    return;

  /* Hash-set iteration order is not stable; sort by DECL_UID so the
     emitted IFN_ASAN_MARK calls are deterministic.  */
  auto_vec<tree> sorted_variables (c);

  for (hash_set<tree>::iterator it = variables->begin ();
       it != variables->end (); ++it)
    sorted_variables.safe_push (*it);

  sorted_variables.qsort (sort_by_decl_uid);

  unsigned i;
  tree var;
  FOR_EACH_VEC_ELT (sorted_variables, i, var)
    {
      asan_poison_variable (var, poison, seq_p);

      /* Add use_after_scope_memory attribute for the variable in order
	 to prevent re-written into SSA.  */
      if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
			     DECL_ATTRIBUTES (var)))
	DECL_ATTRIBUTES (var)
	  = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
		       integer_one_node,
		       DECL_ATTRIBUTES (var));
    }
}
| |
/* Gimplify a BIND_EXPR.  Just voidify and recurse.

   Lowers *EXPR_P (a BIND_EXPR) into a GIMPLE_BIND appended to PRE_P:
   marks the bound variables, gimplifies the body, and wraps it in a
   GIMPLE_TRY_FINALLY when cleanups (stack restore, variable clobbers,
   ASan poisoning, OpenACC declare-returns) are needed.  Returns GS_OK
   when the BIND_EXPR produced a value (now held in a temporary left in
   *EXPR_P), GS_ALL_DONE otherwise.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  /* If the BIND_EXPR has a value, capture it in a temporary.  */
  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
	    {
	      if (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL)
		{
		  int flag = GOVD_LOCAL;
		  if (ctx->region_type == ORT_SIMD
		      && TREE_ADDRESSABLE (t)
		      && !TREE_STATIC (t))
		    {
		      /* Addressable VLA-sized locals in a SIMD region
			 force safelen(1); fixed-size ones can simply be
			 privatized.  */
		      if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
			ctx->add_safelen1 = true;
		      else
			flag = GOVD_PRIVATE;
		    }
		  omp_add_variable (ctx, t, flag | GOVD_SEEN);
		}
	      /* Static locals inside of target construct or offloaded
		 routines need to be "omp declare target".  */
	      if (TREE_STATIC (t))
		for (; ctx; ctx = ctx->outer_context)
		  if ((ctx->region_type & ORT_TARGET) != 0)
		    {
		      if (!lookup_attribute ("omp declare target",
					     DECL_ATTRIBUTES (t)))
			{
			  tree id = get_identifier ("omp declare target");
			  DECL_ATTRIBUTES (t)
			    = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
			  varpool_node *node = varpool_node::get (t);
			  if (node)
			    {
			      node->offloadable = 1;
			      if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
				{
				  g->have_offload = true;
				  if (!in_lto_p)
				    vec_safe_push (offload_vars, t);
				}
			    }
			}
		      break;
		    }
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  /* Reset so we can detect save/keep requests made by this body.  */
  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      tree clobber = build_clobber (TREE_TYPE (t));
	      gimple *clobber_stmt;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      /* Look the variable up by its value expression's base when
		 one exists, since that is how it was recorded.  */
	      tree key = t;
	      if (DECL_HAS_VALUE_EXPR_P (key))
		{
		  key = DECL_VALUE_EXPR (key);
		  if (TREE_CODE (key) == INDIRECT_REF)
		    key = TREE_OPERAND (key, 0);
		}
	      tree *c = oacc_declare_returns->get (key);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = unshare_expr (*c);

		  oacc_declare_returns->remove (key);

		  if (oacc_declare_returns->is_empty ())
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      /* The stack save goes before the try so the restore in the
	 finally clause sees the saved pointer.  */
      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
| |
/* Maybe add early return predict statement to PRE_P sequence.  */

static void
maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
{
  /* If we are inside a conditional context, predict this early return
     as not taken.  (The pre-existing comment here said "not in a
     conditional context", contradicting the code; the guard clearly
     fires only when gimple_conditional_context () is true.)  */
  if (gimple_conditional_context ())
    {
      gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
					      NOT_TAKEN);
      gimplify_seq_add_stmt (pre_p, predict);
    }
}
| |
/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  /* Trivial case: no value, or the RESULT_DECL returned directly.  */
  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL)
    {
      maybe_add_early_return_predict_stmt (pre_p);
      /* NOTE(review): this declaration shadows the outer RET, which is
	 never used on this path; harmless but worth tidying someday.  */
      greturn *ret = gimple_build_return (ret_expr);
      gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else if (TREE_CODE (ret_expr) == COMPOUND_EXPR)
    {
      /* Used in C++ for handling EH cleanup of the return value if a local
	 cleanup throws.  Assume the front-end knows what it's doing.  */
      result_decl = DECL_RESULT (current_function_decl);
      /* But crash if we end up trying to modify ret_expr below.  */
      ret_expr = NULL_TREE;
    }
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (TREE_CODE (result_decl) == INDIRECT_REF)
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (!poly_int_tree_p (DECL_SIZE (result_decl)))
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    /* Reuse the per-function return temporary once created.  */
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      TREE_NO_WARNING (result) = 1;

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  maybe_add_early_return_predict_stmt (pre_p);
  ret = gimple_build_return (result);
  gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}
| |
/* Gimplify a variable-length array DECL.  Lowers DECL into an alloca
   call whose result is stored in a pointer temporary; DECL itself is
   given a DECL_VALUE_EXPR dereferencing that pointer.  Statements are
   appended to SEQ_P.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  /* The pointer is freshly alloca'd below, so dereferencing cannot trap.  */
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
			      max_int_size_in_bytes (TREE_TYPE (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Record the dynamic allocation associated with DECL if requested.  */
  if (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC)
    record_dynamic_alloc (decl);
}
| |
| /* A helper function to be called via walk_tree. Mark all labels under *TP |
| as being forced. To be called for DECL_INITIAL of static variables. */ |
| |
| static tree |
| force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED) |
| { |
| if (TYPE_P (*tp)) |
| *walk_subtrees = 0; |
| if (TREE_CODE (*tp) == LABEL_DECL) |
| { |
| FORCED_LABEL (*tp) = 1; |
| cfun->has_forced_label_in_static = 1; |
| } |
| |
| return NULL_TREE; |
| } |
| |
/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.  Size expressions are gimplified, VLAs
   are lowered via gimplify_vla_decl, ASan use-after-scope poisoning is
   set up when enabled, and any non-static initializer is turned into an
   explicit INIT_EXPR appended to SEQ_P.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  /* The DECL_EXPR itself is consumed here.  */
  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || VAR_P (decl))
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    {
      gimplify_type_sizes (TREE_TYPE (decl), seq_p);
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
    }

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    {
      gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
      if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
    }

  if (VAR_P (decl) && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);
      bool is_vla = false;

      /* Treat as a VLA any decl whose size is not a compile-time
	 constant, and, under generic stack checking, any local too
	 large to be allocated statically on the stack.  */
      poly_uint64 size;
      if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &size)
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && maybe_gt (size,
			   (unsigned HOST_WIDE_INT) STACK_CHECK_MAX_VAR_SIZE)))
	{
	  gimplify_vla_decl (decl, seq_p);
	  is_vla = true;
	}

      if (asan_poisoned_variables
	  && !is_vla
	  && TREE_ADDRESSABLE (decl)
	  && !TREE_STATIC (decl)
	  && !DECL_HAS_VALUE_EXPR_P (decl)
	  && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
	  && dbg_cnt (asan_use_after_scope)
	  && !gimplify_omp_ctxp
	  /* GNAT introduces temporaries to hold return values of calls in
	     initializers of variables defined in other units, so the
	     declaration of the variable is discarded completely.  We do not
	     want to issue poison calls for such dropped variables.  */
	  && (DECL_SEEN_IN_BIND_EXPR_P (decl)
	      || (DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)))
	{
	  asan_poisoned_variables->add (decl);
	  /* Unpoison at the point of declaration; the matching poison is
	     emitted when the variable's scope ends.  */
	  asan_poison_variable (decl, false, seq_p);
	  if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
	    gimplify_ctxp->live_switch_vars->add (decl);
	}

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
    }

  return GS_ALL_DONE;
}
| |
/* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
   and replacing the LOOP_EXPR with goto, but if the loop contains an
   EXIT_EXPR, we need to append a label for it to jump to.  */

static enum gimplify_status
gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
{
  /* Save the enclosing loop's exit label; it is restored below so
     nested loops each get their own.  */
  tree saved_label = gimplify_ctxp->exit_label;
  tree start_label = create_artificial_label (UNKNOWN_LOCATION);

  gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));

  gimplify_ctxp->exit_label = NULL_TREE;

  gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);

  /* Close the infinite loop by jumping back to its head.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));

  /* An EXIT_EXPR inside the body creates the exit label on demand.  */
  if (gimplify_ctxp->exit_label)
    gimplify_seq_add_stmt (pre_p,
			   gimple_build_label (gimplify_ctxp->exit_label));

  gimplify_ctxp->exit_label = saved_label;

  *expr_p = NULL;
  return GS_ALL_DONE;
}
| |
/* Gimplify a statement list onto a sequence.  These may be created either
   by an enlightened front-end, or by shortcut_cond_expr.

   Returns GS_OK with *EXPR_P set to a value temporary when the list had
   a value (see voidify_wrapper_expr), GS_ALL_DONE otherwise.  */

static enum gimplify_status
gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
{
  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  tree_stmt_iterator i = tsi_start (*expr_p);

  /* Gimplify each statement and unlink it from the list as we go.  */
  while (!tsi_end_p (i))
    {
      gimplify_stmt (tsi_stmt_ptr (i), pre_p);
      tsi_delink (&i);
    }

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  return GS_ALL_DONE;
}
| |
/* Callback for walk_gimple_seq.  Finds the first "real" statement in a
   switch body: scopes and debug statements are walked through, and the
   first interesting statement is stashed in WI->info while returning a
   non-NULL value to stop the walk.  */

static tree
warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
			   struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
      /* A compiler-generated cleanup or a user-written try block.
	 If it's empty, don't dive into it--that would result in
	 worse location info.  */
      if (gimple_try_eval (stmt) == NULL)
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      /* Fall through.  */
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;

    case GIMPLE_DEBUG:
      /* Ignore these.  We may generate them before declarations that
	 are never executed.  If there's something to warn about,
	 there will be non-debug stmts too, and we'll catch those.  */
      break;

    case GIMPLE_CALL:
      /* ASan markers are instrumentation, not user code; look inside.  */
      if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
	{
	  *handled_ops_p = false;
	  break;
	}
      /* Fall through.  */
    default:
      /* Save the first "real" statement (not a decl/lexical scope/...).  */
      wi->info = stmt;
      return integer_zero_node;
    }
  return NULL_TREE;
}
| |
/* Possibly warn about unreachable statements between switch's controlling
   expression and the first case.  SEQ is the body of a switch expression.
   Emits -Wswitch-unreachable when the first real statement found by
   warn_switch_unreachable_r is not a label.  */

static void
maybe_warn_switch_unreachable (gimple_seq seq)
{
  if (!warn_switch_unreachable
      /* This warning doesn't play well with Fortran when optimizations
	 are on.  */
      || lang_GNU_Fortran ()
      || seq == NULL)
    return;

  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
  gimple *stmt = (gimple *) wi.info;

  /* A label as the first statement means control reaches a case
     normally, so there is nothing to warn about.  */
  if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
    {
      if (gimple_code (stmt) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
	  && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
	/* Don't warn for compiler-generated gotos.  These occur
	   in Duff's devices, for example.  */;
      else
	warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
		    "statement will never be executed");
    }
}
| |
| |
/* A label entry that pairs label and a location.  Used by the implicit
   fallthrough analysis below.  */
struct label_entry
{
  tree label;		/* The LABEL_DECL itself.  */
  location_t loc;	/* Location to use in diagnostics for it.  */
};
| |
| /* Find LABEL in vector of label entries VEC. */ |
| |
| static struct label_entry * |
| find_label_entry (const auto_vec<struct label_entry> *vec, tree label) |
| { |
| unsigned int i; |
| struct label_entry *l; |
| |
| FOR_EACH_VEC_ELT (*vec, i, l) |
| if (l->label == label) |
| return l; |
| return NULL; |
| } |
| |
| /* Return true if LABEL, a LABEL_DECL, represents a case label |
| in a vector of labels CASES. */ |
| |
| static bool |
| case_label_p (const vec<tree> *cases, tree label) |
| { |
| unsigned int i; |
| tree l; |
| |
| FOR_EACH_VEC_ELT (*cases, i, l) |
| if (CASE_LABEL (l) == label) |
| return true; |
| return false; |
| } |
| |
/* Find the last nondebug statement in a scope STMT.  Recurses through
   nested GIMPLE_BINDs and GIMPLE_TRYs; for a try/finally whose body can
   fall through, the last statement of the cleanup is what executes last
   and so is what gets returned.  May return NULL for an empty scope.  */

static gimple *
last_stmt_in_scope (gimple *stmt)
{
  if (!stmt)
    return NULL;

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      {
	gbind *bind = as_a <gbind *> (stmt);
	stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
	return last_stmt_in_scope (stmt);
      }

    case GIMPLE_TRY:
      {
	gtry *try_stmt = as_a <gtry *> (stmt);
	stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
	gimple *last_eval = last_stmt_in_scope (stmt);
	/* NOTE(review): gimple_stmt_may_fallthru is invoked before the
	   NULL check on last_eval; presumably it tolerates a NULL
	   argument -- confirm against gimple.c.  */
	if (gimple_stmt_may_fallthru (last_eval)
	    && (last_eval == NULL
		|| !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
	    && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
	  {
	    /* The finally clause runs after a falling-through body, so
	       its last statement is the scope's last statement.  */
	    stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
	    return last_stmt_in_scope (stmt);
	  }
	else
	  return last_eval;
      }

    case GIMPLE_DEBUG:
      /* Callers pass only nondebug statements.  */
      gcc_unreachable ();

    default:
      return stmt;
    }
}
| |
/* Collect interesting labels in LABELS and return the statement preceding
   another case label, or a user-defined label.  Store a location useful
   to give warnings at *PREVLOC (usually the location of the returned
   statement or of its surrounding scope).  */

static gimple *
collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
			    auto_vec <struct label_entry> *labels,
			    location_t *prevloc)
{
  gimple *prev = NULL;

  *prevloc = UNKNOWN_LOCATION;
  do
    {
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
	{
	  /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
	     which starts on a GIMPLE_SWITCH and ends with a break label.
	     Handle that as a single statement that can fall through.  */
	  gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
	  gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
	  gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
	  if (last
	      && gimple_code (first) == GIMPLE_SWITCH
	      && gimple_code (last) == GIMPLE_LABEL)
	    {
	      tree label = gimple_label_label (as_a <glabel *> (last));
	      if (SWITCH_BREAK_LABEL_P (label))
		{
		  prev = bind;
		  gsi_next (gsi_p);
		  continue;
		}
	    }
	}
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
	  || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
	{
	  /* Nested scope.  Only look at the last statement of
	     the innermost scope.  */
	  location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
	  gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
	  if (last)
	    {
	      prev = last;
	      /* It might be a label without a location.  Use the
		 location of the scope then.  */
	      if (!gimple_has_location (prev))
		*prevloc = bind_loc;
	    }
	  gsi_next (gsi_p);
	  continue;
	}

      /* Ifs are tricky.  */
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
	{
	  gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
	  tree false_lab = gimple_cond_false_label (cond_stmt);
	  location_t if_loc = gimple_location (cond_stmt);

	  /* If we have e.g.
	     if (i > 1) goto <D.2259>; else goto D;
	     we can't do much with the else-branch.  */
	  if (!DECL_ARTIFICIAL (false_lab))
	    break;

	  /* Go on until the false label, then one step back.  */
	  for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
	    {
	      gimple *stmt = gsi_stmt (*gsi_p);
	      if (gimple_code (stmt) == GIMPLE_LABEL
		  && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
		break;
	    }

	  /* Not found?  Oops.  */
	  if (gsi_end_p (*gsi_p))
	    break;

	  /* The false label of the if marks a potential fallthrough
	     point; remember it along with the if's location.  */
	  struct label_entry l = { false_lab, if_loc };
	  labels->safe_push (l);

	  /* Go to the last statement of the then branch.  */
	  gsi_prev (gsi_p);

	  /* if (i != 0) goto <D.1759>; else goto <D.1760>;
	     <D.1759>:
	     <stmt>;
	     goto <D.1761>;
	     <D.1760>:
	   */
	  if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
	      && !gimple_has_location (gsi_stmt (*gsi_p)))
	    {
	      /* Look at the statement before, it might be
		 attribute fallthrough, in which case don't warn.  */
	      gsi_prev (gsi_p);
	      bool fallthru_before_dest
		= gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
	      gsi_next (gsi_p);
	      tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
	      if (!fallthru_before_dest)
		{
		  struct label_entry l = { goto_dest, if_loc };
		  labels->safe_push (l);
		}
	    }
	  /* And move back.  */
	  gsi_next (gsi_p);
	}

      /* Remember the last statement.  Skip labels that are of no interest
	 to us.  */
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
	{
	  tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
	  if (find_label_entry (labels, label))
	    prev = gsi_stmt (*gsi_p);
	}
      else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
	;
      else if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_PREDICT)
	;
      else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
	prev = gsi_stmt (*gsi_p);
      gsi_next (gsi_p);
    }
  while (!gsi_end_p (*gsi_p)
	 /* Stop if we find a case or a user-defined label.  */
	 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
	     || !gimple_has_location (gsi_stmt (*gsi_p))));

  if (prev && gimple_has_location (prev))
    *prevloc = gimple_location (prev);
  return prev;
}
| |
| /* Return true if the switch fallthough warning should occur. LABEL is |
| the label statement that we're falling through to. */ |
| |
static bool
should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
{
  /* Work on a copy so the caller's iterator position is not disturbed.  */
  gimple_stmt_iterator gsi = *gsi_p;

  /* Don't warn if the label is marked with a "falls through" comment.  */
  if (FALLTHROUGH_LABEL_P (label))
    return false;

  /* Don't warn for non-case labels followed by a statement:
       case 0:
	 foo ();
       label:
	 bar ();
     as these are likely intentional.  */
  if (!case_label_p (&gimplify_ctxp->case_labels, label))
    {
      tree l;
      /* Skip over the run of consecutive non-case labels; if it ends at
	 a case label we still warn, otherwise (end of sequence or a
	 non-label statement) the fallthrough looks intentional.  */
      while (!gsi_end_p (gsi)
	     && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
	     && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
	     && !case_label_p (&gimplify_ctxp->case_labels, l))
	gsi_next_nondebug (&gsi);
      if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
	return false;
    }

  /* Don't warn for terminated branches, i.e. when the subsequent case labels
     immediately breaks.  */
  gsi = *gsi_p;

  /* Skip all immediately following labels.  */
  while (!gsi_end_p (gsi)
	 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
	     || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
    gsi_next_nondebug (&gsi);

  /* { ... something; default:; } */
  if (gsi_end_p (gsi)
      /* { ... something; default: break; } or
	 { ... something; default: goto L; } */
      || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
      /* { ... something; default: return; } */
      || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
    return false;

  return true;
}
| |
| /* Callback for walk_gimple_seq. */ |
| |
static tree
warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
			     struct walk_stmt_info *)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  /* By default tell the walker not to descend into operands; we only
     override that for the container statements below.  */
  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;

    /* Find a sequence of form:

       GIMPLE_LABEL
       [...]
       <may fallthru stmt>
       GIMPLE_LABEL

       and possibly warn.  */
    case GIMPLE_LABEL:
      {
	/* Found a label.  Skip all immediately following labels.  */
	while (!gsi_end_p (*gsi_p)
	       && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
	  gsi_next_nondebug (gsi_p);

	/* There might be no more statements.  */
	if (gsi_end_p (*gsi_p))
	  return integer_zero_node;

	/* Vector of labels that fall through.  */
	auto_vec <struct label_entry> labels;
	location_t prevloc;
	/* Advance to the end of this fallthrough region, remembering the
	   last real statement (PREV) and its location (PREVLOC).  */
	gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);

	/* There might be no more statements.  */
	if (gsi_end_p (*gsi_p))
	  return integer_zero_node;

	gimple *next = gsi_stmt (*gsi_p);
	tree label;
	/* If what follows is a label, then we may have a fallthrough.  */
	if (gimple_code (next) == GIMPLE_LABEL
	    && gimple_has_location (next)
	    && (label = gimple_label_label (as_a <glabel *> (next)))
	    && prev != NULL)
	  {
	    struct label_entry *l;
	    bool warned_p = false;
	    auto_diagnostic_group d;
	    if (!should_warn_for_implicit_fallthrough (gsi_p, label))
	      /* Quiet.  */;
	    else if (gimple_code (prev) == GIMPLE_LABEL
		     && (label = gimple_label_label (as_a <glabel *> (prev)))
		     && (l = find_label_entry (&labels, label)))
	      /* The region ended with a label we collected earlier: warn
		 at the location recorded for that label.  */
	      warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
				     "this statement may fall through");
	    else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
		     /* Try to be clever and don't warn when the statement
			can't actually fall through.  */
		     && gimple_stmt_may_fallthru (prev)
		     && prevloc != UNKNOWN_LOCATION)
	      warned_p = warning_at (prevloc,
				     OPT_Wimplicit_fallthrough_,
				     "this statement may fall through");
	    if (warned_p)
	      inform (gimple_location (next), "here");

	    /* Mark this label as processed so as to prevent multiple
	       warnings in nested switches.  */
	    FALLTHROUGH_LABEL_P (label) = true;

	    /* So that next warn_implicit_fallthrough_r will start looking for
	       a new sequence starting with this label.  */
	    gsi_prev (gsi_p);
	  }
      }
      break;
   default:
      break;
    }
  return NULL_TREE;
}
| |
| /* Warn when a switch case falls through. */ |
| |
| static void |
| maybe_warn_implicit_fallthrough (gimple_seq seq) |
| { |
| if (!warn_implicit_fallthrough) |
| return; |
| |
| /* This warning is meant for C/C++/ObjC/ObjC++ only. */ |
| if (!(lang_GNU_C () |
| || lang_GNU_CXX () |
| || lang_GNU_OBJC ())) |
| return; |
| |
| struct walk_stmt_info wi; |
| memset (&wi, 0, sizeof (wi)); |
| walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi); |
| } |
| |
| /* Callback for walk_gimple_seq. */ |
| |
static tree
expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
		      struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  /* By default don't descend into operands; only the container
     statements below need their bodies walked.  */
  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;
    case GIMPLE_CALL:
      if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
	{
	  /* The FALLTHROUGH () marker has done its job; remove it.  */
	  gsi_remove (gsi_p, true);
	  if (gsi_end_p (*gsi_p))
	    {
	      /* FALLTHROUGH () at the end of the sequence: record its
		 location for the caller and signal the error by
		 returning a non-NULL value to stop the walk.  */
	      *static_cast<location_t *>(wi->info) = gimple_location (stmt);
	      return integer_zero_node;
	    }

	  bool found = false;
	  location_t loc = gimple_location (stmt);

	  gimple_stmt_iterator gsi2 = *gsi_p;
	  stmt = gsi_stmt (gsi2);
	  if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
	    {
	      /* Go on until the artificial label.  */
	      tree goto_dest = gimple_goto_dest (stmt);
	      for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
		{
		  if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
		      && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
			   == goto_dest)
		    break;
		}

	      /* Not found?  Stop.  */
	      if (gsi_end_p (gsi2))
		break;

	      /* Look one past it.  */
	      gsi_next (&gsi2);
	    }

	  /* We're looking for a case label or default label here.  */
	  while (!gsi_end_p (gsi2))
	    {
	      stmt = gsi_stmt (gsi2);
	      if (gimple_code (stmt) == GIMPLE_LABEL)
		{
		  tree label = gimple_label_label (as_a <glabel *> (stmt));
		  if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
		    {
		      found = true;
		      break;
		    }
		}
	      else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
		;
	      else if (!is_gimple_debug (stmt))
		/* Anything else is not expected.  */
		break;
	      gsi_next (&gsi2);
	    }
	  if (!found)
	    pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
		     "a case label or default label");
	}
      break;
    default:
      break;
    }
  return NULL_TREE;
}
| |
| /* Expand all FALLTHROUGH () calls in SEQ. */ |
| |
| static void |
| expand_FALLTHROUGH (gimple_seq *seq_p) |
| { |
| struct walk_stmt_info wi; |
| location_t loc; |
| memset (&wi, 0, sizeof (wi)); |
| wi.info = (void *) &loc; |
| walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi); |
| if (wi.callback_result == integer_zero_node) |
| /* We've found [[fallthrough]]; at the end of a switch, which the C++ |
| standard says is ill-formed; see [dcl.attr.fallthrough]. */ |
| pedwarn (loc, 0, "attribute %<fallthrough%> not preceding " |
| "a case label or default label"); |
| } |
| |
| |
| /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can |
| branch to. */ |
| |
static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;
  /* Type in which the case labels are compared; fall back to the type
     of the condition when the SWITCH_EXPR itself carries none.  */
  tree index_type = TREE_TYPE (switch_expr);
  if (index_type == NULL_TREE)
    index_type = TREE_TYPE (SWITCH_COND (switch_expr));

  /* Gimplify the controlling expression into a gimple value first.  */
  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
		       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      vec<tree> labels;
      vec<tree> saved_labels;
      hash_set<tree> *saved_live_switch_vars = NULL;
      tree default_case = NULL_TREE;
      gswitch *switch_stmt;

      /* Save old labels, get new ones from body, then restore the old
	 labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels.create (8);

      /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR.  */
      saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
      tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
      if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
	gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
      else
	gimplify_ctxp->live_switch_vars = NULL;

      bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
      gimplify_ctxp->in_switch_expr = true;

      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);

      gimplify_ctxp->in_switch_expr = old_in_switch_expr;
      /* Diagnostics that need to see the gimplified body as a whole.  */
      maybe_warn_switch_unreachable (switch_body_seq);
      maybe_warn_implicit_fallthrough (switch_body_seq);
      /* Only do this for the outermost GIMPLE_SWITCH.  */
      if (!gimplify_ctxp->in_switch_expr)
	expand_FALLTHROUGH (&switch_body_seq);

      /* Collect the case labels the body registered and restore the
	 outer context's label vector.  */
      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      if (gimplify_ctxp->live_switch_vars)
	{
	  gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
	  delete gimplify_ctxp->live_switch_vars;
	}
      gimplify_ctxp->live_switch_vars = saved_live_switch_vars;

      preprocess_case_label_vec_for_gimple (labels, index_type,
					    &default_case);

      bool add_bind = false;
      if (!default_case)
	{
	  glabel *new_default;

	  /* No default case: synthesize an empty one so the GIMPLE_SWITCH
	     always has a default edge.  */
	  default_case
	    = build_case_label (NULL_TREE, NULL_TREE,
				create_artificial_label (UNKNOWN_LOCATION));
	  if (old_in_switch_expr)
	    {
	      SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
	      add_bind = true;
	    }
	  new_default = gimple_build_label (CASE_LABEL (default_case));
	  gimplify_seq_add_stmt (&switch_body_seq, new_default);
	}
      else if (old_in_switch_expr)
	{
	  gimple *last = gimple_seq_last_stmt (switch_body_seq);
	  if (last && gimple_code (last) == GIMPLE_LABEL)
	    {
	      tree label = gimple_label_label (as_a <glabel *> (last));
	      if (SWITCH_BREAK_LABEL_P (label))
		add_bind = true;
	    }
	}

      switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
					 default_case, labels);
      /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
	 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
	 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
	 so that we can easily find the start and end of the switch
	 statement.  */
      if (add_bind)
	{
	  gimple_seq bind_body = NULL;
	  gimplify_seq_add_stmt (&bind_body, switch_stmt);
	  gimple_seq_add_seq (&bind_body, switch_body_seq);
	  gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
	  gimple_set_location (bind, EXPR_LOCATION (switch_expr));
	  gimplify_seq_add_stmt (pre_p, bind);
	}
      else
	{
	  gimplify_seq_add_stmt (pre_p, switch_stmt);
	  gimplify_seq_add_seq (pre_p, switch_body_seq);
	}
      labels.release ();
    }
  else
    gcc_unreachable ();

  return GS_ALL_DONE;
}
| |
| /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */ |
| |
| static enum gimplify_status |
| gimplify_label_expr (tree *expr_p, gimple_seq *pre_p) |
| { |
| gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p)) |
| == current_function_decl); |
| |
| tree label = LABEL_EXPR_LABEL (*expr_p); |
| glabel *label_stmt = gimple_build_label (label); |
| gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p)); |
| gimplify_seq_add_stmt (pre_p, label_stmt); |
| |
| if (lookup_attribute ("cold", DECL_ATTRIBUTES (label))) |
| gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL, |
| NOT_TAKEN)); |
| else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label))) |
| gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL, |
| TAKEN)); |
| |
| return GS_ALL_DONE; |
| } |
| |
| /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */ |
| |
| static enum gimplify_status |
| gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p) |
| { |
| struct gimplify_ctx *ctxp; |
| glabel *label_stmt; |
| |
| /* Invalid programs can play Duff's Device type games with, for example, |
| #pragma omp parallel. At least in the C front end, we don't |
| detect such invalid branches until after gimplification, in the |
| diagnose_omp_blocks pass. */ |
| for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context) |
| if (ctxp->case_labels.exists ()) |
| break; |
| |
| tree label = CASE_LABEL (*expr_p); |
| label_stmt = gimple_build_label (label); |
| gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p)); |
| ctxp->case_labels.safe_push (*expr_p); |
| gimplify_seq_add_stmt (pre_p, label_stmt); |
| |
| if (lookup_attribute ("cold", DECL_ATTRIBUTES (label))) |
| gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL, |
| NOT_TAKEN)); |
| else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label))) |
| gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL, |
| TAKEN)); |
| |
| return GS_ALL_DONE; |
| } |
| |
| /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first |
| if necessary. */ |
| |
| tree |
| build_and_jump (tree *label_p) |
| { |
| if (label_p == NULL) |
| /* If there's nowhere to jump, just fall through. */ |
| return NULL_TREE; |
| |
| if (*label_p == NULL_TREE) |
| { |
| tree label = create_artificial_label (UNKNOWN_LOCATION); |
| *label_p = label; |
| } |
| |
| return build1 (GOTO_EXPR, void_type_node, *label_p); |
| } |
| |
| /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR. |
| This also involves building a label to jump to and communicating it to |
| gimplify_loop_expr through gimplify_ctxp->exit_label. */ |
| |
| static enum gimplify_status |
| gimplify_exit_expr (tree *expr_p) |
| { |
| tree cond = TREE_OPERAND (*expr_p, 0); |
| tree expr; |
| |
| expr = build_and_jump (&gimplify_ctxp->exit_label); |
| expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE); |
| *expr_p = expr; |
| |
| return GS_OK; |
| } |
| |
| /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is |
| different from its canonical type, wrap the whole thing inside a |
| NOP_EXPR and force the type of the COMPONENT_REF to be the canonical |
| type. |
| |
| The canonical type of a COMPONENT_REF is the type of the field being |
| referenced--unless the field is a bit-field which can be read directly |
| in a smaller mode, in which case the canonical type is the |
| sign-appropriate type corresponding to that mode. */ |
| |
static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  /* For integral fields the canonical type may be narrower than the
     field type (bit-field read in a smaller mode); get_unwidened
     computes it.  Otherwise it is simply the type of the field.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  /* One could argue that all the stuff below is not necessary for
     the non-bitfield case and declare it a FE error if type
     adjustment would be needed.  */
  if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
      tree old_type = TREE_TYPE (expr);
#endif
      int type_quals;

      /* We need to preserve qualifiers and propagate them from
	 operand 0.  */
      type_quals = TYPE_QUALS (type)
	| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
      if (TYPE_QUALS (type) != type_quals)
	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

#ifdef ENABLE_TYPES_CHECKING
      /* It is now a FE error, if the conversion from the canonical
	 type to the original expression type is not useless.  */
      gcc_assert (useless_type_conversion_p (old_type, type));
#endif
    }
}
| |
| /* If a NOP conversion is changing a pointer to array of foo to a pointer |
| to foo, embed that change in the ADDR_EXPR by converting |
| T array[U]; |
| (T *)&array |
| ==> |
| &array[L] |
| where L is the lower bound. For simplicity, only do this for constant |
| lower bound. |
| The constraint is that the type of &array[L] is trivially convertible |
| to T *. */ |
| |
| static void |
| canonicalize_addr_expr (tree *expr_p) |
| { |
| tree expr = *expr_p; |
| tree addr_expr = TREE_OPERAND (expr, 0); |
| tree datype, ddatype, pddatype; |
| |
| /* We simplify only conversions from an ADDR_EXPR to a pointer type. */ |
| if (!POINTER_TYPE_P (TREE_TYPE (expr)) |
| || TREE_CODE (addr_expr) != ADDR_EXPR) |
| return; |
| |
| /* The addr_expr type should be a pointer to an array. */ |
| datype = TREE_TYPE (TREE_TYPE (addr_expr)); |
| if (TREE_CODE (datype) != ARRAY_TYPE) |
| return; |
| |
| /* The pointer to element type shall be trivially convertible to |
| the expression pointer type. */ |
| ddatype = TREE_TYPE (datype); |
| pddatype = build_pointer_type (ddatype); |
| if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)), |
| pddatype)) |
| return; |
| |
| /* The lower bound and element sizes must be constant. */ |
| if (!TYPE_SIZE_UNIT (ddatype) |
| || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST |
| || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype)) |
| || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST) |
| return; |
| |
| /* All checks succeeded. Build a new node to merge the cast. */ |
| *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0), |
| TYPE_MIN_VALUE (TYPE_DOMAIN (datype)), |
| NULL_TREE, NULL_TREE); |
| *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p); |
| |
| /* We can have stripped a required restrict qualifier above. */ |
| if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p))) |
| *expr_p = fold_convert (TREE_TYPE (expr), *expr_p); |
| } |
| |
| /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions |
| underneath as appropriate. */ |
| |
| static enum gimplify_status |
| gimplify_conversion (tree *expr_p) |
| { |
| location_t loc = EXPR_LOCATION (*expr_p); |
| gcc_assert (CONVERT_EXPR_P (*expr_p)); |
| |
| /* Then strip away all but the outermost conversion. */ |
| STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0)); |
| |
| /* And remove the outermost conversion if it's useless. */ |
| if (tree_ssa_useless_type_conversion (*expr_p)) |
| *expr_p = TREE_OPERAND (*expr_p, 0); |
| |
| /* If we still have a conversion at the toplevel, |
| then canonicalize some constructs. */ |
| if (CONVERT_EXPR_P (*expr_p)) |
| { |
| tree sub = TREE_OPERAND (*expr_p, 0); |
| |
| /* If a NOP conversion is changing the type of a COMPONENT_REF |
| expression, then canonicalize its type now in order to expose more |
| redundant conversions. */ |
| if (TREE_CODE (sub) == COMPONENT_REF) |
| canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0)); |
| |
| /* If a NOP conversion is changing a pointer to array of foo |
| to a pointer to foo, embed that change in the ADDR_EXPR. */ |
| else if (TREE_CODE (sub) == ADDR_EXPR) |
| canonicalize_addr_expr (expr_p); |
| } |
| |
| /* If we have a conversion to a non-register type force the |
| use of a VIEW_CONVERT_EXPR instead. */ |
| if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p))) |
| *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p), |
| TREE_OPERAND (*expr_p, 0)); |
| |
| /* Canonicalize CONVERT_EXPR to NOP_EXPR. */ |
| if (TREE_CODE (*expr_p) == CONVERT_EXPR) |
| TREE_SET_CODE (*expr_p, NOP_EXPR); |
| |
| return GS_OK; |
| } |
| |
| /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a |
| DECL_VALUE_EXPR, and it's worth re-examining things. */ |
| |
| static enum gimplify_status |
| gimplify_var_or_parm_decl (tree *expr_p) |
| { |
| tree decl = *expr_p; |
| |
| /* ??? If this is a local variable, and it has not been seen in any |
| outer BIND_EXPR, then it's probably the result of a duplicate |
| declaration, for which we've already issued an error. It would |
| be really nice if the front end wouldn't leak these at all. |
| Currently the only known culprit is C++ destructors, as seen |
| in g++.old-deja/g++.jason/binding.C. */ |
| if (VAR_P (decl) |
| && !DECL_SEEN_IN_BIND_EXPR_P (decl) |
| && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl) |
| && decl_function_context (decl) == current_function_decl) |
| { |
| gcc_assert (seen_error ()); |
| return GS_ERROR; |
| } |
| |
| /* When within an OMP context, notice uses of variables. */ |
| if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true)) |
| return GS_ALL_DONE; |
| |
| /* If the decl is an alias for another expression, substitute it now. */ |
| if (DECL_HAS_VALUE_EXPR_P (decl)) |
| { |
| *expr_p = unshare_expr (DECL_VALUE_EXPR (decl)); |
| return GS_OK; |
| } |
| |
| return GS_ALL_DONE; |
| } |
| |
| /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */ |
| |
| static void |
| recalculate_side_effects (tree t) |
| { |
| enum tree_code code = TREE_CODE (t); |
| int len = TREE_OPERAND_LENGTH (t); |
| int i; |
| |
| switch (TREE_CODE_CLASS (code)) |
| { |
| case tcc_expression: |
| switch (code) |
| { |
| case INIT_EXPR: |
| case MODIFY_EXPR: |
| case VA_ARG_EXPR: |
| case PREDECREMENT_EXPR: |
| case PREINCREMENT_EXPR: |
| case POSTDECREMENT_EXPR: |
| case POSTINCREMENT_EXPR: |
| /* All of these have side-effects, no matter what their |
| operands are. */ |
| return; |
| |
| default: |
| break; |
| } |
| /* Fall through. */ |
| |
| case tcc_comparison: /* a comparison expression */ |
| case tcc_unary: /* a unary arithmetic expression */ |
| case tcc_binary: /* a binary arithmetic expression */ |
| case tcc_reference: /* a reference */ |
| case tcc_vl_exp: /* a function call */ |
| TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t); |
| for (i = 0; i < len; ++i) |
| { |
| tree op = TREE_OPERAND (t, i); |
| if (op && TREE_SIDE_EFFECTS (op)) |
| TREE_SIDE_EFFECTS (t) = 1; |
| } |
| break; |
| |
| case tcc_constant: |
| /* No side-effects. */ |
| return; |
| |
| default: |
| gcc_unreachable (); |
| } |
| } |
| |
| /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR |
| node *EXPR_P. |
| |
| compound_lval |
| : min_lval '[' val ']' |
| | min_lval '.' ID |
| | compound_lval '[' val ']' |
| | compound_lval '.' ID |
| |
| This is not part of the original SIMPLE definition, which separates |
| array and member references, but it seems reasonable to handle them |
| together. Also, this way we don't run into problems with union |
| aliasing; gcc requires that for accesses through a union to alias, the |
| union reference must be explicit, which was not always the case when we |
| were splitting up array and member refs. |
| |
| PRE_P points to the sequence where side effects that must happen before |
| *EXPR_P should be stored. |
| |
| POST_P points to the sequence where side effects that must happen after |
| *EXPR_P should be stored. */ |
| |
static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			fallback_t fallback)
{
  tree *p;
  enum gimplify_status ret = GS_ALL_DONE, tret;
  int i;
  location_t loc = EXPR_LOCATION (*expr_p);
  tree expr = *expr_p;

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  auto_vec<tree, 10> expr_stack;

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
	*p = fold_indirect_ref_loc (loc, *p);

      if (handled_component_p (*p))
	;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
	 additional COMPONENT_REFs.  */
      else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
	       && gimplify_var_or_parm_decl (p) == GS_OK)
	goto restart;
      else
	break;

      expr_stack.safe_push (*p);
    }

  gcc_assert (expr_stack.length ());

  /* Now EXPR_STACK is a stack of pointers to all the refs we've
     walked through and P points to the innermost expression.

     Java requires that we elaborated nodes in source order.  That
     means we must gimplify the inner expression followed by each of
     the indices, in order.  But we can't gimplify the inner
     expression until we deal with any variable bounds, sizes, or
     positions in order to deal with PLACEHOLDER_EXPRs.

     So we do this in three steps.  First we deal with the annotations
     for any variables in the components, then we gimplify the base,
     then we gimplify any indices, from left to right.  */
  for (i = expr_stack.length () - 1; i >= 0; i--)
    {
      tree t = expr_stack[i];

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the low bound and element type size and put them into
	     the ARRAY_REF.  If these values are set, they have already been
	     gimplified.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree low = unshare_expr (array_ref_low_bound (t));
	      if (!is_gimple_min_invariant (low))
		{
		  TREE_OPERAND (t, 2) = low;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }

	  if (TREE_OPERAND (t, 3) == NULL_TREE)
	    {
	      tree elmt_size = array_ref_element_size (t);
	      if (!is_gimple_min_invariant (elmt_size))
		{
		  elmt_size = unshare_expr (elmt_size);
		  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
		  tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

		  /* Divide the element size by the alignment of the element
		     type (above).  */
		  elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR,
					      elmt_size, factor);

		  TREE_OPERAND (t, 3) = elmt_size;
		  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  /* Set the field offset into T and gimplify it.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree offset = component_ref_field_offset (t);
	      if (!is_gimple_min_invariant (offset))
		{
		  offset = unshare_expr (offset);
		  tree field = TREE_OPERAND (t, 1);
		  tree factor
		    = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

		  /* Divide the offset by its alignment.  */
		  offset = size_binop_loc (loc, EXACT_DIV_EXPR,
					   offset, factor);

		  TREE_OPERAND (t, 2) = offset;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
			fallback | fb_lvalue);
  ret = MIN (ret, tret);

  /* And finally, the indices and operands of ARRAY_REF.  During this
     loop we also remove any useless conversions.  */
  for (; expr_stack.length () > 0; )
    {
      tree t = expr_stack.pop ();

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the dimension.  */
	  if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				    is_gimple_val, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
	 TREE_SIDE_EFFECTS set which would have caused all the outer
	 expressions in *EXPR_P leading to P to also have had
	 TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    {
      canonicalize_component_ref (expr_p);
    }

  expr_stack.release ();

  gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);

  return ret;
}
| |
| /* Gimplify the self modifying expression pointed to by EXPR_P |
| (++, --, +=, -=). |
| |
| PRE_P points to the list where side effects that must happen before |
| *EXPR_P should be stored. |
| |
| POST_P points to the list where side effects that must happen after |
| *EXPR_P should be stored. |
| |
| WANT_VALUE is nonzero iff we want to use the value of this expression |
| in another expression. |
| |
| ARITH_TYPE is the type the computation should be performed in. */ |
| |
enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			bool want_value, tree arith_type)
{
  enum tree_code code;
  tree lhs, lvalue, rhs, t1;
  gimple_seq post = NULL, *orig_post_p = post_p;
  bool postfix;
  enum tree_code arith_code;
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  code = TREE_CODE (*expr_p);

  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
	      || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);

  /* Prefix or postfix?  */
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
    /* Faster to treat as prefix if result is not used.  */
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operator, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  */
  if (postfix)
    {
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;

      /* Capture the old value in a temporary; that temporary is the
	 value of the whole postfix expression.  */
      lhs = get_initialized_tmp_var (lhs, pre_p);
    }

  /* For POINTERs increment, use POINTER_PLUS_EXPR.  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      rhs = convert_to_ptrofftype_loc (loc, rhs);
      /* POINTER_PLUS_EXPR has no MINUS counterpart: negate the offset
	 instead for decrements.  */
      if (arith_code == MINUS_EXPR)
	rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
    }
  else
    t1 = fold_convert (TREE_TYPE (*expr_p),
		       fold_build2 (arith_code, arith_type,
				    fold_convert (arith_type, lhs),
				    fold_convert (arith_type, rhs)));

  if (postfix)
    {
      /* Emit the update now; the saved old value (LHS) is the result.
	 Append the deferred inner post side effects afterwards.  */
      gimplify_assign (lvalue, t1, pre_p);
      gimplify_seq_add_seq (orig_post_p, post);
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
  else
    {
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}
| |
| /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */ |
| |
| static void |
| maybe_with_size_expr (tree *expr_p) |
| { |
| tree expr = *expr_p; |
| tree type = TREE_TYPE (expr); |
| tree size; |
| |
| /* If we've already wrapped this or the type is error_mark_node, we can't do |
| anything. */ |
| if (TREE_CODE (expr) == WITH_SIZE_EXPR |
| || type == error_mark_node) |
| return; |
| |
| /* If the size isn't known or is a constant, we have nothing to do. */ |
| size = TYPE_SIZE_UNIT (type); |
| if (!size || poly_int_tree_p (size)) |
| return; |
| |
| /* Otherwise, make a WITH_SIZE_EXPR. */ |
| size = unshare_expr (size); |
| size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr); |
| *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size); |
| } |
| |
| /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P |
| Store any side-effects in PRE_P. CALL_LOCATION is the location of |
| the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be |
| gimplified to an SSA name. */ |
| |
| enum gimplify_status |
| gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location, |
| bool allow_ssa) |
| { |
| bool (*test) (tree); |
| fallback_t fb; |
| |
| /* In general, we allow lvalues for function arguments to avoid |
| extra overhead of copying large aggregates out of even larger |
| aggregates into temporaries only to copy the temporaries to |
| the argument list. Make optimizers happy by pulling out to |
| temporaries those types that fit in registers. */ |
| if (is_gimple_reg_type (TREE_TYPE (*arg_p))) |
| test = is_gimple_val, fb = fb_rvalue; |
| else |
| { |
| test = is_gimple_lvalue, fb = fb_either; |
| /* Also strip a TARGET_EXPR that would force an extra copy. */ |
| if (TREE_CODE (*arg_p) == TARGET_EXPR) |
| { |
| tree init = TARGET_EXPR_INITIAL (*arg_p); |
| if (init |
| && !VOID_TYPE_P (TREE_TYPE (init))) |
| *arg_p = init; |
| } |
| } |
| |
| /* If this is a variable sized type, we must remember the size. */ |
| maybe_with_size_expr (arg_p); |
| |
| /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */ |
| /* Make sure arguments have the same location as the function call |
| itself. */ |
| protected_set_expr_location (*arg_p, call_location); |
| |
| /* There is a sequence point before a function call. Side effects in |
| the argument list must occur before the actual call. So, when |
| gimplifying arguments, force gimplify_expr to use an internal |
| post queue which is then appended to the end of PRE_P. */ |
| return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa); |
| } |
| |
| /* Don't fold inside offloading or taskreg regions: it can break code by |
| adding decl references that weren't in the source. We'll do it during |
| omplower pass instead. */ |
| |
| static bool |
| maybe_fold_stmt (gimple_stmt_iterator *gsi) |
| { |
| struct gimplify_omp_ctx *ctx; |
| for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context) |
| if ((ctx |