| /* Tree lowering pass. This pass converts the GENERIC functions-as-trees |
| tree representation into the GIMPLE form. |
| Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 |
| Free Software Foundation, Inc. |
| Major work done by Sebastian Pop <s.pop@laposte.net>, |
| Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>. |
| |
| This file is part of GCC. |
| |
| GCC is free software; you can redistribute it and/or modify it under |
| the terms of the GNU General Public License as published by the Free |
| Software Foundation; either version 3, or (at your option) any later |
| version. |
| |
| GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
| WARRANTY; without even the implied warranty of MERCHANTABILITY or |
| FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
| for more details. |
| |
| You should have received a copy of the GNU General Public License |
| along with GCC; see the file COPYING3. If not see |
| <http://www.gnu.org/licenses/>. */ |
| |
| #include "config.h" |
| #include "system.h" |
| #include "coretypes.h" |
| #include "tm.h" |
| #include "tree.h" |
| #include "rtl.h" |
| #include "varray.h" |
| #include "gimple.h" |
| #include "tree-iterator.h" |
| #include "tree-inline.h" |
| #include "diagnostic.h" |
| #include "langhooks.h" |
| #include "langhooks-def.h" |
| #include "tree-flow.h" |
| #include "cgraph.h" |
| #include "timevar.h" |
| #include "except.h" |
| #include "hashtab.h" |
| #include "flags.h" |
| #include "real.h" |
| #include "function.h" |
| #include "output.h" |
| #include "expr.h" |
| #include "ggc.h" |
| #include "toplev.h" |
| #include "target.h" |
| #include "optabs.h" |
| #include "pointer-set.h" |
| #include "splay-tree.h" |
| #include "vec.h" |
| #include "gimple.h" |
| |
| |
| enum gimplify_omp_var_data |
| { |
| GOVD_SEEN = 1, |
| GOVD_EXPLICIT = 2, |
| GOVD_SHARED = 4, |
| GOVD_PRIVATE = 8, |
| GOVD_FIRSTPRIVATE = 16, |
| GOVD_LASTPRIVATE = 32, |
| GOVD_REDUCTION = 64, |
| GOVD_LOCAL = 128, |
| GOVD_DEBUG_PRIVATE = 256, |
| GOVD_PRIVATE_OUTER_REF = 512, |
| GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE |
| | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL) |
| }; |
| |
| |
| enum omp_region_type |
| { |
| ORT_WORKSHARE = 0, |
| ORT_TASK = 1, |
| ORT_PARALLEL = 2, |
| ORT_COMBINED_PARALLEL = 3 |
| }; |
| |
| struct gimplify_omp_ctx |
| { |
| struct gimplify_omp_ctx *outer_context; |
| splay_tree variables; |
| struct pointer_set_t *privatized_types; |
| location_t location; |
| enum omp_clause_default_kind default_kind; |
| enum omp_region_type region_type; |
| }; |
| |
| static struct gimplify_ctx *gimplify_ctxp; |
| static struct gimplify_omp_ctx *gimplify_omp_ctxp; |
| |
| |
| /* Formal (expression) temporary table handling: Multiple occurrences of |
| the same scalar expression are evaluated into the same temporary. */ |
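| |
| /* For illustration (a hypothetical C fragment, temporary names made up): |
| |
|        x = (a + b) * c; |
|        y = (a + b) * d; |
| |
|    gimplifies so that both occurrences of "a + b" are written into the |
|    same formal temporary, roughly: |
| |
|        T.1 = a + b; |
|        x = T.1 * c; |
|        T.1 = a + b; |
|        y = T.1 * d; |
| |
|    Reuse only happens when optimizing; see lookup_tmp_var below. */ |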
| |
| typedef struct gimple_temp_hash_elt |
| { |
| tree val; /* Key */ |
| tree temp; /* Value */ |
| } elt_t; |
| |
| /* Forward declarations. */ |
| static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool); |
| |
| /* Mark X addressable. Unlike the langhook, we expect X to be in GIMPLE |
| form and we don't do any syntax checking. */ |
| static void |
| mark_addressable (tree x) |
| { |
| while (handled_component_p (x)) |
| x = TREE_OPERAND (x, 0); |
| if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL) |
| return; |
| TREE_ADDRESSABLE (x) = 1; |
| } |
| |
| /* Return a hash value for a formal temporary table entry. */ |
| |
| static hashval_t |
| gimple_tree_hash (const void *p) |
| { |
| tree t = ((const elt_t *) p)->val; |
| return iterative_hash_expr (t, 0); |
| } |
| |
| /* Compare two formal temporary table entries. */ |
| |
| static int |
| gimple_tree_eq (const void *p1, const void *p2) |
| { |
| tree t1 = ((const elt_t *) p1)->val; |
| tree t2 = ((const elt_t *) p2)->val; |
| enum tree_code code = TREE_CODE (t1); |
| |
| if (TREE_CODE (t2) != code |
| || TREE_TYPE (t1) != TREE_TYPE (t2)) |
| return 0; |
| |
| if (!operand_equal_p (t1, t2, 0)) |
| return 0; |
| |
| /* Only allow them to compare equal if they also hash equal; otherwise |
| results are nondeterministic, and we fail bootstrap comparison. */ |
| gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2)); |
| |
| return 1; |
| } |
| |
| /* Link gimple statement GS to the end of the sequence *SEQ_P. If |
| *SEQ_P is NULL, a new sequence is allocated. This function is |
| similar to gimple_seq_add_stmt, but does not scan the operands. |
| During gimplification, we need to manipulate statement sequences |
| before the def/use vectors have been constructed. */ |
| |
| static void |
| gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs) |
| { |
| gimple_stmt_iterator si; |
| |
| if (gs == NULL) |
| return; |
| |
| if (*seq_p == NULL) |
| *seq_p = gimple_seq_alloc (); |
| |
| si = gsi_last (*seq_p); |
| |
| gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT); |
| } |
| |
| /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is |
| NULL, a new sequence is allocated. This function is |
| similar to gimple_seq_add_seq, but does not scan the operands. |
| During gimplification, we need to manipulate statement sequences |
| before the def/use vectors have been constructed. */ |
| |
| static void |
| gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src) |
| { |
| gimple_stmt_iterator si; |
| |
| if (src == NULL) |
| return; |
| |
| if (*dst_p == NULL) |
| *dst_p = gimple_seq_alloc (); |
| |
| si = gsi_last (*dst_p); |
| gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT); |
| } |
| |
| /* Set up a context for the gimplifier. */ |
| |
| void |
| push_gimplify_context (struct gimplify_ctx *c) |
| { |
| memset (c, '\0', sizeof (*c)); |
| c->prev_context = gimplify_ctxp; |
| gimplify_ctxp = c; |
| } |
| |
| /* Tear down a context for the gimplifier. If BODY is non-null, then |
| put the temporaries into BODY, the outer GIMPLE_BIND. Otherwise, |
| record them in the function's local_decls. |
| |
| BODY is not a sequence, but the first tuple in a sequence. */ |
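| |
| /* A typical pairing, as a sketch only (the real call sites live in the |
|    gimplifier's entry points, e.g. gimplify_body later in this file): |
| |
|        struct gimplify_ctx gctx; |
| |
|        push_gimplify_context (&gctx); |
|        ... gimplify the function body, producing its outer GIMPLE_BIND ... |
|        pop_gimplify_context (outer_bind); |
| |
|    where OUTER_BIND stands for that outermost GIMPLE_BIND tuple. */ |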
| |
| void |
| pop_gimplify_context (gimple body) |
| { |
| struct gimplify_ctx *c = gimplify_ctxp; |
| tree t; |
| |
| gcc_assert (c && (c->bind_expr_stack == NULL |
| || VEC_empty (gimple, c->bind_expr_stack))); |
| VEC_free (gimple, heap, c->bind_expr_stack); |
| gimplify_ctxp = c->prev_context; |
| |
| for (t = c->temps; t ; t = TREE_CHAIN (t)) |
| DECL_GIMPLE_FORMAL_TEMP_P (t) = 0; |
| |
| if (body) |
| declare_vars (c->temps, body, false); |
| else |
| record_vars (c->temps); |
| |
| if (c->temp_htab) |
| htab_delete (c->temp_htab); |
| } |
| |
| static void |
| gimple_push_bind_expr (gimple gimple_bind) |
| { |
| if (gimplify_ctxp->bind_expr_stack == NULL) |
| gimplify_ctxp->bind_expr_stack = VEC_alloc (gimple, heap, 8); |
| VEC_safe_push (gimple, heap, gimplify_ctxp->bind_expr_stack, gimple_bind); |
| } |
| |
| static void |
| gimple_pop_bind_expr (void) |
| { |
| VEC_pop (gimple, gimplify_ctxp->bind_expr_stack); |
| } |
| |
| gimple |
| gimple_current_bind_expr (void) |
| { |
| return VEC_last (gimple, gimplify_ctxp->bind_expr_stack); |
| } |
| |
| /* Return the stack of GIMPLE_BINDs created during gimplification. */ |
| |
| VEC(gimple, heap) * |
| gimple_bind_expr_stack (void) |
| { |
| return gimplify_ctxp->bind_expr_stack; |
| } |
| |
| /* Returns true iff there is a COND_EXPR between us and the innermost |
| CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */ |
| |
| static bool |
| gimple_conditional_context (void) |
| { |
| return gimplify_ctxp->conditions > 0; |
| } |
| |
| /* Note that we've entered a COND_EXPR. */ |
| |
| static void |
| gimple_push_condition (void) |
| { |
| #ifdef ENABLE_GIMPLE_CHECKING |
| if (gimplify_ctxp->conditions == 0) |
| gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups)); |
| #endif |
| ++(gimplify_ctxp->conditions); |
| } |
| |
| /* Note that we've left a COND_EXPR. If we're back at unconditional scope |
| now, add any conditional cleanups we've seen to the prequeue. */ |
| |
| static void |
| gimple_pop_condition (gimple_seq *pre_p) |
| { |
| int conds = --(gimplify_ctxp->conditions); |
| |
| gcc_assert (conds >= 0); |
| if (conds == 0) |
| { |
| gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups); |
| gimplify_ctxp->conditional_cleanups = NULL; |
| } |
| } |
| |
| /* A stable comparison routine for use with splay trees and DECLs. */ |
| |
| static int |
| splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb) |
| { |
| tree a = (tree) xa; |
| tree b = (tree) xb; |
| |
| return DECL_UID (a) - DECL_UID (b); |
| } |
| |
| /* Create a new omp construct that deals with variable remapping. */ |
| |
| static struct gimplify_omp_ctx * |
| new_omp_context (enum omp_region_type region_type) |
| { |
| struct gimplify_omp_ctx *c; |
| |
| c = XCNEW (struct gimplify_omp_ctx); |
| c->outer_context = gimplify_omp_ctxp; |
| c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0); |
| c->privatized_types = pointer_set_create (); |
| c->location = input_location; |
| c->region_type = region_type; |
| if (region_type != ORT_TASK) |
| c->default_kind = OMP_CLAUSE_DEFAULT_SHARED; |
| else |
| c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED; |
| |
| return c; |
| } |
| |
| /* Destroy an omp construct that deals with variable remapping. */ |
| |
| static void |
| delete_omp_context (struct gimplify_omp_ctx *c) |
| { |
| splay_tree_delete (c->variables); |
| pointer_set_destroy (c->privatized_types); |
| XDELETE (c); |
| } |
| |
| static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int); |
| static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool); |
| |
| /* A subroutine of append_to_statement_list{,_force}. T is not NULL. */ |
| |
| static void |
| append_to_statement_list_1 (tree t, tree *list_p) |
| { |
| tree list = *list_p; |
| tree_stmt_iterator i; |
| |
| if (!list) |
| { |
| if (t && TREE_CODE (t) == STATEMENT_LIST) |
| { |
| *list_p = t; |
| return; |
| } |
| *list_p = list = alloc_stmt_list (); |
| } |
| |
| i = tsi_last (list); |
| tsi_link_after (&i, t, TSI_CONTINUE_LINKING); |
| } |
| |
| /* Add T to the end of the list container pointed to by LIST_P. |
| If T is an expression with no effects, it is ignored. */ |
| |
| void |
| append_to_statement_list (tree t, tree *list_p) |
| { |
| if (t && TREE_SIDE_EFFECTS (t)) |
| append_to_statement_list_1 (t, list_p); |
| } |
| |
| /* Similar, but the statement is always added, regardless of side effects. */ |
| |
| void |
| append_to_statement_list_force (tree t, tree *list_p) |
| { |
| if (t != NULL_TREE) |
| append_to_statement_list_1 (t, list_p); |
| } |
| |
| /* Both gimplify the statement T and append it to *SEQ_P. This function |
| behaves exactly as gimplify_stmt, but you don't have to pass T as a |
| reference. */ |
| |
| void |
| gimplify_and_add (tree t, gimple_seq *seq_p) |
| { |
| gimplify_stmt (&t, seq_p); |
| } |
| |
| /* Gimplify statement T into sequence *SEQ_P, and return the first |
| tuple in the sequence of generated tuples for this statement. |
| Return NULL if gimplifying T produced no tuples. */ |
| |
| static gimple |
| gimplify_and_return_first (tree t, gimple_seq *seq_p) |
| { |
| gimple_stmt_iterator last = gsi_last (*seq_p); |
| |
| gimplify_and_add (t, seq_p); |
| |
| if (!gsi_end_p (last)) |
| { |
| gsi_next (&last); |
| return gsi_stmt (last); |
| } |
| else |
| return gimple_seq_first_stmt (*seq_p); |
| } |
| |
| /* Strip off a legitimate source ending from the input string NAME of |
| length LEN. Rather than having to know the names used by all of |
| our front ends, we strip off an ending of a period followed by |
| up to five characters. (Java uses ".class".) */ |
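| |
| /* For example (illustrative), the prefixes "foo.c" and "Foo.class" are |
|    trimmed to "foo" and "Foo" respectively before being used to build a |
|    temporary name. */ |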
| |
| static inline void |
| remove_suffix (char *name, int len) |
| { |
| int i; |
| |
| for (i = 2; i < 8 && len > i; i++) |
| { |
| if (name[len - i] == '.') |
| { |
| name[len - i] = '\0'; |
| break; |
| } |
| } |
| } |
| |
| /* Subroutine for find_single_pointer_decl. */ |
| |
| static tree |
| find_single_pointer_decl_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, |
| void *data) |
| { |
| tree *pdecl = (tree *) data; |
| |
| /* We are only looking for pointers at the same level as the |
| original tree; we must not look through any indirections. |
| Returning anything other than NULL_TREE will cause the caller to |
| not find a base. */ |
| if (REFERENCE_CLASS_P (*tp)) |
| return *tp; |
| |
| if (DECL_P (*tp) && POINTER_TYPE_P (TREE_TYPE (*tp))) |
| { |
| if (*pdecl) |
| { |
| /* We already found a pointer decl; return anything other |
| than NULL_TREE to unwind from walk_tree signalling that |
| we have a duplicate. */ |
| return *tp; |
| } |
| *pdecl = *tp; |
| } |
| |
| return NULL_TREE; |
| } |
| |
| /* Find the single DECL of pointer type in the tree T, used directly |
| rather than via an indirection, and return it. If there are zero |
| or more than one such DECLs, return NULL. */ |
| |
| static tree |
| find_single_pointer_decl (tree t) |
| { |
| tree decl = NULL_TREE; |
| |
| if (walk_tree (&t, find_single_pointer_decl_1, &decl, NULL)) |
| { |
| /* find_single_pointer_decl_1 returns a nonzero value, causing |
| walk_tree to return a nonzero value, to indicate that it |
| found more than one pointer DECL or that it found an |
| indirection. */ |
| return NULL_TREE; |
| } |
| |
| return decl; |
| } |
| |
| /* Create a new temporary name with PREFIX. Returns an identifier. */ |
| |
| static GTY(()) unsigned int tmp_var_id_num; |
| |
| tree |
| create_tmp_var_name (const char *prefix) |
| { |
| char *tmp_name; |
| |
| if (prefix) |
| { |
| char *preftmp = ASTRDUP (prefix); |
| |
| remove_suffix (preftmp, strlen (preftmp)); |
| prefix = preftmp; |
| } |
| |
| ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++); |
| return get_identifier (tmp_name); |
| } |
| |
| |
| /* Create a new temporary variable declaration of type TYPE. |
| Does NOT push it into the current binding. */ |
| |
| tree |
| create_tmp_var_raw (tree type, const char *prefix) |
| { |
| tree tmp_var; |
| tree new_type; |
| |
| /* Make the type of the variable writable. */ |
| new_type = build_type_variant (type, 0, 0); |
| TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type); |
| |
| tmp_var = build_decl (VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL, |
| type); |
| |
| /* The variable was declared by the compiler. */ |
| DECL_ARTIFICIAL (tmp_var) = 1; |
| /* And we don't want debug info for it. */ |
| DECL_IGNORED_P (tmp_var) = 1; |
| |
| /* Make the variable writable. */ |
| TREE_READONLY (tmp_var) = 0; |
| |
| DECL_EXTERNAL (tmp_var) = 0; |
| TREE_STATIC (tmp_var) = 0; |
| TREE_USED (tmp_var) = 1; |
| |
| return tmp_var; |
| } |
| |
| /* Create a new temporary variable declaration of type TYPE. DOES push the |
| variable into the current binding. Further, assume that this is called |
| only from gimplification or optimization, at which point the creation of |
| certain types is a bug. */ |
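| |
| /* Illustrative use (a sketch, not taken from a real call site): |
| |
|        tree tmp = create_tmp_var (integer_type_node, "cmp"); |
| |
|    creates an artificial, writable int variable named along the lines of |
|    "cmp.123" (the exact spelling depends on ASM_FORMAT_PRIVATE_NAME) and |
|    registers it with the current context via gimple_add_tmp_var. */ |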
| |
| tree |
| create_tmp_var (tree type, const char *prefix) |
| { |
| tree tmp_var; |
| |
| /* We don't allow types that are addressable (meaning we can't make copies), |
| or incomplete. We used to also reject variable-sized objects here, but |
| now support those for which a constant upper bound can be obtained. |
| The processing for variable sizes is performed in gimple_add_tmp_var, |
| which is the point at which it really matters and which may be reached |
| via paths not going through this function, e.g. after direct calls to |
| create_tmp_var_raw. */ |
| gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type)); |
| |
| tmp_var = create_tmp_var_raw (type, prefix); |
| gimple_add_tmp_var (tmp_var); |
| return tmp_var; |
| } |
| |
| /* Create a temporary with a name derived from VAL. Subroutine of |
| lookup_tmp_var; nobody else should call this function. */ |
| |
| static inline tree |
| create_tmp_from_val (tree val) |
| { |
| return create_tmp_var (TREE_TYPE (val), get_name (val)); |
| } |
| |
| /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse |
| an existing expression temporary. */ |
| |
| static tree |
| lookup_tmp_var (tree val, bool is_formal) |
| { |
| tree ret; |
| |
| /* If not optimizing, never really reuse a temporary. local-alloc |
| won't allocate any variable that is used in more than one basic |
| block, which means it will go into memory, causing much extra |
| work in reload and final and poorer code generation, outweighing |
| the extra memory allocation here. */ |
| if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val)) |
| ret = create_tmp_from_val (val); |
| else |
| { |
| elt_t elt, *elt_p; |
| void **slot; |
| |
| elt.val = val; |
| if (gimplify_ctxp->temp_htab == NULL) |
| gimplify_ctxp->temp_htab |
| = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free); |
| slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT); |
| if (*slot == NULL) |
| { |
| elt_p = XNEW (elt_t); |
| elt_p->val = val; |
| elt_p->temp = ret = create_tmp_from_val (val); |
| *slot = (void *) elt_p; |
| } |
| else |
| { |
| elt_p = (elt_t *) *slot; |
| ret = elt_p->temp; |
| } |
| } |
| |
| if (is_formal) |
| DECL_GIMPLE_FORMAL_TEMP_P (ret) = 1; |
| |
| return ret; |
| } |
| |
| |
| /* Return true if T is a CALL_EXPR or an expression that can be |
| assigned to a temporary. Note that this predicate should only be |
| used during gimplification. See the rationale for this in |
| gimplify_modify_expr. */ |
| |
| static bool |
| is_gimple_formal_tmp_or_call_rhs (tree t) |
| { |
| return TREE_CODE (t) == CALL_EXPR || is_gimple_formal_tmp_rhs (t); |
| } |
| |
| /* Returns true iff T is a valid RHS for an assignment to a renamed |
| user -- or front-end generated artificial -- variable. */ |
| |
| static bool |
| is_gimple_reg_or_call_rhs (tree t) |
| { |
| /* If the RHS of the MODIFY_EXPR may throw or make a nonlocal goto |
| and the LHS is a user variable, then we need to introduce a formal |
| temporary. This way the optimizers can determine that the user |
| variable is only modified if evaluation of the RHS does not throw. |
| |
| Don't force a temp of a non-renamable type; the copy could be |
| arbitrarily expensive. Instead we will generate a VDEF for |
| the assignment. */ |
| |
| if (is_gimple_reg_type (TREE_TYPE (t)) |
| && ((TREE_CODE (t) == CALL_EXPR && TREE_SIDE_EFFECTS (t)) |
| || tree_could_throw_p (t))) |
| return false; |
| |
| return is_gimple_formal_tmp_or_call_rhs (t); |
| } |
| |
| /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that |
| this predicate should only be used during gimplification. See the |
| rationale for this in gimplify_modify_expr. */ |
| |
| static bool |
| is_gimple_mem_or_call_rhs (tree t) |
| { |
| /* If we're dealing with a renamable type, either source or dest must be |
| a renamed variable. */ |
| if (is_gimple_reg_type (TREE_TYPE (t))) |
| return is_gimple_val (t); |
| else |
| return is_gimple_formal_tmp_or_call_rhs (t); |
| } |
| |
| |
| /* Returns a formal temporary variable initialized with VAL. PRE_P is as |
| in gimplify_expr. Only use this function if: |
| |
| 1) The value of the unfactored expression represented by VAL will not |
| change between the initialization and use of the temporary, and |
| 2) The temporary will not be otherwise modified. |
| |
| For instance, #1 means that this is inappropriate for SAVE_EXPR temps, |
| and #2 means it is inappropriate for && temps. |
| |
| For other cases, use get_initialized_tmp_var instead. */ |
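| |
| /* As a made-up illustration of point #2: the temporary used to gimplify |
|    "a && b" is assigned on two different control-flow paths, once per arm |
|    of the short-circuit, so it is modified after its initialization and |
|    must not be a formal temporary that could be shared with an unrelated |
|    use of an equal expression. */ |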
| |
| static tree |
| internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p, |
| bool is_formal) |
| { |
| tree t, mod; |
| |
| /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we |
| can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */ |
| gimplify_expr (&val, pre_p, post_p, is_gimple_formal_tmp_or_call_rhs, |
| fb_rvalue); |
| |
| t = lookup_tmp_var (val, is_formal); |
| |
| if (is_formal) |
| { |
| tree u = find_single_pointer_decl (val); |
| |
| if (u && TREE_CODE (u) == VAR_DECL && DECL_BASED_ON_RESTRICT_P (u)) |
| u = DECL_GET_RESTRICT_BASE (u); |
| if (u && TYPE_RESTRICT (TREE_TYPE (u))) |
| { |
| if (DECL_BASED_ON_RESTRICT_P (t)) |
| gcc_assert (u == DECL_GET_RESTRICT_BASE (t)); |
| else |
| { |
| DECL_BASED_ON_RESTRICT_P (t) = 1; |
| SET_DECL_RESTRICT_BASE (t, u); |
| } |
| } |
| } |
| |
| if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE |
| || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE) |
| DECL_GIMPLE_REG_P (t) = 1; |
| |
| mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val)); |
| |
| if (EXPR_HAS_LOCATION (val)) |
| SET_EXPR_LOCUS (mod, EXPR_LOCUS (val)); |
| else |
| SET_EXPR_LOCATION (mod, input_location); |
| |
| /* gimplify_modify_expr might want to reduce this further. */ |
| gimplify_and_add (mod, pre_p); |
| ggc_free (mod); |
| |
| /* If we're gimplifying into ssa, gimplify_modify_expr will have |
| given our temporary an SSA name. Find and return it. */ |
| if (gimplify_ctxp->into_ssa) |
| { |
| gimple last = gimple_seq_last_stmt (*pre_p); |
| t = gimple_get_lhs (last); |
| } |
| |
| return t; |
| } |
| |
| /* Returns a formal temporary variable initialized with VAL. PRE_P |
| points to a sequence where side-effects needed to compute VAL should be |
| stored. */ |
| |
| tree |
| get_formal_tmp_var (tree val, gimple_seq *pre_p) |
| { |
| return internal_get_tmp_var (val, pre_p, NULL, true); |
| } |
| |
| /* Returns a temporary variable initialized with VAL. PRE_P and POST_P |
| are as in gimplify_expr. */ |
| |
| tree |
| get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p) |
| { |
| return internal_get_tmp_var (val, pre_p, post_p, false); |
| } |
| |
| /* Declares all the variables in VARS in SCOPE. If DEBUG_INFO is |
| true, generate debug info for them; otherwise don't. */ |
| |
| void |
| declare_vars (tree vars, gimple scope, bool debug_info) |
| { |
| tree last = vars; |
| if (last) |
| { |
| tree temps, block; |
| |
| gcc_assert (gimple_code (scope) == GIMPLE_BIND); |
| |
| temps = nreverse (last); |
| |
| block = gimple_bind_block (scope); |
| gcc_assert (!block || TREE_CODE (block) == BLOCK); |
| if (!block || !debug_info) |
| { |
| TREE_CHAIN (last) = gimple_bind_vars (scope); |
| gimple_bind_set_vars (scope, temps); |
| } |
| else |
| { |
| /* We need to attach the nodes both to the BIND_EXPR and to its |
| associated BLOCK for debugging purposes. The key point here |
| is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR |
| is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */ |
| if (BLOCK_VARS (block)) |
| BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps); |
| else |
| { |
| gimple_bind_set_vars (scope, |
| chainon (gimple_bind_vars (scope), temps)); |
| BLOCK_VARS (block) = temps; |
| } |
| } |
| } |
| } |
| |
| /* For VAR a VAR_DECL of variable size, try to find a constant upper bound |
| for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if |
| no such upper bound can be obtained. */ |
| |
| static void |
| force_constant_size (tree var) |
| { |
| /* The only attempt we make is by querying the maximum size of objects |
| of the variable's type. */ |
| |
| HOST_WIDE_INT max_size; |
| |
| gcc_assert (TREE_CODE (var) == VAR_DECL); |
| |
| max_size = max_int_size_in_bytes (TREE_TYPE (var)); |
| |
| gcc_assert (max_size >= 0); |
| |
| DECL_SIZE_UNIT (var) |
| = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size); |
| DECL_SIZE (var) |
| = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT); |
| } |
| |
| void |
| gimple_add_tmp_var (tree tmp) |
| { |
| gcc_assert (!TREE_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp)); |
| |
| /* Later processing assumes that the object size is constant, which might |
| not be true at this point. Force the use of a constant upper bound in |
| this case. */ |
| if (!host_integerp (DECL_SIZE_UNIT (tmp), 1)) |
| force_constant_size (tmp); |
| |
| DECL_CONTEXT (tmp) = current_function_decl; |
| DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1; |
| |
| if (gimplify_ctxp) |
| { |
| TREE_CHAIN (tmp) = gimplify_ctxp->temps; |
| gimplify_ctxp->temps = tmp; |
| |
| /* Mark temporaries local within the nearest enclosing parallel. */ |
| if (gimplify_omp_ctxp) |
| { |
| struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; |
| while (ctx && ctx->region_type == ORT_WORKSHARE) |
| ctx = ctx->outer_context; |
| if (ctx) |
| omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN); |
| } |
| } |
| else if (cfun) |
| record_vars (tmp); |
| else |
| { |
| gimple_seq body_seq; |
| |
| /* This case is for nested functions. We need to expose the locals |
| they create. */ |
| body_seq = gimple_body (current_function_decl); |
| declare_vars (tmp, gimple_seq_first_stmt (body_seq), false); |
| } |
| } |
| |
| /* Determines whether to assign a location to the statement GS. */ |
| |
| static bool |
| should_carry_location_p (gimple gs) |
| { |
| /* Don't emit a line note for a label. We particularly don't want to |
| emit one for the break label, since it doesn't actually correspond |
| to the beginning of the loop/switch. */ |
| if (gimple_code (gs) == GIMPLE_LABEL) |
| return false; |
| |
| return true; |
| } |
| |
| /* Same, but for a tree. */ |
| |
| static bool |
| tree_should_carry_location_p (const_tree stmt) |
| { |
| /* Don't emit a line note for a label. We particularly don't want to |
| emit one for the break label, since it doesn't actually correspond |
| to the beginning of the loop/switch. */ |
| if (TREE_CODE (stmt) == LABEL_EXPR) |
| return false; |
| |
| /* Do not annotate empty statements, since it confuses gcov. */ |
| if (!TREE_SIDE_EFFECTS (stmt)) |
| return false; |
| |
| return true; |
| } |
| |
| /* Return true if a location should not be emitted for this statement |
| by annotate_one_with_location. */ |
| |
| static inline bool |
| gimple_do_not_emit_location_p (gimple g) |
| { |
| return gimple_plf (g, GF_PLF_1); |
| } |
| |
| /* Mark statement G so a location will not be emitted by |
| annotate_one_with_location. */ |
| |
| static inline void |
| gimple_set_do_not_emit_location (gimple g) |
| { |
| /* The PLF flags are initialized to 0 when a new tuple is created, |
| so no need to initialize it anywhere. */ |
| gimple_set_plf (g, GF_PLF_1, true); |
| } |
| |
| /* Set the location for gimple statement GS to LOCATION. */ |
| |
| static void |
| annotate_one_with_location (gimple gs, location_t location) |
| { |
| if (!gimple_has_location (gs) |
| && !gimple_do_not_emit_location_p (gs) |
| && should_carry_location_p (gs)) |
| gimple_set_location (gs, location); |
| } |
| |
| /* Same, but for tree T. */ |
| |
| static void |
| tree_annotate_one_with_location (tree t, location_t location) |
| { |
| if (CAN_HAVE_LOCATION_P (t) |
| && ! EXPR_HAS_LOCATION (t) && tree_should_carry_location_p (t)) |
| SET_EXPR_LOCATION (t, location); |
| } |
| |
| |
| /* Set LOCATION for all the statements after iterator GSI in sequence |
| SEQ. If GSI is pointing to the end of the sequence, start with the |
| first statement in SEQ. */ |
| |
| static void |
| annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi, |
| location_t location) |
| { |
| if (gsi_end_p (gsi)) |
| gsi = gsi_start (seq); |
| else |
| gsi_next (&gsi); |
| |
| for (; !gsi_end_p (gsi); gsi_next (&gsi)) |
| annotate_one_with_location (gsi_stmt (gsi), location); |
| } |
| |
| |
| /* Set the location for all the statements in a sequence STMT_P to |
| LOCATION. */ |
| |
| void |
| annotate_all_with_location (gimple_seq stmt_p, location_t location) |
| { |
| gimple_stmt_iterator i; |
| |
| if (gimple_seq_empty_p (stmt_p)) |
| return; |
| |
| for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i)) |
| { |
| gimple gs = gsi_stmt (i); |
| annotate_one_with_location (gs, location); |
| } |
| } |
| |
| /* Same, but for statement or statement list in *STMT_P. */ |
| |
| void |
| tree_annotate_all_with_location (tree *stmt_p, location_t location) |
| { |
| tree_stmt_iterator i; |
| |
| if (!*stmt_p) |
| return; |
| |
| for (i = tsi_start (*stmt_p); !tsi_end_p (i); tsi_next (&i)) |
| { |
| tree t = tsi_stmt (i); |
| |
| /* Assuming we've already been gimplified, we shouldn't |
| see nested chaining constructs anymore. */ |
| gcc_assert (TREE_CODE (t) != STATEMENT_LIST |
| && TREE_CODE (t) != COMPOUND_EXPR); |
| |
| tree_annotate_one_with_location (t, location); |
| } |
| } |
| |
| |
| /* Similar to copy_tree_r() but do not copy SAVE_EXPR or TARGET_EXPR nodes. |
| These nodes model computations that should only be done once. If we |
| were to unshare something like SAVE_EXPR(i++), the gimplification |
| process would create wrong code. */ |
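| |
| /* Concretely (illustrative): a SAVE_EXPR exists so that its operand is |
|    evaluated once and the result reused. Copying SAVE_EXPR(i++) would |
|    yield two independent SAVE_EXPRs, each gimplifying to its own |
|    "i = i + 1", incrementing I twice where the source meant once. */ |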
| |
| static tree |
| mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data) |
| { |
| enum tree_code code = TREE_CODE (*tp); |
| /* Don't unshare types, decls, constants and SAVE_EXPR nodes. */ |
| if (TREE_CODE_CLASS (code) == tcc_type |
| || TREE_CODE_CLASS (code) == tcc_declaration |
| || TREE_CODE_CLASS (code) == tcc_constant |
| || code == SAVE_EXPR || code == TARGET_EXPR |
| /* We can't do anything sensible with a BLOCK used as an expression, |
| but we also can't just die when we see it because of non-expression |
| uses. So just avert our eyes and cross our fingers. Silly Java. */ |
| || code == BLOCK) |
| *walk_subtrees = 0; |
| else |
| { |
| gcc_assert (code != BIND_EXPR); |
| copy_tree_r (tp, walk_subtrees, data); |
| } |
| |
| return NULL_TREE; |
| } |
| |
| /* Callback for walk_tree to unshare most of the shared trees rooted at |
| *TP. If *TP has been visited already (i.e., TREE_VISITED (*TP) == 1), |
| then *TP is deep copied by calling copy_tree_r. |
| |
| This unshares the same trees as copy_tree_r with the exception of |
| SAVE_EXPR nodes. These nodes model computations that should only be |
| done once. If we were to unshare something like SAVE_EXPR(i++), the |
| gimplification process would create wrong code. */ |
| |
| static tree |
| copy_if_shared_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, |
| void *data ATTRIBUTE_UNUSED) |
| { |
| tree t = *tp; |
| enum tree_code code = TREE_CODE (t); |
| |
| /* Skip types, decls, and constants. But we do want to look at their |
| types and the bounds of types. Mark them as visited so we properly |
| unmark their subtrees on the unmark pass. If we've already seen them, |
| don't look down further. */ |
| if (TREE_CODE_CLASS (code) == tcc_type |
| || TREE_CODE_CLASS (code) == tcc_declaration |
| || TREE_CODE_CLASS (code) == tcc_constant) |
| { |
| if (TREE_VISITED (t)) |
| *walk_subtrees = 0; |
| else |
| TREE_VISITED (t) = 1; |
| } |
| |
| /* If this node has been visited already, unshare it and don't look |
| any deeper. */ |
| else if (TREE_VISITED (t)) |
| { |
| walk_tree (tp, mostly_copy_tree_r, NULL, NULL); |
| *walk_subtrees = 0; |
| } |
| |
| /* Otherwise, mark the tree as visited and keep looking. */ |
| else |
| TREE_VISITED (t) = 1; |
| |
| return NULL_TREE; |
| } |
| |
| static tree |
| unmark_visited_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, |
| void *data ATTRIBUTE_UNUSED) |
| { |
| if (TREE_VISITED (*tp)) |
| TREE_VISITED (*tp) = 0; |
| else |
| *walk_subtrees = 0; |
| |
| return NULL_TREE; |
| } |
| |
| /* Unshare all the trees in BODY_P, a pointer into the body of FNDECL, and the |
| bodies of any nested functions if we are unsharing the entire body of |
| FNDECL. */ |
| |
| static void |
| unshare_body (tree *body_p, tree fndecl) |
| { |
| struct cgraph_node *cgn = cgraph_node (fndecl); |
| |
| walk_tree (body_p, copy_if_shared_r, NULL, NULL); |
| if (body_p == &DECL_SAVED_TREE (fndecl)) |
| for (cgn = cgn->nested; cgn; cgn = cgn->next_nested) |
| unshare_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl); |
| } |
| |
| /* Likewise, but mark all trees as not visited. */ |
| |
| static void |
| unvisit_body (tree *body_p, tree fndecl) |
| { |
| struct cgraph_node *cgn = cgraph_node (fndecl); |
| |
| walk_tree (body_p, unmark_visited_r, NULL, NULL); |
| if (body_p == &DECL_SAVED_TREE (fndecl)) |
| for (cgn = cgn->nested; cgn; cgn = cgn->next_nested) |
| unvisit_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl); |
| } |
| |
| /* Unconditionally make an unshared copy of EXPR. This is used when using |
| stored expressions which span multiple functions, such as BINFO_VTABLE, |
| as the normal unsharing process can't tell that they're shared. */ |
| |
| tree |
| unshare_expr (tree expr) |
| { |
| walk_tree (&expr, mostly_copy_tree_r, NULL, NULL); |
| return expr; |
| } |
| |
| /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both |
| contain statements and have a value. Assign its value to a temporary |
| and give it void_type_node. Returns the temporary, or NULL_TREE if |
| WRAPPER was already void. */ |
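| |
| /* For example (a GNU statement-expression, names illustrative): |
| |
|        x = ({ int i = foo (); i + 1; }); |
| |
|    reaches the gimplifier as an assignment whose RHS is a BIND_EXPR with a |
|    value. voidify_wrapper_expr gives the wrapper void type and redirects |
|    its value either into TEMP (the enclosing MODIFY_EXPR/INIT_EXPR, when |
|    one is supplied) or into a fresh "retval" temporary, which is then |
|    returned. */ |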
| |
| tree |
| voidify_wrapper_expr (tree wrapper, tree temp) |
| { |
| tree type = TREE_TYPE (wrapper); |
| if (type && !VOID_TYPE_P (type)) |
| { |
| tree *p; |
| |
| /* Set p to point to the body of the wrapper. Loop until we find |
| something that isn't a wrapper. */ |
| for (p = &wrapper; p && *p; ) |
| { |
| switch (TREE_CODE (*p)) |
| { |
| case BIND_EXPR: |
| TREE_SIDE_EFFECTS (*p) = 1; |
| TREE_TYPE (*p) = void_type_node; |
| /* For a BIND_EXPR, the body is operand 1. */ |
| p = &BIND_EXPR_BODY (*p); |
| break; |
| |
| case CLEANUP_POINT_EXPR: |
| case TRY_FINALLY_EXPR: |
| case TRY_CATCH_EXPR: |
| TREE_SIDE_EFFECTS (*p) = 1; |
| TREE_TYPE (*p) = void_type_node; |
| p = &TREE_OPERAND (*p, 0); |
| break; |
| |
| case STATEMENT_LIST: |
| { |
| tree_stmt_iterator i = tsi_last (*p); |
| TREE_SIDE_EFFECTS (*p) = 1; |
| TREE_TYPE (*p) = void_type_node; |
| p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i); |
| } |
| break; |
| |
| case COMPOUND_EXPR: |
| /* Advance to the last statement. Set all container types to void. */ |
| for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1)) |
| { |
| TREE_SIDE_EFFECTS (*p) = 1; |
| TREE_TYPE (*p) = void_type_node; |
| } |
| break; |
| |
| default: |
| goto out; |
| } |
| } |
| |
| out: |
| if (p == NULL || IS_EMPTY_STMT (*p)) |
| temp = NULL_TREE; |
| else if (temp) |
| { |
| /* The wrapper is on the RHS of an assignment that we're pushing |
| down. */ |
| gcc_assert (TREE_CODE (temp) == INIT_EXPR |
| || TREE_CODE (temp) == MODIFY_EXPR); |
| TREE_OPERAND (temp, 1) = *p; |
| *p = temp; |
| } |
| else |
| { |
| temp = create_tmp_var (type, "retval"); |
| *p = build2 (INIT_EXPR, type, temp, *p); |
| } |
| |
| return temp; |
| } |
| |
| return NULL_TREE; |
| } |
| |
| /* Prepare calls to builtins to SAVE and RESTORE the stack as well as |
| a temporary through which they communicate. */ |
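| |
| /* The two calls built here appear in the IL roughly as (temporary name |
|    illustrative): |
| |
|        saved_stack.1 = __builtin_stack_save (); |
|        ... |
|        __builtin_stack_restore (saved_stack.1); |
| |
|    gimplify_bind_expr places the body between them inside a |
|    GIMPLE_TRY_FINALLY when a contained VLA has set save_stack. */ |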
| |
| static void |
| build_stack_save_restore (gimple *save, gimple *restore) |
| { |
| tree tmp_var; |
| |
| *save = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_SAVE], 0); |
| tmp_var = create_tmp_var (ptr_type_node, "saved_stack"); |
| gimple_call_set_lhs (*save, tmp_var); |
| |
| *restore = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_RESTORE], |
| 1, tmp_var); |
| } |
| |
| /* Gimplify a BIND_EXPR. Just voidify and recurse. */ |
| |
| static enum gimplify_status |
| gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p) |
| { |
| tree bind_expr = *expr_p; |
| bool old_save_stack = gimplify_ctxp->save_stack; |
| tree t; |
| gimple gimple_bind; |
| gimple_seq body; |
| |
| tree temp = voidify_wrapper_expr (bind_expr, NULL); |
| |
| /* Mark variables seen in this bind expr. */ |
| for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t)) |
| { |
| if (TREE_CODE (t) == VAR_DECL) |
| { |
| struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; |
| |
| /* Mark variable as local. */ |
| if (ctx && !is_global_var (t) |
| && (! DECL_SEEN_IN_BIND_EXPR_P (t) |
| || splay_tree_lookup (ctx->variables, |
| (splay_tree_key) t) == NULL)) |
| omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN); |
| |
| DECL_SEEN_IN_BIND_EXPR_P (t) = 1; |
| |
| if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun) |
| cfun->has_local_explicit_reg_vars = true; |
| } |
| |
| /* Preliminarily mark non-addressed complex variables as eligible |
| for promotion to gimple registers. We'll transform their uses |
| as we find them. */ |
| if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE |
| || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE) |
| && !TREE_THIS_VOLATILE (t) |
| && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t)) |
| && !needs_to_live_in_memory (t)) |
| DECL_GIMPLE_REG_P (t) = 1; |
| } |
| |
| gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL, |
| BIND_EXPR_BLOCK (bind_expr)); |
| gimple_push_bind_expr (gimple_bind); |
| |
| gimplify_ctxp->save_stack = false; |
| |
| /* Gimplify the body into the GIMPLE_BIND tuple's body. */ |
| body = NULL; |
| gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body); |
| gimple_bind_set_body (gimple_bind, body); |
| |
| if (gimplify_ctxp->save_stack) |
| { |
| gimple stack_save, stack_restore, gs; |
| gimple_seq cleanup, new_body; |
| |
| /* Save stack on entry and restore it on exit. Add a try_finally |
| block to achieve this. Note that mudflap depends on the |
| format of the emitted code: see mx_register_decls(). */ |
| build_stack_save_restore (&stack_save, &stack_restore); |
| |
| cleanup = new_body = NULL; |
| gimplify_seq_add_stmt (&cleanup, stack_restore); |
| gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup, |
| GIMPLE_TRY_FINALLY); |
| |
| gimplify_seq_add_stmt (&new_body, stack_save); |
| gimplify_seq_add_stmt (&new_body, gs); |
| gimple_bind_set_body (gimple_bind, new_body); |
| } |
| |
| gimplify_ctxp->save_stack = old_save_stack; |
| gimple_pop_bind_expr (); |
| |
| gimplify_seq_add_stmt (pre_p, gimple_bind); |
| |
| if (temp) |
| { |
| *expr_p = temp; |
| return GS_OK; |
| } |
| |
| *expr_p = NULL_TREE; |
| return GS_ALL_DONE; |
| } |
| |
| /* Gimplify a RETURN_EXPR. If the expression to be returned is not a |
| GIMPLE value, it is assigned to a new temporary and the statement is |
| re-written to return the temporary. |
| |
| PRE_P points to the sequence where side effects that must happen before |
| STMT should be stored. */ |
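| |
| /* For instance (illustrative): for "return a + b;" in a function whose |
|    value is returned in registers, the MODIFY_EXPR inside the RETURN_EXPR |
|    is redirected onto a shared return temporary, yielding roughly: |
| |
|        D.1 = a + b; |
|        return D.1; |
| |
|    whereas a value returned in memory keeps the bare RESULT_DECL as the |
|    returned operand. */ |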
| |
| static enum gimplify_status |
| gimplify_return_expr (tree stmt, gimple_seq *pre_p) |
| { |
| gimple ret; |
| tree ret_expr = TREE_OPERAND (stmt, 0); |
| tree result_decl, result; |
| |
| if (ret_expr == error_mark_node) |
| return GS_ERROR; |
| |
| if (!ret_expr |
| || TREE_CODE (ret_expr) == RESULT_DECL |
| || ret_expr == error_mark_node) |
| { |
| gimple ret = gimple_build_return (ret_expr); |
| gimple_set_no_warning (ret, TREE_NO_WARNING (stmt)); |
| gimplify_seq_add_stmt (pre_p, ret); |
| return GS_ALL_DONE; |
| } |
| |
| if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))) |
| result_decl = NULL_TREE; |
| else |
| { |
| result_decl = TREE_OPERAND (ret_expr, 0); |
| |
| /* See through a return by reference. */ |
| if (TREE_CODE (result_decl) == INDIRECT_REF) |
| result_decl = TREE_OPERAND (result_decl, 0); |
| |
| gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR |
| || TREE_CODE (ret_expr) == INIT_EXPR) |
| && TREE_CODE (result_decl) == RESULT_DECL); |
| } |
| |
| /* If aggregate_value_p is true, then we can return the bare RESULT_DECL. |
| Recall that aggregate_value_p is FALSE for any aggregate type that is |
| returned in registers. If we're returning values in registers, then |
| we don't want to extend the lifetime of the RESULT_DECL, particularly |
| across another call. In addition, for those aggregates for which |
| hard_function_value generates a PARALLEL, we'll die during normal |
| expansion of structure assignments; there's special code in expand_return |
| to handle this case that does not exist in expand_expr. */ |
| if (!result_decl |
| || aggregate_value_p (result_decl, TREE_TYPE (current_function_decl))) |
| result = result_decl; |
| else if (gimplify_ctxp->return_temp) |
| result = gimplify_ctxp->return_temp; |
| else |
| { |
| result = create_tmp_var (TREE_TYPE (result_decl), NULL); |
| if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE |
| || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE) |
| DECL_GIMPLE_REG_P (result) = 1; |
| |
| /* ??? With complex control flow (usually involving abnormal edges), |
| we can wind up warning about an uninitialized value for this. Due |
| to how this variable is constructed and initialized, this is never |
| true. Give up and never warn. */ |
| TREE_NO_WARNING (result) = 1; |
| |
| gimplify_ctxp->return_temp = result; |
| } |
| |
| /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use. |
| Then gimplify the whole thing. */ |
| if (result != result_decl) |
| TREE_OPERAND (ret_expr, 0) = result; |
| |
| gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p); |
| |
| ret = gimple_build_return (result); |
| gimple_set_no_warning (ret, TREE_NO_WARNING (stmt)); |
| gimplify_seq_add_stmt (pre_p, ret); |
| |
| return GS_ALL_DONE; |
| } |
| |
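| /* Gimplify the variable-sized VAR_DECL DECL by allocating its storage |
|    with __builtin_alloca and rewriting uses through a pointer temporary. |
|    Illustratively (names and the size operand made up), "char buf[n];" |
|    becomes roughly |
| |
|        buf.1 = __builtin_alloca (SIZE); |
| |
|    with DECL_VALUE_EXPR (buf) set to *buf.1, and save_stack set so that |
|    the enclosing bind restores the stack pointer on exit. */ |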
| static void |
| gimplify_vla_decl (tree decl, gimple_seq *seq_p) |
| { |
| /* This is a variable-sized decl. Simplify its size and mark it |
| for deferred expansion. Note that mudflap depends on the format |
| of the emitted code: see mx_register_decls(). */ |
| tree t, addr, ptr_type; |
| |
| gimplify_one_sizepos (&DECL_SIZE (decl), seq_p); |
| gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p); |
| |
| /* All occurrences of this decl in final gimplified code will be |
| replaced by indirection. Setting DECL_VALUE_EXPR does two |
| things: First, it lets the rest of the gimplifier know what |
| replacement to use. Second, it lets the debug info know |
| where to find the value. */ |
| ptr_type = build_pointer_type (TREE_TYPE (decl)); |
| addr = create_tmp_var (ptr_type, get_name (decl)); |
| DECL_IGNORED_P (addr) = 0; |
| t = build_fold_indirect_ref (addr); |
| SET_DECL_VALUE_EXPR (decl, t); |
| DECL_HAS_VALUE_EXPR_P (decl) = 1; |
| |
| t = built_in_decls[BUILT_IN_ALLOCA]; |
| t = build_call_expr (t, 1, DECL_SIZE_UNIT (decl)); |
| t = fold_convert (ptr_type, t); |
| t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t); |
| |
| gimplify_and_add (t, seq_p); |
| |
| /* Indicate that we need to restore the stack level when the |
| enclosing BIND_EXPR is exited. */ |
| gimplify_ctxp->save_stack = true; |
| } |
| |
| |
| /* Gimplifies a DECL_EXPR node *STMT_P by making any necessary allocation |
| and initialization explicit. */ |
| |
| static enum gimplify_status |
| gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p) |
| { |
| tree stmt = *stmt_p; |
| tree decl = DECL_EXPR_DECL (stmt); |
| |
| *stmt_p = NULL_TREE; |
| |
| if (TREE_TYPE (decl) == error_mark_node) |
| return GS_ERROR; |
| |
| if ((TREE_CODE (decl) == TYPE_DECL |
| || TREE_CODE (decl) == VAR_DECL) |
| && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl))) |
| gimplify_type_sizes (TREE_TYPE (decl), seq_p); |
| |
| if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl)) |
| { |
| tree init = DECL_INITIAL (decl); |
| |
| if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST |
| || (!TREE_STATIC (decl) |
| && flag_stack_check == GENERIC_STACK_CHECK |
| && compare_tree_int (DECL_SIZE_UNIT (decl), |
| STACK_CHECK_MAX_VAR_SIZE) > 0)) |
| gimplify_vla_decl (decl, seq_p); |
| |
| if (init && init != error_mark_node) |
| { |
| if (!TREE_STATIC (decl)) |
| { |
| DECL_INITIAL (decl) = NULL_TREE; |
| init = build2 (INIT_EXPR, void_type_node, decl, init); |
| gimplify_and_add (init, seq_p); |
| ggc_free (init); |
| } |
| else |
| /* We must still examine initializers for static variables |
| as they may contain a label address. */ |
| walk_tree (&init, force_labels_r, NULL, NULL); |
| } |
| |
| /* Some front ends do not explicitly declare all anonymous |
| artificial variables. We compensate here by declaring the |
| variables, though it would be better if the front ends would |
| explicitly declare them. */ |
| if (!DECL_SEEN_IN_BIND_EXPR_P (decl) |
| && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE) |
| gimple_add_tmp_var (decl); |
| } |
| |
| return GS_ALL_DONE; |
| } |
| |
| /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body |
| and replacing the LOOP_EXPR with goto, but if the loop contains an |
| EXIT_EXPR, we need to append a label for it to jump to. */ |
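| |
| /* Illustratively, LOOP_EXPR <body> lowers to roughly (label names made |
|    up): |
| |
|        <D.start>: |
|          ... body ... |
|          goto <D.start>; |
|        <D.exit>: |
| |
|    where the exit label is emitted only if the body contained an |
|    EXIT_EXPR. */ |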
| |
| static enum gimplify_status |
| gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p) |
| { |
| tree saved_label = gimplify_ctxp->exit_label; |
| tree start_label = create_artificial_label (); |
| |
| gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label)); |
| |
| gimplify_ctxp->exit_label = NULL_TREE; |
| |
| gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p); |
| |
| gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label)); |
| |
| if (gimplify_ctxp->exit_label) |
| gimplify_seq_add_stmt (pre_p, gimple_build_label (gimplify_ctxp->exit_label)); |
| |
| gimplify_ctxp->exit_label = saved_label; |
| |
| *expr_p = NULL; |
| return GS_ALL_DONE; |
| } |
| |
| /* Gimplifies a statement list onto a sequence. These may be created either |
| by an enlightened front-end, or by shortcut_cond_expr. */ |
| |
| static enum gimplify_status |
| gimplify_statement_list (tree *expr_p, gimple_seq *pre_p) |
| { |
| tree temp = voidify_wrapper_expr (*expr_p, NULL); |
| |
| tree_stmt_iterator i = tsi_start (*expr_p); |
| |
| while (!tsi_end_p (i)) |
| { |
| gimplify_stmt (tsi_stmt_ptr (i), pre_p); |
| tsi_delink (&i); |
| } |
| |
| if (temp) |
| { |
| *expr_p = temp; |
| return GS_OK; |
| } |
| |
| return GS_ALL_DONE; |
| } |
| |
| /* Compare two case labels. Because the front end should already have |
| made sure that case ranges do not overlap, it is enough to only compare |
| the CASE_LOW values of each case label. */ |
| |
| static int |
| compare_case_labels (const void *p1, const void *p2) |
| { |
| const_tree const case1 = *(const_tree const*)p1; |
| const_tree const case2 = *(const_tree const*)p2; |
| |
| /* The 'default' case label always goes first. */ |
| if (!CASE_LOW (case1)) |
| return -1; |
| else if (!CASE_LOW (case2)) |
| return 1; |
| else |
| return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2)); |
| } |
| |
| |
| /* Sort the case labels in LABEL_VEC in place in ascending order. */ |
| |
| void |
| sort_case_labels (VEC(tree,heap)* label_vec) |
| { |
| size_t len = VEC_length (tree, label_vec); |
| qsort (VEC_address (tree, label_vec), len, sizeof (tree), |
| compare_case_labels); |
| } |
| |
| |
| /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can |
| branch to. */ |
| |
| static enum gimplify_status |
| gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p) |
| { |
| tree switch_expr = *expr_p; |
| gimple_seq switch_body_seq = NULL; |
| enum gimplify_status ret; |
| |
| ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val, |
| fb_rvalue); |
| if (ret == GS_ERROR || ret == GS_UNHANDLED) |
| return ret; |
| |
| if (SWITCH_BODY (switch_expr)) |
| { |
| VEC (tree,heap) *labels; |
| VEC (tree,heap) *saved_labels; |
| tree default_case = NULL_TREE; |
| size_t i, len; |
| gimple gimple_switch; |
| |
| /* If someone can be bothered to fill in the labels, they can |
| be bothered to null out the body too. */ |
| gcc_assert (!SWITCH_LABELS (switch_expr)); |
| |
| /* Save the old case labels, collect new ones while gimplifying the |
| body, then restore the old ones. The gimplified body itself is kept |
| in SWITCH_BODY_SEQ and appended after the GIMPLE_SWITCH. */ |
| saved_labels = gimplify_ctxp->case_labels; |
| gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8); |
| |
| gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq); |
| labels = gimplify_ctxp->case_labels; |
| gimplify_ctxp->case_labels = saved_labels; |
| |
| i = 0; |
| while (i < VEC_length (tree, labels)) |
| { |
| tree elt = VEC_index (tree, labels, i); |
| tree low = CASE_LOW (elt); |
| bool remove_element = FALSE; |
| |
| if (low) |
| { |
| /* Discard empty ranges. */ |
| tree high = CASE_HIGH (elt); |
| if (high && tree_int_cst_lt (high, low)) |
| remove_element = TRUE; |
| } |
| else |
| { |
| /* The default case must be the last label in the list. */ |
| gcc_assert (!default_case); |
| default_case = elt; |
| remove_element = TRUE; |
| } |
| |
| if (remove_element) |
| VEC_ordered_remove (tree, labels, i); |
| else |
| i++; |
| } |
| len = i; |
| |
| if (!VEC_empty (tree, labels)) |
| sort_case_labels (labels); |
| |
| if (!default_case) |
| { |
| tree type = TREE_TYPE (switch_expr); |
| |
| /* If the switch has no default label, add one, so that we jump |
| around the switch body. If the labels already cover the whole |
| range of type, add the default label pointing to one of the |
| existing labels. */ |
| if (type == void_type_node) |
| type = TREE_TYPE (SWITCH_COND (switch_expr)); |
| if (len |
| && INTEGRAL_TYPE_P (type) |
| && TYPE_MIN_VALUE (type) |
| && TYPE_MAX_VALUE (type) |
| && tree_int_cst_equal (CASE_LOW (VEC_index (tree, labels, 0)), |
| TYPE_MIN_VALUE (type))) |
| { |
| tree low, high = CASE_HIGH (VEC_index (tree, labels, len - 1)); |
| if (!high) |
| high = CASE_LOW (VEC_index (tree, labels, len - 1)); |
| if (tree_int_cst_equal (high, TYPE_MAX_VALUE (type))) |
| { |
| for (i = 1; i < len; i++) |
| { |
| high = CASE_LOW (VEC_index (tree, labels, i)); |
| low = CASE_HIGH (VEC_index (tree, labels, i - 1)); |
| if (!low) |
| low = CASE_LOW (VEC_index (tree, labels, i - 1)); |
| if ((TREE_INT_CST_LOW (low) + 1 |
| != TREE_INT_CST_LOW (high)) |
| || (TREE_INT_CST_HIGH (low) |
| + (TREE_INT_CST_LOW (high) == 0) |
| != TREE_INT_CST_HIGH (high))) |
| break; |
| } |
| if (i == len) |
| default_case = build3 (CASE_LABEL_EXPR, void_type_node, |
| NULL_TREE, NULL_TREE, |
| CASE_LABEL (VEC_index (tree, |
| labels, 0))); |
| } |
| } |
| |
| if (!default_case) |
| { |
| gimple new_default; |
| |
| default_case = build3 (CASE_LABEL_EXPR, void_type_node, |
| NULL_TREE, NULL_TREE, |
| create_artificial_label ()); |
| new_default = gimple_build_label (CASE_LABEL (default_case)); |
| gimplify_seq_add_stmt (&switch_body_seq, new_default); |
| } |
| } |
| |
| gimple_switch = gimple_build_switch_vec (SWITCH_COND (switch_expr), |
| default_case, labels); |
| gimplify_seq_add_stmt (pre_p, gimple_switch); |
| gimplify_seq_add_seq (pre_p, switch_body_seq); |
| VEC_free(tree, heap, labels); |
| } |
| else |
| gcc_assert (SWITCH_LABELS (switch_expr)); |
| |
| return GS_ALL_DONE; |
| } |
| |
| |
| static enum gimplify_status |
| gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p) |
| { |
| struct gimplify_ctx *ctxp; |
| gimple gimple_label; |
| |
| /* Invalid OpenMP programs can play Duff's Device type games with |
| #pragma omp parallel. At least in the C front end, we don't |
| detect such invalid branches until after gimplification. */ |
| for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context) |
| if (ctxp->case_labels) |
| break; |
| |
| gimple_label = gimple_build_label (CASE_LABEL (*expr_p)); |
| VEC_safe_push (tree, heap, ctxp->case_labels, *expr_p); |
| gimplify_seq_add_stmt (pre_p, gimple_label); |
| |
| return GS_ALL_DONE; |
| } |
| |
| /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first |
| if necessary. */ |
| |
| tree |
| build_and_jump (tree *label_p) |
| { |
| if (label_p == NULL) |
| /* If there's nowhere to jump, just fall through. */ |
| return NULL_TREE; |
| |
| if (*label_p == NULL_TREE) |
| { |
| tree label = create_artificial_label (); |
| *label_p = label; |
| } |
| |
| return build1 (GOTO_EXPR, void_type_node, *label_p); |
| } |
| |
| /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR. |
| This also involves building a label to jump to and communicating it to |
| gimplify_loop_expr through gimplify_ctxp->exit_label. */ |
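| |
| /* That is, EXIT_EXPR <cond> is rewritten (illustratively) as |
| |
|        if (cond) goto <D.exit>; |
| |
|    where <D.exit> is the shared exit label of the enclosing LOOP_EXPR. */ |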
| |
| static enum gimplify_status |
| gimplify_exit_expr (tree *expr_p) |
| { |
| tree cond = TREE_OPERAND (*expr_p, 0); |
| tree expr; |
| |
| expr = build_and_jump (&gimplify_ctxp->exit_label); |
| expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE); |
| *expr_p = expr; |
| |
| return GS_OK; |
| } |
| |
| /* A helper function to be called via walk_tree. Mark all labels under *TP |
| as being forced. To be called for DECL_INITIAL of static variables. */ |
| |
| tree |
| force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED) |
| { |
| if (TYPE_P (*tp)) |
| *walk_subtrees = 0; |
| if (TREE_CODE (*tp) == LABEL_DECL) |
| FORCED_LABEL (*tp) = 1; |
| |
| return NULL_TREE; |
| } |
| |
| /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is |
| different from its canonical type, wrap the whole thing inside a |
| NOP_EXPR and force the type of the COMPONENT_REF to be the canonical |
| type. |
| |
| The canonical type of a COMPONENT_REF is the type of the field being |
| referenced--unless the field is a bit-field which can be read directly |
| in a smaller mode, in which case the canonical type is the |
| sign-appropriate type corresponding to that mode. */ |
| |
| static void |
| canonicalize_component_ref (tree *expr_p) |
| { |
| tree expr = *expr_p; |
| tree type; |
| |
| gcc_assert (TREE_CODE (expr) == COMPONENT_REF); |
| |
| if (INTEGRAL_TYPE_P (TREE_TYPE (expr))) |
| type = TREE_TYPE (get_unwidened (expr, NULL_TREE)); |
| else |
| type = TREE_TYPE (TREE_OPERAND (expr, 1)); |
| |
| /* One could argue that all the stuff below is not necessary for |
| the non-bitfield case and declare it a FE error if type |
| adjustment would be needed. */ |
| if (TREE_TYPE (expr) != type) |
| { |
| #ifdef ENABLE_TYPES_CHECKING |
| tree old_type = TREE_TYPE (expr); |
| #endif |
| int type_quals; |
| |
| /* We need to preserve qualifiers and propagate them from |
| operand 0. */ |
| type_quals = TYPE_QUALS (type) |
| | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0))); |
| if (TYPE_QUALS (type) != type_quals) |
| type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals); |
| |
| /* Set the type of the COMPONENT_REF to the underlying type. */ |
| TREE_TYPE (expr) = type; |
| |
| #ifdef ENABLE_TYPES_CHECKING |
| /* It is now a FE error if the conversion from the canonical |
| type to the original expression type is not useless. */ |
| gcc_assert (useless_type_conversion_p (old_type, type)); |
| #endif |
| } |
| } |
| |
| /* If a NOP conversion is changing a pointer to array of foo to a pointer |
| to foo, embed that change in the ADDR_EXPR by converting |
| T array[U]; |
| (T *)&array |
| ==> |
| &array[L] |
| where L is the lower bound. For simplicity, only do this for constant |
| lower bound. |
| The constraint is that the type of &array[L] is trivially convertible |
| to T *. */ |
| |
| static void |
| canonicalize_addr_expr (tree *expr_p) |
| { |
| tree expr = *expr_p; |
| tree addr_expr = TREE_OPERAND (expr, 0); |
| tree datype, ddatype, pddatype; |
| |
| /* We simplify only conversions from an ADDR_EXPR to a pointer type. */ |
| if (!POINTER_TYPE_P (TREE_TYPE (expr)) |
| || TREE_CODE (addr_expr) != ADDR_EXPR) |
| return; |
| |
| /* The addr_expr type should be a pointer to an array. */ |
| datype = TREE_TYPE (TREE_TYPE (addr_expr)); |
| if (TREE_CODE (datype) != ARRAY_TYPE) |
| return; |
| |
| /* The pointer to element type shall be trivially convertible to |
| the expression pointer type. */ |
| ddatype = TREE_TYPE (datype); |
| pddatype = build_pointer_type (ddatype); |
| if (!useless_type_conversion_p (pddatype, ddatype)) |
| return; |
| |
| /* The lower bound and element sizes must be constant. */ |
| if (!TYPE_SIZE_UNIT (ddatype) |
| || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST |
| || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype)) |
| || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST) |
| return; |
| |
| /* All checks succeeded. Build a new node to merge the cast. */ |
| *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0), |
| TYPE_MIN_VALUE (TYPE_DOMAIN (datype)), |
| NULL_TREE, NULL_TREE); |
| *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p); |
| } |
| |
| /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions |
| underneath as appropriate. */ |
| |
| static enum gimplify_status |
| gimplify_conversion (tree *expr_p) |
| { |
| tree tem; |
| gcc_assert (CONVERT_EXPR_P (*expr_p)); |
| |
| /* Strip away all but the outermost conversion. */ |
| STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0)); |
| |
| /* And remove the outermost conversion if it's useless. */ |
| if (tree_ssa_useless_type_conversion (*expr_p)) |
| *expr_p = TREE_OPERAND (*expr_p, 0); |
| |
| /* Attempt to avoid NOP_EXPR by producing a reference to a subtype. |
| For example this folds (subclass *)&A into &A->subclass, avoiding |
| the need for a separate statement. */ |
| if (CONVERT_EXPR_P (*expr_p) |
| && POINTER_TYPE_P (TREE_TYPE (*expr_p)) |
| && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (*expr_p, 0))) |
| && (tem = maybe_fold_offset_to_address |
| (TREE_OPERAND (*expr_p, 0), |
| integer_zero_node, TREE_TYPE (*expr_p))) != NULL_TREE) |
| *expr_p = tem; |
| |
| /* If we still have a conversion at the toplevel, |
| then canonicalize some constructs. */ |
| if (CONVERT_EXPR_P (*expr_p)) |
| { |
| tree sub = TREE_OPERAND (*expr_p, 0); |
| |
| /* If a NOP conversion is changing the type of a COMPONENT_REF |
| expression, then canonicalize its type now in order to expose more |
| redundant conversions. */ |
| if (TREE_CODE (sub) == COMPONENT_REF) |
| canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0)); |
| |
| /* If a NOP conversion is changing a pointer to array of foo |
| to a pointer to foo, embed that change in the ADDR_EXPR. */ |
| else if (TREE_CODE (sub) == ADDR_EXPR) |
| canonicalize_addr_expr (expr_p); |
| } |
| |
  /* If we have a conversion to a non-register type, force the
     use of a VIEW_CONVERT_EXPR instead.  */
| if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p))) |
| *expr_p = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p), |
| TREE_OPERAND (*expr_p, 0)); |
| |
| return GS_OK; |
| } |
| |
| /* Gimplify a VAR_DECL or PARM_DECL. Returns GS_OK if we expanded a |
| DECL_VALUE_EXPR, and it's worth re-examining things. */ |
| |
| static enum gimplify_status |
| gimplify_var_or_parm_decl (tree *expr_p) |
| { |
| tree decl = *expr_p; |
| |
| /* ??? If this is a local variable, and it has not been seen in any |
| outer BIND_EXPR, then it's probably the result of a duplicate |
| declaration, for which we've already issued an error. It would |
| be really nice if the front end wouldn't leak these at all. |
| Currently the only known culprit is C++ destructors, as seen |
| in g++.old-deja/g++.jason/binding.C. */ |
| if (TREE_CODE (decl) == VAR_DECL |
| && !DECL_SEEN_IN_BIND_EXPR_P (decl) |
| && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl) |
| && decl_function_context (decl) == current_function_decl) |
| { |
| gcc_assert (errorcount || sorrycount); |
| return GS_ERROR; |
| } |
| |
| /* When within an OpenMP context, notice uses of variables. */ |
| if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true)) |
| return GS_ALL_DONE; |
| |
| /* If the decl is an alias for another expression, substitute it now. */ |
| if (DECL_HAS_VALUE_EXPR_P (decl)) |
| { |
| *expr_p = unshare_expr (DECL_VALUE_EXPR (decl)); |
| return GS_OK; |
| } |
| |
| return GS_ALL_DONE; |
| } |
| |
| |
| /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR |
| node *EXPR_P. |
| |
| compound_lval |
| : min_lval '[' val ']' |
| | min_lval '.' ID |
| | compound_lval '[' val ']' |
| | compound_lval '.' ID |
| |
| This is not part of the original SIMPLE definition, which separates |
| array and member references, but it seems reasonable to handle them |
| together. Also, this way we don't run into problems with union |
| aliasing; gcc requires that for accesses through a union to alias, the |
| union reference must be explicit, which was not always the case when we |
| were splitting up array and member refs. |
| |
| PRE_P points to the sequence where side effects that must happen before |
| *EXPR_P should be stored. |
| |
| POST_P points to the sequence where side effects that must happen after |
| *EXPR_P should be stored. */ |
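/* As an informal illustration, for a reference such as

       a.b[i + 1].c

   the loop below pushes the COMPONENT_REF for ".c", then the ARRAY_REF
   for "[i + 1]", then the COMPONENT_REF for ".b" onto STACK, leaving P
   pointing at the VAR_DECL "a"; "a" is gimplified first and "i + 1" is
   reduced to a temporary afterwards.  */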
| |
| static enum gimplify_status |
| gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p, |
| fallback_t fallback) |
| { |
| tree *p; |
| VEC(tree,heap) *stack; |
| enum gimplify_status ret = GS_OK, tret; |
| int i; |
| |
| /* Create a stack of the subexpressions so later we can walk them in |
| order from inner to outer. */ |
| stack = VEC_alloc (tree, heap, 10); |
| |
| /* We can handle anything that get_inner_reference can deal with. */ |
| for (p = expr_p; ; p = &TREE_OPERAND (*p, 0)) |
| { |
| restart: |
| /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */ |
| if (TREE_CODE (*p) == INDIRECT_REF) |
| *p = fold_indirect_ref (*p); |
| |
| if (handled_component_p (*p)) |
| ; |
| /* Expand DECL_VALUE_EXPR now. In some cases that may expose |
| additional COMPONENT_REFs. */ |
| else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL) |
| && gimplify_var_or_parm_decl (p) == GS_OK) |
| goto restart; |
| else |
| break; |
| |
| VEC_safe_push (tree, heap, stack, *p); |
| } |
| |
| gcc_assert (VEC_length (tree, stack)); |
| |
| /* Now STACK is a stack of pointers to all the refs we've walked through |
| and P points to the innermost expression. |
| |
     Java requires that we elaborate nodes in source order.  That
| means we must gimplify the inner expression followed by each of |
| the indices, in order. But we can't gimplify the inner |
| expression until we deal with any variable bounds, sizes, or |
| positions in order to deal with PLACEHOLDER_EXPRs. |
| |
| So we do this in three steps. First we deal with the annotations |
| for any variables in the components, then we gimplify the base, |
| then we gimplify any indices, from left to right. */ |
| for (i = VEC_length (tree, stack) - 1; i >= 0; i--) |
| { |
| tree t = VEC_index (tree, stack, i); |
| |
| if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) |
| { |
| /* Gimplify the low bound and element type size and put them into |
| the ARRAY_REF. If these values are set, they have already been |
| gimplified. */ |
| if (TREE_OPERAND (t, 2) == NULL_TREE) |
| { |
| tree low = unshare_expr (array_ref_low_bound (t)); |
| if (!is_gimple_min_invariant (low)) |
| { |
| TREE_OPERAND (t, 2) = low; |
| tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, |
| post_p, is_gimple_formal_tmp_reg, |
| fb_rvalue); |
| ret = MIN (ret, tret); |
| } |
| } |
| |
| if (!TREE_OPERAND (t, 3)) |
| { |
| tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0))); |
| tree elmt_size = unshare_expr (array_ref_element_size (t)); |
| tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type)); |
| |
| /* Divide the element size by the alignment of the element |
| type (above). */ |
| elmt_size = size_binop (EXACT_DIV_EXPR, elmt_size, factor); |
| |
| if (!is_gimple_min_invariant (elmt_size)) |
| { |
| TREE_OPERAND (t, 3) = elmt_size; |
| tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, |
| post_p, is_gimple_formal_tmp_reg, |
| fb_rvalue); |
| ret = MIN (ret, tret); |
| } |
| } |
| } |
| else if (TREE_CODE (t) == COMPONENT_REF) |
| { |
| /* Set the field offset into T and gimplify it. */ |
| if (!TREE_OPERAND (t, 2)) |
| { |
| tree offset = unshare_expr (component_ref_field_offset (t)); |
| tree field = TREE_OPERAND (t, 1); |
| tree factor |
| = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT); |
| |
| /* Divide the offset by its alignment. */ |
| offset = size_binop (EXACT_DIV_EXPR, offset, factor); |
| |
| if (!is_gimple_min_invariant (offset)) |
| { |
| TREE_OPERAND (t, 2) = offset; |
| tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, |
| post_p, is_gimple_formal_tmp_reg, |
| fb_rvalue); |
| ret = MIN (ret, tret); |
| } |
| } |
| } |
| } |
| |
| /* Step 2 is to gimplify the base expression. Make sure lvalue is set |
| so as to match the min_lval predicate. Failure to do so may result |
| in the creation of large aggregate temporaries. */ |
| tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval, |
| fallback | fb_lvalue); |
| ret = MIN (ret, tret); |
| |
| /* And finally, the indices and operands to BIT_FIELD_REF. During this |
| loop we also remove any useless conversions. */ |
| for (; VEC_length (tree, stack) > 0; ) |
| { |
| tree t = VEC_pop (tree, stack); |
| |
| if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) |
| { |
| /* Gimplify the dimension. |
| Temporary fix for gcc.c-torture/execute/20040313-1.c. |
| Gimplify non-constant array indices into a temporary |
| variable. |
| FIXME - The real fix is to gimplify post-modify |
| expressions into a minimal gimple lvalue. However, that |
| exposes bugs in alias analysis. The alias analyzer does |
| not handle &PTR->FIELD very well. Will fix after the |
| branch is merged into mainline (dnovillo 2004-05-03). */ |
| if (!is_gimple_min_invariant (TREE_OPERAND (t, 1))) |
| { |
| tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p, |
| is_gimple_formal_tmp_reg, fb_rvalue); |
| ret = MIN (ret, tret); |
| } |
| } |
| else if (TREE_CODE (t) == BIT_FIELD_REF) |
| { |
| tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p, |
| is_gimple_val, fb_rvalue); |
| ret = MIN (ret, tret); |
| tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p, |
| is_gimple_val, fb_rvalue); |
| ret = MIN (ret, tret); |
| } |
| |
| STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0)); |
| |
| /* The innermost expression P may have originally had |
| TREE_SIDE_EFFECTS set which would have caused all the outer |
| expressions in *EXPR_P leading to P to also have had |
| TREE_SIDE_EFFECTS set. */ |
| recalculate_side_effects (t); |
| } |
| |
| /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */ |
| if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF) |
| { |
| canonicalize_component_ref (expr_p); |
| ret = MIN (ret, GS_OK); |
| } |
| |
| VEC_free (tree, heap, stack); |
| |
| return ret; |
| } |
| |
| /* Gimplify the self modifying expression pointed to by EXPR_P |
| (++, --, +=, -=). |
| |
| PRE_P points to the list where side effects that must happen before |
| *EXPR_P should be stored. |
| |
| POST_P points to the list where side effects that must happen after |
| *EXPR_P should be stored. |
| |
| WANT_VALUE is nonzero iff we want to use the value of this expression |
| in another expression. */ |
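/* As a sketch, when the value is needed,

       y = x++;

   is gimplified roughly as

       y = x;
       x = x + 1;

   while "++x", or "x++" whose value is unused, simply becomes
   "x = x + 1" via the MODIFY_EXPR built below.  */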
| |
| static enum gimplify_status |
| gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p, |
| bool want_value) |
| { |
| enum tree_code code; |
| tree lhs, lvalue, rhs, t1; |
| gimple_seq post = NULL, *orig_post_p = post_p; |
| bool postfix; |
| enum tree_code arith_code; |
| enum gimplify_status ret; |
| |
| code = TREE_CODE (*expr_p); |
| |
| gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR |
| || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR); |
| |
| /* Prefix or postfix? */ |
| if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR) |
| /* Faster to treat as prefix if result is not used. */ |
| postfix = want_value; |
| else |
| postfix = false; |
| |
| /* For postfix, make sure the inner expression's post side effects |
| are executed after side effects from this expression. */ |
| if (postfix) |
| post_p = &post; |
| |
| /* Add or subtract? */ |
| if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR) |
| arith_code = PLUS_EXPR; |
| else |
| arith_code = MINUS_EXPR; |
| |
| /* Gimplify the LHS into a GIMPLE lvalue. */ |
| lvalue = TREE_OPERAND (*expr_p, 0); |
| ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue); |
| if (ret == GS_ERROR) |
| return ret; |
| |
| /* Extract the operands to the arithmetic operation. */ |
| lhs = lvalue; |
| rhs = TREE_OPERAND (*expr_p, 1); |
| |
  /* For a postfix operator, we evaluate the LHS into an rvalue and then
     use it both as the result value and in the postqueue operation.  */
| if (postfix) |
| { |
| ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue); |
| if (ret == GS_ERROR) |
| return ret; |
| } |
| |
  /* For pointer increment and decrement, use POINTER_PLUS_EXPR.  */
| if (POINTER_TYPE_P (TREE_TYPE (lhs))) |
| { |
| rhs = fold_convert (sizetype, rhs); |
| if (arith_code == MINUS_EXPR) |
| rhs = fold_build1 (NEGATE_EXPR, TREE_TYPE (rhs), rhs); |
| arith_code = POINTER_PLUS_EXPR; |
| } |
| |
| t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs); |
| |
| if (postfix) |
| { |
| gimplify_assign (lvalue, t1, orig_post_p); |
| gimplify_seq_add_seq (orig_post_p, post); |
| *expr_p = lhs; |
| return GS_ALL_DONE; |
| } |
| else |
| { |
| *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1); |
| return GS_OK; |
| } |
| } |
| |
| |
| /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */ |
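/* As a sketch, if EXPR has a variably modified type such as "char[n]",
   the expression is wrapped as

       WITH_SIZE_EXPR <EXPR, n>

   (the size in bytes), so that later passes still know how much data an
   assignment or call involving EXPR moves around.  */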
| |
| static void |
| maybe_with_size_expr (tree *expr_p) |
| { |
| tree expr = *expr_p; |
| tree type = TREE_TYPE (expr); |
| tree size; |
| |
| /* If we've already wrapped this or the type is error_mark_node, we can't do |
| anything. */ |
| if (TREE_CODE (expr) == WITH_SIZE_EXPR |
| || type == error_mark_node) |
| return; |
| |
| /* If the size isn't known or is a constant, we have nothing to do. */ |
| size = TYPE_SIZE_UNIT (type); |
| if (!size || TREE_CODE (size) == INTEGER_CST) |
| return; |
| |
| /* Otherwise, make a WITH_SIZE_EXPR. */ |
| size = unshare_expr (size); |
| size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr); |
| *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size); |
| } |
| |
| |
/* Helper for gimplify_call_expr.  Gimplify a single argument *ARG_P.
| Store any side-effects in PRE_P. CALL_LOCATION is the location of |
| the CALL_EXPR. */ |
| |
| static enum gimplify_status |
| gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location) |
| { |
| bool (*test) (tree); |
| fallback_t fb; |
| |
| /* In general, we allow lvalues for function arguments to avoid |
| extra overhead of copying large aggregates out of even larger |
| aggregates into temporaries only to copy the temporaries to |
| the argument list. Make optimizers happy by pulling out to |
| temporaries those types that fit in registers. */ |
| if (is_gimple_reg_type (TREE_TYPE (*arg_p))) |
| test = is_gimple_val, fb = fb_rvalue; |
| else |
| test = is_gimple_lvalue, fb = fb_either; |
| |
| /* If this is a variable sized type, we must remember the size. */ |
| maybe_with_size_expr (arg_p); |
| |
| /* Make sure arguments have the same location as the function call |
| itself. */ |
| protected_set_expr_location (*arg_p, call_location); |
| |
| /* There is a sequence point before a function call. Side effects in |
| the argument list must occur before the actual call. So, when |
| gimplifying arguments, force gimplify_expr to use an internal |
| post queue which is then appended to the end of PRE_P. */ |
| return gimplify_expr (arg_p, pre_p, NULL, test, fb); |
| } |
| |
| |
| /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P. |
| WANT_VALUE is true if the result of the call is desired. */ |
| |
| static enum gimplify_status |
| gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value) |
| { |
| tree fndecl, parms, p; |
| enum gimplify_status ret; |
| int i, nargs; |
| gimple call; |
| bool builtin_va_start_p = FALSE; |
| |
| gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR); |
| |
| /* For reliable diagnostics during inlining, it is necessary that |
| every call_expr be annotated with file and line. */ |
| if (! EXPR_HAS_LOCATION (*expr_p)) |
| SET_EXPR_LOCATION (*expr_p, input_location); |
| |
| /* This may be a call to a builtin function. |
| |
| Builtin function calls may be transformed into different |
| (and more efficient) builtin function calls under certain |
| circumstances. Unfortunately, gimplification can muck things |
| up enough that the builtin expanders are not aware that certain |
| transformations are still valid. |
| |
| So we attempt transformation/gimplification of the call before |
| we gimplify the CALL_EXPR. At this time we do not manage to |
| transform all calls in the same manner as the expanders do, but |
| we do transform most of them. */ |
| fndecl = get_callee_fndecl (*expr_p); |
| if (fndecl && DECL_BUILT_IN (fndecl)) |
| { |
| tree new_tree = fold_call_expr (*expr_p, !want_value); |
| |
| if (new_tree && new_tree != *expr_p) |
| { |
| /* There was a transformation of this call which computes the |
| same value, but in a more efficient way. Return and try |
| again. */ |
| *expr_p = new_tree; |
| return GS_OK; |
| } |
| |
| if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL |
| && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_VA_START) |
| { |
| builtin_va_start_p = TRUE; |
| if (call_expr_nargs (*expr_p) < 2) |
| { |
| error ("too few arguments to function %<va_start%>"); |
| *expr_p = build_empty_stmt (); |
| return GS_OK; |
| } |
| |
| if (fold_builtin_next_arg (*expr_p, true)) |
| { |
| *expr_p = build_empty_stmt (); |
| return GS_OK; |
| } |
| } |
| } |
| |
| /* There is a sequence point before the call, so any side effects in |
| the calling expression must occur before the actual call. Force |
| gimplify_expr to use an internal post queue. */ |
| ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL, |
| is_gimple_call_addr, fb_rvalue); |
| |
| nargs = call_expr_nargs (*expr_p); |
| |
| /* Get argument types for verification. */ |
| fndecl = get_callee_fndecl (*expr_p); |
| parms = NULL_TREE; |
| if (fndecl) |
| parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl)); |
| else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p)))) |
| parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p)))); |
| |
| if (fndecl && DECL_ARGUMENTS (fndecl)) |
| p = DECL_ARGUMENTS (fndecl); |
| else if (parms) |
| p = parms; |
| else |
| p = NULL_TREE; |
| for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p)) |
| ; |
| |
| /* If the last argument is __builtin_va_arg_pack () and it is not |
| passed as a named argument, decrease the number of CALL_EXPR |
| arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */ |
| if (!p |
| && i < nargs |
| && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR) |
| { |
| tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1); |
| tree last_arg_fndecl = get_callee_fndecl (last_arg); |
| |
| if (last_arg_fndecl |
| && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL |
| && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL |
| && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK) |
| { |
| tree call = *expr_p; |
| |
| --nargs; |
| *expr_p = build_call_array (TREE_TYPE (call), CALL_EXPR_FN (call), |
| nargs, CALL_EXPR_ARGP (call)); |
| |
| /* Copy all CALL_EXPR flags, location and block, except |
| CALL_EXPR_VA_ARG_PACK flag. */ |
| CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call); |
| CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call); |
| CALL_EXPR_RETURN_SLOT_OPT (*expr_p) |
| = CALL_EXPR_RETURN_SLOT_OPT (call); |
| CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call); |
| CALL_CANNOT_INLINE_P (*expr_p) = CALL_CANNOT_INLINE_P (call); |
| SET_EXPR_LOCUS (*expr_p, EXPR_LOCUS (call)); |
| TREE_BLOCK (*expr_p) = TREE_BLOCK (call); |
| |
| /* Set CALL_EXPR_VA_ARG_PACK. */ |
| CALL_EXPR_VA_ARG_PACK (*expr_p) = 1; |
| } |
| } |
| |
| /* Finally, gimplify the function arguments. */ |
| if (nargs > 0) |
| { |
| for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0); |
| PUSH_ARGS_REVERSED ? i >= 0 : i < nargs; |
| PUSH_ARGS_REVERSED ? i-- : i++) |
| { |
| enum gimplify_status t; |
| |
| /* Avoid gimplifying the second argument to va_start, which needs to |
| be the plain PARM_DECL. */ |
| if ((i != 1) || !builtin_va_start_p) |
| { |
| t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, |
| EXPR_LOCATION (*expr_p)); |
| |
| if (t == GS_ERROR) |
| ret = GS_ERROR; |
| } |
| } |
| } |
| |
| /* Try this again in case gimplification exposed something. */ |
| if (ret != GS_ERROR) |
| { |
| tree new_tree = fold_call_expr (*expr_p, !want_value); |
| |
| if (new_tree && new_tree != *expr_p) |
| { |
| /* There was a transformation of this call which computes the |
| same value, but in a more efficient way. Return and try |
| again. */ |
| *expr_p = new_tree; |
| return GS_OK; |
| } |
| } |
| else |
| { |
| *expr_p = error_mark_node; |
| return GS_ERROR; |
| } |
| |
  /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on the
     call.  This allows us to eliminate redundant or useless
     calls to "const" and "pure" functions.  */
| if (TREE_CODE (*expr_p) == CALL_EXPR) |
| { |
| int flags = call_expr_flags (*expr_p); |
| if (flags & (ECF_CONST | ECF_PURE) |
| /* An infinite loop is considered a side effect. */ |
| && !(flags & (ECF_LOOPING_CONST_OR_PURE))) |
| TREE_SIDE_EFFECTS (*expr_p) = 0; |
| } |
| |
| /* If the value is not needed by the caller, emit a new GIMPLE_CALL |
| and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified |
| form and delegate the creation of a GIMPLE_CALL to |
| gimplify_modify_expr. This is always possible because when |
| WANT_VALUE is true, the caller wants the result of this call into |
| a temporary, which means that we will emit an INIT_EXPR in |
| internal_get_tmp_var which will then be handled by |
| gimplify_modify_expr. */ |
| if (!want_value) |
| { |
| /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we |
| have to do is replicate it as a GIMPLE_CALL tuple. */ |
| call = gimple_build_call_from_tree (*expr_p); |
| gimplify_seq_add_stmt (pre_p, call); |
| *expr_p = NULL_TREE; |
| } |
| |
| return ret; |
| } |
| |
| /* Handle shortcut semantics in the predicate operand of a COND_EXPR by |
| rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs. |
| |
| TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the |
| condition is true or false, respectively. If null, we should generate |
| our own to skip over the evaluation of this specific expression. |
| |
| This function is the tree equivalent of do_jump. |
| |
| shortcut_cond_r should only be called by shortcut_cond_expr. */ |
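/* As an informal sketch, given the predicate "a && (b || c)" and both
   labels, the recursion below produces approximately

       if (a) ; else goto no;
       if (b) goto yes;
       if (c) goto yes; else goto no;

   i.e. each leaf condition becomes a two-way COND_EXPR whose arms are
   plain gotos or fall-throughs.  */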
| |
| static tree |
| shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p) |
| { |
| tree local_label = NULL_TREE; |
| tree t, expr = NULL; |
| |
| /* OK, it's not a simple case; we need to pull apart the COND_EXPR to |
| retain the shortcut semantics. Just insert the gotos here; |
| shortcut_cond_expr will append the real blocks later. */ |
| if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR) |
| { |
| /* Turn if (a && b) into |
| |
| if (a); else goto no; |
| if (b) goto yes; else goto no; |
| (no:) */ |
| |
| if (false_label_p == NULL) |
| false_label_p = &local_label; |
| |
| t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p); |
| append_to_statement_list (t, &expr); |
| |
| t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, |
| false_label_p); |
| append_to_statement_list (t, &expr); |
| } |
| else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR) |
| { |
| /* Turn if (a || b) into |
| |
| if (a) goto yes; |
| if (b) goto yes; else goto no; |
| (yes:) */ |
| |
| if (true_label_p == NULL) |
| true_label_p = &local_label; |
| |
| t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL); |
| append_to_statement_list (t, &expr); |
| |
| t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, |
| false_label_p); |
| append_to_statement_list (t, &expr); |
| } |
| else if (TREE_CODE (pred) == COND_EXPR) |
| { |
| /* As long as we're messing with gotos, turn if (a ? b : c) into |
| if (a) |
| if (b) goto yes; else goto no; |
| else |
| if (c) goto yes; else goto no; */ |
| expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0), |
| shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, |
| false_label_p), |
| shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p, |
| false_label_p)); |
| } |
| else |
| { |
| expr = build3 (COND_EXPR, void_type_node, pred, |
| build_and_jump (true_label_p), |
| build_and_jump (false_label_p)); |
| } |
| |
| if (local_label) |
| { |
| t = build1 (LABEL_EXPR, void_type_node, local_label); |
| append_to_statement_list (t, &expr); |
| } |
| |
| return expr; |
| } |
| |
| /* Given a conditional expression EXPR with short-circuit boolean |
| predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the |
   predicate apart into the equivalent sequence of conditionals.  */
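/* As an informal sketch,

       if (a && b) x (); else y ();

   becomes approximately

       if (a) ; else goto no;
       if (b) ; else goto no;
       x (); goto end;
     no:
       y ();
     end:

   before the individual conditions themselves are gimplified.  */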
| |
| static tree |
| shortcut_cond_expr (tree expr) |
| { |
| tree pred = TREE_OPERAND (expr, 0); |
| tree then_ = TREE_OPERAND (expr, 1); |
| tree else_ = TREE_OPERAND (expr, 2); |
| tree true_label, false_label, end_label, t; |
| tree *true_label_p; |
| tree *false_label_p; |
| bool emit_end, emit_false, jump_over_else; |
| bool then_se = then_ && TREE_SIDE_EFFECTS (then_); |
| bool else_se = else_ && TREE_SIDE_EFFECTS (else_); |
| |
| /* First do simple transformations. */ |
| if (!else_se) |
| { |
| /* If there is no 'else', turn (a && b) into if (a) if (b). */ |
| while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR) |
| { |
| TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1); |
| then_ = shortcut_cond_expr (expr); |
| then_se = then_ && TREE_SIDE_EFFECTS (then_); |
| pred = TREE_OPERAND (pred, 0); |
| expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE); |
| } |
| } |
| |
| if (!then_se) |
| { |
| /* If there is no 'then', turn |
| if (a || b); else d |
| into |
| if (a); else if (b); else d. */ |
| while (TREE_CODE (pred) == TRUTH_ORIF_EXPR) |
| { |
| TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1); |
| else_ = shortcut_cond_expr (expr); |
| else_se = else_ && TREE_SIDE_EFFECTS (else_); |
| pred = TREE_OPERAND (pred, 0); |
| expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_); |
| } |
| } |
| |
| /* If we're done, great. */ |
| if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR |
| && TREE_CODE (pred) != TRUTH_ORIF_EXPR) |
| return expr; |
| |
| /* Otherwise we need to mess with gotos. Change |
| if (a) c; else d; |
| to |
| if (a); else goto no; |
| c; goto end; |
| no: d; end: |
| and recursively gimplify the condition. */ |
| |
| true_label = false_label = end_label = NULL_TREE; |
| |
| /* If our arms just jump somewhere, hijack those labels so we don't |
| generate jumps to jumps. */ |
| |
| if (then_ |
| && TREE_CODE (then_) == GOTO_EXPR |
| && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL) |
| { |
| true_label = GOTO_DESTINATION (then_); |
| then_ = NULL; |
| then_se = false; |
| } |
| |
| if (else_ |
| && TREE_CODE (else_) == GOTO_EXPR |
| && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL) |
| { |
| false_label = GOTO_DESTINATION (else_); |
| else_ = NULL; |
| else_se = false; |
| } |
| |
| /* If we aren't hijacking a label for the 'then' branch, it falls through. */ |
| if (true_label) |
| true_label_p = &true_label; |
| else |
| true_label_p = NULL; |
| |
| /* The 'else' branch also needs a label if it contains interesting code. */ |
| if (false_label || else_se) |
| false_label_p = &false_label; |
| else |
| false_label_p = NULL; |
| |
| /* If there was nothing else in our arms, just forward the label(s). */ |
| if (!then_se && !else_se) |
| return shortcut_cond_r (pred, true_label_p, false_label_p); |
| |
| /* If our last subexpression already has a terminal label, reuse it. */ |
| if (else_se) |
| expr = expr_last (else_); |
| else if (then_se) |
| expr = expr_last (then_); |
| else |
| expr = NULL; |
| if (expr && TREE_CODE (expr) == LABEL_EXPR) |
| end_label = LABEL_EXPR_LABEL (expr); |
| |
| /* If we don't care about jumping to the 'else' branch, jump to the end |
| if the condition is false. */ |
| if (!false_label_p) |
| false_label_p = &end_label; |
| |
| /* We only want to emit these labels if we aren't hijacking them. */ |
| emit_end = (end_label == NULL_TREE); |
| emit_false = (false_label == NULL_TREE); |
| |
| /* We only emit the jump over the else clause if we have to--if the |
| then clause may fall through. Otherwise we can wind up with a |
| useless jump and a useless label at the end of gimplified code, |
| which will cause us to think that this conditional as a whole |
| falls through even if it doesn't. If we then inline a function |
| which ends with such a condition, that can cause us to issue an |
| inappropriate warning about control reaching the end of a |
| non-void function. */ |
| jump_over_else = block_may_fallthru (then_); |
| |
| pred = shortcut_cond_r (pred, true_label_p, false_label_p); |
| |
| expr = NULL; |
| append_to_statement_list (pred, &expr); |
| |
| append_to_statement_list (then_, &expr); |
| if (else_se) |
| { |
| if (jump_over_else) |
| { |
| t = build_and_jump (&end_label); |
| append_to_statement_list (t, &expr); |
| } |
| if (emit_false) |
| { |
| t = build1 (LABEL_EXPR, void_type_node, false_label); |
| append_to_statement_list (t, &expr); |
| } |
| append_to_statement_list (else_, &expr); |
| } |
| if (emit_end && end_label) |
| { |
| t = build1 (LABEL_EXPR, void_type_node, end_label); |
| append_to_statement_list (t, &expr); |
| } |
| |
| return expr; |
| } |
| |
| /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */ |
| |
| tree |
| gimple_boolify (tree expr) |
| { |
| tree type = TREE_TYPE (expr); |
| |
| if (TREE_CODE (type) == BOOLEAN_TYPE) |
| return expr; |
| |
| switch (TREE_CODE (expr)) |
| { |
| case TRUTH_AND_EXPR: |
| case TRUTH_OR_EXPR: |
| case TRUTH_XOR_EXPR: |
| case TRUTH_ANDIF_EXPR: |
| case TRUTH_ORIF_EXPR: |
| /* Also boolify the arguments of truth exprs. */ |
| TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1)); |
| /* FALLTHRU */ |
| |
| case TRUTH_NOT_EXPR: |
| TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0)); |
| /* FALLTHRU */ |
| |
| case EQ_EXPR: case NE_EXPR: |
| case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR: |
| /* These expressions always produce boolean results. */ |
| TREE_TYPE (expr) = boolean_type_node; |
| return expr; |
| |
| default: |
| /* Other expressions that get here must have boolean values, but |
| might need to be converted to the appropriate mode. */ |
| return fold_convert (boolean_type_node, expr); |
| } |
| } |
| |
| /* Given a conditional expression *EXPR_P without side effects, gimplify |
   its operands.  New statements are inserted into PRE_P.  */
| |
| static enum gimplify_status |
| gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p) |
| { |
| tree expr = *expr_p, cond; |
| enum gimplify_status ret, tret; |
| enum tree_code code; |
| |
| cond = gimple_boolify (COND_EXPR_COND (expr)); |
| |
  /* We need to handle && and || specially, as gimplifying them creates
     pure COND_EXPRs, which would otherwise lead us back here in an
     infinite cycle.  */
| code = TREE_CODE (cond); |
| if (code == TRUTH_ANDIF_EXPR) |
| TREE_SET_CODE (cond, TRUTH_AND_EXPR); |
| else if (code == TRUTH_ORIF_EXPR) |
| TREE_SET_CODE (cond, TRUTH_OR_EXPR); |
| ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue); |
| COND_EXPR_COND (*expr_p) = cond; |
| |
| tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL, |
| is_gimple_val, fb_rvalue); |
| ret = MIN (ret, tret); |
| tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL, |
| is_gimple_val, fb_rvalue); |
| |
| return MIN (ret, tret); |
| } |
| |
| /* Returns true if evaluating EXPR could trap. |
| EXPR is GENERIC, while tree_could_trap_p can be called |
| only on GIMPLE. */ |
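/* For instance, "*p + 1" or "x / y" could trap (through a bad pointer
   or a division by zero), whereas a bare decl or an integer constant
   cannot.  This is an informal illustration, not an exhaustive
   characterization.  */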
| |
| static bool |
| generic_expr_could_trap_p (tree expr) |
| { |
| unsigned i, n; |
| |
| if (!expr || is_gimple_val (expr)) |
| return false; |
| |
| if (!EXPR_P (expr) || tree_could_trap_p (expr)) |
| return true; |
| |
| n = TREE_OPERAND_LENGTH (expr); |
| for (i = 0; i < n; i++) |
| if (generic_expr_could_trap_p (TREE_OPERAND (expr, i))) |
| return true; |
| |
| return false; |
| } |
| |
| /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;' |
| into |
| |
| if (p) if (p) |
| t1 = a; a; |
| else or else |
| t1 = b; b; |
| t1; |
| |
| The second form is used when *EXPR_P is of type void. |
| |
| PRE_P points to the list where side effects that must happen before |
| *EXPR_P should be stored. */ |
| |
| static enum gimplify_status |
| gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback) |
| { |
| tree expr = *expr_p; |
| tree tmp, type, arm1, arm2; |
| enum gimplify_status ret; |
| tree label_true, label_false, label_cont; |
| bool have_then_clause_p, have_else_clause_p; |
| gimple gimple_cond; |
| enum tree_code pred_code; |
| gimple_seq seq = NULL; |
| |
| type = TREE_TYPE (expr); |
| |
| /* If this COND_EXPR has a value, copy the values into a temporary within |
| the arms. */ |
| if (! VOID_TYPE_P (type)) |
| { |
| tree result; |
| |
| /* If an rvalue is ok or we do not require an lvalue, avoid creating |
| an addressable temporary. */ |
| if (((fallback & fb_rvalue) |
| || !(fallback & fb_lvalue)) |
| && !TREE_ADDRESSABLE (type)) |
| { |
| if (gimplify_ctxp->allow_rhs_cond_expr |
| /* If either branch has side effects or could trap, it can't be |
| evaluated unconditionally. */ |
| && !TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1)) |
| && !generic_expr_could_trap_p (TREE_OPERAND (*expr_p, 1)) |
| && !TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 2)) |
| && !generic_expr_could_trap_p (TREE_OPERAND (*expr_p, 2))) |
| return gimplify_pure_cond_expr (expr_p, pre_p); |
| |
| result = tmp = create_tmp_var (TREE_TYPE (expr), "iftmp"); |
| ret = GS_ALL_DONE; |
| } |
| else |
| { |
| tree type = build_pointer_type (TREE_TYPE (expr)); |
| |
| if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node) |
| TREE_OPERAND (expr, 1) = |
| build_fold_addr_expr (TREE_OPERAND (expr, 1)); |
| |
| if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node) |
| TREE_OPERAND (expr, 2) = |
| build_fold_addr_expr (TREE_OPERAND (expr, 2)); |
| |
| tmp = create_tmp_var (type, "iftmp"); |
| |
| expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (expr, 0), |
| TREE_OPERAND (expr, 1), TREE_OPERAND (expr, 2)); |
| |
| result = build_fold_indirect_ref (tmp); |
| } |
| |
| /* Build the then clause, 't1 = a;'. But don't build an assignment |
| if this branch is void; in C++ it can be, if it's a throw. */ |
| if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node) |
| TREE_OPERAND (expr, 1) |
| = build2 (MODIFY_EXPR, TREE_TYPE (tmp), tmp, TREE_OPERAND (expr, 1)); |
| |
| /* Build the else clause, 't1 = b;'. */ |
| if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node) |
| TREE_OPERAND (expr, 2) |
| = build2 (MODIFY_EXPR, TREE_TYPE (tmp), tmp, TREE_OPERAND (expr, 2)); |
| |
| TREE_TYPE (expr) = void_type_node; |
| recalculate_side_effects (expr); |
| |
| /* Move the COND_EXPR to the prequeue. */ |
| gimplify_stmt (&expr, pre_p); |
| |
| *expr_p = result; |
| return GS_ALL_DONE; |
| } |
| |
| /* Make sure the condition has BOOLEAN_TYPE. */ |
| TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0)); |
| |
| /* Break apart && and || conditions. */ |
| if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR |
| || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR) |
| { |
| expr = shortcut_cond_expr (expr); |
| |
| if (expr != *expr_p) |
| { |
| *expr_p = expr; |
| |
| /* We can't rely on gimplify_expr to re-gimplify the expanded |
| form properly, as cleanups might cause the target labels to be |
| wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to |
| set up a conditional context. */ |
| gimple_push_condition (); |
| gimplify_stmt (expr_p, &seq); |
| gimple_pop_condition (pre_p); |
| gimple_seq_add_seq (pre_p, seq); |
| |
| return GS_ALL_DONE; |
| } |
| } |
| |
| /* Now do the normal gimplification. */ |
| |
| /* Gimplify condition. */ |
| ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr, |
| fb_rvalue); |
| if (ret == GS_ERROR) |
| return GS_ERROR; |
| gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE); |
| |
| gimple_push_condition (); |
| |
| have_then_clause_p = have_else_clause_p = false; |
| if (TREE_OPERAND (expr, 1) != NULL |
| && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR |
| && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL |
| && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) |
| == current_function_decl) |
| /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR |
| have different locations, otherwise we end up with incorrect |
| location information on the branches. */ |
| && (optimize |
| || !EXPR_HAS_LOCATION (expr) |
| || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1)) |
| || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1)))) |
| { |
| label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1)); |
| have_then_clause_p = true; |
| } |
| else |
| label_true = create_artificial_label (); |
| if (TREE_OPERAND (expr, 2) != NULL |
| && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR |
| && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL |
| && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) |
| == current_function_decl) |
| /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR |
| have different locations, otherwise we end up with incorrect |
| location information on the branches. */ |
| && (optimize |
| || !EXPR_HAS_LOCATION (expr) |
| || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2)) |
| || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2)))) |
| { |
| label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2)); |
| have_else_clause_p = true; |
| } |
| else |
| label_false = create_artificial_label (); |
| |
| gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1, |
| &arm2); |
| |
| gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true, |
| label_false); |
| |
| gimplify_seq_add_stmt (&seq, gimple_cond); |
| label_cont = NULL_TREE; |
| if (!have_then_clause_p) |
| { |
| /* For if (...) {} else { code; } put label_true after |
| the else block. */ |
| if (TREE_OPERAND (expr, 1) == NULL_TREE |
| && !have_else_clause_p |
| && TREE_OPERAND (expr, 2) != NULL_TREE) |
| label_cont = label_true; |
| else |
| { |
| gimplify_seq_add_stmt (&seq, gimple_build_label (label_true)); |
| have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq); |
| /* For if (...) { code; } else {} or |
| if (...) { code; } else goto label; or |
| if (...) { code; return; } else { ... } |
| label_cont isn't needed. */ |
| if (!have_else_clause_p |
| && TREE_OPERAND (expr, 2) != NULL_TREE |
| && gimple_seq_may_fallthru (seq)) |
| { |
| gimple g; |
| label_cont = create_artificial_label (); |
| |
| g = gimple_build_goto (label_cont); |
| |
| /* GIMPLE_COND's are very low level; they have embedded |
| gotos. This particular embedded goto should not be marked |
| with the location of the original COND_EXPR, as it would |
| correspond to the COND_EXPR's condition, not the ELSE or the |
| THEN arms. To avoid marking it with the wrong location, flag |
| it as "no location". */ |
| gimple_set_do_not_emit_location (g); |
| |
| gimplify_seq_add_stmt (&seq, g); |
| } |
| } |
| } |
| if (!have_else_clause_p) |
| { |
| gimplify_seq_add_stmt (&seq, gimple_build_label (label_false)); |
| have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq); |
| } |
| if (label_cont) |
| gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont)); |
| |
| gimple_pop_condition (pre_p); |
| gimple_seq_add_seq (pre_p, seq); |
| |
| if (ret == GS_ERROR) |
| ; /* Do nothing. */ |
| else if (have_then_clause_p || have_else_clause_p) |
| ret = GS_ALL_DONE; |
| else |
| { |
| /* Both arms are empty; replace the COND_EXPR with its predicate. */ |
| expr = TREE_OPERAND (expr, 0); |
| gimplify_stmt (&expr, pre_p); |
| } |
| |
| *expr_p = NULL; |
| return ret; |
| } |
| |
| /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with |
| a call to __builtin_memcpy. */ |
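/* As a sketch, an aggregate assignment "a = b" whose size is too large
   or variable is emitted as

       __builtin_memcpy (&a, &b, size);

   where SIZE is supplied by the caller, typically taken from a
   WITH_SIZE_EXPR wrapped around the source.  */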
| |
| static enum gimplify_status |
| gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value, |
| gimple_seq *seq_p) |
| { |
| tree t, to, to_ptr, from, from_ptr; |
| gimple gs; |
| |
| to = TREE_OPERAND (*expr_p, 0); |
| from = TREE_OPERAND (*expr_p, 1); |
| |
| from_ptr = build_fold_addr_expr (from); |
| gimplify_arg (&from_ptr, seq_p, EXPR_LOCATION (*expr_p)); |
| |
| to_ptr = build_fold_addr_expr (to); |
| gimplify_arg (&to_ptr, seq_p, EXPR_LOCATION (*expr_p)); |
| |
| t = implicit_built_in_decls[BUILT_IN_MEMCPY]; |
| |
| gs = gimple_build_call (t, 3, to_ptr, from_ptr, size); |
| |
| if (want_value) |
| { |
| /* tmp = memcpy() */ |
| t = create_tmp_var (TREE_TYPE (to_ptr), NULL); |
| gimple_call_set_lhs (gs, t); |
| gimplify_seq_add_stmt (seq_p, gs); |
| |
| *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t); |
| return GS_ALL_DONE; |
| } |
| |
| gimplify_seq_add_stmt (seq_p, gs); |
| *expr_p = NULL; |
| return GS_ALL_DONE; |
| } |
| |
| /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with |
| a call to __builtin_memset. In this case we know that the RHS is |
| a CONSTRUCTOR with an empty element list. */ |
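/* As a sketch, an assignment from an empty CONSTRUCTOR, e.g.

       s = (struct S) { };

   is emitted as

       __builtin_memset (&s, 0, size);

   where SIZE is supplied by the caller.  */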
| |
| static enum gimplify_status |
| gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value, |
| gimple_seq *seq_p) |
| { |
| tree t, from, to, to_ptr; |
| gimple gs; |
| |
| /* Assert our assumptions, to abort instead of producing wrong code |
| silently if they are not met. Beware that the RHS CONSTRUCTOR might |
| not be immediately exposed. */ |
| from = TREE_OPERAND (*expr_p, 1); |
| if (TREE_CODE (from) == WITH_SIZE_EXPR) |
| from = TREE_OPERAND (from, 0); |
| |
| gcc_assert (TREE_CODE (from) == CONSTRUCTOR |
| && VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (from))); |
| |
| /* Now proceed. */ |
| to = TREE_OPERAND (*expr_p, 0); |
| |
| to_ptr = build_fold_addr_expr (to); |
| gimplify_arg (&to_ptr, seq_p, EXPR_LOCATION (*expr_p)); |
| t = implicit_built_in_decls[BUILT_IN_MEMSET]; |
| |
| gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size); |
| |
| if (want_value) |
| { |
| /* tmp = memset() */ |
| t = create_tmp_var (TREE_TYPE (to_ptr), NULL); |
| gimple_call_set_lhs (gs, t); |
| gimplify_seq_add_stmt (seq_p, gs); |
| |
| *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t); |
| return GS_ALL_DONE; |
| } |
| |
| gimplify_seq_add_stmt (seq_p, gs); |
| *expr_p = NULL; |
| return GS_ALL_DONE; |
| } |
| |
| /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree, |
| determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an |
| assignment. Returns non-null if we detect a potential overlap. */ |
| |
| struct gimplify_init_ctor_preeval_data |
| { |
| /* The base decl of the lhs object. May be NULL, in which case we |
| have to assume the lhs is indirect. */ |
| tree lhs_base_decl; |
| |
| /* The alias set of the lhs object. */ |
| alias_set_type lhs_alias_set; |
| }; |
| |
| static tree |
| gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata) |
| { |
| struct gimplify_init_ctor_preeval_data *data |
| = (struct gimplify_init_ctor_preeval_data *) xdata; |
| tree t = *tp; |
| |
| /* If we find the base object, obviously we have overlap. */ |
| if (data->lhs_base_decl == t) |
| return t; |
| |
| /* If the constructor component is indirect, determine if we have a |
| potential overlap with the lhs. The only bits of information we |
| have to go on at this point are addressability and alias sets. */ |
| if (TREE_CODE (t) == INDIRECT_REF |
| && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl)) |
| && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t))) |
| return t; |
| |
| /* If the constructor component is a call, determine if it can hide a |
| potential overlap with the lhs through an INDIRECT_REF like above. */ |
| if (TREE_CODE (t) == CALL_EXPR) |
| { |
| tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t))); |
| |
| for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type)) |
| if (POINTER_TYPE_P (TREE_VALUE (type)) |
| && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl)) |
| && alias_sets_conflict_p (data->lhs_alias_set, |
| get_alias_set |
| (TREE_TYPE (TREE_VALUE (type))))) |
| return t; |
| } |
| |
| if (IS_TYPE_OR_DECL_P (t)) |
| *walk_subtrees = 0; |
| return NULL; |
| } |
| |
| /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR, |
| force values that overlap with the lhs (as described by *DATA) |
| into temporaries. */ |
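/* As a sketch, in

       a = (struct S) { .x = 1, .y = a.x + 2 };

   the value "a.x + 2" reads the object being initialized, so it is
   evaluated into a temporary before any field of "a" is stored.  */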
| |
| static void |
| gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p, |
| struct gimplify_init_ctor_preeval_data *data) |
| { |
| enum gimplify_status one; |
| |
| /* If the value is constant, then there's nothing to pre-evaluate. */ |
| if (TREE_CONSTANT (*expr_p)) |
| { |
| /* Ensure it does not have side effects, it might contain a reference to |
| the object we're initializing. */ |
| gcc_assert (!TREE_SIDE_EFFECTS (*expr_p)); |
| return; |
| } |
| |
| /* If the type has non-trivial constructors, we can't pre-evaluate. */ |
| if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p))) |
| return; |
| |
| /* Recurse for nested constructors. */ |
| if (TREE_CODE (*expr_p) == CONSTRUCTOR) |
| { |
| unsigned HOST_WIDE_INT ix; |
| constructor_elt *ce; |
| VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p); |
| |
| for (ix = 0; VEC_iterate (constructor_elt, v, ix, ce); ix++) |
| gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data); |
| |
| return; |
| } |
| |
| /* If this is a variable sized type, we must remember the size. */ |
| maybe_with_size_expr (expr_p); |
| |
| /* Gimplify the constructor element to something appropriate for the rhs |
| of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know |
| the gimplifier will consider this a store to memory. Doing this |
| gimplification now means that we won't have to deal with complicated |
| language-specific trees, nor trees like SAVE_EXPR that can induce |
| exponential search behavior. */ |
| one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue); |
| if (one == GS_ERROR) |
| { |
| *expr_p = NULL; |
| return; |
| } |
| |
| /* If we gimplified to a bare decl, we can be sure that it doesn't overlap |
| with the lhs, since "a = { .x=a }" doesn't make sense. This will |
|