| /* Language-independent node constructors for parse phase of GNU compiler. |
| Copyright (C) 1987-2022 Free Software Foundation, Inc. |
| |
| This file is part of GCC. |
| |
| GCC is free software; you can redistribute it and/or modify it under |
| the terms of the GNU General Public License as published by the Free |
| Software Foundation; either version 3, or (at your option) any later |
| version. |
| |
| GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
| WARRANTY; without even the implied warranty of MERCHANTABILITY or |
| FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
| for more details. |
| |
| You should have received a copy of the GNU General Public License |
| along with GCC; see the file COPYING3. If not see |
| <http://www.gnu.org/licenses/>. */ |
| |
| /* This file contains the low level primitives for operating on tree nodes, |
| including allocation, list operations, interning of identifiers, |
| construction of data type nodes and statement nodes, |
| and construction of type conversion nodes. It also contains |
| tables indexed by tree code that describe how to take apart |
| nodes of that code. |
| |
| It is intended to be language-independent but occasionally calls |
| language-dependent routines. */ |
| |
| #include "config.h" |
| #include "system.h" |
| #include "coretypes.h" |
| #include "backend.h" |
| #include "target.h" |
| #include "tree.h" |
| #include "gimple.h" |
| #include "tree-pass.h" |
| #include "ssa.h" |
| #include "cgraph.h" |
| #include "diagnostic.h" |
| #include "flags.h" |
| #include "alias.h" |
| #include "fold-const.h" |
| #include "stor-layout.h" |
| #include "calls.h" |
| #include "attribs.h" |
| #include "toplev.h" /* get_random_seed */ |
| #include "output.h" |
| #include "common/common-target.h" |
| #include "langhooks.h" |
| #include "tree-inline.h" |
| #include "tree-iterator.h" |
| #include "internal-fn.h" |
| #include "gimple-iterator.h" |
| #include "gimplify.h" |
| #include "tree-dfa.h" |
| #include "langhooks-def.h" |
| #include "tree-diagnostic.h" |
| #include "except.h" |
| #include "builtins.h" |
| #include "print-tree.h" |
| #include "ipa-utils.h" |
| #include "selftest.h" |
| #include "stringpool.h" |
| #include "attribs.h" |
| #include "rtl.h" |
| #include "regs.h" |
| #include "tree-vector-builder.h" |
| #include "gimple-fold.h" |
| #include "escaped_string.h" |
| #include "gimple-range.h" |
| #include "gomp-constants.h" |
| |
| /* Tree code classes. */ |
| |
| #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE, |
| #define END_OF_BASE_TREE_CODES tcc_exceptional, |
| |
| const enum tree_code_class tree_code_type[] = { |
| #include "all-tree.def" |
| }; |
| |
| #undef DEFTREECODE |
| #undef END_OF_BASE_TREE_CODES |
| |
| /* Table indexed by tree code giving number of expression |
| operands beyond the fixed part of the node structure. |
| Not used for types or decls. */ |
| |
| #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH, |
| #define END_OF_BASE_TREE_CODES 0, |
| |
| const unsigned char tree_code_length[] = { |
| #include "all-tree.def" |
| }; |
| |
| #undef DEFTREECODE |
| #undef END_OF_BASE_TREE_CODES |
| |
| /* Names of tree codes. |
| Used for printing out the tree and in error messages. */ |
| #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME, |
| #define END_OF_BASE_TREE_CODES "@dummy", |
| |
| static const char *const tree_code_name[] = { |
| #include "all-tree.def" |
| }; |
| |
| #undef DEFTREECODE |
| #undef END_OF_BASE_TREE_CODES |
| |
| /* Each tree code class has an associated string representation. |
| These must correspond to the tree_code_class entries. */ |
| |
| const char *const tree_code_class_strings[] = |
| { |
| "exceptional", |
| "constant", |
| "type", |
| "declaration", |
| "reference", |
| "comparison", |
| "unary", |
| "binary", |
| "statement", |
| "vl_exp", |
| "expression" |
| }; |
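| |
| /* A minimal illustration (not part of this file's logic): the strings above |
| are indexed by enum tree_code_class, so, for example, |
| tree_code_class_strings[TREE_CODE_CLASS (PLUS_EXPR)] is "binary", since |
| PLUS_EXPR belongs to class tcc_binary. */ |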
| |
| /* obstack.[ch] explicitly declined to prototype this. */ |
| extern int _obstack_allocated_p (struct obstack *h, void *obj); |
| |
| /* Statistics-gathering stuff. */ |
| |
| static uint64_t tree_code_counts[MAX_TREE_CODES]; |
| uint64_t tree_node_counts[(int) all_kinds]; |
| uint64_t tree_node_sizes[(int) all_kinds]; |
| |
| /* Keep in sync with tree.h:enum tree_node_kind. */ |
| static const char * const tree_node_kind_names[] = { |
| "decls", |
| "types", |
| "blocks", |
| "stmts", |
| "refs", |
| "exprs", |
| "constants", |
| "identifiers", |
| "vecs", |
| "binfos", |
| "ssa names", |
| "constructors", |
| "random kinds", |
| "lang_decl kinds", |
| "lang_type kinds", |
| "omp clauses", |
| }; |
| |
| /* Unique id for next decl created. */ |
| static GTY(()) int next_decl_uid; |
| /* Unique id for next type created. */ |
| static GTY(()) unsigned next_type_uid = 1; |
| /* Unique id for next debug decl created. Use negative numbers |
| to catch erroneous uses. */ |
| static GTY(()) int next_debug_decl_uid; |
| |
| /* Since we cannot rehash a type after it is in the table, we have to |
| keep the hash code. */ |
| |
| struct GTY((for_user)) type_hash { |
| unsigned long hash; |
| tree type; |
| }; |
| |
| /* Initial size of the hash table (rounded to next prime). */ |
| #define TYPE_HASH_INITIAL_SIZE 1000 |
| |
| struct type_cache_hasher : ggc_cache_ptr_hash<type_hash> |
| { |
| static hashval_t hash (type_hash *t) { return t->hash; } |
| static bool equal (type_hash *a, type_hash *b); |
| |
| static int |
| keep_cache_entry (type_hash *&t) |
| { |
| return ggc_marked_p (t->type); |
| } |
| }; |
| |
| /* Now here is the hash table. When recording a type, it is added to |
| the slot whose index is the hash code. Note that the hash table is |
| used for several kinds of types (function types, array types and |
| array index range types, for now). While all these live in the |
| same table, they are completely independent, and the hash code is |
| computed differently for each of these. */ |
| |
| static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table; |
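| |
| /* Illustrative use only (see the type hashing code further below): type |
| constructors typically build a candidate node, hash it, and canonicalize |
| it through this table, roughly |
| |
| hashval_t hash = type_hash_canon_hash (t); |
| t = type_hash_canon (hash, t); |
| |
| so structurally identical types end up sharing a single node. */ |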
| |
| /* Hash table and temporary node for larger integer const values. */ |
| static GTY (()) tree int_cst_node; |
| |
| struct int_cst_hasher : ggc_cache_ptr_hash<tree_node> |
| { |
| static hashval_t hash (tree t); |
| static bool equal (tree x, tree y); |
| }; |
| |
| static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table; |
| |
| /* Class and variable for making sure that there is a single POLY_INT_CST |
| for a given value. */ |
| struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node> |
| { |
| typedef std::pair<tree, const poly_wide_int *> compare_type; |
| static hashval_t hash (tree t); |
| static bool equal (tree x, const compare_type &y); |
| }; |
| |
| static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table; |
| |
| /* Hash table for optimization flags and target option flags. Use the same |
| hash table for both sets of options. Nodes for building the current |
| optimization and target option nodes. The assumption is most of the time |
| the options created will already be in the hash table, so we avoid |
| allocating and freeing up a node repeatedly. */ |
| static GTY (()) tree cl_optimization_node; |
| static GTY (()) tree cl_target_option_node; |
| |
| struct cl_option_hasher : ggc_cache_ptr_hash<tree_node> |
| { |
| static hashval_t hash (tree t); |
| static bool equal (tree x, tree y); |
| }; |
| |
| static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table; |
| |
| /* General tree->tree mapping structure for use in hash tables. */ |
| |
| |
| static GTY ((cache)) |
| hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl; |
| |
| static GTY ((cache)) |
| hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl; |
| |
| static GTY ((cache)) |
| hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl; |
| |
| static void set_type_quals (tree, int); |
| static void print_type_hash_statistics (void); |
| static void print_debug_expr_statistics (void); |
| static void print_value_expr_statistics (void); |
| |
| tree global_trees[TI_MAX]; |
| tree integer_types[itk_none]; |
| |
| bool int_n_enabled_p[NUM_INT_N_ENTS]; |
| struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS]; |
| |
| bool tree_contains_struct[MAX_TREE_CODES][64]; |
| |
| /* Number of operands for each OMP clause. */ |
| unsigned const char omp_clause_num_ops[] = |
| { |
| 0, /* OMP_CLAUSE_ERROR */ |
| 1, /* OMP_CLAUSE_PRIVATE */ |
| 1, /* OMP_CLAUSE_SHARED */ |
| 1, /* OMP_CLAUSE_FIRSTPRIVATE */ |
| 2, /* OMP_CLAUSE_LASTPRIVATE */ |
| 5, /* OMP_CLAUSE_REDUCTION */ |
| 5, /* OMP_CLAUSE_TASK_REDUCTION */ |
| 5, /* OMP_CLAUSE_IN_REDUCTION */ |
| 1, /* OMP_CLAUSE_COPYIN */ |
| 1, /* OMP_CLAUSE_COPYPRIVATE */ |
| 3, /* OMP_CLAUSE_LINEAR */ |
| 1, /* OMP_CLAUSE_AFFINITY */ |
| 2, /* OMP_CLAUSE_ALIGNED */ |
| 3, /* OMP_CLAUSE_ALLOCATE */ |
| 1, /* OMP_CLAUSE_DEPEND */ |
| 1, /* OMP_CLAUSE_NONTEMPORAL */ |
| 1, /* OMP_CLAUSE_UNIFORM */ |
| 1, /* OMP_CLAUSE_TO_DECLARE */ |
| 1, /* OMP_CLAUSE_LINK */ |
| 1, /* OMP_CLAUSE_DETACH */ |
| 1, /* OMP_CLAUSE_USE_DEVICE_PTR */ |
| 1, /* OMP_CLAUSE_USE_DEVICE_ADDR */ |
| 1, /* OMP_CLAUSE_IS_DEVICE_PTR */ |
| 1, /* OMP_CLAUSE_INCLUSIVE */ |
| 1, /* OMP_CLAUSE_EXCLUSIVE */ |
| 2, /* OMP_CLAUSE_FROM */ |
| 2, /* OMP_CLAUSE_TO */ |
| 2, /* OMP_CLAUSE_MAP */ |
| 1, /* OMP_CLAUSE_HAS_DEVICE_ADDR */ |
| 2, /* OMP_CLAUSE__CACHE_ */ |
| 2, /* OMP_CLAUSE_GANG */ |
| 1, /* OMP_CLAUSE_ASYNC */ |
| 1, /* OMP_CLAUSE_WAIT */ |
| 0, /* OMP_CLAUSE_AUTO */ |
| 0, /* OMP_CLAUSE_SEQ */ |
| 1, /* OMP_CLAUSE__LOOPTEMP_ */ |
| 1, /* OMP_CLAUSE__REDUCTEMP_ */ |
| 1, /* OMP_CLAUSE__CONDTEMP_ */ |
| 1, /* OMP_CLAUSE__SCANTEMP_ */ |
| 1, /* OMP_CLAUSE_IF */ |
| 1, /* OMP_CLAUSE_NUM_THREADS */ |
| 1, /* OMP_CLAUSE_SCHEDULE */ |
| 0, /* OMP_CLAUSE_NOWAIT */ |
| 1, /* OMP_CLAUSE_ORDERED */ |
| 0, /* OMP_CLAUSE_DEFAULT */ |
| 3, /* OMP_CLAUSE_COLLAPSE */ |
| 0, /* OMP_CLAUSE_UNTIED */ |
| 1, /* OMP_CLAUSE_FINAL */ |
| 0, /* OMP_CLAUSE_MERGEABLE */ |
| 1, /* OMP_CLAUSE_DEVICE */ |
| 1, /* OMP_CLAUSE_DIST_SCHEDULE */ |
| 0, /* OMP_CLAUSE_INBRANCH */ |
| 0, /* OMP_CLAUSE_NOTINBRANCH */ |
| 2, /* OMP_CLAUSE_NUM_TEAMS */ |
| 1, /* OMP_CLAUSE_THREAD_LIMIT */ |
| 0, /* OMP_CLAUSE_PROC_BIND */ |
| 1, /* OMP_CLAUSE_SAFELEN */ |
| 1, /* OMP_CLAUSE_SIMDLEN */ |
| 0, /* OMP_CLAUSE_DEVICE_TYPE */ |
| 0, /* OMP_CLAUSE_FOR */ |
| 0, /* OMP_CLAUSE_PARALLEL */ |
| 0, /* OMP_CLAUSE_SECTIONS */ |
| 0, /* OMP_CLAUSE_TASKGROUP */ |
| 1, /* OMP_CLAUSE_PRIORITY */ |
| 1, /* OMP_CLAUSE_GRAINSIZE */ |
| 1, /* OMP_CLAUSE_NUM_TASKS */ |
| 0, /* OMP_CLAUSE_NOGROUP */ |
| 0, /* OMP_CLAUSE_THREADS */ |
| 0, /* OMP_CLAUSE_SIMD */ |
| 1, /* OMP_CLAUSE_HINT */ |
| 0, /* OMP_CLAUSE_DEFAULTMAP */ |
| 0, /* OMP_CLAUSE_ORDER */ |
| 0, /* OMP_CLAUSE_BIND */ |
| 1, /* OMP_CLAUSE_FILTER */ |
| 1, /* OMP_CLAUSE__SIMDUID_ */ |
| 0, /* OMP_CLAUSE__SIMT_ */ |
| 0, /* OMP_CLAUSE_INDEPENDENT */ |
| 1, /* OMP_CLAUSE_WORKER */ |
| 1, /* OMP_CLAUSE_VECTOR */ |
| 1, /* OMP_CLAUSE_NUM_GANGS */ |
| 1, /* OMP_CLAUSE_NUM_WORKERS */ |
| 1, /* OMP_CLAUSE_VECTOR_LENGTH */ |
| 3, /* OMP_CLAUSE_TILE */ |
| 0, /* OMP_CLAUSE_IF_PRESENT */ |
| 0, /* OMP_CLAUSE_FINALIZE */ |
| 0, /* OMP_CLAUSE_NOHOST */ |
| }; |
| |
| const char * const omp_clause_code_name[] = |
| { |
| "error_clause", |
| "private", |
| "shared", |
| "firstprivate", |
| "lastprivate", |
| "reduction", |
| "task_reduction", |
| "in_reduction", |
| "copyin", |
| "copyprivate", |
| "linear", |
| "affinity", |
| "aligned", |
| "allocate", |
| "depend", |
| "nontemporal", |
| "uniform", |
| "to", |
| "link", |
| "detach", |
| "use_device_ptr", |
| "use_device_addr", |
| "is_device_ptr", |
| "inclusive", |
| "exclusive", |
| "from", |
| "to", |
| "map", |
| "has_device_addr", |
| "_cache_", |
| "gang", |
| "async", |
| "wait", |
| "auto", |
| "seq", |
| "_looptemp_", |
| "_reductemp_", |
| "_condtemp_", |
| "_scantemp_", |
| "if", |
| "num_threads", |
| "schedule", |
| "nowait", |
| "ordered", |
| "default", |
| "collapse", |
| "untied", |
| "final", |
| "mergeable", |
| "device", |
| "dist_schedule", |
| "inbranch", |
| "notinbranch", |
| "num_teams", |
| "thread_limit", |
| "proc_bind", |
| "safelen", |
| "simdlen", |
| "device_type", |
| "for", |
| "parallel", |
| "sections", |
| "taskgroup", |
| "priority", |
| "grainsize", |
| "num_tasks", |
| "nogroup", |
| "threads", |
| "simd", |
| "hint", |
| "defaultmap", |
| "order", |
| "bind", |
| "filter", |
| "_simduid_", |
| "_simt_", |
| "independent", |
| "worker", |
| "vector", |
| "num_gangs", |
| "num_workers", |
| "vector_length", |
| "tile", |
| "if_present", |
| "finalize", |
| "nohost", |
| }; |
| |
| /* Unless specific to OpenACC, we tend to internally maintain OpenMP-centric |
| clause names, but for use in diagnostics etc. we would like to use the |
| "user" clause names. */ |
| |
| const char * |
| user_omp_clause_code_name (tree clause, bool oacc) |
| { |
| /* For OpenACC, the 'OMP_CLAUSE_MAP_KIND' of an 'OMP_CLAUSE_MAP' is used to |
| distinguish clauses as seen by the user. See also where front ends do |
| 'build_omp_clause' with 'OMP_CLAUSE_MAP'. */ |
| if (oacc && OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP) |
| switch (OMP_CLAUSE_MAP_KIND (clause)) |
| { |
| case GOMP_MAP_FORCE_ALLOC: |
| case GOMP_MAP_ALLOC: return "create"; |
| case GOMP_MAP_FORCE_TO: |
| case GOMP_MAP_TO: return "copyin"; |
| case GOMP_MAP_FORCE_FROM: |
| case GOMP_MAP_FROM: return "copyout"; |
| case GOMP_MAP_FORCE_TOFROM: |
| case GOMP_MAP_TOFROM: return "copy"; |
| case GOMP_MAP_RELEASE: return "delete"; |
| case GOMP_MAP_FORCE_PRESENT: return "present"; |
| case GOMP_MAP_ATTACH: return "attach"; |
| case GOMP_MAP_FORCE_DETACH: |
| case GOMP_MAP_DETACH: return "detach"; |
| case GOMP_MAP_DEVICE_RESIDENT: return "device_resident"; |
| case GOMP_MAP_LINK: return "link"; |
| case GOMP_MAP_FORCE_DEVICEPTR: return "deviceptr"; |
| default: break; |
| } |
| |
| return omp_clause_code_name[OMP_CLAUSE_CODE (clause)]; |
| } |
| |
| |
| /* Return the tree node structure used by tree code CODE. */ |
| |
| static inline enum tree_node_structure_enum |
| tree_node_structure_for_code (enum tree_code code) |
| { |
| switch (TREE_CODE_CLASS (code)) |
| { |
| case tcc_declaration: |
| switch (code) |
| { |
| case CONST_DECL: return TS_CONST_DECL; |
| case DEBUG_EXPR_DECL: return TS_DECL_WRTL; |
| case FIELD_DECL: return TS_FIELD_DECL; |
| case FUNCTION_DECL: return TS_FUNCTION_DECL; |
| case LABEL_DECL: return TS_LABEL_DECL; |
| case PARM_DECL: return TS_PARM_DECL; |
| case RESULT_DECL: return TS_RESULT_DECL; |
| case TRANSLATION_UNIT_DECL: return TS_TRANSLATION_UNIT_DECL; |
| case TYPE_DECL: return TS_TYPE_DECL; |
| case VAR_DECL: return TS_VAR_DECL; |
| default: return TS_DECL_NON_COMMON; |
| } |
| |
| case tcc_type: return TS_TYPE_NON_COMMON; |
| |
| case tcc_binary: |
| case tcc_comparison: |
| case tcc_expression: |
| case tcc_reference: |
| case tcc_statement: |
| case tcc_unary: |
| case tcc_vl_exp: return TS_EXP; |
| |
| default: /* tcc_constant and tcc_exceptional */ |
| break; |
| } |
| |
| switch (code) |
| { |
| /* tcc_constant cases. */ |
| case COMPLEX_CST: return TS_COMPLEX; |
| case FIXED_CST: return TS_FIXED_CST; |
| case INTEGER_CST: return TS_INT_CST; |
| case POLY_INT_CST: return TS_POLY_INT_CST; |
| case REAL_CST: return TS_REAL_CST; |
| case STRING_CST: return TS_STRING; |
| case VECTOR_CST: return TS_VECTOR; |
| case VOID_CST: return TS_TYPED; |
| |
| /* tcc_exceptional cases. */ |
| case BLOCK: return TS_BLOCK; |
| case CONSTRUCTOR: return TS_CONSTRUCTOR; |
| case ERROR_MARK: return TS_COMMON; |
| case IDENTIFIER_NODE: return TS_IDENTIFIER; |
| case OMP_CLAUSE: return TS_OMP_CLAUSE; |
| case OPTIMIZATION_NODE: return TS_OPTIMIZATION; |
| case PLACEHOLDER_EXPR: return TS_COMMON; |
| case SSA_NAME: return TS_SSA_NAME; |
| case STATEMENT_LIST: return TS_STATEMENT_LIST; |
| case TARGET_OPTION_NODE: return TS_TARGET_OPTION; |
| case TREE_BINFO: return TS_BINFO; |
| case TREE_LIST: return TS_LIST; |
| case TREE_VEC: return TS_VEC; |
| |
| default: |
| gcc_unreachable (); |
| } |
| } |
| |
| |
| /* Initialize tree_contains_struct to describe the hierarchy of tree |
| nodes. */ |
| |
| static void |
| initialize_tree_contains_struct (void) |
| { |
| unsigned i; |
| |
| for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++) |
| { |
| enum tree_code code; |
| enum tree_node_structure_enum ts_code; |
| |
| code = (enum tree_code) i; |
| ts_code = tree_node_structure_for_code (code); |
| |
| /* Mark the TS structure itself. */ |
| tree_contains_struct[code][ts_code] = 1; |
| |
| /* Mark all the structures that TS is derived from. */ |
| switch (ts_code) |
| { |
| case TS_TYPED: |
| case TS_BLOCK: |
| case TS_OPTIMIZATION: |
| case TS_TARGET_OPTION: |
| MARK_TS_BASE (code); |
| break; |
| |
| case TS_COMMON: |
| case TS_INT_CST: |
| case TS_POLY_INT_CST: |
| case TS_REAL_CST: |
| case TS_FIXED_CST: |
| case TS_VECTOR: |
| case TS_STRING: |
| case TS_COMPLEX: |
| case TS_SSA_NAME: |
| case TS_CONSTRUCTOR: |
| case TS_EXP: |
| case TS_STATEMENT_LIST: |
| MARK_TS_TYPED (code); |
| break; |
| |
| case TS_IDENTIFIER: |
| case TS_DECL_MINIMAL: |
| case TS_TYPE_COMMON: |
| case TS_LIST: |
| case TS_VEC: |
| case TS_BINFO: |
| case TS_OMP_CLAUSE: |
| MARK_TS_COMMON (code); |
| break; |
| |
| case TS_TYPE_WITH_LANG_SPECIFIC: |
| MARK_TS_TYPE_COMMON (code); |
| break; |
| |
| case TS_TYPE_NON_COMMON: |
| MARK_TS_TYPE_WITH_LANG_SPECIFIC (code); |
| break; |
| |
| case TS_DECL_COMMON: |
| MARK_TS_DECL_MINIMAL (code); |
| break; |
| |
| case TS_DECL_WRTL: |
| case TS_CONST_DECL: |
| MARK_TS_DECL_COMMON (code); |
| break; |
| |
| case TS_DECL_NON_COMMON: |
| MARK_TS_DECL_WITH_VIS (code); |
| break; |
| |
| case TS_DECL_WITH_VIS: |
| case TS_PARM_DECL: |
| case TS_LABEL_DECL: |
| case TS_RESULT_DECL: |
| MARK_TS_DECL_WRTL (code); |
| break; |
| |
| case TS_FIELD_DECL: |
| MARK_TS_DECL_COMMON (code); |
| break; |
| |
| case TS_VAR_DECL: |
| MARK_TS_DECL_WITH_VIS (code); |
| break; |
| |
| case TS_TYPE_DECL: |
| case TS_FUNCTION_DECL: |
| MARK_TS_DECL_NON_COMMON (code); |
| break; |
| |
| case TS_TRANSLATION_UNIT_DECL: |
| MARK_TS_DECL_COMMON (code); |
| break; |
| |
| default: |
| gcc_unreachable (); |
| } |
| } |
| |
| /* Basic consistency checks for attributes used in fold. */ |
| gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]); |
| gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]); |
| gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]); |
| gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]); |
| gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]); |
| gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]); |
| gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]); |
| gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]); |
| gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]); |
| gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]); |
| gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]); |
| gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]); |
| gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]); |
| gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]); |
| gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]); |
| gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]); |
| gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]); |
| gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]); |
| gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]); |
| gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]); |
| gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]); |
| gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]); |
| gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]); |
| gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]); |
| gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]); |
| gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]); |
| gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]); |
| gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]); |
| gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]); |
| gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]); |
| gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]); |
| gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]); |
| gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]); |
| gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]); |
| gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]); |
| gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]); |
| gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]); |
| gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]); |
| gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]); |
| gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]); |
| } |
| |
| |
| /* Init tree.cc. */ |
| |
| void |
| init_ttree (void) |
| { |
| /* Initialize the hash table of types. */ |
| type_hash_table |
| = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE); |
| |
| debug_expr_for_decl |
| = hash_table<tree_decl_map_cache_hasher>::create_ggc (512); |
| |
| value_expr_for_decl |
| = hash_table<tree_decl_map_cache_hasher>::create_ggc (512); |
| |
| int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024); |
| |
| poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64); |
| |
| int_cst_node = make_int_cst (1, 1); |
| |
| cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64); |
| |
| cl_optimization_node = make_node (OPTIMIZATION_NODE); |
| cl_target_option_node = make_node (TARGET_OPTION_NODE); |
| |
| /* Initialize the tree_contains_struct array. */ |
| initialize_tree_contains_struct (); |
| lang_hooks.init_ts (); |
| } |
| |
| |
| /* The name of the object as the assembler will see it (but before any |
| translations made by ASM_OUTPUT_LABELREF). Often this is the same |
| as DECL_NAME. It is an IDENTIFIER_NODE. */ |
| tree |
| decl_assembler_name (tree decl) |
| { |
| if (!DECL_ASSEMBLER_NAME_SET_P (decl)) |
| lang_hooks.set_decl_assembler_name (decl); |
| return DECL_ASSEMBLER_NAME_RAW (decl); |
| } |
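| |
| /* For illustration (C++ front end behavior, not enforced here): for a |
| function 'void foo ()', DECL_NAME is the IDENTIFIER_NODE "foo", while the |
| assembler name computed and cached through the hook above is the mangled |
| identifier "_Z3foov". */ |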
| |
| /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME |
| (either of which may be NULL). Inform the FE if this changes the |
| name. */ |
| |
| void |
| overwrite_decl_assembler_name (tree decl, tree name) |
| { |
| if (DECL_ASSEMBLER_NAME_RAW (decl) != name) |
| lang_hooks.overwrite_decl_assembler_name (decl, name); |
| } |
| |
| /* Return true if DECL may need an assembler name to be set. */ |
| |
| static inline bool |
| need_assembler_name_p (tree decl) |
| { |
| /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition |
| Rule merging. This makes type_odr_p return true on those types during |
| LTO, and by comparing the mangled names we can tell which types are |
| intended to be equivalent across compilation units. |
| |
| We do not store names of type_in_anonymous_namespace_p. |
| |
| Record, union and enumeration types have linkage that allows us |
| to check type_in_anonymous_namespace_p. We do not mangle compound types |
| that can always be compared structurally. |
| |
| Similarly for builtin types, we compare properties of their main variant. |
| A special case is integer types, where mangling does distinguish |
| char/signed char/unsigned char etc. Storing names for these allows |
| e.g. -fno-signed-char/-fsigned-char mismatches to be handled well. |
| See cp/mangle.cc:write_builtin_type for details. */ |
| |
| if (TREE_CODE (decl) == TYPE_DECL) |
| { |
| if (DECL_NAME (decl) |
| && decl == TYPE_NAME (TREE_TYPE (decl)) |
| && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl) |
| && !TYPE_ARTIFICIAL (TREE_TYPE (decl)) |
| && ((TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE |
| && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE) |
| || TYPE_CXX_ODR_P (TREE_TYPE (decl))) |
| && (type_with_linkage_p (TREE_TYPE (decl)) |
| || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE) |
| && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE)) |
| return !DECL_ASSEMBLER_NAME_SET_P (decl); |
| return false; |
| } |
| /* Only FUNCTION_DECLs and VAR_DECLs are considered. */ |
| if (!VAR_OR_FUNCTION_DECL_P (decl)) |
| return false; |
| |
| /* If DECL already has its assembler name set, it does not need a |
| new one. */ |
| if (!HAS_DECL_ASSEMBLER_NAME_P (decl) |
| || DECL_ASSEMBLER_NAME_SET_P (decl)) |
| return false; |
| |
| /* Abstract decls do not need an assembler name. */ |
| if (DECL_ABSTRACT_P (decl)) |
| return false; |
| |
| /* For VAR_DECLs, only static, public and external symbols need an |
| assembler name. */ |
| if (VAR_P (decl) |
| && !TREE_STATIC (decl) |
| && !TREE_PUBLIC (decl) |
| && !DECL_EXTERNAL (decl)) |
| return false; |
| |
| if (TREE_CODE (decl) == FUNCTION_DECL) |
| { |
| /* Do not set assembler name on builtins. Allow RTL expansion to |
| decide whether to expand inline or via a regular call. */ |
| if (fndecl_built_in_p (decl) |
| && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND) |
| return false; |
| |
| /* Functions represented in the callgraph need an assembler name. */ |
| if (cgraph_node::get (decl) != NULL) |
| return true; |
| |
| /* Unused and non-public functions don't need an assembler name. */ |
| if (!TREE_USED (decl) && !TREE_PUBLIC (decl)) |
| return false; |
| } |
| |
| return true; |
| } |
| |
| /* If T needs an assembler name, have one created for it. */ |
| |
| void |
| assign_assembler_name_if_needed (tree t) |
| { |
| if (need_assembler_name_p (t)) |
| { |
| /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit |
| diagnostics that use input_location to show locus |
| information. The problem here is that, at this point, |
| input_location is generally anchored to the end of the file |
| (since the parser is long gone), so we don't have a good |
| position to pin it to. |
| |
| To alleviate this problem, this uses the location of T's |
| declaration. Examples of this are |
| testsuite/g++.dg/template/cond2.C and |
| testsuite/g++.dg/template/pr35240.C. */ |
| location_t saved_location = input_location; |
| input_location = DECL_SOURCE_LOCATION (t); |
| |
| decl_assembler_name (t); |
| |
| input_location = saved_location; |
| } |
| } |
| |
| /* When the target supports COMDAT groups, this indicates which group the |
| DECL is associated with. This can be either an IDENTIFIER_NODE or a |
| decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */ |
| tree |
| decl_comdat_group (const_tree node) |
| { |
| struct symtab_node *snode = symtab_node::get (node); |
| if (!snode) |
| return NULL; |
| return snode->get_comdat_group (); |
| } |
| |
| /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */ |
| tree |
| decl_comdat_group_id (const_tree node) |
| { |
| struct symtab_node *snode = symtab_node::get (node); |
| if (!snode) |
| return NULL; |
| return snode->get_comdat_group_id (); |
| } |
| |
| /* When the target supports named sections, return the name of the section |
| NODE is placed in as a string, or NULL if it is in no section. */ |
| const char * |
| decl_section_name (const_tree node) |
| { |
| struct symtab_node *snode = symtab_node::get (node); |
| if (!snode) |
| return NULL; |
| return snode->get_section (); |
| } |
| |
| /* Set the section name of NODE to VALUE (a string), or clear the section |
| when VALUE is NULL. */ |
| void |
| set_decl_section_name (tree node, const char *value) |
| { |
| struct symtab_node *snode; |
| |
| if (value == NULL) |
| { |
| snode = symtab_node::get (node); |
| if (!snode) |
| return; |
| } |
| else if (VAR_P (node)) |
| snode = varpool_node::get_create (node); |
| else |
| snode = cgraph_node::get_create (node); |
| snode->set_section (value); |
| } |
| |
| /* Set section name of NODE to match the section name of OTHER. |
| |
| set_decl_section_name (decl, other) is equivalent to |
| set_decl_section_name (decl, DECL_SECTION_NAME (other)), but possibly more |
| efficient. */ |
| void |
| set_decl_section_name (tree decl, const_tree other) |
| { |
| struct symtab_node *other_node = symtab_node::get (other); |
| if (other_node) |
| { |
| struct symtab_node *decl_node; |
| if (VAR_P (decl)) |
| decl_node = varpool_node::get_create (decl); |
| else |
| decl_node = cgraph_node::get_create (decl); |
| decl_node->set_section (*other_node); |
| } |
| else |
| { |
| struct symtab_node *decl_node = symtab_node::get (decl); |
| if (!decl_node) |
| return; |
| decl_node->set_section (NULL); |
| } |
| } |
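| |
| /* A sketch of typical use (hypothetical decls): when cloning a decl that |
| carries __attribute__((section (".foo"))), calling |
| |
| set_decl_section_name (clone_decl, original_decl); |
| |
| keeps the clone in section ".foo" without first extracting the section |
| name as a string. */ |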
| |
| /* Return TLS model of a variable NODE. */ |
| enum tls_model |
| decl_tls_model (const_tree node) |
| { |
| struct varpool_node *snode = varpool_node::get (node); |
| if (!snode) |
| return TLS_MODEL_NONE; |
| return snode->tls_model; |
| } |
| |
| /* Set TLS model of variable NODE to MODEL. */ |
| void |
| set_decl_tls_model (tree node, enum tls_model model) |
| { |
| struct varpool_node *vnode; |
| |
| if (model == TLS_MODEL_NONE) |
| { |
| vnode = varpool_node::get (node); |
| if (!vnode) |
| return; |
| } |
| else |
| vnode = varpool_node::get_create (node); |
| vnode->tls_model = model; |
| } |
| |
| /* Compute the number of bytes occupied by a tree with code CODE. |
| This function cannot be used for nodes that have variable sizes, |
| including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */ |
| size_t |
| tree_code_size (enum tree_code code) |
| { |
| switch (TREE_CODE_CLASS (code)) |
| { |
| case tcc_declaration: /* A decl node */ |
| switch (code) |
| { |
| case FIELD_DECL: return sizeof (tree_field_decl); |
| case PARM_DECL: return sizeof (tree_parm_decl); |
| case VAR_DECL: return sizeof (tree_var_decl); |
| case LABEL_DECL: return sizeof (tree_label_decl); |
| case RESULT_DECL: return sizeof (tree_result_decl); |
| case CONST_DECL: return sizeof (tree_const_decl); |
| case TYPE_DECL: return sizeof (tree_type_decl); |
| case FUNCTION_DECL: return sizeof (tree_function_decl); |
| case DEBUG_EXPR_DECL: return sizeof (tree_decl_with_rtl); |
| case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl); |
| case NAMESPACE_DECL: |
| case IMPORTED_DECL: |
| case NAMELIST_DECL: return sizeof (tree_decl_non_common); |
| default: |
| gcc_checking_assert (code >= NUM_TREE_CODES); |
| return lang_hooks.tree_size (code); |
| } |
| |
| case tcc_type: /* a type node */ |
| switch (code) |
| { |
| case OFFSET_TYPE: |
| case ENUMERAL_TYPE: |
| case BOOLEAN_TYPE: |
| case INTEGER_TYPE: |
| case REAL_TYPE: |
| case OPAQUE_TYPE: |
| case POINTER_TYPE: |
| case REFERENCE_TYPE: |
| case NULLPTR_TYPE: |
| case FIXED_POINT_TYPE: |
| case COMPLEX_TYPE: |
| case VECTOR_TYPE: |
| case ARRAY_TYPE: |
| case RECORD_TYPE: |
| case UNION_TYPE: |
| case QUAL_UNION_TYPE: |
| case VOID_TYPE: |
| case FUNCTION_TYPE: |
| case METHOD_TYPE: |
| case LANG_TYPE: return sizeof (tree_type_non_common); |
| default: |
| gcc_checking_assert (code >= NUM_TREE_CODES); |
| return lang_hooks.tree_size (code); |
| } |
| |
| case tcc_reference: /* a reference */ |
| case tcc_expression: /* an expression */ |
| case tcc_statement: /* an expression with side effects */ |
| case tcc_comparison: /* a comparison expression */ |
| case tcc_unary: /* a unary arithmetic expression */ |
| case tcc_binary: /* a binary arithmetic expression */ |
| return (sizeof (struct tree_exp) |
| + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree)); |
| |
| case tcc_constant: /* a constant */ |
| switch (code) |
| { |
| case VOID_CST: return sizeof (tree_typed); |
| case INTEGER_CST: gcc_unreachable (); |
| case POLY_INT_CST: return sizeof (tree_poly_int_cst); |
| case REAL_CST: return sizeof (tree_real_cst); |
| case FIXED_CST: return sizeof (tree_fixed_cst); |
| case COMPLEX_CST: return sizeof (tree_complex); |
| case VECTOR_CST: gcc_unreachable (); |
| case STRING_CST: gcc_unreachable (); |
| default: |
| gcc_checking_assert (code >= NUM_TREE_CODES); |
| return lang_hooks.tree_size (code); |
| } |
| |
| case tcc_exceptional: /* something random, like an identifier. */ |
| switch (code) |
| { |
| case IDENTIFIER_NODE: return lang_hooks.identifier_size; |
| case TREE_LIST: return sizeof (tree_list); |
| |
| case ERROR_MARK: |
| case PLACEHOLDER_EXPR: return sizeof (tree_common); |
| |
| case TREE_VEC: gcc_unreachable (); |
| case OMP_CLAUSE: gcc_unreachable (); |
| |
| case SSA_NAME: return sizeof (tree_ssa_name); |
| |
| case STATEMENT_LIST: return sizeof (tree_statement_list); |
| case BLOCK: return sizeof (struct tree_block); |
| case CONSTRUCTOR: return sizeof (tree_constructor); |
| case OPTIMIZATION_NODE: return sizeof (tree_optimization_option); |
| case TARGET_OPTION_NODE: return sizeof (tree_target_option); |
| |
| default: |
| gcc_checking_assert (code >= NUM_TREE_CODES); |
| return lang_hooks.tree_size (code); |
| } |
| |
| default: |
| gcc_unreachable (); |
| } |
| } |
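| |
| /* Worked example (not compiled here): PLUS_EXPR is a tcc_binary code with |
| TREE_CODE_LENGTH of 2, so tree_code_size (PLUS_EXPR) is |
| sizeof (struct tree_exp) + 1 * sizeof (tree). Variable-sized nodes such |
| as INTEGER_CST or TREE_VEC must instead go through tree_size below. */ |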
| |
| /* Compute the number of bytes occupied by NODE. This routine only |
| looks at TREE_CODE, except for those nodes that have variable sizes. */ |
| size_t |
| tree_size (const_tree node) |
| { |
| const enum tree_code code = TREE_CODE (node); |
| switch (code) |
| { |
| case INTEGER_CST: |
| return (sizeof (struct tree_int_cst) |
| + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT)); |
| |
| case TREE_BINFO: |
| return (offsetof (struct tree_binfo, base_binfos) |
| + vec<tree, va_gc> |
| ::embedded_size (BINFO_N_BASE_BINFOS (node))); |
| |
| case TREE_VEC: |
| return (sizeof (struct tree_vec) |
| + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree)); |
| |
| case VECTOR_CST: |
| return (sizeof (struct tree_vector) |
| + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree)); |
| |
| case STRING_CST: |
| return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1; |
| |
| case OMP_CLAUSE: |
| return (sizeof (struct tree_omp_clause) |
| + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1) |
| * sizeof (tree)); |
| |
| default: |
| if (TREE_CODE_CLASS (code) == tcc_vl_exp) |
| return (sizeof (struct tree_exp) |
| + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree)); |
| else |
| return tree_code_size (code); |
| } |
| } |
| |
| /* Return tree node kind based on tree CODE. */ |
| |
| static tree_node_kind |
| get_stats_node_kind (enum tree_code code) |
| { |
| enum tree_code_class type = TREE_CODE_CLASS (code); |
| |
| switch (type) |
| { |
| case tcc_declaration: /* A decl node */ |
| return d_kind; |
| case tcc_type: /* a type node */ |
| return t_kind; |
| case tcc_statement: /* an expression with side effects */ |
| return s_kind; |
| case tcc_reference: /* a reference */ |
| return r_kind; |
| case tcc_expression: /* an expression */ |
| case tcc_comparison: /* a comparison expression */ |
| case tcc_unary: /* a unary arithmetic expression */ |
| case tcc_binary: /* a binary arithmetic expression */ |
| return e_kind; |
| case tcc_constant: /* a constant */ |
| return c_kind; |
| case tcc_exceptional: /* something random, like an identifier. */ |
| switch (code) |
| { |
| case IDENTIFIER_NODE: |
| return id_kind; |
| case TREE_VEC: |
| return vec_kind; |
| case TREE_BINFO: |
| return binfo_kind; |
| case SSA_NAME: |
| return ssa_name_kind; |
| case BLOCK: |
| return b_kind; |
| case CONSTRUCTOR: |
| return constr_kind; |
| case OMP_CLAUSE: |
| return omp_clause_kind; |
| default: |
| return x_kind; |
| } |
| break; |
| case tcc_vl_exp: |
| return e_kind; |
| default: |
| gcc_unreachable (); |
| } |
| } |
| |
| /* Record interesting allocation statistics for a tree node with CODE |
| and LENGTH. */ |
| |
| static void |
| record_node_allocation_statistics (enum tree_code code, size_t length) |
| { |
| if (!GATHER_STATISTICS) |
| return; |
| |
| tree_node_kind kind = get_stats_node_kind (code); |
| |
| tree_code_counts[(int) code]++; |
| tree_node_counts[(int) kind]++; |
| tree_node_sizes[(int) kind] += length; |
| } |
| |
| /* Allocate and return a new UID from the DECL_UID namespace. */ |
| |
| int |
| allocate_decl_uid (void) |
| { |
| return next_decl_uid++; |
| } |
| |
| /* Return a newly allocated node of code CODE. For decl and type |
| nodes, some other fields are initialized. The rest of the node is |
| initialized to zero. This function cannot be used for TREE_VEC, |
| INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in |
| tree_code_size. |
| |
| Achoo! I got a code in the node. */ |
| |
| tree |
| make_node (enum tree_code code MEM_STAT_DECL) |
| { |
| tree t; |
| enum tree_code_class type = TREE_CODE_CLASS (code); |
| size_t length = tree_code_size (code); |
| |
| record_node_allocation_statistics (code, length); |
| |
| t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT); |
| TREE_SET_CODE (t, code); |
| |
| switch (type) |
| { |
| case tcc_statement: |
| if (code != DEBUG_BEGIN_STMT) |
| TREE_SIDE_EFFECTS (t) = 1; |
| break; |
| |
| case tcc_declaration: |
| if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON)) |
| { |
| if (code == FUNCTION_DECL) |
| { |
| SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY)); |
| SET_DECL_MODE (t, FUNCTION_MODE); |
| } |
| else |
| SET_DECL_ALIGN (t, 1); |
| } |
| DECL_SOURCE_LOCATION (t) = input_location; |
| if (TREE_CODE (t) == DEBUG_EXPR_DECL) |
| DECL_UID (t) = --next_debug_decl_uid; |
| else |
| { |
| DECL_UID (t) = allocate_decl_uid (); |
| SET_DECL_PT_UID (t, -1); |
| } |
| if (TREE_CODE (t) == LABEL_DECL) |
| LABEL_DECL_UID (t) = -1; |
| |
| break; |
| |
| case tcc_type: |
| TYPE_UID (t) = next_type_uid++; |
| SET_TYPE_ALIGN (t, BITS_PER_UNIT); |
| TYPE_USER_ALIGN (t) = 0; |
| TYPE_MAIN_VARIANT (t) = t; |
| TYPE_CANONICAL (t) = t; |
| |
| /* Default to no attributes for type, but let target change that. */ |
| TYPE_ATTRIBUTES (t) = NULL_TREE; |
| targetm.set_default_type_attributes (t); |
| |
| /* We have not yet computed the alias set for this type. */ |
| TYPE_ALIAS_SET (t) = -1; |
| break; |
| |
| case tcc_constant: |
| TREE_CONSTANT (t) = 1; |
| break; |
| |
| case tcc_expression: |
| switch (code) |
| { |
| case INIT_EXPR: |
| case MODIFY_EXPR: |
| case VA_ARG_EXPR: |
| case PREDECREMENT_EXPR: |
| case PREINCREMENT_EXPR: |
| case POSTDECREMENT_EXPR: |
| case POSTINCREMENT_EXPR: |
| /* All of these have side-effects, no matter what their |
| operands are. */ |
| TREE_SIDE_EFFECTS (t) = 1; |
| break; |
| |
| default: |
| break; |
| } |
| break; |
| |
| case tcc_exceptional: |
| switch (code) |
| { |
| case TARGET_OPTION_NODE: |
| TREE_TARGET_OPTION(t) |
| = ggc_cleared_alloc<struct cl_target_option> (); |
| break; |
| |
| case OPTIMIZATION_NODE: |
| TREE_OPTIMIZATION (t) |
| = ggc_cleared_alloc<struct cl_optimization> (); |
| break; |
| |
| default: |
| break; |
| } |
| break; |
| |
| default: |
| /* Other classes need no special treatment. */ |
| break; |
| } |
| |
| return t; |
| } |
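| |
| /* A minimal usage sketch (hypothetical caller): |
| |
| tree t = make_node (RECORD_TYPE); |
| |
| returns a zeroed type node with a fresh TYPE_UID, TYPE_ALIGN of |
| BITS_PER_UNIT, and TYPE_MAIN_VARIANT and TYPE_CANONICAL pointing back at |
| the node itself, as set up above. */ |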
| |
| /* Free tree node. */ |
| |
| void |
| free_node (tree node) |
| { |
| enum tree_code code = TREE_CODE (node); |
| if (GATHER_STATISTICS) |
| { |
| enum tree_node_kind kind = get_stats_node_kind (code); |
| |
| gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0); |
| gcc_checking_assert (tree_node_counts[(int) kind] != 0); |
| gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node)); |
| |
| tree_code_counts[(int) TREE_CODE (node)]--; |
| tree_node_counts[(int) kind]--; |
| tree_node_sizes[(int) kind] -= tree_size (node); |
| } |
| if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR)) |
| vec_free (CONSTRUCTOR_ELTS (node)); |
| else if (code == BLOCK) |
| vec_free (BLOCK_NONLOCALIZED_VARS (node)); |
| else if (code == TREE_BINFO) |
| vec_free (BINFO_BASE_ACCESSES (node)); |
| else if (code == OPTIMIZATION_NODE) |
| cl_optimization_option_free (TREE_OPTIMIZATION (node)); |
| else if (code == TARGET_OPTION_NODE) |
| cl_target_option_free (TREE_TARGET_OPTION (node)); |
| ggc_free (node); |
| } |
| |
| /* Return a new node with the same contents as NODE except that its |
| TREE_CHAIN, if it has one, is zero and it has a fresh uid. */ |
| |
| tree |
| copy_node (tree node MEM_STAT_DECL) |
| { |
| tree t; |
| enum tree_code code = TREE_CODE (node); |
| size_t length; |
| |
| gcc_assert (code != STATEMENT_LIST); |
| |
| length = tree_size (node); |
| record_node_allocation_statistics (code, length); |
| t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT); |
| memcpy (t, node, length); |
| |
| if (CODE_CONTAINS_STRUCT (code, TS_COMMON)) |
| TREE_CHAIN (t) = 0; |
| TREE_ASM_WRITTEN (t) = 0; |
| TREE_VISITED (t) = 0; |
| |
| if (TREE_CODE_CLASS (code) == tcc_declaration) |
| { |
| if (code == DEBUG_EXPR_DECL) |
| DECL_UID (t) = --next_debug_decl_uid; |
| else |
| { |
| DECL_UID (t) = allocate_decl_uid (); |
| if (DECL_PT_UID_SET_P (node)) |
| SET_DECL_PT_UID (t, DECL_PT_UID (node)); |
| } |
| if ((TREE_CODE (node) == PARM_DECL || VAR_P (node)) |
| && DECL_HAS_VALUE_EXPR_P (node)) |
| { |
| SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node)); |
| DECL_HAS_VALUE_EXPR_P (t) = 1; |
| } |
| /* DECL_DEBUG_EXPR is copied explicitly by callers. */ |
| if (VAR_P (node)) |
| { |
| DECL_HAS_DEBUG_EXPR_P (t) = 0; |
| t->decl_with_vis.symtab_node = NULL; |
| } |
| if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node)) |
| { |
| SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node)); |
| DECL_HAS_INIT_PRIORITY_P (t) = 1; |
| } |
| if (TREE_CODE (node) == FUNCTION_DECL) |
| { |
| DECL_STRUCT_FUNCTION (t) = NULL; |
| t->decl_with_vis.symtab_node = NULL; |
| } |
| } |
| else if (TREE_CODE_CLASS (code) == tcc_type) |
| { |
| TYPE_UID (t) = next_type_uid++; |
| /* The following is so that the debug code for |
| the copy is different from the original type. |
| The two statements usually duplicate each other |
| (because they clear fields of the same union), |
| but the optimizer should catch that. */ |
| TYPE_SYMTAB_ADDRESS (t) = 0; |
| TYPE_SYMTAB_DIE (t) = 0; |
| |
| /* Do not copy the values cache. */ |
| if (TYPE_CACHED_VALUES_P (t)) |
| { |
| TYPE_CACHED_VALUES_P (t) = 0; |
| TYPE_CACHED_VALUES (t) = NULL_TREE; |
| } |
| } |
| else if (code == TARGET_OPTION_NODE) |
| { |
| TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>(); |
| memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node), |
| sizeof (struct cl_target_option)); |
| } |
| else if (code == OPTIMIZATION_NODE) |
| { |
| TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>(); |
| memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node), |
| sizeof (struct cl_optimization)); |
| } |
| |
| return t; |
| } |
| |
| /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field. |
| For example, this can copy a list made of TREE_LIST nodes. */ |
| |
| tree |
| copy_list (tree list) |
| { |
| tree head; |
| tree prev, next; |
| |
| if (list == 0) |
| return 0; |
| |
| head = prev = copy_node (list); |
| next = TREE_CHAIN (list); |
| while (next) |
| { |
| TREE_CHAIN (prev) = copy_node (next); |
| prev = TREE_CHAIN (prev); |
| next = TREE_CHAIN (next); |
| } |
| return head; |
| } |
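| |
| /* Example (hypothetical values X and Y): copy_list copies only the |
| TREE_LIST spine; the values stay shared. Assuming |
| |
| tree l = tree_cons (NULL_TREE, x, tree_cons (NULL_TREE, y, NULL_TREE)); |
| tree c = copy_list (l); |
| |
| then TREE_VALUE (c) == x and TREE_VALUE (TREE_CHAIN (c)) == y, but the |
| two TREE_LIST nodes of C are fresh copies. */ |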
| |
| |
| /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an |
| INTEGER_CST with value CST and type TYPE. */ |
| |
| static unsigned int |
| get_int_cst_ext_nunits (tree type, const wide_int &cst) |
| { |
| gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type)); |
| /* We need extra HWIs if CST is an unsigned integer with its |
| upper bit set. */ |
| if (TYPE_UNSIGNED (type) && wi::neg_p (cst)) |
| return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1; |
| return cst.get_len (); |
| } |
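| |
| /* Worked example: for a 64-bit unsigned type, the value 0xffffffffffffffff |
| occupies a single all-ones HWI in the wide_int, which reads as negative |
| when interpreted as signed. The function above therefore requests |
| 64 / 64 + 1 = 2 elements, and build_new_int_cst below fills the extra |
| element with zero so the constant reads back as the intended unsigned |
| value. */ |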
| |
| /* Return a new INTEGER_CST with value CST and type TYPE. */ |
| |
| static tree |
| build_new_int_cst (tree type, const wide_int &cst) |
| { |
| unsigned int len = cst.get_len (); |
| unsigned int ext_len = get_int_cst_ext_nunits (type, cst); |
| tree nt = make_int_cst (len, ext_len); |
| |
| if (len < ext_len) |
| { |
| --ext_len; |
| TREE_INT_CST_ELT (nt, ext_len) |
| = zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT); |
| for (unsigned int i = len; i < ext_len; ++i) |
| TREE_INT_CST_ELT (nt, i) = -1; |
| } |
| else if (TYPE_UNSIGNED (type) |
| && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT) |
| { |
| len--; |
| TREE_INT_CST_ELT (nt, len) |
| = zext_hwi (cst.elt (len), |
| cst.get_precision () % HOST_BITS_PER_WIDE_INT); |
| } |
| |
| for (unsigned int i = 0; i < len; i++) |
| TREE_INT_CST_ELT (nt, i) = cst.elt (i); |
| TREE_TYPE (nt) = type; |
| return nt; |
| } |
| |
| /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE. */ |
| |
| static tree |
| build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS] |
| CXX_MEM_STAT_INFO) |
| { |
| size_t length = sizeof (struct tree_poly_int_cst); |
| record_node_allocation_statistics (POLY_INT_CST, length); |
| |
| tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT); |
| |
| TREE_SET_CODE (t, POLY_INT_CST); |
| TREE_CONSTANT (t) = 1; |
| TREE_TYPE (t) = type; |
| for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i) |
| POLY_INT_CST_COEFF (t, i) = coeffs[i]; |
| return t; |
| } |
| |
| /* Create a constant tree that contains CST sign-extended to TYPE. */ |
| |
| tree |
| build_int_cst (tree type, poly_int64 cst) |
| { |
| /* Support legacy code. */ |
| if (!type) |
| type = integer_type_node; |
| |
| return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type))); |
| } |
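| |
| /* Illustration (hypothetical 32-bit unsigned type UTYPE): |
| build_int_cst (UTYPE, -1) sign-extends -1 and then forces it to the |
| type's precision, yielding the constant 0xffffffff, whereas |
| build_int_cstu below zero-extends its argument instead. */ |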
| |
| /* Create a constant tree that contains CST zero-extended to TYPE. */ |
| |
| tree |
| build_int_cstu (tree type, poly_uint64 cst) |
| { |
| return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type))); |
| } |
| |
| /* Create a constant tree that contains CST sign-extended to TYPE. */ |
| |
| tree |
| build_int_cst_type (tree type, poly_int64 cst) |
| { |
| gcc_assert (type); |
| return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type))); |
| } |
| |
| /* Construct a tree of type TYPE with the value given by CST. The |
| signedness of CST is assumed to be the same as the signedness of TYPE. */ |
| |
| tree |
| double_int_to_tree (tree type, double_int cst) |
| { |
| return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type))); |
| } |
| |
| /* Force the wide_int CST into the range of the type TYPE by sign- or |
| zero-extending it. OVERFLOWABLE indicates whether we are interested in |
| overflow of the value: when > 0 we are only interested in signed |
| overflow, when < 0 we are interested in any overflow. OVERFLOWED |
| indicates whether overflow has already occurred. The value is forced |
| to be within the range of TYPE (by setting to 0 or 1 all the bits |
| outside the type's range). We set TREE_OVERFLOW if |
| OVERFLOWED is nonzero, |
| or OVERFLOWABLE is > 0 and signed overflow occurs, |
| or OVERFLOWABLE is < 0 and any overflow occurs. |
| We return a tree node for the extended wide_int; if an overflow flag is |
| set the node is freshly allocated and unshared, otherwise it may be a |
| shared constant. */ |
| |
| |
| tree |
| force_fit_type (tree type, const poly_wide_int_ref &cst, |
| int overflowable, bool overflowed) |
| { |
| signop sign = TYPE_SIGN (type); |
| |
| /* If we need to set overflow flags, return a new unshared node. */ |
| if (overflowed || !wi::fits_to_tree_p (cst, type)) |
| { |
| if (overflowed |
| || overflowable < 0 |
| || (overflowable > 0 && sign == SIGNED)) |
| { |
| poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type), |
| sign); |
| tree t; |
| if (tmp.is_constant ()) |
| t = build_new_int_cst (type, tmp.coeffs[0]); |
| else |
| { |
| tree coeffs[NUM_POLY_INT_COEFFS]; |
| for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i) |
| { |
| coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]); |
| TREE_OVERFLOW (coeffs[i]) = 1; |
| } |
| t = build_new_poly_int_cst (type, coeffs); |
| } |
| TREE_OVERFLOW (t) = 1; |
| return t; |
| } |
| } |
| |
| /* Else build a shared node. */ |
| return wide_int_to_tree (type, cst); |
| } |
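| |
| /* Worked example (hypothetical 8-bit unsigned type U8): forcing the value |
| 300 into U8 wraps it to 44. With OVERFLOWABLE == 1 (only signed overflow |
| is interesting) the result is the shared constant 44 with no flags set; |
| with OVERFLOWABLE == -1 or OVERFLOWED true, a fresh unshared node of |
| value 44 is returned with TREE_OVERFLOW set. */ |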
| |
| /* These are the hash table functions for the hash table of shared |
| INTEGER_CST nodes. */ |
| |
| /* Return the hash code for X, an INTEGER_CST. */ |
| |
| hashval_t |
| int_cst_hasher::hash (tree x) |
| { |
| const_tree const t = x; |
| hashval_t code = TYPE_UID (TREE_TYPE (t)); |
| int i; |
| |
| for (i = 0; i < TREE_INT_CST_NUNITS (t); i++) |
| code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code); |
| |
| return code; |
| } |
| |
| /* Return nonzero if the value represented by X (an INTEGER_CST tree node) |
| is the same as that given by Y, also an INTEGER_CST tree node. */ |
| |
| bool |
| int_cst_hasher::equal (tree x, tree y) |
| { |
| const_tree const xt = x; |
| const_tree const yt = y; |
| |
| if (TREE_TYPE (xt) != TREE_TYPE (yt) |
| || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt) |
| || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt)) |
| return false; |
| |
| for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++) |
| if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i)) |
| return false; |
| |
| return true; |
| } |
| |
| /* Cache wide_int CST into the TYPE_CACHED_VALUES cache for TYPE. |
| SLOT is the slot entry to store it in, and MAX_SLOTS is the maximum |
| number of slots that can be cached for the type. */ |
| |
| static inline tree |
| cache_wide_int_in_type_cache (tree type, const wide_int &cst, |
| int slot, int max_slots) |
| { |
| gcc_checking_assert (slot >= 0); |
| /* Initialize cache. */ |
| if (!TYPE_CACHED_VALUES_P (type)) |
| { |
| TYPE_CACHED_VALUES_P (type) = 1; |
| TYPE_CACHED_VALUES (type) = make_tree_vec (max_slots); |
| } |
| tree t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot); |
| if (!t) |
| { |
| /* Create a new shared int. */ |
| t = build_new_int_cst (type, cst); |
| TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot) = t; |
| } |
| return t; |
| } |
| |
| /* Create an INTEGER_CST node of TYPE and value CST. |
| The returned node is always shared. For small integers we use a |
| per-type vector cache, for larger ones we use a single hash table. |
| The value is extended from its precision according to the sign of |
| the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines |
| the upper bits and ensures that hashing and value equality based |
| upon the underlying HOST_WIDE_INTs works without masking. */ |
| |
| static tree |
| wide_int_to_tree_1 (tree type, const wide_int_ref &pcst) |
| { |
| tree t; |
| int ix = -1; |
| int limit = 0; |
| |
| gcc_assert (type); |
| unsigned int prec = TYPE_PRECISION (type); |
| signop sgn = TYPE_SIGN (type); |
| |
| /* Verify that everything is canonical. */ |
| int l = pcst.get_len (); |
| if (l > 1) |
| { |
| if (pcst.elt (l - 1) == 0) |
| gcc_checking_assert (pcst.elt (l - 2) < 0); |
| if (pcst.elt (l - 1) == HOST_WIDE_INT_M1) |
| gcc_checking_assert (pcst.elt (l - 2) >= 0); |
| } |
| |
| wide_int cst = wide_int::from (pcst, prec, sgn); |
| unsigned int ext_len = get_int_cst_ext_nunits (type, cst); |
| |
| enum tree_code code = TREE_CODE (type); |
| if (code == POINTER_TYPE || code == REFERENCE_TYPE) |
| { |
| /* Cache NULL pointer and zero bounds. */ |
| if (cst == 0) |
| ix = 0; |
| /* Cache upper bounds of pointers. */ |
| else if (cst == wi::max_value (prec, sgn)) |
| ix = 1; |
| /* Cache 1 which is used for a non-zero range. */ |
| else if (cst == 1) |
| ix = 2; |
| |
| if (ix >= 0) |
| { |
| t = cache_wide_int_in_type_cache (type, cst, ix, 3); |
| /* Make sure no one is clobbering the shared constant. */ |
| gcc_checking_assert (TREE_TYPE (t) == type |
| && cst == wi::to_wide (t)); |
| return t; |
| } |
| } |
| if (ext_len == 1) |
| { |
| /* We just need to store a single HOST_WIDE_INT. */ |
| HOST_WIDE_INT hwi; |
| if (TYPE_UNSIGNED (type)) |
| hwi = cst.to_uhwi (); |
| else |
| hwi = cst.to_shwi (); |
| |
| switch (code) |
| { |
| case NULLPTR_TYPE: |
| gcc_assert (hwi == 0); |
| /* Fallthru. */ |
| |
| case POINTER_TYPE: |
| case REFERENCE_TYPE: |
| /* Ignore pointers, as they were already handled above. */ |
| break; |
| |
| case BOOLEAN_TYPE: |
| /* Cache false or true. */ |
| limit = 2; |
| if (IN_RANGE (hwi, 0, 1)) |
| ix = hwi; |
| break; |
| |
| case INTEGER_TYPE: |
| case OFFSET_TYPE: |
| if (TYPE_SIGN (type) == UNSIGNED) |
| { |
| /* Cache [0, N). */ |
| limit = param_integer_share_limit; |
| if (IN_RANGE (hwi, 0, param_integer_share_limit - 1)) |
| ix = hwi; |
| } |
| else |
| { |
| /* Cache [-1, N). */ |
| limit = param_integer_share_limit + 1; |
| if (IN_RANGE (hwi, -1, param_integer_share_limit - 1)) |
| ix = hwi + 1; |
| } |
| break; |
| |
| case ENUMERAL_TYPE: |
| break; |
| |
| default: |
| gcc_unreachable (); |
| } |
| |
| if (ix >= 0) |
| { |
| t = cache_wide_int_in_type_cache (type, cst, ix, limit); |
| /* Make sure no one is clobbering the shared constant. */ |
| gcc_checking_assert (TREE_TYPE (t) == type |
| && TREE_INT_CST_NUNITS (t) == 1 |
| && TREE_INT_CST_OFFSET_NUNITS (t) == 1 |
| && TREE_INT_CST_EXT_NUNITS (t) == 1 |
| && TREE_INT_CST_ELT (t, 0) == hwi); |
| return t; |
| } |
| else |
| { |
| /* Use the cache of larger shared ints, using int_cst_node as |
| a temporary. */ |
| |
| TREE_INT_CST_ELT (int_cst_node, 0) = hwi; |
| TREE_TYPE (int_cst_node) = type; |
| |
| tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT); |
| t = *slot; |
| if (!t) |
| { |
| /* Insert this one into the hash table. */ |
| t = int_cst_node; |
| *slot = t; |
| /* Make a new node for next time round. */ |
| int_cst_node = make_int_cst (1, 1); |
| } |
| } |
| } |
| else |
| { |
| /* The value either hashes properly or we drop it on the floor |
| for the gc to take care of. There will not be enough of them |
| to worry about. */ |
| |
| tree nt = build_new_int_cst (type, cst); |
| tree *slot = int_cst_hash_table->find_slot (nt, INSERT); |
| t = *slot; |
| if (!t) |
| { |
| /* Insert this one into the hash table. */ |
| t = nt; |
| *slot = t; |
| } |
| else |
| ggc_free (nt); |
| } |
| |
| return t; |
| } |
| |
| hashval_t |
| poly_int_cst_hasher::hash (tree t) |
| { |
| inchash::hash hstate; |
| |
| hstate.add_int (TYPE_UID (TREE_TYPE (t))); |
| for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i) |
| hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i))); |
| |
| return hstate.end (); |
| } |
| |
| bool |
| poly_int_cst_hasher::equal (tree x, const compare_type &y) |
| { |
| if (TREE_TYPE (x) != y.first) |
| return false; |
| for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i) |
| if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i]) |
| return false; |
| return true; |
| } |
| |
| /* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES. |
| The elements must also have type TYPE. */ |
| |
| tree |
| build_poly_int_cst (tree type, const poly_wide_int_ref &values) |
| { |
| unsigned int prec = TYPE_PRECISION (type); |
| gcc_assert (prec <= values.coeffs[0].get_precision ()); |
| poly_wide_int c = poly_wide_int::from (values, prec, SIGNED); |
| |
| inchash::hash h; |
| h.add_int (TYPE_UID (type)); |
| for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i) |
| h.add_wide_int (c.coeffs[i]); |
| poly_int_cst_hasher::compare_type comp (type, &c); |
| tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (), |
| INSERT); |
| if (*slot == NULL_TREE) |
| { |
| tree coeffs[NUM_POLY_INT_COEFFS]; |
| for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i) |
| coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]); |
| *slot = build_new_poly_int_cst (type, coeffs); |
| } |
| return *slot; |
| } |
| |
| /* Create a constant tree with value VALUE in type TYPE. */ |
| |
| tree |
| wide_int_to_tree (tree type, const poly_wide_int_ref &value) |
| { |
| if (value.is_constant ()) |
| return wide_int_to_tree_1 (type, value.coeffs[0]); |
| return build_poly_int_cst (type, value); |
| } |
| |
| /* Insert INTEGER_CST T into a cache of integer constants, and return |
| the cached constant (which may or may not be T). If MIGHT_DUPLICATE |
| is false, and T falls into the type's 'smaller values' range, there |
| cannot be an existing entry. Otherwise, if MIGHT_DUPLICATE is true, |
| or the value is large, should an existing entry exist, it is |
| returned (rather than inserting T). */ |
| |
| tree |
| cache_integer_cst (tree t, bool might_duplicate ATTRIBUTE_UNUSED) |
| { |
| tree type = TREE_TYPE (t); |
| int ix = -1; |
| int limit = 0; |
| int prec = TYPE_PRECISION (type); |
| |
| gcc_assert (!TREE_OVERFLOW (t)); |
| |
| /* The caching indices here must match those in |
| wide_int_to_tree_1. */ |
| switch (TREE_CODE (type)) |
| { |
| case NULLPTR_TYPE: |
| gcc_checking_assert (integer_zerop (t)); |
| /* Fallthru. */ |
| |
| case POINTER_TYPE: |
| case REFERENCE_TYPE: |
| { |
| if (integer_zerop (t)) |
| ix = 0; |
| else if (integer_onep (t)) |
| ix = 2; |
| |
| if (ix >= 0) |
| limit = 3; |
| } |
| break; |
| |
| case BOOLEAN_TYPE: |
| /* Cache false or true. */ |
| limit = 2; |
| if (wi::ltu_p (wi::to_wide (t), 2)) |
| ix = TREE_INT_CST_ELT (t, 0); |
| break; |
| |
| case INTEGER_TYPE: |
| case OFFSET_TYPE: |
| if (TYPE_UNSIGNED (type)) |
| { |
| /* Cache 0..N */ |
| limit = param_integer_share_limit; |
| |
/* This is a little hokey, but if the precision is smaller than
   what is necessary to hold param_integer_share_limit, then the
   obvious test will not give the correct answer.  */
| if (prec < HOST_BITS_PER_WIDE_INT) |
| { |
| if (tree_to_uhwi (t) |
| < (unsigned HOST_WIDE_INT) param_integer_share_limit) |
| ix = tree_to_uhwi (t); |
| } |
| else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit)) |
| ix = tree_to_uhwi (t); |
| } |
| else |
| { |
| /* Cache -1..N */ |
| limit = param_integer_share_limit + 1; |
| |
| if (integer_minus_onep (t)) |
| ix = 0; |
| else if (!wi::neg_p (wi::to_wide (t))) |
| { |
| if (prec < HOST_BITS_PER_WIDE_INT) |
| { |
| if (tree_to_shwi (t) < param_integer_share_limit) |
| ix = tree_to_shwi (t) + 1; |
| } |
| else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit)) |
| ix = tree_to_shwi (t) + 1; |
| } |
| } |
| break; |
| |
| case ENUMERAL_TYPE: |
| /* The slot used by TYPE_CACHED_VALUES is used for the enum |
| members. */ |
| break; |
| |
| default: |
| gcc_unreachable (); |
| } |
| |
| if (ix >= 0) |
| { |
| /* Look for it in the type's vector of small shared ints. */ |
| if (!TYPE_CACHED_VALUES_P (type)) |
| { |
| TYPE_CACHED_VALUES_P (type) = 1; |
| TYPE_CACHED_VALUES (type) = make_tree_vec (limit); |
| } |
| |
| if (tree r = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix)) |
| { |
| gcc_checking_assert (might_duplicate); |
| t = r; |
| } |
| else |
| TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t; |
| } |
| else |
| { |
| /* Use the cache of larger shared ints. */ |
| tree *slot = int_cst_hash_table->find_slot (t, INSERT); |
| if (tree r = *slot) |
| { |
/* If there is already an entry for the number, verify it's the
   same value.  */
| gcc_checking_assert (wi::to_wide (tree (r)) == wi::to_wide (t)); |
| /* And return the cached value. */ |
| t = r; |
| } |
| else |
| /* Otherwise insert this one into the hash table. */ |
| *slot = t; |
| } |
| |
| return t; |
| } |
| |
| |
/* Build an integer constant of type TYPE in which the lowest BITS bits
   are ones and the rest are zeros.  */
| |
| tree |
| build_low_bits_mask (tree type, unsigned bits) |
| { |
| gcc_assert (bits <= TYPE_PRECISION (type)); |
| |
| return wide_int_to_tree (type, wi::mask (bits, false, |
| TYPE_PRECISION (type))); |
| } |
| |
/* Check that X is an integer constant that can be expressed as a signed
   or unsigned HOST_WIDE_INT without loss of precision.  */
| |
| bool |
| cst_and_fits_in_hwi (const_tree x) |
| { |
| return (TREE_CODE (x) == INTEGER_CST |
| && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x))); |
| } |
| |
| /* Build a newly constructed VECTOR_CST with the given values of |
| (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */ |
| |
| tree |
| make_vector (unsigned log2_npatterns, |
| unsigned int nelts_per_pattern MEM_STAT_DECL) |
| { |
| gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3)); |
| tree t; |
| unsigned npatterns = 1 << log2_npatterns; |
| unsigned encoded_nelts = npatterns * nelts_per_pattern; |
| unsigned length = (sizeof (struct tree_vector) |
| + (encoded_nelts - 1) * sizeof (tree)); |
| |
| record_node_allocation_statistics (VECTOR_CST, length); |
| |
| t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT); |
| |
| TREE_SET_CODE (t, VECTOR_CST); |
| TREE_CONSTANT (t) = 1; |
| VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns; |
| VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern; |
| |
| return t; |
| } |
| |
| /* Return a new VECTOR_CST node whose type is TYPE and whose values |
| are extracted from V, a vector of CONSTRUCTOR_ELT. */ |
| |
| tree |
| build_vector_from_ctor (tree type, const vec<constructor_elt, va_gc> *v) |
| { |
| if (vec_safe_length (v) == 0) |
| return build_zero_cst (type); |
| |
| unsigned HOST_WIDE_INT idx, nelts; |
| tree value; |
| |
| /* We can't construct a VECTOR_CST for a variable number of elements. */ |
| nelts = TYPE_VECTOR_SUBPARTS (type).to_constant (); |
| tree_vector_builder vec (type, nelts, 1); |
| FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value) |
| { |
| if (TREE_CODE (value) == VECTOR_CST) |
| { |
| /* If NELTS is constant then this must be too. */ |
| unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant (); |
| for (unsigned i = 0; i < sub_nelts; ++i) |
| vec.quick_push (VECTOR_CST_ELT (value, i)); |
| } |
| else |
| vec.quick_push (value); |
| } |
| while (vec.length () < nelts) |
| vec.quick_push (build_zero_cst (TREE_TYPE (type))); |
| |
| return vec.build (); |
| } |
| |
/* Build a vector of type VECTYPE in which every element is SC.  */
| tree |
| build_vector_from_val (tree vectype, tree sc) |
| { |
| unsigned HOST_WIDE_INT i, nunits; |
| |
| if (sc == error_mark_node) |
| return sc; |
| |
| /* Verify that the vector type is suitable for SC. Note that there |
| is some inconsistency in the type-system with respect to restrict |
| qualifications of pointers. Vector types always have a main-variant |
| element type and the qualification is applied to the vector-type. |
| So TREE_TYPE (vector-type) does not return a properly qualified |
| vector element-type. */ |
| gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)), |
| TREE_TYPE (vectype))); |
| |
| if (CONSTANT_CLASS_P (sc)) |
| { |
| tree_vector_builder v (vectype, 1, 1); |
| v.quick_push (sc); |
| return v.build (); |
| } |
| else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits)) |
| return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc); |
| else |
| { |
| vec<constructor_elt, va_gc> *v; |
| vec_alloc (v, nunits); |
| for (i = 0; i < nunits; ++i) |
| CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc); |
| return build_constructor (vectype, v); |
| } |
| } |
| |
| /* If TYPE is not a vector type, just return SC, otherwise return |
| build_vector_from_val (TYPE, SC). */ |
| |
| tree |
| build_uniform_cst (tree type, tree sc) |
| { |
| if (!VECTOR_TYPE_P (type)) |
| return sc; |
| |
| return build_vector_from_val (type, sc); |
| } |
| |
| /* Build a vector series of type TYPE in which element I has the value |
| BASE + I * STEP. The result is a constant if BASE and STEP are constant |
| and a VEC_SERIES_EXPR otherwise. */ |
| |
| tree |
| build_vec_series (tree type, tree base, tree step) |
| { |
| if (integer_zerop (step)) |
| return build_vector_from_val (type, base); |
| if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST) |
| { |
| tree_vector_builder builder (type, 1, 3); |
| tree elt1 = wide_int_to_tree (TREE_TYPE (base), |
| wi::to_wide (base) + wi::to_wide (step)); |
| tree elt2 = wide_int_to_tree (TREE_TYPE (base), |
| wi::to_wide (elt1) + wi::to_wide (step)); |
| builder.quick_push (base); |
| builder.quick_push (elt1); |
| builder.quick_push (elt2); |
| return builder.build (); |
| } |
| return build2 (VEC_SERIES_EXPR, type, base, step); |
| } |
| |
| /* Return a vector with the same number of units and number of bits |
| as VEC_TYPE, but in which the elements are a linear series of unsigned |
| integers { BASE, BASE + STEP, BASE + STEP * 2, ... }. */ |
| |
| tree |
| build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step) |
| { |
| tree index_vec_type = vec_type; |
| tree index_elt_type = TREE_TYPE (vec_type); |
| poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type); |
| if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type)) |
| { |
| index_elt_type = build_nonstandard_integer_type |
| (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true); |
| index_vec_type = build_vector_type (index_elt_type, nunits); |
| } |
| |
| tree_vector_builder v (index_vec_type, 1, 3); |
| for (unsigned int i = 0; i < 3; ++i) |
| v.quick_push (build_int_cstu (index_elt_type, base + i * step)); |
| return v.build (); |
| } |
| |
| /* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A |
| elements are A and the rest are B. */ |
| |
| tree |
| build_vector_a_then_b (tree vec_type, unsigned int num_a, tree a, tree b) |
| { |
| gcc_assert (known_le (num_a, TYPE_VECTOR_SUBPARTS (vec_type))); |
| unsigned int count = constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type)); |
| /* Optimize the constant case. */ |
| if ((count & 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type).is_constant ()) |
| count /= 2; |
| tree_vector_builder builder (vec_type, count, 2); |
| for (unsigned int i = 0; i < count * 2; ++i) |
| builder.quick_push (i < num_a ? a : b); |
| return builder.build (); |
| } |
| |
/* Something has messed with the elements of CONSTRUCTOR C after it was built;
   recalculate TREE_CONSTANT and TREE_SIDE_EFFECTS.  */
| |
| void |
| recompute_constructor_flags (tree c) |
| { |
| unsigned int i; |
| tree val; |
| bool constant_p = true; |
| bool side_effects_p = false; |
| vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c); |
| |
| FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val) |
| { |
| /* Mostly ctors will have elts that don't have side-effects, so |
| the usual case is to scan all the elements. Hence a single |
| loop for both const and side effects, rather than one loop |
| each (with early outs). */ |
| if (!TREE_CONSTANT (val)) |
| constant_p = false; |
| if (TREE_SIDE_EFFECTS (val)) |
| side_effects_p = true; |
| } |
| |
| TREE_SIDE_EFFECTS (c) = side_effects_p; |
| TREE_CONSTANT (c) = constant_p; |
| } |
| |
| /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for |
| CONSTRUCTOR C. */ |
| |
| void |
| verify_constructor_flags (tree c) |
| { |
| unsigned int i; |
| tree val; |
| bool constant_p = TREE_CONSTANT (c); |
| bool side_effects_p = TREE_SIDE_EFFECTS (c); |
| vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c); |
| |
| FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val) |
| { |
| if (constant_p && !TREE_CONSTANT (val)) |
| internal_error ("non-constant element in constant CONSTRUCTOR"); |
| if (!side_effects_p && TREE_SIDE_EFFECTS (val)) |
| internal_error ("side-effects element in no-side-effects CONSTRUCTOR"); |
| } |
| } |
| |
| /* Return a new CONSTRUCTOR node whose type is TYPE and whose values |
| are in the vec pointed to by VALS. */ |
| tree |
| build_constructor (tree type, vec<constructor_elt, va_gc> *vals MEM_STAT_DECL) |
| { |
| tree c = make_node (CONSTRUCTOR PASS_MEM_STAT); |
| |
| TREE_TYPE (c) = type; |
| CONSTRUCTOR_ELTS (c) = vals; |
| |
| recompute_constructor_flags (c); |
| |
| return c; |
| } |
| |
| /* Build a CONSTRUCTOR node made of a single initializer, with the specified |
| INDEX and VALUE. */ |
| tree |
| build_constructor_single (tree type, tree index, tree value) |
| { |
| vec<constructor_elt, va_gc> *v; |
| constructor_elt elt = {index, value}; |
| |
| vec_alloc (v, 1); |
| v->quick_push (elt); |
| |
| return build_constructor (type, v); |
| } |
| |
| |
| /* Return a new CONSTRUCTOR node whose type is TYPE and whose values |
| are in a list pointed to by VALS. */ |
| tree |
| build_constructor_from_list (tree type, tree vals) |
| { |
| tree t; |
| vec<constructor_elt, va_gc> *v = NULL; |
| |
| if (vals) |
| { |
| vec_alloc (v, list_length (vals)); |
| for (t = vals; t; t = TREE_CHAIN (t)) |
| CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t)); |
| } |
| |
| return build_constructor (type, v); |
| } |
| |
| /* Return a new CONSTRUCTOR node whose type is TYPE and whose values |
| are in a vector pointed to by VALS. Note that the TREE_PURPOSE |
| fields in the constructor remain null. */ |
| |
| tree |
| build_constructor_from_vec (tree type, const vec<tree, va_gc> *vals) |
| { |
| vec<constructor_elt, va_gc> *v = NULL; |
| |
| for (tree t : vals) |
| CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t); |
| |
| return build_constructor (type, v); |
| } |
| |
| /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number |
| of elements, provided as index/value pairs. */ |
| |
| tree |
| build_constructor_va (tree type, int nelts, ...) |
| { |
| vec<constructor_elt, va_gc> *v = NULL; |
| va_list p; |
| |
| va_start (p, nelts); |
| vec_alloc (v, nelts); |
| while (nelts--) |
| { |
| tree index = va_arg (p, tree); |
| tree value = va_arg (p, tree); |
| CONSTRUCTOR_APPEND_ELT (v, index, value); |
| } |
| va_end (p); |
| return build_constructor (type, v); |
| } |
| |
| /* Return a node of type TYPE for which TREE_CLOBBER_P is true. */ |
| |
| tree |
| build_clobber (tree type, enum clobber_kind kind) |
| { |
| tree clobber = build_constructor (type, NULL); |
| TREE_THIS_VOLATILE (clobber) = true; |
| CLOBBER_KIND (clobber) = kind; |
| return clobber; |
| } |
| |
| /* Return a new FIXED_CST node whose type is TYPE and value is F. */ |
| |
| tree |
| build_fixed (tree type, FIXED_VALUE_TYPE f) |
| { |
| tree v; |
| FIXED_VALUE_TYPE *fp; |
| |
| v = make_node (FIXED_CST); |
| fp = ggc_alloc<fixed_value> (); |
| memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE)); |
| |
| TREE_TYPE (v) = type; |
| TREE_FIXED_CST_PTR (v) = fp; |
| return v; |
| } |
| |
| /* Return a new REAL_CST node whose type is TYPE and value is D. */ |
| |
| tree |
| build_real (tree type, REAL_VALUE_TYPE d) |
| { |
| tree v; |
| REAL_VALUE_TYPE *dp; |
| int overflow = 0; |
| |
| /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE. |
| Consider doing it via real_convert now. */ |
| |
| v = make_node (REAL_CST); |
| dp = ggc_alloc<real_value> (); |
| memcpy (dp, &d, sizeof (REAL_VALUE_TYPE)); |
| |
| TREE_TYPE (v) = type; |
| TREE_REAL_CST_PTR (v) = dp; |
| TREE_OVERFLOW (v) = overflow; |
| return v; |
| } |
| |
| /* Like build_real, but first truncate D to the type. */ |
| |
| tree |
| build_real_truncate (tree type, REAL_VALUE_TYPE d) |
| { |
| return build_real (type, real_value_truncate (TYPE_MODE (type), d)); |
| } |
| |
/* Return a REAL_VALUE_TYPE holding the value of the INTEGER_CST node I,
   converted for the mode of TYPE (or for VOIDmode if TYPE is null).  */
| |
| REAL_VALUE_TYPE |
| real_value_from_int_cst (const_tree type, const_tree i) |
| { |
| REAL_VALUE_TYPE d; |
| |
| /* Clear all bits of the real value type so that we can later do |
| bitwise comparisons to see if two values are the same. */ |
| memset (&d, 0, sizeof d); |
| |
| real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i), |
| TYPE_SIGN (TREE_TYPE (i))); |
| return d; |
| } |
| |
| /* Given a tree representing an integer constant I, return a tree |
| representing the same value as a floating-point constant of type TYPE. */ |
| |
| tree |
| build_real_from_int_cst (tree type, const_tree i) |
| { |
| tree v; |
| int overflow = TREE_OVERFLOW (i); |
| |
| v = build_real (type, real_value_from_int_cst (type, i)); |
| |
| TREE_OVERFLOW (v) |= overflow; |
| return v; |
| } |
| |
| /* Return a new REAL_CST node whose type is TYPE |
| and whose value is the integer value I which has sign SGN. */ |
| |
| tree |
| build_real_from_wide (tree type, const wide_int_ref &i, signop sgn) |
| { |
| REAL_VALUE_TYPE d; |
| |
| /* Clear all bits of the real value type so that we can later do |
| bitwise comparisons to see if two values are the same. */ |
| memset (&d, 0, sizeof d); |
| |
| real_from_integer (&d, TYPE_MODE (type), i, sgn); |
| return build_real (type, d); |
| } |
| |
| /* Return a newly constructed STRING_CST node whose value is the LEN |
| characters at STR when STR is nonnull, or all zeros otherwise. |
| Note that for a C string literal, LEN should include the trailing NUL. |
| The TREE_TYPE is not initialized. */ |
| |
| tree |
| build_string (unsigned len, const char *str /*= NULL */) |
| { |
| /* Do not waste bytes provided by padding of struct tree_string. */ |
| unsigned size = len + offsetof (struct tree_string, str) + 1; |
| |
| record_node_allocation_statistics (STRING_CST, size); |
| |
| tree s = (tree) ggc_internal_alloc (size); |
| |
| memset (s, 0, sizeof (struct tree_typed)); |
| TREE_SET_CODE (s, STRING_CST); |
| TREE_CONSTANT (s) = 1; |
| TREE_STRING_LENGTH (s) = len; |
| if (str) |
| memcpy (s->string.str, str, len); |
| else |
| memset (s->string.str, 0, len); |
| s->string.str[len] = '\0'; |
| |
| return s; |
| } |
| |
| /* Return a newly constructed COMPLEX_CST node whose value is |
| specified by the real and imaginary parts REAL and IMAG. |
| Both REAL and IMAG should be constant nodes. TYPE, if specified, |
| will be the type of the COMPLEX_CST; otherwise a new type will be made. */ |
| |
| tree |
| build_complex (tree type, tree real, tree imag) |
| { |
| gcc_assert (CONSTANT_CLASS_P (real)); |
| gcc_assert (CONSTANT_CLASS_P (imag)); |
| |
| tree t = make_node (COMPLEX_CST); |
| |
| TREE_REALPART (t) = real; |
| TREE_IMAGPART (t) = imag; |
| TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real)); |
| TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag); |
| return t; |
| } |
| |
| /* Build a complex (inf +- 0i), such as for the result of cproj. |
| TYPE is the complex tree type of the result. If NEG is true, the |
| imaginary zero is negative. */ |
| |
| tree |
| build_complex_inf (tree type, bool neg) |
| { |
| REAL_VALUE_TYPE rinf, rzero = dconst0; |
| |
| real_inf (&rinf); |
| rzero.sign = neg; |
| return build_complex (type, build_real (TREE_TYPE (type), rinf), |
| build_real (TREE_TYPE (type), rzero)); |
| } |
| |
| /* Return the constant 1 in type TYPE. If TYPE has several elements, each |
| element is set to 1. In particular, this is 1 + i for complex types. */ |
| |
| tree |
| build_each_one_cst (tree type) |
| { |
| if (TREE_CODE (type) == COMPLEX_TYPE) |
| { |
| tree scalar = build_one_cst (TREE_TYPE (type)); |
| return build_complex (type, scalar, scalar); |
| } |
| else |
| return build_one_cst (type); |
| } |
| |
| /* Return a constant of arithmetic type TYPE which is the |
| multiplicative identity of the set TYPE. */ |
| |
| tree |
| build_one_cst (tree type) |
| { |
| switch (TREE_CODE (type)) |
| { |
| case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE: |
| case POINTER_TYPE: case REFERENCE_TYPE: |
| case OFFSET_TYPE: |
| return build_int_cst (type, 1); |
| |
| case REAL_TYPE: |
| return build_real (type, dconst1); |
| |
| case FIXED_POINT_TYPE: |
| /* We can only generate 1 for accum types. */ |
| gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type))); |
| return build_fixed (type, FCONST1 (TYPE_MODE (type))); |
| |
| case VECTOR_TYPE: |
| { |
| tree scalar = build_one_cst (TREE_TYPE (type)); |
| |
| return build_vector_from_val (type, scalar); |
| } |
| |
| case COMPLEX_TYPE: |
| return build_complex (type, |
| build_one_cst (TREE_TYPE (type)), |
| build_zero_cst (TREE_TYPE (type))); |
| |
| default: |
| gcc_unreachable (); |
| } |
| } |
| |
| /* Return an integer of type TYPE containing all 1's in as much precision as |
| it contains, or a complex or vector whose subparts are such integers. */ |
| |
| tree |
| build_all_ones_cst (tree type) |
| { |
| if (TREE_CODE (type) == COMPLEX_TYPE) |
| { |
| tree scalar = build_all_ones_cst (TREE_TYPE (type)); |
| return build_complex (type, scalar, scalar); |
| } |
| else |
| return build_minus_one_cst (type); |
| } |
| |
| /* Return a constant of arithmetic type TYPE which is the |
| opposite of the multiplicative identity of the set TYPE. */ |
| |
| tree |
| build_minus_one_cst (tree type) |
| { |
| switch (TREE_CODE (type)) |
| { |
| case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE: |
| case POINTER_TYPE: case REFERENCE_TYPE: |
| case OFFSET_TYPE: |
| return build_int_cst (type, -1); |
| |
| case REAL_TYPE: |
| return build_real (type, dconstm1); |
| |
| case FIXED_POINT_TYPE: |
/* We can only generate -1 for accum types.  */
| gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type))); |
| return build_fixed (type, |
| fixed_from_double_int (double_int_minus_one, |
| SCALAR_TYPE_MODE (type))); |
| |
| case VECTOR_TYPE: |
| { |
| tree scalar = build_minus_one_cst (TREE_TYPE (type)); |
| |
| return build_vector_from_val (type, scalar); |
| } |
| |
| case COMPLEX_TYPE: |
| return build_complex (type, |
| build_minus_one_cst (TREE_TYPE (type)), |
| build_zero_cst (TREE_TYPE (type))); |
| |
| default: |
| gcc_unreachable (); |
| } |
| } |
| |
/* Build a zero constant of type TYPE.  This is used by constructor folding,
   and thus the constant must be one that is represented in memory
   entirely by zero bytes.  */
| |
| tree |
| build_zero_cst (tree type) |
| { |
| switch (TREE_CODE (type)) |
| { |
| case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE: |
| case POINTER_TYPE: case REFERENCE_TYPE: |
| case OFFSET_TYPE: case NULLPTR_TYPE: |
| return build_int_cst (type, 0); |
| |
| case REAL_TYPE: |
| return build_real (type, dconst0); |
| |
| case FIXED_POINT_TYPE: |
| return build_fixed (type, FCONST0 (TYPE_MODE (type))); |
| |
| case VECTOR_TYPE: |
| { |
| tree scalar = build_zero_cst (TREE_TYPE (type)); |
| |
| return build_vector_from_val (type, scalar); |
| } |
| |
| case COMPLEX_TYPE: |
| { |
| tree zero = build_zero_cst (TREE_TYPE (type)); |
| |
| return build_complex (type, zero, zero); |
| } |
| |
| default: |
| if (!AGGREGATE_TYPE_P (type)) |
| return fold_convert (type, integer_zero_node); |
| return build_constructor (type, NULL); |
| } |
| } |
| |
| |
/* Build a BINFO with room for BASE_BINFOS base binfos.  */
| |
| tree |
| make_tree_binfo (unsigned base_binfos MEM_STAT_DECL) |
| { |
| tree t; |
| size_t length = (offsetof (struct tree_binfo, base_binfos) |
| + vec<tree, va_gc>::embedded_size (base_binfos)); |
| |
| record_node_allocation_statistics (TREE_BINFO, length); |
| |
| t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT); |
| |
| memset (t, 0, offsetof (struct tree_binfo, base_binfos)); |
| |
| TREE_SET_CODE (t, TREE_BINFO); |
| |
| BINFO_BASE_BINFOS (t)->embedded_init (base_binfos); |
| |
| return t; |
| } |
| |
| /* Create a CASE_LABEL_EXPR tree node and return it. */ |
| |
| tree |
| build_case_label (tree low_value, tree high_value, tree label_decl) |
| { |
| tree t = make_node (CASE_LABEL_EXPR); |
| |
| TREE_TYPE (t) = void_type_node; |
| SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl)); |
| |
| CASE_LOW (t) = low_value; |
| CASE_HIGH (t) = high_value; |
| CASE_LABEL (t) = label_decl; |
| CASE_CHAIN (t) = NULL_TREE; |
| |
| return t; |
| } |
| |
| /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the |
| values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively. |
| The latter determines the length of the HOST_WIDE_INT vector. */ |
| |
| tree |
| make_int_cst (int len, int ext_len MEM_STAT_DECL) |
| { |
| tree t; |
| int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT) |
| + sizeof (struct tree_int_cst)); |
| |
| gcc_assert (len); |
| record_node_allocation_statistics (INTEGER_CST, length); |
| |
| t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT); |
| |
| TREE_SET_CODE (t, INTEGER_CST); |
| TREE_INT_CST_NUNITS (t) = len; |
| TREE_INT_CST_EXT_NUNITS (t) = ext_len; |
| /* to_offset can only be applied to trees that are offset_int-sized |
| or smaller. EXT_LEN is correct if it fits, otherwise the constant |
| must be exactly the precision of offset_int and so LEN is correct. */ |
| if (ext_len <= OFFSET_INT_ELTS) |
| TREE_INT_CST_OFFSET_NUNITS (t) = ext_len; |
| else |
| TREE_INT_CST_OFFSET_NUNITS (t) = len; |
| |
| TREE_CONSTANT (t) = 1; |
| |
| return t; |
| } |
| |
| /* Build a newly constructed TREE_VEC node of length LEN. */ |
| |
| tree |
| make_tree_vec (int len MEM_STAT_DECL) |
| { |
| tree t; |
| size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec); |
| |
| record_node_allocation_statistics (TREE_VEC, length); |
| |
| t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT); |
| |
| TREE_SET_CODE (t, TREE_VEC); |
| TREE_VEC_LENGTH (t) = len; |
| |
| return t; |
| } |
| |
| /* Grow a TREE_VEC node to new length LEN. */ |
| |
| tree |
| grow_tree_vec (tree v, int len MEM_STAT_DECL) |
| { |
| gcc_assert (TREE_CODE (v) == TREE_VEC); |
| |
| int oldlen = TREE_VEC_LENGTH (v); |
| gcc_assert (len > oldlen); |
| |
| size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec); |
| size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec); |
| |
| record_node_allocation_statistics (TREE_VEC, length - oldlength); |
| |
| v = (tree) ggc_realloc (v, length PASS_MEM_STAT); |
| |
| TREE_VEC_LENGTH (v) = len; |
| |
| return v; |
| } |
| |
/* Return 1 if EXPR is the constant zero, whether it is integral,
   floating-point or fixed-point, and whether it is scalar, complex or
   vector.  */
| |
| bool |
| zerop (const_tree expr) |
| { |
| return (integer_zerop (expr) |
| || real_zerop (expr) |
| || fixed_zerop (expr)); |
| } |
| |
| /* Return 1 if EXPR is the integer constant zero or a complex constant |
| of zero, or a location wrapper for such a constant. */ |
| |
| bool |
| integer_zerop (const_tree expr) |
| { |
| STRIP_ANY_LOCATION_WRAPPER (expr); |
| |
| switch (TREE_CODE (expr)) |
| { |
| case INTEGER_CST: |
| return wi::to_wide (expr) == 0; |
| case COMPLEX_CST: |
| return (integer_zerop (TREE_REALPART (expr)) |
| && integer_zerop (TREE_IMAGPART (expr))); |
| case VECTOR_CST: |
| return (VECTOR_CST_NPATTERNS (expr) == 1 |
| && VECTOR_CST_DUPLICATE_P (expr) |
| && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0))); |
| default: |
| return false; |
| } |
| } |
| |
| /* Return 1 if EXPR is the integer constant one or the corresponding |
| complex constant, or a location wrapper for such a constant. */ |
| |
| bool |
| integer_onep (const_tree expr) |
| { |
| STRIP_ANY_LOCATION_WRAPPER (expr); |
| |
| switch (TREE_CODE (expr)) |
| { |
| case INTEGER_CST: |
| return wi::eq_p (wi::to_widest (expr), 1); |
| case COMPLEX_CST: |
| return (integer_onep (TREE_REALPART (expr)) |
| && integer_zerop (TREE_IMAGPART (expr))); |
| case VECTOR_CST: |
| return (VECTOR_CST_NPATTERNS (expr) == 1 |
| && VECTOR_CST_DUPLICATE_P (expr) |
| && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0))); |
| default: |
| return false; |
| } |
| } |
| |
| /* Return 1 if EXPR is the integer constant one. For complex and vector, |
| return 1 if every piece is the integer constant one. |
| Also return 1 for location wrappers for such a constant. */ |
| |
| bool |
| integer_each_onep (const_tree expr) |
| { |
| STRIP_ANY_LOCATION_WRAPPER (expr); |
| |
| if (TREE_CODE (expr) == COMPLEX_CST) |
| return (integer_onep (TREE_REALPART (expr)) |
| && integer_onep (TREE_IMAGPART (expr))); |
| else |
| return integer_onep (expr); |
| } |
| |
| /* Return 1 if EXPR is an integer containing all 1's in as much precision as |
| it contains, or a complex or vector whose subparts are such integers, |
| or a location wrapper for such a constant. */ |
| |
| bool |
| integer_all_onesp (const_tree expr) |
| { |
| STRIP_ANY_LOCATION_WRAPPER (expr); |
| |
| if (TREE_CODE (expr) == COMPLEX_CST |
| && integer_all_onesp (TREE_REALPART (expr)) |
| && integer_all_onesp (TREE_IMAGPART (expr))) |
| return true; |
| |
| else if (TREE_CODE (expr) == VECTOR_CST) |
| return (VECTOR_CST_NPATTERNS (expr) == 1 |
| && VECTOR_CST_DUPLICATE_P (expr) |
| && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0))); |
| |
| else if (TREE_CODE (expr) != INTEGER_CST) |
| return false; |
| |
| return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) |
| == wi::to_wide (expr)); |
| } |
| |
| /* Return 1 if EXPR is the integer constant minus one, or a location wrapper |
| for such a constant. */ |
| |
| bool |
| integer_minus_onep (const_tree expr) |
| { |
| STRIP_ANY_LOCATION_WRAPPER (expr); |
| |
| if (TREE_CODE (expr) == COMPLEX_CST) |
| return (integer_all_onesp (TREE_REALPART (expr)) |
| && integer_zerop (TREE_IMAGPART (expr))); |
| else |
| return integer_all_onesp (expr); |
| } |
| |
| /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only |
| one bit on), or a location wrapper for such a constant. */ |
| |
| bool |
| integer_pow2p (const_tree expr) |
| { |
| STRIP_ANY_LOCATION_WRAPPER (expr); |
| |
| if (TREE_CODE (expr) == COMPLEX_CST |
| && integer_pow2p (TREE_REALPART (expr)) |
| && integer_zerop (TREE_IMAGPART (expr))) |
| return true; |
| |
| if (TREE_CODE (expr) != INTEGER_CST) |
| return false; |
| |
| return wi::popcount (wi::to_wide (expr)) == 1; |
| } |
| |
| /* Return 1 if EXPR is an integer constant other than zero or a |
| complex constant other than zero, or a location wrapper for such a |
| constant. */ |
| |
| bool |
| integer_nonzerop (const_tree expr) |
| { |
| STRIP_ANY_LOCATION_WRAPPER (expr); |
| |
| return ((TREE_CODE (expr) == INTEGER_CST |
| && wi::to_wide (expr) != 0) |
| || (TREE_CODE (expr) == COMPLEX_CST |
| && (integer_nonzerop (TREE_REALPART (expr)) |
| || integer_nonzerop (TREE_IMAGPART (expr))))); |
| } |
| |
/* Return 1 if EXPR is the integer constant one.  For vectors,
   return 1 if every element is the integer constant minus one
   (representing the value TRUE).
   Also return 1 for location wrappers for such a constant.  */
| |
| bool |
| integer_truep (const_tree expr) |
| { |
| STRIP_ANY_LOCATION_WRAPPER (expr); |
| |
| if (TREE_CODE (expr) == VECTOR_CST) |
| return integer_all_onesp (expr); |
| return integer_onep (expr); |
| } |
| |
| /* Return 1 if EXPR is the fixed-point constant zero, or a location wrapper |
| for such a constant. */ |
| |
| bool |
| fixed_zerop (const_tree expr) |
| { |
| STRIP_ANY_LOCATION_WRAPPER (expr); |
| |
| return (TREE_CODE (expr) == FIXED_CST |
| && TREE_FIXED_CST (expr).data.is_zero ()); |
| } |
| |
| /* Return the power of two represented by a tree node known to be a |
| power of two. */ |
| |
| int |
| tree_log2 (const_tree expr) |
| { |
| if (TREE_CODE (expr) == COMPLEX_CST) |
| return tree_log2 (TREE_REALPART (expr)); |
| |
| return wi::exact_log2 (wi::to_wide (expr)); |
| } |
| |
| /* Similar, but return the largest integer Y such that 2 ** Y is less |
| than or equal to EXPR. */ |
| |
| int |
| tree_floor_log2 (const_tree expr) |
| { |
| if (TREE_CODE (expr) == COMPLEX_CST) |
| return tree_log2 (TREE_REALPART (expr)); |
| |
| return wi::floor_log2 (wi::to_wide (expr)); |
| } |
| |
/* Return the number of known trailing zero bits in EXPR, or, if the value
   of EXPR is known to be zero, the precision of its type.  */
| |
| unsigned int |
| tree_ctz (const_tree expr) |
| { |
| if (!INTEGRAL_TYPE_P (TREE_TYPE (expr)) |
| && !POINTER_TYPE_P (TREE_TYPE (expr))) |
| return 0; |
| |
| unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr)); |
| switch (TREE_CODE (expr)) |
| { |
| case INTEGER_CST: |
| ret1 = wi::ctz (wi::to_wide (expr)); |
| return MIN (ret1, prec); |
| case SSA_NAME: |
| ret1 = wi::ctz (get_nonzero_bits (expr)); |
| return MIN (ret1, prec); |
| case PLUS_EXPR: |
| case MINUS_EXPR: |
| case BIT_IOR_EXPR: |
| case BIT_XOR_EXPR: |
| case MIN_EXPR: |
| case MAX_EXPR: |
| ret1 = tree_ctz (TREE_OPERAND (expr, 0)); |
| if (ret1 == 0) |
| return ret1; |
| ret2 = tree_ctz (TREE_OPERAND (expr, 1)); |
| return MIN (ret1, ret2); |
| case POINTER_PLUS_EXPR: |
| ret1 = tree_ctz (TREE_OPERAND (expr, 0)); |
| ret2 = tree_ctz (TREE_OPERAND (expr, 1)); |
/* The second operand is sizetype, which could in theory be
   wider than the pointer's precision.  Make sure we never
   return more than PREC.  */
| ret2 = MIN (ret2, prec); |
| return MIN (ret1, ret2); |
| case BIT_AND_EXPR: |
| ret1 = tree_ctz (TREE_OPERAND (expr, 0)); |
| ret2 = tree_ctz (TREE_OPERAND (expr, 1)); |
| return MAX (ret1, ret2); |
| case MULT_EXPR: |
| ret1 = tree_ctz (TREE_OPERAND (expr, 0)); |
| ret2 = tree_ctz (TREE_OPERAND (expr, 1)); |
| return MIN (ret1 + ret2, prec); |
| case LSHIFT_EXPR: |
| ret1 = tree_ctz (TREE_OPERAND (expr, 0)); |
| if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1)) |
| && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec)) |
| { |
| ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1)); |
| return MIN (ret1 + ret2, prec); |
| } |
| return ret1; |
| case RSHIFT_EXPR: |
| if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1)) |
| && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec)) |
| { |
| ret1 = tree_ctz (TREE_OPERAND (expr, 0)); |
| ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1)); |
| if (ret1 > ret2) |
| return ret1 - ret2; |
| } |
| return 0; |
| case TRUNC_DIV_EXPR: |
| case CEIL_DIV_EXPR: |
| case FLOOR_DIV_EXPR: |
| case ROUND_DIV_EXPR: |
| case EXACT_DIV_EXPR: |
| if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST |
| && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1) |
| { |
| int l = tree_log2 (TREE_OPERAND (expr, 1)); |
| if (l >= 0) |
| { |
| ret1 = tree_ctz (TREE_OPERAND (expr, 0)); |
| ret2 = l; |
| if (ret1 > ret2) |
| return ret1 - ret2; |
| } |
| } |
| return 0; |
| CASE_CONVERT: |
| ret1 = tree_ctz (TREE_OPERAND (expr, 0)); |
| if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0)))) |
| ret1 = prec; |
| return MIN (ret1, prec); |
| case SAVE_EXPR: |
| return tree_ctz (TREE_OPERAND (expr, 0)); |
| case COND_EXPR: |
| ret1 = tree_ctz (TREE_OPERAND (expr, 1)); |
| if (ret1 == 0) |
| return 0; |
| ret2 = tree_ctz (TREE_OPERAND (expr, 2)); |
| return MIN (ret1, ret2); |
| case COMPOUND_EXPR: |
| return tree_ctz (TREE_OPERAND (expr, 1)); |
| case ADDR_EXPR: |
| ret1 = get_pointer_alignment (CONST_CAST_TREE (expr)); |
| if (ret1 > BITS_PER_UNIT) |
| { |
| ret1 = ctz_hwi (ret1 / BITS_PER_UNIT); |
| return MIN (ret1, prec); |
| } |
| return 0; |
| default: |
| return 0; |
| } |
| } |
| |
| /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for |
| decimal float constants, so don't return 1 for them. |
| Also return 1 for location wrappers around such a constant. */ |
| |
| bool |
| real_zerop (const_tree expr) |
| { |
| STRIP_ANY_LOCATION_WRAPPER (expr); |
| |
| switch (TREE_CODE (expr)) |
| { |
| case REAL_CST: |
| return real_equal (&TREE_REAL_CST (expr), &dconst0) |
| && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr)))); |
| case COMPLEX_CST: |
| return real_zerop (TREE_REALPART (expr)) |
| && real_zerop (TREE_IMAGPART (expr)); |
| case VECTOR_CST: |
| { |
| /* Don't simply check for a duplicate because the predicate |
| accepts both +0.0 and -0.0. */ |
| unsigned count = vector_cst_encoded_nelts (expr); |
| for (unsigned int i = 0; i < count; ++i) |
| if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i))) |
| return false; |
| return true; |
| } |
| default: |
| return false; |
| } |
| } |
| |
| /* Return 1 if EXPR is the real constant one in real or complex form. |
| Trailing zeroes matter for decimal float constants, so don't return |
| 1 for them. |
| Also return 1 for location wrappers around such a constant. */ |
| |
| bool |
| real_onep (const_tree expr) |
| { |
| STRIP_ANY_LOCATION_WRAPPER (expr); |
| |
| switch (TREE_CODE (expr)) |
| { |
| case REAL_CST: |
| return real_equal (&TREE_REAL_CST (expr), &dconst1) |
| && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr)))); |
| case COMPLEX_CST: |
| return real_onep (TREE_REALPART (expr)) |
| && real_zerop (TREE_IMAGPART (expr)); |
| case VECTOR_CST: |
| return (VECTOR_CST_NPATTERNS (expr) == 1 |
| && VECTOR_CST_DUPLICATE_P (expr) |
| && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0))); |
| default: |
| return false; |
| } |
| } |
| |
| /* Return 1 if EXPR is the real constant minus one. Trailing zeroes |
| matter for decimal float constants, so don't return 1 for them. |
| Also return 1 for location wrappers around such a constant. */ |
| |
| bool |
| real_minus_onep (const_tree expr) |
| { |
| STRIP_ANY_LOCATION_WRAPPER (expr); |
| |
| switch (TREE_CODE (expr)) |
| { |
| case REAL_CST: |
| return real_equal (&TREE_REAL_CST (expr), &dconstm1) |
| && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr)))); |
| case COMPLEX_CST: |
| return real_minus_onep (TREE_REALPART (expr)) |
| && real_zerop (TREE_IMAGPART (expr)); |
| case VECTOR_CST: |
| return (VECTOR_CST_NPATTERNS (expr) == 1 |
| && VECTOR_CST_DUPLICATE_P (expr) |
| && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0))); |
| default: |
| return false; |
| } |
| } |
| |
| /* Nonzero if EXP is a constant or a cast of a constant. */ |
| |
| bool |
| really_constant_p (const_tree exp) |
| { |
| /* This is not quite the same as STRIP_NOPS. It does more. */ |
| while (CONVERT_EXPR_P (exp) |
| || TREE_CODE (exp) == NON_LVALUE_EXPR) |
| exp = TREE_OPERAND (exp, 0); |
| return TREE_CONSTANT (exp); |
| } |
| |
| /* Return true if T holds a polynomial pointer difference, storing it in |
| *VALUE if so. A true return means that T's precision is no greater |
| than 64 bits, which is the largest address space we support, so *VALUE |
| never loses precision. However, the signedness of the result does |
| not necessarily match the signedness of T: sometimes an unsigned type |
| like sizetype is used to encode a value that is actually negative. */ |
| |
| bool |
| ptrdiff_tree_p (const_tree t, poly_int64_pod *value) |
| { |
| if (!t) |
| return false; |
| if (TREE_CODE (t) == INTEGER_CST) |
| { |
| if (!cst_and_fits_in_hwi (t)) |
| return false; |
| *value = int_cst_value (t); |
| return true; |
| } |
| if (POLY_INT_CST_P (t)) |
| { |
| for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i) |
| if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i))) |
| return false; |
| for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i) |
| value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i)); |
| return true; |
| } |
| return false; |
| } |
| |
| poly_int64 |
| tree_to_poly_int64 (const_tree t) |
| { |
| gcc_assert (tree_fits_poly_int64_p (t)); |
| if (POLY_INT_CST_P (t)) |
| return poly_int_cst_value (t).force_shwi (); |
| return TREE_INT_CST_LOW (t); |
| } |
| |
|