| /* Register Transfer Language (RTL) definitions for GCC |
| Copyright (C) 1987-2022 Free Software Foundation, Inc. |
| |
| This file is part of GCC. |
| |
| GCC is free software; you can redistribute it and/or modify it under |
| the terms of the GNU General Public License as published by the Free |
| Software Foundation; either version 3, or (at your option) any later |
| version. |
| |
| GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
| WARRANTY; without even the implied warranty of MERCHANTABILITY or |
| FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
| for more details. |
| |
| You should have received a copy of the GNU General Public License |
| along with GCC; see the file COPYING3. If not see |
| <http://www.gnu.org/licenses/>. */ |
| |
| #ifndef GCC_RTL_H |
| #define GCC_RTL_H |
| |
| /* This file is occasionally included by generator files which expect |
| machmode.h and other files to exist and would not normally have been |
| included by coretypes.h. */ |
| #ifdef GENERATOR_FILE |
| #include "real.h" |
| #include "fixed-value.h" |
| #include "statistics.h" |
| #include "vec.h" |
| #include "hash-table.h" |
| #include "hash-set.h" |
| #include "input.h" |
| #include "is-a.h" |
| #endif /* GENERATOR_FILE */ |
| |
| #include "hard-reg-set.h" |
| |
| class predefined_function_abi; |
| |
| /* Value used by some passes to "recognize" noop moves as valid |
| instructions. */ |
| #define NOOP_MOVE_INSN_CODE INT_MAX |
| |
/* Register Transfer Language EXPRESSION CODES */
| |
| #define RTX_CODE enum rtx_code |
| enum rtx_code { |
| |
| #define DEF_RTL_EXPR(ENUM, NAME, FORMAT, CLASS) ENUM , |
| #include "rtl.def" /* rtl expressions are documented here */ |
| #undef DEF_RTL_EXPR |
| |
| LAST_AND_UNUSED_RTX_CODE}; /* A convenient way to get a value for |
| NUM_RTX_CODE. |
| Assumes default enum value assignment. */ |
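
/* For illustration: each entry in rtl.def, such as

     DEF_RTL_EXPR(PLUS, "plus", "ee", RTX_COMM_ARITH)

   expands under DEF_RTL_EXPR above to a bare enumerator (here `PLUS,'),
   so the enum lists every rtx code in rtl.def order.  */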
| |
/* The cast here saves many elsewhere. */
| #define NUM_RTX_CODE ((int) LAST_AND_UNUSED_RTX_CODE) |
| |
/* Similar, but generator files get additional codes appended (starting
   with MATCH_OPERAND), so also record how many codes non-generator
   files see. */
| #ifdef GENERATOR_FILE |
| # define NON_GENERATOR_NUM_RTX_CODE ((int) MATCH_OPERAND) |
| #endif |
| |
/* Register Transfer Language EXPRESSION CODE CLASSES */
| |
| enum rtx_class { |
  /* We check bits 0-1 of some rtx class codes in the predicates below.  */

  /* Bit 0 = 1 if commutative.
     Bit 1 = 0 for comparison, 1 for arithmetic.  */
| RTX_COMPARE, /* 0 */ |
| RTX_COMM_COMPARE, |
| RTX_BIN_ARITH, |
| RTX_COMM_ARITH, |
| |
| /* Must follow the four preceding values. */ |
| RTX_UNARY, /* 4 */ |
| |
| RTX_EXTRA, |
| RTX_MATCH, |
| RTX_INSN, |
| |
| /* Bit 0 = 1 if constant. */ |
| RTX_OBJ, /* 8 */ |
| RTX_CONST_OBJ, |
| |
| RTX_TERNARY, |
| RTX_BITFIELD_OPS, |
| RTX_AUTOINC |
| }; |
| |
| #define RTX_OBJ_MASK (~1) |
| #define RTX_OBJ_RESULT (RTX_OBJ & RTX_OBJ_MASK) |
| #define RTX_COMPARE_MASK (~1) |
| #define RTX_COMPARE_RESULT (RTX_COMPARE & RTX_COMPARE_MASK) |
| #define RTX_ARITHMETIC_MASK (~1) |
| #define RTX_ARITHMETIC_RESULT (RTX_COMM_ARITH & RTX_ARITHMETIC_MASK) |
| #define RTX_BINARY_MASK (~3) |
| #define RTX_BINARY_RESULT (RTX_COMPARE & RTX_BINARY_MASK) |
| #define RTX_COMMUTATIVE_MASK (~2) |
| #define RTX_COMMUTATIVE_RESULT (RTX_COMM_COMPARE & RTX_COMMUTATIVE_MASK) |
| #define RTX_NON_COMMUTATIVE_RESULT (RTX_COMPARE & RTX_COMMUTATIVE_MASK) |
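
/* A worked illustration of the encoding, assuming the default enum
   assignment above (RTX_COMPARE = 0, RTX_COMM_COMPARE = 1,
   RTX_BIN_ARITH = 2, RTX_COMM_ARITH = 3):

     COMPARISON_P:  (class & ~1) == 0  matches {0, 1}, the comparisons.
     ARITHMETIC_P:  (class & ~1) == 2  matches {2, 3}, the arithmetics.
     COMMUTATIVE_P: (class & ~2) == 1  matches {1, 3}, the commutative pair.
     BINARY_P:      (class & ~3) == 0  matches all four.  */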
| |
| extern const unsigned char rtx_length[NUM_RTX_CODE]; |
| #define GET_RTX_LENGTH(CODE) (rtx_length[(int) (CODE)]) |
| |
| extern const char * const rtx_name[NUM_RTX_CODE]; |
| #define GET_RTX_NAME(CODE) (rtx_name[(int) (CODE)]) |
| |
| extern const char * const rtx_format[NUM_RTX_CODE]; |
| #define GET_RTX_FORMAT(CODE) (rtx_format[(int) (CODE)]) |
| |
| extern const enum rtx_class rtx_class[NUM_RTX_CODE]; |
| #define GET_RTX_CLASS(CODE) (rtx_class[(int) (CODE)]) |
| |
| /* True if CODE is part of the insn chain (i.e. has INSN_UID, PREV_INSN |
| and NEXT_INSN fields). */ |
| #define INSN_CHAIN_CODE_P(CODE) IN_RANGE (CODE, DEBUG_INSN, NOTE) |
| |
| extern const unsigned char rtx_code_size[NUM_RTX_CODE]; |
| extern const unsigned char rtx_next[NUM_RTX_CODE]; |
| |
| /* The flags and bitfields of an ADDR_DIFF_VEC. BASE is the base label |
| relative to which the offsets are calculated, as explained in rtl.def. */ |
| struct addr_diff_vec_flags |
| { |
  /* Set at the start of shorten_branches, ONLY WHEN OPTIMIZING: */
| unsigned min_align: 8; |
| /* Flags: */ |
| unsigned base_after_vec: 1; /* BASE is after the ADDR_DIFF_VEC. */ |
| unsigned min_after_vec: 1; /* minimum address target label is |
| after the ADDR_DIFF_VEC. */ |
| unsigned max_after_vec: 1; /* maximum address target label is |
| after the ADDR_DIFF_VEC. */ |
| unsigned min_after_base: 1; /* minimum address target label is |
| after BASE. */ |
| unsigned max_after_base: 1; /* maximum address target label is |
| after BASE. */ |
  /* Set by the actual branch shortening process, ONLY WHEN OPTIMIZING: */
| unsigned offset_unsigned: 1; /* offsets have to be treated as unsigned. */ |
| unsigned : 2; |
| unsigned scale : 8; |
| }; |
| |
/* Structure used to describe the attributes of a MEM.  These are hashed
   so that MEMs with the same attributes share a data structure.  This means
   they cannot be modified in place. */
| class GTY(()) mem_attrs |
| { |
| public: |
| mem_attrs (); |
| |
| /* The expression that the MEM accesses, or null if not known. |
| This expression might be larger than the memory reference itself. |
| (In other words, the MEM might access only part of the object.) */ |
| tree expr; |
| |
| /* The offset of the memory reference from the start of EXPR. |
| Only valid if OFFSET_KNOWN_P. */ |
| poly_int64 offset; |
| |
| /* The size of the memory reference in bytes. Only valid if |
| SIZE_KNOWN_P. */ |
| poly_int64 size; |
| |
| /* The alias set of the memory reference. */ |
| alias_set_type alias; |
| |
| /* The alignment of the reference in bits. Always a multiple of |
| BITS_PER_UNIT. Note that EXPR may have a stricter alignment |
| than the memory reference itself. */ |
| unsigned int align; |
| |
| /* The address space that the memory reference uses. */ |
| unsigned char addrspace; |
| |
| /* True if OFFSET is known. */ |
| bool offset_known_p; |
| |
| /* True if SIZE is known. */ |
| bool size_known_p; |
| }; |
| |
| /* Structure used to describe the attributes of a REG in similar way as |
| mem_attrs does for MEM above. Note that the OFFSET field is calculated |
| in the same way as for mem_attrs, rather than in the same way as a |
| SUBREG_BYTE. For example, if a big-endian target stores a byte |
| object in the low part of a 4-byte register, the OFFSET field |
| will be -3 rather than 0. */ |
| |
| class GTY((for_user)) reg_attrs { |
| public: |
| tree decl; /* decl corresponding to REG. */ |
| poly_int64 offset; /* Offset from start of DECL. */ |
| }; |
| |
| /* Common union for an element of an rtx. */ |
| |
| union rtunion |
| { |
| int rt_int; |
| unsigned int rt_uint; |
| poly_uint16_pod rt_subreg; |
| const char *rt_str; |
| rtx rt_rtx; |
| rtvec rt_rtvec; |
| machine_mode rt_type; |
| addr_diff_vec_flags rt_addr_diff_vec_flags; |
| struct cselib_val *rt_cselib; |
| tree rt_tree; |
| basic_block rt_bb; |
| mem_attrs *rt_mem; |
| class constant_descriptor_rtx *rt_constant; |
| struct dw_cfi_node *rt_cfi; |
| }; |
| |
| /* Describes the properties of a REG. */ |
| struct GTY(()) reg_info { |
| /* The value of REGNO. */ |
| unsigned int regno; |
| |
| /* The value of REG_NREGS. */ |
| unsigned int nregs : 8; |
| unsigned int unused : 24; |
| |
| /* The value of REG_ATTRS. */ |
| reg_attrs *attrs; |
| }; |
| |
| /* This structure remembers the position of a SYMBOL_REF within an |
| object_block structure. A SYMBOL_REF only provides this information |
| if SYMBOL_REF_HAS_BLOCK_INFO_P is true. */ |
| struct GTY(()) block_symbol { |
| /* The usual SYMBOL_REF fields. */ |
| rtunion GTY ((skip)) fld[2]; |
| |
| /* The block that contains this object. */ |
| struct object_block *block; |
| |
| /* The offset of this object from the start of its block. It is negative |
| if the symbol has not yet been assigned an offset. */ |
| HOST_WIDE_INT offset; |
| }; |
| |
| /* Describes a group of objects that are to be placed together in such |
| a way that their relative positions are known. */ |
| struct GTY((for_user)) object_block { |
| /* The section in which these objects should be placed. */ |
| section *sect; |
| |
| /* The alignment of the first object, measured in bits. */ |
| unsigned int alignment; |
| |
| /* The total size of the objects, measured in bytes. */ |
| HOST_WIDE_INT size; |
| |
| /* The SYMBOL_REFs for each object. The vector is sorted in |
| order of increasing offset and the following conditions will |
| hold for each element X: |
| |
| SYMBOL_REF_HAS_BLOCK_INFO_P (X) |
| !SYMBOL_REF_ANCHOR_P (X) |
| SYMBOL_REF_BLOCK (X) == [address of this structure] |
| SYMBOL_REF_BLOCK_OFFSET (X) >= 0. */ |
| vec<rtx, va_gc> *objects; |
| |
| /* All the anchor SYMBOL_REFs used to address these objects, sorted |
| in order of increasing offset, and then increasing TLS model. |
| The following conditions will hold for each element X in this vector: |
| |
| SYMBOL_REF_HAS_BLOCK_INFO_P (X) |
| SYMBOL_REF_ANCHOR_P (X) |
| SYMBOL_REF_BLOCK (X) == [address of this structure] |
| SYMBOL_REF_BLOCK_OFFSET (X) >= 0. */ |
| vec<rtx, va_gc> *anchors; |
| }; |
| |
| struct GTY((variable_size)) hwivec_def { |
| HOST_WIDE_INT elem[1]; |
| }; |
| |
| /* Number of elements of the HWIVEC if RTX is a CONST_WIDE_INT. */ |
| #define CWI_GET_NUM_ELEM(RTX) \ |
| ((int)RTL_FLAG_CHECK1("CWI_GET_NUM_ELEM", (RTX), CONST_WIDE_INT)->u2.num_elem) |
| #define CWI_PUT_NUM_ELEM(RTX, NUM) \ |
| (RTL_FLAG_CHECK1("CWI_PUT_NUM_ELEM", (RTX), CONST_WIDE_INT)->u2.num_elem = (NUM)) |
| |
| struct GTY((variable_size)) const_poly_int_def { |
| trailing_wide_ints<NUM_POLY_INT_COEFFS> coeffs; |
| }; |
| |
| /* RTL expression ("rtx"). */ |
| |
| /* The GTY "desc" and "tag" options below are a kludge: we need a desc |
| field for gengtype to recognize that inheritance is occurring, |
| so that all subclasses are redirected to the traversal hook for the |
| base class. |
| However, all of the fields are in the base class, and special-casing |
| is at work. Hence we use desc and tag of 0, generating a switch |
| statement of the form: |
| switch (0) |
| { |
| case 0: // all the work happens here |
| } |
| in order to work with the existing special-casing in gengtype. */ |
| |
| struct GTY((desc("0"), tag("0"), |
| chain_next ("RTX_NEXT (&%h)"), |
| chain_prev ("RTX_PREV (&%h)"))) rtx_def { |
| /* The kind of expression this is. */ |
| ENUM_BITFIELD(rtx_code) code: 16; |
| |
| /* The kind of value the expression has. */ |
| ENUM_BITFIELD(machine_mode) mode : 8; |
| |
| /* 1 in a MEM if we should keep the alias set for this mem unchanged |
| when we access a component. |
| 1 in a JUMP_INSN if it is a crossing jump. |
| 1 in a CALL_INSN if it is a sibling call. |
| 1 in a SET that is for a return. |
| In a CODE_LABEL, part of the two-bit alternate entry field. |
| 1 in a CONCAT is VAL_EXPR_IS_COPIED in var-tracking.cc. |
| 1 in a VALUE is SP_BASED_VALUE_P in cselib.cc. |
| 1 in a SUBREG generated by LRA for reload insns. |
| 1 in a REG if this is a static chain register. |
| Dumped as "/j" in RTL dumps. */ |
| unsigned int jump : 1; |
| /* In a CODE_LABEL, part of the two-bit alternate entry field. |
| 1 in a MEM if it cannot trap. |
| 1 in a CALL_INSN logically equivalent to |
| ECF_LOOPING_CONST_OR_PURE and DECL_LOOPING_CONST_OR_PURE_P. |
| 1 in a VALUE is SP_DERIVED_VALUE_P in cselib.cc. |
| Dumped as "/c" in RTL dumps. */ |
| unsigned int call : 1; |
| /* 1 in a REG, MEM, or CONCAT if the value is set at most once, anywhere. |
| 1 in a SUBREG used for SUBREG_PROMOTED_UNSIGNED_P. |
| 1 in a SYMBOL_REF if it addresses something in the per-function |
| constants pool. |
| 1 in a CALL_INSN logically equivalent to ECF_CONST and TREE_READONLY. |
| 1 in a NOTE, or EXPR_LIST for a const call. |
| 1 in a JUMP_INSN of an annulling branch. |
| 1 in a CONCAT is VAL_EXPR_IS_CLOBBERED in var-tracking.cc. |
| 1 in a preserved VALUE is PRESERVED_VALUE_P in cselib.cc. |
| 1 in a clobber temporarily created for LRA. |
| Dumped as "/u" in RTL dumps. */ |
| unsigned int unchanging : 1; |
| /* 1 in a MEM or ASM_OPERANDS expression if the memory reference is volatile. |
| 1 in an INSN, CALL_INSN, JUMP_INSN, CODE_LABEL, BARRIER, or NOTE |
| if it has been deleted. |
     1 in a REG expression if it corresponds to a variable declared by
     the user, 0 for an internally generated temporary.
| 1 in a SUBREG used for SUBREG_PROMOTED_UNSIGNED_P. |
| 1 in a LABEL_REF, REG_LABEL_TARGET or REG_LABEL_OPERAND note for a |
| non-local label. |
| In a SYMBOL_REF, this flag is used for machine-specific purposes. |
| In a PREFETCH, this flag indicates that it should be considered a |
| scheduling barrier. |
| 1 in a CONCAT is VAL_NEEDS_RESOLUTION in var-tracking.cc. |
| Dumped as "/v" in RTL dumps. */ |
| unsigned int volatil : 1; |
  /* 1 in a REG if the register is used only in the exit code of a loop.
     1 in a SUBREG expression if it was generated from a variable with a
     promoted mode.
| 1 in a CODE_LABEL if the label is used for nonlocal gotos |
| and must not be deleted even if its count is zero. |
| 1 in an INSN, JUMP_INSN or CALL_INSN if this insn must be scheduled |
| together with the preceding insn. Valid only within sched. |
| 1 in an INSN, JUMP_INSN, or CALL_INSN if insn is in a delay slot and |
| from the target of a branch. Valid from reorg until end of compilation; |
| cleared before used. |
| |
| The name of the field is historical. It used to be used in MEMs |
| to record whether the MEM accessed part of a structure. |
| Dumped as "/s" in RTL dumps. */ |
| unsigned int in_struct : 1; |
| /* At the end of RTL generation, 1 if this rtx is used. This is used for |
| copying shared structure. See `unshare_all_rtl'. |
| In a REG, this is not needed for that purpose, and used instead |
| in `leaf_renumber_regs_insn'. |
| 1 in a SYMBOL_REF, means that emit_library_call |
| has used it as the function. |
| 1 in a CONCAT is VAL_HOLDS_TRACK_EXPR in var-tracking.cc. |
| 1 in a VALUE or DEBUG_EXPR is VALUE_RECURSED_INTO in var-tracking.cc. */ |
| unsigned int used : 1; |
| /* 1 in an INSN or a SET if this rtx is related to the call frame, |
| either changing how we compute the frame address or saving and |
| restoring registers in the prologue and epilogue. |
| 1 in a REG or MEM if it is a pointer. |
| 1 in a SYMBOL_REF if it addresses something in the per-function |
| constant string pool. |
| 1 in a VALUE is VALUE_CHANGED in var-tracking.cc. |
| Dumped as "/f" in RTL dumps. */ |
| unsigned frame_related : 1; |
| /* 1 in a REG or PARALLEL that is the current function's return value. |
| 1 in a SYMBOL_REF for a weak symbol. |
| 1 in a CALL_INSN logically equivalent to ECF_PURE and DECL_PURE_P. |
| 1 in a CONCAT is VAL_EXPR_HAS_REVERSE in var-tracking.cc. |
| 1 in a VALUE or DEBUG_EXPR is NO_LOC_P in var-tracking.cc. |
| Dumped as "/i" in RTL dumps. */ |
| unsigned return_val : 1; |
| |
| union { |
| /* The final union field is aligned to 64 bits on LP64 hosts, |
| giving a 32-bit gap after the fields above. We optimize the |
| layout for that case and use the gap for extra code-specific |
| information. */ |
| |
| /* The ORIGINAL_REGNO of a REG. */ |
| unsigned int original_regno; |
| |
| /* The INSN_UID of an RTX_INSN-class code. */ |
| int insn_uid; |
| |
| /* The SYMBOL_REF_FLAGS of a SYMBOL_REF. */ |
| unsigned int symbol_ref_flags; |
| |
| /* The PAT_VAR_LOCATION_STATUS of a VAR_LOCATION. */ |
| enum var_init_status var_location_status; |
| |
| /* In a CONST_WIDE_INT (aka hwivec_def), this is the number of |
| HOST_WIDE_INTs in the hwivec_def. */ |
| unsigned int num_elem; |
| |
| /* Information about a CONST_VECTOR. */ |
| struct |
| { |
| /* The value of CONST_VECTOR_NPATTERNS. */ |
| unsigned int npatterns : 16; |
| |
| /* The value of CONST_VECTOR_NELTS_PER_PATTERN. */ |
| unsigned int nelts_per_pattern : 8; |
| |
| /* For future expansion. */ |
| unsigned int unused : 8; |
| } const_vector; |
| } GTY ((skip)) u2; |
| |
| /* The first element of the operands of this rtx. |
| The number of operands and their types are controlled |
| by the `code' field, according to rtl.def. */ |
| union u { |
| rtunion fld[1]; |
| HOST_WIDE_INT hwint[1]; |
| struct reg_info reg; |
| struct block_symbol block_sym; |
| struct real_value rv; |
| struct fixed_value fv; |
| struct hwivec_def hwiv; |
| struct const_poly_int_def cpi; |
| } GTY ((special ("rtx_def"), desc ("GET_CODE (&%0)"))) u; |
| }; |
| |
| /* A node for constructing singly-linked lists of rtx. */ |
| |
| struct GTY(()) rtx_expr_list : public rtx_def |
| { |
| private: |
| /* No extra fields, but adds invariant: (GET_CODE (X) == EXPR_LIST). */ |
| |
| public: |
| /* Get next in list. */ |
| rtx_expr_list *next () const; |
| |
| /* Get at the underlying rtx. */ |
| rtx element () const; |
| }; |
| |
| template <> |
| template <> |
| inline bool |
| is_a_helper <rtx_expr_list *>::test (rtx rt) |
| { |
| return rt->code == EXPR_LIST; |
| } |
| |
| struct GTY(()) rtx_insn_list : public rtx_def |
| { |
| private: |
| /* No extra fields, but adds invariant: (GET_CODE (X) == INSN_LIST). |
| |
| This is an instance of: |
| |
| DEF_RTL_EXPR(INSN_LIST, "insn_list", "ue", RTX_EXTRA) |
| |
| i.e. a node for constructing singly-linked lists of rtx_insn *, where |
| the list is "external" to the insn (as opposed to the doubly-linked |
| list embedded within rtx_insn itself). */ |
| |
| public: |
| /* Get next in list. */ |
| rtx_insn_list *next () const; |
| |
| /* Get at the underlying instruction. */ |
| rtx_insn *insn () const; |
| |
| }; |
| |
| template <> |
| template <> |
| inline bool |
| is_a_helper <rtx_insn_list *>::test (rtx rt) |
| { |
| return rt->code == INSN_LIST; |
| } |
| |
| /* A node with invariant GET_CODE (X) == SEQUENCE i.e. a vector of rtx, |
| typically (but not always) of rtx_insn *, used in the late passes. */ |
| |
| struct GTY(()) rtx_sequence : public rtx_def |
| { |
| private: |
| /* No extra fields, but adds invariant: (GET_CODE (X) == SEQUENCE). */ |
| |
| public: |
| /* Get number of elements in sequence. */ |
| int len () const; |
| |
| /* Get i-th element of the sequence. */ |
| rtx element (int index) const; |
| |
| /* Get i-th element of the sequence, with a checked cast to |
| rtx_insn *. */ |
| rtx_insn *insn (int index) const; |
| }; |
| |
| template <> |
| template <> |
| inline bool |
| is_a_helper <rtx_sequence *>::test (rtx rt) |
| { |
| return rt->code == SEQUENCE; |
| } |
| |
| template <> |
| template <> |
| inline bool |
| is_a_helper <const rtx_sequence *>::test (const_rtx rt) |
| { |
| return rt->code == SEQUENCE; |
| } |
| |
| struct GTY(()) rtx_insn : public rtx_def |
| { |
| public: |
| /* No extra fields, but adds the invariant: |
| |
| (INSN_P (X) |
| || NOTE_P (X) |
| || JUMP_TABLE_DATA_P (X) |
| || BARRIER_P (X) |
| || LABEL_P (X)) |
| |
| i.e. that we must be able to use the following: |
| INSN_UID () |
| NEXT_INSN () |
| PREV_INSN () |
| i.e. we have an rtx that has an INSN_UID field and can be part of |
| a linked list of insns. |
| */ |
| |
| /* Returns true if this insn has been deleted. */ |
| |
| bool deleted () const { return volatil; } |
| |
| /* Mark this insn as deleted. */ |
| |
| void set_deleted () { volatil = true; } |
| |
| /* Mark this insn as not deleted. */ |
| |
| void set_undeleted () { volatil = false; } |
| }; |
| |
| /* Subclasses of rtx_insn. */ |
| |
| struct GTY(()) rtx_debug_insn : public rtx_insn |
| { |
| /* No extra fields, but adds the invariant: |
| DEBUG_INSN_P (X) aka (GET_CODE (X) == DEBUG_INSN) |
| i.e. an annotation for tracking variable assignments. |
| |
| This is an instance of: |
| DEF_RTL_EXPR(DEBUG_INSN, "debug_insn", "uuBeiie", RTX_INSN) |
| from rtl.def. */ |
| }; |
| |
| struct GTY(()) rtx_nonjump_insn : public rtx_insn |
| { |
| /* No extra fields, but adds the invariant: |
| NONJUMP_INSN_P (X) aka (GET_CODE (X) == INSN) |
     i.e. an instruction that cannot jump.
| |
| This is an instance of: |
| DEF_RTL_EXPR(INSN, "insn", "uuBeiie", RTX_INSN) |
| from rtl.def. */ |
| }; |
| |
| struct GTY(()) rtx_jump_insn : public rtx_insn |
| { |
| public: |
| /* No extra fields, but adds the invariant: |
| JUMP_P (X) aka (GET_CODE (X) == JUMP_INSN) |
| i.e. an instruction that can possibly jump. |
| |
| This is an instance of: |
| DEF_RTL_EXPR(JUMP_INSN, "jump_insn", "uuBeiie0", RTX_INSN) |
| from rtl.def. */ |
| |
| /* Returns jump target of this instruction. The returned value is not |
| necessarily a code label: it may also be a RETURN or SIMPLE_RETURN |
| expression. Also, when the code label is marked "deleted", it is |
| replaced by a NOTE. In some cases the value is NULL_RTX. */ |
| |
| inline rtx jump_label () const; |
| |
| /* Returns jump target cast to rtx_code_label *. */ |
| |
| inline rtx_code_label *jump_target () const; |
| |
| /* Set jump target. */ |
| |
| inline void set_jump_target (rtx_code_label *); |
| }; |
| |
| struct GTY(()) rtx_call_insn : public rtx_insn |
| { |
| /* No extra fields, but adds the invariant: |
| CALL_P (X) aka (GET_CODE (X) == CALL_INSN) |
| i.e. an instruction that can possibly call a subroutine |
| but which will not change which instruction comes next |
| in the current function. |
| |
| This is an instance of: |
| DEF_RTL_EXPR(CALL_INSN, "call_insn", "uuBeiiee", RTX_INSN) |
| from rtl.def. */ |
| }; |
| |
| struct GTY(()) rtx_jump_table_data : public rtx_insn |
| { |
| /* No extra fields, but adds the invariant: |
     JUMP_TABLE_DATA_P (X) aka (GET_CODE (X) == JUMP_TABLE_DATA)
     i.e. data for a jump table, considered an instruction for
| historical reasons. |
| |
| This is an instance of: |
| DEF_RTL_EXPR(JUMP_TABLE_DATA, "jump_table_data", "uuBe0000", RTX_INSN) |
| from rtl.def. */ |
| |
| /* This can be either: |
| |
| (a) a table of absolute jumps, in which case PATTERN (this) is an |
| ADDR_VEC with arg 0 a vector of labels, or |
| |
| (b) a table of relative jumps (e.g. for -fPIC), in which case |
| PATTERN (this) is an ADDR_DIFF_VEC, with arg 0 a LABEL_REF and |
| arg 1 the vector of labels. |
| |
| This method gets the underlying vec. */ |
| |
| inline rtvec get_labels () const; |
| inline scalar_int_mode get_data_mode () const; |
| }; |
| |
| struct GTY(()) rtx_barrier : public rtx_insn |
| { |
| /* No extra fields, but adds the invariant: |
| BARRIER_P (X) aka (GET_CODE (X) == BARRIER) |
| i.e. a marker that indicates that control will not flow through. |
| |
| This is an instance of: |
| DEF_RTL_EXPR(BARRIER, "barrier", "uu00000", RTX_EXTRA) |
| from rtl.def. */ |
| }; |
| |
| struct GTY(()) rtx_code_label : public rtx_insn |
| { |
| /* No extra fields, but adds the invariant: |
| LABEL_P (X) aka (GET_CODE (X) == CODE_LABEL) |
| i.e. a label in the assembler. |
| |
| This is an instance of: |
| DEF_RTL_EXPR(CODE_LABEL, "code_label", "uuB00is", RTX_EXTRA) |
| from rtl.def. */ |
| }; |
| |
| struct GTY(()) rtx_note : public rtx_insn |
| { |
| /* No extra fields, but adds the invariant: |
| NOTE_P(X) aka (GET_CODE (X) == NOTE) |
| i.e. a note about the corresponding source code. |
| |
| This is an instance of: |
| DEF_RTL_EXPR(NOTE, "note", "uuB0ni", RTX_EXTRA) |
| from rtl.def. */ |
| }; |
| |
| /* The size in bytes of an rtx header (code, mode and flags). */ |
| #define RTX_HDR_SIZE offsetof (struct rtx_def, u) |
| |
| /* The size in bytes of an rtx with code CODE. */ |
| #define RTX_CODE_SIZE(CODE) rtx_code_size[CODE] |
| |
| #define NULL_RTX (rtx) 0 |
| |
| /* The "next" and "previous" RTX, relative to this one. */ |
| |
| #define RTX_NEXT(X) (rtx_next[GET_CODE (X)] == 0 ? NULL \ |
| : *(rtx *)(((char *)X) + rtx_next[GET_CODE (X)])) |
| |
/* FIXME: the "NEXT_INSN (PREV_INSN (X)) == X" condition shouldn't be
   needed.  */
| #define RTX_PREV(X) ((INSN_P (X) \ |
| || NOTE_P (X) \ |
| || JUMP_TABLE_DATA_P (X) \ |
| || BARRIER_P (X) \ |
| || LABEL_P (X)) \ |
| && PREV_INSN (as_a <rtx_insn *> (X)) != NULL \ |
| && NEXT_INSN (PREV_INSN (as_a <rtx_insn *> (X))) == X \ |
| ? PREV_INSN (as_a <rtx_insn *> (X)) : NULL) |
| |
/* Define macros to access the `code' and `mode' fields of the rtx. */
| |
| #define GET_CODE(RTX) ((enum rtx_code) (RTX)->code) |
| #define PUT_CODE(RTX, CODE) ((RTX)->code = (CODE)) |
| |
| #define GET_MODE(RTX) ((machine_mode) (RTX)->mode) |
| #define PUT_MODE_RAW(RTX, MODE) ((RTX)->mode = (MODE)) |
| |
| /* RTL vector. These appear inside RTX's when there is a need |
   for a variable number of things.  The principal use is inside
| PARALLEL expressions. */ |
| |
| struct GTY(()) rtvec_def { |
| int num_elem; /* number of elements */ |
| rtx GTY ((length ("%h.num_elem"))) elem[1]; |
| }; |
| |
| #define NULL_RTVEC (rtvec) 0 |
| |
| #define GET_NUM_ELEM(RTVEC) ((RTVEC)->num_elem) |
| #define PUT_NUM_ELEM(RTVEC, NUM) ((RTVEC)->num_elem = (NUM)) |
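
/* For illustration, the elements of an rtvec are visited as:

     for (int i = 0; i < GET_NUM_ELEM (v); i++)
       process (RTVEC_ELT (v, i));

   where `process' is a hypothetical placeholder and RTVEC_ELT is
   defined below.  */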
| |
| /* Predicate yielding nonzero iff X is an rtx for a register. */ |
| #define REG_P(X) (GET_CODE (X) == REG) |
| |
| /* Predicate yielding nonzero iff X is an rtx for a memory location. */ |
| #define MEM_P(X) (GET_CODE (X) == MEM) |
| |
| #if TARGET_SUPPORTS_WIDE_INT |
| |
| /* Match CONST_*s that can represent compile-time constant integers. */ |
| #define CASE_CONST_SCALAR_INT \ |
| case CONST_INT: \ |
| case CONST_WIDE_INT |
| |
| /* Match CONST_*s for which pointer equality corresponds to value |
| equality. */ |
| #define CASE_CONST_UNIQUE \ |
| case CONST_INT: \ |
| case CONST_WIDE_INT: \ |
| case CONST_POLY_INT: \ |
| case CONST_DOUBLE: \ |
| case CONST_FIXED |
| |
| /* Match all CONST_* rtxes. */ |
| #define CASE_CONST_ANY \ |
| case CONST_INT: \ |
| case CONST_WIDE_INT: \ |
| case CONST_POLY_INT: \ |
| case CONST_DOUBLE: \ |
| case CONST_FIXED: \ |
| case CONST_VECTOR |
| |
| #else |
| |
| /* Match CONST_*s that can represent compile-time constant integers. */ |
| #define CASE_CONST_SCALAR_INT \ |
| case CONST_INT: \ |
| case CONST_DOUBLE |
| |
| /* Match CONST_*s for which pointer equality corresponds to value |
| equality. */ |
| #define CASE_CONST_UNIQUE \ |
| case CONST_INT: \ |
| case CONST_DOUBLE: \ |
| case CONST_FIXED |
| |
| /* Match all CONST_* rtxes. */ |
| #define CASE_CONST_ANY \ |
| case CONST_INT: \ |
| case CONST_DOUBLE: \ |
| case CONST_FIXED: \ |
| case CONST_VECTOR |
| #endif |
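
/* The CASE_CONST_* macros above are intended for use as case labels in
   a switch over GET_CODE, e.g.:

     switch (GET_CODE (x))
       {
       CASE_CONST_ANY:
         return true;
       default:
         return false;
       }  */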
| |
| /* Predicate yielding nonzero iff X is an rtx for a constant integer. */ |
| #define CONST_INT_P(X) (GET_CODE (X) == CONST_INT) |
| |
/* Predicate yielding nonzero iff X is an rtx for a constant wide integer. */
| #define CONST_WIDE_INT_P(X) (GET_CODE (X) == CONST_WIDE_INT) |
| |
| /* Predicate yielding nonzero iff X is an rtx for a polynomial constant |
| integer. */ |
| #define CONST_POLY_INT_P(X) \ |
| (NUM_POLY_INT_COEFFS > 1 && GET_CODE (X) == CONST_POLY_INT) |
| |
| /* Predicate yielding nonzero iff X is an rtx for a constant fixed-point. */ |
| #define CONST_FIXED_P(X) (GET_CODE (X) == CONST_FIXED) |
| |
| /* Predicate yielding true iff X is an rtx for a double-int |
| or floating point constant. */ |
| #define CONST_DOUBLE_P(X) (GET_CODE (X) == CONST_DOUBLE) |
| |
| /* Predicate yielding true iff X is an rtx for a double-int. */ |
| #define CONST_DOUBLE_AS_INT_P(X) \ |
| (GET_CODE (X) == CONST_DOUBLE && GET_MODE (X) == VOIDmode) |
| |
/* Predicate yielding true iff X is an rtx for an integer constant. */
| #if TARGET_SUPPORTS_WIDE_INT |
| #define CONST_SCALAR_INT_P(X) \ |
| (CONST_INT_P (X) || CONST_WIDE_INT_P (X)) |
| #else |
| #define CONST_SCALAR_INT_P(X) \ |
| (CONST_INT_P (X) || CONST_DOUBLE_AS_INT_P (X)) |
| #endif |
| |
/* Predicate yielding true iff X is an rtx for a floating point constant. */
| #define CONST_DOUBLE_AS_FLOAT_P(X) \ |
| (GET_CODE (X) == CONST_DOUBLE && GET_MODE (X) != VOIDmode) |
| |
| /* Predicate yielding nonzero iff X is an rtx for a constant vector. */ |
| #define CONST_VECTOR_P(X) (GET_CODE (X) == CONST_VECTOR) |
| |
| /* Predicate yielding nonzero iff X is a label insn. */ |
| #define LABEL_P(X) (GET_CODE (X) == CODE_LABEL) |
| |
| /* Predicate yielding nonzero iff X is a jump insn. */ |
| #define JUMP_P(X) (GET_CODE (X) == JUMP_INSN) |
| |
| /* Predicate yielding nonzero iff X is a call insn. */ |
| #define CALL_P(X) (GET_CODE (X) == CALL_INSN) |
| |
/* 1 if RTX is a call_insn for a fake call.
   A CALL_INSN uses the "used" flag to indicate a fake call. */
| #define FAKE_CALL_P(RTX) \ |
| (RTL_FLAG_CHECK1 ("FAKE_CALL_P", (RTX), CALL_INSN)->used) |
| |
| /* Predicate yielding nonzero iff X is an insn that cannot jump. */ |
| #define NONJUMP_INSN_P(X) (GET_CODE (X) == INSN) |
| |
| /* Predicate yielding nonzero iff X is a debug note/insn. */ |
| #define DEBUG_INSN_P(X) (GET_CODE (X) == DEBUG_INSN) |
| |
| /* Predicate yielding nonzero iff X is an insn that is not a debug insn. */ |
| #define NONDEBUG_INSN_P(X) (NONJUMP_INSN_P (X) || JUMP_P (X) || CALL_P (X)) |
| |
| /* Nonzero if DEBUG_MARKER_INSN_P may possibly hold. */ |
| #define MAY_HAVE_DEBUG_MARKER_INSNS debug_nonbind_markers_p |
| /* Nonzero if DEBUG_BIND_INSN_P may possibly hold. */ |
| #define MAY_HAVE_DEBUG_BIND_INSNS flag_var_tracking_assignments |
| /* Nonzero if DEBUG_INSN_P may possibly hold. */ |
| #define MAY_HAVE_DEBUG_INSNS \ |
| (MAY_HAVE_DEBUG_MARKER_INSNS || MAY_HAVE_DEBUG_BIND_INSNS) |
| |
| /* Predicate yielding nonzero iff X is a real insn. */ |
| #define INSN_P(X) (NONDEBUG_INSN_P (X) || DEBUG_INSN_P (X)) |
| |
| /* Predicate yielding nonzero iff X is a note insn. */ |
| #define NOTE_P(X) (GET_CODE (X) == NOTE) |
| |
| /* Predicate yielding nonzero iff X is a barrier insn. */ |
| #define BARRIER_P(X) (GET_CODE (X) == BARRIER) |
| |
/* Predicate yielding nonzero iff X is the data for a jump table. */
| #define JUMP_TABLE_DATA_P(INSN) (GET_CODE (INSN) == JUMP_TABLE_DATA) |
| |
| /* Predicate yielding nonzero iff RTX is a subreg. */ |
| #define SUBREG_P(RTX) (GET_CODE (RTX) == SUBREG) |
| |
| /* Predicate yielding true iff RTX is a symbol ref. */ |
| #define SYMBOL_REF_P(RTX) (GET_CODE (RTX) == SYMBOL_REF) |
| |
| template <> |
| template <> |
| inline bool |
| is_a_helper <rtx_insn *>::test (rtx rt) |
| { |
| return (INSN_P (rt) |
| || NOTE_P (rt) |
| || JUMP_TABLE_DATA_P (rt) |
| || BARRIER_P (rt) |
| || LABEL_P (rt)); |
| } |
| |
| template <> |
| template <> |
| inline bool |
| is_a_helper <const rtx_insn *>::test (const_rtx rt) |
| { |
| return (INSN_P (rt) |
| || NOTE_P (rt) |
| || JUMP_TABLE_DATA_P (rt) |
| || BARRIER_P (rt) |
| || LABEL_P (rt)); |
| } |
| |
| template <> |
| template <> |
| inline bool |
| is_a_helper <rtx_debug_insn *>::test (rtx rt) |
| { |
| return DEBUG_INSN_P (rt); |
| } |
| |
| template <> |
| template <> |
| inline bool |
| is_a_helper <rtx_nonjump_insn *>::test (rtx rt) |
| { |
| return NONJUMP_INSN_P (rt); |
| } |
| |
| template <> |
| template <> |
| inline bool |
| is_a_helper <rtx_jump_insn *>::test (rtx rt) |
| { |
| return JUMP_P (rt); |
| } |
| |
| template <> |
| template <> |
| inline bool |
| is_a_helper <rtx_jump_insn *>::test (rtx_insn *insn) |
| { |
| return JUMP_P (insn); |
| } |
| |
| template <> |
| template <> |
| inline bool |
| is_a_helper <rtx_call_insn *>::test (rtx rt) |
| { |
| return CALL_P (rt); |
| } |
| |
| template <> |
| template <> |
| inline bool |
| is_a_helper <rtx_call_insn *>::test (rtx_insn *insn) |
| { |
| return CALL_P (insn); |
| } |
| |
| template <> |
| template <> |
| inline bool |
| is_a_helper <rtx_jump_table_data *>::test (rtx rt) |
| { |
| return JUMP_TABLE_DATA_P (rt); |
| } |
| |
| template <> |
| template <> |
| inline bool |
| is_a_helper <rtx_jump_table_data *>::test (rtx_insn *insn) |
| { |
| return JUMP_TABLE_DATA_P (insn); |
| } |
| |
| template <> |
| template <> |
| inline bool |
| is_a_helper <rtx_barrier *>::test (rtx rt) |
| { |
| return BARRIER_P (rt); |
| } |
| |
| template <> |
| template <> |
| inline bool |
| is_a_helper <rtx_code_label *>::test (rtx rt) |
| { |
| return LABEL_P (rt); |
| } |
| |
| template <> |
| template <> |
| inline bool |
| is_a_helper <rtx_code_label *>::test (rtx_insn *insn) |
| { |
| return LABEL_P (insn); |
| } |
| |
| template <> |
| template <> |
| inline bool |
| is_a_helper <rtx_note *>::test (rtx rt) |
| { |
| return NOTE_P (rt); |
| } |
| |
| template <> |
| template <> |
| inline bool |
| is_a_helper <rtx_note *>::test (rtx_insn *insn) |
| { |
| return NOTE_P (insn); |
| } |
| |
| /* Predicate yielding nonzero iff X is a return or simple_return. */ |
| #define ANY_RETURN_P(X) \ |
| (GET_CODE (X) == RETURN || GET_CODE (X) == SIMPLE_RETURN) |
| |
| /* 1 if X is a unary operator. */ |
| |
| #define UNARY_P(X) \ |
| (GET_RTX_CLASS (GET_CODE (X)) == RTX_UNARY) |
| |
| /* 1 if X is a binary operator. */ |
| |
| #define BINARY_P(X) \ |
| ((GET_RTX_CLASS (GET_CODE (X)) & RTX_BINARY_MASK) == RTX_BINARY_RESULT) |
| |
| /* 1 if X is an arithmetic operator. */ |
| |
| #define ARITHMETIC_P(X) \ |
| ((GET_RTX_CLASS (GET_CODE (X)) & RTX_ARITHMETIC_MASK) \ |
| == RTX_ARITHMETIC_RESULT) |
| |
/* 1 if X is a commutative arithmetic operator. */
| |
| #define COMMUTATIVE_ARITH_P(X) \ |
| (GET_RTX_CLASS (GET_CODE (X)) == RTX_COMM_ARITH) |
| |
| /* 1 if X is a commutative arithmetic operator or a comparison operator. |
| These two are sometimes selected together because it is possible to |
| swap the two operands. */ |
| |
| #define SWAPPABLE_OPERANDS_P(X) \ |
| ((1 << GET_RTX_CLASS (GET_CODE (X))) \ |
| & ((1 << RTX_COMM_ARITH) | (1 << RTX_COMM_COMPARE) \ |
| | (1 << RTX_COMPARE))) |
| |
| /* 1 if X is a non-commutative operator. */ |
| |
| #define NON_COMMUTATIVE_P(X) \ |
| ((GET_RTX_CLASS (GET_CODE (X)) & RTX_COMMUTATIVE_MASK) \ |
| == RTX_NON_COMMUTATIVE_RESULT) |
| |
/* 1 if X is a commutative operator. */
| |
| #define COMMUTATIVE_P(X) \ |
| ((GET_RTX_CLASS (GET_CODE (X)) & RTX_COMMUTATIVE_MASK) \ |
| == RTX_COMMUTATIVE_RESULT) |
| |
| /* 1 if X is a relational operator. */ |
| |
| #define COMPARISON_P(X) \ |
| ((GET_RTX_CLASS (GET_CODE (X)) & RTX_COMPARE_MASK) == RTX_COMPARE_RESULT) |
| |
/* 1 if X is a constant value. */
| |
| #define CONSTANT_P(X) \ |
| (GET_RTX_CLASS (GET_CODE (X)) == RTX_CONST_OBJ) |
| |
| /* 1 if X is a LABEL_REF. */ |
| #define LABEL_REF_P(X) \ |
| (GET_CODE (X) == LABEL_REF) |
| |
| /* 1 if X can be used to represent an object. */ |
| #define OBJECT_P(X) \ |
| ((GET_RTX_CLASS (GET_CODE (X)) & RTX_OBJ_MASK) == RTX_OBJ_RESULT) |
| |
| /* General accessor macros for accessing the fields of an rtx. */ |
| |
| #if defined ENABLE_RTL_CHECKING && (GCC_VERSION >= 2007) |
/* The `*' outside the statement expression and the `&' inside let each
   macro yield an lvalue while evaluating RTX and N only once. */
| #define RTL_CHECK1(RTX, N, C1) __extension__ \ |
| (*({ __typeof (RTX) const _rtx = (RTX); const int _n = (N); \ |
| const enum rtx_code _code = GET_CODE (_rtx); \ |
| if (_n < 0 || _n >= GET_RTX_LENGTH (_code)) \ |
| rtl_check_failed_bounds (_rtx, _n, __FILE__, __LINE__, \ |
| __FUNCTION__); \ |
| if (GET_RTX_FORMAT (_code)[_n] != C1) \ |
| rtl_check_failed_type1 (_rtx, _n, C1, __FILE__, __LINE__, \ |
| __FUNCTION__); \ |
| &_rtx->u.fld[_n]; })) |
| |
| #define RTL_CHECK2(RTX, N, C1, C2) __extension__ \ |
| (*({ __typeof (RTX) const _rtx = (RTX); const int _n = (N); \ |
| const enum rtx_code _code = GET_CODE (_rtx); \ |
| if (_n < 0 || _n >= GET_RTX_LENGTH (_code)) \ |
| rtl_check_failed_bounds (_rtx, _n, __FILE__, __LINE__, \ |
| __FUNCTION__); \ |
| if (GET_RTX_FORMAT (_code)[_n] != C1 \ |
| && GET_RTX_FORMAT (_code)[_n] != C2) \ |
| rtl_check_failed_type2 (_rtx, _n, C1, C2, __FILE__, __LINE__, \ |
| __FUNCTION__); \ |
| &_rtx->u.fld[_n]; })) |
| |
| #define RTL_CHECKC1(RTX, N, C) __extension__ \ |
| (*({ __typeof (RTX) const _rtx = (RTX); const int _n = (N); \ |
| if (GET_CODE (_rtx) != (C)) \ |
| rtl_check_failed_code1 (_rtx, (C), __FILE__, __LINE__, \ |
| __FUNCTION__); \ |
| &_rtx->u.fld[_n]; })) |
| |
| #define RTL_CHECKC2(RTX, N, C1, C2) __extension__ \ |
| (*({ __typeof (RTX) const _rtx = (RTX); const int _n = (N); \ |
| const enum rtx_code _code = GET_CODE (_rtx); \ |
| if (_code != (C1) && _code != (C2)) \ |
| rtl_check_failed_code2 (_rtx, (C1), (C2), __FILE__, __LINE__, \ |
| __FUNCTION__); \ |
| &_rtx->u.fld[_n]; })) |
| |
| #define RTL_CHECKC3(RTX, N, C1, C2, C3) __extension__ \ |
| (*({ __typeof (RTX) const _rtx = (RTX); const int _n = (N); \ |
| const enum rtx_code _code = GET_CODE (_rtx); \ |
| if (_code != (C1) && _code != (C2) && _code != (C3)) \ |
| rtl_check_failed_code3 (_rtx, (C1), (C2), (C3), __FILE__, \ |
| __LINE__, __FUNCTION__); \ |
| &_rtx->u.fld[_n]; })) |
| |
| #define RTVEC_ELT(RTVEC, I) __extension__ \ |
| (*({ __typeof (RTVEC) const _rtvec = (RTVEC); const int _i = (I); \ |
| if (_i < 0 || _i >= GET_NUM_ELEM (_rtvec)) \ |
| rtvec_check_failed_bounds (_rtvec, _i, __FILE__, __LINE__, \ |
| __FUNCTION__); \ |
| &_rtvec->elem[_i]; })) |
| |
| #define XWINT(RTX, N) __extension__ \ |
| (*({ __typeof (RTX) const _rtx = (RTX); const int _n = (N); \ |
| const enum rtx_code _code = GET_CODE (_rtx); \ |
| if (_n < 0 || _n >= GET_RTX_LENGTH (_code)) \ |
| rtl_check_failed_bounds (_rtx, _n, __FILE__, __LINE__, \ |
| __FUNCTION__); \ |
| if (GET_RTX_FORMAT (_code)[_n] != 'w') \ |
| rtl_check_failed_type1 (_rtx, _n, 'w', __FILE__, __LINE__, \ |
| __FUNCTION__); \ |
| &_rtx->u.hwint[_n]; })) |
| |
| #define CWI_ELT(RTX, I) __extension__ \ |
| (*({ __typeof (RTX) const _cwi = (RTX); \ |
| int _max = CWI_GET_NUM_ELEM (_cwi); \ |
| const int _i = (I); \ |
| if (_i < 0 || _i >= _max) \ |
| cwi_check_failed_bounds (_cwi, _i, __FILE__, __LINE__, \ |
| __FUNCTION__); \ |
| &_cwi->u.hwiv.elem[_i]; })) |
| |
| #define XCWINT(RTX, N, C) __extension__ \ |
| (*({ __typeof (RTX) const _rtx = (RTX); \ |
| if (GET_CODE (_rtx) != (C)) \ |
| rtl_check_failed_code1 (_rtx, (C), __FILE__, __LINE__, \ |
| __FUNCTION__); \ |
| &_rtx->u.hwint[N]; })) |
| |
#define XCMWINT(RTX, N, C, M) __extension__				\
(*({ __typeof (RTX) const _rtx = (RTX);				\
     if (GET_CODE (_rtx) != (C) || GET_MODE (_rtx) != (M))		\
       rtl_check_failed_code_mode (_rtx, (C), (M), false, __FILE__,	\
				   __LINE__, __FUNCTION__);		\
     &_rtx->u.hwint[N]; }))

/* Like XCMWINT, but checking that the mode of RTX is NOT M, as with
   XCNMPRV below; matches the non-checking XCNMWINT definition.  */
#define XCNMWINT(RTX, N, C, M) __extension__				\
(*({ __typeof (RTX) const _rtx = (RTX);				\
     if (GET_CODE (_rtx) != (C) || GET_MODE (_rtx) == (M))		\
       rtl_check_failed_code_mode (_rtx, (C), (M), true, __FILE__,	\
				   __LINE__, __FUNCTION__);		\
     &_rtx->u.hwint[N]; }))
| |
| #define XCNMPRV(RTX, C, M) __extension__ \ |
| ({ __typeof (RTX) const _rtx = (RTX); \ |
| if (GET_CODE (_rtx) != (C) || GET_MODE (_rtx) == (M)) \ |
| rtl_check_failed_code_mode (_rtx, (C), (M), true, __FILE__, \ |
| __LINE__, __FUNCTION__); \ |
| &_rtx->u.rv; }) |
| |
| #define XCNMPFV(RTX, C, M) __extension__ \ |
| ({ __typeof (RTX) const _rtx = (RTX); \ |
| if (GET_CODE (_rtx) != (C) || GET_MODE (_rtx) == (M)) \ |
| rtl_check_failed_code_mode (_rtx, (C), (M), true, __FILE__, \ |
| __LINE__, __FUNCTION__); \ |
| &_rtx->u.fv; }) |
| |
| #define REG_CHECK(RTX) __extension__ \ |
| ({ __typeof (RTX) const _rtx = (RTX); \ |
| if (GET_CODE (_rtx) != REG) \ |
| rtl_check_failed_code1 (_rtx, REG, __FILE__, __LINE__, \ |
| __FUNCTION__); \ |
| &_rtx->u.reg; }) |
| |
| #define BLOCK_SYMBOL_CHECK(RTX) __extension__ \ |
| ({ __typeof (RTX) const _symbol = (RTX); \ |
| const unsigned int flags = SYMBOL_REF_FLAGS (_symbol); \ |
| if ((flags & SYMBOL_FLAG_HAS_BLOCK_INFO) == 0) \ |
| rtl_check_failed_block_symbol (__FILE__, __LINE__, \ |
| __FUNCTION__); \ |
| &_symbol->u.block_sym; }) |
| |
| #define HWIVEC_CHECK(RTX,C) __extension__ \ |
| ({ __typeof (RTX) const _symbol = (RTX); \ |
| RTL_CHECKC1 (_symbol, 0, C); \ |
| &_symbol->u.hwiv; }) |
| |
| extern void rtl_check_failed_bounds (const_rtx, int, const char *, int, |
| const char *) |
| ATTRIBUTE_NORETURN ATTRIBUTE_COLD; |
| extern void rtl_check_failed_type1 (const_rtx, int, int, const char *, int, |
| const char *) |
| ATTRIBUTE_NORETURN ATTRIBUTE_COLD; |
| extern void rtl_check_failed_type2 (const_rtx, int, int, int, const char *, |
| int, const char *) |
| ATTRIBUTE_NORETURN ATTRIBUTE_COLD; |
| extern void rtl_check_failed_code1 (const_rtx, enum rtx_code, const char *, |
| int, const char *) |
| ATTRIBUTE_NORETURN ATTRIBUTE_COLD; |
| extern void rtl_check_failed_code2 (const_rtx, enum rtx_code, enum rtx_code, |
| const char *, int, const char *) |
| ATTRIBUTE_NORETURN ATTRIBUTE_COLD; |
| extern void rtl_check_failed_code3 (const_rtx, enum rtx_code, enum rtx_code, |
| enum rtx_code, const char *, int, |
| const char *) |
| ATTRIBUTE_NORETURN ATTRIBUTE_COLD; |
| extern void rtl_check_failed_code_mode (const_rtx, enum rtx_code, machine_mode, |
| bool, const char *, int, const char *) |
| ATTRIBUTE_NORETURN ATTRIBUTE_COLD; |
| extern void rtl_check_failed_block_symbol (const char *, int, const char *) |
| ATTRIBUTE_NORETURN ATTRIBUTE_COLD; |
| extern void cwi_check_failed_bounds (const_rtx, int, const char *, int, |
| const char *) |
| ATTRIBUTE_NORETURN ATTRIBUTE_COLD; |
| extern void rtvec_check_failed_bounds (const_rtvec, int, const char *, int, |
| const char *) |
| ATTRIBUTE_NORETURN ATTRIBUTE_COLD; |
| |
| #else /* not ENABLE_RTL_CHECKING */ |
| |
| #define RTL_CHECK1(RTX, N, C1) ((RTX)->u.fld[N]) |
| #define RTL_CHECK2(RTX, N, C1, C2) ((RTX)->u.fld[N]) |
| #define RTL_CHECKC1(RTX, N, C) ((RTX)->u.fld[N]) |
| #define RTL_CHECKC2(RTX, N, C1, C2) ((RTX)->u.fld[N]) |
| #define RTL_CHECKC3(RTX, N, C1, C2, C3) ((RTX)->u.fld[N]) |
| #define RTVEC_ELT(RTVEC, I) ((RTVEC)->elem[I]) |
| #define XWINT(RTX, N) ((RTX)->u.hwint[N]) |
| #define CWI_ELT(RTX, I) ((RTX)->u.hwiv.elem[I]) |
| #define XCWINT(RTX, N, C) ((RTX)->u.hwint[N]) |
| #define XCMWINT(RTX, N, C, M) ((RTX)->u.hwint[N]) |
| #define XCNMWINT(RTX, N, C, M) ((RTX)->u.hwint[N]) |
| #define XCNMPRV(RTX, C, M) (&(RTX)->u.rv) |
| #define XCNMPFV(RTX, C, M) (&(RTX)->u.fv) |
| #define REG_CHECK(RTX) (&(RTX)->u.reg) |
| #define BLOCK_SYMBOL_CHECK(RTX) (&(RTX)->u.block_sym) |
| #define HWIVEC_CHECK(RTX,C) (&(RTX)->u.hwiv) |
| |
| #endif |
| |
| /* General accessor macros for accessing the flags of an rtx. */ |
| |
| /* Access an individual rtx flag, with no checking of any kind. */ |
| #define RTX_FLAG(RTX, FLAG) ((RTX)->FLAG) |
| |
| #if defined ENABLE_RTL_FLAG_CHECKING && (GCC_VERSION >= 2007) |
| #define RTL_FLAG_CHECK1(NAME, RTX, C1) __extension__ \ |
| ({ __typeof (RTX) const _rtx = (RTX); \ |
| if (GET_CODE (_rtx) != C1) \ |
| rtl_check_failed_flag (NAME, _rtx, __FILE__, __LINE__, \ |
| __FUNCTION__); \ |
| _rtx; }) |
| |
| #define RTL_FLAG_CHECK2(NAME, RTX, C1, C2) __extension__ \ |
| ({ __typeof (RTX) const _rtx = (RTX); \ |
| if (GET_CODE (_rtx) != C1 && GET_CODE(_rtx) != C2) \ |
| rtl_check_failed_flag (NAME,_rtx, __FILE__, __LINE__, \ |
| __FUNCTION__); \ |
| _rtx; }) |
| |
| #define RTL_FLAG_CHECK3(NAME, RTX, C1, C2, C3) __extension__ \ |
| ({ __typeof (RTX) const _rtx = (RTX); \ |
| if (GET_CODE (_rtx) != C1 && GET_CODE(_rtx) != C2 \ |
| && GET_CODE (_rtx) != C3) \ |
| rtl_check_failed_flag (NAME, _rtx, __FILE__, __LINE__, \ |
| __FUNCTION__); \ |
| _rtx; }) |
| |
| #define RTL_FLAG_CHECK4(NAME, RTX, C1, C2, C3, C4) __extension__ \ |
| ({ __typeof (RTX) const _rtx = (RTX); \ |
| if (GET_CODE (_rtx) != C1 && GET_CODE(_rtx) != C2 \ |
| && GET_CODE (_rtx) != C3 && GET_CODE(_rtx) != C4) \ |
| rtl_check_failed_flag (NAME, _rtx, __FILE__, __LINE__, \ |
| __FUNCTION__); \ |
| _rtx; }) |
| |
| #define RTL_FLAG_CHECK5(NAME, RTX, C1, C2, C3, C4, C5) __extension__ \ |
| ({ __typeof (RTX) const _rtx = (RTX); \ |
| if (GET_CODE (_rtx) != C1 && GET_CODE (_rtx) != C2 \ |
| && GET_CODE (_rtx) != C3 && GET_CODE (_rtx) != C4 \ |
| && GET_CODE (_rtx) != C5) \ |
| rtl_check_failed_flag (NAME, _rtx, __FILE__, __LINE__, \ |
| __FUNCTION__); \ |
| _rtx; }) |
| |
| #define RTL_FLAG_CHECK6(NAME, RTX, C1, C2, C3, C4, C5, C6) \ |
| __extension__ \ |
| ({ __typeof (RTX) const _rtx = (RTX); \ |
| if (GET_CODE (_rtx) != C1 && GET_CODE (_rtx) != C2 \ |
| && GET_CODE (_rtx) != C3 && GET_CODE (_rtx) != C4 \ |
| && GET_CODE (_rtx) != C5 && GET_CODE (_rtx) != C6) \ |
| rtl_check_failed_flag (NAME,_rtx, __FILE__, __LINE__, \ |
| __FUNCTION__); \ |
| _rtx; }) |
| |
| #define RTL_FLAG_CHECK7(NAME, RTX, C1, C2, C3, C4, C5, C6, C7) \ |
| __extension__ \ |
| ({ __typeof (RTX) const _rtx = (RTX); \ |
| if (GET_CODE (_rtx) != C1 && GET_CODE (_rtx) != C2 \ |
| && GET_CODE (_rtx) != C3 && GET_CODE (_rtx) != C4 \ |
| && GET_CODE (_rtx) != C5 && GET_CODE (_rtx) != C6 \ |
| && GET_CODE (_rtx) != C7) \ |
| rtl_check_failed_flag (NAME, _rtx, __FILE__, __LINE__, \ |
| __FUNCTION__); \ |
| _rtx; }) |
| |
| #define RTL_INSN_CHAIN_FLAG_CHECK(NAME, RTX) \ |
| __extension__ \ |
| ({ __typeof (RTX) const _rtx = (RTX); \ |
| if (!INSN_CHAIN_CODE_P (GET_CODE (_rtx))) \ |
| rtl_check_failed_flag (NAME, _rtx, __FILE__, __LINE__, \ |
| __FUNCTION__); \ |
| _rtx; }) |
| |
extern void rtl_check_failed_flag (const char *, const_rtx, const char *,
				   int, const char *)
     ATTRIBUTE_NORETURN ATTRIBUTE_COLD;
| |
| #else /* not ENABLE_RTL_FLAG_CHECKING */ |
| |
| #define RTL_FLAG_CHECK1(NAME, RTX, C1) (RTX) |
| #define RTL_FLAG_CHECK2(NAME, RTX, C1, C2) (RTX) |
| #define RTL_FLAG_CHECK3(NAME, RTX, C1, C2, C3) (RTX) |
| #define RTL_FLAG_CHECK4(NAME, RTX, C1, C2, C3, C4) (RTX) |
| #define RTL_FLAG_CHECK5(NAME, RTX, C1, C2, C3, C4, C5) (RTX) |
| #define RTL_FLAG_CHECK6(NAME, RTX, C1, C2, C3, C4, C5, C6) (RTX) |
| #define RTL_FLAG_CHECK7(NAME, RTX, C1, C2, C3, C4, C5, C6, C7) (RTX) |
| #define RTL_INSN_CHAIN_FLAG_CHECK(NAME, RTX) (RTX) |
| #endif |
| |
| #define XINT(RTX, N) (RTL_CHECK2 (RTX, N, 'i', 'n').rt_int) |
| #define XUINT(RTX, N) (RTL_CHECK2 (RTX, N, 'i', 'n').rt_uint) |
| #define XSTR(RTX, N) (RTL_CHECK2 (RTX, N, 's', 'S').rt_str) |
| #define XEXP(RTX, N) (RTL_CHECK2 (RTX, N, 'e', 'u').rt_rtx) |
| #define XVEC(RTX, N) (RTL_CHECK2 (RTX, N, 'E', 'V').rt_rtvec) |
| #define XMODE(RTX, N) (RTL_CHECK1 (RTX, N, 'M').rt_type) |
| #define XTREE(RTX, N) (RTL_CHECK1 (RTX, N, 't').rt_tree) |
| #define XBBDEF(RTX, N) (RTL_CHECK1 (RTX, N, 'B').rt_bb) |
| #define XTMPL(RTX, N) (RTL_CHECK1 (RTX, N, 'T').rt_str) |
| #define XCFI(RTX, N) (RTL_CHECK1 (RTX, N, 'C').rt_cfi) |
| |
| #define XVECEXP(RTX, N, M) RTVEC_ELT (XVEC (RTX, N), M) |
| #define XVECLEN(RTX, N) GET_NUM_ELEM (XVEC (RTX, N)) |
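
/* For illustration: if X is (plus:SI (reg:SI 1) (const_int 42)), whose
   rtl.def format string is "ee", then XEXP (X, 0) is the REG and
   XEXP (X, 1) is the CONST_INT.  */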
| |
| /* These are like XINT, etc. except that they expect a '0' field instead |
| of the normal type code. */ |
| |
| #define X0INT(RTX, N) (RTL_CHECK1 (RTX, N, '0').rt_int) |
| #define X0UINT(RTX, N) (RTL_CHECK1 (RTX, N, '0').rt_uint) |
| #define X0STR(RTX, N) (RTL_CHECK1 (RTX, N, '0').rt_str) |
| #define X0EXP(RTX, N) (RTL_CHECK1 (RTX, N, '0').rt_rtx) |
| #define X0VEC(RTX, N) (RTL_CHECK1 (RTX, N, '0').rt_rtvec) |
| #define X0MODE(RTX, N) (RTL_CHECK1 (RTX, N, '0').rt_type) |
| #define X0TREE(RTX, N) (RTL_CHECK1 (RTX, N, '0').rt_tree) |
| #define X0BBDEF(RTX, N) (RTL_CHECK1 (RTX, N, '0').rt_bb) |
| #define X0ADVFLAGS(RTX, N) (RTL_CHECK1 (RTX, N, '0').rt_addr_diff_vec_flags) |
| #define X0CSELIB(RTX, N) (RTL_CHECK1 (RTX, N, '0').rt_cselib) |
| #define X0MEMATTR(RTX, N) (RTL_CHECKC1 (RTX, N, MEM).rt_mem) |
| #define X0CONSTANT(RTX, N) (RTL_CHECK1 (RTX, N, '0').rt_constant) |
| |
| /* Access a '0' field with any type. */ |
| #define X0ANY(RTX, N) RTL_CHECK1 (RTX, N, '0') |
| |
| #define XCINT(RTX, N, C) (RTL_CHECKC1 (RTX, N, C).rt_int) |
| #define XCUINT(RTX, N, C) (RTL_CHECKC1 (RTX, N, C).rt_uint) |
| #define XCSUBREG(RTX, N, C) (RTL_CHECKC1 (RTX, N, C).rt_subreg) |
| #define XCSTR(RTX, N, C) (RTL_CHECKC1 (RTX, N, C).rt_str) |
| #define XCEXP(RTX, N, C) (RTL_CHECKC1 (RTX, N, C).rt_rtx) |
| #define XCVEC(RTX, N, C) (RTL_CHECKC1 (RTX, N, C).rt_rtvec) |
| #define XCMODE(RTX, N, C) (RTL_CHECKC1 (RTX, N, C).rt_type) |
| #define XCTREE(RTX, N, C) (RTL_CHECKC1 (RTX, N, C).rt_tree) |
| #define XCBBDEF(RTX, N, C) (RTL_CHECKC1 (RTX, N, C).rt_bb) |
| #define XCCFI(RTX, N, C) (RTL_CHECKC1 (RTX, N, C).rt_cfi) |
| #define XCCSELIB(RTX, N, C) (RTL_CHECKC1 (RTX, N, C).rt_cselib) |
| |
| #define XCVECEXP(RTX, N, M, C) RTVEC_ELT (XCVEC (RTX, N, C), M) |
| #define XCVECLEN(RTX, N, C) GET_NUM_ELEM (XCVEC (RTX, N, C)) |
| |
| #define XC2EXP(RTX, N, C1, C2) (RTL_CHECKC2 (RTX, N, C1, C2).rt_rtx) |
| #define XC3EXP(RTX, N, C1, C2, C3) (RTL_CHECKC3 (RTX, N, C1, C2, C3).rt_rtx) |
| |
| |
| /* Methods of rtx_expr_list. */ |
| |
| inline rtx_expr_list *rtx_expr_list::next () const |
| { |
| rtx tmp = XEXP (this, 1); |
| return safe_as_a <rtx_expr_list *> (tmp); |
| } |
| |
| inline rtx rtx_expr_list::element () const |
| { |
| return XEXP (this, 0); |
| } |
| |
| /* Methods of rtx_insn_list. */ |
| |
| inline rtx_insn_list *rtx_insn_list::next () const |
| { |
| rtx tmp = XEXP (this, 1); |
| return safe_as_a <rtx_insn_list *> (tmp); |
| } |
| |
| inline rtx_insn *rtx_insn_list::insn () const |
| { |
| rtx tmp = XEXP (this, 0); |
| return safe_as_a <rtx_insn *> (tmp); |
| } |
| |
| /* Methods of rtx_sequence. */ |
| |
| inline int rtx_sequence::len () const |
| { |
| return XVECLEN (this, 0); |
| } |
| |
| inline rtx rtx_sequence::element (int index) const |
| { |
| return XVECEXP (this, 0, index); |
| } |
| |
| inline rtx_insn *rtx_sequence::insn (int index) const |
| { |
| return as_a <rtx_insn *> (XVECEXP (this, 0, index)); |
| } |
| |
| /* ACCESS MACROS for particular fields of insns. */ |
| |
| /* Holds a unique number for each insn. |
| These are not necessarily sequentially increasing. */ |
| inline int INSN_UID (const_rtx insn) |
| { |
| return RTL_INSN_CHAIN_FLAG_CHECK ("INSN_UID", |
| (insn))->u2.insn_uid; |
| } |
| inline int& INSN_UID (rtx insn) |
| { |
| return RTL_INSN_CHAIN_FLAG_CHECK ("INSN_UID", |
| (insn))->u2.insn_uid; |
| } |
| |
| /* Chain insns together in sequence. */ |
| |
| /* For now these are split in two: an rvalue form: |
| PREV_INSN/NEXT_INSN |
| and an lvalue form: |
| SET_NEXT_INSN/SET_PREV_INSN. */ |
| |
| inline rtx_insn *PREV_INSN (const rtx_insn *insn) |
| { |
| rtx prev = XEXP (insn, 0); |
| return safe_as_a <rtx_insn *> (prev); |
| } |
| |
| inline rtx& SET_PREV_INSN (rtx_insn *insn) |
| { |
| return XEXP (insn, 0); |
| } |
| |
| inline rtx_insn *NEXT_INSN (const rtx_insn *insn) |
| { |
| rtx next = XEXP (insn, 1); |
| return safe_as_a <rtx_insn *> (next); |
| } |
| |
| inline rtx& SET_NEXT_INSN (rtx_insn *insn) |
| { |
| return XEXP (insn, 1); |
| } |
| |
| inline basic_block BLOCK_FOR_INSN (const_rtx insn) |
| { |
| return XBBDEF (insn, 2); |
| } |
| |
| inline basic_block& BLOCK_FOR_INSN (rtx insn) |
| { |
| return XBBDEF (insn, 2); |
| } |
| |
| inline void set_block_for_insn (rtx_insn *insn, basic_block bb) |
| { |
| BLOCK_FOR_INSN (insn) = bb; |
| } |
| |
| /* The body of an insn. */ |
| inline rtx PATTERN (const_rtx insn) |
| { |
| return XEXP (insn, 3); |
| } |
| |
| inline rtx& PATTERN (rtx insn) |
| { |
| return XEXP (insn, 3); |
| } |
| |
| inline unsigned int INSN_LOCATION (const rtx_insn *insn) |
| { |
| return XUINT (insn, 4); |
| } |
| |
| inline unsigned int& INSN_LOCATION (rtx_insn *insn) |
| { |
| return XUINT (insn, 4); |
| } |
| |
| inline bool INSN_HAS_LOCATION (const rtx_insn *insn) |
| { |
| return LOCATION_LOCUS (INSN_LOCATION (insn)) != UNKNOWN_LOCATION; |
| } |
| |
| /* LOCATION of an RTX if relevant. */ |
| #define RTL_LOCATION(X) (INSN_P (X) ? \ |
| INSN_LOCATION (as_a <rtx_insn *> (X)) \ |
| : UNKNOWN_LOCATION) |
| |
| /* Code number of instruction, from when it was recognized. |
| -1 means this instruction has not been recognized yet. */ |
| #define INSN_CODE(INSN) XINT (INSN, 5) |
| |
| inline rtvec rtx_jump_table_data::get_labels () const |
| { |
| rtx pat = PATTERN (this); |
| if (GET_CODE (pat) == ADDR_VEC) |
| return XVEC (pat, 0); |
| else |
| return XVEC (pat, 1); /* presumably an ADDR_DIFF_VEC */ |
| } |
| |
| /* Return the mode of the data in the table, which is always a scalar |
| integer. */ |
| |
| inline scalar_int_mode |
| rtx_jump_table_data::get_data_mode () const |
| { |
| return as_a <scalar_int_mode> (GET_MODE (PATTERN (this))); |
| } |
| |
| /* If LABEL is followed by a jump table, return the table, otherwise |
| return null. */ |
| |
| inline rtx_jump_table_data * |
| jump_table_for_label (const rtx_code_label *label) |
| { |
| return safe_dyn_cast <rtx_jump_table_data *> (NEXT_INSN (label)); |
| } |
| |
| #define RTX_FRAME_RELATED_P(RTX) \ |
| (RTL_FLAG_CHECK6 ("RTX_FRAME_RELATED_P", (RTX), DEBUG_INSN, INSN, \ |
| CALL_INSN, JUMP_INSN, BARRIER, SET)->frame_related) |
| |
| /* 1 if JUMP RTX is a crossing jump. */ |
| #define CROSSING_JUMP_P(RTX) \ |
| (RTL_FLAG_CHECK1 ("CROSSING_JUMP_P", (RTX), JUMP_INSN)->jump) |
| |
| /* 1 if RTX is a call to a const function. Built from ECF_CONST and |
| TREE_READONLY. */ |
| #define RTL_CONST_CALL_P(RTX) \ |
| (RTL_FLAG_CHECK1 ("RTL_CONST_CALL_P", (RTX), CALL_INSN)->unchanging) |
| |
| /* 1 if RTX is a call to a pure function. Built from ECF_PURE and |
| DECL_PURE_P. */ |
| #define RTL_PURE_CALL_P(RTX) \ |
| (RTL_FLAG_CHECK1 ("RTL_PURE_CALL_P", (RTX), CALL_INSN)->return_val) |
| |
| /* 1 if RTX is a call to a const or pure function. */ |
| #define RTL_CONST_OR_PURE_CALL_P(RTX) \ |
| (RTL_CONST_CALL_P (RTX) || RTL_PURE_CALL_P (RTX)) |
| |
| /* 1 if RTX is a call to a looping const or pure function. Built from |
| ECF_LOOPING_CONST_OR_PURE and DECL_LOOPING_CONST_OR_PURE_P. */ |
| #define RTL_LOOPING_CONST_OR_PURE_CALL_P(RTX) \ |
| (RTL_FLAG_CHECK1 ("CONST_OR_PURE_CALL_P", (RTX), CALL_INSN)->call) |
| |
| /* 1 if RTX is a call_insn for a sibling call. */ |
| #define SIBLING_CALL_P(RTX) \ |
| (RTL_FLAG_CHECK1 ("SIBLING_CALL_P", (RTX), CALL_INSN)->jump) |
| |
/* 1 if RTX is a jump_insn that is an annulling branch. */
| #define INSN_ANNULLED_BRANCH_P(RTX) \ |
| (RTL_FLAG_CHECK1 ("INSN_ANNULLED_BRANCH_P", (RTX), JUMP_INSN)->unchanging) |
| |
| /* 1 if RTX is an insn in a delay slot and is from the target of the branch. |
| If the branch insn has INSN_ANNULLED_BRANCH_P set, this insn should only be |
| executed if the branch is taken. For annulled branches with this bit |
| clear, the insn should be executed only if the branch is not taken. */ |
| #define INSN_FROM_TARGET_P(RTX) \ |
| (RTL_FLAG_CHECK3 ("INSN_FROM_TARGET_P", (RTX), INSN, JUMP_INSN, \ |
| CALL_INSN)->in_struct) |
| |
| /* In an ADDR_DIFF_VEC, the flags for RTX for use by branch shortening. |
| See the comments for ADDR_DIFF_VEC in rtl.def. */ |
| #define ADDR_DIFF_VEC_FLAGS(RTX) X0ADVFLAGS (RTX, 4) |
| |
| /* In a VALUE, the value cselib has assigned to RTX. |
| This is a "struct cselib_val", see cselib.h. */ |
| #define CSELIB_VAL_PTR(RTX) X0CSELIB (RTX, 0) |
| |
| /* Holds a list of notes on what this insn does to various REGs. |
| It is a chain of EXPR_LIST rtx's, where the second operand is the |
| chain pointer and the first operand is the REG being described. |
| The mode field of the EXPR_LIST contains not a real machine mode |
| but a value from enum reg_note. */ |
| #define REG_NOTES(INSN) XEXP(INSN, 6) |
| |
| /* In an ENTRY_VALUE this is the DECL_INCOMING_RTL of the argument in |
| question. */ |
| #define ENTRY_VALUE_EXP(RTX) (RTL_CHECKC1 (RTX, 0, ENTRY_VALUE).rt_rtx) |
| |
| enum reg_note |
| { |
| #define DEF_REG_NOTE(NAME) NAME, |
| #include "reg-notes.def" |
| #undef DEF_REG_NOTE |
| REG_NOTE_MAX |
| }; |
| |
| /* Define macros to extract and insert the reg-note kind in an EXPR_LIST. */ |
| #define REG_NOTE_KIND(LINK) ((enum reg_note) GET_MODE (LINK)) |
| #define PUT_REG_NOTE_KIND(LINK, KIND) \ |
| PUT_MODE_RAW (LINK, (machine_mode) (KIND)) |
| |
| /* Names for REG_NOTE's in EXPR_LIST insn's. */ |
| |
| extern const char * const reg_note_name[]; |
| #define GET_REG_NOTE_NAME(MODE) (reg_note_name[(int) (MODE)]) |
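
/* A typical scan of an insn's notes, here looking for a REG_DEAD note;
   `process' is a hypothetical placeholder for client code:

     for (rtx link = REG_NOTES (insn); link; link = XEXP (link, 1))
       if (REG_NOTE_KIND (link) == REG_DEAD)
	 process (XEXP (link, 0));  */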
| |
| /* This field is only present on CALL_INSNs. It holds a chain of EXPR_LIST of |
| USE, CLOBBER and SET expressions. |
| USE expressions list the registers filled with arguments that |
| are passed to the function. |
| CLOBBER expressions document the registers explicitly clobbered |
| by this CALL_INSN. |
| SET expressions say that the return value of the call (the SET_DEST) |
| is equivalent to a value available before the call (the SET_SRC). |
| This kind of SET is used when the return value is predictable in |
| advance. It is purely an optimisation hint; unlike USEs and CLOBBERs, |
| it does not affect register liveness. |
| |
| Pseudo registers cannot be mentioned in this list. */ |
| #define CALL_INSN_FUNCTION_USAGE(INSN) XEXP(INSN, 7) |
| |
| /* The label-number of a code-label. The assembler label |
| is made from `L' and the label-number printed in decimal. |
| Label numbers are unique in a compilation. */ |
| #define CODE_LABEL_NUMBER(INSN) XINT (INSN, 5) |
| |
| /* In a NOTE that is a line number, this is a string for the file name that the |
| line is in. We use the same field to record block numbers temporarily in |
| NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes. (We avoid lots of casts |
| between ints and pointers if we use a different macro for the block number.) |
| */ |
| |
| /* Opaque data. */ |
| #define NOTE_DATA(INSN) RTL_CHECKC1 (INSN, 3, NOTE) |
| #define NOTE_DELETED_LABEL_NAME(INSN) XCSTR (INSN, 3, NOTE) |
| #define SET_INSN_DELETED(INSN) set_insn_deleted (INSN); |
| #define NOTE_BLOCK(INSN) XCTREE (INSN, 3, NOTE) |
| #define NOTE_EH_HANDLER(INSN) XCINT (INSN, 3, NOTE) |
| #define NOTE_BASIC_BLOCK(INSN) XCBBDEF (INSN, 3, NOTE) |
| #define NOTE_VAR_LOCATION(INSN) XCEXP (INSN, 3, NOTE) |
| #define NOTE_MARKER_LOCATION(INSN) XCUINT (INSN, 3, NOTE) |
| #define NOTE_CFI(INSN) XCCFI (INSN, 3, NOTE) |
| #define NOTE_LABEL_NUMBER(INSN) XCINT (INSN, 3, NOTE) |
| |
| /* In a NOTE that is a line number, this is the line number. |
| Other kinds of NOTEs are identified by negative numbers here. */ |
| #define NOTE_KIND(INSN) XCINT (INSN, 4, NOTE) |
| |
| /* Nonzero if INSN is a note marking the beginning of a basic block. */ |
| #define NOTE_INSN_BASIC_BLOCK_P(INSN) \ |
| (NOTE_P (INSN) && NOTE_KIND (INSN) == NOTE_INSN_BASIC_BLOCK) |
| |
| /* Nonzero if INSN is a debug nonbind marker note, |
| for which NOTE_MARKER_LOCATION can be used. */ |
| #define NOTE_MARKER_P(INSN) \ |
| (NOTE_P (INSN) && \ |
| (NOTE_KIND (INSN) == NOTE_INSN_BEGIN_STMT \ |
| || NOTE_KIND (INSN) == NOTE_INSN_INLINE_ENTRY)) |
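| |
| /* Usage sketch (editorial illustration): a scan over notes that |
|    reacts to the two classifications above: |
| |
|      if (NOTE_INSN_BASIC_BLOCK_P (insn)) |
|        bb = NOTE_BASIC_BLOCK (insn); |
|      else if (NOTE_MARKER_P (insn)) |
|        loc = NOTE_MARKER_LOCATION (insn);  */ |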
| |
| /* Variable declaration and the location of a variable. */ |
| #define PAT_VAR_LOCATION_DECL(PAT) (XCTREE ((PAT), 0, VAR_LOCATION)) |
| #define PAT_VAR_LOCATION_LOC(PAT) (XCEXP ((PAT), 1, VAR_LOCATION)) |
| |
| /* Initialization status of the variable in the location. Status |
| can be unknown, uninitialized or initialized. See enumeration |
| type below. */ |
| #define PAT_VAR_LOCATION_STATUS(PAT) \ |
| (RTL_FLAG_CHECK1 ("PAT_VAR_LOCATION_STATUS", PAT, VAR_LOCATION) \ |
| ->u2.var_location_status) |
| |
| /* Accessors for a NOTE_INSN_VAR_LOCATION. */ |
| #define NOTE_VAR_LOCATION_DECL(NOTE) \ |
| PAT_VAR_LOCATION_DECL (NOTE_VAR_LOCATION (NOTE)) |
| #define NOTE_VAR_LOCATION_LOC(NOTE) \ |
| PAT_VAR_LOCATION_LOC (NOTE_VAR_LOCATION (NOTE)) |
| #define NOTE_VAR_LOCATION_STATUS(NOTE) \ |
| PAT_VAR_LOCATION_STATUS (NOTE_VAR_LOCATION (NOTE)) |
| |
| /* Evaluate to TRUE if INSN is a debug insn that denotes a variable |
| location/value tracking annotation. */ |
| #define DEBUG_BIND_INSN_P(INSN) \ |
| (DEBUG_INSN_P (INSN) \ |
| && (GET_CODE (PATTERN (INSN)) \ |
| == VAR_LOCATION)) |
| /* Evaluate to TRUE if INSN is a debug insn that denotes a program |
| source location marker. */ |
| #define DEBUG_MARKER_INSN_P(INSN) \ |
| (DEBUG_INSN_P (INSN) \ |
| && (GET_CODE (PATTERN (INSN)) \ |
| != VAR_LOCATION)) |
| /* Evaluate to the marker kind. */ |
| #define INSN_DEBUG_MARKER_KIND(INSN) \ |
| (GET_CODE (PATTERN (INSN)) == DEBUG_MARKER \ |
| ? (GET_MODE (PATTERN (INSN)) == VOIDmode \ |
| ? NOTE_INSN_BEGIN_STMT \ |
| : GET_MODE (PATTERN (INSN)) == BLKmode \ |
| ? NOTE_INSN_INLINE_ENTRY \ |
| : (enum insn_note)-1) \ |
| : (enum insn_note)-1) |
| /* Create patterns for debug markers. These and the above abstract |
| the representation, so that it's easier to get rid of the abuse of |
| the mode to hold the marker kind. Other marker types are |
| envisioned, so a single bit flag won't do; maybe separate RTL codes |
| wouldn't be a problem. */ |
| #define GEN_RTX_DEBUG_MARKER_BEGIN_STMT_PAT() \ |
| gen_rtx_DEBUG_MARKER (VOIDmode) |
| #define GEN_RTX_DEBUG_MARKER_INLINE_ENTRY_PAT() \ |
| gen_rtx_DEBUG_MARKER (BLKmode) |
| |
| /* The VAR_LOCATION rtx in a DEBUG_INSN. */ |
| #define INSN_VAR_LOCATION(INSN) \ |
| (RTL_FLAG_CHECK1 ("INSN_VAR_LOCATION", PATTERN (INSN), VAR_LOCATION)) |
| /* A pointer to the VAR_LOCATION rtx in a DEBUG_INSN. */ |
| #define INSN_VAR_LOCATION_PTR(INSN) \ |
| (&PATTERN (INSN)) |
| |
| /* Accessors for a tree-expanded var location debug insn. */ |
| #define INSN_VAR_LOCATION_DECL(INSN) \ |
| PAT_VAR_LOCATION_DECL (INSN_VAR_LOCATION (INSN)) |
| #define INSN_VAR_LOCATION_LOC(INSN) \ |
| PAT_VAR_LOCATION_LOC (INSN_VAR_LOCATION (INSN)) |
| #define INSN_VAR_LOCATION_STATUS(INSN) \ |
| PAT_VAR_LOCATION_STATUS (INSN_VAR_LOCATION (INSN)) |
| |
| /* Expand to the RTL that denotes an unknown variable location in a |
| DEBUG_INSN. */ |
| #define gen_rtx_UNKNOWN_VAR_LOC() (gen_rtx_CLOBBER (VOIDmode, const0_rtx)) |
| |
| /* Determine whether X is such an unknown location. */ |
| #define VAR_LOC_UNKNOWN_P(X) \ |
| (GET_CODE (X) == CLOBBER && XEXP ((X), 0) == const0_rtx) |
| |
| /* 1 if RTX is emitted after a call, but it should take effect before |
| the call returns. */ |
| #define NOTE_DURING_CALL_P(RTX) \ |
| (RTL_FLAG_CHECK1 ("NOTE_VAR_LOCATION_DURING_CALL_P", (RTX), NOTE)->call) |
| |
| /* DEBUG_EXPR_DECL corresponding to a DEBUG_EXPR RTX. */ |
| #define DEBUG_EXPR_TREE_DECL(RTX) XCTREE (RTX, 0, DEBUG_EXPR) |
| |
| /* VAR_DECL/PARM_DECL DEBUG_IMPLICIT_PTR takes address of. */ |
| #define DEBUG_IMPLICIT_PTR_DECL(RTX) XCTREE (RTX, 0, DEBUG_IMPLICIT_PTR) |
| |
| /* PARM_DECL DEBUG_PARAMETER_REF references. */ |
| #define DEBUG_PARAMETER_REF_DECL(RTX) XCTREE (RTX, 0, DEBUG_PARAMETER_REF) |
| |
| /* Codes that appear in the NOTE_KIND field for kinds of notes |
| that are not line numbers. These codes are all negative. |
| |
| Notice that we do not try to use zero here for any of |
| the special note codes because sometimes the source line |
| actually can be zero! This happens (for example) when we |
| are generating code for the per-translation-unit constructor |
| and destructor routines for some C++ translation unit. */ |
| |
| enum insn_note |
| { |
| #define DEF_INSN_NOTE(NAME) NAME, |
| #include "insn-notes.def" |
| #undef DEF_INSN_NOTE |
| |
| NOTE_INSN_MAX |
| }; |
| |
| /* Names for NOTE insn's other than line numbers. */ |
| |
| extern const char * const note_insn_name[NOTE_INSN_MAX]; |
| #define GET_NOTE_INSN_NAME(NOTE_CODE) \ |
| (note_insn_name[(NOTE_CODE)]) |
| |
| /* The name of a label, in case it corresponds to an explicit label |
| in the input source code. */ |
| #define LABEL_NAME(RTX) XCSTR (RTX, 6, CODE_LABEL) |
| |
| /* In jump.cc, each label contains a count of the number |
| of LABEL_REFs that point at it, so unused labels can be deleted. */ |
| #define LABEL_NUSES(RTX) XCINT (RTX, 4, CODE_LABEL) |
| |
| /* Labels carry a two-bit field composed of the ->jump and ->call |
| bits. This field indicates whether the label is an alternate |
| entry point, and if so, what kind. */ |
| enum label_kind |
| { |
| LABEL_NORMAL = 0, /* ordinary label */ |
| LABEL_STATIC_ENTRY, /* alternate entry point, not exported */ |
| LABEL_GLOBAL_ENTRY, /* alternate entry point, exported */ |
| LABEL_WEAK_ENTRY /* alternate entry point, exported as weak symbol */ |
| }; |
| |
| #if defined ENABLE_RTL_FLAG_CHECKING && (GCC_VERSION >= 2007) |
| |
| /* Retrieve the kind of LABEL. */ |
| #define LABEL_KIND(LABEL) __extension__ \ |
| ({ __typeof (LABEL) const _label = (LABEL); \ |
| if (! LABEL_P (_label)) \ |
| rtl_check_failed_flag ("LABEL_KIND", _label, __FILE__, __LINE__, \ |
| __FUNCTION__); \ |
| (enum label_kind) ((_label->jump << 1) | _label->call); }) |
| |
| /* Set the kind of LABEL. */ |
| #define SET_LABEL_KIND(LABEL, KIND) do { \ |
| __typeof (LABEL) const _label = (LABEL); \ |
| const unsigned int _kind = (KIND); \ |
| if (! LABEL_P (_label)) \ |
| rtl_check_failed_flag ("SET_LABEL_KIND", _label, __FILE__, __LINE__, \ |
| __FUNCTION__); \ |
| _label->jump = ((_kind >> 1) & 1); \ |
| _label->call = (_kind & 1); \ |
| } while (0) |
| |
| #else |
| |
| /* Retrieve the kind of LABEL. */ |
| #define LABEL_KIND(LABEL) \ |
| ((enum label_kind) (((LABEL)->jump << 1) | (LABEL)->call)) |
| |
| /* Set the kind of LABEL. */ |
| #define SET_LABEL_KIND(LABEL, KIND) do { \ |
| rtx const _label = (LABEL); \ |
| const unsigned int _kind = (KIND); \ |
| _label->jump = ((_kind >> 1) & 1); \ |
| _label->call = (_kind & 1); \ |
| } while (0) |
| |
| #endif /* rtl flag checking */ |
| |
| #define LABEL_ALT_ENTRY_P(LABEL) (LABEL_KIND (LABEL) != LABEL_NORMAL) |
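| |
| /* Usage sketch (editorial illustration): marking a label as an |
|    exported alternate entry point and querying it back: |
| |
|      SET_LABEL_KIND (label, LABEL_GLOBAL_ENTRY); |
|      gcc_checking_assert (LABEL_KIND (label) == LABEL_GLOBAL_ENTRY |
|                           && LABEL_ALT_ENTRY_P (label));  */ |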
| |
| /* In jump.cc, each JUMP_INSN can point to a label that it can jump to, |
| so that if the JUMP_INSN is deleted, the label's LABEL_NUSES can |
| be decremented and possibly the label can be deleted. */ |
| #define JUMP_LABEL(INSN) XCEXP (INSN, 7, JUMP_INSN) |
| |
| inline rtx_insn *JUMP_LABEL_AS_INSN (const rtx_insn *insn) |
| { |
| return safe_as_a <rtx_insn *> (JUMP_LABEL (insn)); |
| } |
| |
| /* Methods of rtx_jump_insn. */ |
| |
| inline rtx rtx_jump_insn::jump_label () const |
| { |
| return JUMP_LABEL (this); |
| } |
| |
| inline rtx_code_label *rtx_jump_insn::jump_target () const |
| { |
| return safe_as_a <rtx_code_label *> (JUMP_LABEL (this)); |
| } |
| |
| inline void rtx_jump_insn::set_jump_target (rtx_code_label *target) |
| { |
| JUMP_LABEL (this) = target; |
| } |
| |
| /* Once basic blocks are found, each CODE_LABEL starts a chain that |
| goes through all the LABEL_REFs that jump to that label. The chain |
| eventually winds up at the CODE_LABEL: it is circular. */ |
| #define LABEL_REFS(LABEL) XCEXP (LABEL, 3, CODE_LABEL) |
| |
| /* Get the label that a LABEL_REF references. */ |
| static inline rtx_insn * |
| label_ref_label (const_rtx ref) |
| { |
| return as_a<rtx_insn *> (XCEXP (ref, 0, LABEL_REF)); |
| } |
| |
| /* Set the label that LABEL_REF ref refers to. */ |
| |
| static inline void |
| set_label_ref_label (rtx ref, rtx_insn *label) |
| { |
| XCEXP (ref, 0, LABEL_REF) = label; |
| } |
| |
| /* For a REG rtx, REGNO extracts the register number. REGNO can only |
| be used on RHS. Use SET_REGNO to change the value. */ |
| #define REGNO(RTX) (rhs_regno(RTX)) |
| #define SET_REGNO(RTX, N) (df_ref_change_reg_with_loc (RTX, N)) |
| |
| /* Return the number of consecutive registers in a REG. This is always |
| 1 for pseudo registers and is determined by TARGET_HARD_REGNO_NREGS for |
| hard registers. */ |
| #define REG_NREGS(RTX) (REG_CHECK (RTX)->nregs) |
| |
| /* ORIGINAL_REGNO holds the number the register originally had; for a |
| pseudo register turned into a hard reg this will hold the old pseudo |
| register number. */ |
| #define ORIGINAL_REGNO(RTX) \ |
| (RTL_FLAG_CHECK1 ("ORIGINAL_REGNO", (RTX), REG)->u2.original_regno) |
| |
| /* Force the REGNO macro to only be used on the rhs: it expands to a |
|    function call, so it cannot be assigned to.  */ |
| static inline unsigned int |
| rhs_regno (const_rtx x) |
| { |
| return REG_CHECK (x)->regno; |
| } |
| |
| /* Return the final register in REG X plus one. */ |
| static inline unsigned int |
| END_REGNO (const_rtx x) |
| { |
| return REGNO (x) + REG_NREGS (x); |
| } |
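| |
| /* Usage sketch (editorial illustration): recording every hard |
|    register occupied by a hard REG X in a HARD_REG_SET: |
| |
|      HARD_REG_SET used; |
|      CLEAR_HARD_REG_SET (used); |
|      for (unsigned int r = REGNO (x); r < END_REGNO (x); ++r) |
|        SET_HARD_REG_BIT (used, r);  */ |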
| |
| /* Change the REGNO and REG_NREGS of REG X to the specified values, |
| bypassing the df machinery. */ |
| static inline void |
| set_regno_raw (rtx x, unsigned int regno, unsigned int nregs) |
| { |
| reg_info *reg = REG_CHECK (x); |
| reg->regno = regno; |
| reg->nregs = nregs; |
| } |
| |
| /* 1 if RTX is a reg or parallel that is the current function's return |
| value. */ |
| #define REG_FUNCTION_VALUE_P(RTX) \ |
| (RTL_FLAG_CHECK2 ("REG_FUNCTION_VALUE_P", (RTX), REG, PARALLEL)->return_val) |
| |
| /* 1 if RTX is a reg that corresponds to a variable declared by the user. */ |
| #define REG_USERVAR_P(RTX) \ |
| (RTL_FLAG_CHECK1 ("REG_USERVAR_P", (RTX), REG)->volatil) |
| |
| /* 1 if RTX is a reg that holds a pointer value. */ |
| #define REG_POINTER(RTX) \ |
| (RTL_FLAG_CHECK1 ("REG_POINTER", (RTX), REG)->frame_related) |
| |
| /* 1 if RTX is a mem that holds a pointer value. */ |
| #define MEM_POINTER(RTX) \ |
| (RTL_FLAG_CHECK1 ("MEM_POINTER", (RTX), MEM)->frame_related) |
| |
| /* 1 if the given register REG corresponds to a hard register. */ |
| #define HARD_REGISTER_P(REG) (HARD_REGISTER_NUM_P (REGNO (REG))) |
| |
| /* 1 if the given register number REG_NO corresponds to a hard register. */ |
| #define HARD_REGISTER_NUM_P(REG_NO) ((REG_NO) < FIRST_PSEUDO_REGISTER) |
| |
| /* For a CONST_INT rtx, INTVAL extracts the integer. */ |
| #define INTVAL(RTX) XCWINT (RTX, 0, CONST_INT) |
| #define UINTVAL(RTX) ((unsigned HOST_WIDE_INT) INTVAL (RTX)) |
| |
| /* For a CONST_WIDE_INT, CONST_WIDE_INT_NUNITS is the number of |
| elements actually needed to represent the constant. |
| CONST_WIDE_INT_ELT gets one of the elements. 0 is the least |
| significant HOST_WIDE_INT. */ |
| #define CONST_WIDE_INT_VEC(RTX) HWIVEC_CHECK (RTX, CONST_WIDE_INT) |
| #define CONST_WIDE_INT_NUNITS(RTX) CWI_GET_NUM_ELEM (RTX) |
| #define CONST_WIDE_INT_ELT(RTX, N) CWI_ELT (RTX, N) |
| |
| /* For a CONST_POLY_INT, CONST_POLY_INT_COEFFS gives access to the |
| individual coefficients, in the form of a trailing_wide_ints structure. */ |
| #define CONST_POLY_INT_COEFFS(RTX) \ |
| (RTL_FLAG_CHECK1("CONST_POLY_INT_COEFFS", (RTX), \ |
| CONST_POLY_INT)->u.cpi.coeffs) |
| |
| /* For a CONST_DOUBLE: |
| #if TARGET_SUPPORTS_WIDE_INT == 0 |
| For a VOIDmode CONST_DOUBLE, there are two integers: CONST_DOUBLE_LOW |
| is the low-order word and CONST_DOUBLE_HIGH the high-order. |
| #endif |
| For a float, there is a REAL_VALUE_TYPE structure, and |
| CONST_DOUBLE_REAL_VALUE(r) is a pointer to it. */ |
| #define CONST_DOUBLE_LOW(r) XCMWINT (r, 0, CONST_DOUBLE, VOIDmode) |
| #define CONST_DOUBLE_HIGH(r) XCMWINT (r, 1, CONST_DOUBLE, VOIDmode) |
| #define CONST_DOUBLE_REAL_VALUE(r) \ |
| ((const struct real_value *) XCNMPRV (r, CONST_DOUBLE, VOIDmode)) |
| |
| #define CONST_FIXED_VALUE(r) \ |
| ((const struct fixed_value *) XCNMPFV (r, CONST_FIXED, VOIDmode)) |
| #define CONST_FIXED_VALUE_HIGH(r) \ |
| ((HOST_WIDE_INT) (CONST_FIXED_VALUE (r)->data.high)) |
| #define CONST_FIXED_VALUE_LOW(r) \ |
| ((HOST_WIDE_INT) (CONST_FIXED_VALUE (r)->data.low)) |
| |
| /* For a CONST_VECTOR, return element #n. */ |
| #define CONST_VECTOR_ELT(RTX, N) const_vector_elt (RTX, N) |
| |
| /* See rtl.texi for a description of these macros. */ |
| #define CONST_VECTOR_NPATTERNS(RTX) \ |
| (RTL_FLAG_CHECK1 ("CONST_VECTOR_NPATTERNS", (RTX), CONST_VECTOR) \ |
| ->u2.const_vector.npatterns) |
| |
| #define CONST_VECTOR_NELTS_PER_PATTERN(RTX) \ |
| (RTL_FLAG_CHECK1 ("CONST_VECTOR_NELTS_PER_PATTERN", (RTX), CONST_VECTOR) \ |
| ->u2.const_vector.nelts_per_pattern) |
| |
| #define CONST_VECTOR_DUPLICATE_P(RTX) \ |
| (CONST_VECTOR_NELTS_PER_PATTERN (RTX) == 1) |
| |
| #define CONST_VECTOR_STEPPED_P(RTX) \ |
| (CONST_VECTOR_NELTS_PER_PATTERN (RTX) == 3) |
| |
| #define CONST_VECTOR_ENCODED_ELT(RTX, N) XCVECEXP (RTX, 0, N, CONST_VECTOR) |
| |
| /* Return the number of elements encoded directly in a CONST_VECTOR. */ |
| |
| inline unsigned int |
| const_vector_encoded_nelts (const_rtx x) |
| { |
| return CONST_VECTOR_NPATTERNS (x) * CONST_VECTOR_NELTS_PER_PATTERN (x); |
| } |
| |
| /* For a CONST_VECTOR, return the number of elements in a vector. */ |
| #define CONST_VECTOR_NUNITS(RTX) GET_MODE_NUNITS (GET_MODE (RTX)) |
| |
| /* For a SUBREG rtx, SUBREG_REG extracts the value we want a subreg of. |
| SUBREG_BYTE extracts the byte-number. */ |
| |
| #define SUBREG_REG(RTX) XCEXP (RTX, 0, SUBREG) |
| #define SUBREG_BYTE(RTX) XCSUBREG (RTX, 1, SUBREG) |
| |
| /* in rtlanal.cc */ |
| /* Return the right cost to give to an operation |
| to make the cost of the corresponding register-to-register instruction |
| N times that of a fast register-to-register instruction. */ |
| #define COSTS_N_INSNS(N) ((N) * 4) |
| |
| /* Maximum cost of an rtl expression.  This value means that an rtx |
| with this cost must not be used under any circumstances. */ |
| #define MAX_COST INT_MAX |
| |
| /* Return true if CODE always has VOIDmode. */ |
| |
| static inline bool |
| always_void_p (enum rtx_code code) |
| { |
| return code == SET; |
| } |
| |
| /* A structure to hold all available cost information about an rtl |
| expression. */ |
| struct full_rtx_costs |
| { |
| int speed; |
| int size; |
| }; |
| |
| /* Initialize a full_rtx_costs structure C to the maximum cost. */ |
| static inline void |
| init_costs_to_max (struct full_rtx_costs *c) |
| { |
| c->speed = MAX_COST; |
| c->size = MAX_COST; |
| } |
| |
| /* Initialize a full_rtx_costs structure C to zero cost. */ |
| static inline void |
| init_costs_to_zero (struct full_rtx_costs *c) |
| { |
| c->speed = 0; |
| c->size = 0; |
| } |
| |
| /* Compare two full_rtx_costs structures A and B, returning true |
| if A < B when optimizing for speed. */ |
| static inline bool |
| costs_lt_p (struct full_rtx_costs *a, struct full_rtx_costs *b, |
| bool speed) |
| { |
| if (speed) |
| return (a->speed < b->speed |
| || (a->speed == b->speed && a->size < b->size)); |
| else |
| return (a->size < b->size |
| || (a->size == b->size && a->speed < b->speed)); |
| } |
| |
| /* Increase both members of the full_rtx_costs structure C by the |
| cost of N insns. */ |
| static inline void |
| costs_add_n_insns (struct full_rtx_costs *c, int n) |
| { |
| c->speed += COSTS_N_INSNS (n); |
| c->size += COSTS_N_INSNS (n); |
| } |
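| |
| /* Usage sketch (editorial illustration): picking the cheaper of two |
|    candidate expressions with the helpers above (get_full_rtx_cost |
|    is declared later in this file): |
| |
|      struct full_rtx_costs a, b; |
|      get_full_rtx_cost (x, mode, SET, 1, &a); |
|      get_full_rtx_cost (y, mode, SET, 1, &b); |
|      rtx best = costs_lt_p (&a, &b, speed_p) ? x : y;  */ |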
| |
| /* Describes the shape of a subreg: |
| |
| inner_mode == the mode of the SUBREG_REG |
| offset == the SUBREG_BYTE |
| outer_mode == the mode of the SUBREG itself. */ |
| class subreg_shape { |
| public: |
| subreg_shape (machine_mode, poly_uint16, machine_mode); |
| bool operator == (const subreg_shape &) const; |
| bool operator != (const subreg_shape &) const; |
| unsigned HOST_WIDE_INT unique_id () const; |
| |
| machine_mode inner_mode; |
| poly_uint16 offset; |
| machine_mode outer_mode; |
| }; |
| |
| inline |
| subreg_shape::subreg_shape (machine_mode inner_mode_in, |
| poly_uint16 offset_in, |
| machine_mode outer_mode_in) |
| : inner_mode (inner_mode_in), offset (offset_in), outer_mode (outer_mode_in) |
| {} |
| |
| inline bool |
| subreg_shape::operator == (const subreg_shape &other) const |
| { |
| return (inner_mode == other.inner_mode |
| && known_eq (offset, other.offset) |
| && outer_mode == other.outer_mode); |
| } |
| |
| inline bool |
| subreg_shape::operator != (const subreg_shape &other) const |
| { |
| return !operator == (other); |
| } |
| |
| /* Return an integer that uniquely identifies this shape. Structures |
| like rtx_def assume that a mode can fit in an 8-bit bitfield and no |
| current mode is anywhere near being 65536 bytes in size, so the |
| id comfortably fits in an unsigned HOST_WIDE_INT. */ |
| |
| inline unsigned HOST_WIDE_INT |
| subreg_shape::unique_id () const |
| { |
| { STATIC_ASSERT (MAX_MACHINE_MODE <= 256); } |
| { STATIC_ASSERT (NUM_POLY_INT_COEFFS <= 3); } |
| { STATIC_ASSERT (sizeof (offset.coeffs[0]) <= 2); } |
| unsigned HOST_WIDE_INT res = (int) inner_mode + ((int) outer_mode << 8); |
| for (int i = 0; i < NUM_POLY_INT_COEFFS; ++i) |
| res += (unsigned HOST_WIDE_INT) offset.coeffs[i] << ((1 + i) * 16); |
| return res; |
| } |
| |
| /* Return the shape of a SUBREG rtx. */ |
| |
| static inline subreg_shape |
| shape_of_subreg (const_rtx x) |
| { |
| return subreg_shape (GET_MODE (SUBREG_REG (x)), |
| SUBREG_BYTE (x), GET_MODE (x)); |
| } |
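| |
| /* Usage sketch (editorial illustration): two SUBREGs have the same |
|    geometry, whatever they wrap, iff their shapes compare equal: |
| |
|      bool same_geometry |
|        = shape_of_subreg (x) == shape_of_subreg (y); |
| |
|    unique_id () serves the same purpose as a hash or map key.  */ |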
| |
| /* Information about an address. This structure is supposed to be able |
| to represent all supported target addresses. Please extend it if it |
| is not yet general enough. */ |
| struct address_info { |
| /* The mode of the value being addressed, or VOIDmode if this is |
| a load-address operation with no known address mode. */ |
| machine_mode mode; |
| |
| /* The address space. */ |
| addr_space_t as; |
| |
| /* True if this is an RTX_AUTOINC address. */ |
| bool autoinc_p; |
| |
| /* A pointer to the top-level address. */ |
| rtx *outer; |
| |
| /* A pointer to the inner address, after all address mutations |
| have been stripped from the top-level address. It can be one |
| of the following: |
| |
| - A {PRE,POST}_{INC,DEC} of *BASE. SEGMENT, INDEX and DISP are null. |
| |
| - A {PRE,POST}_MODIFY of *BASE. In this case either INDEX or DISP |
| points to the step value, depending on whether the step is variable |
| or constant respectively. SEGMENT is null. |
| |
| - A plain sum of the form SEGMENT + BASE + INDEX + DISP, |
| with null fields evaluating to 0. */ |
| rtx *inner; |
| |
| /* Components that make up *INNER. Each one may be null or nonnull. |
| When nonnull, their meanings are as follows: |
| |
| - *SEGMENT is the "segment" of memory to which the address refers. |
| This value is entirely target-specific and is only called a "segment" |
| because that's its most typical use. It contains exactly one UNSPEC, |
| pointed to by SEGMENT_TERM. The contents of *SEGMENT do not need |
| reloading. |
| |
| - *BASE is a variable expression representing a base address. |
| It contains exactly one REG, SUBREG or MEM, pointed to by BASE_TERM. |
| |
| - *INDEX is a variable expression representing an index value. |
| It may be a scaled expression, such as a MULT. It has exactly |
| one REG, SUBREG or MEM, pointed to by INDEX_TERM. |
| |
| - *DISP is a constant, possibly mutated. DISP_TERM points to the |
| unmutated RTX_CONST_OBJ. */ |
| rtx *segment; |
| rtx *base; |
| rtx *index; |
| rtx *disp; |
| |
| rtx *segment_term; |
| rtx *base_term; |
| rtx *index_term; |
| rtx *disp_term; |
| |
| /* In a {PRE,POST}_MODIFY address, this points to a second copy |
| of BASE_TERM, otherwise it is null. */ |
| rtx *base_term2; |
| |
| /* ADDRESS if this structure describes an address operand, MEM if |
| it describes a MEM address. */ |
| enum rtx_code addr_outer_code; |
| |
| /* If BASE is nonnull, this is the code of the rtx that contains it. */ |
| enum rtx_code base_outer_code; |
| }; |
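| |
| /* Usage sketch (editorial illustration): decomposing a MEM address |
|    and inspecting its base (decompose_mem_address is declared later |
|    in this file): |
| |
|      struct address_info info; |
|      decompose_mem_address (&info, mem); |
|      if (info.base_term && REG_P (*info.base_term)) |
|        base_regno = REGNO (*info.base_term);  */ |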
| |
| /* This is used to bundle an rtx and a mode together so that the pair |
| can be used with the wi:: routines. If we ever put modes into rtx |
| integer constants, this should go away and then just pass an rtx in. */ |
| typedef std::pair <rtx, machine_mode> rtx_mode_t; |
| |
| namespace wi |
| { |
| template <> |
| struct int_traits <rtx_mode_t> |
| { |
| static const enum precision_type precision_type = VAR_PRECISION; |
| static const bool host_dependent_precision = false; |
| /* This ought to be true, except for the special case that BImode |
| is canonicalized to STORE_FLAG_VALUE, which might be 1. */ |
| static const bool is_sign_extended = false; |
| static unsigned int get_precision (const rtx_mode_t &); |
| static wi::storage_ref decompose (HOST_WIDE_INT *, unsigned int, |
| const rtx_mode_t &); |
| }; |
| } |
| |
| inline unsigned int |
| wi::int_traits <rtx_mode_t>::get_precision (const rtx_mode_t &x) |
| { |
| return GET_MODE_PRECISION (as_a <scalar_mode> (x.second)); |
| } |
| |
| inline wi::storage_ref |
| wi::int_traits <rtx_mode_t>::decompose (HOST_WIDE_INT *, |
| unsigned int precision, |
| const rtx_mode_t &x) |
| { |
| gcc_checking_assert (precision == get_precision (x)); |
| switch (GET_CODE (x.first)) |
| { |
| case CONST_INT: |
| if (precision < HOST_BITS_PER_WIDE_INT) |
| /* Nonzero BImodes are stored as STORE_FLAG_VALUE, which on many |
| targets is 1 rather than -1. */ |
| gcc_checking_assert (INTVAL (x.first) |
| == sext_hwi (INTVAL (x.first), precision) |
| || (x.second == BImode && INTVAL (x.first) == 1)); |
| |
| return wi::storage_ref (&INTVAL (x.first), 1, precision); |
| |
| case CONST_WIDE_INT: |
| return wi::storage_ref (&CONST_WIDE_INT_ELT (x.first, 0), |
| CONST_WIDE_INT_NUNITS (x.first), precision); |
| |
| #if TARGET_SUPPORTS_WIDE_INT == 0 |
| case CONST_DOUBLE: |
| return wi::storage_ref (&CONST_DOUBLE_LOW (x.first), 2, precision); |
| #endif |
| |
| default: |
| gcc_unreachable (); |
| } |
| } |
| |
| namespace wi |
| { |
| hwi_with_prec shwi (HOST_WIDE_INT, machine_mode mode); |
| wide_int min_value (machine_mode, signop); |
| wide_int max_value (machine_mode, signop); |
| } |
| |
| inline wi::hwi_with_prec |
| wi::shwi (HOST_WIDE_INT val, machine_mode mode) |
| { |
| return shwi (val, GET_MODE_PRECISION (as_a <scalar_mode> (mode))); |
| } |
| |
| /* Produce the smallest number that is represented in MODE. The precision |
| is taken from MODE and the sign from SGN. */ |
| inline wide_int |
| wi::min_value (machine_mode mode, signop sgn) |
| { |
| return min_value (GET_MODE_PRECISION (as_a <scalar_mode> (mode)), sgn); |
| } |
| |
| /* Produce the largest number that is represented in MODE. The precision |
| is taken from MODE and the sign from SGN. */ |
| inline wide_int |
| wi::max_value (machine_mode mode, signop sgn) |
| { |
| return max_value (GET_MODE_PRECISION (as_a <scalar_mode> (mode)), sgn); |
| } |
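| |
| /* Usage sketch (editorial illustration): checking that the CONST_INT |
|    VAL is representable in MODE when treated as unsigned: |
| |
|      bool fits = wi::leu_p (rtx_mode_t (val, mode), |
|                             wi::max_value (mode, UNSIGNED));  */ |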
| |
| namespace wi |
| { |
| typedef poly_int<NUM_POLY_INT_COEFFS, |
| generic_wide_int <wide_int_ref_storage <false, false> > > |
| rtx_to_poly_wide_ref; |
| rtx_to_poly_wide_ref to_poly_wide (const_rtx, machine_mode); |
| } |
| |
| /* Return the value of a CONST_POLY_INT in its native precision. */ |
| |
| inline wi::rtx_to_poly_wide_ref |
| const_poly_int_value (const_rtx x) |
| { |
| poly_int<NUM_POLY_INT_COEFFS, WIDE_INT_REF_FOR (wide_int)> res; |
| for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i) |
| res.coeffs[i] = CONST_POLY_INT_COEFFS (x)[i]; |
| return res; |
| } |
| |
| /* Return true if X is a scalar integer or a CONST_POLY_INT. The value |
| can then be extracted using wi::to_poly_wide. */ |
| |
| inline bool |
| poly_int_rtx_p (const_rtx x) |
| { |
| return CONST_SCALAR_INT_P (x) || CONST_POLY_INT_P (x); |
| } |
| |
| /* Access X (which satisfies poly_int_rtx_p) as a poly_wide_int. |
| MODE is the mode of X. */ |
| |
| inline wi::rtx_to_poly_wide_ref |
| wi::to_poly_wide (const_rtx x, machine_mode mode) |
| { |
| if (CONST_POLY_INT_P (x)) |
| return const_poly_int_value (x); |
| return rtx_mode_t (const_cast<rtx> (x), mode); |
| } |
| |
| /* Return the value of X as a poly_int64. */ |
| |
| inline poly_int64 |
| rtx_to_poly_int64 (const_rtx x) |
| { |
| if (CONST_POLY_INT_P (x)) |
| { |
| poly_int64 res; |
| for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i) |
| res.coeffs[i] = CONST_POLY_INT_COEFFS (x)[i].to_shwi (); |
| return res; |
| } |
| return INTVAL (x); |
| } |
| |
| /* Return true if arbitrary value X is an integer constant that can |
| be represented as a poly_int64. Store the value in *RES if so, |
| otherwise leave it unmodified. */ |
| |
| inline bool |
| poly_int_rtx_p (const_rtx x, poly_int64_pod *res) |
| { |
| if (CONST_INT_P (x)) |
| { |
| *res = INTVAL (x); |
| return true; |
| } |
| if (CONST_POLY_INT_P (x)) |
| { |
| for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i) |
| if (!wi::fits_shwi_p (CONST_POLY_INT_COEFFS (x)[i])) |
| return false; |
| for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i) |
| res->coeffs[i] = CONST_POLY_INT_COEFFS (x)[i].to_shwi (); |
| return true; |
| } |
| return false; |
| } |
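| |
| /* Usage sketch (editorial illustration, assuming ADDR is known to be |
|    a PLUS whose second operand carries the displacement): |
| |
|      poly_int64 offset; |
|      if (poly_int_rtx_p (XEXP (addr, 1), &offset)) |
|        base = XEXP (addr, 0);  */ |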
| |
| extern void init_rtlanal (void); |
| extern int rtx_cost (rtx, machine_mode, enum rtx_code, int, bool); |
| extern int address_cost (rtx, machine_mode, addr_space_t, bool); |
| extern void get_full_rtx_cost (rtx, machine_mode, enum rtx_code, int, |
| struct full_rtx_costs *); |
| extern bool native_encode_rtx (machine_mode, rtx, vec<target_unit> &, |
| unsigned int, unsigned int); |
| extern rtx native_decode_rtx (machine_mode, const vec<target_unit> &, |
| unsigned int); |
| extern rtx native_decode_vector_rtx (machine_mode, const vec<target_unit> &, |
| unsigned int, unsigned int, unsigned int); |
| extern poly_uint64 subreg_lsb (const_rtx); |
| extern poly_uint64 subreg_size_lsb (poly_uint64, poly_uint64, poly_uint64); |
| extern poly_uint64 subreg_size_offset_from_lsb (poly_uint64, poly_uint64, |
| poly_uint64); |
| extern bool read_modify_subreg_p (const_rtx); |
| |
| /* Given a subreg's OUTER_MODE, INNER_MODE, and SUBREG_BYTE, return the |
| bit offset at which the subreg begins (counting from the least significant |
| bit of the operand). */ |
| |
| inline poly_uint64 |
| subreg_lsb_1 (machine_mode outer_mode, machine_mode inner_mode, |
| poly_uint64 subreg_byte) |
| { |
| return subreg_size_lsb (GET_MODE_SIZE (outer_mode), |
| GET_MODE_SIZE (inner_mode), subreg_byte); |
| } |
| |
| /* Return the subreg byte offset for a subreg whose outer mode is |
| OUTER_MODE, whose inner mode is INNER_MODE, and where there are |
| LSB_SHIFT *bits* between the lsb of the outer value and the lsb of |
| the inner value. This is the inverse of subreg_lsb_1 (which converts |
| byte offsets to bit shifts). */ |
| |
| inline poly_uint64 |
| subreg_offset_from_lsb (machine_mode outer_mode, |
| machine_mode inner_mode, |
| poly_uint64 lsb_shift) |
| { |
| return subreg_size_offset_from_lsb (GET_MODE_SIZE (outer_mode), |
| GET_MODE_SIZE (inner_mode), lsb_shift); |
| } |
| |
| extern unsigned int subreg_regno_offset (unsigned int, machine_mode, |
| poly_uint64, machine_mode); |
| extern bool subreg_offset_representable_p (unsigned int, machine_mode, |
| poly_uint64, machine_mode); |
| extern unsigned int subreg_regno (const_rtx); |
| extern int simplify_subreg_regno (unsigned int, machine_mode, |
| poly_uint64, machine_mode); |
| extern int lowpart_subreg_regno (unsigned int, machine_mode, |
| machine_mode); |
| extern unsigned int subreg_nregs (const_rtx); |
| extern unsigned int subreg_nregs_with_regno (unsigned int, const_rtx); |
| extern unsigned HOST_WIDE_INT nonzero_bits (const_rtx, machine_mode); |
| extern unsigned int num_sign_bit_copies (const_rtx, machine_mode); |
| extern bool constant_pool_constant_p (rtx); |
| extern bool truncated_to_mode (machine_mode, const_rtx); |
| extern int low_bitmask_len (machine_mode, unsigned HOST_WIDE_INT); |
| extern void split_double (rtx, rtx *, rtx *); |
| extern rtx *strip_address_mutations (rtx *, enum rtx_code * = 0); |
| extern void decompose_address (struct address_info *, rtx *, |
| machine_mode, addr_space_t, enum rtx_code); |
| extern void decompose_lea_address (struct address_info *, rtx *); |
| extern void decompose_mem_address (struct address_info *, rtx); |
| extern void update_address (struct address_info *); |
| extern HOST_WIDE_INT get_index_scale (const struct address_info *); |
| extern enum rtx_code get_index_code (const struct address_info *); |
| |
| /* 1 if RTX is a subreg containing a reg that is already known to be |
| sign- or zero-extended from the mode of the subreg to the mode of |
| the reg. SUBREG_PROMOTED_UNSIGNED_P gives the signedness of the |
| extension. |
| |
| When used as a LHS, it means that this extension must be done |
| when assigning to SUBREG_REG. */ |
| |
| #define SUBREG_PROMOTED_VAR_P(RTX) \ |
| (RTL_FLAG_CHECK1 ("SUBREG_PROMOTED", (RTX), SUBREG)->in_struct) |
| |
| /* Valid for subregs which are SUBREG_PROMOTED_VAR_P(). In that case |
| this gives the necessary extensions: |
| 0 - signed (SRP_SIGNED) |
| 1 - normal unsigned (SRP_UNSIGNED) |
| 2 - value is both sign and zero extended for the mode |
| (SRP_SIGNED_AND_UNSIGNED). |
| -1 - pointer unsigned, which most often can be handled like unsigned |
| extension, except for generating instructions where we need to |
| emit special code (ptr_extend insns) on some architectures |
| (SRP_POINTER). */ |
| |
| const int SRP_POINTER = -1; |
| const int SRP_SIGNED = 0; |
| const int SRP_UNSIGNED = 1; |
| const int SRP_SIGNED_AND_UNSIGNED = 2; |
| |
| /* Sets promoted mode for SUBREG_PROMOTED_VAR_P(). */ |
| #define SUBREG_PROMOTED_SET(RTX, VAL) \ |
| do { \ |
| rtx const _rtx = RTL_FLAG_CHECK1 ("SUBREG_PROMOTED_SET", \ |
| (RTX), SUBREG); \ |
| switch (VAL) \ |
| { \ |
| case SRP_POINTER: \ |
| _rtx->volatil = 0; \ |
| _rtx->unchanging = 0; \ |
| break; \ |
| case SRP_SIGNED: \ |
| _rtx->volatil = 0; \ |
| _rtx->unchanging = 1; \ |
| break; \ |
| case SRP_UNSIGNED: \ |
| _rtx->volatil = 1; \ |
| _rtx->unchanging = 0; \ |
| break; \ |
| case SRP_SIGNED_AND_UNSIGNED: \ |
| _rtx->volatil = 1; \ |
| _rtx->unchanging = 1; \ |
| break; \ |
| } \ |
| } while (0) |
| |
| /* Gets the value stored in promoted mode for SUBREG_PROMOTED_VAR_P(), |
| including SRP_SIGNED_AND_UNSIGNED if promoted for |
| both signed and unsigned. */ |
| #define SUBREG_PROMOTED_GET(RTX) \ |
| (2 * (RTL_FLAG_CHECK1 ("SUBREG_PROMOTED_GET", (RTX), SUBREG)->volatil)\ |
| + (RTX)->unchanging - 1) |
| |
| /* Returns sign of promoted mode for SUBREG_PROMOTED_VAR_P(). */ |
| #define SUBREG_PROMOTED_SIGN(RTX) \ |
| ((RTL_FLAG_CHECK1 ("SUBREG_PROMOTED_SIGN", (RTX), SUBREG)->volatil) ? 1\ |
| : (RTX)->unchanging - 1) |
| |
| /* Predicate to check if RTX of SUBREG_PROMOTED_VAR_P() is promoted |
| for SIGNED type. */ |
| #define SUBREG_PROMOTED_SIGNED_P(RTX) \ |
| (RTL_FLAG_CHECK1 ("SUBREG_PROMOTED_SIGNED_P", (RTX), SUBREG)->unchanging) |
| |
| /* Predicate to check if RTX of SUBREG_PROMOTED_VAR_P() is promoted |
| for UNSIGNED type. */ |
| #define SUBREG_PROMOTED_UNSIGNED_P(RTX) \ |
| (RTL_FLAG_CHECK1 ("SUBREG_PROMOTED_UNSIGNED_P", (RTX), SUBREG)->volatil) |
| |
| /* Checks if RTX of SUBREG_PROMOTED_VAR_P() is promoted for given SIGN. */ |
| #define SUBREG_CHECK_PROMOTED_SIGN(RTX, SIGN) \ |
| ((SIGN) == SRP_POINTER ? SUBREG_PROMOTED_GET (RTX) == SRP_POINTER \ |
| : (SIGN) == SRP_SIGNED ? SUBREG_PROMOTED_SIGNED_P (RTX) \ |
| : SUBREG_PROMOTED_UNSIGNED_P (RTX)) |
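| |
| /* Usage sketch (editorial illustration): recording that a promoted |
|    SUBREG is zero-extended, then testing it: |
| |
|      SUBREG_PROMOTED_VAR_P (x) = 1; |
|      SUBREG_PROMOTED_SET (x, SRP_UNSIGNED); |
|      gcc_checking_assert (SUBREG_PROMOTED_UNSIGNED_P (x) |
|                           && SUBREG_CHECK_PROMOTED_SIGN (x, SRP_UNSIGNED));  */ |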
| |
| /* True if the REG is the static chain register for some CALL_INSN. */ |
| #define STATIC_CHAIN_REG_P(RTX) \ |
| (RTL_FLAG_CHECK1 ("STATIC_CHAIN_REG_P", (RTX), REG)->jump) |
| |
| /* True if the subreg was generated by LRA for reload insns. Such |
| subregs are valid only during LRA. */ |
| #define LRA_SUBREG_P(RTX) \ |
| (RTL_FLAG_CHECK1 ("LRA_SUBREG_P", (RTX), SUBREG)->jump) |
| |
| /* Access various components of an ASM_OPERANDS rtx. */ |
| |
| #define ASM_OPERANDS_TEMPLATE(RTX) XCSTR (RTX, 0, ASM_OPERANDS) |
| #define ASM_OPERANDS_OUTPUT_CONSTRAINT(RTX) XCSTR (RTX, 1, ASM_OPERANDS) |
| #define ASM_OPERANDS_OUTPUT_IDX(RTX) XCINT (RTX, 2, ASM_OPERANDS) |
| #define ASM_OPERANDS_INPUT_VEC(RTX) XCVEC (RTX, 3, ASM_OPERANDS) |
| #define ASM_OPERANDS_INPUT_CONSTRAINT_VEC(RTX) XCVEC (RTX, 4, ASM_OPERANDS) |
| #define ASM_OPERANDS_INPUT(RTX, N) XCVECEXP (RTX, 3, N, ASM_OPERANDS) |
| #define ASM_OPERANDS_INPUT_LENGTH(RTX) XCVECLEN (RTX, 3, ASM_OPERANDS) |
| #define ASM_OPERANDS_INPUT_CONSTRAINT_EXP(RTX, N) \ |
| XCVECEXP (RTX, 4, N, ASM_OPERANDS) |
| #define ASM_OPERANDS_INPUT_CONSTRAINT(RTX, N) \ |
| XSTR (XCVECEXP (RTX, 4, N, ASM_OPERANDS), 0) |
| #define ASM_OPERANDS_INPUT_MODE(RTX, N) \ |
| GET_MODE (XCVECEXP (RTX, 4, N, ASM_OPERANDS)) |
| #define ASM_OPERANDS_LABEL_VEC(RTX) XCVEC (RTX, 5, ASM_OPERANDS) |
| #define ASM_OPERANDS_LABEL_LENGTH(RTX) XCVECLEN (RTX, 5, ASM_OPERANDS) |
| #define ASM_OPERANDS_LABEL(RTX, N) XCVECEXP (RTX, 5, N, ASM_OPERANDS) |
| #define ASM_OPERANDS_SOURCE_LOCATION(RTX) XCUINT (RTX, 6, ASM_OPERANDS) |
| #define ASM_INPUT_SOURCE_LOCATION(RTX) XCUINT (RTX, 1, ASM_INPUT) |
| |
| /* 1 if RTX is a mem that is statically allocated in read-only memory. */ |
| #define MEM_READONLY_P(RTX) \ |
| (RTL_FLAG_CHECK1 ("MEM_READONLY_P", (RTX), MEM)->unchanging) |
| |
| /* 1 if RTX is a mem and we should keep the alias set for this mem |
| unchanged when we access a component.  Set to 1, for example, when we |
| are already in a non-addressable component of an aggregate. */ |
| #define MEM_KEEP_ALIAS_SET_P(RTX) \ |
| (RTL_FLAG_CHECK1 ("MEM_KEEP_ALIAS_SET_P", (RTX), MEM)->jump) |
| |
| /* 1 if RTX is a mem or asm_operand for a volatile reference. */ |
| #define MEM_VOLATILE_P(RTX) \ |
| (RTL_FLAG_CHECK3 ("MEM_VOLATILE_P", (RTX), MEM, ASM_OPERANDS, \ |
| ASM_INPUT)->volatil) |
| |
| /* 1 if RTX is a mem that cannot trap. */ |
| #define MEM_NOTRAP_P(RTX) \ |
| (RTL_FLAG_CHECK1 ("MEM_NOTRAP_P", (RTX), MEM)->call) |
| |
| /* The memory attribute block. We provide access macros for each value |
| in the block and provide defaults if none specified. */ |
| #define MEM_ATTRS(RTX) X0MEMATTR (RTX, 1) |
| |
| /* The register attribute block. We provide access macros for each value |
| in the block and provide defaults if none specified. */ |
| #define REG_ATTRS(RTX) (REG_CHECK (RTX)->attrs) |
| |
| #ifndef GENERATOR_FILE |
| /* For a MEM rtx, the alias set. If 0, this MEM is not in any alias |
| set, and may alias anything. Otherwise, the MEM can only alias |
| MEMs in a conflicting alias set. This value is set in a |
| language-dependent manner in the front-end, and should not be |
| altered in the back-end. These set numbers are tested with |
| alias_sets_conflict_p. */ |
| #define MEM_ALIAS_SET(RTX) (get_mem_attrs (RTX)->alias) |
| |
| /* For a MEM rtx, the decl it is known to refer to, if it is known to |
| refer to part of a DECL. It may also be a COMPONENT_REF. */ |
| #define MEM_EXPR(RTX) (get_mem_attrs (RTX)->expr) |
| |
| /* For a MEM rtx, true if its MEM_OFFSET is known. */ |
| #define MEM_OFFSET_KNOWN_P(RTX) (get_mem_attrs (RTX)->offset_known_p) |
| |
| /* For a MEM rtx, the offset from the start of MEM_EXPR. */ |
| #define MEM_OFFSET(RTX) (get_mem_attrs (RTX)->offset) |
| |
| /* For a MEM rtx, the address space. */ |
| #define MEM_ADDR_SPACE(RTX) (get_mem_attrs (RTX)->addrspace) |
| |
| /* For a MEM rtx, true if its MEM_SIZE is known. */ |
| #define MEM_SIZE_KNOWN_P(RTX) (get_mem_attrs (RTX)->size_known_p) |
| |
| /* For a MEM rtx, the size in bytes of the MEM. */ |
| #define MEM_SIZE(RTX) (get_mem_attrs (RTX)->size) |
| |
| /* For a MEM rtx, the alignment in bits. We can use the alignment of the |
| mode as a default when STRICT_ALIGNMENT, but not otherwise. */ |
| #define MEM_ALIGN(RTX) (get_mem_attrs (RTX)->align) |
| #else |
| #define MEM_ADDR_SPACE(RTX) ADDR_SPACE_GENERIC |
| #endif |
| |
| /* For a REG rtx, the decl it is known to refer to, if it is known to |
| refer to part of a DECL. */ |
| #define REG_EXPR(RTX) (REG_ATTRS (RTX) == 0 ? 0 : REG_ATTRS (RTX)->decl) |
| |
| /* For a REG rtx, the offset from the start of REG_EXPR, if known, as an |
| HOST_WIDE_INT. */ |
| #define REG_OFFSET(RTX) (REG_ATTRS (RTX) == 0 ? 0 : REG_ATTRS (RTX)->offset) |
| |
| /* Copy the attributes that apply to memory locations from RHS to LHS. */ |
| #define MEM_COPY_ATTRIBUTES(LHS, RHS) \ |
| (MEM_VOLATILE_P (LHS) = MEM_VOLATILE_P (RHS), \ |
| MEM_NOTRAP_P (LHS) = MEM_NOTRAP_P (RHS), \ |
| MEM_READONLY_P (LHS) = MEM_READONLY_P (RHS), \ |
| MEM_KEEP_ALIAS_SET_P (LHS) = MEM_KEEP_ALIAS_SET_P (RHS), \ |
| MEM_POINTER (LHS) = MEM_POINTER (RHS), \ |
| MEM_ATTRS (LHS) = MEM_ATTRS (RHS)) |
| |
| /* 1 if RTX is a label_ref for a nonlocal label. */ |
| /* Likewise in an expr_list for a REG_LABEL_OPERAND or |
| REG_LABEL_TARGET note. */ |
| #define LABEL_REF_NONLOCAL_P(RTX) \ |
| (RTL_FLAG_CHECK1 ("LABEL_REF_NONLOCAL_P", (RTX), LABEL_REF)->volatil) |
| |
| /* 1 if RTX is a code_label that should always be considered to be needed. */ |
| #define LABEL_PRESERVE_P(RTX) \ |
| (RTL_FLAG_CHECK2 ("LABEL_PRESERVE_P", (RTX), CODE_LABEL, NOTE)->in_struct) |
| |
| /* During sched, 1 if RTX is an insn that must be scheduled together |
| with the preceding insn. */ |
| #define SCHED_GROUP_P(RTX) \ |
| (RTL_FLAG_CHECK4 ("SCHED_GROUP_P", (RTX), DEBUG_INSN, INSN, \ |
| JUMP_INSN, CALL_INSN)->in_struct) |
| |
| /* For a SET rtx, SET_DEST is the place that is set |
| and SET_SRC is the value it is set to. */ |
| #define SET_DEST(RTX) XC2EXP (RTX, 0, SET, CLOBBER) |
| #define SET_SRC(RTX) XCEXP (RTX, 1, SET) |
| #define SET_IS_RETURN_P(RTX) \ |
| (RTL_FLAG_CHECK1 ("SET_IS_RETURN_P", (RTX), SET)->jump) |
| |
| /* For a TRAP_IF rtx, TRAP_CONDITION is an expression. */ |
| #define TRAP_CONDITION(RTX) XCEXP (RTX, 0, TRAP_IF) |
| #define TRAP_CODE(RTX) XCEXP (RTX, 1, TRAP_IF) |
| |
| /* For a COND_EXEC rtx, COND_EXEC_TEST is the condition to base |
| conditionally executing the code on, COND_EXEC_CODE is the code |
| to execute if the condition is true. */ |
| #define COND_EXEC_TEST(RTX) XCEXP (RTX, 0, COND_EXEC) |
| #define COND_EXEC_CODE(RTX) XCEXP (RTX, 1, COND_EXEC) |
| |
| /* 1 if RTX is a symbol_ref that addresses this function's rtl |
| constants pool. */ |
| #define CONSTANT_POOL_ADDRESS_P(RTX) \ |
| (RTL_FLAG_CHECK1 ("CONSTANT_POOL_ADDRESS_P", (RTX), SYMBOL_REF)->unchanging) |
| |
| /* 1 if RTX is a symbol_ref that addresses a value in the file's |
| tree constant pool. This information is private to varasm.cc. */ |
| #define TREE_CONSTANT_POOL_ADDRESS_P(RTX) \ |
| (RTL_FLAG_CHECK1 ("TREE_CONSTANT_POOL_ADDRESS_P", \ |
| (RTX), SYMBOL_REF)->frame_related) |
| |
| /* Used if RTX is a symbol_ref, for machine-specific purposes. */ |
| #define SYMBOL_REF_FLAG(RTX) \ |
| (RTL_FLAG_CHECK1 ("SYMBOL_REF_FLAG", (RTX), SYMBOL_REF)->volatil) |
| |
| /* 1 if RTX is a symbol_ref that has been used as the library function |
| in emit_library_call. */ |
| #define SYMBOL_REF_USED(RTX) \ |
| (RTL_FLAG_CHECK1 ("SYMBOL_REF_USED", (RTX), SYMBOL_REF)->used) |
| |
| /* 1 if RTX is a symbol_ref for a weak symbol. */ |
| #define SYMBOL_REF_WEAK(RTX) \ |
| (RTL_FLAG_CHECK1 ("SYMBOL_REF_WEAK", (RTX), SYMBOL_REF)->return_val) |
| |
| /* A pointer attached to the SYMBOL_REF; either SYMBOL_REF_DECL or |
| SYMBOL_REF_CONSTANT. */ |
| #define SYMBOL_REF_DATA(RTX) X0ANY ((RTX), 1) |
| |
| /* Set RTX's SYMBOL_REF_DECL to DECL. RTX must not be a constant |
| pool symbol. */ |
| #define SET_SYMBOL_REF_DECL(RTX, DECL) \ |
| (gcc_assert (!CONSTANT_POOL_ADDRESS_P (RTX)), X0TREE ((RTX), 1) = (DECL)) |
| |
| /* The tree (decl or constant) associated with the symbol, or null. */ |
| #define SYMBOL_REF_DECL(RTX) \ |
| (CONSTANT_POOL_ADDRESS_P (RTX) ? NULL : X0TREE ((RTX), 1)) |
| |
| /* Set RTX's SYMBOL_REF_CONSTANT to C. RTX must be a constant pool symbol. */ |
| #define SET_SYMBOL_REF_CONSTANT(RTX, C) \ |
| (gcc_assert (CONSTANT_POOL_ADDRESS_P (RTX)), X0CONSTANT ((RTX), 1) = (C)) |
| |
| /* The rtx constant pool entry for a symbol, or null. */ |
| #define SYMBOL_REF_CONSTANT(RTX) \ |
| (CONSTANT_POOL_ADDRESS_P (RTX) ? X0CONSTANT ((RTX), 1) : NULL) |
| |
| /* A set of flags on a symbol_ref that are, in some respects, redundant with |
| information derivable from the tree decl associated with this symbol. |
| Except that we build a *lot* of SYMBOL_REFs that aren't associated with a |
| decl. In some cases this is a bug. But beyond that, it's nice to cache |
| this information to avoid recomputing it. Finally, this allows space for |
| the target to store more than one bit of information, as with |
| SYMBOL_REF_FLAG. */ |
| #define SYMBOL_REF_FLAGS(RTX) \ |
| (RTL_FLAG_CHECK1 ("SYMBOL_REF_FLAGS", (RTX), SYMBOL_REF) \ |
| ->u2.symbol_ref_flags) |
| |
| /* These flags are common enough to be defined for all targets. They |
| are computed by the default version of targetm.encode_section_info. */ |
| |
| /* Set if this symbol is a function. */ |
| #define SYMBOL_FLAG_FUNCTION (1 << 0) |
| #define SYMBOL_REF_FUNCTION_P(RTX) \ |
| ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_FUNCTION) != 0) |
| /* Set if targetm.binds_local_p is true. */ |
| #define SYMBOL_FLAG_LOCAL (1 << 1) |
| #define SYMBOL_REF_LOCAL_P(RTX) \ |
| ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_LOCAL) != 0) |
| /* Set if targetm.in_small_data_p is true. */ |
| #define SYMBOL_FLAG_SMALL (1 << 2) |
| #define SYMBOL_REF_SMALL_P(RTX) \ |
| ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_SMALL) != 0) |
| /* The three-bit field at [5:3] is nonzero for TLS variables; use |
| SYMBOL_REF_TLS_MODEL to extract the field as an enum tls_model. */ |
| #define SYMBOL_FLAG_TLS_SHIFT 3 |
| #define SYMBOL_REF_TLS_MODEL(RTX) \ |
| ((enum tls_model) ((SYMBOL_REF_FLAGS (RTX) >> SYMBOL_FLAG_TLS_SHIFT) & 7)) |
| /* Set if this symbol is not defined in this translation unit. */ |
| #define SYMBOL_FLAG_EXTERNAL (1 << 6) |
| #define SYMBOL_REF_EXTERNAL_P(RTX) \ |
| ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_EXTERNAL) != 0) |
| /* Set if this symbol has a block_symbol structure associated with it. */ |
| #define SYMBOL_FLAG_HAS_BLOCK_INFO (1 << 7) |
| #define SYMBOL_REF_HAS_BLOCK_INFO_P(RTX) \ |
| ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_HAS_BLOCK_INFO) != 0) |
| /* Set if this symbol is a section anchor. SYMBOL_REF_ANCHOR_P implies |
| SYMBOL_REF_HAS_BLOCK_INFO_P. */ |
| #define SYMBOL_FLAG_ANCHOR (1 << 8) |
| #define SYMBOL_REF_ANCHOR_P(RTX) \ |
| ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_ANCHOR) != 0) |
| |
| /* Subsequent bits are available for the target to use. */ |
| #define SYMBOL_FLAG_MACH_DEP_SHIFT 9 |
| #define SYMBOL_FLAG_MACH_DEP (1 << SYMBOL_FLAG_MACH_DEP_SHIFT) |
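| |
| /* Usage sketch (editorial illustration; the flag name is |
|    hypothetical): a target carving out a private bit in its |
|    TARGET_ENCODE_SECTION_INFO hook: |
| |
|      #define SYMBOL_FLAG_MYPORT_SMALL (SYMBOL_FLAG_MACH_DEP << 0) |
|      SYMBOL_REF_FLAGS (sym) |= SYMBOL_FLAG_MYPORT_SMALL;  */ |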
| |
| /* If SYMBOL_REF_HAS_BLOCK_INFO_P (RTX), this is the object_block |
| structure to which the symbol belongs, or NULL if it has not been |
| assigned a block. */ |
| #define SYMBOL_REF_BLOCK(RTX) (BLOCK_SYMBOL_CHECK (RTX)->block) |
| |
| /* If SYMBOL_REF_HAS_BLOCK_INFO_P (RTX), this is the offset of RTX from |
| the first object in SYMBOL_REF_BLOCK (RTX). The value is negative if |
| RTX has not yet been assigned to a block, or it has not been given an |
| offset within that block. */ |
| #define SYMBOL_REF_BLOCK_OFFSET(RTX) (BLOCK_SYMBOL_CHECK (RTX)->offset) |
| |
| /* True if RTX is flagged to be a scheduling barrier. */ |
| #define PREFETCH_SCHEDULE_BARRIER_P(RTX) \ |
| (RTL_FLAG_CHECK1 ("PREFETCH_SCHEDULE_BARRIER_P", (RTX), PREFETCH)->volatil) |
| |
| /* Indicate whether the machine has any sort of auto increment addressing. |
| If not, we can avoid checking for REG_INC notes. */ |
| |
| #if (defined (HAVE_PRE_INCREMENT) || defined (HAVE_PRE_DECREMENT) \ |
| || defined (HAVE_POST_INCREMENT) || defined (HAVE_POST_DECREMENT) \ |
| || defined (HAVE_PRE_MODIFY_DISP) || defined (HAVE_POST_MODIFY_DISP) \ |
| || defined (HAVE_PRE_MODIFY_REG) || defined (HAVE_POST_MODIFY_REG)) |
| #define AUTO_INC_DEC 1 |
| #else |
| #define AUTO_INC_DEC 0 |
| #endif |
| |
| /* Define a macro to look for REG_INC notes, |
| but save time on machines where they never exist. */ |
| |
| #if AUTO_INC_DEC |
| #define FIND_REG_INC_NOTE(INSN, REG) \ |
| ((REG) != NULL_RTX && REG_P ((REG)) \ |
| ? find_regno_note ((INSN), REG_INC, REGNO (REG)) \ |
| : find_reg_note ((INSN), REG_INC, (REG))) |
| #else |
| #define FIND_REG_INC_NOTE(INSN, REG) 0 |
| #endif |
| |
| #ifndef HAVE_PRE_INCREMENT |
| #define HAVE_PRE_INCREMENT 0 |
| #endif |
| |
| #ifndef HAVE_PRE_DECREMENT |
| #define HAVE_PRE_DECREMENT 0 |
| #endif |
| |
| #ifndef HAVE_POST_INCREMENT |
| #define HAVE_POST_INCREMENT 0 |
| #endif |
| |
| #ifndef HAVE_POST_DECREMENT |
| #define HAVE_POST_DECREMENT 0 |
| #endif |
| |
| #ifndef HAVE_POST_MODIFY_DISP |
| #define HAVE_POST_MODIFY_DISP 0 |
| #endif |
| |
| #ifndef HAVE_POST_MODIFY_REG |
| #define HAVE_POST_MODIFY_REG 0 |
| #endif |
| |
| #ifndef HAVE_PRE_MODIFY_DISP |
| #define HAVE_PRE_MODIFY_DISP 0 |
| #endif |
| |
| #ifndef HAVE_PRE_MODIFY_REG |
| #define HAVE_PRE_MODIFY_REG 0 |
| #endif |
| |
| |
| /* Some architectures do not have complete pre/post increment/decrement |
| instruction sets, or only move some modes efficiently. These macros |
| allow us to tune autoincrement generation. */ |
| |
| #ifndef USE_LOAD_POST_INCREMENT |
| #define USE_LOAD_POST_INCREMENT(MODE) HAVE_POST_INCREMENT |
| #endif |
| |
| #ifndef USE_LOAD_POST_DECREMENT |
| #define USE_LOAD_POST_DECREMENT(MODE) HAVE_POST_DECREMENT |
| #endif |
| |
| #ifndef USE_LOAD_PRE_INCREMENT |
| #define USE_LOAD_PRE_INCREMENT(MODE) HAVE_PRE_INCREMENT |
| #endif |
| |
| #ifndef USE_LOAD_PRE_DECREMENT |
| #define USE_LOAD_PRE_DECREMENT(MODE) HAVE_PRE_DECREMENT |
| #endif |
| |
| #ifndef USE_STORE_POST_INCREMENT |
| #define USE_STORE_POST_INCREMENT(MODE) HAVE_POST_INCREMENT |
| #endif |
| |
| #ifndef USE_STORE_POST_DECREMENT |
| #define USE_STORE_POST_DECREMENT(MODE) HAVE_POST_DECREMENT |
| #endif |
| |
| #ifndef USE_STORE_PRE_INCREMENT |
| #define USE_STORE_PRE_INCREMENT(MODE) HAVE_PRE_INCREMENT |
| #endif |
| |
| #ifndef USE_STORE_PRE_DECREMENT |
| #define USE_STORE_PRE_DECREMENT(MODE) HAVE_PRE_DECREMENT |
| #endif |
| |
| /* Nonzero when we are generating CONCATs. */ |
| extern int generating_concat_p; |
| |
| /* Nonzero when we are expanding trees to RTL. */ |
| extern int currently_expanding_to_rtl; |
| |
| /* Generally useful functions. */ |
| |
| #ifndef GENERATOR_FILE |
| /* Return the cost of SET X. SPEED_P is true if optimizing for speed |
| rather than size. */ |
| |
| static inline int |
| set_rtx_cost (rtx x, bool speed_p) |
| { |
| return rtx_cost (x, VOIDmode, INSN, 4, speed_p); |
| } |
| |
| /* Like set_rtx_cost, but return both the speed and size costs in C. */ |
| |
| static inline void |
| get_full_set_rtx_cost (rtx x, struct full_rtx_costs *c) |
| { |
| get_full_rtx_cost (x, VOIDmode, INSN, 4, c); |
| } |
| |
| /* Return the cost of moving X into a register, relative to the cost |
| of a register move. SPEED_P is true if optimizing for speed rather |
| than size. */ |
| |
| static inline int |
| set_src_cost (rtx x, machine_mode mode, bool speed_p) |
| { |
| return rtx_cost (x, mode, SET, 1, speed_p); |
| } |
| |
| /* Like set_src_cost, but return both the speed and size costs in C. */ |
| |
| static inline void |
| get_full_set_src_cost (rtx x, machine_mode mode, struct full_rtx_costs *c) |
| { |
| get_full_rtx_cost (x, mode, SET, 1, c); |
| } |
| #endif |
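| |
| /* Usage sketch (editorial illustration): a simplifier keeping a |
|    rewritten source only when it is no more expensive; |
|    validate_change lives in recog.h and is shown here for flavor: |
| |
|      if (set_src_cost (new_src, mode, speed_p) |
|          <= set_src_cost (old_src, mode, speed_p)) |
|        validate_change (insn, &SET_SRC (set), new_src, false);  */ |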
| |
| /* A convenience macro to validate the arguments of a zero_extract |
| expression. It determines whether SIZE lies inclusively within |
| [1, RANGE], POS lies inclusively within [0, RANGE - 1], |
| and the sum lies inclusively within [1, RANGE]. RANGE must be |
| >= 1, but SIZE and POS may be negative. */ |
| #define EXTRACT_ARGS_IN_RANGE(SIZE, POS, RANGE) \ |
| (IN_RANGE ((POS), 0, (unsigned HOST_WIDE_INT) (RANGE) - 1) \ |
| && IN_RANGE ((SIZE), 1, (unsigned HOST_WIDE_INT) (RANGE) \ |
| - (unsigned HOST_WIDE_INT)(POS))) |
| |
| /* In explow.cc */ |
| extern HOST_WIDE_INT trunc_int_for_mode (HOST_WIDE_INT, machine_mode); |
| extern poly_int64 trunc_int_for_mode (poly_int64, machine_mode); |
| extern rtx plus_constant (machine_mode, rtx, poly_int64, bool = false); |
| extern HOST_WIDE_INT get_stack_check_protect (void); |
| |
| /* In rtl.cc */ |
| extern rtx rtx_alloc (RTX_CODE CXX_MEM_STAT_INFO); |
| inline rtx |
| rtx_init (rtx rt, RTX_CODE code) |
| { |
| memset (rt, 0, RTX_HDR_SIZE); |
| PUT_CODE (rt, code); |
| return rt; |
| } |
| #define rtx_alloca(code) \ |
| rtx_init ((rtx) alloca (RTX_CODE_SIZE ((code))), (code)) |
| extern rtx rtx_alloc_stat_v (RTX_CODE MEM_STAT_DECL, int); |
| #define rtx_alloc_v(c, SZ) rtx_alloc_stat_v (c MEM_STAT_INFO, SZ) |
| #define const_wide_int_alloc(NWORDS) \ |
| rtx_alloc_v (CONST_WIDE_INT, \ |
| (sizeof (struct hwivec_def) \ |
| + ((NWORDS)-1) * sizeof (HOST_WIDE_INT))) \ |
| |
| extern rtvec rtvec_alloc (size_t); |
| extern rtvec shallow_copy_rtvec (rtvec); |
| extern bool shared_const_p (const_rtx); |
| extern rtx copy_rtx (rtx); |
| extern enum rtx_code classify_insn (rtx); |
| extern void dump_rtx_statistics (void); |
| |
| /* In emit-rtl.cc */ |
| extern rtx copy_rtx_if_shared (rtx); |
| |
| /* In rtl.cc */ |
| extern unsigned int rtx_size (const_rtx); |
| extern rtx shallow_copy_rtx (const_rtx CXX_MEM_STAT_INFO); |
| extern int rtx_equal_p (const_rtx, const_rtx); |
| extern bool rtvec_all_equal_p (const_rtvec); |
| extern bool rtvec_series_p (rtvec, int); |
| |
| /* Return true if X is a vector constant with a duplicated element value. */ |
| |
| inline bool |
| const_vec_duplicate_p (const_rtx x) |
| { |
| return (GET_CODE (x) == CONST_VECTOR |
| && CONST_VECTOR_NPATTERNS (x) == 1 |
| && CONST_VECTOR_DUPLICATE_P (x)); |
| } |
| |
| /* Return true if X is a vector constant with a duplicated element value. |
| Store the duplicated element in *ELT if so. */ |
| |
| template <typename T> |
| inline bool |
| const_vec_duplicate_p (T x, T *elt) |
| { |
| if (const_vec_duplicate_p (x)) |
| { |
| *elt = CONST_VECTOR_ENCODED_ELT (x, 0); |
| return true; |
| } |
| return false; |
| } |
| |
| /* Return true if X is a vector with a duplicated element value, either |
| constant or nonconstant. Store the duplicated element in *ELT if so. */ |
| |
| template <typename T> |
| inline bool |
| vec_duplicate_p (T x, T *elt) |
| { |
| if (GET_CODE (x) == VEC_DUPLICATE |
| && !VECTOR_MODE_P (GET_MODE (XEXP (x, 0)))) |
| { |
| *elt = XEXP (x, 0); |
| return true; |
| } |
| return const_vec_duplicate_p (x, elt); |
| } |
| |
| /* If X is a vector constant with a duplicated element value, return that |
| element value, otherwise return X. */ |
| |
| template <typename T> |
| inline T |
| unwrap_const_vec_duplicate (T x) |
| { |
| if (const_vec_duplicate_p (x)) |
| x = CONST_VECTOR_ELT (x, 0); |
| return x; |
| } |
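| |
| /* Usage sketch (editorial illustration): reading a duplicated |
|    integer vector constant as its scalar element: |
| |
|      rtx elt; |
|      if (const_vec_duplicate_p (x, &elt) && CONST_INT_P (elt)) |
|        scalar_value = INTVAL (elt);  */ |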
| |
| /* In emit-rtl.cc. */ |
| extern wide_int const_vector_int_elt (const_rtx, unsigned int); |
| extern rtx const_vector_elt (const_rtx, unsigned int); |
| extern bool const_vec_series_p_1 (const_rtx, rtx *, rtx *); |
| |
| /* Return true if X is an integer constant vector that contains a linear |
| series of the form: |
| |
| { B, B + S, B + 2 * S, B + 3 * S, ... } |
| |
| for a nonzero S.  Store B and S in *BASE_OUT and *STEP_OUT on success. */ |
| |
| inline bool |
| const_vec_series_p (const_rtx x, rtx *base_out, rtx *step_out) |
| { |
| if (GET_CODE (x) == CONST_VECTOR |
| && CONST_VECTOR_NPATTERNS (x) == 1 |
| && !CONST_VECTOR_DUPLICATE_P (x)) |
| return const_vec_series_p_1 (x, base_out, step_out); |
| return false; |
| } |
| |
| /* Return true if X is a vector that contains a linear series of the |
| form: |
| |
| { B, B + S, B + 2 * S, B + 3 * S, ... } |
|