| /* Analyze RTL for GNU compiler. |
| Copyright (C) 1987-2022 Free Software Foundation, Inc. |
| |
| This file is part of GCC. |
| |
| GCC is free software; you can redistribute it and/or modify it under |
| the terms of the GNU General Public License as published by the Free |
| Software Foundation; either version 3, or (at your option) any later |
| version. |
| |
| GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
| WARRANTY; without even the implied warranty of MERCHANTABILITY or |
| FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
| for more details. |
| |
| You should have received a copy of the GNU General Public License |
| along with GCC; see the file COPYING3. If not see |
| <http://www.gnu.org/licenses/>. */ |
| |
| |
| #include "config.h" |
| #include "system.h" |
| #include "coretypes.h" |
| #include "backend.h" |
| #include "target.h" |
| #include "rtl.h" |
| #include "rtlanal.h" |
| #include "tree.h" |
| #include "predict.h" |
| #include "df.h" |
| #include "memmodel.h" |
| #include "tm_p.h" |
| #include "insn-config.h" |
| #include "regs.h" |
| #include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */ |
| #include "recog.h" |
| #include "addresses.h" |
| #include "rtl-iter.h" |
| #include "hard-reg-set.h" |
| #include "function-abi.h" |
| |
| /* Forward declarations.  */ |
| static void set_of_1 (rtx, const_rtx, void *); |
| static bool covers_regno_p (const_rtx, unsigned int); |
| static bool covers_regno_no_parallel_p (const_rtx, unsigned int); |
| static int computed_jump_p_1 (const_rtx); |
| static void parms_set (rtx, const_rtx, void *); |
| |
| static unsigned HOST_WIDE_INT cached_nonzero_bits (const_rtx, scalar_int_mode, |
| const_rtx, machine_mode, |
| unsigned HOST_WIDE_INT); |
| static unsigned HOST_WIDE_INT nonzero_bits1 (const_rtx, scalar_int_mode, |
| const_rtx, machine_mode, |
| unsigned HOST_WIDE_INT); |
| static unsigned int cached_num_sign_bit_copies (const_rtx, scalar_int_mode, |
| const_rtx, machine_mode, |
| unsigned int); |
| static unsigned int num_sign_bit_copies1 (const_rtx, scalar_int_mode, |
| const_rtx, machine_mode, |
| unsigned int); |
| |
| rtx_subrtx_bound_info rtx_all_subrtx_bounds[NUM_RTX_CODE]; |
| rtx_subrtx_bound_info rtx_nonconst_subrtx_bounds[NUM_RTX_CODE]; |
| |
| /* Truncation narrows the mode from SOURCE mode to DESTINATION mode. |
| If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is |
| SIGN_EXTEND then while narrowing we also have to enforce the |
| representation and sign-extend the value to mode DESTINATION_REP. |
| |
| If the value is already sign-extended to DESTINATION_REP mode we |
| can just switch to DESTINATION mode on it. For each pair of |
| integral modes SOURCE and DESTINATION, when truncating from SOURCE |
| to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION] |
| contains the number of high-order bits in SOURCE that have to be |
| copies of the sign-bit so that we can do this mode-switch to |
| DESTINATION. */ |
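| /* For example, truncating a DImode value to SImode when SImode values |
|    must be kept sign-extended in their registers is just a mode switch |
|    if the 33 high-order bits of the DImode value (the upper 32 bits |
|    plus the SImode sign bit) are all copies of the sign bit.  */ |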
| |
| static unsigned int |
| num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1]; |
| |
| /* Store X into index I of ARRAY. ARRAY is known to have at least I |
| elements. Return the new base of ARRAY. */ |
| |
| template <typename T> |
| typename T::value_type * |
| generic_subrtx_iterator <T>::add_single_to_queue (array_type &array, |
| value_type *base, |
| size_t i, value_type x) |
| { |
| if (base == array.stack) |
| { |
| if (i < LOCAL_ELEMS) |
| { |
| base[i] = x; |
| return base; |
| } |
| gcc_checking_assert (i == LOCAL_ELEMS); |
| /* A previous iteration might also have moved from the stack to the |
| heap, in which case the heap array will already be big enough. */ |
| if (vec_safe_length (array.heap) <= i) |
| vec_safe_grow (array.heap, i + 1, true); |
| base = array.heap->address (); |
| memcpy (base, array.stack, sizeof (array.stack)); |
| base[LOCAL_ELEMS] = x; |
| return base; |
| } |
| unsigned int length = array.heap->length (); |
| if (length > i) |
| { |
| gcc_checking_assert (base == array.heap->address ()); |
| base[i] = x; |
| return base; |
| } |
| else |
| { |
| gcc_checking_assert (i == length); |
| vec_safe_push (array.heap, x); |
| return array.heap->address (); |
| } |
| } |
| |
| /* Add the subrtxes of X to worklist ARRAY, starting at END. Return the |
| number of elements added to the worklist. */ |
| |
| template <typename T> |
| size_t |
| generic_subrtx_iterator <T>::add_subrtxes_to_queue (array_type &array, |
| value_type *base, |
| size_t end, rtx_type x) |
| { |
| enum rtx_code code = GET_CODE (x); |
| const char *format = GET_RTX_FORMAT (code); |
| size_t orig_end = end; |
| if (__builtin_expect (INSN_P (x), false)) |
| { |
| /* Put the pattern at the top of the queue, since that's what |
| we're likely to want most. It also allows for the SEQUENCE |
| code below. */ |
| for (int i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; --i) |
| if (format[i] == 'e') |
| { |
| value_type subx = T::get_value (x->u.fld[i].rt_rtx); |
| if (__builtin_expect (end < LOCAL_ELEMS, true)) |
| base[end++] = subx; |
| else |
| base = add_single_to_queue (array, base, end++, subx); |
| } |
| } |
| else |
| for (int i = 0; format[i]; ++i) |
| if (format[i] == 'e') |
| { |
| value_type subx = T::get_value (x->u.fld[i].rt_rtx); |
| if (__builtin_expect (end < LOCAL_ELEMS, true)) |
| base[end++] = subx; |
| else |
| base = add_single_to_queue (array, base, end++, subx); |
| } |
| else if (format[i] == 'E') |
| { |
| unsigned int length = GET_NUM_ELEM (x->u.fld[i].rt_rtvec); |
| rtx *vec = x->u.fld[i].rt_rtvec->elem; |
| if (__builtin_expect (end + length <= LOCAL_ELEMS, true)) |
| for (unsigned int j = 0; j < length; j++) |
| base[end++] = T::get_value (vec[j]); |
| else |
| for (unsigned int j = 0; j < length; j++) |
| base = add_single_to_queue (array, base, end++, |
| T::get_value (vec[j])); |
| if (code == SEQUENCE && end == length) |
| /* If the subrtxes of the sequence fill the entire array then |
| we know that no other parts of a containing insn are queued. |
| The caller is therefore iterating over the sequence as a |
| PATTERN (...), so we also want the patterns of the |
| subinstructions. */ |
| for (unsigned int j = 0; j < length; j++) |
| { |
| typename T::rtx_type x = T::get_rtx (base[j]); |
| if (INSN_P (x)) |
| base[j] = T::get_value (PATTERN (x)); |
| } |
| } |
| return end - orig_end; |
| } |
| |
| template <typename T> |
| void |
| generic_subrtx_iterator <T>::free_array (array_type &array) |
| { |
| vec_free (array.heap); |
| } |
| |
| template <typename T> |
| const size_t generic_subrtx_iterator <T>::LOCAL_ELEMS; |
| |
| template class generic_subrtx_iterator <const_rtx_accessor>; |
| template class generic_subrtx_iterator <rtx_var_accessor>; |
| template class generic_subrtx_iterator <rtx_ptr_accessor>; |
| |
| /* Return 1 if the value of X is unstable |
| (would be different at a different point in the program). |
| The frame pointer, arg pointer, etc. are considered stable |
| (within one function) and so is anything marked `unchanging'. */ |
| |
| int |
| rtx_unstable_p (const_rtx x) |
| { |
| const RTX_CODE code = GET_CODE (x); |
| int i; |
| const char *fmt; |
| |
| switch (code) |
| { |
| case MEM: |
| return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0)); |
| |
| case CONST: |
| CASE_CONST_ANY: |
| case SYMBOL_REF: |
| case LABEL_REF: |
| return 0; |
| |
| case REG: |
| /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */ |
| if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx |
| /* The arg pointer varies if it is not a fixed register. */ |
| || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])) |
| return 0; |
| /* ??? When call-clobbered, the value is stable modulo the restore |
| that must happen after a call. This currently screws up local-alloc |
| into believing that the restore is not needed. */ |
| if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED && x == pic_offset_table_rtx) |
| return 0; |
| return 1; |
| |
| case ASM_OPERANDS: |
| if (MEM_VOLATILE_P (x)) |
| return 1; |
| |
| /* Fall through. */ |
| |
| default: |
| break; |
| } |
| |
| fmt = GET_RTX_FORMAT (code); |
| for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
| if (fmt[i] == 'e') |
| { |
| if (rtx_unstable_p (XEXP (x, i))) |
| return 1; |
| } |
| else if (fmt[i] == 'E') |
| { |
| int j; |
| for (j = 0; j < XVECLEN (x, i); j++) |
| if (rtx_unstable_p (XVECEXP (x, i, j))) |
| return 1; |
| } |
| |
| return 0; |
| } |
| |
| /* Return 1 if X has a value that can vary even between two |
| executions of the program. 0 means X can be compared reliably |
| against certain constants or near-constants. |
| FOR_ALIAS is nonzero if we are called from alias analysis; if it is |
| zero, we are slightly more conservative. |
| The frame pointer and the arg pointer are considered constant. */ |
| |
| bool |
| rtx_varies_p (const_rtx x, bool for_alias) |
| { |
| RTX_CODE code; |
| int i; |
| const char *fmt; |
| |
| if (!x) |
| return 0; |
| |
| code = GET_CODE (x); |
| switch (code) |
| { |
| case MEM: |
| return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias); |
| |
| case CONST: |
| CASE_CONST_ANY: |
| case SYMBOL_REF: |
| case LABEL_REF: |
| return 0; |
| |
| case REG: |
| /* Note that we have to test for the actual rtx used for the frame |
| and arg pointers and not just the register number in case we have |
| eliminated the frame and/or arg pointer and are using it |
| for pseudos. */ |
| if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx |
| /* The arg pointer varies if it is not a fixed register. */ |
| || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])) |
| return 0; |
| if (x == pic_offset_table_rtx |
| /* ??? When call-clobbered, the value is stable modulo the restore |
| that must happen after a call. This currently screws up |
| local-alloc into believing that the restore is not needed, so we |
| must return 0 only if we are called from alias analysis. */ |
| && (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED || for_alias)) |
| return 0; |
| return 1; |
| |
| case LO_SUM: |
| /* The operand 0 of a LO_SUM is considered constant |
| (in fact it is related specifically to operand 1) |
| during alias analysis. */ |
| return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias)) |
| || rtx_varies_p (XEXP (x, 1), for_alias); |
| |
| case ASM_OPERANDS: |
| if (MEM_VOLATILE_P (x)) |
| return 1; |
| |
| /* Fall through. */ |
| |
| default: |
| break; |
| } |
| |
| fmt = GET_RTX_FORMAT (code); |
| for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
| if (fmt[i] == 'e') |
| { |
| if (rtx_varies_p (XEXP (x, i), for_alias)) |
| return 1; |
| } |
| else if (fmt[i] == 'E') |
| { |
| int j; |
| for (j = 0; j < XVECLEN (x, i); j++) |
| if (rtx_varies_p (XVECEXP (x, i, j), for_alias)) |
| return 1; |
| } |
| |
| return 0; |
| } |
| |
| /* Compute an approximation for the offset between the register |
| FROM and TO for the current function, as it was at the start |
| of the routine. */ |
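| /* For example, if ELIMINABLE_REGS maps the arg pointer to the frame |
|    pointer and the frame pointer to the stack pointer, the offset from |
|    the arg pointer to the stack pointer is obtained by adding the two |
|    individual INITIAL_ELIMINATION_OFFSET values.  */ |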
| |
| static poly_int64 |
| get_initial_register_offset (int from, int to) |
| { |
| static const struct elim_table_t |
| { |
| const int from; |
| const int to; |
| } table[] = ELIMINABLE_REGS; |
| poly_int64 offset1, offset2; |
| unsigned int i, j; |
| |
| if (to == from) |
| return 0; |
| |
| /* It is not safe to call INITIAL_ELIMINATION_OFFSET before the epilogue |
| is completed, but we need to give at least an estimate for the stack |
| pointer based on the frame size. */ |
| if (!epilogue_completed) |
| { |
| offset1 = crtl->outgoing_args_size + get_frame_size (); |
| #if !STACK_GROWS_DOWNWARD |
| offset1 = - offset1; |
| #endif |
| if (to == STACK_POINTER_REGNUM) |
| return offset1; |
| else if (from == STACK_POINTER_REGNUM) |
| return - offset1; |
| else |
| return 0; |
| } |
| |
| for (i = 0; i < ARRAY_SIZE (table); i++) |
| if (table[i].from == from) |
| { |
| if (table[i].to == to) |
| { |
| INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to, |
| offset1); |
| return offset1; |
| } |
| for (j = 0; j < ARRAY_SIZE (table); j++) |
| { |
| if (table[j].to == to |
| && table[j].from == table[i].to) |
| { |
| INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to, |
| offset1); |
| INITIAL_ELIMINATION_OFFSET (table[j].from, table[j].to, |
| offset2); |
| return offset1 + offset2; |
| } |
| if (table[j].from == to |
| && table[j].to == table[i].to) |
| { |
| INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to, |
| offset1); |
| INITIAL_ELIMINATION_OFFSET (table[j].from, table[j].to, |
| offset2); |
| return offset1 - offset2; |
| } |
| } |
| } |
| else if (table[i].to == from) |
| { |
| if (table[i].from == to) |
| { |
| INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to, |
| offset1); |
| return - offset1; |
| } |
| for (j = 0; j < ARRAY_SIZE (table); j++) |
| { |
| if (table[j].to == to |
| && table[j].from == table[i].from) |
| { |
| INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to, |
| offset1); |
| INITIAL_ELIMINATION_OFFSET (table[j].from, table[j].to, |
| offset2); |
| return - offset1 + offset2; |
| } |
| if (table[j].from == to |
| && table[j].to == table[i].from) |
| { |
| INITIAL_ELIMINATION_OFFSET (table[i].from, table[i].to, |
| offset1); |
| INITIAL_ELIMINATION_OFFSET (table[j].from, table[j].to, |
| offset2); |
| return - offset1 - offset2; |
| } |
| } |
| } |
| |
| /* If the requested register combination was not found, |
| try a simpler combination. */ |
| if (from == ARG_POINTER_REGNUM) |
| return get_initial_register_offset (HARD_FRAME_POINTER_REGNUM, to); |
| else if (to == ARG_POINTER_REGNUM) |
| return get_initial_register_offset (from, HARD_FRAME_POINTER_REGNUM); |
| else if (from == HARD_FRAME_POINTER_REGNUM) |
| return get_initial_register_offset (FRAME_POINTER_REGNUM, to); |
| else if (to == HARD_FRAME_POINTER_REGNUM) |
| return get_initial_register_offset (from, FRAME_POINTER_REGNUM); |
| else |
| return 0; |
| } |
| |
| /* Return nonzero if the use of X+OFFSET as an address in a MEM with SIZE |
| bytes can cause a trap. MODE is the mode of the MEM (not that of X) and |
| UNALIGNED_MEMS controls whether nonzero is returned for unaligned memory |
| references on strict alignment machines. */ |
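| /* For example, an address based on a weak SYMBOL_REF can trap because |
|    the symbol may resolve to address zero, whereas a frame-pointer-based |
|    address whose offset stays within the bounds of the current frame |
|    cannot.  */ |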
| |
| static int |
| rtx_addr_can_trap_p_1 (const_rtx x, poly_int64 offset, poly_int64 size, |
| machine_mode mode, bool unaligned_mems) |
| { |
| enum rtx_code code = GET_CODE (x); |
| gcc_checking_assert (mode == BLKmode |
| || mode == VOIDmode |
| || known_size_p (size)); |
| poly_int64 const_x1; |
| |
| /* The offset must be a multiple of the mode size if we are considering |
| unaligned memory references on strict alignment machines. */ |
| if (STRICT_ALIGNMENT |
| && unaligned_mems |
| && mode != BLKmode |
| && mode != VOIDmode) |
| { |
| poly_int64 actual_offset = offset; |
| |
| #ifdef SPARC_STACK_BOUNDARY_HACK |
| /* ??? The SPARC port may claim a STACK_BOUNDARY higher than |
| the real alignment of %sp. However, when it does this, the |
| alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */ |
| if (SPARC_STACK_BOUNDARY_HACK |
| && (x == stack_pointer_rtx || x == hard_frame_pointer_rtx)) |
| actual_offset -= STACK_POINTER_OFFSET; |
| #endif |
| |
| if (!multiple_p (actual_offset, GET_MODE_SIZE (mode))) |
| return 1; |
| } |
| |
| switch (code) |
| { |
| case SYMBOL_REF: |
| if (SYMBOL_REF_WEAK (x)) |
| return 1; |
| if (!CONSTANT_POOL_ADDRESS_P (x) && !SYMBOL_REF_FUNCTION_P (x)) |
| { |
| tree decl; |
| poly_int64 decl_size; |
| |
| if (maybe_lt (offset, 0)) |
| return 1; |
| if (!known_size_p (size)) |
| return maybe_ne (offset, 0); |
| |
| /* If the size of the access or of the symbol is unknown, |
| assume the worst. */ |
| decl = SYMBOL_REF_DECL (x); |
| |
| /* Else check that the access is in bounds. TODO: restructure |
| expr_size/tree_expr_size/int_expr_size and just use the latter. */ |
| if (!decl) |
| decl_size = -1; |
| else if (DECL_P (decl) && DECL_SIZE_UNIT (decl)) |
| { |
| if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &decl_size)) |
| decl_size = -1; |
| } |
| else if (TREE_CODE (decl) == STRING_CST) |
| decl_size = TREE_STRING_LENGTH (decl); |
| else if (TYPE_SIZE_UNIT (TREE_TYPE (decl))) |
| decl_size = int_size_in_bytes (TREE_TYPE (decl)); |
| else |
| decl_size = -1; |
| |
| return (!known_size_p (decl_size) || known_eq (decl_size, 0) |
| ? maybe_ne (offset, 0) |
| : !known_subrange_p (offset, size, 0, decl_size)); |
| } |
| |
| return 0; |
| |
| case LABEL_REF: |
| return 0; |
| |
| case REG: |
| /* Stack references are assumed not to trap, but we need to deal with |
| nonsensical offsets. */ |
| if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx |
| || x == stack_pointer_rtx |
| /* The arg pointer varies if it is not a fixed register. */ |
| || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])) |
| { |
| #ifdef RED_ZONE_SIZE |
| poly_int64 red_zone_size = RED_ZONE_SIZE; |
| #else |
| poly_int64 red_zone_size = 0; |
| #endif |
| poly_int64 stack_boundary = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT; |
| poly_int64 low_bound, high_bound; |
| |
| if (!known_size_p (size)) |
| return 1; |
| |
| if (x == frame_pointer_rtx) |
| { |
| if (FRAME_GROWS_DOWNWARD) |
| { |
| high_bound = targetm.starting_frame_offset (); |
| low_bound = high_bound - get_frame_size (); |
| } |
| else |
| { |
| low_bound = targetm.starting_frame_offset (); |
| high_bound = low_bound + get_frame_size (); |
| } |
| } |
| else if (x == hard_frame_pointer_rtx) |
| { |
| poly_int64 sp_offset |
| = get_initial_register_offset (STACK_POINTER_REGNUM, |
| HARD_FRAME_POINTER_REGNUM); |
| poly_int64 ap_offset |
| = get_initial_register_offset (ARG_POINTER_REGNUM, |
| HARD_FRAME_POINTER_REGNUM); |
| |
| #if STACK_GROWS_DOWNWARD |
| low_bound = sp_offset - red_zone_size - stack_boundary; |
| high_bound = ap_offset |
| + FIRST_PARM_OFFSET (current_function_decl) |
| #if !ARGS_GROW_DOWNWARD |
| + crtl->args.size |
| #endif |
| + stack_boundary; |
| #else |
| high_bound = sp_offset + red_zone_size + stack_boundary; |
| low_bound = ap_offset |
| + FIRST_PARM_OFFSET (current_function_decl) |
| #if ARGS_GROW_DOWNWARD |
| - crtl->args.size |
| #endif |
| - stack_boundary; |
| #endif |
| } |
| else if (x == stack_pointer_rtx) |
| { |
| poly_int64 ap_offset |
| = get_initial_register_offset (ARG_POINTER_REGNUM, |
| STACK_POINTER_REGNUM); |
| |
| #if STACK_GROWS_DOWNWARD |
| low_bound = - red_zone_size - stack_boundary; |
| high_bound = ap_offset |
| + FIRST_PARM_OFFSET (current_function_decl) |
| #if !ARGS_GROW_DOWNWARD |
| + crtl->args.size |
| #endif |
| + stack_boundary; |
| #else |
| high_bound = red_zone_size + stack_boundary; |
| low_bound = ap_offset |
| + FIRST_PARM_OFFSET (current_function_decl) |
| #if ARGS_GROW_DOWNWARD |
| - crtl->args.size |
| #endif |
| - stack_boundary; |
| #endif |
| } |
| else |
| { |
| /* We assume that accesses are safe to at least the |
| next stack boundary. |
| Examples are varargs and __builtin_return_address. */ |
| #if ARGS_GROW_DOWNWARD |
| high_bound = FIRST_PARM_OFFSET (current_function_decl) |
| + stack_boundary; |
| low_bound = FIRST_PARM_OFFSET (current_function_decl) |
| - crtl->args.size - stack_boundary; |
| #else |
| low_bound = FIRST_PARM_OFFSET (current_function_decl) |
| - stack_boundary; |
| high_bound = FIRST_PARM_OFFSET (current_function_decl) |
| + crtl->args.size + stack_boundary; |
| #endif |
| } |
| |
| if (known_ge (offset, low_bound) |
| && known_le (offset, high_bound - size)) |
| return 0; |
| return 1; |
| } |
| /* All of the virtual frame registers are stack references. */ |
| if (REGNO (x) >= FIRST_VIRTUAL_REGISTER |
| && REGNO (x) <= LAST_VIRTUAL_REGISTER) |
| return 0; |
| return 1; |
| |
| case CONST: |
| return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size, |
| mode, unaligned_mems); |
| |
| case PLUS: |
| /* An address is assumed not to trap if: |
| - it is the pic register plus a const unspec without offset. */ |
| if (XEXP (x, 0) == pic_offset_table_rtx |
| && GET_CODE (XEXP (x, 1)) == CONST |
| && GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC |
| && known_eq (offset, 0)) |
| return 0; |
| |
| /* - or it is an address that can't trap plus a constant integer. */ |
| if (poly_int_rtx_p (XEXP (x, 1), &const_x1) |
| && !rtx_addr_can_trap_p_1 (XEXP (x, 0), offset + const_x1, |
| size, mode, unaligned_mems)) |
| return 0; |
| |
| return 1; |
| |
| case LO_SUM: |
| case PRE_MODIFY: |
| return rtx_addr_can_trap_p_1 (XEXP (x, 1), offset, size, |
| mode, unaligned_mems); |
| |
| case PRE_DEC: |
| case PRE_INC: |
| case POST_DEC: |
| case POST_INC: |
| case POST_MODIFY: |
| return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size, |
| mode, unaligned_mems); |
| |
| default: |
| break; |
| } |
| |
| /* If it isn't one of the cases above, it can cause a trap. */ |
| return 1; |
| } |
| |
| /* Return nonzero if the use of X as an address in a MEM can cause a trap. */ |
| |
| int |
| rtx_addr_can_trap_p (const_rtx x) |
| { |
| return rtx_addr_can_trap_p_1 (x, 0, -1, BLKmode, false); |
| } |
| |
| /* Return true if X contains a MEM subrtx. */ |
| |
| bool |
| contains_mem_rtx_p (rtx x) |
| { |
| subrtx_iterator::array_type array; |
| FOR_EACH_SUBRTX (iter, array, x, ALL) |
| if (MEM_P (*iter)) |
| return true; |
| |
| return false; |
| } |
| |
| /* Return true if X is an address that is known to not be zero. */ |
| |
| bool |
| nonzero_address_p (const_rtx x) |
| { |
| const enum rtx_code code = GET_CODE (x); |
| |
| switch (code) |
| { |
| case SYMBOL_REF: |
| return flag_delete_null_pointer_checks && !SYMBOL_REF_WEAK (x); |
| |
| case LABEL_REF: |
| return true; |
| |
| case REG: |
| /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */ |
| if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx |
| || x == stack_pointer_rtx |
| || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])) |
| return true; |
| /* All of the virtual frame registers are stack references. */ |
| if (REGNO (x) >= FIRST_VIRTUAL_REGISTER |
| && REGNO (x) <= LAST_VIRTUAL_REGISTER) |
| return true; |
| return false; |
| |
| case CONST: |
| return nonzero_address_p (XEXP (x, 0)); |
| |
| case PLUS: |
| /* Handle PIC references. */ |
| if (XEXP (x, 0) == pic_offset_table_rtx |
| && CONSTANT_P (XEXP (x, 1))) |
| return true; |
| return false; |
| |
| case PRE_MODIFY: |
| /* Similar to the above; allow positive offsets. Further, since |
| auto-inc is only allowed in memories, the register must be a |
| pointer. */ |
| if (CONST_INT_P (XEXP (x, 1)) |
| && INTVAL (XEXP (x, 1)) > 0) |
| return true; |
| return nonzero_address_p (XEXP (x, 0)); |
| |
| case PRE_INC: |
| /* Similarly. Further, the offset is always positive. */ |
| return true; |
| |
| case PRE_DEC: |
| case POST_DEC: |
| case POST_INC: |
| case POST_MODIFY: |
| return nonzero_address_p (XEXP (x, 0)); |
| |
| case LO_SUM: |
| return nonzero_address_p (XEXP (x, 1)); |
| |
| default: |
| break; |
| } |
| |
| /* If it isn't one of the cases above, it might be zero. */ |
| return false; |
| } |
| |
| /* Return 1 if X refers to a memory location whose address |
| cannot be compared reliably with constant addresses, |
| or if X refers to a BLKmode memory object. |
| FOR_ALIAS is nonzero if we are called from alias analysis; if it is |
| zero, we are slightly more conservative. */ |
| |
| bool |
| rtx_addr_varies_p (const_rtx x, bool for_alias) |
| { |
| enum rtx_code code; |
| int i; |
| const char *fmt; |
| |
| if (x == 0) |
| return 0; |
| |
| code = GET_CODE (x); |
| if (code == MEM) |
| return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias); |
| |
| fmt = GET_RTX_FORMAT (code); |
| for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
| if (fmt[i] == 'e') |
| { |
| if (rtx_addr_varies_p (XEXP (x, i), for_alias)) |
| return 1; |
| } |
| else if (fmt[i] == 'E') |
| { |
| int j; |
| for (j = 0; j < XVECLEN (x, i); j++) |
| if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias)) |
| return 1; |
| } |
| return 0; |
| } |
| |
| /* Return the CALL in INSN if there is one. */ |
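| /* The pattern of a CALL_INSN is either a bare CALL, a SET whose SET_SRC |
|    is the CALL (when the call returns a value), or a PARALLEL whose first |
|    element has one of those two forms, so it is enough to peel off at |
|    most one PARALLEL and one SET wrapper to reach the CALL.  */ |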
| |
| rtx |
| get_call_rtx_from (const rtx_insn *insn) |
| { |
| rtx x = PATTERN (insn); |
| if (GET_CODE (x) == PARALLEL) |
| x = XVECEXP (x, 0, 0); |
| if (GET_CODE (x) == SET) |
| x = SET_SRC (x); |
| if (GET_CODE (x) == CALL && MEM_P (XEXP (x, 0))) |
| return x; |
| return NULL_RTX; |
| } |
| |
| /* Get the declaration of the function called by INSN. */ |
| |
| tree |
| get_call_fndecl (const rtx_insn *insn) |
| { |
| rtx note, datum; |
| |
| note = find_reg_note (insn, REG_CALL_DECL, NULL_RTX); |
| if (note == NULL_RTX) |
| return NULL_TREE; |
| |
| datum = XEXP (note, 0); |
| if (datum != NULL_RTX) |
| return SYMBOL_REF_DECL (datum); |
| |
| return NULL_TREE; |
| } |
| |
| /* Return the value of the integer term in X, if one is apparent; |
| otherwise return 0. |
| Only obvious integer terms are detected. |
| This is used in cse.cc with the `related_value' field. */ |
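| /* For example, for (const (plus (symbol_ref "foo") (const_int 12))) the |
|    result is 12, and for (const (minus (symbol_ref "foo") (const_int 4))) |
|    it is -4.  */ |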
| |
| HOST_WIDE_INT |
| get_integer_term (const_rtx x) |
| { |
| if (GET_CODE (x) == CONST) |
| x = XEXP (x, 0); |
| |
| if (GET_CODE (x) == MINUS |
| && CONST_INT_P (XEXP (x, 1))) |
| return - INTVAL (XEXP (x, 1)); |
| if (GET_CODE (x) == PLUS |
| && CONST_INT_P (XEXP (x, 1))) |
| return INTVAL (XEXP (x, 1)); |
| return 0; |
| } |
| |
| /* If X is a constant, return the value sans apparent integer term; |
| otherwise return 0. |
| Only obvious integer terms are detected. */ |
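| /* For example, for (const (plus (symbol_ref "foo") (const_int 12))) the |
|    result is (symbol_ref "foo"); for a bare (symbol_ref "foo") the result |
|    is 0 because there is no CONST wrapper.  */ |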
| |
| rtx |
| get_related_value (const_rtx x) |
| { |
| if (GET_CODE (x) != CONST) |
| return 0; |
| x = XEXP (x, 0); |
| if (GET_CODE (x) == PLUS |
| && CONST_INT_P (XEXP (x, 1))) |
| return XEXP (x, 0); |
| else if (GET_CODE (x) == MINUS |
| && CONST_INT_P (XEXP (x, 1))) |
| return XEXP (x, 0); |
| return 0; |
| } |
| |
| /* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points |
| to somewhere in the same object or object_block as SYMBOL. */ |
| |
| bool |
| offset_within_block_p (const_rtx symbol, HOST_WIDE_INT offset) |
| { |
| tree decl; |
| |
| if (GET_CODE (symbol) != SYMBOL_REF) |
| return false; |
| |
| if (offset == 0) |
| return true; |
| |
| if (offset > 0) |
| { |
| if (CONSTANT_POOL_ADDRESS_P (symbol) |
| && offset < (int) GET_MODE_SIZE (get_pool_mode (symbol))) |
| return true; |
| |
| decl = SYMBOL_REF_DECL (symbol); |
| if (decl && offset < int_size_in_bytes (TREE_TYPE (decl))) |
| return true; |
| } |
| |
| if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol) |
| && SYMBOL_REF_BLOCK (symbol) |
| && SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0 |
| && ((unsigned HOST_WIDE_INT) offset + SYMBOL_REF_BLOCK_OFFSET (symbol) |
| < (unsigned HOST_WIDE_INT) SYMBOL_REF_BLOCK (symbol)->size)) |
| return true; |
| |
| return false; |
| } |
| |
| /* Split X into a base and a constant offset, storing them in *BASE_OUT |
| and *OFFSET_OUT respectively. */ |
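| /* For example, (const (plus (symbol_ref "foo") (const_int 8))) is split |
|    into base (symbol_ref "foo") and offset (const_int 8); otherwise the |
|    base is X itself (with any CONST wrapper stripped) and the offset is |
|    const0_rtx.  */ |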
| |
| void |
| split_const (rtx x, rtx *base_out, rtx *offset_out) |
| { |
| if (GET_CODE (x) == CONST) |
| { |
| x = XEXP (x, 0); |
| if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1))) |
| { |
| *base_out = XEXP (x, 0); |
| *offset_out = XEXP (x, 1); |
| return; |
| } |
| } |
| *base_out = x; |
| *offset_out = const0_rtx; |
| } |
| |
| /* Express integer value X as some value Y plus a polynomial offset, |
| where Y is either const0_rtx, X or something within X (as opposed |
| to a new rtx). Return the Y and store the offset in *OFFSET_OUT. */ |
| |
| rtx |
| strip_offset (rtx x, poly_int64_pod *offset_out) |
| { |
| rtx base = const0_rtx; |
| rtx test = x; |
| if (GET_CODE (test) == CONST) |
| test = XEXP (test, 0); |
| if (GET_CODE (test) == PLUS) |
| { |
| base = XEXP (test, 0); |
| test = XEXP (test, 1); |
| } |
| if (poly_int_rtx_p (test, offset_out)) |
| return base; |
| *offset_out = 0; |
| return x; |
| } |
| |
| /* Return the argument size in REG_ARGS_SIZE note X. */ |
| |
| poly_int64 |
| get_args_size (const_rtx x) |
| { |
| gcc_checking_assert (REG_NOTE_KIND (x) == REG_ARGS_SIZE); |
| return rtx_to_poly_int64 (XEXP (x, 0)); |
| } |
| |
| /* Return the number of places FIND appears within X. If COUNT_DEST is |
| zero, we do not count occurrences inside the destination of a SET. */ |
| |
| int |
| count_occurrences (const_rtx x, const_rtx find, int count_dest) |
| { |
| int i, j; |
| enum rtx_code code; |
| const char *format_ptr; |
| int count; |
| |
| if (x == find) |
| return 1; |
| |
| code = GET_CODE (x); |
| |
| switch (code) |
| { |
| case REG: |
| CASE_CONST_ANY: |
| case SYMBOL_REF: |
| case CODE_LABEL: |
| case PC: |
| return 0; |
| |
| case EXPR_LIST: |
| count = count_occurrences (XEXP (x, 0), find, count_dest); |
| if (XEXP (x, 1)) |
| count += count_occurrences (XEXP (x, 1), find, count_dest); |
| return count; |
| |
| case MEM: |
| if (MEM_P (find) && rtx_equal_p (x, find)) |
| return 1; |
| break; |
| |
| case SET: |
| if (SET_DEST (x) == find && ! count_dest) |
| return count_occurrences (SET_SRC (x), find, count_dest); |
| break; |
| |
| default: |
| break; |
| } |
| |
| format_ptr = GET_RTX_FORMAT (code); |
| count = 0; |
| |
| for (i = 0; i < GET_RTX_LENGTH (code); i++) |
| { |
| switch (*format_ptr++) |
| { |
| case 'e': |
| count += count_occurrences (XEXP (x, i), find, count_dest); |
| break; |
| |
| case 'E': |
| for (j = 0; j < XVECLEN (x, i); j++) |
| count += count_occurrences (XVECEXP (x, i, j), find, count_dest); |
| break; |
| } |
| } |
| return count; |
| } |
| |
| |
| /* Return TRUE if OP is a register or subreg of a register that |
| holds an unsigned quantity. Otherwise, return FALSE. */ |
| |
| bool |
| unsigned_reg_p (rtx op) |
| { |
| if (REG_P (op) |
| && REG_EXPR (op) |
| && TYPE_UNSIGNED (TREE_TYPE (REG_EXPR (op)))) |
| return true; |
| |
| if (GET_CODE (op) == SUBREG |
| && SUBREG_PROMOTED_SIGN (op)) |
| return true; |
| |
| return false; |
| } |
| |
| |
| /* Nonzero if register REG appears somewhere within IN. |
| Also works if REG is not a register; in this case it checks |
| for a subexpression of IN that is Lisp "equal" to REG. */ |
| |
| int |
| reg_mentioned_p (const_rtx reg, const_rtx in) |
| { |
| const char *fmt; |
| int i; |
| enum rtx_code code; |
| |
| if (in == 0) |
| return 0; |
| |
| if (reg == in) |
| return 1; |
| |
| if (GET_CODE (in) == LABEL_REF) |
| return reg == label_ref_label (in); |
| |
| code = GET_CODE (in); |
| |
| switch (code) |
| { |
| /* Compare registers by number. */ |
| case REG: |
| return REG_P (reg) && REGNO (in) == REGNO (reg); |
| |
| /* These codes have no constituent expressions |
| and are unique. */ |
| case SCRATCH: |
| case PC: |
| return 0; |
| |
| CASE_CONST_ANY: |
| /* These are kept unique for a given value. */ |
| return 0; |
| |
| default: |
| break; |
| } |
| |
| if (GET_CODE (reg) == code && rtx_equal_p (reg, in)) |
| return 1; |
| |
| fmt = GET_RTX_FORMAT (code); |
| |
| for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
| { |
| if (fmt[i] == 'E') |
| { |
| int j; |
| for (j = XVECLEN (in, i) - 1; j >= 0; j--) |
| if (reg_mentioned_p (reg, XVECEXP (in, i, j))) |
| return 1; |
| } |
| else if (fmt[i] == 'e' |
| && reg_mentioned_p (reg, XEXP (in, i))) |
| return 1; |
| } |
| return 0; |
| } |
| |
| /* Return 1 if in between BEG and END, exclusive of BEG and END, there is |
| no CODE_LABEL insn. */ |
| |
| int |
| no_labels_between_p (const rtx_insn *beg, const rtx_insn *end) |
| { |
| rtx_insn *p; |
| if (beg == end) |
| return 0; |
| for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p)) |
| if (LABEL_P (p)) |
| return 0; |
| return 1; |
| } |
| |
| /* Nonzero if register REG is used in an insn between |
| FROM_INSN and TO_INSN (exclusive of those two). */ |
| |
| int |
| reg_used_between_p (const_rtx reg, const rtx_insn *from_insn, |
| const rtx_insn *to_insn) |
| { |
| rtx_insn *insn; |
| |
| if (from_insn == to_insn) |
| return 0; |
| |
| for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn)) |
| if (NONDEBUG_INSN_P (insn) |
| && (reg_overlap_mentioned_p (reg, PATTERN (insn)) |
| || (CALL_P (insn) && find_reg_fusage (insn, USE, reg)))) |
| return 1; |
| return 0; |
| } |
| |
| /* Nonzero if the old value of X, a register, is referenced in BODY. If X |
| is entirely replaced by a new value and the only use is as a SET_DEST, |
| we do not consider it a reference. */ |
| |
| int |
| reg_referenced_p (const_rtx x, const_rtx body) |
| { |
| int i; |
| |
| switch (GET_CODE (body)) |
| { |
| case SET: |
| if (reg_overlap_mentioned_p (x, SET_SRC (body))) |
| return 1; |
| |
| /* If the destination is anything other than PC, a REG or a SUBREG |
| of a REG that occupies all of the REG, the insn references X if |
| it is mentioned in the destination. */ |
| if (GET_CODE (SET_DEST (body)) != PC |
| && !REG_P (SET_DEST (body)) |
| && ! (GET_CODE (SET_DEST (body)) == SUBREG |
| && REG_P (SUBREG_REG (SET_DEST (body))) |
| && !read_modify_subreg_p (SET_DEST (body))) |
| && reg_overlap_mentioned_p (x, SET_DEST (body))) |
| return 1; |
| return 0; |
| |
| case ASM_OPERANDS: |
| for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--) |
| if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i))) |
| return 1; |
| return 0; |
| |
| case CALL: |
| case USE: |
| case IF_THEN_ELSE: |
| return reg_overlap_mentioned_p (x, body); |
| |
| case TRAP_IF: |
| return reg_overlap_mentioned_p (x, TRAP_CONDITION (body)); |
| |
| case PREFETCH: |
| return reg_overlap_mentioned_p (x, XEXP (body, 0)); |
| |
| case UNSPEC: |
| case UNSPEC_VOLATILE: |
| for (i = XVECLEN (body, 0) - 1; i >= 0; i--) |
| if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i))) |
| return 1; |
| return 0; |
| |
| case PARALLEL: |
| for (i = XVECLEN (body, 0) - 1; i >= 0; i--) |
| if (reg_referenced_p (x, XVECEXP (body, 0, i))) |
| return 1; |
| return 0; |
| |
| case CLOBBER: |
| if (MEM_P (XEXP (body, 0))) |
| if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0))) |
| return 1; |
| return 0; |
| |
| case COND_EXEC: |
| if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body))) |
| return 1; |
| return reg_referenced_p (x, COND_EXEC_CODE (body)); |
| |
| default: |
| return 0; |
| } |
| } |
| |
| /* Nonzero if register REG is set or clobbered in an insn between |
| FROM_INSN and TO_INSN (exclusive of those two). */ |
| |
| int |
| reg_set_between_p (const_rtx reg, const rtx_insn *from_insn, |
| const rtx_insn *to_insn) |
| { |
| const rtx_insn *insn; |
| |
| if (from_insn == to_insn) |
| return 0; |
| |
| for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn)) |
| if (INSN_P (insn) && reg_set_p (reg, insn)) |
| return 1; |
| return 0; |
| } |
| |
| /* Return true if REG is set or clobbered inside INSN. */ |
| |
| int |
| reg_set_p (const_rtx reg, const_rtx insn) |
| { |
| /* After delay slot handling, call and branch insns might be in a |
| sequence. Check all the elements there. */ |
| if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE) |
| { |
| for (int i = 0; i < XVECLEN (PATTERN (insn), 0); ++i) |
| if (reg_set_p (reg, XVECEXP (PATTERN (insn), 0, i))) |
| return true; |
| |
| return false; |
| } |
| |
| /* We can be passed an insn or part of one. If we are passed an insn, |
| check if a side-effect of the insn clobbers REG. */ |
| if (INSN_P (insn) |
| && (FIND_REG_INC_NOTE (insn, reg) |
| || (CALL_P (insn) |
| && ((REG_P (reg) |
| && REGNO (reg) < FIRST_PSEUDO_REGISTER |
| && (insn_callee_abi (as_a<const rtx_insn *> (insn)) |
| .clobbers_reg_p (GET_MODE (reg), REGNO (reg)))) |
| || MEM_P (reg) |
| || find_reg_fusage (insn, CLOBBER, reg))))) |
| return true; |
| |
| /* There are no REG_INC notes for SP autoinc. */ |
| if (reg == stack_pointer_rtx && INSN_P (insn)) |
| { |
| subrtx_var_iterator::array_type array; |
| FOR_EACH_SUBRTX_VAR (iter, array, PATTERN (insn), NONCONST) |
| { |
| rtx mem = *iter; |
| if (mem |
| && MEM_P (mem) |
| && GET_RTX_CLASS (GET_CODE (XEXP (mem, 0))) == RTX_AUTOINC) |
| { |
| if (XEXP (XEXP (mem, 0), 0) == stack_pointer_rtx) |
| return true; |
| iter.skip_subrtxes (); |
| } |
| } |
| } |
| |
| return set_of (reg, insn) != NULL_RTX; |
| } |
| |
| /* Similar to reg_set_between_p, but check all registers in X. Return 0 |
| only if none of them are modified between START and END. Return 1 if |
| X contains a MEM; this routine does use memory aliasing. */ |
| |
| int |
| modified_between_p (const_rtx x, const rtx_insn *start, const rtx_insn *end) |
| { |
| const enum rtx_code code = GET_CODE (x); |
| const char *fmt; |
| int i, j; |
| rtx_insn *insn; |
| |
| if (start == end) |
| return 0; |
| |
| switch (code) |
| { |
| CASE_CONST_ANY: |
| case CONST: |
| case SYMBOL_REF: |
| case LABEL_REF: |
| return 0; |
| |
| case PC: |
| return 1; |
| |
| case MEM: |
| if (modified_between_p (XEXP (x, 0), start, end)) |
| return 1; |
| if (MEM_READONLY_P (x)) |
| return 0; |
| for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn)) |
| if (memory_modified_in_insn_p (x, insn)) |
| return 1; |
| return 0; |
| |
| case REG: |
| return reg_set_between_p (x, start, end); |
| |
| default: |
| break; |
| } |
| |
| fmt = GET_RTX_FORMAT (code); |
| for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
| { |
| if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end)) |
| return 1; |
| |
| else if (fmt[i] == 'E') |
| for (j = XVECLEN (x, i) - 1; j >= 0; j--) |
| if (modified_between_p (XVECEXP (x, i, j), start, end)) |
| return 1; |
| } |
| |
| return 0; |
| } |
| |
| /* Similar to reg_set_p, but check all registers in X. Return 0 only if none |
| of them are modified in INSN. Return 1 if X contains a MEM; this routine |
| does use memory aliasing. */ |
| |
| int |
| modified_in_p (const_rtx x, const_rtx insn) |
| { |
| const enum rtx_code code = GET_CODE (x); |
| const char *fmt; |
| int i, j; |
| |
| switch (code) |
| { |
| CASE_CONST_ANY: |
| case CONST: |
| case SYMBOL_REF: |
| case LABEL_REF: |
| return 0; |
| |
| case PC: |
| return 1; |
| |
| case MEM: |
| if (modified_in_p (XEXP (x, 0), insn)) |
| return 1; |
| if (MEM_READONLY_P (x)) |
| return 0; |
| if (memory_modified_in_insn_p (x, insn)) |
| return 1; |
| return 0; |
| |
| case REG: |
| return reg_set_p (x, insn); |
| |
| default: |
| break; |
| } |
| |
| fmt = GET_RTX_FORMAT (code); |
| for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
| { |
| if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn)) |
| return 1; |
| |
| else if (fmt[i] == 'E') |
| for (j = XVECLEN (x, i) - 1; j >= 0; j--) |
| if (modified_in_p (XVECEXP (x, i, j), insn)) |
| return 1; |
| } |
| |
| return 0; |
| } |
| |
| /* Return true if X is a SUBREG and if storing a value to X would |
| preserve some of its SUBREG_REG. For example, on a normal 32-bit |
| target, using a SUBREG to store to one half of a DImode REG would |
| preserve the other half. */ |
| |
| bool |
| read_modify_subreg_p (const_rtx x) |
| { |
| if (GET_CODE (x) != SUBREG) |
| return false; |
| poly_uint64 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))); |
| poly_uint64 osize = GET_MODE_SIZE (GET_MODE (x)); |
| poly_uint64 regsize = REGMODE_NATURAL_SIZE (GET_MODE (SUBREG_REG (x))); |
| /* The inner and outer modes of a subreg must be ordered, so that we |
| can tell whether they're paradoxical or partial. */ |
| gcc_checking_assert (ordered_p (isize, osize)); |
| return (maybe_gt (isize, osize) && maybe_gt (isize, regsize)); |
| } |
| |
| /* Helper function for set_of. */ |
| struct set_of_data |
| { |
| const_rtx found; |
| const_rtx pat; |
| }; |
| |
| static void |
| set_of_1 (rtx x, const_rtx pat, void *data1) |
| { |
| struct set_of_data *const data = (struct set_of_data *) (data1); |
| if (rtx_equal_p (x, data->pat) |
| || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x))) |
| data->found = pat; |
| } |
| |
| /* Given an INSN, return a SET or CLOBBER expression that modifies PAT |
| (either directly or via STRICT_LOW_PART and similar modifiers). */ |
| const_rtx |
| set_of (const_rtx pat, const_rtx insn) |
| { |
| struct set_of_data data; |
| data.found = NULL_RTX; |
| data.pat = pat; |
| note_pattern_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data); |
| return data.found; |
| } |
| |
| /* Check whether instruction pattern PAT contains a SET with the following |
| properties: |
| |
| - the SET is executed unconditionally; and |
| - either: |
| - the destination of the SET is a REG that contains REGNO; or |
| - both: |
| - the destination of the SET is a SUBREG of such a REG; and |
| - writing to the subreg clobbers all of the SUBREG_REG |
| (in other words, read_modify_subreg_p is false). |
| |
| If PAT does have a SET like that, return the set, otherwise return null. |
| |
| This is intended to be an alternative to single_set for passes that |
| can handle patterns with multiple_sets. */ |
| rtx |
| simple_regno_set (rtx pat, unsigned int regno) |
| { |
| if (GET_CODE (pat) == PARALLEL) |
| { |
| int last = XVECLEN (pat, 0) - 1; |
| for (int i = 0; i < last; ++i) |
| if (rtx set = simple_regno_set (XVECEXP (pat, 0, i), regno)) |
| return set; |
| |
| pat = XVECEXP (pat, 0, last); |
| } |
| |
| if (GET_CODE (pat) == SET |
| && covers_regno_no_parallel_p (SET_DEST (pat), regno)) |
| return pat; |
| |
| return nullptr; |
| } |
| |
| /* Add all hard registers in X to *PSET. */ |
| void |
| find_all_hard_regs (const_rtx x, HARD_REG_SET *pset) |
| { |
| subrtx_iterator::array_type array; |
| FOR_EACH_SUBRTX (iter, array, x, NONCONST) |
| { |
| const_rtx x = *iter; |
| if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER) |
| add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x)); |
| } |
| } |
| |
| /* This function, called through note_stores, collects sets and |
| clobbers of hard registers in a HARD_REG_SET, which is pointed to |
| by DATA. */ |
| void |
| record_hard_reg_sets (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data) |
| { |
| HARD_REG_SET *pset = (HARD_REG_SET *)data; |
| if (REG_P (x) && HARD_REGISTER_P (x)) |
| add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x)); |
| } |
| |
| /* Examine INSN, and compute the set of hard registers written by it. |
| Store it in *PSET. Should only be called after reload. |
| |
| IMPLICIT is true if we should include registers that are fully-clobbered |
| by calls. This should be used with caution, since it doesn't include |
| partially-clobbered registers. */ |
| void |
| find_all_hard_reg_sets (const rtx_insn *insn, HARD_REG_SET *pset, bool implicit) |
| { |
| rtx link; |
| |
| CLEAR_HARD_REG_SET (*pset); |
| note_stores (insn, record_hard_reg_sets, pset); |
| if (CALL_P (insn) && implicit) |
| *pset |= insn_callee_abi (insn).full_reg_clobbers (); |
| for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) |
| if (REG_NOTE_KIND (link) == REG_INC) |
| record_hard_reg_sets (XEXP (link, 0), NULL, pset); |
| } |
| |
| /* Like record_hard_reg_sets, but called through note_uses. */ |
| void |
| record_hard_reg_uses (rtx *px, void *data) |
| { |
| find_all_hard_regs (*px, (HARD_REG_SET *) data); |
| } |
| |
| /* Given an INSN, return a SET expression if this insn has only a single SET. |
| It may also have CLOBBERs, USEs, or SETs whose outputs |
| will not be used, which we ignore. */ |
| |
| rtx |
| single_set_2 (const rtx_insn *insn, const_rtx pat) |
| { |
| rtx set = NULL; |
| int set_verified = 1; |
| int i; |
| |
| if (GET_CODE (pat) == PARALLEL) |
| { |
| for (i = 0; i < XVECLEN (pat, 0); i++) |
| { |
| rtx sub = XVECEXP (pat, 0, i); |
| switch (GET_CODE (sub)) |
| { |
| case USE: |
| case CLOBBER: |
| break; |
| |
| case SET: |
| /* We can treat insns that have multiple sets, all but one of |
| which are dead, as single set insns. In the common case only |
| a single set is present in the pattern, so we want to avoid |
| checking for REG_UNUSED notes unless necessary. |
| |
| When we reach a set for the first time, we just assume it is |
| the single set we are looking for; only when more sets are |
| found in the insn do we check them. */ |
| if (!set_verified) |
| { |
| if (find_reg_note (insn, REG_UNUSED, SET_DEST (set)) |
| && !side_effects_p (set)) |
| set = NULL; |
| else |
| set_verified = 1; |
| } |
| if (!set) |
| set = sub, set_verified = 0; |
| else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub)) |
| || side_effects_p (sub)) |
| return NULL_RTX; |
| break; |
| |
| default: |
| return NULL_RTX; |
| } |
| } |
| } |
| return set; |
| } |
| |
| /* Given an INSN, return nonzero if it has more than one SET, else return |
| zero. */ |
| |
| int |
| multiple_sets (const_rtx insn) |
| { |
| int found; |
| int i; |
| |
| /* INSN must be an insn. */ |
| if (! INSN_P (insn)) |
| return 0; |
| |
| /* Only a PARALLEL can have multiple SETs. */ |
| if (GET_CODE (PATTERN (insn)) == PARALLEL) |
| { |
| for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++) |
| if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET) |
| { |
| /* If we have already found a SET, then return now. */ |
| if (found) |
| return 1; |
| else |
| found = 1; |
| } |
| } |
| |
| /* Either zero or one SET. */ |
| return 0; |
| } |
| |
| /* Return nonzero if the destination of SET equals the source |
| and there are no side effects. */ |
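| /* For example, (set (reg:SI 1) (reg:SI 1)) is a no-op, and so is a copy |
|    between two identical MEMs provided the shared address has no side |
|    effects.  */ |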
| |
| int |
| set_noop_p (const_rtx set) |
| { |
| rtx src = SET_SRC (set); |
| rtx dst = SET_DEST (set); |
| |
| if (dst == pc_rtx && src == pc_rtx) |
| return 1; |
| |
| if (MEM_P (dst) && MEM_P (src)) |
| return rtx_equal_p (dst, src) && !side_effects_p (dst); |
| |
| if (GET_CODE (dst) == ZERO_EXTRACT) |
| return rtx_equal_p (XEXP (dst, 0), src) |
| && !BITS_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx |
| && !side_effects_p (src); |
| |
| if (GET_CODE (dst) == STRICT_LOW_PART) |
| dst = XEXP (dst, 0); |
| |
| if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG) |
| { |
| if (maybe_ne (SUBREG_BYTE (src), SUBREG_BYTE (dst))) |
| return 0; |
| src = SUBREG_REG (src); |
| dst = SUBREG_REG (dst); |
| if (GET_MODE (src) != GET_MODE (dst)) |
| /* It is hard to tell whether subregs refer to the same bits, so act |
| conservatively and return 0. */ |
| return 0; |
| } |
| |
| /* The SET is a no-op if the destination is the same hard register that |
| already holds the selected (contiguous) source vector elements. */ |
| if (GET_CODE (src) == VEC_SELECT |
| && REG_P (XEXP (src, 0)) && REG_P (dst) |
| && HARD_REGISTER_P (XEXP (src, 0)) |
| && HARD_REGISTER_P (dst)) |
| { |
| int i; |
| rtx par = XEXP (src, 1); |
| rtx src0 = XEXP (src, 0); |
| poly_int64 c0; |
| if (!poly_int_rtx_p (XVECEXP (par, 0, 0), &c0)) |
| return 0; |
| poly_int64 offset = GET_MODE_UNIT_SIZE (GET_MODE (src0)) * c0; |
| |
| for (i = 1; i < XVECLEN (par, 0); i++) |
| { |
| poly_int64 c0i; |
| if (!poly_int_rtx_p (XVECEXP (par, 0, i), &c0i) |
| || maybe_ne (c0i, c0 + i)) |
| return 0; |
| } |
| return |
| REG_CAN_CHANGE_MODE_P (REGNO (dst), GET_MODE (src0), GET_MODE (dst)) |
| && simplify_subreg_regno (REGNO (src0), GET_MODE (src0), |
| offset, GET_MODE (dst)) == (int) REGNO (dst); |
| } |
| |
| return (REG_P (src) && REG_P (dst) |
| && REGNO (src) == REGNO (dst)); |
| } |
| |
| /* Return nonzero if an insn consists only of SETs, each of which only sets a |
| value to itself. */ |
| |
| int |
| noop_move_p (const rtx_insn *insn) |
| { |
| rtx pat = PATTERN (insn); |
| |
| if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE) |
| return 1; |
| |
| /* Check the code to be executed for COND_EXEC. */ |
| if (GET_CODE (pat) == COND_EXEC) |
| pat = COND_EXEC_CODE (pat); |
| |
| if (GET_CODE (pat) == SET && set_noop_p (pat)) |
| return 1; |
| |
| if (GET_CODE (pat) == PARALLEL) |
| { |
| int i; |
| /* If nothing but SETs of registers to themselves, |
| this insn can also be deleted. */ |
| for (i = 0; i < XVECLEN (pat, 0); i++) |
| { |
| rtx tem = XVECEXP (pat, 0, i); |
| |
| if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER) |
| continue; |
| |
| if (GET_CODE (tem) != SET || ! set_noop_p (tem)) |
| return 0; |
| } |
| |
| return 1; |
| } |
| return 0; |
| } |
| |
| |
| /* Return nonzero if register in range [REGNO, ENDREGNO) |
| appears either explicitly or implicitly in X |
| other than being stored into. |
| |
| References contained within the substructure at LOC do not count. |
| LOC may be zero, meaning don't ignore anything. */ |
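| /* For example, with REGNO 2 and ENDREGNO 4 the range covers hard |
|    registers 2 and 3, so a use of (reg:DI 3) overlaps it (assuming DImode |
|    spans two hard registers) while a use of (reg:SI 4) does not.  */ |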
| |
| bool |
| refers_to_regno_p (unsigned int regno, unsigned int endregno, const_rtx x, |
| rtx *loc) |
| { |
| int i; |
| unsigned int x_regno; |
| RTX_CODE code; |
| const char *fmt; |
| |
| repeat: |
| /* The contents of a REG_NONNEG note are always zero, so we must come here |
| upon repeat in case the last REG_NOTE is a REG_NONNEG note. */ |
| if (x == 0) |
| return false; |
| |
| code = GET_CODE (x); |
| |
| switch (code) |
| { |
| case REG: |
| x_regno = REGNO (x); |
| |
| /* If we are modifying the stack, frame, or argument pointer, it will |
| clobber a virtual register. In fact, we could be more precise, |
| but it isn't worth it. */ |
| if ((x_regno == STACK_POINTER_REGNUM |
| || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM |
| && x_regno == ARG_POINTER_REGNUM) |
| || x_regno == FRAME_POINTER_REGNUM) |
| && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER) |
| return true; |
| |
| return endregno > x_regno && regno < END_REGNO (x); |
| |
| case SUBREG: |
| /* If this is a SUBREG of a hard reg, we can see exactly which |
| registers are being modified. Otherwise, handle normally. */ |
| if (REG_P (SUBREG_REG (x)) |
| && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER) |
| { |
| unsigned int inner_regno = subreg_regno (x); |
| unsigned int inner_endregno |
| = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER |
| ? subreg_nregs (x) : 1); |
| |
| return endregno > inner_regno && regno < inner_endregno; |
| } |
| break; |
| |
| case CLOBBER: |
| case SET: |
| if (&SET_DEST (x) != loc |
| /* Note setting a SUBREG counts as referring to the REG it is in for |
| a pseudo but not for hard registers since we can |
| treat each word individually. */ |
| && ((GET_CODE (SET_DEST (x)) == SUBREG |
| && loc != &SUBREG_REG (SET_DEST (x)) |
| && REG_P (SUBREG_REG (SET_DEST (x))) |
| && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER |
| && refers_to_regno_p (regno, endregno, |
| SUBREG_REG (SET_DEST (x)), loc)) |
| || (!REG_P (SET_DEST (x)) |
| && refers_to_regno_p (regno, endregno, SET_DEST (x), loc)))) |
| return true; |
| |
| if (code == CLOBBER || loc == &SET_SRC (x)) |
| return false; |
| x = SET_SRC (x); |
| goto repeat; |
| |
| default: |
| break; |
| } |
| |
| /* X does not match, so try its subexpressions. */ |
| |
| fmt = GET_RTX_FORMAT (code); |
| for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
| { |
| if (fmt[i] == 'e' && loc != &XEXP (x, i)) |
| { |
| if (i == 0) |
| { |
| x = XEXP (x, 0); |
| goto repeat; |
| } |
| else |
| if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc)) |
| return true; |
| } |
| else if (fmt[i] == 'E') |
| { |
| int j; |
| for (j = XVECLEN (x, i) - 1; j >= 0; j--) |
| if (loc != &XVECEXP (x, i, j) |
| && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc)) |
| return true; |
| } |
| } |
| return false; |
| } |
| |
| /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG, |
| we check if any register number in X conflicts with the relevant register |
| numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN |
| contains a MEM (we don't bother checking for memory addresses that can't |
| conflict because we expect this to be a rare case). */ |
| |
| int |
| reg_overlap_mentioned_p (const_rtx x, const_rtx in) |
| { |
| unsigned int regno, endregno; |
| |
| /* If either argument is a constant, then modifying X cannot |
| affect IN. Here we check IN; the CONSTANT_P (x) case is handled |
| by the default case of the switch statement below. */ |
| if (CONSTANT_P (in)) |
| return 0; |
| |
| recurse: |
| switch (GET_CODE (x)) |
| { |
| case CLOBBER: |
| case STRICT_LOW_PART: |
| case ZERO_EXTRACT: |
| case SIGN_EXTRACT: |
| /* Overly conservative. */ |
| x = XEXP (x, 0); |
| goto recurse; |
| |
| case SUBREG: |
| regno = REGNO (SUBREG_REG (x)); |
| if (regno < FIRST_PSEUDO_REGISTER) |
| regno = subreg_regno (x); |
| endregno = regno + (regno < FIRST_PSEUDO_REGISTER |
| ? subreg_nregs (x) : 1); |
| goto do_reg; |
| |
| case REG: |
| regno = REGNO (x); |
| endregno = END_REGNO (x); |
| do_reg: |
| return refers_to_regno_p (regno, endregno, in, (rtx*) 0); |
| |
| case MEM: |
| { |
| const char *fmt; |
| int i; |
| |
| if (MEM_P (in)) |
| return 1; |
| |
| fmt = GET_RTX_FORMAT (GET_CODE (in)); |
| for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--) |
| if (fmt[i] == 'e') |
| { |
| if (reg_overlap_mentioned_p (x, XEXP (in, i))) |
| return 1; |
| } |
| else if (fmt[i] == 'E') |
| { |
| int j; |
| for (j = XVECLEN (in, i) - 1; j >= 0; --j) |
| if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j))) |
| return 1; |
| } |
| |
| return 0; |
| } |
| |
| case SCRATCH: |
| case PC: |
| return reg_mentioned_p (x, in); |
| |
| case PARALLEL: |
| { |
| int i; |
| |
| /* If any register in here refers to it we return true. */ |
| for (i = XVECLEN (x, 0) - 1; i >= 0; i--) |
| if (XEXP (XVECEXP (x, 0, i), 0) != 0 |
| && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in)) |
| return 1; |
| return 0; |
| } |
| |
| default: |
| gcc_assert (CONSTANT_P (x)); |
| return 0; |
| } |
| } |
| |
| /* Call FUN on each register or MEM that is stored into or clobbered by X. |
| (X would be the pattern of an insn). DATA is an arbitrary pointer, |
| ignored by note_pattern_stores, but passed to FUN. |
| |
| FUN receives three arguments: |
| 1. the REG, MEM or PC being stored in or clobbered, |
| 2. the SET or CLOBBER rtx that does the store, |
| 3. the pointer DATA provided to note_stores. |
| |
| If the item being stored in or clobbered is a SUBREG of a hard register, |
| the SUBREG will be passed. */ |
| |
| void |
| note_pattern_stores (const_rtx x, |
| void (*fun) (rtx, const_rtx, void *), void *data) |
| { |
| int i; |
| |
| if (GET_CODE (x) == COND_EXEC) |
| x = COND_EXEC_CODE (x); |
| |
| if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER) |
| { |
| rtx dest = SET_DEST (x); |
| |
| while ((GET_CODE (dest) == SUBREG |
| && (!REG_P (SUBREG_REG (dest)) |
| || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER)) |
| || GET_CODE (dest) == ZERO_EXTRACT |
| || GET_CODE (dest) == STRICT_LOW_PART) |
| dest = XEXP (dest, 0); |
| |
| /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions, |
| each of whose first operand is a register. */ |
| if (GET_CODE (dest) == PARALLEL) |
| { |
| for (i = XVECLEN (dest, 0) - 1; i >= 0; i--) |
| if (XEXP (XVECEXP (dest, 0, i), 0) != 0) |
| (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data); |
| } |
| else |
| (*fun) (dest, x, data); |
| } |
| |
| else if (GET_CODE (x) == PARALLEL) |
| for (i = XVECLEN (x, 0) - 1; i >= 0; i--) |
| note_pattern_stores (XVECEXP (x, 0, i), fun, data); |
| } |
| |
| /* Same, but for an instruction. If the instruction is a call, include |
| any CLOBBERs in its CALL_INSN_FUNCTION_USAGE. */ |
| |
| void |
| note_stores (const rtx_insn *insn, |
| void (*fun) (rtx, const_rtx, void *), void *data) |
| { |
| if (CALL_P (insn)) |
| for (rtx link = CALL_INSN_FUNCTION_USAGE (insn); |
| link; link = XEXP (link, 1)) |
| if (GET_CODE (XEXP (link, 0)) == CLOBBER) |
| note_pattern_stores (XEXP (link, 0), fun, data); |
| note_pattern_stores (PATTERN (insn), fun, data); |
| } |
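| |
| /* For example, a pass could count how many REGs an insn stores to with |
|    a callback like the following (count_reg_stores is purely |
|    illustrative): |
| |
|      static void |
|      count_reg_stores (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data) |
|      { |
|        if (REG_P (x)) |
|          ++*(int *) data; |
|      } |
| |
|      int count = 0; |
|      note_stores (insn, count_reg_stores, &count);  */ |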
| |
| /* Like note_stores, but call FUN for each expression that is being |
| referenced in PBODY, a pointer to the PATTERN of an insn. We only call |
| FUN for each expression, not any interior subexpressions. FUN receives a |
| pointer to the expression and the DATA passed to this function. |
| |
| Note that this is not quite the same test as that done in reg_referenced_p |
| since that considers something as being referenced if it is being |
| partially set, while we do not. */ |
| |
| void |
| note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data) |
| { |
| rtx body = *pbody; |
| int i; |
| |
| switch (GET_CODE (body)) |
| { |
| case COND_EXEC: |
| (*fun) (&COND_EXEC_TEST (body), data); |
| note_uses (&COND_EXEC_CODE (body), fun, data); |
| return; |
| |
| case PARALLEL: |
| for (i = XVECLEN (body, 0) - 1; i >= 0; i--) |
| note_uses (&XVECEXP (body, 0, i), fun, data); |
| return; |
| |
| case SEQUENCE: |
| for (i = XVECLEN (body, 0) - 1; i >= 0; i--) |
| note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data); |
| return; |
| |
| case USE: |
| (*fun) (&XEXP (body, 0), data); |
| return; |
| |
| case ASM_OPERANDS: |
| for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--) |
| (*fun) (&ASM_OPERANDS_INPUT (body, i), data); |
| return; |
| |
| case TRAP_IF: |
| (*fun) (&TRAP_CONDITION (body), data); |
| return; |
| |
| case PREFETCH: |
| (*fun) (&XEXP (body, 0), data); |
| return; |
| |
| case UNSPEC: |
| case UNSPEC_VOLATILE: |
| for (i = XVECLEN (body, 0) - 1; i >= 0; i--) |
| (*fun) (&XVECEXP (body, 0, i), data); |
| return; |
| |
| case CLOBBER: |
| if (MEM_P (XEXP (body, 0))) |
| (*fun) (&XEXP (XEXP (body, 0), 0), data); |
| return; |
| |
| case SET: |
| { |
| rtx dest = SET_DEST (body); |
| |
| /* For a SET we process everything in the source, plus registers in a |
| memory expression used as the destination, plus the size and position |
| operands of a ZERO_EXTRACT destination. */ |
| (*fun) (&SET_SRC (body), data); |
| |
| if (GET_CODE (dest) == ZERO_EXTRACT) |
| { |
| (*fun) (&XEXP (dest, 1), data); |
| (*fun) (&XEXP (dest, 2), data); |
| } |
| |
| while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART) |
| dest = XEXP (dest, 0); |
| |
| if (MEM_P (dest)) |
| (*fun) (&XEXP (dest, 0), data); |
| } |
| return; |
| |
| default: |
| /* All the other possibilities never store. */ |
| (*fun) (pbody, data); |
| return; |
| } |
| } |
| |
| /* Try to add a description of REG X to this object, stopping once |
| the REF_END limit has been reached. FLAGS is a bitmask of |
| rtx_obj_reference flags that describe the context. */ |
| |
| void |
| rtx_properties::try_to_add_reg (const_rtx x, unsigned int flags) |
| { |
| if (REG_NREGS (x) != 1) |
| flags |= rtx_obj_flags::IS_MULTIREG; |
| machine_mode mode = GET_MODE (x); |
| unsigned int start_regno = REGNO (x); |
| unsigned int end_regno = END_REGNO (x); |
| for (unsigned int regno = start_regno; regno < end_regno; ++regno) |
| if (ref_iter != ref_end) |
| *ref_iter++ = rtx_obj_reference (regno, flags, mode, |
| regno - start_regno); |
| } |
| |
| /* Add a description of destination X to this object. FLAGS is a bitmask |
| of rtx_obj_reference flags that describe the context. |
| |
| This routine accepts all rtxes that can legitimately appear in a |
| SET_DEST. */ |
| |
| void |
| rtx_properties::try_to_add_dest (const_rtx x, unsigned int flags) |
| { |
| /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions, |
| each of whose first operand is a register. */ |
| if (__builtin_expect (GET_CODE (x) == PARALLEL, 0)) |
| { |
| for (int i = XVECLEN (x, 0) - 1; i >= 0; --i) |
| if (rtx dest = XEXP (XVECEXP (x, 0, i), 0)) |
| try_to_add_dest (dest, flags); |
| return; |
| } |
| |
| unsigned int base_flags = flags & rtx_obj_flags::STICKY_FLAGS; |
| flags |= rtx_obj_flags::IS_WRITE; |
| for (;;) |
| if (GET_CODE (x) == ZERO_EXTRACT) |
| { |
| try_to_add_src (XEXP (x, 1), base_flags); |
| try_to_add_src (XEXP (x, 2), base_flags); |
| flags |= rtx_obj_flags::IS_READ; |
| x = XEXP (x, 0); |
| } |
| else if (GET_CODE (x) == STRICT_LOW_PART) |
| { |
| flags |= rtx_obj_flags::IS_READ; |
| x = XEXP (x, 0); |
| } |
| else if (GET_CODE (x) == SUBREG) |
| { |
| flags |= rtx_obj_flags::IN_SUBREG; |
| if (read_modify_subreg_p (x)) |
| flags |= rtx_obj_flags::IS_READ; |
| x = SUBREG_REG (x); |
| } |
| else |
| break; |
| |
| if (MEM_P (x)) |
| { |
| if (ref_iter != ref_end) |
| *ref_iter++ = rtx_obj_reference (MEM_REGNO, flags, GET_MODE (x)); |
| |
| unsigned int addr_flags = base_flags | rtx_obj_flags::IN_MEM_STORE; |
| if (flags & rtx_obj_flags::IS_READ) |
| addr_flags |= rtx_obj_flags::IN_MEM_LOAD; |
| try_to_add_src (XEXP (x, 0), addr_flags); |
| return; |
| } |
| |
| if (__builtin_expect (REG_P (x), 1)) |
| { |
/* We want to keep sp alive everywhere, so make all
writes to sp also count as uses of sp. */
| if (REGNO (x) == STACK_POINTER_REGNUM) |
| flags |= rtx_obj_flags::IS_READ; |
| try_to_add_reg (x, flags); |
| return; |
| } |
| } |
| |
| /* Try to add a description of source X to this object, stopping once |
| the REF_END limit has been reached. FLAGS is a bitmask of |
| rtx_obj_reference flags that describe the context. |
| |
| This routine accepts all rtxes that can legitimately appear in a SET_SRC. */ |
| |
| void |
| rtx_properties::try_to_add_src (const_rtx x, unsigned int flags) |
| { |
| unsigned int base_flags = flags & rtx_obj_flags::STICKY_FLAGS; |
| subrtx_iterator::array_type array; |
| FOR_EACH_SUBRTX (iter, array, x, NONCONST) |
| { |
| const_rtx x = *iter; |
| rtx_code code = GET_CODE (x); |
| if (code == REG) |
| try_to_add_reg (x, flags | rtx_obj_flags::IS_READ); |
| else if (code == MEM) |
| { |
| if (MEM_VOLATILE_P (x)) |
| has_volatile_refs = true; |
| |
| if (!MEM_READONLY_P (x) && ref_iter != ref_end) |
| { |
| auto mem_flags = flags | rtx_obj_flags::IS_READ; |
| *ref_iter++ = rtx_obj_reference (MEM_REGNO, mem_flags, |
| GET_MODE (x)); |
| } |
| |
| try_to_add_src (XEXP (x, 0), |
| base_flags | rtx_obj_flags::IN_MEM_LOAD); |
| iter.skip_subrtxes (); |
| } |
| else if (code == SUBREG) |
| { |
| try_to_add_src (SUBREG_REG (x), flags | rtx_obj_flags::IN_SUBREG); |
| iter.skip_subrtxes (); |
| } |
| else if (code == UNSPEC_VOLATILE) |
| has_volatile_refs = true; |
| else if (code == ASM_INPUT || code == ASM_OPERANDS) |
| { |
| has_asm = true; |
| if (MEM_VOLATILE_P (x)) |
| has_volatile_refs = true; |
| } |
| else if (code == PRE_INC |
| || code == PRE_DEC |
| || code == POST_INC |
| || code == POST_DEC |
| || code == PRE_MODIFY |
| || code == POST_MODIFY) |
| { |
| has_pre_post_modify = true; |
| |
| unsigned int addr_flags = (base_flags |
| | rtx_obj_flags::IS_PRE_POST_MODIFY |
| | rtx_obj_flags::IS_READ); |
| try_to_add_dest (XEXP (x, 0), addr_flags); |
| if (code == PRE_MODIFY || code == POST_MODIFY) |
| iter.substitute (XEXP (XEXP (x, 1), 1)); |
| else |
| iter.skip_subrtxes (); |
| } |
| else if (code == CALL) |
| has_call = true; |
| } |
| } |
| |
| /* Try to add a description of instruction pattern PAT to this object, |
| stopping once the REF_END limit has been reached. */ |
| |
| void |
| rtx_properties::try_to_add_pattern (const_rtx pat) |
| { |
| switch (GET_CODE (pat)) |
| { |
| case COND_EXEC: |
| try_to_add_src (COND_EXEC_TEST (pat)); |
| try_to_add_pattern (COND_EXEC_CODE (pat)); |
| break; |
| |
| case PARALLEL: |
| { |
| int last = XVECLEN (pat, 0) - 1; |
| for (int i = 0; i < last; ++i) |
| try_to_add_pattern (XVECEXP (pat, 0, i)); |
| try_to_add_pattern (XVECEXP (pat, 0, last)); |
| break; |
| } |
| |
| case ASM_OPERANDS: |
| for (int i = 0, len = ASM_OPERANDS_INPUT_LENGTH (pat); i < len; ++i) |
| try_to_add_src (ASM_OPERANDS_INPUT (pat, i)); |
| break; |
| |
| case CLOBBER: |
| try_to_add_dest (XEXP (pat, 0), rtx_obj_flags::IS_CLOBBER); |
| break; |
| |
| case SET: |
| try_to_add_dest (SET_DEST (pat)); |
| try_to_add_src (SET_SRC (pat)); |
| break; |
| |
| default: |
| /* All the other possibilities never store and can use a normal |
| rtx walk. This includes: |
| |
| - USE |
| - TRAP_IF |
| - PREFETCH |
| - UNSPEC |
| - UNSPEC_VOLATILE. */ |
| try_to_add_src (pat); |
| break; |
| } |
| } |
| |
| /* Try to add a description of INSN to this object, stopping once |
| the REF_END limit has been reached. INCLUDE_NOTES is true if the |
| description should include REG_EQUAL and REG_EQUIV notes; all such |
| references will then be marked with rtx_obj_flags::IN_NOTE. |
| |
| For calls, this description includes all accesses in |
CALL_INSN_FUNCTION_USAGE. It also includes all implicit accesses
| to global registers by the target function. However, it does not |
| include clobbers performed by the target function; callers that want |
| this information should instead use the function_abi interface. */ |
| |
| void |
| rtx_properties::try_to_add_insn (const rtx_insn *insn, bool include_notes) |
| { |
| if (CALL_P (insn)) |
| { |
| /* Non-const functions can read from global registers. Impure |
| functions can also set them. |
| |
| Adding the global registers first removes a situation in which |
| a fixed-form clobber of register R could come before a real set |
| of register R. */ |
| if (!hard_reg_set_empty_p (global_reg_set) |
| && !RTL_CONST_CALL_P (insn)) |
| { |
| unsigned int flags = rtx_obj_flags::IS_READ; |
| if (!RTL_PURE_CALL_P (insn)) |
| flags |= rtx_obj_flags::IS_WRITE; |
| for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER; ++regno) |
| /* As a special case, the stack pointer is invariant across calls |
| even if it has been marked global; see the corresponding |
| handling in df_get_call_refs. */ |
| if (regno != STACK_POINTER_REGNUM |
| && global_regs[regno] |
| && ref_iter != ref_end) |
| *ref_iter++ = rtx_obj_reference (regno, flags, |
| reg_raw_mode[regno], 0); |
| } |
| /* Untyped calls implicitly set all function value registers. |
| Again, we add them first in case the main pattern contains |
| a fixed-form clobber. */ |
| if (find_reg_note (insn, REG_UNTYPED_CALL, NULL_RTX)) |
| for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER; ++regno) |
| if (targetm.calls.function_value_regno_p (regno) |
| && ref_iter != ref_end) |
| *ref_iter++ = rtx_obj_reference (regno, rtx_obj_flags::IS_WRITE, |
| reg_raw_mode[regno], 0); |
| if (ref_iter != ref_end && !RTL_CONST_CALL_P (insn)) |
| { |
| auto mem_flags = rtx_obj_flags::IS_READ; |
| if (!RTL_PURE_CALL_P (insn)) |
| mem_flags |= rtx_obj_flags::IS_WRITE; |
| *ref_iter++ = rtx_obj_reference (MEM_REGNO, mem_flags, BLKmode); |
| } |
| try_to_add_pattern (PATTERN (insn)); |
| for (rtx link = CALL_INSN_FUNCTION_USAGE (insn); link; |
| link = XEXP (link, 1)) |
| { |
| rtx x = XEXP (link, 0); |
| if (GET_CODE (x) == CLOBBER) |
| try_to_add_dest (XEXP (x, 0), rtx_obj_flags::IS_CLOBBER); |
| else if (GET_CODE (x) == USE) |
| try_to_add_src (XEXP (x, 0)); |
| } |
| } |
| else |
| try_to_add_pattern (PATTERN (insn)); |
| |
| if (include_notes) |
| for (rtx note = REG_NOTES (insn); note; note = XEXP (note, 1)) |
| if (REG_NOTE_KIND (note) == REG_EQUAL |
| || REG_NOTE_KIND (note) == REG_EQUIV) |
| try_to_add_note (XEXP (note, 0)); |
| } |
| |
| /* Grow the storage by a bit while keeping the contents of the first |
| START elements. */ |
| |
| void |
| vec_rtx_properties_base::grow (ptrdiff_t start) |
| { |
| /* The same heuristic that vec uses. */ |
| ptrdiff_t new_elems = (ref_end - ref_begin) * 3 / 2; |
| if (ref_begin == m_storage) |
| { |
| ref_begin = XNEWVEC (rtx_obj_reference, new_elems); |
| if (start) |
| memcpy (ref_begin, m_storage, start * sizeof (rtx_obj_reference)); |
| } |
| else |
| ref_begin = reinterpret_cast<rtx_obj_reference *> |
| (xrealloc (ref_begin, new_elems * sizeof (rtx_obj_reference))); |
| ref_iter = ref_begin + start; |
| ref_end = ref_begin + new_elems; |
| } |
| |
| /* Return nonzero if X's old contents don't survive after INSN. |
This will be true if X is a register and either X dies in INSN or
INSN entirely sets X.

"Entirely set" means set directly and not through a SUBREG or
ZERO_EXTRACT, so no trace of the old contents remains.
| Likewise, REG_INC does not count. |
| |
| REG may be a hard or pseudo reg. Renumbering is not taken into account, |
| but for this use that makes no difference, since regs don't overlap |
| during their lifetimes. Therefore, this function may be used |
| at any time after deaths have been computed. |
| |
| If REG is a hard reg that occupies multiple machine registers, this |
| function will only return 1 if each of those registers will be replaced |
| by INSN. */ |
| |
| int |
| dead_or_set_p (const rtx_insn *insn, const_rtx x) |
| { |
| unsigned int regno, end_regno; |
| unsigned int i; |
| |
| gcc_assert (REG_P (x)); |
| |
| regno = REGNO (x); |
| end_regno = END_REGNO (x); |
| for (i = regno; i < end_regno; i++) |
| if (! dead_or_set_regno_p (insn, i)) |
| return 0; |
| |
| return 1; |
| } |
| |
| /* Return TRUE iff DEST is a register or subreg of a register, is a |
| complete rather than read-modify-write destination, and contains |
| register TEST_REGNO. */ |
| |
| static bool |
| covers_regno_no_parallel_p (const_rtx dest, unsigned int test_regno) |
| { |
| unsigned int regno, endregno; |
| |
| if (GET_CODE (dest) == SUBREG && !read_modify_subreg_p (dest)) |
| dest = SUBREG_REG (dest); |
| |
| if (!REG_P (dest)) |
| return false; |
| |
| regno = REGNO (dest); |
| endregno = END_REGNO (dest); |
| return (test_regno >= regno && test_regno < endregno); |
| } |
| |
| /* Like covers_regno_no_parallel_p, but also handles PARALLELs where |
| any member matches the covers_regno_no_parallel_p criteria. */ |
| |
| static bool |
| covers_regno_p (const_rtx dest, unsigned int test_regno) |
| { |
| if (GET_CODE (dest) == PARALLEL) |
| { |
| /* Some targets place small structures in registers for return |
| values of functions, and those registers are wrapped in |
| PARALLELs that we may see as the destination of a SET. */ |
| int i; |
| |
| for (i = XVECLEN (dest, 0) - 1; i >= 0; i--) |
| { |
| rtx inner = XEXP (XVECEXP (dest, 0, i), 0); |
| if (inner != NULL_RTX |
| && covers_regno_no_parallel_p (inner, test_regno)) |
| return true; |
| } |
| |
| return false; |
| } |
| else |
| return covers_regno_no_parallel_p (dest, test_regno); |
| } |
| |
| /* Utility function for dead_or_set_p to check an individual register. */ |
| |
| int |
| dead_or_set_regno_p (const rtx_insn *insn, unsigned int test_regno) |
| { |
| const_rtx pattern; |
| |
| /* See if there is a death note for something that includes TEST_REGNO. */ |
| if (find_regno_note (insn, REG_DEAD, test_regno)) |
| return 1; |
| |
| if (CALL_P (insn) |
| && find_regno_fusage (insn, CLOBBER, test_regno)) |
| return 1; |
| |
| pattern = PATTERN (insn); |
| |
| /* If a COND_EXEC is not executed, the value survives. */ |
| if (GET_CODE (pattern) == COND_EXEC) |
| return 0; |
| |
| if (GET_CODE (pattern) == SET || GET_CODE (pattern) == CLOBBER) |
| return covers_regno_p (SET_DEST (pattern), test_regno); |
| else if (GET_CODE (pattern) == PARALLEL) |
| { |
| int i; |
| |
| for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--) |
| { |
| rtx body = XVECEXP (pattern, 0, i); |
| |
| if (GET_CODE (body) == COND_EXEC) |
| body = COND_EXEC_CODE (body); |
| |
| if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER) |
| && covers_regno_p (SET_DEST (body), test_regno)) |
| return 1; |
| } |
| } |
| |
| return 0; |
| } |
| |
| /* Return the reg-note of kind KIND in insn INSN, if there is one. |
| If DATUM is nonzero, look for one whose datum is DATUM. */ |
| |
| rtx |
| find_reg_note (const_rtx insn, enum reg_note kind, const_rtx datum) |
| { |
| rtx link; |
| |
| gcc_checking_assert (insn); |
| |
| /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */ |
| if (! INSN_P (insn)) |
| return 0; |
| if (datum == 0) |
| { |
| for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) |
| if (REG_NOTE_KIND (link) == kind) |
| return link; |
| return 0; |
| } |
| |
| for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) |
| if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0)) |
| return link; |
| return 0; |
| } |
| |
| /* Return the reg-note of kind KIND in insn INSN which applies to register |
| number REGNO, if any. Return 0 if there is no such reg-note. Note that |
the register mentioned in the note need not have number REGNO if REGNO
is a hard register; the note may simply overlap REGNO. */
| |
| rtx |
| find_regno_note (const_rtx insn, enum reg_note kind, unsigned int regno) |
| { |
| rtx link; |
| |
| /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */ |
| if (! INSN_P (insn)) |
| return 0; |
| |
| for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) |
| if (REG_NOTE_KIND (link) == kind |
| /* Verify that it is a register, so that scratch and MEM won't cause a |
| problem here. */ |
| && REG_P (XEXP (link, 0)) |
| && REGNO (XEXP (link, 0)) <= regno |
| && END_REGNO (XEXP (link, 0)) > regno) |
| return link; |
| return 0; |
| } |
| |
| /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and |
| has such a note. */ |
| |
| rtx |
| find_reg_equal_equiv_note (const_rtx insn) |
| { |
| rtx link; |
| |
| if (!INSN_P (insn)) |
| return 0; |
| |
| for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) |
| if (REG_NOTE_KIND (link) == REG_EQUAL |
| || REG_NOTE_KIND (link) == REG_EQUIV) |
| { |
| /* FIXME: We should never have REG_EQUAL/REG_EQUIV notes on |
| insns that have multiple sets. Checking single_set to |
| make sure of this is not the proper check, as explained |
| in the comment in set_unique_reg_note. |
| |
| This should be changed into an assert. */ |
| if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn)) |
| return 0; |
| return link; |
| } |
| return NULL; |
| } |
| |
| /* Check whether INSN is a single_set whose source is known to be |
| equivalent to a constant. Return that constant if so, otherwise |
| return null. */ |
| |
| rtx |
| find_constant_src (const rtx_insn *insn) |
| { |
| rtx note, set, x; |
| |
| set = single_set (insn); |
| if (set) |
| { |
| x = avoid_constant_pool_reference (SET_SRC (set)); |
| if (CONSTANT_P (x)) |
| return x; |
| } |
| |
| note = find_reg_equal_equiv_note (insn); |
| if (note && CONSTANT_P (XEXP (note, 0))) |
| return XEXP (note, 0); |
| |
| return NULL_RTX; |
| } |
| |
| /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found |
| in the CALL_INSN_FUNCTION_USAGE information of INSN. */ |
| |
| int |
| find_reg_fusage (const_rtx insn, enum rtx_code code, const_rtx datum) |
| { |
| /* If it's not a CALL_INSN, it can't possibly have a |
| CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */ |
| if (!CALL_P (insn)) |
| return 0; |
| |
| gcc_assert (datum); |
| |
| if (!REG_P (datum)) |
| { |
| rtx link; |
| |
| for (link = CALL_INSN_FUNCTION_USAGE (insn); |
| link; |
| link = XEXP (link, 1)) |
| if (GET_CODE (XEXP (link, 0)) == code |
| && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0))) |
| return 1; |
| } |
| else |
| { |
| unsigned int regno = REGNO (datum); |
| |
| /* CALL_INSN_FUNCTION_USAGE information cannot contain references |
| to pseudo registers, so don't bother checking. */ |
| |
| if (regno < FIRST_PSEUDO_REGISTER) |
| { |
| unsigned int end_regno = END_REGNO (datum); |
| unsigned int i; |
| |
| for (i = regno; i < end_regno; i++) |
| if (find_regno_fusage (insn, code, i)) |
| return 1; |
| } |
| } |
| |
| return 0; |
| } |
| |
| /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found |
| in the CALL_INSN_FUNCTION_USAGE information of INSN. */ |
| |
| int |
| find_regno_fusage (const_rtx insn, enum rtx_code code, unsigned int regno) |
| { |
| rtx link; |
| |
| /* CALL_INSN_FUNCTION_USAGE information cannot contain references |
| to pseudo registers, so don't bother checking. */ |
| |
| if (regno >= FIRST_PSEUDO_REGISTER |
| || !CALL_P (insn) ) |
| return 0; |
| |
| for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1)) |
| { |
| rtx op, reg; |
| |
| if (GET_CODE (op = XEXP (link, 0)) == code |
| && REG_P (reg = XEXP (op, 0)) |
| && REGNO (reg) <= regno |
| && END_REGNO (reg) > regno) |
| return 1; |
| } |
| |
| return 0; |
| } |
| |
| |
| /* Return true if KIND is an integer REG_NOTE. */ |
| |
| static bool |
| int_reg_note_p (enum reg_note kind) |
| { |
| return kind == REG_BR_PROB; |
| } |
| |
| /* Allocate a register note with kind KIND and datum DATUM. LIST is |
| stored as the pointer to the next register note. */ |
| |
| rtx |
| alloc_reg_note (enum reg_note kind, rtx datum, rtx list) |
| { |
| rtx note; |
| |
| gcc_checking_assert (!int_reg_note_p (kind)); |
| switch (kind) |
| { |
| case REG_LABEL_TARGET: |
| case REG_LABEL_OPERAND: |
| case REG_TM: |
| /* These types of register notes use an INSN_LIST rather than an |
| EXPR_LIST, so that copying is done right and dumps look |
| better. */ |
| note = alloc_INSN_LIST (datum, list); |
| PUT_REG_NOTE_KIND (note, kind); |
| break; |
| |
| default: |
| note = alloc_EXPR_LIST (kind, datum, list); |
| break; |
| } |
| |
| return note; |
| } |
| |
| /* Add register note with kind KIND and datum DATUM to INSN. */ |
| |
| void |
| add_reg_note (rtx insn, enum reg_note kind, rtx datum) |
| { |
| REG_NOTES (insn) = alloc_reg_note (kind, datum, REG_NOTES (insn)); |
| } |
| |
| /* Add an integer register note with kind KIND and datum DATUM to INSN. */ |
| |
| void |
| add_int_reg_note (rtx_insn *insn, enum reg_note kind, int datum) |
| { |
| gcc_checking_assert (int_reg_note_p (kind)); |
| REG_NOTES (insn) = gen_rtx_INT_LIST ((machine_mode) kind, |
| datum, REG_NOTES (insn)); |
| } |
| |
| /* Add a REG_ARGS_SIZE note to INSN with value VALUE. */ |
| |
| void |
| add_args_size_note (rtx_insn *insn, poly_int64 value) |
| { |
| gcc_checking_assert (!find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX)); |
| add_reg_note (insn, REG_ARGS_SIZE, gen_int_mode (value, Pmode)); |
| } |
| |
| /* Add a register note like NOTE to INSN. */ |
| |
| void |
| add_shallow_copy_of_reg_note (rtx_insn *insn, rtx note) |
| { |
| if (GET_CODE (note) == INT_LIST) |
| add_int_reg_note (insn, REG_NOTE_KIND (note), XINT (note, 0)); |
| else |
| add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0)); |
| } |
| |
| /* Duplicate NOTE and return the copy. */ |
| rtx |
| duplicate_reg_note (rtx note) |
| { |
| reg_note kind = REG_NOTE_KIND (note); |
| |
| if (GET_CODE (note) == INT_LIST) |
| return gen_rtx_INT_LIST ((machine_mode) kind, XINT (note, 0), NULL_RTX); |
| else if (GET_CODE (note) == EXPR_LIST) |
| return alloc_reg_note (kind, copy_insn_1 (XEXP (note, 0)), NULL_RTX); |
| else |
| return alloc_reg_note (kind, XEXP (note, 0), NULL_RTX); |
| } |
| |
| /* Remove register note NOTE from the REG_NOTES of INSN. */ |
| |
| void |
| remove_note (rtx_insn *insn, const_rtx note) |
| { |
| rtx link; |
| |
| if (note == NULL_RTX) |
| return; |
| |
| if (REG_NOTES (insn) == note) |
| REG_NOTES (insn) = XEXP (note, 1); |
| else |
| for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) |
| if (XEXP (link, 1) == note) |
| { |
| XEXP (link, 1) = XEXP (note, 1); |
| break; |
| } |
| |
| switch (REG_NOTE_KIND (note)) |
| { |
| case REG_EQUAL: |
| case REG_EQUIV: |
| df_notes_rescan (insn); |
| break; |
| default: |
| break; |
| } |
| } |
| |
| /* Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes. |
| If NO_RESCAN is false and any notes were removed, call |
| df_notes_rescan. Return true if any note has been removed. */ |
| |
| bool |
| remove_reg_equal_equiv_notes (rtx_insn *insn, bool no_rescan) |
| { |
| rtx *loc; |
| bool ret = false; |
| |
| loc = ®_NOTES (insn); |
| while (*loc) |
| { |
| enum reg_note kind = REG_NOTE_KIND (*loc); |
| if (kind == REG_EQUAL || kind == REG_EQUIV) |
| { |
| *loc = XEXP (*loc, 1); |
| ret = true; |
| } |
| else |
| loc = &XEXP (*loc, 1); |
| } |
| if (ret && !no_rescan) |
| df_notes_rescan (insn); |
| return ret; |
| } |
| |
| /* Remove all REG_EQUAL and REG_EQUIV notes referring to REGNO. */ |
| |
| void |
| remove_reg_equal_equiv_notes_for_regno (unsigned int regno) |
| { |
| df_ref eq_use; |
| |
| if (!df) |
| return; |
| |
| /* This loop is a little tricky. We cannot just go down the chain because |
| it is being modified by some actions in the loop. So we just iterate |
| over the head. We plan to drain the list anyway. */ |
| while ((eq_use = DF_REG_EQ_USE_CHAIN (regno)) != NULL) |
| { |
| rtx_insn *insn = DF_REF_INSN (eq_use); |
| rtx note = find_reg_equal_equiv_note (insn); |
| |
| /* This assert is generally triggered when someone deletes a REG_EQUAL |
| or REG_EQUIV note by hacking the list manually rather than calling |
| remove_note. */ |
| gcc_assert (note); |
| |
| remove_note (insn, note); |
| } |
| } |
| |
/* Search LISTP (an INSN_LIST) for an entry whose first operand is NODE and
return true if it is found. A simple equality test is used to determine if
| NODE matches. */ |
| |
| bool |
| in_insn_list_p (const rtx_insn_list *listp, const rtx_insn *node) |
| { |
| const_rtx x; |
| |
| for (x = listp; x; x = XEXP (x, 1)) |
| if (node == XEXP (x, 0)) |
| return true; |
| |
| return false; |
| } |
| |
| /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and |
| remove that entry from the list if it is found. |
| |
| A simple equality test is used to determine if NODE matches. */ |
| |
| void |
| remove_node_from_expr_list (const_rtx node, rtx_expr_list **listp) |
| { |
| rtx_expr_list *temp = *listp; |
| rtx_expr_list *prev = NULL; |
| |
| while (temp) |
| { |
| if (node == temp->element ()) |
| { |
| /* Splice the node out of the list. */ |
| if (prev) |
| XEXP (prev, 1) = temp->next (); |
| else |
| *listp = temp->next (); |
| |
| return; |
| } |
| |
| prev = temp; |
| temp = temp->next (); |
| } |
| } |
| |
| /* Search LISTP (an INSN_LIST) for an entry whose first operand is NODE and |
| remove that entry from the list if it is found. |
| |
| A simple equality test is used to determine if NODE matches. */ |
| |
| void |
| remove_node_from_insn_list (const rtx_insn *node, rtx_insn_list **listp) |
| { |
| rtx_insn_list *temp = *listp; |
| rtx_insn_list *prev = NULL; |
| |
| while (temp) |
| { |
| if (node == temp->insn ()) |
| { |
| /* Splice the node out of the list. */ |
| if (prev) |
| XEXP (prev, 1) = temp->next (); |
| else |
| *listp = temp->next (); |
| |
| return; |
| } |
| |
| prev = temp; |
| temp = temp->next (); |
| } |
| } |
| |
| /* Nonzero if X contains any volatile instructions. These are instructions |
which may cause unpredictable machine state, and thus no
| instructions or register uses should be moved or combined across them. |
| This includes only volatile asms and UNSPEC_VOLATILE instructions. */ |
| |
| int |
| volatile_insn_p (const_rtx x) |
| { |
| const RTX_CODE code = GET_CODE (x); |
| switch (code) |
| { |
| case LABEL_REF: |
| case SYMBOL_REF: |
| case CONST: |
| CASE_CONST_ANY: |
| case PC: |
| case REG: |
| case SCRATCH: |
| case CLOBBER: |
| case ADDR_VEC: |
| case ADDR_DIFF_VEC: |
| case CALL: |
| case MEM: |
| return 0; |
| |
| case UNSPEC_VOLATILE: |
| return 1; |
| |
| case ASM_INPUT: |
| case ASM_OPERANDS: |
| if (MEM_VOLATILE_P (x)) |
| return 1; |
| |
| default: |
| break; |
| } |
| |
| /* Recursively scan the operands of this expression. */ |
| |
| { |
| const char *const fmt = GET_RTX_FORMAT (code); |
| int i; |
| |
| for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
| { |
| if (fmt[i] == 'e') |
| { |
| if (volatile_insn_p (XEXP (x, i))) |
| return 1; |
| } |
| else if (fmt[i] == 'E') |
| { |
| int j; |
| for (j = 0; j < XVECLEN (x, i); j++) |
| if (volatile_insn_p (XVECEXP (x, i, j))) |
| return 1; |
| } |
| } |
| } |
| return 0; |
| } |
| |
/* Nonzero if X contains any volatile memory references,
| UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */ |
| |
| int |
| volatile_refs_p (const_rtx x) |
| { |
| const RTX_CODE code = GET_CODE (x); |
| switch (code) |
| { |
| case LABEL_REF: |
| case SYMBOL_REF: |
| case CONST: |
| CASE_CONST_ANY: |
| case PC: |
| case REG: |
| case SCRATCH: |
| case CLOBBER: |
| case ADDR_VEC: |
| case ADDR_DIFF_VEC: |
| return 0; |
| |
| case UNSPEC_VOLATILE: |
| return 1; |
| |
| case MEM: |
| case ASM_INPUT: |
| case ASM_OPERANDS: |
| if (MEM_VOLATILE_P (x)) |
| return 1; |
| |
| default: |
| break; |
| } |
| |
| /* Recursively scan the operands of this expression. */ |
| |
| { |
| const char *const fmt = GET_RTX_FORMAT (code); |
| int i; |
| |
| for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
| { |
| if (fmt[i] == 'e') |
| { |
| if (volatile_refs_p (XEXP (x, i))) |
| return 1; |
| } |
| else if (fmt[i] == 'E') |
| { |
| int j; |
| for (j = 0; j < XVECLEN (x, i); j++) |
| if (volatile_refs_p (XVECEXP (x, i, j))) |
| return 1; |
| } |
| } |
| } |
| return 0; |
| } |
| |
/* Similar to volatile_refs_p, except that it also rejects register
pre- and post-incrementing and other constructs with side effects. */
| |
| int |
| side_effects_p (const_rtx x) |
| { |
| const RTX_CODE code = GET_CODE (x); |
| switch (code) |
| { |
| case LABEL_REF: |
| case SYMBOL_REF: |
| case CONST: |
| CASE_CONST_ANY: |
| case PC: |
| case REG: |
| case SCRATCH: |
| case ADDR_VEC: |
| case ADDR_DIFF_VEC: |
| case VAR_LOCATION: |
| return 0; |
| |
| case CLOBBER: |
| /* Reject CLOBBER with a non-VOID mode. These are made by combine.cc |
| when some combination can't be done. If we see one, don't think |
| that we can simplify the expression. */ |
| return (GET_MODE (x) != VOIDmode); |
| |
| case PRE_INC: |
| case PRE_DEC: |
| case POST_INC: |
| case POST_DEC: |
| case PRE_MODIFY: |
| case POST_MODIFY: |
| case CALL: |
| case UNSPEC_VOLATILE: |
| return 1; |
| |
| case MEM: |
| case ASM_INPUT: |
| case ASM_OPERANDS: |
| if (MEM_VOLATILE_P (x)) |
| return 1; |
| |
| default: |
| break; |
| } |
| |
| /* Recursively scan the operands of this expression. */ |
| |
| { |
| const char *fmt = GET_RTX_FORMAT (code); |
| int i; |
| |
|