| /* Expand builtin functions. |
| Copyright (C) 1988-2022 Free Software Foundation, Inc. |
| |
| This file is part of GCC. |
| |
| GCC is free software; you can redistribute it and/or modify it under |
| the terms of the GNU General Public License as published by the Free |
| Software Foundation; either version 3, or (at your option) any later |
| version. |
| |
| GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
| WARRANTY; without even the implied warranty of MERCHANTABILITY or |
| FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
| for more details. |
| |
| You should have received a copy of the GNU General Public License |
| along with GCC; see the file COPYING3. If not see |
| <http://www.gnu.org/licenses/>. */ |
| |
| /* Legacy warning! Please add no further builtin simplifications here |
| (apart from pure constant folding) - builtin simplifications should go |
| to match.pd or gimple-fold.cc instead. */ |
| |
| #include "config.h" |
| #include "system.h" |
| #include "coretypes.h" |
| #include "backend.h" |
| #include "target.h" |
| #include "rtl.h" |
| #include "tree.h" |
| #include "memmodel.h" |
| #include "gimple.h" |
| #include "predict.h" |
| #include "tm_p.h" |
| #include "stringpool.h" |
| #include "tree-vrp.h" |
| #include "tree-ssanames.h" |
| #include "expmed.h" |
| #include "optabs.h" |
| #include "emit-rtl.h" |
| #include "recog.h" |
| #include "diagnostic-core.h" |
| #include "alias.h" |
| #include "fold-const.h" |
| #include "fold-const-call.h" |
| #include "gimple-ssa-warn-access.h" |
| #include "stor-layout.h" |
| #include "calls.h" |
| #include "varasm.h" |
| #include "tree-object-size.h" |
| #include "tree-ssa-strlen.h" |
| #include "realmpfr.h" |
| #include "cfgrtl.h" |
| #include "except.h" |
| #include "dojump.h" |
| #include "explow.h" |
| #include "stmt.h" |
| #include "expr.h" |
| #include "libfuncs.h" |
| #include "output.h" |
| #include "typeclass.h" |
| #include "langhooks.h" |
| #include "value-prof.h" |
| #include "builtins.h" |
| #include "stringpool.h" |
| #include "attribs.h" |
| #include "asan.h" |
| #include "internal-fn.h" |
| #include "case-cfn-macros.h" |
| #include "gimple-fold.h" |
| #include "intl.h" |
| #include "file-prefix-map.h" /* remap_macro_filename() */ |
| #include "gomp-constants.h" |
| #include "omp-general.h" |
| #include "tree-dfa.h" |
| #include "gimple-iterator.h" |
| #include "gimple-ssa.h" |
| #include "tree-ssa-live.h" |
| #include "tree-outof-ssa.h" |
| #include "attr-fnspec.h" |
| #include "demangle.h" |
| #include "gimple-range.h" |
| #include "pointer-query.h" |
| |
| struct target_builtins default_target_builtins; |
| #if SWITCHABLE_TARGET |
| struct target_builtins *this_target_builtins = &default_target_builtins; |
| #endif |
| |
| /* Define the names of the builtin function types and codes. */ |
| const char *const built_in_class_names[BUILT_IN_LAST] |
| = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"}; |
| |
| #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X, |
| const char * built_in_names[(int) END_BUILTINS] = |
| { |
| #include "builtins.def" |
| }; |
| |
| /* Set up an array of builtin_info_type, making sure each element's decl is |
| initialized to NULL_TREE. */ |
| builtin_info_type builtin_info[(int)END_BUILTINS]; |
| |
| /* Non-zero if __builtin_constant_p should be folded right away. */ |
| bool force_folding_builtin_constant_p; |
| |
| static int target_char_cast (tree, char *); |
| static int apply_args_size (void); |
| static int apply_result_size (void); |
| static rtx result_vector (int, rtx); |
| static void expand_builtin_prefetch (tree); |
| static rtx expand_builtin_apply_args (void); |
| static rtx expand_builtin_apply_args_1 (void); |
| static rtx expand_builtin_apply (rtx, rtx, rtx); |
| static void expand_builtin_return (rtx); |
| static enum type_class type_to_class (tree); |
| static rtx expand_builtin_classify_type (tree); |
| static rtx expand_builtin_mathfn_3 (tree, rtx, rtx); |
| static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx); |
| static rtx expand_builtin_interclass_mathfn (tree, rtx); |
| static rtx expand_builtin_sincos (tree); |
| static rtx expand_builtin_fegetround (tree, rtx, machine_mode); |
| static rtx expand_builtin_feclear_feraise_except (tree, rtx, machine_mode, |
| optab); |
| static rtx expand_builtin_cexpi (tree, rtx); |
| static rtx expand_builtin_int_roundingfn (tree, rtx); |
| static rtx expand_builtin_int_roundingfn_2 (tree, rtx); |
| static rtx expand_builtin_next_arg (void); |
| static rtx expand_builtin_va_start (tree); |
| static rtx expand_builtin_va_end (tree); |
| static rtx expand_builtin_va_copy (tree); |
| static rtx inline_expand_builtin_bytecmp (tree, rtx); |
| static rtx expand_builtin_strcmp (tree, rtx); |
| static rtx expand_builtin_strncmp (tree, rtx, machine_mode); |
| static rtx expand_builtin_memcpy (tree, rtx); |
| static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len, |
| rtx target, tree exp, |
| memop_ret retmode, |
| bool might_overlap); |
| static rtx expand_builtin_memmove (tree, rtx); |
| static rtx expand_builtin_mempcpy (tree, rtx); |
| static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret); |
| static rtx expand_builtin_strcpy (tree, rtx); |
| static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx); |
| static rtx expand_builtin_stpcpy (tree, rtx, machine_mode); |
| static rtx expand_builtin_strncpy (tree, rtx); |
| static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree); |
| static rtx expand_builtin_bzero (tree); |
| static rtx expand_builtin_strlen (tree, rtx, machine_mode); |
| static rtx expand_builtin_strnlen (tree, rtx, machine_mode); |
| static rtx expand_builtin_alloca (tree); |
| static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab); |
| static rtx expand_builtin_frame_address (tree, tree); |
| static tree stabilize_va_list_loc (location_t, tree, int); |
| static rtx expand_builtin_expect (tree, rtx); |
| static rtx expand_builtin_expect_with_probability (tree, rtx); |
| static tree fold_builtin_constant_p (tree); |
| static tree fold_builtin_classify_type (tree); |
| static tree fold_builtin_strlen (location_t, tree, tree, tree); |
| static tree fold_builtin_inf (location_t, tree, int); |
| static tree rewrite_call_expr (location_t, tree, int, tree, int, ...); |
| static bool validate_arg (const_tree, enum tree_code code); |
| static rtx expand_builtin_fabs (tree, rtx, rtx); |
| static rtx expand_builtin_signbit (tree, rtx); |
| static tree fold_builtin_memcmp (location_t, tree, tree, tree); |
| static tree fold_builtin_isascii (location_t, tree); |
| static tree fold_builtin_toascii (location_t, tree); |
| static tree fold_builtin_isdigit (location_t, tree); |
| static tree fold_builtin_fabs (location_t, tree, tree); |
| static tree fold_builtin_abs (location_t, tree, tree); |
| static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code, |
| enum tree_code); |
| static tree fold_builtin_varargs (location_t, tree, tree*, int); |
| |
| static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree); |
| static tree fold_builtin_strspn (location_t, tree, tree, tree); |
| static tree fold_builtin_strcspn (location_t, tree, tree, tree); |
| |
| static rtx expand_builtin_object_size (tree); |
| static rtx expand_builtin_memory_chk (tree, rtx, machine_mode, |
| enum built_in_function); |
| static void maybe_emit_chk_warning (tree, enum built_in_function); |
| static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function); |
| static tree fold_builtin_object_size (tree, tree, enum built_in_function); |
| |
| unsigned HOST_WIDE_INT target_newline; |
| unsigned HOST_WIDE_INT target_percent; |
| static unsigned HOST_WIDE_INT target_c; |
| static unsigned HOST_WIDE_INT target_s; |
| char target_percent_c[3]; |
| char target_percent_s[3]; |
| char target_percent_s_newline[4]; |
| static tree do_mpfr_remquo (tree, tree, tree); |
| static tree do_mpfr_lgamma_r (tree, tree, tree); |
| static void expand_builtin_sync_synchronize (void); |
| |
| /* Return true if NAME starts with __builtin_, __sync_ or __atomic_. */ |
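| /* For instance, is_builtin_name ("__builtin_memcpy") and |
| is_builtin_name ("__atomic_load_n") return true, whereas |
| is_builtin_name ("memcpy") returns false. */ |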
| |
| static bool |
| is_builtin_name (const char *name) |
| { |
| return (startswith (name, "__builtin_") |
| || startswith (name, "__sync_") |
| || startswith (name, "__atomic_")); |
| } |
| |
| /* Return true if NODE should be considered for inline expansion regardless |
| of the optimization level. This is the case whenever a function is invoked with |
| its "internal" name, which normally contains the prefix "__builtin". */ |
| |
| bool |
| called_as_built_in (tree node) |
| { |
| /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME, since |
| we want the name used to call the function, not the name it |
| will have. */ |
| const char *name = IDENTIFIER_POINTER (DECL_NAME (node)); |
| return is_builtin_name (name); |
| } |
| |
| /* Compute values M and N such that M divides (address of EXP - N) and such |
| that N < M. If these numbers can be determined, store M in *ALIGNP and N in |
| *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT in |
| *ALIGNP and any bit-offset in *BITPOSP. |
| |
| Note that the address (and thus the alignment) computed here is based |
| on the address to which a symbol resolves, whereas DECL_ALIGN is based |
| on the address at which an object is actually located. These two |
| addresses are not always the same. For example, on ARM targets, |
| the address &foo of a Thumb function foo() has the lowest bit set, |
| whereas foo() itself starts on an even address. |
| |
| If ADDR_P is true we are taking the address of the memory reference EXP |
| and thus cannot rely on the access taking place. */ |
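| /* As an illustration of the M/N contract above: for an object known to |
| live two bytes past a 16-byte boundary, M is 16 bytes and N is 2 bytes, |
| so *ALIGNP is set to 128 and *BITPOSP to 16 (both in bits). */ |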
| |
| bool |
| get_object_alignment_2 (tree exp, unsigned int *alignp, |
| unsigned HOST_WIDE_INT *bitposp, bool addr_p) |
| { |
| poly_int64 bitsize, bitpos; |
| tree offset; |
| machine_mode mode; |
| int unsignedp, reversep, volatilep; |
| unsigned int align = BITS_PER_UNIT; |
| bool known_alignment = false; |
| |
| /* Get the innermost object and the constant (bitpos) and possibly |
| variable (offset) offset of the access. */ |
| exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode, |
| &unsignedp, &reversep, &volatilep); |
| |
| /* Extract alignment information from the innermost object and |
| possibly adjust bitpos and offset. */ |
| if (TREE_CODE (exp) == FUNCTION_DECL) |
| { |
| /* Function addresses can encode extra information besides their |
| alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION |
| allows the low bit to be used as a virtual bit, we know |
| that the address itself must be at least 2-byte aligned. */ |
| if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn) |
| align = 2 * BITS_PER_UNIT; |
| } |
| else if (TREE_CODE (exp) == LABEL_DECL) |
| ; |
| else if (TREE_CODE (exp) == CONST_DECL) |
| { |
| /* The alignment of a CONST_DECL is determined by its initializer. */ |
| exp = DECL_INITIAL (exp); |
| align = TYPE_ALIGN (TREE_TYPE (exp)); |
| if (CONSTANT_CLASS_P (exp)) |
| align = targetm.constant_alignment (exp, align); |
| |
| known_alignment = true; |
| } |
| else if (DECL_P (exp)) |
| { |
| align = DECL_ALIGN (exp); |
| known_alignment = true; |
| } |
| else if (TREE_CODE (exp) == INDIRECT_REF |
| || TREE_CODE (exp) == MEM_REF |
| || TREE_CODE (exp) == TARGET_MEM_REF) |
| { |
| tree addr = TREE_OPERAND (exp, 0); |
| unsigned ptr_align; |
| unsigned HOST_WIDE_INT ptr_bitpos; |
| unsigned HOST_WIDE_INT ptr_bitmask = ~0; |
| |
| /* If the address is explicitly aligned, handle that. */ |
| if (TREE_CODE (addr) == BIT_AND_EXPR |
| && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST) |
| { |
| ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)); |
| ptr_bitmask *= BITS_PER_UNIT; |
| align = least_bit_hwi (ptr_bitmask); |
| addr = TREE_OPERAND (addr, 0); |
| } |
| |
| known_alignment |
| = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos); |
| align = MAX (ptr_align, align); |
| |
| /* Re-apply explicit alignment to the bitpos. */ |
| ptr_bitpos &= ptr_bitmask; |
| |
| /* The alignment of the pointer operand in a TARGET_MEM_REF |
| has to take the variable offset parts into account. */ |
| if (TREE_CODE (exp) == TARGET_MEM_REF) |
| { |
| if (TMR_INDEX (exp)) |
| { |
| unsigned HOST_WIDE_INT step = 1; |
| if (TMR_STEP (exp)) |
| step = TREE_INT_CST_LOW (TMR_STEP (exp)); |
| align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT); |
| } |
| if (TMR_INDEX2 (exp)) |
| align = BITS_PER_UNIT; |
| known_alignment = false; |
| } |
| |
| /* When EXP is an actual memory reference then we can use |
| TYPE_ALIGN of a pointer indirection to derive alignment. |
| Do so only if get_pointer_alignment_1 did not reveal absolute |
| alignment knowledge and if using that alignment would |
| improve the situation. */ |
| unsigned int talign; |
| if (!addr_p && !known_alignment |
| && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT) |
| && talign > align) |
| align = talign; |
| else |
| { |
| /* Else adjust bitpos accordingly. */ |
| bitpos += ptr_bitpos; |
| if (TREE_CODE (exp) == MEM_REF |
| || TREE_CODE (exp) == TARGET_MEM_REF) |
| bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT; |
| } |
| } |
| else if (TREE_CODE (exp) == STRING_CST) |
| { |
| /* STRING_CSTs are the only constant objects we allow not to be |
| wrapped inside a CONST_DECL. */ |
| align = TYPE_ALIGN (TREE_TYPE (exp)); |
| if (CONSTANT_CLASS_P (exp)) |
| align = targetm.constant_alignment (exp, align); |
| |
| known_alignment = true; |
| } |
| |
| /* If there is a non-constant offset part extract the maximum |
| alignment that can prevail. */ |
| if (offset) |
| { |
| unsigned int trailing_zeros = tree_ctz (offset); |
| if (trailing_zeros < HOST_BITS_PER_INT) |
| { |
| unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT; |
| if (inner) |
| align = MIN (align, inner); |
| } |
| } |
| |
| /* Account for the alignment of runtime coefficients, so that the constant |
| bitpos is guaranteed to be accurate. */ |
| unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]); |
| if (alt_align != 0 && alt_align < align) |
| { |
| align = alt_align; |
| known_alignment = false; |
| } |
| |
| *alignp = align; |
| *bitposp = bitpos.coeffs[0] & (align - 1); |
| return known_alignment; |
| } |
| |
| /* For a memory reference expression EXP compute values M and N such that M |
| divides (&EXP - N) and such that N < M. If these numbers can be determined, |
| store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false |
| and store BITS_PER_UNIT in *ALIGNP and any bit-offset in *BITPOSP. */ |
| |
| bool |
| get_object_alignment_1 (tree exp, unsigned int *alignp, |
| unsigned HOST_WIDE_INT *bitposp) |
| { |
| /* Strip a WITH_SIZE_EXPR, get_inner_reference doesn't know how to deal |
| with it. */ |
| if (TREE_CODE (exp) == WITH_SIZE_EXPR) |
| exp = TREE_OPERAND (exp, 0); |
| return get_object_alignment_2 (exp, alignp, bitposp, false); |
| } |
| |
| /* Return the alignment in bits of EXP, an object. */ |
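| /* For example, if get_object_alignment_1 reports a 128-bit alignment |
| together with a bit position of 16, the object itself is only known to |
| be 16-bit aligned, which is what the least_bit_hwi adjustment below |
| computes. */ |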
| |
| unsigned int |
| get_object_alignment (tree exp) |
| { |
| unsigned HOST_WIDE_INT bitpos = 0; |
| unsigned int align; |
| |
| get_object_alignment_1 (exp, &align, &bitpos); |
| |
| /* align and bitpos now specify known low bits of the pointer. |
| ptr & (align - 1) == bitpos. */ |
| |
| if (bitpos != 0) |
| align = least_bit_hwi (bitpos); |
| return align; |
| } |
| |
| /* For a pointer valued expression EXP compute values M and N such that M |
| divides (EXP - N) and such that N < M. If these numbers can be determined, |
| store M in *ALIGNP and N in *BITPOSP and return true. Return false if |
| the results are just a conservative approximation. |
| |
| If EXP is not a pointer, false is returned too. */ |
| |
| bool |
| get_pointer_alignment_1 (tree exp, unsigned int *alignp, |
| unsigned HOST_WIDE_INT *bitposp) |
| { |
| STRIP_NOPS (exp); |
| |
| if (TREE_CODE (exp) == ADDR_EXPR) |
| return get_object_alignment_2 (TREE_OPERAND (exp, 0), |
| alignp, bitposp, true); |
| else if (TREE_CODE (exp) == POINTER_PLUS_EXPR) |
| { |
| unsigned int align; |
| unsigned HOST_WIDE_INT bitpos; |
| bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0), |
| &align, &bitpos); |
| if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST) |
| bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT; |
| else |
| { |
| unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1)); |
| if (trailing_zeros < HOST_BITS_PER_INT) |
| { |
| unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT; |
| if (inner) |
| align = MIN (align, inner); |
| } |
| } |
| *alignp = align; |
| *bitposp = bitpos & (align - 1); |
| return res; |
| } |
| else if (TREE_CODE (exp) == SSA_NAME |
| && POINTER_TYPE_P (TREE_TYPE (exp))) |
| { |
| unsigned int ptr_align, ptr_misalign; |
| struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp); |
| |
| if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign)) |
| { |
| *bitposp = ptr_misalign * BITS_PER_UNIT; |
| *alignp = ptr_align * BITS_PER_UNIT; |
| /* Make sure to return a sensible alignment when the multiplication |
| by BITS_PER_UNIT overflowed. */ |
| if (*alignp == 0) |
| *alignp = 1u << (HOST_BITS_PER_INT - 1); |
| /* We cannot really tell whether this result is an approximation. */ |
| return false; |
| } |
| else |
| { |
| *bitposp = 0; |
| *alignp = BITS_PER_UNIT; |
| return false; |
| } |
| } |
| else if (TREE_CODE (exp) == INTEGER_CST) |
| { |
| *alignp = BIGGEST_ALIGNMENT; |
| *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT) |
| & (BIGGEST_ALIGNMENT - 1)); |
| return true; |
| } |
| |
| *bitposp = 0; |
| *alignp = BITS_PER_UNIT; |
| return false; |
| } |
| |
| /* Return the alignment in bits of EXP, a pointer valued expression. |
| The alignment returned is, by default, the alignment of the thing that |
| EXP points to. If EXP is not of pointer type, BITS_PER_UNIT is returned. |
| |
| Otherwise, look at the expression to see if we can do better, i.e., if the |
| expression is actually pointing at an object whose alignment is tighter. */ |
| |
| unsigned int |
| get_pointer_alignment (tree exp) |
| { |
| unsigned HOST_WIDE_INT bitpos = 0; |
| unsigned int align; |
| |
| get_pointer_alignment_1 (exp, &align, &bitpos); |
| |
| /* align and bitpos now specify known low bits of the pointer. |
| ptr & (align - 1) == bitpos. */ |
| |
| if (bitpos != 0) |
| align = least_bit_hwi (bitpos); |
| |
| return align; |
| } |
| |
| /* Return the number of leading non-zero elements in the sequence |
| [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes. |
| ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */ |
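| /* For example, with ELTSIZE == 1, string_length ("ab\0cd", 1, 5) |
| returns 2 because it stops at the embedded NUL, while |
| string_length ("abcde", 1, 3) returns MAXELTS, i.e. 3, because no NUL |
| is found within the scanned range. */ |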
| |
| unsigned |
| string_length (const void *ptr, unsigned eltsize, unsigned maxelts) |
| { |
| gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4); |
| |
| unsigned n; |
| |
| if (eltsize == 1) |
| { |
| /* Optimize the common case of plain char. */ |
| for (n = 0; n < maxelts; n++) |
| { |
| const char *elt = (const char*) ptr + n; |
| if (!*elt) |
| break; |
| } |
| } |
| else |
| { |
| for (n = 0; n < maxelts; n++) |
| { |
| const char *elt = (const char*) ptr + n * eltsize; |
| if (!memcmp (elt, "\0\0\0\0", eltsize)) |
| break; |
| } |
| } |
| return n; |
| } |
| |
| /* Compute the length of a null-terminated character string or wide |
| character string handling character sizes of 1, 2, and 4 bytes. |
| TREE_STRING_LENGTH is not the right way because it evaluates to |
| the size of the character array in bytes (as opposed to characters) |
| and because it can contain a zero byte in the middle. |
| |
| ONLY_VALUE should be nonzero if the result is not going to be emitted |
| into the instruction stream and zero if it is going to be expanded. |
| E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3 |
| is returned, otherwise NULL, since |
| len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not |
| evaluate the side-effects. |
| |
| If ONLY_VALUE is two then we do not emit warnings about out-of-bound |
| accesses. Note that this implies the result is not going to be emitted |
| into the instruction stream. |
| |
| Additional information about the string accessed may be recorded |
| in DATA. For example, if ARG references an unterminated string, |
| then the declaration will be stored in the DECL field. If the |
| length of the unterminated string can be determined, it'll be |
| stored in the LEN field. Note this length could well be different |
| than what a C strlen call would return. |
| |
| ELTSIZE is 1 for normal single byte character strings, and 2 or |
| 4 for wide character strings. ELTSIZE is by default 1. |
| |
| The value returned is of type `ssizetype'. */ |
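| /* For example, for the argument of strlen ("hello") this returns |
| ssize_int (5); with a known byte offset of 2 into the same literal it |
| returns ssize_int (3); and for an argument that does not resolve to a |
| string constant it returns NULL_TREE. */ |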
| |
| tree |
| c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize) |
| { |
| /* If we were not passed a DATA pointer, then get one to a local |
| structure. That avoids having to check DATA for NULL before |
| each time we want to use it. */ |
| c_strlen_data local_strlen_data = { }; |
| if (!data) |
| data = &local_strlen_data; |
| |
| gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4); |
| |
| tree src = STRIP_NOPS (arg); |
| if (TREE_CODE (src) == COND_EXPR |
| && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0)))) |
| { |
| tree len1, len2; |
| |
| len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize); |
| len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize); |
| if (tree_int_cst_equal (len1, len2)) |
| return len1; |
| } |
| |
| if (TREE_CODE (src) == COMPOUND_EXPR |
| && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0)))) |
| return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize); |
| |
| location_t loc = EXPR_LOC_OR_LOC (src, input_location); |
| |
| /* Offset from the beginning of the string in bytes. */ |
| tree byteoff; |
| tree memsize; |
| tree decl; |
| src = string_constant (src, &byteoff, &memsize, &decl); |
| if (src == 0) |
| return NULL_TREE; |
| |
| /* Determine the size of the string element. */ |
| if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))))) |
| return NULL_TREE; |
| |
| /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible |
| length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible |
| in case the latter is less than the size of the array, such as when |
| SRC refers to a short string literal used to initialize a large array. |
| In that case, the elements of the array after the terminating NUL are |
| all NUL. */ |
| HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src); |
| strelts = strelts / eltsize; |
| |
| if (!tree_fits_uhwi_p (memsize)) |
| return NULL_TREE; |
| |
| HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize; |
| |
| /* PTR can point to the byte representation of any string type, including |
| char* and wchar_t*. */ |
| const char *ptr = TREE_STRING_POINTER (src); |
| |
| if (byteoff && TREE_CODE (byteoff) != INTEGER_CST) |
| { |
| /* The code below works only for single byte character types. */ |
| if (eltsize != 1) |
| return NULL_TREE; |
| |
| /* If the string has an internal NUL character followed by any |
| non-NUL characters (e.g., "foo\0bar"), we can't compute |
| the offset to the following NUL if we don't know where to |
| start searching for it. */ |
| unsigned len = string_length (ptr, eltsize, strelts); |
| |
| /* Return NULL_TREE when an embedded null character is found, or when |
| no null is found at all. In the latter case, set the DECL/LEN fields |
| in the DATA structure so that callers may examine them. */ |
| if (len + 1 < strelts) |
| return NULL_TREE; |
| else if (len >= maxelts) |
| { |
| data->decl = decl; |
| data->off = byteoff; |
| data->minlen = ssize_int (len); |
| return NULL_TREE; |
| } |
| |
| /* For empty strings the result should be zero. */ |
| if (len == 0) |
| return ssize_int (0); |
| |
| /* We don't know the starting offset, but we do know that the string |
| has no internal zero bytes. If the offset falls within the bounds |
| of the string subtract the offset from the length of the string, |
| and return that. Otherwise the length is zero. Take care to |
| use SAVE_EXPR in case the OFFSET has side-effects. */ |
| tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff) |
| : byteoff; |
| offsave = fold_convert_loc (loc, sizetype, offsave); |
| tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave, |
| size_int (len)); |
| tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len), |
| offsave); |
| lenexp = fold_convert_loc (loc, ssizetype, lenexp); |
| return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp, |
| build_zero_cst (ssizetype)); |
| } |
| |
| /* Offset from the beginning of the string in elements. */ |
| HOST_WIDE_INT eltoff; |
| |
| /* We have a known offset into the string. Start searching there for |
| a null character if we can represent it as a single HOST_WIDE_INT. */ |
| if (byteoff == 0) |
| eltoff = 0; |
| else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize) |
| eltoff = -1; |
| else |
| eltoff = tree_to_uhwi (byteoff) / eltsize; |
| |
| /* If the offset is known to be out of bounds, warn, and call strlen at |
| runtime. */ |
| if (eltoff < 0 || eltoff >= maxelts) |
| { |
| /* Suppress multiple warnings for propagated constant strings. */ |
| if (only_value != 2 |
| && !warning_suppressed_p (arg, OPT_Warray_bounds) |
| && warning_at (loc, OPT_Warray_bounds, |
| "offset %qwi outside bounds of constant string", |
| eltoff)) |
| { |
| if (decl) |
| inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl); |
| suppress_warning (arg, OPT_Warray_bounds); |
| } |
| return NULL_TREE; |
| } |
| |
| /* If eltoff is larger than strelts but less than maxelts the |
| string length is zero, since the excess memory will be zero. */ |
| if (eltoff > strelts) |
| return ssize_int (0); |
| |
| /* Use strlen to search for the first zero byte. Since any strings |
| constructed with build_string will have nulls appended, we win even |
| if we get handed something like (char[4])"abcd". |
| |
| Since ELTOFF is our starting index into the string, no further |
| calculation is needed. */ |
| unsigned len = string_length (ptr + eltoff * eltsize, eltsize, |
| strelts - eltoff); |
| |
| /* Don't know what to return if there was no zero termination. |
| Ideally this would turn into a gcc_checking_assert over time. |
| Set DECL/LEN so callers can examine them. */ |
| if (len >= maxelts - eltoff) |
| { |
| data->decl = decl; |
| data->off = byteoff; |
| data->minlen = ssize_int (len); |
| return NULL_TREE; |
| } |
| |
| return ssize_int (len); |
| } |
| |
| /* Return a constant integer corresponding to target reading |
| GET_MODE_BITSIZE (MODE) bits from string constant STR. If |
| NULL_TERMINATED_P, reading stops after the first '\0' character and all |
| further characters are assumed to be zero; otherwise it reads as many |
| characters as needed. */ |
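| /* As an illustration, on a target where neither BYTES_BIG_ENDIAN nor |
| WORDS_BIG_ENDIAN is set, c_readstr ("ab", SImode) yields the constant |
| 0x6261: 'a' (0x61) lands in the low byte, 'b' (0x62) in the next one, |
| and the remaining bytes are zero because reading stops at the |
| terminating NUL. */ |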
| |
| rtx |
| c_readstr (const char *str, scalar_int_mode mode, |
| bool null_terminated_p/*=true*/) |
| { |
| HOST_WIDE_INT ch; |
| unsigned int i, j; |
| HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT]; |
| |
| gcc_assert (GET_MODE_CLASS (mode) == MODE_INT); |
| unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1) |
| / HOST_BITS_PER_WIDE_INT; |
| |
| gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT); |
| for (i = 0; i < len; i++) |
| tmp[i] = 0; |
| |
| ch = 1; |
| for (i = 0; i < GET_MODE_SIZE (mode); i++) |
| { |
| j = i; |
| if (WORDS_BIG_ENDIAN) |
| j = GET_MODE_SIZE (mode) - i - 1; |
| if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN |
| && GET_MODE_SIZE (mode) >= UNITS_PER_WORD) |
| j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1; |
| j *= BITS_PER_UNIT; |
| |
| if (ch || !null_terminated_p) |
| ch = (unsigned char) str[i]; |
| tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT); |
| } |
| |
| wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode)); |
| return immed_wide_int_const (c, mode); |
| } |
| |
| /* Cast a target constant CST to target CHAR and, if that value fits into |
| the host char type, return zero and put that value into the variable |
| pointed to by P. */ |
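| /* For example, for an INTEGER_CST with value 'A' (0x41) this stores |
| 0x41 in *P and returns zero; for a tree that is not an INTEGER_CST, or |
| for a value that does not survive the narrowing to the host char, it |
| returns 1 and leaves *P untouched. */ |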
| |
| static int |
| target_char_cast (tree cst, char *p) |
| { |
| unsigned HOST_WIDE_INT val, hostval; |
| |
| if (TREE_CODE (cst) != INTEGER_CST |
| || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT) |
| return 1; |
| |
| /* Do not care if it fits or not right here. */ |
| val = TREE_INT_CST_LOW (cst); |
| |
| if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT) |
| val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1; |
| |
| hostval = val; |
| if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT) |
| hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1; |
| |
| if (val != hostval) |
| return 1; |
| |
| *p = hostval; |
| return 0; |
| } |
| |
| /* Similar to save_expr, but assumes that arbitrary code is not executed |
| in between the multiple evaluations. In particular, we assume that a |
| non-addressable local variable will not be modified. */ |
| |
| static tree |
| builtin_save_expr (tree exp) |
| { |
| if (TREE_CODE (exp) == SSA_NAME |
| || (TREE_ADDRESSABLE (exp) == 0 |
| && (TREE_CODE (exp) == PARM_DECL |
| || (VAR_P (exp) && !TREE_STATIC (exp))))) |
| return exp; |
| |
| return save_expr (exp); |
| } |
| |
| /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT |
| times to get the address of either a higher stack frame, or a return |
| address located within it (depending on FNDECL_CODE). */ |
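| /* For example, __builtin_frame_address (2) reaches this code with |
| FNDECL_CODE == BUILT_IN_FRAME_ADDRESS and COUNT == 2: the dynamic chain |
| is followed twice and the resulting frame address is returned, possibly |
| adjusted by FRAME_ADDR_RTX. */ |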
| |
| static rtx |
| expand_builtin_return_addr (enum built_in_function fndecl_code, int count) |
| { |
| int i; |
| rtx tem = INITIAL_FRAME_ADDRESS_RTX; |
| if (tem == NULL_RTX) |
| { |
| /* For a zero count with __builtin_return_address, we don't care what |
| frame address we return, because target-specific definitions will |
| override us. Therefore frame pointer elimination is OK, and using |
| the soft frame pointer is OK. |
| |
| For a nonzero count, or a zero count with __builtin_frame_address, |
| we require a stable offset from the current frame pointer to the |
| previous one, so we must use the hard frame pointer, and |
| we must disable frame pointer elimination. */ |
| if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS) |
| tem = frame_pointer_rtx; |
| else |
| { |
| tem = hard_frame_pointer_rtx; |
| |
| /* Tell reload not to eliminate the frame pointer. */ |
| crtl->accesses_prior_frames = 1; |
| } |
| } |
| |
| if (count > 0) |
| SETUP_FRAME_ADDRESSES (); |
| |
| /* On the SPARC, the return address is not in the frame, it is in a |
| register. There is no way to access it off of the current frame |
| pointer, but it can be accessed off the previous frame pointer by |
| reading the value from the register window save area. */ |
| if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS) |
| count--; |
| |
| /* Scan back COUNT frames to the specified frame. */ |
| for (i = 0; i < count; i++) |
| { |
| /* Assume the dynamic chain pointer is in the word that the |
| frame address points to, unless otherwise specified. */ |
| tem = DYNAMIC_CHAIN_ADDRESS (tem); |
| tem = memory_address (Pmode, tem); |
| tem = gen_frame_mem (Pmode, tem); |
| tem = copy_to_reg (tem); |
| } |
| |
| /* For __builtin_frame_address, return what we've got. But, on |
| the SPARC for example, we may have to add a bias. */ |
| if (fndecl_code == BUILT_IN_FRAME_ADDRESS) |
| return FRAME_ADDR_RTX (tem); |
| |
| /* For __builtin_return_address, get the return address from that frame. */ |
| #ifdef RETURN_ADDR_RTX |
| tem = RETURN_ADDR_RTX (count, tem); |
| #else |
| tem = memory_address (Pmode, |
| plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode))); |
| tem = gen_frame_mem (Pmode, tem); |
| #endif |
| return tem; |
| } |
| |
| /* Alias set used for setjmp buffer. */ |
| static alias_set_type setjmp_alias_set = -1; |
| |
| /* Construct the leading half of a __builtin_setjmp call. Control will |
| return to RECEIVER_LABEL. This is also called directly by the SJLJ |
| exception handling code. */ |
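| /* The buffer written here holds the caller's frame pointer in word 0, |
| the address of RECEIVER_LABEL in word 1, and the machine-dependent |
| stack save area starting at word 2 (word size being GET_MODE_SIZE |
| (Pmode)); expand_builtin_longjmp and expand_builtin_update_setjmp_buf |
| read the buffer back using the same offsets. */ |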
| |
| void |
| expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label) |
| { |
| machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL); |
| rtx stack_save; |
| rtx mem; |
| |
| if (setjmp_alias_set == -1) |
| setjmp_alias_set = new_alias_set (); |
| |
| buf_addr = convert_memory_address (Pmode, buf_addr); |
| |
| buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX)); |
| |
| /* We store the frame pointer and the address of receiver_label in |
| the buffer and use the rest of it for the stack save area, which |
| is machine-dependent. */ |
| |
| mem = gen_rtx_MEM (Pmode, buf_addr); |
| set_mem_alias_set (mem, setjmp_alias_set); |
| emit_move_insn (mem, hard_frame_pointer_rtx); |
| |
| mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr, |
| GET_MODE_SIZE (Pmode))); |
| set_mem_alias_set (mem, setjmp_alias_set); |
| |
| emit_move_insn (validize_mem (mem), |
| force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label))); |
| |
| stack_save = gen_rtx_MEM (sa_mode, |
| plus_constant (Pmode, buf_addr, |
| 2 * GET_MODE_SIZE (Pmode))); |
| set_mem_alias_set (stack_save, setjmp_alias_set); |
| emit_stack_save (SAVE_NONLOCAL, &stack_save); |
| |
| /* If there is further processing to do, do it. */ |
| if (targetm.have_builtin_setjmp_setup ()) |
| emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr)); |
| |
| /* We have a nonlocal label. */ |
| cfun->has_nonlocal_label = 1; |
| } |
| |
| /* Construct the trailing part of a __builtin_setjmp call. This is |
| also called directly by the SJLJ exception handling code. |
| If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */ |
| |
| void |
| expand_builtin_setjmp_receiver (rtx receiver_label) |
| { |
| rtx chain; |
| |
| /* The frame pointer is used when we get here, so we have to make sure |
| it's marked as used by this function. */ |
| emit_use (hard_frame_pointer_rtx); |
| |
| /* Mark the static chain as clobbered here so life information |
| doesn't get messed up for it. */ |
| chain = rtx_for_static_chain (current_function_decl, true); |
| if (chain && REG_P (chain)) |
| emit_clobber (chain); |
| |
| if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM]) |
| { |
| /* If the argument pointer can be eliminated in favor of the |
| frame pointer, we don't need to restore it. We assume here |
| that if such an elimination is present, it can always be used. |
| This is the case on all known machines; if we don't make this |
| assumption, we do unnecessary saving on many machines. */ |
| size_t i; |
| static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS; |
| |
| for (i = 0; i < ARRAY_SIZE (elim_regs); i++) |
| if (elim_regs[i].from == ARG_POINTER_REGNUM |
| && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM) |
| break; |
| |
| if (i == ARRAY_SIZE (elim_regs)) |
| { |
| /* Now restore our arg pointer from the address at which it |
| was saved in our stack frame. */ |
| emit_move_insn (crtl->args.internal_arg_pointer, |
| copy_to_reg (get_arg_pointer_save_area ())); |
| } |
| } |
| |
| if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ()) |
| emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label)); |
| else if (targetm.have_nonlocal_goto_receiver ()) |
| emit_insn (targetm.gen_nonlocal_goto_receiver ()); |
| else |
| { /* Nothing */ } |
| |
| /* We must not allow the code we just generated to be reordered by |
| scheduling. Specifically, the update of the frame pointer must |
| happen immediately, not later. */ |
| emit_insn (gen_blockage ()); |
| } |
| |
| /* __builtin_longjmp is passed a pointer to an array of five words (not |
| all will be used on all machines). It operates similarly to the C |
| library function of the same name, but is more efficient. Much of |
| the code below is copied from the handling of non-local gotos. */ |
| |
| static void |
| expand_builtin_longjmp (rtx buf_addr, rtx value) |
| { |
| rtx fp, lab, stack; |
| rtx_insn *insn, *last; |
| machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL); |
| |
| /* DRAP is needed for stack realignment if longjmp is expanded within |
| the current function. */ |
| if (SUPPORTS_STACK_ALIGNMENT) |
| crtl->need_drap = true; |
| |
| if (setjmp_alias_set == -1) |
| setjmp_alias_set = new_alias_set (); |
| |
| buf_addr = convert_memory_address (Pmode, buf_addr); |
| |
| buf_addr = force_reg (Pmode, buf_addr); |
| |
| /* We require the user to pass a second argument of 1, because that is |
| what builtin_setjmp will return. */ |
| gcc_assert (value == const1_rtx); |
| |
| last = get_last_insn (); |
| if (targetm.have_builtin_longjmp ()) |
| emit_insn (targetm.gen_builtin_longjmp (buf_addr)); |
| else |
| { |
| fp = gen_rtx_MEM (Pmode, buf_addr); |
| lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr, |
| GET_MODE_SIZE (Pmode))); |
| |
| stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr, |
| 2 * GET_MODE_SIZE (Pmode))); |
| set_mem_alias_set (fp, setjmp_alias_set); |
| set_mem_alias_set (lab, setjmp_alias_set); |
| set_mem_alias_set (stack, setjmp_alias_set); |
| |
| /* Pick up FP, label, and SP from the block and jump. This code is |
| from expand_goto in stmt.cc; see there for detailed comments. */ |
| if (targetm.have_nonlocal_goto ()) |
| /* We have to pass a value to the nonlocal_goto pattern that will |
| get copied into the static_chain pointer, but it does not matter |
| what that value is, because builtin_setjmp does not use it. */ |
| emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp)); |
| else |
| { |
| emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))); |
| emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx)); |
| |
| lab = copy_to_reg (lab); |
| |
| /* Restore the frame pointer and stack pointer. We must use a |
| temporary since the setjmp buffer may be a local. */ |
| fp = copy_to_reg (fp); |
| emit_stack_restore (SAVE_NONLOCAL, stack); |
| |
| /* Ensure the frame pointer move is not optimized. */ |
| emit_insn (gen_blockage ()); |
| emit_clobber (hard_frame_pointer_rtx); |
| emit_clobber (frame_pointer_rtx); |
| emit_move_insn (hard_frame_pointer_rtx, fp); |
| |
| emit_use (hard_frame_pointer_rtx); |
| emit_use (stack_pointer_rtx); |
| emit_indirect_jump (lab); |
| } |
| } |
| |
| /* Search backwards and mark the jump insn as a non-local goto. |
| Note that this precludes the use of __builtin_longjmp to a |
| __builtin_setjmp target in the same function. However, we've |
| already cautioned the user that these functions are for |
| internal exception handling use only. */ |
| for (insn = get_last_insn (); insn; insn = PREV_INSN (insn)) |
| { |
| gcc_assert (insn != last); |
| |
| if (JUMP_P (insn)) |
| { |
| add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx); |
| break; |
| } |
| else if (CALL_P (insn)) |
| break; |
| } |
| } |
| |
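| /* Return true if ITER has more call-expression arguments left to |
| visit. */ |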
| static inline bool |
| more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter) |
| { |
| return (iter->i < iter->n); |
| } |
| |
| /* This function validates the types of a function call argument list |
| against a specified list of tree_codes. If the last specifier is a 0, |
| that represents an ellipsis, otherwise the last specifier must be a |
| VOID_TYPE. */ |
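| /* For example, validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, |
| VOID_TYPE) checks for exactly a pointer argument followed by an integer |
| argument, while validate_arglist (exp, POINTER_TYPE, 0) only checks |
| that the first argument is a pointer and ignores any further ones. */ |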
| |
| static bool |
| validate_arglist (const_tree callexpr, ...) |
| { |
| enum tree_code code; |
| bool res = false; |
| va_list ap; |
| const_call_expr_arg_iterator iter; |
| const_tree arg; |
| |
| va_start (ap, callexpr); |
| init_const_call_expr_arg_iterator (callexpr, &iter); |
| |
| /* Get a bitmap of pointer argument numbers declared attribute nonnull. */ |
| tree fn = CALL_EXPR_FN (callexpr); |
| bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn))); |
| |
| for (unsigned argno = 1; ; ++argno) |
| { |
| code = (enum tree_code) va_arg (ap, int); |
| |
| switch (code) |
| { |
| case 0: |
| /* This signifies an ellipsis; any further arguments are all ok. */ |
| res = true; |
| goto end; |
| case VOID_TYPE: |
| /* This signifies an endlink, if no arguments remain, return |
| true, otherwise return false. */ |
| res = !more_const_call_expr_args_p (&iter); |
| goto end; |
| case POINTER_TYPE: |
| /* The actual argument must be nonnull when either the whole |
| called function has been declared nonnull, or when the formal |
| argument corresponding to the actual argument has been. */ |
| if (argmap |
| && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno))) |
| { |
| arg = next_const_call_expr_arg (&iter); |
| if (!validate_arg (arg, code) || integer_zerop (arg)) |
| goto end; |
| break; |
| } |
| /* FALLTHRU */ |
| default: |
| /* If no parameters remain or the parameter's code does not |
| match the specified code, return false. Otherwise continue |
| checking any remaining arguments. */ |
| arg = next_const_call_expr_arg (&iter); |
| if (!validate_arg (arg, code)) |
| goto end; |
| break; |
| } |
| } |
| |
| /* We need gotos here since we can only have one VA_CLOSE in a |
| function. */ |
| end: ; |
| va_end (ap); |
| |
| BITMAP_FREE (argmap); |
| |
| return res; |
| } |
| |
| /* Expand a call to __builtin_nonlocal_goto. We're passed the target label |
| and the address of the save area. */ |
| |
| static rtx |
| expand_builtin_nonlocal_goto (tree exp) |
| { |
| tree t_label, t_save_area; |
| rtx r_label, r_save_area, r_fp, r_sp; |
| rtx_insn *insn; |
| |
| if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)) |
| return NULL_RTX; |
| |
| t_label = CALL_EXPR_ARG (exp, 0); |
| t_save_area = CALL_EXPR_ARG (exp, 1); |
| |
| r_label = expand_normal (t_label); |
| r_label = convert_memory_address (Pmode, r_label); |
| r_save_area = expand_normal (t_save_area); |
| r_save_area = convert_memory_address (Pmode, r_save_area); |
| /* Copy the address of the save location to a register just in case it was |
| based on the frame pointer. */ |
| r_save_area = copy_to_reg (r_save_area); |
| r_fp = gen_rtx_MEM (Pmode, r_save_area); |
| r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL), |
| plus_constant (Pmode, r_save_area, |
| GET_MODE_SIZE (Pmode))); |
| |
| crtl->has_nonlocal_goto = 1; |
| |
| /* ??? We no longer need to pass the static chain value, afaik. */ |
| if (targetm.have_nonlocal_goto ()) |
| emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp)); |
| else |
| { |
| emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))); |
| emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx)); |
| |
| r_label = copy_to_reg (r_label); |
| |
| /* Restore the frame pointer and stack pointer. We must use a |
| temporary since the setjmp buffer may be a local. */ |
| r_fp = copy_to_reg (r_fp); |
| emit_stack_restore (SAVE_NONLOCAL, r_sp); |
| |
| /* Ensure the frame pointer move is not optimized. */ |
| emit_insn (gen_blockage ()); |
| emit_clobber (hard_frame_pointer_rtx); |
| emit_clobber (frame_pointer_rtx); |
| emit_move_insn (hard_frame_pointer_rtx, r_fp); |
| |
| /* USE of hard_frame_pointer_rtx added for consistency; |
| not clear if really needed. */ |
| emit_use (hard_frame_pointer_rtx); |
| emit_use (stack_pointer_rtx); |
| |
| /* If the architecture is using a GP register, we must |
| conservatively assume that the target function makes use of it. |
| The prologue of functions with nonlocal gotos must therefore |
| initialize the GP register to the appropriate value, and we |
| must then make sure that this value is live at the point |
| of the jump. (Note that this doesn't necessarily apply |
| to targets with a nonlocal_goto pattern; they are free |
| to implement it in their own way. Note also that this is |
| a no-op if the GP register is a global invariant.) */ |
| unsigned regnum = PIC_OFFSET_TABLE_REGNUM; |
| if (regnum != INVALID_REGNUM && fixed_regs[regnum]) |
| emit_use (pic_offset_table_rtx); |
| |
| emit_indirect_jump (r_label); |
| } |
| |
| /* Search backwards to the jump insn and mark it as a |
| non-local goto. */ |
| for (insn = get_last_insn (); insn; insn = PREV_INSN (insn)) |
| { |
| if (JUMP_P (insn)) |
| { |
| add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx); |
| break; |
| } |
| else if (CALL_P (insn)) |
| break; |
| } |
| |
| return const0_rtx; |
| } |
| |
| /* __builtin_update_setjmp_buf is passed a pointer to an array of five words |
| (not all will be used on all machines) that was passed to __builtin_setjmp. |
| It updates the stack pointer in that block to the current value. This is |
| also called directly by the SJLJ exception handling code. */ |
| |
| void |
| expand_builtin_update_setjmp_buf (rtx buf_addr) |
| { |
| machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL); |
| buf_addr = convert_memory_address (Pmode, buf_addr); |
| rtx stack_save |
| = gen_rtx_MEM (sa_mode, |
| memory_address |
| (sa_mode, |
| plus_constant (Pmode, buf_addr, |
| 2 * GET_MODE_SIZE (Pmode)))); |
| |
| emit_stack_save (SAVE_NONLOCAL, &stack_save); |
| } |
| |
| /* Expand a call to __builtin_prefetch. For a target that does not support |
| data prefetch, evaluate the memory address argument in case it has side |
| effects. */ |
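| /* For example, __builtin_prefetch (p) behaves like |
| __builtin_prefetch (p, 0, 3), a prefetch for reading with maximal |
| temporal locality, while __builtin_prefetch (p, 1, 0) requests a |
| prefetch for writing with no expected temporal locality. */ |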
| |
| static void |
| expand_builtin_prefetch (tree exp) |
| { |
| tree arg0, arg1, arg2; |
| int nargs; |
| rtx op0, op1, op2; |
| |
| if (!validate_arglist (exp, POINTER_TYPE, 0)) |
| return; |
| |
| arg0 = CALL_EXPR_ARG (exp, 0); |
| |
| /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to |
| zero (read) and argument 2 (locality) defaults to 3 (high degree of |
| locality). */ |
| nargs = call_expr_nargs (exp); |
| if (nargs > 1) |
| arg1 = CALL_EXPR_ARG (exp, 1); |
| else |
| arg1 = integer_zero_node; |
| if (nargs > 2) |
| arg2 = CALL_EXPR_ARG (exp, 2); |
| else |
| arg2 = integer_three_node; |
| |
| /* Argument 0 is an address. */ |
| op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL); |
| |
| /* Argument 1 (read/write flag) must be a compile-time constant int. */ |
| if (TREE_CODE (arg1) != INTEGER_CST) |
| { |
| error ("second argument to %<__builtin_prefetch%> must be a constant"); |
| arg1 = integer_zero_node; |
| } |
| op1 = expand_normal (arg1); |
| /* Argument 1 must be either zero or one. */ |
| if (INTVAL (op1) != 0 && INTVAL (op1) != 1) |
| { |
| warning (0, "invalid second argument to %<__builtin_prefetch%>;" |
| " using zero"); |
| op1 = const0_rtx; |
| } |
| |
| /* Argument 2 (locality) must be a compile-time constant int. */ |
| if (TREE_CODE (arg2) != INTEGER_CST) |
| { |
| error ("third argument to %<__builtin_prefetch%> must be a constant"); |
| arg2 = integer_zero_node; |
| } |
| op2 = expand_normal (arg2); |
| /* Argument 2 must be 0, 1, 2, or 3. */ |
| if (INTVAL (op2) < 0 || INTVAL (op2) > 3) |
| { |
| warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero"); |
| op2 = const0_rtx; |
| } |
| |
| if (targetm.have_prefetch ()) |
| { |
| class expand_operand ops[3]; |
| |
| create_address_operand (&ops[0], op0); |
| create_integer_operand (&ops[1], INTVAL (op1)); |
| create_integer_operand (&ops[2], INTVAL (op2)); |
| if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops)) |
| return; |
| } |
| |
| /* Don't do anything with direct references to volatile memory, but |
| generate code to handle other side effects. */ |
| if (!MEM_P (op0) && side_effects_p (op0)) |
| emit_insn (op0); |
| } |
| |
| /* Get a MEM rtx for expression EXP which is the address of an operand |
| to be used in a string instruction (cmpstrsi, cpymemsi, ..). LEN is |
| the maximum length of the block of memory that might be accessed or |
| NULL if unknown. */ |
| |
| rtx |
| get_memory_rtx (tree exp, tree len) |
| { |
| tree orig_exp = exp; |
| rtx addr, mem; |
| |
| /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived |
| from its expression; for expr->a.b only <variable>.a.b is recorded. */ |
| if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp)) |
| exp = TREE_OPERAND (exp, 0); |
| |
| addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL); |
| mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr)); |
| |
| /* Get an expression we can use to find the attributes to assign to MEM. |
| First remove any nops. */ |
| while (CONVERT_EXPR_P (exp) |
| && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0)))) |
| exp = TREE_OPERAND (exp, 0); |
| |
| /* Build a MEM_REF representing the whole accessed area as a byte blob |
| (as builtin stringops may alias with anything). */ |
| exp = fold_build2 (MEM_REF, |
| build_array_type (char_type_node, |
| build_range_type (sizetype, |
| size_one_node, len)), |
| exp, build_int_cst (ptr_type_node, 0)); |
| |
| /* If the MEM_REF has no acceptable address, try to get the base object |
| from the original address we got, and build an all-aliasing |
| unknown-sized access to that one. */ |
| if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0))) |
| set_mem_attributes (mem, exp, 0); |
| else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR |
| && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0), |
| 0)))) |
| { |
| exp = build_fold_addr_expr (exp); |
| exp = fold_build2 (MEM_REF, |
| build_array_type (char_type_node, |
| build_range_type (sizetype, |
| size_zero_node, |
| NULL)), |
| exp, build_int_cst (ptr_type_node, 0)); |
| set_mem_attributes (mem, exp, 0); |
| } |
| set_mem_alias_set (mem, 0); |
| return mem; |
| } |
| |
| /* Built-in functions to perform an untyped call and return. */ |
| |
| #define apply_args_mode \ |
| (this_target_builtins->x_apply_args_mode) |
| #define apply_result_mode \ |
| (this_target_builtins->x_apply_result_mode) |
| |
| /* Return the size required for the block returned by __builtin_apply_args, |
| and initialize apply_args_mode. */ |
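| /* As an illustration, on a hypothetical target where Pmode is 8 bytes, |
| the structure value address is not passed as an invisible first |
| argument and a single 8-byte argument register exists, the block holds |
| the incoming arg pointer at offset 0, the structure value address at |
| offset 8 and the argument register contents at offset 16, each slot |
| aligned to the natural alignment of its mode. */ |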
| |
| static int |
| apply_args_size (void) |
| { |
| static int size = -1; |
| int align; |
| unsigned int regno; |
| |
| /* The values computed by this function never change. */ |
| if (size < 0) |
| { |
| /* The first value is the incoming arg-pointer. */ |
| size = GET_MODE_SIZE (Pmode); |
| |
| /* The second value is the structure value address unless this is |
| passed as an "invisible" first argument. */ |
| if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0)) |
| size += GET_MODE_SIZE (Pmode); |
| |
| for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) |
| if (FUNCTION_ARG_REGNO_P (regno)) |
| { |
| fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno); |
| |
| gcc_assert (mode != VOIDmode); |
| |
| align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; |
| if (size % align != 0) |
| size = CEIL (size, align) * align; |
| size += GET_MODE_SIZE (mode); |
| apply_args_mode[regno] = mode; |
| } |
| else |
| { |
| apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode); |
| } |
| } |
| return size; |
| } |
| |
| /* Return the size required for the block returned by __builtin_apply, |
| and initialize apply_result_mode. */ |
| |
| static int |
| apply_result_size (void) |
| { |
| static int size = -1; |
| int align, regno; |
| |
| /* The values computed by this function never change. */ |
| if (size < 0) |
| { |
| size = 0; |
| |
| for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) |
| if (targetm.calls.function_value_regno_p (regno)) |
| { |
| fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno); |
| |
| gcc_assert (mode != VOIDmode); |
| |
| align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; |
| if (size % align != 0) |
| size = CEIL (size, align) * align; |
| size += GET_MODE_SIZE (mode); |
| apply_result_mode[regno] = mode; |
| } |
| else |
| apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode); |
| |
| /* Allow targets that use untyped_call and untyped_return to override |
| the size so that machine-specific information can be stored here. */ |
| #ifdef APPLY_RESULT_SIZE |
| size = APPLY_RESULT_SIZE; |
| #endif |
| } |
| return size; |
| } |
| |
| /* Create a vector describing the result block RESULT. If SAVEP is true, |
| the result block is used to save the values; otherwise it is used to |
| restore the values. */ |
| |
| static rtx |
| result_vector (int savep, rtx result) |
| { |
| int regno, size, align, nelts; |
| fixed_size_mode mode; |
| rtx reg, mem; |
| rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER); |
| |
| size = nelts = 0; |
| for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) |
| if ((mode = apply_result_mode[regno]) != VOIDmode) |
| { |
| align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; |
| if (size % align != 0) |
| size = CEIL (size, align) * align; |
| reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno)); |
| mem = adjust_address (result, mode, size); |
| savevec[nelts++] = (savep |
| ? gen_rtx_SET (mem, reg) |
| : gen_rtx_SET (reg, mem)); |
| size += GET_MODE_SIZE (mode); |
| } |
| return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec)); |
| } |
| |
| /* Save the state required to perform an untyped call with the same |
| arguments as were passed to the current function. */ |
| |
| static rtx |
| expand_builtin_apply_args_1 (void) |
| { |
| rtx registers, tem; |
| int size, align, regno; |
| fixed_size_mode mode; |
| rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1); |
| |
| /* Create a block where the arg-pointer, structure value address, |
| and argument registers can be saved. */ |
| registers = assign_stack_local (BLKmode, apply_args_size (), -1); |
| |
| /* Walk past the arg-pointer and structure value address. */ |
| size = GET_MODE_SIZE (Pmode); |
| if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0)) |
| size += GET_MODE_SIZE (Pmode); |
| |
| /* Save each register used in calling a function to the block. */ |
| for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) |
| if ((mode = apply_args_mode[regno]) != VOIDmode) |
| { |
| align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; |
| if (size % align != 0) |
| size = CEIL (size, align) * align; |
| |
| tem = gen_rtx_REG (mode, INCOMING_REGNO (regno)); |
| |
| emit_move_insn (adjust_address (registers, mode, size), tem); |
| size += GET_MODE_SIZE (mode); |
| } |
| |
| /* Save the arg pointer to the block. */ |
| tem = copy_to_reg (crtl->args.internal_arg_pointer); |
| /* We need the arg pointer as the arguments were actually passed to us, not |
| as we might have pretended they were passed. Make sure it's a valid |
| operand, as emit_move_insn isn't expected to handle a PLUS. */ |
| if (STACK_GROWS_DOWNWARD) |
| tem |
| = force_operand (plus_constant (Pmode, tem, |
| crtl->args.pretend_args_size), |
| NULL_RTX); |
| emit_move_insn (adjust_address (registers, Pmode, 0), tem); |
| |
| size = GET_MODE_SIZE (Pmode); |
| |
| /* Save the structure value address unless this is passed as an |
| "invisible" first argument. */ |
| if (struct_incoming_value) |
| emit_move_insn (adjust_address (registers, Pmode, size), |
| copy_to_reg (struct_incoming_value)); |
| |
| /* Return the address of the block. */ |
| return copy_addr_to_reg (XEXP (registers, 0)); |
| } |
| |
| /* __builtin_apply_args returns a block of memory allocated on |
| the stack into which is stored the arg pointer, structure |
| value address, static chain, and all the registers that might |
| possibly be used in performing a function call. The code is |
| moved to the start of the function so the incoming values are |
| saved. */ |
| |
| static rtx |
| expand_builtin_apply_args (void) |
| { |
| /* Don't do __builtin_apply_args more than once in a function. |
| Save the result of the first call and reuse it. */ |
| if (apply_args_value != 0) |
| return apply_args_value; |
| { |
| /* When this function is called, it means that registers must be |
| saved on entry to this function. So we migrate the |
| call to the first insn of this function. */ |
| rtx temp; |
| |
| start_sequence (); |
| temp = expand_builtin_apply_args_1 (); |
| rtx_insn *seq = get_insns (); |
| end_sequence (); |
| |
| apply_args_value = temp; |
| |
| /* Put the insns after the NOTE that starts the function. |
| If this is inside a start_sequence, make the outer-level insn |
| chain current, so the code is placed at the start of the |
| function. If internal_arg_pointer is a non-virtual pseudo, |
| it needs to be placed after the insns that initialize |
| that pseudo. */ |
| push_topmost_sequence (); |
| if (REG_P (crtl->args.internal_arg_pointer) |
| && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER) |
| emit_insn_before (seq, parm_birth_insn); |
| else |
| emit_insn_before (seq, NEXT_INSN (entry_of_function ())); |
| pop_topmost_sequence (); |
| return temp; |
| } |
| } |
| |
| /* Perform an untyped call and save the state required to perform an |
| untyped return of whatever value was returned by the given function. */ |
| |
| static rtx |
| expand_builtin_apply (rtx function, rtx arguments, rtx argsize) |
| { |
| int size, align, regno; |
| fixed_size_mode mode; |
| rtx incoming_args, result, reg, dest, src; |
| rtx_call_insn *call_insn; |
| rtx old_stack_level = 0; |
| rtx call_fusage = 0; |
| rtx struct_value |
| = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0); |
| |
| arguments = convert_memory_address (Pmode, arguments); |
| |
| /* Create a block where the return registers can be saved. */ |
| result = assign_stack_local (BLKmode, apply_result_size (), -1); |
| |
| /* Fetch the arg pointer from the ARGUMENTS block. */ |
| incoming_args = gen_reg_rtx (Pmode); |
| emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments)); |
| if (!STACK_GROWS_DOWNWARD) |
| incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize, |
| incoming_args, 0, OPTAB_LIB_WIDEN); |
| |
| /* Push a new argument block and copy the arguments. Do not allow |
| the (potential) memcpy call below to interfere with our stack |
| manipulations. */ |
| do_pending_stack_adjust (); |
| NO_DEFER_POP; |
| |
| /* Save the stack with nonlocal if available. */ |
| if (targetm.have_save_stack_nonlocal ()) |
| emit_stack_save (SAVE_NONLOCAL, &old_stack_level); |
| else |
| emit_stack_save (SAVE_BLOCK, &old_stack_level); |
| |
| /* Allocate a block of memory onto the stack and copy the memory |
| arguments to the outgoing arguments address. We can pass TRUE |
| as the 4th argument because we just saved the stack pointer |
| and will restore it right after the call. */ |
| allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true); |
| |
| /* Set DRAP flag to true, even though allocate_dynamic_stack_space |
| may have already set current_function_calls_alloca to true. |
| current_function_calls_alloca won't be set if argsize is zero, |
| so we have to guarantee need_drap is true here. */ |
| if (SUPPORTS_STACK_ALIGNMENT) |
| crtl->need_drap = true; |
| |
| dest = virtual_outgoing_args_rtx; |
| if (!STACK_GROWS_DOWNWARD) |
| { |
| if (CONST_INT_P (argsize)) |
| dest = plus_constant (Pmode, dest, -INTVAL (argsize)); |
| else |
| dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize)); |
| } |
| dest = gen_rtx_MEM (BLKmode, dest); |
| set_mem_align (dest, PARM_BOUNDARY); |
| src = gen_rtx_MEM (BLKmode, incoming_args); |
| set_mem_align (src, PARM_BOUNDARY); |
| emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL); |
| |
| /* Refer to the argument block. */ |
| apply_args_size (); |
| arguments = gen_rtx_MEM (BLKmode, arguments); |
| set_mem_align (arguments, PARM_BOUNDARY); |
| |
| /* Walk past the arg-pointer and structure value address. */ |
| size = GET_MODE_SIZE (Pmode); |
| if (struct_value) |
| size += GET_MODE_SIZE (Pmode); |
| |
| /* Restore each of the registers previously saved. Make USE insns |
| for each of these registers for use in making the call. */ |
| for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) |
| if ((mode = apply_args_mode[regno]) != VOIDmode) |
| { |
| align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; |
| if (size % align != 0) |
| size = CEIL (size, align) * align; |
| reg = gen_rtx_REG (mode, regno); |
| emit_move_insn (reg, adjust_address (arguments, mode, size)); |
| use_reg (&call_fusage, reg); |
| size += GET_MODE_SIZE (mode); |
| } |
| |
| /* Restore the structure value address unless this is passed as an |
| "invisible" first argument. */ |
| size = GET_MODE_SIZE (Pmode); |
| if (struct_value) |
| { |
| rtx value = gen_reg_rtx (Pmode); |
| emit_move_insn (value, adjust_address (arguments, Pmode, size)); |
| emit_move_insn (struct_value, value); |
| if (REG_P (struct_value)) |
| use_reg (&call_fusage, struct_value); |
| } |
| |
| /* All arguments and registers used for the call are set up by now! */ |
| function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0); |
| |
| /* Ensure address is valid. SYMBOL_REF is already valid, so no need, |
| and we don't want to load it into a register as an optimization, |
| because prepare_call_address already did it if it should be done. */ |
| if (GET_CODE (function) != SYMBOL_REF) |
| function = memory_address (FUNCTION_MODE, function); |
| |
| /* Generate the actual call instruction and save the return value. */ |
| if (targetm.have_untyped_call ()) |
| { |
| rtx mem = gen_rtx_MEM (FUNCTION_MODE, function); |
| rtx_insn *seq = targetm.gen_untyped_call (mem, result, |
| result_vector (1, result)); |
| for (rtx_insn *insn = seq; insn; insn = NEXT_INSN (insn)) |
| if (CALL_P (insn)) |
| add_reg_note (insn, REG_UNTYPED_CALL, NULL_RTX); |
| emit_insn (seq); |
| } |
| else if (targetm.have_call_value ()) |
| { |
| rtx valreg = 0; |
| |
| /* Locate the unique return register. It is not possible to |
| express a call that sets more than one return register using |
| call_value; use untyped_call for that. In fact, untyped_call |
| only needs to save the return registers in the given block. */ |
| for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) |
| if ((mode = apply_result_mode[regno]) != VOIDmode) |
| { |
| gcc_assert (!valreg); /* have_untyped_call required. */ |
| |
| valreg = gen_rtx_REG (mode, regno); |
| } |
| |
| emit_insn (targetm.gen_call_value (valreg, |
| gen_rtx_MEM (FUNCTION_MODE, function), |
| const0_rtx, NULL_RTX, const0_rtx)); |
| |
| emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg); |
| } |
| else |
| gcc_unreachable (); |
| |
| /* Find the CALL insn we just emitted, and attach the register usage |
| information. */ |
| call_insn = last_call_insn (); |
| add_function_usage_to (call_insn, call_fusage); |
| |
| /* Restore the stack. */ |
| if (targetm.have_save_stack_nonlocal ()) |
| emit_stack_restore (SAVE_NONLOCAL, old_stack_level); |
| else |
| emit_stack_restore (SAVE_BLOCK, old_stack_level); |
| fixup_args_size_notes (call_insn, get_last_insn (), 0); |
| |
| OK_DEFER_POP; |
| |
| /* Return the address of the result block. */ |
| result = copy_addr_to_reg (XEXP (result, 0)); |
| return convert_memory_address (ptr_mode, result); |
| } |
| |
| /* Perform an untyped return. */ |
| |
| static void |
| expand_builtin_return (rtx result) |
| { |
| int size, align, regno; |
| fixed_size_mode mode; |
| rtx reg; |
| rtx_insn *call_fusage = 0; |
| |
| result = convert_memory_address (Pmode, result); |
| |
| apply_result_size (); |
| result = gen_rtx_MEM (BLKmode, result); |
| |
| if (targetm.have_untyped_return ()) |
| { |
| rtx vector = result_vector (0, result); |
| emit_jump_insn (targetm.gen_untyped_return (result, vector)); |
| emit_barrier (); |
| return; |
| } |
| |
| /* Restore the return value and note that each value is used. */ |
| size = 0; |
| for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) |
| if ((mode = apply_result_mode[regno]) != VOIDmode) |
| { |
| align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; |
| if (size % align != 0) |
| size = CEIL (size, align) * align; |
| reg = gen_rtx_REG (mode, INCOMING_REGNO (regno)); |
| emit_move_insn (reg, adjust_address (result, mode, size)); |
| |
| push_to_sequence (call_fusage); |
| emit_use (reg); |
| call_fusage = get_insns (); |
| end_sequence (); |
| size += GET_MODE_SIZE (mode); |
| } |
| |
| /* Put the USE insns before the return. */ |
| emit_insn (call_fusage); |
| |
| /* Return whatever values were restored by jumping directly to the end |
| of the function. */ |
| expand_naked_return (); |
| } |
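| |
| /* An illustrative sketch (not part of GCC itself) of how the three builtins |
| above compose in user code; TARGET_FN and the 16-byte argument-block size |
| are assumed placeholders: |
| |
| int forward (int a, int b) |
| { |
| void *args = __builtin_apply_args (); |
| void *ret = __builtin_apply ((void (*) ()) target_fn, args, 16); |
| __builtin_return (ret); |
| } |
| |
| expand_builtin_apply_args, expand_builtin_apply and expand_builtin_return |
| provide the RTL expansions for those three calls respectively. */ |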
| |
| /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */ |
| |
| static enum type_class |
| type_to_class (tree type) |
| { |
| switch (TREE_CODE (type)) |
| { |
| case VOID_TYPE: return void_type_class; |
| case INTEGER_TYPE: return integer_type_class; |
| case ENUMERAL_TYPE: return enumeral_type_class; |
| case BOOLEAN_TYPE: return boolean_type_class; |
| case POINTER_TYPE: return pointer_type_class; |
| case REFERENCE_TYPE: return reference_type_class; |
| case OFFSET_TYPE: return offset_type_class; |
| case REAL_TYPE: return real_type_class; |
| case COMPLEX_TYPE: return complex_type_class; |
| case FUNCTION_TYPE: return function_type_class; |
| case METHOD_TYPE: return method_type_class; |
| case RECORD_TYPE: return record_type_class; |
| case UNION_TYPE: |
| case QUAL_UNION_TYPE: return union_type_class; |
| case ARRAY_TYPE: return (TYPE_STRING_FLAG (type) |
| ? string_type_class : array_type_class); |
| case LANG_TYPE: return lang_type_class; |
| case OPAQUE_TYPE: return opaque_type_class; |
| default: return no_type_class; |
| } |
| } |
| |
| /* Expand a call EXP to __builtin_classify_type. */ |
| |
| static rtx |
| expand_builtin_classify_type (tree exp) |
| { |
| if (call_expr_nargs (exp)) |
| return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))); |
| return GEN_INT (no_type_class); |
| } |
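| |
| /* For example (purely illustrative, not used by the code): on a typical |
| target __builtin_classify_type (0) evaluates to integer_type_class and |
| __builtin_classify_type (0.0) to real_type_class; a call that somehow has |
| no arguments folds to no_type_class as above. */ |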
| |
| /* This helper macro, meant to be used in mathfn_built_in below, determines |
| which among a set of builtin math functions is appropriate for a given type |
| mode. The `F' (float) and `L' (long double) are automatically generated |
| from the 'double' case. If a function supports the _Float<N> and _Float<N>X |
| types, there are additional types that are considered with 'F32', 'F64', |
| 'F128', etc. suffixes. */ |
| #define CASE_MATHFN(MATHFN) \ |
| CASE_CFN_##MATHFN: \ |
| fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \ |
| fcodel = BUILT_IN_##MATHFN##L ; break; |
| /* Similar to the above, but also add support for the _Float<N> and _Float<N>X |
| types. */ |
| #define CASE_MATHFN_FLOATN(MATHFN) \ |
| CASE_CFN_##MATHFN: \ |
| fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \ |
| fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \ |
| fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \ |
| fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \ |
| fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\ |
| break; |
| /* Similar to above, but appends _R after any F/L suffix. */ |
| #define CASE_MATHFN_REENT(MATHFN) \ |
| case CFN_BUILT_IN_##MATHFN##_R: \ |
| case CFN_BUILT_IN_##MATHFN##F_R: \ |
| case CFN_BUILT_IN_##MATHFN##L_R: \ |
| fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \ |
| fcodel = BUILT_IN_##MATHFN##L_R ; break; |
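| |
| /* For instance, CASE_MATHFN (SIN) expands to roughly |
| CASE_CFN_SIN: |
| fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF; |
| fcodel = BUILT_IN_SINL; break; |
| so each case records the double/float/long double variants of one function |
| for the type dispatch at the end of mathfn_built_in_2 below. */ |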
| |
| /* Return a function equivalent to FN but operating on floating-point |
| values of type TYPE, or END_BUILTINS if no such function exists. |
| This is purely an operation on function codes; it does not guarantee |
| that the target actually has an implementation of the function. */ |
| |
| static built_in_function |
| mathfn_built_in_2 (tree type, combined_fn fn) |
| { |
| tree mtype; |
| built_in_function fcode, fcodef, fcodel; |
| built_in_function fcodef16 = END_BUILTINS; |
| built_in_function fcodef32 = END_BUILTINS; |
| built_in_function fcodef64 = END_BUILTINS; |
| built_in_function fcodef128 = END_BUILTINS; |
| built_in_function fcodef32x = END_BUILTINS; |
| built_in_function fcodef64x = END_BUILTINS; |
| built_in_function fcodef128x = END_BUILTINS; |
| |
| switch (fn) |
| { |
| #define SEQ_OF_CASE_MATHFN \ |
| CASE_MATHFN (ACOS) \ |
| CASE_MATHFN (ACOSH) \ |
| CASE_MATHFN (ASIN) \ |
| CASE_MATHFN (ASINH) \ |
| CASE_MATHFN (ATAN) \ |
| CASE_MATHFN (ATAN2) \ |
| CASE_MATHFN (ATANH) \ |
| CASE_MATHFN (CBRT) \ |
| CASE_MATHFN_FLOATN (CEIL) \ |
| CASE_MATHFN (CEXPI) \ |
| CASE_MATHFN_FLOATN (COPYSIGN) \ |
| CASE_MATHFN (COS) \ |
| CASE_MATHFN (COSH) \ |
| CASE_MATHFN (DREM) \ |
| CASE_MATHFN (ERF) \ |
| CASE_MATHFN (ERFC) \ |
| CASE_MATHFN (EXP) \ |
| CASE_MATHFN (EXP10) \ |
| CASE_MATHFN (EXP2) \ |
| CASE_MATHFN (EXPM1) \ |
| CASE_MATHFN (FABS) \ |
| CASE_MATHFN (FDIM) \ |
| CASE_MATHFN_FLOATN (FLOOR) \ |
| CASE_MATHFN_FLOATN (FMA) \ |
| CASE_MATHFN_FLOATN (FMAX) \ |
| CASE_MATHFN_FLOATN (FMIN) \ |
| CASE_MATHFN (FMOD) \ |
| CASE_MATHFN (FREXP) \ |
| CASE_MATHFN (GAMMA) \ |
| CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */ \ |
| CASE_MATHFN (HUGE_VAL) \ |
| CASE_MATHFN (HYPOT) \ |
| CASE_MATHFN (ILOGB) \ |
| CASE_MATHFN (ICEIL) \ |
| CASE_MATHFN (IFLOOR) \ |
| CASE_MATHFN (INF) \ |
| CASE_MATHFN (IRINT) \ |
| CASE_MATHFN (IROUND) \ |
| CASE_MATHFN (ISINF) \ |
| CASE_MATHFN (J0) \ |
| CASE_MATHFN (J1) \ |
| CASE_MATHFN (JN) \ |
| CASE_MATHFN (LCEIL) \ |
| CASE_MATHFN (LDEXP) \ |
| CASE_MATHFN (LFLOOR) \ |
| CASE_MATHFN (LGAMMA) \ |
| CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */ \ |
| CASE_MATHFN (LLCEIL) \ |
| CASE_MATHFN (LLFLOOR) \ |
| CASE_MATHFN (LLRINT) \ |
| CASE_MATHFN (LLROUND) \ |
| CASE_MATHFN (LOG) \ |
| CASE_MATHFN (LOG10) \ |
| CASE_MATHFN (LOG1P) \ |
| CASE_MATHFN (LOG2) \ |
| CASE_MATHFN (LOGB) \ |
| CASE_MATHFN (LRINT) \ |
| CASE_MATHFN (LROUND) \ |
| CASE_MATHFN (MODF) \ |
| CASE_MATHFN (NAN) \ |
| CASE_MATHFN (NANS) \ |
| CASE_MATHFN_FLOATN (NEARBYINT) \ |
| CASE_MATHFN (NEXTAFTER) \ |
| CASE_MATHFN (NEXTTOWARD) \ |
| CASE_MATHFN (POW) \ |
| CASE_MATHFN (POWI) \ |
| CASE_MATHFN (POW10) \ |
| CASE_MATHFN (REMAINDER) \ |
| CASE_MATHFN (REMQUO) \ |
| CASE_MATHFN_FLOATN (RINT) \ |
| CASE_MATHFN_FLOATN (ROUND) \ |
| CASE_MATHFN_FLOATN (ROUNDEVEN) \ |
| CASE_MATHFN (SCALB) \ |
| CASE_MATHFN (SCALBLN) \ |
| CASE_MATHFN (SCALBN) \ |
| CASE_MATHFN (SIGNBIT) \ |
| CASE_MATHFN (SIGNIFICAND) \ |
| CASE_MATHFN (SIN) \ |
| CASE_MATHFN (SINCOS) \ |
| CASE_MATHFN (SINH) \ |
| CASE_MATHFN_FLOATN (SQRT) \ |
| CASE_MATHFN (TAN) \ |
| CASE_MATHFN (TANH) \ |
| CASE_MATHFN (TGAMMA) \ |
| CASE_MATHFN_FLOATN (TRUNC) \ |
| CASE_MATHFN (Y0) \ |
| CASE_MATHFN (Y1) \ |
| CASE_MATHFN (YN) |
| |
| SEQ_OF_CASE_MATHFN |
| |
| default: |
| return END_BUILTINS; |
| } |
| |
| mtype = TYPE_MAIN_VARIANT (type); |
| if (mtype == double_type_node) |
| return fcode; |
| else if (mtype == float_type_node) |
| return fcodef; |
| else if (mtype == long_double_type_node) |
| return fcodel; |
| else if (mtype == float16_type_node) |
| return fcodef16; |
| else if (mtype == float32_type_node) |
| return fcodef32; |
| else if (mtype == float64_type_node) |
| return fcodef64; |
| else if (mtype == float128_type_node) |
| return fcodef128; |
| else if (mtype == float32x_type_node) |
| return fcodef32x; |
| else if (mtype == float64x_type_node) |
| return fcodef64x; |
| else if (mtype == float128x_type_node) |
| return fcodef128x; |
| else |
| return END_BUILTINS; |
| } |
| |
| #undef CASE_MATHFN |
| #undef CASE_MATHFN_FLOATN |
| #undef CASE_MATHFN_REENT |
| |
| /* Return the mathematical function equivalent to FN but operating directly |
| on TYPE, if available. If IMPLICIT_P is true use the implicit builtin |
| declaration, otherwise use the explicit declaration. If we can't do the |
| conversion, return null. */ |
| |
| static tree |
| mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p) |
| { |
| built_in_function fcode2 = mathfn_built_in_2 (type, fn); |
| if (fcode2 == END_BUILTINS) |
| return NULL_TREE; |
| |
| if (implicit_p && !builtin_decl_implicit_p (fcode2)) |
| return NULL_TREE; |
| |
| return builtin_decl_explicit (fcode2); |
| } |
| |
| /* Like mathfn_built_in_1, but always use the implicit builtin declarations. */ |
| |
| tree |
| mathfn_built_in (tree type, combined_fn fn) |
| { |
| return mathfn_built_in_1 (type, fn, /*implicit=*/ 1); |
| } |
| |
| /* Like mathfn_built_in_1, but take a built_in_function and |
| always use the implicit builtin declarations. */ |
| |
| tree |
| mathfn_built_in (tree type, enum built_in_function fn) |
| { |
| return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1); |
| } |
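| |
| /* For example, mathfn_built_in (float_type_node, BUILT_IN_SIN) returns the |
| declaration of sinf (BUILT_IN_SINF) when that builtin is implicitly |
| available, and NULL_TREE otherwise. */ |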
| |
| /* Return the type associated with a built-in function, i.e., the one |
| to be passed to mathfn_built_in to get the type-specific |
| function. */ |
| |
| tree |
| mathfn_built_in_type (combined_fn fn) |
| { |
| #define CASE_MATHFN(MATHFN) \ |
| case CFN_BUILT_IN_##MATHFN: \ |
| return double_type_node; \ |
| case CFN_BUILT_IN_##MATHFN##F: \ |
| return float_type_node; \ |
| case CFN_BUILT_IN_##MATHFN##L: \ |
| return long_double_type_node; |
| |
| #define CASE_MATHFN_FLOATN(MATHFN) \ |
| CASE_MATHFN(MATHFN) \ |
| case CFN_BUILT_IN_##MATHFN##F16: \ |
| return float16_type_node; \ |
| case CFN_BUILT_IN_##MATHFN##F32: \ |
| return float32_type_node; \ |
| case CFN_BUILT_IN_##MATHFN##F64: \ |
| return float64_type_node; \ |
| case CFN_BUILT_IN_##MATHFN##F128: \ |
| return float128_type_node; \ |
| case CFN_BUILT_IN_##MATHFN##F32X: \ |
| return float32x_type_node; \ |
| case CFN_BUILT_IN_##MATHFN##F64X: \ |
| return float64x_type_node; \ |
| case CFN_BUILT_IN_##MATHFN##F128X: \ |
| return float128x_type_node; |
| |
| /* Similar to above, but appends _R after any F/L suffix. */ |
| #define CASE_MATHFN_REENT(MATHFN) \ |
| case CFN_BUILT_IN_##MATHFN##_R: \ |
| return double_type_node; \ |
| case CFN_BUILT_IN_##MATHFN##F_R: \ |
| return float_type_node; \ |
| case CFN_BUILT_IN_##MATHFN##L_R: \ |
| return long_double_type_node; |
| |
| switch (fn) |
| { |
| SEQ_OF_CASE_MATHFN |
| |
| default: |
| return NULL_TREE; |
| } |
| |
| #undef CASE_MATHFN |
| #undef CASE_MATHFN_FLOATN |
| #undef CASE_MATHFN_REENT |
| #undef SEQ_OF_CASE_MATHFN |
| } |
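| |
| /* For example, mathfn_built_in_type (CFN_BUILT_IN_SQRTL) is |
| long_double_type_node and mathfn_built_in_type (CFN_BUILT_IN_SQRTF128) is |
| float128_type_node; combined with mathfn_built_in this maps a builtin to |
| the same operation on a different floating-point type. */ |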
| |
| /* Check whether there is an internal function associated with function FN |
| and return type RETURN_TYPE. Return the function if so, otherwise return |
| IFN_LAST. |
| |
| Note that this function only tests whether the function is defined in |
| internal-fn.def, not whether it is actually available on the target. */ |
| |
| static internal_fn |
| associated_internal_fn (built_in_function fn, tree return_type) |
| { |
| switch (fn) |
| { |
| #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \ |
| CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; |
| #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \ |
| CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \ |
| CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME; |
| #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \ |
| CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME; |
| #include "internal-fn.def" |
| |
| CASE_FLT_FN (BUILT_IN_POW10): |
| return IFN_EXP10; |
| |
| CASE_FLT_FN (BUILT_IN_DREM): |
| return IFN_REMAINDER; |
| |
| CASE_FLT_FN (BUILT_IN_SCALBN): |
| CASE_FLT_FN (BUILT_IN_SCALBLN): |
| if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2) |
| return IFN_LDEXP; |
| return IFN_LAST; |
| |
| default: |
| return IFN_LAST; |
| } |
| } |
| |
| /* If BUILT_IN_NORMAL function FNDECL has an associated internal function, |
| return its code, otherwise return IFN_LAST. Note that this function |
| only tests whether the function is defined in internal-fn.def, not whether |
| it is actually available on the target. */ |
| |
| internal_fn |
| associated_internal_fn (tree fndecl) |
| { |
| gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL); |
| return associated_internal_fn (DECL_FUNCTION_CODE (fndecl), |
| TREE_TYPE (TREE_TYPE (fndecl))); |
| } |
| |
| /* Check whether there is an internal function associated with function CFN |
| and return type RETURN_TYPE. Return the function if so, otherwise return |
| IFN_LAST. |
| |
| Note that this function only tests whether the function is defined in |
| internal-fn.def, not whether it is actually available on the target. */ |
| |
| internal_fn |
| associated_internal_fn (combined_fn cfn, tree return_type) |
| { |
| if (internal_fn_p (cfn)) |
| return as_internal_fn (cfn); |
| return associated_internal_fn (as_builtin_fn (cfn), return_type); |
| } |
| |
| /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced |
| on the current target by a call to an internal function, return the |
| code of that internal function, otherwise return IFN_LAST. The caller |
| is responsible for ensuring that any side-effects of the built-in |
| call are dealt with correctly. E.g. if CALL sets errno, the caller |
| must decide that the errno result isn't needed or make it available |
| in some other way. */ |
| |
| internal_fn |
| replacement_internal_fn (gcall *call) |
| { |
| if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)) |
| { |
| internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call)); |
| if (ifn != IFN_LAST) |
| { |
| tree_pair types = direct_internal_fn_types (ifn, call); |
| optimization_type opt_type = bb_optimization_type (gimple_bb (call)); |
| if (direct_internal_fn_supported_p (ifn, types, opt_type)) |
| return ifn; |
| } |
| } |
| return IFN_LAST; |
| } |
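| |
| /* For example, for a GIMPLE statement "x = __builtin_sqrtf (y)", |
| replacement_internal_fn returns IFN_SQRT when the target provides a |
| suitable sqrt pattern (e.g. sqrtsf2) for the block's optimization level, |
| and IFN_LAST otherwise; the caller still has to deal with errno as noted |
| above. */ |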
| |
| /* Expand a call to the builtin ternary math functions (fma). |
| Return NULL_RTX if a normal call should be emitted rather than expanding the |
| function in-line. EXP is the expression that is a call to the builtin |
| function; if convenient, the result should be placed in TARGET. |
| SUBTARGET may be used as the target for computing one of EXP's |
| operands. */ |
| |
| static rtx |
| expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget) |
| { |
| optab builtin_optab; |
| rtx op0, op1, op2, result; |
| rtx_insn *insns; |
| tree fndecl = get_callee_fndecl (exp); |
| tree arg0, arg1, arg2; |
| machine_mode mode; |
| |
| if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE)) |
| return NULL_RTX; |
| |
| arg0 = CALL_EXPR_ARG (exp, 0); |
| arg1 = CALL_EXPR_ARG (exp, 1); |
| arg2 = CALL_EXPR_ARG (exp, 2); |
| |
| switch (DECL_FUNCTION_CODE (fndecl)) |
| { |
| CASE_FLT_FN (BUILT_IN_FMA): |
| CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA): |
| builtin_optab = fma_optab; break; |
| default: |
| gcc_unreachable (); |
| } |
| |
| /* Make a suitable register to place result in. */ |
| mode = TYPE_MODE (TREE_TYPE (exp)); |
| |
| /* Before working hard, check whether the instruction is available. */ |
| if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing) |
| return NULL_RTX; |
| |
| result = gen_reg_rtx (mode); |
| |
| /* Always stabilize the argument list. */ |
| CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0); |
| CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1); |
| CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2); |
| |
| op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL); |
| op1 = expand_normal (arg1); |
| op2 = expand_normal (arg2); |
| |
| start_sequence (); |
| |
| /* Compute into RESULT. |
| Set RESULT to wherever the result comes back. */ |
| result = expand_ternary_op (mode, builtin_optab, op0, op1, op2, |
| result, 0); |
| |
| /* If we were unable to expand via the builtin, stop the sequence |
| (without outputting the insns) and call to the library function |
| with the stabilized argument list. */ |
| if (result == 0) |
| { |
| end_sequence (); |
| return expand_call (exp, target, target == const0_rtx); |
| } |
| |
| /* Output the entire sequence. */ |
| insns = get_insns (); |
| end_sequence (); |
| emit_insn (insns); |
| |
| return result; |
| } |
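| |
| /* For example, __builtin_fma (a, b, c) with double operands is expanded |
| through fma_optab to the target's fmadf4 pattern when one exists; |
| otherwise the code above abandons the sequence and emits a normal call to |
| fma with the already-stabilized arguments. */ |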
| |
| /* Expand a call to the builtin sin and cos math functions. |
| Return NULL_RTX if a normal call should be emitted rather than expanding the |
| function in-line. EXP is the expression that is a call to the builtin |
| function; if convenient, the result should be placed in TARGET. |
| SUBTARGET may be used as the target for computing one of EXP's |
| operands. */ |
| |
| static rtx |
| expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget) |
| { |
| optab builtin_optab; |
| rtx op0; |
| rtx_insn *insns; |
| tree fndecl = get_callee_fndecl (exp); |
| machine_mode mode; |
| tree arg; |
| |
| if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE)) |
| return NULL_RTX; |
| |
| arg = CALL_EXPR_ARG (exp, 0); |
| |
| switch (DECL_FUNCTION_CODE (fndecl)) |
| { |
| CASE_FLT_FN (BUILT_IN_SIN): |
| CASE_FLT_FN (BUILT_IN_COS): |
| builtin_optab = sincos_optab; break; |
| default: |
| gcc_unreachable (); |
| } |
| |
| /* Make a suitable register to place result in. */ |
| mode = TYPE_MODE (TREE_TYPE (exp)); |
| |
| /* Check if sincos insn is available, otherwise fallback |
| to sin or cos insn. */ |
| if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing) |
| switch (DECL_FUNCTION_CODE (fndecl)) |
| { |
| CASE_FLT_FN (BUILT_IN_SIN): |
| builtin_optab = sin_optab; break; |
| CASE_FLT_FN (BUILT_IN_COS): |
| builtin_optab = cos_optab; break; |
| default: |
| gcc_unreachable (); |
| } |
| |
| /* Before working hard, check whether the instruction is available. */ |
| if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing) |
| { |
| rtx result = gen_reg_rtx (mode); |
| |
| /* Wrap the computation of the argument in a SAVE_EXPR, as we may |
| need to expand the argument again. This way, we will not perform |
| side-effects more than once. */ |
| CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg); |
| |
| op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL); |
| |
| start_sequence (); |
| |
| /* Compute into RESULT. |
| Set RESULT to wherever the result comes back. */ |
| if (builtin_optab == sincos_optab) |
| { |
| int ok; |
| |
| switch (DECL_FUNCTION_CODE (fndecl)) |
| { |
| CASE_FLT_FN (BUILT_IN_SIN): |
| ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0); |
| break; |
| CASE_FLT_FN (BUILT_IN_COS): |
| ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0); |
| break; |
| default: |
| gcc_unreachable (); |
| } |
| gcc_assert (ok); |
| } |
| else |
| result = expand_unop (mode, builtin_optab, op0, result, 0); |
| |
| if (result != 0) |
| { |
| /* Output the entire sequence. */ |
| insns = get_insns (); |
| end_sequence (); |
| emit_insn (insns); |
| return result; |
| } |
| |
| /* If we were unable to expand via the builtin, stop the sequence |
| (without outputting the insns) and call to the library function |
| with the stabilized argument list. */ |
| end_sequence (); |
| } |
| |
| return expand_call (exp, target, target == const0_rtx); |
| } |
| |
| /* Given an interclass math builtin decl FNDECL and its argument ARG, |
| return an RTL instruction code that implements the functionality. |
| If that isn't possible or available return CODE_FOR_nothing. */ |
| |
| static enum insn_code |
| interclass_mathfn_icode (tree arg, tree fndecl) |
| { |
| bool errno_set = false; |
| optab builtin_optab = unknown_optab; |
| machine_mode mode; |
| |
| switch (DECL_FUNCTION_CODE (fndecl)) |
| { |
| CASE_FLT_FN (BUILT_IN_ILOGB): |
| errno_set = true; builtin_optab = ilogb_optab; break; |
| CASE_FLT_FN (BUILT_IN_ISINF): |
| builtin_optab = isinf_optab; break; |
| case BUILT_IN_ISNORMAL: |
| case BUILT_IN_ISFINITE: |
| CASE_FLT_FN (BUILT_IN_FINITE): |
| case BUILT_IN_FINITED32: |
| case BUILT_IN_FINITED64: |
| case BUILT_IN_FINITED128: |
| case BUILT_IN_ISINFD32: |
| case BUILT_IN_ISINFD64: |
| case BUILT_IN_ISINFD128: |
| /* These builtins have no optabs (yet). */ |
| break; |
| default: |
| gcc_unreachable (); |
| } |
| |
| /* There's no easy way to detect the case we need to set EDOM. */ |
| if (flag_errno_math && errno_set) |
| return CODE_FOR_nothing; |
| |
| /* Optab mode depends on the mode of the input argument. */ |
| mode = TYPE_MODE (TREE_TYPE (arg)); |
| |
| if (builtin_optab) |
| return optab_handler (builtin_optab, mode); |
| return CODE_FOR_nothing; |
| } |
| |
| /* Expand a call to one of the builtin math functions that operate on |
| a floating-point argument and output an integer result (ilogb, isinf, |
| isnan, etc.). |
| Return 0 if a normal call should be emitted rather than expanding the |
| function in-line. EXP is the expression that is a call to the builtin |
| function; if convenient, the result should be placed in TARGET. */ |
| |
| static rtx |
| expand_builtin_interclass_mathfn (tree exp, rtx target) |
| { |
| enum insn_code icode = CODE_FOR_nothing; |
| rtx op0; |
| tree fndecl = get_callee_fndecl (exp); |
| machine_mode mode; |
| tree arg; |
| |
| if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE)) |
| return NULL_RTX; |
| |
| arg = CALL_EXPR_ARG (exp, 0); |
| icode = interclass_mathfn_icode (arg, fndecl); |
| mode = TYPE_MODE (TREE_TYPE (arg)); |
| |
| if (icode != CODE_FOR_nothing) |
| { |
| class expand_operand ops[1]; |
| rtx_insn *last = get_last_insn (); |
| tree orig_arg = arg; |
| |
| /* Wrap the computation of the argument in a SAVE_EXPR, as we may |
| need to expand the argument again. This way, we will not perform |
| side-effects more than once. */ |
| CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg); |
| |
| op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL); |
| |
| if (mode != GET_MODE (op0)) |
| op0 = convert_to_mode (mode, op0, 0); |
| |
| create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp))); |
| if (maybe_legitimize_operands (icode, 0, 1, ops) |
| && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN)) |
| return ops[0].value; |
| |
| delete_insns_since (last); |
| CALL_EXPR_ARG (exp, 0) = orig_arg; |
| } |
| |
| return NULL_RTX; |
| } |
| |
| /* Expand a call to the builtin sincos math function. |
| Return NULL_RTX if a normal call should be emitted rather than expanding the |
| function in-line. EXP is the expression that is a call to the builtin |
| function. */ |
| |
| static rtx |
| expand_builtin_sincos (tree exp) |
| { |
| rtx op0, op1, op2, target1, target2; |
| machine_mode mode; |
| tree arg, sinp, cosp; |
| int result; |
| location_t loc = EXPR_LOCATION (exp); |
| tree alias_type, alias_off; |
| |
| if (!validate_arglist (exp, REAL_TYPE, |
| POINTER_TYPE, POINTER_TYPE, VOID_TYPE)) |
| return NULL_RTX; |
| |
| arg = CALL_EXPR_ARG (exp, 0); |
| sinp = CALL_EXPR_ARG (exp, 1); |
| cosp = CALL_EXPR_ARG (exp, 2); |
| |
| /* Make a suitable register to place result in. */ |
| mode = TYPE_MODE (TREE_TYPE (arg)); |
| |
| /* Check if sincos insn is available, otherwise emit the call. */ |
| if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing) |
| return NULL_RTX; |
| |
| target1 = gen_reg_rtx (mode); |
| target2 = gen_reg_rtx (mode); |
| |
| op0 = expand_normal (arg); |
| alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true); |
| alias_off = build_int_cst (alias_type, 0); |
| op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg), |
| sinp, alias_off)); |
| op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg), |
| cosp, alias_off)); |
| |
| /* Compute into target1 (the sine) and target2 (the cosine). */ |
| result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0); |
| gcc_assert (result); |
| |
| /* Move target1 and target2 to the memory locations indicated |
| by op1 and op2. */ |
| emit_move_insn (op1, target1); |
| emit_move_insn (op2, target2); |
| |
| return const0_rtx; |
| } |
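| |
| /* For example, sincos (x, &s, &c) with a double argument expands to a |
| single sincosdf3-style insn via sincos_optab when the target provides |
| one; the two register results are then stored through the user's pointers |
| as above. Without such a pattern the builtin is emitted as a normal |
| library call. */ |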
| |
| /* Expand call EXP to the fegetround builtin (from C99 fenv.h), returning |
| the result in TARGET, or NULL_RTX on failure. */ |
| static rtx |
| expand_builtin_fegetround (tree exp, rtx target, machine_mode target_mode) |
| { |
| if (!validate_arglist (exp, VOID_TYPE)) |
| return NULL_RTX; |
| |
| insn_code icode = direct_optab_handler (fegetround_optab, SImode); |
| if (icode == CODE_FOR_nothing) |
| return NULL_RTX; |
| |
| if (target == 0 |
| || GET_MODE (target) != target_mode |
| || !(*insn_data[icode].operand[0].predicate) (target, target_mode)) |
| target = gen_reg_rtx (target_mode); |
| |
| rtx pat = GEN_FCN (icode) (target); |
| if (!pat) |
| return NULL_RTX; |
| emit_insn (pat); |
| |
| return target; |
| } |
| |
| /* Expand call EXP to either the feclearexcept or the feraiseexcept builtin |
| (from C99 fenv.h), returning the result in TARGET, or NULL_RTX on |
| failure. */ |
| static rtx |
| expand_builtin_feclear_feraise_except (tree exp, rtx target, |
| machine_mode target_mode, optab op_optab) |
| { |
| if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE)) |
| return NULL_RTX; |
| rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0)); |
| |
| insn_code icode = direct_optab_handler (op_optab, SImode); |
| if (icode == CODE_FOR_nothing) |
| return NULL_RTX; |
| |
| if (!(*insn_data[icode].operand[1].predicate) (op0, GET_MODE (op0))) |
| return NULL_RTX; |
| |
| if (target == 0 |
| || GET_MODE (target) != target_mode |
| || !(*insn_data[icode].operand[0].predicate) (target, target_mode)) |
| target = gen_reg_rtx (target_mode); |
| |
| rtx pat = GEN_FCN (icode) (target, op0); |
| if (!pat) |
| return NULL_RTX; |
| emit_insn (pat); |
| |
| return target; |
| } |
| |
| /* Expand a call to the internal cexpi builtin to the sincos math function. |
| EXP is the expression that is a call to the builtin function; if convenient, |
| the result should be placed in TARGET. */ |
| |
| static rtx |
| expand_builtin_cexpi (tree exp, rtx target) |
| { |
| tree fndecl = get_callee_fndecl (exp); |
| tree arg, type; |
| machine_mode mode; |
| rtx op0, op1, op2; |
| location_t loc = EXPR_LOCATION (exp); |
| |
| if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE)) |
| return NULL_RTX; |
| |
| arg = CALL_EXPR_ARG (exp, 0); |
| type = TREE_TYPE (arg); |
| mode = TYPE_MODE (TREE_TYPE (arg)); |
| |
| /* Try expanding via a sincos optab, falling back to emitting a libcall |
| to sincos or cexp. We are sure we have sincos or cexp because cexpi |
| is only generated from sincos or cexp, or when either of them is known |
| to be available. */ |
| if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing) |
| { |
| op1 = gen_reg_rtx (mode); |
| op2 = gen_reg_rtx (mode); |
| |
| op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL); |
| |
| /* Compute into op1 and op2. */ |
| expand_twoval_unop (sincos_optab, op0, op2, op1, 0); |
| } |
| else if (targetm.libc_has_function (function_sincos, type)) |
| { |
| tree call, fn = NULL_TREE; |
| tree top1, top2; |
| rtx op1a, op2a; |
| |
| if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF) |
| fn = builtin_decl_explicit (BUILT_IN_SINCOSF); |
| else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI) |
| fn = builtin_decl_explicit (BUILT_IN_SINCOS); |
| else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL) |
| fn = builtin_decl_explicit (BUILT_IN_SINCOSL); |
| else |
| gcc_unreachable (); |
| |
| op1 = assign_temp (TREE_TYPE (arg), 1, 1); |
| op2 = assign_temp (TREE_TYPE (arg), 1, 1); |
| op1a = copy_addr_to_reg (XEXP (op1, 0)); |
| op2a = copy_addr_to_reg (XEXP (op2, 0)); |
| top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a); |
| top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a); |
| |
| /* Make sure not to fold the sincos call again. */ |
| call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn); |
| expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)), |
| call, 3, arg, top1, top2)); |
| } |
| else |
| { |
| tree call, fn = NULL_TREE, narg; |
| tree ctype = build_complex_type (type); |
| |
| if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF) |
| fn = builtin_decl_explicit (BUILT_IN_CEXPF); |
| else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI) |
| fn = builtin_decl_explicit (BUILT_IN_CEXP); |
| else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL) |
| fn = builtin_decl_explicit (BUILT_IN_CEXPL); |
| else |
| gcc_unreachable (); |
| |
| /* If we don't have a decl for cexp create one. This is the |
| friendliest fallback if the user calls __builtin_cexpi |
| on a target without full C99 function support. */ |
| if (fn == NULL_TREE) |
| { |
| tree fntype; |
| const char *name = NULL; |
| |
| if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF) |
| name = "cexpf"; |
| else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI) |
| name = "cexp"; |
| else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL) |
| name = "cexpl"; |
| |
| fntype = build_function_type_list (ctype, ctype, NULL_TREE); |
| fn = build_fn_decl (name, fntype); |
| } |
| |
| narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype, |
| build_real (type, dconst0), arg); |
| |
| /* Make sure not to fold the cexp call again. */ |
| call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn); |
| return expand_expr (build_call_nary (ctype, call, 1, narg), |
| target, VOIDmode, EXPAND_NORMAL); |
| } |
| |
| /* Now build the proper return type. */ |
| return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type), |
| make_tree (TREE_TYPE (arg), op2), |
| make_tree (TREE_TYPE (arg), op1)), |
| target, VOIDmode, EXPAND_NORMAL); |
| } |
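| |
| /* __builtin_cexpi (x) represents cexp (i*x), i.e. cos (x) + i*sin (x). It |
| is an internal builtin introduced by the middle end (e.g. by the sincos |
| pass), which is why the final fallback above synthesizes a call to cexp |
| with a purely imaginary argument. */ |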
| |
| /* Conveniently construct a function call expression. FNDECL names the |
| function to be called, N is the number of arguments, and the "..." |
| parameters are the argument expressions. Unlike build_call_expr, |
| this doesn't fold the call, hence it will always return a CALL_EXPR. */ |
| |
| static tree |
| build_call_nofold_loc (location_t loc, tree fndecl, int n, ...) |
| { |
| va_list ap; |
| tree fntype = TREE_TYPE (fndecl); |
| tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl); |
| |
| va_start (ap, n); |
| fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap); |
| va_end (ap); |
| SET_EXPR_LOCATION (fn, loc); |
| return fn; |
| } |
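| |
| /* For example, build_call_nofold_loc (loc, fndecl, 2, dst, src) yields the |
| unfolded CALL_EXPR "fndecl (dst, src)"; the expanders below use it when |
| they must emit a library call without re-triggering builtin folding. */ |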
| |
| /* Expand a call to one of the builtin rounding functions gcc defines |
| as an extension (lfloor and lceil). As these are gcc extensions we |
| do not need to worry about setting errno to EDOM. |
| If expanding via the optab fails, lower the expression to (int)(floor(x)). |
| EXP is the expression that is a call to the builtin function; |
| if convenient, the result should be placed in TARGET. */ |
| |
| static rtx |
| expand_builtin_int_roundingfn (tree exp, rtx target) |
| { |
| convert_optab builtin_optab; |
| rtx op0, tmp; |
| rtx_insn *insns; |
| tree fndecl = get_callee_fndecl (exp); |
| enum built_in_function fallback_fn; |
| tree fallback_fndecl; |
| machine_mode mode; |
| tree arg; |
| |
| if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE)) |
| return NULL_RTX; |
| |
| arg = CALL_EXPR_ARG (exp, 0); |
| |
| switch (DECL_FUNCTION_CODE (fndecl)) |
| { |
| CASE_FLT_FN (BUILT_IN_ICEIL): |
| CASE_FLT_FN (BUILT_IN_LCEIL): |
| CASE_FLT_FN (BUILT_IN_LLCEIL): |
| builtin_optab = lceil_optab; |
| fallback_fn = BUILT_IN_CEIL; |
| break; |
| |
| CASE_FLT_FN (BUILT_IN_IFLOOR): |
| CASE_FLT_FN (BUILT_IN_LFLOOR): |
| CASE_FLT_FN (BUILT_IN_LLFLOOR): |
| builtin_optab = lfloor_optab; |
| fallback_fn = BUILT_IN_FLOOR; |
| break; |
| |
| default: |
| gcc_unreachable (); |
| } |
| |
| /* Make a suitable register to place result in. */ |
| mode = TYPE_MODE (TREE_TYPE (exp)); |
| |
| target = gen_reg_rtx (mode); |
| |
| /* Wrap the computation of the argument in a SAVE_EXPR, as we may |
| need to expand the argument again. This way, we will not perform |
| side-effects more than once. */ |
| CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg); |
| |
| op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL); |
| |
| start_sequence (); |
| |
| /* Compute into TARGET. */ |
| if (expand_sfix_optab (target, op0, builtin_optab)) |
| { |
| /* Output the entire sequence. */ |
| insns = get_insns (); |
| end_sequence (); |
| emit_insn (insns); |
| return target; |
| } |
| |
| /* If we were unable to expand via the builtin, stop the sequence |
| (without outputting the insns). */ |
| end_sequence (); |
| |
| /* Fall back to floating point rounding optab. */ |
| fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn); |
| |
| /* For non-C99 targets we may end up without a fallback fndecl here |
| if the user called __builtin_lfloor directly. In this case emit |
| a call to the floor/ceil variants nevertheless. This should result |
| in the best user experience for targets without full C99 support. */ |
| if (fallback_fndecl == NULL_TREE) |
| { |
| tree fntype; |
| const char *name = NULL; |
| |
| switch (DECL_FUNCTION_CODE (fndecl)) |
| { |
| case BUILT_IN_ICEIL: |
| case BUILT_IN_LCEIL: |
| case BUILT_IN_LLCEIL: |
| name = "ceil"; |
| break; |
| case BUILT_IN_ICEILF: |
| case BUILT_IN_LCEILF: |
| case BUILT_IN_LLCEILF: |
| name = "ceilf"; |
| break; |
| case BUILT_IN_ICEILL: |
| case BUILT_IN_LCEILL: |
| case BUILT_IN_LLCEILL: |
| name = "ceill"; |
| break; |
| case BUILT_IN_IFLOOR: |
| case BUILT_IN_LFLOOR: |
| case BUILT_IN_LLFLOOR: |
| name = "floor"; |
| break; |
| case BUILT_IN_IFLOORF: |
| case BUILT_IN_LFLOORF: |
| case BUILT_IN_LLFLOORF: |
| name = "floorf"; |
| break; |
| case BUILT_IN_IFLOORL: |
| case BUILT_IN_LFLOORL: |
| case BUILT_IN_LLFLOORL: |
| name = "floorl"; |
| break; |
| default: |
| gcc_unreachable (); |
| } |
| |
| fntype = build_function_type_list (TREE_TYPE (arg), |
| TREE_TYPE (arg), NULL_TREE); |
| fallback_fndecl = build_fn_decl (name, fntype); |
| } |
| |
| exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg); |
| |
| tmp = expand_normal (exp); |
| tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp)); |
| |
| /* Truncate the result of floating point optab to integer |
| via expand_fix (). */ |
| target = gen_reg_rtx (mode); |
| expand_fix (target, tmp, 0); |
| |
| return target; |
| } |
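| |
| /* For example, __builtin_lfloor (x) with double x first tries the target's |
| lfloordfdi2-style pattern via lfloor_optab; failing that, the code above |
| emits a call to floor and converts the result with expand_fix, i.e. the |
| (int)(floor(x)) lowering mentioned in the leading comment. */ |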
| |
| /* Expand a call to one of the builtin math functions doing integer |
| conversion (lrint). |
| Return 0 if a normal call should be emitted rather than expanding the |
| function in-line. EXP is the expression that is a call to the builtin |
| function; if convenient, the result should be placed in TARGET. */ |
| |
| static rtx |
| expand_builtin_int_roundingfn_2 (tree exp, rtx target) |
| { |
| convert_optab builtin_optab; |
| rtx op0; |
| rtx_insn *insns; |
| tree fndecl = get_callee_fndecl (exp); |
| tree arg; |
| machine_mode mode; |
| enum built_in_function fallback_fn = BUILT_IN_NONE; |
| |
| if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE)) |
| return NULL_RTX; |
| |
| arg = CALL_EXPR_ARG (exp, 0); |
| |
| switch (DECL_FUNCTION_CODE (fndecl)) |
| { |
| CASE_FLT_FN (BUILT_IN_IRINT): |
| fallback_fn = BUILT_IN_LRINT; |
| gcc_fallthrough (); |
| CASE_FLT_FN (BUILT_IN_LRINT): |
| CASE_FLT_FN (BUILT_IN_LLRINT): |
| builtin_optab = lrint_optab; |
| break; |
| |
| CASE_FLT_FN (BUILT_IN_IROUND): |
| fallback_fn = BUILT_IN_LROUND; |
| gcc_fallthrough (); |
| CASE_FLT_FN (BUILT_IN_LROUND): |
| CASE_FLT_FN (BUILT_IN_LLROUND): |
| builtin_optab = lround_optab; |
| break; |
| |
| default: |
| gcc_unreachable (); |
| } |
| |
| /* There's no easy way to detect the case we need to set EDOM. */ |
| if (flag_errno_math && fallback_fn == BUILT_IN_NONE) |
| return NULL_RTX; |
| |
| /* Make a suitable register to place result in. */ |
| mode = TYPE_MODE (TREE_TYPE (exp)); |
| |
| /* There's no easy way to detect the case we need to set EDOM. */ |
| if (!flag_errno_math) |
| { |
| rtx result = gen_reg_rtx (mode); |
| |
| /* Wrap the computation of the argument in a SAVE_EXPR, as we may |
| need to expand the argument again. This way, we will not perform |
| side-effects more than once. */ |
| CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg); |
| |
| op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL); |
| |
| start_sequence (); |
| |
| if (expand_sfix_optab (result, op0, builtin_optab)) |
| { |
| /* Output the entire sequence. */ |
| insns = get_insns (); |
| end_sequence (); |
| emit_insn (insns); |
| return result; |
| } |
| |
| /* If we were unable to expand via the builtin, stop the sequence |
| (without outputting the insns) and call to the library function |
| with the stabilized argument list. */ |
| end_sequence (); |
| } |
| |
| if (fallback_fn != BUILT_IN_NONE) |
| { |
| /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99 |
| targets, (int) round (x) should never be transformed into |
| BUILT_IN_IROUND, and if __builtin_iround is called directly, emit |
| a call to lround in the hope that the target provides at least some |
| C99 functions. This should result in the best user experience for |
| targets without full C99 support. |
| As scalar float conversions with the same mode are useless in GIMPLE, |
| we can end up e.g. with a _Float32 argument passed to a float builtin; |
| try to get the type from the builtin prototype first. */ |
| tree fallback_fndecl = NULL_TREE; |
| if (tree argtypes = TYPE_ARG_TYPES (TREE_TYPE (fndecl))) |
| fallback_fndecl |
| = mathfn_built_in_1 (TREE_VALUE (argtypes), |
| as_combined_fn (fallback_fn), 0); |
| if (fallback_fndecl == NULL_TREE) |
| fallback_fndecl |
| = mathfn_built_in_1 (TREE_TYPE (arg), |
| as_combined_fn (fallback_fn), 0); |
| if (fallback_fndecl) |
| { |
| exp = build_call_nofold_loc (EXPR_LOCATION (exp), |
| fallback_fndecl, 1, arg); |
| |
| target = expand_call (exp, NULL_RTX, target == const0_rtx); |
| target = maybe_emit_group_store (target, TREE_TYPE (exp)); |
| return convert_to_mode (mode, target, 0); |
| } |
| } |
| |
| return expand_call (exp, target, target == const0_rtx); |
| } |
| |
| /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if |
| a normal call should be emitted rather than expanding the function |
| in-line. EXP is the expression that is a call to the builtin |
| function; if convenient, the result should be placed in TARGET. */ |
| |
| static rtx |
| expand_builtin_powi (tree exp, rtx target) |
| { |
| tree arg0, arg1; |
| rtx op0, op1; |
| machine_mode mode; |
| machine_mode mode2; |
| |
| if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE)) |
| return NULL_RTX; |
| |
| arg0 = CALL_EXPR_ARG (exp, 0); |
| arg1 = CALL_EXPR_ARG (exp, 1); |
| mode = TYPE_MODE (TREE_TYPE (exp)); |
| |
| /* Emit a libcall to libgcc. */ |
| |
| /* Mode of the 2nd argument must match that of an int. */ |
| mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require (); |
| |
| if (target == NULL_RTX) |
| target = gen_reg_rtx (mode); |
| |
| op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL); |
| if (GET_MODE (op0) != mode) |
| op0 = convert_to_mode (mode, op0, 0); |
| op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL); |
| if (GET_MODE (op1) != mode2) |
| op1 = convert_to_mode (mode2, op1, 0); |
| |
| target = emit_library_call_value (optab_libfunc (powi_optab, mode), |
| target, LCT_CONST, mode, |
| op0, mode, op1, mode2); |
| |
| return target; |
| } |
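| |
| /* For example, __builtin_powi (x, n) with double x becomes a call to the |
| libgcc helper __powidf2 via optab_libfunc above; calls with small constant |
| exponents have normally already been expanded into multiplies by earlier |
| GIMPLE passes. */ |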
| |
| /* Expand expression EXP which is a call to the strlen builtin. Return |
| NULL_RTX if we failed and the caller should emit a normal call, otherwise |
| try to get the result in TARGET, if convenient. */ |
| |
| static rtx |
| expand_builtin_strlen (tree exp, rtx target, |
| machine_mode target_mode) |
| { |
| if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE)) |
| return NULL_RTX; |
| |
| tree src = CALL_EXPR_ARG (exp, 0); |
| |
| /* If the length can be computed at compile-time, return it. */ |
| if (tree len = c_strlen (src, 0)) |
| return expand_expr (len, target, target_mode, EXPAND_NORMAL); |
| |
| /* If the length can be computed at compile-time and is a constant |
| integer, but there are side-effects in src, evaluate |
| src for its side-effects, then return len. |
| E.g. x = strlen (i++ ? "xfoo" + 1 : "bar"); |
| can be optimized into: i++; x = 3; */ |
| tree len = c_strlen (src, 1); |
| if (len && TREE_CODE (len) == INTEGER_CST) |
| { |
| expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL); |
| return expand_expr (len, target, target_mode, EXPAND_NORMAL); |
| } |
| |
| unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT; |
| |
| /* If SRC is not a pointer type, don't do this operation inline. */ |
| if (align == 0) |
| return NULL_RTX; |
| |
| /* Bail out if we can't compute strlen in the right mode. */ |
| machine_mode insn_mode; |
| enum insn_code icode = CODE_FOR_nothing; |
| FOR_EACH_MODE_FROM (insn_mode, target_mode) |
| { |
| icode = optab_handler (strlen_optab, insn_mode); |
| if (icode != CODE_FOR_nothing) |
| break; |
| } |
| if (insn_mode == VOIDmode) |
| return NULL_RTX; |
| |
| /* Make a place to hold the source address. We will not expand |
| the actual source until we are sure that the expansion will |
| not fail -- there are trees that cannot be expanded twice. */ |
| rtx src_reg = gen_reg_rtx (Pmode); |
| |
| /* Mark the beginning of the strlen sequence so we can emit the |
| source operand later. */ |
| rtx_insn *before_strlen = get_last_insn (); |
| |
| class expand_operand ops[4]; |
| create_output_operand (&ops[0], target, insn_mode); |
| create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg)); |
| create_integer_operand (&ops[2], 0); |
| create_integer_operand (&ops[3], align); |
| if (!maybe_expand_insn (icode, 4, ops)) |
| return NULL_RTX; |
| |
| /* Check to see if the argument was declared attribute nonstring |
| and if so, issue a warning since at this point it's not known |
| to be nul-terminated. */ |
| maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp); |
| |
| /* Now that we are assured of success, expand the source. */ |
| start_sequence (); |
| rtx pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL); |
| if (pat != src_reg) |
| { |
| #ifdef POINTERS_EXTEND_UNSIGNED |
| if (GET_MODE (pat) != Pmode) |
| pat = convert_to_mode (Pmode, pat, |
| POINTERS_EXTEND_UNSIGNED); |
| #endif |
| emit_move_insn (src_reg, pat); |
| } |
| pat = get_insns (); |
| end_sequence (); |
| |
| if (before_strlen) |
| emit_insn_after (pat, before_strlen); |
| else |
| emit_insn_before (pat, get_insns ()); |
| |
| /* Return the value in the proper mode for this function. */ |
| if (GET_MODE (ops[0].value) == target_mode) |
| target = ops[0].value; |
| else if (target != 0) |
| convert_move (target, ops[0].value, 0); |
| else |
| target = convert_to_mode (target_mode, ops[0].value, 0); |
| |
| return target; |
| } |
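| |
| /* For example, on a target providing a strlen<mode> pattern (strlen_optab), |
| a strlen (p) call whose result is not known at compile time expands to |
| that single insn, with the source memory, a zero search character and the |
| pointer alignment as operands, matching ops[0..3] set up above. */ |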
| |
| /* Expand call EXP to the strnlen built-in, returning the result |
| and setting it in TARGET. Otherwise return NULL_RTX on failure. */ |
| |
| static rtx |
| expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode) |
| { |
| if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)) |
| return NULL_RTX; |
| |
| tree src = CALL_EXPR_ARG (exp, 0); |
| tree bound = CALL_EXPR_ARG (exp, 1); |
| |
| if (!bound) |
| return NULL_RTX; |
| |
| location_t loc = UNKNOWN_LOCATION; |
| if (EXPR_HAS_LOCATION (exp)) |
| loc = EXPR_LOCATION (exp); |
| |
| /* FIXME: Change c_strlen() to return sizetype instead of ssizetype |
| so these conversions aren't necessary. */ |
| c_strlen_data lendata = { }; |
| tree len = c_strlen (src, 0, &lendata, 1); |
| if (len) |
| len = fold_convert_loc (loc, TREE_TYPE (bound), len); |
| |
| if (TREE_CODE (bound) == INTEGER_CST) |
| { |
| if (!len) |
| return NULL_RTX; |
| |
| len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound); |
| return expand_expr (len, target, target_mode, EXPAND_NORMAL); |
| } |
| |
| if (TREE_CODE (bound) != SSA_NAME) |
| return NULL_RTX; |
| |
| wide_int min, max; |
| value_range r; |
| get_global_range_query ()->range_of_expr (r, bound); |
| if (r.kind () != VR_RANGE) |
| return NULL_RTX; |
| min = r.lower_bound (); |
| max = r.upper_bound (); |
| |
| if (!len || TREE_CODE (len) != INTEGER_CST) |
| { |
| bool exact; |
| lendata.decl = unterminated_array (src, &len, &exact); |
| if (!lendata.decl) |
| return NULL_RTX; |
| } |
| |
| if (lendata
|