comparison gcc/targhooks.c @ 132:d34655255c78

update gcc-8.2
author mir3636
date Thu, 25 Oct 2018 10:21:07 +0900
parents 84e7813d76e9
children 1830386684a0
comparison: 130:e108057fa461 to 132:d34655255c78
@@ -1,7 +1,7 @@
 /* Default target hook functions.
-   Copyright (C) 2003-2017 Free Software Foundation, Inc.
+   Copyright (C) 2003-2018 Free Software Foundation, Inc.

    This file is part of GCC.

    GCC is free software; you can redistribute it and/or modify it under
    the terms of the GNU General Public License as published by the Free
@@ -79,11 +79,12 @@
 #include "opts.h"
 #include "gimplify.h"
 #include "predict.h"
 #include "params.h"
 #include "real.h"
-
+#include "langhooks.h"
+#include "sbitmap.h"

 bool
 default_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
                               rtx addr ATTRIBUTE_UNUSED,
                               bool strict ATTRIBUTE_UNUSED)
@@ -168,13 +169,12 @@
 {
   return x;
 }

 bool
-default_legitimize_address_displacement (rtx *disp ATTRIBUTE_UNUSED,
-                                         rtx *offset ATTRIBUTE_UNUSED,
-                                         machine_mode mode ATTRIBUTE_UNUSED)
+default_legitimize_address_displacement (rtx *, rtx *, poly_int64,
+                                         machine_mode)
 {
   return false;
 }

 bool
@@ -389,10 +389,18 @@
   const char *skipped = name + (*name == '*' ? 1 : 0);
   const char *stripped = targetm.strip_name_encoding (skipped);
   if (*name != '*' && user_label_prefix[0])
     stripped = ACONCAT ((user_label_prefix, stripped, NULL));
   return get_identifier (stripped);
+}
+
+/* The default implementation of TARGET_TRANSLATE_MODE_ATTRIBUTE. */
+
+machine_mode
+default_translate_mode_attribute (machine_mode mode)
+{
+  return mode;
 }

 /* True if MODE is valid for the target. By "valid", we mean able to
    be manipulated in non-trivial ways. In particular, this means all
    the arithmetic is supported.
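For context, the new TARGET_TRANSLATE_MODE_ATTRIBUTE hook added in the hunk above is consulted when a type is declared with GCC's mode attribute, giving the target a chance to substitute a different machine mode before layout. A minimal user-level illustration of the attribute involved (ordinary GCC C, not part of this diff):

/* Requests the target's SImode layout for this typedef; the hook above
   may translate the requested mode before the type is laid out.  */
typedef int si_int __attribute__ ((mode (SI)));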
@@ -561,10 +569,32 @@
         return cand;
     }
   return opt_scalar_float_mode ();
 }

+/* Define this to return true if the _Floatn and _Floatnx built-in functions
+   should implicitly enable the built-in function without the __builtin_ prefix
+   in addition to the normal built-in function with the __builtin_ prefix. The
+   default is to only enable built-in functions without the __builtin_ prefix
+   for the GNU C langauge. The argument FUNC is the enum builtin_in_function
+   id of the function to be enabled. */
+
+bool
+default_floatn_builtin_p (int func ATTRIBUTE_UNUSED)
+{
+  static bool first_time_p = true;
+  static bool c_or_objective_c;
+
+  if (first_time_p)
+    {
+      first_time_p = false;
+      c_or_objective_c = lang_GNU_C () || lang_GNU_OBJC ();
+    }
+
+  return c_or_objective_c;
+}
+
 /* Make some target macros useable by target-independent code. */
 bool
 targhook_words_big_endian (void)
 {
   return !!WORDS_BIG_ENDIAN;
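For context, the practical effect of default_floatn_builtin_p returning true is that, in GNU C, the _FloatN/_FloatNx built-ins are also registered without the __builtin_ prefix. A hedged user-level sketch (assumes a target that supports _Float128; under strict ISO C only the prefixed spelling is pre-declared):

_Float128
magnitude (_Float128 x)
{
  /* In GNU C this resolves to the same built-in as __builtin_fabsf128;
     in strict ISO modes the unprefixed name is not implicitly enabled.  */
  return fabsf128 (x);
}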
@@ -689,11 +719,11 @@

       case cond_branch_taken:
         return 3;

       case vec_construct:
-        return TYPE_VECTOR_SUBPARTS (vectype) - 1;
+        return estimated_poly_value (TYPE_VECTOR_SUBPARTS (vectype)) - 1;

       default:
         gcc_unreachable ();
     }
 }
@@ -729,10 +759,16 @@
                               cumulative_args_t ca ATTRIBUTE_UNUSED,
                               machine_mode mode ATTRIBUTE_UNUSED,
                               tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
 {
   return 0;
+}
+
+void
+hook_void_CUMULATIVE_ARGS_tree (cumulative_args_t ca ATTRIBUTE_UNUSED,
+                                tree ATTRIBUTE_UNUSED)
+{
 }

 void
 default_function_arg_advance (cumulative_args_t ca ATTRIBUTE_UNUSED,
                               machine_mode mode ATTRIBUTE_UNUSED,
@@ -765,11 +801,13 @@
       if (!type || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
         return PAD_UPWARD;
       size = int_size_in_bytes (type);
     }
   else
-    size = GET_MODE_SIZE (mode);
+    /* Targets with variable-sized modes must override this hook
+       and handle variable-sized modes explicitly. */
+    size = GET_MODE_SIZE (mode).to_constant ();

   if (size < (PARM_BOUNDARY / BITS_PER_UNIT))
     return PAD_DOWNWARD;

   return PAD_UPWARD;
@@ -947,11 +985,11 @@
 rtx
 default_libcall_value (machine_mode mode ATTRIBUTE_UNUSED,
                        const_rtx fun ATTRIBUTE_UNUSED)
 {
 #ifdef LIBCALL_VALUE
-  return LIBCALL_VALUE (mode);
+  return LIBCALL_VALUE (MACRO_MODE (mode));
 #else
   gcc_unreachable ();
 #endif
 }

@@ -1015,14 +1053,12 @@
                    rtx ARG_UNUSED (r_chain))
 {
   sorry ("nested function trampolines not supported on this target");
 }

-int
-default_return_pops_args (tree fundecl ATTRIBUTE_UNUSED,
-                          tree funtype ATTRIBUTE_UNUSED,
-                          int size ATTRIBUTE_UNUSED)
+poly_int64
+default_return_pops_args (tree, tree, poly_int64)
 {
   return 0;
 }

 reg_class_t
@@ -1077,15 +1113,17 @@
       sri->icode = sri->prev_sri->t_icode;
       return NO_REGS;
     }
 #ifdef SECONDARY_INPUT_RELOAD_CLASS
   if (in_p)
-    rclass = SECONDARY_INPUT_RELOAD_CLASS (reload_class, reload_mode, x);
+    rclass = SECONDARY_INPUT_RELOAD_CLASS (reload_class,
+                                           MACRO_MODE (reload_mode), x);
 #endif
 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
   if (! in_p)
-    rclass = SECONDARY_OUTPUT_RELOAD_CLASS (reload_class, reload_mode, x);
+    rclass = SECONDARY_OUTPUT_RELOAD_CLASS (reload_class,
+                                            MACRO_MODE (reload_mode), x);
 #endif
   if (rclass != NO_REGS)
     {
       enum insn_code icode
         = direct_optab_handler (in_p ? reload_in_optab : reload_out_optab,
@@ -1149,11 +1187,11 @@

 machine_mode
 default_secondary_memory_needed_mode (machine_mode mode)
 {
   if (!targetm.lra_p ()
-      && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
+      && known_lt (GET_MODE_BITSIZE (mode), BITS_PER_WORD)
       && INTEGRAL_MODE_P (mode))
     return mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0).require ();
   return mode;
 }

@@ -1162,10 +1200,19 @@

 int
 default_reloc_rw_mask (void)
 {
   return flag_pic ? 3 : 0;
+}
+
+/* By default, address diff vectors are generated
+   for jump tables when flag_pic is true. */
+
+bool
+default_generate_pic_addr_diff_vec (void)
+{
+  return flag_pic;
 }

 /* By default, do no modification. */
 tree default_mangle_decl_assembler_name (tree decl ATTRIBUTE_UNUSED,
                                          tree id)
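As a sketch of how a port might override the new jump-table hook above in its target file. The TARGET_GENERATE_PIC_ADDR_DIFF_VEC macro name follows the usual target-hook naming convention and the function is hypothetical, not taken from this diff:

/* Hypothetical port code: always emit absolute jump tables, even when
   flag_pic is set, instead of the address-difference vectors chosen by
   the default above.  */
static bool
example_generate_pic_addr_diff_vec (void)
{
  return false;
}

#undef TARGET_GENERATE_PIC_ADDR_DIFF_VEC
#define TARGET_GENERATE_PIC_ADDR_DIFF_VEC example_generate_pic_addr_diff_vec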
@@ -1251,37 +1298,52 @@
 default_preferred_simd_mode (scalar_mode)
 {
   return word_mode;
 }

+/* By default do not split reductions further. */
+
+machine_mode
+default_split_reduction (machine_mode mode)
+{
+  return mode;
+}
+
 /* By default only the size derived from the preferred vector mode
    is tried. */

-unsigned int
-default_autovectorize_vector_sizes (void)
+void
+default_autovectorize_vector_sizes (vector_sizes *)
 {
-  return 0;
 }

-/* By defaults a vector of integers is used as a mask. */
+/* By default a vector of integers is used as a mask. */

 opt_machine_mode
-default_get_mask_mode (unsigned nunits, unsigned vector_size)
+default_get_mask_mode (poly_uint64 nunits, poly_uint64 vector_size)
 {
-  unsigned elem_size = vector_size / nunits;
+  unsigned int elem_size = vector_element_size (vector_size, nunits);
   scalar_int_mode elem_mode
     = smallest_int_mode_for_size (elem_size * BITS_PER_UNIT);
   machine_mode vector_mode;

-  gcc_assert (elem_size * nunits == vector_size);
+  gcc_assert (known_eq (elem_size * nunits, vector_size));

   if (mode_for_vector (elem_mode, nunits).exists (&vector_mode)
       && VECTOR_MODE_P (vector_mode)
       && targetm.vector_mode_supported_p (vector_mode))
     return vector_mode;

   return opt_machine_mode ();
 }

+/* By default consider masked stores to be expensive. */
+
+bool
+default_empty_mask_is_expensive (unsigned ifn)
+{
+  return ifn == IFN_MASK_STORE;
+}
+
 /* By default, the cost model accumulates three separate costs (prologue,
    loop body, and epilogue) for a vectorized loop or block. So allocate an
    array of three unsigned ints, set it to zero, and return its address. */
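A worked example of the arithmetic in default_get_mask_mode for the fixed-length case, where both poly_uint64 arguments reduce to a single coefficient; the numbers are illustrative only:

#include <stdio.h>

int
main (void)
{
  /* A 16-byte vector with 4 lanes: each mask element is 16/4 = 4 bytes,
     so the mask is a vector of four 32-bit integers (V4SImode on targets
     that provide and support such a mode).  */
  unsigned int nunits = 4, vector_size = 16;
  unsigned int elem_size = vector_size / nunits;
  printf ("mask element width: %u bits\n", elem_size * 8);
  return 0;
}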
@@ -1491,11 +1553,13 @@
 /* The defualt implementation of TARGET_HARD_REGNO_NREGS. */

 unsigned int
 default_hard_regno_nregs (unsigned int, machine_mode mode)
 {
-  return CEIL (GET_MODE_SIZE (mode), UNITS_PER_WORD);
+  /* Targets with variable-sized modes must provide their own definition
+     of this hook. */
+  return CEIL (GET_MODE_SIZE (mode).to_constant (), UNITS_PER_WORD);
 }

 bool
 default_hard_regno_scratch_ok (unsigned int regno ATTRIBUTE_UNUSED)
 {
@@ -1609,11 +1673,11 @@
                          bool in ATTRIBUTE_UNUSED)
 {
 #ifndef MEMORY_MOVE_COST
   return (4 + memory_move_secondary_cost (mode, (enum reg_class) rclass, in));
 #else
-  return MEMORY_MOVE_COST (mode, (enum reg_class) rclass, in);
+  return MEMORY_MOVE_COST (MACRO_MODE (mode), (enum reg_class) rclass, in);
 #endif
 }

 /* Compute cost of moving data from a register of class FROM to one of
    TO, using MODE. */
@@ -1624,20 +1688,29 @@
                             reg_class_t to ATTRIBUTE_UNUSED)
 {
 #ifndef REGISTER_MOVE_COST
   return 2;
 #else
-  return REGISTER_MOVE_COST (mode, (enum reg_class) from, (enum reg_class) to);
+  return REGISTER_MOVE_COST (MACRO_MODE (mode),
+                             (enum reg_class) from, (enum reg_class) to);
 #endif
 }

 /* The default implementation of TARGET_SLOW_UNALIGNED_ACCESS. */

 bool
 default_slow_unaligned_access (machine_mode, unsigned int)
 {
   return STRICT_ALIGNMENT;
+}
+
+/* The default implementation of TARGET_ESTIMATED_POLY_VALUE. */
+
+HOST_WIDE_INT
+default_estimated_poly_value (poly_int64 x)
+{
+  return x.coeffs[0];
 }

 /* For hooks which use the MOVE_RATIO macro, this gives the legacy default
    behavior. SPEED_P is true if we are compiling for speed. */

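The new default simply keeps the constant coefficient of a poly_int64. A port with runtime-sized vectors could fold in a tuning guess instead; a hypothetical sketch assuming NUM_POLY_INT_COEFFS == 2 (the function name and the factor of two are illustrative, not from this diff):

/* VAL has the form coeffs[0] + coeffs[1] * x, where x counts vector
   chunks beyond the minimum.  Guess two extra chunks rather than the
   default's zero.  */
static HOST_WIDE_INT
example_estimated_poly_value (poly_int64 val)
{
  return val.coeffs[0] + val.coeffs[1] * 2;
}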
@@ -1805,13 +1878,17 @@
 unsigned char
 default_class_max_nregs (reg_class_t rclass ATTRIBUTE_UNUSED,
                          machine_mode mode ATTRIBUTE_UNUSED)
 {
 #ifdef CLASS_MAX_NREGS
-  return (unsigned char) CLASS_MAX_NREGS ((enum reg_class) rclass, mode);
+  return (unsigned char) CLASS_MAX_NREGS ((enum reg_class) rclass,
+                                          MACRO_MODE (mode));
 #else
-  return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
+  /* Targets with variable-sized modes must provide their own definition
+     of this hook. */
+  unsigned int size = GET_MODE_SIZE (mode).to_constant ();
+  return (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
 #endif
 }

 /* Determine the debugging unwind mechanism for the target. */

@@ -1832,10 +1909,19 @@
 #endif

   return UI_NONE;
 }

+/* Targets that set NUM_POLY_INT_COEFFS to something greater than 1
+   must define this hook. */
+
+unsigned int
+default_dwarf_poly_indeterminate_value (unsigned int, unsigned int *, int *)
+{
+  gcc_unreachable ();
+}
+
 /* Determine the correct mode for a Dwarf frame register that represents
    register REGNO. */

 machine_mode
 default_dwarf_frame_reg_mode (int regno)
@@ -1848,14 +1934,16 @@
 }

 /* To be used by targets where reg_raw_mode doesn't return the right
    mode for registers used in apply_builtin_return and apply_builtin_arg. */

-machine_mode
+fixed_size_mode
 default_get_reg_raw_mode (int regno)
 {
-  return reg_raw_mode[regno];
+  /* Targets must override this hook if the underlying register is
+     variable-sized. */
+  return as_a <fixed_size_mode> (reg_raw_mode[regno]);
 }

 /* Return true if a leaf function should stay leaf even with profiling
    enabled. */

@@ -2064,10 +2152,27 @@

   indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
   if (indirect)
     type = build_pointer_type (type);

+  if (targetm.calls.split_complex_arg
+      && TREE_CODE (type) == COMPLEX_TYPE
+      && targetm.calls.split_complex_arg (type))
+    {
+      tree real_part, imag_part;
+
+      real_part = std_gimplify_va_arg_expr (valist,
+                                            TREE_TYPE (type), pre_p, NULL);
+      real_part = get_initialized_tmp_var (real_part, pre_p, NULL);
+
+      imag_part = std_gimplify_va_arg_expr (unshare_expr (valist),
+                                            TREE_TYPE (type), pre_p, NULL);
+      imag_part = get_initialized_tmp_var (imag_part, pre_p, NULL);
+
+      return build2 (COMPLEX_EXPR, type, real_part, imag_part);
+    }
+
   align = PARM_BOUNDARY / BITS_PER_UNIT;
   boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);

   /* When we align parameter on stack for caller, if the parameter
      alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
@@ -2082,10 +2187,11 @@
   valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);

   /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
      requires greater alignment, we must perform dynamic alignment. */
   if (boundary > align
+      && !TYPE_EMPTY_P (type)
       && !integer_zerop (TYPE_SIZE (type)))
     {
       t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
                   fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
       gimplify_and_add (t, pre_p);
@@ -2108,11 +2214,11 @@
       type = build_variant_type_copy (type);
       SET_TYPE_ALIGN (type, boundary);
     }

   /* Compute the rounded size of the type. */
-  type_size = size_in_bytes (type);
+  type_size = arg_size_in_bytes (type);
   rounded_size = round_up (type_size, align);

   /* Reduce rounded_size so it's sharable with the postqueue. */
   gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

@@ -2139,57 +2245,10 @@
     addr = build_va_arg_indirect_ref (addr);

   return build_va_arg_indirect_ref (addr);
 }

-tree
-default_chkp_bound_type (void)
-{
-  tree res = make_node (POINTER_BOUNDS_TYPE);
-  TYPE_PRECISION (res) = TYPE_PRECISION (size_type_node) * 2;
-  TYPE_NAME (res) = get_identifier ("__bounds_type");
-  SET_TYPE_MODE (res, targetm.chkp_bound_mode ());
-  layout_type (res);
-  return res;
-}
-
-machine_mode
-default_chkp_bound_mode (void)
-{
-  return VOIDmode;
-}
-
-tree
-default_builtin_chkp_function (unsigned int fcode ATTRIBUTE_UNUSED)
-{
-  return NULL_TREE;
-}
-
-rtx
-default_chkp_function_value_bounds (const_tree ret_type ATTRIBUTE_UNUSED,
-                                    const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
-                                    bool outgoing ATTRIBUTE_UNUSED)
-{
-  gcc_unreachable ();
-}
-
-tree
-default_chkp_make_bounds_constant (HOST_WIDE_INT lb ATTRIBUTE_UNUSED,
-                                   HOST_WIDE_INT ub ATTRIBUTE_UNUSED)
-{
-  return NULL_TREE;
-}
-
-int
-default_chkp_initialize_bounds (tree var ATTRIBUTE_UNUSED,
-                                tree lb ATTRIBUTE_UNUSED,
-                                tree ub ATTRIBUTE_UNUSED,
-                                tree *stmts ATTRIBUTE_UNUSED)
-{
-  return 0;
-}
-
 void
 default_setup_incoming_vararg_bounds (cumulative_args_t ca ATTRIBUTE_UNUSED,
                                       machine_mode mode ATTRIBUTE_UNUSED,
                                       tree type ATTRIBUTE_UNUSED,
                                       int *pretend_arg_size ATTRIBUTE_UNUSED,
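User-level code that exercises the split-complex path added to std_gimplify_va_arg_expr above: when the target's split_complex_arg hook says a complex type is passed in two parts, va_arg now rebuilds the value from its real and imaginary halves. Plain C99, independent of any particular target:

#include <stdarg.h>
#include <complex.h>

double
sum_magnitudes (int n, ...)
{
  va_list ap;
  double total = 0.0;

  va_start (ap, n);
  for (int i = 0; i < n; i++)
    total += cabs (va_arg (ap, double _Complex));
  va_end (ap);

  return total;
}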
@@ -2249,12 +2308,68 @@
 default_excess_precision (enum excess_precision_type ATTRIBUTE_UNUSED)
 {
   return FLT_EVAL_METHOD_PROMOTE_TO_FLOAT;
 }

-bool
-default_stack_clash_protection_final_dynamic_probe (rtx residual ATTRIBUTE_UNUSED)
+/* Default implementation for
+   TARGET_STACK_CLASH_PROTECTION_ALLOCA_PROBE_RANGE. */
+HOST_WIDE_INT
+default_stack_clash_protection_alloca_probe_range (void)
 {
   return 0;
 }

+/* The default implementation of TARGET_EARLY_REMAT_MODES. */
+
+void
+default_select_early_remat_modes (sbitmap)
+{
+}
+
+/* The default implementation of TARGET_PREFERRED_ELSE_VALUE. */
+
+tree
+default_preferred_else_value (unsigned, tree type, unsigned, tree *)
+{
+  return build_zero_cst (type);
+}
+
+/* Default implementation of TARGET_HAVE_SPECULATION_SAFE_VALUE. */
+bool
+default_have_speculation_safe_value (bool active ATTRIBUTE_UNUSED)
+{
+#ifdef HAVE_speculation_barrier
+  return active ? HAVE_speculation_barrier : true;
+#else
+  return false;
+#endif
+}
+/* Alternative implementation of TARGET_HAVE_SPECULATION_SAFE_VALUE
+   that can be used on targets that never have speculative execution. */
+bool
+speculation_safe_value_not_needed (bool active)
+{
+  return !active;
+}
+
+/* Default implementation of the speculation-safe-load builtin. This
+   implementation simply copies val to result and generates a
+   speculation_barrier insn, if such a pattern is defined. */
+rtx
+default_speculation_safe_value (machine_mode mode ATTRIBUTE_UNUSED,
+                                rtx result, rtx val,
+                                rtx failval ATTRIBUTE_UNUSED)
+{
+  emit_move_insn (result, val);
+
+#ifdef HAVE_speculation_barrier
+  /* Assume the target knows what it is doing: if it defines a
+     speculation barrier, but it is not enabled, then assume that one
+     isn't needed. */
+  if (HAVE_speculation_barrier)
+    emit_insn (gen_speculation_barrier ());
+#endif
+
+  return result;
+}
+
 #include "gt-targhooks.h"
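The speculation hooks added at the end of this file back the __builtin_speculation_safe_value builtin. Its intended use follows the bounds-check pattern in the GCC documentation; the array and bound below are illustrative:

#define MAX_ARRAY_ELEMS 1024
static int array[MAX_ARRAY_ELEMS];

int
load_array (unsigned int untrusted_index)
{
  if (untrusted_index < MAX_ARRAY_ELEMS)
    /* Under mis-speculation past the bounds check, the index is forced to
       the fail value (0 here) or the access is fenced, depending on what
       the target's speculation_safe_value hook emits.  */
    return array[__builtin_speculation_safe_value (untrusted_index, 0)];
  return 0;
}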