comparison gcc/tree-ssa-ccp.c @ 145:1830386684a0

gcc-9.2.0
author anatofuz
date Thu, 13 Feb 2020 11:34:05 +0900
parents 84e7813d76e9
comparison of 131:84e7813d76e9 and 145:1830386684a0
@@ -1,7 +1,7 @@
 /* Conditional constant propagation pass for the GNU compiler.
-   Copyright (C) 2000-2018 Free Software Foundation, Inc.
+   Copyright (C) 2000-2020 Free Software Foundation, Inc.
    Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
    Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
 
    This file is part of GCC.
 
@@ -134,17 +134,21 @@
 #include "gimplify.h"
 #include "gimple-iterator.h"
 #include "tree-cfg.h"
 #include "tree-ssa-propagate.h"
 #include "dbgcnt.h"
-#include "params.h"
 #include "builtins.h"
 #include "cfgloop.h"
 #include "stor-layout.h"
 #include "optabs-query.h"
 #include "tree-ssa-ccp.h"
 #include "tree-dfa.h"
 #include "diagnostic-core.h"
 #include "stringpool.h"
 #include "attribs.h"
 #include "tree-vector-builder.h"
+#include "cgraph.h"
+#include "alloc-pool.h"
+#include "symbol-summary.h"
+#include "ipa-utils.h"
+#include "ipa-prop.h"
 
@@ -151,15 +155,16 @@
 /* Possible lattice values. */
 typedef enum
 {
   UNINITIALIZED,
   UNDEFINED,
   CONSTANT,
   VARYING
 } ccp_lattice_t;
 
-struct ccp_prop_value_t {
+class ccp_prop_value_t {
+public:
   /* Lattice value. */
   ccp_lattice_t lattice_val;
 
   /* Propagated value. */
   tree value;
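
Note: throughout the hunks below, a CONSTANT entry of this lattice pairs VALUE with a MASK: a zero mask bit means the corresponding bit of VALUE is known, a one bit means it is unknown, so any runtime value X compatible with the entry satisfies X & ~mask == value & ~mask. A toy illustration of the encoding (the struct and names are only illustrative, not part of the patch):

    /* "Known even, nothing else known" for a 32-bit quantity: the single
       zero bit in the mask (bit 0) pins the corresponding bit of VALUE.  */
    struct toy_bit_lattice { unsigned int value; unsigned int mask; };
    static const struct toy_bit_lattice known_even = { 0u, ~1u };
    /* Every compatible x satisfies (x & ~known_even.mask)
       == (known_even.value & ~known_even.mask), i.e. (x & 1) == 0.  */
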
@@ -290,15 +295,30 @@
           val.lattice_val = VARYING;
           val.mask = -1;
           if (flag_tree_bit_ccp)
             {
               wide_int nonzero_bits = get_nonzero_bits (var);
-              if (nonzero_bits != -1)
+              tree value;
+              widest_int mask;
+
+              if (SSA_NAME_VAR (var)
+                  && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL
+                  && ipcp_get_parm_bits (SSA_NAME_VAR (var), &value, &mask))
+                {
+                  val.lattice_val = CONSTANT;
+                  val.value = value;
+                  val.mask = mask;
+                  if (nonzero_bits != -1)
+                    val.mask &= extend_mask (nonzero_bits,
+                                             TYPE_SIGN (TREE_TYPE (var)));
+                }
+              else if (nonzero_bits != -1)
                 {
                   val.lattice_val = CONSTANT;
                   val.value = build_zero_cst (TREE_TYPE (var));
-                  val.mask = extend_mask (nonzero_bits, TYPE_SIGN (TREE_TYPE (var)));
+                  val.mask = extend_mask (nonzero_bits,
+                                          TYPE_SIGN (TREE_TYPE (var)));
                 }
             }
         }
     }
   else if (is_gimple_assign (stmt))
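
Note: the new ipcp_get_parm_bits query (the reason for the extra ipa-*.h includes above) lets CCP seed a default-defined PARM_DECL with bit information that IPA-CP proved over all callers. A hedged example of the kind of situation this targets; the function and caller names are made up:

    /* If every call site passes an even value, IPA bit propagation can
       record "bit 0 of n is known zero" for the callee, and CCP now starts
       n's lattice entry as CONSTANT 0 with only the remaining bits unknown,
       so the division below can be strength-reduced to a shift.  */
    static int half (int n)
    {
      return n / 2;
    }

    int use (void)
    {
      return half (8) + half (24);   /* both arguments are even */
    }
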
@@ -612,13 +632,21 @@
           val.lattice_val = VARYING;
           val.value = NULL_TREE;
           val.mask = -1;
         }
       if (for_bits_p
-          && val.lattice_val == CONSTANT
-          && TREE_CODE (val.value) == ADDR_EXPR)
-        val = get_value_from_alignment (val.value);
+          && val.lattice_val == CONSTANT)
+        {
+          if (TREE_CODE (val.value) == ADDR_EXPR)
+            val = get_value_from_alignment (val.value);
+          else if (TREE_CODE (val.value) != INTEGER_CST)
+            {
+              val.lattice_val = VARYING;
+              val.value = NULL_TREE;
+              val.mask = -1;
+            }
+        }
       /* Fall back to a copy value. */
       if (!for_bits_p
           && val.lattice_val == VARYING
           && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (expr))
         {
@@ -1620,10 +1648,21 @@
                    TYPE_SIGN (TREE_TYPE (rhs1)), TYPE_PRECISION (TREE_TYPE (rhs1)),
                    value_to_wide_int (r1val), r1val.mask,
                    TYPE_SIGN (TREE_TYPE (rhs2)), TYPE_PRECISION (TREE_TYPE (rhs2)),
                    value_to_wide_int (r2val), r2val.mask);
 
+  /* (x * x) & 2 == 0. */
+  if (code == MULT_EXPR && rhs1 == rhs2 && TYPE_PRECISION (type) > 1)
+    {
+      widest_int m = 2;
+      if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
+        value = wi::bit_and_not (value, m);
+      else
+        value = 0;
+      mask = wi::bit_and_not (mask, m);
+    }
+
   if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
     {
       val.lattice_val = CONSTANT;
       val.mask = mask;
       /* ??? Delay building trees here. */
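
Note: the new special case relies on a small arithmetic fact: bit 1 of a square is always zero, since an even x = 2k gives x*x = 4k*k (bits 0 and 1 clear) and an odd x = 2k+1 gives x*x = 4k*(k+1) + 1 (bit 0 set, bit 1 clear). A self-contained check of the identity, independent of the patch:

    #include <assert.h>

    int main (void)
    {
      /* Bit 1 of x*x depends only on the low two bits of x, so a short
         exhaustive loop is more than enough.  */
      for (unsigned int x = 0; x < (1u << 16); x++)
        assert (((x * x) & 2u) == 0);
      return 0;
    }
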
@@ -1957,10 +1996,39 @@
                         val.mask = -aligni;
                       }
                   }
                 break;
               }
+
+            case BUILT_IN_BSWAP16:
+            case BUILT_IN_BSWAP32:
+            case BUILT_IN_BSWAP64:
+              val = get_value_for_expr (gimple_call_arg (stmt, 0), true);
+              if (val.lattice_val == UNDEFINED)
+                break;
+              else if (val.lattice_val == CONSTANT
+                       && val.value
+                       && TREE_CODE (val.value) == INTEGER_CST)
+                {
+                  tree type = TREE_TYPE (gimple_call_lhs (stmt));
+                  int prec = TYPE_PRECISION (type);
+                  wide_int wval = wi::to_wide (val.value);
+                  val.value
+                    = wide_int_to_tree (type,
+                                        wide_int::from (wval, prec,
+                                                        UNSIGNED).bswap ());
+                  val.mask
+                    = widest_int::from (wide_int::from (val.mask, prec,
+                                                        UNSIGNED).bswap (),
+                                        UNSIGNED);
+                  if (wi::sext (val.mask, prec) != -1)
+                    break;
+                }
+              val.lattice_val = VARYING;
+              val.value = NULL_TREE;
+              val.mask = -1;
+              break;
 
             default:;
             }
         }
       if (is_gimple_call (stmt) && gimple_call_lhs (stmt))
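
Note: with these cases, a byte swap of a (partially) known value is folded by swapping both the constant and the unknown-bits mask. For a fully constant argument the call goes away entirely; a plain C illustration of the values involved (ordinary source code, not the GIMPLE the pass sees):

    #include <assert.h>

    int main (void)
    {
      /* With constant arguments CCP can now evaluate the builtins at
         compile time instead of leaving a bswap instruction or libcall.  */
      assert (__builtin_bswap32 (0x12345678u) == 0x78563412u);
      assert (__builtin_bswap16 (0xbeef) == 0xefbe);
      return 0;
    }
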
@@ -2053,13 +2121,11 @@
   gimple **slot;
 
   FOR_EACH_IMM_USE_STMT (stmt, iter, saved_val)
     if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
       {
-        clobber = build_constructor (TREE_TYPE (var),
-                                     NULL);
-        TREE_THIS_VOLATILE (clobber) = 1;
+        clobber = build_clobber (TREE_TYPE (var));
         clobber_stmt = gimple_build_assign (var, clobber);
 
         i = gsi_for_stmt (stmt);
         gsi_insert_before (&i, clobber_stmt, GSI_SAME_STMT);
       }
@@ -2077,12 +2143,10 @@
                                            visited);
       }
     else if (gimple_assign_ssa_name_copy_p (stmt))
       insert_clobber_before_stack_restore (gimple_assign_lhs (stmt), var,
                                            visited);
-    else
-      gcc_assert (is_gimple_debug (stmt));
 }
 
 /* Advance the iterator to the previous non-debug gimple statement in the same
    or dominating basic block. */
 
@@ -2103,13 +2167,13 @@
 }
 
 /* Find a BUILT_IN_STACK_SAVE dominating gsi_stmt (I), and insert
    a clobber of VAR before each matching BUILT_IN_STACK_RESTORE.
 
-   It is possible that BUILT_IN_STACK_SAVE cannot be find in a dominator when a
-   previous pass (such as DOM) duplicated it along multiple paths to a BB. In
-   that case the function gives up without inserting the clobbers. */
+   It is possible that BUILT_IN_STACK_SAVE cannot be found in a dominator when
+   a previous pass (such as DOM) duplicated it along multiple paths to a BB.
+   In that case the function gives up without inserting the clobbers. */
 
 static void
 insert_clobbers_for_var (gimple_stmt_iterator i, tree var)
 {
   gimple *stmt;
@@ -2157,38 +2221,59 @@
     return NULL_TREE;
 
   size = tree_to_uhwi (arg);
 
   /* Heuristic: don't fold large allocas. */
-  threshold = (unsigned HOST_WIDE_INT)PARAM_VALUE (PARAM_LARGE_STACK_FRAME);
+  threshold = (unsigned HOST_WIDE_INT)param_large_stack_frame;
   /* In case the alloca is located at function entry, it has the same lifetime
      as a declared array, so we allow a larger size. */
   block = gimple_block (stmt);
   if (!(cfun->after_inlining
         && block
         && TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL))
     threshold /= 10;
   if (size > threshold)
     return NULL_TREE;
 
+  /* We have to be able to move points-to info.  We used to assert
+     that we can but IPA PTA might end up with two UIDs here
+     as it might need to handle more than one instance being
+     live at the same time.  Instead of trying to detect this case
+     (using the first UID would be OK) just give up for now. */
+  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (lhs);
+  unsigned uid = 0;
+  if (pi != NULL
+      && !pi->pt.anything
+      && !pt_solution_singleton_or_null_p (&pi->pt, &uid))
+    return NULL_TREE;
+
   /* Declare array. */
   elem_type = build_nonstandard_integer_type (BITS_PER_UNIT, 1);
   n_elem = size * 8 / BITS_PER_UNIT;
   array_type = build_array_type_nelts (elem_type, n_elem);
-  var = create_tmp_var (array_type);
+
+  if (tree ssa_name = SSA_NAME_IDENTIFIER (lhs))
+    {
+      /* Give the temporary a name derived from the name of the VLA
+         declaration so it can be referenced in diagnostics. */
+      const char *name = IDENTIFIER_POINTER (ssa_name);
+      var = create_tmp_var (array_type, name);
+    }
+  else
+    var = create_tmp_var (array_type);
+
+  if (gimple *lhsdef = SSA_NAME_DEF_STMT (lhs))
+    {
+      /* Set the temporary's location to that of the VLA declaration
+         so it can be pointed to in diagnostics. */
+      location_t loc = gimple_location (lhsdef);
+      DECL_SOURCE_LOCATION (var) = loc;
+    }
+
   SET_DECL_ALIGN (var, TREE_INT_CST_LOW (gimple_call_arg (stmt, 1)));
-  {
-    struct ptr_info_def *pi = SSA_NAME_PTR_INFO (lhs);
-    if (pi != NULL && !pi->pt.anything)
-      {
-        bool singleton_p;
-        unsigned uid;
-        singleton_p = pt_solution_singleton_or_null_p (&pi->pt, &uid);
-        gcc_assert (singleton_p);
-        SET_DECL_PT_UID (var, uid);
-      }
-  }
+  if (uid != 0)
+    SET_DECL_PT_UID (var, uid);
 
   /* Fold alloca to the address of the array. */
   return fold_convert (TREE_TYPE (lhs), build_fold_addr_expr (var));
 }
 
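
Note: the surrounding function replaces an __builtin_alloca_with_align whose size CCP has proven constant (and acceptably small per the threshold above) with the address of a fixed-size temporary; the new code also names that temporary after the original VLA and gives it the VLA's source location so later diagnostics can point at it. A hedged sketch of source code this applies to; demo, consume and buf are made-up names and the GIMPLE form is not shown:

    void consume (char *);

    void
    demo (void)
    {
      int n = 64;    /* CCP proves the VLA bound is the constant 64.  */
      char buf[n];   /* Lowered to __builtin_alloca_with_align during
                        gimplification.  */
      consume (buf);
    }                /* The alloca can become a fixed char[64] temporary named
                        after "buf", and a clobber of it is inserted before the
                        matching __builtin_stack_restore (see the earlier
                        insert_clobber_before_stack_restore hunks).  */
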
@@ -2280,10 +2365,36 @@
                 insert_clobbers_for_var (*gsi, var);
                 return true;
               }
           }
 
+        /* If there's no extra info from an assume_aligned call,
+           drop it so it doesn't act as otherwise useless dataflow
+           barrier. */
+        if (gimple_call_builtin_p (stmt, BUILT_IN_ASSUME_ALIGNED))
+          {
+            tree ptr = gimple_call_arg (stmt, 0);
+            ccp_prop_value_t ptrval = get_value_for_expr (ptr, true);
+            if (ptrval.lattice_val == CONSTANT
+                && TREE_CODE (ptrval.value) == INTEGER_CST
+                && ptrval.mask != 0)
+              {
+                ccp_prop_value_t val
+                  = bit_value_assume_aligned (stmt, NULL_TREE, ptrval, false);
+                unsigned int ptralign = least_bit_hwi (ptrval.mask.to_uhwi ());
+                unsigned int align = least_bit_hwi (val.mask.to_uhwi ());
+                if (ptralign == align
+                    && ((TREE_INT_CST_LOW (ptrval.value) & (align - 1))
+                        == (TREE_INT_CST_LOW (val.value) & (align - 1))))
+                  {
+                    bool res = update_call_from_tree (gsi, ptr);
+                    gcc_assert (res);
+                    return true;
+                  }
+              }
+          }
+
         /* Propagate into the call arguments. Compared to replace_uses_in
            this can use the argument slot types for type verification
            instead of the current argument type. We also can safely
            drop qualifiers here as we are dealing with constants anyway. */
         argt = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
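
Note: the new block deletes a __builtin_assume_aligned call when the bit lattice already proves at least the requested alignment for its argument, since the call then adds no information and only acts as an optimization barrier. A hedged example of a call that could become removable; sum16, p and q are made-up names, and whether the call is actually dropped depends on what alignment CCP can prove:

    #include <stdlib.h>

    double
    sum16 (size_t n)
    {
      /* aligned_alloc already guarantees 16-byte alignment, so the
         assume_aligned call below may add nothing; CCP can then replace
         its result with p and delete the call.  */
      double *p = aligned_alloc (16, n * sizeof (double));
      double *q = __builtin_assume_aligned (p, 16);
      double s = 0;
      for (size_t i = 0; i < n; i++)
        s += q[i];
      free (p);
      return s;
    }
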
@@ -2563,11 +2674,11 @@
           || !fndecl_built_in_p (callee, BUILT_IN_NORMAL)
           /* All regular builtins are ok, just obviously not alloca. */
           || ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callee)))
         return NULL_TREE;
 
-      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
+      if (fndecl_built_in_p (callee, BUILT_IN_STACK_RESTORE))
         goto second_stack_restore;
     }
 
   if (!gsi_end_p (i))
     return NULL_TREE;
@@ -2621,13 +2732,10 @@
 optimize_stdarg_builtin (gimple *call)
 {
   tree callee, lhs, rhs, cfun_va_list;
   bool va_list_simple_ptr;
   location_t loc = gimple_location (call);
-
-  if (gimple_code (call) != GIMPLE_CALL)
-    return NULL_TREE;
 
   callee = gimple_call_fndecl (call);
 
   cfun_va_list = targetm.fn_abi_va_list (callee);
   va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
@@ -2927,16 +3035,14 @@
   else
     g = gimple_build_call_internal (fn, 3, gimple_call_arg (call, 0),
                                     bit, flag);
   gimple_call_set_lhs (g, new_lhs);
   gimple_set_location (g, gimple_location (call));
-  gimple_set_vuse (g, gimple_vuse (call));
-  gimple_set_vdef (g, gimple_vdef (call));
+  gimple_move_vops (g, call);
   bool throws = stmt_can_throw_internal (cfun, call);
   gimple_call_set_nothrow (as_a <gcall *> (g),
                            gimple_call_nothrow_p (as_a <gcall *> (call)));
-  SSA_NAME_DEF_STMT (gimple_vdef (call)) = g;
   gimple_stmt_iterator gsi = *gsip;
   gsi_insert_after (&gsi, g, GSI_NEW_STMT);
   edge e = NULL;
   if (throws)
     {
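
Note: gimple_move_vops transfers the virtual operands from the old call to the new one in one step, which is why the separate gimple_set_vuse/gimple_set_vdef pair and the SSA_NAME_DEF_STMT update could be dropped. A rough sketch of its effect for this caller, written with the existing gimple accessors; move_vops_sketch is a made-up name and this is not the helper's actual implementation:

    static void
    move_vops_sketch (gimple *new_stmt, gimple *old_stmt)
    {
      gimple_set_vdef (new_stmt, gimple_vdef (old_stmt));
      gimple_set_vuse (new_stmt, gimple_vuse (old_stmt));
      if (gimple_vdef (new_stmt)
          && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
        SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
      /* The real helper also clears the virtual operands on OLD_STMT.  */
      gimple_set_vdef (old_stmt, NULL_TREE);
      gimple_set_vuse (old_stmt, NULL_TREE);
    }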