comparison gcc/fold-const.c @ 16:04ced10e8804
gcc 7
author | kono |
---|---|
date | Fri, 27 Oct 2017 22:46:09 +0900 |
parents | f6334be47118 |
children | 84e7813d76e9 |
15:561a7518be6b | 16:04ced10e8804 |
---|---|
1 /* Fold a constant sub-tree into a single node for C-compiler | 1 /* Fold a constant sub-tree into a single node for C-compiler |
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, | 2 Copyright (C) 1987-2017 Free Software Foundation, Inc. |
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 | |
4 Free Software Foundation, Inc. | |
5 | 3 |
6 This file is part of GCC. | 4 This file is part of GCC. |
7 | 5 |
8 GCC is free software; you can redistribute it and/or modify it under | 6 GCC is free software; you can redistribute it and/or modify it under |
9 the terms of the GNU General Public License as published by the Free | 7 the terms of the GNU General Public License as published by the Free |
43 corresponding tree equivalents. */ | 41 corresponding tree equivalents. */ |
44 | 42 |
45 #include "config.h" | 43 #include "config.h" |
46 #include "system.h" | 44 #include "system.h" |
47 #include "coretypes.h" | 45 #include "coretypes.h" |
48 #include "tm.h" | 46 #include "backend.h" |
47 #include "target.h" | |
48 #include "rtl.h" | |
49 #include "tree.h" | |
50 #include "gimple.h" | |
51 #include "predict.h" | |
52 #include "memmodel.h" | |
53 #include "tm_p.h" | |
54 #include "tree-ssa-operands.h" | |
55 #include "optabs-query.h" | |
56 #include "cgraph.h" | |
57 #include "diagnostic-core.h" | |
49 #include "flags.h" | 58 #include "flags.h" |
50 #include "tree.h" | 59 #include "alias.h" |
51 #include "realmpfr.h" | 60 #include "fold-const.h" |
52 #include "rtl.h" | 61 #include "fold-const-call.h" |
62 #include "stor-layout.h" | |
63 #include "calls.h" | |
64 #include "tree-iterator.h" | |
53 #include "expr.h" | 65 #include "expr.h" |
54 #include "tm_p.h" | |
55 #include "target.h" | |
56 #include "diagnostic-core.h" | |
57 #include "intl.h" | 66 #include "intl.h" |
58 #include "ggc.h" | |
59 #include "hashtab.h" | |
60 #include "langhooks.h" | 67 #include "langhooks.h" |
68 #include "tree-eh.h" | |
69 #include "gimplify.h" | |
70 #include "tree-dfa.h" | |
71 #include "builtins.h" | |
72 #include "generic-match.h" | |
73 #include "gimple-fold.h" | |
74 #include "params.h" | |
75 #include "tree-into-ssa.h" | |
61 #include "md5.h" | 76 #include "md5.h" |
62 #include "gimple.h" | 77 #include "case-cfn-macros.h" |
63 #include "tree-flow.h" | 78 #include "stringpool.h" |
79 #include "tree-vrp.h" | |
80 #include "tree-ssanames.h" | |
81 #include "selftest.h" | |
82 #include "stringpool.h" | |
83 #include "attribs.h" | |
64 | 84 |
65 /* Nonzero if we are folding constants inside an initializer; zero | 85 /* Nonzero if we are folding constants inside an initializer; zero |
66 otherwise. */ | 86 otherwise. */ |
67 int folding_initializer = 0; | 87 int folding_initializer = 0; |
68 | 88 |
86 COMPCODE_NE = 13, | 106 COMPCODE_NE = 13, |
87 COMPCODE_UNGE = 14, | 107 COMPCODE_UNGE = 14, |
88 COMPCODE_TRUE = 15 | 108 COMPCODE_TRUE = 15 |
89 }; | 109 }; |
90 | 110 |
91 static bool negate_mathfn_p (enum built_in_function); | |
92 static bool negate_expr_p (tree); | 111 static bool negate_expr_p (tree); |
93 static tree negate_expr (tree); | 112 static tree negate_expr (tree); |
94 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int); | |
95 static tree associate_trees (location_t, tree, tree, enum tree_code, tree); | 113 static tree associate_trees (location_t, tree, tree, enum tree_code, tree); |
96 static tree const_binop (enum tree_code, tree, tree); | |
97 static enum comparison_code comparison_to_compcode (enum tree_code); | 114 static enum comparison_code comparison_to_compcode (enum tree_code); |
98 static enum tree_code compcode_to_comparison (enum comparison_code); | 115 static enum tree_code compcode_to_comparison (enum comparison_code); |
99 static int operand_equal_for_comparison_p (tree, tree, tree); | |
100 static int twoval_comparison_p (tree, tree *, tree *, int *); | 116 static int twoval_comparison_p (tree, tree *, tree *, int *); |
101 static tree eval_subst (location_t, tree, tree, tree, tree, tree); | 117 static tree eval_subst (location_t, tree, tree, tree, tree, tree); |
102 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree); | |
103 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree); | |
104 static tree make_bit_field_ref (location_t, tree, tree, | |
105 HOST_WIDE_INT, HOST_WIDE_INT, int); | |
106 static tree optimize_bit_field_compare (location_t, enum tree_code, | 118 static tree optimize_bit_field_compare (location_t, enum tree_code, |
107 tree, tree, tree); | 119 tree, tree, tree); |
108 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *, | |
109 HOST_WIDE_INT *, | |
110 enum machine_mode *, int *, int *, | |
111 tree *, tree *); | |
112 static int all_ones_mask_p (const_tree, int); | |
113 static tree sign_bit_p (tree, const_tree); | |
114 static int simple_operand_p (const_tree); | 120 static int simple_operand_p (const_tree); |
121 static bool simple_operand_p_2 (tree); | |
115 static tree range_binop (enum tree_code, tree, tree, int, tree, int); | 122 static tree range_binop (enum tree_code, tree, tree, int, tree, int); |
116 static tree range_predecessor (tree); | 123 static tree range_predecessor (tree); |
117 static tree range_successor (tree); | 124 static tree range_successor (tree); |
118 extern tree make_range (tree, int *, tree *, tree *, bool *); | |
119 extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int, | |
120 tree, tree); | |
121 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree); | 125 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree); |
122 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree); | 126 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree); |
123 static tree unextend (tree, int, int, tree); | 127 static tree unextend (tree, int, int, tree); |
124 static tree fold_truthop (location_t, enum tree_code, tree, tree, tree); | |
125 static tree optimize_minmax_comparison (location_t, enum tree_code, | |
126 tree, tree, tree); | |
127 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *); | 128 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *); |
128 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *); | 129 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *); |
129 static tree fold_binary_op_with_conditional_arg (location_t, | 130 static tree fold_binary_op_with_conditional_arg (location_t, |
130 enum tree_code, tree, | 131 enum tree_code, tree, |
131 tree, tree, | 132 tree, tree, |
132 tree, tree, int); | 133 tree, tree, int); |
133 static tree fold_mathfn_compare (location_t, | |
134 enum built_in_function, enum tree_code, | |
135 tree, tree, tree); | |
136 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree); | |
137 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree); | |
138 static bool reorder_operands_p (const_tree, const_tree); | |
139 static tree fold_negate_const (tree, tree); | 134 static tree fold_negate_const (tree, tree); |
140 static tree fold_not_const (const_tree, tree); | 135 static tree fold_not_const (const_tree, tree); |
141 static tree fold_relational_const (enum tree_code, tree, tree, tree); | 136 static tree fold_relational_const (enum tree_code, tree, tree, tree); |
142 static tree fold_convert_const (enum tree_code, tree, tree); | 137 static tree fold_convert_const (enum tree_code, tree, tree); |
143 | 138 static tree fold_view_convert_expr (tree, tree); |
139 static tree fold_negate_expr (location_t, tree); | |
140 | |
141 | |
142 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION. | |
143 Otherwise, return LOC. */ | |
144 | |
145 static location_t | |
146 expr_location_or (tree t, location_t loc) | |
147 { | |
148 location_t tloc = EXPR_LOCATION (t); | |
149 return tloc == UNKNOWN_LOCATION ? loc : tloc; | |
150 } | |
144 | 151 |
145 /* Similar to protected_set_expr_location, but never modify x in place, | 152 /* Similar to protected_set_expr_location, but never modify x in place, |
146 if location can and needs to be set, unshare it. */ | 153 if location can and needs to be set, unshare it. */ |
147 | 154 |
148 static inline tree | 155 static inline tree |
157 x = copy_node (x); | 164 x = copy_node (x); |
158 SET_EXPR_LOCATION (x, loc); | 165 SET_EXPR_LOCATION (x, loc); |
159 } | 166 } |
160 return x; | 167 return x; |
161 } | 168 } |
162 | |
163 | |
164 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring | |
165 overflow. Suppose A, B and SUM have the same respective signs as A1, B1, | |
166 and SUM1. Then this yields nonzero if overflow occurred during the | |
167 addition. | |
168 | |
169 Overflow occurs if A and B have the same sign, but A and SUM differ in | |
170 sign. Use `^' to test whether signs differ, and `< 0' to isolate the | |
171 sign. */ | |
172 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0) | |
173 | 169 |
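The OVERFLOW_SUM_SIGN macro deleted on the left encodes the classic two's-complement test: A + B overflows exactly when A and B share a sign but the sum's sign differs, isolated with `^` and a sign-bit check. A standalone sketch of the same test on 32-bit values (hypothetical helper, not GCC API):

```c
#include <stdbool.h>
#include <stdint.h>

/* True iff a + b overflows 32-bit two's-complement arithmetic.
   ~(a ^ b) has its sign bit set when a and b agree in sign;
   (a ^ sum) has its sign bit set when a and the sum disagree.  */
static bool
sum_overflows (int32_t a, int32_t b)
{
  uint32_t ua = (uint32_t) a, ub = (uint32_t) b;
  uint32_t sum = ua + ub;              /* unsigned add wraps, no UB */
  return ((~(ua ^ ub) & (ua ^ sum)) >> 31) != 0;
}
```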
174 /* If ARG2 divides ARG1 with zero remainder, carries out the division | 170 /* If ARG2 divides ARG1 with zero remainder, carries out the exact |
175 of type CODE and returns the quotient. | 171 division and returns the quotient. Otherwise returns |
176 Otherwise returns NULL_TREE. */ | 172 NULL_TREE. */ |
177 | 173 |
178 tree | 174 tree |
179 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2) | 175 div_if_zero_remainder (const_tree arg1, const_tree arg2) |
180 { | 176 { |
181 double_int quo, rem; | 177 widest_int quo; |
182 int uns; | 178 |
183 | 179 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2), |
184 /* The sign of the division is according to operand two, that | 180 SIGNED, &quo)) |
185 does the correct thing for POINTER_PLUS_EXPR where we want | 181 return wide_int_to_tree (TREE_TYPE (arg1), quo); |
186 a signed division. */ | |
187 uns = TYPE_UNSIGNED (TREE_TYPE (arg2)); | |
188 if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE | |
189 && TYPE_IS_SIZETYPE (TREE_TYPE (arg2))) | |
190 uns = false; | |
191 | |
192 quo = double_int_divmod (tree_to_double_int (arg1), | |
193 tree_to_double_int (arg2), | |
194 uns, code, &rem); | |
195 | |
196 if (double_int_zero_p (rem)) | |
197 return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high); | |
198 | 182 |
199 return NULL_TREE; | 183 return NULL_TREE; |
200 } | 184 } |
201 | 185 |
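The rewritten div_if_zero_remainder drops the old divmod-then-test-remainder sequence (and the sizetype sign fiddling) in favor of wi::multiple_of_p, which reports divisibility and produces the quotient in one call. The contract, sketched with plain 64-bit integers (hypothetical helper, not GCC API):

```c
#include <stdbool.h>
#include <stdint.h>

/* Store a / b in *quo and return true only when the division is
   exact; otherwise return false and leave *quo untouched.  */
static bool
div_if_zero_remainder_sketch (int64_t a, int64_t b, int64_t *quo)
{
  if (b == 0 || (a == INT64_MIN && b == -1)   /* avoid UB cases */
      || a % b != 0)
    return false;
  *quo = a / b;
  return true;
}
```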
202 /* This is nonzero if we should defer warnings about undefined | 186 /* This is nonzero if we should defer warnings about undefined |
239 the smaller of CODE and the deferred code when deciding whether to | 223 the smaller of CODE and the deferred code when deciding whether to |
240 issue the warning. CODE may be zero to mean to always use the | 224 issue the warning. CODE may be zero to mean to always use the |
241 deferred code. */ | 225 deferred code. */ |
242 | 226 |
243 void | 227 void |
244 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code) | 228 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code) |
245 { | 229 { |
246 const char *warnmsg; | 230 const char *warnmsg; |
247 location_t locus; | 231 location_t locus; |
248 | 232 |
249 gcc_assert (fold_deferring_overflow_warnings > 0); | 233 gcc_assert (fold_deferring_overflow_warnings > 0); |
299 } | 283 } |
300 | 284 |
301 /* This is called when we fold something based on the fact that signed | 285 /* This is called when we fold something based on the fact that signed |
302 overflow is undefined. */ | 286 overflow is undefined. */ |
303 | 287 |
304 static void | 288 void |
305 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc) | 289 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc) |
306 { | 290 { |
307 if (fold_deferring_overflow_warnings > 0) | 291 if (fold_deferring_overflow_warnings > 0) |
308 { | 292 { |
309 if (fold_deferred_overflow_warning == NULL | 293 if (fold_deferred_overflow_warning == NULL |
318 } | 302 } |
319 | 303 |
320 /* Return true if the built-in mathematical function specified by CODE | 304 /* Return true if the built-in mathematical function specified by CODE |
321 is odd, i.e. -f(x) == f(-x). */ | 305 is odd, i.e. -f(x) == f(-x). */ |
322 | 306 |
323 static bool | 307 bool |
324 negate_mathfn_p (enum built_in_function code) | 308 negate_mathfn_p (combined_fn fn) |
325 { | 309 { |
326 switch (code) | 310 switch (fn) |
327 { | 311 { |
328 CASE_FLT_FN (BUILT_IN_ASIN): | 312 CASE_CFN_ASIN: |
329 CASE_FLT_FN (BUILT_IN_ASINH): | 313 CASE_CFN_ASINH: |
330 CASE_FLT_FN (BUILT_IN_ATAN): | 314 CASE_CFN_ATAN: |
331 CASE_FLT_FN (BUILT_IN_ATANH): | 315 CASE_CFN_ATANH: |
332 CASE_FLT_FN (BUILT_IN_CASIN): | 316 CASE_CFN_CASIN: |
333 CASE_FLT_FN (BUILT_IN_CASINH): | 317 CASE_CFN_CASINH: |
334 CASE_FLT_FN (BUILT_IN_CATAN): | 318 CASE_CFN_CATAN: |
335 CASE_FLT_FN (BUILT_IN_CATANH): | 319 CASE_CFN_CATANH: |
336 CASE_FLT_FN (BUILT_IN_CBRT): | 320 CASE_CFN_CBRT: |
337 CASE_FLT_FN (BUILT_IN_CPROJ): | 321 CASE_CFN_CPROJ: |
338 CASE_FLT_FN (BUILT_IN_CSIN): | 322 CASE_CFN_CSIN: |
339 CASE_FLT_FN (BUILT_IN_CSINH): | 323 CASE_CFN_CSINH: |
340 CASE_FLT_FN (BUILT_IN_CTAN): | 324 CASE_CFN_CTAN: |
341 CASE_FLT_FN (BUILT_IN_CTANH): | 325 CASE_CFN_CTANH: |
342 CASE_FLT_FN (BUILT_IN_ERF): | 326 CASE_CFN_ERF: |
343 CASE_FLT_FN (BUILT_IN_LLROUND): | 327 CASE_CFN_LLROUND: |
344 CASE_FLT_FN (BUILT_IN_LROUND): | 328 CASE_CFN_LROUND: |
345 CASE_FLT_FN (BUILT_IN_ROUND): | 329 CASE_CFN_ROUND: |
346 CASE_FLT_FN (BUILT_IN_SIN): | 330 CASE_CFN_SIN: |
347 CASE_FLT_FN (BUILT_IN_SINH): | 331 CASE_CFN_SINH: |
348 CASE_FLT_FN (BUILT_IN_TAN): | 332 CASE_CFN_TAN: |
349 CASE_FLT_FN (BUILT_IN_TANH): | 333 CASE_CFN_TANH: |
350 CASE_FLT_FN (BUILT_IN_TRUNC): | 334 CASE_CFN_TRUNC: |
351 return true; | 335 return true; |
352 | 336 |
353 CASE_FLT_FN (BUILT_IN_LLRINT): | 337 CASE_CFN_LLRINT: |
354 CASE_FLT_FN (BUILT_IN_LRINT): | 338 CASE_CFN_LRINT: |
355 CASE_FLT_FN (BUILT_IN_NEARBYINT): | 339 CASE_CFN_NEARBYINT: |
356 CASE_FLT_FN (BUILT_IN_RINT): | 340 CASE_CFN_RINT: |
357 return !flag_rounding_math; | 341 return !flag_rounding_math; |
358 | 342 |
359 default: | 343 default: |
360 break; | 344 break; |
361 } | 345 } |
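negate_mathfn_p whitelists odd functions, those with -f(x) == f(-x), so a negation can be pushed into the call; the rint/nearbyint group only qualifies when -frounding-math is off, since directed rounding modes break the symmetry. A quick numerical smoke test of the identity (assumes a sign-symmetric libm, which common implementations provide):

```c
#include <math.h>
#include <stdio.h>

int
main (void)
{
  double x = 0.73;
  printf ("-sin(x)  = %.17g  sin(-x)  = %.17g\n", -sin (x), sin (-x));
  printf ("-cbrt(x) = %.17g  cbrt(-x) = %.17g\n", -cbrt (x), cbrt (-x));
  return 0;
}
```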
366 overflow. */ | 350 overflow. */ |
367 | 351 |
368 bool | 352 bool |
369 may_negate_without_overflow_p (const_tree t) | 353 may_negate_without_overflow_p (const_tree t) |
370 { | 354 { |
371 unsigned HOST_WIDE_INT val; | |
372 unsigned int prec; | |
373 tree type; | 355 tree type; |
374 | 356 |
375 gcc_assert (TREE_CODE (t) == INTEGER_CST); | 357 gcc_assert (TREE_CODE (t) == INTEGER_CST); |
376 | 358 |
377 type = TREE_TYPE (t); | 359 type = TREE_TYPE (t); |
378 if (TYPE_UNSIGNED (type)) | 360 if (TYPE_UNSIGNED (type)) |
379 return false; | 361 return false; |
380 | 362 |
381 prec = TYPE_PRECISION (type); | 363 return !wi::only_sign_bit_p (wi::to_wide (t)); |
382 if (prec > HOST_BITS_PER_WIDE_INT) | |
383 { | |
384 if (TREE_INT_CST_LOW (t) != 0) | |
385 return true; | |
386 prec -= HOST_BITS_PER_WIDE_INT; | |
387 val = TREE_INT_CST_HIGH (t); | |
388 } | |
389 else | |
390 val = TREE_INT_CST_LOW (t); | |
391 if (prec < HOST_BITS_PER_WIDE_INT) | |
392 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1; | |
393 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1)); | |
394 } | 364 } |
395 | 365 |
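The wide_int rewrite collapses the word-by-word scan on the left into one query: negating a signed constant overflows exactly when the value is the type's minimum, i.e. when only the sign bit is set, which is what wi::only_sign_bit_p reports. The same test on a plain 32-bit int (hypothetical helper):

```c
#include <stdbool.h>
#include <stdint.h>

/* In two's complement, the minimum value is the only bit pattern
   with just the sign bit set, and the only value whose negation
   does not fit.  */
static bool
may_negate_without_overflow_sketch (int32_t t)
{
  return (uint32_t) t != ((uint32_t) 1 << 31);   /* t != INT32_MIN */
}
```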
396 /* Determine whether an expression T can be cheaply negated using | 366 /* Determine whether an expression T can be cheaply negated using |
397 the function negate_expr without introducing undefined overflow. */ | 367 the function negate_expr without introducing undefined overflow. */ |
398 | 368 |
408 | 378 |
409 STRIP_SIGN_NOPS (t); | 379 STRIP_SIGN_NOPS (t); |
410 switch (TREE_CODE (t)) | 380 switch (TREE_CODE (t)) |
411 { | 381 { |
412 case INTEGER_CST: | 382 case INTEGER_CST: |
413 if (TYPE_OVERFLOW_WRAPS (type)) | 383 if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type)) |
414 return true; | 384 return true; |
415 | 385 |
416 /* Check that -CST will not overflow type. */ | 386 /* Check that -CST will not overflow type. */ |
417 return may_negate_without_overflow_p (t); | 387 return may_negate_without_overflow_p (t); |
418 case BIT_NOT_EXPR: | 388 case BIT_NOT_EXPR: |
419 return (INTEGRAL_TYPE_P (type) | 389 return (INTEGRAL_TYPE_P (type) |
420 && TYPE_OVERFLOW_WRAPS (type)); | 390 && TYPE_OVERFLOW_WRAPS (type)); |
421 | 391 |
422 case FIXED_CST: | 392 case FIXED_CST: |
393 return true; | |
394 | |
423 case NEGATE_EXPR: | 395 case NEGATE_EXPR: |
424 return true; | 396 return !TYPE_OVERFLOW_SANITIZED (type); |
425 | 397 |
426 case REAL_CST: | 398 case REAL_CST: |
427 /* We want to canonicalize to positive real constants. Pretend | 399 /* We want to canonicalize to positive real constants. Pretend |
428 that only negative ones can be easily negated. */ | 400 that only negative ones can be easily negated. */ |
429 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t)); | 401 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t)); |
430 | 402 |
431 case COMPLEX_CST: | 403 case COMPLEX_CST: |
432 return negate_expr_p (TREE_REALPART (t)) | 404 return negate_expr_p (TREE_REALPART (t)) |
433 && negate_expr_p (TREE_IMAGPART (t)); | 405 && negate_expr_p (TREE_IMAGPART (t)); |
434 | 406 |
407 case VECTOR_CST: | |
408 { | |
409 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type)) | |
410 return true; | |
411 | |
412 int count = VECTOR_CST_NELTS (t), i; | |
413 | |
414 for (i = 0; i < count; i++) | |
415 if (!negate_expr_p (VECTOR_CST_ELT (t, i))) | |
416 return false; | |
417 | |
418 return true; | |
419 } | |
420 | |
435 case COMPLEX_EXPR: | 421 case COMPLEX_EXPR: |
436 return negate_expr_p (TREE_OPERAND (t, 0)) | 422 return negate_expr_p (TREE_OPERAND (t, 0)) |
437 && negate_expr_p (TREE_OPERAND (t, 1)); | 423 && negate_expr_p (TREE_OPERAND (t, 1)); |
438 | 424 |
439 case CONJ_EXPR: | 425 case CONJ_EXPR: |
440 return negate_expr_p (TREE_OPERAND (t, 0)); | 426 return negate_expr_p (TREE_OPERAND (t, 0)); |
441 | 427 |
442 case PLUS_EXPR: | 428 case PLUS_EXPR: |
443 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)) | 429 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)) |
444 || HONOR_SIGNED_ZEROS (TYPE_MODE (type))) | 430 || HONOR_SIGNED_ZEROS (element_mode (type)) |
431 || (INTEGRAL_TYPE_P (type) | |
432 && ! TYPE_OVERFLOW_WRAPS (type))) | |
445 return false; | 433 return false; |
446 /* -(A + B) -> (-B) - A. */ | 434 /* -(A + B) -> (-B) - A. */ |
447 if (negate_expr_p (TREE_OPERAND (t, 1)) | 435 if (negate_expr_p (TREE_OPERAND (t, 1))) |
448 && reorder_operands_p (TREE_OPERAND (t, 0), | |
449 TREE_OPERAND (t, 1))) | |
450 return true; | 436 return true; |
451 /* -(A + B) -> (-A) - B. */ | 437 /* -(A + B) -> (-A) - B. */ |
452 return negate_expr_p (TREE_OPERAND (t, 0)); | 438 return negate_expr_p (TREE_OPERAND (t, 0)); |
453 | 439 |
454 case MINUS_EXPR: | 440 case MINUS_EXPR: |
455 /* We can't turn -(A-B) into B-A when we honor signed zeros. */ | 441 /* We can't turn -(A-B) into B-A when we honor signed zeros. */ |
456 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)) | 442 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)) |
457 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)) | 443 && !HONOR_SIGNED_ZEROS (element_mode (type)) |
458 && reorder_operands_p (TREE_OPERAND (t, 0), | 444 && (! INTEGRAL_TYPE_P (type) |
459 TREE_OPERAND (t, 1)); | 445 || TYPE_OVERFLOW_WRAPS (type)); |
460 | 446 |
461 case MULT_EXPR: | 447 case MULT_EXPR: |
462 if (TYPE_UNSIGNED (TREE_TYPE (t))) | 448 if (TYPE_UNSIGNED (type)) |
463 break; | 449 break; |
450 /* INT_MIN/n * n doesn't overflow while negating one operand it does | |
451 if n is a (negative) power of two. */ | |
452 if (INTEGRAL_TYPE_P (TREE_TYPE (t)) | |
453 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t)) | |
454 && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST | |
455 && (wi::popcount | |
456 (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1) | |
457 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST | |
458 && (wi::popcount | |
459 (wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1))) | |
460 break; | |
464 | 461 |
465 /* Fall through. */ | 462 /* Fall through. */ |
466 | 463 |
467 case RDIV_EXPR: | 464 case RDIV_EXPR: |
468 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t)))) | 465 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t)))) |
469 return negate_expr_p (TREE_OPERAND (t, 1)) | 466 return negate_expr_p (TREE_OPERAND (t, 1)) |
470 || negate_expr_p (TREE_OPERAND (t, 0)); | 467 || negate_expr_p (TREE_OPERAND (t, 0)); |
471 break; | 468 break; |
472 | 469 |
473 case TRUNC_DIV_EXPR: | 470 case TRUNC_DIV_EXPR: |
474 case ROUND_DIV_EXPR: | 471 case ROUND_DIV_EXPR: |
475 case FLOOR_DIV_EXPR: | |
476 case CEIL_DIV_EXPR: | |
477 case EXACT_DIV_EXPR: | 472 case EXACT_DIV_EXPR: |
478 /* In general we can't negate A / B, because if A is INT_MIN and | 473 if (TYPE_UNSIGNED (type)) |
474 break; | |
475 if (negate_expr_p (TREE_OPERAND (t, 0))) | |
476 return true; | |
477 /* In general we can't negate B in A / B, because if A is INT_MIN and | |
479 B is 1, we may turn this into INT_MIN / -1 which is undefined | 478 B is 1, we may turn this into INT_MIN / -1 which is undefined |
480 and actually traps on some architectures. But if overflow is | 479 and actually traps on some architectures. */ |
481 undefined, we can negate, because - (INT_MIN / 1) is an | 480 if (! INTEGRAL_TYPE_P (TREE_TYPE (t)) |
482 overflow. */ | 481 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t)) |
483 if (INTEGRAL_TYPE_P (TREE_TYPE (t)) | 482 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST |
484 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))) | 483 && ! integer_onep (TREE_OPERAND (t, 1)))) |
485 break; | 484 return negate_expr_p (TREE_OPERAND (t, 1)); |
486 return negate_expr_p (TREE_OPERAND (t, 1)) | 485 break; |
487 || negate_expr_p (TREE_OPERAND (t, 0)); | |
488 | 486 |
489 case NOP_EXPR: | 487 case NOP_EXPR: |
490 /* Negate -((double)float) as (double)(-float). */ | 488 /* Negate -((double)float) as (double)(-float). */ |
491 if (TREE_CODE (type) == REAL_TYPE) | 489 if (TREE_CODE (type) == REAL_TYPE) |
492 { | 490 { |
496 } | 494 } |
497 break; | 495 break; |
498 | 496 |
499 case CALL_EXPR: | 497 case CALL_EXPR: |
500 /* Negate -f(x) as f(-x). */ | 498 /* Negate -f(x) as f(-x). */ |
501 if (negate_mathfn_p (builtin_mathfn_code (t))) | 499 if (negate_mathfn_p (get_call_combined_fn (t))) |
502 return negate_expr_p (CALL_EXPR_ARG (t, 0)); | 500 return negate_expr_p (CALL_EXPR_ARG (t, 0)); |
503 break; | 501 break; |
504 | 502 |
505 case RSHIFT_EXPR: | 503 case RSHIFT_EXPR: |
506 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */ | 504 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */ |
507 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST) | 505 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST) |
508 { | 506 { |
509 tree op1 = TREE_OPERAND (t, 1); | 507 tree op1 = TREE_OPERAND (t, 1); |
510 if (TREE_INT_CST_HIGH (op1) == 0 | 508 if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1) |
511 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1) | |
512 == TREE_INT_CST_LOW (op1)) | |
513 return true; | 509 return true; |
514 } | 510 } |
515 break; | 511 break; |
516 | 512 |
517 default: | 513 default: |
524 simplification is possible. | 520 simplification is possible. |
525 If negate_expr_p would return true for T, NULL_TREE will never be | 521 If negate_expr_p would return true for T, NULL_TREE will never be |
526 returned. */ | 522 returned. */ |
527 | 523 |
528 static tree | 524 static tree |
529 fold_negate_expr (location_t loc, tree t) | 525 fold_negate_expr_1 (location_t loc, tree t) |
530 { | 526 { |
531 tree type = TREE_TYPE (t); | 527 tree type = TREE_TYPE (t); |
532 tree tem; | 528 tree tem; |
533 | 529 |
534 switch (TREE_CODE (t)) | 530 switch (TREE_CODE (t)) |
535 { | 531 { |
536 /* Convert - (~A) to A + 1. */ | 532 /* Convert - (~A) to A + 1. */ |
537 case BIT_NOT_EXPR: | 533 case BIT_NOT_EXPR: |
538 if (INTEGRAL_TYPE_P (type)) | 534 if (INTEGRAL_TYPE_P (type)) |
539 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0), | 535 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0), |
540 build_int_cst (type, 1)); | 536 build_one_cst (type)); |
541 break; | 537 break; |
542 | 538 |
543 case INTEGER_CST: | 539 case INTEGER_CST: |
544 tem = fold_negate_const (t, type); | 540 tem = fold_negate_const (t, type); |
545 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t) | 541 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t) |
546 || !TYPE_OVERFLOW_TRAPS (type)) | 542 || (ANY_INTEGRAL_TYPE_P (type) |
543 && !TYPE_OVERFLOW_TRAPS (type) | |
544 && TYPE_OVERFLOW_WRAPS (type)) | |
545 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0) | |
547 return tem; | 546 return tem; |
548 break; | 547 break; |
549 | 548 |
550 case REAL_CST: | 549 case REAL_CST: |
551 tem = fold_negate_const (t, type); | 550 tem = fold_negate_const (t, type); |
552 /* Two's complement FP formats, such as c4x, may overflow. */ | 551 return tem; |
553 if (!TREE_OVERFLOW (tem) || !flag_trapping_math) | |
554 return tem; | |
555 break; | |
556 | 552 |
557 case FIXED_CST: | 553 case FIXED_CST: |
558 tem = fold_negate_const (t, type); | 554 tem = fold_negate_const (t, type); |
559 return tem; | 555 return tem; |
560 | 556 |
561 case COMPLEX_CST: | 557 case COMPLEX_CST: |
562 { | 558 { |
563 tree rpart = negate_expr (TREE_REALPART (t)); | 559 tree rpart = fold_negate_expr (loc, TREE_REALPART (t)); |
564 tree ipart = negate_expr (TREE_IMAGPART (t)); | 560 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t)); |
565 | 561 if (rpart && ipart) |
566 if ((TREE_CODE (rpart) == REAL_CST | |
567 && TREE_CODE (ipart) == REAL_CST) | |
568 || (TREE_CODE (rpart) == INTEGER_CST | |
569 && TREE_CODE (ipart) == INTEGER_CST)) | |
570 return build_complex (type, rpart, ipart); | 562 return build_complex (type, rpart, ipart); |
571 } | 563 } |
572 break; | 564 break; |
573 | 565 |
566 case VECTOR_CST: | |
567 { | |
568 int count = VECTOR_CST_NELTS (t), i; | |
569 | |
570 auto_vec<tree, 32> elts (count); | |
571 for (i = 0; i < count; i++) | |
572 { | |
573 tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i)); | |
574 if (elt == NULL_TREE) | |
575 return NULL_TREE; | |
576 elts.quick_push (elt); | |
577 } | |
578 | |
579 return build_vector (type, elts); | |
580 } | |
581 | |
574 case COMPLEX_EXPR: | 582 case COMPLEX_EXPR: |
575 if (negate_expr_p (t)) | 583 if (negate_expr_p (t)) |
576 return fold_build2_loc (loc, COMPLEX_EXPR, type, | 584 return fold_build2_loc (loc, COMPLEX_EXPR, type, |
577 fold_negate_expr (loc, TREE_OPERAND (t, 0)), | 585 fold_negate_expr (loc, TREE_OPERAND (t, 0)), |
578 fold_negate_expr (loc, TREE_OPERAND (t, 1))); | 586 fold_negate_expr (loc, TREE_OPERAND (t, 1))); |
579 break; | 587 break; |
580 | 588 |
581 case CONJ_EXPR: | 589 case CONJ_EXPR: |
582 if (negate_expr_p (t)) | 590 if (negate_expr_p (t)) |
583 return fold_build1_loc (loc, CONJ_EXPR, type, | 591 return fold_build1_loc (loc, CONJ_EXPR, type, |
584 fold_negate_expr (loc, TREE_OPERAND (t, 0))); | 592 fold_negate_expr (loc, TREE_OPERAND (t, 0))); |
585 break; | 593 break; |
586 | 594 |
587 case NEGATE_EXPR: | 595 case NEGATE_EXPR: |
588 return TREE_OPERAND (t, 0); | 596 if (!TYPE_OVERFLOW_SANITIZED (type)) |
597 return TREE_OPERAND (t, 0); | |
598 break; | |
589 | 599 |
590 case PLUS_EXPR: | 600 case PLUS_EXPR: |
591 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)) | 601 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)) |
592 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))) | 602 && !HONOR_SIGNED_ZEROS (element_mode (type))) |
593 { | 603 { |
594 /* -(A + B) -> (-B) - A. */ | 604 /* -(A + B) -> (-B) - A. */ |
595 if (negate_expr_p (TREE_OPERAND (t, 1)) | 605 if (negate_expr_p (TREE_OPERAND (t, 1))) |
596 && reorder_operands_p (TREE_OPERAND (t, 0), | |
597 TREE_OPERAND (t, 1))) | |
598 { | 606 { |
599 tem = negate_expr (TREE_OPERAND (t, 1)); | 607 tem = negate_expr (TREE_OPERAND (t, 1)); |
600 return fold_build2_loc (loc, MINUS_EXPR, type, | 608 return fold_build2_loc (loc, MINUS_EXPR, type, |
601 tem, TREE_OPERAND (t, 0)); | 609 tem, TREE_OPERAND (t, 0)); |
602 } | 610 } |
603 | 611 |
604 /* -(A + B) -> (-A) - B. */ | 612 /* -(A + B) -> (-A) - B. */ |
605 if (negate_expr_p (TREE_OPERAND (t, 0))) | 613 if (negate_expr_p (TREE_OPERAND (t, 0))) |
606 { | 614 { |
607 tem = negate_expr (TREE_OPERAND (t, 0)); | 615 tem = negate_expr (TREE_OPERAND (t, 0)); |
608 return fold_build2_loc (loc, MINUS_EXPR, type, | 616 return fold_build2_loc (loc, MINUS_EXPR, type, |
609 tem, TREE_OPERAND (t, 1)); | 617 tem, TREE_OPERAND (t, 1)); |
610 } | 618 } |
611 } | 619 } |
612 break; | 620 break; |
613 | 621 |
614 case MINUS_EXPR: | 622 case MINUS_EXPR: |
615 /* - (A - B) -> B - A */ | 623 /* - (A - B) -> B - A */ |
616 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)) | 624 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)) |
617 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)) | 625 && !HONOR_SIGNED_ZEROS (element_mode (type))) |
618 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1))) | |
619 return fold_build2_loc (loc, MINUS_EXPR, type, | 626 return fold_build2_loc (loc, MINUS_EXPR, type, |
620 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0)); | 627 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0)); |
621 break; | 628 break; |
622 | 629 |
623 case MULT_EXPR: | 630 case MULT_EXPR: |
624 if (TYPE_UNSIGNED (type)) | 631 if (TYPE_UNSIGNED (type)) |
625 break; | 632 break; |
626 | 633 |
627 /* Fall through. */ | 634 /* Fall through. */ |
628 | 635 |
629 case RDIV_EXPR: | 636 case RDIV_EXPR: |
630 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))) | 637 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))) |
631 { | 638 { |
632 tem = TREE_OPERAND (t, 1); | 639 tem = TREE_OPERAND (t, 1); |
633 if (negate_expr_p (tem)) | 640 if (negate_expr_p (tem)) |
634 return fold_build2_loc (loc, TREE_CODE (t), type, | 641 return fold_build2_loc (loc, TREE_CODE (t), type, |
635 TREE_OPERAND (t, 0), negate_expr (tem)); | 642 TREE_OPERAND (t, 0), negate_expr (tem)); |
636 tem = TREE_OPERAND (t, 0); | 643 tem = TREE_OPERAND (t, 0); |
637 if (negate_expr_p (tem)) | 644 if (negate_expr_p (tem)) |
638 return fold_build2_loc (loc, TREE_CODE (t), type, | 645 return fold_build2_loc (loc, TREE_CODE (t), type, |
639 negate_expr (tem), TREE_OPERAND (t, 1)); | 646 negate_expr (tem), TREE_OPERAND (t, 1)); |
640 } | 647 } |
641 break; | 648 break; |
642 | 649 |
643 case TRUNC_DIV_EXPR: | 650 case TRUNC_DIV_EXPR: |
644 case ROUND_DIV_EXPR: | 651 case ROUND_DIV_EXPR: |
645 case FLOOR_DIV_EXPR: | |
646 case CEIL_DIV_EXPR: | |
647 case EXACT_DIV_EXPR: | 652 case EXACT_DIV_EXPR: |
648 /* In general we can't negate A / B, because if A is INT_MIN and | 653 if (TYPE_UNSIGNED (type)) |
654 break; | |
655 if (negate_expr_p (TREE_OPERAND (t, 0))) | |
656 return fold_build2_loc (loc, TREE_CODE (t), type, | |
657 negate_expr (TREE_OPERAND (t, 0)), | |
658 TREE_OPERAND (t, 1)); | |
659 /* In general we can't negate B in A / B, because if A is INT_MIN and | |
649 B is 1, we may turn this into INT_MIN / -1 which is undefined | 660 B is 1, we may turn this into INT_MIN / -1 which is undefined |
650 and actually traps on some architectures. But if overflow is | 661 and actually traps on some architectures. */ |
651 undefined, we can negate, because - (INT_MIN / 1) is an | 662 if ((! INTEGRAL_TYPE_P (TREE_TYPE (t)) |
652 overflow. */ | 663 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t)) |
653 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type)) | 664 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST |
654 { | 665 && ! integer_onep (TREE_OPERAND (t, 1)))) |
655 const char * const warnmsg = G_("assuming signed overflow does not " | 666 && negate_expr_p (TREE_OPERAND (t, 1))) |
656 "occur when negating a division"); | 667 return fold_build2_loc (loc, TREE_CODE (t), type, |
657 tem = TREE_OPERAND (t, 1); | 668 TREE_OPERAND (t, 0), |
658 if (negate_expr_p (tem)) | 669 negate_expr (TREE_OPERAND (t, 1))); |
659 { | |
660 if (INTEGRAL_TYPE_P (type) | |
661 && (TREE_CODE (tem) != INTEGER_CST | |
662 || integer_onep (tem))) | |
663 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC); | |
664 return fold_build2_loc (loc, TREE_CODE (t), type, | |
665 TREE_OPERAND (t, 0), negate_expr (tem)); | |
666 } | |
667 tem = TREE_OPERAND (t, 0); | |
668 if (negate_expr_p (tem)) | |
669 { | |
670 if (INTEGRAL_TYPE_P (type) | |
671 && (TREE_CODE (tem) != INTEGER_CST | |
672 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type)))) | |
673 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC); | |
674 return fold_build2_loc (loc, TREE_CODE (t), type, | |
675 negate_expr (tem), TREE_OPERAND (t, 1)); | |
676 } | |
677 } | |
678 break; | 670 break; |
679 | 671 |
680 case NOP_EXPR: | 672 case NOP_EXPR: |
681 /* Convert -((double)float) into (double)(-float). */ | 673 /* Convert -((double)float) into (double)(-float). */ |
682 if (TREE_CODE (type) == REAL_TYPE) | 674 if (TREE_CODE (type) == REAL_TYPE) |
687 } | 679 } |
688 break; | 680 break; |
689 | 681 |
690 case CALL_EXPR: | 682 case CALL_EXPR: |
691 /* Negate -f(x) as f(-x). */ | 683 /* Negate -f(x) as f(-x). */ |
692 if (negate_mathfn_p (builtin_mathfn_code (t)) | 684 if (negate_mathfn_p (get_call_combined_fn (t)) |
693 && negate_expr_p (CALL_EXPR_ARG (t, 0))) | 685 && negate_expr_p (CALL_EXPR_ARG (t, 0))) |
694 { | 686 { |
695 tree fndecl, arg; | 687 tree fndecl, arg; |
696 | 688 |
697 fndecl = get_callee_fndecl (t); | 689 fndecl = get_callee_fndecl (t); |
699 return build_call_expr_loc (loc, fndecl, 1, arg); | 691 return build_call_expr_loc (loc, fndecl, 1, arg); |
700 } | 692 } |
701 break; | 693 break; |
702 | 694 |
703 case RSHIFT_EXPR: | 695 case RSHIFT_EXPR: |
704 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */ | 696 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */ |
705 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST) | 697 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST) |
706 { | 698 { |
707 tree op1 = TREE_OPERAND (t, 1); | 699 tree op1 = TREE_OPERAND (t, 1); |
708 if (TREE_INT_CST_HIGH (op1) == 0 | 700 if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1) |
709 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1) | |
710 == TREE_INT_CST_LOW (op1)) | |
711 { | 701 { |
712 tree ntype = TYPE_UNSIGNED (type) | 702 tree ntype = TYPE_UNSIGNED (type) |
713 ? signed_type_for (type) | 703 ? signed_type_for (type) |
714 : unsigned_type_for (type); | 704 : unsigned_type_for (type); |
715 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0)); | 705 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0)); |
724 } | 714 } |
725 | 715 |
726 return NULL_TREE; | 716 return NULL_TREE; |
727 } | 717 } |
728 | 718 |
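Both negate_expr_p and fold_negate_expr_1 recognize the sign-extraction idiom above: an arithmetic right shift by precision - 1 yields 0 or -1, so its negation (0 or 1) equals the logical shift of the same bits. Demonstrated on 32-bit values (assumes two's complement and an arithmetic >> on signed int, as on GCC targets):

```c
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  for (int32_t x = -3; x <= 3; x++)
    /* -((int)x >> 31) == (unsigned)x >> 31 */
    assert (-(x >> 31) == (int32_t) ((uint32_t) x >> 31));
  return 0;
}
```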
719 /* A wrapper for fold_negate_expr_1. */ | |
720 | |
721 static tree | |
722 fold_negate_expr (location_t loc, tree t) | |
723 { | |
724 tree type = TREE_TYPE (t); | |
725 STRIP_SIGN_NOPS (t); | |
726 tree tem = fold_negate_expr_1 (loc, t); | |
727 if (tem == NULL_TREE) | |
728 return NULL_TREE; | |
729 return fold_convert_loc (loc, type, tem); | |
730 } | |
731 | |
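The division cases in both functions are careful because negating the divisor can manufacture INT_MIN / -1, which overflows and traps on common hardware even though the original INT_MIN / 1 was fine; the new code therefore prefers negating the dividend and only negates the divisor when it is a constant other than 1 (or when overflow wraps). The hazard itself, in plain C:

```c
#include <limits.h>

/* -(INT_MIN / 1) must not be rewritten as INT_MIN / -1: the
   quotient does not fit in int, and e.g. x86's idiv raises SIGFPE.  */
int
negated_division_hazard (void)
{
  volatile int a = INT_MIN, b = 1;
  return a / -b;   /* undefined behavior when a == INT_MIN, b == 1 */
}
```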
729 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be | 732 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be |
730 negated in a simpler way. Also allow for T to be NULL_TREE, in which case | 733 negated in a simpler way. Also allow for T to be NULL_TREE, in which case |
731 return NULL_TREE. */ | 734 return NULL_TREE. */ |
732 | 735 |
733 static tree | 736 static tree |
760 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that | 763 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that |
761 case, we negate an operand that was subtracted. Except if it is a | 764 case, we negate an operand that was subtracted. Except if it is a |
762 literal for which we use *MINUS_LITP instead. | 765 literal for which we use *MINUS_LITP instead. |
763 | 766 |
764 If NEGATE_P is true, we are negating all of IN, again except a literal | 767 If NEGATE_P is true, we are negating all of IN, again except a literal |
765 for which we use *MINUS_LITP instead. | 768 for which we use *MINUS_LITP instead. If a variable part is of pointer |
769 type, it is negated after converting to TYPE. This prevents us from | |
770 generating illegal MINUS pointer expression. LOC is the location of | |
771 the converted variable part. | |
766 | 772 |
767 If IN is itself a literal or constant, return it as appropriate. | 773 If IN is itself a literal or constant, return it as appropriate. |
768 | 774 |
769 Note that we do not guarantee that any of the three values will be the | 775 Note that we do not guarantee that any of the three values will be the |
770 same type as IN, but they will have the same signedness and mode. */ | 776 same type as IN, but they will have the same signedness and mode. */ |
771 | 777 |
772 static tree | 778 static tree |
773 split_tree (tree in, enum tree_code code, tree *conp, tree *litp, | 779 split_tree (tree in, tree type, enum tree_code code, |
774 tree *minus_litp, int negate_p) | 780 tree *minus_varp, tree *conp, tree *minus_conp, |
781 tree *litp, tree *minus_litp, int negate_p) | |
775 { | 782 { |
776 tree var = 0; | 783 tree var = 0; |
777 | 784 *minus_varp = 0; |
778 *conp = 0; | 785 *conp = 0; |
786 *minus_conp = 0; | |
779 *litp = 0; | 787 *litp = 0; |
780 *minus_litp = 0; | 788 *minus_litp = 0; |
781 | 789 |
782 /* Strip any conversions that don't change the machine mode or signedness. */ | 790 /* Strip any conversions that don't change the machine mode or signedness. */ |
783 STRIP_SIGN_NOPS (in); | 791 STRIP_SIGN_NOPS (in); |
790 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in)) | 798 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in)) |
791 /* We can associate addition and subtraction together (even | 799 /* We can associate addition and subtraction together (even |
792 though the C standard doesn't say so) for integers because | 800 though the C standard doesn't say so) for integers because |
793 the value is not affected. For reals, the value might be | 801 the value is not affected. For reals, the value might be |
794 affected, so we can't. */ | 802 affected, so we can't. */ |
795 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR) | 803 && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR) |
796 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR)))) | 804 || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR) |
805 || (code == MINUS_EXPR | |
806 && (TREE_CODE (in) == PLUS_EXPR | |
807 || TREE_CODE (in) == POINTER_PLUS_EXPR))))) | |
797 { | 808 { |
798 tree op0 = TREE_OPERAND (in, 0); | 809 tree op0 = TREE_OPERAND (in, 0); |
799 tree op1 = TREE_OPERAND (in, 1); | 810 tree op1 = TREE_OPERAND (in, 1); |
800 int neg1_p = TREE_CODE (in) == MINUS_EXPR; | 811 int neg1_p = TREE_CODE (in) == MINUS_EXPR; |
801 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0; | 812 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0; |
823 var = op1, neg_var_p = neg1_p; | 834 var = op1, neg_var_p = neg1_p; |
824 | 835 |
825 /* Now do any needed negations. */ | 836 /* Now do any needed negations. */ |
826 if (neg_litp_p) | 837 if (neg_litp_p) |
827 *minus_litp = *litp, *litp = 0; | 838 *minus_litp = *litp, *litp = 0; |
828 if (neg_conp_p) | 839 if (neg_conp_p && *conp) |
829 *conp = negate_expr (*conp); | 840 *minus_conp = *conp, *conp = 0; |
830 if (neg_var_p) | 841 if (neg_var_p && var) |
831 var = negate_expr (var); | 842 *minus_varp = var, var = 0; |
832 } | 843 } |
833 else if (TREE_CONSTANT (in)) | 844 else if (TREE_CONSTANT (in)) |
834 *conp = in; | 845 *conp = in; |
846 else if (TREE_CODE (in) == BIT_NOT_EXPR | |
847 && code == PLUS_EXPR) | |
848 { | |
849 /* -1 - X is folded to ~X, undo that here. Do _not_ do this | |
850 when IN is constant. */ | |
851 *litp = build_minus_one_cst (type); | |
852 *minus_varp = TREE_OPERAND (in, 0); | |
853 } | |
835 else | 854 else |
836 var = in; | 855 var = in; |
837 | 856 |
838 if (negate_p) | 857 if (negate_p) |
839 { | 858 { |
840 if (*litp) | 859 if (*litp) |
841 *minus_litp = *litp, *litp = 0; | 860 *minus_litp = *litp, *litp = 0; |
842 else if (*minus_litp) | 861 else if (*minus_litp) |
843 *litp = *minus_litp, *minus_litp = 0; | 862 *litp = *minus_litp, *minus_litp = 0; |
844 *conp = negate_expr (*conp); | 863 if (*conp) |
845 var = negate_expr (var); | 864 *minus_conp = *conp, *conp = 0; |
846 } | 865 else if (*minus_conp) |
866 *conp = *minus_conp, *minus_conp = 0; | |
867 if (var) | |
868 *minus_varp = var, var = 0; | |
869 else if (*minus_varp) | |
870 var = *minus_varp, *minus_varp = 0; | |
871 } | |
872 | |
873 if (*litp | |
874 && TREE_OVERFLOW_P (*litp)) | |
875 *litp = drop_tree_overflow (*litp); | |
876 if (*minus_litp | |
877 && TREE_OVERFLOW_P (*minus_litp)) | |
878 *minus_litp = drop_tree_overflow (*minus_litp); | |
847 | 879 |
848 return var; | 880 return var; |
849 } | 881 } |
850 | 882 |
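The new BIT_NOT_EXPR branch in split_tree undoes the earlier canonicalization of -1 - X into ~X, splitting ~X back into the literal -1 and a negated variable part so both can reassociate with neighboring terms. The two's-complement identity it relies on:

```c
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  /* ~x == -1 - x, so a ~X term contributes literal -1 and -X.  */
  for (int32_t x = -3; x <= 3; x++)
    assert (~x == -1 - x);
  return 0;
}
```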
851 /* Re-associate trees split by the above function. T1 and T2 are | 883 /* Re-associate trees split by the above function. T1 and T2 are |
855 | 887 |
856 static tree | 888 static tree |
857 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type) | 889 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type) |
858 { | 890 { |
859 if (t1 == 0) | 891 if (t1 == 0) |
860 return t2; | 892 { |
893 gcc_assert (t2 == 0 || code != MINUS_EXPR); | |
894 return t2; | |
895 } | |
861 else if (t2 == 0) | 896 else if (t2 == 0) |
862 return t1; | 897 return t1; |
863 | 898 |
864 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't | 899 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't |
865 try to fold this since we will have infinite recursion. But do | 900 try to fold this since we will have infinite recursion. But do |
866 deal with any NEGATE_EXPRs. */ | 901 deal with any NEGATE_EXPRs. */ |
867 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code | 902 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code |
903 || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR | |
868 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR) | 904 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR) |
869 { | 905 { |
870 if (code == PLUS_EXPR) | 906 if (code == PLUS_EXPR) |
871 { | 907 { |
872 if (TREE_CODE (t1) == NEGATE_EXPR) | 908 if (TREE_CODE (t1) == NEGATE_EXPR) |
900 for use in int_const_binop, size_binop and size_diffop. */ | 936 for use in int_const_binop, size_binop and size_diffop. */ |
901 | 937 |
902 static bool | 938 static bool |
903 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2) | 939 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2) |
904 { | 940 { |
905 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1)) | 941 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1)) |
906 return false; | 942 return false; |
907 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2)) | 943 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2)) |
908 return false; | 944 return false; |
909 | 945 |
910 switch (code) | 946 switch (code) |
911 { | 947 { |
912 case LSHIFT_EXPR: | 948 case LSHIFT_EXPR: |
923 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2) | 959 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2) |
924 && TYPE_MODE (type1) == TYPE_MODE (type2); | 960 && TYPE_MODE (type1) == TYPE_MODE (type2); |
925 } | 961 } |
926 | 962 |
927 | 963 |
928 /* Combine two integer constants ARG1 and ARG2 under operation CODE | 964 /* Combine two integer constants PARG1 and PARG2 under operation CODE |
929 to produce a new constant. Return NULL_TREE if we don't know how | 965 to produce a new constant. Return NULL_TREE if we don't know how |
930 to evaluate CODE at compile-time. | 966 to evaluate CODE at compile-time. */ |
931 | 967 |
932 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */ | 968 static tree |
933 | 969 int_const_binop_1 (enum tree_code code, const_tree parg1, const_tree parg2, |
934 tree | 970 int overflowable) |
935 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc) | 971 { |
936 { | 972 wide_int res; |
937 double_int op1, op2, res, tmp; | |
938 tree t; | 973 tree t; |
939 tree type = TREE_TYPE (arg1); | 974 tree type = TREE_TYPE (parg1); |
940 bool uns = TYPE_UNSIGNED (type); | 975 signop sign = TYPE_SIGN (type); |
941 bool is_sizetype | |
942 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)); | |
943 bool overflow = false; | 976 bool overflow = false; |
944 | 977 |
945 op1 = tree_to_double_int (arg1); | 978 wi::tree_to_wide_ref arg1 = wi::to_wide (parg1); |
946 op2 = tree_to_double_int (arg2); | 979 wide_int arg2 = wi::to_wide (parg2, TYPE_PRECISION (type)); |
947 | 980 |
948 switch (code) | 981 switch (code) |
949 { | 982 { |
950 case BIT_IOR_EXPR: | 983 case BIT_IOR_EXPR: |
951 res = double_int_ior (op1, op2); | 984 res = wi::bit_or (arg1, arg2); |
952 break; | 985 break; |
953 | 986 |
954 case BIT_XOR_EXPR: | 987 case BIT_XOR_EXPR: |
955 res = double_int_xor (op1, op2); | 988 res = wi::bit_xor (arg1, arg2); |
956 break; | 989 break; |
957 | 990 |
958 case BIT_AND_EXPR: | 991 case BIT_AND_EXPR: |
959 res = double_int_and (op1, op2); | 992 res = wi::bit_and (arg1, arg2); |
960 break; | 993 break; |
961 | 994 |
962 case RSHIFT_EXPR: | 995 case RSHIFT_EXPR: |
963 res = double_int_rshift (op1, double_int_to_shwi (op2), | 996 case LSHIFT_EXPR: |
964 TYPE_PRECISION (type), !uns); | 997 if (wi::neg_p (arg2)) |
998 { | |
999 arg2 = -arg2; | |
1000 if (code == RSHIFT_EXPR) | |
1001 code = LSHIFT_EXPR; | |
1002 else | |
1003 code = RSHIFT_EXPR; | |
1004 } | |
1005 | |
1006 if (code == RSHIFT_EXPR) | |
1007 /* It's unclear from the C standard whether shifts can overflow. | |
1008 The following code ignores overflow; perhaps a C standard | |
1009 interpretation ruling is needed. */ | |
1010 res = wi::rshift (arg1, arg2, sign); | |
1011 else | |
1012 res = wi::lshift (arg1, arg2); | |
965 break; | 1013 break; |
966 | 1014 |
967 case LSHIFT_EXPR: | 1015 case RROTATE_EXPR: |
968 /* It's unclear from the C standard whether shifts can overflow. | 1016 case LROTATE_EXPR: |
969 The following code ignores overflow; perhaps a C standard | 1017 if (wi::neg_p (arg2)) |
970 interpretation ruling is needed. */ | 1018 { |
971 res = double_int_lshift (op1, double_int_to_shwi (op2), | 1019 arg2 = -arg2; |
972 TYPE_PRECISION (type), !uns); | 1020 if (code == RROTATE_EXPR) |
1021 code = LROTATE_EXPR; | |
1022 else | |
1023 code = RROTATE_EXPR; | |
1024 } | |
1025 | |
1026 if (code == RROTATE_EXPR) | |
1027 res = wi::rrotate (arg1, arg2); | |
1028 else | |
1029 res = wi::lrotate (arg1, arg2); | |
973 break; | 1030 break; |
974 | 1031 |
975 case RROTATE_EXPR: | 1032 case PLUS_EXPR: |
976 res = double_int_rrotate (op1, double_int_to_shwi (op2), | 1033 res = wi::add (arg1, arg2, sign, &overflow); |
977 TYPE_PRECISION (type)); | |
978 break; | 1034 break; |
979 | 1035 |
980 case LROTATE_EXPR: | 1036 case MINUS_EXPR: |
981 res = double_int_lrotate (op1, double_int_to_shwi (op2), | 1037 res = wi::sub (arg1, arg2, sign, &overflow); |
982 TYPE_PRECISION (type)); | |
983 break; | 1038 break; |
984 | 1039 |
985 case PLUS_EXPR: | 1040 case MULT_EXPR: |
986 overflow = add_double (op1.low, op1.high, op2.low, op2.high, | 1041 res = wi::mul (arg1, arg2, sign, &overflow); |
987 &res.low, &res.high); | |
988 break; | 1042 break; |
989 | 1043 |
990 case MINUS_EXPR: | 1044 case MULT_HIGHPART_EXPR: |
991 neg_double (op2.low, op2.high, &res.low, &res.high); | 1045 res = wi::mul_high (arg1, arg2, sign); |
992 add_double (op1.low, op1.high, res.low, res.high, | |
993 &res.low, &res.high); | |
994 overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high); | |
995 break; | 1046 break; |
996 | 1047 |
997 case MULT_EXPR: | 1048 case TRUNC_DIV_EXPR: |
998 overflow = mul_double (op1.low, op1.high, op2.low, op2.high, | 1049 case EXACT_DIV_EXPR: |
999 &res.low, &res.high); | 1050 if (arg2 == 0) |
1051 return NULL_TREE; | |
1052 res = wi::div_trunc (arg1, arg2, sign, &overflow); | |
1000 break; | 1053 break; |
1001 | 1054 |
1002 case TRUNC_DIV_EXPR: | 1055 case FLOOR_DIV_EXPR: |
1003 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR: | 1056 if (arg2 == 0) |
1004 case EXACT_DIV_EXPR: | 1057 return NULL_TREE; |
1005 /* This is a shortcut for a common special case. */ | 1058 res = wi::div_floor (arg1, arg2, sign, &overflow); |
1006 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0 | 1059 break; |
1007 && !TREE_OVERFLOW (arg1) | 1060 |
1008 && !TREE_OVERFLOW (arg2) | 1061 case CEIL_DIV_EXPR: |
1009 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0) | 1062 if (arg2 == 0) |
1010 { | 1063 return NULL_TREE; |
1011 if (code == CEIL_DIV_EXPR) | 1064 res = wi::div_ceil (arg1, arg2, sign, &overflow); |
1012 op1.low += op2.low - 1; | 1065 break; |
1013 | |
1014 res.low = op1.low / op2.low, res.high = 0; | |
1015 break; | |
1016 } | |
1017 | |
1018 /* ... fall through ... */ | |
1019 | 1066 |
1020 case ROUND_DIV_EXPR: | 1067 case ROUND_DIV_EXPR: |
1021 if (double_int_zero_p (op2)) | 1068 if (arg2 == 0) |
1022 return NULL_TREE; | 1069 return NULL_TREE; |
1023 if (double_int_one_p (op2)) | 1070 res = wi::div_round (arg1, arg2, sign, &overflow); |
1024 { | |
1025 res = op1; | |
1026 break; | |
1027 } | |
1028 if (double_int_equal_p (op1, op2) | |
1029 && ! double_int_zero_p (op1)) | |
1030 { | |
1031 res = double_int_one; | |
1032 break; | |
1033 } | |
1034 overflow = div_and_round_double (code, uns, | |
1035 op1.low, op1.high, op2.low, op2.high, | |
1036 &res.low, &res.high, | |
1037 &tmp.low, &tmp.high); | |
1038 break; | 1071 break; |
1039 | 1072 |
1040 case TRUNC_MOD_EXPR: | 1073 case TRUNC_MOD_EXPR: |
1041 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR: | 1074 if (arg2 == 0) |
1042 /* This is a shortcut for a common special case. */ | 1075 return NULL_TREE; |
1043 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0 | 1076 res = wi::mod_trunc (arg1, arg2, sign, &overflow); |
1044 && !TREE_OVERFLOW (arg1) | 1077 break; |
1045 && !TREE_OVERFLOW (arg2) | 1078 |
1046 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0) | 1079 case FLOOR_MOD_EXPR: |
1047 { | 1080 if (arg2 == 0) |
1048 if (code == CEIL_MOD_EXPR) | 1081 return NULL_TREE; |
1049 op1.low += op2.low - 1; | 1082 res = wi::mod_floor (arg1, arg2, sign, &overflow); |
1050 res.low = op1.low % op2.low, res.high = 0; | 1083 break; |
1051 break; | 1084 |
1052 } | 1085 case CEIL_MOD_EXPR: |
1053 | 1086 if (arg2 == 0) |
1054 /* ... fall through ... */ | 1087 return NULL_TREE; |
1088 res = wi::mod_ceil (arg1, arg2, sign, &overflow); | |
1089 break; | |
1055 | 1090 |
1056 case ROUND_MOD_EXPR: | 1091 case ROUND_MOD_EXPR: |
1057 if (double_int_zero_p (op2)) | 1092 if (arg2 == 0) |
1058 return NULL_TREE; | 1093 return NULL_TREE; |
1059 overflow = div_and_round_double (code, uns, | 1094 res = wi::mod_round (arg1, arg2, sign, &overflow); |
1060 op1.low, op1.high, op2.low, op2.high, | |
1061 &tmp.low, &tmp.high, | |
1062 &res.low, &res.high); | |
1063 break; | 1095 break; |
1064 | 1096 |
1065 case MIN_EXPR: | 1097 case MIN_EXPR: |
1066 res = double_int_min (op1, op2, uns); | 1098 res = wi::min (arg1, arg2, sign); |
1067 break; | 1099 break; |
1068 | 1100 |
1069 case MAX_EXPR: | 1101 case MAX_EXPR: |
1070 res = double_int_max (op1, op2, uns); | 1102 res = wi::max (arg1, arg2, sign); |
1071 break; | 1103 break; |
1072 | 1104 |
1073 default: | 1105 default: |
1074 return NULL_TREE; | 1106 return NULL_TREE; |
1075 } | 1107 } |
1076 | 1108 |
1077 if (notrunc) | 1109 t = force_fit_type (type, res, overflowable, |
1078 { | 1110 (((sign == SIGNED || overflowable == -1) |
1079 t = build_int_cst_wide (TREE_TYPE (arg1), res.low, res.high); | 1111 && overflow) |
1080 | 1112 | TREE_OVERFLOW (parg1) | TREE_OVERFLOW (parg2))); |
1081 /* Propagate overflow flags ourselves. */ | |
1082 if (((!uns || is_sizetype) && overflow) | |
1083 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)) | |
1084 { | |
1085 t = copy_node (t); | |
1086 TREE_OVERFLOW (t) = 1; | |
1087 } | |
1088 } | |
1089 else | |
1090 t = force_fit_type_double (TREE_TYPE (arg1), res, 1, | |
1091 ((!uns || is_sizetype) && overflow) | |
1092 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)); | |
1093 | 1113 |
1094 return t; | 1114 return t; |
1115 } | |
1116 | |
1117 tree | |
1118 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2) | |
1119 { | |
1120 return int_const_binop_1 (code, arg1, arg2, 1); | |
1095 } | 1121 } |
1096 | 1122 |
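One behavioral detail of the rewritten shift and rotate cases: a negative count is folded by negating it and flipping direction, so x << -2 is evaluated as x >> 2 at compile time instead of falling into target-specific behavior. The normalization in isolation (hypothetical helper mirroring the structure around the wi:: calls):

```c
#include <stdint.h>

/* Shift by a possibly negative count, normalized the way
   int_const_binop_1 now does it: a negative left shift becomes a
   right shift of the negated count, and vice versa.  Logical
   (unsigned) shifts; the caller keeps the count's magnitude
   below 32.  */
static uint32_t
shift_normalized (uint32_t x, int count, int left)
{
  if (count < 0)
    {
      count = -count;
      left = !left;
    }
  return left ? x << count : x >> count;
}
```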
1097 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new | 1123 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new |
1098 constant. We assume ARG1 and ARG2 have the same data type, or at least | 1124 constant. We assume ARG1 and ARG2 have the same data type, or at least |
1099 are the same kind of constant and the same machine mode. Return zero if | 1125 are the same kind of constant and the same machine mode. Return zero if |
1107 return NULL_TREE; | 1133 return NULL_TREE; |
1108 | 1134 |
1109 STRIP_NOPS (arg1); | 1135 STRIP_NOPS (arg1); |
1110 STRIP_NOPS (arg2); | 1136 STRIP_NOPS (arg2); |
1111 | 1137 |
1112 if (TREE_CODE (arg1) == INTEGER_CST) | 1138 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST) |
1113 return int_const_binop (code, arg1, arg2, 0); | 1139 { |
1114 | 1140 if (code == POINTER_PLUS_EXPR) |
1115 if (TREE_CODE (arg1) == REAL_CST) | 1141 return int_const_binop (PLUS_EXPR, |
1116 { | 1142 arg1, fold_convert (TREE_TYPE (arg1), arg2)); |
1117 enum machine_mode mode; | 1143 |
1144 return int_const_binop (code, arg1, arg2); | |
1145 } | |
1146 | |
1147 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST) | |
1148 { | |
1149 machine_mode mode; | |
1118 REAL_VALUE_TYPE d1; | 1150 REAL_VALUE_TYPE d1; |
1119 REAL_VALUE_TYPE d2; | 1151 REAL_VALUE_TYPE d2; |
1120 REAL_VALUE_TYPE value; | 1152 REAL_VALUE_TYPE value; |
1121 REAL_VALUE_TYPE result; | 1153 REAL_VALUE_TYPE result; |
1122 bool inexact; | 1154 bool inexact; |
1142 | 1174 |
1143 type = TREE_TYPE (arg1); | 1175 type = TREE_TYPE (arg1); |
1144 mode = TYPE_MODE (type); | 1176 mode = TYPE_MODE (type); |
1145 | 1177 |
1146 /* Don't perform operation if we honor signaling NaNs and | 1178 /* Don't perform operation if we honor signaling NaNs and |
1147 either operand is a NaN. */ | 1179 either operand is a signaling NaN. */ |
1148 if (HONOR_SNANS (mode) | 1180 if (HONOR_SNANS (mode) |
1149 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2))) | 1181 && (REAL_VALUE_ISSIGNALING_NAN (d1) |
1182 || REAL_VALUE_ISSIGNALING_NAN (d2))) | |
1150 return NULL_TREE; | 1183 return NULL_TREE; |
1151 | 1184 |
1152 /* Don't perform operation if it would raise a division | 1185 /* Don't perform operation if it would raise a division |
1153 by zero exception. */ | 1186 by zero exception. */ |
1154 if (code == RDIV_EXPR | 1187 if (code == RDIV_EXPR |
1155 && REAL_VALUES_EQUAL (d2, dconst0) | 1188 && real_equal (&d2, &dconst0) |
1156 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode))) | 1189 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode))) |
1157 return NULL_TREE; | 1190 return NULL_TREE; |
1158 | 1191 |
1159 /* If either operand is a NaN, just return it. Otherwise, set up | 1192 /* If either operand is a NaN, just return it. Otherwise, set up |
1160 for floating-point trap; we return an overflow. */ | 1193 for floating-point trap; we return an overflow. */ |
1161 if (REAL_VALUE_ISNAN (d1)) | 1194 if (REAL_VALUE_ISNAN (d1)) |
1162 return arg1; | 1195 { |
1196 /* Make resulting NaN value to be qNaN when flag_signaling_nans | |
1197 is off. */ | |
1198 d1.signalling = 0; | |
1199 t = build_real (type, d1); | |
1200 return t; | |
1201 } | |
1163 else if (REAL_VALUE_ISNAN (d2)) | 1202 else if (REAL_VALUE_ISNAN (d2)) |
1164 return arg2; | 1203 { |
1204 /* Make resulting NaN value to be qNaN when flag_signaling_nans | |
1205 is off. */ | |
1206 d2.signalling = 0; | |
1207 t = build_real (type, d2); | |
1208 return t; | |
1209 } | |
1165 | 1210 |
1166 inexact = real_arithmetic (&value, code, &d1, &d2); | 1211 inexact = real_arithmetic (&value, code, &d1, &d2); |
1167 real_convert (&result, mode, &value); | 1212 real_convert (&result, mode, &value); |
1168 | 1213 |
1169 /* Don't constant fold this floating point operation if | 1214 /* Don't constant fold this floating point operation if |
1204 { | 1249 { |
1205 case PLUS_EXPR: | 1250 case PLUS_EXPR: |
1206 case MINUS_EXPR: | 1251 case MINUS_EXPR: |
1207 case MULT_EXPR: | 1252 case MULT_EXPR: |
1208 case TRUNC_DIV_EXPR: | 1253 case TRUNC_DIV_EXPR: |
1254 if (TREE_CODE (arg2) != FIXED_CST) | |
1255 return NULL_TREE; | |
1209 f2 = TREE_FIXED_CST (arg2); | 1256 f2 = TREE_FIXED_CST (arg2); |
1210 break; | 1257 break; |
1211 | 1258 |
1212 case LSHIFT_EXPR: | 1259 case LSHIFT_EXPR: |
1213 case RSHIFT_EXPR: | 1260 case RSHIFT_EXPR: |
1214 f2.data.high = TREE_INT_CST_HIGH (arg2); | 1261 { |
1215 f2.data.low = TREE_INT_CST_LOW (arg2); | 1262 if (TREE_CODE (arg2) != INTEGER_CST) |
1216 f2.mode = SImode; | 1263 return NULL_TREE; |
1264 wi::tree_to_wide_ref w2 = wi::to_wide (arg2); | |
1265 f2.data.high = w2.elt (1); | |
1266 f2.data.low = w2.ulow (); | |
1267 f2.mode = SImode; | |
1268 } | |
1217 break; | 1269 break; |
1218 | 1270 |
1219 default: | 1271 default: |
1220 return NULL_TREE; | 1272 return NULL_TREE; |
1221 } | 1273 } |
1229 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)) | 1281 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)) |
1230 TREE_OVERFLOW (t) = 1; | 1282 TREE_OVERFLOW (t) = 1; |
1231 return t; | 1283 return t; |
1232 } | 1284 } |
1233 | 1285 |
1234 if (TREE_CODE (arg1) == COMPLEX_CST) | 1286 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST) |
1235 { | 1287 { |
1236 tree type = TREE_TYPE (arg1); | 1288 tree type = TREE_TYPE (arg1); |
1237 tree r1 = TREE_REALPART (arg1); | 1289 tree r1 = TREE_REALPART (arg1); |
1238 tree i1 = TREE_IMAGPART (arg1); | 1290 tree i1 = TREE_IMAGPART (arg1); |
1239 tree r2 = TREE_REALPART (arg2); | 1291 tree r2 = TREE_REALPART (arg2); |
1265 case RDIV_EXPR: | 1317 case RDIV_EXPR: |
1266 if (COMPLEX_FLOAT_TYPE_P (type)) | 1318 if (COMPLEX_FLOAT_TYPE_P (type)) |
1267 return do_mpc_arg2 (arg1, arg2, type, | 1319 return do_mpc_arg2 (arg1, arg2, type, |
1268 /* do_nonfinite= */ folding_initializer, | 1320 /* do_nonfinite= */ folding_initializer, |
1269 mpc_div); | 1321 mpc_div); |
1270 /* Fallthru ... */ | 1322 /* Fallthru. */ |
1271 case TRUNC_DIV_EXPR: | 1323 case TRUNC_DIV_EXPR: |
1272 case CEIL_DIV_EXPR: | 1324 case CEIL_DIV_EXPR: |
1273 case FLOOR_DIV_EXPR: | 1325 case FLOOR_DIV_EXPR: |
1274 case ROUND_DIV_EXPR: | 1326 case ROUND_DIV_EXPR: |
1275 if (flag_complex_method == 0) | 1327 if (flag_complex_method == 0) |
1358 | 1410 |
1359 if (real && imag) | 1411 if (real && imag) |
1360 return build_complex (type, real, imag); | 1412 return build_complex (type, real, imag); |
1361 } | 1413 } |
1362 | 1414 |
1363 if (TREE_CODE (arg1) == VECTOR_CST) | 1415 if (TREE_CODE (arg1) == VECTOR_CST |
1364 { | 1416 && TREE_CODE (arg2) == VECTOR_CST) |
1365 tree type = TREE_TYPE(arg1); | 1417 { |
1366 int count = TYPE_VECTOR_SUBPARTS (type), i; | 1418 tree type = TREE_TYPE (arg1); |
1367 tree elements1, elements2, list = NULL_TREE; | 1419 int count = VECTOR_CST_NELTS (arg1), i; |
1368 | 1420 |
1369 if(TREE_CODE(arg2) != VECTOR_CST) | 1421 auto_vec<tree, 32> elts (count); |
1370 return NULL_TREE; | |
1371 | |
1372 elements1 = TREE_VECTOR_CST_ELTS (arg1); | |
1373 elements2 = TREE_VECTOR_CST_ELTS (arg2); | |
1374 | |
1375 for (i = 0; i < count; i++) | 1422 for (i = 0; i < count; i++) |
1376 { | 1423 { |
1377 tree elem1, elem2, elem; | 1424 tree elem1 = VECTOR_CST_ELT (arg1, i); |
1378 | 1425 tree elem2 = VECTOR_CST_ELT (arg2, i); |
1379 /* The trailing elements can be empty and should be treated as 0 */ | 1426 |
1380 if(!elements1) | 1427 tree elt = const_binop (code, elem1, elem2); |
1381 elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node); | 1428 |
1382 else | 1429 /* It is possible that const_binop cannot handle the given |
1383 { | 1430 code, in which case it returns NULL_TREE. */ |
1384 elem1 = TREE_VALUE(elements1); | 1431 if (elt == NULL_TREE) |
1385 elements1 = TREE_CHAIN (elements1); | 1432 return NULL_TREE; |
1386 } | 1433 elts.quick_push (elt); |
1387 | 1434 } |
1388 if(!elements2) | 1435 |
1389 elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node); | 1436 return build_vector (type, elts); |
1390 else | 1437 } |
1391 { | 1438 |
1392 elem2 = TREE_VALUE(elements2); | 1439 /* Shifts allow a scalar offset for a vector. */ |
1393 elements2 = TREE_CHAIN (elements2); | 1440 if (TREE_CODE (arg1) == VECTOR_CST |
1394 } | 1441 && TREE_CODE (arg2) == INTEGER_CST) |
1395 | 1442 { |
1396 elem = const_binop (code, elem1, elem2); | 1443 tree type = TREE_TYPE (arg1); |
1397 | 1444 int count = VECTOR_CST_NELTS (arg1), i; |
1398 /* It is possible that const_binop cannot handle the given | 1445 |
1399 code and return NULL_TREE */ | 1446 auto_vec<tree, 32> elts (count); |
1400 if(elem == NULL_TREE) | 1447 for (i = 0; i < count; i++) |
1401 return NULL_TREE; | 1448 { |
1402 | 1449 tree elem1 = VECTOR_CST_ELT (arg1, i); |
1403 list = tree_cons (NULL_TREE, elem, list); | 1450 |
1404 } | 1451 tree elt = const_binop (code, elem1, arg2); |
1405 return build_vector(type, nreverse(list)); | 1452 |
1453 /* It is possible that const_binop cannot handle the given | |
1454 code, in which case it returns NULL_TREE. */ | |
1455 if (elt == NULL_TREE) | |
1456 return NULL_TREE; | |
1457 elts.quick_push (elt); | |
1458 } | |
1459 | |
1460 return build_vector (type, elts); | |
1406 } | 1461 } |
1407 return NULL_TREE; | 1462 return NULL_TREE; |
1408 } | 1463 } |
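Both vector loops above follow the same fold-or-bail pattern: fold each lane with const_binop, and if any lane cannot be folded, give up on the whole vector. A minimal sketch of that pattern in plain C (elem_binop and the flat arrays are hypothetical stand-ins for const_binop and VECTOR_CST, not GCC API):

    #include <stddef.h>

    /* Hypothetical per-lane fold; returns 0 on "cannot fold", the way
       const_binop returns NULL_TREE.  */
    static int elem_binop (long a, long b, long *out)
    {
      *out = a + b;
      return 1;
    }

    /* Fold two constant "vectors" lane by lane, or fail as a whole.  */
    static int fold_vector (const long *a, const long *b, long *out, size_t n)
    {
      for (size_t i = 0; i < n; i++)
        if (!elem_binop (a[i], b[i], &out[i]))
          return 0;   /* mirrors returning NULL_TREE */
      return 1;
    }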
1409 | 1464 |
1410 /* Create a size type INT_CST node with NUMBER sign extended. KIND | 1465 /* Overload that adds a TYPE parameter to be able to dispatch |
1466 to fold_relational_const. */ | |
1467 | |
1468 tree | |
1469 const_binop (enum tree_code code, tree type, tree arg1, tree arg2) | |
1470 { | |
1471 if (TREE_CODE_CLASS (code) == tcc_comparison) | |
1472 return fold_relational_const (code, type, arg1, arg2); | |
1473 | |
1474 /* ??? Until we make the const_binop worker take the type of the | |
1475 result as an argument, put those cases that need it here. */ | |
1476 switch (code) | |
1477 { | |
1478 case COMPLEX_EXPR: | |
1479 if ((TREE_CODE (arg1) == REAL_CST | |
1480 && TREE_CODE (arg2) == REAL_CST) | |
1481 || (TREE_CODE (arg1) == INTEGER_CST | |
1482 && TREE_CODE (arg2) == INTEGER_CST)) | |
1483 return build_complex (type, arg1, arg2); | |
1484 return NULL_TREE; | |
1485 | |
1486 case VEC_PACK_TRUNC_EXPR: | |
1487 case VEC_PACK_FIX_TRUNC_EXPR: | |
1488 { | |
1489 unsigned int out_nelts, in_nelts, i; | |
1490 | |
1491 if (TREE_CODE (arg1) != VECTOR_CST | |
1492 || TREE_CODE (arg2) != VECTOR_CST) | |
1493 return NULL_TREE; | |
1494 | |
1495 in_nelts = VECTOR_CST_NELTS (arg1); | |
1496 out_nelts = in_nelts * 2; | |
1497 gcc_assert (in_nelts == VECTOR_CST_NELTS (arg2) | |
1498 && out_nelts == TYPE_VECTOR_SUBPARTS (type)); | |
1499 | |
1500 auto_vec<tree, 32> elts (out_nelts); | |
1501 for (i = 0; i < out_nelts; i++) | |
1502 { | |
1503 tree elt = (i < in_nelts | |
1504 ? VECTOR_CST_ELT (arg1, i) | |
1505 : VECTOR_CST_ELT (arg2, i - in_nelts)); | |
1506 elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR | |
1507 ? NOP_EXPR : FIX_TRUNC_EXPR, | |
1508 TREE_TYPE (type), elt); | |
1509 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt)) | |
1510 return NULL_TREE; | |
1511 elts.quick_push (elt); | |
1512 } | |
1513 | |
1514 return build_vector (type, elts); | |
1515 } | |
1516 | |
1517 case VEC_WIDEN_MULT_LO_EXPR: | |
1518 case VEC_WIDEN_MULT_HI_EXPR: | |
1519 case VEC_WIDEN_MULT_EVEN_EXPR: | |
1520 case VEC_WIDEN_MULT_ODD_EXPR: | |
1521 { | |
1522 unsigned int out_nelts, in_nelts, out, ofs, scale; | |
1523 | |
1524 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST) | |
1525 return NULL_TREE; | |
1526 | |
1527 in_nelts = VECTOR_CST_NELTS (arg1); | |
1528 out_nelts = in_nelts / 2; | |
1529 gcc_assert (in_nelts == VECTOR_CST_NELTS (arg2) | |
1530 && out_nelts == TYPE_VECTOR_SUBPARTS (type)); | |
1531 | |
1532 if (code == VEC_WIDEN_MULT_LO_EXPR) | |
1533 scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0; | |
1534 else if (code == VEC_WIDEN_MULT_HI_EXPR) | |
1535 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts; | |
1536 else if (code == VEC_WIDEN_MULT_EVEN_EXPR) | |
1537 scale = 1, ofs = 0; | |
1538 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */ | |
1539 scale = 1, ofs = 1; | |
1540 | |
1541 auto_vec<tree, 32> elts (out_nelts); | |
1542 for (out = 0; out < out_nelts; out++) | |
1543 { | |
1544 unsigned int in = (out << scale) + ofs; | |
1545 tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), | |
1546 VECTOR_CST_ELT (arg1, in)); | |
1547 tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), | |
1548 VECTOR_CST_ELT (arg2, in)); | |
1549 | |
1550 if (t1 == NULL_TREE || t2 == NULL_TREE) | |
1551 return NULL_TREE; | |
1552 tree elt = const_binop (MULT_EXPR, t1, t2); | |
1553 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt)) | |
1554 return NULL_TREE; | |
1555 elts.quick_push (elt); | |
1556 } | |
1557 | |
1558 return build_vector (type, elts); | |
1559 } | |
1560 | |
1561 default:; | |
1562 } | |
1563 | |
1564 if (TREE_CODE_CLASS (code) != tcc_binary) | |
1565 return NULL_TREE; | |
1566 | |
1567 /* Make sure type and arg0 have the same saturating flag. */ | |
1568 gcc_checking_assert (TYPE_SATURATING (type) | |
1569 == TYPE_SATURATING (TREE_TYPE (arg1))); | |
1570 | |
1571 return const_binop (code, arg1, arg2); | |
1572 } | |
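The lane selection in the VEC_WIDEN_MULT_* case above reduces to in = (out << scale) + ofs. A small self-contained check of that mapping (my example; the lane count is chosen arbitrarily):

    #include <stdio.h>

    int main (void)
    {
      unsigned in_nelts = 4, out_nelts = in_nelts / 2, out;
      /* EVEN: scale=1, ofs=0 -> lanes 0,2.  ODD: scale=1, ofs=1 -> lanes 1,3.
         LO/HI: scale=0, ofs is 0 or out_nelts depending on endianness.  */
      unsigned scale = 1, ofs = 1;   /* VEC_WIDEN_MULT_ODD_EXPR */
      for (out = 0; out < out_nelts; out++)
        printf ("out %u <- in %u\n", out, (out << scale) + ofs);
      return 0;
    }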
1573 | |
1574 /* Compute CODE ARG0 with resulting type TYPE, with ARG0 being constant. | |
1575 Return zero if computing the constant is not possible. */ | |
1576 | |
1577 tree | |
1578 const_unop (enum tree_code code, tree type, tree arg0) | |
1579 { | |
1580 /* Don't perform the operation, other than NEGATE and ABS, if | |
1581 flag_signaling_nans is on and the operand is a signaling NaN. */ | |
1582 if (TREE_CODE (arg0) == REAL_CST | |
1583 && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))) | |
1584 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0)) | |
1585 && code != NEGATE_EXPR | |
1586 && code != ABS_EXPR) | |
1587 return NULL_TREE; | |
1588 | |
1589 switch (code) | |
1590 { | |
1591 CASE_CONVERT: | |
1592 case FLOAT_EXPR: | |
1593 case FIX_TRUNC_EXPR: | |
1594 case FIXED_CONVERT_EXPR: | |
1595 return fold_convert_const (code, type, arg0); | |
1596 | |
1597 case ADDR_SPACE_CONVERT_EXPR: | |
1598 /* If the source address is 0, and the source address space | |
1599 cannot have a valid object at 0, fold to dest type null. */ | |
1600 if (integer_zerop (arg0) | |
1601 && !(targetm.addr_space.zero_address_valid | |
1602 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))))) | |
1603 return fold_convert_const (code, type, arg0); | |
1604 break; | |
1605 | |
1606 case VIEW_CONVERT_EXPR: | |
1607 return fold_view_convert_expr (type, arg0); | |
1608 | |
1609 case NEGATE_EXPR: | |
1610 { | |
1611 /* Can't call fold_negate_const directly here as that doesn't | |
1612 handle all cases and we might not be able to negate some | |
1613 constants. */ | |
1614 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0); | |
1615 if (tem && CONSTANT_CLASS_P (tem)) | |
1616 return tem; | |
1617 break; | |
1618 } | |
1619 | |
1620 case ABS_EXPR: | |
1621 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST) | |
1622 return fold_abs_const (arg0, type); | |
1623 break; | |
1624 | |
1625 case CONJ_EXPR: | |
1626 if (TREE_CODE (arg0) == COMPLEX_CST) | |
1627 { | |
1628 tree ipart = fold_negate_const (TREE_IMAGPART (arg0), | |
1629 TREE_TYPE (type)); | |
1630 return build_complex (type, TREE_REALPART (arg0), ipart); | |
1631 } | |
1632 break; | |
1633 | |
1634 case BIT_NOT_EXPR: | |
1635 if (TREE_CODE (arg0) == INTEGER_CST) | |
1636 return fold_not_const (arg0, type); | |
1637 /* Perform BIT_NOT_EXPR on each element individually. */ | |
1638 else if (TREE_CODE (arg0) == VECTOR_CST) | |
1639 { | |
1640 tree elem; | |
1641 unsigned count = VECTOR_CST_NELTS (arg0), i; | |
1642 | |
1643 auto_vec<tree, 32> elements (count); | |
1644 for (i = 0; i < count; i++) | |
1645 { | |
1646 elem = VECTOR_CST_ELT (arg0, i); | |
1647 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem); | |
1648 if (elem == NULL_TREE) | |
1649 break; | |
1650 elements.quick_push (elem); | |
1651 } | |
1652 if (i == count) | |
1653 return build_vector (type, elements); | |
1654 } | |
1655 break; | |
1656 | |
1657 case TRUTH_NOT_EXPR: | |
1658 if (TREE_CODE (arg0) == INTEGER_CST) | |
1659 return constant_boolean_node (integer_zerop (arg0), type); | |
1660 break; | |
1661 | |
1662 case REALPART_EXPR: | |
1663 if (TREE_CODE (arg0) == COMPLEX_CST) | |
1664 return fold_convert (type, TREE_REALPART (arg0)); | |
1665 break; | |
1666 | |
1667 case IMAGPART_EXPR: | |
1668 if (TREE_CODE (arg0) == COMPLEX_CST) | |
1669 return fold_convert (type, TREE_IMAGPART (arg0)); | |
1670 break; | |
1671 | |
1672 case VEC_UNPACK_LO_EXPR: | |
1673 case VEC_UNPACK_HI_EXPR: | |
1674 case VEC_UNPACK_FLOAT_LO_EXPR: | |
1675 case VEC_UNPACK_FLOAT_HI_EXPR: | |
1676 { | |
1677 unsigned int out_nelts, in_nelts, i; | |
1678 enum tree_code subcode; | |
1679 | |
1680 if (TREE_CODE (arg0) != VECTOR_CST) | |
1681 return NULL_TREE; | |
1682 | |
1683 in_nelts = VECTOR_CST_NELTS (arg0); | |
1684 out_nelts = in_nelts / 2; | |
1685 gcc_assert (out_nelts == TYPE_VECTOR_SUBPARTS (type)); | |
1686 | |
1687 unsigned int offset = 0; | |
1688 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR | |
1689 || code == VEC_UNPACK_FLOAT_LO_EXPR)) | |
1690 offset = out_nelts; | |
1691 | |
1692 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR) | |
1693 subcode = NOP_EXPR; | |
1694 else | |
1695 subcode = FLOAT_EXPR; | |
1696 | |
1697 auto_vec<tree, 32> elts (out_nelts); | |
1698 for (i = 0; i < out_nelts; i++) | |
1699 { | |
1700 tree elt = fold_convert_const (subcode, TREE_TYPE (type), | |
1701 VECTOR_CST_ELT (arg0, i + offset)); | |
1702 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt)) | |
1703 return NULL_TREE; | |
1704 elts.quick_push (elt); | |
1705 } | |
1706 | |
1707 return build_vector (type, elts); | |
1708 } | |
1709 | |
1710 case REDUC_MIN_EXPR: | |
1711 case REDUC_MAX_EXPR: | |
1712 case REDUC_PLUS_EXPR: | |
1713 { | |
1714 unsigned int nelts, i; | |
1715 enum tree_code subcode; | |
1716 | |
1717 if (TREE_CODE (arg0) != VECTOR_CST) | |
1718 return NULL_TREE; | |
1719 nelts = VECTOR_CST_NELTS (arg0); | |
1720 | |
1721 switch (code) | |
1722 { | |
1723 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break; | |
1724 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break; | |
1725 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break; | |
1726 default: gcc_unreachable (); | |
1727 } | |
1728 | |
1729 tree res = VECTOR_CST_ELT (arg0, 0); | |
1730 for (i = 1; i < nelts; i++) | |
1731 { | |
1732 res = const_binop (subcode, res, VECTOR_CST_ELT (arg0, i)); | |
1733 if (res == NULL_TREE || !CONSTANT_CLASS_P (res)) | |
1734 return NULL_TREE; | |
1735 } | |
1736 | |
1737 return res; | |
1738 } | |
1739 | |
1740 default: | |
1741 break; | |
1742 } | |
1743 | |
1744 return NULL_TREE; | |
1745 } | |
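The REDUC_* case above folds a constant vector to a scalar by repeatedly applying the scalar operation, e.g. REDUC_PLUS_EXPR over {1, 2, 3, 4}. The same accumulation in standalone C (illustration only):

    #include <stdio.h>

    int main (void)
    {
      int v[4] = { 1, 2, 3, 4 };
      int res = v[0];
      for (int i = 1; i < 4; i++)
        res += v[i];        /* const_binop (PLUS_EXPR, res, elt) per element */
      printf ("%d\n", res); /* 10 */
      return 0;
    }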
1746 | |
1747 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND | |
1411 indicates which particular sizetype to create. */ | 1748 indicates which particular sizetype to create. */ |
1412 | 1749 |
1413 tree | 1750 tree |
1414 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind) | 1751 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind) |
1415 { | 1752 { |
1452 { | 1789 { |
1453 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0)) | 1790 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0)) |
1454 return arg1; | 1791 return arg1; |
1455 } | 1792 } |
1456 | 1793 |
1457 /* Handle general case of two integer constants. */ | 1794 /* Handle the general case of two integer constants. For sizetype |
1458 return int_const_binop (code, arg0, arg1, 0); | 1795 constant calculations we always want to know about overflow, |
1796 even in the unsigned case. */ | |
1797 return int_const_binop_1 (code, arg0, arg1, -1); | |
1459 } | 1798 } |
1460 | 1799 |
1461 return fold_build2_loc (loc, code, type, arg0, arg1); | 1800 return fold_build2_loc (loc, code, type, arg0, arg1); |
1462 } | 1801 } |
1463 | 1802 |
1514 INTEGER_CST to another integer type. */ | 1853 INTEGER_CST to another integer type. */ |
1515 | 1854 |
1516 static tree | 1855 static tree |
1517 fold_convert_const_int_from_int (tree type, const_tree arg1) | 1856 fold_convert_const_int_from_int (tree type, const_tree arg1) |
1518 { | 1857 { |
1519 tree t; | |
1520 | |
1521 /* Given an integer constant, make new constant with new type, | 1858 /* Given an integer constant, make new constant with new type, |
1522 appropriately sign-extended or truncated. */ | 1859 appropriately sign-extended or truncated. Use widest_int |
1523 t = force_fit_type_double (type, tree_to_double_int (arg1), | 1860 so that any extension is done according to ARG1's type. */ |
1524 !POINTER_TYPE_P (TREE_TYPE (arg1)), | 1861 return force_fit_type (type, wi::to_widest (arg1), |
1525 (TREE_INT_CST_HIGH (arg1) < 0 | 1862 !POINTER_TYPE_P (TREE_TYPE (arg1)), |
1526 && (TYPE_UNSIGNED (type) | 1863 TREE_OVERFLOW (arg1)); |
1527 < TYPE_UNSIGNED (TREE_TYPE (arg1)))) | |
1528 | TREE_OVERFLOW (arg1)); | |
1529 | |
1530 return t; | |
1531 } | 1864 } |
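"Appropriately sign-extended or truncated" is the usual modular narrowing: the constant keeps the low-order bits of the target type. A standalone illustration (the out-of-range signed conversion is implementation-defined in C; GCC wraps):

    #include <stdio.h>

    int main (void)
    {
      signed char c = (signed char) 200;     /* 200 = 0xC8, low 8 bits -> -56 */
      unsigned char u = (unsigned char) 300; /* 300 mod 256 -> 44 */
      printf ("%d %u\n", c, u);
      return 0;
    }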
1532 | 1865 |
1533 /* A subroutine of fold_convert_const handling conversions a REAL_CST | 1866 /* A subroutine of fold_convert_const handling conversions a REAL_CST |
1534 to an integer type. */ | 1867 to an integer type. */ |
1535 | 1868 |
1536 static tree | 1869 static tree |
1537 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1) | 1870 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1) |
1538 { | 1871 { |
1539 int overflow = 0; | 1872 bool overflow = false; |
1540 tree t; | 1873 tree t; |
1541 | 1874 |
1542 /* The following code implements the floating point to integer | 1875 /* The following code implements the floating point to integer |
1543 conversion rules required by the Java Language Specification, | 1876 conversion rules required by the Java Language Specification, |
1544 that IEEE NaNs are mapped to zero and values that overflow | 1877 that IEEE NaNs are mapped to zero and values that overflow |
1546 INT_MAX are mapped to INT_MAX, and values less than INT_MIN | 1879 INT_MAX are mapped to INT_MAX, and values less than INT_MIN |
1547 are mapped to INT_MIN. These semantics are allowed by the | 1880 are mapped to INT_MIN. These semantics are allowed by the |
1548 C and C++ standards that simply state that the behavior of | 1881 C and C++ standards that simply state that the behavior of |
1549 FP-to-integer conversion is unspecified upon overflow. */ | 1882 FP-to-integer conversion is unspecified upon overflow. */ |
1550 | 1883 |
1551 double_int val; | 1884 wide_int val; |
1552 REAL_VALUE_TYPE r; | 1885 REAL_VALUE_TYPE r; |
1553 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1); | 1886 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1); |
1554 | 1887 |
1555 switch (code) | 1888 switch (code) |
1556 { | 1889 { |
1563 } | 1896 } |
1564 | 1897 |
1565 /* If R is NaN, return zero and show we have an overflow. */ | 1898 /* If R is NaN, return zero and show we have an overflow. */ |
1566 if (REAL_VALUE_ISNAN (r)) | 1899 if (REAL_VALUE_ISNAN (r)) |
1567 { | 1900 { |
1568 overflow = 1; | 1901 overflow = true; |
1569 val = double_int_zero; | 1902 val = wi::zero (TYPE_PRECISION (type)); |
1570 } | 1903 } |
1571 | 1904 |
1572 /* See if R is less than the lower bound or greater than the | 1905 /* See if R is less than the lower bound or greater than the |
1573 upper bound. */ | 1906 upper bound. */ |
1574 | 1907 |
1575 if (! overflow) | 1908 if (! overflow) |
1576 { | 1909 { |
1577 tree lt = TYPE_MIN_VALUE (type); | 1910 tree lt = TYPE_MIN_VALUE (type); |
1578 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt); | 1911 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt); |
1579 if (REAL_VALUES_LESS (r, l)) | 1912 if (real_less (&r, &l)) |
1580 { | 1913 { |
1581 overflow = 1; | 1914 overflow = true; |
1582 val = tree_to_double_int (lt); | 1915 val = wi::to_wide (lt); |
1583 } | 1916 } |
1584 } | 1917 } |
1585 | 1918 |
1586 if (! overflow) | 1919 if (! overflow) |
1587 { | 1920 { |
1588 tree ut = TYPE_MAX_VALUE (type); | 1921 tree ut = TYPE_MAX_VALUE (type); |
1589 if (ut) | 1922 if (ut) |
1590 { | 1923 { |
1591 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut); | 1924 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut); |
1592 if (REAL_VALUES_LESS (u, r)) | 1925 if (real_less (&u, &r)) |
1593 { | 1926 { |
1594 overflow = 1; | 1927 overflow = true; |
1595 val = tree_to_double_int (ut); | 1928 val = wi::to_wide (ut); |
1596 } | 1929 } |
1597 } | 1930 } |
1598 } | 1931 } |
1599 | 1932 |
1600 if (! overflow) | 1933 if (! overflow) |
1601 real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r); | 1934 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type)); |
1602 | 1935 |
1603 t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1)); | 1936 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1)); |
1604 return t; | 1937 return t; |
1605 } | 1938 } |
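Spelled out for int, the folding rules above give the following (my illustration of the comment; at runtime these out-of-range conversions are undefined in C, but here the operands are constants, so it is the constant folder that decides):

    #include <limits.h>
    #include <stdio.h>

    int main (void)
    {
      printf ("%d\n", (int) __builtin_nan (""));  /* NaN  -> 0 */
      printf ("%d\n", (int) 1e30);                /* huge -> INT_MAX */
      printf ("%d\n", (int) -1e30);               /* tiny -> INT_MIN */
      return 0;
    }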
1606 | 1939 |
1607 /* A subroutine of fold_convert_const handling conversions of a | 1940 /* A subroutine of fold_convert_const handling conversions of a |
1608 FIXED_CST to an integer type. */ | 1941 FIXED_CST to an integer type. */ |
1610 static tree | 1943 static tree |
1611 fold_convert_const_int_from_fixed (tree type, const_tree arg1) | 1944 fold_convert_const_int_from_fixed (tree type, const_tree arg1) |
1612 { | 1945 { |
1613 tree t; | 1946 tree t; |
1614 double_int temp, temp_trunc; | 1947 double_int temp, temp_trunc; |
1615 unsigned int mode; | 1948 scalar_mode mode; |
1616 | 1949 |
1617 /* Right shift FIXED_CST to temp by fbit. */ | 1950 /* Right shift FIXED_CST to temp by fbit. */ |
1618 temp = TREE_FIXED_CST (arg1).data; | 1951 temp = TREE_FIXED_CST (arg1).data; |
1619 mode = TREE_FIXED_CST (arg1).mode; | 1952 mode = TREE_FIXED_CST (arg1).mode; |
1620 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT) | 1953 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT) |
1621 { | 1954 { |
1622 temp = double_int_rshift (temp, GET_MODE_FBIT (mode), | 1955 temp = temp.rshift (GET_MODE_FBIT (mode), |
1623 HOST_BITS_PER_DOUBLE_INT, | 1956 HOST_BITS_PER_DOUBLE_INT, |
1624 SIGNED_FIXED_POINT_MODE_P (mode)); | 1957 SIGNED_FIXED_POINT_MODE_P (mode)); |
1625 | 1958 |
1626 /* Left shift temp to temp_trunc by fbit. */ | 1959 /* Left shift temp to temp_trunc by fbit. */ |
1627 temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode), | 1960 temp_trunc = temp.lshift (GET_MODE_FBIT (mode), |
1628 HOST_BITS_PER_DOUBLE_INT, | 1961 HOST_BITS_PER_DOUBLE_INT, |
1629 SIGNED_FIXED_POINT_MODE_P (mode)); | 1962 SIGNED_FIXED_POINT_MODE_P (mode)); |
1630 } | 1963 } |
1631 else | 1964 else |
1632 { | 1965 { |
1633 temp = double_int_zero; | 1966 temp = double_int_zero; |
1634 temp_trunc = double_int_zero; | 1967 temp_trunc = double_int_zero; |
1635 } | 1968 } |
1636 | 1969 |
1637 /* If FIXED_CST is negative, we need to round the value toward 0. | 1970 /* If FIXED_CST is negative, we need to round the value toward 0. |
1638 We do this by adding 1 to temp when the fractional bits are not zero. */ | 1971 We do this by adding 1 to temp when the fractional bits are not zero. */ |
1639 if (SIGNED_FIXED_POINT_MODE_P (mode) | 1972 if (SIGNED_FIXED_POINT_MODE_P (mode) |
1640 && double_int_negative_p (temp_trunc) | 1973 && temp_trunc.is_negative () |
1641 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc)) | 1974 && TREE_FIXED_CST (arg1).data != temp_trunc) |
1642 temp = double_int_add (temp, double_int_one); | 1975 temp += double_int_one; |
1643 | 1976 |
1644 /* Given a fixed-point constant, make new constant with new type, | 1977 /* Given a fixed-point constant, make new constant with new type, |
1645 appropriately sign-extended or truncated. */ | 1978 appropriately sign-extended or truncated. */ |
1646 t = force_fit_type_double (type, temp, -1, | 1979 t = force_fit_type (type, temp, -1, |
1647 (double_int_negative_p (temp) | 1980 (temp.is_negative () |
1648 && (TYPE_UNSIGNED (type) | 1981 && (TYPE_UNSIGNED (type) |
1649 < TYPE_UNSIGNED (TREE_TYPE (arg1)))) | 1982 < TYPE_UNSIGNED (TREE_TYPE (arg1)))) |
1650 | TREE_OVERFLOW (arg1)); | 1983 | TREE_OVERFLOW (arg1)); |
1651 | 1984 |
1652 return t; | 1985 return t; |
1653 } | 1986 } |
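The round-toward-zero adjustment above, worked through in plain integers: an arithmetic right shift rounds toward minus infinity, so a negative value with nonzero fractional bits needs a +1 correction. Illustration for -2.5 in a format with fbit = 4 (my example; right-shifting a negative value is implementation-defined in C, but GCC shifts arithmetically):

    #include <stdio.h>

    int main (void)
    {
      int fbit = 4;
      int fx = -40;                        /* -2.5 scaled by 2^4 */
      int t = fx >> fbit;                  /* -3: rounded toward -inf */
      if (fx < 0 && t * (1 << fbit) != fx) /* fractional bits were nonzero */
        t += 1;                            /* -2: rounded toward 0 */
      printf ("%d\n", t);
      return 0;
    }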
1654 | 1987 |
1655 /* A subroutine of fold_convert_const handling conversions a REAL_CST | 1988 /* A subroutine of fold_convert_const handling conversions a REAL_CST |
1658 static tree | 1991 static tree |
1659 fold_convert_const_real_from_real (tree type, const_tree arg1) | 1992 fold_convert_const_real_from_real (tree type, const_tree arg1) |
1660 { | 1993 { |
1661 REAL_VALUE_TYPE value; | 1994 REAL_VALUE_TYPE value; |
1662 tree t; | 1995 tree t; |
1996 | |
1997 /* Don't perform the operation if flag_signaling_nans is on | |
1998 and the operand is a signaling NaN. */ | |
1999 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))) | |
2000 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1))) | |
2001 return NULL_TREE; | |
1663 | 2002 |
1664 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1)); | 2003 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1)); |
1665 t = build_real (type, value); | 2004 t = build_real (type, value); |
1666 | 2005 |
1667 /* If converting an infinity or NAN to a representation that doesn't | 2006 /* If converting an infinity or NAN to a representation that doesn't |
1692 fold_convert_const_real_from_fixed (tree type, const_tree arg1) | 2031 fold_convert_const_real_from_fixed (tree type, const_tree arg1) |
1693 { | 2032 { |
1694 REAL_VALUE_TYPE value; | 2033 REAL_VALUE_TYPE value; |
1695 tree t; | 2034 tree t; |
1696 | 2035 |
1697 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1)); | 2036 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type), |
2037 &TREE_FIXED_CST (arg1)); | |
1698 t = build_real (type, value); | 2038 t = build_real (type, value); |
1699 | 2039 |
1700 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1); | 2040 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1); |
1701 return t; | 2041 return t; |
1702 } | 2042 } |
1709 { | 2049 { |
1710 FIXED_VALUE_TYPE value; | 2050 FIXED_VALUE_TYPE value; |
1711 tree t; | 2051 tree t; |
1712 bool overflow_p; | 2052 bool overflow_p; |
1713 | 2053 |
1714 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1), | 2054 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type), |
1715 TYPE_SATURATING (type)); | 2055 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type)); |
1716 t = build_fixed (type, value); | 2056 t = build_fixed (type, value); |
1717 | 2057 |
1718 /* Propagate overflow flags. */ | 2058 /* Propagate overflow flags. */ |
1719 if (overflow_p | TREE_OVERFLOW (arg1)) | 2059 if (overflow_p | TREE_OVERFLOW (arg1)) |
1720 TREE_OVERFLOW (t) = 1; | 2060 TREE_OVERFLOW (t) = 1; |
1728 fold_convert_const_fixed_from_int (tree type, const_tree arg1) | 2068 fold_convert_const_fixed_from_int (tree type, const_tree arg1) |
1729 { | 2069 { |
1730 FIXED_VALUE_TYPE value; | 2070 FIXED_VALUE_TYPE value; |
1731 tree t; | 2071 tree t; |
1732 bool overflow_p; | 2072 bool overflow_p; |
1733 | 2073 double_int di; |
1734 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), | 2074 |
1735 TREE_INT_CST (arg1), | 2075 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2); |
2076 | |
2077 di.low = TREE_INT_CST_ELT (arg1, 0); | |
2078 if (TREE_INT_CST_NUNITS (arg1) == 1) | |
2079 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0; | |
2080 else | |
2081 di.high = TREE_INT_CST_ELT (arg1, 1); | |
2082 | |
2083 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di, | |
1736 TYPE_UNSIGNED (TREE_TYPE (arg1)), | 2084 TYPE_UNSIGNED (TREE_TYPE (arg1)), |
1737 TYPE_SATURATING (type)); | 2085 TYPE_SATURATING (type)); |
1738 t = build_fixed (type, value); | 2086 t = build_fixed (type, value); |
1739 | 2087 |
1740 /* Propagate overflow flags. */ | 2088 /* Propagate overflow flags. */ |
1751 { | 2099 { |
1752 FIXED_VALUE_TYPE value; | 2100 FIXED_VALUE_TYPE value; |
1753 tree t; | 2101 tree t; |
1754 bool overflow_p; | 2102 bool overflow_p; |
1755 | 2103 |
1756 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type), | 2104 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type), |
1757 &TREE_REAL_CST (arg1), | 2105 &TREE_REAL_CST (arg1), |
1758 TYPE_SATURATING (type)); | 2106 TYPE_SATURATING (type)); |
1759 t = build_fixed (type, value); | 2107 t = build_fixed (type, value); |
1760 | 2108 |
1761 /* Propagate overflow flags. */ | 2109 /* Propagate overflow flags. */ |
1799 else if (TREE_CODE (arg1) == INTEGER_CST) | 2147 else if (TREE_CODE (arg1) == INTEGER_CST) |
1800 return fold_convert_const_fixed_from_int (type, arg1); | 2148 return fold_convert_const_fixed_from_int (type, arg1); |
1801 else if (TREE_CODE (arg1) == REAL_CST) | 2149 else if (TREE_CODE (arg1) == REAL_CST) |
1802 return fold_convert_const_fixed_from_real (type, arg1); | 2150 return fold_convert_const_fixed_from_real (type, arg1); |
1803 } | 2151 } |
2152 else if (TREE_CODE (type) == VECTOR_TYPE) | |
2153 { | |
2154 if (TREE_CODE (arg1) == VECTOR_CST | |
2155 && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1)) | |
2156 { | |
2157 int len = VECTOR_CST_NELTS (arg1); | |
2158 tree elttype = TREE_TYPE (type); | |
2159 auto_vec<tree, 32> v (len); | |
2160 for (int i = 0; i < len; ++i) | |
2161 { | |
2162 tree elt = VECTOR_CST_ELT (arg1, i); | |
2163 tree cvt = fold_convert_const (code, elttype, elt); | |
2164 if (cvt == NULL_TREE) | |
2165 return NULL_TREE; | |
2166 v.quick_push (cvt); | |
2167 } | |
2168 return build_vector (type, v); | |
2169 } | |
2170 } | |
1804 return NULL_TREE; | 2171 return NULL_TREE; |
1805 } | 2172 } |
1806 | 2173 |
1807 /* Construct a vector of zero elements of vector type TYPE. */ | 2174 /* Construct a vector of zero elements of vector type TYPE. */ |
1808 | 2175 |
1836 switch (TREE_CODE (type)) | 2203 switch (TREE_CODE (type)) |
1837 { | 2204 { |
1838 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE: | 2205 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE: |
1839 case POINTER_TYPE: case REFERENCE_TYPE: | 2206 case POINTER_TYPE: case REFERENCE_TYPE: |
1840 case OFFSET_TYPE: | 2207 case OFFSET_TYPE: |
1841 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig) | 2208 return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig) |
1842 || TREE_CODE (orig) == OFFSET_TYPE) | 2209 || TREE_CODE (orig) == OFFSET_TYPE); |
1843 return true; | |
1844 return (TREE_CODE (orig) == VECTOR_TYPE | |
1845 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig))); | |
1846 | 2210 |
1847 case REAL_TYPE: | 2211 case REAL_TYPE: |
1848 case FIXED_POINT_TYPE: | 2212 case FIXED_POINT_TYPE: |
1849 case COMPLEX_TYPE: | |
1850 case VECTOR_TYPE: | 2213 case VECTOR_TYPE: |
1851 case VOID_TYPE: | 2214 case VOID_TYPE: |
1852 return TREE_CODE (type) == TREE_CODE (orig); | 2215 return TREE_CODE (type) == TREE_CODE (orig); |
1853 | 2216 |
1854 default: | 2217 default: |
1870 | 2233 |
1871 if (TREE_CODE (arg) == ERROR_MARK | 2234 if (TREE_CODE (arg) == ERROR_MARK |
1872 || TREE_CODE (type) == ERROR_MARK | 2235 || TREE_CODE (type) == ERROR_MARK |
1873 || TREE_CODE (orig) == ERROR_MARK) | 2236 || TREE_CODE (orig) == ERROR_MARK) |
1874 return error_mark_node; | 2237 return error_mark_node; |
1875 | |
1876 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)) | |
1877 return fold_build1_loc (loc, NOP_EXPR, type, arg); | |
1878 | 2238 |
1879 switch (TREE_CODE (type)) | 2239 switch (TREE_CODE (type)) |
1880 { | 2240 { |
1881 case POINTER_TYPE: | 2241 case POINTER_TYPE: |
1882 case REFERENCE_TYPE: | 2242 case REFERENCE_TYPE: |
1898 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig) | 2258 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig) |
1899 || TREE_CODE (orig) == OFFSET_TYPE) | 2259 || TREE_CODE (orig) == OFFSET_TYPE) |
1900 return fold_build1_loc (loc, NOP_EXPR, type, arg); | 2260 return fold_build1_loc (loc, NOP_EXPR, type, arg); |
1901 if (TREE_CODE (orig) == COMPLEX_TYPE) | 2261 if (TREE_CODE (orig) == COMPLEX_TYPE) |
1902 return fold_convert_loc (loc, type, | 2262 return fold_convert_loc (loc, type, |
1903 fold_build1_loc (loc, REALPART_EXPR, | 2263 fold_build1_loc (loc, REALPART_EXPR, |
1904 TREE_TYPE (orig), arg)); | 2264 TREE_TYPE (orig), arg)); |
1905 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE | 2265 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE |
1906 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig))); | 2266 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig))); |
1907 return fold_build1_loc (loc, NOP_EXPR, type, arg); | 2267 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg); |
1908 | 2268 |
1909 case REAL_TYPE: | 2269 case REAL_TYPE: |
1910 if (TREE_CODE (arg) == INTEGER_CST) | 2270 if (TREE_CODE (arg) == INTEGER_CST) |
1911 { | 2271 { |
1912 tem = fold_convert_const (FLOAT_EXPR, type, arg); | 2272 tem = fold_convert_const (FLOAT_EXPR, type, arg); |
2018 || TREE_CODE (orig) == VECTOR_TYPE); | 2378 || TREE_CODE (orig) == VECTOR_TYPE); |
2019 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg); | 2379 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg); |
2020 | 2380 |
2021 case VOID_TYPE: | 2381 case VOID_TYPE: |
2022 tem = fold_ignored_result (arg); | 2382 tem = fold_ignored_result (arg); |
2023 if (TREE_CODE (tem) == MODIFY_EXPR) | |
2024 goto fold_convert_exit; | |
2025 return fold_build1_loc (loc, NOP_EXPR, type, tem); | 2383 return fold_build1_loc (loc, NOP_EXPR, type, tem); |
2026 | 2384 |
2027 default: | 2385 default: |
2386 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)) | |
2387 return fold_build1_loc (loc, NOP_EXPR, type, arg); | |
2028 gcc_unreachable (); | 2388 gcc_unreachable (); |
2029 } | 2389 } |
2030 fold_convert_exit: | 2390 fold_convert_exit: |
2031 protected_set_expr_location_unshare (tem, loc); | 2391 protected_set_expr_location_unshare (tem, loc); |
2032 return tem; | 2392 return tem; |
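One behavioral detail in the hunk above: converting a vector to a same-size integral type now builds a VIEW_CONVERT_EXPR instead of a NOP_EXPR, i.e. the bits are reinterpreted rather than value-converted. Roughly the source-level analogue (my illustration, using GCC's vector_size and __int128 extensions):

    typedef int v4si __attribute__ ((vector_size (16)));

    unsigned __int128 as_int (v4si v)
    {
      /* Same-size bit reinterpretation, like VIEW_CONVERT_EXPR.  */
      union { v4si v; unsigned __int128 i; } u = { v };
      return u.i;
    }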
2093 if (! maybe_lvalue_p (x)) | 2453 if (! maybe_lvalue_p (x)) |
2094 return x; | 2454 return x; |
2095 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x); | 2455 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x); |
2096 } | 2456 } |
2097 | 2457 |
2098 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C. | |
2099 Zero means allow extended lvalues. */ | |
2100 | |
2101 int pedantic_lvalues; | |
2102 | |
2103 /* When pedantic, return an expr equal to X but certainly not valid as a | 2458 /* When pedantic, return an expr equal to X but certainly not valid as a |
2104 pedantic lvalue. Otherwise, return X. */ | 2459 pedantic lvalue. Otherwise, return X. */ |
2105 | 2460 |
2106 static tree | 2461 static tree |
2107 pedantic_non_lvalue_loc (location_t loc, tree x) | 2462 pedantic_non_lvalue_loc (location_t loc, tree x) |
2108 { | 2463 { |
2109 if (pedantic_lvalues) | |
2110 return non_lvalue_loc (loc, x); | |
2111 | |
2112 return protected_set_expr_location_unshare (x, loc); | 2464 return protected_set_expr_location_unshare (x, loc); |
2113 } | 2465 } |
2114 | 2466 |
2115 /* Given a tree comparison code, return the code that is the logical inverse | 2467 /* Given a tree comparison code, return the code that is the logical inverse. |
2116 of the given code. It is not safe to do this for floating-point | 2468 It is generally not safe to do this for floating-point comparisons, except |
2117 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode | 2469 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return |
2118 as well: if reversing the comparison is unsafe, return ERROR_MARK. */ | 2470 ERROR_MARK in this case. */ |
2119 | 2471 |
2120 enum tree_code | 2472 enum tree_code |
2121 invert_tree_comparison (enum tree_code code, bool honor_nans) | 2473 invert_tree_comparison (enum tree_code code, bool honor_nans) |
2122 { | 2474 { |
2123 if (honor_nans && flag_trapping_math) | 2475 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR |
2476 && code != ORDERED_EXPR && code != UNORDERED_EXPR) | |
2124 return ERROR_MARK; | 2477 return ERROR_MARK; |
2125 | 2478 |
2126 switch (code) | 2479 switch (code) |
2127 { | 2480 { |
2128 case EQ_EXPR: | 2481 case EQ_EXPR: |
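The reason inversion is unsafe once NaNs are honored: with a NaN operand both a comparison and its naive inverse are false, and rewriting LT as UNGE would change which exceptions are raised under -ftrapping-math. EQ, NE, ORDERED and UNORDERED are quiet comparisons, which is why the new condition still allows inverting them. A standalone demonstration of the first point:

    #include <stdio.h>

    int main (void)
    {
      double x = __builtin_nan ("");
      /* Both print 0: !(x < 1.0) is not equivalent to (x >= 1.0).  */
      printf ("%d %d\n", x < 1.0, x >= 1.0);
      return 0;
    }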
2289 combine_comparisons (location_t loc, | 2642 combine_comparisons (location_t loc, |
2290 enum tree_code code, enum tree_code lcode, | 2643 enum tree_code code, enum tree_code lcode, |
2291 enum tree_code rcode, tree truth_type, | 2644 enum tree_code rcode, tree truth_type, |
2292 tree ll_arg, tree lr_arg) | 2645 tree ll_arg, tree lr_arg) |
2293 { | 2646 { |
2294 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg))); | 2647 bool honor_nans = HONOR_NANS (ll_arg); |
2295 enum comparison_code lcompcode = comparison_to_compcode (lcode); | 2648 enum comparison_code lcompcode = comparison_to_compcode (lcode); |
2296 enum comparison_code rcompcode = comparison_to_compcode (rcode); | 2649 enum comparison_code rcompcode = comparison_to_compcode (rcode); |
2297 int compcode; | 2650 int compcode; |
2298 | 2651 |
2299 switch (code) | 2652 switch (code) |
2367 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg); | 2720 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg); |
2368 } | 2721 } |
2369 } | 2722 } |
2370 | 2723 |
2371 /* Return nonzero if two operands (typically of the same tree node) | 2724 /* Return nonzero if two operands (typically of the same tree node) |
2372 are necessarily equal. If either argument has side-effects this | 2725 are necessarily equal. FLAGS modifies behavior as follows: |
2373 function returns zero. FLAGS modifies behavior as follows: | |
2374 | 2726 |
2375 If OEP_ONLY_CONST is set, only return nonzero for constants. | 2727 If OEP_ONLY_CONST is set, only return nonzero for constants. |
2376 This function tests whether the operands are indistinguishable; | 2728 This function tests whether the operands are indistinguishable; |
2377 it does not test whether they are equal using C's == operation. | 2729 it does not test whether they are equal using C's == operation. |
2378 The distinction is important for IEEE floating point, because | 2730 The distinction is important for IEEE floating point, because |
2390 Unless comparing arbitrary expression trees, such as from different | 2742 Unless comparing arbitrary expression trees, such as from different |
2391 statements, this flag can usually be left unset. | 2743 statements, this flag can usually be left unset. |
2392 | 2744 |
2393 If OEP_PURE_SAME is set, then pure functions with identical arguments | 2745 If OEP_PURE_SAME is set, then pure functions with identical arguments |
2394 are considered the same. It is used when the caller has other ways | 2746 are considered the same. It is used when the caller has other ways |
2395 to ensure that global memory is unchanged in between. */ | 2747 to ensure that global memory is unchanged in between. |
2748 | |
2749 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects, | |
2750 not values of expressions. | |
2751 | |
2752 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects | |
2753 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs. | |
2754 | |
2755 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on | |
2756 any operand with side effects. This is unnecessarily conservative in the | |
2757 case we know that arg0 and arg1 are in disjoint code paths (such as in | |
2758 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing | |
2759 addresses with TREE_CONSTANT flag set so we know that &var == &var | |
2760 even if var is volatile. */ | |
2396 | 2761 |
2397 int | 2762 int |
2398 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags) | 2763 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags) |
2399 { | 2764 { |
2765 /* When checking, verify at the outermost operand_equal_p call that | |
2766 if operand_equal_p returns non-zero then ARG0 and ARG1 have the same | |
2767 hash value. */ | |
2768 if (flag_checking && !(flags & OEP_NO_HASH_CHECK)) | |
2769 { | |
2770 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK)) | |
2771 { | |
2772 if (arg0 != arg1) | |
2773 { | |
2774 inchash::hash hstate0 (0), hstate1 (0); | |
2775 inchash::add_expr (arg0, hstate0, flags | OEP_HASH_CHECK); | |
2776 inchash::add_expr (arg1, hstate1, flags | OEP_HASH_CHECK); | |
2777 hashval_t h0 = hstate0.end (); | |
2778 hashval_t h1 = hstate1.end (); | |
2779 gcc_assert (h0 == h1); | |
2780 } | |
2781 return 1; | |
2782 } | |
2783 else | |
2784 return 0; | |
2785 } | |
2786 | |
2400 /* If either is ERROR_MARK, they aren't equal. */ | 2787 /* If either is ERROR_MARK, they aren't equal. */ |
2401 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK | 2788 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK |
2402 || TREE_TYPE (arg0) == error_mark_node | 2789 || TREE_TYPE (arg0) == error_mark_node |
2403 || TREE_TYPE (arg1) == error_mark_node) | 2790 || TREE_TYPE (arg1) == error_mark_node) |
2404 return 0; | 2791 return 0; |
2406 /* Similar, if either does not have a type (like a released SSA name), | 2793 /* Similar, if either does not have a type (like a released SSA name), |
2407 they aren't equal. */ | 2794 they aren't equal. */ |
2408 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1)) | 2795 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1)) |
2409 return 0; | 2796 return 0; |
2410 | 2797 |
2798 /* We cannot consider pointers to different address space equal. */ | |
2799 if (POINTER_TYPE_P (TREE_TYPE (arg0)) | |
2800 && POINTER_TYPE_P (TREE_TYPE (arg1)) | |
2801 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))) | |
2802 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1))))) | |
2803 return 0; | |
2804 | |
2411 /* Check equality of integer constants before bailing out due to | 2805 /* Check equality of integer constants before bailing out due to |
2412 precision differences. */ | 2806 precision differences. */ |
2413 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST) | 2807 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST) |
2414 return tree_int_cst_equal (arg0, arg1); | 2808 { |
2415 | 2809 /* Address of INTEGER_CST is not defined; check that we did not forget |
2416 /* If both types don't have the same signedness, then we can't consider | 2810 to drop the OEP_ADDRESS_OF flags. */ |
2417 them equal. We must check this before the STRIP_NOPS calls | 2811 gcc_checking_assert (!(flags & OEP_ADDRESS_OF)); |
2418 because they may change the signedness of the arguments. As pointers | 2812 return tree_int_cst_equal (arg0, arg1); |
2419 strictly don't have a signedness, require either two pointers or | 2813 } |
2420 two non-pointers as well. */ | 2814 |
2421 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)) | 2815 if (!(flags & OEP_ADDRESS_OF)) |
2422 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1))) | 2816 { |
2423 return 0; | 2817 /* If both types don't have the same signedness, then we can't consider |
2424 | 2818 them equal. We must check this before the STRIP_NOPS calls |
2425 /* We cannot consider pointers to different address space equal. */ | 2819 because they may change the signedness of the arguments. As pointers |
2426 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1)) | 2820 strictly don't have a signedness, require either two pointers or |
2427 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))) | 2821 two non-pointers as well. */ |
2428 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1))))) | 2822 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)) |
2429 return 0; | 2823 || POINTER_TYPE_P (TREE_TYPE (arg0)) |
2430 | 2824 != POINTER_TYPE_P (TREE_TYPE (arg1))) |
2431 /* If both types don't have the same precision, then it is not safe | 2825 return 0; |
2432 to strip NOPs. */ | 2826 |
2433 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1))) | 2827 /* If both types don't have the same precision, then it is not safe |
2434 return 0; | 2828 to strip NOPs. */ |
2435 | 2829 if (element_precision (TREE_TYPE (arg0)) |
2436 STRIP_NOPS (arg0); | 2830 != element_precision (TREE_TYPE (arg1))) |
2437 STRIP_NOPS (arg1); | 2831 return 0; |
2832 | |
2833 STRIP_NOPS (arg0); | |
2834 STRIP_NOPS (arg1); | |
2835 } | |
2836 #if 0 | |
2837 /* FIXME: Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the | |
2838 sanity check once the issue is solved. */ | |
2839 else | |
2840 /* Addresses of conversions and SSA_NAMEs (and many other things) | |
2841 are not defined. Check that we did not forget to drop the | |
2842 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */ | |
2843 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1) | |
2844 && TREE_CODE (arg0) != SSA_NAME); | |
2845 #endif | |
2438 | 2846 |
2439 /* In case both args are comparisons but with different comparison | 2847 /* In case both args are comparisons but with different comparison |
2440 code, try to swap the comparison operands of one arg to produce | 2848 code, try to swap the comparison operands of one arg to produce |
2441 a match and compare that variant. */ | 2849 a match and compare that variant. */ |
2442 if (TREE_CODE (arg0) != TREE_CODE (arg1) | 2850 if (TREE_CODE (arg0) != TREE_CODE (arg1) |
2450 TREE_OPERAND (arg1, 1), flags) | 2858 TREE_OPERAND (arg1, 1), flags) |
2451 && operand_equal_p (TREE_OPERAND (arg0, 1), | 2859 && operand_equal_p (TREE_OPERAND (arg0, 1), |
2452 TREE_OPERAND (arg1, 0), flags); | 2860 TREE_OPERAND (arg1, 0), flags); |
2453 } | 2861 } |
2454 | 2862 |
2455 if (TREE_CODE (arg0) != TREE_CODE (arg1) | 2863 if (TREE_CODE (arg0) != TREE_CODE (arg1)) |
2456 /* This is needed for conversions and for COMPONENT_REF. | 2864 { |
2457 Might as well play it safe and always test this. */ | 2865 /* NOP_EXPR and CONVERT_EXPR are considered equal. */ |
2458 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK | 2866 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)) |
2867 ; | |
2868 else if (flags & OEP_ADDRESS_OF) | |
2869 { | |
2870 /* If we are interested in comparing addresses ignore | |
2871 MEM_REF wrappings of the base that can appear just for | |
2872 TBAA reasons. */ | |
2873 if (TREE_CODE (arg0) == MEM_REF | |
2874 && DECL_P (arg1) | |
2875 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR | |
2876 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1 | |
2877 && integer_zerop (TREE_OPERAND (arg0, 1))) | |
2878 return 1; | |
2879 else if (TREE_CODE (arg1) == MEM_REF | |
2880 && DECL_P (arg0) | |
2881 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR | |
2882 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0 | |
2883 && integer_zerop (TREE_OPERAND (arg1, 1))) | |
2884 return 1; | |
2885 return 0; | |
2886 } | |
2887 else | |
2888 return 0; | |
2889 } | |
2890 | |
2891 /* When not checking addresses, this is needed for conversions and for | |
2892 COMPONENT_REF. Might as well play it safe and always test this. */ | |
2893 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK | |
2459 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK | 2894 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK |
2460 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))) | 2895 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)) |
2896 && !(flags & OEP_ADDRESS_OF))) | |
2461 return 0; | 2897 return 0; |
2462 | 2898 |
2463 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal. | 2899 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal. |
2464 We don't care about side effects in that case because the SAVE_EXPR | 2900 We don't care about side effects in that case because the SAVE_EXPR |
2465 takes care of that for us. In all other cases, two expressions are | 2901 takes care of that for us. In all other cases, two expressions are |
2466 equal if they have no side effects. If we have two identical | 2902 equal if they have no side effects. If we have two identical |
2467 expressions with side effects that should be treated the same due | 2903 expressions with side effects that should be treated the same due |
2468 to the only side effects being identical SAVE_EXPR's, that will | 2904 to the only side effects being identical SAVE_EXPR's, that will |
2469 be detected in the recursive calls below. */ | 2905 be detected in the recursive calls below. |
2906 If we are taking an invariant address of two identical objects | |
2907 they are necessarily equal as well. */ | |
2470 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST) | 2908 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST) |
2471 && (TREE_CODE (arg0) == SAVE_EXPR | 2909 && (TREE_CODE (arg0) == SAVE_EXPR |
2910 || (flags & OEP_MATCH_SIDE_EFFECTS) | |
2472 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1)))) | 2911 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1)))) |
2473 return 1; | 2912 return 1; |
2474 | 2913 |
2475 /* Next handle constant cases, those for which we can return 1 even | 2914 /* Next handle constant cases, those for which we can return 1 even |
2476 if ONLY_CONST is set. */ | 2915 if ONLY_CONST is set. */ |
2483 case FIXED_CST: | 2922 case FIXED_CST: |
2484 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0), | 2923 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0), |
2485 TREE_FIXED_CST (arg1)); | 2924 TREE_FIXED_CST (arg1)); |
2486 | 2925 |
2487 case REAL_CST: | 2926 case REAL_CST: |
2488 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0), | 2927 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1))) |
2489 TREE_REAL_CST (arg1))) | |
2490 return 1; | 2928 return 1; |
2491 | 2929 |
2492 | 2930 |
2493 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))) | 2931 if (!HONOR_SIGNED_ZEROS (arg0)) |
2494 { | 2932 { |
2495 /* If we do not distinguish between signed and unsigned zero, | 2933 /* If we do not distinguish between signed and unsigned zero, |
2496 consider them equal. */ | 2934 consider them equal. */ |
2497 if (real_zerop (arg0) && real_zerop (arg1)) | 2935 if (real_zerop (arg0) && real_zerop (arg1)) |
2498 return 1; | 2936 return 1; |
2499 } | 2937 } |
2500 return 0; | 2938 return 0; |
2501 | 2939 |
2502 case VECTOR_CST: | 2940 case VECTOR_CST: |
2503 { | 2941 { |
2504 tree v1, v2; | 2942 unsigned i; |
2505 | 2943 |
2506 v1 = TREE_VECTOR_CST_ELTS (arg0); | 2944 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1)) |
2507 v2 = TREE_VECTOR_CST_ELTS (arg1); | 2945 return 0; |
2508 while (v1 && v2) | 2946 |
2947 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i) | |
2509 { | 2948 { |
2510 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2), | 2949 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i), |
2511 flags)) | 2950 VECTOR_CST_ELT (arg1, i), flags)) |
2512 return 0; | 2951 return 0; |
2513 v1 = TREE_CHAIN (v1); | |
2514 v2 = TREE_CHAIN (v2); | |
2515 } | 2952 } |
2516 | 2953 return 1; |
2517 return v1 == v2; | |
2518 } | 2954 } |
2519 | 2955 |
2520 case COMPLEX_CST: | 2956 case COMPLEX_CST: |
2521 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1), | 2957 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1), |
2522 flags) | 2958 flags) |
2528 && ! memcmp (TREE_STRING_POINTER (arg0), | 2964 && ! memcmp (TREE_STRING_POINTER (arg0), |
2529 TREE_STRING_POINTER (arg1), | 2965 TREE_STRING_POINTER (arg1), |
2530 TREE_STRING_LENGTH (arg0))); | 2966 TREE_STRING_LENGTH (arg0))); |
2531 | 2967 |
2532 case ADDR_EXPR: | 2968 case ADDR_EXPR: |
2969 gcc_checking_assert (!(flags & OEP_ADDRESS_OF)); | |
2533 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), | 2970 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), |
2534 0); | 2971 flags | OEP_ADDRESS_OF |
2972 | OEP_MATCH_SIDE_EFFECTS); | |
2973 case CONSTRUCTOR: | |
2974 /* In GIMPLE empty constructors are allowed in initializers of | |
2975 aggregates. */ | |
2976 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1); | |
2535 default: | 2977 default: |
2536 break; | 2978 break; |
2537 } | 2979 } |
2538 | 2980 |
2539 if (flags & OEP_ONLY_CONST) | 2981 if (flags & OEP_ONLY_CONST) |
2581 && operand_equal_p (TREE_OPERAND (arg0, 1), | 3023 && operand_equal_p (TREE_OPERAND (arg0, 1), |
2582 TREE_OPERAND (arg1, 0), flags)); | 3024 TREE_OPERAND (arg1, 0), flags)); |
2583 | 3025 |
2584 case tcc_reference: | 3026 case tcc_reference: |
2585 /* If either of the pointer (or reference) expressions we are | 3027 /* If either of the pointer (or reference) expressions we are |
2586 dereferencing contain a side effect, these cannot be equal. */ | 3028 dereferencing contain a side effect, these cannot be equal, |
2587 if (TREE_SIDE_EFFECTS (arg0) | 3029 but their addresses can be. */ |
2588 || TREE_SIDE_EFFECTS (arg1)) | 3030 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0 |
3031 && (TREE_SIDE_EFFECTS (arg0) | |
3032 || TREE_SIDE_EFFECTS (arg1))) | |
2589 return 0; | 3033 return 0; |
2590 | 3034 |
2591 switch (TREE_CODE (arg0)) | 3035 switch (TREE_CODE (arg0)) |
2592 { | 3036 { |
2593 case INDIRECT_REF: | 3037 case INDIRECT_REF: |
3038 if (!(flags & OEP_ADDRESS_OF) | |
3039 && (TYPE_ALIGN (TREE_TYPE (arg0)) | |
3040 != TYPE_ALIGN (TREE_TYPE (arg1)))) | |
3041 return 0; | |
3042 flags &= ~OEP_ADDRESS_OF; | |
3043 return OP_SAME (0); | |
3044 | |
3045 case IMAGPART_EXPR: | |
3046 /* Require the same offset. */ | |
3047 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)), | |
3048 TYPE_SIZE (TREE_TYPE (arg1)), | |
3049 flags & ~OEP_ADDRESS_OF)) | |
3050 return 0; | |
3051 | |
3052 /* Fallthru. */ | |
2594 case REALPART_EXPR: | 3053 case REALPART_EXPR: |
2595 case IMAGPART_EXPR: | 3054 case VIEW_CONVERT_EXPR: |
2596 return OP_SAME (0); | 3055 return OP_SAME (0); |
2597 | 3056 |
3057 case TARGET_MEM_REF: | |
2598 case MEM_REF: | 3058 case MEM_REF: |
2599 /* Require equal access sizes, and similar pointer types. | 3059 if (!(flags & OEP_ADDRESS_OF)) |
2600 We can have incomplete types for array references of | 3060 { |
2601 variable-sized arrays from the Fortran frontent | 3061 /* Require equal access sizes */ |
2602 though. */ | 3062 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1)) |
2603 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1)) | 3063 && (!TYPE_SIZE (TREE_TYPE (arg0)) |
2604 || (TYPE_SIZE (TREE_TYPE (arg0)) | 3064 || !TYPE_SIZE (TREE_TYPE (arg1)) |
2605 && TYPE_SIZE (TREE_TYPE (arg1)) | 3065 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)), |
2606 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)), | 3066 TYPE_SIZE (TREE_TYPE (arg1)), |
2607 TYPE_SIZE (TREE_TYPE (arg1)), flags))) | 3067 flags))) |
2608 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1))) | 3068 return 0; |
2609 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1)))) | 3069 /* Verify that access happens in similar types. */ |
2610 && OP_SAME (0) && OP_SAME (1)); | 3070 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))) |
3071 return 0; | |
3072 /* Verify that accesses are TBAA compatible. */ | |
3073 if (!alias_ptr_types_compatible_p | |
3074 (TREE_TYPE (TREE_OPERAND (arg0, 1)), | |
3075 TREE_TYPE (TREE_OPERAND (arg1, 1))) | |
3076 || (MR_DEPENDENCE_CLIQUE (arg0) | |
3077 != MR_DEPENDENCE_CLIQUE (arg1)) | |
3078 || (MR_DEPENDENCE_BASE (arg0) | |
3079 != MR_DEPENDENCE_BASE (arg1))) | |
3080 return 0; | |
3081 /* Verify that alignment is compatible. */ | |
3082 if (TYPE_ALIGN (TREE_TYPE (arg0)) | |
3083 != TYPE_ALIGN (TREE_TYPE (arg1))) | |
3084 return 0; | |
3085 } | |
3086 flags &= ~OEP_ADDRESS_OF; | |
3087 return (OP_SAME (0) && OP_SAME (1) | |
3088 /* TARGET_MEM_REF require equal extra operands. */ | |
3089 && (TREE_CODE (arg0) != TARGET_MEM_REF | |
3090 || (OP_SAME_WITH_NULL (2) | |
3091 && OP_SAME_WITH_NULL (3) | |
3092 && OP_SAME_WITH_NULL (4)))); | |
2611 | 3093 |
2612 case ARRAY_REF: | 3094 case ARRAY_REF: |
2613 case ARRAY_RANGE_REF: | 3095 case ARRAY_RANGE_REF: |
2614 /* Operands 2 and 3 may be null. | 3096 if (!OP_SAME (0)) |
2615 Compare the array index by value if it is constant first as we | 3097 return 0; |
3098 flags &= ~OEP_ADDRESS_OF; | |
3099 /* Compare the array index by value if it is constant first as we | |
2616 may have different types but same value here. */ | 3100 may have different types but same value here. */ |
2617 return (OP_SAME (0) | 3101 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1), |
2618 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1), | 3102 TREE_OPERAND (arg1, 1)) |
2619 TREE_OPERAND (arg1, 1)) | 3103 || OP_SAME (1)) |
2620 || OP_SAME (1)) | |
2621 && OP_SAME_WITH_NULL (2) | 3104 && OP_SAME_WITH_NULL (2) |
2622 && OP_SAME_WITH_NULL (3)); | 3105 && OP_SAME_WITH_NULL (3) |
3106 /* Compare low bound and element size as with OEP_ADDRESS_OF | |
3107 we have to account for the offset of the ref. */ | |
3108 && (TREE_TYPE (TREE_OPERAND (arg0, 0)) | |
3109 == TREE_TYPE (TREE_OPERAND (arg1, 0)) | |
3110 || (operand_equal_p (array_ref_low_bound | |
3111 (CONST_CAST_TREE (arg0)), | |
3112 array_ref_low_bound | |
3113 (CONST_CAST_TREE (arg1)), flags) | |
3114 && operand_equal_p (array_ref_element_size | |
3115 (CONST_CAST_TREE (arg0)), | |
3116 array_ref_element_size | |
3117 (CONST_CAST_TREE (arg1)), | |
3118 flags)))); | |
2623 | 3119 |
2624 case COMPONENT_REF: | 3120 case COMPONENT_REF: |
2625 /* Handle operand 2 the same as for ARRAY_REF. Operand 0 | 3121 /* Handle operand 2 the same as for ARRAY_REF. Operand 0 |
2626 may be NULL when we're called to compare MEM_EXPRs. */ | 3122 may be NULL when we're called to compare MEM_EXPRs. */ |
2627 return OP_SAME_WITH_NULL (0) | 3123 if (!OP_SAME_WITH_NULL (0) |
2628 && OP_SAME (1) | 3124 || !OP_SAME (1)) |
2629 && OP_SAME_WITH_NULL (2); | 3125 return 0; |
3126 flags &= ~OEP_ADDRESS_OF; | |
3127 return OP_SAME_WITH_NULL (2); | |
2630 | 3128 |
2631 case BIT_FIELD_REF: | 3129 case BIT_FIELD_REF: |
2632 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2); | 3130 if (!OP_SAME (0)) |
3131 return 0; | |
3132 flags &= ~OEP_ADDRESS_OF; | |
3133 return OP_SAME (1) && OP_SAME (2); | |
2633 | 3134 |
2634 default: | 3135 default: |
2635 return 0; | 3136 return 0; |
2636 } | 3137 } |
2637 | 3138 |
2638 case tcc_expression: | 3139 case tcc_expression: |
2639 switch (TREE_CODE (arg0)) | 3140 switch (TREE_CODE (arg0)) |
2640 { | 3141 { |
2641 case ADDR_EXPR: | 3142 case ADDR_EXPR: |
3143 /* Be sure we pass the right ADDRESS_OF flag. */ | |
3144 gcc_checking_assert (!(flags & OEP_ADDRESS_OF)); | |
3145 return operand_equal_p (TREE_OPERAND (arg0, 0), | |
3146 TREE_OPERAND (arg1, 0), | |
3147 flags | OEP_ADDRESS_OF); | |
3148 | |
2642 case TRUTH_NOT_EXPR: | 3149 case TRUTH_NOT_EXPR: |
2643 return OP_SAME (0); | 3150 return OP_SAME (0); |
2644 | 3151 |
2645 case TRUTH_ANDIF_EXPR: | 3152 case TRUTH_ANDIF_EXPR: |
2646 case TRUTH_ORIF_EXPR: | 3153 case TRUTH_ORIF_EXPR: |
2665 TREE_OPERAND (arg1, 1), flags) | 3172 TREE_OPERAND (arg1, 1), flags) |
2666 && operand_equal_p (TREE_OPERAND (arg0, 1), | 3173 && operand_equal_p (TREE_OPERAND (arg0, 1), |
2667 TREE_OPERAND (arg1, 0), flags)); | 3174 TREE_OPERAND (arg1, 0), flags)); |
2668 | 3175 |
2669 case COND_EXPR: | 3176 case COND_EXPR: |
3177 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2)) | |
3178 return 0; | |
3179 flags &= ~OEP_ADDRESS_OF; | |
3180 return OP_SAME (0); | |
3181 | |
3182 case BIT_INSERT_EXPR: | |
3183 /* BIT_INSERT_EXPR has an implicit operand as the type precision | |
3184 of op1. Need to check to make sure they are the same. */ | |
3185 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST | |
3186 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST | |
3187 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1))) | |
3188 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1)))) | |
3189 return false; | |
3190 /* FALLTHRU */ | |
3191 | |
2670 case VEC_COND_EXPR: | 3192 case VEC_COND_EXPR: |
2671 case DOT_PROD_EXPR: | 3193 case DOT_PROD_EXPR: |
2672 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2); | 3194 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2); |
2673 | 3195 |
3196 case MODIFY_EXPR: | |
3197 case INIT_EXPR: | |
3198 case COMPOUND_EXPR: | |
3199 case PREDECREMENT_EXPR: | |
3200 case PREINCREMENT_EXPR: | |
3201 case POSTDECREMENT_EXPR: | |
3202 case POSTINCREMENT_EXPR: | |
3203 if (flags & OEP_LEXICOGRAPHIC) | |
3204 return OP_SAME (0) && OP_SAME (1); | |
3205 return 0; | |
3206 | |
3207 case CLEANUP_POINT_EXPR: | |
3208 case EXPR_STMT: | |
3209 if (flags & OEP_LEXICOGRAPHIC) | |
3210 return OP_SAME (0); | |
3211 return 0; | |
3212 | |
2674 default: | 3213 default: |
2675 return 0; | 3214 return 0; |
2676 } | 3215 } |
2677 | 3216 |
2678 case tcc_vl_exp: | 3217 case tcc_vl_exp: |
2679 switch (TREE_CODE (arg0)) | 3218 switch (TREE_CODE (arg0)) |
2680 { | 3219 { |
2681 case CALL_EXPR: | 3220 case CALL_EXPR: |
2682 /* If the CALL_EXPRs call different functions, then they | 3221 if ((CALL_EXPR_FN (arg0) == NULL_TREE) |
2683 clearly can not be equal. */ | 3222 != (CALL_EXPR_FN (arg1) == NULL_TREE)) |
2684 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1), | 3223 /* If the CALL_EXPRs are not both internal or both normal |
2685 flags)) | 3224 function calls, then they are not equal. */ |
2686 return 0; | 3225 return 0; |
2687 | 3226 else if (CALL_EXPR_FN (arg0) == NULL_TREE) |
3227 { | |
3228 /* If the CALL_EXPRs call different internal functions, then they | |
3229 are not equal. */ | |
3230 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1)) | |
3231 return 0; | |
3232 } | |
3233 else | |
3234 { | |
3235 /* If the CALL_EXPRs call different functions, then they are not | |
3236 equal. */ | |
3237 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1), | |
3238 flags)) | |
3239 return 0; | |
3240 } | |
3241 | |
3242 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */ | |
2688 { | 3243 { |
2689 unsigned int cef = call_expr_flags (arg0); | 3244 unsigned int cef = call_expr_flags (arg0); |
2690 if (flags & OEP_PURE_SAME) | 3245 if (flags & OEP_PURE_SAME) |
2691 cef &= ECF_CONST | ECF_PURE; | 3246 cef &= ECF_CONST | ECF_PURE; |
2692 else | 3247 else |
2693 cef &= ECF_CONST; | 3248 cef &= ECF_CONST; |
2694 if (!cef) | 3249 if (!cef && !(flags & OEP_LEXICOGRAPHIC)) |
2695 return 0; | 3250 return 0; |
2696 } | 3251 } |
2697 | 3252 |
2698 /* Now see if all the arguments are the same. */ | 3253 /* Now see if all the arguments are the same. */ |
2699 { | 3254 { |
2720 return (TREE_CODE (arg0) == FUNCTION_DECL | 3275 return (TREE_CODE (arg0) == FUNCTION_DECL |
2721 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1) | 3276 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1) |
2722 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1) | 3277 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1) |
2723 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1)); | 3278 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1)); |
2724 | 3279 |
3280 case tcc_exceptional: | |
3281 if (TREE_CODE (arg0) == CONSTRUCTOR) | |
3282 { | |
3283 /* In GIMPLE constructors are used only to build vectors from | |
3284 elements. Individual elements in the constructor must be | |
3285 indexed in increasing order and form an initial sequence. | |
3286 | |
3287 We make no effort to compare constructors in GENERIC. | |
3288 (see sem_variable::equals in ipa-icf which can do so for | |
3289 constants). */ | |
3290 if (!VECTOR_TYPE_P (TREE_TYPE (arg0)) | |
3291 || !VECTOR_TYPE_P (TREE_TYPE (arg1))) | |
3292 return 0; | |
3293 | |
3294 /* Be sure that vectors constructed have the same representation. | |
3295 So far we have only checked that element precision and modes match. | |
3296 Vectors may be BLKmode and thus also check that the number of | |
3297 parts match. */ | |
3298 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) | |
3299 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))) | |
3300 return 0; | |
3301 | |
3302 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0); | |
3303 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1); | |
3304 unsigned int len = vec_safe_length (v0); | |
3305 | |
3306 if (len != vec_safe_length (v1)) | |
3307 return 0; | |
3308 | |
3309 for (unsigned int i = 0; i < len; i++) | |
3310 { | |
3311 constructor_elt *c0 = &(*v0)[i]; | |
3312 constructor_elt *c1 = &(*v1)[i]; | |
3313 | |
3314 if (!operand_equal_p (c0->value, c1->value, flags) | |
3315 /* In GIMPLE the indexes can be either NULL or matching i. | |
3316 Double check this so we won't get false | |
3317 positives for GENERIC. */ | |
3318 || (c0->index | |
3319 && (TREE_CODE (c0->index) != INTEGER_CST | |
3320 || !compare_tree_int (c0->index, i))) | |
3321 || (c1->index | |
3322 && (TREE_CODE (c1->index) != INTEGER_CST | |
3323 || !compare_tree_int (c1->index, i)))) | |
3324 return 0; | |
3325 } | |
3326 return 1; | |
3327 } | |
3328 else if (TREE_CODE (arg0) == STATEMENT_LIST | |
3329 && (flags & OEP_LEXICOGRAPHIC)) | |
3330 { | |
3331 /* Compare the STATEMENT_LISTs. */ | |
3332 tree_stmt_iterator tsi1, tsi2; | |
3333 tree body1 = CONST_CAST_TREE (arg0); | |
3334 tree body2 = CONST_CAST_TREE (arg1); | |
3335 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ; | |
3336 tsi_next (&tsi1), tsi_next (&tsi2)) | |
3337 { | |
3338 /* The lists don't have the same number of statements. */ | |
3339 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2)) | |
3340 return 0; | |
3341 if (tsi_end_p (tsi1) && tsi_end_p (tsi2)) | |
3342 return 1; | |
3343 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2), | |
3344 OEP_LEXICOGRAPHIC)) | |
3345 return 0; | |
3346 } | |
3347 } | |
3348 return 0; | |
3349 | |
3350 case tcc_statement: | |
3351 switch (TREE_CODE (arg0)) | |
3352 { | |
3353 case RETURN_EXPR: | |
3354 if (flags & OEP_LEXICOGRAPHIC) | |
3355 return OP_SAME_WITH_NULL (0); | |
3356 return 0; | |
3357 default: | |
3358 return 0; | |
3359 } | |
3360 | |
2725 default: | 3361 default: |
2726 return 0; | 3362 return 0; |
2727 } | 3363 } |
2728 | 3364 |
2729 #undef OP_SAME | 3365 #undef OP_SAME |
2730 #undef OP_SAME_WITH_NULL | 3366 #undef OP_SAME_WITH_NULL |
2731 } | 3367 } |
2732 | 3368 |
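A side note on the CALL_EXPR arm above: two calls may only compare equal when the callee is const (ECF_CONST), or also pure when OEP_PURE_SAME is set. A minimal standalone C sketch of why that restriction is needed; this is illustrative only, not GCC-internal code, and twice() is a made-up stand-in for a const function:

#include <stdio.h>
#include <stdlib.h>
#include <time.h>

/* "Const" in the ECF_CONST sense: the result depends only on the
   arguments, so two calls with equal arguments yield equal values.  */
static int
twice (int x)
{
  return 2 * x;
}

int
main (void)
{
  srand ((unsigned) time (NULL));
  /* Safe to identify the two calls: prints 1.  */
  printf ("twice(21) == twice(21): %d\n", twice (21) == twice (21));
  /* rand () reads and updates hidden state, so two syntactically
     identical calls need not be equal; this almost always prints 0.  */
  printf ("rand() == rand(): %d\n", rand () == rand ());
  return 0;
}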
2733 /* Similar to operand_equal_p, but see if ARG0 might have been made by | 3369 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1 |
2734 shorten_compare from ARG1 when ARG1 was being compared with OTHER. | 3370 with a different signedness or a narrower precision. */ |
2735 | 3371 |
2736 When in doubt, return 0. */ | 3372 static bool |
2737 | 3373 operand_equal_for_comparison_p (tree arg0, tree arg1) |
2738 static int | 3374 { |
2739 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other) | |
2740 { | |
2741 int unsignedp1, unsignedpo; | |
2742 tree primarg0, primarg1, primother; | |
2743 unsigned int correct_width; | |
2744 | |
2745 if (operand_equal_p (arg0, arg1, 0)) | 3375 if (operand_equal_p (arg0, arg1, 0)) |
2746 return 1; | 3376 return true; |
2747 | 3377 |
2748 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)) | 3378 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)) |
2749 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1))) | 3379 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1))) |
2750 return 0; | 3380 return false; |
2751 | 3381 |
2752 /* Discard any conversions that don't change the modes of ARG0 and ARG1 | 3382 /* Discard any conversions that don't change the modes of ARG0 and ARG1 |
2753 and see if the inner values are the same. This removes any | 3383 and see if the inner values are the same. This removes any |
2754 signedness comparison, which doesn't matter here. */ | 3384 signedness comparison, which doesn't matter here. */ |
2755 primarg0 = arg0, primarg1 = arg1; | 3385 tree op0 = arg0; |
2756 STRIP_NOPS (primarg0); | 3386 tree op1 = arg1; |
2757 STRIP_NOPS (primarg1); | 3387 STRIP_NOPS (op0); |
2758 if (operand_equal_p (primarg0, primarg1, 0)) | 3388 STRIP_NOPS (op1); |
2759 return 1; | 3389 if (operand_equal_p (op0, op1, 0)) |
2760 | 3390 return true; |
2761 /* Duplicate what shorten_compare does to ARG1 and see if that gives the | 3391 |
2762 actual comparison operand, ARG0. | 3392 /* Discard a single widening conversion from ARG1 and see if the inner |
2763 | 3393 value is the same as ARG0. */ |
2764 First throw away any conversions to wider types | 3394 if (CONVERT_EXPR_P (arg1) |
2765 already present in the operands. */ | 3395 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))) |
2766 | 3396 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))) |
2767 primarg1 = get_narrower (arg1, &unsignedp1); | 3397 < TYPE_PRECISION (TREE_TYPE (arg1)) |
2768 primother = get_narrower (other, &unsignedpo); | 3398 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) |
2769 | 3399 return true; |
2770 correct_width = TYPE_PRECISION (TREE_TYPE (arg1)); | 3400 |
2771 if (unsignedp1 == unsignedpo | 3401 return false; |
2772 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width | |
2773 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width) | |
2774 { | |
2775 tree type = TREE_TYPE (arg0); | |
2776 | |
2777 /* Make sure shorter operand is extended the right way | |
2778 to match the longer operand. */ | |
2779 primarg1 = fold_convert (signed_or_unsigned_type_for | |
2780 (unsignedp1, TREE_TYPE (primarg1)), primarg1); | |
2781 | |
2782 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0)) | |
2783 return 1; | |
2784 } | |
2785 | |
2786 return 0; | |
2787 } | 3402 } |
2788 | 3403 |
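The rewritten operand_equal_for_comparison_p looks through at most one widening integer conversion because widening preserves the value while narrowing does not. A standalone sketch of that asymmetry, assuming a 32-bit int and 64-bit long (typical LP64 targets):

#include <assert.h>
#include <stdio.h>

int
main (void)
{
  /* Widening: (long) x and x denote the same value for every int x,
     so either form is interchangeable inside a comparison.  */
  for (int x = -1000; x <= 1000; x++)
    assert ((long) x == x);

  /* Narrowing is not value-preserving (conversion of an out-of-range
     value to int is implementation-defined); typically prints 0.  */
  long big = 0x123456789L;
  printf ("survives a round trip through int: %d\n",
          (long) (int) big == big);
  return 0;
}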
2789 /* See if ARG is an expression that is either a comparison or is performing | 3404 /* See if ARG is an expression that is either a comparison or is performing |
2790 arithmetic on comparisons. The comparisons must only be comparing | 3405 arithmetic on comparisons. The comparisons must only be comparing |
2791 two different values, which will be stored in *CVAL1 and *CVAL2; if | 3406 two different values, which will be stored in *CVAL1 and *CVAL2; if |
2991 fold_ignored_result (omitted), t); | 3606 fold_ignored_result (omitted), t); |
2992 | 3607 |
2993 return non_lvalue_loc (loc, t); | 3608 return non_lvalue_loc (loc, t); |
2994 } | 3609 } |
2995 | 3610 |
2996 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */ | |
2997 | |
2998 static tree | |
2999 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result, | |
3000 tree omitted) | |
3001 { | |
3002 tree t = fold_convert_loc (loc, type, result); | |
3003 | |
3004 /* If the resulting operand is an empty statement, just return the omitted | |
3005 statement casted to void. */ | |
3006 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted)) | |
3007 return build1_loc (loc, NOP_EXPR, void_type_node, | |
3008 fold_ignored_result (omitted)); | |
3009 | |
3010 if (TREE_SIDE_EFFECTS (omitted)) | |
3011 return build2_loc (loc, COMPOUND_EXPR, type, | |
3012 fold_ignored_result (omitted), t); | |
3013 | |
3014 return pedantic_non_lvalue_loc (loc, t); | |
3015 } | |
3016 | |
3017 /* Return a tree for the case when the result of an expression is RESULT | 3611 /* Return a tree for the case when the result of an expression is RESULT |
3018 converted to TYPE and OMITTED1 and OMITTED2 were previously operands | 3612 converted to TYPE and OMITTED1 and OMITTED2 were previously operands |
3019 of the expression but are now not needed. | 3613 of the expression but are now not needed. |
3020 | 3614 |
3021 If OMITTED1 or OMITTED2 has side effects, they must be evaluated. | 3615 If OMITTED1 or OMITTED2 has side effects, they must be evaluated. |
3043 returns a truth value (0 or 1). | 3637 returns a truth value (0 or 1). |
3044 | 3638 |
3045 FIXME: one would think we would fold the result, but it causes | 3639 FIXME: one would think we would fold the result, but it causes |
3046 problems with the dominator optimizer. */ | 3640 problems with the dominator optimizer. */ |
3047 | 3641 |
3048 tree | 3642 static tree |
3049 fold_truth_not_expr (location_t loc, tree arg) | 3643 fold_truth_not_expr (location_t loc, tree arg) |
3050 { | 3644 { |
3051 tree type = TREE_TYPE (arg); | 3645 tree type = TREE_TYPE (arg); |
3052 enum tree_code code = TREE_CODE (arg); | 3646 enum tree_code code = TREE_CODE (arg); |
3053 location_t loc1, loc2; | 3647 location_t loc1, loc2; |
3063 && flag_trapping_math | 3657 && flag_trapping_math |
3064 && code != ORDERED_EXPR && code != UNORDERED_EXPR | 3658 && code != ORDERED_EXPR && code != UNORDERED_EXPR |
3065 && code != NE_EXPR && code != EQ_EXPR) | 3659 && code != NE_EXPR && code != EQ_EXPR) |
3066 return NULL_TREE; | 3660 return NULL_TREE; |
3067 | 3661 |
3068 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type))); | 3662 code = invert_tree_comparison (code, HONOR_NANS (op_type)); |
3069 if (code == ERROR_MARK) | 3663 if (code == ERROR_MARK) |
3070 return NULL_TREE; | 3664 return NULL_TREE; |
3071 | 3665 |
3072 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0), | 3666 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0), |
3073 TREE_OPERAND (arg, 1)); | 3667 TREE_OPERAND (arg, 1)); |
3668 if (TREE_NO_WARNING (arg)) | |
3669 TREE_NO_WARNING (ret) = 1; | |
3670 return ret; | |
3074 } | 3671 } |
3075 | 3672 |
3076 switch (code) | 3673 switch (code) |
3077 { | 3674 { |
3078 case INTEGER_CST: | 3675 case INTEGER_CST: |
3079 return constant_boolean_node (integer_zerop (arg), type); | 3676 return constant_boolean_node (integer_zerop (arg), type); |
3080 | 3677 |
3081 case TRUTH_AND_EXPR: | 3678 case TRUTH_AND_EXPR: |
3082 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0)); | 3679 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc); |
3083 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1)); | 3680 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc); |
3084 if (loc1 == UNKNOWN_LOCATION) | |
3085 loc1 = loc; | |
3086 if (loc2 == UNKNOWN_LOCATION) | |
3087 loc2 = loc; | |
3088 return build2_loc (loc, TRUTH_OR_EXPR, type, | 3681 return build2_loc (loc, TRUTH_OR_EXPR, type, |
3089 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)), | 3682 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)), |
3090 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1))); | 3683 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1))); |
3091 | 3684 |
3092 case TRUTH_OR_EXPR: | 3685 case TRUTH_OR_EXPR: |
3093 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0)); | 3686 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc); |
3094 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1)); | 3687 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc); |
3095 if (loc1 == UNKNOWN_LOCATION) | |
3096 loc1 = loc; | |
3097 if (loc2 == UNKNOWN_LOCATION) | |
3098 loc2 = loc; | |
3099 return build2_loc (loc, TRUTH_AND_EXPR, type, | 3688 return build2_loc (loc, TRUTH_AND_EXPR, type, |
3100 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)), | 3689 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)), |
3101 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1))); | 3690 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1))); |
3102 | 3691 |
3103 case TRUTH_XOR_EXPR: | 3692 case TRUTH_XOR_EXPR: |
3113 return build2_loc (loc, TRUTH_XOR_EXPR, type, | 3702 return build2_loc (loc, TRUTH_XOR_EXPR, type, |
3114 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)), | 3703 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)), |
3115 TREE_OPERAND (arg, 1)); | 3704 TREE_OPERAND (arg, 1)); |
3116 | 3705 |
3117 case TRUTH_ANDIF_EXPR: | 3706 case TRUTH_ANDIF_EXPR: |
3118 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0)); | 3707 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc); |
3119 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1)); | 3708 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc); |
3120 if (loc1 == UNKNOWN_LOCATION) | |
3121 loc1 = loc; | |
3122 if (loc2 == UNKNOWN_LOCATION) | |
3123 loc2 = loc; | |
3124 return build2_loc (loc, TRUTH_ORIF_EXPR, type, | 3709 return build2_loc (loc, TRUTH_ORIF_EXPR, type, |
3125 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)), | 3710 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)), |
3126 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1))); | 3711 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1))); |
3127 | 3712 |
3128 case TRUTH_ORIF_EXPR: | 3713 case TRUTH_ORIF_EXPR: |
3129 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0)); | 3714 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc); |
3130 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1)); | 3715 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc); |
3131 if (loc1 == UNKNOWN_LOCATION) | |
3132 loc1 = loc; | |
3133 if (loc2 == UNKNOWN_LOCATION) | |
3134 loc2 = loc; | |
3135 return build2_loc (loc, TRUTH_ANDIF_EXPR, type, | 3716 return build2_loc (loc, TRUTH_ANDIF_EXPR, type, |
3136 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)), | 3717 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)), |
3137 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1))); | 3718 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1))); |
3138 | 3719 |
3139 case TRUTH_NOT_EXPR: | 3720 case TRUTH_NOT_EXPR: |
3142 case COND_EXPR: | 3723 case COND_EXPR: |
3143 { | 3724 { |
3144 tree arg1 = TREE_OPERAND (arg, 1); | 3725 tree arg1 = TREE_OPERAND (arg, 1); |
3145 tree arg2 = TREE_OPERAND (arg, 2); | 3726 tree arg2 = TREE_OPERAND (arg, 2); |
3146 | 3727 |
3147 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1)); | 3728 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc); |
3148 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2)); | 3729 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc); |
3149 if (loc1 == UNKNOWN_LOCATION) | |
3150 loc1 = loc; | |
3151 if (loc2 == UNKNOWN_LOCATION) | |
3152 loc2 = loc; | |
3153 | 3730 |
3154 /* A COND_EXPR may have a throw as one operand, which | 3731 /* A COND_EXPR may have a throw as one operand, which |
3155 then has void type. Just leave void operands | 3732 then has void type. Just leave void operands |
3156 as they are. */ | 3733 as they are. */ |
3157 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0), | 3734 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0), |
3160 VOID_TYPE_P (TREE_TYPE (arg2)) | 3737 VOID_TYPE_P (TREE_TYPE (arg2)) |
3161 ? arg2 : invert_truthvalue_loc (loc2, arg2)); | 3738 ? arg2 : invert_truthvalue_loc (loc2, arg2)); |
3162 } | 3739 } |
3163 | 3740 |
3164 case COMPOUND_EXPR: | 3741 case COMPOUND_EXPR: |
3165 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1)); | 3742 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc); |
3166 if (loc1 == UNKNOWN_LOCATION) | |
3167 loc1 = loc; | |
3168 return build2_loc (loc, COMPOUND_EXPR, type, | 3743 return build2_loc (loc, COMPOUND_EXPR, type, |
3169 TREE_OPERAND (arg, 0), | 3744 TREE_OPERAND (arg, 0), |
3170 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1))); | 3745 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1))); |
3171 | 3746 |
3172 case NON_LVALUE_EXPR: | 3747 case NON_LVALUE_EXPR: |
3173 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0)); | 3748 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc); |
3174 if (loc1 == UNKNOWN_LOCATION) | |
3175 loc1 = loc; | |
3176 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)); | 3749 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)); |
3177 | 3750 |
3178 CASE_CONVERT: | 3751 CASE_CONVERT: |
3179 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE) | 3752 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE) |
3180 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg); | 3753 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg); |
3181 | 3754 |
3182 /* ... fall through ... */ | 3755 /* fall through */ |
3183 | 3756 |
3184 case FLOAT_EXPR: | 3757 case FLOAT_EXPR: |
3185 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0)); | 3758 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc); |
3186 if (loc1 == UNKNOWN_LOCATION) | |
3187 loc1 = loc; | |
3188 return build1_loc (loc, TREE_CODE (arg), type, | 3759 return build1_loc (loc, TREE_CODE (arg), type, |
3189 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0))); | 3760 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0))); |
3190 | 3761 |
3191 case BIT_AND_EXPR: | 3762 case BIT_AND_EXPR: |
3192 if (!integer_onep (TREE_OPERAND (arg, 1))) | 3763 if (!integer_onep (TREE_OPERAND (arg, 1))) |
3195 | 3766 |
3196 case SAVE_EXPR: | 3767 case SAVE_EXPR: |
3197 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg); | 3768 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg); |
3198 | 3769 |
3199 case CLEANUP_POINT_EXPR: | 3770 case CLEANUP_POINT_EXPR: |
3200 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0)); | 3771 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc); |
3201 if (loc1 == UNKNOWN_LOCATION) | |
3202 loc1 = loc; | |
3203 return build1_loc (loc, CLEANUP_POINT_EXPR, type, | 3772 return build1_loc (loc, CLEANUP_POINT_EXPR, type, |
3204 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0))); | 3773 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0))); |
3205 | 3774 |
3206 default: | 3775 default: |
3207 return NULL_TREE; | 3776 return NULL_TREE; |
3208 } | 3777 } |
3209 } | 3778 } |
3210 | 3779 |
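The tcc_comparison arm of fold_truth_not_expr refuses to invert ordered float comparisons under -ftrapping-math unless invert_tree_comparison can pick an unordered code (HONOR_NANS). A standalone C illustration of the pitfall it avoids:

#include <math.h>
#include <stdio.h>

int
main (void)
{
  double x = NAN, y = 1.0;
  /* For integers, !(a < b) and (a >= b) are the same test, so the
     inversion can simply flip the comparison code.  With a NaN
     operand they differ: LT must invert to UNGE, not GE.  */
  printf ("!(x < y) = %d\n", !(x < y));   /* prints 1 */
  printf (" (x >= y) = %d\n", (x >= y));  /* prints 0 */
  return 0;
}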
3780 /* Fold the truth-negation of ARG. This never alters ARG itself. We | |
3781 assume that ARG is an operation that returns a truth value (0 or 1 | |
3782 for scalars, 0 or -1 for vectors). Return the folded expression if | |
3783 folding is successful. Otherwise, return NULL_TREE. */ | |
3784 | |
3785 static tree | |
3786 fold_invert_truthvalue (location_t loc, tree arg) | |
3787 { | |
3788 tree type = TREE_TYPE (arg); | |
3789 return fold_unary_loc (loc, VECTOR_TYPE_P (type) | |
3790 ? BIT_NOT_EXPR | |
3791 : TRUTH_NOT_EXPR, | |
3792 type, arg); | |
3793 } | |
3794 | |
3211 /* Return a simplified tree node for the truth-negation of ARG. This | 3795 /* Return a simplified tree node for the truth-negation of ARG. This |
3212 never alters ARG itself. We assume that ARG is an operation that | 3796 never alters ARG itself. We assume that ARG is an operation that |
3213 returns a truth value (0 or 1). | 3797 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */ |
3214 | |
3215 FIXME: one would think we would fold the result, but it causes | |
3216 problems with the dominator optimizer. */ | |
3217 | 3798 |
3218 tree | 3799 tree |
3219 invert_truthvalue_loc (location_t loc, tree arg) | 3800 invert_truthvalue_loc (location_t loc, tree arg) |
3220 { | 3801 { |
3221 tree tem; | |
3222 | |
3223 if (TREE_CODE (arg) == ERROR_MARK) | 3802 if (TREE_CODE (arg) == ERROR_MARK) |
3224 return arg; | 3803 return arg; |
3225 | 3804 |
3226 tem = fold_truth_not_expr (loc, arg); | 3805 tree type = TREE_TYPE (arg); |
3227 if (!tem) | 3806 return fold_build1_loc (loc, VECTOR_TYPE_P (type) |
3228 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg); | 3807 ? BIT_NOT_EXPR |
3229 | 3808 : TRUTH_NOT_EXPR, |
3230 return tem; | 3809 type, arg); |
3231 } | |
3232 | |
3233 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both | |
3234 operands are another bit-wise operation with a common input. If so, | |
3235 distribute the bit operations to save an operation and possibly two if | |
3236 constants are involved. For example, convert | |
3237 (A | B) & (A | C) into A | (B & C) | |
3238 Further simplification will occur if B and C are constants. | |
3239 | |
3240 If this optimization cannot be done, 0 will be returned. */ | |
3241 | |
3242 static tree | |
3243 distribute_bit_expr (location_t loc, enum tree_code code, tree type, | |
3244 tree arg0, tree arg1) | |
3245 { | |
3246 tree common; | |
3247 tree left, right; | |
3248 | |
3249 if (TREE_CODE (arg0) != TREE_CODE (arg1) | |
3250 || TREE_CODE (arg0) == code | |
3251 || (TREE_CODE (arg0) != BIT_AND_EXPR | |
3252 && TREE_CODE (arg0) != BIT_IOR_EXPR)) | |
3253 return 0; | |
3254 | |
3255 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)) | |
3256 { | |
3257 common = TREE_OPERAND (arg0, 0); | |
3258 left = TREE_OPERAND (arg0, 1); | |
3259 right = TREE_OPERAND (arg1, 1); | |
3260 } | |
3261 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0)) | |
3262 { | |
3263 common = TREE_OPERAND (arg0, 0); | |
3264 left = TREE_OPERAND (arg0, 1); | |
3265 right = TREE_OPERAND (arg1, 0); | |
3266 } | |
3267 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0)) | |
3268 { | |
3269 common = TREE_OPERAND (arg0, 1); | |
3270 left = TREE_OPERAND (arg0, 0); | |
3271 right = TREE_OPERAND (arg1, 1); | |
3272 } | |
3273 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0)) | |
3274 { | |
3275 common = TREE_OPERAND (arg0, 1); | |
3276 left = TREE_OPERAND (arg0, 0); | |
3277 right = TREE_OPERAND (arg1, 0); | |
3278 } | |
3279 else | |
3280 return 0; | |
3281 | |
3282 common = fold_convert_loc (loc, type, common); | |
3283 left = fold_convert_loc (loc, type, left); | |
3284 right = fold_convert_loc (loc, type, right); | |
3285 return fold_build2_loc (loc, TREE_CODE (arg0), type, common, | |
3286 fold_build2_loc (loc, code, type, left, right)); | |
3287 } | |
3288 | |
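distribute_bit_expr (still present on the old side here) relies on the distribution laws of AND over OR and vice versa. An exhaustive one-byte check of the identity quoted in its comment:

#include <assert.h>

int
main (void)
{
  /* (A | B) & (A | C) == A | (B & C): one operation saved, and two
     more if B and C are constants that subsequently fold.  */
  for (unsigned a = 0; a < 256; a++)
    for (unsigned b = 0; b < 256; b++)
      for (unsigned c = 0; c < 256; c++)
        assert (((a | b) & (a | c)) == (a | (b & c)));
  return 0;
}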
3289 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation | |
3290 with code CODE. This optimization is unsafe. */ | |
3291 static tree | |
3292 distribute_real_division (location_t loc, enum tree_code code, tree type, | |
3293 tree arg0, tree arg1) | |
3294 { | |
3295 bool mul0 = TREE_CODE (arg0) == MULT_EXPR; | |
3296 bool mul1 = TREE_CODE (arg1) == MULT_EXPR; | |
3297 | |
3298 /* (A / C) +- (B / C) -> (A +- B) / C. */ | |
3299 if (mul0 == mul1 | |
3300 && operand_equal_p (TREE_OPERAND (arg0, 1), | |
3301 TREE_OPERAND (arg1, 1), 0)) | |
3302 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type, | |
3303 fold_build2_loc (loc, code, type, | |
3304 TREE_OPERAND (arg0, 0), | |
3305 TREE_OPERAND (arg1, 0)), | |
3306 TREE_OPERAND (arg0, 1)); | |
3307 | |
3308 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */ | |
3309 if (operand_equal_p (TREE_OPERAND (arg0, 0), | |
3310 TREE_OPERAND (arg1, 0), 0) | |
3311 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST | |
3312 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST) | |
3313 { | |
3314 REAL_VALUE_TYPE r0, r1; | |
3315 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1)); | |
3316 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1)); | |
3317 if (!mul0) | |
3318 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0); | |
3319 if (!mul1) | |
3320 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1); | |
3321 real_arithmetic (&r0, code, &r0, &r1); | |
3322 return fold_build2_loc (loc, MULT_EXPR, type, | |
3323 TREE_OPERAND (arg0, 0), | |
3324 build_real (type, r0)); | |
3325 } | |
3326 | |
3327 return NULL_TREE; | |
3328 } | 3810 } |
3329 | 3811 |
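The old-side distribute_real_division is flagged unsafe because (A / C) + (B / C) and (A + B) / C can round differently; it is only valid under unsafe-math options. A standalone demonstration of the discrepancy:

#include <stdio.h>

int
main (void)
{
  double a = 1.0, b = 2.0, c = 10.0;
  double lhs = a / c + b / c;  /* 0.1 + 0.2 == 0.30000000000000004  */
  double rhs = (a + b) / c;    /* 3.0 / 10  == 0.29999999999999999  */
  printf ("lhs = %.17g\nrhs = %.17g\nequal: %d\n", lhs, rhs, lhs == rhs);
  return 0;
}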
3330 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER | 3812 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER |
3331 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */ | 3813 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero |
3814 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER | |
3815 is the original memory reference used to preserve the alias set of | |
3816 the access. */ | |
3332 | 3817 |
3333 static tree | 3818 static tree |
3334 make_bit_field_ref (location_t loc, tree inner, tree type, | 3819 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type, |
3335 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp) | 3820 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, |
3821 int unsignedp, int reversep) | |
3336 { | 3822 { |
3337 tree result, bftype; | 3823 tree result, bftype; |
3338 | 3824 |
3339 if (bitpos == 0) | 3825 /* Attempt not to lose the access path if possible. */ |
3826 if (TREE_CODE (orig_inner) == COMPONENT_REF) | |
3827 { | |
3828 tree ninner = TREE_OPERAND (orig_inner, 0); | |
3829 machine_mode nmode; | |
3830 HOST_WIDE_INT nbitsize, nbitpos; | |
3831 tree noffset; | |
3832 int nunsignedp, nreversep, nvolatilep = 0; | |
3833 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos, | |
3834 &noffset, &nmode, &nunsignedp, | |
3835 &nreversep, &nvolatilep); | |
3836 if (base == inner | |
3837 && noffset == NULL_TREE | |
3838 && nbitsize >= bitsize | |
3839 && nbitpos <= bitpos | |
3840 && bitpos + bitsize <= nbitpos + nbitsize | |
3841 && !reversep | |
3842 && !nreversep | |
3843 && !nvolatilep) | |
3844 { | |
3845 inner = ninner; | |
3846 bitpos -= nbitpos; | |
3847 } | |
3848 } | |
3849 | |
3850 alias_set_type iset = get_alias_set (orig_inner); | |
3851 if (iset == 0 && get_alias_set (inner) != iset) | |
3852 inner = fold_build2 (MEM_REF, TREE_TYPE (inner), | |
3853 build_fold_addr_expr (inner), | |
3854 build_int_cst (ptr_type_node, 0)); | |
3855 | |
3856 if (bitpos == 0 && !reversep) | |
3340 { | 3857 { |
3341 tree size = TYPE_SIZE (TREE_TYPE (inner)); | 3858 tree size = TYPE_SIZE (TREE_TYPE (inner)); |
3342 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner)) | 3859 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner)) |
3343 || POINTER_TYPE_P (TREE_TYPE (inner))) | 3860 || POINTER_TYPE_P (TREE_TYPE (inner))) |
3344 && host_integerp (size, 0) | 3861 && tree_fits_shwi_p (size) |
3345 && tree_low_cst (size, 0) == bitsize) | 3862 && tree_to_shwi (size) == bitsize) |
3346 return fold_convert_loc (loc, type, inner); | 3863 return fold_convert_loc (loc, type, inner); |
3347 } | 3864 } |
3348 | 3865 |
3349 bftype = type; | 3866 bftype = type; |
3350 if (TYPE_PRECISION (bftype) != bitsize | 3867 if (TYPE_PRECISION (bftype) != bitsize |
3351 || TYPE_UNSIGNED (bftype) == !unsignedp) | 3868 || TYPE_UNSIGNED (bftype) == !unsignedp) |
3352 bftype = build_nonstandard_integer_type (bitsize, 0); | 3869 bftype = build_nonstandard_integer_type (bitsize, 0); |
3353 | 3870 |
3354 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner, | 3871 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner, |
3355 size_int (bitsize), bitsize_int (bitpos)); | 3872 bitsize_int (bitsize), bitsize_int (bitpos)); |
3873 REF_REVERSE_STORAGE_ORDER (result) = reversep; | |
3356 | 3874 |
3357 if (bftype != type) | 3875 if (bftype != type) |
3358 result = fold_convert_loc (loc, type, result); | 3876 result = fold_convert_loc (loc, type, result); |
3359 | 3877 |
3360 return result; | 3878 return result; |
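On a little-endian integer, the BIT_FIELD_REF <inner, bitsize, bitpos> built by make_bit_field_ref corresponds to a shift-and-mask extraction. A standalone sketch of that meaning, including the bitpos == 0 fast path where a plain conversion suffices:

#include <assert.h>
#include <stdint.h>

/* Extract BITSIZE bits starting at bit BITPOS of X.  */
static uint32_t
bit_field_ref (uint32_t x, unsigned bitsize, unsigned bitpos)
{
  uint64_t mask = ((uint64_t) 1 << bitsize) - 1;  /* BITSIZE one bits */
  return (uint32_t) ((x >> bitpos) & mask);
}

int
main (void)
{
  uint32_t x = 0xdeadbeef;
  assert (bit_field_ref (x, 8, 8) == 0xbe);  /* bits 8..15 */
  assert (bit_field_ref (x, 32, 0) == x);    /* whole-object fast path */
  return 0;
}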
3384 optimize_bit_field_compare (location_t loc, enum tree_code code, | 3902 optimize_bit_field_compare (location_t loc, enum tree_code code, |
3385 tree compare_type, tree lhs, tree rhs) | 3903 tree compare_type, tree lhs, tree rhs) |
3386 { | 3904 { |
3387 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize; | 3905 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize; |
3388 tree type = TREE_TYPE (lhs); | 3906 tree type = TREE_TYPE (lhs); |
3389 tree signed_type, unsigned_type; | 3907 tree unsigned_type; |
3390 int const_p = TREE_CODE (rhs) == INTEGER_CST; | 3908 int const_p = TREE_CODE (rhs) == INTEGER_CST; |
3391 enum machine_mode lmode, rmode, nmode; | 3909 machine_mode lmode, rmode; |
3910 scalar_int_mode nmode; | |
3392 int lunsignedp, runsignedp; | 3911 int lunsignedp, runsignedp; |
3912 int lreversep, rreversep; | |
3393 int lvolatilep = 0, rvolatilep = 0; | 3913 int lvolatilep = 0, rvolatilep = 0; |
3394 tree linner, rinner = NULL_TREE; | 3914 tree linner, rinner = NULL_TREE; |
3395 tree mask; | 3915 tree mask; |
3396 tree offset; | 3916 tree offset; |
3397 | 3917 |
3399 if the same as the size of the underlying object, we aren't doing an | 3919 if the same as the size of the underlying object, we aren't doing an |
3400 extraction at all and so can do nothing. We also don't want to | 3920 extraction at all and so can do nothing. We also don't want to |
3401 do anything if the inner expression is a PLACEHOLDER_EXPR since we | 3921 do anything if the inner expression is a PLACEHOLDER_EXPR since we |
3402 then will no longer be able to replace it. */ | 3922 then will no longer be able to replace it. */ |
3403 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode, | 3923 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode, |
3404 &lunsignedp, &lvolatilep, false); | 3924 &lunsignedp, &lreversep, &lvolatilep); |
3405 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0 | 3925 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0 |
3406 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR) | 3926 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep) |
3407 return 0; | 3927 return 0; |
3408 | 3928 |
3409 if (!const_p) | 3929 if (const_p) |
3930 rreversep = lreversep; | |
3931 else | |
3410 { | 3932 { |
3411 /* If this is not a constant, we can only do something if bit positions, | 3933 /* If this is not a constant, we can only do something if bit positions, |
3412 sizes, and signedness are the same. */ | 3934 sizes, signedness and storage order are the same. */ |
3413 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode, | 3935 rinner |
3414 &runsignedp, &rvolatilep, false); | 3936 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode, |
3937 &runsignedp, &rreversep, &rvolatilep); | |
3415 | 3938 |
3416 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize | 3939 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize |
3417 || lunsignedp != runsignedp || offset != 0 | 3940 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0 |
3418 || TREE_CODE (rinner) == PLACEHOLDER_EXPR) | 3941 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep) |
3419 return 0; | 3942 return 0; |
3420 } | 3943 } |
3421 | 3944 |
3945 /* Honor the C++ memory model and mimic what RTL expansion does. */ | |
3946 unsigned HOST_WIDE_INT bitstart = 0; | |
3947 unsigned HOST_WIDE_INT bitend = 0; | |
3948 if (TREE_CODE (lhs) == COMPONENT_REF) | |
3949 { | |
3950 get_bit_range (&bitstart, &bitend, lhs, &lbitpos, &offset); | |
3951 if (offset != NULL_TREE) | |
3952 return 0; | |
3953 } | |
3954 | |
3422 /* See if we can find a mode to refer to this field. We should be able to, | 3955 /* See if we can find a mode to refer to this field. We should be able to, |
3423 but fail if we can't. */ | 3956 but fail if we can't. */ |
3424 if (lvolatilep | 3957 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend, |
3425 && GET_MODE_BITSIZE (lmode) > 0 | 3958 const_p ? TYPE_ALIGN (TREE_TYPE (linner)) |
3426 && flag_strict_volatile_bitfields > 0) | 3959 : MIN (TYPE_ALIGN (TREE_TYPE (linner)), |
3427 nmode = lmode; | 3960 TYPE_ALIGN (TREE_TYPE (rinner))), |
3428 else | 3961 BITS_PER_WORD, false, &nmode)) |
3429 nmode = get_best_mode (lbitsize, lbitpos, | |
3430 const_p ? TYPE_ALIGN (TREE_TYPE (linner)) | |
3431 : MIN (TYPE_ALIGN (TREE_TYPE (linner)), | |
3432 TYPE_ALIGN (TREE_TYPE (rinner))), | |
3433 word_mode, lvolatilep || rvolatilep); | |
3434 if (nmode == VOIDmode) | |
3435 return 0; | 3962 return 0; |
3436 | 3963 |
3437 /* Set signed and unsigned types of the precision of this mode for the | 3964 /* Set signed and unsigned types of the precision of this mode for the |
3438 shifts below. */ | 3965 shifts below. */ |
3439 signed_type = lang_hooks.types.type_for_mode (nmode, 0); | |
3440 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1); | 3966 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1); |
3441 | 3967 |
3442 /* Compute the bit position and size for the new reference and our offset | 3968 /* Compute the bit position and size for the new reference and our offset |
3443 within it. If the new reference is the same size as the original, we | 3969 within it. If the new reference is the same size as the original, we |
3444 won't optimize anything, so return zero. */ | 3970 won't optimize anything, so return zero. */ |
3446 nbitpos = lbitpos & ~ (nbitsize - 1); | 3972 nbitpos = lbitpos & ~ (nbitsize - 1); |
3447 lbitpos -= nbitpos; | 3973 lbitpos -= nbitpos; |
3448 if (nbitsize == lbitsize) | 3974 if (nbitsize == lbitsize) |
3449 return 0; | 3975 return 0; |
3450 | 3976 |
3451 if (BYTES_BIG_ENDIAN) | 3977 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN) |
3452 lbitpos = nbitsize - lbitsize - lbitpos; | 3978 lbitpos = nbitsize - lbitsize - lbitpos; |
3453 | 3979 |
3454 /* Make the mask to be used against the extracted field. */ | 3980 /* Make the mask to be used against the extracted field. */ |
3455 mask = build_int_cst_type (unsigned_type, -1); | 3981 mask = build_int_cst_type (unsigned_type, -1); |
3456 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize)); | 3982 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize)); |
3457 mask = const_binop (RSHIFT_EXPR, mask, | 3983 mask = const_binop (RSHIFT_EXPR, mask, |
3458 size_int (nbitsize - lbitsize - lbitpos)); | 3984 size_int (nbitsize - lbitsize - lbitpos)); |
3459 | 3985 |
3460 if (! const_p) | 3986 if (! const_p) |
3461 /* If not comparing with constant, just rework the comparison | 3987 { |
3462 and return. */ | 3988 if (nbitpos < 0) |
3463 return fold_build2_loc (loc, code, compare_type, | 3989 return 0; |
3464 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, | 3990 |
3465 make_bit_field_ref (loc, linner, | 3991 /* If not comparing with constant, just rework the comparison |
3466 unsigned_type, | 3992 and return. */ |
3467 nbitsize, nbitpos, | 3993 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type, |
3468 1), | 3994 nbitsize, nbitpos, 1, lreversep); |
3469 mask), | 3995 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask); |
3470 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, | 3996 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type, |
3471 make_bit_field_ref (loc, rinner, | 3997 nbitsize, nbitpos, 1, rreversep); |
3472 unsigned_type, | 3998 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask); |
3473 nbitsize, nbitpos, | 3999 return fold_build2_loc (loc, code, compare_type, t1, t2); |
3474 1), | 4000 } |
3475 mask)); | 4001 |
3476 | 4002 /* Otherwise, we are handling the constant case. See if the constant is too |
3477 /* Otherwise, we are handling the constant case. See if the constant is too | 4003 big for the field. Warn and return a tree for 0 (false) if so. We do |
3478 big for the field. Warn and return a tree of for 0 (false) if so. We do | |
3479 this not only for its own sake, but to avoid having to test for this | 4004 this not only for its own sake, but to avoid having to test for this |
3480 error case below. If we didn't, we might generate wrong code. | 4005 error case below. If we didn't, we might generate wrong code. |
3481 | 4006 |
3482 For unsigned fields, the constant shifted right by the field length should | 4007 For unsigned fields, the constant shifted right by the field length should |
3483 be all zero. For signed fields, the high-order bits should agree with | 4008 be all zero. For signed fields, the high-order bits should agree with |
3484 the sign bit. */ | 4009 the sign bit. */ |
3485 | 4010 |
3486 if (lunsignedp) | 4011 if (lunsignedp) |
3487 { | 4012 { |
3488 if (! integer_zerop (const_binop (RSHIFT_EXPR, | 4013 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0) |
3489 fold_convert_loc (loc, | |
3490 unsigned_type, rhs), | |
3491 size_int (lbitsize)))) | |
3492 { | 4014 { |
3493 warning (0, "comparison is always %d due to width of bit-field", | 4015 warning (0, "comparison is always %d due to width of bit-field", |
3494 code == NE_EXPR); | 4016 code == NE_EXPR); |
3495 return constant_boolean_node (code == NE_EXPR, compare_type); | 4017 return constant_boolean_node (code == NE_EXPR, compare_type); |
3496 } | 4018 } |
3497 } | 4019 } |
3498 else | 4020 else |
3499 { | 4021 { |
3500 tree tem = const_binop (RSHIFT_EXPR, | 4022 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1); |
3501 fold_convert_loc (loc, signed_type, rhs), | 4023 if (tem != 0 && tem != -1) |
3502 size_int (lbitsize - 1)); | |
3503 if (! integer_zerop (tem) && ! integer_all_onesp (tem)) | |
3504 { | 4024 { |
3505 warning (0, "comparison is always %d due to width of bit-field", | 4025 warning (0, "comparison is always %d due to width of bit-field", |
3506 code == NE_EXPR); | 4026 code == NE_EXPR); |
3507 return constant_boolean_node (code == NE_EXPR, compare_type); | 4027 return constant_boolean_node (code == NE_EXPR, compare_type); |
3508 } | 4028 } |
3509 } | 4029 } |
3510 | 4030 |
4031 if (nbitpos < 0) | |
4032 return 0; | |
4033 | |
3511 /* Single-bit compares should always be against zero. */ | 4034 /* Single-bit compares should always be against zero. */ |
3512 if (lbitsize == 1 && ! integer_zerop (rhs)) | 4035 if (lbitsize == 1 && ! integer_zerop (rhs)) |
3513 { | 4036 { |
3514 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR; | 4037 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR; |
3515 rhs = build_int_cst (type, 0); | 4038 rhs = build_int_cst (type, 0); |
3516 } | 4039 } |
3517 | 4040 |
3518 /* Make a new bitfield reference, shift the constant over the | 4041 /* Make a new bitfield reference, shift the constant over the |
3519 appropriate number of bits and mask it with the computed mask | 4042 appropriate number of bits and mask it with the computed mask |
3520 (in case this was a signed field). If we changed it, make a new one. */ | 4043 (in case this was a signed field). If we changed it, make a new one. */ |
3521 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1); | 4044 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type, |
3522 if (lvolatilep) | 4045 nbitsize, nbitpos, 1, lreversep); |
3523 { | |
3524 TREE_SIDE_EFFECTS (lhs) = 1; | |
3525 TREE_THIS_VOLATILE (lhs) = 1; | |
3526 } | |
3527 | 4046 |
3528 rhs = const_binop (BIT_AND_EXPR, | 4047 rhs = const_binop (BIT_AND_EXPR, |
3529 const_binop (LSHIFT_EXPR, | 4048 const_binop (LSHIFT_EXPR, |
3530 fold_convert_loc (loc, unsigned_type, rhs), | 4049 fold_convert_loc (loc, unsigned_type, rhs), |
3531 size_int (lbitpos)), | 4050 size_int (lbitpos)), |
3534 lhs = build2_loc (loc, code, compare_type, | 4053 lhs = build2_loc (loc, code, compare_type, |
3535 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs); | 4054 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs); |
3536 return lhs; | 4055 return lhs; |
3537 } | 4056 } |
3538 | 4057 |
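In source-level terms, optimize_bit_field_compare turns a bit-field equality test into one load of the containing unit, a mask, and a compare against the shifted constant (see the mask built from the two shifts above). A hedged standalone sketch; bit-field layout is ABI-specific, and this assumes the usual little-endian allocation such as the x86-64 psABI:

#include <assert.h>
#include <stdint.h>
#include <string.h>

struct s { unsigned a : 2; unsigned f : 3; unsigned b : 3; };

int
main (void)
{
  struct s v = { 1, 5, 2 };
  uint8_t word;
  memcpy (&word, &v, 1);                /* load the containing unit */
  unsigned lbitpos = 2, lbitsize = 3;   /* where F lives, by assumption */
  uint8_t mask = (uint8_t) (((1u << lbitsize) - 1) << lbitpos);
  /* v.f == 5  <=>  (word & mask) == (5 << lbitpos)  */
  assert ((v.f == 5) == ((word & mask) == (5u << lbitpos)));
  return 0;
}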
3539 /* Subroutine for fold_truthop: decode a field reference. | 4058 /* Subroutine for fold_truth_andor_1: decode a field reference. |
3540 | 4059 |
3541 If EXP is a comparison reference, we return the innermost reference. | 4060 If EXP is a comparison reference, we return the innermost reference. |
3542 | 4061 |
3543 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is | 4062 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is |
3544 set to the starting bit number. | 4063 set to the starting bit number. |
3549 *PVOLATILEP is set to 1 if any expression encountered is volatile; | 4068 *PVOLATILEP is set to 1 if any expression encountered is volatile; |
3550 otherwise it is not changed. | 4069 otherwise it is not changed. |
3551 | 4070 |
3552 *PUNSIGNEDP is set to the signedness of the field. | 4071 *PUNSIGNEDP is set to the signedness of the field. |
3553 | 4072 |
4073 *PREVERSEP is set to the storage order of the field. | |
4074 | |
3554 *PMASK is set to the mask used. This is either contained in a | 4075 *PMASK is set to the mask used. This is either contained in a |
3555 BIT_AND_EXPR or derived from the width of the field. | 4076 BIT_AND_EXPR or derived from the width of the field. |
3556 | 4077 |
3557 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any. | 4078 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any. |
3558 | 4079 |
3559 Return 0 if this is not a component reference or is one that we can't | 4080 Return 0 if this is not a component reference or is one that we can't |
3560 do anything with. */ | 4081 do anything with. */ |
3561 | 4082 |
3562 static tree | 4083 static tree |
3563 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize, | 4084 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize, |
3564 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode, | 4085 HOST_WIDE_INT *pbitpos, machine_mode *pmode, |
3565 int *punsignedp, int *pvolatilep, | 4086 int *punsignedp, int *preversep, int *pvolatilep, |
3566 tree *pmask, tree *pand_mask) | 4087 tree *pmask, tree *pand_mask) |
3567 { | 4088 { |
4089 tree exp = *exp_; | |
3568 tree outer_type = 0; | 4090 tree outer_type = 0; |
3569 tree and_mask = 0; | 4091 tree and_mask = 0; |
3570 tree mask, inner, offset; | 4092 tree mask, inner, offset; |
3571 tree unsigned_type; | 4093 tree unsigned_type; |
3572 unsigned int precision; | 4094 unsigned int precision; |
3593 if (TREE_CODE (and_mask) != INTEGER_CST) | 4115 if (TREE_CODE (and_mask) != INTEGER_CST) |
3594 return 0; | 4116 return 0; |
3595 } | 4117 } |
3596 | 4118 |
3597 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode, | 4119 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode, |
3598 punsignedp, pvolatilep, false); | 4120 punsignedp, preversep, pvolatilep); |
3599 if ((inner == exp && and_mask == 0) | 4121 if ((inner == exp && and_mask == 0) |
3600 || *pbitsize < 0 || offset != 0 | 4122 || *pbitsize < 0 || offset != 0 |
3601 || TREE_CODE (inner) == PLACEHOLDER_EXPR) | 4123 || TREE_CODE (inner) == PLACEHOLDER_EXPR |
4124 /* Reject out-of-bound accesses (PR79731). */ | |
4125 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner)) | |
4126 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)), | |
4127 *pbitpos + *pbitsize) < 0)) | |
3602 return 0; | 4128 return 0; |
4129 | |
4130 *exp_ = exp; | |
3603 | 4131 |
3604 /* If the number of bits in the reference is the same as the bitsize of | 4132 /* If the number of bits in the reference is the same as the bitsize of |
3605 the outer type, then the outer type gives the signedness. Otherwise | 4133 the outer type, then the outer type gives the signedness. Otherwise |
3606 (in case of a small bitfield) the signedness is unchanged. */ | 4134 (in case of a small bitfield) the signedness is unchanged. */ |
3607 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type)) | 4135 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type)) |
3625 *pand_mask = and_mask; | 4153 *pand_mask = and_mask; |
3626 return inner; | 4154 return inner; |
3627 } | 4155 } |
3628 | 4156 |
3629 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order | 4157 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order |
3630 bit positions. | 4158 bit positions and the type of MASK is signed. |
3631 | 4159 |
3632 static int | 4160 static int |
3633 all_ones_mask_p (const_tree mask, int size) | 4161 all_ones_mask_p (const_tree mask, unsigned int size) |
3634 { | 4162 { |
3635 tree type = TREE_TYPE (mask); | 4163 tree type = TREE_TYPE (mask); |
3636 unsigned int precision = TYPE_PRECISION (type); | 4164 unsigned int precision = TYPE_PRECISION (type); |
3637 tree tmask; | 4165 |
3638 | 4166 /* If this function returns true when the type of the mask is |
3639 tmask = build_int_cst_type (signed_type_for (type), -1); | 4167 UNSIGNED, then there will be errors. In particular see |
3640 | 4168 gcc.c-torture/execute/990326-1.c. There does not appear to be |
3641 return | 4169 any documentation paper trail as to why this is so. But the pre |
3642 tree_int_cst_equal (mask, | 4170 wide-int worked with that restriction and it has been preserved |
3643 const_binop (RSHIFT_EXPR, | 4171 here. */ |
3644 const_binop (LSHIFT_EXPR, tmask, | 4172 if (size > precision || TYPE_SIGN (type) == UNSIGNED) |
3645 size_int (precision - size)), | 4173 return false; |
3646 size_int (precision - size))); | 4174 |
4175 return wi::mask (size, false, precision) == wi::to_wide (mask); | |
3647 } | 4176 } |
3648 | 4177 |
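all_ones_mask_p now simply asks whether MASK equals wi::mask (size, false, precision), i.e. SIZE low-order one bits. The payoff of recognizing such a mask is that ANDing with it is a no-op whenever the value already fits in SIZE bits, as this small standalone check shows:

#include <assert.h>
#include <stdint.h>

int
main (void)
{
  unsigned size = 5;
  uint32_t mask = (1u << size) - 1;           /* 0b11111 */
  for (uint32_t x = 0; x < (1u << size); x++)
    assert ((x & mask) == x);                 /* fits: AND is redundant */
  assert (((1u << size) & mask) == 0);        /* does not fit: AND matters */
  return 0;
}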
3649 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that | 4178 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that |
3650 represents the sign bit of EXP's type. If EXP represents a sign | 4179 represents the sign bit of EXP's type. If EXP represents a sign |
3651 or zero extension, also test VAL against the unextended type. | 4180 or zero extension, also test VAL against the unextended type. |
3652 The return value is the (sub)expression whose sign bit is VAL, | 4181 The return value is the (sub)expression whose sign bit is VAL, |
3653 or NULL_TREE otherwise. */ | 4182 or NULL_TREE otherwise. */ |
3654 | 4183 |
3655 static tree | 4184 tree |
3656 sign_bit_p (tree exp, const_tree val) | 4185 sign_bit_p (tree exp, const_tree val) |
3657 { | 4186 { |
3658 unsigned HOST_WIDE_INT mask_lo, lo; | |
3659 HOST_WIDE_INT mask_hi, hi; | |
3660 int width; | 4187 int width; |
3661 tree t; | 4188 tree t; |
3662 | 4189 |
3663 /* Tree EXP must have an integral type. */ | 4190 /* Tree EXP must have an integral type. */ |
3664 t = TREE_TYPE (exp); | 4191 t = TREE_TYPE (exp); |
3669 if (TREE_CODE (val) != INTEGER_CST | 4196 if (TREE_CODE (val) != INTEGER_CST |
3670 || TREE_OVERFLOW (val)) | 4197 || TREE_OVERFLOW (val)) |
3671 return NULL_TREE; | 4198 return NULL_TREE; |
3672 | 4199 |
3673 width = TYPE_PRECISION (t); | 4200 width = TYPE_PRECISION (t); |
3674 if (width > HOST_BITS_PER_WIDE_INT) | 4201 if (wi::only_sign_bit_p (wi::to_wide (val), width)) |
3675 { | |
3676 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1); | |
3677 lo = 0; | |
3678 | |
3679 mask_hi = ((unsigned HOST_WIDE_INT) -1 | |
3680 >> (2 * HOST_BITS_PER_WIDE_INT - width)); | |
3681 mask_lo = -1; | |
3682 } | |
3683 else | |
3684 { | |
3685 hi = 0; | |
3686 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1); | |
3687 | |
3688 mask_hi = 0; | |
3689 mask_lo = ((unsigned HOST_WIDE_INT) -1 | |
3690 >> (HOST_BITS_PER_WIDE_INT - width)); | |
3691 } | |
3692 | |
3693 /* We mask off those bits beyond TREE_TYPE (exp) so that we can | |
3694 treat VAL as if it were unsigned. */ | |
3695 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi | |
3696 && (TREE_INT_CST_LOW (val) & mask_lo) == lo) | |
3697 return exp; | 4202 return exp; |
3698 | 4203 |
3699 /* Handle extension from a narrower type. */ | 4204 /* Handle extension from a narrower type. */ |
3700 if (TREE_CODE (exp) == NOP_EXPR | 4205 if (TREE_CODE (exp) == NOP_EXPR |
3701 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width) | 4206 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width) |
3702 return sign_bit_p (TREE_OPERAND (exp, 0), val); | 4207 return sign_bit_p (TREE_OPERAND (exp, 0), val); |
3703 | 4208 |
3704 return NULL_TREE; | 4209 return NULL_TREE; |
3705 } | 4210 } |
3706 | 4211 |
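sign_bit_p recognizes the constant 1 << (width - 1); testing that bit is exactly the sign test x < 0, which is the simplification its callers exploit. An exhaustive standalone check for an 8-bit type:

#include <assert.h>
#include <stdint.h>

int
main (void)
{
  for (int i = -128; i <= 127; i++)
    {
      int8_t x = (int8_t) i;
      /* (x & 0x80) != 0  <=>  x < 0  */
      assert ((((uint8_t) x & 0x80u) != 0) == (x < 0));
    }
  return 0;
}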
3707 /* Subroutine for fold_truthop: determine if an operand is simple enough | 4212 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough |
3708 to be evaluated unconditionally. */ | 4213 to be evaluated unconditionally. */ |
3709 | 4214 |
3710 static int | 4215 static int |
3711 simple_operand_p (const_tree exp) | 4216 simple_operand_p (const_tree exp) |
3712 { | 4217 { |
3713 /* Strip any conversions that don't change the machine mode. */ | 4218 /* Strip any conversions that don't change the machine mode. */ |
3714 STRIP_NOPS (exp); | 4219 STRIP_NOPS (exp); |
3715 | 4220 |
3716 return (CONSTANT_CLASS_P (exp) | 4221 return (CONSTANT_CLASS_P (exp) |
3717 || TREE_CODE (exp) == SSA_NAME | 4222 || TREE_CODE (exp) == SSA_NAME |
3718 || (DECL_P (exp) | 4223 || (DECL_P (exp) |
3719 && ! TREE_ADDRESSABLE (exp) | 4224 && ! TREE_ADDRESSABLE (exp) |
3720 && ! TREE_THIS_VOLATILE (exp) | 4225 && ! TREE_THIS_VOLATILE (exp) |
3721 && ! DECL_NONLOCAL (exp) | 4226 && ! DECL_NONLOCAL (exp) |
3722 /* Don't regard global variables as simple. They may be | 4227 /* Don't regard global variables as simple. They may be |
3723 allocated in ways unknown to the compiler (shared memory, | 4228 allocated in ways unknown to the compiler (shared memory, |
3724 #pragma weak, etc). */ | 4229 #pragma weak, etc). */ |
3725 && ! TREE_PUBLIC (exp) | 4230 && ! TREE_PUBLIC (exp) |
3726 && ! DECL_EXTERNAL (exp) | 4231 && ! DECL_EXTERNAL (exp) |
4232 /* Weakrefs are not safe to be read, since they can be NULL. | |
4233 They are !TREE_PUBLIC && !DECL_EXTERNAL but still | |
4234 have DECL_WEAK flag set. */ | |
4235 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp)) | |
3727 /* Loading a static variable is unduly expensive, but global | 4236 /* Loading a static variable is unduly expensive, but global |
3728 registers aren't expensive. */ | 4237 registers aren't expensive. */ |
3729 && (! TREE_STATIC (exp) || DECL_REGISTER (exp)))); | 4238 && (! TREE_STATIC (exp) || DECL_REGISTER (exp)))); |
3730 } | 4239 } |
4240 | |
4241 /* Subroutine for fold_truth_andor: determine if an operand is simple enough | |
4242 to be evaluated unconditionally. | |
4243 In addition to simple_operand_p, we assume that comparisons, conversions, | |
4244 and logic-not operations are simple, if their operands are simple, too. */ | |
4245 | |
4246 static bool | |
4247 simple_operand_p_2 (tree exp) | |
4248 { | |
4249 enum tree_code code; | |
4250 | |
4251 if (TREE_SIDE_EFFECTS (exp) | |
4252 || tree_could_trap_p (exp)) | |
4253 return false; | |
4254 | |
4255 while (CONVERT_EXPR_P (exp)) | |
4256 exp = TREE_OPERAND (exp, 0); | |
4257 | |
4258 code = TREE_CODE (exp); | |
4259 | |
4260 if (TREE_CODE_CLASS (code) == tcc_comparison) | |
4261 return (simple_operand_p (TREE_OPERAND (exp, 0)) | |
4262 && simple_operand_p (TREE_OPERAND (exp, 1))); | |
4263 | |
4264 if (code == TRUTH_NOT_EXPR) | |
4265 return simple_operand_p_2 (TREE_OPERAND (exp, 0)); | |
4266 | |
4267 return simple_operand_p (exp); | |
4268 } | |
4269 | |
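These predicates gate rewriting `a && b` as the branchless `a & b` in fold_truth_andor: the right-hand side may only be evaluated unconditionally if doing so cannot trap or run side effects. A standalone sketch of the hazard (well defined as written, since && short-circuits):

#include <stdio.h>

int
main (void)
{
  int *p = 0;
  int a = 0;
  if (a && *p)            /* *p is never evaluated: well defined */
    puts ("unreachable");
  /* Rewriting this as `a & *p` would dereference the null pointer
     unconditionally, exactly what tree_could_trap_p guards against.  */
  return 0;
}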
3731 | 4270 |
3732 /* The following functions are subroutines to fold_range_test and allow it to | 4271 /* The following functions are subroutines to fold_range_test and allow it to |
3733 try to change a logical combination of comparisons into a range test. | 4272 try to change a logical combination of comparisons into a range test. |
3734 | 4273 |
3735 For example, both | 4274 For example, both |
3821 } | 4360 } |
3822 | 4361 |
3823 return constant_boolean_node (result, type); | 4362 return constant_boolean_node (result, type); |
3824 } | 4363 } |
3825 | 4364 |
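The range machinery these helpers feed (fold_range_test and friends) collapses a pair of ordered comparisons into a single unsigned comparison against the range width. The classic instance, checked exhaustively:

#include <assert.h>

int
main (void)
{
  for (int c = 0; c < 256; c++)
    {
      int two_tests = (c >= 'a' && c <= 'z');
      int one_test = ((unsigned) (c - 'a') <= (unsigned) ('z' - 'a'));
      assert (two_tests == one_test);
    }
  return 0;
}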
4365 /* Helper routine for make_range. Perform one step for it, return | |
4366 new expression if the loop should continue or NULL_TREE if it should | |
4367 stop. */ | |
4368 | |
4369 tree | |
4370 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1, | |
4371 tree exp_type, tree *p_low, tree *p_high, int *p_in_p, | |
4372 bool *strict_overflow_p) | |
4373 { | |
4374 tree arg0_type = TREE_TYPE (arg0); | |
4375 tree n_low, n_high, low = *p_low, high = *p_high; | |
4376 int in_p = *p_in_p, n_in_p; | |
4377 | |
4378 switch (code) | |
4379 { | |
4380 case TRUTH_NOT_EXPR: | |
4381 /* We can only do something if the range is testing for zero. */ | |
4382 if (low == NULL_TREE || high == NULL_TREE | |
4383 || ! integer_zerop (low) || ! integer_zerop (high)) | |
4384 return NULL_TREE; | |
4385 *p_in_p = ! in_p; | |
4386 return arg0; | |
4387 | |
4388 case EQ_EXPR: case NE_EXPR: | |
4389 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR: | |
4390 /* We can only do something if the range is testing for zero | |
4391 and if the second operand is an integer constant. Note that | |
4392 saying something is "in" the range we make is done by | |
4393 complementing IN_P since it will be set in the initial case of | |
4394 being not equal to zero; "out" is leaving it alone. */ | |
4395 if (low == NULL_TREE || high == NULL_TREE | |
4396 || ! integer_zerop (low) || ! integer_zerop (high) | |
4397 || TREE_CODE (arg1) != INTEGER_CST) | |
4398 return NULL_TREE; | |
4399 | |
4400 switch (code) | |
4401 { | |
4402 case NE_EXPR: /* - [c, c] */ | |
4403 low = high = arg1; | |
4404 break; | |
4405 case EQ_EXPR: /* + [c, c] */ | |
4406 in_p = ! in_p, low = high = arg1; | |
4407 break; | |
4408 case GT_EXPR: /* - [-, c] */ | |
4409 low = 0, high = arg1; | |
4410 break; | |
4411 case GE_EXPR: /* + [c, -] */ | |
4412 in_p = ! in_p, low = arg1, high = 0; | |
4413 break; | |
4414 case LT_EXPR: /* - [c, -] */ | |
4415 low = arg1, high = 0; | |
4416 break; | |
4417 case LE_EXPR: /* + [-, c] */ | |
4418 in_p = ! in_p, low = 0, high = arg1; | |
4419 break; | |
4420 default: | |
4421 gcc_unreachable (); | |
4422 } | |
4423 | |
4424 /* If this is an unsigned comparison, we also know that EXP is | |
4425 greater than or equal to zero. We base the range tests we make | |
4426 on that fact, so we record it here so we can parse existing | |
4427 range tests. We test arg0_type since often the return type | |
4428 of, e.g. EQ_EXPR, is boolean. */ | |
4429 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0)) | |
4430 { | |
4431 if (! merge_ranges (&n_in_p, &n_low, &n_high, | |
4432 in_p, low, high, 1, | |
4433 build_int_cst (arg0_type, 0), | |
4434 NULL_TREE)) | |
4435 return NULL_TREE; | |
4436 | |
4437 in_p = n_in_p, low = n_low, high = n_high; | |
4438 | |
4439 /* If the high bound is missing, but we have a nonzero low | |
4440 bound, reverse the range so it goes from zero to the low bound | |
4441 minus 1. */ | |
4442 if (high == 0 && low && ! integer_zerop (low)) | |
4443 { | |
4444 in_p = ! in_p; | |
4445 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0, | |
4446 build_int_cst (TREE_TYPE (low), 1), 0); | |
4447 low = build_int_cst (arg0_type, 0); | |
4448 } | |
4449 } | |
4450 | |
4451 *p_low = low; | |
4452 *p_high = high; | |
4453 *p_in_p = in_p; | |
4454 return arg0; | |
4455 | |
4456 case NEGATE_EXPR: | |
4457 /* If flag_wrapv and ARG0_TYPE is signed, make sure | |
4458 low and high are non-NULL, then normalize will DTRT. */ | |
4459 if (!TYPE_UNSIGNED (arg0_type) | |
4460 && !TYPE_OVERFLOW_UNDEFINED (arg0_type)) | |
4461 { | |
4462 if (low == NULL_TREE) | |
4463 low = TYPE_MIN_VALUE (arg0_type); | |
4464 if (high == NULL_TREE) | |
4465 high = TYPE_MAX_VALUE (arg0_type); | |
4466 } | |
4467 | |
4468 /* (-x) IN [a,b] -> x in [-b, -a] */ | |
4469 n_low = range_binop (MINUS_EXPR, exp_type, | |
4470 build_int_cst (exp_type, 0), | |
4471 0, high, 1); | |
4472 n_high = range_binop (MINUS_EXPR, exp_type, | |
4473 build_int_cst (exp_type, 0), | |
4474 0, low, 0); | |
4475 if (n_high != 0 && TREE_OVERFLOW (n_high)) | |
4476 return NULL_TREE; | |
4477 goto normalize; | |
4478 | |
4479 case BIT_NOT_EXPR: | |
4480 /* ~ X -> -X - 1 */ | |
4481 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0), | |
4482 build_int_cst (exp_type, 1)); | |
4483 | |
4484 case PLUS_EXPR: | |
4485 case MINUS_EXPR: | |
4486 if (TREE_CODE (arg1) != INTEGER_CST) | |
4487 return NULL_TREE; | |
4488 | |
4489 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot | |
4490 move a constant to the other side. */ | |
4491 if (!TYPE_UNSIGNED (arg0_type) | |
4492 && !TYPE_OVERFLOW_UNDEFINED (arg0_type)) | |
4493 return NULL_TREE; | |
4494 | |
4495 /* If EXP is signed, any overflow in the computation is undefined, | |
4496 so we don't worry about it so long as our computations on | |
4497 the bounds don't overflow. For unsigned, overflow is defined | |
4498 and this is exactly the right thing. */ | |
4499 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR, | |
4500 arg0_type, low, 0, arg1, 0); | |
4501 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR, | |
4502 arg0_type, high, 1, arg1, 0); | |
4503 if ((n_low != 0 && TREE_OVERFLOW (n_low)) | |
4504 || (n_high != 0 && TREE_OVERFLOW (n_high))) | |
4505 return NULL_TREE; | |
4506 | |
4507 if (TYPE_OVERFLOW_UNDEFINED (arg0_type)) | |
4508 *strict_overflow_p = true; | |
4509 | |
4510 normalize: | |
4511 /* Check for an unsigned range which has wrapped around the maximum | |
4512 value thus making n_high < n_low, and normalize it. */ | |
4513 if (n_low && n_high && tree_int_cst_lt (n_high, n_low)) | |
4514 { | |
4515 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0, | |
4516 build_int_cst (TREE_TYPE (n_high), 1), 0); | |
4517 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0, | |
4518 build_int_cst (TREE_TYPE (n_low), 1), 0); | |
4519 | |
4520 /* If the range is of the form +/- [ x+1, x ], we won't | |
4521 be able to normalize it. But then, it represents the | |
4522 whole range or the empty set, so make it | |
4523 +/- [ -, - ]. */ | |
4524 if (tree_int_cst_equal (n_low, low) | |
4525 && tree_int_cst_equal (n_high, high)) | |
4526 low = high = 0; | |
4527 else | |
4528 in_p = ! in_p; | |
4529 } | |
4530 else | |
4531 low = n_low, high = n_high; | |
4532 | |
4533 *p_low = low; | |
4534 *p_high = high; | |
4535 *p_in_p = in_p; | |
4536 return arg0; | |
4537 | |
4538 CASE_CONVERT: | |
4539 case NON_LVALUE_EXPR: | |
4540 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type)) | |
4541 return NULL_TREE; | |
4542 | |
4543 if (! INTEGRAL_TYPE_P (arg0_type) | |
4544 || (low != 0 && ! int_fits_type_p (low, arg0_type)) | |
4545 || (high != 0 && ! int_fits_type_p (high, arg0_type))) | |
4546 return NULL_TREE; | |
4547 | |
4548 n_low = low, n_high = high; | |
4549 | |
4550 if (n_low != 0) | |
4551 n_low = fold_convert_loc (loc, arg0_type, n_low); | |
4552 | |
4553 if (n_high != 0) | |
4554 n_high = fold_convert_loc (loc, arg0_type, n_high); | |
4555 | |
4556 /* If we're converting arg0 from an unsigned type, to exp, | |
4557 a signed type, we will be doing the comparison as unsigned. | |
4558 The tests above have already verified that LOW and HIGH | |
4559 are both positive. | |
4560 | |
4561 So we have to ensure that we will handle large unsigned | |
4562 values the same way that the current signed bounds treat | |
4563 negative values. */ | |
4564 | |
4565 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type)) | |
4566 { | |
4567 tree high_positive; | |
4568 tree equiv_type; | |
4569 /* For fixed-point modes, we need to pass the saturating flag | |
4570 as the 2nd parameter. */ | |
4571 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type))) | |
4572 equiv_type | |
4573 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), | |
4574 TYPE_SATURATING (arg0_type)); | |
4575 else | |
4576 equiv_type | |
4577 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1); | |
4578 | |
4579 /* A range without an upper bound is, naturally, unbounded. | |
4580 Since convert would have cropped a very large value, use | |
4581 the max value for the destination type. */ | |
4582 high_positive | |
4583 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type) | |
4584 : TYPE_MAX_VALUE (arg0_type); | |
4585 | |
4586 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type)) | |
4587 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type, | |
4588 fold_convert_loc (loc, arg0_type, | |
4589 high_positive), | |
4590 build_int_cst (arg0_type, 1)); | |
4591 | |
4592 /* If the low bound is specified, "and" the range with the | |
4593 range for which the original unsigned value will be | |
4594 positive. */ | |
4595 if (low != 0) | |
4596 { | |
4597 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high, | |
4598 1, fold_convert_loc (loc, arg0_type, | |
4599 integer_zero_node), | |
4600 high_positive)) | |
4601 return NULL_TREE; | |
4602 | |
4603 in_p = (n_in_p == in_p); | |
4604 } | |
4605 else | |
4606 { | |
4607 /* Otherwise, "or" the range with the range of the input | |
4608 that will be interpreted as negative. */ | |
4609 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high, | |
4610 1, fold_convert_loc (loc, arg0_type, | |
4611 integer_zero_node), | |
4612 high_positive)) | |
4613 return NULL_TREE; | |
4614 | |
4615 in_p = (in_p != n_in_p); | |
4616 } | |
4617 } | |
4618 | |
4619 *p_low = n_low; | |
4620 *p_high = n_high; | |
4621 *p_in_p = in_p; | |
4622 return arg0; | |
4623 | |
4624 default: | |
4625 return NULL_TREE; | |
4626 } | |
4627 } | |
4628 | |
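To see the PLUS_EXPR and normalize cases of make_range_step on concrete numbers: on an unsigned 8-bit type, moving the constant across the comparison can wrap the bounds, and the normalize step then inverts the range. A sketch checking one such identity exhaustively (the constants are chosen for illustration and are not taken from the GCC sources):

    /* Illustration only: (unsigned char)(x + 6) <= 10 tests x against
       the wrapped range [250, 4]; make_range_step normalizes that to
       "x NOT in [5, 249]".  */
    #include <assert.h>

    int main (void)
    {
      for (unsigned v = 0; v < 256; v++)
        {
          unsigned char x = (unsigned char) v;
          int original = (unsigned char) (x + 6) <= 10;
          int normalized = ! (x >= 5 && x <= 249);
          assert (original == normalized);
        }
      return 0;
    }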
3826 /* Given EXP, a logical expression, set the range it is testing into | 4629 /* Given EXP, a logical expression, set the range it is testing into |
3827 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression | 4630 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression |
3828 actually being tested. *PLOW and *PHIGH will be made of the same | 4631 actually being tested. *PLOW and *PHIGH will be made of the same |
3829 type as the returned expression. If EXP is not a comparison, we | 4632 type as the returned expression. If EXP is not a comparison, we |
3830 will most likely not be returning a useful value and range. Set | 4633 will most likely not be returning a useful value and range. Set |
3835 tree | 4638 tree |
3836 make_range (tree exp, int *pin_p, tree *plow, tree *phigh, | 4639 make_range (tree exp, int *pin_p, tree *plow, tree *phigh, |
3837 bool *strict_overflow_p) | 4640 bool *strict_overflow_p) |
3838 { | 4641 { |
3839 enum tree_code code; | 4642 enum tree_code code; |
3840 tree arg0 = NULL_TREE, arg1 = NULL_TREE; | 4643 tree arg0, arg1 = NULL_TREE; |
3841 tree exp_type = NULL_TREE, arg0_type = NULL_TREE; | 4644 tree exp_type, nexp; |
3842 int in_p, n_in_p; | 4645 int in_p; |
3843 tree low, high, n_low, n_high; | 4646 tree low, high; |
3844 location_t loc = EXPR_LOCATION (exp); | 4647 location_t loc = EXPR_LOCATION (exp); |
3845 | 4648 |
3846 /* Start with simply saying "EXP != 0" and then look at the code of EXP | 4649 /* Start with simply saying "EXP != 0" and then look at the code of EXP |
3847 and see if we can refine the range. Some of the cases below may not | 4650 and see if we can refine the range. Some of the cases below may not |
3848 happen, but it doesn't seem worth worrying about this. We "continue" | 4651 happen, but it doesn't seem worth worrying about this. We "continue" |
3854 | 4657 |
3855 while (1) | 4658 while (1) |
3856 { | 4659 { |
3857 code = TREE_CODE (exp); | 4660 code = TREE_CODE (exp); |
3858 exp_type = TREE_TYPE (exp); | 4661 exp_type = TREE_TYPE (exp); |
4662 arg0 = NULL_TREE; | |
3859 | 4663 |
3860 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))) | 4664 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))) |
3861 { | 4665 { |
3862 if (TREE_OPERAND_LENGTH (exp) > 0) | 4666 if (TREE_OPERAND_LENGTH (exp) > 0) |
3863 arg0 = TREE_OPERAND (exp, 0); | 4667 arg0 = TREE_OPERAND (exp, 0); |
3864 if (TREE_CODE_CLASS (code) == tcc_comparison | |
3865 || TREE_CODE_CLASS (code) == tcc_unary | |
3866 || TREE_CODE_CLASS (code) == tcc_binary) | |
3867 arg0_type = TREE_TYPE (arg0); | |
3868 if (TREE_CODE_CLASS (code) == tcc_binary | 4668 if (TREE_CODE_CLASS (code) == tcc_binary |
3869 || TREE_CODE_CLASS (code) == tcc_comparison | 4669 || TREE_CODE_CLASS (code) == tcc_comparison |
3870 || (TREE_CODE_CLASS (code) == tcc_expression | 4670 || (TREE_CODE_CLASS (code) == tcc_expression |
3871 && TREE_OPERAND_LENGTH (exp) > 1)) | 4671 && TREE_OPERAND_LENGTH (exp) > 1)) |
3872 arg1 = TREE_OPERAND (exp, 1); | 4672 arg1 = TREE_OPERAND (exp, 1); |
3873 } | 4673 } |
3874 | 4674 if (arg0 == NULL_TREE) |
3875 switch (code) | 4675 break; |
3876 { | 4676 |
3877 case TRUTH_NOT_EXPR: | 4677 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low, |
3878 in_p = ! in_p, exp = arg0; | 4678 &high, &in_p, strict_overflow_p); |
3879 continue; | 4679 if (nexp == NULL_TREE) |
3880 | 4680 break; |
3881 case EQ_EXPR: case NE_EXPR: | 4681 exp = nexp; |
3882 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR: | |
3883 /* We can only do something if the range is testing for zero | |
3884 and if the second operand is an integer constant. Note that | |
3885 saying something is "in" the range we make is done by | |
3886 complementing IN_P since it will be set in the initial case of | |
3887 being not equal to zero; "out" is leaving it alone. */ | |
3888 if (low == 0 || high == 0 | |
3889 || ! integer_zerop (low) || ! integer_zerop (high) | |
3890 || TREE_CODE (arg1) != INTEGER_CST) | |
3891 break; | |
3892 | |
3893 switch (code) | |
3894 { | |
3895 case NE_EXPR: /* - [c, c] */ | |
3896 low = high = arg1; | |
3897 break; | |
3898 case EQ_EXPR: /* + [c, c] */ | |
3899 in_p = ! in_p, low = high = arg1; | |
3900 break; | |
3901 case GT_EXPR: /* - [-, c] */ | |
3902 low = 0, high = arg1; | |
3903 break; | |
3904 case GE_EXPR: /* + [c, -] */ | |
3905 in_p = ! in_p, low = arg1, high = 0; | |
3906 break; | |
3907 case LT_EXPR: /* - [c, -] */ | |
3908 low = arg1, high = 0; | |
3909 break; | |
3910 case LE_EXPR: /* + [-, c] */ | |
3911 in_p = ! in_p, low = 0, high = arg1; | |
3912 break; | |
3913 default: | |
3914 gcc_unreachable (); | |
3915 } | |
3916 | |
3917 /* If this is an unsigned comparison, we also know that EXP is | |
3918 greater than or equal to zero. We base the range tests we make | |
3919 on that fact, so we record it here so we can parse existing | |
3920 range tests. We test arg0_type since often the return type | |
3921 of, e.g. EQ_EXPR, is boolean. */ | |
3922 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0)) | |
3923 { | |
3924 if (! merge_ranges (&n_in_p, &n_low, &n_high, | |
3925 in_p, low, high, 1, | |
3926 build_int_cst (arg0_type, 0), | |
3927 NULL_TREE)) | |
3928 break; | |
3929 | |
3930 in_p = n_in_p, low = n_low, high = n_high; | |
3931 | |
3932 /* If the high bound is missing, but we have a nonzero low | |
3933 bound, reverse the range so it goes from zero to the low bound | |
3934 minus 1. */ | |
3935 if (high == 0 && low && ! integer_zerop (low)) | |
3936 { | |
3937 in_p = ! in_p; | |
3938 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0, | |
3939 integer_one_node, 0); | |
3940 low = build_int_cst (arg0_type, 0); | |
3941 } | |
3942 } | |
3943 | |
3944 exp = arg0; | |
3945 continue; | |
3946 | |
3947 case NEGATE_EXPR: | |
3948 /* (-x) IN [a,b] -> x in [-b, -a] */ | |
3949 n_low = range_binop (MINUS_EXPR, exp_type, | |
3950 build_int_cst (exp_type, 0), | |
3951 0, high, 1); | |
3952 n_high = range_binop (MINUS_EXPR, exp_type, | |
3953 build_int_cst (exp_type, 0), | |
3954 0, low, 0); | |
3955 if (n_high != 0 && TREE_OVERFLOW (n_high)) | |
3956 break; | |
3957 goto normalize; | |
3958 | |
3959 case BIT_NOT_EXPR: | |
3960 /* ~ X -> -X - 1 */ | |
3961 exp = build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0), | |
3962 build_int_cst (exp_type, 1)); | |
3963 continue; | |
3964 | |
3965 case PLUS_EXPR: case MINUS_EXPR: | |
3966 if (TREE_CODE (arg1) != INTEGER_CST) | |
3967 break; | |
3968 | |
3969 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot | |
3970 move a constant to the other side. */ | |
3971 if (!TYPE_UNSIGNED (arg0_type) | |
3972 && !TYPE_OVERFLOW_UNDEFINED (arg0_type)) | |
3973 break; | |
3974 | |
3975 /* If EXP is signed, any overflow in the computation is undefined, | |
3976 so we don't worry about it so long as our computations on | |
3977 the bounds don't overflow. For unsigned, overflow is defined | |
3978 and this is exactly the right thing. */ | |
3979 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR, | |
3980 arg0_type, low, 0, arg1, 0); | |
3981 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR, | |
3982 arg0_type, high, 1, arg1, 0); | |
3983 if ((n_low != 0 && TREE_OVERFLOW (n_low)) | |
3984 || (n_high != 0 && TREE_OVERFLOW (n_high))) | |
3985 break; | |
3986 | |
3987 if (TYPE_OVERFLOW_UNDEFINED (arg0_type)) | |
3988 *strict_overflow_p = true; | |
3989 | |
3990 normalize: | |
3991 /* Check for an unsigned range which has wrapped around the maximum | |
3992 value thus making n_high < n_low, and normalize it. */ | |
3993 if (n_low && n_high && tree_int_cst_lt (n_high, n_low)) | |
3994 { | |
3995 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0, | |
3996 integer_one_node, 0); | |
3997 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0, | |
3998 integer_one_node, 0); | |
3999 | |
4000 /* If the range is of the form +/- [ x+1, x ], we won't | |
4001 be able to normalize it. But then, it represents the | |
4002 whole range or the empty set, so make it | |
4003 +/- [ -, - ]. */ | |
4004 if (tree_int_cst_equal (n_low, low) | |
4005 && tree_int_cst_equal (n_high, high)) | |
4006 low = high = 0; | |
4007 else | |
4008 in_p = ! in_p; | |
4009 } | |
4010 else | |
4011 low = n_low, high = n_high; | |
4012 | |
4013 exp = arg0; | |
4014 continue; | |
4015 | |
4016 CASE_CONVERT: case NON_LVALUE_EXPR: | |
4017 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type)) | |
4018 break; | |
4019 | |
4020 if (! INTEGRAL_TYPE_P (arg0_type) | |
4021 || (low != 0 && ! int_fits_type_p (low, arg0_type)) | |
4022 || (high != 0 && ! int_fits_type_p (high, arg0_type))) | |
4023 break; | |
4024 | |
4025 n_low = low, n_high = high; | |
4026 | |
4027 if (n_low != 0) | |
4028 n_low = fold_convert_loc (loc, arg0_type, n_low); | |
4029 | |
4030 if (n_high != 0) | |
4031 n_high = fold_convert_loc (loc, arg0_type, n_high); | |
4032 | |
4033 | |
4034 /* If we're converting arg0 from an unsigned type, to exp, | |
4035 a signed type, we will be doing the comparison as unsigned. | |
4036 The tests above have already verified that LOW and HIGH | |
4037 are both positive. | |
4038 | |
4039 So we have to ensure that we will handle large unsigned | |
4040 values the same way that the current signed bounds treat | |
4041 negative values. */ | |
4042 | |
4043 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type)) | |
4044 { | |
4045 tree high_positive; | |
4046 tree equiv_type; | |
4047 /* For fixed-point modes, we need to pass the saturating flag | |
4048 as the 2nd parameter. */ | |
4049 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type))) | |
4050 equiv_type = lang_hooks.types.type_for_mode | |
4051 (TYPE_MODE (arg0_type), | |
4052 TYPE_SATURATING (arg0_type)); | |
4053 else | |
4054 equiv_type = lang_hooks.types.type_for_mode | |
4055 (TYPE_MODE (arg0_type), 1); | |
4056 | |
4057 /* A range without an upper bound is, naturally, unbounded. | |
4058 Since convert would have cropped a very large value, use | |
4059 the max value for the destination type. */ | |
4060 high_positive | |
4061 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type) | |
4062 : TYPE_MAX_VALUE (arg0_type); | |
4063 | |
4064 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type)) | |
4065 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type, | |
4066 fold_convert_loc (loc, arg0_type, | |
4067 high_positive), | |
4068 build_int_cst (arg0_type, 1)); | |
4069 | |
4070 /* If the low bound is specified, "and" the range with the | |
4071 range for which the original unsigned value will be | |
4072 positive. */ | |
4073 if (low != 0) | |
4074 { | |
4075 if (! merge_ranges (&n_in_p, &n_low, &n_high, | |
4076 1, n_low, n_high, 1, | |
4077 fold_convert_loc (loc, arg0_type, | |
4078 integer_zero_node), | |
4079 high_positive)) | |
4080 break; | |
4081 | |
4082 in_p = (n_in_p == in_p); | |
4083 } | |
4084 else | |
4085 { | |
4086 /* Otherwise, "or" the range with the range of the input | |
4087 that will be interpreted as negative. */ | |
4088 if (! merge_ranges (&n_in_p, &n_low, &n_high, | |
4089 0, n_low, n_high, 1, | |
4090 fold_convert_loc (loc, arg0_type, | |
4091 integer_zero_node), | |
4092 high_positive)) | |
4093 break; | |
4094 | |
4095 in_p = (in_p != n_in_p); | |
4096 } | |
4097 } | |
4098 | |
4099 exp = arg0; | |
4100 low = n_low, high = n_high; | |
4101 continue; | |
4102 | |
4103 default: | |
4104 break; | |
4105 } | |
4106 | |
4107 break; | |
4108 } | 4682 } |
4109 | 4683 |
4110 /* If EXP is a constant, we can evaluate whether this is true or false. */ | 4684 /* If EXP is a constant, we can evaluate whether this is true or false. */ |
4111 if (TREE_CODE (exp) == INTEGER_CST) | 4685 if (TREE_CODE (exp) == INTEGER_CST) |
4112 { | 4686 { |
4119 } | 4693 } |
4120 | 4694 |
4121 *pin_p = in_p, *plow = low, *phigh = high; | 4695 *pin_p = in_p, *plow = low, *phigh = high; |
4122 return exp; | 4696 return exp; |
4123 } | 4697 } |
4698 | |
4699 /* Returns TRUE if the [LOW, HIGH] range check can be optimized to | |
4700 a bitwise check, i.e. when | |
4701 LOW == 0xXX...X00...0 | |
4702 HIGH == 0xXX...X11...1. | |
4703 Return the corresponding mask in MASK and the stem in VALUE. */ | |
4704 | |
4705 static bool | |
4706 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask, | |
4707 tree *value) | |
4708 { | |
4709 if (TREE_CODE (low) != INTEGER_CST | |
4710 || TREE_CODE (high) != INTEGER_CST) | |
4711 return false; | |
4712 | |
4713 unsigned prec = TYPE_PRECISION (type); | |
4714 wide_int lo = wi::to_wide (low, prec); | |
4715 wide_int hi = wi::to_wide (high, prec); | |
4716 | |
4717 wide_int end_mask = lo ^ hi; | |
4718 if ((end_mask & (end_mask + 1)) != 0 | |
4719 || (lo & end_mask) != 0) | |
4720 return false; | |
4721 | |
4722 wide_int stem_mask = ~end_mask; | |
4723 wide_int stem = lo & stem_mask; | |
4724 if (stem != (hi & stem_mask)) | |
4725 return false; | |
4726 | |
4727 *mask = wide_int_to_tree (type, stem_mask); | |
4728 *value = wide_int_to_tree (type, stem); | |
4729 | |
4730 return true; | |
4731 } | |
4124 | 4732 |
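A worked instance of the mask/stem decomposition (constants illustrative only): for LOW = 0x20 and HIGH = 0x3F the bounds differ exactly in their trailing five bits, so end_mask is 0x1F, the stem is 0x20, and the two comparisons collapse to one masked equality.

    /* Illustration only: 0x20 <= x && x <= 0x3F is equivalent to
       (x & 0xE0) == 0x20 for 8-bit x, where 0xE0 is the stem mask
       ~0x1F restricted to 8 bits.  */
    #include <assert.h>

    int main (void)
    {
      for (unsigned x = 0; x < 256; x++)
        {
          int range_form = (x >= 0x20 && x <= 0x3F);
          int mask_form = ((x & 0xE0u) == 0x20u);
          assert (range_form == mask_form);
        }
      return 0;
    }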
4733 /* Helper routine for build_range_check and match.pd. Return the type to | |
4734 perform the check or NULL if it shouldn't be optimized. */ | |
4735 | |
4736 tree | |
4737 range_check_type (tree etype) | |
4738 { | |
4739 /* First make sure that arithmetic in this type is valid, then make sure | |
4740 that it wraps around. */ | |
4741 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE) | |
4742 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), | |
4743 TYPE_UNSIGNED (etype)); | |
4744 | |
4745 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype)) | |
4746 { | |
4747 tree utype, minv, maxv; | |
4748 | |
4749 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN | |
4750 for the type in question, as we rely on this here. */ | |
4751 utype = unsigned_type_for (etype); | |
4752 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype)); | |
4753 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1, | |
4754 build_int_cst (TREE_TYPE (maxv), 1), 1); | |
4755 minv = fold_convert (utype, TYPE_MIN_VALUE (etype)); | |
4756 | |
4757 if (integer_zerop (range_binop (NE_EXPR, integer_type_node, | |
4758 minv, 1, maxv, 1))) | |
4759 etype = utype; | |
4760 else | |
4761 return NULL_TREE; | |
4762 } | |
4763 return etype; | |
4764 } | |
4765 | |
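The wrap-around property range_check_type insists on can be restated in host C: after casting to the unsigned counterpart, TYPE_MAX + 1 must coincide with TYPE_MIN. A quick sanity check of that identity for int (an illustration on a two's-complement host, not GCC code):

    /* Illustration only: the precondition range_check_type verifies,
       spelled out for int.  Conversion of a negative value to unsigned
       is defined modulo 2^N, and unsigned overflow wraps.  */
    #include <assert.h>
    #include <limits.h>

    int main (void)
    {
      unsigned maxv = (unsigned) INT_MAX + 1u;  /* unsigned wrap, well defined */
      unsigned minv = (unsigned) INT_MIN;
      assert (maxv == minv);
      return 0;
    }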
4125 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result | 4766 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result |
4126 type, TYPE, return an expression to test if EXP is in (or out of, depending | 4767 type, TYPE, return an expression to test if EXP is in (or out of, depending |
4127 on IN_P) the range. Return 0 if the test couldn't be created. */ | 4768 on IN_P) the range. Return 0 if the test couldn't be created. */ |
4128 | 4769 |
4129 tree | 4770 tree |
4130 build_range_check (location_t loc, tree type, tree exp, int in_p, | 4771 build_range_check (location_t loc, tree type, tree exp, int in_p, |
4131 tree low, tree high) | 4772 tree low, tree high) |
4132 { | 4773 { |
4133 tree etype = TREE_TYPE (exp), value; | 4774 tree etype = TREE_TYPE (exp), mask, value; |
4134 | 4775 |
4135 #ifdef HAVE_canonicalize_funcptr_for_compare | |
4136 /* Disable this optimization for function pointer expressions | 4776 /* Disable this optimization for function pointer expressions |
4137 on targets that require function pointer canonicalization. */ | 4777 on targets that require function pointer canonicalization. */ |
4138 if (HAVE_canonicalize_funcptr_for_compare | 4778 if (targetm.have_canonicalize_funcptr_for_compare () |
4139 && TREE_CODE (etype) == POINTER_TYPE | 4779 && TREE_CODE (etype) == POINTER_TYPE |
4140 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE) | 4780 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE) |
4141 return NULL_TREE; | 4781 return NULL_TREE; |
4142 #endif | |
4143 | 4782 |
4144 if (! in_p) | 4783 if (! in_p) |
4145 { | 4784 { |
4146 value = build_range_check (loc, type, exp, 1, low, high); | 4785 value = build_range_check (loc, type, exp, 1, low, high); |
4147 if (value != 0) | 4786 if (value != 0) |
4149 | 4788 |
4150 return 0; | 4789 return 0; |
4151 } | 4790 } |
4152 | 4791 |
4153 if (low == 0 && high == 0) | 4792 if (low == 0 && high == 0) |
4154 return build_int_cst (type, 1); | 4793 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp); |
4155 | 4794 |
4156 if (low == 0) | 4795 if (low == 0) |
4157 return fold_build2_loc (loc, LE_EXPR, type, exp, | 4796 return fold_build2_loc (loc, LE_EXPR, type, exp, |
4158 fold_convert_loc (loc, etype, high)); | 4797 fold_convert_loc (loc, etype, high)); |
4159 | 4798 |
4160 if (high == 0) | 4799 if (high == 0) |
4161 return fold_build2_loc (loc, GE_EXPR, type, exp, | 4800 return fold_build2_loc (loc, GE_EXPR, type, exp, |
4162 fold_convert_loc (loc, etype, low)); | 4801 fold_convert_loc (loc, etype, low)); |
4163 | 4802 |
4164 if (operand_equal_p (low, high, 0)) | 4803 if (operand_equal_p (low, high, 0)) |
4165 return fold_build2_loc (loc, EQ_EXPR, type, exp, | 4804 return fold_build2_loc (loc, EQ_EXPR, type, exp, |
4166 fold_convert_loc (loc, etype, low)); | 4805 fold_convert_loc (loc, etype, low)); |
4806 | |
4807 if (TREE_CODE (exp) == BIT_AND_EXPR | |
4808 && maskable_range_p (low, high, etype, &mask, &value)) | |
4809 return fold_build2_loc (loc, EQ_EXPR, type, | |
4810 fold_build2_loc (loc, BIT_AND_EXPR, etype, | |
4811 exp, mask), | |
4812 value); | |
4167 | 4813 |
4168 if (integer_zerop (low)) | 4814 if (integer_zerop (low)) |
4169 { | 4815 { |
4170 if (! TYPE_UNSIGNED (etype)) | 4816 if (! TYPE_UNSIGNED (etype)) |
4171 { | 4817 { |
4177 } | 4823 } |
4178 | 4824 |
4179 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */ | 4825 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */ |
4180 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST) | 4826 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST) |
4181 { | 4827 { |
4182 unsigned HOST_WIDE_INT lo; | 4828 int prec = TYPE_PRECISION (etype); |
4183 HOST_WIDE_INT hi; | 4829 |
4184 int prec; | 4830 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high)) |
4185 | |
4186 prec = TYPE_PRECISION (etype); | |
4187 if (prec <= HOST_BITS_PER_WIDE_INT) | |
4188 { | |
4189 hi = 0; | |
4190 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1; | |
4191 } | |
4192 else | |
4193 { | |
4194 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1; | |
4195 lo = (unsigned HOST_WIDE_INT) -1; | |
4196 } | |
4197 | |
4198 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo) | |
4199 { | 4831 { |
4200 if (TYPE_UNSIGNED (etype)) | 4832 if (TYPE_UNSIGNED (etype)) |
4201 { | 4833 { |
4202 tree signed_etype = signed_type_for (etype); | 4834 tree signed_etype = signed_type_for (etype); |
4203 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype)) | 4835 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype)) |
4206 else | 4838 else |
4207 etype = signed_etype; | 4839 etype = signed_etype; |
4208 exp = fold_convert_loc (loc, etype, exp); | 4840 exp = fold_convert_loc (loc, etype, exp); |
4209 } | 4841 } |
4210 return fold_build2_loc (loc, GT_EXPR, type, exp, | 4842 return fold_build2_loc (loc, GT_EXPR, type, exp, |
4211 build_int_cst (etype, 0)); | 4843 build_int_cst (etype, 0)); |
4212 } | 4844 } |
4213 } | 4845 } |
4214 | 4846 |
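The (c>=1) && (c<=127) case above works because reinterpreting the value as signed lets the sign bit stand in for the upper bound. An exhaustive check of the rewritten form (illustrative; assumes a two's-complement target, where the cast of 128..255 to signed char yields a negative value):

    /* Illustration only: for unsigned char c, "c >= 1 && c <= 127"
       holds exactly when (signed char) c > 0 -- 128..255 become
       negative under the cast and 0 stays 0.  */
    #include <assert.h>

    int main (void)
    {
      for (unsigned v = 0; v < 256; v++)
        {
          unsigned char c = (unsigned char) v;
          int range_form = (c >= 1 && c <= 127);
          int sign_form = ((signed char) c > 0);
          assert (range_form == sign_form);
        }
      return 0;
    }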
4215 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low). | 4847 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low). |
4216 This requires wrap-around arithmetic for the type of the expression. | 4848 This requires wrap-around arithmetic for the type of the expression. */ |
4217 First make sure that arithmetic in this type is valid, then make sure | 4849 etype = range_check_type (etype); |
4218 that it wraps around. */ | 4850 if (etype == NULL_TREE) |
4219 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE) | 4851 return NULL_TREE; |
4220 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), | 4852 |
4221 TYPE_UNSIGNED (etype)); | 4853 if (POINTER_TYPE_P (etype)) |
4222 | 4854 etype = unsigned_type_for (etype); |
4223 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype)) | |
4224 { | |
4225 tree utype, minv, maxv; | |
4226 | |
4227 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN | |
4228 for the type in question, as we rely on this here. */ | |
4229 utype = unsigned_type_for (etype); | |
4230 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype)); | |
4231 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1, | |
4232 integer_one_node, 1); | |
4233 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype)); | |
4234 | |
4235 if (integer_zerop (range_binop (NE_EXPR, integer_type_node, | |
4236 minv, 1, maxv, 1))) | |
4237 etype = utype; | |
4238 else | |
4239 return 0; | |
4240 } | |
4241 | 4855 |
4242 high = fold_convert_loc (loc, etype, high); | 4856 high = fold_convert_loc (loc, etype, high); |
4243 low = fold_convert_loc (loc, etype, low); | 4857 low = fold_convert_loc (loc, etype, low); |
4244 exp = fold_convert_loc (loc, etype, exp); | 4858 exp = fold_convert_loc (loc, etype, exp); |
4245 | 4859 |
4246 value = const_binop (MINUS_EXPR, high, low); | 4860 value = const_binop (MINUS_EXPR, high, low); |
4247 | |
4248 | |
4249 if (POINTER_TYPE_P (etype)) | |
4250 { | |
4251 if (value != 0 && !TREE_OVERFLOW (value)) | |
4252 { | |
4253 low = fold_convert_loc (loc, sizetype, low); | |
4254 low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low); | |
4255 return build_range_check (loc, type, | |
4256 fold_build2_loc (loc, POINTER_PLUS_EXPR, | |
4257 etype, exp, low), | |
4258 1, build_int_cst (etype, 0), value); | |
4259 } | |
4260 return 0; | |
4261 } | |
4262 | 4861 |
4263 if (value != 0 && !TREE_OVERFLOW (value)) | 4862 if (value != 0 && !TREE_OVERFLOW (value)) |
4264 return build_range_check (loc, type, | 4863 return build_range_check (loc, type, |
4265 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low), | 4864 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low), |
4266 1, build_int_cst (etype, 0), value); | 4865 1, build_int_cst (etype, 0), value); |
4277 | 4876 |
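The recursive build_range_check call above is the classic single-comparison range check: once the type wraps, subtracting LOW folds both bounds into one unsigned comparison. A standalone statement of the identity (bounds arbitrary):

    /* Illustration only: with wrap-around unsigned arithmetic,
       "low <= c && c <= high" is the same test as
       "c - low <= high - low" -- one comparison instead of two,
       because c < low makes c - low wrap to a huge value.  */
    #include <assert.h>

    int main (void)
    {
      const unsigned low = 10, high = 42;
      for (unsigned c = 0; c < 1000; c++)
        assert ((c >= low && c <= high) == (c - low <= high - low));
      return 0;
    }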
4278 if (INTEGRAL_TYPE_P (type) | 4877 if (INTEGRAL_TYPE_P (type) |
4279 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0)) | 4878 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0)) |
4280 return 0; | 4879 return 0; |
4281 else | 4880 else |
4282 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0); | 4881 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, |
4882 build_int_cst (TREE_TYPE (val), 1), 0); | |
4283 } | 4883 } |
4284 | 4884 |
4285 /* Return the successor of VAL in its type, handling the infinite case. */ | 4885 /* Return the successor of VAL in its type, handling the infinite case. */ |
4286 | 4886 |
4287 static tree | 4887 static tree |
4291 | 4891 |
4292 if (INTEGRAL_TYPE_P (type) | 4892 if (INTEGRAL_TYPE_P (type) |
4293 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0)) | 4893 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0)) |
4294 return 0; | 4894 return 0; |
4295 else | 4895 else |
4296 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0); | 4896 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, |
4897 build_int_cst (TREE_TYPE (val), 1), 0); | |
4297 } | 4898 } |
4298 | 4899 |
4299 /* Given two ranges, see if we can merge them into one. Return 1 if we | 4900 /* Given two ranges, see if we can merge them into one. Return 1 if we |
4300 can, 0 if we can't. Set the output range into the specified parameters. */ | 4901 can, 0 if we can't. Set the output range into the specified parameters. */ |
4301 | 4902 |
4471 break; | 5072 break; |
4472 case POINTER_TYPE: | 5073 case POINTER_TYPE: |
4473 if (TYPE_UNSIGNED (TREE_TYPE (high1)) | 5074 if (TYPE_UNSIGNED (TREE_TYPE (high1)) |
4474 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE, | 5075 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE, |
4475 high1, 1, | 5076 high1, 1, |
4476 integer_one_node, 1))) | 5077 build_int_cst (TREE_TYPE (high1), 1), |
5078 1))) | |
4477 high1 = 0; | 5079 high1 = 0; |
4478 break; | 5080 break; |
4479 default: | 5081 default: |
4480 break; | 5082 break; |
4481 } | 5083 } |
4546 even though the original expressions could be positive or | 5148 even though the original expressions could be positive or |
4547 negative, depending on the sign of A. | 5149 negative, depending on the sign of A. |
4548 | 5150 |
4549 Note that all these transformations are correct if A is | 5151 Note that all these transformations are correct if A is |
4550 NaN, since the two alternatives (A and -A) are also NaNs. */ | 5152 NaN, since the two alternatives (A and -A) are also NaNs. */ |
4551 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)) | 5153 if (!HONOR_SIGNED_ZEROS (element_mode (type)) |
4552 && (FLOAT_TYPE_P (TREE_TYPE (arg01)) | 5154 && (FLOAT_TYPE_P (TREE_TYPE (arg01)) |
4553 ? real_zerop (arg01) | 5155 ? real_zerop (arg01) |
4554 : integer_zerop (arg01)) | 5156 : integer_zerop (arg01)) |
4555 && ((TREE_CODE (arg2) == NEGATE_EXPR | 5157 && ((TREE_CODE (arg2) == NEGATE_EXPR |
4556 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0)) | 5158 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0)) |
4565 switch (comp_code) | 5167 switch (comp_code) |
4566 { | 5168 { |
4567 case EQ_EXPR: | 5169 case EQ_EXPR: |
4568 case UNEQ_EXPR: | 5170 case UNEQ_EXPR: |
4569 tem = fold_convert_loc (loc, arg1_type, arg1); | 5171 tem = fold_convert_loc (loc, arg1_type, arg1); |
4570 return pedantic_non_lvalue_loc (loc, | 5172 return fold_convert_loc (loc, type, negate_expr (tem)); |
4571 fold_convert_loc (loc, type, | |
4572 negate_expr (tem))); | |
4573 case NE_EXPR: | 5173 case NE_EXPR: |
4574 case LTGT_EXPR: | 5174 case LTGT_EXPR: |
4575 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1)); | 5175 return fold_convert_loc (loc, type, arg1); |
4576 case UNGE_EXPR: | 5176 case UNGE_EXPR: |
4577 case UNGT_EXPR: | 5177 case UNGT_EXPR: |
4578 if (flag_trapping_math) | 5178 if (flag_trapping_math) |
4579 break; | 5179 break; |
4580 /* Fall through. */ | 5180 /* Fall through. */ |
4581 case GE_EXPR: | 5181 case GE_EXPR: |
4582 case GT_EXPR: | 5182 case GT_EXPR: |
4583 if (TYPE_UNSIGNED (TREE_TYPE (arg1))) | 5183 if (TYPE_UNSIGNED (TREE_TYPE (arg1))) |
4584 arg1 = fold_convert_loc (loc, signed_type_for | 5184 break; |
4585 (TREE_TYPE (arg1)), arg1); | |
4586 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1); | 5185 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1); |
4587 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem)); | 5186 return fold_convert_loc (loc, type, tem); |
4588 case UNLE_EXPR: | 5187 case UNLE_EXPR: |
4589 case UNLT_EXPR: | 5188 case UNLT_EXPR: |
4590 if (flag_trapping_math) | 5189 if (flag_trapping_math) |
4591 break; | 5190 break; |
5191 /* FALLTHRU */ | |
4592 case LE_EXPR: | 5192 case LE_EXPR: |
4593 case LT_EXPR: | 5193 case LT_EXPR: |
4594 if (TYPE_UNSIGNED (TREE_TYPE (arg1))) | 5194 if (TYPE_UNSIGNED (TREE_TYPE (arg1))) |
4595 arg1 = fold_convert_loc (loc, signed_type_for | 5195 break; |
4596 (TREE_TYPE (arg1)), arg1); | |
4597 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1); | 5196 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1); |
4598 return negate_expr (fold_convert_loc (loc, type, tem)); | 5197 return negate_expr (fold_convert_loc (loc, type, tem)); |
4599 default: | 5198 default: |
4600 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison); | 5199 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison); |
4601 break; | 5200 break; |
4604 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise | 5203 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise |
4605 A == 0 ? A : 0 is always 0 unless A is -0. Note that | 5204 A == 0 ? A : 0 is always 0 unless A is -0. Note that |
4606 both transformations are correct when A is NaN: A != 0 | 5205 both transformations are correct when A is NaN: A != 0 |
4607 is then true, and A == 0 is false. */ | 5206 is then true, and A == 0 is false. */ |
4608 | 5207 |
4609 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)) | 5208 if (!HONOR_SIGNED_ZEROS (element_mode (type)) |
4610 && integer_zerop (arg01) && integer_zerop (arg2)) | 5209 && integer_zerop (arg01) && integer_zerop (arg2)) |
4611 { | 5210 { |
4612 if (comp_code == NE_EXPR) | 5211 if (comp_code == NE_EXPR) |
4613 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1)); | 5212 return fold_convert_loc (loc, type, arg1); |
4614 else if (comp_code == EQ_EXPR) | 5213 else if (comp_code == EQ_EXPR) |
4615 return build_int_cst (type, 0); | 5214 return build_zero_cst (type); |
4616 } | 5215 } |
4617 | 5216 |
4618 /* Try some transformations of A op B ? A : B. | 5217 /* Try some transformations of A op B ? A : B. |
4619 | 5218 |
4620 A == B? A : B same as B | 5219 A == B? A : B same as B |
4639 | 5238 |
4640 The conversions to max() and min() are not correct if B is | 5239 The conversions to max() and min() are not correct if B is |
4641 a number and A is not. The conditions in the original | 5240 a number and A is not. The conditions in the original |
4642 expressions will be false, so all four give B. The min() | 5241 expressions will be false, so all four give B. The min() |
4643 and max() versions would give a NaN instead. */ | 5242 and max() versions would give a NaN instead. */ |
4644 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)) | 5243 if (!HONOR_SIGNED_ZEROS (element_mode (type)) |
4645 && operand_equal_for_comparison_p (arg01, arg2, arg00) | 5244 && operand_equal_for_comparison_p (arg01, arg2) |
4646 /* Avoid these transformations if the COND_EXPR may be used | 5245 /* Avoid these transformations if the COND_EXPR may be used |
4647 as an lvalue in the C++ front-end. PR c++/19199. */ | 5246 as an lvalue in the C++ front-end. PR c++/19199. */ |
4648 && (in_gimple_form | 5247 && (in_gimple_form |
4649 || (strcmp (lang_hooks.name, "GNU C++") != 0 | 5248 || VECTOR_TYPE_P (type) |
5249 || (! lang_GNU_CXX () | |
4650 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0) | 5250 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0) |
4651 || ! maybe_lvalue_p (arg1) | 5251 || ! maybe_lvalue_p (arg1) |
4652 || ! maybe_lvalue_p (arg2))) | 5252 || ! maybe_lvalue_p (arg2))) |
4653 { | 5253 { |
4654 tree comp_op0 = arg00; | 5254 tree comp_op0 = arg00; |
4655 tree comp_op1 = arg01; | 5255 tree comp_op1 = arg01; |
4656 tree comp_type = TREE_TYPE (comp_op0); | 5256 tree comp_type = TREE_TYPE (comp_op0); |
4657 | 5257 |
4658 /* Avoid adding NOP_EXPRs in case this is an lvalue. */ | |
4659 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type)) | |
4660 { | |
4661 comp_type = type; | |
4662 comp_op0 = arg1; | |
4663 comp_op1 = arg2; | |
4664 } | |
4665 | |
4666 switch (comp_code) | 5258 switch (comp_code) |
4667 { | 5259 { |
4668 case EQ_EXPR: | 5260 case EQ_EXPR: |
4669 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2)); | 5261 return fold_convert_loc (loc, type, arg2); |
4670 case NE_EXPR: | 5262 case NE_EXPR: |
4671 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1)); | 5263 return fold_convert_loc (loc, type, arg1); |
4672 case LE_EXPR: | 5264 case LE_EXPR: |
4673 case LT_EXPR: | 5265 case LT_EXPR: |
4674 case UNLE_EXPR: | 5266 case UNLE_EXPR: |
4675 case UNLT_EXPR: | 5267 case UNLT_EXPR: |
4676 /* In C++ a ?: expression can be an lvalue, so put the | 5268 /* In C++ a ?: expression can be an lvalue, so put the |
4677 operand which will be used if they are equal first | 5269 operand which will be used if they are equal first |
4678 so that we can convert this back to the | 5270 so that we can convert this back to the |
4679 corresponding COND_EXPR. */ | 5271 corresponding COND_EXPR. */ |
4680 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))) | 5272 if (!HONOR_NANS (arg1)) |
4681 { | 5273 { |
4682 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0); | 5274 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0); |
4683 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1); | 5275 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1); |
4684 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR) | 5276 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR) |
4685 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1) | 5277 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1) |
4686 : fold_build2_loc (loc, MIN_EXPR, comp_type, | 5278 : fold_build2_loc (loc, MIN_EXPR, comp_type, |
4687 comp_op1, comp_op0); | 5279 comp_op1, comp_op0); |
4688 return pedantic_non_lvalue_loc (loc, | 5280 return fold_convert_loc (loc, type, tem); |
4689 fold_convert_loc (loc, type, tem)); | |
4690 } | 5281 } |
4691 break; | 5282 break; |
4692 case GE_EXPR: | 5283 case GE_EXPR: |
4693 case GT_EXPR: | 5284 case GT_EXPR: |
4694 case UNGE_EXPR: | 5285 case UNGE_EXPR: |
4695 case UNGT_EXPR: | 5286 case UNGT_EXPR: |
4696 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))) | 5287 if (!HONOR_NANS (arg1)) |
4697 { | 5288 { |
4698 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0); | 5289 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0); |
4699 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1); | 5290 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1); |
4700 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR) | 5291 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR) |
4701 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1) | 5292 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1) |
4702 : fold_build2_loc (loc, MAX_EXPR, comp_type, | 5293 : fold_build2_loc (loc, MAX_EXPR, comp_type, |
4703 comp_op1, comp_op0); | 5294 comp_op1, comp_op0); |
4704 return pedantic_non_lvalue_loc (loc, | 5295 return fold_convert_loc (loc, type, tem); |
4705 fold_convert_loc (loc, type, tem)); | |
4706 } | 5296 } |
4707 break; | 5297 break; |
4708 case UNEQ_EXPR: | 5298 case UNEQ_EXPR: |
4709 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))) | 5299 if (!HONOR_NANS (arg1)) |
4710 return pedantic_non_lvalue_loc (loc, | 5300 return fold_convert_loc (loc, type, arg2); |
4711 fold_convert_loc (loc, type, arg2)); | |
4712 break; | 5301 break; |
4713 case LTGT_EXPR: | 5302 case LTGT_EXPR: |
4714 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))) | 5303 if (!HONOR_NANS (arg1)) |
4715 return pedantic_non_lvalue_loc (loc, | 5304 return fold_convert_loc (loc, type, arg1); |
4716 fold_convert_loc (loc, type, arg1)); | |
4717 break; | 5305 break; |
4718 default: | 5306 default: |
4719 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison); | 5307 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison); |
4720 break; | 5308 break; |
4721 } | 5309 } |
4722 } | 5310 } |
4723 | |
4724 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers, | |
4725 we might still be able to simplify this. For example, | |
4726 if C1 is one less or one more than C2, this might have started | |
4727 out as a MIN or MAX and been transformed by this function. | |
4728 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */ | |
4729 | |
4730 if (INTEGRAL_TYPE_P (type) | |
4731 && TREE_CODE (arg01) == INTEGER_CST | |
4732 && TREE_CODE (arg2) == INTEGER_CST) | |
4733 switch (comp_code) | |
4734 { | |
4735 case EQ_EXPR: | |
4736 if (TREE_CODE (arg1) == INTEGER_CST) | |
4737 break; | |
4738 /* We can replace A with C1 in this case. */ | |
4739 arg1 = fold_convert_loc (loc, type, arg01); | |
4740 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2); | |
4741 | |
4742 case LT_EXPR: | |
4743 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for | |
4744 MIN_EXPR, to preserve the signedness of the comparison. */ | |
4745 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), | |
4746 OEP_ONLY_CONST) | |
4747 && operand_equal_p (arg01, | |
4748 const_binop (PLUS_EXPR, arg2, | |
4749 build_int_cst (type, 1)), | |
4750 OEP_ONLY_CONST)) | |
4751 { | |
4752 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00, | |
4753 fold_convert_loc (loc, TREE_TYPE (arg00), | |
4754 arg2)); | |
4755 return pedantic_non_lvalue_loc (loc, | |
4756 fold_convert_loc (loc, type, tem)); | |
4757 } | |
4758 break; | |
4759 | |
4760 case LE_EXPR: | |
4761 /* If C1 is C2 - 1, this is min(A, C2), with the same care | |
4762 as above. */ | |
4763 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), | |
4764 OEP_ONLY_CONST) | |
4765 && operand_equal_p (arg01, | |
4766 const_binop (MINUS_EXPR, arg2, | |
4767 build_int_cst (type, 1)), | |
4768 OEP_ONLY_CONST)) | |
4769 { | |
4770 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00, | |
4771 fold_convert_loc (loc, TREE_TYPE (arg00), | |
4772 arg2)); | |
4773 return pedantic_non_lvalue_loc (loc, | |
4774 fold_convert_loc (loc, type, tem)); | |
4775 } | |
4776 break; | |
4777 | |
4778 case GT_EXPR: | |
4779 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for | |
4780 MAX_EXPR, to preserve the signedness of the comparison. */ | |
4781 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), | |
4782 OEP_ONLY_CONST) | |
4783 && operand_equal_p (arg01, | |
4784 const_binop (MINUS_EXPR, arg2, | |
4785 build_int_cst (type, 1)), | |
4786 OEP_ONLY_CONST)) | |
4787 { | |
4788 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00, | |
4789 fold_convert_loc (loc, TREE_TYPE (arg00), | |
4790 arg2)); | |
4791 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem)); | |
4792 } | |
4793 break; | |
4794 | |
4795 case GE_EXPR: | |
4796 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */ | |
4797 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), | |
4798 OEP_ONLY_CONST) | |
4799 && operand_equal_p (arg01, | |
4800 const_binop (PLUS_EXPR, arg2, | |
4801 build_int_cst (type, 1)), | |
4802 OEP_ONLY_CONST)) | |
4803 { | |
4804 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00, | |
4805 fold_convert_loc (loc, TREE_TYPE (arg00), | |
4806 arg2)); | |
4807 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem)); | |
4808 } | |
4809 break; | |
4810 case NE_EXPR: | |
4811 break; | |
4812 default: | |
4813 gcc_unreachable (); | |
4814 } | |
4815 | 5311 |
4816 return NULL_TREE; | 5312 return NULL_TREE; |
4817 } | 5313 } |
4818 | 5314 |
4819 | 5315 |
4834 int or_op = (code == TRUTH_ORIF_EXPR | 5330 int or_op = (code == TRUTH_ORIF_EXPR |
4835 || code == TRUTH_OR_EXPR); | 5331 || code == TRUTH_OR_EXPR); |
4836 int in0_p, in1_p, in_p; | 5332 int in0_p, in1_p, in_p; |
4837 tree low0, low1, low, high0, high1, high; | 5333 tree low0, low1, low, high0, high1, high; |
4838 bool strict_overflow_p = false; | 5334 bool strict_overflow_p = false; |
4839 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p); | 5335 tree tem, lhs, rhs; |
4840 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p); | |
4841 tree tem; | |
4842 const char * const warnmsg = G_("assuming signed overflow does not occur " | 5336 const char * const warnmsg = G_("assuming signed overflow does not occur " |
4843 "when simplifying range test"); | 5337 "when simplifying range test"); |
5338 | |
5339 if (!INTEGRAL_TYPE_P (type)) | |
5340 return 0; | |
5341 | |
5342 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p); | |
5343 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p); | |
4844 | 5344 |
4845 /* If this is an OR operation, invert both sides; we will invert | 5345 /* If this is an OR operation, invert both sides; we will invert |
4846 again at the end. */ | 5346 again at the end. */ |
4847 if (or_op) | 5347 if (or_op) |
4848 in0_p = ! in0_p, in1_p = ! in1_p; | 5348 in0_p = ! in0_p, in1_p = ! in1_p; |
4852 ranges is always true or always false, consider it to be the same | 5352 ranges is always true or always false, consider it to be the same |
4853 expression as the other. */ | 5353 expression as the other. */ |
4854 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0)) | 5354 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0)) |
4855 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0, | 5355 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0, |
4856 in1_p, low1, high1) | 5356 in1_p, low1, high1) |
4857 && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type, | 5357 && 0 != (tem = (build_range_check (loc, type, |
4858 lhs != 0 ? lhs | 5358 lhs != 0 ? lhs |
4859 : rhs != 0 ? rhs : integer_zero_node, | 5359 : rhs != 0 ? rhs : integer_zero_node, |
4860 in_p, low, high)))) | 5360 in_p, low, high)))) |
4861 { | 5361 { |
4862 if (strict_overflow_p) | 5362 if (strict_overflow_p) |
4866 | 5366 |
4867 /* On machines where the branch cost is expensive, if this is a | 5367 /* On machines where the branch cost is expensive, if this is a |
4868 short-circuited branch and the underlying object on both sides | 5368 short-circuited branch and the underlying object on both sides |
4869 is the same, make a non-short-circuit operation. */ | 5369 is the same, make a non-short-circuit operation. */ |
4870 else if (LOGICAL_OP_NON_SHORT_CIRCUIT | 5370 else if (LOGICAL_OP_NON_SHORT_CIRCUIT |
5371 && !flag_sanitize_coverage | |
4871 && lhs != 0 && rhs != 0 | 5372 && lhs != 0 && rhs != 0 |
4872 && (code == TRUTH_ANDIF_EXPR | 5373 && (code == TRUTH_ANDIF_EXPR |
4873 || code == TRUTH_ORIF_EXPR) | 5374 || code == TRUTH_ORIF_EXPR) |
4874 && operand_equal_p (lhs, rhs, 0)) | 5375 && operand_equal_p (lhs, rhs, 0)) |
4875 { | 5376 { |
4879 if (simple_operand_p (lhs)) | 5380 if (simple_operand_p (lhs)) |
4880 return build2_loc (loc, code == TRUTH_ANDIF_EXPR | 5381 return build2_loc (loc, code == TRUTH_ANDIF_EXPR |
4881 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR, | 5382 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR, |
4882 type, op0, op1); | 5383 type, op0, op1); |
4883 | 5384 |
4884 else if (lang_hooks.decls.global_bindings_p () == 0 | 5385 else if (!lang_hooks.decls.global_bindings_p () |
4885 && ! CONTAINS_PLACEHOLDER_P (lhs)) | 5386 && !CONTAINS_PLACEHOLDER_P (lhs)) |
4886 { | 5387 { |
4887 tree common = save_expr (lhs); | 5388 tree common = save_expr (lhs); |
4888 | 5389 |
4889 if (0 != (lhs = build_range_check (loc, type, common, | 5390 if (0 != (lhs = build_range_check (loc, type, common, |
4890 or_op ? ! in0_p : in0_p, | 5391 or_op ? ! in0_p : in0_p, |
4904 } | 5405 } |
4905 | 5406 |
4906 return 0; | 5407 return 0; |
4907 } | 5408 } |
4908 | 5409 |
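fold_range_test's payoff case is folding two comparisons on the same operand into the single range check built above. The canonical instance, checked directly (values hypothetical):

    /* Illustration only: "a == 0 || a == 1" merges the ranges [0,0]
       and [1,1] into [0,1], which folds to one unsigned comparison
       of the kind build_range_check emits.  */
    #include <assert.h>

    int main (void)
    {
      for (int a = -5; a <= 5; a++)
        assert ((a == 0 || a == 1) == ((unsigned) a <= 1u));
      return 0;
    }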
4909 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P | 5410 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P |
4910 bit value. Arrange things so the extra bits will be set to zero if and | 5411 bit value. Arrange things so the extra bits will be set to zero if and |
4911 only if C is sign-extended to its full width. If MASK is nonzero, | 5412 only if C is sign-extended to its full width. If MASK is nonzero, |
4912 it is an INTEGER_CST that should be AND'ed with the extra bits. */ | 5413 it is an INTEGER_CST that should be AND'ed with the extra bits. */ |
4913 | 5414 |
4914 static tree | 5415 static tree |
4915 unextend (tree c, int p, int unsignedp, tree mask) | 5416 unextend (tree c, int p, int unsignedp, tree mask) |
4916 { | 5417 { |
4917 tree type = TREE_TYPE (c); | 5418 tree type = TREE_TYPE (c); |
4918 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type)); | 5419 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type)); |
4919 tree temp; | 5420 tree temp; |
4920 | 5421 |
4921 if (p == modesize || unsignedp) | 5422 if (p == modesize || unsignedp) |
4922 return c; | 5423 return c; |
4923 | 5424 |
4924 /* We work by getting just the sign bit into the low-order bit, then | 5425 /* We work by getting just the sign bit into the low-order bit, then |
4925 into the high-order bit, then sign-extend. We then XOR that value | 5426 into the high-order bit, then sign-extend. We then XOR that value |
4926 with C. */ | 5427 with C. */ |
4927 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1)); | 5428 temp = build_int_cst (TREE_TYPE (c), |
4928 temp = const_binop (BIT_AND_EXPR, temp, size_int (1)); | 5429 wi::extract_uhwi (wi::to_wide (c), p - 1, 1)); |
4929 | 5430 |
4930 /* We must use a signed type in order to get an arithmetic right shift. | 5431 /* We must use a signed type in order to get an arithmetic right shift. |
4931 However, we must also avoid introducing accidental overflows, so that | 5432 However, we must also avoid introducing accidental overflows, so that |
4932 a subsequent call to integer_zerop will work. Hence we must | 5433 a subsequent call to integer_zerop will work. Hence we must |
4933 do the type conversion here. At this point, the constant is either | 5434 do the type conversion here. At this point, the constant is either |
5001 lhs = newlhs; | 5502 lhs = newlhs; |
5002 lhs_code = TREE_CODE (lhs); | 5503 lhs_code = TREE_CODE (lhs); |
5003 } | 5504 } |
5004 } | 5505 } |
5005 | 5506 |
5006 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type))); | 5507 inv_code = invert_tree_comparison (code, HONOR_NANS (type)); |
5007 if (inv_code == rhs_code | 5508 if (inv_code == rhs_code |
5008 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0) | 5509 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0) |
5009 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0)) | 5510 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0)) |
5010 return lhs; | 5511 return lhs; |
5011 if (!rhs_only && inv_code == lhs_code | 5512 if (!rhs_only && inv_code == lhs_code |
5041 two operands. | 5542 two operands. |
5042 | 5543 |
5043 We return the simplified tree or 0 if no optimization is possible. */ | 5544 We return the simplified tree or 0 if no optimization is possible. */ |
5044 | 5545 |
5045 static tree | 5546 static tree |
5046 fold_truthop (location_t loc, enum tree_code code, tree truth_type, | 5547 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type, |
5047 tree lhs, tree rhs) | 5548 tree lhs, tree rhs) |
5048 { | 5549 { |
5049 /* If this is the "or" of two comparisons, we can do something if | 5550 /* If this is the "or" of two comparisons, we can do something if |
5050 the comparisons are NE_EXPR. If this is the "and", we can do something | 5551 the comparisons are NE_EXPR. If this is the "and", we can do something |
5051 if the comparisons are EQ_EXPR. I.e., | 5552 if the comparisons are EQ_EXPR. I.e., |
5052 (a->b == 2 && a->c == 4) can become (a->new == NEW). | 5553 (a->b == 2 && a->c == 4) can become (a->new == NEW). |
5062 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos; | 5563 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos; |
5063 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos; | 5564 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos; |
5064 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos; | 5565 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos; |
5065 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos; | 5566 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos; |
5066 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp; | 5567 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp; |
5067 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode; | 5568 int ll_reversep, lr_reversep, rl_reversep, rr_reversep; |
5068 enum machine_mode lnmode, rnmode; | 5569 machine_mode ll_mode, lr_mode, rl_mode, rr_mode; |
5570 scalar_int_mode lnmode, rnmode; | |
5069 tree ll_mask, lr_mask, rl_mask, rr_mask; | 5571 tree ll_mask, lr_mask, rl_mask, rr_mask; |
5070 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask; | 5572 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask; |
5071 tree l_const, r_const; | 5573 tree l_const, r_const; |
5072 tree lntype, rntype, result; | 5574 tree lntype, rntype, result; |
5073 HOST_WIDE_INT first_bit, end_bit; | 5575 HOST_WIDE_INT first_bit, end_bit; |
5074 int volatilep; | 5576 int volatilep; |
5075 tree orig_lhs = lhs, orig_rhs = rhs; | |
5076 enum tree_code orig_code = code; | |
5077 | 5577 |
5078 /* Start by getting the comparison codes. Fail if anything is volatile. | 5578 /* Start by getting the comparison codes. Fail if anything is volatile. |
5079 If one operand is a BIT_AND_EXPR with the constant one, treat it as if | 5579 If one operand is a BIT_AND_EXPR with the constant one, treat it as if |
5080 it were surrounded with a NE_EXPR. */ | 5580 it were surrounded with a NE_EXPR. */ |
5081 | 5581 |
5135 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR); | 5635 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR); |
5136 | 5636 |
5137 /* If the RHS can be evaluated unconditionally and its operands are | 5637 /* If the RHS can be evaluated unconditionally and its operands are |
5138 simple, it wins to evaluate the RHS unconditionally on machines | 5638 simple, it wins to evaluate the RHS unconditionally on machines |
5139 with expensive branches. In this case, this isn't a comparison | 5639 with expensive branches. In this case, this isn't a comparison |
5140 that can be merged. Avoid doing this if the RHS is a floating-point | 5640 that can be merged. */ |
5141 comparison since those can trap. */ | |
5142 | 5641 |
5143 if (BRANCH_COST (optimize_function_for_speed_p (cfun), | 5642 if (BRANCH_COST (optimize_function_for_speed_p (cfun), |
5144 false) >= 2 | 5643 false) >= 2 |
5145 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg)) | 5644 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg)) |
5146 && simple_operand_p (rl_arg) | 5645 && simple_operand_p (rl_arg) |
5165 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg))) | 5664 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg))) |
5166 return build2_loc (loc, EQ_EXPR, truth_type, | 5665 return build2_loc (loc, EQ_EXPR, truth_type, |
5167 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg), | 5666 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg), |
5168 ll_arg, rl_arg), | 5667 ll_arg, rl_arg), |
5169 build_int_cst (TREE_TYPE (ll_arg), 0)); | 5668 build_int_cst (TREE_TYPE (ll_arg), 0)); |
5170 | |
5171 if (LOGICAL_OP_NON_SHORT_CIRCUIT) | |
5172 { | |
5173 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs) | |
5174 return build2_loc (loc, code, truth_type, lhs, rhs); | |
5175 return NULL_TREE; | |
5176 } | |
5177 } | 5669 } |
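[Editor's note] The transform just above at the source level (sketch; it requires integral operands, so no floating-point comparison can trap):

  int both_zero_before (int a, int b) { return a == 0 && b == 0; }
  int both_zero_after  (int a, int b) { return (a | b) == 0; }  /* one comparison, no second branch */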
5178 | 5670 |
5179 /* See if the comparisons can be merged. Then get all the parameters for | 5671 /* See if the comparisons can be merged. Then get all the parameters for |
5180 each side. */ | 5672 each side. */ |
5181 | 5673 |
5182 if ((lcode != EQ_EXPR && lcode != NE_EXPR) | 5674 if ((lcode != EQ_EXPR && lcode != NE_EXPR) |
5183 || (rcode != EQ_EXPR && rcode != NE_EXPR)) | 5675 || (rcode != EQ_EXPR && rcode != NE_EXPR)) |
5184 return 0; | 5676 return 0; |
5185 | 5677 |
5678 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0; | |
5186 volatilep = 0; | 5679 volatilep = 0; |
5187 ll_inner = decode_field_reference (loc, ll_arg, | 5680 ll_inner = decode_field_reference (loc, &ll_arg, |
5188 &ll_bitsize, &ll_bitpos, &ll_mode, | 5681 &ll_bitsize, &ll_bitpos, &ll_mode, |
5189 &ll_unsignedp, &volatilep, &ll_mask, | 5682 &ll_unsignedp, &ll_reversep, &volatilep, |
5190 &ll_and_mask); | 5683 &ll_mask, &ll_and_mask); |
5191 lr_inner = decode_field_reference (loc, lr_arg, | 5684 lr_inner = decode_field_reference (loc, &lr_arg, |
5192 &lr_bitsize, &lr_bitpos, &lr_mode, | 5685 &lr_bitsize, &lr_bitpos, &lr_mode, |
5193 &lr_unsignedp, &volatilep, &lr_mask, | 5686 &lr_unsignedp, &lr_reversep, &volatilep, |
5194 &lr_and_mask); | 5687 &lr_mask, &lr_and_mask); |
5195 rl_inner = decode_field_reference (loc, rl_arg, | 5688 rl_inner = decode_field_reference (loc, &rl_arg, |
5196 &rl_bitsize, &rl_bitpos, &rl_mode, | 5689 &rl_bitsize, &rl_bitpos, &rl_mode, |
5197 &rl_unsignedp, &volatilep, &rl_mask, | 5690 &rl_unsignedp, &rl_reversep, &volatilep, |
5198 &rl_and_mask); | 5691 &rl_mask, &rl_and_mask); |
5199 rr_inner = decode_field_reference (loc, rr_arg, | 5692 rr_inner = decode_field_reference (loc, &rr_arg, |
5200 &rr_bitsize, &rr_bitpos, &rr_mode, | 5693 &rr_bitsize, &rr_bitpos, &rr_mode, |
5201 &rr_unsignedp, &volatilep, &rr_mask, | 5694 &rr_unsignedp, &rr_reversep, &volatilep, |
5202 &rr_and_mask); | 5695 &rr_mask, &rr_and_mask); |
5203 | 5696 |
5204 /* It must be true that the inner operation on the lhs of each | 5697 /* It must be true that the inner operation on the lhs of each |
5205 comparison must be the same if we are to be able to do anything. | 5698 comparison must be the same if we are to be able to do anything. |
5206 Then see if we have constants. If not, the same must be true for | 5699 Then see if we have constants. If not, the same must be true for |
5207 the rhs's. */ | 5700 the rhs's. */ |
5208 if (volatilep || ll_inner == 0 || rl_inner == 0 | 5701 if (volatilep |
5702 || ll_reversep != rl_reversep | |
5703 || ll_inner == 0 || rl_inner == 0 | |
5209 || ! operand_equal_p (ll_inner, rl_inner, 0)) | 5704 || ! operand_equal_p (ll_inner, rl_inner, 0)) |
5210 return 0; | 5705 return 0; |
5211 | 5706 |
5212 if (TREE_CODE (lr_arg) == INTEGER_CST | 5707 if (TREE_CODE (lr_arg) == INTEGER_CST |
5213 && TREE_CODE (rr_arg) == INTEGER_CST) | 5708 && TREE_CODE (rr_arg) == INTEGER_CST) |
5214 l_const = lr_arg, r_const = rr_arg; | 5709 { |
5215 else if (lr_inner == 0 || rr_inner == 0 | 5710 l_const = lr_arg, r_const = rr_arg; |
5711 lr_reversep = ll_reversep; | |
5712 } | |
5713 else if (lr_reversep != rr_reversep | |
5714 || lr_inner == 0 || rr_inner == 0 | |
5216 || ! operand_equal_p (lr_inner, rr_inner, 0)) | 5715 || ! operand_equal_p (lr_inner, rr_inner, 0)) |
5217 return 0; | 5716 return 0; |
5218 else | 5717 else |
5219 l_const = r_const = 0; | 5718 l_const = r_const = 0; |
5220 | 5719 |
5252 /* See if we can find a mode that contains both fields being compared on | 5751 /* See if we can find a mode that contains both fields being compared on |
5253 the left. If we can't, fail. Otherwise, update all constants and masks | 5752 the left. If we can't, fail. Otherwise, update all constants and masks |
5254 to be relative to a field of that size. */ | 5753 to be relative to a field of that size. */ |
5255 first_bit = MIN (ll_bitpos, rl_bitpos); | 5754 first_bit = MIN (ll_bitpos, rl_bitpos); |
5256 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize); | 5755 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize); |
5257 lnmode = get_best_mode (end_bit - first_bit, first_bit, | 5756 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0, |
5258 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode, | 5757 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD, |
5259 volatilep); | 5758 volatilep, &lnmode)) |
5260 if (lnmode == VOIDmode) | |
5261 return 0; | 5759 return 0; |
5262 | 5760 |
5263 lnbitsize = GET_MODE_BITSIZE (lnmode); | 5761 lnbitsize = GET_MODE_BITSIZE (lnmode); |
5264 lnbitpos = first_bit & ~ (lnbitsize - 1); | 5762 lnbitpos = first_bit & ~ (lnbitsize - 1); |
5265 lntype = lang_hooks.types.type_for_size (lnbitsize, 1); | 5763 lntype = lang_hooks.types.type_for_size (lnbitsize, 1); |
5266 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos; | 5764 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos; |
5267 | 5765 |
5268 if (BYTES_BIG_ENDIAN) | 5766 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN) |
5269 { | 5767 { |
5270 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize; | 5768 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize; |
5271 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize; | 5769 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize; |
5272 } | 5770 } |
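[Editor's note] A worked instance of the bit-position flip above (numbers illustrative): with lnbitsize == 32, a 4-bit field whose little-endian bit position is 8 lands at xll_bitpos = 32 - 8 - 4 = 20 once bits are counted from the most-significant end, which is what the big-endian and reversed-storage-order cases need.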
5273 | 5771 |
5317 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos) | 5815 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos) |
5318 return 0; | 5816 return 0; |
5319 | 5817 |
5320 first_bit = MIN (lr_bitpos, rr_bitpos); | 5818 first_bit = MIN (lr_bitpos, rr_bitpos); |
5321 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize); | 5819 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize); |
5322 rnmode = get_best_mode (end_bit - first_bit, first_bit, | 5820 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0, |
5323 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode, | 5821 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD, |
5324 volatilep); | 5822 volatilep, &rnmode)) |
5325 if (rnmode == VOIDmode) | |
5326 return 0; | 5823 return 0; |
5327 | 5824 |
5328 rnbitsize = GET_MODE_BITSIZE (rnmode); | 5825 rnbitsize = GET_MODE_BITSIZE (rnmode); |
5329 rnbitpos = first_bit & ~ (rnbitsize - 1); | 5826 rnbitpos = first_bit & ~ (rnbitsize - 1); |
5330 rntype = lang_hooks.types.type_for_size (rnbitsize, 1); | 5827 rntype = lang_hooks.types.type_for_size (rnbitsize, 1); |
5331 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos; | 5828 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos; |
5332 | 5829 |
5333 if (BYTES_BIG_ENDIAN) | 5830 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN) |
5334 { | 5831 { |
5335 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize; | 5832 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize; |
5336 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize; | 5833 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize; |
5337 } | 5834 } |
5338 | 5835 |
5348 same size and the bits being compared are in the same position | 5845 same size and the bits being compared are in the same position |
5349 then we can do this by masking both and comparing the masked | 5846 then we can do this by masking both and comparing the masked |
5350 results. */ | 5847 results. */ |
5351 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask); | 5848 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask); |
5352 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask); | 5849 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask); |
5353 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos) | 5850 if (lnbitsize == rnbitsize |
5354 { | 5851 && xll_bitpos == xlr_bitpos |
5355 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos, | 5852 && lnbitpos >= 0 |
5356 ll_unsignedp || rl_unsignedp); | 5853 && rnbitpos >= 0) |
5854 { | |
5855 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, | |
5856 lntype, lnbitsize, lnbitpos, | |
5857 ll_unsignedp || rl_unsignedp, ll_reversep); | |
5357 if (! all_ones_mask_p (ll_mask, lnbitsize)) | 5858 if (! all_ones_mask_p (ll_mask, lnbitsize)) |
5358 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask); | 5859 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask); |
5359 | 5860 |
5360 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos, | 5861 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, |
5361 lr_unsignedp || rr_unsignedp); | 5862 rntype, rnbitsize, rnbitpos, |
5863 lr_unsignedp || rr_unsignedp, lr_reversep); | |
5362 if (! all_ones_mask_p (lr_mask, rnbitsize)) | 5864 if (! all_ones_mask_p (lr_mask, rnbitsize)) |
5363 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask); | 5865 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask); |
5364 | 5866 |
5365 return build2_loc (loc, wanted_code, truth_type, lhs, rhs); | 5867 return build2_loc (loc, wanted_code, truth_type, lhs, rhs); |
5366 } | 5868 } |
5370 field containing them both. | 5872 field containing them both. |
5371 | 5873 |
5372 Note that we still must mask the lhs/rhs expressions. Furthermore, | 5874 Note that we still must mask the lhs/rhs expressions. Furthermore, |
5373 the mask must be shifted to account for the shift done by | 5875 the mask must be shifted to account for the shift done by |
5374 make_bit_field_ref. */ | 5876 make_bit_field_ref. */ |
5375 if ((ll_bitsize + ll_bitpos == rl_bitpos | 5877 if (((ll_bitsize + ll_bitpos == rl_bitpos |
5376 && lr_bitsize + lr_bitpos == rr_bitpos) | 5878 && lr_bitsize + lr_bitpos == rr_bitpos) |
5377 || (ll_bitpos == rl_bitpos + rl_bitsize | 5879 || (ll_bitpos == rl_bitpos + rl_bitsize |
5378 && lr_bitpos == rr_bitpos + rr_bitsize)) | 5880 && lr_bitpos == rr_bitpos + rr_bitsize)) |
5881 && ll_bitpos >= 0 | |
5882 && rl_bitpos >= 0 | |
5883 && lr_bitpos >= 0 | |
5884 && rr_bitpos >= 0) | |
5379 { | 5885 { |
5380 tree type; | 5886 tree type; |
5381 | 5887 |
5382 lhs = make_bit_field_ref (loc, ll_inner, lntype, | 5888 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype, |
5383 ll_bitsize + rl_bitsize, | 5889 ll_bitsize + rl_bitsize, |
5384 MIN (ll_bitpos, rl_bitpos), ll_unsignedp); | 5890 MIN (ll_bitpos, rl_bitpos), |
5385 rhs = make_bit_field_ref (loc, lr_inner, rntype, | 5891 ll_unsignedp, ll_reversep); |
5892 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype, | |
5386 lr_bitsize + rr_bitsize, | 5893 lr_bitsize + rr_bitsize, |
5387 MIN (lr_bitpos, rr_bitpos), lr_unsignedp); | 5894 MIN (lr_bitpos, rr_bitpos), |
5895 lr_unsignedp, lr_reversep); | |
5388 | 5896 |
5389 ll_mask = const_binop (RSHIFT_EXPR, ll_mask, | 5897 ll_mask = const_binop (RSHIFT_EXPR, ll_mask, |
5390 size_int (MIN (xll_bitpos, xrl_bitpos))); | 5898 size_int (MIN (xll_bitpos, xrl_bitpos))); |
5391 lr_mask = const_binop (RSHIFT_EXPR, lr_mask, | 5899 lr_mask = const_binop (RSHIFT_EXPR, lr_mask, |
5392 size_int (MIN (xlr_bitpos, xrr_bitpos))); | 5900 size_int (MIN (xlr_bitpos, xrr_bitpos))); |
5440 warning (0, "%<and%> of mutually exclusive equal-tests is always 0"); | 5948 warning (0, "%<and%> of mutually exclusive equal-tests is always 0"); |
5441 return constant_boolean_node (false, truth_type); | 5949 return constant_boolean_node (false, truth_type); |
5442 } | 5950 } |
5443 } | 5951 } |
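[Editor's note] A sketch of the source pattern the warning above fires on (struct and field name hypothetical):

  struct t { int b; };
  int never_true (struct t *a) { return a->b == 2 && a->b == 4; }  /* folded to 0 */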
5444 | 5952 |
5953 if (lnbitpos < 0) | |
5954 return 0; | |
5955 | |
5445 /* Construct the expression we will return. First get the component | 5956 /* Construct the expression we will return. First get the component |
5446 reference we will make. Unless the mask is all ones the width of | 5957 reference we will make. Unless the mask is all ones the width of |
5447 that field, perform the mask operation. Then compare with the | 5958 that field, perform the mask operation. Then compare with the |
5448 merged constant. */ | 5959 merged constant. */ |
5449 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos, | 5960 result = make_bit_field_ref (loc, ll_inner, ll_arg, |
5450 ll_unsignedp || rl_unsignedp); | 5961 lntype, lnbitsize, lnbitpos, |
5962 ll_unsignedp || rl_unsignedp, ll_reversep); | |
5451 | 5963 |
5452 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask); | 5964 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask); |
5453 if (! all_ones_mask_p (ll_mask, lnbitsize)) | 5965 if (! all_ones_mask_p (ll_mask, lnbitsize)) |
5454 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask); | 5966 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask); |
5455 | 5967 |
5456 return build2_loc (loc, wanted_code, truth_type, result, | 5968 return build2_loc (loc, wanted_code, truth_type, result, |
5457 const_binop (BIT_IOR_EXPR, l_const, r_const)); | 5969 const_binop (BIT_IOR_EXPR, l_const, r_const)); |
5458 } | |
5459 | |
5460 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a | |
5461 constant. */ | |
5462 | |
5463 static tree | |
5464 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type, | |
5465 tree op0, tree op1) | |
5466 { | |
5467 tree arg0 = op0; | |
5468 enum tree_code op_code; | |
5469 tree comp_const; | |
5470 tree minmax_const; | |
5471 int consts_equal, consts_lt; | |
5472 tree inner; | |
5473 | |
5474 STRIP_SIGN_NOPS (arg0); | |
5475 | |
5476 op_code = TREE_CODE (arg0); | |
5477 minmax_const = TREE_OPERAND (arg0, 1); | |
5478 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1); | |
5479 consts_equal = tree_int_cst_equal (minmax_const, comp_const); | |
5480 consts_lt = tree_int_cst_lt (minmax_const, comp_const); | |
5481 inner = TREE_OPERAND (arg0, 0); | |
5482 | |
5483 /* If something does not permit us to optimize, return the original tree. */ | |
5484 if ((op_code != MIN_EXPR && op_code != MAX_EXPR) | |
5485 || TREE_CODE (comp_const) != INTEGER_CST | |
5486 || TREE_OVERFLOW (comp_const) | |
5487 || TREE_CODE (minmax_const) != INTEGER_CST | |
5488 || TREE_OVERFLOW (minmax_const)) | |
5489 return NULL_TREE; | |
5490 | |
5491 /* Now handle all the various comparison codes. We only handle EQ_EXPR | |
5492 and GT_EXPR, doing the rest with recursive calls using logical | |
5493 simplifications. */ | |
5494 switch (code) | |
5495 { | |
5496 case NE_EXPR: case LT_EXPR: case LE_EXPR: | |
5497 { | |
5498 tree tem | |
5499 = optimize_minmax_comparison (loc, | |
5500 invert_tree_comparison (code, false), | |
5501 type, op0, op1); | |
5502 if (tem) | |
5503 return invert_truthvalue_loc (loc, tem); | |
5504 return NULL_TREE; | |
5505 } | |
5506 | |
5507 case GE_EXPR: | |
5508 return | |
5509 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type, | |
5510 optimize_minmax_comparison | |
5511 (loc, EQ_EXPR, type, arg0, comp_const), | |
5512 optimize_minmax_comparison | |
5513 (loc, GT_EXPR, type, arg0, comp_const)); | |
5514 | |
5515 case EQ_EXPR: | |
5516 if (op_code == MAX_EXPR && consts_equal) | |
5517 /* MAX (X, 0) == 0 -> X <= 0 */ | |
5518 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const); | |
5519 | |
5520 else if (op_code == MAX_EXPR && consts_lt) | |
5521 /* MAX (X, 0) == 5 -> X == 5 */ | |
5522 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const); | |
5523 | |
5524 else if (op_code == MAX_EXPR) | |
5525 /* MAX (X, 0) == -1 -> false */ | |
5526 return omit_one_operand_loc (loc, type, integer_zero_node, inner); | |
5527 | |
5528 else if (consts_equal) | |
5529 /* MIN (X, 0) == 0 -> X >= 0 */ | |
5530 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const); | |
5531 | |
5532 else if (consts_lt) | |
5533 /* MIN (X, 0) == 5 -> false */ | |
5534 return omit_one_operand_loc (loc, type, integer_zero_node, inner); | |
5535 | |
5536 else | |
5537 /* MIN (X, 0) == -1 -> X == -1 */ | |
5538 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const); | |
5539 | |
5540 case GT_EXPR: | |
5541 if (op_code == MAX_EXPR && (consts_equal || consts_lt)) | |
5542 /* MAX (X, 0) > 0 -> X > 0 | |
5543 MAX (X, 0) > 5 -> X > 5 */ | |
5544 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const); | |
5545 | |
5546 else if (op_code == MAX_EXPR) | |
5547 /* MAX (X, 0) > -1 -> true */ | |
5548 return omit_one_operand_loc (loc, type, integer_one_node, inner); | |
5549 | |
5550 else if (op_code == MIN_EXPR && (consts_equal || consts_lt)) | |
5551 /* MIN (X, 0) > 0 -> false | |
5552 MIN (X, 0) > 5 -> false */ | |
5553 return omit_one_operand_loc (loc, type, integer_zero_node, inner); | |
5554 | |
5555 else | |
5556 /* MIN (X, 0) > -1 -> X > -1 */ | |
5557 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const); | |
5558 | |
5559 default: | |
5560 return NULL_TREE; | |
5561 } | |
5562 } | 5970 } |
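[Editor's note] The optimize_minmax_comparison body removed on the left handled folds like the ones below; in this revision such patterns are presumably matched elsewhere (the new comments in this file point at match.pd). A sketch, using the ternaries that earlier folding canonicalizes to MAX_EXPR/MIN_EXPR:

  int max_eq (int x) { return (x > 0 ? x : 0) == 0; }  /* MAX (x, 0) == 0  ->  x <= 0 */
  int min_gt (int x) { return (x < 0 ? x : 0) > -1; }  /* MIN (x, 0) > -1  ->  x > -1 */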
5563 | 5971 |
5564 /* T is an integer expression that is being multiplied, divided, or taken a | 5972 /* T is an integer expression that is being multiplied, divided, or taken a |
5565 modulus (CODE says which and what kind of divide or modulus) by a | 5973 modulus (CODE says which and what kind of divide or modulus) by a |
5566 constant C. See if we can eliminate that operation by folding it with | 5974 constant C. See if we can eliminate that operation by folding it with |
5605 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type, | 6013 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type, |
5606 bool *strict_overflow_p) | 6014 bool *strict_overflow_p) |
5607 { | 6015 { |
5608 tree type = TREE_TYPE (t); | 6016 tree type = TREE_TYPE (t); |
5609 enum tree_code tcode = TREE_CODE (t); | 6017 enum tree_code tcode = TREE_CODE (t); |
5610 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type)) | 6018 tree ctype = (wide_type != 0 |
5611 > GET_MODE_SIZE (TYPE_MODE (type))) | 6019 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type)) |
6020 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type))) | |
5612 ? wide_type : type); | 6021 ? wide_type : type); |
5613 tree t1, t2; | 6022 tree t1, t2; |
5614 int same_p = tcode == code; | 6023 int same_p = tcode == code; |
5615 tree op0 = NULL_TREE, op1 = NULL_TREE; | 6024 tree op0 = NULL_TREE, op1 = NULL_TREE; |
5616 bool sub_strict_overflow_p; | 6025 bool sub_strict_overflow_p; |
5631 { | 6040 { |
5632 case INTEGER_CST: | 6041 case INTEGER_CST: |
5633 /* For a constant, we can always simplify if we are a multiply | 6042 /* For a constant, we can always simplify if we are a multiply |
5634 or (for divide and modulus) if it is a multiple of our constant. */ | 6043 or (for divide and modulus) if it is a multiple of our constant. */ |
5635 if (code == MULT_EXPR | 6044 if (code == MULT_EXPR |
5636 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c))) | 6045 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c), |
5637 return const_binop (code, fold_convert (ctype, t), | 6046 TYPE_SIGN (type))) |
5638 fold_convert (ctype, c)); | 6047 { |
6048 tree tem = const_binop (code, fold_convert (ctype, t), | |
6049 fold_convert (ctype, c)); | |
6050 /* If the multiplication overflowed, we lost information on it. | |
6051 See PR68142 and PR69845. */ | |
6052 if (TREE_OVERFLOW (tem)) | |
6053 return NULL_TREE; | |
6054 return tem; | |
6055 } | |
5639 break; | 6056 break; |
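[Editor's note] A worked instance of the hazard the new TREE_OVERFLOW check guards against (32-bit int assumed): with t = 0x40000000, c = 4 and code == MULT_EXPR, the compile-time product is 2^32, which wraps to 0 and is marked TREE_OVERFLOW by const_binop; returning that wrapped constant would let later folds treat the product as genuinely 0, so the routine now bails out with NULL_TREE instead.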
5640 | 6057 |
5641 CASE_CONVERT: case NON_LVALUE_EXPR: | 6058 CASE_CONVERT: case NON_LVALUE_EXPR: |
5642 /* If op0 is an expression ... */ | 6059 /* If op0 is an expression ... */ |
5643 if ((COMPARISON_CLASS_P (op0) | 6060 if ((COMPARISON_CLASS_P (op0) |
5645 || BINARY_CLASS_P (op0) | 6062 || BINARY_CLASS_P (op0) |
5646 || VL_EXP_CLASS_P (op0) | 6063 || VL_EXP_CLASS_P (op0) |
5647 || EXPRESSION_CLASS_P (op0)) | 6064 || EXPRESSION_CLASS_P (op0)) |
5648 /* ... and has wrapping overflow, and its type is smaller | 6065 /* ... and has wrapping overflow, and its type is smaller |
5649 than ctype, then we cannot pass through as widening. */ | 6066 than ctype, then we cannot pass through as widening. */ |
5650 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)) | 6067 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0)) |
5651 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE | 6068 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))) |
5652 && TYPE_IS_SIZETYPE (TREE_TYPE (op0))) | |
5653 && (TYPE_PRECISION (ctype) | 6069 && (TYPE_PRECISION (ctype) |
5654 > TYPE_PRECISION (TREE_TYPE (op0)))) | 6070 > TYPE_PRECISION (TREE_TYPE (op0)))) |
5655 /* ... or this is a truncation (t is narrower than op0), | 6071 /* ... or this is a truncation (t is narrower than op0), |
5656 then we cannot pass through this narrowing. */ | 6072 then we cannot pass through this narrowing. */ |
5657 || (TYPE_PRECISION (type) | 6073 || (TYPE_PRECISION (type) |
5662 && (TYPE_UNSIGNED (ctype) | 6078 && (TYPE_UNSIGNED (ctype) |
5663 != TYPE_UNSIGNED (TREE_TYPE (op0)))) | 6079 != TYPE_UNSIGNED (TREE_TYPE (op0)))) |
5664 /* ... or has undefined overflow while the converted to | 6080 /* ... or has undefined overflow while the converted to |
5665 type has not, we cannot do the operation in the inner type | 6081 type has not, we cannot do the operation in the inner type |
5666 as that would introduce undefined overflow. */ | 6082 as that would introduce undefined overflow. */ |
5667 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)) | 6083 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0)) |
6084 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))) | |
5668 && !TYPE_OVERFLOW_UNDEFINED (type)))) | 6085 && !TYPE_OVERFLOW_UNDEFINED (type)))) |
5669 break; | 6086 break; |
5670 | 6087 |
5671 /* Pass the constant down and see if we can make a simplification. If | 6088 /* Pass the constant down and see if we can make a simplification. If |
5672 we can, replace this expression with the inner simplification for | 6089 we can, replace this expression with the inner simplification for |
5698 /* If the constant is negative, we cannot simplify this. */ | 6115 /* If the constant is negative, we cannot simplify this. */ |
5699 if (tree_int_cst_sgn (c) == -1) | 6116 if (tree_int_cst_sgn (c) == -1) |
5700 break; | 6117 break; |
5701 /* FALLTHROUGH */ | 6118 /* FALLTHROUGH */ |
5702 case NEGATE_EXPR: | 6119 case NEGATE_EXPR: |
6120 /* For division and modulus, type can't be unsigned, as e.g. | |
6121 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2. | |
6122 For signed types, even with wrapping overflow, this is fine. */ | |
6123 if (code != MULT_EXPR && TYPE_UNSIGNED (type)) | |
6124 break; | |
5703 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p)) | 6125 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p)) |
5704 != 0) | 6126 != 0) |
5705 return fold_build1 (tcode, ctype, fold_convert (ctype, t1)); | 6127 return fold_build1 (tcode, ctype, fold_convert (ctype, t1)); |
5706 break; | 6128 break; |
5707 | 6129 |
5736 multiplication. */ | 6158 multiplication. */ |
5737 if (TREE_CODE (op1) == INTEGER_CST | 6159 if (TREE_CODE (op1) == INTEGER_CST |
5738 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0))) | 6160 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0))) |
5739 /* const_binop may not detect overflow correctly, | 6161 /* const_binop may not detect overflow correctly, |
5740 so check for it explicitly here. */ | 6162 so check for it explicitly here. */ |
5741 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1) | 6163 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), |
5742 && TREE_INT_CST_HIGH (op1) == 0 | 6164 wi::to_wide (op1)) |
5743 && 0 != (t1 = fold_convert (ctype, | 6165 && 0 != (t1 = fold_convert (ctype, |
5744 const_binop (LSHIFT_EXPR, | 6166 const_binop (LSHIFT_EXPR, |
5745 size_one_node, | 6167 size_one_node, |
5746 op1))) | 6168 op1))) |
5747 && !TREE_OVERFLOW (t1)) | 6169 && !TREE_OVERFLOW (t1)) |
5760 constant. */ | 6182 constant. */ |
5761 sub_strict_overflow_p = false; | 6183 sub_strict_overflow_p = false; |
5762 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p); | 6184 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p); |
5763 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p); | 6185 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p); |
5764 if (t1 != 0 && t2 != 0 | 6186 if (t1 != 0 && t2 != 0 |
6187 && TYPE_OVERFLOW_WRAPS (ctype) | |
5765 && (code == MULT_EXPR | 6188 && (code == MULT_EXPR |
5766 /* If not multiplication, we can only do this if both operands | 6189 /* If not multiplication, we can only do this if both operands |
5767 are divisible by c. */ | 6190 are divisible by c. */ |
5768 || (multiple_of_p (ctype, op0, c) | 6191 || (multiple_of_p (ctype, op0, c) |
5769 && multiple_of_p (ctype, op1, c)))) | 6192 && multiple_of_p (ctype, op1, c)))) |
5780 { | 6203 { |
5781 tcode = PLUS_EXPR, op1 = negate_expr (op1); | 6204 tcode = PLUS_EXPR, op1 = negate_expr (op1); |
5782 /* If OP1 was not easily negatable, the constant may be OP0. */ | 6205 /* If OP1 was not easily negatable, the constant may be OP0. */ |
5783 if (TREE_CODE (op0) == INTEGER_CST) | 6206 if (TREE_CODE (op0) == INTEGER_CST) |
5784 { | 6207 { |
5785 tree tem = op0; | 6208 std::swap (op0, op1); |
5786 op0 = op1; | 6209 std::swap (t1, t2); |
5787 op1 = tem; | |
5788 tem = t1; | |
5789 t1 = t2; | |
5790 t2 = tem; | |
5791 } | 6210 } |
5792 } | 6211 } |
5793 | 6212 |
5794 if (TREE_CODE (op1) != INTEGER_CST) | 6213 if (TREE_CODE (op1) != INTEGER_CST) |
5795 break; | 6214 break; |
5809 } | 6228 } |
5810 | 6229 |
5811 /* If it's a multiply or a division/modulus operation of a multiple | 6230 /* If it's a multiply or a division/modulus operation of a multiple |
5812 of our constant, do the operation and verify it doesn't overflow. */ | 6231 of our constant, do the operation and verify it doesn't overflow. */ |
5813 if (code == MULT_EXPR | 6232 if (code == MULT_EXPR |
5814 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c))) | 6233 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c), |
6234 TYPE_SIGN (type))) | |
5815 { | 6235 { |
5816 op1 = const_binop (code, fold_convert (ctype, op1), | 6236 op1 = const_binop (code, fold_convert (ctype, op1), |
5817 fold_convert (ctype, c)); | 6237 fold_convert (ctype, c)); |
5818 /* We allow the constant to overflow with wrapping semantics. */ | 6238 /* We allow the constant to overflow with wrapping semantics. */ |
5819 if (op1 == 0 | 6239 if (op1 == 0 |
5821 break; | 6241 break; |
5822 } | 6242 } |
5823 else | 6243 else |
5824 break; | 6244 break; |
5825 | 6245 |
5826 /* If we have an unsigned type that is not a sizetype, we cannot widen | 6246 /* If we have an unsigned type, we cannot widen the operation since it |
5827 the operation since it will change the result if the original | 6247 will change the result if the original computation overflowed. */ |
5828 computation overflowed. */ | 6248 if (TYPE_UNSIGNED (ctype) && ctype != type) |
5829 if (TYPE_UNSIGNED (ctype) | |
5830 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)) | |
5831 && ctype != type) | |
5832 break; | 6249 break; |
5833 | |
5834 /* If we were able to eliminate our operation from the first side, | |
5835 apply our operation to the second side and reform the PLUS. */ | |
5836 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR)) | |
5837 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1); | |
5838 | 6250 |
5839 /* The last case is if we are a multiply. In that case, we can | 6251 /* The last case is if we are a multiply. In that case, we can |
5840 apply the distributive law to commute the multiply and addition | 6252 apply the distributive law to commute the multiply and addition |
5841 if the multiplication of the constants doesn't overflow. */ | 6253 if the multiplication of the constants doesn't overflow |
5842 if (code == MULT_EXPR) | 6254 and overflow is defined. With undefined overflow |
6255 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */ | |
6256 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype)) | |
5843 return fold_build2 (tcode, ctype, | 6257 return fold_build2 (tcode, ctype, |
5844 fold_build2 (code, ctype, | 6258 fold_build2 (code, ctype, |
5845 fold_convert (ctype, op0), | 6259 fold_convert (ctype, op0), |
5846 fold_convert (ctype, c)), | 6260 fold_convert (ctype, c)), |
5847 op1); | 6261 op1); |
5851 case MULT_EXPR: | 6265 case MULT_EXPR: |
5852 /* We have a special case here if we are doing something like | 6266 /* We have a special case here if we are doing something like |
5853 (C * 8) % 4 since we know that's zero. */ | 6267 (C * 8) % 4 since we know that's zero. */ |
5854 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR | 6268 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR |
5855 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR) | 6269 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR) |
5856 /* If the multiplication can overflow we cannot optimize this. | 6270 /* If the multiplication can overflow we cannot optimize this. */ |
5857 ??? Until we can properly mark individual operations as | 6271 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)) |
5858 not overflowing we need to treat sizetype special here as | |
5859 stor-layout relies on this optimization to make | |
5860 DECL_FIELD_BIT_OFFSET always a constant. */ | |
5861 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)) | |
5862 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE | |
5863 && TYPE_IS_SIZETYPE (TREE_TYPE (t)))) | |
5864 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST | 6272 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST |
5865 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c))) | 6273 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c), |
6274 TYPE_SIGN (type))) | |
5866 { | 6275 { |
5867 *strict_overflow_p = true; | 6276 *strict_overflow_p = true; |
5868 return omit_one_operand (type, integer_zero_node, op0); | 6277 return omit_one_operand (type, integer_zero_node, op0); |
5869 } | 6278 } |
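[Editor's note] The special case above at the source level (sketch; plain signed int qualifies because its overflow is undefined, satisfying the TYPE_OVERFLOW_UNDEFINED test):

  int mod_of_mult (int i) { return (i * 8) % 4; }  /* folded to 0 */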
5870 | 6279 |
5874 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR: | 6283 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR: |
5875 /* If we can extract our operation from the LHS, do so and return a | 6284 /* If we can extract our operation from the LHS, do so and return a |
5876 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise, | 6285 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise, |
5877 do something only if the second operand is a constant. */ | 6286 do something only if the second operand is a constant. */ |
5878 if (same_p | 6287 if (same_p |
6288 && TYPE_OVERFLOW_WRAPS (ctype) | |
5879 && (t1 = extract_muldiv (op0, c, code, wide_type, | 6289 && (t1 = extract_muldiv (op0, c, code, wide_type, |
5880 strict_overflow_p)) != 0) | 6290 strict_overflow_p)) != 0) |
5881 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), | 6291 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), |
5882 fold_convert (ctype, op1)); | 6292 fold_convert (ctype, op1)); |
5883 else if (tcode == MULT_EXPR && code == MULT_EXPR | 6293 else if (tcode == MULT_EXPR && code == MULT_EXPR |
6294 && TYPE_OVERFLOW_WRAPS (ctype) | |
5884 && (t1 = extract_muldiv (op1, c, code, wide_type, | 6295 && (t1 = extract_muldiv (op1, c, code, wide_type, |
5885 strict_overflow_p)) != 0) | 6296 strict_overflow_p)) != 0) |
5886 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), | 6297 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), |
5887 fold_convert (ctype, t1)); | 6298 fold_convert (ctype, t1)); |
5888 else if (TREE_CODE (op1) != INTEGER_CST) | 6299 else if (TREE_CODE (op1) != INTEGER_CST) |
5889 return 0; | 6300 return 0; |
5890 | 6301 |
5891 /* If these are the same operation types, we can associate them | 6302 /* If these are the same operation types, we can associate them |
5892 assuming no overflow. */ | 6303 assuming no overflow. */ |
5893 if (tcode == code | 6304 if (tcode == code) |
5894 && 0 != (t1 = int_const_binop (MULT_EXPR, | 6305 { |
5895 fold_convert (ctype, op1), | 6306 bool overflow_p = false; |
5896 fold_convert (ctype, c), 1)) | 6307 bool overflow_mul_p; |
5897 && 0 != (t1 = force_fit_type_double (ctype, tree_to_double_int (t1), | 6308 signop sign = TYPE_SIGN (ctype); |
5898 (TYPE_UNSIGNED (ctype) | 6309 unsigned prec = TYPE_PRECISION (ctype); |
5899 && tcode != MULT_EXPR) ? -1 : 1, | 6310 wide_int mul = wi::mul (wi::to_wide (op1, prec), |
5900 TREE_OVERFLOW (t1))) | 6311 wi::to_wide (c, prec), |
5901 && !TREE_OVERFLOW (t1)) | 6312 sign, &overflow_mul_p); |
5902 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1); | 6313 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1); |
6314 if (overflow_mul_p | |
6315 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED)) | |
6316 overflow_p = true; | |
6317 if (!overflow_p) | |
6318 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), | |
6319 wide_int_to_tree (ctype, mul)); | |
6320 } | |
5903 | 6321 |
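[Editor's note] The association performed just above, sketched at the source level; the two constants are multiplied at compile time with wi::mul, and the fold is refused when that product overflows in ctype:

  int assoc (int x) { return (x * 4) * 2; }  /* becomes x * 8, since 4 * 2 cannot overflow */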
5904 /* If these operations "cancel" each other, we have the main | 6322 /* If these operations "cancel" each other, we have the main |
5905 optimizations of this pass, which occur when either constant is a | 6323 optimizations of this pass, which occur when either constant is a |
5906 multiple of the other, in which case we replace this with either an | 6324 multiple of the other, in which case we replace this with either an |
5907 operation of CODE or TCODE. | 6325 operation of CODE or TCODE. |
5908 | 6326 |
5909 If we have an unsigned type that is not a sizetype, we cannot do | 6327 If we have an unsigned type, we cannot do this since it will change |
5910 this since it will change the result if the original computation | 6328 the result if the original computation overflowed. */ |
5911 overflowed. */ | 6329 if (TYPE_OVERFLOW_UNDEFINED (ctype) |
5912 if ((TYPE_OVERFLOW_UNDEFINED (ctype) | |
5913 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))) | |
5914 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR) | 6330 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR) |
5915 || (tcode == MULT_EXPR | 6331 || (tcode == MULT_EXPR |
5916 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR | 6332 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR |
5917 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR | 6333 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR |
5918 && code != MULT_EXPR))) | 6334 && code != MULT_EXPR))) |
5919 { | 6335 { |
5920 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c))) | 6336 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c), |
6337 TYPE_SIGN (type))) | |
5921 { | 6338 { |
5922 if (TYPE_OVERFLOW_UNDEFINED (ctype)) | 6339 if (TYPE_OVERFLOW_UNDEFINED (ctype)) |
5923 *strict_overflow_p = true; | 6340 *strict_overflow_p = true; |
5924 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), | 6341 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), |
5925 fold_convert (ctype, | 6342 fold_convert (ctype, |
5926 const_binop (TRUNC_DIV_EXPR, | 6343 const_binop (TRUNC_DIV_EXPR, |
5927 op1, c))); | 6344 op1, c))); |
5928 } | 6345 } |
5929 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1))) | 6346 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1), |
6347 TYPE_SIGN (type))) | |
5930 { | 6348 { |
5931 if (TYPE_OVERFLOW_UNDEFINED (ctype)) | 6349 if (TYPE_OVERFLOW_UNDEFINED (ctype)) |
5932 *strict_overflow_p = true; | 6350 *strict_overflow_p = true; |
5933 return fold_build2 (code, ctype, fold_convert (ctype, op0), | 6351 return fold_build2 (code, ctype, fold_convert (ctype, op0), |
5934 fold_convert (ctype, | 6352 fold_convert (ctype, |
5944 | 6362 |
5945 return 0; | 6363 return 0; |
5946 } | 6364 } |
5947 | 6365 |
5948 /* Return a node which has the indicated constant VALUE (either 0 or | 6366 /* Return a node which has the indicated constant VALUE (either 0 or |
5949 1), and is of the indicated TYPE. */ | 6367 1 for scalars or {-1,-1,..} or {0,0,...} for vectors), |
6368 and is of the indicated TYPE. */ | |
5950 | 6369 |
5951 tree | 6370 tree |
5952 constant_boolean_node (int value, tree type) | 6371 constant_boolean_node (bool value, tree type) |
5953 { | 6372 { |
5954 if (type == integer_type_node) | 6373 if (type == integer_type_node) |
5955 return value ? integer_one_node : integer_zero_node; | 6374 return value ? integer_one_node : integer_zero_node; |
5956 else if (type == boolean_type_node) | 6375 else if (type == boolean_type_node) |
5957 return value ? boolean_true_node : boolean_false_node; | 6376 return value ? boolean_true_node : boolean_false_node; |
6377 else if (TREE_CODE (type) == VECTOR_TYPE) | |
6378 return build_vector_from_val (type, | |
6379 build_int_cst (TREE_TYPE (type), | |
6380 value ? -1 : 0)); | |
5958 else | 6381 else |
5959 return build_int_cst (type, value); | 6382 return fold_convert (type, value ? integer_one_node : integer_zero_node); |
5960 } | 6383 } |
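[Editor's note] For the new vector branch, "true" is all-ones in each element, matching the masks that vector comparisons produce. A sketch of the constant being built, written with GCC's vector extension (typedef name illustrative):

  typedef int v4si __attribute__ ((vector_size (16)));
  v4si vec_true = { -1, -1, -1, -1 };  /* what constant_boolean_node (true, <v4si type>) denotes */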
5961 | 6384 |
5962 | 6385 |
5963 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'. | 6386 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'. |
5964 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here | 6387 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here |
5978 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1); | 6401 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1); |
5979 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0); | 6402 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0); |
5980 tree test, true_value, false_value; | 6403 tree test, true_value, false_value; |
5981 tree lhs = NULL_TREE; | 6404 tree lhs = NULL_TREE; |
5982 tree rhs = NULL_TREE; | 6405 tree rhs = NULL_TREE; |
5983 | 6406 enum tree_code cond_code = COND_EXPR; |
5984 if (TREE_CODE (cond) == COND_EXPR) | 6407 |
6408 if (TREE_CODE (cond) == COND_EXPR | |
6409 || TREE_CODE (cond) == VEC_COND_EXPR) | |
5985 { | 6410 { |
5986 test = TREE_OPERAND (cond, 0); | 6411 test = TREE_OPERAND (cond, 0); |
5987 true_value = TREE_OPERAND (cond, 1); | 6412 true_value = TREE_OPERAND (cond, 1); |
5988 false_value = TREE_OPERAND (cond, 2); | 6413 false_value = TREE_OPERAND (cond, 2); |
5989 /* If this operand throws an expression, then it does not make | 6414 /* If this operand throws an expression, then it does not make |
5992 if (VOID_TYPE_P (TREE_TYPE (true_value))) | 6417 if (VOID_TYPE_P (TREE_TYPE (true_value))) |
5993 lhs = true_value; | 6418 lhs = true_value; |
5994 if (VOID_TYPE_P (TREE_TYPE (false_value))) | 6419 if (VOID_TYPE_P (TREE_TYPE (false_value))) |
5995 rhs = false_value; | 6420 rhs = false_value; |
5996 } | 6421 } |
5997 else | 6422 else if (!(TREE_CODE (type) != VECTOR_TYPE |
6423 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE)) | |
5998 { | 6424 { |
5999 tree testtype = TREE_TYPE (cond); | 6425 tree testtype = TREE_TYPE (cond); |
6000 test = cond; | 6426 test = cond; |
6001 true_value = constant_boolean_node (true, testtype); | 6427 true_value = constant_boolean_node (true, testtype); |
6002 false_value = constant_boolean_node (false, testtype); | 6428 false_value = constant_boolean_node (false, testtype); |
6003 } | 6429 } |
6430 else | |
6431 /* Detect the case of mixing vector and scalar types - bail out. */ | |
6432 return NULL_TREE; | |
6433 | |
6434 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE) | |
6435 cond_code = VEC_COND_EXPR; | |
6004 | 6436 |
6005 /* This transformation is only worthwhile if we don't have to wrap ARG | 6437 /* This transformation is only worthwhile if we don't have to wrap ARG |
6006 in a SAVE_EXPR and the operation can be simplified on at least one | 6438 in a SAVE_EXPR and the operation can be simplified without recursing |
6007 of the branches once it's pushed inside the COND_EXPR. */ | 6439 on at least one of the branches once it's pushed inside the COND_EXPR. */ |
6008 if (!TREE_CONSTANT (arg) | 6440 if (!TREE_CONSTANT (arg) |
6009 && (TREE_SIDE_EFFECTS (arg) | 6441 && (TREE_SIDE_EFFECTS (arg) |
6442 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR | |
6010 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value))) | 6443 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value))) |
6011 return NULL_TREE; | 6444 return NULL_TREE; |
6012 | 6445 |
6013 arg = fold_convert_loc (loc, arg_type, arg); | 6446 arg = fold_convert_loc (loc, arg_type, arg); |
6014 if (lhs == 0) | 6447 if (lhs == 0) |
6030 | 6463 |
6031 /* Check that we have simplified at least one of the branches. */ | 6464 /* Check that we have simplified at least one of the branches. */ |
6032 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs)) | 6465 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs)) |
6033 return NULL_TREE; | 6466 return NULL_TREE; |
6034 | 6467 |
6035 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs); | 6468 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs); |
6036 } | 6469 } |
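[Editor's note] The distribution this helper performs, at the source level (sketch; per the checks above it is only done when at least one branch simplifies and ARG needs no SAVE_EXPR):

  int distrib (int a, int b, int x, int y)
  {
    return a + (b ? x : y);  /* considered as: b ? (a + x) : (a + y) */
  }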
6037 | 6470 |
6038 | 6471 |
6039 /* Subroutine of fold() that checks for the addition of +/- 0.0. | 6472 /* Subroutine of fold() that checks for the addition of +/- 0.0. |
6040 | 6473 |
6053 { | 6486 { |
6054 if (!real_zerop (addend)) | 6487 if (!real_zerop (addend)) |
6055 return false; | 6488 return false; |
6056 | 6489 |
6057 /* Don't allow the fold with -fsignaling-nans. */ | 6490 /* Don't allow the fold with -fsignaling-nans. */ |
6058 if (HONOR_SNANS (TYPE_MODE (type))) | 6491 if (HONOR_SNANS (element_mode (type))) |
6059 return false; | 6492 return false; |
6060 | 6493 |
6061 /* Allow the fold if zeros aren't signed, or their sign isn't important. */ | 6494 /* Allow the fold if zeros aren't signed, or their sign isn't important. */ |
6062 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))) | 6495 if (!HONOR_SIGNED_ZEROS (element_mode (type))) |
6063 return true; | 6496 return true; |
6064 | 6497 |
6498 /* In a vector or complex, we would need to check the sign of all zeros. */ | |
6499 if (TREE_CODE (addend) != REAL_CST) | |
6500 return false; | |
6501 | |
6065 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */ | 6502 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */ |
6066 if (TREE_CODE (addend) == REAL_CST | 6503 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend))) |
6067 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend))) | |
6068 negate = !negate; | 6504 negate = !negate; |
6069 | 6505 |
6070 /* The mode has signed zeros, and we have to honor their sign. | 6506 /* The mode has signed zeros, and we have to honor their sign. |
6071 In this situation, there is only one case we can return true for. | 6507 In this situation, there is only one case we can return true for. |
6072 X - 0 is the same as X unless rounding towards -infinity is | 6508 X - 0 is the same as X unless rounding towards -infinity is |
6073 supported. */ | 6509 supported. */ |
6074 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)); | 6510 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)); |
6075 } | 6511 } |
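[Editor's note] Why the sign of zero matters here, under IEEE semantics with default rounding: x - 0.0 preserves -0.0 and is foldable to x, while x + 0.0 is not, because -0.0 + 0.0 yields +0.0. A sketch:

  double sub0 (double x) { return x - 0.0; }  /* may fold to x */
  double add0 (double x) { return x + 0.0; }  /* not foldable: maps -0.0 to +0.0 */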
6076 | 6512 |
6077 /* Subroutine of fold() that checks comparisons of built-in math | 6513 /* Subroutine of match.pd that optimizes comparisons of a division by |
6078 functions against real constants. | |
6079 | |
6080 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison | |
6081 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE | |
6082 is the type of the result and ARG0 and ARG1 are the operands of the | |
6083 comparison. ARG1 must be a TREE_REAL_CST. | |
6084 | |
6085 The function returns the constant folded tree if a simplification | |
6086 can be made, and NULL_TREE otherwise. */ | |
6087 | |
6088 static tree | |
6089 fold_mathfn_compare (location_t loc, | |
6090 enum built_in_function fcode, enum tree_code code, | |
6091 tree type, tree arg0, tree arg1) | |
6092 { | |
6093 REAL_VALUE_TYPE c; | |
6094 | |
6095 if (BUILTIN_SQRT_P (fcode)) | |
6096 { | |
6097 tree arg = CALL_EXPR_ARG (arg0, 0); | |
6098 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0)); | |
6099 | |
6100 c = TREE_REAL_CST (arg1); | |
6101 if (REAL_VALUE_NEGATIVE (c)) | |
6102 { | |
6103 /* sqrt(x) < y is always false, if y is negative. */ | |
6104 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR) | |
6105 return omit_one_operand_loc (loc, type, integer_zero_node, arg); | |
6106 | |
6107 /* sqrt(x) > y is always true, if y is negative and we | |
6108 don't care about NaNs, i.e. negative values of x. */ | |
6109 if (code == NE_EXPR || !HONOR_NANS (mode)) | |
6110 return omit_one_operand_loc (loc, type, integer_one_node, arg); | |
6111 | |
6112 /* sqrt(x) > y is the same as x >= 0, if y is negative. */ | |
6113 return fold_build2_loc (loc, GE_EXPR, type, arg, | |
6114 build_real (TREE_TYPE (arg), dconst0)); | |
6115 } | |
6116 else if (code == GT_EXPR || code == GE_EXPR) | |
6117 { | |
6118 REAL_VALUE_TYPE c2; | |
6119 | |
6120 REAL_ARITHMETIC (c2, MULT_EXPR, c, c); | |
6121 real_convert (&c2, mode, &c2); | |
6122 | |
6123 if (REAL_VALUE_ISINF (c2)) | |
6124 { | |
6125 /* sqrt(x) > y is x == +Inf, when y is very large. */ | |
6126 if (HONOR_INFINITIES (mode)) | |
6127 return fold_build2_loc (loc, EQ_EXPR, type, arg, | |
6128 build_real (TREE_TYPE (arg), c2)); | |
6129 | |
6130 /* sqrt(x) > y is always false, when y is very large | |
6131 and we don't care about infinities. */ | |
6132 return omit_one_operand_loc (loc, type, integer_zero_node, arg); | |
6133 } | |
6134 | |
6135 /* sqrt(x) > c is the same as x > c*c. */ | |
6136 return fold_build2_loc (loc, code, type, arg, | |
6137 build_real (TREE_TYPE (arg), c2)); | |
6138 } | |
6139 else if (code == LT_EXPR || code == LE_EXPR) | |
6140 { | |
6141 REAL_VALUE_TYPE c2; | |
6142 | |
6143 REAL_ARITHMETIC (c2, MULT_EXPR, c, c); | |
6144 real_convert (&c2, mode, &c2); | |
6145 | |
6146 if (REAL_VALUE_ISINF (c2)) | |
6147 { | |
6148 /* sqrt(x) < y is always true, when y is a very large | |
6149 value and we don't care about NaNs or Infinities. */ | |
6150 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode)) | |
6151 return omit_one_operand_loc (loc, type, integer_one_node, arg); | |
6152 | |
6153 /* sqrt(x) < y is x != +Inf when y is very large and we | |
6154 don't care about NaNs. */ | |
6155 if (! HONOR_NANS (mode)) | |
6156 return fold_build2_loc (loc, NE_EXPR, type, arg, | |
6157 build_real (TREE_TYPE (arg), c2)); | |
6158 | |
6159 /* sqrt(x) < y is x >= 0 when y is very large and we | |
6160 don't care about Infinities. */ | |
6161 if (! HONOR_INFINITIES (mode)) | |
6162 return fold_build2_loc (loc, GE_EXPR, type, arg, | |
6163 build_real (TREE_TYPE (arg), dconst0)); | |
6164 | |
6165 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */ | |
6166 if (lang_hooks.decls.global_bindings_p () != 0 | |
6167 || CONTAINS_PLACEHOLDER_P (arg)) | |
6168 return NULL_TREE; | |
6169 | |
6170 arg = save_expr (arg); | |
6171 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type, | |
6172 fold_build2_loc (loc, GE_EXPR, type, arg, | |
6173 build_real (TREE_TYPE (arg), | |
6174 dconst0)), | |
6175 fold_build2_loc (loc, NE_EXPR, type, arg, | |
6176 build_real (TREE_TYPE (arg), | |
6177 c2))); | |
6178 } | |
6179 | |
6180 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */ | |
6181 if (! HONOR_NANS (mode)) | |
6182 return fold_build2_loc (loc, code, type, arg, | |
6183 build_real (TREE_TYPE (arg), c2)); | |
6184 | |
6185 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */ | |
6186 if (lang_hooks.decls.global_bindings_p () == 0 | |
6187 && ! CONTAINS_PLACEHOLDER_P (arg)) | |
6188 { | |
6189 arg = save_expr (arg); | |
6190 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type, | |
6191 fold_build2_loc (loc, GE_EXPR, type, arg, | |
6192 build_real (TREE_TYPE (arg), | |
6193 dconst0)), | |
6194 fold_build2_loc (loc, code, type, arg, | |
6195 build_real (TREE_TYPE (arg), | |
6196 c2))); | |
6197 } | |
6198 } | |
6199 } | |
6200 | |
6201 return NULL_TREE; | |
6202 } | |
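[Editor's note] Representative folds the removed fold_mathfn_compare implemented, at the source level (sketch; applicability depended on the NaN/Inf handling tested above):

  int sqrt_gt (double x) { return __builtin_sqrt (x) > 2.0; }      /* -> x > 4.0 */
  int sqrt_lt_neg (double x) { return __builtin_sqrt (x) < -1.0; } /* -> always false */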
6203 | |
6204 /* Subroutine of fold() that optimizes comparisons against Infinities, | |
6205 either +Inf or -Inf. | |
6206 | |
6207 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, | |
6208 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1 | |
6209 are the operands of the comparison. ARG1 must be a TREE_REAL_CST. | |
6210 | |
6211 The function returns the constant folded tree if a simplification | |
6212 can be made, and NULL_TREE otherwise. */ | |
6213 | |
6214 static tree | |
6215 fold_inf_compare (location_t loc, enum tree_code code, tree type, | |
6216 tree arg0, tree arg1) | |
6217 { | |
6218 enum machine_mode mode; | |
6219 REAL_VALUE_TYPE max; | |
6220 tree temp; | |
6221 bool neg; | |
6222 | |
6223 mode = TYPE_MODE (TREE_TYPE (arg0)); | |
6224 | |
6225 /* For negative infinity swap the sense of the comparison. */ | |
6226 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)); | |
6227 if (neg) | |
6228 code = swap_tree_comparison (code); | |
6229 | |
6230 switch (code) | |
6231 { | |
6232 case GT_EXPR: | |
6233 /* x > +Inf is always false, if we ignore sNaNs. | |
6234 if (HONOR_SNANS (mode)) | |
6235 return NULL_TREE; | |
6236 return omit_one_operand_loc (loc, type, integer_zero_node, arg0); | |
6237 | |
6238 case LE_EXPR: | |
6239 /* x <= +Inf is always true, if we don't care about NaNs. | |
6240 if (! HONOR_NANS (mode)) | |
6241 return omit_one_operand_loc (loc, type, integer_one_node, arg0); | |
6242 | |
6243 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */ | |
6244 if (lang_hooks.decls.global_bindings_p () == 0 | |
6245 && ! CONTAINS_PLACEHOLDER_P (arg0)) | |
6246 { | |
6247 arg0 = save_expr (arg0); | |
6248 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0); | |
6249 } | |
6250 break; | |
6251 | |
6252 case EQ_EXPR: | |
6253 case GE_EXPR: | |
6254 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */ | |
6255 real_maxval (&max, neg, mode); | |
6256 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type, | |
6257 arg0, build_real (TREE_TYPE (arg0), max)); | |
6258 | |
6259 case LT_EXPR: | |
6260 /* x < +Inf is always equal to x <= DBL_MAX. */ | |
6261 real_maxval (&max, neg, mode); | |
6262 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type, | |
6263 arg0, build_real (TREE_TYPE (arg0), max)); | |
6264 | |
6265 case NE_EXPR: | |
6266 /* x != +Inf is always equal to !(x > DBL_MAX). */ | |
6267 real_maxval (&max, neg, mode); | |
6268 if (! HONOR_NANS (mode)) | |
6269 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type, | |
6270 arg0, build_real (TREE_TYPE (arg0), max)); | |
6271 | |
6272 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type, | |
6273 arg0, build_real (TREE_TYPE (arg0), max)); | |
6274 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp); | |
6275 | |
6276 default: | |
6277 break; | |
6278 } | |
6279 | |
6280 return NULL_TREE; | |
6281 } | |
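[Editor's note] The removed fold_inf_compare rewrote comparisons against infinity into finite-bound tests, for example (sketch):

  int lt_inf (double x) { return x < __builtin_inf (); }  /* -> x <= DBL_MAX */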
6282 | |
6283 /* Subroutine of fold() that optimizes comparisons of a division by | |
6284 a nonzero integer constant against an integer constant, i.e. | 6514 a nonzero integer constant against an integer constant, i.e. |
6285 X/C1 op C2. | 6515 X/C1 op C2. |
6286 | 6516 |
6287 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, | 6517 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, |
6288 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1 | 6518 GE_EXPR or LE_EXPR. C1 and C2 must be INTEGER_CSTs. */ |
6289 are the operands of the comparison. ARG1 must be an INTEGER_CST. | 6519 |
6290 | 6520 enum tree_code |
6291 The function returns the constant folded tree if a simplification | 6521 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo, |
6292 can be made, and NULL_TREE otherwise. */ | 6522 tree *hi, bool *neg_overflow) |
6293 | 6523 { |
6294 static tree | 6524 tree prod, tmp, type = TREE_TYPE (c1); |
6295 fold_div_compare (location_t loc, | 6525 signop sign = TYPE_SIGN (type); |
6296 enum tree_code code, tree type, tree arg0, tree arg1) | 6526 bool overflow; |
6297 { | |
6298 tree prod, tmp, hi, lo; | |
6299 tree arg00 = TREE_OPERAND (arg0, 0); | |
6300 tree arg01 = TREE_OPERAND (arg0, 1); | |
6301 double_int val; | |
6302 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0)); | |
6303 bool neg_overflow; | |
6304 int overflow; | |
6305 | 6527 |
6306 /* We have to do this the hard way to detect unsigned overflow. | 6528 /* We have to do this the hard way to detect unsigned overflow. |
6307 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */ | 6529 prod = int_const_binop (MULT_EXPR, c1, c2); */ |
6308 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01), | 6530 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow); |
6309 TREE_INT_CST_HIGH (arg01), | 6531 prod = force_fit_type (type, val, -1, overflow); |
6310 TREE_INT_CST_LOW (arg1), | 6532 *neg_overflow = false; |
6311 TREE_INT_CST_HIGH (arg1), | 6533 |
6312 &val.low, &val.high, unsigned_p); | 6534 if (sign == UNSIGNED) |
6313 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow); | 6535 { |
6314 neg_overflow = false; | 6536 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1)); |
6315 | 6537 *lo = prod; |
6316 if (unsigned_p) | 6538 |
6317 { | 6539 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */ |
6318 tmp = int_const_binop (MINUS_EXPR, arg01, | 6540 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow); |
6319 build_int_cst (TREE_TYPE (arg01), 1), 0); | 6541 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod)); |
6320 lo = prod; | 6542 } |
6321 | 6543 else if (tree_int_cst_sgn (c1) >= 0) |
6322 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */ | 6544 { |
6323 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod), | 6545 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1)); |
6324 TREE_INT_CST_HIGH (prod), | 6546 switch (tree_int_cst_sgn (c2)) |
6325 TREE_INT_CST_LOW (tmp), | |
6326 TREE_INT_CST_HIGH (tmp), | |
6327 &val.low, &val.high, unsigned_p); | |
6328 hi = force_fit_type_double (TREE_TYPE (arg00), val, | |
6329 -1, overflow | TREE_OVERFLOW (prod)); | |
6330 } | |
6331 else if (tree_int_cst_sgn (arg01) >= 0) | |
6332 { | |
6333 tmp = int_const_binop (MINUS_EXPR, arg01, | |
6334 build_int_cst (TREE_TYPE (arg01), 1), 0); | |
6335 switch (tree_int_cst_sgn (arg1)) | |
6336 { | 6547 { |
6337 case -1: | 6548 case -1: |
6338 neg_overflow = true; | 6549 *neg_overflow = true; |
6339 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0); | 6550 *lo = int_const_binop (MINUS_EXPR, prod, tmp); |
6340 hi = prod; | 6551 *hi = prod; |
6341 break; | 6552 break; |
6342 | 6553 |
6343 case 0: | 6554 case 0: |
6344 lo = fold_negate_const (tmp, TREE_TYPE (arg0)); | 6555 *lo = fold_negate_const (tmp, type); |
6345 hi = tmp; | 6556 *hi = tmp; |
6346 break; | 6557 break; |
6347 | 6558 |
6348 case 1: | 6559 case 1: |
6349 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0); | 6560 *hi = int_const_binop (PLUS_EXPR, prod, tmp); |
6350 lo = prod; | 6561 *lo = prod; |
6351 break; | 6562 break; |
6352 | 6563 |
6353 default: | 6564 default: |
6354 gcc_unreachable (); | 6565 gcc_unreachable (); |
6355 } | 6566 } |
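[Editor's note] A worked instance of the bounds computed above, unsigned path with C1 = 3 and C2 = 2: prod = 6 and tmp = 2, so *lo = 6 and *hi = 8, i.e. (sketch):

  unsigned div_cmp (unsigned x) { return x / 3 == 2; }  /* -> 6 <= x && x <= 8 */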
6357 else | 6568 else |
6358 { | 6569 { |
6359 /* A negative divisor reverses the relational operators. */ | 6570 /* A negative divisor reverses the relational operators. */ |
6360 code = swap_tree_comparison (code); | 6571 code = swap_tree_comparison (code); |
6361 | 6572 |
6362 tmp = int_const_binop (PLUS_EXPR, arg01, | 6573 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1)); |
6363 build_int_cst (TREE_TYPE (arg01), 1), 0); | 6574 switch (tree_int_cst_sgn (c2)) |
6364 switch (tree_int_cst_sgn (arg1)) | |
6365 { | 6575 { |
6366 case -1: | 6576 case -1: |
6367 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0); | 6577 *hi = int_const_binop (MINUS_EXPR, prod, tmp); |
6368 lo = prod; | 6578 *lo = prod; |
6369 break; | 6579 break; |
6370 | 6580 |