comparison gcc/tree-ssa-ccp.c @ 67:f6334be47118
update gcc from gcc-4.6-20100522 to gcc-4.6-20110318
author: nobuyasu <dimolto@cr.ie.u-ryukyu.ac.jp>
date: Tue, 22 Mar 2011 17:18:12 +0900
parents: b7f97abdc517
children: 04ced10e8804
comparison: 65:65488c3d617d | 67:f6334be47118
1 /* Conditional constant propagation pass for the GNU compiler. | 1 /* Conditional constant propagation pass for the GNU compiler. |
2 Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, | 2 Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, |
3 2010 Free Software Foundation, Inc. | 3 2010, 2011 Free Software Foundation, Inc. |
4 Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org> | 4 Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org> |
5 Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com> | 5 Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com> |
6 | 6 |
7 This file is part of GCC. | 7 This file is part of GCC. |
8 | 8 |
96 | 96 |
97 After propagation, every variable V_i that ends up with a lattice | 97 After propagation, every variable V_i that ends up with a lattice |
98 value of CONSTANT will have the associated constant value in the | 98 value of CONSTANT will have the associated constant value in the |
99 array CONST_VAL[i].VALUE. That is fed into substitute_and_fold for | 99 array CONST_VAL[i].VALUE. That is fed into substitute_and_fold for |
100 final substitution and folding. | 100 final substitution and folding. |
101 | |
102 | |
103 Constant propagation in stores and loads (STORE-CCP) | |
104 ---------------------------------------------------- | |
105 | |
106 While CCP has all the logic to propagate constants in GIMPLE | |
107 registers, it is missing the ability to associate constants with | |
108 stores and loads (i.e., pointer dereferences, structures and | |
109 global/aliased variables). We don't keep loads and stores in | |
110 SSA, but we do build a factored use-def web for them (in the | |
111 virtual operands). | |
112 | |
113 For instance, consider the following code fragment: | |
114 | |
115 struct A a; | |
116 const int B = 42; | |
117 | |
118 void foo (int i) | |
119 { | |
120 if (i > 10) | |
121 a.a = 42; | |
122 else | |
123 { | |
124 a.b = 21; | |
125 a.a = a.b + 21; | |
126 } | |
127 | |
128 if (a.a != B) | |
129 never_executed (); | |
130 } | |
131 | |
132 We should be able to deduce that the predicate 'a.a != B' is always | |
133 false. To achieve this, we associate constant values to the SSA | |
134 names in the VDEF operands for each store. Additionally, | |
135 since we also glob partial loads/stores with the base symbol, we | |
136 also keep track of the memory reference where the constant value | |
137 was stored (in the MEM_REF field of PROP_VALUE_T). For instance, | |
138 | |
139 # a_5 = VDEF <a_4> | |
140 a.a = 2; | |
141 | |
142 # VUSE <a_5> | |
143 x_3 = a.b; | |
144 | |
145 In the example above, CCP will associate value '2' with 'a_5', but | |
146 it would be wrong to replace the load from 'a.b' with '2', because | |
147 '2' had been stored into a.a. | |
148 | |
149 Note that the initial value of virtual operands is VARYING, not | |
150 UNDEFINED. Consider, for instance global variables: | |
151 | |
152 int A; | |
153 | |
154 foo (int i) | |
155 { | |
156 if (i_3 > 10) | |
157 A_4 = 3; | |
158 # A_5 = PHI (A_4, A_2); | |
159 | |
160 # VUSE <A_5> | |
161 A.0_6 = A; | |
162 | |
163 return A.0_6; | |
164 } | |
165 | |
166 The value of A_2 cannot be assumed to be UNDEFINED, as it may have | |
167 been defined outside of foo. If we were to assume it UNDEFINED, we | |
168 would erroneously optimize the above into 'return 3;'. | |
169 | |
170 Though STORE-CCP is not too expensive, it does have to do more work | |
171 than regular CCP, so it is only enabled at -O2. Both regular CCP | |
172 and STORE-CCP use the exact same algorithm. The only distinction | |
173 is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is | |
174 set to true. This affects the evaluation of statements and PHI | |
175 nodes. | |
176 | 101 |
177 References: | 102 References: |
178 | 103 |
179 Constant propagation with conditional branches, | 104 Constant propagation with conditional branches, |
180 Wegman and Zadeck, ACM TOPLAS 13(2):181-210. | 105 Wegman and Zadeck, ACM TOPLAS 13(2):181-210. |
192 #include "tree.h" | 117 #include "tree.h" |
193 #include "flags.h" | 118 #include "flags.h" |
194 #include "tm_p.h" | 119 #include "tm_p.h" |
195 #include "basic-block.h" | 120 #include "basic-block.h" |
196 #include "output.h" | 121 #include "output.h" |
197 #include "expr.h" | |
198 #include "function.h" | 122 #include "function.h" |
199 #include "diagnostic.h" | |
200 #include "tree-pretty-print.h" | 123 #include "tree-pretty-print.h" |
201 #include "gimple-pretty-print.h" | 124 #include "gimple-pretty-print.h" |
202 #include "timevar.h" | 125 #include "timevar.h" |
203 #include "tree-dump.h" | 126 #include "tree-dump.h" |
204 #include "tree-flow.h" | 127 #include "tree-flow.h" |
205 #include "tree-pass.h" | 128 #include "tree-pass.h" |
206 #include "tree-ssa-propagate.h" | 129 #include "tree-ssa-propagate.h" |
207 #include "value-prof.h" | 130 #include "value-prof.h" |
208 #include "langhooks.h" | 131 #include "langhooks.h" |
209 #include "target.h" | 132 #include "target.h" |
210 #include "toplev.h" | 133 #include "diagnostic-core.h" |
211 #include "dbgcnt.h" | 134 #include "dbgcnt.h" |
212 | 135 |
213 | 136 |
214 /* Possible lattice values. */ | 137 /* Possible lattice values. */ |
215 typedef enum | 138 typedef enum |
217 UNINITIALIZED, | 140 UNINITIALIZED, |
218 UNDEFINED, | 141 UNDEFINED, |
219 CONSTANT, | 142 CONSTANT, |
220 VARYING | 143 VARYING |
221 } ccp_lattice_t; | 144 } ccp_lattice_t; |
145 | |
146 struct prop_value_d { | |
147 /* Lattice value. */ | |
148 ccp_lattice_t lattice_val; | |
149 | |
150 /* Propagated value. */ | |
151 tree value; | |
152 | |
153 /* Mask that applies to the propagated value during CCP. For | |
154 X with a CONSTANT lattice value X & ~mask == value & ~mask. */ | |
155 double_int mask; | |
156 }; | |
157 | |
158 typedef struct prop_value_d prop_value_t; | |
222 | 159 |
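The MASK convention introduced by prop_value_d is worth pinning down with numbers: a set bit in MASK means that bit of the value is unknown, a clear bit means it is known and supplied by VALUE, so the comment's invariant X & ~mask == value & ~mask holds for every runtime X the lattice value describes. A minimal standalone sketch (hypothetical values, plain 64-bit integers standing in for double_int):

    #include <assert.h>
    #include <stdint.h>

    /* Simplified stand-in for prop_value_t's CONSTANT case: set mask
       bits are unknown, clear mask bits are known from 'value'.  */
    struct bits { uint64_t value; uint64_t mask; };

    int main (void)
    {
      /* An 8-byte aligned pointer: low three bits known zero, the
         rest unknown.  */
      struct bits aligned8 = { 0, ~(uint64_t) 7 };
      uint64_t x = 0x1008;   /* any address this lattice value allows */

      assert ((x & ~aligned8.mask) == (aligned8.value & ~aligned8.mask));
      return 0;
    }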
223 /* Array of propagated constant values. After propagation, | 160 /* Array of propagated constant values. After propagation, |
224 CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I). If | 161 CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I). If |
225 the constant is held in an SSA name representing a memory store | 162 the constant is held in an SSA name representing a memory store |
226 (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual | 163 (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual |
228 doing the store). */ | 165 doing the store). */ |
229 static prop_value_t *const_val; | 166 static prop_value_t *const_val; |
230 | 167 |
231 static void canonicalize_float_value (prop_value_t *); | 168 static void canonicalize_float_value (prop_value_t *); |
232 static bool ccp_fold_stmt (gimple_stmt_iterator *); | 169 static bool ccp_fold_stmt (gimple_stmt_iterator *); |
170 static tree fold_ctor_reference (tree type, tree ctor, | |
171 unsigned HOST_WIDE_INT offset, | |
172 unsigned HOST_WIDE_INT size); | |
233 | 173 |
234 /* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */ | 174 /* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */ |
235 | 175 |
236 static void | 176 static void |
237 dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val) | 177 dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val) |
247 case VARYING: | 187 case VARYING: |
248 fprintf (outf, "%sVARYING", prefix); | 188 fprintf (outf, "%sVARYING", prefix); |
249 break; | 189 break; |
250 case CONSTANT: | 190 case CONSTANT: |
251 fprintf (outf, "%sCONSTANT ", prefix); | 191 fprintf (outf, "%sCONSTANT ", prefix); |
252 print_generic_expr (outf, val.value, dump_flags); | 192 if (TREE_CODE (val.value) != INTEGER_CST |
193 || double_int_zero_p (val.mask)) | |
194 print_generic_expr (outf, val.value, dump_flags); | |
195 else | |
196 { | |
197 double_int cval = double_int_and_not (tree_to_double_int (val.value), | |
198 val.mask); | |
199 fprintf (outf, "%sCONSTANT " HOST_WIDE_INT_PRINT_DOUBLE_HEX, | |
200 prefix, cval.high, cval.low); | |
201 fprintf (outf, " (" HOST_WIDE_INT_PRINT_DOUBLE_HEX ")", | |
202 val.mask.high, val.mask.low); | |
203 } | |
253 break; | 204 break; |
254 default: | 205 default: |
255 gcc_unreachable (); | 206 gcc_unreachable (); |
256 } | 207 } |
257 } | 208 } |
259 | 210 |
260 /* Print lattice value VAL to stderr. */ | 211 /* Print lattice value VAL to stderr. */ |
261 | 212 |
262 void debug_lattice_value (prop_value_t val); | 213 void debug_lattice_value (prop_value_t val); |
263 | 214 |
264 void | 215 DEBUG_FUNCTION void |
265 debug_lattice_value (prop_value_t val) | 216 debug_lattice_value (prop_value_t val) |
266 { | 217 { |
267 dump_lattice_value (stderr, "", val); | 218 dump_lattice_value (stderr, "", val); |
268 fprintf (stderr, "\n"); | 219 fprintf (stderr, "\n"); |
269 } | 220 } |
289 | 240 |
290 static prop_value_t | 241 static prop_value_t |
291 get_default_value (tree var) | 242 get_default_value (tree var) |
292 { | 243 { |
293 tree sym = SSA_NAME_VAR (var); | 244 tree sym = SSA_NAME_VAR (var); |
294 prop_value_t val = { UNINITIALIZED, NULL_TREE }; | 245 prop_value_t val = { UNINITIALIZED, NULL_TREE, { 0, 0 } }; |
295 gimple stmt; | 246 gimple stmt; |
296 | 247 |
297 stmt = SSA_NAME_DEF_STMT (var); | 248 stmt = SSA_NAME_DEF_STMT (var); |
298 | 249 |
299 if (gimple_nop_p (stmt)) | 250 if (gimple_nop_p (stmt)) |
300 { | 251 { |
301 /* Variables defined by an empty statement are those used | 252 /* Variables defined by an empty statement are those used |
302 before being initialized. If VAR is a local variable, we | 253 before being initialized. If VAR is a local variable, we |
303 can assume initially that it is UNDEFINED, otherwise we must | 254 can assume initially that it is UNDEFINED, otherwise we must |
304 consider it VARYING. */ | 255 consider it VARYING. */ |
305 if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL) | 256 if (is_gimple_reg (sym) |
257 && TREE_CODE (sym) == VAR_DECL) | |
306 val.lattice_val = UNDEFINED; | 258 val.lattice_val = UNDEFINED; |
307 else | 259 else |
308 val.lattice_val = VARYING; | 260 { |
261 val.lattice_val = VARYING; | |
262 val.mask = double_int_minus_one; | |
263 } | |
309 } | 264 } |
310 else if (is_gimple_assign (stmt) | 265 else if (is_gimple_assign (stmt) |
311 /* Value-returning GIMPLE_CALL statements assign to | 266 /* Value-returning GIMPLE_CALL statements assign to |
312 a variable, and are treated similarly to GIMPLE_ASSIGN. */ | 267 a variable, and are treated similarly to GIMPLE_ASSIGN. */ |
313 || (is_gimple_call (stmt) | 268 || (is_gimple_call (stmt) |
329 } | 284 } |
330 else | 285 else |
331 { | 286 { |
332 /* Otherwise, VAR will never take on a constant value. */ | 287 /* Otherwise, VAR will never take on a constant value. */ |
333 val.lattice_val = VARYING; | 288 val.lattice_val = VARYING; |
289 val.mask = double_int_minus_one; | |
334 } | 290 } |
335 | 291 |
336 return val; | 292 return val; |
337 } | 293 } |
338 | 294 |
354 canonicalize_float_value (val); | 310 canonicalize_float_value (val); |
355 | 311 |
356 return val; | 312 return val; |
357 } | 313 } |
358 | 314 |
315 /* Return the constant tree value associated with VAR. */ | |
316 | |
317 static inline tree | |
318 get_constant_value (tree var) | |
319 { | |
320 prop_value_t *val; | |
321 if (TREE_CODE (var) != SSA_NAME) | |
322 { | |
323 if (is_gimple_min_invariant (var)) | |
324 return var; | |
325 return NULL_TREE; | |
326 } | |
327 val = get_value (var); | |
328 if (val | |
329 && val->lattice_val == CONSTANT | |
330 && (TREE_CODE (val->value) != INTEGER_CST | |
331 || double_int_zero_p (val->mask))) | |
332 return val->value; | |
333 return NULL_TREE; | |
334 } | |
335 | |
359 /* Sets the value associated with VAR to VARYING. */ | 336 /* Sets the value associated with VAR to VARYING. */ |
360 | 337 |
361 static inline void | 338 static inline void |
362 set_value_varying (tree var) | 339 set_value_varying (tree var) |
363 { | 340 { |
364 prop_value_t *val = &const_val[SSA_NAME_VERSION (var)]; | 341 prop_value_t *val = &const_val[SSA_NAME_VERSION (var)]; |
365 | 342 |
366 val->lattice_val = VARYING; | 343 val->lattice_val = VARYING; |
367 val->value = NULL_TREE; | 344 val->value = NULL_TREE; |
345 val->mask = double_int_minus_one; | |
368 } | 346 } |
369 | 347 |
370 /* For float types, modify the value of VAL to make ccp work correctly | 348 /* For float types, modify the value of VAL to make ccp work correctly |
371 for non-standard values (-0, NaN): | 349 for non-standard values (-0, NaN): |
372 | 350 |
412 val->value = NULL; | 390 val->value = NULL; |
413 return; | 391 return; |
414 } | 392 } |
415 } | 393 } |
416 | 394 |
395 /* Return whether the lattice transition is valid. */ | |
396 | |
397 static bool | |
398 valid_lattice_transition (prop_value_t old_val, prop_value_t new_val) | |
399 { | |
400 /* Lattice transitions must always be monotonically increasing in | |
401 value. */ | |
402 if (old_val.lattice_val < new_val.lattice_val) | |
403 return true; | |
404 | |
405 if (old_val.lattice_val != new_val.lattice_val) | |
406 return false; | |
407 | |
408 if (!old_val.value && !new_val.value) | |
409 return true; | |
410 | |
411 /* Now both lattice values are CONSTANT. */ | |
412 | |
413 /* Allow transitioning from &x to &x & ~3. */ | |
414 if (TREE_CODE (old_val.value) != INTEGER_CST | |
415 && TREE_CODE (new_val.value) == INTEGER_CST) | |
416 return true; | |
417 | |
418 /* Bit-lattices have to agree in the still valid bits. */ | |
419 if (TREE_CODE (old_val.value) == INTEGER_CST | |
420 && TREE_CODE (new_val.value) == INTEGER_CST) | |
421 return double_int_equal_p | |
422 (double_int_and_not (tree_to_double_int (old_val.value), | |
423 new_val.mask), | |
424 double_int_and_not (tree_to_double_int (new_val.value), | |
425 new_val.mask)); | |
426 | |
427 /* Otherwise constant values have to agree. */ | |
428 return operand_equal_p (old_val.value, new_val.value, 0); | |
429 } | |
430 | |
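Monotonicity is what bounds the work the propagator can do: each SSA name may only move up the four-step lattice, so every name is revisited at most a constant number of times and the iteration terminates. The ordering on the enum alone, as a hedged standalone sketch:

    #include <assert.h>

    typedef enum { UNINITIALIZED, UNDEFINED, CONSTANT, VARYING } ccp_lattice_t;

    /* A step is valid when it climbs the lattice or stays put; staying
       on CONSTANT additionally requires the values (or the still-known
       bits) to agree, as valid_lattice_transition checks above.  */
    static int valid_step (ccp_lattice_t from, ccp_lattice_t to)
    {
      return from <= to;
    }

    int main (void)
    {
      assert (valid_step (UNDEFINED, CONSTANT));   /* learn a constant   */
      assert (valid_step (CONSTANT, VARYING));     /* give up to varying */
      assert (!valid_step (VARYING, CONSTANT));    /* never step down    */
      return 0;
    }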
417 /* Set the value for variable VAR to NEW_VAL. Return true if the new | 431 /* Set the value for variable VAR to NEW_VAL. Return true if the new |
418 value is different from VAR's previous value. */ | 432 value is different from VAR's previous value. */ |
419 | 433 |
420 static bool | 434 static bool |
421 set_lattice_value (tree var, prop_value_t new_val) | 435 set_lattice_value (tree var, prop_value_t new_val) |
422 { | 436 { |
423 prop_value_t *old_val = get_value (var); | 437 /* We can deal with old UNINITIALIZED values just fine here. */ |
438 prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)]; | |
424 | 439 |
425 canonicalize_float_value (&new_val); | 440 canonicalize_float_value (&new_val); |
426 | 441 |
427 /* Lattice transitions must always be monotonically increasing in | 442 /* We have to be careful to not go up the bitwise lattice |
428 value. If *OLD_VAL and NEW_VAL are the same, return false to | 443 represented by the mask. |
429 inform the caller that this was a non-transition. */ | 444 ??? This doesn't seem to be the best place to enforce this. */ |
430 | 445 if (new_val.lattice_val == CONSTANT |
431 gcc_assert (old_val->lattice_val < new_val.lattice_val | 446 && old_val->lattice_val == CONSTANT |
432 || (old_val->lattice_val == new_val.lattice_val | 447 && TREE_CODE (new_val.value) == INTEGER_CST |
433 && ((!old_val->value && !new_val.value) | 448 && TREE_CODE (old_val->value) == INTEGER_CST) |
434 || operand_equal_p (old_val->value, new_val.value, 0)))); | 449 { |
435 | 450 double_int diff; |
436 if (old_val->lattice_val != new_val.lattice_val) | 451 diff = double_int_xor (tree_to_double_int (new_val.value), |
437 { | 452 tree_to_double_int (old_val->value)); |
453 new_val.mask = double_int_ior (new_val.mask, | |
454 double_int_ior (old_val->mask, diff)); | |
455 } | |
456 | |
457 gcc_assert (valid_lattice_transition (*old_val, new_val)); | |
458 | |
459 /* If *OLD_VAL and NEW_VAL are the same, return false to inform the | |
460 caller that this was a non-transition. */ | |
461 if (old_val->lattice_val != new_val.lattice_val | |
462 || (new_val.lattice_val == CONSTANT | |
463 && TREE_CODE (new_val.value) == INTEGER_CST | |
464 && (TREE_CODE (old_val->value) != INTEGER_CST | |
465 || !double_int_equal_p (new_val.mask, old_val->mask)))) | |
466 { | |
467 /* ??? We would like to delay creation of INTEGER_CSTs from |
468 partially constant values here. */ |
469 | |
438 if (dump_file && (dump_flags & TDF_DETAILS)) | 470 if (dump_file && (dump_flags & TDF_DETAILS)) |
439 { | 471 { |
440 dump_lattice_value (dump_file, "Lattice value changed to ", new_val); | 472 dump_lattice_value (dump_file, "Lattice value changed to ", new_val); |
441 fprintf (dump_file, ". Adding SSA edges to worklist.\n"); | 473 fprintf (dump_file, ". Adding SSA edges to worklist.\n"); |
442 } | 474 } |
443 | 475 |
444 *old_val = new_val; | 476 *old_val = new_val; |
445 | 477 |
446 gcc_assert (new_val.lattice_val != UNDEFINED); | 478 gcc_assert (new_val.lattice_val != UNINITIALIZED); |
447 return true; | 479 return true; |
448 } | 480 } |
449 | 481 |
450 return false; | 482 return false; |
451 } | 483 } |
452 | 484 |
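The XOR step in set_lattice_value is what keeps repeated evaluations monotone on the bit lattice: any bit on which the old and new constants disagree is folded into the mask as unknown, instead of oscillating between two constants. Worked on small numbers (a hedged sketch with plain integers in place of double_int):

    #include <assert.h>
    #include <stdint.h>

    int main (void)
    {
      uint64_t old_value = 0x12, old_mask = 0;   /* was: constant 0x12 */
      uint64_t new_value = 0x16, new_mask = 0;   /* now: constant 0x16 */

      /* Mirror of the widening above: disagreeing bits go unknown.  */
      uint64_t diff = new_value ^ old_value;     /* 0x04 */
      new_mask = new_mask | old_mask | diff;

      assert (new_mask == 0x04);
      /* The surviving known bits agree between old and new.  */
      assert ((new_value & ~new_mask) == (old_value & ~new_mask));
      return 0;
    }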
485 static prop_value_t get_value_for_expr (tree, bool); | |
486 static prop_value_t bit_value_binop (enum tree_code, tree, tree, tree); | |
487 static void bit_value_binop_1 (enum tree_code, tree, double_int *, double_int *, | |
488 tree, double_int, double_int, | |
489 tree, double_int, double_int); | |
490 | |
491 /* Return a double_int that can be used for bitwise simplifications | |
492 from VAL. */ | |
493 | |
494 static double_int | |
495 value_to_double_int (prop_value_t val) | |
496 { | |
497 if (val.value | |
498 && TREE_CODE (val.value) == INTEGER_CST) | |
499 return tree_to_double_int (val.value); | |
500 else | |
501 return double_int_zero; | |
502 } | |
503 | |
504 /* Return the value for the address expression EXPR based on alignment | |
505 information. */ | |
506 | |
507 static prop_value_t | |
508 get_value_from_alignment (tree expr) | |
509 { | |
510 prop_value_t val; | |
511 HOST_WIDE_INT bitsize, bitpos; | |
512 tree base, offset; | |
513 enum machine_mode mode; | |
514 int align; | |
515 | |
516 gcc_assert (TREE_CODE (expr) == ADDR_EXPR); | |
517 | |
518 base = get_inner_reference (TREE_OPERAND (expr, 0), | |
519 &bitsize, &bitpos, &offset, | |
520 &mode, &align, &align, false); | |
521 if (TREE_CODE (base) == MEM_REF) | |
522 val = bit_value_binop (PLUS_EXPR, TREE_TYPE (expr), | |
523 TREE_OPERAND (base, 0), TREE_OPERAND (base, 1)); | |
524 else if (base | |
525 /* ??? While function decls have DECL_ALIGN their addresses | |
526 may encode extra information in the lower bits on some | |
527 targets (PR47239). Simply punt for function decls for now. */ | |
528 && TREE_CODE (base) != FUNCTION_DECL | |
529 && ((align = get_object_alignment (base, BIGGEST_ALIGNMENT)) | |
530 > BITS_PER_UNIT)) | |
531 { | |
532 val.lattice_val = CONSTANT; | |
533 /* We assume pointers are zero-extended. */ | |
534 val.mask = double_int_and_not | |
535 (double_int_mask (TYPE_PRECISION (TREE_TYPE (expr))), | |
536 uhwi_to_double_int (align / BITS_PER_UNIT - 1)); | |
537 val.value = build_int_cst (TREE_TYPE (expr), 0); | |
538 } | |
539 else | |
540 { | |
541 val.lattice_val = VARYING; | |
542 val.mask = double_int_minus_one; | |
543 val.value = NULL_TREE; | |
544 } | |
545 if (bitpos != 0) | |
546 { | |
547 double_int value, mask; | |
548 bit_value_binop_1 (PLUS_EXPR, TREE_TYPE (expr), &value, &mask, | |
549 TREE_TYPE (expr), value_to_double_int (val), val.mask, | |
550 TREE_TYPE (expr), | |
551 shwi_to_double_int (bitpos / BITS_PER_UNIT), | |
552 double_int_zero); | |
553 val.lattice_val = double_int_minus_one_p (mask) ? VARYING : CONSTANT; | |
554 val.mask = mask; | |
555 if (val.lattice_val == CONSTANT) | |
556 val.value = double_int_to_tree (TREE_TYPE (expr), value); | |
557 else | |
558 val.value = NULL_TREE; | |
559 } | |
560 /* ??? We should handle i * 4 and more complex expressions from | |
561 the offset, possibly by just expanding get_value_for_expr. */ | |
562 if (offset != NULL_TREE) | |
563 { | |
564 double_int value, mask; | |
565 prop_value_t oval = get_value_for_expr (offset, true); | |
566 bit_value_binop_1 (PLUS_EXPR, TREE_TYPE (expr), &value, &mask, | |
567 TREE_TYPE (expr), value_to_double_int (val), val.mask, | |
568 TREE_TYPE (expr), value_to_double_int (oval), | |
569 oval.mask); | |
570 val.mask = mask; | |
571 if (double_int_minus_one_p (mask)) | |
572 { | |
573 val.lattice_val = VARYING; | |
574 val.value = NULL_TREE; | |
575 } | |
576 else | |
577 { | |
578 val.lattice_val = CONSTANT; | |
579 val.value = double_int_to_tree (TREE_TYPE (expr), value); | |
580 } | |
581 } | |
582 | |
583 return val; | |
584 } | |
585 | |
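Concretely: for an ADDR_EXPR whose base has 8-byte alignment and whose BITPOS is 32 bits (4 bytes), get_value_from_alignment first records "value 0, mask ~7" from the alignment and then adds the known byte offset through the bit lattice, ending at "value 4, mask ~7", i.e. the address is 4 modulo 8. A hedged check of that claim in plain C:

    #include <assert.h>
    #include <stdint.h>

    int main (void)
    {
      uint64_t value = 0, mask = ~(uint64_t) 7;  /* 8-byte aligned base  */
      uint64_t off = 4;                          /* bitpos/BITS_PER_UNIT */

      /* Every address the base lattice value allows is 8k; after the
         known offset it is 8k + 4, so the low bits are known to be 4. */
      for (uint64_t k = 0; k < 64; k++)
        {
          uint64_t addr = (k << 3) + off;
          assert ((addr & ~mask) == ((value + off) & ~mask));
        }
      return 0;
    }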
586 /* Return the value for the tree operand EXPR. If FOR_BITS_P is true | |
587 return constant bits extracted from alignment information for | |
588 invariant addresses. */ | |
589 | |
590 static prop_value_t | |
591 get_value_for_expr (tree expr, bool for_bits_p) | |
592 { | |
593 prop_value_t val; | |
594 | |
595 if (TREE_CODE (expr) == SSA_NAME) | |
596 { | |
597 val = *get_value (expr); | |
598 if (for_bits_p | |
599 && val.lattice_val == CONSTANT | |
600 && TREE_CODE (val.value) == ADDR_EXPR) | |
601 val = get_value_from_alignment (val.value); | |
602 } | |
603 else if (is_gimple_min_invariant (expr) | |
604 && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR)) | |
605 { | |
606 val.lattice_val = CONSTANT; | |
607 val.value = expr; | |
608 val.mask = double_int_zero; | |
609 canonicalize_float_value (&val); | |
610 } | |
611 else if (TREE_CODE (expr) == ADDR_EXPR) | |
612 val = get_value_from_alignment (expr); | |
613 else | |
614 { | |
615 val.lattice_val = VARYING; | |
616 val.mask = double_int_minus_one; | |
617 val.value = NULL_TREE; | |
618 } | |
619 return val; | |
620 } | |
453 | 621 |
454 /* Return the likely CCP lattice value for STMT. | 622 /* Return the likely CCP lattice value for STMT. |
455 | 623 |
456 If STMT has no operands, then return CONSTANT. | 624 If STMT has no operands, then return CONSTANT. |
457 | 625 |
664 for (i = 0; i < num_ssa_names; i++) | 832 for (i = 0; i < num_ssa_names; i++) |
665 { | 833 { |
666 if (!dbg_cnt (ccp)) | 834 if (!dbg_cnt (ccp)) |
667 { | 835 { |
668 const_val[i].lattice_val = VARYING; | 836 const_val[i].lattice_val = VARYING; |
837 const_val[i].mask = double_int_minus_one; | |
669 const_val[i].value = NULL_TREE; | 838 const_val[i].value = NULL_TREE; |
670 } | 839 } |
671 } | 840 } |
672 } | 841 } |
673 | 842 |
679 | 848 |
680 static bool | 849 static bool |
681 ccp_finalize (void) | 850 ccp_finalize (void) |
682 { | 851 { |
683 bool something_changed; | 852 bool something_changed; |
853 unsigned i; | |
684 | 854 |
685 do_dbg_cnt (); | 855 do_dbg_cnt (); |
856 | |
857 /* Derive alignment and misalignment information from partially | |
858 constant pointers in the lattice. */ | |
859 for (i = 1; i < num_ssa_names; ++i) | |
860 { | |
861 tree name = ssa_name (i); | |
862 prop_value_t *val; | |
863 struct ptr_info_def *pi; | |
864 unsigned int tem, align; | |
865 | |
866 if (!name | |
867 || !POINTER_TYPE_P (TREE_TYPE (name))) | |
868 continue; | |
869 | |
870 val = get_value (name); | |
871 if (val->lattice_val != CONSTANT | |
872 || TREE_CODE (val->value) != INTEGER_CST) | |
873 continue; | |
874 | |
875 /* Trailing constant bits specify the alignment, trailing value | |
876 bits the misalignment. */ | |
877 tem = val->mask.low; | |
878 align = (tem & -tem); | |
879 if (align == 1) | |
880 continue; | |
881 | |
882 pi = get_ptr_info (name); | |
883 pi->align = align; | |
884 pi->misalign = TREE_INT_CST_LOW (val->value) & (align - 1); | |
885 } | |
886 | |
686 /* Perform substitutions based on the known constant values. */ | 887 /* Perform substitutions based on the known constant values. */ |
687 something_changed = substitute_and_fold (const_val, ccp_fold_stmt); | 888 something_changed = substitute_and_fold (get_constant_value, |
889 ccp_fold_stmt, true); | |
688 | 890 |
689 free (const_val); | 891 free (const_val); |
690 const_val = NULL; | 892 const_val = NULL; |
691 return something_changed; | 893 return something_changed; |
692 } | 894 } |
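The "tem & -tem" idiom in ccp_finalize isolates the lowest set bit of the mask: all bits below it are known bits of the pointer value, so that power of two is the derived alignment, and the corresponding low bits of VALUE are the misalignment. For instance, a lattice value "pointer == 4 mod 16" (mask ~15, value 4) yields align 16 and misalign 4; a hedged sketch:

    #include <assert.h>
    #include <stdint.h>

    int main (void)
    {
      unsigned int tem = (unsigned int) ~(uint64_t) 15; /* mask.low: ~15  */
      unsigned int align = tem & -tem;                  /* lowest set bit */
      unsigned int misalign = 4 & (align - 1);          /* value low bits */

      assert (align == 16 && misalign == 4);
      return 0;
    }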
718 else if (val1->lattice_val == VARYING | 920 else if (val1->lattice_val == VARYING |
719 || val2->lattice_val == VARYING) | 921 || val2->lattice_val == VARYING) |
720 { | 922 { |
721 /* any M VARYING = VARYING. */ | 923 /* any M VARYING = VARYING. */ |
722 val1->lattice_val = VARYING; | 924 val1->lattice_val = VARYING; |
925 val1->mask = double_int_minus_one; | |
723 val1->value = NULL_TREE; | 926 val1->value = NULL_TREE; |
927 } | |
928 else if (val1->lattice_val == CONSTANT | |
929 && val2->lattice_val == CONSTANT | |
930 && TREE_CODE (val1->value) == INTEGER_CST | |
931 && TREE_CODE (val2->value) == INTEGER_CST) | |
932 { | |
933 /* Ci M Cj = Ci if (i == j) | |
934 Ci M Cj = VARYING if (i != j) | |
935 | |
936 For INTEGER_CSTs mask unequal bits. If no equal bits remain, | |
937 drop to varying. */ | |
938 val1->mask | |
939 = double_int_ior (double_int_ior (val1->mask, | |
940 val2->mask), | |
941 double_int_xor (tree_to_double_int (val1->value), | |
942 tree_to_double_int (val2->value))); | |
943 if (double_int_minus_one_p (val1->mask)) | |
944 { | |
945 val1->lattice_val = VARYING; | |
946 val1->value = NULL_TREE; | |
947 } | |
724 } | 948 } |
725 else if (val1->lattice_val == CONSTANT | 949 else if (val1->lattice_val == CONSTANT |
726 && val2->lattice_val == CONSTANT | 950 && val2->lattice_val == CONSTANT |
727 && simple_cst_equal (val1->value, val2->value) == 1) | 951 && simple_cst_equal (val1->value, val2->value) == 1) |
728 { | 952 { |
729 /* Ci M Cj = Ci if (i == j) | 953 /* Ci M Cj = Ci if (i == j) |
730 Ci M Cj = VARYING if (i != j) | 954 Ci M Cj = VARYING if (i != j) |
731 | 955 |
732 If these two values come from memory stores, make sure that | 956 VAL1 already contains the value we want for equivalent values. */ |
733 they come from the same memory reference. */ | 957 } |
734 val1->lattice_val = CONSTANT; | 958 else if (val1->lattice_val == CONSTANT |
735 val1->value = val1->value; | 959 && val2->lattice_val == CONSTANT |
960 && (TREE_CODE (val1->value) == ADDR_EXPR | |
961 || TREE_CODE (val2->value) == ADDR_EXPR)) | |
962 { | |
963 /* When unequal addresses are involved, try meeting for |
964 alignment. */ | |
965 prop_value_t tem = *val2; | |
966 if (TREE_CODE (val1->value) == ADDR_EXPR) | |
967 *val1 = get_value_for_expr (val1->value, true); | |
968 if (TREE_CODE (val2->value) == ADDR_EXPR) | |
969 tem = get_value_for_expr (val2->value, true); | |
970 ccp_lattice_meet (val1, &tem); | |
736 } | 971 } |
737 else | 972 else |
738 { | 973 { |
739 /* Any other combination is VARYING. */ | 974 /* Any other combination is VARYING. */ |
740 val1->lattice_val = VARYING; | 975 val1->lattice_val = VARYING; |
976 val1->mask = double_int_minus_one; | |
741 val1->value = NULL_TREE; | 977 val1->value = NULL_TREE; |
742 } | 978 } |
743 } | 979 } |
744 | 980 |
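The INTEGER_CST arm of ccp_lattice_meet refines the classic "Ci M Cj = VARYING if i != j" rule: meeting two unequal constants keeps a partial constant whose mask marks the disagreeing bits, and only degenerates to VARYING when no known bit survives. E.g. meeting 12 (0b01100) and 20 (0b10100) leaves "low bits known to be 4, bits 3 and 4 unknown"; a hedged sketch on plain integers:

    #include <assert.h>
    #include <stdint.h>

    int main (void)
    {
      /* Two PHI arguments, both fully known constants (mask 0).  */
      uint64_t v1 = 12, m1 = 0;
      uint64_t v2 = 20, m2 = 0;

      /* Mirror of the meet: union of masks plus disagreeing bits.  */
      uint64_t m = m1 | m2 | (v1 ^ v2);                 /* 0b11000 */

      assert (m == 24);
      /* Both inputs agree on the surviving known bits.  */
      assert ((v1 & ~m) == 4 && (v2 & ~m) == 4);
      return 0;
    }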
745 | 981 |
796 /* If the incoming edge is executable, compute the meet operator for | 1032 /* If the incoming edge is executable, compute the meet operator for |
797 the existing value of the PHI node and the current PHI argument. */ | 1033 the existing value of the PHI node and the current PHI argument. */ |
798 if (e->flags & EDGE_EXECUTABLE) | 1034 if (e->flags & EDGE_EXECUTABLE) |
799 { | 1035 { |
800 tree arg = gimple_phi_arg (phi, i)->def; | 1036 tree arg = gimple_phi_arg (phi, i)->def; |
801 prop_value_t arg_val; | 1037 prop_value_t arg_val = get_value_for_expr (arg, false); |
802 | |
803 if (is_gimple_min_invariant (arg)) | |
804 { | |
805 arg_val.lattice_val = CONSTANT; | |
806 arg_val.value = arg; | |
807 } | |
808 else | |
809 arg_val = *(get_value (arg)); | |
810 | 1038 |
811 ccp_lattice_meet (&new_val, &arg_val); | 1039 ccp_lattice_meet (&new_val, &arg_val); |
812 | 1040 |
813 if (dump_file && (dump_flags & TDF_DETAILS)) | 1041 if (dump_file && (dump_flags & TDF_DETAILS)) |
814 { | 1042 { |
839 } | 1067 } |
840 else | 1068 else |
841 return SSA_PROP_NOT_INTERESTING; | 1069 return SSA_PROP_NOT_INTERESTING; |
842 } | 1070 } |
843 | 1071 |
1072 /* Return the constant value for OP or OP otherwise. */ | |
1073 | |
1074 static tree | |
1075 valueize_op (tree op) | |
1076 { | |
1077 if (TREE_CODE (op) == SSA_NAME) | |
1078 { | |
1079 tree tem = get_constant_value (op); | |
1080 if (tem) | |
1081 return tem; | |
1082 } | |
1083 return op; | |
1084 } | |
1085 | |
844 /* CCP specific front-end to the non-destructive constant folding | 1086 /* CCP specific front-end to the non-destructive constant folding |
845 routines. | 1087 routines. |
846 | 1088 |
847 Attempt to simplify the RHS of STMT knowing that one or more | 1089 Attempt to simplify the RHS of STMT knowing that one or more |
848 operands are constants. | 1090 operands are constants. |
869 | 1111 |
870 if (TREE_CODE (rhs) == SSA_NAME) | 1112 if (TREE_CODE (rhs) == SSA_NAME) |
871 { | 1113 { |
872 /* If the RHS is an SSA_NAME, return its known constant value, | 1114 /* If the RHS is an SSA_NAME, return its known constant value, |
873 if any. */ | 1115 if any. */ |
874 return get_value (rhs)->value; | 1116 return get_constant_value (rhs); |
875 } | 1117 } |
876 /* Handle propagating invariant addresses into address operations. | 1118 /* Handle propagating invariant addresses into address operations. |
877 The folding we do here matches that in tree-ssa-forwprop.c. */ | 1119 The folding we do here matches that in tree-ssa-forwprop.c. */ |
878 else if (TREE_CODE (rhs) == ADDR_EXPR) | 1120 else if (TREE_CODE (rhs) == ADDR_EXPR) |
879 { | 1121 { |
880 tree *base; | 1122 tree *base; |
881 base = &TREE_OPERAND (rhs, 0); | 1123 base = &TREE_OPERAND (rhs, 0); |
882 while (handled_component_p (*base)) | 1124 while (handled_component_p (*base)) |
883 base = &TREE_OPERAND (*base, 0); | 1125 base = &TREE_OPERAND (*base, 0); |
884 if (TREE_CODE (*base) == INDIRECT_REF | 1126 if (TREE_CODE (*base) == MEM_REF |
885 && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME) | 1127 && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME) |
886 { | 1128 { |
887 prop_value_t *val = get_value (TREE_OPERAND (*base, 0)); | 1129 tree val = get_constant_value (TREE_OPERAND (*base, 0)); |
888 if (val->lattice_val == CONSTANT | 1130 if (val |
889 && TREE_CODE (val->value) == ADDR_EXPR | 1131 && TREE_CODE (val) == ADDR_EXPR) |
890 && may_propagate_address_into_dereference | |
891 (val->value, *base)) | |
892 { | 1132 { |
1133 tree ret, save = *base; | |
1134 tree new_base; | |
1135 new_base = fold_build2 (MEM_REF, TREE_TYPE (*base), | |
1136 unshare_expr (val), | |
1137 TREE_OPERAND (*base, 1)); | |
893 /* We need to return a new tree, not modify the IL | 1138 /* We need to return a new tree, not modify the IL |
894 or share parts of it. So play some tricks to | 1139 or share parts of it. So play some tricks to |
895 avoid manually building it. */ | 1140 avoid manually building it. */ |
896 tree ret, save = *base; | 1141 *base = new_base; |
897 *base = TREE_OPERAND (val->value, 0); | |
898 ret = unshare_expr (rhs); | 1142 ret = unshare_expr (rhs); |
899 recompute_tree_invariant_for_addr_expr (ret); | 1143 recompute_tree_invariant_for_addr_expr (ret); |
900 *base = save; | 1144 *base = save; |
901 return ret; | 1145 return ret; |
902 } | 1146 } |
911 tree val, list; | 1155 tree val, list; |
912 | 1156 |
913 list = NULL_TREE; | 1157 list = NULL_TREE; |
914 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val) | 1158 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val) |
915 { | 1159 { |
916 if (TREE_CODE (val) == SSA_NAME | 1160 val = valueize_op (val); |
917 && get_value (val)->lattice_val == CONSTANT) | |
918 val = get_value (val)->value; | |
919 if (TREE_CODE (val) == INTEGER_CST | 1161 if (TREE_CODE (val) == INTEGER_CST |
920 || TREE_CODE (val) == REAL_CST | 1162 || TREE_CODE (val) == REAL_CST |
921 || TREE_CODE (val) == FIXED_CST) | 1163 || TREE_CODE (val) == FIXED_CST) |
922 list = tree_cons (NULL_TREE, val, list); | 1164 list = tree_cons (NULL_TREE, val, list); |
923 else | 1165 else |
932 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR | 1174 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR |
933 || TREE_CODE (rhs) == REALPART_EXPR | 1175 || TREE_CODE (rhs) == REALPART_EXPR |
934 || TREE_CODE (rhs) == IMAGPART_EXPR) | 1176 || TREE_CODE (rhs) == IMAGPART_EXPR) |
935 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME) | 1177 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME) |
936 { | 1178 { |
937 prop_value_t *val = get_value (TREE_OPERAND (rhs, 0)); | 1179 tree val = get_constant_value (TREE_OPERAND (rhs, 0)); |
938 if (val->lattice_val == CONSTANT) | 1180 if (val) |
939 return fold_unary_loc (EXPR_LOCATION (rhs), | 1181 return fold_unary_loc (EXPR_LOCATION (rhs), |
940 TREE_CODE (rhs), | 1182 TREE_CODE (rhs), |
941 TREE_TYPE (rhs), val->value); | 1183 TREE_TYPE (rhs), val); |
942 } | 1184 } |
943 else if (TREE_CODE (rhs) == INDIRECT_REF | 1185 else if (TREE_CODE (rhs) == MEM_REF |
944 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME) | 1186 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME) |
945 { | 1187 { |
946 prop_value_t *val = get_value (TREE_OPERAND (rhs, 0)); | 1188 tree val = get_constant_value (TREE_OPERAND (rhs, 0)); |
947 if (val->lattice_val == CONSTANT | 1189 if (val |
948 && TREE_CODE (val->value) == ADDR_EXPR | 1190 && TREE_CODE (val) == ADDR_EXPR) |
949 && useless_type_conversion_p (TREE_TYPE (rhs), | 1191 { |
950 TREE_TYPE (TREE_TYPE (val->value)))) | 1192 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs), |
951 rhs = TREE_OPERAND (val->value, 0); | 1193 unshare_expr (val), |
1194 TREE_OPERAND (rhs, 1)); | |
1195 if (tem) | |
1196 rhs = tem; | |
1197 } | |
952 } | 1198 } |
953 return fold_const_aggregate_ref (rhs); | 1199 return fold_const_aggregate_ref (rhs); |
954 } | 1200 } |
955 else if (kind == tcc_declaration) | 1201 else if (kind == tcc_declaration) |
956 return get_symbol_constant_value (rhs); | 1202 return get_symbol_constant_value (rhs); |
961 { | 1207 { |
962 /* Handle unary operators that can appear in GIMPLE form. | 1208 /* Handle unary operators that can appear in GIMPLE form. |
963 Note that we know the single operand must be a constant, | 1209 Note that we know the single operand must be a constant, |
964 so this should almost always return a simplified RHS. */ | 1210 so this should almost always return a simplified RHS. */ |
965 tree lhs = gimple_assign_lhs (stmt); | 1211 tree lhs = gimple_assign_lhs (stmt); |
966 tree op0 = gimple_assign_rhs1 (stmt); | 1212 tree op0 = valueize_op (gimple_assign_rhs1 (stmt)); |
967 | |
968 /* Simplify the operand down to a constant. */ | |
969 if (TREE_CODE (op0) == SSA_NAME) | |
970 { | |
971 prop_value_t *val = get_value (op0); | |
972 if (val->lattice_val == CONSTANT) | |
973 op0 = get_value (op0)->value; | |
974 } | |
975 | 1213 |
976 /* Conversions are useless for CCP purposes if they are | 1214 /* Conversions are useless for CCP purposes if they are |
977 value-preserving. Thus the restrictions that | 1215 value-preserving. Thus the restrictions that |
978 useless_type_conversion_p places for pointer type conversions | 1216 useless_type_conversion_p places for pointer type conversions |
979 do not apply here. Substitution later will only substitute to | 1217 do not apply here. Substitution later will only substitute to |
980 allowed places. */ | 1218 allowed places. */ |
981 if (CONVERT_EXPR_CODE_P (subcode) | 1219 if (CONVERT_EXPR_CODE_P (subcode) |
982 && POINTER_TYPE_P (TREE_TYPE (lhs)) | 1220 && POINTER_TYPE_P (TREE_TYPE (lhs)) |
983 && POINTER_TYPE_P (TREE_TYPE (op0)) | 1221 && POINTER_TYPE_P (TREE_TYPE (op0))) |
984 /* Do not allow differences in volatile qualification | |
985 as this might get us confused as to whether a | |
986 propagation destination statement is volatile | |
987 or not. See PR36988. */ | |
988 && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs))) | |
989 == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0))))) | |
990 { | 1222 { |
991 tree tem; | 1223 tree tem; |
992 /* Still try to generate a constant of correct type. */ | 1224 /* Try to re-construct array references on-the-fly. */ |
993 if (!useless_type_conversion_p (TREE_TYPE (lhs), | 1225 if (!useless_type_conversion_p (TREE_TYPE (lhs), |
994 TREE_TYPE (op0)) | 1226 TREE_TYPE (op0)) |
995 && ((tem = maybe_fold_offset_to_address | 1227 && ((tem = maybe_fold_offset_to_address |
996 (loc, | 1228 (loc, |
997 op0, integer_zero_node, TREE_TYPE (lhs))) | 1229 op0, integer_zero_node, TREE_TYPE (lhs))) |
1006 } | 1238 } |
1007 | 1239 |
1008 case GIMPLE_BINARY_RHS: | 1240 case GIMPLE_BINARY_RHS: |
1009 { | 1241 { |
1010 /* Handle binary operators that can appear in GIMPLE form. */ | 1242 /* Handle binary operators that can appear in GIMPLE form. */ |
1011 tree op0 = gimple_assign_rhs1 (stmt); | 1243 tree op0 = valueize_op (gimple_assign_rhs1 (stmt)); |
1012 tree op1 = gimple_assign_rhs2 (stmt); | 1244 tree op1 = valueize_op (gimple_assign_rhs2 (stmt)); |
1013 | 1245 |
1014 /* Simplify the operands down to constants when appropriate. */ | 1246 /* Translate &x + CST into an invariant form suitable for |
1015 if (TREE_CODE (op0) == SSA_NAME) | 1247 further propagation. */ |
1016 { | |
1017 prop_value_t *val = get_value (op0); | |
1018 if (val->lattice_val == CONSTANT) | |
1019 op0 = val->value; | |
1020 } | |
1021 | |
1022 if (TREE_CODE (op1) == SSA_NAME) | |
1023 { | |
1024 prop_value_t *val = get_value (op1); | |
1025 if (val->lattice_val == CONSTANT) | |
1026 op1 = val->value; | |
1027 } | |
1028 | |
1029 /* Fold &foo + CST into an invariant reference if possible. */ | |
1030 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR | 1248 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR |
1031 && TREE_CODE (op0) == ADDR_EXPR | 1249 && TREE_CODE (op0) == ADDR_EXPR |
1032 && TREE_CODE (op1) == INTEGER_CST) | 1250 && TREE_CODE (op1) == INTEGER_CST) |
1033 { | 1251 { |
1034 tree tem = maybe_fold_offset_to_address | 1252 tree off = fold_convert (ptr_type_node, op1); |
1035 (loc, op0, op1, TREE_TYPE (op0)); | 1253 return build_fold_addr_expr |
1036 if (tem != NULL_TREE) | 1254 (fold_build2 (MEM_REF, |
1037 return tem; | 1255 TREE_TYPE (TREE_TYPE (op0)), |
1256 unshare_expr (op0), off)); | |
1038 } | 1257 } |
1039 | 1258 |
1040 return fold_binary_loc (loc, subcode, | 1259 return fold_binary_loc (loc, subcode, |
1041 gimple_expr_type (stmt), op0, op1); | 1260 gimple_expr_type (stmt), op0, op1); |
1261 } | |
1262 | |
1263 case GIMPLE_TERNARY_RHS: | |
1264 { | |
1265 /* Handle ternary operators that can appear in GIMPLE form. */ | |
1266 tree op0 = valueize_op (gimple_assign_rhs1 (stmt)); | |
1267 tree op1 = valueize_op (gimple_assign_rhs2 (stmt)); | |
1268 tree op2 = valueize_op (gimple_assign_rhs3 (stmt)); | |
1269 | |
1270 return fold_ternary_loc (loc, subcode, | |
1271 gimple_expr_type (stmt), op0, op1, op2); | |
1042 } | 1272 } |
1043 | 1273 |
1044 default: | 1274 default: |
1045 gcc_unreachable (); | 1275 gcc_unreachable (); |
1046 } | 1276 } |
1047 } | 1277 } |
1048 break; | 1278 break; |
1049 | 1279 |
1050 case GIMPLE_CALL: | 1280 case GIMPLE_CALL: |
1051 { | 1281 { |
1052 tree fn = gimple_call_fn (stmt); | 1282 tree fn = valueize_op (gimple_call_fn (stmt)); |
1053 prop_value_t *val; | |
1054 | |
1055 if (TREE_CODE (fn) == SSA_NAME) | |
1056 { | |
1057 val = get_value (fn); | |
1058 if (val->lattice_val == CONSTANT) | |
1059 fn = val->value; | |
1060 } | |
1061 if (TREE_CODE (fn) == ADDR_EXPR | 1283 if (TREE_CODE (fn) == ADDR_EXPR |
1062 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL | 1284 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL |
1063 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))) | 1285 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))) |
1064 { | 1286 { |
1065 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt)); | 1287 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt)); |
1066 tree call, retval; | 1288 tree call, retval; |
1067 unsigned i; | 1289 unsigned i; |
1068 for (i = 0; i < gimple_call_num_args (stmt); ++i) | 1290 for (i = 0; i < gimple_call_num_args (stmt); ++i) |
1069 { | 1291 args[i] = valueize_op (gimple_call_arg (stmt, i)); |
1070 args[i] = gimple_call_arg (stmt, i); | |
1071 if (TREE_CODE (args[i]) == SSA_NAME) | |
1072 { | |
1073 val = get_value (args[i]); | |
1074 if (val->lattice_val == CONSTANT) | |
1075 args[i] = val->value; | |
1076 } | |
1077 } | |
1078 call = build_call_array_loc (loc, | 1292 call = build_call_array_loc (loc, |
1079 gimple_call_return_type (stmt), | 1293 gimple_call_return_type (stmt), |
1080 fn, gimple_call_num_args (stmt), args); | 1294 fn, gimple_call_num_args (stmt), args); |
1081 retval = fold_call_expr (EXPR_LOCATION (call), call, false); | 1295 retval = fold_call_expr (EXPR_LOCATION (call), call, false); |
1082 if (retval) | 1296 if (retval) |
1088 } | 1302 } |
1089 | 1303 |
1090 case GIMPLE_COND: | 1304 case GIMPLE_COND: |
1091 { | 1305 { |
1092 /* Handle comparison operators that can appear in GIMPLE form. */ | 1306 /* Handle comparison operators that can appear in GIMPLE form. */ |
1093 tree op0 = gimple_cond_lhs (stmt); | 1307 tree op0 = valueize_op (gimple_cond_lhs (stmt)); |
1094 tree op1 = gimple_cond_rhs (stmt); | 1308 tree op1 = valueize_op (gimple_cond_rhs (stmt)); |
1095 enum tree_code code = gimple_cond_code (stmt); | 1309 enum tree_code code = gimple_cond_code (stmt); |
1096 | |
1097 /* Simplify the operands down to constants when appropriate. */ | |
1098 if (TREE_CODE (op0) == SSA_NAME) | |
1099 { | |
1100 prop_value_t *val = get_value (op0); | |
1101 if (val->lattice_val == CONSTANT) | |
1102 op0 = val->value; | |
1103 } | |
1104 | |
1105 if (TREE_CODE (op1) == SSA_NAME) | |
1106 { | |
1107 prop_value_t *val = get_value (op1); | |
1108 if (val->lattice_val == CONSTANT) | |
1109 op1 = val->value; | |
1110 } | |
1111 | |
1112 return fold_binary_loc (loc, code, boolean_type_node, op0, op1); | 1310 return fold_binary_loc (loc, code, boolean_type_node, op0, op1); |
1113 } | 1311 } |
1114 | 1312 |
1115 case GIMPLE_SWITCH: | 1313 case GIMPLE_SWITCH: |
1116 { | 1314 { |
1117 tree rhs = gimple_switch_index (stmt); | 1315 /* Return the constant switch index. */ |
1118 | 1316 return valueize_op (gimple_switch_index (stmt)); |
1119 if (TREE_CODE (rhs) == SSA_NAME) | |
1120 { | |
1121 /* If the RHS is an SSA_NAME, return its known constant value, | |
1122 if any. */ | |
1123 return get_value (rhs)->value; | |
1124 } | |
1125 | |
1126 return rhs; | |
1127 } | 1317 } |
1128 | 1318 |
1129 default: | 1319 default: |
1130 gcc_unreachable (); | 1320 gcc_unreachable (); |
1131 } | 1321 } |
1132 } | 1322 } |
1133 | 1323 |
1324 /* See if we can find a constructor defining the value of BASE. |
1325 When we know the constructor with a constant offset (such as |
1326 when BASE is array[40] and we do know the constructor of array), then |
1327 BIT_OFFSET is adjusted accordingly. |
1328 | |
1329 As a special case, return error_mark_node when the constructor |
1330 is not explicitly available but is known to be zero, |
1331 as for 'static const int a;'. */ |
1332 static tree | |
1333 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset) | |
1334 { | |
1335 HOST_WIDE_INT bit_offset2, size, max_size; | |
1336 if (TREE_CODE (base) == MEM_REF) | |
1337 { | |
1338 if (!integer_zerop (TREE_OPERAND (base, 1))) | |
1339 { | |
1340 if (!host_integerp (TREE_OPERAND (base, 1), 0)) | |
1341 return NULL_TREE; | |
1342 *bit_offset += (mem_ref_offset (base).low | |
1343 * BITS_PER_UNIT); | |
1344 } | |
1345 | |
1346 base = get_constant_value (TREE_OPERAND (base, 0)); | |
1347 if (!base || TREE_CODE (base) != ADDR_EXPR) | |
1348 return NULL_TREE; | |
1349 base = TREE_OPERAND (base, 0); | |
1350 } | |
1351 | |
1352 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its | |
1353 DECL_INITIAL. If BASE is a nested reference into another | |
1354 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve | |
1355 the inner reference. */ | |
1356 switch (TREE_CODE (base)) | |
1357 { | |
1358 case VAR_DECL: | |
1359 if (!const_value_known_p (base)) | |
1360 return NULL_TREE; | |
1361 | |
1362 /* Fallthru. */ | |
1363 case CONST_DECL: | |
1364 if (!DECL_INITIAL (base) | |
1365 && (TREE_STATIC (base) || DECL_EXTERNAL (base))) | |
1366 return error_mark_node; | |
1367 return DECL_INITIAL (base); | |
1368 | |
1369 case ARRAY_REF: | |
1370 case COMPONENT_REF: | |
1371 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size); | |
1372 if (max_size == -1 || size != max_size) | |
1373 return NULL_TREE; | |
1374 *bit_offset += bit_offset2; | |
1375 return get_base_constructor (base, bit_offset); | |
1376 | |
1377 case STRING_CST: | |
1378 case CONSTRUCTOR: | |
1379 return base; | |
1380 | |
1381 default: | |
1382 return NULL_TREE; | |
1383 } | |
1384 } | |
1385 | |
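The error_mark_node special case encodes a useful C fact: a const object with static storage duration and no explicit initializer is zero-initialized, so a load from it can still fold even though there is no CONSTRUCTOR to walk. A hedged illustration of the language rule the code relies on:

    #include <assert.h>

    /* No explicit DECL_INITIAL, but static storage duration means the
       object is zero-initialized; get_base_constructor reports this as
       error_mark_node and callers fold loads from it to zero.  */
    static const int a;

    int main (void)
    {
      assert (a == 0);
      return 0;
    }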
1386 /* CTOR is a STRING_CST. Fold a reference of type TYPE and size SIZE |
1387 to the memory at bit OFFSET. |
1388 |
1389 We only do the simple job of folding byte accesses. */ |
1390 | |
1391 static tree | |
1392 fold_string_cst_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset, | |
1393 unsigned HOST_WIDE_INT size) | |
1394 { | |
1395 if (INTEGRAL_TYPE_P (type) | |
1396 && (TYPE_MODE (type) | |
1397 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) | |
1398 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) | |
1399 == MODE_INT) | |
1400 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1 | |
1401 && size == BITS_PER_UNIT | |
1402 && !(offset % BITS_PER_UNIT)) | |
1403 { | |
1404 offset /= BITS_PER_UNIT; | |
1405 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor)) | |
1406 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor) | |
1407 [offset])); | |
1408 /* Folding | |
1409 const char a[20]="hello"; | |
1410 return a[10]; | |
1411 | |
1412 might lead to offset greater than string length. In this case we | |
1413 know the value is either initialized to 0 or out of bounds. Return 0 |
1414 in both cases. */ | |
1415 return build_zero_cst (type); | |
1416 } | |
1417 return NULL_TREE; | |
1418 } | |
1419 | |
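The in-bounds/out-of-bounds split above deserves a concrete reading: the STRING_CST holds only the written prefix of the array, while the rest of the object is zero-filled, so offsets past TREE_STRING_LENGTH (but still inside the array) legitimately fold to 0. A hedged byte-level sketch:

    #include <assert.h>

    /* Mirrors the byte-access case: the initializer's bytes for the
       first LEN positions, zero anywhere past them inside the object. */
    static int fold_string_byte (const char *ctor, unsigned len, unsigned off)
    {
      return off < len ? ctor[off] : 0;
    }

    int main (void)
    {
      /* const char a[20] = "hello"; TREE_STRING_LENGTH is 6 ("hello\0"). */
      assert (fold_string_byte ("hello", 6, 1) == 'e');
      assert (fold_string_byte ("hello", 6, 10) == 0); /* zero-filled tail */
      return 0;
    }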
1420 /* CTOR is a CONSTRUCTOR of an array type. Fold a reference of type TYPE and size |
1421 SIZE to the memory at bit OFFSET. */ |
1422 | |
1423 static tree | |
1424 fold_array_ctor_reference (tree type, tree ctor, | |
1425 unsigned HOST_WIDE_INT offset, | |
1426 unsigned HOST_WIDE_INT size) | |
1427 { | |
1428 unsigned HOST_WIDE_INT cnt; | |
1429 tree cfield, cval; | |
1430 double_int low_bound, elt_size; | |
1431 double_int index, max_index; | |
1432 double_int access_index; | |
1433 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor)); | |
1434 HOST_WIDE_INT inner_offset; | |
1435 | |
1436 /* Compute low bound and elt size. */ | |
1437 if (domain_type && TYPE_MIN_VALUE (domain_type)) | |
1438 { | |
1439 /* Static constructors for variably sized objects make no sense. */ |
1440 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST); | |
1441 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type)); | |
1442 } | |
1443 else | |
1444 low_bound = double_int_zero; | |
1445 /* Static constructors for variably sized objects make no sense. */ |
1446 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) |
1447 == INTEGER_CST); | |
1448 elt_size = | |
1449 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))); | |
1450 | |
1451 | |
1452 /* We can handle only constantly sized accesses that are known to not | |
1453 be larger than size of array element. */ | |
1454 if (!TYPE_SIZE_UNIT (type) | |
1455 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST | |
1456 || double_int_cmp (elt_size, | |
1457 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0) | |
1458 return NULL_TREE; | |
1459 | |
1460 /* Compute the array index we look for. */ | |
1461 access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT), | |
1462 elt_size, TRUNC_DIV_EXPR); | |
1463 access_index = double_int_add (access_index, low_bound); | |
1464 | |
1465 /* And offset within the access. */ | |
1466 inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT); | |
1467 | |
1468 /* See if the array field is large enough to span whole access. We do not | |
1469 care to fold accesses spanning multiple array indexes. */ | |
1470 if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT) | |
1471 return NULL_TREE; | |
1472 | |
1473 index = double_int_sub (low_bound, double_int_one); | |
1474 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval) | |
1475 { | |
1476 /* An array constructor might explicitly set the index, specify a range, |
1477 or leave the index NULL, meaning that it is the next index after |
1478 the previous one. */ |
1479 if (cfield) | |
1480 { | |
1481 if (TREE_CODE (cfield) == INTEGER_CST) | |
1482 max_index = index = tree_to_double_int (cfield); | |
1483 else | |
1484 { | |
1485 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR); | |
1486 index = tree_to_double_int (TREE_OPERAND (cfield, 0)); | |
1487 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1)); | |
1488 } | |
1489 } | |
1490 else | |
1491 max_index = index = double_int_add (index, double_int_one); | |
1492 | |
1493 /* Do we have a match? */ |
1494 if (double_int_cmp (access_index, index, 1) >= 0 | |
1495 && double_int_cmp (access_index, max_index, 1) <= 0) | |
1496 return fold_ctor_reference (type, cval, inner_offset, size); | |
1497 } | |
1498 /* When memory is not explicitly mentioned in the constructor, |
1499 it is 0 (or out of range). */ | |
1500 return build_zero_cst (type); | |
1501 } | |
1502 | |
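The index arithmetic in fold_array_ctor_reference, on a concrete case: reading a[2] from 'int a[4] = {0, 10, 20, 30}' arrives with OFFSET 64 bits; dividing by the 4-byte element size and adding the low bound 0 gives access_index 2 with inner_offset 0, so the constructor element 20 is returned. A hedged recomputation:

    #include <assert.h>

    int main (void)
    {
      /* int a[4] = { 0, 10, 20, 30 }; folding a read of a[2].  */
      const int a_init[4] = { 0, 10, 20, 30 };
      unsigned long offset = 64;      /* bit offset of the access */
      unsigned long elt_size = 4;     /* TYPE_SIZE_UNIT, in bytes */
      unsigned long low_bound = 0;    /* domain's TYPE_MIN_VALUE  */

      unsigned long access_index = offset / 8 / elt_size + low_bound;
      unsigned long inner_offset = offset % (elt_size * 8);

      assert (access_index == 2 && inner_offset == 0);
      assert (a_init[access_index] == 20); /* the value that gets folded */
      return 0;
    }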
1503 /* CTOR is CONSTRUCTOR of an aggregate or vector. | |
1504 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */ | |
1505 | |
1506 static tree | |
1507 fold_nonarray_ctor_reference (tree type, tree ctor, | |
1508 unsigned HOST_WIDE_INT offset, | |
1509 unsigned HOST_WIDE_INT size) | |
1510 { | |
1511 unsigned HOST_WIDE_INT cnt; | |
1512 tree cfield, cval; | |
1513 | |
1514 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, | |
1515 cval) | |
1516 { | |
1517 tree byte_offset = DECL_FIELD_OFFSET (cfield); | |
1518 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield); | |
1519 tree field_size = DECL_SIZE (cfield); | |
1520 double_int bitoffset; | |
1521 double_int byte_offset_cst = tree_to_double_int (byte_offset); | |
1522 double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT); | |
1523 double_int bitoffset_end; | |
1524 | |
1525 /* Variable sized objects in static constructors make no sense, |
1526 but field_size can be NULL for flexible array members. */ | |
1527 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST | |
1528 && TREE_CODE (byte_offset) == INTEGER_CST | |
1529 && (field_size != NULL_TREE | |
1530 ? TREE_CODE (field_size) == INTEGER_CST | |
1531 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE)); | |
1532 | |
1533 /* Compute bit offset of the field. */ | |
1534 bitoffset = double_int_add (tree_to_double_int (field_offset), | |
1535 double_int_mul (byte_offset_cst, | |
1536 bits_per_unit_cst)); | |
1537 /* Compute bit offset where the field ends. */ | |
1538 if (field_size != NULL_TREE) | |
1539 bitoffset_end = double_int_add (bitoffset, | |
1540 tree_to_double_int (field_size)); | |
1541 else | |
1542 bitoffset_end = double_int_zero; | |
1543 | |
1544 /* Is OFFSET in the range [BITOFFSET, BITOFFSET_END)? */ |
1545 if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) >= 0 | |
1546 && (field_size == NULL_TREE | |
1547 || double_int_cmp (uhwi_to_double_int (offset), | |
1548 bitoffset_end, 0) < 0)) | |
1549 { | |
1550 double_int access_end = double_int_add (uhwi_to_double_int (offset), | |
1551 uhwi_to_double_int (size)); | |
1552 double_int inner_offset = double_int_sub (uhwi_to_double_int (offset), | |
1553 bitoffset); | |
1554 /* We do have overlap. Now see if field is large enough to | |
1555 cover the access. Give up for accesses spanning multiple | |
1556 fields. */ | |
1557 if (double_int_cmp (access_end, bitoffset_end, 0) > 0) | |
1558 return NULL_TREE; | |
1559 return fold_ctor_reference (type, cval, | |
1560 double_int_to_uhwi (inner_offset), size); | |
1561 } | |
1562 } | |
1563 /* When memory is not explicitly mentioned in the constructor, it is 0. */ |
1564 return build_zero_cst (type); | |
1565 } | |
1566 | |
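The two-part field position used above (DECL_FIELD_OFFSET in bytes times BITS_PER_UNIT, plus DECL_FIELD_BIT_OFFSET in bits) and the half-open range test can be replayed on a small case: a field at byte offset 4 and bit offset 3 starts at bit 35, and an access at bit OFFSET lands in it iff 35 <= OFFSET < 35 + field_size. A hedged sketch:

    #include <assert.h>

    int main (void)
    {
      unsigned long byte_offset = 4, field_bit_offset = 3; /* the two parts */
      unsigned long field_size = 5;                        /* a 5-bit field */

      unsigned long bitoffset = field_bit_offset + byte_offset * 8;
      unsigned long bitoffset_end = bitoffset + field_size;

      unsigned long offset = 36;  /* the access being folded, in bits */
      assert (bitoffset <= offset && offset < bitoffset_end);
      return 0;
    }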
1567 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE | |
1568 to the memory at bit OFFSET. */ | |
1569 | |
1570 static tree | |
1571 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset, | |
1572 unsigned HOST_WIDE_INT size) | |
1573 { | |
1574 tree ret; | |
1575 | |
1576 /* We found the field with exact match. */ | |
1577 if (useless_type_conversion_p (type, TREE_TYPE (ctor)) | |
1578 && !offset) | |
1579 return canonicalize_constructor_val (ctor); | |
1580 | |
1581 /* We are at the end of walk, see if we can view convert the | |
1582 result. */ | |
1583 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset | |
1584 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */ | |
1585 && operand_equal_p (TYPE_SIZE (type), | |
1586 TYPE_SIZE (TREE_TYPE (ctor)), 0)) | |
1587 { | |
1588 ret = canonicalize_constructor_val (ctor); | |
1589 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret); | |
1590 if (ret) | |
1591 STRIP_NOPS (ret); | |
1592 return ret; | |
1593 } | |
1594 if (TREE_CODE (ctor) == STRING_CST) | |
1595 return fold_string_cst_ctor_reference (type, ctor, offset, size); | |
1596 if (TREE_CODE (ctor) == CONSTRUCTOR) | |
1597 { | |
1598 | |
1599 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE) | |
1600 return fold_array_ctor_reference (type, ctor, offset, size); | |
1601 else | |
1602 return fold_nonarray_ctor_reference (type, ctor, offset, size); | |
1603 } | |
1604 | |
1605 return NULL_TREE; | |
1606 } | |
1134 | 1607 |
1135 /* Return the tree representing the element referenced by T if T is an | 1608 /* Return the tree representing the element referenced by T if T is an |
1136 ARRAY_REF or COMPONENT_REF into constant aggregates. Return | 1609 ARRAY_REF or COMPONENT_REF into constant aggregates. Return |
1137 NULL_TREE otherwise. */ | 1610 NULL_TREE otherwise. */ |
1138 | 1611 |
1139 tree | 1612 tree |
1140 fold_const_aggregate_ref (tree t) | 1613 fold_const_aggregate_ref (tree t) |
1141 { | 1614 { |
1142 prop_value_t *value; | 1615 tree ctor, idx, base; |
1143 tree base, ctor, idx, field; | 1616 HOST_WIDE_INT offset, size, max_size; |
1144 unsigned HOST_WIDE_INT cnt; | 1617 tree tem; |
1145 tree cfield, cval; | |
1146 | 1618 |
1147 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration) | 1619 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration) |
1148 return get_symbol_constant_value (t); | 1620 return get_symbol_constant_value (t); |
1149 | 1621 |
1622 tem = fold_read_from_constant_string (t); | |
1623 if (tem) | |
1624 return tem; | |
1625 | |
1150 switch (TREE_CODE (t)) | 1626 switch (TREE_CODE (t)) |
1151 { | 1627 { |
1152 case ARRAY_REF: | 1628 case ARRAY_REF: |
1153 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its | 1629 case ARRAY_RANGE_REF: |
1154 DECL_INITIAL. If BASE is a nested reference into another | 1630 /* Constant indexes are handled well by get_base_constructor. |
1155 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve | 1631 Only special case variable offsets. |
1156 the inner reference. */ | 1632 FIXME: This code can't handle nested references with variable indexes |
1157 base = TREE_OPERAND (t, 0); | 1633 (they will be handled only by iteration of ccp). Perhaps we can bring |
1158 switch (TREE_CODE (base)) | 1634 get_ref_base_and_extent here and make it use get_constant_value. */ |
1635 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME | |
1636 && (idx = get_constant_value (TREE_OPERAND (t, 1))) | |
1637 && host_integerp (idx, 0)) | |
1159 { | 1638 { |
1160 case VAR_DECL: | 1639 tree low_bound, unit_size; |
1161 if (!TREE_READONLY (base) | 1640 |
1162 || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE | 1641 /* If the resulting bit-offset is constant, track it. */ |
1163 || !targetm.binds_local_p (base)) | 1642 if ((low_bound = array_ref_low_bound (t), |
1164 return NULL_TREE; | 1643 host_integerp (low_bound, 0)) |
1165 | 1644 && (unit_size = array_ref_element_size (t), |
1166 ctor = DECL_INITIAL (base); | 1645 host_integerp (unit_size, 1))) |
1167 break; | 1646 { |
1168 | 1647 offset = TREE_INT_CST_LOW (idx); |
1169 case ARRAY_REF: | 1648 offset -= TREE_INT_CST_LOW (low_bound); |
1170 case COMPONENT_REF: | 1649 offset *= TREE_INT_CST_LOW (unit_size); |
1171 ctor = fold_const_aggregate_ref (base); | 1650 offset *= BITS_PER_UNIT; |
1172 break; | 1651 |
1173 | 1652 base = TREE_OPERAND (t, 0); |
1174 case STRING_CST: | 1653 ctor = get_base_constructor (base, &offset); |
1175 case CONSTRUCTOR: | 1654 /* Empty constructor. Always fold to 0. */ |
1176 ctor = base; | 1655 if (ctor == error_mark_node) |
1177 break; | 1656 return build_zero_cst (TREE_TYPE (t)); |
1178 | 1657 /* Out-of-bounds array access. Value is undefined, but don't fold. */
1179 default: | 1658 if (offset < 0) |
1180 return NULL_TREE; | 1659 return NULL_TREE; |
1660 /* We cannot determine the ctor. */ |
1661 if (!ctor) | |
1662 return NULL_TREE; | |
1663 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, | |
1664 TREE_INT_CST_LOW (unit_size) | |
1665 * BITS_PER_UNIT); | |
1666 } | |
1181 } | 1667 } |
1182 | 1668 /* Fallthru. */ |
1183 if (ctor == NULL_TREE | 1669 |
1184 || (TREE_CODE (ctor) != CONSTRUCTOR | 1670 case COMPONENT_REF: |
1185 && TREE_CODE (ctor) != STRING_CST) | 1671 case BIT_FIELD_REF: |
1186 || !TREE_STATIC (ctor)) | 1672 case TARGET_MEM_REF: |
1673 case MEM_REF: | |
1674 base = get_ref_base_and_extent (t, &offset, &size, &max_size); | |
1675 ctor = get_base_constructor (base, &offset); | |
1676 | |
1677 /* Empty constructor. Always fold to 0. */ | |
1678 if (ctor == error_mark_node) | |
1679 return build_zero_cst (TREE_TYPE (t)); | |
1680 /* We do not know the precise address. */ |
1681 if (max_size == -1 || max_size != size) | |
1187 return NULL_TREE; | 1682 return NULL_TREE; |
1188 | 1683 /* We cannot determine the ctor. */
1189 /* Get the index. If we have an SSA_NAME, try to resolve it | 1684 if (!ctor) |
1190 with the current lattice value for the SSA_NAME. */ | |
1191 idx = TREE_OPERAND (t, 1); | |
1192 switch (TREE_CODE (idx)) | |
1193 { | |
1194 case SSA_NAME: | |
1195 if ((value = get_value (idx)) | |
1196 && value->lattice_val == CONSTANT | |
1197 && TREE_CODE (value->value) == INTEGER_CST) | |
1198 idx = value->value; | |
1199 else | |
1200 return NULL_TREE; | |
1201 break; | |
1202 | |
1203 case INTEGER_CST: | |
1204 break; | |
1205 | |
1206 default: | |
1207 return NULL_TREE; | |
1208 } | |
1209 | |
1210 /* Fold read from constant string. */ | |
1211 if (TREE_CODE (ctor) == STRING_CST) | |
1212 { | |
1213 if ((TYPE_MODE (TREE_TYPE (t)) | |
1214 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) | |
1215 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) | |
1216 == MODE_INT) | |
1217 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1 | |
1218 && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0) | |
1219 return build_int_cst_type (TREE_TYPE (t), | |
1220 (TREE_STRING_POINTER (ctor) | |
1221 [TREE_INT_CST_LOW (idx)])); | |
1222 return NULL_TREE; | |
1223 } | |
1224 | |
1225 /* Whoo-hoo! I'll fold ya baby. Yeah! */ | |
1226 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval) | |
1227 if (tree_int_cst_equal (cfield, idx)) | |
1228 { | |
1229 STRIP_NOPS (cval); | |
1230 if (TREE_CODE (cval) == ADDR_EXPR) | |
1231 { | |
1232 tree base = get_base_address (TREE_OPERAND (cval, 0)); | |
1233 if (base && TREE_CODE (base) == VAR_DECL) | |
1234 add_referenced_var (base); | |
1235 } | |
1236 return cval; | |
1237 } | |
1238 break; | |
1239 | |
1240 case COMPONENT_REF: | |
1241 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its | |
1242 DECL_INITIAL. If BASE is a nested reference into another | |
1243 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve | |
1244 the inner reference. */ | |
1245 base = TREE_OPERAND (t, 0); | |
1246 switch (TREE_CODE (base)) | |
1247 { | |
1248 case VAR_DECL: | |
1249 if (!TREE_READONLY (base) | |
1250 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE | |
1251 || !targetm.binds_local_p (base)) | |
1252 return NULL_TREE; | |
1253 | |
1254 ctor = DECL_INITIAL (base); | |
1255 break; | |
1256 | |
1257 case ARRAY_REF: | |
1258 case COMPONENT_REF: | |
1259 ctor = fold_const_aggregate_ref (base); | |
1260 break; | |
1261 | |
1262 default: | |
1263 return NULL_TREE; | |
1264 } | |
1265 | |
1266 if (ctor == NULL_TREE | |
1267 || TREE_CODE (ctor) != CONSTRUCTOR | |
1268 || !TREE_STATIC (ctor)) | |
1269 return NULL_TREE; | 1685 return NULL_TREE; |
1270 | 1686 |
1271 field = TREE_OPERAND (t, 1); | 1687 /* Out-of-bounds array access. Value is undefined, but don't fold. */
1272 | 1688 if (offset < 0) |
1273 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval) | 1689 return NULL_TREE; |
1274 if (cfield == field | 1690 |
1275 /* FIXME: Handle bit-fields. */ | 1691 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size); |
1276 && ! DECL_BIT_FIELD (cfield)) | |
1277 { | |
1278 STRIP_NOPS (cval); | |
1279 if (TREE_CODE (cval) == ADDR_EXPR) | |
1280 { | |
1281 tree base = get_base_address (TREE_OPERAND (cval, 0)); | |
1282 if (base && TREE_CODE (base) == VAR_DECL) | |
1283 add_referenced_var (base); | |
1284 } | |
1285 return cval; | |
1286 } | |
1287 break; | |
1288 | 1692 |
1289 case REALPART_EXPR: | 1693 case REALPART_EXPR: |
1290 case IMAGPART_EXPR: | 1694 case IMAGPART_EXPR: |
1291 { | 1695 { |
1292 tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0)); | 1696 tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0)); |
1294 return fold_build1_loc (EXPR_LOCATION (t), | 1698 return fold_build1_loc (EXPR_LOCATION (t), |
1295 TREE_CODE (t), TREE_TYPE (t), c); | 1699 TREE_CODE (t), TREE_TYPE (t), c); |
1296 break; | 1700 break; |
1297 } | 1701 } |
1298 | 1702 |
1299 case INDIRECT_REF: | 1703 default: |
1704 break; | |
1705 } | |
1706 | |
1707 return NULL_TREE; | |
1708 } | |
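The new ARRAY_REF/ARRAY_RANGE_REF arm is what lets CCP fold a load whose index is a variable once the lattice proves it constant: get_constant_value supplies IDX, and the bit offset becomes (idx - low_bound) * unit_size * BITS_PER_UNIT before handing off to fold_ctor_reference. A hypothetical fragment this path resolves (assuming a 4-byte int):

    static const int tab[4] = { 10, 20, 30, 40 };

    int pick (int i)
    {
      if (i == 2)
        /* Here the lattice value of i is 2, so the load is folded from
           bit offset (2 - 0) * 4 * 8 = 64 and yields 30.  */
        return tab[i];
      return 0;
    }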
1709 | |
1710 /* Apply the operation CODE in type TYPE to the value, mask pair | |
1711 RVAL and RMASK representing a value of type RTYPE and set | |
1712 the value, mask pair *VAL and *MASK to the result. */ | |
1713 | |
1714 static void | |
1715 bit_value_unop_1 (enum tree_code code, tree type, | |
1716 double_int *val, double_int *mask, | |
1717 tree rtype, double_int rval, double_int rmask) | |
1718 { | |
1719 switch (code) | |
1720 { | |
1721 case BIT_NOT_EXPR: | |
1722 *mask = rmask; | |
1723 *val = double_int_not (rval); | |
1724 break; | |
1725 | |
1726 case NEGATE_EXPR: | |
1300 { | 1727 { |
1301 tree base = TREE_OPERAND (t, 0); | 1728 double_int temv, temm; |
1302 if (TREE_CODE (base) == SSA_NAME | 1729 /* Return ~rval + 1. */ |
1303 && (value = get_value (base)) | 1730 bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask); |
1304 && value->lattice_val == CONSTANT | 1731 bit_value_binop_1 (PLUS_EXPR, type, val, mask, |
1305 && TREE_CODE (value->value) == ADDR_EXPR | 1732 type, temv, temm, |
1306 && useless_type_conversion_p (TREE_TYPE (t), | 1733 type, double_int_one, double_int_zero); |
1307 TREE_TYPE (TREE_TYPE (value->value)))) | |
1308 return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0)); | |
1309 break; | 1734 break; |
1310 } | 1735 } |
1311 | 1736 |
1737 CASE_CONVERT: | |
1738 { | |
1739 bool uns; | |
1740 | |
1741 /* First extend mask and value according to the original type. */ | |
1742 uns = (TREE_CODE (rtype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (rtype) | |
1743 ? 0 : TYPE_UNSIGNED (rtype)); | |
1744 *mask = double_int_ext (rmask, TYPE_PRECISION (rtype), uns); | |
1745 *val = double_int_ext (rval, TYPE_PRECISION (rtype), uns); | |
1746 | |
1747 /* Then extend mask and value according to the target type. */ | |
1748 uns = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type) | |
1749 ? 0 : TYPE_UNSIGNED (type)); | |
1750 *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns); | |
1751 *val = double_int_ext (*val, TYPE_PRECISION (type), uns); | |
1752 break; | |
1753 } | |
1754 | |
1312 default: | 1755 default: |
1756 *mask = double_int_minus_one; | |
1313 break; | 1757 break; |
1314 } | 1758 } |
1315 | 1759 } |
1316 return NULL_TREE; | 1760 |
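Throughout the bit-value code a value/mask pair describes partial knowledge of an integer: a bit of VAL is meaningful exactly where the corresponding bit of MASK is clear, and set mask bits are unknown. The sketch below is a standalone model of the BIT_NOT_EXPR case using plain 8-bit integers rather than double_int; it is illustrative only, not GCC code:

    #include <assert.h>
    #include <stdint.h>

    int main (void)
    {
      /* Low nibble known to be 1111, high nibble unknown.  */
      uint8_t val = 0x0F, mask = 0xF0;

      /* BIT_NOT_EXPR: the known bits flip, the unknown set is unchanged.  */
      uint8_t nval = (uint8_t) ~val;
      uint8_t nmask = mask;

      assert (nmask == 0xF0);                      /* same bits unknown     */
      assert ((nval & (uint8_t) ~nmask) == 0x00);  /* known nibble is now 0 */
      return 0;
    }

NEGATE_EXPR then comes for free: as the code above shows, it is composed from BIT_NOT_EXPR followed by the PLUS_EXPR rule with the constant 1.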
1761 /* Apply the operation CODE in type TYPE to the value, mask pairs | |
1762 R1VAL, R1MASK and R2VAL, R2MASK representing values of type R1TYPE |
1763 and R2TYPE and set the value, mask pair *VAL and *MASK to the result. */ | |
1764 | |
1765 static void | |
1766 bit_value_binop_1 (enum tree_code code, tree type, | |
1767 double_int *val, double_int *mask, | |
1768 tree r1type, double_int r1val, double_int r1mask, | |
1769 tree r2type, double_int r2val, double_int r2mask) | |
1770 { | |
1771 bool uns = (TREE_CODE (type) == INTEGER_TYPE | |
1772 && TYPE_IS_SIZETYPE (type) ? 0 : TYPE_UNSIGNED (type)); | |
1773 /* Assume we'll get a constant result. Use an initial varying value; |
1774 we fall back to varying in the end if necessary. */ | |
1775 *mask = double_int_minus_one; | |
1776 switch (code) | |
1777 { | |
1778 case BIT_AND_EXPR: | |
1779 /* A result bit is known when both input bits are known, or when |
1780 either input bit is known to be zero: (m1 | m2) & ((v1 | m1) & (v2 | m2)) */ |
1781 *mask = double_int_and (double_int_ior (r1mask, r2mask), | |
1782 double_int_and (double_int_ior (r1val, r1mask), | |
1783 double_int_ior (r2val, r2mask))); | |
1784 *val = double_int_and (r1val, r2val); | |
1785 break; | |
1786 | |
1787 case BIT_IOR_EXPR: | |
1788 /* A result bit is known when both input bits are known, or when |
1789 either input bit is known to be one: (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)). */ |
1790 *mask = double_int_and_not | |
1791 (double_int_ior (r1mask, r2mask), | |
1792 double_int_ior (double_int_and_not (r1val, r1mask), | |
1793 double_int_and_not (r2val, r2mask))); | |
1794 *val = double_int_ior (r1val, r2val); | |
1795 break; | |
1796 | |
1797 case BIT_XOR_EXPR: | |
1798 /* m1 | m2 */ | |
1799 *mask = double_int_ior (r1mask, r2mask); | |
1800 *val = double_int_xor (r1val, r2val); | |
1801 break; | |
1802 | |
1803 case LROTATE_EXPR: | |
1804 case RROTATE_EXPR: | |
1805 if (double_int_zero_p (r2mask)) | |
1806 { | |
1807 HOST_WIDE_INT shift = r2val.low; | |
1808 if (code == RROTATE_EXPR) | |
1809 shift = -shift; | |
1810 *mask = double_int_lrotate (r1mask, shift, TYPE_PRECISION (type)); | |
1811 *val = double_int_lrotate (r1val, shift, TYPE_PRECISION (type)); | |
1812 } | |
1813 break; | |
1814 | |
1815 case LSHIFT_EXPR: | |
1816 case RSHIFT_EXPR: | |
1817 /* ??? We can handle partially known shift counts if we know | |
1818 their sign. That way we can tell that (x << (y | 8)) & 255 |
1819 is zero. */ | |
1820 if (double_int_zero_p (r2mask)) | |
1821 { | |
1822 HOST_WIDE_INT shift = r2val.low; | |
1823 if (code == RSHIFT_EXPR) | |
1824 shift = -shift; | |
1825 /* We need to know if we are doing a left or a right shift | |
1826 to properly shift in zeros for left shifts and unsigned |
1827 right shifts, and the sign bit for signed right shifts. |
1828 For signed right shifts we shift in varying in case | |
1829 the sign bit was varying. */ | |
1830 if (shift > 0) | |
1831 { | |
1832 *mask = double_int_lshift (r1mask, shift, | |
1833 TYPE_PRECISION (type), false); | |
1834 *val = double_int_lshift (r1val, shift, | |
1835 TYPE_PRECISION (type), false); | |
1836 } | |
1837 else if (shift < 0) | |
1838 { | |
1839 /* ??? We can have sizetype-related inconsistencies in |
1840 the IL. */ | |
1841 if ((TREE_CODE (r1type) == INTEGER_TYPE | |
1842 && (TYPE_IS_SIZETYPE (r1type) | |
1843 ? 0 : TYPE_UNSIGNED (r1type))) != uns) | |
1844 break; | |
1845 | |
1846 shift = -shift; | |
1847 *mask = double_int_rshift (r1mask, shift, | |
1848 TYPE_PRECISION (type), !uns); | |
1849 *val = double_int_rshift (r1val, shift, | |
1850 TYPE_PRECISION (type), !uns); | |
1851 } | |
1852 else | |
1853 { | |
1854 *mask = r1mask; | |
1855 *val = r1val; | |
1856 } | |
1857 } | |
1858 break; | |
1859 | |
1860 case PLUS_EXPR: | |
1861 case POINTER_PLUS_EXPR: | |
1862 { | |
1863 double_int lo, hi; | |
1864 /* Do the addition with unknown bits set to zero, to give carry-ins of | |
1865 zero wherever possible. */ | |
1866 lo = double_int_add (double_int_and_not (r1val, r1mask), | |
1867 double_int_and_not (r2val, r2mask)); | |
1868 lo = double_int_ext (lo, TYPE_PRECISION (type), uns); | |
1869 /* Do the addition with unknown bits set to one, to give carry-ins of | |
1870 one wherever possible. */ | |
1871 hi = double_int_add (double_int_ior (r1val, r1mask), | |
1872 double_int_ior (r2val, r2mask)); | |
1873 hi = double_int_ext (hi, TYPE_PRECISION (type), uns); | |
1874 /* Each bit in the result is known if (a) the corresponding bits in | |
1875 both inputs are known, and (b) the carry-in to that bit position | |
1876 is known. We can check condition (b) by seeing if we got the same | |
1877 result with minimised carries as with maximised carries. */ | |
1878 *mask = double_int_ior (double_int_ior (r1mask, r2mask), | |
1879 double_int_xor (lo, hi)); | |
1880 *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns); | |
1881 /* It shouldn't matter whether we choose lo or hi here. */ | |
1882 *val = lo; | |
1883 break; | |
1884 } | |
1885 | |
1886 case MINUS_EXPR: | |
1887 { | |
1888 double_int temv, temm; | |
1889 bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm, | |
1890 r2type, r2val, r2mask); | |
1891 bit_value_binop_1 (PLUS_EXPR, type, val, mask, | |
1892 r1type, r1val, r1mask, | |
1893 r2type, temv, temm); | |
1894 break; | |
1895 } | |
1896 | |
1897 case MULT_EXPR: | |
1898 { | |
1899 /* Just track trailing zeros in both operands: the product |
1900 has at least the sum of the two counts. */ |
1901 int r1tz = double_int_ctz (double_int_ior (r1val, r1mask)); | |
1902 int r2tz = double_int_ctz (double_int_ior (r2val, r2mask)); | |
1903 if (r1tz + r2tz >= HOST_BITS_PER_DOUBLE_INT) | |
1904 { | |
1905 *mask = double_int_zero; | |
1906 *val = double_int_zero; | |
1907 } | |
1908 else if (r1tz + r2tz > 0) | |
1909 { | |
1910 *mask = double_int_not (double_int_mask (r1tz + r2tz)); | |
1911 *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns); | |
1912 *val = double_int_zero; | |
1913 } | |
1914 break; | |
1915 } | |
1916 | |
1917 case EQ_EXPR: | |
1918 case NE_EXPR: | |
1919 { | |
1920 double_int m = double_int_ior (r1mask, r2mask); | |
1921 if (!double_int_equal_p (double_int_and_not (r1val, m), | |
1922 double_int_and_not (r2val, m))) | |
1923 { | |
1924 *mask = double_int_zero; | |
1925 *val = ((code == EQ_EXPR) ? double_int_zero : double_int_one); | |
1926 } | |
1927 else | |
1928 { | |
1929 /* We know the result of a comparison is always one or zero. */ | |
1930 *mask = double_int_one; | |
1931 *val = double_int_zero; | |
1932 } | |
1933 break; | |
1934 } | |
1935 | |
1936 case GE_EXPR: | |
1937 case GT_EXPR: | |
1938 { | |
1939 double_int tem = r1val; | |
1940 r1val = r2val; | |
1941 r2val = tem; | |
1942 tem = r1mask; | |
1943 r1mask = r2mask; | |
1944 r2mask = tem; | |
1945 code = swap_tree_comparison (code); | |
1946 } | |
1947 /* Fallthru. */ | |
1948 case LT_EXPR: | |
1949 case LE_EXPR: | |
1950 { | |
1951 int minmax, maxmin; | |
1952 /* If the most significant bits are not known we know nothing. */ | |
1953 if (double_int_negative_p (r1mask) || double_int_negative_p (r2mask)) | |
1954 break; | |
1955 | |
1956 /* For comparisons the signedness is in the comparison operands. */ | |
1957 uns = (TREE_CODE (r1type) == INTEGER_TYPE | |
1958 && TYPE_IS_SIZETYPE (r1type) ? 0 : TYPE_UNSIGNED (r1type)); | |
1959 /* ??? We can have sizetype-related inconsistencies in the IL. */ |
1960 if ((TREE_CODE (r2type) == INTEGER_TYPE | |
1961 && TYPE_IS_SIZETYPE (r2type) ? 0 : TYPE_UNSIGNED (r2type)) != uns) | |
1962 break; | |
1963 | |
1964 /* If we know the most significant bits we know the value |
1965 ranges by means of treating varying bits as zero |
1966 or one. Do a cross comparison of the max/min pairs. */ | |
1967 maxmin = double_int_cmp (double_int_ior (r1val, r1mask), | |
1968 double_int_and_not (r2val, r2mask), uns); | |
1969 minmax = double_int_cmp (double_int_and_not (r1val, r1mask), | |
1970 double_int_ior (r2val, r2mask), uns); | |
1971 if (maxmin < 0) /* r1 is less than r2. */ | |
1972 { | |
1973 *mask = double_int_zero; | |
1974 *val = double_int_one; | |
1975 } | |
1976 else if (minmax > 0) /* r1 is not less than or equal to r2. */ |
1977 { | |
1978 *mask = double_int_zero; | |
1979 *val = double_int_zero; | |
1980 } | |
1981 else if (maxmin == minmax) /* r1 and r2 are equal. */ | |
1982 { | |
1983 /* This probably should never happen, as we'd have |
1984 folded the comparison during fully constant value folding. */ |
1985 *mask = double_int_zero; | |
1986 *val = (code == LE_EXPR ? double_int_one : double_int_zero); | |
1987 } | |
1988 else | |
1989 { | |
1990 /* We know the result of a comparison is always one or zero. */ | |
1991 *mask = double_int_one; | |
1992 *val = double_int_zero; | |
1993 } | |
1994 break; | |
1995 } | |
1996 | |
1997 default:; | |
1998 } | |
1999 } | |
2000 | |
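The PLUS_EXPR case is the subtle one: it bounds carry propagation by performing the addition twice, once with every unknown bit forced to zero and once forced to one, and declares a result bit known only where both inputs are known and the two sums agree. A standalone model of that rule over uint64_t (illustrative only; bit_value_plus is not a GCC function):

    #include <assert.h>
    #include <stdint.h>

    /* Value/mask addition: set mask bits mark unknown bits.  */
    static void
    bit_value_plus (uint64_t v1, uint64_t m1, uint64_t v2, uint64_t m2,
                    uint64_t *val, uint64_t *mask)
    {
      uint64_t lo = (v1 & ~m1) + (v2 & ~m2);   /* carries minimised */
      uint64_t hi = (v1 | m1) + (v2 | m2);     /* carries maximised */
      *mask = m1 | m2 | (lo ^ hi);   /* unknown input bit or unknown carry */
      *val = lo;
    }

    int main (void)
    {
      uint64_t val, mask;
      /* x + 4 where only the low two bits of x are known (to be zero).  */
      bit_value_plus (0, ~(uint64_t) 3, 4, 0, &val, &mask);
      assert ((mask & 3) == 0);   /* the low two bits of the sum stay known */
      assert ((val & 3) == 0);    /* ... and they are zero                  */
      return 0;
    }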
2001 /* Return the propagation value when applying the operation CODE to | |
2002 the value RHS yielding type TYPE. */ | |
2003 | |
2004 static prop_value_t | |
2005 bit_value_unop (enum tree_code code, tree type, tree rhs) | |
2006 { | |
2007 prop_value_t rval = get_value_for_expr (rhs, true); | |
2008 double_int value, mask; | |
2009 prop_value_t val; | |
2010 gcc_assert ((rval.lattice_val == CONSTANT | |
2011 && TREE_CODE (rval.value) == INTEGER_CST) | |
2012 || double_int_minus_one_p (rval.mask)); | |
2013 bit_value_unop_1 (code, type, &value, &mask, | |
2014 TREE_TYPE (rhs), value_to_double_int (rval), rval.mask); | |
2015 if (!double_int_minus_one_p (mask)) | |
2016 { | |
2017 val.lattice_val = CONSTANT; | |
2018 val.mask = mask; | |
2019 /* ??? Delay building trees here. */ | |
2020 val.value = double_int_to_tree (type, value); | |
2021 } | |
2022 else | |
2023 { | |
2024 val.lattice_val = VARYING; | |
2025 val.value = NULL_TREE; | |
2026 val.mask = double_int_minus_one; | |
2027 } | |
2028 return val; | |
2029 } | |
2030 | |
2031 /* Return the propagation value when applying the operation CODE to | |
2032 the values RHS1 and RHS2 yielding type TYPE. */ | |
2033 | |
2034 static prop_value_t | |
2035 bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2) | |
2036 { | |
2037 prop_value_t r1val = get_value_for_expr (rhs1, true); | |
2038 prop_value_t r2val = get_value_for_expr (rhs2, true); | |
2039 double_int value, mask; | |
2040 prop_value_t val; | |
2041 gcc_assert ((r1val.lattice_val == CONSTANT | |
2042 && TREE_CODE (r1val.value) == INTEGER_CST) | |
2043 || double_int_minus_one_p (r1val.mask)); | |
2044 gcc_assert ((r2val.lattice_val == CONSTANT | |
2045 && TREE_CODE (r2val.value) == INTEGER_CST) | |
2046 || double_int_minus_one_p (r2val.mask)); | |
2047 bit_value_binop_1 (code, type, &value, &mask, | |
2048 TREE_TYPE (rhs1), value_to_double_int (r1val), r1val.mask, | |
2049 TREE_TYPE (rhs2), value_to_double_int (r2val), r2val.mask); | |
2050 if (!double_int_minus_one_p (mask)) | |
2051 { | |
2052 val.lattice_val = CONSTANT; | |
2053 val.mask = mask; | |
2054 /* ??? Delay building trees here. */ | |
2055 val.value = double_int_to_tree (type, value); | |
2056 } | |
2057 else | |
2058 { | |
2059 val.lattice_val = VARYING; | |
2060 val.value = NULL_TREE; | |
2061 val.mask = double_int_minus_one; | |
2062 } | |
2063 return val; | |
1317 } | 2064 } |
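bit_value_binop and its unop counterpart then translate the helper's value/mask pair back into the lattice: an all-ones mask becomes VARYING, anything else becomes a CONSTANT whose mask records which bits are still unknown. The BIT_AND_EXPR rule shows why this pays off even when no full constant emerges; the following standalone model (illustrative, not GCC code) computes the pair for x & 0xF0 with x completely unknown:

    #include <assert.h>
    #include <stdint.h>

    int main (void)
    {
      uint64_t v1 = 0, m1 = ~(uint64_t) 0;   /* x: nothing known       */
      uint64_t v2 = 0xF0, m2 = 0;            /* the constant mask 0xF0 */

      /* The BIT_AND_EXPR rule from bit_value_binop_1.  */
      uint64_t mask = (m1 | m2) & ((v1 | m1) & (v2 | m2));
      uint64_t val  = v1 & v2;

      /* Only bits 4-7 remain unknown; every other bit is known zero, so
         the lattice records CONSTANT 0 with mask 0xF0 instead of VARYING.  */
      assert (mask == 0xF0 && val == 0);
      return 0;
    }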
1318 | 2065 |
1319 /* Evaluate statement STMT. | 2066 /* Evaluate statement STMT. |
1320 Valid only for assignments, calls, conditionals, and switches. */ | 2067 Valid only for assignments, calls, conditionals, and switches. */ |
1321 | 2068 |
1323 evaluate_stmt (gimple stmt) | 2070 evaluate_stmt (gimple stmt) |
1324 { | 2071 { |
1325 prop_value_t val; | 2072 prop_value_t val; |
1326 tree simplified = NULL_TREE; | 2073 tree simplified = NULL_TREE; |
1327 ccp_lattice_t likelyvalue = likely_value (stmt); | 2074 ccp_lattice_t likelyvalue = likely_value (stmt); |
1328 bool is_constant; | 2075 bool is_constant = false; |
1329 | |
1330 fold_defer_overflow_warnings (); | |
1331 | |
1332 /* If the statement is likely to have a CONSTANT result, then try | |
1333 to fold the statement to determine the constant value. */ | |
1334 /* FIXME. This is the only place that we call ccp_fold. | |
1335 Since likely_value never returns CONSTANT for calls, we will | |
1336 not attempt to fold them, including builtins that may profit. */ | |
1337 if (likelyvalue == CONSTANT) | |
1338 simplified = ccp_fold (stmt); | |
1339 /* If the statement is likely to have a VARYING result, then do not | |
1340 bother folding the statement. */ | |
1341 else if (likelyvalue == VARYING) | |
1342 { | |
1343 enum gimple_code code = gimple_code (stmt); | |
1344 if (code == GIMPLE_ASSIGN) | |
1345 { | |
1346 enum tree_code subcode = gimple_assign_rhs_code (stmt); | |
1347 | |
1348 /* Other cases cannot satisfy is_gimple_min_invariant | |
1349 without folding. */ | |
1350 if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS) | |
1351 simplified = gimple_assign_rhs1 (stmt); | |
1352 } | |
1353 else if (code == GIMPLE_SWITCH) | |
1354 simplified = gimple_switch_index (stmt); | |
1355 else | |
1356 /* These cannot satisfy is_gimple_min_invariant without folding. */ | |
1357 gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND); | |
1358 } | |
1359 | |
1360 is_constant = simplified && is_gimple_min_invariant (simplified); | |
1361 | |
1362 fold_undefer_overflow_warnings (is_constant, stmt, 0); | |
1363 | 2076 |
1364 if (dump_file && (dump_flags & TDF_DETAILS)) | 2077 if (dump_file && (dump_flags & TDF_DETAILS)) |
1365 { | 2078 { |
1366 fprintf (dump_file, "which is likely "); | 2079 fprintf (dump_file, "which is likely "); |
1367 switch (likelyvalue) | 2080 switch (likelyvalue) |
1378 default:; | 2091 default:; |
1379 } | 2092 } |
1380 fprintf (dump_file, "\n"); | 2093 fprintf (dump_file, "\n"); |
1381 } | 2094 } |
1382 | 2095 |
1383 if (is_constant) | 2096 /* If the statement is likely to have a CONSTANT result, then try |
1384 { | 2097 to fold the statement to determine the constant value. */ |
1385 /* The statement produced a constant value. */ | 2098 /* FIXME. This is the only place that we call ccp_fold. |
1386 val.lattice_val = CONSTANT; | 2099 Since likely_value never returns CONSTANT for calls, we will |
1387 val.value = simplified; | 2100 not attempt to fold them, including builtins that may profit. */ |
1388 } | 2101 if (likelyvalue == CONSTANT) |
1389 else | 2102 { |
2103 fold_defer_overflow_warnings (); | |
2104 simplified = ccp_fold (stmt); | |
2105 is_constant = simplified && is_gimple_min_invariant (simplified); | |
2106 fold_undefer_overflow_warnings (is_constant, stmt, 0); | |
2107 if (is_constant) | |
2108 { | |
2109 /* The statement produced a constant value. */ | |
2110 val.lattice_val = CONSTANT; | |
2111 val.value = simplified; | |
2112 val.mask = double_int_zero; | |
2113 } | |
2114 } | |
2115 /* If the statement is likely to have a VARYING result, then do not | |
2116 bother folding the statement. */ | |
2117 else if (likelyvalue == VARYING) | |
2118 { | |
2119 enum gimple_code code = gimple_code (stmt); | |
2120 if (code == GIMPLE_ASSIGN) | |
2121 { | |
2122 enum tree_code subcode = gimple_assign_rhs_code (stmt); | |
2123 | |
2124 /* Other cases cannot satisfy is_gimple_min_invariant | |
2125 without folding. */ | |
2126 if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS) | |
2127 simplified = gimple_assign_rhs1 (stmt); | |
2128 } | |
2129 else if (code == GIMPLE_SWITCH) | |
2130 simplified = gimple_switch_index (stmt); | |
2131 else | |
2132 /* These cannot satisfy is_gimple_min_invariant without folding. */ | |
2133 gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND); | |
2134 is_constant = simplified && is_gimple_min_invariant (simplified); | |
2135 if (is_constant) | |
2136 { | |
2137 /* The statement produced a constant value. */ | |
2138 val.lattice_val = CONSTANT; | |
2139 val.value = simplified; | |
2140 val.mask = double_int_zero; | |
2141 } | |
2142 } | |
2143 | |
2144 /* Resort to simplification for bitwise tracking. */ | |
2145 if (flag_tree_bit_ccp | |
2146 && likelyvalue == CONSTANT | |
2147 && !is_constant) | |
2148 { | |
2149 enum gimple_code code = gimple_code (stmt); | |
2150 tree fndecl; | |
2151 val.lattice_val = VARYING; | |
2152 val.value = NULL_TREE; | |
2153 val.mask = double_int_minus_one; | |
2154 if (code == GIMPLE_ASSIGN) | |
2155 { | |
2156 enum tree_code subcode = gimple_assign_rhs_code (stmt); | |
2157 tree rhs1 = gimple_assign_rhs1 (stmt); | |
2158 switch (get_gimple_rhs_class (subcode)) | |
2159 { | |
2160 case GIMPLE_SINGLE_RHS: | |
2161 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)) | |
2162 || POINTER_TYPE_P (TREE_TYPE (rhs1))) | |
2163 val = get_value_for_expr (rhs1, true); | |
2164 break; | |
2165 | |
2166 case GIMPLE_UNARY_RHS: | |
2167 if ((INTEGRAL_TYPE_P (TREE_TYPE (rhs1)) | |
2168 || POINTER_TYPE_P (TREE_TYPE (rhs1))) | |
2169 && (INTEGRAL_TYPE_P (gimple_expr_type (stmt)) | |
2170 || POINTER_TYPE_P (gimple_expr_type (stmt)))) | |
2171 val = bit_value_unop (subcode, gimple_expr_type (stmt), rhs1); | |
2172 break; | |
2173 | |
2174 case GIMPLE_BINARY_RHS: | |
2175 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)) | |
2176 || POINTER_TYPE_P (TREE_TYPE (rhs1))) | |
2177 { | |
2178 tree lhs = gimple_assign_lhs (stmt); | |
2179 tree rhs2 = gimple_assign_rhs2 (stmt); | |
2180 val = bit_value_binop (subcode, | |
2181 TREE_TYPE (lhs), rhs1, rhs2); | |
2182 } | |
2183 break; | |
2184 | |
2185 default:; | |
2186 } | |
2187 } | |
2188 else if (code == GIMPLE_COND) | |
2189 { | |
2190 enum tree_code code = gimple_cond_code (stmt); | |
2191 tree rhs1 = gimple_cond_lhs (stmt); | |
2192 tree rhs2 = gimple_cond_rhs (stmt); | |
2193 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)) | |
2194 || POINTER_TYPE_P (TREE_TYPE (rhs1))) | |
2195 val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2); | |
2196 } | |
2197 else if (code == GIMPLE_CALL | |
2198 && (fndecl = gimple_call_fndecl (stmt)) | |
2199 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL) | |
2200 { | |
2201 switch (DECL_FUNCTION_CODE (fndecl)) | |
2202 { | |
2203 case BUILT_IN_MALLOC: | |
2204 case BUILT_IN_REALLOC: | |
2205 case BUILT_IN_CALLOC: | |
2206 val.lattice_val = CONSTANT; | |
2207 val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0); | |
2208 val.mask = shwi_to_double_int | |
2209 (~(((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT) | |
2210 / BITS_PER_UNIT - 1)); | |
2211 break; | |
2212 | |
2213 case BUILT_IN_ALLOCA: | |
2214 val.lattice_val = CONSTANT; | |
2215 val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0); | |
2216 val.mask = shwi_to_double_int | |
2217 (~(((HOST_WIDE_INT) BIGGEST_ALIGNMENT) | |
2218 / BITS_PER_UNIT - 1)); | |
2219 break; | |
2220 | |
2221 default:; | |
2222 } | |
2223 } | |
2224 is_constant = (val.lattice_val == CONSTANT); | |
2225 } | |
2226 | |
2227 if (!is_constant) | |
1390 { | 2228 { |
1391 /* The statement produced a nonconstant value. If the statement | 2229 /* The statement produced a nonconstant value. If the statement |
1392 had UNDEFINED operands, then the result of the statement | 2230 had UNDEFINED operands, then the result of the statement |
1393 should be UNDEFINED. Otherwise, the statement is VARYING. */ | 2231 should be UNDEFINED. Otherwise, the statement is VARYING. */ |
1394 if (likelyvalue == UNDEFINED) | 2232 if (likelyvalue == UNDEFINED) |
1395 val.lattice_val = likelyvalue; | 2233 { |
2234 val.lattice_val = likelyvalue; | |
2235 val.mask = double_int_zero; | |
2236 } | |
1396 else | 2237 else |
1397 val.lattice_val = VARYING; | 2238 { |
2239 val.lattice_val = VARYING; | |
2240 val.mask = double_int_minus_one; | |
2241 } | |
1398 | 2242 |
1399 val.value = NULL_TREE; | 2243 val.value = NULL_TREE; |
1400 } | 2244 } |
1401 | 2245 |
1402 return val; | 2246 return val; |
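The builtin cases above are where the mask representation earns its keep for pointers: a call to malloc, realloc, calloc or alloca yields value 0 with a mask whose low log2(alignment-in-bytes) bits are clear, i.e. the pointer's low bits are known to be zero while everything else stays unknown. A quick model of the mask expression, assuming a MALLOC_ABI_ALIGNMENT of 128 bits and BITS_PER_UNIT of 8 (both are target-dependent):

    #include <assert.h>
    #include <stdint.h>

    int main (void)
    {
      const int64_t malloc_abi_alignment = 128;   /* bits; assumed value */
      const int64_t bits_per_unit = 8;

      /* Mirrors ~(((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT) / BITS_PER_UNIT - 1).  */
      int64_t mask = ~(malloc_abi_alignment / bits_per_unit - 1);

      /* The low four bits are known (and the value says they are zero);
         all higher bits of the returned pointer remain unknown.  */
      assert (mask == ~(int64_t) 15);
      return 0;
    }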
1418 /* Statement evaluation will handle type mismatches in constants | 2262 /* Statement evaluation will handle type mismatches in constants |
1419 more gracefully than the final propagation. This allows us to | 2263 more gracefully than the final propagation. This allows us to |
1420 fold more conditionals here. */ | 2264 fold more conditionals here. */ |
1421 val = evaluate_stmt (stmt); | 2265 val = evaluate_stmt (stmt); |
1422 if (val.lattice_val != CONSTANT | 2266 if (val.lattice_val != CONSTANT |
1423 || TREE_CODE (val.value) != INTEGER_CST) | 2267 || !double_int_zero_p (val.mask)) |
1424 return false; | 2268 return false; |
2269 | |
2270 if (dump_file) | |
2271 { | |
2272 fprintf (dump_file, "Folding predicate "); | |
2273 print_gimple_expr (dump_file, stmt, 0, 0); | |
2274 fprintf (dump_file, " to "); | |
2275 print_generic_expr (dump_file, val.value, 0); | |
2276 fprintf (dump_file, "\n"); | |
2277 } | |
1425 | 2278 |
1426 if (integer_zerop (val.value)) | 2279 if (integer_zerop (val.value)) |
1427 gimple_cond_make_false (stmt); | 2280 gimple_cond_make_false (stmt); |
1428 else | 2281 else |
1429 gimple_cond_make_true (stmt); | 2282 gimple_cond_make_true (stmt); |
1432 } | 2285 } |
1433 | 2286 |
1434 case GIMPLE_CALL: | 2287 case GIMPLE_CALL: |
1435 { | 2288 { |
1436 tree lhs = gimple_call_lhs (stmt); | 2289 tree lhs = gimple_call_lhs (stmt); |
1437 prop_value_t *val; | 2290 tree val; |
1438 tree argt; | 2291 tree argt; |
2292 tree callee; | |
1439 bool changed = false; | 2293 bool changed = false; |
1440 unsigned i; | 2294 unsigned i; |
1441 | 2295 |
1442 /* If the call was folded into a constant, make sure it goes | 2296 /* If the call was folded into a constant, make sure it goes
1443 away even if we cannot propagate into all uses because of | 2297 away even if we cannot propagate into all uses because of |
1444 type issues. */ | 2298 type issues. */ |
1445 if (lhs | 2299 if (lhs |
1446 && TREE_CODE (lhs) == SSA_NAME | 2300 && TREE_CODE (lhs) == SSA_NAME |
1447 && (val = get_value (lhs)) | 2301 && (val = get_constant_value (lhs))) |
1448 && val->lattice_val == CONSTANT) | |
1449 { | 2302 { |
1450 tree new_rhs = unshare_expr (val->value); | 2303 tree new_rhs = unshare_expr (val); |
1451 bool res; | 2304 bool res; |
1452 if (!useless_type_conversion_p (TREE_TYPE (lhs), | 2305 if (!useless_type_conversion_p (TREE_TYPE (lhs), |
1453 TREE_TYPE (new_rhs))) | 2306 TREE_TYPE (new_rhs))) |
1454 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs); | 2307 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs); |
1455 res = update_call_from_tree (gsi, new_rhs); | 2308 res = update_call_from_tree (gsi, new_rhs); |
1465 for (i = 0; i < gimple_call_num_args (stmt) && argt; | 2318 for (i = 0; i < gimple_call_num_args (stmt) && argt; |
1466 ++i, argt = TREE_CHAIN (argt)) | 2319 ++i, argt = TREE_CHAIN (argt)) |
1467 { | 2320 { |
1468 tree arg = gimple_call_arg (stmt, i); | 2321 tree arg = gimple_call_arg (stmt, i); |
1469 if (TREE_CODE (arg) == SSA_NAME | 2322 if (TREE_CODE (arg) == SSA_NAME |
1470 && (val = get_value (arg)) | 2323 && (val = get_constant_value (arg)) |
1471 && val->lattice_val == CONSTANT | |
1472 && useless_type_conversion_p | 2324 && useless_type_conversion_p |
1473 (TYPE_MAIN_VARIANT (TREE_VALUE (argt)), | 2325 (TYPE_MAIN_VARIANT (TREE_VALUE (argt)), |
1474 TYPE_MAIN_VARIANT (TREE_TYPE (val->value)))) | 2326 TYPE_MAIN_VARIANT (TREE_TYPE (val)))) |
1475 { | 2327 { |
1476 gimple_call_set_arg (stmt, i, unshare_expr (val->value)); | 2328 gimple_call_set_arg (stmt, i, unshare_expr (val)); |
1477 changed = true; | 2329 changed = true; |
1478 } | 2330 } |
1479 } | 2331 } |
1480 | 2332 |
2333 callee = gimple_call_fn (stmt); | |
2334 if (TREE_CODE (callee) == OBJ_TYPE_REF | |
2335 && TREE_CODE (OBJ_TYPE_REF_EXPR (callee)) == SSA_NAME) | |
2336 { | |
2337 tree expr = OBJ_TYPE_REF_EXPR (callee); | |
2338 OBJ_TYPE_REF_EXPR (callee) = valueize_op (expr); | |
2339 if (gimple_fold_call (gsi, false)) | |
2340 changed = true; | |
2341 OBJ_TYPE_REF_EXPR (callee) = expr; | |
2342 } | |
2343 | |
1481 return changed; | 2344 return changed; |
1482 } | 2345 } |
1483 | 2346 |
1484 case GIMPLE_ASSIGN: | 2347 case GIMPLE_ASSIGN: |
1485 { | 2348 { |
1486 tree lhs = gimple_assign_lhs (stmt); | 2349 tree lhs = gimple_assign_lhs (stmt); |
1487 prop_value_t *val; | 2350 tree val; |
1488 | 2351 |
1489 /* If we have a load that turned out to be constant, replace it | 2352 /* If we have a load that turned out to be constant, replace it
1490 as we cannot propagate into all uses in all cases. */ | 2353 as we cannot propagate into all uses in all cases. */ |
1491 if (gimple_assign_single_p (stmt) | 2354 if (gimple_assign_single_p (stmt) |
1492 && TREE_CODE (lhs) == SSA_NAME | 2355 && TREE_CODE (lhs) == SSA_NAME |
1493 && (val = get_value (lhs)) | 2356 && (val = get_constant_value (lhs))) |
1494 && val->lattice_val == CONSTANT) | |
1495 { | 2357 { |
1496 tree rhs = unshare_expr (val->value); | 2358 tree rhs = unshare_expr (val); |
1497 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs))) | 2359 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs))) |
1498 rhs = fold_convert (TREE_TYPE (lhs), rhs); | 2360 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs); |
1499 gimple_assign_set_rhs_from_tree (gsi, rhs); | 2361 gimple_assign_set_rhs_from_tree (gsi, rhs); |
1500 return true; | 2362 return true; |
1501 } | 2363 } |
1502 | 2364 |
1503 return false; | 2365 return false; |
1524 tree lhs = gimple_get_lhs (stmt); | 2386 tree lhs = gimple_get_lhs (stmt); |
1525 | 2387 |
1526 gcc_assert (gimple_code (stmt) != GIMPLE_CALL | 2388 gcc_assert (gimple_code (stmt) != GIMPLE_CALL |
1527 || gimple_call_lhs (stmt) != NULL_TREE); | 2389 || gimple_call_lhs (stmt) != NULL_TREE); |
1528 | 2390 |
1529 if (gimple_assign_copy_p (stmt)) | 2391 if (gimple_assign_single_p (stmt) |
1530 { | 2392 && gimple_assign_rhs_code (stmt) == SSA_NAME) |
1531 tree rhs = gimple_assign_rhs1 (stmt); | 2393 /* For a simple copy operation, we copy the lattice values. */ |
1532 | 2394 val = *get_value (gimple_assign_rhs1 (stmt)); |
1533 if (TREE_CODE (rhs) == SSA_NAME) | |
1534 { | |
1535 /* For a simple copy operation, we copy the lattice values. */ | |
1536 prop_value_t *nval = get_value (rhs); | |
1537 val = *nval; | |
1538 } | |
1539 else | |
1540 val = evaluate_stmt (stmt); | |
1541 } | |
1542 else | 2395 else |
1543 /* Evaluate the statement, which could be | 2396 /* Evaluate the statement, which could be |
1544 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */ | 2397 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */ |
1545 val = evaluate_stmt (stmt); | 2398 val = evaluate_stmt (stmt); |
1546 | 2399 |
1575 prop_value_t val; | 2428 prop_value_t val; |
1576 basic_block block; | 2429 basic_block block; |
1577 | 2430 |
1578 block = gimple_bb (stmt); | 2431 block = gimple_bb (stmt); |
1579 val = evaluate_stmt (stmt); | 2432 val = evaluate_stmt (stmt); |
2433 if (val.lattice_val != CONSTANT | |
2434 || !double_int_zero_p (val.mask)) | |
2435 return SSA_PROP_VARYING; | |
1580 | 2436 |
1581 /* Find which edge out of the conditional block will be taken and add it | 2437 /* Find which edge out of the conditional block will be taken and add it |
1582 to the worklist. If no single edge can be determined statically, | 2438 to the worklist. If no single edge can be determined statically, |
1583 return SSA_PROP_VARYING to feed all the outgoing edges to the | 2439 return SSA_PROP_VARYING to feed all the outgoing edges to the |
1584 propagation engine. */ | 2440 propagation engine. */ |
1585 *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0; | 2441 *taken_edge_p = find_taken_edge (block, val.value); |
1586 if (*taken_edge_p) | 2442 if (*taken_edge_p) |
1587 return SSA_PROP_INTERESTING; | 2443 return SSA_PROP_INTERESTING; |
1588 else | 2444 else |
1589 return SSA_PROP_VARYING; | 2445 return SSA_PROP_VARYING; |
1590 } | 2446 } |
1645 /* Definitions made by statements other than assignments to | 2501 /* Definitions made by statements other than assignments to |
1646 SSA_NAMEs represent unknown modifications to their outputs. | 2502 SSA_NAMEs represent unknown modifications to their outputs. |
1647 Mark them VARYING. */ | 2503 Mark them VARYING. */ |
1648 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS) | 2504 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS) |
1649 { | 2505 { |
1650 prop_value_t v = { VARYING, NULL_TREE }; | 2506 prop_value_t v = { VARYING, NULL_TREE, { -1, (HOST_WIDE_INT) -1 } }; |
1651 set_lattice_value (def, v); | 2507 set_lattice_value (def, v); |
1652 } | 2508 } |
1653 | 2509 |
1654 return SSA_PROP_VARYING; | 2510 return SSA_PROP_VARYING; |
1655 } | 2511 } |