Mercurial > hg > CbC > CbC_gcc
comparison gcc/gimple-fold.c @ 67:f6334be47118
update gcc from gcc-4.6-20100522 to gcc-4.6-20110318
author | nobuyasu <dimolto@cr.ie.u-ryukyu.ac.jp> |
---|---|
date | Tue, 22 Mar 2011 17:18:12 +0900 |
parents | b7f97abdc517 |
children | 04ced10e8804 |
comparison
equal
deleted
inserted
replaced
65:65488c3d617d | 67:f6334be47118 |
---|---|
22 #include "system.h" | 22 #include "system.h" |
23 #include "coretypes.h" | 23 #include "coretypes.h" |
24 #include "tm.h" | 24 #include "tm.h" |
25 #include "tree.h" | 25 #include "tree.h" |
26 #include "flags.h" | 26 #include "flags.h" |
27 #include "rtl.h" | |
28 #include "tm_p.h" | |
29 #include "ggc.h" | |
30 #include "basic-block.h" | |
31 #include "output.h" | |
32 #include "expr.h" | |
33 #include "function.h" | 27 #include "function.h" |
34 #include "diagnostic.h" | |
35 #include "timevar.h" | |
36 #include "tree-dump.h" | 28 #include "tree-dump.h" |
37 #include "tree-flow.h" | 29 #include "tree-flow.h" |
38 #include "tree-pass.h" | 30 #include "tree-pass.h" |
39 #include "tree-ssa-propagate.h" | 31 #include "tree-ssa-propagate.h" |
40 #include "value-prof.h" | |
41 #include "langhooks.h" | |
42 #include "target.h" | 32 #include "target.h" |
43 | 33 |
34 /* Return true when DECL can be referenced from current unit. | |
35 We can get declarations that are not possible to reference for | |
36 various reasons: | |
37 | |
38 1) When analyzing C++ virtual tables. | |
39 C++ virtual tables do have known constructors even | |
40 when they are keyed to other compilation unit. | |
41 Those tables can contain pointers to methods and vars | |
42 in other units. Those methods have both STATIC and EXTERNAL | |
43 set. | |
44 2) In WHOPR mode devirtualization might lead to reference | |
45 to method that was partitioned elsewhere. | |
46 In this case we have static VAR_DECL or FUNCTION_DECL | |
47 that has no corresponding callgraph/varpool node | |
48 declaring the body. | |
49 3) COMDAT functions referred by external vtables that | |
50 we devirtualize only during final compilation stage. | |
51 At this time we already decided that we will not output | |
52 the function body and thus we can't reference the symbol | |
53 directly. */ | |
54 | |
55 static bool | |
56 can_refer_decl_in_current_unit_p (tree decl) | |
57 { | |
58 struct varpool_node *vnode; | |
59 struct cgraph_node *node; | |
60 | |
61 if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl)) | |
62 return true; | |
63 /* External flag is set, so we deal with C++ reference | |
64 to static object from other file. */ | |
65 if (DECL_EXTERNAL (decl) && TREE_STATIC (decl) | |
66 && TREE_CODE (decl) == VAR_DECL) | |
67 { | |
68 /* Just be sure it is not big in frontend setting | |
69 flags incorrectly. Those variables should never | |
70 be finalized. */ | |
71 gcc_checking_assert (!(vnode = varpool_get_node (decl)) | |
72 || !vnode->finalized); | |
73 return false; | |
74 } | |
75 /* When function is public, we always can introduce new reference. | |
76 Exception are the COMDAT functions where introducing a direct | |
77 reference implies need to include function body in the current unit. */ | |
78 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl)) | |
79 return true; | |
80 /* We are not at ltrans stage; so don't worry about WHOPR. | |
81 Also when still gimplifying all referred comdat functions will be | |
82 produced. */ | |
83 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready)) | |
84 return true; | |
85 /* If we already output the function body, we are safe. */ | |
86 if (TREE_ASM_WRITTEN (decl)) | |
87 return true; | |
88 if (TREE_CODE (decl) == FUNCTION_DECL) | |
89 { | |
90 node = cgraph_get_node (decl); | |
91 /* Check that we still have function body and that we didn't take | |
92 the decision to eliminate offline copy of the function yet. | |
93 The second is important when devirtualization happens during final | |
94 compilation stage when making a new reference no longer makes callee | |
95 to be compiled. */ | |
96 if (!node || !node->analyzed || node->global.inlined_to) | |
97 return false; | |
98 } | |
99 else if (TREE_CODE (decl) == VAR_DECL) | |
100 { | |
101 vnode = varpool_get_node (decl); | |
102 if (!vnode || !vnode->finalized) | |
103 return false; | |
104 } | |
105 return true; | |
106 } | |
107 | |
108 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into | |
109 acceptable form for is_gimple_min_invariant. */ | |
110 | |
111 tree | |
112 canonicalize_constructor_val (tree cval) | |
113 { | |
114 STRIP_NOPS (cval); | |
115 if (TREE_CODE (cval) == POINTER_PLUS_EXPR) | |
116 { | |
117 tree t = maybe_fold_offset_to_address (EXPR_LOCATION (cval), | |
118 TREE_OPERAND (cval, 0), | |
119 TREE_OPERAND (cval, 1), | |
120 TREE_TYPE (cval)); | |
121 if (t) | |
122 cval = t; | |
123 } | |
124 if (TREE_CODE (cval) == ADDR_EXPR) | |
125 { | |
126 tree base = get_base_address (TREE_OPERAND (cval, 0)); | |
127 | |
128 if (base | |
129 && (TREE_CODE (base) == VAR_DECL | |
130 || TREE_CODE (base) == FUNCTION_DECL) | |
131 && !can_refer_decl_in_current_unit_p (base)) | |
132 return NULL_TREE; | |
133 if (base && TREE_CODE (base) == VAR_DECL) | |
134 add_referenced_var (base); | |
135 /* We never have the chance to fixup types in global initializers | |
136 during gimplification. Do so here. */ | |
137 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0))) | |
138 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0)); | |
139 } | |
140 return cval; | |
141 } | |
44 | 142 |
45 /* If SYM is a constant variable with known value, return the value. | 143 /* If SYM is a constant variable with known value, return the value. |
46 NULL_TREE is returned otherwise. */ | 144 NULL_TREE is returned otherwise. */ |
47 | 145 |
48 tree | 146 tree |
49 get_symbol_constant_value (tree sym) | 147 get_symbol_constant_value (tree sym) |
50 { | 148 { |
51 if (TREE_STATIC (sym) | 149 if (const_value_known_p (sym)) |
52 && (TREE_READONLY (sym) | |
53 || TREE_CODE (sym) == CONST_DECL)) | |
54 { | 150 { |
55 tree val = DECL_INITIAL (sym); | 151 tree val = DECL_INITIAL (sym); |
56 if (val) | 152 if (val) |
57 { | 153 { |
58 STRIP_NOPS (val); | 154 val = canonicalize_constructor_val (val); |
59 if (is_gimple_min_invariant (val)) | 155 if (val && is_gimple_min_invariant (val)) |
60 { | 156 return val; |
61 if (TREE_CODE (val) == ADDR_EXPR) | 157 else |
62 { | 158 return NULL_TREE; |
63 tree base = get_base_address (TREE_OPERAND (val, 0)); | |
64 if (base && TREE_CODE (base) == VAR_DECL) | |
65 { | |
66 TREE_ADDRESSABLE (base) = 1; | |
67 if (gimple_referenced_vars (cfun)) | |
68 add_referenced_var (base); | |
69 } | |
70 } | |
71 return val; | |
72 } | |
73 } | 159 } |
74 /* Variables declared 'const' without an initializer | 160 /* Variables declared 'const' without an initializer |
75 have zero as the initializer if they may not be | 161 have zero as the initializer if they may not be |
76 overridden at link or run time. */ | 162 overridden at link or run time. */ |
77 if (!val | 163 if (!val |
78 && !DECL_EXTERNAL (sym) | |
79 && targetm.binds_local_p (sym) | |
80 && (INTEGRAL_TYPE_P (TREE_TYPE (sym)) | 164 && (INTEGRAL_TYPE_P (TREE_TYPE (sym)) |
81 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym)))) | 165 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym)))) |
82 return fold_convert (TREE_TYPE (sym), integer_zero_node); | 166 return build_zero_cst (TREE_TYPE (sym)); |
83 } | 167 } |
84 | 168 |
85 return NULL_TREE; | 169 return NULL_TREE; |
86 } | 170 } |
87 | 171 |
90 dereference DEREF and cancel them. */ | 174 dereference DEREF and cancel them. */ |
91 | 175 |
92 bool | 176 bool |
93 may_propagate_address_into_dereference (tree addr, tree deref) | 177 may_propagate_address_into_dereference (tree addr, tree deref) |
94 { | 178 { |
95 gcc_assert (INDIRECT_REF_P (deref) | 179 gcc_assert (TREE_CODE (deref) == MEM_REF |
96 && TREE_CODE (addr) == ADDR_EXPR); | 180 && TREE_CODE (addr) == ADDR_EXPR); |
97 | 181 |
98 /* Don't propagate if ADDR's operand has incomplete type. */ | 182 /* Don't propagate if ADDR's operand has incomplete type. */ |
99 if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0)))) | 183 if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0)))) |
100 return false; | 184 return false; |
116 TREE_TYPE (TREE_OPERAND (addr, 0)))); | 200 TREE_TYPE (TREE_OPERAND (addr, 0)))); |
117 } | 201 } |
118 | 202 |
119 | 203 |
120 /* A subroutine of fold_stmt. Attempts to fold *(A+O) to A[X]. | 204 /* A subroutine of fold_stmt. Attempts to fold *(A+O) to A[X]. |
121 BASE is an array type. OFFSET is a byte displacement. ORIG_TYPE | 205 BASE is an array type. OFFSET is a byte displacement. |
122 is the desired result type. | |
123 | 206 |
124 LOC is the location of the original expression. */ | 207 LOC is the location of the original expression. */ |
125 | 208 |
126 static tree | 209 static tree |
127 maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset, | 210 maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset) |
128 tree orig_type, | |
129 bool allow_negative_idx) | |
130 { | 211 { |
131 tree min_idx, idx, idx_type, elt_offset = integer_zero_node; | 212 tree min_idx, idx, idx_type, elt_offset = integer_zero_node; |
132 tree array_type, elt_type, elt_size; | 213 tree array_type, elt_type, elt_size; |
133 tree domain_type; | 214 tree domain_type; |
134 | 215 |
153 /* Ignore stupid user tricks of indexing non-array variables. */ | 234 /* Ignore stupid user tricks of indexing non-array variables. */ |
154 array_type = TREE_TYPE (base); | 235 array_type = TREE_TYPE (base); |
155 if (TREE_CODE (array_type) != ARRAY_TYPE) | 236 if (TREE_CODE (array_type) != ARRAY_TYPE) |
156 return NULL_TREE; | 237 return NULL_TREE; |
157 elt_type = TREE_TYPE (array_type); | 238 elt_type = TREE_TYPE (array_type); |
158 if (!useless_type_conversion_p (orig_type, elt_type)) | |
159 return NULL_TREE; | |
160 | 239 |
161 /* Use signed size type for intermediate computation on the index. */ | 240 /* Use signed size type for intermediate computation on the index. */ |
162 idx_type = ssizetype; | 241 idx_type = ssizetype; |
163 | 242 |
164 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the | 243 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the |
227 | 306 |
228 /* We don't want to construct access past array bounds. For example | 307 /* We don't want to construct access past array bounds. For example |
229 char *(c[4]); | 308 char *(c[4]); |
230 c[3][2]; | 309 c[3][2]; |
231 should not be simplified into (*c)[14] or tree-vrp will | 310 should not be simplified into (*c)[14] or tree-vrp will |
232 give false warnings. The same is true for | 311 give false warnings. |
233 struct A { long x; char d[0]; } *a; | 312 This is only an issue for multi-dimensional arrays. */ |
234 (char *)a - 4; | 313 if (TREE_CODE (elt_type) == ARRAY_TYPE |
235 which should be not folded to &a->d[-8]. */ | 314 && domain_type) |
236 if (domain_type | 315 { |
237 && TYPE_MAX_VALUE (domain_type) | 316 if (TYPE_MAX_VALUE (domain_type) |
238 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST) | 317 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST |
239 { | 318 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type), idx)) |
240 tree up_bound = TYPE_MAX_VALUE (domain_type); | |
241 | |
242 if (tree_int_cst_lt (up_bound, idx) | |
243 /* Accesses after the end of arrays of size 0 (gcc | |
244 extension) and 1 are likely intentional ("struct | |
245 hack"). */ | |
246 && compare_tree_int (up_bound, 1) > 0) | |
247 return NULL_TREE; | 319 return NULL_TREE; |
248 } | 320 else if (TYPE_MIN_VALUE (domain_type) |
249 if (domain_type | 321 && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST |
250 && TYPE_MIN_VALUE (domain_type)) | 322 && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type))) |
251 { | |
252 if (!allow_negative_idx | |
253 && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST | |
254 && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type))) | |
255 return NULL_TREE; | 323 return NULL_TREE; |
256 } | 324 else if (compare_tree_int (idx, 0) < 0) |
257 else if (!allow_negative_idx | 325 return NULL_TREE; |
258 && compare_tree_int (idx, 0) < 0) | 326 } |
259 return NULL_TREE; | |
260 | 327 |
261 { | 328 { |
262 tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE); | 329 tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE); |
263 SET_EXPR_LOCATION (t, loc); | 330 SET_EXPR_LOCATION (t, loc); |
264 return t; | 331 return t; |
265 } | 332 } |
266 } | 333 } |
267 | 334 |
268 | 335 |
269 /* Attempt to fold *(S+O) to S.X. | 336 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index]. |
270 BASE is a record type. OFFSET is a byte displacement. ORIG_TYPE | |
271 is the desired result type. | |
272 | |
273 LOC is the location of the original expression. */ | |
274 | |
275 static tree | |
276 maybe_fold_offset_to_component_ref (location_t loc, tree record_type, | |
277 tree base, tree offset, tree orig_type) | |
278 { | |
279 tree f, t, field_type, tail_array_field, field_offset; | |
280 tree ret; | |
281 tree new_base; | |
282 | |
283 if (TREE_CODE (record_type) != RECORD_TYPE | |
284 && TREE_CODE (record_type) != UNION_TYPE | |
285 && TREE_CODE (record_type) != QUAL_UNION_TYPE) | |
286 return NULL_TREE; | |
287 | |
288 /* Short-circuit silly cases. */ | |
289 if (useless_type_conversion_p (record_type, orig_type)) | |
290 return NULL_TREE; | |
291 | |
292 tail_array_field = NULL_TREE; | |
293 for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f)) | |
294 { | |
295 int cmp; | |
296 | |
297 if (TREE_CODE (f) != FIELD_DECL) | |
298 continue; | |
299 if (DECL_BIT_FIELD (f)) | |
300 continue; | |
301 | |
302 if (!DECL_FIELD_OFFSET (f)) | |
303 continue; | |
304 field_offset = byte_position (f); | |
305 if (TREE_CODE (field_offset) != INTEGER_CST) | |
306 continue; | |
307 | |
308 /* ??? Java creates "interesting" fields for representing base classes. | |
309 They have no name, and have no context. With no context, we get into | |
310 trouble with nonoverlapping_component_refs_p. Skip them. */ | |
311 if (!DECL_FIELD_CONTEXT (f)) | |
312 continue; | |
313 | |
314 /* The previous array field isn't at the end. */ | |
315 tail_array_field = NULL_TREE; | |
316 | |
317 /* Check to see if this offset overlaps with the field. */ | |
318 cmp = tree_int_cst_compare (field_offset, offset); | |
319 if (cmp > 0) | |
320 continue; | |
321 | |
322 field_type = TREE_TYPE (f); | |
323 | |
324 /* Here we exactly match the offset being checked. If the types match, | |
325 then we can return that field. */ | |
326 if (cmp == 0 | |
327 && useless_type_conversion_p (orig_type, field_type)) | |
328 { | |
329 t = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE); | |
330 return t; | |
331 } | |
332 | |
333 /* Don't care about offsets into the middle of scalars. */ | |
334 if (!AGGREGATE_TYPE_P (field_type)) | |
335 continue; | |
336 | |
337 /* Check for array at the end of the struct. This is often | |
338 used as for flexible array members. We should be able to | |
339 turn this into an array access anyway. */ | |
340 if (TREE_CODE (field_type) == ARRAY_TYPE) | |
341 tail_array_field = f; | |
342 | |
343 /* Check the end of the field against the offset. */ | |
344 if (!DECL_SIZE_UNIT (f) | |
345 || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST) | |
346 continue; | |
347 t = int_const_binop (MINUS_EXPR, offset, field_offset, 1); | |
348 if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f))) | |
349 continue; | |
350 | |
351 /* If we matched, then set offset to the displacement into | |
352 this field. */ | |
353 new_base = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE); | |
354 SET_EXPR_LOCATION (new_base, loc); | |
355 | |
356 /* Recurse to possibly find the match. */ | |
357 ret = maybe_fold_offset_to_array_ref (loc, new_base, t, orig_type, | |
358 f == TYPE_FIELDS (record_type)); | |
359 if (ret) | |
360 return ret; | |
361 ret = maybe_fold_offset_to_component_ref (loc, field_type, new_base, t, | |
362 orig_type); | |
363 if (ret) | |
364 return ret; | |
365 } | |
366 | |
367 if (!tail_array_field) | |
368 return NULL_TREE; | |
369 | |
370 f = tail_array_field; | |
371 field_type = TREE_TYPE (f); | |
372 offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1); | |
373 | |
374 /* If we get here, we've got an aggregate field, and a possibly | |
375 nonzero offset into them. Recurse and hope for a valid match. */ | |
376 base = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE); | |
377 SET_EXPR_LOCATION (base, loc); | |
378 | |
379 t = maybe_fold_offset_to_array_ref (loc, base, offset, orig_type, | |
380 f == TYPE_FIELDS (record_type)); | |
381 if (t) | |
382 return t; | |
383 return maybe_fold_offset_to_component_ref (loc, field_type, base, offset, | |
384 orig_type); | |
385 } | |
386 | |
387 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type | |
388 or BASE[index] or by combination of those. | |
389 | |
390 LOC is the location of original expression. | 337 LOC is the location of original expression. |
391 | 338 |
392 Before attempting the conversion strip off existing ADDR_EXPRs and | 339 Before attempting the conversion strip off existing ADDR_EXPRs. */ |
393 handled component refs. */ | |
394 | 340 |
395 tree | 341 tree |
396 maybe_fold_offset_to_reference (location_t loc, tree base, tree offset, | 342 maybe_fold_offset_to_reference (location_t loc, tree base, tree offset, |
397 tree orig_type) | 343 tree orig_type) |
398 { | 344 { |
399 tree ret; | 345 tree ret; |
400 tree type; | |
401 | 346 |
402 STRIP_NOPS (base); | 347 STRIP_NOPS (base); |
403 if (TREE_CODE (base) != ADDR_EXPR) | 348 if (TREE_CODE (base) != ADDR_EXPR) |
404 return NULL_TREE; | 349 return NULL_TREE; |
405 | 350 |
406 base = TREE_OPERAND (base, 0); | 351 base = TREE_OPERAND (base, 0); |
407 | 352 if (types_compatible_p (orig_type, TREE_TYPE (base)) |
408 /* Handle case where existing COMPONENT_REF pick e.g. wrong field of union, | |
409 so it needs to be removed and new COMPONENT_REF constructed. | |
410 The wrong COMPONENT_REF are often constructed by folding the | |
411 (type *)&object within the expression (type *)&object+offset */ | |
412 if (handled_component_p (base)) | |
413 { | |
414 HOST_WIDE_INT sub_offset, size, maxsize; | |
415 tree newbase; | |
416 newbase = get_ref_base_and_extent (base, &sub_offset, | |
417 &size, &maxsize); | |
418 gcc_assert (newbase); | |
419 if (size == maxsize | |
420 && size != -1 | |
421 && !(sub_offset & (BITS_PER_UNIT - 1))) | |
422 { | |
423 base = newbase; | |
424 if (sub_offset) | |
425 offset = int_const_binop (PLUS_EXPR, offset, | |
426 build_int_cst (TREE_TYPE (offset), | |
427 sub_offset / BITS_PER_UNIT), 1); | |
428 } | |
429 } | |
430 if (useless_type_conversion_p (orig_type, TREE_TYPE (base)) | |
431 && integer_zerop (offset)) | 353 && integer_zerop (offset)) |
432 return base; | 354 return base; |
433 type = TREE_TYPE (base); | 355 |
434 | 356 ret = maybe_fold_offset_to_array_ref (loc, base, offset); |
435 ret = maybe_fold_offset_to_component_ref (loc, type, base, offset, orig_type); | 357 if (ret && types_compatible_p (orig_type, TREE_TYPE (ret))) |
436 if (!ret) | 358 return ret; |
437 ret = maybe_fold_offset_to_array_ref (loc, base, offset, orig_type, true); | 359 return NULL_TREE; |
438 | 360 } |
439 return ret; | 361 |
440 } | 362 /* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index]. |
441 | 363 LOC is the location of the original expression. */ |
442 /* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type | |
443 or &BASE[index] or by combination of those. | |
444 | |
445 LOC is the location of the original expression. | |
446 | |
447 Before attempting the conversion strip off existing component refs. */ | |
448 | 364 |
449 tree | 365 tree |
450 maybe_fold_offset_to_address (location_t loc, tree addr, tree offset, | 366 maybe_fold_offset_to_address (location_t loc, tree addr, tree offset, |
451 tree orig_type) | 367 tree orig_type) |
452 { | 368 { |
453 tree t; | 369 tree base, ret; |
454 | 370 |
455 gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr)) | 371 STRIP_NOPS (addr); |
456 && POINTER_TYPE_P (orig_type)); | 372 if (TREE_CODE (addr) != ADDR_EXPR) |
457 | 373 return NULL_TREE; |
458 t = maybe_fold_offset_to_reference (loc, addr, offset, | 374 base = TREE_OPERAND (addr, 0); |
459 TREE_TYPE (orig_type)); | 375 ret = maybe_fold_offset_to_array_ref (loc, base, offset); |
460 if (t != NULL_TREE) | 376 if (ret) |
461 { | 377 { |
462 tree orig = addr; | 378 ret = build_fold_addr_expr (ret); |
463 tree ptr_type; | 379 if (!useless_type_conversion_p (orig_type, TREE_TYPE (ret))) |
464 | |
465 /* For __builtin_object_size to function correctly we need to | |
466 make sure not to fold address arithmetic so that we change | |
467 reference from one array to another. This would happen for | |
468 example for | |
469 | |
470 struct X { char s1[10]; char s2[10] } s; | |
471 char *foo (void) { return &s.s2[-4]; } | |
472 | |
473 where we need to avoid generating &s.s1[6]. As the C and | |
474 C++ frontends create different initial trees | |
475 (char *) &s.s1 + -4 vs. &s.s1[-4] we have to do some | |
476 sophisticated comparisons here. Note that checking for the | |
477 condition after the fact is easier than trying to avoid doing | |
478 the folding. */ | |
479 STRIP_NOPS (orig); | |
480 if (TREE_CODE (orig) == ADDR_EXPR) | |
481 orig = TREE_OPERAND (orig, 0); | |
482 if ((TREE_CODE (orig) == ARRAY_REF | |
483 || (TREE_CODE (orig) == COMPONENT_REF | |
484 && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE)) | |
485 && (TREE_CODE (t) == ARRAY_REF | |
486 || TREE_CODE (t) == COMPONENT_REF) | |
487 && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF | |
488 ? TREE_OPERAND (orig, 0) : orig, | |
489 TREE_CODE (t) == ARRAY_REF | |
490 ? TREE_OPERAND (t, 0) : t, 0)) | |
491 return NULL_TREE; | 380 return NULL_TREE; |
492 | 381 SET_EXPR_LOCATION (ret, loc); |
493 ptr_type = build_pointer_type (TREE_TYPE (t)); | 382 } |
494 if (!useless_type_conversion_p (orig_type, ptr_type)) | 383 |
495 return NULL_TREE; | 384 return ret; |
496 return build_fold_addr_expr_with_type_loc (loc, t, ptr_type); | |
497 } | |
498 | |
499 return NULL_TREE; | |
500 } | |
501 | |
502 /* A subroutine of fold_stmt. Attempt to simplify *(BASE+OFFSET). | |
503 Return the simplified expression, or NULL if nothing could be done. */ | |
504 | |
505 static tree | |
506 maybe_fold_stmt_indirect (tree expr, tree base, tree offset) | |
507 { | |
508 tree t; | |
509 bool volatile_p = TREE_THIS_VOLATILE (expr); | |
510 location_t loc = EXPR_LOCATION (expr); | |
511 | |
512 /* We may well have constructed a double-nested PLUS_EXPR via multiple | |
513 substitutions. Fold that down to one. Remove NON_LVALUE_EXPRs that | |
514 are sometimes added. */ | |
515 base = fold (base); | |
516 STRIP_TYPE_NOPS (base); | |
517 TREE_OPERAND (expr, 0) = base; | |
518 | |
519 /* One possibility is that the address reduces to a string constant. */ | |
520 t = fold_read_from_constant_string (expr); | |
521 if (t) | |
522 return t; | |
523 | |
524 /* Add in any offset from a POINTER_PLUS_EXPR. */ | |
525 if (TREE_CODE (base) == POINTER_PLUS_EXPR) | |
526 { | |
527 tree offset2; | |
528 | |
529 offset2 = TREE_OPERAND (base, 1); | |
530 if (TREE_CODE (offset2) != INTEGER_CST) | |
531 return NULL_TREE; | |
532 base = TREE_OPERAND (base, 0); | |
533 | |
534 offset = fold_convert (sizetype, | |
535 int_const_binop (PLUS_EXPR, offset, offset2, 1)); | |
536 } | |
537 | |
538 if (TREE_CODE (base) == ADDR_EXPR) | |
539 { | |
540 tree base_addr = base; | |
541 | |
542 /* Strip the ADDR_EXPR. */ | |
543 base = TREE_OPERAND (base, 0); | |
544 | |
545 /* Fold away CONST_DECL to its value, if the type is scalar. */ | |
546 if (TREE_CODE (base) == CONST_DECL | |
547 && is_gimple_min_invariant (DECL_INITIAL (base))) | |
548 return DECL_INITIAL (base); | |
549 | |
550 /* If there is no offset involved simply return the folded base. */ | |
551 if (integer_zerop (offset)) | |
552 return base; | |
553 | |
554 /* Try folding *(&B+O) to B.X. */ | |
555 t = maybe_fold_offset_to_reference (loc, base_addr, offset, | |
556 TREE_TYPE (expr)); | |
557 if (t) | |
558 { | |
559 /* Preserve volatileness of the original expression. | |
560 We can end up with a plain decl here which is shared | |
561 and we shouldn't mess with its flags. */ | |
562 if (!SSA_VAR_P (t)) | |
563 TREE_THIS_VOLATILE (t) = volatile_p; | |
564 return t; | |
565 } | |
566 } | |
567 else | |
568 { | |
569 /* We can get here for out-of-range string constant accesses, | |
570 such as "_"[3]. Bail out of the entire substitution search | |
571 and arrange for the entire statement to be replaced by a | |
572 call to __builtin_trap. In all likelihood this will all be | |
573 constant-folded away, but in the meantime we can't leave with | |
574 something that get_expr_operands can't understand. */ | |
575 | |
576 t = base; | |
577 STRIP_NOPS (t); | |
578 if (TREE_CODE (t) == ADDR_EXPR | |
579 && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST) | |
580 { | |
581 /* FIXME: Except that this causes problems elsewhere with dead | |
582 code not being deleted, and we die in the rtl expanders | |
583 because we failed to remove some ssa_name. In the meantime, | |
584 just return zero. */ | |
585 /* FIXME2: This condition should be signaled by | |
586 fold_read_from_constant_string directly, rather than | |
587 re-checking for it here. */ | |
588 return integer_zero_node; | |
589 } | |
590 | |
591 /* Try folding *(B+O) to B->X. Still an improvement. */ | |
592 if (POINTER_TYPE_P (TREE_TYPE (base))) | |
593 { | |
594 t = maybe_fold_offset_to_reference (loc, base, offset, | |
595 TREE_TYPE (expr)); | |
596 if (t) | |
597 return t; | |
598 } | |
599 } | |
600 | |
601 /* Otherwise we had an offset that we could not simplify. */ | |
602 return NULL_TREE; | |
603 } | 385 } |
604 | 386 |
605 | 387 |
606 /* A quaint feature extant in our address arithmetic is that there | 388 /* A quaint feature extant in our address arithmetic is that there |
607 can be hidden type changes here. The type of the result need | 389 can be hidden type changes here. The type of the result need |
630 if (TREE_CODE (op1) != INTEGER_CST) | 412 if (TREE_CODE (op1) != INTEGER_CST) |
631 { | 413 { |
632 /* Or op0 should now be A[0] and the non-constant offset defined | 414 /* Or op0 should now be A[0] and the non-constant offset defined |
633 via a multiplication by the array element size. */ | 415 via a multiplication by the array element size. */ |
634 if (TREE_CODE (op0) == ARRAY_REF | 416 if (TREE_CODE (op0) == ARRAY_REF |
417 /* As we will end up creating a variable index array access | |
418 in the outermost array dimension make sure there isn't | |
419 a more inner array that the index could overflow to. */ | |
420 && TREE_CODE (TREE_OPERAND (op0, 0)) != ARRAY_REF | |
635 && integer_zerop (TREE_OPERAND (op0, 1)) | 421 && integer_zerop (TREE_OPERAND (op0, 1)) |
636 && TREE_CODE (op1) == SSA_NAME | 422 && TREE_CODE (op1) == SSA_NAME) |
637 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (op0)), 1)) | |
638 { | 423 { |
639 gimple offset_def = SSA_NAME_DEF_STMT (op1); | 424 gimple offset_def = SSA_NAME_DEF_STMT (op1); |
640 if (!is_gimple_assign (offset_def)) | 425 tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (op0)); |
426 if (!host_integerp (elsz, 1) | |
427 || !is_gimple_assign (offset_def)) | |
428 return NULL_TREE; | |
429 | |
430 /* Do not build array references of something that we can't | |
431 see the true number of array dimensions for. */ | |
432 if (!DECL_P (TREE_OPERAND (op0, 0)) | |
433 && !handled_component_p (TREE_OPERAND (op0, 0))) | |
641 return NULL_TREE; | 434 return NULL_TREE; |
642 | 435 |
643 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR | 436 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR |
644 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST | 437 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST |
645 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), | 438 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz)) |
646 TYPE_SIZE_UNIT (TREE_TYPE (op0)))) | |
647 return build_fold_addr_expr | 439 return build_fold_addr_expr |
648 (build4 (ARRAY_REF, TREE_TYPE (op0), | 440 (build4 (ARRAY_REF, TREE_TYPE (op0), |
649 TREE_OPERAND (op0, 0), | 441 TREE_OPERAND (op0, 0), |
650 gimple_assign_rhs1 (offset_def), | 442 gimple_assign_rhs1 (offset_def), |
651 TREE_OPERAND (op0, 2), | 443 TREE_OPERAND (op0, 2), |
652 TREE_OPERAND (op0, 3))); | 444 TREE_OPERAND (op0, 3))); |
653 else if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (op0))) | 445 else if (integer_onep (elsz) |
654 && gimple_assign_rhs_code (offset_def) != MULT_EXPR) | 446 && gimple_assign_rhs_code (offset_def) != MULT_EXPR) |
655 return build_fold_addr_expr | 447 return build_fold_addr_expr |
656 (build4 (ARRAY_REF, TREE_TYPE (op0), | 448 (build4 (ARRAY_REF, TREE_TYPE (op0), |
657 TREE_OPERAND (op0, 0), | 449 TREE_OPERAND (op0, 0), |
658 op1, | 450 op1, |
659 TREE_OPERAND (op0, 2), | 451 TREE_OPERAND (op0, 2), |
660 TREE_OPERAND (op0, 3))); | 452 TREE_OPERAND (op0, 3))); |
661 } | 453 } |
454 else if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE | |
455 /* Ditto. */ | |
456 && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) != ARRAY_TYPE | |
457 && TREE_CODE (op1) == SSA_NAME) | |
458 { | |
459 gimple offset_def = SSA_NAME_DEF_STMT (op1); | |
460 tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0))); | |
461 if (!host_integerp (elsz, 1) | |
462 || !is_gimple_assign (offset_def)) | |
463 return NULL_TREE; | |
464 | |
465 /* Do not build array references of something that we can't | |
466 see the true number of array dimensions for. */ | |
467 if (!DECL_P (op0) | |
468 && !handled_component_p (op0)) | |
469 return NULL_TREE; | |
470 | |
471 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR | |
472 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST | |
473 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz)) | |
474 return build_fold_addr_expr | |
475 (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)), | |
476 op0, gimple_assign_rhs1 (offset_def), | |
477 integer_zero_node, NULL_TREE)); | |
478 else if (integer_onep (elsz) | |
479 && gimple_assign_rhs_code (offset_def) != MULT_EXPR) | |
480 return build_fold_addr_expr | |
481 (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)), | |
482 op0, op1, | |
483 integer_zero_node, NULL_TREE)); | |
484 } | |
485 | |
662 return NULL_TREE; | 486 return NULL_TREE; |
663 } | 487 } |
664 | 488 |
665 /* If the first operand is an ARRAY_REF, expand it so that we can fold | 489 /* If the first operand is an ARRAY_REF, expand it so that we can fold |
666 the offset into it. */ | 490 the offset into it. */ |
711 if (VOID_TYPE_P (ptd_type) | 535 if (VOID_TYPE_P (ptd_type) |
712 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE) | 536 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE) |
713 ptd_type = TREE_TYPE (TREE_TYPE (op0)); | 537 ptd_type = TREE_TYPE (TREE_TYPE (op0)); |
714 | 538 |
715 /* At which point we can try some of the same things as for indirects. */ | 539 /* At which point we can try some of the same things as for indirects. */ |
716 t = maybe_fold_offset_to_array_ref (loc, op0, op1, ptd_type, true); | 540 t = maybe_fold_offset_to_array_ref (loc, op0, op1); |
717 if (!t) | |
718 t = maybe_fold_offset_to_component_ref (loc, TREE_TYPE (op0), op0, op1, | |
719 ptd_type); | |
720 if (t) | 541 if (t) |
721 { | 542 { |
722 t = build1 (ADDR_EXPR, res_type, t); | 543 t = build_fold_addr_expr (t); |
544 if (!useless_type_conversion_p (res_type, TREE_TYPE (t))) | |
545 return NULL_TREE; | |
723 SET_EXPR_LOCATION (t, loc); | 546 SET_EXPR_LOCATION (t, loc); |
724 } | 547 } |
725 | 548 |
726 return t; | 549 return t; |
727 } | 550 } |
733 | 556 |
734 static tree | 557 static tree |
735 maybe_fold_reference (tree expr, bool is_lhs) | 558 maybe_fold_reference (tree expr, bool is_lhs) |
736 { | 559 { |
737 tree *t = &expr; | 560 tree *t = &expr; |
738 | 561 tree result; |
739 if (TREE_CODE (expr) == ARRAY_REF | 562 |
740 && !is_lhs) | 563 if (!is_lhs |
741 { | 564 && (result = fold_const_aggregate_ref (expr)) |
742 tree tem = fold_read_from_constant_string (expr); | 565 && is_gimple_min_invariant (result)) |
743 if (tem) | 566 return result; |
744 return tem; | |
745 } | |
746 | 567 |
747 /* ??? We might want to open-code the relevant remaining cases | 568 /* ??? We might want to open-code the relevant remaining cases |
748 to avoid using the generic fold. */ | 569 to avoid using the generic fold. */ |
749 if (handled_component_p (*t) | 570 if (handled_component_p (*t) |
750 && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0))) | 571 && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0))) |
755 } | 576 } |
756 | 577 |
757 while (handled_component_p (*t)) | 578 while (handled_component_p (*t)) |
758 t = &TREE_OPERAND (*t, 0); | 579 t = &TREE_OPERAND (*t, 0); |
759 | 580 |
760 if (TREE_CODE (*t) == INDIRECT_REF) | 581 /* Fold back MEM_REFs to reference trees. */ |
761 { | 582 if (TREE_CODE (*t) == MEM_REF |
762 tree tem = maybe_fold_stmt_indirect (*t, TREE_OPERAND (*t, 0), | 583 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR |
763 integer_zero_node); | 584 && integer_zerop (TREE_OPERAND (*t, 1)) |
764 /* Avoid folding *"abc" = 5 into 'a' = 5. */ | 585 && (TREE_THIS_VOLATILE (*t) |
765 if (is_lhs && tem && CONSTANT_CLASS_P (tem)) | 586 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))) |
766 tem = NULL_TREE; | 587 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1))) |
767 if (!tem | 588 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t)) |
768 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR) | 589 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1))))) |
769 /* If we had a good reason for propagating the address here, | 590 /* We have to look out here to not drop a required conversion |
770 make sure we end up with valid gimple. See PR34989. */ | 591 from the rhs to the lhs if is_lhs, but we don't have the |
771 tem = TREE_OPERAND (TREE_OPERAND (*t, 0), 0); | 592 rhs here to verify that. Thus require strict type |
772 | 593 compatibility. */ |
594 && types_compatible_p (TREE_TYPE (*t), | |
595 TREE_TYPE (TREE_OPERAND | |
596 (TREE_OPERAND (*t, 0), 0)))) | |
597 { | |
598 tree tem; | |
599 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0); | |
600 tem = maybe_fold_reference (expr, is_lhs); | |
601 if (tem) | |
602 return tem; | |
603 return expr; | |
604 } | |
605 /* Canonicalize MEM_REFs invariant address operand. */ | |
606 else if (TREE_CODE (*t) == MEM_REF | |
607 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0))) | |
608 { | |
609 bool volatile_p = TREE_THIS_VOLATILE (*t); | |
610 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t), | |
611 TREE_OPERAND (*t, 0), | |
612 TREE_OPERAND (*t, 1)); | |
613 if (tem) | |
614 { | |
615 TREE_THIS_VOLATILE (tem) = volatile_p; | |
616 *t = tem; | |
617 tem = maybe_fold_reference (expr, is_lhs); | |
618 if (tem) | |
619 return tem; | |
620 return expr; | |
621 } | |
622 } | |
623 else if (TREE_CODE (*t) == TARGET_MEM_REF) | |
624 { | |
625 tree tem = maybe_fold_tmr (*t); | |
773 if (tem) | 626 if (tem) |
774 { | 627 { |
775 *t = tem; | 628 *t = tem; |
776 tem = maybe_fold_reference (expr, is_lhs); | 629 tem = maybe_fold_reference (expr, is_lhs); |
777 if (tem) | 630 if (tem) |
851 if (set) | 704 if (set) |
852 result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem, | 705 result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem, |
853 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs)); | 706 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs)); |
854 } | 707 } |
855 | 708 |
856 else if (TREE_CODE (rhs) == TARGET_MEM_REF) | |
857 return maybe_fold_tmr (rhs); | |
858 | |
859 else if (REFERENCE_CLASS_P (rhs)) | 709 else if (REFERENCE_CLASS_P (rhs)) |
860 return maybe_fold_reference (rhs, false); | 710 return maybe_fold_reference (rhs, false); |
861 | 711 |
862 else if (TREE_CODE (rhs) == ADDR_EXPR) | 712 else if (TREE_CODE (rhs) == ADDR_EXPR) |
863 { | 713 { |
864 tree tem = maybe_fold_reference (TREE_OPERAND (rhs, 0), true); | 714 tree ref = TREE_OPERAND (rhs, 0); |
865 if (tem) | 715 tree tem = maybe_fold_reference (ref, true); |
716 if (tem | |
717 && TREE_CODE (tem) == MEM_REF | |
718 && integer_zerop (TREE_OPERAND (tem, 1))) | |
719 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0)); | |
720 else if (tem) | |
866 result = fold_convert (TREE_TYPE (rhs), | 721 result = fold_convert (TREE_TYPE (rhs), |
867 build_fold_addr_expr_loc (loc, tem)); | 722 build_fold_addr_expr_loc (loc, tem)); |
723 else if (TREE_CODE (ref) == MEM_REF | |
724 && integer_zerop (TREE_OPERAND (ref, 1))) | |
725 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0)); | |
868 } | 726 } |
869 | 727 |
870 else if (TREE_CODE (rhs) == CONSTRUCTOR | 728 else if (TREE_CODE (rhs) == CONSTRUCTOR |
871 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE | 729 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE |
872 && (CONSTRUCTOR_NELTS (rhs) | 730 && (CONSTRUCTOR_NELTS (rhs) |
982 gimple_assign_rhs2 (stmt), | 840 gimple_assign_rhs2 (stmt), |
983 gimple_assign_rhs1 (stmt)); | 841 gimple_assign_rhs1 (stmt)); |
984 } | 842 } |
985 break; | 843 break; |
986 | 844 |
845 case GIMPLE_TERNARY_RHS: | |
846 result = fold_ternary_loc (loc, subcode, | |
847 TREE_TYPE (gimple_assign_lhs (stmt)), | |
848 gimple_assign_rhs1 (stmt), | |
849 gimple_assign_rhs2 (stmt), | |
850 gimple_assign_rhs3 (stmt)); | |
851 | |
852 if (result) | |
853 { | |
854 STRIP_USELESS_TYPE_CONVERSION (result); | |
855 if (valid_gimple_rhs_p (result)) | |
856 return result; | |
857 | |
858 /* Fold might have produced non-GIMPLE, so if we trust it blindly | |
859 we lose canonicalization opportunities. Do not go again | |
860 through fold here though, or the same non-GIMPLE will be | |
861 produced. */ | |
862 if (commutative_ternary_tree_code (subcode) | |
863 && tree_swap_operands_p (gimple_assign_rhs1 (stmt), | |
864 gimple_assign_rhs2 (stmt), false)) | |
865 return build3 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)), | |
866 gimple_assign_rhs2 (stmt), | |
867 gimple_assign_rhs1 (stmt), | |
868 gimple_assign_rhs3 (stmt)); | |
869 } | |
870 break; | |
871 | |
987 case GIMPLE_INVALID_RHS: | 872 case GIMPLE_INVALID_RHS: |
988 gcc_unreachable (); | 873 gcc_unreachable (); |
989 } | 874 } |
990 | 875 |
991 return NULL_TREE; | 876 return NULL_TREE; |
1022 RHS of an assignment. Insert the necessary statements before | 907 RHS of an assignment. Insert the necessary statements before |
1023 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL | 908 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL |
1024 is replaced. If the call is expected to produces a result, then it | 909 is replaced. If the call is expected to produces a result, then it |
1025 is replaced by an assignment of the new RHS to the result variable. | 910 is replaced by an assignment of the new RHS to the result variable. |
1026 If the result is to be ignored, then the call is replaced by a | 911 If the result is to be ignored, then the call is replaced by a |
1027 GIMPLE_NOP. */ | 912 GIMPLE_NOP. A proper VDEF chain is retained by making the first |
913 VUSE and the last VDEF of the whole sequence be the same as the replaced | |
914 statement and using new SSA names for stores in between. */ | |
1028 | 915 |
1029 void | 916 void |
1030 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr) | 917 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr) |
1031 { | 918 { |
1032 tree lhs; | 919 tree lhs; |
1034 gimple stmt, new_stmt; | 921 gimple stmt, new_stmt; |
1035 gimple_stmt_iterator i; | 922 gimple_stmt_iterator i; |
1036 gimple_seq stmts = gimple_seq_alloc(); | 923 gimple_seq stmts = gimple_seq_alloc(); |
1037 struct gimplify_ctx gctx; | 924 struct gimplify_ctx gctx; |
1038 gimple last = NULL; | 925 gimple last = NULL; |
926 gimple laststore = NULL; | |
927 tree reaching_vuse; | |
1039 | 928 |
1040 stmt = gsi_stmt (*si_p); | 929 stmt = gsi_stmt (*si_p); |
1041 | 930 |
1042 gcc_assert (is_gimple_call (stmt)); | 931 gcc_assert (is_gimple_call (stmt)); |
1043 | 932 |
1044 lhs = gimple_call_lhs (stmt); | 933 lhs = gimple_call_lhs (stmt); |
934 reaching_vuse = gimple_vuse (stmt); | |
1045 | 935 |
1046 push_gimplify_context (&gctx); | 936 push_gimplify_context (&gctx); |
1047 | 937 |
1048 if (lhs == NULL_TREE) | 938 if (lhs == NULL_TREE) |
1049 gimplify_and_add (expr, &stmts); | 939 { |
940 gimplify_and_add (expr, &stmts); | |
941 /* We can end up with folding a memcpy of an empty class assignment | |
942 which gets optimized away by C++ gimplification. */ | |
943 if (gimple_seq_empty_p (stmts)) | |
944 { | |
945 pop_gimplify_context (NULL); | |
946 if (gimple_in_ssa_p (cfun)) | |
947 { | |
948 unlink_stmt_vdef (stmt); | |
949 release_defs (stmt); | |
950 } | |
951 gsi_remove (si_p, true); | |
952 return; | |
953 } | |
954 } | |
1050 else | 955 else |
1051 tmp = get_initialized_tmp_var (expr, &stmts, NULL); | 956 tmp = get_initialized_tmp_var (expr, &stmts, NULL); |
1052 | 957 |
1053 pop_gimplify_context (NULL); | 958 pop_gimplify_context (NULL); |
1054 | 959 |
1062 { | 967 { |
1063 gsi_insert_before (si_p, last, GSI_NEW_STMT); | 968 gsi_insert_before (si_p, last, GSI_NEW_STMT); |
1064 gsi_next (si_p); | 969 gsi_next (si_p); |
1065 } | 970 } |
1066 new_stmt = gsi_stmt (i); | 971 new_stmt = gsi_stmt (i); |
1067 find_new_referenced_vars (new_stmt); | 972 if (gimple_in_ssa_p (cfun)) |
1068 mark_symbols_for_renaming (new_stmt); | 973 { |
974 find_new_referenced_vars (new_stmt); | |
975 mark_symbols_for_renaming (new_stmt); | |
976 } | |
977 /* If the new statement has a VUSE, update it with exact SSA name we | |
978 know will reach this one. */ | |
979 if (gimple_vuse (new_stmt)) | |
980 { | |
981 /* If we've also seen a previous store create a new VDEF for | |
982 the latter one, and make that the new reaching VUSE. */ | |
983 if (laststore) | |
984 { | |
985 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore); | |
986 gimple_set_vdef (laststore, reaching_vuse); | |
987 update_stmt (laststore); | |
988 laststore = NULL; | |
989 } | |
990 gimple_set_vuse (new_stmt, reaching_vuse); | |
991 gimple_set_modified (new_stmt, true); | |
992 } | |
993 if (gimple_assign_single_p (new_stmt) | |
994 && !is_gimple_reg (gimple_assign_lhs (new_stmt))) | |
995 { | |
996 laststore = new_stmt; | |
997 } | |
1069 last = new_stmt; | 998 last = new_stmt; |
1070 } | 999 } |
1071 | 1000 |
1072 if (lhs == NULL_TREE) | 1001 if (lhs == NULL_TREE) |
1073 { | 1002 { |
1074 unlink_stmt_vdef (stmt); | 1003 /* If we replace a call without LHS that has a VDEF and our new |
1075 release_defs (stmt); | 1004 sequence ends with a store we must make that store have the same |
1005 vdef in order not to break the sequencing. This can happen | |
1006 for instance when folding memcpy calls into assignments. */ | |
1007 if (gimple_vdef (stmt) && laststore) | |
1008 { | |
1009 gimple_set_vdef (laststore, gimple_vdef (stmt)); | |
1010 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME) | |
1011 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore; | |
1012 update_stmt (laststore); | |
1013 } | |
1014 else if (gimple_in_ssa_p (cfun)) | |
1015 { | |
1016 unlink_stmt_vdef (stmt); | |
1017 release_defs (stmt); | |
1018 } | |
1076 new_stmt = last; | 1019 new_stmt = last; |
1077 } | 1020 } |
1078 else | 1021 else |
1079 { | 1022 { |
1080 if (last) | 1023 if (last) |
1081 { | 1024 { |
1082 gsi_insert_before (si_p, last, GSI_NEW_STMT); | 1025 gsi_insert_before (si_p, last, GSI_NEW_STMT); |
1083 gsi_next (si_p); | 1026 gsi_next (si_p); |
1084 } | 1027 } |
1028 if (laststore && is_gimple_reg (lhs)) | |
1029 { | |
1030 gimple_set_vdef (laststore, gimple_vdef (stmt)); | |
1031 update_stmt (laststore); | |
1032 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME) | |
1033 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore; | |
1034 laststore = NULL; | |
1035 } | |
1036 else if (laststore) | |
1037 { | |
1038 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore); | |
1039 gimple_set_vdef (laststore, reaching_vuse); | |
1040 update_stmt (laststore); | |
1041 laststore = NULL; | |
1042 } | |
1085 new_stmt = gimple_build_assign (lhs, tmp); | 1043 new_stmt = gimple_build_assign (lhs, tmp); |
1086 gimple_set_vuse (new_stmt, gimple_vuse (stmt)); | 1044 if (!is_gimple_reg (tmp)) |
1087 gimple_set_vdef (new_stmt, gimple_vdef (stmt)); | 1045 gimple_set_vuse (new_stmt, reaching_vuse); |
1088 move_ssa_defining_stmt_for_defs (new_stmt, stmt); | 1046 if (!is_gimple_reg (lhs)) |
1047 { | |
1048 gimple_set_vdef (new_stmt, gimple_vdef (stmt)); | |
1049 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME) | |
1050 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt; | |
1051 } | |
1052 else if (reaching_vuse == gimple_vuse (stmt)) | |
1053 unlink_stmt_vdef (stmt); | |
1089 } | 1054 } |
1090 | 1055 |
1091 gimple_set_location (new_stmt, gimple_location (stmt)); | 1056 gimple_set_location (new_stmt, gimple_location (stmt)); |
1092 gsi_replace (si_p, new_stmt, false); | 1057 gsi_replace (si_p, new_stmt, false); |
1093 } | 1058 } |
1155 *length = val; | 1120 *length = val; |
1156 return true; | 1121 return true; |
1157 } | 1122 } |
1158 | 1123 |
1159 /* If we were already here, break the infinite cycle. */ | 1124 /* If we were already here, break the infinite cycle. */ |
1160 if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg))) | 1125 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg))) |
1161 return true; | 1126 return true; |
1162 bitmap_set_bit (visited, SSA_NAME_VERSION (arg)); | |
1163 | 1127 |
1164 var = arg; | 1128 var = arg; |
1165 def_stmt = SSA_NAME_DEF_STMT (var); | 1129 def_stmt = SSA_NAME_DEF_STMT (var); |
1166 | 1130 |
1167 switch (gimple_code (def_stmt)) | 1131 switch (gimple_code (def_stmt)) |
1310 { | 1274 { |
1311 tree new_val = | 1275 tree new_val = |
1312 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]); | 1276 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]); |
1313 | 1277 |
1314 /* If the result is not a valid gimple value, or not a cast | 1278 /* If the result is not a valid gimple value, or not a cast |
1315 of a valid gimple value, then we can not use the result. */ | 1279 of a valid gimple value, then we cannot use the result. */ |
1316 if (is_gimple_val (new_val) | 1280 if (is_gimple_val (new_val) |
1317 || (is_gimple_cast (new_val) | 1281 || (CONVERT_EXPR_P (new_val) |
1318 && is_gimple_val (TREE_OPERAND (new_val, 0)))) | 1282 && is_gimple_val (TREE_OPERAND (new_val, 0)))) |
1319 return new_val; | 1283 return new_val; |
1320 } | 1284 } |
1321 break; | 1285 break; |
1322 | 1286 |
1399 if (result && ignore) | 1363 if (result && ignore) |
1400 result = fold_ignored_result (result); | 1364 result = fold_ignored_result (result); |
1401 return result; | 1365 return result; |
1402 } | 1366 } |
1403 | 1367 |
1404 /* Search for a base binfo of BINFO that corresponds to TYPE and return it if | 1368 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN |
1405 it is found or NULL_TREE if it is not. */ | 1369 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression. |
1406 | 1370 KNOWN_BINFO carries the binfo describing the true type of |
1407 static tree | 1371 OBJ_TYPE_REF_OBJECT(REF). If a call to the function must be accompanied |
1408 get_base_binfo_for_type (tree binfo, tree type) | 1372 with a this adjustment, the constant which should be added to this pointer |
1409 { | 1373 is stored to *DELTA. If REFUSE_THUNKS is true, return NULL if the function |
1410 int i; | 1374 is a thunk (other than a this adjustment which is dealt with by DELTA). */ |
1411 tree base_binfo; | |
1412 tree res = NULL_TREE; | |
1413 | |
1414 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++) | |
1415 if (TREE_TYPE (base_binfo) == type) | |
1416 { | |
1417 gcc_assert (!res); | |
1418 res = base_binfo; | |
1419 } | |
1420 | |
1421 return res; | |
1422 } | |
1423 | |
1424 /* Return a binfo describing the part of object referenced by expression REF. | |
1425 Return NULL_TREE if it cannot be determined. REF can consist of a series of | |
1426 COMPONENT_REFs of a declaration or of an INDIRECT_REF or it can also be just | |
1427 a simple declaration, indirect reference or an SSA_NAME. If the function | |
1428 discovers an INDIRECT_REF or an SSA_NAME, it will assume that the | |
1429 encapsulating type is described by KNOWN_BINFO, if it is not NULL_TREE. | |
1430 Otherwise the first non-artificial field declaration or the base declaration | |
1431 will be examined to get the encapsulating type. */ | |
1432 | 1375 |
1433 tree | 1376 tree |
1434 gimple_get_relevant_ref_binfo (tree ref, tree known_binfo) | 1377 gimple_get_virt_mehtod_for_binfo (HOST_WIDE_INT token, tree known_binfo, |
1435 { | 1378 tree *delta, bool refuse_thunks) |
1436 while (true) | |
1437 { | |
1438 if (TREE_CODE (ref) == COMPONENT_REF) | |
1439 { | |
1440 tree par_type; | |
1441 tree binfo, base_binfo; | |
1442 tree field = TREE_OPERAND (ref, 1); | |
1443 | |
1444 if (!DECL_ARTIFICIAL (field)) | |
1445 { | |
1446 tree type = TREE_TYPE (field); | |
1447 if (TREE_CODE (type) == RECORD_TYPE) | |
1448 return TYPE_BINFO (type); | |
1449 else | |
1450 return NULL_TREE; | |
1451 } | |
1452 | |
1453 par_type = TREE_TYPE (TREE_OPERAND (ref, 0)); | |
1454 binfo = TYPE_BINFO (par_type); | |
1455 if (!binfo | |
1456 || BINFO_N_BASE_BINFOS (binfo) == 0) | |
1457 return NULL_TREE; | |
1458 | |
1459 base_binfo = BINFO_BASE_BINFO (binfo, 0); | |
1460 if (BINFO_TYPE (base_binfo) != TREE_TYPE (field)) | |
1461 { | |
1462 tree d_binfo; | |
1463 | |
1464 d_binfo = gimple_get_relevant_ref_binfo (TREE_OPERAND (ref, 0), | |
1465 known_binfo); | |
1466 /* Get descendant binfo. */ | |
1467 if (!d_binfo) | |
1468 return NULL_TREE; | |
1469 return get_base_binfo_for_type (d_binfo, TREE_TYPE (field)); | |
1470 } | |
1471 | |
1472 ref = TREE_OPERAND (ref, 0); | |
1473 } | |
1474 else if (DECL_P (ref) && TREE_CODE (TREE_TYPE (ref)) == RECORD_TYPE) | |
1475 return TYPE_BINFO (TREE_TYPE (ref)); | |
1476 else if (known_binfo | |
1477 && (TREE_CODE (ref) == SSA_NAME | |
1478 || TREE_CODE (ref) == INDIRECT_REF)) | |
1479 return known_binfo; | |
1480 else | |
1481 return NULL_TREE; | |
1482 } | |
1483 } | |
1484 | |
1485 /* Fold a OBJ_TYPE_REF expression to the address of a function. TOKEN is | |
1486 integer form of OBJ_TYPE_REF_TOKEN of the reference expression. KNOWN_BINFO | |
1487 carries the binfo describing the true type of OBJ_TYPE_REF_OBJECT(REF). */ | |
1488 | |
1489 tree | |
1490 gimple_fold_obj_type_ref_known_binfo (HOST_WIDE_INT token, tree known_binfo) | |
1491 { | 1379 { |
1492 HOST_WIDE_INT i; | 1380 HOST_WIDE_INT i; |
1493 tree v, fndecl; | 1381 tree v, fndecl; |
1382 struct cgraph_node *node; | |
1494 | 1383 |
1495 v = BINFO_VIRTUALS (known_binfo); | 1384 v = BINFO_VIRTUALS (known_binfo); |
1385 /* If there is no virtual methods leave the OBJ_TYPE_REF alone. */ | |
1386 if (!v) | |
1387 return NULL_TREE; | |
1496 i = 0; | 1388 i = 0; |
1497 while (i != token) | 1389 while (i != token) |
1498 { | 1390 { |
1499 i += (TARGET_VTABLE_USES_DESCRIPTORS | 1391 i += (TARGET_VTABLE_USES_DESCRIPTORS |
1500 ? TARGET_VTABLE_USES_DESCRIPTORS : 1); | 1392 ? TARGET_VTABLE_USES_DESCRIPTORS : 1); |
1501 v = TREE_CHAIN (v); | 1393 v = TREE_CHAIN (v); |
1502 } | 1394 } |
1503 | 1395 |
1504 fndecl = TREE_VALUE (v); | 1396 fndecl = TREE_VALUE (v); |
1505 return build_fold_addr_expr (fndecl); | 1397 node = cgraph_get_node_or_alias (fndecl); |
1506 } | 1398 if (refuse_thunks |
1507 | 1399 && (!node |
1508 | 1400 /* Bail out if it is a thunk declaration. Since simple this_adjusting |
1509 /* Fold a OBJ_TYPE_REF expression to the address of a function. If KNOWN_TYPE | 1401 thunks are represented by a constant in TREE_PURPOSE of items in |
1510 is not NULL_TREE, it is the true type of the outmost encapsulating object if | 1402 BINFO_VIRTUALS, this is a more complicate type which we cannot handle as |
1511 that comes from a pointer SSA_NAME. If the true outmost encapsulating type | 1403 yet. |
1512 can be determined from a declaration OBJ_TYPE_REF_OBJECT(REF), it is used | 1404 |
1513 regardless of KNOWN_TYPE (which thus can be NULL_TREE). */ | 1405 FIXME: Remove the following condition once we are able to represent |
1514 | 1406 thunk information on call graph edges. */ |
1515 tree | 1407 || (node->same_body_alias && node->thunk.thunk_p))) |
1516 gimple_fold_obj_type_ref (tree ref, tree known_type) | |
1517 { | |
1518 tree obj = OBJ_TYPE_REF_OBJECT (ref); | |
1519 tree known_binfo = known_type ? TYPE_BINFO (known_type) : NULL_TREE; | |
1520 tree binfo; | |
1521 | |
1522 if (TREE_CODE (obj) == ADDR_EXPR) | |
1523 obj = TREE_OPERAND (obj, 0); | |
1524 | |
1525 binfo = gimple_get_relevant_ref_binfo (obj, known_binfo); | |
1526 if (binfo) | |
1527 { | |
1528 HOST_WIDE_INT token = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1); | |
1529 return gimple_fold_obj_type_ref_known_binfo (token, binfo); | |
1530 } | |
1531 else | |
1532 return NULL_TREE; | 1408 return NULL_TREE; |
1409 | |
1410 /* When cgraph node is missing and function is not public, we cannot | |
1411 devirtualize. This can happen in WHOPR when the actual method | |
1412 ends up in other partition, because we found devirtualization | |
1413 possibility too late. */ | |
1414 if (!can_refer_decl_in_current_unit_p (TREE_VALUE (v))) | |
1415 return NULL_TREE; | |
1416 | |
1417 *delta = TREE_PURPOSE (v); | |
1418 gcc_checking_assert (host_integerp (*delta, 0)); | |
1419 return fndecl; | |
1420 } | |
1421 | |
1422 /* Generate code adjusting the first parameter of a call statement determined | |
1423 by GSI by DELTA. */ | |
1424 | |
1425 void | |
1426 gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta) | |
1427 { | |
1428 gimple call_stmt = gsi_stmt (*gsi); | |
1429 tree parm, tmp; | |
1430 gimple new_stmt; | |
1431 | |
1432 delta = fold_convert (sizetype, delta); | |
1433 gcc_assert (gimple_call_num_args (call_stmt) >= 1); | |
1434 parm = gimple_call_arg (call_stmt, 0); | |
1435 gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm))); | |
1436 tmp = create_tmp_var (TREE_TYPE (parm), NULL); | |
1437 add_referenced_var (tmp); | |
1438 | |
1439 tmp = make_ssa_name (tmp, NULL); | |
1440 new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta); | |
1441 SSA_NAME_DEF_STMT (tmp) = new_stmt; | |
1442 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT); | |
1443 gimple_call_set_arg (call_stmt, 0, tmp); | |
1533 } | 1444 } |
1534 | 1445 |
1535 /* Attempt to fold a call statement referenced by the statement iterator GSI. | 1446 /* Attempt to fold a call statement referenced by the statement iterator GSI. |
1536 The statement may be replaced by another statement, e.g., if the call | 1447 The statement may be replaced by another statement, e.g., if the call |
1537 simplifies to a constant value. Return true if any changes were made. | 1448 simplifies to a constant value. Return true if any changes were made. |
1538 It is assumed that the operands have been previously folded. */ | 1449 It is assumed that the operands have been previously folded. */ |
1539 | 1450 |
1540 static bool | 1451 bool |
1541 fold_gimple_call (gimple_stmt_iterator *gsi) | 1452 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace) |
1542 { | 1453 { |
1543 gimple stmt = gsi_stmt (*gsi); | 1454 gimple stmt = gsi_stmt (*gsi); |
1544 | 1455 |
1545 tree callee = gimple_call_fndecl (stmt); | 1456 tree callee = gimple_call_fndecl (stmt); |
1546 | 1457 |
1547 /* Check for builtins that CCP can handle using information not | 1458 /* Check for builtins that CCP can handle using information not |
1548 available in the generic fold routines. */ | 1459 available in the generic fold routines. */ |
1549 if (callee && DECL_BUILT_IN (callee)) | 1460 if (!inplace && callee && DECL_BUILT_IN (callee)) |
1550 { | 1461 { |
1551 tree result = gimple_fold_builtin (stmt); | 1462 tree result = gimple_fold_builtin (stmt); |
1552 | 1463 |
1553 if (result) | 1464 if (result) |
1554 { | 1465 { |
1555 if (!update_call_from_tree (gsi, result)) | 1466 if (!update_call_from_tree (gsi, result)) |
1556 gimplify_and_update_call_from_tree (gsi, result); | 1467 gimplify_and_update_call_from_tree (gsi, result); |
1557 return true; | 1468 return true; |
1558 } | 1469 } |
1559 } | 1470 } |
1560 else | |
1561 { | |
1562 /* ??? Should perhaps do this in fold proper. However, doing it | |
1563 there requires that we create a new CALL_EXPR, and that requires | |
1564 copying EH region info to the new node. Easier to just do it | |
1565 here where we can just smash the call operand. */ | |
1566 /* ??? Is there a good reason not to do this in fold_stmt_inplace? */ | |
1567 callee = gimple_call_fn (stmt); | |
1568 if (TREE_CODE (callee) == OBJ_TYPE_REF | |
1569 && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR) | |
1570 { | |
1571 tree t; | |
1572 | |
1573 t = gimple_fold_obj_type_ref (callee, NULL_TREE); | |
1574 if (t) | |
1575 { | |
1576 gimple_call_set_fn (stmt, t); | |
1577 return true; | |
1578 } | |
1579 } | |
1580 } | |
1581 | |
1582 return false; | 1471 return false; |
1583 } | 1472 } |
1584 | 1473 |
1585 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument | 1474 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument |
1586 distinguishes both cases. */ | 1475 distinguishes both cases. */ |
1628 { | 1517 { |
1629 gimple_call_set_arg (stmt, i, tmp); | 1518 gimple_call_set_arg (stmt, i, tmp); |
1630 changed = true; | 1519 changed = true; |
1631 } | 1520 } |
1632 } | 1521 } |
1633 /* The entire statement may be replaced in this case. */ | 1522 changed |= gimple_fold_call (gsi, inplace); |
1634 if (!inplace) | |
1635 changed |= fold_gimple_call (gsi); | |
1636 break; | 1523 break; |
1637 | 1524 |
1638 case GIMPLE_ASM: | 1525 case GIMPLE_ASM: |
1639 /* Fold *& in asm operands. */ | 1526 /* Fold *& in asm operands. */ |
1640 for (i = 0; i < gimple_asm_noutputs (stmt); ++i) | 1527 for (i = 0; i < gimple_asm_noutputs (stmt); ++i) |
1655 if (REFERENCE_CLASS_P (op) | 1542 if (REFERENCE_CLASS_P (op) |
1656 && (op = maybe_fold_reference (op, false)) != NULL_TREE) | 1543 && (op = maybe_fold_reference (op, false)) != NULL_TREE) |
1657 { | 1544 { |
1658 TREE_VALUE (link) = op; | 1545 TREE_VALUE (link) = op; |
1659 changed = true; | 1546 changed = true; |
1547 } | |
1548 } | |
1549 break; | |
1550 | |
1551 case GIMPLE_DEBUG: | |
1552 if (gimple_debug_bind_p (stmt)) | |
1553 { | |
1554 tree val = gimple_debug_bind_get_value (stmt); | |
1555 if (val | |
1556 && REFERENCE_CLASS_P (val)) | |
1557 { | |
1558 tree tem = maybe_fold_reference (val, false); | |
1559 if (tem) | |
1560 { | |
1561 gimple_debug_bind_set_value (stmt, tem); | |
1562 changed = true; | |
1563 } | |
1660 } | 1564 } |
1661 } | 1565 } |
1662 break; | 1566 break; |
1663 | 1567 |
1664 default:; | 1568 default:; |
1712 bool changed = fold_stmt_1 (&gsi, true); | 1616 bool changed = fold_stmt_1 (&gsi, true); |
1713 gcc_assert (gsi_stmt (gsi) == stmt); | 1617 gcc_assert (gsi_stmt (gsi) == stmt); |
1714 return changed; | 1618 return changed; |
1715 } | 1619 } |
1716 | 1620 |
1621 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE | |
1622 if EXPR is null or we don't know how. | |
1623 If non-null, the result always has boolean type. */ | |
1624 | |
1625 static tree | |
1626 canonicalize_bool (tree expr, bool invert) | |
1627 { | |
1628 if (!expr) | |
1629 return NULL_TREE; | |
1630 else if (invert) | |
1631 { | |
1632 if (integer_nonzerop (expr)) | |
1633 return boolean_false_node; | |
1634 else if (integer_zerop (expr)) | |
1635 return boolean_true_node; | |
1636 else if (TREE_CODE (expr) == SSA_NAME) | |
1637 return fold_build2 (EQ_EXPR, boolean_type_node, expr, | |
1638 build_int_cst (TREE_TYPE (expr), 0)); | |
1639 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison) | |
1640 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false), | |
1641 boolean_type_node, | |
1642 TREE_OPERAND (expr, 0), | |
1643 TREE_OPERAND (expr, 1)); | |
1644 else | |
1645 return NULL_TREE; | |
1646 } | |
1647 else | |
1648 { | |
1649 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE) | |
1650 return expr; | |
1651 if (integer_nonzerop (expr)) | |
1652 return boolean_true_node; | |
1653 else if (integer_zerop (expr)) | |
1654 return boolean_false_node; | |
1655 else if (TREE_CODE (expr) == SSA_NAME) | |
1656 return fold_build2 (NE_EXPR, boolean_type_node, expr, | |
1657 build_int_cst (TREE_TYPE (expr), 0)); | |
1658 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison) | |
1659 return fold_build2 (TREE_CODE (expr), | |
1660 boolean_type_node, | |
1661 TREE_OPERAND (expr, 0), | |
1662 TREE_OPERAND (expr, 1)); | |
1663 else | |
1664 return NULL_TREE; | |
1665 } | |
1666 } | |
1667 | |
1668 /* Check to see if a boolean expression EXPR is logically equivalent to the | |
1669 comparison (OP1 CODE OP2). Check for various identities involving | |
1670 SSA_NAMEs. */ | |
1671 | |
1672 static bool | |
1673 same_bool_comparison_p (const_tree expr, enum tree_code code, | |
1674 const_tree op1, const_tree op2) | |
1675 { | |
1676 gimple s; | |
1677 | |
1678 /* The obvious case. */ | |
1679 if (TREE_CODE (expr) == code | |
1680 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0) | |
1681 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0)) | |
1682 return true; | |
1683 | |
1684 /* Check for comparing (name, name != 0) and the case where expr | |
1685 is an SSA_NAME with a definition matching the comparison. */ | |
1686 if (TREE_CODE (expr) == SSA_NAME | |
1687 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE) | |
1688 { | |
1689 if (operand_equal_p (expr, op1, 0)) | |
1690 return ((code == NE_EXPR && integer_zerop (op2)) | |
1691 || (code == EQ_EXPR && integer_nonzerop (op2))); | |
1692 s = SSA_NAME_DEF_STMT (expr); | |
1693 if (is_gimple_assign (s) | |
1694 && gimple_assign_rhs_code (s) == code | |
1695 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0) | |
1696 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0)) | |
1697 return true; | |
1698 } | |
1699 | |
1700 /* If op1 is of the form (name != 0) or (name == 0), and the definition | |
1701 of name is a comparison, recurse. */ | |
1702 if (TREE_CODE (op1) == SSA_NAME | |
1703 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE) | |
1704 { | |
1705 s = SSA_NAME_DEF_STMT (op1); | |
1706 if (is_gimple_assign (s) | |
1707 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison) | |
1708 { | |
1709 enum tree_code c = gimple_assign_rhs_code (s); | |
1710 if ((c == NE_EXPR && integer_zerop (op2)) | |
1711 || (c == EQ_EXPR && integer_nonzerop (op2))) | |
1712 return same_bool_comparison_p (expr, c, | |
1713 gimple_assign_rhs1 (s), | |
1714 gimple_assign_rhs2 (s)); | |
1715 if ((c == EQ_EXPR && integer_zerop (op2)) | |
1716 || (c == NE_EXPR && integer_nonzerop (op2))) | |
1717 return same_bool_comparison_p (expr, | |
1718 invert_tree_comparison (c, false), | |
1719 gimple_assign_rhs1 (s), | |
1720 gimple_assign_rhs2 (s)); | |
1721 } | |
1722 } | |
1723 return false; | |
1724 } | |
1725 | |
1726 /* Check to see if two boolean expressions OP1 and OP2 are logically | |
1727 equivalent. */ | |
1728 | |
1729 static bool | |
1730 same_bool_result_p (const_tree op1, const_tree op2) | |
1731 { | |
1732 /* Simple cases first. */ | |
1733 if (operand_equal_p (op1, op2, 0)) | |
1734 return true; | |
1735 | |
1736 /* Check the cases where at least one of the operands is a comparison. | |
1737 These are a bit smarter than operand_equal_p in that they apply some | |
1738 identifies on SSA_NAMEs. */ | |
1739 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison | |
1740 && same_bool_comparison_p (op1, TREE_CODE (op2), | |
1741 TREE_OPERAND (op2, 0), | |
1742 TREE_OPERAND (op2, 1))) | |
1743 return true; | |
1744 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison | |
1745 && same_bool_comparison_p (op2, TREE_CODE (op1), | |
1746 TREE_OPERAND (op1, 0), | |
1747 TREE_OPERAND (op1, 1))) | |
1748 return true; | |
1749 | |
1750 /* Default case. */ | |
1751 return false; | |
1752 } | |
1753 | |
1754 /* Forward declarations for some mutually recursive functions. */ | |
1755 | |
1756 static tree | |
1757 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b, | |
1758 enum tree_code code2, tree op2a, tree op2b); | |
1759 static tree | |
1760 and_var_with_comparison (tree var, bool invert, | |
1761 enum tree_code code2, tree op2a, tree op2b); | |
1762 static tree | |
1763 and_var_with_comparison_1 (gimple stmt, | |
1764 enum tree_code code2, tree op2a, tree op2b); | |
1765 static tree | |
1766 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b, | |
1767 enum tree_code code2, tree op2a, tree op2b); | |
1768 static tree | |
1769 or_var_with_comparison (tree var, bool invert, | |
1770 enum tree_code code2, tree op2a, tree op2b); | |
1771 static tree | |
1772 or_var_with_comparison_1 (gimple stmt, | |
1773 enum tree_code code2, tree op2a, tree op2b); | |
1774 | |
1775 /* Helper function for and_comparisons_1: try to simplify the AND of the | |
1776 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B). | |
1777 If INVERT is true, invert the value of the VAR before doing the AND. | |
1778 Return NULL_EXPR if we can't simplify this to a single expression. */ | |
1779 | |
1780 static tree | |
1781 and_var_with_comparison (tree var, bool invert, | |
1782 enum tree_code code2, tree op2a, tree op2b) | |
1783 { | |
1784 tree t; | |
1785 gimple stmt = SSA_NAME_DEF_STMT (var); | |
1786 | |
1787 /* We can only deal with variables whose definitions are assignments. */ | |
1788 if (!is_gimple_assign (stmt)) | |
1789 return NULL_TREE; | |
1790 | |
1791 /* If we have an inverted comparison, apply DeMorgan's law and rewrite | |
1792 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b)) | |
1793 Then we only have to consider the simpler non-inverted cases. */ | |
1794 if (invert) | |
1795 t = or_var_with_comparison_1 (stmt, | |
1796 invert_tree_comparison (code2, false), | |
1797 op2a, op2b); | |
1798 else | |
1799 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b); | |
1800 return canonicalize_bool (t, invert); | |
1801 } | |
1802 | |
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
                           enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  /* When (op2a code2 op2b) is equivalent to testing op2a for truth,
     TRUE_TEST_VAR is set to op2a; when it tests op2a for falsehood,
     FALSE_TEST_VAR is set instead.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
          || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
        {
          true_test_var = op2a;
          if (var == true_test_var)
            return var;
        }
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
               || (code2 == NE_EXPR && integer_nonzerop (op2b)))
        {
          false_test_var = op2a;
          if (var == false_test_var)
            return boolean_false_node;
        }
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt),
                                  code2,
                                  op2a,
                                  op2b);
      if (t)
        return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (innercode == TRUTH_AND_EXPR
      || innercode == TRUTH_OR_EXPR
      || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
          && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == TRUTH_AND_EXPR || innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
         of inner1/inner2:
         inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
         inner1 AND (inner1 OR inner2) => inner1
         !inner1 AND (inner1 AND inner2) => false
         !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
        return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
        return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
         Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (t AND inner2)
             If the partial result t is a constant, we win.  Otherwise
             continue on to try reassociating with the other inner test.  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner2;
              else if (integer_zerop (t))
                return boolean_false_node;
            }

          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner2 AND (op2a code2 op2b)))  */
          else if (integer_onep (t))
            return boolean_true_node;

          /* Save partial result for later.  */
          partial = t;
        }

      /* Compute the second partial result, (inner2 AND (op2a code op2b))  */
      if (TREE_CODE (inner2) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (inner1 AND t)  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner1;
              else if (integer_zerop (t))
                return boolean_false_node;
              /* If both are the same, we can apply the identity
                 (x AND x) == x.  */
              else if (partial && same_bool_result_p (t, partial))
                return t;
            }

          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner1 AND (op2a code2 op2b)))
             => (t OR partial)  */
          else
            {
              if (integer_onep (t))
                return boolean_true_node;
              else if (partial)
                {
                  /* We already got a simplification for the other
                     operand to the redistributed OR expression.  The
                     interesting case is when at least one is false.
                     Or, if both are the same, we can apply the identity
                     (x OR x) == x.  */
                  if (integer_zerop (partial))
                    return t;
                  else if (integer_zerop (t))
                    return partial;
                  else if (same_bool_result_p (t, partial))
                    return t;
                }
            }
        }
    }
  return NULL_TREE;
}
1967 | |
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                   enum tree_code code2, tree op2a, tree op2b)
{
  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ANDIF_EXPR, code1, code2,
                                    boolean_type_node, op1a, op1b);
      if (t)
        return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ANDIF_EXPR, code1,
                                    swap_tree_comparison (code2),
                                    boolean_type_node, op1a, op1b);
      if (t)
        return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* CMP is negative, zero, or positive as OP1B is less than, equal
         to, or greater than OP2B.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
         return that or FALSE, depending on whether the constant op1b
         also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
        {
          bool done = true;
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return fold_build2 (code1, boolean_type_node, op1a, op1b);
              else
                return boolean_false_node;
            }
        }
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
        {
          bool done = true;
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return fold_build2 (code2, boolean_type_node, op2a, op2b);
              else
                return boolean_false_node;
            }
        }

      /* Same business with inequality tests.  Here VAL means the !=
         test is implied by the other comparison, which is then the
         whole answer; otherwise fall through with no simplification.  */
      else if (code1 == NE_EXPR)
        {
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp != 0); break;
            case NE_EXPR: val = (cmp == 0); break;
            case LT_EXPR: val = (cmp >= 0); break;
            case GT_EXPR: val = (cmp <= 0); break;
            case LE_EXPR: val = (cmp > 0); break;
            case GE_EXPR: val = (cmp < 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }
      else if (code2 == NE_EXPR)
        {
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp <= 0); break;
            case GT_EXPR: val = (cmp >= 0); break;
            case LE_EXPR: val = (cmp < 0); break;
            case GE_EXPR: val = (cmp > 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        {
          if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
          else
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        {
          if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
          else
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }

      /* Check for singleton ranges: (x <= c AND x >= c) => x == c.  */
      else if (cmp == 0
               && ((code1 == LE_EXPR && code2 == GE_EXPR)
                   || (code1 == GE_EXPR && code2 == LE_EXPR)))
        return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
               && (code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        return boolean_false_node;
      else if (cmp >= 0
               && (code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first comparison tests NAME for falsehood
         rather than truth.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
                     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
        {
        case GIMPLE_ASSIGN:
          /* Try to simplify by copy-propagating the definition.  */
          return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

        case GIMPLE_PHI:
          /* If every argument to the PHI produces the same result when
             ANDed with the second comparison, we win.
             Do not do this unless the type is bool since we need a bool
             result here anyway.  */
          if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
            {
              tree result = NULL_TREE;
              unsigned i;
              for (i = 0; i < gimple_phi_num_args (stmt); i++)
                {
                  tree arg = gimple_phi_arg_def (stmt, i);

                  /* If this PHI has itself as an argument, ignore it.
                     If all the other args produce the same result,
                     we're still OK.  */
                  if (arg == gimple_phi_result (stmt))
                    continue;
                  else if (TREE_CODE (arg) == INTEGER_CST)
                    {
                      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
                        {
                          if (!result)
                            result = boolean_false_node;
                          else if (!integer_zerop (result))
                            return NULL_TREE;
                        }
                      else if (!result)
                        result = fold_build2 (code2, boolean_type_node,
                                              op2a, op2b);
                      else if (!same_bool_comparison_p (result,
                                                        code2, op2a, op2b))
                        return NULL_TREE;
                    }
                  else if (TREE_CODE (arg) == SSA_NAME)
                    {
                      tree temp = and_var_with_comparison (arg, invert,
                                                           code2, op2a, op2b);
                      if (!temp)
                        return NULL_TREE;
                      else if (!result)
                        result = temp;
                      else if (!same_bool_result_p (result, temp))
                        return NULL_TREE;
                    }
                  else
                    return NULL_TREE;
                }
              return result;
            }

        default:
          break;
        }
    }
  return NULL_TREE;
}
2205 | |
2206 /* Try to simplify the AND of two comparisons, specified by | |
2207 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively. | |
2208 If this can be simplified to a single expression (without requiring | |
2209 introducing more SSA variables to hold intermediate values), | |
2210 return the resulting tree. Otherwise return NULL_TREE. | |
2211 If the result expression is non-null, it has boolean type. */ | |
2212 | |
2213 tree | |
2214 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b, | |
2215 enum tree_code code2, tree op2a, tree op2b) | |
2216 { | |
2217 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b); | |
2218 if (t) | |
2219 return t; | |
2220 else | |
2221 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b); | |
2222 } | |
2223 | |
2224 /* Helper function for or_comparisons_1: try to simplify the OR of the | |
2225 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B). | |
2226 If INVERT is true, invert the value of VAR before doing the OR. | |
2227 Return NULL_EXPR if we can't simplify this to a single expression. */ | |
2228 | |
2229 static tree | |
2230 or_var_with_comparison (tree var, bool invert, | |
2231 enum tree_code code2, tree op2a, tree op2b) | |
2232 { | |
2233 tree t; | |
2234 gimple stmt = SSA_NAME_DEF_STMT (var); | |
2235 | |
2236 /* We can only deal with variables whose definitions are assignments. */ | |
2237 if (!is_gimple_assign (stmt)) | |
2238 return NULL_TREE; | |
2239 | |
2240 /* If we have an inverted comparison, apply DeMorgan's law and rewrite | |
2241 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b)) | |
2242 Then we only have to consider the simpler non-inverted cases. */ | |
2243 if (invert) | |
2244 t = and_var_with_comparison_1 (stmt, | |
2245 invert_tree_comparison (code2, false), | |
2246 op2a, op2b); | |
2247 else | |
2248 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b); | |
2249 return canonicalize_bool (t, invert); | |
2250 } | |
2251 | |
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple stmt,
                          enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  /* When (op2a code2 op2b) is equivalent to testing op2a for truth,
     TRUE_TEST_VAR is set to op2a; when it tests op2a for falsehood,
     FALSE_TEST_VAR is set instead.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
          || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
        {
          true_test_var = op2a;
          if (var == true_test_var)
            return var;
        }
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
               || (code2 == NE_EXPR && integer_nonzerop (op2b)))
        {
          false_test_var = op2a;
          if (var == false_test_var)
            return boolean_true_node;
        }
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
                                 gimple_assign_rhs1 (stmt),
                                 gimple_assign_rhs2 (stmt),
                                 code2,
                                 op2a,
                                 op2b);
      if (t)
        return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (innercode == TRUTH_AND_EXPR
      || innercode == TRUTH_OR_EXPR
      || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
          && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == TRUTH_OR_EXPR || innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
         of inner1/inner2:
         inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
         inner1 OR (inner1 AND inner2) => inner1
         !inner1 OR (inner1 OR inner2) => true
         !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
        return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
        return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
        return (is_or
                ? boolean_true_node
                : or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
        return (is_or
                ? boolean_true_node
                : or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
         Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
                                             gimple_assign_rhs1 (s),
                                             gimple_assign_rhs2 (s),
                                             code2, op2a, op2b)))
        {
          /* Handle the OR case, where we are reassociating:
             (inner1 OR inner2) OR (op2a code2 op2b)
             => (t OR inner2)
             If the partial result t is a constant, we win.  Otherwise
             continue on to try reassociating with the other inner test.  */
          if (is_or)
            {
              if (integer_onep (t))
                return boolean_true_node;
              else if (integer_zerop (t))
                return inner2;
            }

          /* Handle the AND case, where we are redistributing:
             (inner1 AND inner2) OR (op2a code2 op2b)
             => (t AND (inner2 OR (op2a code op2b)))  */
          else if (integer_zerop (t))
            return boolean_false_node;

          /* Save partial result for later.  */
          partial = t;
        }

      /* Compute the second partial result, (inner2 OR (op2a code op2b))  */
      if (TREE_CODE (inner2) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
                                             gimple_assign_rhs1 (s),
                                             gimple_assign_rhs2 (s),
                                             code2, op2a, op2b)))
        {
          /* Handle the OR case, where we are reassociating:
             (inner1 OR inner2) OR (op2a code2 op2b)
             => (inner1 OR t)
             => (t OR partial)  */
          if (is_or)
            {
              if (integer_zerop (t))
                return inner1;
              else if (integer_onep (t))
                return boolean_true_node;
              /* If both are the same, we can apply the identity
                 (x OR x) == x.  */
              else if (partial && same_bool_result_p (t, partial))
                return t;
            }

          /* Handle the AND case, where we are redistributing:
             (inner1 AND inner2) OR (op2a code2 op2b)
             => (t AND (inner1 OR (op2a code2 op2b)))
             => (t AND partial)  */
          else
            {
              if (integer_zerop (t))
                return boolean_false_node;
              else if (partial)
                {
                  /* We already got a simplification for the other
                     operand to the redistributed AND expression.  The
                     interesting case is when at least one is true.
                     Or, if both are the same, we can apply the identity
                     (x AND x) == x.  */
                  if (integer_onep (partial))
                    return t;
                  else if (integer_onep (t))
                    return partial;
                  else if (same_bool_result_p (t, partial))
                    return t;
                }
            }
        }
    }
  return NULL_TREE;
}
2417 | |
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                  enum tree_code code2, tree op2a, tree op2b)
{
  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ORIF_EXPR, code1, code2,
                                    boolean_type_node, op1a, op1b);
      if (t)
        return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ORIF_EXPR, code1,
                                    swap_tree_comparison (code2),
                                    boolean_type_node, op1a, op1b);
      if (t)
        return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* CMP is negative, zero, or positive as OP1B is less than, equal
         to, or greater than OP2B.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
         return that or TRUE, depending on whether the constant op1b
         also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
        {
          bool done = true;
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return boolean_true_node;
              else
                return fold_build2 (code1, boolean_type_node, op1a, op1b);
            }
        }
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
        {
          bool done = true;
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return boolean_true_node;
              else
                return fold_build2 (code2, boolean_type_node, op2a, op2b);
            }
        }

      /* See if an equality test is redundant with the other comparison.
         VAL means the == test is implied by the other comparison, which
         then subsumes it; otherwise fall through with no simplification.  */
      else if (code1 == EQ_EXPR)
        {
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }
      else if (code2 == EQ_EXPR)
        {
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        {
          if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
          else
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        {
          if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
          else
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Check for singleton ranges: (x < c OR x > c) => x != c.  */
      else if (cmp == 0
               && ((code1 == LT_EXPR && code2 == GT_EXPR)
                   || (code1 == GT_EXPR && code2 == LT_EXPR)))
        return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
               && (code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        return boolean_true_node;
      else if (cmp <= 0
               && (code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first comparison tests NAME for falsehood
         rather than truth.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
                     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
        {
        case GIMPLE_ASSIGN:
          /* Try to simplify by copy-propagating the definition.  */
          return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

        case GIMPLE_PHI:
          /* If every argument to the PHI produces the same result when
             ORed with the second comparison, we win.
             Do not do this unless the type is bool since we need a bool
             result here anyway.  */
          if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
            {
              tree result = NULL_TREE;
              unsigned i;
              for (i = 0; i < gimple_phi_num_args (stmt); i++)
                {
                  tree arg = gimple_phi_arg_def (stmt, i);

                  /* If this PHI has itself as an argument, ignore it.
                     If all the other args produce the same result,
                     we're still OK.  */
                  if (arg == gimple_phi_result (stmt))
                    continue;
                  else if (TREE_CODE (arg) == INTEGER_CST)
                    {
                      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
                        {
                          if (!result)
                            result = boolean_true_node;
                          else if (!integer_onep (result))
                            return NULL_TREE;
                        }
                      else if (!result)
                        result = fold_build2 (code2, boolean_type_node,
                                              op2a, op2b);
                      else if (!same_bool_comparison_p (result,
                                                        code2, op2a, op2b))
                        return NULL_TREE;
                    }
                  else if (TREE_CODE (arg) == SSA_NAME)
                    {
                      tree temp = or_var_with_comparison (arg, invert,
                                                          code2, op2a, op2b);
                      if (!temp)
                        return NULL_TREE;
                      else if (!result)
                        result = temp;
                      else if (!same_bool_result_p (result, temp))
                        return NULL_TREE;
                    }
                  else
                    return NULL_TREE;
                }
              return result;
            }

        default:
          break;
        }
    }
  return NULL_TREE;
}
2655 | |
2656 /* Try to simplify the OR of two comparisons, specified by | |
2657 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively. | |
2658 If this can be simplified to a single expression (without requiring | |
2659 introducing more SSA variables to hold intermediate values), | |
2660 return the resulting tree. Otherwise return NULL_TREE. | |
2661 If the result expression is non-null, it has boolean type. */ | |
2662 | |
2663 tree | |
2664 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b, | |
2665 enum tree_code code2, tree op2a, tree op2b) | |
2666 { | |
2667 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b); | |
2668 if (t) | |
2669 return t; | |
2670 else | |
2671 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b); | |
2672 } |