/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

     UNINITIALIZED  ->  the initial state of the value.  This value
                        is replaced with a correct initial value
                        the first time the value is used, so the
                        rest of the pass does not need to care about
                        it.  Using this value simplifies initialization
                        of the pass, and prevents us from needlessly
                        scanning statements that are never reached.

     UNDEFINED      ->  V_i is a local variable whose definition
                        has not been processed yet.  Therefore we
                        don't yet know if its value is a constant
                        or not.

     CONSTANT       ->  V_i has been found to hold a constant
                        value C.

     VARYING        ->  V_i cannot take a constant value, or if it
                        does, it is not possible to determine it
                        at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.


   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
        is ignored.  This is useful in cases like this:

			if (PRED)
			  a_9 = 3;
			else
			  a_10 = 100;
			a_11 = PHI (a_9, a_10)

        If PRED is known to always evaluate to false, then we can
        assume that a_11 will always take its value from a_10, meaning
        that instead of considering it VARYING (a_9 and a_10 have
        different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
        the outcome of the meet operation.  If a variable V_i has an
        UNDEFINED value, it means that either its defining statement
        hasn't been visited yet or V_i has no defining statement, in
        which case the original symbol 'V' is being used
        uninitialized.  Since 'V' is a local variable, the compiler
        may assume any initial value for it.


   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.


   Constant propagation in stores and loads (STORE-CCP)
   ----------------------------------------------------

   While CCP has all the logic to propagate constants in GIMPLE
   registers, it is missing the ability to associate constants with
   stores and loads (i.e., pointer dereferences, structures and
   global/aliased variables).  We don't keep loads and stores in
   SSA, but we do build a factored use-def web for them (in the
   virtual operands).

   For instance, consider the following code fragment:

	  struct A a;
	  const int B = 42;

	  void foo (int i)
	  {
	    if (i > 10)
	      a.a = 42;
	    else
	      {
		a.b = 21;
		a.a = a.b + 21;
	      }

	    if (a.a != B)
	      never_executed ();
	  }

   We should be able to deduce that the predicate 'a.a != B' is always
   false.  To achieve this, we associate constant values to the SSA
   names in the VDEF operands for each store.  Additionally,
   since we also glob partial loads/stores with the base symbol, we
   also keep track of the memory reference where the constant value
   was stored (in the MEM_REF field of PROP_VALUE_T).  For instance,

	# a_5 = VDEF <a_4>
	a.a = 2;

	# VUSE <a_5>
	x_3 = a.b;

   In the example above, CCP will associate value '2' with 'a_5', but
   it would be wrong to replace the load from 'a.b' with '2', because
   '2' had been stored into a.a.

   Note that the initial value of virtual operands is VARYING, not
   UNDEFINED.  Consider, for instance global variables:

	int A;

	foo (int i)
	{
	  if (i_3 > 10)
	    A_4 = 3;
	  # A_5 = PHI (A_4, A_2);

	  # VUSE <A_5>
	  A.0_6 = A;

	  return A.0_6;
	}

   The value of A_2 cannot be assumed to be UNDEFINED, as it may have
   been defined outside of foo.  If we were to assume it UNDEFINED, we
   would erroneously optimize the above into 'return 3;'.

   Though STORE-CCP is not too expensive, it does have to do more work
   than regular CCP, so it is only enabled at -O2.  Both regular CCP
   and STORE-CCP use the exact same algorithm.  The only distinction
   is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
   set to true.  This affects the evaluation of statements and PHI
   nodes.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "ggc.h"
#include "basic-block.h"
#include "output.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "toplev.h"


/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;

static void canonicalize_float_value (prop_value_t *);

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.value, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}


/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}


/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */
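/* For example, for

     static const int answer = 42;

   the VAR_DECL for 'answer' satisfies TREE_STATIC and TREE_READONLY,
   and its DECL_INITIAL (the INTEGER_CST 42) is returned, which lets
   CCP seed the lattice with a CONSTANT value for SSA names based on
   'answer'.  */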

tree
get_symbol_constant_value (tree sym)
{
  if (TREE_STATIC (sym)
      && TREE_READONLY (sym)
      && !MTAG_P (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
	{
	  STRIP_USELESS_TYPE_CONVERSION (val);
	  if (is_gimple_min_invariant (val))
	    return val;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && !DECL_EXTERNAL (sym)
	  && targetm.binds_local_p (sym)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return fold_convert (TREE_TYPE (sym), integer_zero_node);
    }

  return NULL_TREE;
}

/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE };
  tree cst_val;

  if (!is_gimple_reg (var))
    {
      /* Short circuit for regular CCP.  We are not interested in any
	 non-register when DO_STORE_CCP is false.  */
      val.lattice_val = VARYING;
    }
  else if ((cst_val = get_symbol_constant_value (sym)) != NULL_TREE)
    {
      /* Globals and static variables declared 'const' take their
	 initial value.  */
      val.lattice_val = CONSTANT;
      val.value = cst_val;
    }
  else
    {
      gimple stmt = SSA_NAME_DEF_STMT (var);

      if (gimple_nop_p (stmt))
	{
	  /* Variables defined by an empty statement are those used
	     before being initialized.  If VAR is a local variable, we
	     can assume initially that it is UNDEFINED, otherwise we must
	     consider it VARYING.  */
	  if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
	    val.lattice_val = UNDEFINED;
	  else
	    val.lattice_val = VARYING;
	}
      else if (is_gimple_assign (stmt)
	       /* Value-returning GIMPLE_CALL statements assign to
		  a variable, and are treated similarly to GIMPLE_ASSIGN.  */
	       || (is_gimple_call (stmt)
		   && gimple_call_lhs (stmt) != NULL_TREE)
	       || gimple_code (stmt) == GIMPLE_PHI)
	{
	  /* Any other variable defined by an assignment or a PHI node
	     is considered UNDEFINED.  */
	  val.lattice_val = UNDEFINED;
	}
      else
	{
	  /* Otherwise, VAR will never take on a constant value.  */
	  val.lattice_val = VARYING;
	}
    }

  return val;
}


/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
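  /* Compute the default value lazily, the first time VAR is looked at.
     This keeps values in never-simulated blocks from ever being
     scanned; see the UNINITIALIZED description at the top of the
     file.  */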
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_float_value (val);

  return val;
}

/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
}

/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

     x = 0.0 * y

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}

/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  prop_value_t *old_val = get_value (var);

  canonicalize_float_value (&new_val);

  /* Lattice transitions must always be monotonically increasing in
     value.  If *OLD_VAL and NEW_VAL are the same, return false to
     inform the caller that this was a non-transition.  */

  gcc_assert (old_val->lattice_val < new_val.lattice_val
	      || (old_val->lattice_val == new_val.lattice_val
		  && ((!old_val->value && !new_val.value)
		      || operand_equal_p (old_val->value, new_val.value, 0))));

  if (old_val->lattice_val != new_val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

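      /* GET_VALUE never leaves *OLD_VAL at UNINITIALIZED, so a value
	 that just changed must have climbed above UNDEFINED; assert
	 that we never transition back down to it.  */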
      gcc_assert (new_val.lattice_val != UNDEFINED);
      return true;
    }

  return false;
}


/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
	      || code == GIMPLE_CALL
	      || code == GIMPLE_COND
	      || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* If we are not doing store-ccp, statements with loads
     and/or stores will never fold into a constant.  */
  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    return VARYING;

  /* Note that only a GIMPLE_SINGLE_RHS assignment can satisfy
     is_gimple_min_invariant, so we do not consider calls or
     other forms of assignment.  */
  if (gimple_assign_single_p (stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
    return CONSTANT;

  if (code == GIMPLE_COND
      && is_gimple_min_invariant (gimple_cond_lhs (stmt))
      && is_gimple_min_invariant (gimple_cond_rhs (stmt)))
    return CONSTANT;

  if (code == GIMPLE_SWITCH
      && is_gimple_min_invariant (gimple_switch_index (stmt)))
    return CONSTANT;

  /* Arrive here for more complex cases.  */

  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
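
  /* Scan both real and virtual uses, recording whether any operand is
     known to be CONSTANT and whether any (or all) of them are still
     UNDEFINED.  */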
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  if (has_constant_operand
      /* We do not consider virtual operands here -- load from read-only
	 memory may have only VARYING virtual operands, but still be
	 constant.  */
      || ZERO_SSA_OPERANDS (stmt, SSA_OP_USE))
    return CONSTANT;

  return VARYING;
}

/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    return true;

  /* If it is a call that does not return a value, or a direct call
     to a non-builtin function, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
	      && !DECL_BUILT_IN (fndecl)))
	return true;
    }

  /* Anything other than assignments and conditional jumps is not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}

/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  bool is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }
	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple phi = gsi_stmt (i);

	  if (!is_gimple_reg (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }
}


/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  /* Perform substitutions based on the known constant values.  */
  bool something_changed = substitute_and_fold (const_val, false);

  free (const_val);
  const_val = NULL;
  return something_changed;
}


/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj	   = Ci		if (i == j)
		Ci   M Cj	   = VARYING	if (i != j)
   */
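
/* UNDEFINED is the identity of the meet because the pass is optimistic:
   an undefined argument may legally be assumed to equal whatever the
   other arguments agree on.  For instance, meeting CONSTANT 3 with
   UNDEFINED yields CONSTANT 3, while meeting CONSTANT 3 with
   CONSTANT 4 yields VARYING.  */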

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 If these two values come from memory stores, make sure that
	 they come from the same memory reference.  VAL1 already holds
	 the common constant, so there is nothing to copy.  */
      val1->lattice_val = CONSTANT;
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
}


/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the
   arguments of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
		   "\n    Argument #%d (%d -> %d %sexecutable)\n",
		   i, e->src->index, e->dest->index,
		   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  prop_value_t arg_val;

	  if (is_gimple_min_invariant (arg))
	    {
	      arg_val.lattice_val = CONSTANT;
	      arg_val.value = arg;
	    }
	  else
	    arg_val = *(get_value (arg));

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

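	  /* Once the meet has dropped to VARYING it can never recover,
	     so there is no point in scanning the remaining arguments.  */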
	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}

/* Return true if we may propagate the address expression ADDR into the
   dereference DEREF and cancel them.  */

bool
may_propagate_address_into_dereference (tree addr, tree deref)
{
  gcc_assert (INDIRECT_REF_P (deref)
	      && TREE_CODE (addr) == ADDR_EXPR);

  /* Don't propagate if ADDR's operand has incomplete type.  */
  if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
    return false;

  /* If the address is invariant then we do not need to preserve restrict
     qualifications.  But we do need to preserve volatile qualifiers until
     we can annotate the folded dereference itself properly.  */
  if (is_gimple_min_invariant (addr)
      && (!TREE_THIS_VOLATILE (deref)
	  || TYPE_VOLATILE (TREE_TYPE (addr))))
    return useless_type_conversion_p (TREE_TYPE (deref),
				      TREE_TYPE (TREE_OPERAND (addr, 0)));

  /* Else both the address substitution and the folding must result in
     a valid useless type conversion sequence.  */
  return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
				     TREE_TYPE (addr))
	  && useless_type_conversion_p (TREE_TYPE (deref),
					TREE_TYPE (TREE_OPERAND (addr, 0))));
}

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant
		     value, if any.  */
		  return get_value (rhs)->value;
		}
	      /* Handle propagating invariant addresses into address
		 operations.  The folding we do here matches that in
		 tree-ssa-forwprop.c.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR)
		{
		  tree *base;
		  base = &TREE_OPERAND (rhs, 0);
		  while (handled_component_p (*base))
		    base = &TREE_OPERAND (*base, 0);
		  if (TREE_CODE (*base) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
		    {
		      prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
		      if (val->lattice_val == CONSTANT
			  && TREE_CODE (val->value) == ADDR_EXPR
			  && may_propagate_address_into_dereference
			       (val->value, *base))
			{
			  /* We need to return a new tree, not modify the IL
			     or share parts of it.  So play some tricks to
			     avoid manually building it.  */
			  tree ret, save = *base;
			  *base = TREE_OPERAND (val->value, 0);
			  ret = unshare_expr (rhs);
			  recompute_tree_invariant_for_addr_expr (ret);
			  *base = save;
			  return ret;
			}
		    }
		}

	      if (kind == tcc_reference)
		{
		  if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
		      if (val->lattice_val == CONSTANT)
			return fold_unary (VIEW_CONVERT_EXPR,
					   TREE_TYPE (rhs), val->value);
		    }
		  return fold_const_aggregate_ref (rhs);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    {
	      /* Handle unary operators that can appear in GIMPLE form.
		 Note that we know the single operand must be a constant,
		 so this should almost always return a simplified RHS.  */
	      tree lhs = gimple_assign_lhs (stmt);
	      tree op0 = gimple_assign_rhs1 (stmt);

	      /* Simplify the operand down to a constant.  */
	      if (TREE_CODE (op0) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op0);
		  if (val->lattice_val == CONSTANT)
		    op0 = val->value;
		}

	      /* Conversions are useless for CCP purposes if they are
		 value-preserving.  Thus the restrictions that
		 useless_type_conversion_p places for pointer type
		 conversions do not apply here.  Substitution later will
		 only substitute to allowed places.  */
	      if (CONVERT_EXPR_CODE_P (subcode)
		  && POINTER_TYPE_P (TREE_TYPE (lhs))
		  && POINTER_TYPE_P (TREE_TYPE (op0))
		  /* Do not allow differences in volatile qualification
		     as this might get us confused as to whether a
		     propagation destination statement is volatile
		     or not.  See PR36988.  */
		  && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs)))
		      == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0)))))
		{
		  tree tem;
		  /* Still try to generate a constant of correct type.  */
		  if (!useless_type_conversion_p (TREE_TYPE (lhs),
						  TREE_TYPE (op0))
		      && ((tem = maybe_fold_offset_to_address
				   (op0, integer_zero_node, TREE_TYPE (lhs)))
			  != NULL_TREE))
		    return tem;
		  return op0;
		}

	      return fold_unary_ignore_overflow (subcode,
						 gimple_expr_type (stmt), op0);
	    }

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = gimple_assign_rhs1 (stmt);
	      tree op1 = gimple_assign_rhs2 (stmt);

	      /* Simplify the operands down to constants when appropriate.  */
	      if (TREE_CODE (op0) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op0);
		  if (val->lattice_val == CONSTANT)
		    op0 = val->value;
		}

	      if (TREE_CODE (op1) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op1);
		  if (val->lattice_val == CONSTANT)
		    op1 = val->value;
		}

	      /* Fold &foo + CST into an invariant reference if possible.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree lhs = gimple_assign_lhs (stmt);
		  tree tem = maybe_fold_offset_to_address (op0, op1,
							   TREE_TYPE (lhs));
		  if (tem != NULL_TREE)
		    return tem;
		}

	      return fold_binary (subcode, gimple_expr_type (stmt), op0, op1);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }
      break;

    case GIMPLE_CALL:
      {
	tree fn = gimple_call_fn (stmt);
	prop_value_t *val;

	if (TREE_CODE (fn) == SSA_NAME)
	  {
	    val = get_value (fn);
	    if (val->lattice_val == CONSTANT)
	      fn = val->value;
	  }
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree call, retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      {
		args[i] = gimple_call_arg (stmt, i);
		if (TREE_CODE (args[i]) == SSA_NAME)
		  {
		    val = get_value (args[i]);
		    if (val->lattice_val == CONSTANT)
		      args[i] = val->value;
		  }
	      }
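	    /* Build a CALL_EXPR over the constant-substituted arguments
	       and let the generic folder try to evaluate the builtin.  */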
	    call = build_call_array (gimple_call_return_type (stmt),
				     fn, gimple_call_num_args (stmt), args);
	    retval = fold_call_expr (call, false);
	    if (retval)
	      /* fold_call_expr wraps the result inside a NOP_EXPR.  */
	      STRIP_NOPS (retval);
	    return retval;
	  }
	return NULL_TREE;
      }

    case GIMPLE_COND:
      {
	/* Handle comparison operators that can appear in GIMPLE form.  */
	tree op0 = gimple_cond_lhs (stmt);
	tree op1 = gimple_cond_rhs (stmt);
	enum tree_code code = gimple_cond_code (stmt);

	/* Simplify the operands down to constants when appropriate.  */
	if (TREE_CODE (op0) == SSA_NAME)
	  {
	    prop_value_t *val = get_value (op0);
	    if (val->lattice_val == CONSTANT)
	      op0 = val->value;
	  }

	if (TREE_CODE (op1) == SSA_NAME)
	  {
	    prop_value_t *val = get_value (op1);
	    if (val->lattice_val == CONSTANT)
	      op1 = val->value;
	  }

	return fold_binary (code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
	tree rhs = gimple_switch_index (stmt);

	if (TREE_CODE (rhs) == SSA_NAME)
	  {
	    /* If the RHS is an SSA_NAME, return its known constant value,
	       if any.  */
	    return get_value (rhs)->value;
	  }

	return rhs;
      }

    default:
      gcc_unreachable ();
    }
}


/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */
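
/* For example, given

     static const int tab[3] = { 10, 20, 30 };

   a read of tab[1] folds to the INTEGER_CST 20, provided the index is
   (or has been propagated to) a constant and 'tab' binds locally.  */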

tree
fold_const_aggregate_ref (tree t)
{
  prop_value_t *value;
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	case STRING_CST:
	case CONSTRUCTOR:
	  ctor = base;
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || (TREE_CODE (ctor) != CONSTRUCTOR
	      && TREE_CODE (ctor) != STRING_CST)
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
	 with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
	{
	case SSA_NAME:
	  if ((value = get_value (idx))
	      && value->lattice_val == CONSTANT
	      && TREE_CODE (value->value) == INTEGER_CST)
	    idx = value->value;
	  else
	    return NULL_TREE;
	  break;

	case INTEGER_CST:
	  break;

	default:
	  return NULL_TREE;
	}

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
	{
	  if ((TYPE_MODE (TREE_TYPE (t))
	       == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
		  == MODE_INT)
	      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
	      && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
	    return build_int_cst_type (TREE_TYPE (t),
				       (TREE_STRING_POINTER (ctor)
					[TREE_INT_CST_LOW (idx)]));
	  return NULL_TREE;
	}

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (tree_int_cst_equal (cfield, idx))
	  {
	    STRIP_USELESS_TYPE_CONVERSION (cval);
	    return cval;
	  }
      break;

    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || TREE_CODE (ctor) != CONSTRUCTOR
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (cfield == field
	    /* FIXME: Handle bit-fields.  */
	    && ! DECL_BIT_FIELD (cfield))
	  {
	    STRIP_USELESS_TYPE_CONVERSION (cval);
	    return cval;
	  }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    case INDIRECT_REF:
      {
	tree base = TREE_OPERAND (t, 0);
	if (TREE_CODE (base) == SSA_NAME
	    && (value = get_value (base))
	    && value->lattice_val == CONSTANT
	    && TREE_CODE (value->value) == ADDR_EXPR)
	  return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant;

  fold_defer_overflow_warnings ();

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);

	  /* Other cases cannot satisfy is_gimple_min_invariant
	     without folding.  */
	  if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
	    simplified = gimple_assign_rhs1 (stmt);
	}
      else if (code == GIMPLE_SWITCH)
	simplified = gimple_switch_index (stmt);
      else
	/* These cannot satisfy is_gimple_min_invariant without folding.  */
	gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
    }

  is_constant = simplified && is_gimple_min_invariant (simplified);

  fold_undefer_overflow_warnings (is_constant, stmt, 0);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
	{
	case CONSTANT:
	  fprintf (dump_file, "CONSTANT");
	  break;
	case UNDEFINED:
	  fprintf (dump_file, "UNDEFINED");
	  break;
	case VARYING:
	  fprintf (dump_file, "VARYING");
	  break;
	default:;
	}
      fprintf (dump_file, "\n");
    }

  if (is_constant)
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.value = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
	 had UNDEFINED operands, then the result of the statement
	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
	val.lattice_val = likelyvalue;
      else
	val.lattice_val = VARYING;

      val.value = NULL_TREE;
    }

  return val;
}

/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
	      || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_copy_p (stmt))
    {
      tree rhs = gimple_assign_rhs1 (stmt);

      if (TREE_CODE (rhs) == SSA_NAME)
	{
	  /* For a simple copy operation, we copy the lattice values.  */
	  prop_value_t *nval = get_value (rhs);
	  val = *nval;
	}
      else
	val = evaluate_stmt (stmt);
    }
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}


/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */
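
/* For instance, once the predicate in

     if (x_1 == 0) goto <L0>; else goto <L1>;

   has been proven constant true, find_taken_edge returns the edge to
   <L0>, and only that edge is handed back to the propagation engine.  */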

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}


/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* If the statement is an assignment that produces a single
	 output value, evaluate its RHS to see if the lattice value of
	 its output has changed.  */
      return visit_assignment (stmt, output_p);

    case GIMPLE_CALL:
      /* A value-returning call also performs an assignment.  */
      if (gimple_call_lhs (stmt) != NULL_TREE)
	return visit_assignment (stmt, output_p);
      break;

    case GIMPLE_COND:
    case GIMPLE_SWITCH:
      /* If STMT is a conditional branch, see if we can determine
	 which branch will be taken.  */
      /* FIXME.  It appears that we should be able to optimize
	 computed GOTOs here as well.  */
      return visit_cond_stmt (stmt, taken_edge_p);

    default:
      break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}


/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  else
    return 0;
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}


struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
 }
};


/* A subroutine of fold_stmt_r.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */
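
/* For example, with 4-byte int elements, *(&a[2] + 4) can fold to a[3]:
   the byte offset 4 divides exactly by the element size, contributing
   an extra index of 1 on top of the existing index 2.  */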
|
|
1583
|
|
1584 static tree
|
|
1585 maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type,
|
|
1586 bool allow_negative_idx)
|
|
1587 {
|
|
  tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;
  tree domain_type;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of the element type) from that
     ARRAY_REF.  We can't do anything if the new offset or the array's
     low bound is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
          || TREE_CODE (elt_offset) != INTEGER_CST)
        return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!useless_type_conversion_p (orig_type, elt_type))
    return NULL_TREE;

  /* Use signed size type for intermediate computation on the index.  */
  idx_type = signed_type_for (size_type_node);

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (!elt_size)
    return NULL;
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
        elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = build_int_cst (idx_type, 0);
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;
      double_int soffset;

      /* The final array offset should be signed, so we need
         to sign-extend the (possibly pointer) offset here
         and use signed division.  */
      soffset = double_int_sext (tree_to_double_int (offset),
                                 TYPE_PRECISION (TREE_TYPE (offset)));
      if (TREE_CODE (elt_size) != INTEGER_CST
          || div_and_round_double (TRUNC_DIV_EXPR, 0,
                                   soffset.low, soffset.high,
                                   TREE_INT_CST_LOW (elt_size),
                                   TREE_INT_CST_HIGH (elt_size),
                                   &lquo, &hquo, &lrem, &hrem)
          || lrem || hrem)
        return NULL_TREE;

      idx = build_int_cst_wide (idx_type, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = build_int_cst (idx_type, 0);
  domain_type = TYPE_DOMAIN (array_type);
  if (domain_type)
    {
      idx_type = domain_type;
      if (TYPE_MIN_VALUE (idx_type))
        min_idx = TYPE_MIN_VALUE (idx_type);
      else
        min_idx = fold_convert (idx_type, min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
        return NULL_TREE;

      elt_offset = fold_convert (idx_type, elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  /* Make sure to possibly truncate late after offsetting.  */
  idx = fold_convert (idx_type, idx);

  /* We don't want to construct an access past the array bounds.  For example
       char *(c[4]);
       c[3][2];
     should not be simplified into (*c)[14] or tree-vrp will
     give false warnings.  The same is true for
       struct A { long x; char d[0]; } *a;
       (char *)a - 4;
     which should not be folded to &a->d[-8].  */
  if (domain_type
      && TYPE_MAX_VALUE (domain_type)
      && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
    {
      tree up_bound = TYPE_MAX_VALUE (domain_type);

      if (tree_int_cst_lt (up_bound, idx)
          /* Accesses after the end of arrays of size 0 (gcc
             extension) and 1 are likely intentional ("struct
             hack").  */
          && compare_tree_int (up_bound, 1) > 0)
        return NULL_TREE;
    }
  if (domain_type
      && TYPE_MIN_VALUE (domain_type))
    {
      if (!allow_negative_idx
          && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
          && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
        return NULL_TREE;
    }
  else if (!allow_negative_idx
           && compare_tree_int (idx, 0) < 0)
    return NULL_TREE;

  return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
}
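
/* For illustration (a hypothetical example, assuming a 4-byte int):
   given

     int a[10];
     ... *(int *)((char *)&a[2] + 4) ...

   the byte offset 4 divides exactly by the element size to give index 1,
   which is added to the existing ARRAY_REF index 2, so the access folds
   to a[3].  */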


/* Attempt to fold *(S+O) to S.X.
   RECORD_TYPE is the record type of S, OFFSET is a byte displacement
   and ORIG_TYPE is the desired result type.  */

static tree
maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
                                    tree orig_type, bool base_is_ptr)
{
  tree f, t, field_type, tail_array_field, field_offset;
  tree ret;
  tree new_base;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (useless_type_conversion_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
        continue;
      if (DECL_BIT_FIELD (f))
        continue;

      if (!DECL_FIELD_OFFSET (f))
        continue;
      field_offset = byte_position (f);
      if (TREE_CODE (field_offset) != INTEGER_CST)
        continue;

      /* ??? Java creates "interesting" fields for representing base classes.
         They have no name, and have no context.  With no context, we get into
         trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
        continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (field_offset, offset);
      if (cmp > 0)
        continue;

      field_type = TREE_TYPE (f);

      /* Here we exactly match the offset being checked.  If the types match,
         then we can return that field.  */
      if (cmp == 0
          && useless_type_conversion_p (orig_type, field_type))
        {
          if (base_is_ptr)
            base = build1 (INDIRECT_REF, record_type, base);
          t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
          return t;
        }

      /* Don't care about offsets into the middle of scalars.  */
      if (!AGGREGATE_TYPE_P (field_type))
        continue;

      /* Check for array at the end of the struct.  This is often
         used for flexible array members.  We should be able to
         turn this into an array access anyway.  */
      if (TREE_CODE (field_type) == ARRAY_TYPE)
        tail_array_field = f;

      /* Check the end of the field against the offset.  */
      if (!DECL_SIZE_UNIT (f)
          || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
        continue;
      t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
      if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
        continue;

      /* If we matched, then set offset to the displacement into
         this field.  */
      if (base_is_ptr)
        new_base = build1 (INDIRECT_REF, record_type, base);
      else
        new_base = base;
      new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE);

      /* Recurse to possibly find the match.  */
      ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type,
                                            f == TYPE_FIELDS (record_type));
      if (ret)
        return ret;
      ret = maybe_fold_offset_to_component_ref (field_type, new_base, t,
                                                orig_type, false);
      if (ret)
        return ret;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);
  offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);

  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into it.  Recurse and hope for a valid match.  */
  if (base_is_ptr)
    base = build1 (INDIRECT_REF, record_type, base);
  base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);

  t = maybe_fold_offset_to_array_ref (base, offset, orig_type,
                                      f == TYPE_FIELDS (record_type));
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (field_type, base, offset,
                                             orig_type, false);
}
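
/* For illustration (a hypothetical example, assuming 4-byte ints and no
   padding): given

     struct S { int i; int j; } *p;
     ... *(int *)((char *)p + 4) ...

   byte offset 4 lands exactly on field J with a matching type, so the
   field loop above rewrites the access as p->j.  */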

/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
   or BASE[index] or by combination of those.

   Before attempting the conversion strip off existing ADDR_EXPRs and
   handled component refs.  */

tree
maybe_fold_offset_to_reference (tree base, tree offset, tree orig_type)
{
  tree ret;
  tree type;
  bool base_is_ptr = true;

  STRIP_NOPS (base);
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      base_is_ptr = false;

      base = TREE_OPERAND (base, 0);

      /* Handle the case where an existing COMPONENT_REF picks, e.g., the
         wrong field of a union, so that it needs to be removed and a new
         COMPONENT_REF constructed.  Such wrong COMPONENT_REFs are often
         created by folding the (type *)&object within the expression
         (type *)&object+offset.  */
      if (handled_component_p (base))
        {
          HOST_WIDE_INT sub_offset, size, maxsize;
          tree newbase;
          newbase = get_ref_base_and_extent (base, &sub_offset,
                                             &size, &maxsize);
          gcc_assert (newbase);
          if (size == maxsize
              && size != -1
              && !(sub_offset & (BITS_PER_UNIT - 1)))
            {
              base = newbase;
              if (sub_offset)
                offset = int_const_binop (PLUS_EXPR, offset,
                                          build_int_cst (TREE_TYPE (offset),
                                                         sub_offset / BITS_PER_UNIT), 1);
            }
        }
      if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
          && integer_zerop (offset))
        return base;
      type = TREE_TYPE (base);
    }
  else
    {
      base_is_ptr = true;
      if (!POINTER_TYPE_P (TREE_TYPE (base)))
        return NULL_TREE;
      type = TREE_TYPE (TREE_TYPE (base));
    }
  ret = maybe_fold_offset_to_component_ref (type, base, offset,
                                            orig_type, base_is_ptr);
  if (!ret)
    {
      if (base_is_ptr)
        base = build1 (INDIRECT_REF, type, base);
      ret = maybe_fold_offset_to_array_ref (base, offset, orig_type, true);
    }
  return ret;
}

/* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
   or &BASE[index] or by combination of those.

   Before attempting the conversion strip off existing component refs.  */

tree
maybe_fold_offset_to_address (tree addr, tree offset, tree orig_type)
{
  tree t;

  gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
              && POINTER_TYPE_P (orig_type));

  t = maybe_fold_offset_to_reference (addr, offset, TREE_TYPE (orig_type));
  if (t != NULL_TREE)
    {
      tree orig = addr;
      tree ptr_type;

      /* For __builtin_object_size to function correctly we need to
         make sure not to fold address arithmetic in a way that changes
         a reference from one array to another.  This would happen for
         example for

           struct X { char s1[10]; char s2[10]; } s;
           char *foo (void) { return &s.s2[-4]; }

         where we need to avoid generating &s.s1[6].  As the C and
         C++ frontends create different initial trees
         ((char *) &s.s1 + -4 vs. &s.s1[-4]) we have to do some
         sophisticated comparisons here.  Note that checking for the
         condition after the fact is easier than trying to avoid doing
         the folding.  */
      STRIP_NOPS (orig);
      if (TREE_CODE (orig) == ADDR_EXPR)
        orig = TREE_OPERAND (orig, 0);
      if ((TREE_CODE (orig) == ARRAY_REF
           || (TREE_CODE (orig) == COMPONENT_REF
               && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
          && (TREE_CODE (t) == ARRAY_REF
              || TREE_CODE (t) == COMPONENT_REF)
          && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
                               ? TREE_OPERAND (orig, 0) : orig,
                               TREE_CODE (t) == ARRAY_REF
                               ? TREE_OPERAND (t, 0) : t, 0))
        return NULL_TREE;

      ptr_type = build_pointer_type (TREE_TYPE (t));
      if (!useless_type_conversion_p (orig_type, ptr_type))
        return NULL_TREE;
      return build_fold_addr_expr_with_type (t, ptr_type);
    }

  return NULL_TREE;
}

/* A subroutine of fold_stmt_r.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */

static tree
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
  tree t;
  bool volatile_p = TREE_THIS_VOLATILE (expr);

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  base = fold (base);
  STRIP_TYPE_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);
  if (t)
    return t;

  /* Add in any offset from a POINTER_PLUS_EXPR.  */
  if (TREE_CODE (base) == POINTER_PLUS_EXPR)
    {
      tree offset2;

      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
        return NULL_TREE;
      base = TREE_OPERAND (base, 0);

      offset = fold_convert (sizetype,
                             int_const_binop (PLUS_EXPR, offset, offset2, 1));
    }

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      tree base_addr = base;

      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Fold away CONST_DECL to its value, if the type is scalar.  */
      if (TREE_CODE (base) == CONST_DECL
          && is_gimple_min_invariant (DECL_INITIAL (base)))
        return DECL_INITIAL (base);

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_reference (base_addr, offset,
                                          TREE_TYPE (expr));
      if (t)
        {
          /* Preserve volatileness of the original expression.
             We can end up with a plain decl here which is shared
             and we shouldn't mess with its flags.  */
          if (!SSA_VAR_P (t))
            TREE_THIS_VOLATILE (t) = volatile_p;
          return t;
        }
    }
  else
    {
      /* We can get here for out-of-range string constant accesses,
         such as "_"[3].  Bail out of the entire substitution search
         and arrange for the entire statement to be replaced by a
         call to __builtin_trap.  In all likelihood this will all be
         constant-folded away, but in the meantime we can't leave with
         something that get_expr_operands can't understand.  */

      t = base;
      STRIP_NOPS (t);
      if (TREE_CODE (t) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
        {
          /* FIXME: Except that this causes problems elsewhere with dead
             code not being deleted, and we die in the rtl expanders
             because we failed to remove some ssa_name.  In the meantime,
             just return zero.  */
          /* FIXME2: This condition should be signaled by
             fold_read_from_constant_string directly, rather than
             re-checking for it here.  */
          return integer_zero_node;
        }

      /* Try folding *(B+O) to B->X.  Still an improvement.  */
      if (POINTER_TYPE_P (TREE_TYPE (base)))
        {
          t = maybe_fold_offset_to_reference (base, offset,
                                              TREE_TYPE (expr));
          if (t)
            return t;
        }
    }

  /* Otherwise we had an offset that we could not simplify.  */
  return NULL_TREE;
}
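
/* For illustration (a hypothetical example): when constant propagation
   leaves a load through a string constant's address, e.g. the
   dereference *"hi", the fold_read_from_constant_string call above
   reduces it to the character constant 'h'.  */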


/* A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
        (T *)(&array + const)
   where array is OP0, const is OP1, RES_TYPE is T and
   the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
        &array[x]
   which may be able to propagate further.  */

tree
maybe_fold_stmt_addition (tree res_type, tree op0, tree op1)
{
  tree ptd_type;
  tree t;

  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    return NULL_TREE;
  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
        break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
        break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
        {
          min_idx = TYPE_MIN_VALUE (min_idx);
          if (min_idx)
            {
              if (TREE_CODE (min_idx) != INTEGER_CST)
                break;

              array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
              if (!integer_zerop (min_idx))
                array_idx = int_const_binop (MINUS_EXPR, array_idx,
                                             min_idx, 0);
            }
        }

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR,
                             array_idx, op1, 0);
      op0 = array_obj;
    }

  ptd_type = TREE_TYPE (res_type);
  /* If we want a pointer to void, reconstruct the reference from the
     array element type.  A pointer to that can be trivially converted
     to void *.  This happens as we fold (void *)(ptr p+ off).  */
  if (VOID_TYPE_P (ptd_type)
      && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
    ptd_type = TREE_TYPE (TREE_TYPE (op0));

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type, true);
  if (!t)
    t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
                                            ptd_type, false);
  if (t)
    t = build1 (ADDR_EXPR, res_type, t);

  return t;
}
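
/* For illustration (a hypothetical example, assuming a 4-byte int):
   given

     int a[4];
     int *q = (int *) ((char *) &a + 8);

   the POINTER_PLUS_EXPR combining &a with the constant byte offset 8
   folds to &a[2], which later propagation may simplify further.  */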

/* For passing state through walk_tree into fold_stmt_r and its
   children.  */

struct fold_stmt_r_data
{
  gimple stmt;
  bool *changed_p;
  bool *inside_addr_expr_p;
};

/* Subroutine of fold_stmt called via walk_tree.  We perform several
   simplifications of EXPR_P, mostly having to do with pointer arithmetic.  */

static tree
fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct fold_stmt_r_data *fold_stmt_r_data;
  bool *inside_addr_expr_p;
  bool *changed_p;
  tree expr = *expr_p, t;
  bool volatile_p = TREE_THIS_VOLATILE (expr);

  fold_stmt_r_data = (struct fold_stmt_r_data *) wi->info;
  inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p;
  changed_p = fold_stmt_r_data->changed_p;

  /* ??? It'd be nice if walk_tree had a pre-order option.  */
  switch (TREE_CODE (expr))
    {
    case INDIRECT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
                                    integer_zero_node);
      /* Avoid folding *"abc" = 5 into 'a' = 5.  */
      if (wi->is_lhs && t && TREE_CODE (t) == INTEGER_CST)
        t = NULL_TREE;
      if (!t
          && TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
        /* If we had a good reason for propagating the address here,
           make sure we end up with valid gimple.  See PR34989.  */
        t = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
      break;

    case NOP_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      if (POINTER_TYPE_P (TREE_TYPE (expr))
          && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (expr)))
          && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
          && (t = maybe_fold_offset_to_address (TREE_OPERAND (expr, 0),
                                                integer_zero_node,
                                                TREE_TYPE (TREE_TYPE (expr)))))
        return t;
      break;

      /* ??? Could handle more ARRAY_REFs here, as a variant of INDIRECT_REF.
         We'd only want to bother decomposing an existing ARRAY_REF if
         the base array is found to have another offset contained within.
         Otherwise we'd be wasting time.  */
    case ARRAY_REF:
      /* If we are not processing expressions found within an
         ADDR_EXPR, then we can fold constant array references.
         Don't fold on LHS either, to avoid folding "abc"[0] = 5
         into 'a' = 5.  */
      if (!*inside_addr_expr_p && !wi->is_lhs)
        t = fold_read_from_constant_string (expr);
      else
        t = NULL;
      break;

    case ADDR_EXPR:
      *inside_addr_expr_p = true;
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      *inside_addr_expr_p = false;
      if (t)
        return t;
      *walk_subtrees = 0;

      /* Make sure the value is properly considered constant, and so gets
         propagated as expected.  */
      if (*changed_p)
        recompute_tree_invariant_for_addr_expr (expr);
      return NULL_TREE;

    case COMPONENT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
         We've already checked that the records are compatible, so we should
         come up with a set of compatible fields.  */
      {
        tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
        tree expr_field = TREE_OPERAND (expr, 1);

        if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
          {
            expr_field = find_compatible_field (expr_record, expr_field);
            TREE_OPERAND (expr, 1) = expr_field;
          }
      }
      break;

    case TARGET_MEM_REF:
      t = maybe_fold_tmr (expr);
      break;

    case POINTER_PLUS_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_addition (TREE_TYPE (expr),
                                    TREE_OPERAND (expr, 0),
                                    TREE_OPERAND (expr, 1));
      break;

    case COND_EXPR:
      if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0)))
        {
          tree op0 = TREE_OPERAND (expr, 0);
          tree tem;
          bool set;

          fold_defer_overflow_warnings ();
          tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
                             TREE_OPERAND (op0, 0),
                             TREE_OPERAND (op0, 1));
          /* This is actually a conditional expression, not a GIMPLE
             conditional statement; however, the valid_gimple_rhs_p
             test still applies.  */
          set = tem && is_gimple_condexpr (tem) && valid_gimple_rhs_p (tem);
          fold_undefer_overflow_warnings (set, fold_stmt_r_data->stmt, 0);
          if (set)
            {
              COND_EXPR_COND (expr) = tem;
              t = expr;
              break;
            }
        }
      return NULL_TREE;

    default:
      return NULL_TREE;
    }

  if (t)
    {
      /* Preserve volatileness of the original expression.
         We can end up with a plain decl here which is shared
         and we shouldn't mess with its flags.  */
      if (!SSA_VAR_P (t))
        TREE_THIS_VOLATILE (t) = volatile_p;
      *expr_p = t;
      *changed_p = true;
    }

  return NULL_TREE;
}

/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  TYPE is
   0 if the string length should be returned, 1 for the maximum string
   length and 2 for the maximum value ARG can have.  Return false if we
   are unable to determine the length or value, or, for TYPE == 0, if
   *LENGTH is already set and differs from the length we determine.
   VISITED is a bitmap of visited variables.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
        return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
               && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
               && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
        {
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_maxval_strlen (TREE_OPERAND (aop0, 0),
                                      length, visited, type);
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length)
        {
          if (type > 0)
            {
              if (TREE_CODE (*length) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;

              if (tree_int_cst_lt (*length, val))
                *length = val;
              return true;
            }
          else if (simple_cst_equal (val, *length) != 1)
            return false;
        }

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
    return true;
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
         constant length or come from another SSA_NAME with a constant
         length.  */
      if (gimple_assign_single_p (def_stmt)
          || gimple_assign_unary_nop_p (def_stmt))
        {
          tree rhs = gimple_assign_rhs1 (def_stmt);
          return get_maxval_strlen (rhs, length, visited, type);
        }
      return false;

    case GIMPLE_PHI:
      {
        /* All the arguments of the PHI node must have the same constant
           length.  */
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          {
            tree arg = gimple_phi_arg (def_stmt, i)->def;

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
            if (arg == gimple_phi_result (def_stmt))
              continue;

            if (!get_maxval_strlen (arg, length, visited, type))
              return false;
          }
      }
      return true;

    default:
      return false;
    }
}
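
/* For illustration (a hypothetical example): for

     s_3 = PHI <&"foo"[0], &"bazz"[0]>

   get_maxval_strlen with TYPE == 0 fails, since the two string lengths
   3 and 4 disagree, while TYPE == 1 succeeds and returns the maximum
   string length 4.  */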


/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

static tree
ccp_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
        {
          tree new_val =
              fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

          /* If the result is not a valid gimple value, or not a cast
             of a valid gimple value, then we cannot use the result.  */
          if (is_gimple_val (new_val)
              || (is_gimple_cast (new_val)
                  && is_gimple_val (TREE_OPERAND (new_val, 0))))
            return new_val;
        }
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
        result = fold_builtin_strcpy (callee,
                                      gimple_call_arg (stmt, 0),
                                      gimple_call_arg (stmt, 1),
                                      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_strncpy (callee,
                                       gimple_call_arg (stmt, 0),
                                       gimple_call_arg (stmt, 1),
                                       gimple_call_arg (stmt, 2),
                                       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
        result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
        result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_memory_chk (callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          gimple_call_arg (stmt, 3),
                                          val[2], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_stxcpy_chk (callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          val[1], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_strncpy_chk (gimple_call_arg (stmt, 0),
                                           gimple_call_arg (stmt, 1),
                                           gimple_call_arg (stmt, 2),
                                           gimple_call_arg (stmt, 3),
                                           val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
        result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
                                                   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
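
/* For illustration (a hypothetical example): if the dataflow above
   determines that the source argument of

     strcpy (dst, src);

   always has string length 3, the generic folders can rewrite the call
   as

     memcpy (dst, src, 4);

   where the extra byte copies the terminating NUL.  */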

/* Attempt to fold an assignment statement pointed to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);

  tree result = NULL;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        /* Try to fold a conditional expression.  */
        if (TREE_CODE (rhs) == COND_EXPR)
          {
            tree temp = fold (COND_EXPR_COND (rhs));
            if (temp != COND_EXPR_COND (rhs))
              result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp,
                                    COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
          }

        /* If we couldn't fold the RHS, hand over to the generic
           fold routines.  */
        if (result == NULL_TREE)
          result = fold (rhs);

        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPRs
           that may have been added by fold, and "useless" type
           conversions that might now be apparent due to propagation.  */
        STRIP_USELESS_TYPE_CONVERSION (result);

        if (result != rhs && valid_gimple_rhs_p (result))
          return result;
        else
          /* It is possible that fold_stmt_r simplified the RHS.
             Make sure that the subcode of this statement still
             reflects the principal operator of the rhs operand.  */
          return rhs;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        result = fold_unary (subcode, gimple_expr_type (stmt), rhs);
        if (result)
          {
            /* If the operation was a conversion do _not_ mark a
               resulting constant with TREE_OVERFLOW if the original
               constant was not.  These conversions have implementation
               defined behavior and retaining the TREE_OVERFLOW flag
               here would confuse later passes such as VRP.  */
            if (CONVERT_EXPR_CODE_P (subcode)
                && TREE_CODE (result) == INTEGER_CST
                && TREE_CODE (rhs) == INTEGER_CST)
              TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

            STRIP_USELESS_TYPE_CONVERSION (result);
            if (valid_gimple_rhs_p (result))
              return result;
          }
        else if (CONVERT_EXPR_CODE_P (subcode)
                 && POINTER_TYPE_P (gimple_expr_type (stmt))
                 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
          {
            tree type = gimple_expr_type (stmt);
            tree t = maybe_fold_offset_to_address (gimple_assign_rhs1 (stmt),
                                                   integer_zero_node, type);
            if (t)
              return t;
          }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to fold pointer addition.  */
      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
        {
          tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
          if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
            {
              type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
              if (!useless_type_conversion_p
                    (TREE_TYPE (gimple_assign_lhs (stmt)), type))
                type = TREE_TYPE (gimple_assign_rhs1 (stmt));
            }
          result = maybe_fold_stmt_addition (type,
                                             gimple_assign_rhs1 (stmt),
                                             gimple_assign_rhs2 (stmt));
        }

      if (!result)
        result = fold_binary (subcode,
                              TREE_TYPE (gimple_assign_lhs (stmt)),
                              gimple_assign_rhs1 (stmt),
                              gimple_assign_rhs2 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;

          /* Fold might have produced non-GIMPLE, so if we trust it blindly
             we lose canonicalization opportunities.  Do not go again
             through fold here though, or the same non-GIMPLE will be
             produced.  */
          if (commutative_tree_code (subcode)
              && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
                                       gimple_assign_rhs2 (stmt), false))
            return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
                           gimple_assign_rhs2 (stmt),
                           gimple_assign_rhs1 (stmt));
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}

/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary (gimple_cond_code (stmt),
                             boolean_type_node,
                             gimple_cond_lhs (stmt),
                             gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
        {
          gimple_cond_set_condition_from_tree (stmt, result);
          return true;
        }
    }

  return false;
}
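
/* For illustration (a hypothetical example): once CCP has substituted
   the constant 7 for the predicate operand of

     if (i_2 > 3)

   fold_binary above reduces the comparison 7 > 3 to a constant true,
   and the GIMPLE_COND is rewritten as an always-true condition.  */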


/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
fold_gimple_call (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  tree callee = gimple_call_fndecl (stmt);

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = ccp_fold_builtin (stmt);

      if (result)
        return update_call_from_tree (gsi, result);
    }
  else
    {
      /* Check for resolvable OBJ_TYPE_REF.  The only sorts we can resolve
         here are when we've propagated the address of a decl into the
         object slot.  */
      /* ??? Should perhaps do this in fold proper.  However, doing it
         there requires that we create a new CALL_EXPR, and that requires
         copying EH region info to the new node.  Easier to just do it
         here where we can just smash the call operand.  */
      /* ??? Is there a good reason not to do this in fold_stmt_inplace?  */
      callee = gimple_call_fn (stmt);
      if (TREE_CODE (callee) == OBJ_TYPE_REF
          && lang_hooks.fold_obj_type_ref
          && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
          && DECL_P (TREE_OPERAND
                     (OBJ_TYPE_REF_OBJECT (callee), 0)))
        {
          tree t;

          /* ??? Caution: Broken ADDR_EXPR semantics means that
             looking at the type of the operand of the addr_expr
             can yield an array type.  See silly exception in
             check_pointer_types_r.  */
          t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
          t = lang_hooks.fold_obj_type_ref (callee, t);
          if (t)
            {
              gimple_call_set_fn (stmt, t);
              return true;
            }
        }
    }

  return false;
}

/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  tree res;
  struct fold_stmt_r_data fold_stmt_r_data;
  struct walk_stmt_info wi;

  bool changed = false;
  bool inside_addr_expr = false;

  gimple stmt = gsi_stmt (*gsi);

  fold_stmt_r_data.stmt = stmt;
  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  memset (&wi, 0, sizeof (wi));
  wi.info = &fold_stmt_r_data;

  /* Fold the individual operands.
     For example, fold instances of *&VAR into VAR, etc.  */
  res = walk_gimple_op (stmt, fold_stmt_r, &wi);
  gcc_assert (!res);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        tree new_rhs = fold_gimple_assign (gsi);
        if (new_rhs != NULL_TREE)
          {
            gimple_assign_set_rhs_from_tree (gsi, new_rhs);
            changed = true;
          }
        stmt = gsi_stmt (*gsi);
        break;
      }
    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;
    case GIMPLE_CALL:
      /* The entire statement may be replaced in this case.  */
      changed |= fold_gimple_call (gsi);
      break;

    default:
      return changed;
    }

  return changed;
}
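
/* For illustration (a hypothetical example): after CCP propagates the
   address &x into a dereference, fold_stmt turns

     tmp_1 = *&x;

   into the direct read

     tmp_1 = x;

   via the INDIRECT_REF case of fold_stmt_r.  */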

/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.  */

bool
fold_stmt_inplace (gimple stmt)
{
  tree res;
  struct fold_stmt_r_data fold_stmt_r_data;
  struct walk_stmt_info wi;
  gimple_stmt_iterator si;

  bool changed = false;
  bool inside_addr_expr = false;

  fold_stmt_r_data.stmt = stmt;
  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  memset (&wi, 0, sizeof (wi));
  wi.info = &fold_stmt_r_data;

  /* Fold the individual operands.
     For example, fold instances of *&VAR into VAR, etc.

     It appears that, at one time, maybe_fold_stmt_indirect
     would cause the walk to return non-null in order to
     signal that the entire statement should be replaced with
     a call to __builtin_trap.  This functionality is currently
     disabled, as noted in a FIXME, and cannot be supported here.  */
  res = walk_gimple_op (stmt, fold_stmt_r, &wi);
  gcc_assert (!res);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        unsigned old_num_ops;
        tree new_rhs;
        old_num_ops = gimple_num_ops (stmt);
        si = gsi_for_stmt (stmt);
        new_rhs = fold_gimple_assign (&si);
        if (new_rhs != NULL_TREE
            && get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops)
          {
            gimple_assign_set_rhs_from_tree (&si, new_rhs);
            changed = true;
          }
        gcc_assert (gsi_stmt (si) == stmt);
        break;
      }
    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    default:
      break;
    }

  return changed;
}

/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */

static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee, rhs;
  gimple stmt, stack_save;
  gimple_stmt_iterator stack_save_gsi;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
        return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
        continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
        return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
        break;
    }

  if (gsi_end_p (i)
      && (! single_succ_p (bb)
          || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
    return NULL_TREE;

  stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
  if (gimple_code (stack_save) != GIMPLE_CALL
      || gimple_call_lhs (stack_save) != gimple_call_arg (call, 0)
      || stmt_could_throw_p (stack_save)
      || !has_single_use (gimple_call_arg (call, 0)))
    return NULL_TREE;

  callee = gimple_call_fndecl (stack_save);
  if (!callee
      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
      || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
      || gimple_call_num_args (stack_save) != 0)
    return NULL_TREE;

  stack_save_gsi = gsi_for_stmt (stack_save);
  push_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
  rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
  if (!update_call_from_tree (&stack_save_gsi, rhs))
    {
      discard_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
      return NULL_TREE;
    }
  pop_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
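
/* For illustration (a hypothetical example):

     saved_1 = __builtin_stack_save ();
     ...                                   <-- no calls or asms
     __builtin_stack_restore (saved_1);    <-- block falls through to EXIT

   Restoring the stack pointer here has no observable effect, so both
   calls are replaced by constants and later deleted as dead code.  */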

/* If the va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   expand __builtin_va_end (&ap) out as a NOP and turn __builtin_va_copy
   into a simple pointer assignment.  */

static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
                       && (TREE_TYPE (cfun_va_list) == void_type_node
                           || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
          || targetm.expand_builtin_va_start != NULL
          || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref (lhs);
      rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
                             1, integer_zero_node);
      rhs = fold_convert (TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref (lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
          != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      rhs = fold_convert (TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
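
/* For illustration (a hypothetical example): on a target whose va_list
   is a plain pointer,

     __builtin_va_copy (&dst, src);

   is rewritten by the code above into the plain pointer assignment

     dst = src;

   once the va_list types of both arguments have been verified.  */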

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  */

static void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE;  /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc ();
  struct gimplify_ctx gctx;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);

  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
    gimplify_and_add (expr, &stmts);
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      find_new_referenced_vars (new_stmt);
      gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (new_stmt);
      gsi_next (si_p);
    }

  if (lhs == NULL_TREE)
    new_stmt = gimple_build_nop ();
  else
    {
      new_stmt = gimple_build_assign (lhs, tmp);
      copy_virtual_operands (new_stmt, stmt);
      move_ssa_defining_stmt_for_defs (new_stmt, stmt);
    }

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
}
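
/* For illustration (a hypothetical example): the MODIFY_EXPR that
   optimize_stdarg_builtin returns for va_start, e.g.

     ap = __builtin_next_arg (0);

   is not a valid GIMPLE RHS, so it is gimplified here into a short
   statement sequence inserted before the original call, and the call
   itself (which has no LHS) is then replaced by a GIMPLE_NOP.  */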

/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          gimple stmt, old_stmt;
          tree callee, result;
          enum built_in_function fcode;

          stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              gsi_next (&i);
              continue;
            }
          callee = gimple_call_fndecl (stmt);
          if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            {
              gsi_next (&i);
              continue;
            }
          fcode = DECL_FUNCTION_CODE (callee);

          result = ccp_fold_builtin (stmt);

          if (result)
            gimple_remove_stmt_histograms (cfun, stmt);

          if (!result)
            switch (DECL_FUNCTION_CODE (callee))
              {
              case BUILT_IN_CONSTANT_P:
                /* Resolve __builtin_constant_p.  If it hasn't been
                   folded to integer_one_node by now, it's fairly
                   certain that the value simply isn't constant.  */
                result = integer_zero_node;
                break;

              case BUILT_IN_STACK_RESTORE:
                result = optimize_stack_restore (i);
                if (result)
                  break;
                gsi_next (&i);
                continue;

              case BUILT_IN_VA_START:
              case BUILT_IN_VA_END:
              case BUILT_IN_VA_COPY:
                /* These shouldn't be folded before pass_stdarg.  */
                result = optimize_stdarg_builtin (stmt);
                if (result)
                  break;
                /* FALLTHRU */

              default:
                gsi_next (&i);
                continue;
              }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
            }

          old_stmt = stmt;
          push_stmt_changes (gsi_stmt_ptr (&i));

          if (!update_call_from_tree (&i, result))
            {
              gimplify_and_update_call_from_tree (&i, result);
              todoflags |= TODO_rebuild_alias;
            }

          stmt = gsi_stmt (i);
          pop_stmt_changes (gsi_stmt_ptr (&i));

          if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
              && gimple_purge_dead_eh_edges (bb))
            cfg_changed = true;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
              fprintf (dump_file, "\n");
            }

          /* Retry the same statement if it changed into another
             builtin; there might be new opportunities now.  */
          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              gsi_next (&i);
              continue;
            }
          callee = gimple_call_fndecl (stmt);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
              || DECL_FUNCTION_CODE (callee) == fcode)
            gsi_next (&i);
        }
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}


struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",                                /* name */
  NULL,                                 /* gate */
  execute_fold_all_builtins,            /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_update_ssa                   /* todo_flags_finish */
 }
};