Mercurial > hg > CbC > CbC_gcc
comparison gcc/tree-ssa-forwprop.c @ 0:a06113de4d67
first commit
author | kent <kent@cr.ie.u-ryukyu.ac.jp> |
---|---|
date | Fri, 17 Jul 2009 14:47:48 +0900 |
parents | |
children | 58ad6c70ea60 |
comparison
equal
deleted
inserted
replaced
-1:000000000000 | 0:a06113de4d67 |
---|---|
1 /* Forward propagation of expressions for single use variables. | |
2 Copyright (C) 2004, 2005, 2007, 2008, 2009 Free Software Foundation, Inc. | |
3 | |
4 This file is part of GCC. | |
5 | |
6 GCC is free software; you can redistribute it and/or modify | |
7 it under the terms of the GNU General Public License as published by | |
8 the Free Software Foundation; either version 3, or (at your option) | |
9 any later version. | |
10 | |
11 GCC is distributed in the hope that it will be useful, | |
12 but WITHOUT ANY WARRANTY; without even the implied warranty of | |
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
14 GNU General Public License for more details. | |
15 | |
16 You should have received a copy of the GNU General Public License | |
17 along with GCC; see the file COPYING3. If not see | |
18 <http://www.gnu.org/licenses/>. */ | |
19 | |
20 #include "config.h" | |
21 #include "system.h" | |
22 #include "coretypes.h" | |
23 #include "tm.h" | |
24 #include "ggc.h" | |
25 #include "tree.h" | |
26 #include "rtl.h" | |
27 #include "tm_p.h" | |
28 #include "basic-block.h" | |
29 #include "timevar.h" | |
30 #include "diagnostic.h" | |
31 #include "tree-flow.h" | |
32 #include "tree-pass.h" | |
33 #include "tree-dump.h" | |
34 #include "langhooks.h" | |
35 #include "flags.h" | |
36 #include "gimple.h" | |
37 | |
38 /* This pass propagates the RHS of assignment statements into use | |
39 sites of the LHS of the assignment. It's basically a specialized | |
40 form of tree combination. It is hoped all of this can disappear | |
41 when we have a generalized tree combiner. | |
42 | |
43 One class of common cases we handle is forward propagating a single use | |
44 variable into a COND_EXPR. | |
45 | |
46 bb0: | |
47 x = a COND b; | |
48 if (x) goto ... else goto ... | |
49 | |
50 Will be transformed into: | |
51 | |
52 bb0: | |
53 if (a COND b) goto ... else goto ... | |
54 | |
55 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1). | |
56 | |
57 Or (assuming c1 and c2 are constants): | |
58 | |
59 bb0: | |
60 x = a + c1; | |
61 if (x EQ/NEQ c2) goto ... else goto ... | |
62 | |
63 Will be transformed into: | |
64 | |
65 bb0: | |
66 if (a EQ/NEQ (c2 - c1)) goto ... else goto ... | |
67 | |
68 Similarly for x = a - c1. | |
69 | |
70 Or | |
71 | |
72 bb0: | |
73 x = !a | |
74 if (x) goto ... else goto ... | |
75 | |
76 Will be transformed into: | |
77 | |
78 bb0: | |
79 if (a == 0) goto ... else goto ... | |
80 | |
81 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1). | |
82 For these cases, we propagate A into all, possibly more than one, | |
83 COND_EXPRs that use X. | |
84 | |
85 Or | |
86 | |
87 bb0: | |
88 x = (typecast) a | |
89 if (x) goto ... else goto ... | |
90 | |
91 Will be transformed into: | |
92 | |
93 bb0: | |
94 if (a != 0) goto ... else goto ... | |
95 | |
96 (Assuming a is an integral type and x is a boolean or x is an | |
97 integral and a is a boolean.) | |
98 | |
99 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1). | |
100 For these cases, we propagate A into all, possibly more than one, | |
101 COND_EXPRs that use X. | |
102 | |
103 In addition to eliminating the variable and the statement which assigns | |
104 a value to the variable, we may be able to later thread the jump without | |
105 adding insane complexity in the dominator optimizer. | |
106 | |
107 Also note these transformations can cascade. We handle this by having | |
108 a worklist of COND_EXPR statements to examine. As we make a change to | |
109 a statement, we put it back on the worklist to examine on the next | |
110 iteration of the main loop. | |
111 | |
112 A second class of propagation opportunities arises for ADDR_EXPR | |
113 nodes. | |
114 | |
115 ptr = &x->y->z; | |
116 res = *ptr; | |
117 | |
118 Will get turned into | |
119 | |
120 res = x->y->z; | |
121 | |
122 Or | |
123 ptr = (type1*)&type2var; | |
124 res = *ptr | |
125 | |
126 Will get turned into (if type1 and type2 are the same size | |
127 and neither have volatile on them): | |
128 res = VIEW_CONVERT_EXPR<type1>(type2var) | |
129 | |
130 Or | |
131 | |
132 ptr = &x[0]; | |
133 ptr2 = ptr + <constant>; | |
134 | |
135 Will get turned into | |
136 | |
137 ptr2 = &x[constant/elementsize]; | |
138 | |
139 Or | |
140 | |
141 ptr = &x[0]; | |
142 offset = index * element_size; | |
143 offset_p = (pointer) offset; | |
144 ptr2 = ptr + offset_p | |
145 | |
146 Will get turned into: | |
147 | |
148 ptr2 = &x[index]; | |
149 | |
150 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to | |
151 allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent | |
152 {NOT_EXPR,NEG_EXPR}. | |
153 | |
154 This will (of course) be extended as other needs arise. */ | |
155 | |
156 static bool forward_propagate_addr_expr (tree name, tree rhs); | |
157 | |
158 /* Set to true if we delete EH edges during the optimization. */ | |
159 static bool cfg_changed; | |
160 | |
161 static tree rhs_to_tree (tree type, gimple stmt); | |
162 | |
163 /* Get the next statement we can propagate NAME's value into skipping | |
164 trivial copies. Returns the statement that is suitable as a | |
165 propagation destination or NULL_TREE if there is no such one. | |
166 This only returns destinations in a single-use chain. FINAL_NAME_P | |
167 if non-NULL is written to the ssa name that represents the use. */ | |
168 | |
169 static gimple | |
170 get_prop_dest_stmt (tree name, tree *final_name_p) | |
171 { | |
172 use_operand_p use; | |
173 gimple use_stmt; | |
174 | |
175 do { | |
176 /* If name has multiple uses, bail out. */ | |
177 if (!single_imm_use (name, &use, &use_stmt)) | |
178 return NULL; | |
179 | |
180 /* If this is not a trivial copy, we found it. */ | |
181 if (!gimple_assign_copy_p (use_stmt) | |
182 || TREE_CODE (gimple_assign_lhs (use_stmt)) != SSA_NAME | |
183 || gimple_assign_rhs1 (use_stmt) != name) | |
184 break; | |
185 | |
186 /* Continue searching uses of the copy destination. */ | |
187 name = gimple_assign_lhs (use_stmt); | |
188 } while (1); | |
189 | |
190 if (final_name_p) | |
191 *final_name_p = name; | |
192 | |
193 return use_stmt; | |
194 } | |
195 | |
196 /* Get the statement we can propagate from into NAME skipping | |
197 trivial copies. Returns the statement which defines the | |
198 propagation source or NULL_TREE if there is no such one. | |
199 If SINGLE_USE_ONLY is set considers only sources which have | |
200 a single use chain up to NAME. If SINGLE_USE_P is non-null, | |
201 it is set to whether the chain to NAME is a single use chain | |
202 or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */ | |
203 | |
static gimple
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  /* Tracks whether every link of the copy chain walked so far has a
     single use.  */
  bool single_use = true;

  do {
    gimple def_stmt = SSA_NAME_DEF_STMT (name);

    if (!has_single_use (name))
      {
	single_use = false;
	/* A multiply-used link disqualifies the whole chain when the
	   caller asked for single-use chains only.  */
	if (single_use_only)
	  return NULL;
      }

    /* If name is defined by a PHI node or is the default def, bail out. */
    if (gimple_code (def_stmt) != GIMPLE_ASSIGN)
      return NULL;

    /* If name is not a simple copy destination, we found it. */
    if (!gimple_assign_copy_p (def_stmt)
	|| TREE_CODE (gimple_assign_rhs1 (def_stmt)) != SSA_NAME)
      {
	tree rhs;

	/* Per the contract, SINGLE_USE_P is left untouched when
	   SINGLE_USE_ONLY is set.  */
	if (!single_use_only && single_use_p)
	  *single_use_p = single_use;

	/* We can look through pointer conversions in the search
	   for a useful stmt for the comparison folding.  Note this
	   continues the walk without updating SINGLE_USE for the
	   converted name.  */
	rhs = gimple_assign_rhs1 (def_stmt);
	if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
	    && TREE_CODE (rhs) == SSA_NAME
	    && POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (def_stmt)))
	    && POINTER_TYPE_P (TREE_TYPE (rhs)))
	  name = rhs;
	else
	  return def_stmt;
      }
    else
      {
	/* Continue searching the def of the copy source name. */
	name = gimple_assign_rhs1 (def_stmt);
      }
  } while (1);
}
250 | |
251 /* Checks if the destination ssa name in DEF_STMT can be used as | |
252 propagation source. Returns true if so, otherwise false. */ | |
253 | |
254 static bool | |
255 can_propagate_from (gimple def_stmt) | |
256 { | |
257 use_operand_p use_p; | |
258 ssa_op_iter iter; | |
259 | |
260 gcc_assert (is_gimple_assign (def_stmt)); | |
261 /* If the rhs has side-effects we cannot propagate from it. */ | |
262 if (gimple_has_volatile_ops (def_stmt)) | |
263 return false; | |
264 | |
265 /* If the rhs is a load we cannot propagate from it. */ | |
266 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference | |
267 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration) | |
268 return false; | |
269 | |
270 /* Constants can be always propagated. */ | |
271 if (is_gimple_min_invariant | |
272 (rhs_to_tree (TREE_TYPE (gimple_assign_lhs (def_stmt)), def_stmt))) | |
273 return true; | |
274 | |
275 /* We cannot propagate ssa names that occur in abnormal phi nodes. */ | |
276 FOR_EACH_SSA_USE_OPERAND (use_p, def_stmt, iter, SSA_OP_USE) | |
277 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p))) | |
278 return false; | |
279 | |
280 /* If the definition is a conversion of a pointer to a function type, | |
281 then we can not apply optimizations as some targets require | |
282 function pointers to be canonicalized and in this case this | |
283 optimization could eliminate a necessary canonicalization. */ | |
284 if (is_gimple_assign (def_stmt) | |
285 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))) | |
286 { | |
287 tree rhs = gimple_assign_rhs1 (def_stmt); | |
288 if (POINTER_TYPE_P (TREE_TYPE (rhs)) | |
289 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE) | |
290 return false; | |
291 } | |
292 return true; | |
293 } | |
294 | |
295 /* Remove a copy chain ending in NAME along the defs but not | |
296 further or including UP_TO_STMT. If NAME was replaced in | |
297 its only use then this function can be used to clean up | |
298 dead stmts. Returns true if UP_TO_STMT can be removed | |
299 as well, otherwise false. */ | |
300 | |
301 static bool | |
302 remove_prop_source_from_use (tree name, gimple up_to_stmt) | |
303 { | |
304 gimple_stmt_iterator gsi; | |
305 gimple stmt; | |
306 | |
307 do { | |
308 if (!has_zero_uses (name)) | |
309 return false; | |
310 | |
311 stmt = SSA_NAME_DEF_STMT (name); | |
312 if (stmt == up_to_stmt) | |
313 return true; | |
314 | |
315 gsi = gsi_for_stmt (stmt); | |
316 release_defs (stmt); | |
317 gsi_remove (&gsi, true); | |
318 | |
319 name = (gimple_assign_copy_p (stmt)) ? gimple_assign_rhs1 (stmt) : NULL; | |
320 } while (name && TREE_CODE (name) == SSA_NAME); | |
321 | |
322 return false; | |
323 } | |
324 | |
325 /* Return the rhs of a gimple_assign STMT in a form of a single tree, | |
326 converted to type TYPE. | |
327 | |
328 This should disappear, but is needed so we can combine expressions and use | |
329 the fold() interfaces. Long term, we need to develop folding and combine | |
330 routines that deal with gimple exclusively . */ | |
331 | |
332 static tree | |
333 rhs_to_tree (tree type, gimple stmt) | |
334 { | |
335 enum tree_code code = gimple_assign_rhs_code (stmt); | |
336 if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS) | |
337 return fold_build2 (code, type, gimple_assign_rhs1 (stmt), | |
338 gimple_assign_rhs2 (stmt)); | |
339 else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS) | |
340 return build1 (code, type, gimple_assign_rhs1 (stmt)); | |
341 else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS) | |
342 return gimple_assign_rhs1 (stmt); | |
343 else | |
344 gcc_unreachable (); | |
345 } | |
346 | |
347 /* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns | |
348 the folded result in a form suitable for COND_EXPR_COND or | |
349 NULL_TREE, if there is no suitable simplified form. If | |
350 INVARIANT_ONLY is true only gimple_min_invariant results are | |
351 considered simplified. */ | |
352 | |
353 static tree | |
354 combine_cond_expr_cond (enum tree_code code, tree type, | |
355 tree op0, tree op1, bool invariant_only) | |
356 { | |
357 tree t; | |
358 | |
359 gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison); | |
360 | |
361 t = fold_binary (code, type, op0, op1); | |
362 if (!t) | |
363 return NULL_TREE; | |
364 | |
365 /* Require that we got a boolean type out if we put one in. */ | |
366 gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type)); | |
367 | |
368 /* Canonicalize the combined condition for use in a COND_EXPR. */ | |
369 t = canonicalize_cond_expr_cond (t); | |
370 | |
371 /* Bail out if we required an invariant but didn't get one. */ | |
372 if (!t || (invariant_only && !is_gimple_min_invariant (t))) | |
373 return NULL_TREE; | |
374 | |
375 return t; | |
376 } | |
377 | |
378 /* Propagate from the ssa name definition statements of COND_EXPR | |
379 in GIMPLE_COND statement STMT into the conditional if that simplifies it. | |
380 Returns zero if no statement was changed, one if there were | |
381 changes and two if cfg_cleanup needs to run. | |
382 | |
383 This must be kept in sync with forward_propagate_into_cond. */ | |
384 | |
static int
forward_propagate_into_gimple_cond (gimple stmt)
{
  /* 0 = nothing changed, 1 = something changed, 2 = condition folded to
     an invariant so the caller must run cfg cleanup.  */
  int did_something = 0;

  do {
    tree tmp = NULL_TREE;
    tree name, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
    gimple def_stmt;
    bool single_use0_p = false, single_use1_p = false;
    enum tree_code code = gimple_cond_code (stmt);

    /* We can do tree combining on SSA_NAME and comparison expressions. */
    if (TREE_CODE_CLASS (gimple_cond_code (stmt)) == tcc_comparison
	&& TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME)
      {
	/* For comparisons use the first operand, that is likely to
	   simplify comparisons against constants. */
	name = gimple_cond_lhs (stmt);
	def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
	if (def_stmt && can_propagate_from (def_stmt))
	  {
	    tree op1 = gimple_cond_rhs (stmt);
	    rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
	    /* For a multiply-used source only accept invariant results
	       (last argument of combine_cond_expr_cond).  */
	    tmp = combine_cond_expr_cond (code, boolean_type_node, rhs0,
					  op1, !single_use0_p);
	  }
	/* If that wasn't successful, try the second operand. */
	if (tmp == NULL_TREE
	    && TREE_CODE (gimple_cond_rhs (stmt)) == SSA_NAME)
	  {
	    tree op0 = gimple_cond_lhs (stmt);
	    name = gimple_cond_rhs (stmt);
	    def_stmt = get_prop_source_stmt (name, false, &single_use1_p);
	    if (!def_stmt || !can_propagate_from (def_stmt))
	      return did_something;

	    rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
	    tmp = combine_cond_expr_cond (code, boolean_type_node, op0, rhs1,
					  !single_use1_p);
	  }
	/* If that wasn't successful either, try both operands. */
	if (tmp == NULL_TREE
	    && rhs0 != NULL_TREE
	    && rhs1 != NULL_TREE)
	  tmp = combine_cond_expr_cond (code, boolean_type_node, rhs0,
					fold_convert (TREE_TYPE (rhs0), rhs1),
					!(single_use0_p && single_use1_p));
      }

    if (tmp)
      {
	if (dump_file && tmp)
	  {
	    tree cond = build2 (gimple_cond_code (stmt),
				boolean_type_node,
				gimple_cond_lhs (stmt),
				gimple_cond_rhs (stmt));
	    fprintf (dump_file, " Replaced '");
	    print_generic_expr (dump_file, cond, 0);
	    fprintf (dump_file, "' with '");
	    print_generic_expr (dump_file, tmp, 0);
	    fprintf (dump_file, "'\n");
	  }

	gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
	update_stmt (stmt);

	/* Remove defining statements. */
	remove_prop_source_from_use (name, NULL);

	/* Folding to a constant exposes dead edges; request cfg cleanup.
	   Otherwise record that something changed, but never downgrade
	   an earlier 2.  */
	if (is_gimple_min_invariant (tmp))
	  did_something = 2;
	else if (did_something == 0)
	  did_something = 1;

	/* Continue combining. */
	continue;
      }

    break;
  } while (1);

  return did_something;
}
470 | |
471 | |
472 /* Propagate from the ssa name definition statements of COND_EXPR | |
473 in the rhs of statement STMT into the conditional if that simplifies it. | |
474 Returns zero if no statement was changed, one if there were | |
475 changes and two if cfg_cleanup needs to run. | |
476 | |
477 This must be kept in sync with forward_propagate_into_gimple_cond. */ | |
478 | |
479 static int | |
480 forward_propagate_into_cond (gimple_stmt_iterator *gsi_p) | |
481 { | |
482 gimple stmt = gsi_stmt (*gsi_p); | |
483 int did_something = 0; | |
484 | |
485 do { | |
486 tree tmp = NULL_TREE; | |
487 tree cond = gimple_assign_rhs1 (stmt); | |
488 tree name, rhs0 = NULL_TREE, rhs1 = NULL_TREE; | |
489 gimple def_stmt; | |
490 bool single_use0_p = false, single_use1_p = false; | |
491 | |
492 /* We can do tree combining on SSA_NAME and comparison expressions. */ | |
493 if (COMPARISON_CLASS_P (cond) | |
494 && TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME) | |
495 { | |
496 /* For comparisons use the first operand, that is likely to | |
497 simplify comparisons against constants. */ | |
498 name = TREE_OPERAND (cond, 0); | |
499 def_stmt = get_prop_source_stmt (name, false, &single_use0_p); | |
500 if (def_stmt && can_propagate_from (def_stmt)) | |
501 { | |
502 tree op1 = TREE_OPERAND (cond, 1); | |
503 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt); | |
504 tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node, | |
505 rhs0, op1, !single_use0_p); | |
506 } | |
507 /* If that wasn't successful, try the second operand. */ | |
508 if (tmp == NULL_TREE | |
509 && TREE_CODE (TREE_OPERAND (cond, 1)) == SSA_NAME) | |
510 { | |
511 tree op0 = TREE_OPERAND (cond, 0); | |
512 name = TREE_OPERAND (cond, 1); | |
513 def_stmt = get_prop_source_stmt (name, false, &single_use1_p); | |
514 if (!def_stmt || !can_propagate_from (def_stmt)) | |
515 return did_something; | |
516 | |
517 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt); | |
518 tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node, | |
519 op0, rhs1, !single_use1_p); | |
520 } | |
521 /* If that wasn't successful either, try both operands. */ | |
522 if (tmp == NULL_TREE | |
523 && rhs0 != NULL_TREE | |
524 && rhs1 != NULL_TREE) | |
525 tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node, | |
526 rhs0, fold_convert (TREE_TYPE (rhs0), | |
527 rhs1), | |
528 !(single_use0_p && single_use1_p)); | |
529 } | |
530 else if (TREE_CODE (cond) == SSA_NAME) | |
531 { | |
532 name = cond; | |
533 def_stmt = get_prop_source_stmt (name, true, NULL); | |
534 if (def_stmt || !can_propagate_from (def_stmt)) | |
535 return did_something; | |
536 | |
537 rhs0 = gimple_assign_rhs1 (def_stmt); | |
538 tmp = combine_cond_expr_cond (NE_EXPR, boolean_type_node, rhs0, | |
539 build_int_cst (TREE_TYPE (rhs0), 0), | |
540 false); | |
541 } | |
542 | |
543 if (tmp) | |
544 { | |
545 if (dump_file && tmp) | |
546 { | |
547 fprintf (dump_file, " Replaced '"); | |
548 print_generic_expr (dump_file, cond, 0); | |
549 fprintf (dump_file, "' with '"); | |
550 print_generic_expr (dump_file, tmp, 0); | |
551 fprintf (dump_file, "'\n"); | |
552 } | |
553 | |
554 gimple_assign_set_rhs_from_tree (gsi_p, unshare_expr (tmp)); | |
555 stmt = gsi_stmt (*gsi_p); | |
556 update_stmt (stmt); | |
557 | |
558 /* Remove defining statements. */ | |
559 remove_prop_source_from_use (name, NULL); | |
560 | |
561 if (is_gimple_min_invariant (tmp)) | |
562 did_something = 2; | |
563 else if (did_something == 0) | |
564 did_something = 1; | |
565 | |
566 /* Continue combining. */ | |
567 continue; | |
568 } | |
569 | |
570 break; | |
571 } while (1); | |
572 | |
573 return did_something; | |
574 } | |
575 | |
576 /* We've just substituted an ADDR_EXPR into stmt. Update all the | |
577 relevant data structures to match. */ | |
578 | |
579 static void | |
580 tidy_after_forward_propagate_addr (gimple stmt) | |
581 { | |
582 /* We may have turned a trapping insn into a non-trapping insn. */ | |
583 if (maybe_clean_or_replace_eh_stmt (stmt, stmt) | |
584 && gimple_purge_dead_eh_edges (gimple_bb (stmt))) | |
585 cfg_changed = true; | |
586 | |
587 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR) | |
588 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt)); | |
589 | |
590 mark_symbols_for_renaming (stmt); | |
591 } | |
592 | |
593 /* DEF_RHS contains the address of the 0th element in an array. | |
594 USE_STMT uses type of DEF_RHS to compute the address of an | |
595 arbitrary element within the array. The (variable) byte offset | |
596 of the element is contained in OFFSET. | |
597 | |
598 We walk back through the use-def chains of OFFSET to verify that | |
599 it is indeed computing the offset of an element within the array | |
600 and extract the index corresponding to the given byte offset. | |
601 | |
602 We then try to fold the entire address expression into a form | |
603 &array[index]. | |
604 | |
605 If we are successful, we replace the right hand side of USE_STMT | |
606 with the new address computation. */ | |
607 | |
static bool
forward_propagate_addr_into_variable_array_index (tree offset,
						  tree def_rhs,
						  gimple_stmt_iterator *use_stmt_gsi)
{
  tree index;
  gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);

  /* Get the offset's defining statement. */
  offset_def = SSA_NAME_DEF_STMT (offset);

  /* Try to find an expression for a proper index.  This is either a
     multiplication expression by the element size or just the ssa name we came
     along in case the element size is one.  In that case, however, we do not
     allow multiplications because they can be computing index to a higher
     level dimension (PR 37861). */
  if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)))))
    {
      if (is_gimple_assign (offset_def)
	  && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
	return false;

      /* Element size 1: the byte offset IS the index.  */
      index = offset;
    }
  else
    {
      /* The statement which defines OFFSET before type conversion
	 must be a simple GIMPLE_ASSIGN. */
      if (!is_gimple_assign (offset_def))
	return false;

      /* The RHS of the statement which defines OFFSET must be a
	 multiplication of an object by the size of the array elements.
	 This implicitly verifies that the size of the array elements
	 is constant. */
      offset = gimple_assign_rhs1 (offset_def);
      if (gimple_assign_rhs_code (offset_def) != MULT_EXPR
	  || TREE_CODE (gimple_assign_rhs2 (offset_def)) != INTEGER_CST
	  || !simple_cst_equal (gimple_assign_rhs2 (offset_def),
				TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)))))
	return false;

      /* The first operand to the MULT_EXPR is the desired index. */
      index = offset;
    }

  /* Replace the pointer addition with array indexing: install a copy
     of DEF_RHS (&array[0]) as the rhs, then overwrite the array index
     operand (operand 1 of the ARRAY_REF under the ADDR_EXPR).  */
  gimple_assign_set_rhs_from_tree (use_stmt_gsi, unshare_expr (def_rhs));
  use_stmt = gsi_stmt (*use_stmt_gsi);
  TREE_OPERAND (TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0), 1)
    = index;

  /* That should have created gimple, so there is no need to
     record information to undo the propagation. */
  fold_stmt_inplace (use_stmt);
  tidy_after_forward_propagate_addr (use_stmt);
  return true;
}
666 | |
667 /* NAME is a SSA_NAME representing DEF_RHS which is of the form | |
668 ADDR_EXPR <whatever>. | |
669 | |
670 Try to forward propagate the ADDR_EXPR into the use USE_STMT. | |
671 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF | |
672 node or for recovery of array indexing from pointer arithmetic. | |
673 | |
674 Return true if the propagation was successful (the propagation can | |
675 be not totally successful, yet things may have been changed). */ | |
676 | |
static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
			       gimple_stmt_iterator *use_stmt_gsi,
			       bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  tree *rhsp, *lhsp;
  gimple use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Trivial cases.  The use statement could be a trivial copy or a
     useless conversion.  Recurse to the uses of the lhs as copyprop does
     not copy through different variant pointers and FRE does not catch
     all useless conversions.  Treat the case of a single-use name and
     a conversion to def_rhs type separate, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && ((rhs_code == SSA_NAME && rhs == name)
	  || CONVERT_EXPR_CODE_P (rhs_code)))
    {
      /* Only recurse if we don't deal with a single use or we cannot
	 do the propagation to the current statement.  In particular
	 we can end up with a conversion needed for a non-invariant
	 address which we cannot do in a single statement.  */
      if (!single_use_p
	  || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
	      && !is_gimple_min_invariant (def_rhs)))
	return forward_propagate_addr_expr (lhs, def_rhs);

      /* Single use: rewrite this statement in place, keeping an explicit
	 NOP_EXPR when the types differ.  */
      gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
	gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
      else
	gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
      return true;
    }

  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  lhsp = gimple_assign_lhs_ptr (use_stmt);
  while (handled_component_p (*lhsp))
    lhsp = &TREE_OPERAND (*lhsp, 0);
  lhs = *lhsp;

  /* Now see if the LHS node is an INDIRECT_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == INDIRECT_REF
      && TREE_OPERAND (lhs, 0) == name
      && may_propagate_address_into_dereference (def_rhs, lhs)
      && (lhsp != gimple_assign_lhs_ptr (use_stmt)
	  || useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
					TREE_TYPE (rhs))))
    {
      *lhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
      fold_stmt_inplace (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);

      /* Continue propagating into the RHS if this was not the only use.  */
      if (single_use_p)
	return true;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  rhsp = gimple_assign_rhs1_ptr (use_stmt);
  while (handled_component_p (*rhsp)
	 || TREE_CODE (*rhsp) == ADDR_EXPR)
    rhsp = &TREE_OPERAND (*rhsp, 0);
  rhs = *rhsp;

  /* Now see if the RHS node is an INDIRECT_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == INDIRECT_REF
      && TREE_OPERAND (rhs, 0) == name
      && may_propagate_address_into_dereference (def_rhs, rhs))
    {
      *rhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
      fold_stmt_inplace (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  /* Now see if the RHS node is an INDIRECT_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and try to
     create a VCE and fold the result.  */
  if (TREE_CODE (rhs) == INDIRECT_REF
      && TREE_OPERAND (rhs, 0) == name
      && TYPE_SIZE (TREE_TYPE (rhs))
      && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0)))
      /* Function decls should not be used for VCE either as it could be a
	 function descriptor that we want and not the actual function code.  */
      && TREE_CODE (TREE_OPERAND (def_rhs, 0)) != FUNCTION_DECL
      /* We should not convert volatile loads to non volatile loads.  */
      && !TYPE_VOLATILE (TREE_TYPE (rhs))
      && !TYPE_VOLATILE (TREE_TYPE (TREE_OPERAND (def_rhs, 0)))
      /* Both types must have equal (constant-comparable) sizes.  */
      && operand_equal_p (TYPE_SIZE (TREE_TYPE (rhs)),
			  TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))), 0))
    {
      tree def_rhs_base, new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
      new_rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), new_rhs);
      if (TREE_CODE (new_rhs) != VIEW_CONVERT_EXPR)
	{
	  /* If we have folded the VIEW_CONVERT_EXPR then the result is only
	     valid if we can replace the whole rhs of the use statement.  */
	  if (rhs != gimple_assign_rhs1 (use_stmt))
	    return false;
	  new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true, NULL,
					      true, GSI_NEW_STMT);
	  gimple_assign_set_rhs1 (use_stmt, new_rhs);
	  tidy_after_forward_propagate_addr (use_stmt);
	  return true;
	}
      /* If the defining rhs comes from an indirect reference, then do not
	 convert into a VIEW_CONVERT_EXPR.  */
      def_rhs_base = TREE_OPERAND (def_rhs, 0);
      while (handled_component_p (def_rhs_base))
	def_rhs_base = TREE_OPERAND (def_rhs_base, 0);
      if (!INDIRECT_REF_P (def_rhs_base))
	{
	  /* We may have arbitrary VIEW_CONVERT_EXPRs in a nested component
	     reference.  Place it there and fold the thing.  */
	  *rhsp = new_rhs;
	  fold_stmt_inplace (use_stmt);
	  tidy_after_forward_propagate_addr (use_stmt);
	  return true;
	}
    }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if (TREE_CODE (array_ref) != ARRAY_REF
      || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
      || !integer_zerop (TREE_OPERAND (array_ref, 1)))
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Try to optimize &x[0] p+ C where C is a multiple of the size
     of the elements in X into &x[C/element size].  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = maybe_fold_stmt_addition (gimple_expr_type (use_stmt),
					       array_ref, rhs2);
      if (new_rhs)
	{
	  gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
	  /* Refetch: the iterator may point at a new statement now.  */
	  use_stmt = gsi_stmt (*use_stmt_gsi);
	  update_stmt (use_stmt);
	  tidy_after_forward_propagate_addr (use_stmt);
	  return true;
	}
    }

  /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by
     converting a multiplication of an index by the size of the
     array elements, then the result is converted into the proper
     type for the arithmetic.  */
  if (TREE_CODE (rhs2) == SSA_NAME
      && useless_type_conversion_p (TREE_TYPE (name), TREE_TYPE (def_rhs))
      /* Avoid problems with IVopts creating PLUS_EXPRs with a
	 different type than their operands.  */
      && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
    return forward_propagate_addr_into_variable_array_index (rhs2, def_rhs,
							     use_stmt_gsi);
  return false;
}
856 | |
/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.
   Returns true, if all uses have been propagated into.  */

static bool
forward_propagate_addr_expr (tree name, tree rhs)
{
  /* Loop depth of NAME's definition; used to refuse propagation into
     deeper loop nests, which would add expression evaluations inside
     the loop.  */
  int stmt_loop_depth = gimple_bb (SSA_NAME_DEF_STMT (name))->loop_depth;
  imm_use_iterator iter;
  gimple use_stmt;
  /* Cleared as soon as one use cannot be propagated into; only when
     this stays true may the caller delete the defining statement.  */
  bool all = true;
  bool single_use_p = has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
	 there is nothing we can do.  */
      if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
	{
	  all = false;
	  continue;
	}

      /* If the use is in a deeper loop nest, then we do not want
	 to propagate the ADDR_EXPR into the loop as that is likely
	 adding expression evaluations into the loop.  */
      if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth)
	{
	  all = false;
	  continue;
	}

      /* Bracket the rewrite so the operand caches stay consistent
	 while USE_STMT is modified in place.  */
      push_stmt_changes (&use_stmt);

      {
	gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
						single_use_p);
	/* The helper may have replaced the statement; refetch it.  */
	use_stmt = gsi_stmt (gsi);
      }
      all &= result;

      pop_stmt_changes (&use_stmt);

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
	  && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
	  && (TREE_CODE (use_rhs) == SSA_NAME
	      || (CONVERT_EXPR_P (use_rhs)
		  && TREE_CODE (TREE_OPERAND (use_rhs, 0)) == SSA_NAME)))
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	  release_defs (use_stmt);
	  gsi_remove (&gsi, true);
	}
    }

  return all;
}
923 | |
/* Forward propagate the comparison defined in STMT like
   cond_1 = x CMP y to uses of the form
     a_1 = (T')cond_1
     a_1 = !cond_1
     a_1 = cond_1 != 0
   Returns true if stmt is now unused.  */

static bool
forward_propagate_comparison (gimple stmt)
{
  tree name = gimple_assign_lhs (stmt);
  gimple use_stmt;
  /* Replacement expression for the use statement's RHS, or NULL_TREE
     if no propagation applies.  */
  tree tmp = NULL_TREE;

  /* Don't propagate ssa names that occur in abnormal phis.  */
  if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
      || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
    return false;

  /* Do not un-cse comparisons.  But propagate through copies.  */
  use_stmt = get_prop_dest_stmt (name, &name);
  if (!use_stmt)
    return false;

  /* Conversion of the condition result to another integral type.  */
  if (is_gimple_assign (use_stmt)
      && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt))
	  || TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
	     == tcc_comparison
	  || gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
      && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (use_stmt))))
    {
      tree lhs = gimple_assign_lhs (use_stmt);

      /* We can propagate the condition into a conversion.  */
      if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt)))
	{
	  /* Avoid using fold here as that may create a COND_EXPR with
	     non-boolean condition as canonical form.  */
	  tmp = build2 (gimple_assign_rhs_code (stmt), TREE_TYPE (lhs),
			gimple_assign_rhs1 (stmt), gimple_assign_rhs2 (stmt));
	}
      /* We can propagate the condition into X op CST where op
	 is EQ_EXPR or NE_EXPR and CST is either one or zero.  */
      else if (TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
	       == tcc_comparison
	       && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == SSA_NAME
	       && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
	{
	  enum tree_code code = gimple_assign_rhs_code (use_stmt);
	  tree cst = gimple_assign_rhs2 (use_stmt);
	  tree cond;

	  /* Rebuild the original comparison ...  */
	  cond = build2 (gimple_assign_rhs_code (stmt),
			 TREE_TYPE (cst),
			 gimple_assign_rhs1 (stmt),
			 gimple_assign_rhs2 (stmt));

	  /* ... and try to fold COND op CST into a single comparison.  */
	  tmp = combine_cond_expr_cond (code, TREE_TYPE (lhs), cond, cst, false);
	  if (tmp == NULL_TREE)
	    return false;
	}
      /* We can propagate the condition into a statement that
	 computes the logical negation of the comparison result.  */
      else if (gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
	{
	  tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	  bool nans = HONOR_NANS (TYPE_MODE (type));
	  enum tree_code code;
	  code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
	  /* Some comparisons cannot be inverted (e.g. in the presence
	     of NaNs); give up then.  */
	  if (code == ERROR_MARK)
	    return false;

	  tmp = build2 (code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
			gimple_assign_rhs2 (stmt));
	}
      else
	return false;

      /* Install the combined expression as the new RHS of USE_STMT.  */
      {
	gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
	use_stmt = gsi_stmt (gsi);
	update_stmt (use_stmt);
      }

      /* Remove defining statements.  */
      remove_prop_source_from_use (name, stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  tree old_rhs = rhs_to_tree (TREE_TYPE (gimple_assign_lhs (stmt)),
				      stmt);
	  fprintf (dump_file, " Replaced '");
	  print_generic_expr (dump_file, old_rhs, dump_flags);
	  fprintf (dump_file, "' with '");
	  print_generic_expr (dump_file, tmp, dump_flags);
	  fprintf (dump_file, "'\n");
	}

      return true;
    }

  return false;
}
1031 | |
1032 /* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y. | |
1033 If so, we can change STMT into lhs = y which can later be copy | |
1034 propagated. Similarly for negation. | |
1035 | |
1036 This could trivially be formulated as a forward propagation | |
1037 to immediate uses. However, we already had an implementation | |
1038 from DOM which used backward propagation via the use-def links. | |
1039 | |
1040 It turns out that backward propagation is actually faster as | |
1041 there's less work to do for each NOT/NEG expression we find. | |
1042 Backwards propagation needs to look at the statement in a single | |
1043 backlink. Forward propagation needs to look at potentially more | |
1044 than one forward link. */ | |
1045 | |
1046 static void | |
1047 simplify_not_neg_expr (gimple_stmt_iterator *gsi_p) | |
1048 { | |
1049 gimple stmt = gsi_stmt (*gsi_p); | |
1050 tree rhs = gimple_assign_rhs1 (stmt); | |
1051 gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs); | |
1052 | |
1053 /* See if the RHS_DEF_STMT has the same form as our statement. */ | |
1054 if (is_gimple_assign (rhs_def_stmt) | |
1055 && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt)) | |
1056 { | |
1057 tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt); | |
1058 | |
1059 /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME. */ | |
1060 if (TREE_CODE (rhs_def_operand) == SSA_NAME | |
1061 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand)) | |
1062 { | |
1063 gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand); | |
1064 stmt = gsi_stmt (*gsi_p); | |
1065 update_stmt (stmt); | |
1066 } | |
1067 } | |
1068 } | |
1069 | |
1070 /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of | |
1071 the condition which we may be able to optimize better. */ | |
1072 | |
1073 static void | |
1074 simplify_gimple_switch (gimple stmt) | |
1075 { | |
1076 tree cond = gimple_switch_index (stmt); | |
1077 tree def, to, ti; | |
1078 gimple def_stmt; | |
1079 | |
1080 /* The optimization that we really care about is removing unnecessary | |
1081 casts. That will let us do much better in propagating the inferred | |
1082 constant at the switch target. */ | |
1083 if (TREE_CODE (cond) == SSA_NAME) | |
1084 { | |
1085 def_stmt = SSA_NAME_DEF_STMT (cond); | |
1086 if (is_gimple_assign (def_stmt)) | |
1087 { | |
1088 if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR) | |
1089 { | |
1090 int need_precision; | |
1091 bool fail; | |
1092 | |
1093 def = gimple_assign_rhs1 (def_stmt); | |
1094 | |
1095 #ifdef ENABLE_CHECKING | |
1096 /* ??? Why was Jeff testing this? We are gimple... */ | |
1097 gcc_assert (is_gimple_val (def)); | |
1098 #endif | |
1099 | |
1100 to = TREE_TYPE (cond); | |
1101 ti = TREE_TYPE (def); | |
1102 | |
1103 /* If we have an extension that preserves value, then we | |
1104 can copy the source value into the switch. */ | |
1105 | |
1106 need_precision = TYPE_PRECISION (ti); | |
1107 fail = false; | |
1108 if (! INTEGRAL_TYPE_P (ti)) | |
1109 fail = true; | |
1110 else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti)) | |
1111 fail = true; | |
1112 else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti)) | |
1113 need_precision += 1; | |
1114 if (TYPE_PRECISION (to) < need_precision) | |
1115 fail = true; | |
1116 | |
1117 if (!fail) | |
1118 { | |
1119 gimple_switch_set_index (stmt, def); | |
1120 update_stmt (stmt); | |
1121 } | |
1122 } | |
1123 } | |
1124 } | |
1125 } | |
1126 | |
/* Main entry point for the forward propagation optimizer.  Walks every
   statement of every basic block, dispatching on statement shape:
   address assignments, NOT/NEG chains, COND_EXPR right-hand sides,
   comparisons, GIMPLE_SWITCHes and GIMPLE_CONDs.  Returns TODO flags
   for the pass manager.  */

static unsigned int
tree_ssa_forward_propagate_single_use_vars (void)
{
  basic_block bb;
  unsigned int todoflags = 0;

  cfg_changed = false;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      /* Note we update GSI within the loop as necessary.  Branches
	 that remove the current statement must NOT also call gsi_next,
	 since gsi_remove already advances the iterator.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
	{
	  gimple stmt = gsi_stmt (gsi);

	  /* If this statement sets an SSA_NAME to an address,
	     try to propagate the address into the uses of the SSA_NAME.  */
	  if (is_gimple_assign (stmt))
	    {
	      tree lhs = gimple_assign_lhs (stmt);
	      tree rhs = gimple_assign_rhs1 (stmt);

	      if (TREE_CODE (lhs) != SSA_NAME)
		{
		  gsi_next (&gsi);
		  continue;
		}

	      if (gimple_assign_rhs_code (stmt) == ADDR_EXPR
		  /* Handle pointer conversions on invariant addresses
		     as well, as this is valid gimple.  */
		  || (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
		      && TREE_CODE (rhs) == ADDR_EXPR
		      && POINTER_TYPE_P (TREE_TYPE (lhs))))
		{
		  STRIP_NOPS (rhs);
		  if (!stmt_references_abnormal_ssa_name (stmt)
		      && forward_propagate_addr_expr (lhs, rhs))
		    {
		      /* Every use was rewritten, so the definition is
			 now dead and can be removed.  */
		      release_defs (stmt);
		      todoflags |= TODO_remove_unused_locals;
		      gsi_remove (&gsi, true);
		    }
		  else
		    gsi_next (&gsi);
		}
	      else if ((gimple_assign_rhs_code (stmt) == BIT_NOT_EXPR
			|| gimple_assign_rhs_code (stmt) == NEGATE_EXPR)
		       && TREE_CODE (rhs) == SSA_NAME)
		{
		  simplify_not_neg_expr (&gsi);
		  gsi_next (&gsi);
		}
	      else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
		{
		  /* In this case the entire COND_EXPR is in rhs1.  */
		  int did_something;
		  /* Defer overflow warnings until we know whether the
		     transformation actually happened.  */
		  fold_defer_overflow_warnings ();
		  did_something = forward_propagate_into_cond (&gsi);
		  stmt = gsi_stmt (gsi);
		  if (did_something == 2)
		    cfg_changed = true;
		  fold_undefer_overflow_warnings (!TREE_NO_WARNING (rhs)
		    && did_something, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
		  gsi_next (&gsi);
		}
	      else if (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		       == tcc_comparison)
		{
		  if (forward_propagate_comparison (stmt))
		    {
		      release_defs (stmt);
		      todoflags |= TODO_remove_unused_locals;
		      gsi_remove (&gsi, true);
		    }
		  else
		    gsi_next (&gsi);
		}
	      else
		gsi_next (&gsi);
	    }
	  else if (gimple_code (stmt) == GIMPLE_SWITCH)
	    {
	      simplify_gimple_switch (stmt);
	      gsi_next (&gsi);
	    }
	  else if (gimple_code (stmt) == GIMPLE_COND)
	    {
	      int did_something;
	      fold_defer_overflow_warnings ();
	      did_something = forward_propagate_into_gimple_cond (stmt);
	      if (did_something == 2)
		cfg_changed = true;
	      fold_undefer_overflow_warnings (did_something, stmt,
					      WARN_STRICT_OVERFLOW_CONDITIONAL);
	      gsi_next (&gsi);
	    }
	  else
	    gsi_next (&gsi);
	}
    }

  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;
  return todoflags;
}
1237 | |
1238 | |
/* Gate function for the forward propagation pass: unconditionally
   enabled.  */

static bool
gate_forwprop (void)
{
  return true;
}
1244 | |
/* Pass descriptor registering the forward propagation pass with the
   pass manager.  Requires a CFG and SSA form; asks for SSA update and
   verification after it runs.  */

struct gimple_opt_pass pass_forwprop =
{
 {
  GIMPLE_PASS,
  "forwprop",			/* name */
  gate_forwprop,		/* gate */
  tree_ssa_forward_propagate_single_use_vars,	/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_TREE_FORWPROP,		/* tv_id */
  PROP_cfg | PROP_ssa,		/* properties_required */
  0,				/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_dump_func
  | TODO_ggc_collect
  | TODO_update_ssa
  | TODO_verify_ssa		/* todo_flags_finish */
 }
};
1266 |