comparison gcc/tree-ssa-dse.c @ 55:77e2b8dfacca gcc-4.4.5

update it from 4.4.3 to 4.5.0
author ryoma <e075725@ie.u-ryukyu.ac.jp>
date Fri, 12 Feb 2010 23:39:51 +0900
parents a06113de4d67
children b7f97abdc517
comparison: 52:c156f1bd5cd9 (before) vs. 55:77e2b8dfacca (after)
@@ -43,22 +43,22 @@
    case the earlier store can be deleted.
 
    In our SSA + virtual operand world we use immediate uses of virtual
    operands to detect dead stores.  If a store's virtual definition
    is used precisely once by a later store to the same location which
    post dominates the first store, then the first store is dead.
 
    The single use of the store's virtual definition ensures that
    there are no intervening aliased loads and the requirement that
    the second store post dominate the first ensures that if the earlier
    store executes, then the later stores will execute before the function
    exits.
 
    It may help to think of this as first moving the earlier store to
    the point immediately before the later store.  Again, the single
    use of the virtual definition and the post-dominance relationship
    ensure that such movement would be safe.  Clearly if there are
    back to back stores, then the second is redundant.
 
    Reviewing section 10.7.2 in Morgan's "Building an Optimizing Compiler"
    may also help in understanding this code since it discusses the
    relationship between dead store and redundant load elimination.  In
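Illustration (hypothetical example, not part of the file; f and p are made-up
names): the test described in the header comment fires on code like this,
where the first store's single virtual use is the second store, which
post-dominates it, so the first store is dead.

    void
    f (int *p)
    {
      *p = 1;   /* dead: no intervening load of *p, and the next store
                   executes on every path to function exit */
      *p = 2;
    }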
@@ -75,34 +75,24 @@
    unique ID in this bitmap.  */
   bitmap stores;
 };
 
 /* We allocate a bitmap-per-block for stores which are encountered
    during the scan of that block.  This allows us to restore the
    global bitmap of stores when we finish processing a block.  */
 struct dse_block_local_data
 {
   bitmap stores;
-};
-
-/* Basic blocks of the potentially dead store and the following
-   store, for memory_address_same.  */
-struct address_walk_data
-{
-  basic_block store1_bb, store2_bb;
 };
 
 static bool gate_dse (void);
 static unsigned int tree_ssa_dse (void);
 static void dse_initialize_block_local_data (struct dom_walk_data *,
                                              basic_block,
                                              bool);
-static void dse_optimize_stmt (struct dom_walk_data *,
-                               basic_block,
-                               gimple_stmt_iterator);
-static void dse_record_phis (struct dom_walk_data *, basic_block);
-static void dse_finalize_block (struct dom_walk_data *, basic_block);
+static void dse_enter_block (struct dom_walk_data *, basic_block);
+static void dse_leave_block (struct dom_walk_data *, basic_block);
 static void record_voperand_set (bitmap, bitmap *, unsigned int);
 
 /* Returns uid of statement STMT.  */
 
 static unsigned
@@ -149,221 +139,115 @@
       if (bd->stores)
         bitmap_clear (bd->stores);
     }
 }
 
-/* Helper function for memory_address_same via walk_tree.  Returns
-   non-NULL if it finds an SSA_NAME which is part of the address,
-   such that the definition of the SSA_NAME post-dominates the store
-   we want to delete but not the store that we believe makes it
-   redundant.  This indicates that the address may change between
-   the two stores.  */
-
-static tree
-memory_ssa_name_same (tree *expr_p, int *walk_subtrees ATTRIBUTE_UNUSED,
-                      void *data)
-{
-  struct address_walk_data *walk_data = (struct address_walk_data *) data;
-  tree expr = *expr_p;
-  gimple def_stmt;
-  basic_block def_bb;
-
-  if (TREE_CODE (expr) != SSA_NAME)
-    return NULL_TREE;
-
-  /* If we've found a default definition, then there's no problem.  Both
-     stores will post-dominate it.  And def_bb will be NULL.  */
-  if (SSA_NAME_IS_DEFAULT_DEF (expr))
-    return NULL_TREE;
-
-  def_stmt = SSA_NAME_DEF_STMT (expr);
-  def_bb = gimple_bb (def_stmt);
-
-  /* DEF_STMT must dominate both stores.  So if it is in the same
-     basic block as one, it does not post-dominate that store.  */
-  if (walk_data->store1_bb != def_bb
-      && dominated_by_p (CDI_POST_DOMINATORS, walk_data->store1_bb, def_bb))
-    {
-      if (walk_data->store2_bb == def_bb
-          || !dominated_by_p (CDI_POST_DOMINATORS, walk_data->store2_bb,
-                              def_bb))
-        /* Return non-NULL to stop the walk.  */
-        return *expr_p;
-    }
-
-  return NULL_TREE;
-}
-
-/* Return TRUE if the destination memory address in STORE1 and STORE2
-   might be modified after STORE1, before control reaches STORE2.  */
+/* A helper of dse_optimize_stmt.
+   Given a GIMPLE_ASSIGN in STMT, find a candidate statement *USE_STMT
+   that may prove STMT to be dead.
+   Return TRUE if such a statement is found, otherwise FALSE.  */
 
 static bool
-memory_address_same (gimple store1, gimple store2)
+dse_possible_dead_store_p (gimple stmt, gimple *use_stmt)
 {
-  struct address_walk_data walk_data;
-
-  walk_data.store1_bb = gimple_bb (store1);
-  walk_data.store2_bb = gimple_bb (store2);
-
-  return (walk_tree (gimple_assign_lhs_ptr (store1), memory_ssa_name_same,
-                     &walk_data, NULL)
-          == NULL);
-}
-
-/* Return true if there is a stmt that kills the lhs of STMT and is in the
-   virtual def-use chain of STMT without a use in between the kill and STMT.
-   Returns false if no such stmt is found.
-   *FIRST_USE_P is set to the first use of the single virtual def of
-   STMT.  *USE_P is set to the vop killed by *USE_STMT.  */
-
-static bool
-get_kill_of_stmt_lhs (gimple stmt,
-                      use_operand_p * first_use_p,
-                      use_operand_p * use_p, gimple * use_stmt)
-{
-  tree lhs;
-
-  gcc_assert (is_gimple_assign (stmt));
-
-  lhs = gimple_assign_lhs (stmt);
-
-  /* We now walk the chain of single uses of the single VDEFs.
-     We succeeded finding a kill if the lhs of the use stmt is
-     equal to the original lhs.  We can keep walking to the next
-     use if there are no possible uses of the original lhs in
-     the stmt.  */
+  gimple temp;
+  unsigned cnt = 0;
+
+  *use_stmt = NULL;
+
+  /* Find the first dominated statement that clobbers (part of) the
+     memory STMT stores to, with no intermediate statement that may
+     use part of that memory.  That is, find a store that may prove
+     STMT to be a dead store.  */
+  temp = stmt;
   do
     {
-      tree use_lhs;
-      def_operand_p def_p;
-
-      /* The stmt must have a single VDEF.  */
-      def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_VDEF);
-      if (def_p == NULL_DEF_OPERAND_P)
+      gimple prev, use_stmt;
+      imm_use_iterator ui;
+      bool fail = false;
+      tree defvar;
+
+      /* Limit stmt walking to be linear in the number of possibly
+         dead stores.  */
+      if (++cnt > 256)
         return false;
 
-      /* Get the single immediate use of the def.  */
-      if (!single_imm_use (DEF_FROM_PTR (def_p), first_use_p, &stmt))
-        return false;
-      first_use_p = use_p;
-
-      /* If there are possible hidden uses, give up.  */
-      if (!gimple_assign_single_p (stmt)
-          || (TREE_CODE (gimple_assign_rhs1 (stmt)) != SSA_NAME
-              && !is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
-        return false;
-
-      /* If the use stmts lhs matches the original lhs we have
-         found the kill, otherwise continue walking.  */
-      use_lhs = gimple_assign_lhs (stmt);
-      if (operand_equal_p (use_lhs, lhs, 0))
+      if (gimple_code (temp) == GIMPLE_PHI)
+        defvar = PHI_RESULT (temp);
+      else
+        defvar = gimple_vdef (temp);
+      prev = temp;
+      temp = NULL;
+      FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
         {
-          *use_stmt = stmt;
-          return true;
-        }
-    }
-  while (1);
-}
-
-/* A helper of dse_optimize_stmt.
-   Given a GIMPLE_ASSIGN in STMT, check that each VDEF has one
-   use, and that one use is another VDEF clobbering the first one.
-
-   Return TRUE if the above conditions are met, otherwise FALSE.  */
-
-static bool
-dse_possible_dead_store_p (gimple stmt,
-                           use_operand_p *first_use_p,
-                           use_operand_p *use_p,
-                           gimple *use_stmt,
-                           struct dse_global_data *dse_gd,
-                           struct dse_block_local_data *bd)
-{
-  ssa_op_iter op_iter;
-  bool fail = false;
-  def_operand_p var1;
-  vuse_vec_p vv;
-  tree defvar = NULL_TREE;
-  tree prev_defvar = NULL_TREE;
-  gimple temp;
-
-  /* We want to verify that each virtual definition in STMT has
-     precisely one use and that all the virtual definitions are
-     used by the same single statement.  When complete, we
-     want USE_STMT to refer to the one statement which uses
-     all of the virtual definitions from STMT.  */
-  *use_stmt = NULL;
-  FOR_EACH_SSA_VDEF_OPERAND (var1, vv, stmt, op_iter)
-    {
-      defvar = DEF_FROM_PTR (var1);
-
-      /* If this virtual def does not have precisely one use, then
-         we will not be able to eliminate STMT.  */
-      if (!has_single_use (defvar))
-        {
-          fail = true;
-          break;
-        }
-
-      /* Get the one and only immediate use of DEFVAR.  */
-      single_imm_use (defvar, use_p, &temp);
-      gcc_assert (*use_p != NULL_USE_OPERAND_P);
-      *first_use_p = *use_p;
-
-      /* ??? If we hit a GIMPLE_PHI we could skip to the PHI_RESULT uses.
-         Don't bother to do that for now.  */
-      if (gimple_code (temp) == GIMPLE_PHI)
-        {
-          fail = true;
-          break;
-        }
-
-      /* In the case of memory partitions, we may get:
-
-         # MPT.764_162 = VDEF <MPT.764_161(D)>
-         x = {};
-         # MPT.764_167 = VDEF <MPT.764_162>
-         y = {};
-
-         So we must make sure we're talking about the same LHS.  */
-      if (is_gimple_assign (temp))
-        {
-          tree base1 = get_base_address (gimple_assign_lhs (stmt));
-          tree base2 = get_base_address (gimple_assign_lhs (temp));
-
-          while (base1 && INDIRECT_REF_P (base1))
-            base1 = TREE_OPERAND (base1, 0);
-          while (base2 && INDIRECT_REF_P (base2))
-            base2 = TREE_OPERAND (base2, 0);
-
-          if (base1 != base2)
+          cnt++;
+
+          /* In simple cases we can look through PHI nodes, but we
+             have to be careful with loops and with memory references
+             containing operands that are also operands of PHI nodes.
+             See gcc.c-torture/execute/20051110-*.c.  */
+          if (gimple_code (use_stmt) == GIMPLE_PHI)
+            {
+              if (temp
+                  /* We can look through PHIs to post-dominated regions,
+                     provided the use does not also dominate prev
+                     (in which case it would be a loop PHI with the use
+                     in a latch block).  */
+                  || gimple_bb (prev) == gimple_bb (use_stmt)
+                  || !dominated_by_p (CDI_POST_DOMINATORS,
+                                      gimple_bb (prev), gimple_bb (use_stmt))
+                  || dominated_by_p (CDI_DOMINATORS,
+                                     gimple_bb (prev), gimple_bb (use_stmt)))
+                {
+                  fail = true;
+                  BREAK_FROM_IMM_USE_STMT (ui);
+                }
+              temp = use_stmt;
+            }
+          /* If the statement is a use, the store is not dead.  */
+          else if (ref_maybe_used_by_stmt_p (use_stmt,
+                                             gimple_assign_lhs (stmt)))
            {
              fail = true;
-              break;
+              BREAK_FROM_IMM_USE_STMT (ui);
            }
+          /* If this is a store, remember it, or bail out if we have
+             multiple ones (they will be in different CFG parts then).  */
+          else if (gimple_vdef (use_stmt))
+            {
+              if (temp)
+                {
+                  fail = true;
+                  BREAK_FROM_IMM_USE_STMT (ui);
+                }
+              temp = use_stmt;
            }
        }
 
-      /* If the immediate use of DEF_VAR is not the same as the
-         previously find immediate uses, then we will not be able
-         to eliminate STMT.  */
-      if (*use_stmt == NULL)
+      if (fail)
+        return false;
+
+      /* If we didn't find any definition this means the store is dead
+         if it isn't a store to globally reachable memory.  In this case
+         just pretend the stmt makes itself dead.  Otherwise fail.  */
+      if (!temp)
        {
-          *use_stmt = temp;
-          prev_defvar = defvar;
-        }
-      else if (temp != *use_stmt)
-        {
-          fail = true;
+          if (is_hidden_global_store (stmt))
+            return false;
+
+          temp = stmt;
          break;
        }
    }
-
-  if (fail)
-    {
-      record_voperand_set (dse_gd->stores, &bd->stores, gimple_uid (stmt));
-      return false;
-    }
+  /* We deliberately stop on clobbering statements and not only on
+     killing ones to make walking cheaper.  Otherwise we can just
+     continue walking until both stores have equal reference trees.  */
+  while (!stmt_may_clobber_ref_p (temp, gimple_assign_lhs (stmt)));
+
+  if (!is_gimple_assign (temp))
+    return false;
+
+  *use_stmt = temp;
 
   return true;
 }
 
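Two hypothetical C fragments (illustration only, not from the patch; g, h and
S are made-up names) sketch what the new walk accepts and where it
deliberately gives up.  First, the PHI case: a conditional store whose only
virtual use is the virtual PHI at the join block, through which the walk may
step to the post-dominating kill:

    void
    g (int *p, int flag)
    {
      if (flag)
        *p = 1;     /* dead: its virtual definition is used only by the
                       virtual PHI at the join block below */
      *p = 2;       /* post-dominates and kills the conditional store */
    }

Second, stopping on a clobber that is not an equal store: the walk below ends
at the whole-struct assignment, whose left-hand side is not operand_equal_p
to the field store, so nothing is deleted even though the field store is in
fact dead.  That imprecision is the price of the cheaper walk described in
the comment above the while condition:

    struct S { int a, b; };

    void
    h (struct S *p, struct S s)
    {
      p->a = 1;     /* walk stops at the clobber below; no deletion */
      *p = s;       /* clobbers p->a but is not an equal reference */
    }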
@@ -378,24 +262,19 @@
    operands to detect dead stores.  If a store's virtual definition
    is used precisely once by a later store to the same location which
    post dominates the first store, then the first store is dead.  */
 
 static void
-dse_optimize_stmt (struct dom_walk_data *walk_data,
-                   basic_block bb ATTRIBUTE_UNUSED,
+dse_optimize_stmt (struct dse_global_data *dse_gd,
+                   struct dse_block_local_data *bd,
                    gimple_stmt_iterator gsi)
 {
-  struct dse_block_local_data *bd
-    = (struct dse_block_local_data *)
-      VEC_last (void_p, walk_data->block_data_stack);
-  struct dse_global_data *dse_gd
-    = (struct dse_global_data *) walk_data->global_data;
   gimple stmt = gsi_stmt (gsi);
 
   /* If this statement has no virtual defs, then there is nothing
      to do.  */
-  if (ZERO_SSA_OPERANDS (stmt, SSA_OP_VDEF))
+  if (!gimple_vdef (stmt))
     return;
 
   /* We know we have virtual definitions.  If this is a GIMPLE_ASSIGN
      that's not also a function call, then record it into our table.  */
   if (is_gimple_call (stmt) && gimple_call_fndecl (stmt))
@@ -404,55 +283,25 @@
   if (gimple_has_volatile_ops (stmt))
     return;
 
   if (is_gimple_assign (stmt))
     {
-      use_operand_p first_use_p = NULL_USE_OPERAND_P;
-      use_operand_p use_p = NULL;
       gimple use_stmt;
 
-      if (!dse_possible_dead_store_p (stmt, &first_use_p, &use_p, &use_stmt,
-                                      dse_gd, bd))
+      record_voperand_set (dse_gd->stores, &bd->stores, gimple_uid (stmt));
+
+      if (!dse_possible_dead_store_p (stmt, &use_stmt))
         return;
-
-      /* If we have precisely one immediate use at this point, then we may
-         have found redundant store.  Make sure that the stores are to
-         the same memory location.  This includes checking that any
-         SSA-form variables in the address will have the same values.  */
-      if (use_p != NULL_USE_OPERAND_P
-          && bitmap_bit_p (dse_gd->stores, get_stmt_uid (use_stmt))
-          && !operand_equal_p (gimple_assign_lhs (stmt),
-                               gimple_assign_lhs (use_stmt), 0)
-          && memory_address_same (stmt, use_stmt))
-        {
-          /* If we have precisely one immediate use at this point, but
-             the stores are not to the same memory location then walk the
-             virtual def-use chain to get the stmt which stores to that same
-             memory location.  */
-          if (!get_kill_of_stmt_lhs (stmt, &first_use_p, &use_p, &use_stmt))
-            {
-              record_voperand_set (dse_gd->stores, &bd->stores,
-                                   gimple_uid (stmt));
-              return;
-            }
-        }
 
       /* If we have precisely one immediate use at this point and the
          stores are to the same memory location or there is a chain of
         virtual uses from stmt and the stmt which stores to that same
         memory location, then we may have found redundant store.  */
-      if (use_p != NULL_USE_OPERAND_P
-          && bitmap_bit_p (dse_gd->stores, get_stmt_uid (use_stmt))
+      if (bitmap_bit_p (dse_gd->stores, get_stmt_uid (use_stmt))
          && operand_equal_p (gimple_assign_lhs (stmt),
-                              gimple_assign_lhs (use_stmt), 0)
-          && memory_address_same (stmt, use_stmt))
+                              gimple_assign_lhs (use_stmt), 0))
        {
-          ssa_op_iter op_iter;
-          def_operand_p var1;
-          vuse_vec_p vv;
-          tree stmt_lhs;
-
          /* If use_stmt is or might be a nop assignment, e.g. for
             struct { ... } S a, b, *p; ...
             b = a; b = b;
             or
             b = a; b = *p; where p might be &b,
@@ -460,79 +309,69 @@
             *p = a; *p = b; where p might be &b,
             or
             *p = *u; *p = *v; where p might be v, then USE_STMT
             acts as a use as well as definition, so store in STMT
             is not dead.  */
-          if (gimple_loaded_syms (use_stmt)
-              && bitmap_intersect_p (gimple_loaded_syms (use_stmt),
-                                     gimple_stored_syms (use_stmt)))
-            {
-              record_voperand_set (dse_gd->stores, &bd->stores,
-                                   gimple_uid (stmt));
-              return;
-            }
+          if (stmt != use_stmt
+              && !is_gimple_reg (gimple_assign_rhs1 (use_stmt))
+              && !is_gimple_min_invariant (gimple_assign_rhs1 (use_stmt))
+              /* ??? Should {} be invariant?  */
+              && gimple_assign_rhs_code (use_stmt) != CONSTRUCTOR
+              && refs_may_alias_p (gimple_assign_lhs (use_stmt),
+                                   gimple_assign_rhs1 (use_stmt)))
+            return;
 
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "  Deleted dead store '");
              print_gimple_stmt (dump_file, gsi_stmt (gsi), dump_flags, 0);
              fprintf (dump_file, "'\n");
            }
 
-          /* Then we need to fix the operand of the consuming stmt.  */
-          stmt_lhs = USE_FROM_PTR (first_use_p);
-          FOR_EACH_SSA_VDEF_OPERAND (var1, vv, stmt, op_iter)
-            {
-              tree usevar;
-              gimple temp;
-
-              single_imm_use (DEF_FROM_PTR (var1), &use_p, &temp);
-              gcc_assert (VUSE_VECT_NUM_ELEM (*vv) == 1);
-              usevar = VUSE_ELEMENT_VAR (*vv, 0);
-              SET_USE (use_p, usevar);
-
-              /* Make sure we propagate the ABNORMAL bit setting.  */
-              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (stmt_lhs))
-                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (usevar) = 1;
-            }
+          unlink_stmt_vdef (stmt);
+
 
          /* Remove the dead store.  */
          gsi_remove (&gsi, true);
 
          /* And release any SSA_NAMEs set in this statement back to the
             SSA_NAME manager.  */
          release_defs (stmt);
        }
-
-      record_voperand_set (dse_gd->stores, &bd->stores, gimple_uid (stmt));
    }
 }
 
 /* Record that we have seen the PHIs at the start of BB which correspond
    to virtual operands.  */
 static void
-dse_record_phis (struct dom_walk_data *walk_data, basic_block bb)
+dse_record_phi (struct dse_global_data *dse_gd,
+                struct dse_block_local_data *bd,
+                gimple phi)
+{
+  if (!is_gimple_reg (gimple_phi_result (phi)))
+    record_voperand_set (dse_gd->stores, &bd->stores, get_stmt_uid (phi));
+}
+
+static void
+dse_enter_block (struct dom_walk_data *walk_data, basic_block bb)
 {
   struct dse_block_local_data *bd
     = (struct dse_block_local_data *)
       VEC_last (void_p, walk_data->block_data_stack);
   struct dse_global_data *dse_gd
     = (struct dse_global_data *) walk_data->global_data;
-  gimple phi;
   gimple_stmt_iterator gsi;
 
+  for (gsi = gsi_last (bb_seq (bb)); !gsi_end_p (gsi); gsi_prev (&gsi))
+    dse_optimize_stmt (dse_gd, bd, gsi);
   for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
-    {
-      phi = gsi_stmt (gsi);
-      if (!is_gimple_reg (gimple_phi_result (phi)))
-        record_voperand_set (dse_gd->stores, &bd->stores, get_stmt_uid (phi));
-    }
-}
-
-static void
-dse_finalize_block (struct dom_walk_data *walk_data,
-                    basic_block bb ATTRIBUTE_UNUSED)
+    dse_record_phi (dse_gd, bd, gsi_stmt (gsi));
+}
+
+static void
+dse_leave_block (struct dom_walk_data *walk_data,
+                 basic_block bb ATTRIBUTE_UNUSED)
 {
   struct dse_block_local_data *bd
     = (struct dse_block_local_data *)
      VEC_last (void_p, walk_data->block_data_stack);
   struct dse_global_data *dse_gd
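The rewritten guard above (refs_may_alias_p on the killing statement's
operands) protects the cases listed in the comment, where the "kill" also
reads the bytes just written.  A hypothetical instance of the
*p = *u; *p = *v; pattern (k, S, u and v are made-up names):

    struct S { int i; };

    void
    k (struct S *p, struct S *u, struct S *v)
    {
      *p = *u;      /* must NOT be deleted: if v == p, the copy below
                       first reads the value this store wrote */
      *p = *v;
    }

Because refs_may_alias_p (*p, *v) holds, the new check returns before the
first store is removed.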
@@ -562,23 +401,18 @@
   /* We might consider making this a property of each pass so that it
      can be [re]computed on an as-needed basis.  Particularly since
      this pass could be seen as an extension of DCE which needs post
      dominators.  */
   calculate_dominance_info (CDI_POST_DOMINATORS);
+  calculate_dominance_info (CDI_DOMINATORS);
 
   /* Dead store elimination is fundamentally a walk of the post-dominator
      tree and a backwards walk of statements within each block.  */
-  walk_data.walk_stmts_backward = true;
   walk_data.dom_direction = CDI_POST_DOMINATORS;
   walk_data.initialize_block_local_data = dse_initialize_block_local_data;
-  walk_data.before_dom_children_before_stmts = NULL;
-  walk_data.before_dom_children_walk_stmts = dse_optimize_stmt;
-  walk_data.before_dom_children_after_stmts = dse_record_phis;
-  walk_data.after_dom_children_before_stmts = NULL;
-  walk_data.after_dom_children_walk_stmts = NULL;
-  walk_data.after_dom_children_after_stmts = dse_finalize_block;
-  walk_data.interesting_blocks = NULL;
+  walk_data.before_dom_children = dse_enter_block;
+  walk_data.after_dom_children = dse_leave_block;
 
   walk_data.block_local_data_size = sizeof (struct dse_block_local_data);
 
   /* This is the main hash table for the dead store elimination pass.  */
   dse_gd.stores = BITMAP_ALLOC (NULL);
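For context: the callback fields set above are consumed by the generic
dominator walker, whose driver calls (unchanged and outside this hunk's
context) look roughly like this in tree_ssa_dse.  This is a sketch from the
GCC 4.4/4.5-era domwalk interface, not part of the change:

    /* Initialize and run the walker over the post-dominator tree,
       starting from the exit block, then tear it down.  */
    init_walk_dominator_tree (&walk_data);
    walk_dominator_tree (&walk_data, EXIT_BLOCK_PTR);
    fini_walk_dominator_tree (&walk_data);

In 4.5 the walker invokes before_dom_children once per block (which is why
dse_enter_block now iterates the statements itself), where 4.4's walker
drove per-statement callbacks via before_dom_children_walk_stmts and the
walk_stmts_backward flag.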
@@ -605,178 +439,26 @@
 gate_dse (void)
 {
   return flag_tree_dse != 0;
 }
 
 struct gimple_opt_pass pass_dse =
 {
  {
   GIMPLE_PASS,
   "dse",                        /* name */
   gate_dse,                     /* gate */
   tree_ssa_dse,                 /* execute */
   NULL,                         /* sub */
   NULL,                         /* next */
   0,                            /* static_pass_number */
   TV_TREE_DSE,                  /* tv_id */
-  PROP_cfg
-    | PROP_ssa
-    | PROP_alias,               /* properties_required */
+  PROP_cfg | PROP_ssa,          /* properties_required */
   0,                            /* properties_provided */
   0,                            /* properties_destroyed */
   0,                            /* todo_flags_start */
   TODO_dump_func
     | TODO_ggc_collect
     | TODO_verify_ssa           /* todo_flags_finish */
  }
 };
 
-/* A very simple dead store pass eliminating write only local variables.
-   The pass does not require alias information and thus can be run before
-   inlining to quickly eliminate artifacts of some common C++ constructs.  */
-
-static unsigned int
-execute_simple_dse (void)
-{
-  gimple_stmt_iterator gsi;
-  basic_block bb;
-  bitmap variables_loaded = BITMAP_ALLOC (NULL);
-  unsigned int todo = 0;
-
-  /* Collect into VARIABLES LOADED all variables that are read in function
-     body.  */
-  FOR_EACH_BB (bb)
-    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
-
-      if (gimple_loaded_syms (gsi_stmt (gsi)))
-        bitmap_ior_into (variables_loaded,
-                         gimple_loaded_syms (gsi_stmt (gsi)));
-
-  /* Look for statements writing into the write only variables.
-     And try to remove them.  */
-
-  FOR_EACH_BB (bb)
-    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
-      {
-        gimple stmt = gsi_stmt (gsi);
-        tree op;
-        bool removed = false;
-        ssa_op_iter iter;
-        tree size;
-
-        if (is_gimple_assign (stmt)
-            && AGGREGATE_TYPE_P (TREE_TYPE (gimple_assign_lhs (stmt)))
-            && (size = lang_hooks.expr_size (gimple_assign_lhs (stmt)))
-            && integer_zerop (size))
-          {
-            if (dump_file && (dump_flags & TDF_DETAILS))
-              {
-                fprintf (dump_file, "  Deleted zero-sized store '");
-                print_gimple_stmt (dump_file, stmt, 0, dump_flags);
-                fprintf (dump_file, "'\n");
-              }
-            removed = true;
-            gsi_remove (&gsi, true);
-            todo |= TODO_cleanup_cfg;
-          }
-        else if (gimple_stored_syms (stmt)
-                 && !bitmap_empty_p (gimple_stored_syms (stmt))
-                 && (is_gimple_assign (stmt)
-                     || (is_gimple_call (stmt)
-                         && gimple_call_lhs (stmt)))
-                 && !bitmap_intersect_p (gimple_stored_syms (stmt),
-                                         variables_loaded))
-          {
-            unsigned int i;
-            bitmap_iterator bi;
-            bool dead = true;
-
-            /* See if STMT only stores to write-only variables and
-               verify that there are no volatile operands.  tree-ssa-operands
-               sets has_volatile_ops flag for all statements involving
-               reads and writes when aliases are not built to prevent passes
-               from removing them as dead.  The flag thus has no use for us
-               and we need to look into all operands.  */
-
-            EXECUTE_IF_SET_IN_BITMAP (gimple_stored_syms (stmt), 0, i, bi)
-              {
-                tree var = referenced_var_lookup (i);
-                if (TREE_ADDRESSABLE (var)
-                    || is_global_var (var)
-                    || TREE_THIS_VOLATILE (var))
-                  dead = false;
-              }
-
-            if (dead && gimple_loaded_syms (stmt))
-              EXECUTE_IF_SET_IN_BITMAP (gimple_loaded_syms (stmt), 0, i, bi)
-                if (TREE_THIS_VOLATILE (referenced_var_lookup (i)))
-                  dead = false;
-
-            if (dead)
-              FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_OPERANDS)
-                if (TREE_THIS_VOLATILE (op))
-                  dead = false;
-
-            /* Look for possible occurrence var = indirect_ref (...) where
-               indirect_ref itself is volatile.  */
-
-            if (dead && is_gimple_assign (stmt)
-                && TREE_THIS_VOLATILE (gimple_assign_rhs1 (stmt)))
-              dead = false;
-
-            if (dead)
-              {
-                /* When LHS of var = call (); is dead, simplify it into
-                   call (); saving one operand.  */
-                if (is_gimple_call (stmt)
-                    && gimple_has_side_effects (stmt))
-                  {
-                    if (dump_file && (dump_flags & TDF_DETAILS))
-                      {
-                        fprintf (dump_file, "Deleted LHS of call: ");
-                        print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
-                        fprintf (dump_file, "\n");
-                      }
-                    push_stmt_changes (gsi_stmt_ptr (&gsi));
-                    gimple_call_set_lhs (stmt, NULL);
-                    pop_stmt_changes (gsi_stmt_ptr (&gsi));
-                  }
-                else
-                  {
-                    if (dump_file && (dump_flags & TDF_DETAILS))
-                      {
-                        fprintf (dump_file, "  Deleted dead store '");
-                        print_gimple_stmt (dump_file, stmt, 0, dump_flags);
-                        fprintf (dump_file, "'\n");
-                      }
-                    removed = true;
-                    gsi_remove (&gsi, true);
-                    todo |= TODO_cleanup_cfg;
-                  }
-                todo |= TODO_remove_unused_locals | TODO_ggc_collect;
-              }
-          }
-        if (!removed)
-          gsi_next (&gsi);
-      }
-  BITMAP_FREE (variables_loaded);
-  return todo;
-}
-
-struct gimple_opt_pass pass_simple_dse =
-{
- {
-  GIMPLE_PASS,
-  "sdse",                       /* name */
-  NULL,                         /* gate */
-  execute_simple_dse,           /* execute */
-  NULL,                         /* sub */
-  NULL,                         /* next */
-  0,                            /* static_pass_number */
-  0,                            /* tv_id */
-  PROP_ssa,                     /* properties_required */
-  0,                            /* properties_provided */
-  0,                            /* properties_destroyed */
-  0,                            /* todo_flags_start */
-  TODO_dump_func                /* todo_flags_finish */
- }
-};
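The deleted pass_simple_dse ("sdse") needed no alias information: it removed
stores to local variables that are never loaded anywhere in the function,
such as the temporary copies some C++ constructs leave behind.  A
hypothetical example of what it caught (m, S and t are made-up names):

    struct S { int a[8]; };

    void
    m (struct S s)
    {
      struct S t;
      t = s;        /* t is write-only, non-addressable, non-volatile:
                       the aggregate store would be removed by sdse */
    }

The gimple_stored_syms/gimple_loaded_syms bitmaps the pass relied on no
longer exist in GCC 4.5's rewritten alias representation, which is
presumably why the pass was dropped along with this update.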