comparison: gcc/tree-ssa.c @ 55:77e2b8dfacca (gcc-4.4.5)

Update gcc from 4.4.3 to 4.5.0.
author   ryoma <e075725@ie.u-ryukyu.ac.jp>
date     Fri, 12 Feb 2010 23:39:51 +0900
parents  855418dad1a3
children b7f97abdc517
comparing 52:c156f1bd5cd9 with 55:77e2b8dfacca

@@ -24,10 +24,11 @@
 #include "tm.h"
 #include "tree.h"
 #include "flags.h"
 #include "rtl.h"
 #include "tm_p.h"
+#include "target.h"
 #include "ggc.h"
 #include "langhooks.h"
 #include "hard-reg-set.h"
 #include "basic-block.h"
 #include "output.h"
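Note: the new "target.h" include is what declares the targetm hook vector; it is
needed by the rewritten useless_type_conversion_p further down in this changeset,
which defers function-type attribute comparison to the backend:

    if (TYPE_ATTRIBUTES (inner_type) || TYPE_ATTRIBUTES (outer_type))
      return targetm.comp_type_attributes (outer_type, inner_type) != 0;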
@@ -51,11 +52,11 @@
 
 
 /* Add a mapping with PHI RESULT and PHI DEF associated with edge E.  */
 
 void
-redirect_edge_var_map_add (edge e, tree result, tree def)
+redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus)
 {
   void **slot;
   edge_var_map_vector old_head, head;
   edge_var_map new_node;
 
@@ -69,10 +70,11 @@
       head = VEC_alloc (edge_var_map, heap, 5);
       *slot = head;
     }
   new_node.def = def;
   new_node.result = result;
+  new_node.locus = locus;
 
   VEC_safe_push (edge_var_map, heap, head, &new_node);
   if (old_head != head)
     {
       /* The push did some reallocation.  Update the pointer map.  */
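Note: the added LOCUS parameter implies that the edge_var_map record (declared in
tree-flow.h, not shown in this hunk) grew a matching field next to RESULT and DEF.
A sketch of the expected layout, with the field name inferred from the
new_node.locus assignment above:

    typedef struct _edge_var_map {
      tree result;             /* PHI result.  */
      tree def;                /* PHI arg definition.  */
      source_location locus;   /* PHI arg location.  */
    } edge_var_map;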
@@ -191,18 +193,20 @@
 
   /* Remove the appropriate PHI arguments in E's destination block.  */
   for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
     {
       tree def;
+      source_location locus;
 
       phi = gsi_stmt (gsi);
       def = gimple_phi_arg_def (phi, e->dest_idx);
+      locus = gimple_phi_arg_location (phi, e->dest_idx);
 
       if (def == NULL_TREE)
         continue;
 
-      redirect_edge_var_map_add (e, gimple_phi_result (phi), def);
+      redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
     }
 
   e = redirect_edge_succ_nodup (e, dest);
 
   return e;
@@ -231,14 +235,339 @@
     {
       tree def;
 
       phi = gsi_stmt (gsi);
       def = redirect_edge_var_map_def (vm);
-      add_phi_arg (phi, def, e);
+      add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
     }
 
   redirect_edge_var_map_clear (e);
+}
+
+/* Given a tree for an expression for which we might want to emit
+   locations or values in debug information (generally a variable, but
+   we might deal with other kinds of trees in the future), return the
+   tree that should be used as the variable of a DEBUG_BIND STMT or
+   VAR_LOCATION INSN or NOTE.  Return NULL if VAR is not to be tracked.  */
+
+tree
+target_for_debug_bind (tree var)
+{
+  if (!MAY_HAVE_DEBUG_STMTS)
+    return NULL_TREE;
+
+  if (TREE_CODE (var) != VAR_DECL
+      && TREE_CODE (var) != PARM_DECL)
+    return NULL_TREE;
+
+  if (DECL_HAS_VALUE_EXPR_P (var))
+    return target_for_debug_bind (DECL_VALUE_EXPR (var));
+
+  if (DECL_IGNORED_P (var))
+    return NULL_TREE;
+
+  if (!is_gimple_reg (var))
+    return NULL_TREE;
+
+  return var;
+}
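Note: target_for_debug_bind centralizes the decision of which decls may appear as
the variable of a debug bind.  A hypothetical caller (identifiers illustrative,
not part of this changeset) would filter a decl through it before building the
bind stmt:

    tree t = target_for_debug_bind (decl);
    if (t != NULL_TREE)
      {
        gimple bind = gimple_build_debug_bind (t, value, orig_stmt);
        gsi_insert_before (&gsi, bind, GSI_SAME_STMT);
      }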
+
+/* Called via walk_tree, look for SSA_NAMEs that have already been
+   released.  */
+
+static tree
+find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
+{
+  struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
+
+  if (wi && wi->is_lhs)
+    return NULL_TREE;
+
+  if (TREE_CODE (*tp) == SSA_NAME)
+    {
+      if (SSA_NAME_IN_FREE_LIST (*tp))
+        return *tp;
+
+      *walk_subtrees = 0;
+    }
+  else if (IS_TYPE_OR_DECL_P (*tp))
+    *walk_subtrees = 0;
+
+  return NULL_TREE;
+}
+
+/* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
+   by other DEBUG stmts, and replace uses of the DEF with the
+   newly-created debug temp.  */
+
+void
+insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
+{
+  imm_use_iterator imm_iter;
+  use_operand_p use_p;
+  gimple stmt;
+  gimple def_stmt = NULL;
+  int usecount = 0;
+  tree value = NULL;
+
+  if (!MAY_HAVE_DEBUG_STMTS)
+    return;
+
+  /* If this name has already been registered for replacement, do nothing
+     as anything that uses this name isn't in SSA form.  */
+  if (name_registered_for_update_p (var))
+    return;
+
+  /* Check whether there are debug stmts that reference this variable and,
+     if there are, decide whether we should use a debug temp.  */
+  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
+    {
+      stmt = USE_STMT (use_p);
+
+      if (!gimple_debug_bind_p (stmt))
+        continue;
+
+      if (usecount++)
+        break;
+
+      if (gimple_debug_bind_get_value (stmt) != var)
+        {
+          /* Count this as an additional use, so as to make sure we
+             use a temp unless VAR's definition has a SINGLE_RHS that
+             can be shared.  */
+          usecount++;
+          break;
+        }
+    }
+
+  if (!usecount)
+    return;
+
+  if (gsi)
+    def_stmt = gsi_stmt (*gsi);
+  else
+    def_stmt = SSA_NAME_DEF_STMT (var);
+
+  /* If we didn't get an insertion point, and the stmt has already
+     been removed, we won't be able to insert the debug bind stmt, so
+     we'll have to drop debug information.  */
+  if (gimple_code (def_stmt) == GIMPLE_PHI)
+    {
+      value = degenerate_phi_result (def_stmt);
+      if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
+        value = NULL;
+    }
+  else if (is_gimple_assign (def_stmt))
+    {
+      bool no_value = false;
+
+      if (!dom_info_available_p (CDI_DOMINATORS))
+        {
+          struct walk_stmt_info wi;
+
+          memset (&wi, 0, sizeof (wi));
+
+          /* When removing blocks without following reverse dominance
+             order, we may sometimes encounter SSA_NAMEs that have
+             already been released, referenced in other SSA_DEFs that
+             we're about to release.  Consider:
+
+             <bb X>:
+             v_1 = foo;
+
+             <bb Y>:
+             w_2 = v_1 + bar;
+             # DEBUG w => w_2
+
+             If we deleted BB X first, propagating the value of w_2
+             won't do us any good.  It's too late to recover their
+             original definition of v_1: when it was deleted, it was
+             only referenced in other DEFs, it couldn't possibly know
+             it should have been retained, and propagating every
+             single DEF just in case it might have to be propagated
+             into a DEBUG STMT would probably be too wasteful.
+
+             When dominator information is not readily available, we
+             check for and accept some loss of debug information.  But
+             if it is available, there's no excuse for us to remove
+             blocks in the wrong order, so we don't even check for
+             dead SSA NAMEs.  SSA verification shall catch any
+             errors.  */
+          if ((!gsi && !gimple_bb (def_stmt))
+              || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
+            no_value = true;
+        }
+
+      if (!no_value)
+        value = gimple_assign_rhs_to_tree (def_stmt);
+    }
+
+  if (value)
+    {
+      /* If there's a single use of VAR, and VAR is the entire debug
+         expression (usecount would have been incremented again
+         otherwise), and the definition involves only constants and
+         SSA names, then we can propagate VALUE into this single use,
+         avoiding the temp.
+
+         We can also avoid using a temp if VALUE can be shared and
+         propagated into all uses, without generating expressions that
+         wouldn't be valid gimple RHSs.
+
+         Other cases that would require unsharing or non-gimple RHSs
+         are deferred to a debug temp, although we could avoid temps
+         at the expense of duplication of expressions.  */
+
+      if (CONSTANT_CLASS_P (value)
+          || gimple_code (def_stmt) == GIMPLE_PHI
+          || (usecount == 1
+              && (!gimple_assign_single_p (def_stmt)
+                  || is_gimple_min_invariant (value)))
+          || is_gimple_reg (value))
+        value = unshare_expr (value);
+      else
+        {
+          gimple def_temp;
+          tree vexpr = make_node (DEBUG_EXPR_DECL);
+
+          def_temp = gimple_build_debug_bind (vexpr,
+                                              unshare_expr (value),
+                                              def_stmt);
+
+          DECL_ARTIFICIAL (vexpr) = 1;
+          TREE_TYPE (vexpr) = TREE_TYPE (value);
+          if (DECL_P (value))
+            DECL_MODE (vexpr) = DECL_MODE (value);
+          else
+            DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (value));
+
+          if (gsi)
+            gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
+          else
+            {
+              gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
+              gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
+            }
+
+          value = vexpr;
+        }
+    }
+
+  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
+    {
+      if (!gimple_debug_bind_p (stmt))
+        continue;
+
+      if (value)
+        FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
+          /* unshare_expr is not needed here.  vexpr is either a
+             SINGLE_RHS, that can be safely shared, some other RHS
+             that was unshared when we found it had a single debug
+             use, or a DEBUG_EXPR_DECL, that can be safely
+             shared.  */
+          SET_USE (use_p, value);
+      else
+        gimple_debug_bind_reset_value (stmt);
+
+      update_stmt (stmt);
+    }
+}
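Note: the effect of insert_debug_temp_for_var_def on the IL, sketched on a
hypothetical GIMPLE fragment.  When the definition of x_1 is about to be removed
and x_1 has more than one debug use, a DEBUG_EXPR_DECL temp (printed D#1) is
interposed instead of unsharing the expression into every use:

    before:                        after removing x_1's definition:
      x_1 = a_2 + b_3;               # DEBUG D#1 => a_2 + b_3
      # DEBUG x => x_1               # DEBUG x => D#1
      # DEBUG y => x_1               # DEBUG y => D#1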
+
+
+/* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
+   other DEBUG stmts, and replace uses of the DEF with the
+   newly-created debug temp.  */
+
+void
+insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
+{
+  gimple stmt;
+  ssa_op_iter op_iter;
+  def_operand_p def_p;
+
+  if (!MAY_HAVE_DEBUG_STMTS)
+    return;
+
+  stmt = gsi_stmt (*gsi);
+
+  FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
+    {
+      tree var = DEF_FROM_PTR (def_p);
+
+      if (TREE_CODE (var) != SSA_NAME)
+        continue;
+
+      insert_debug_temp_for_var_def (gsi, var);
+    }
+}
+
+/* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
+   dominated stmts before their dominators, so that release_ssa_defs
+   stands a chance of propagating DEFs into debug bind stmts.  */
+
+void
+release_defs_bitset (bitmap toremove)
+{
+  unsigned j;
+  bitmap_iterator bi;
+
+  /* Performing a topological sort is probably overkill, this will
+     most likely run in slightly superlinear time, rather than the
+     pathological quadratic worst case.  */
+  while (!bitmap_empty_p (toremove))
+    EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
+      {
+        bool remove_now = true;
+        tree var = ssa_name (j);
+        gimple stmt;
+        imm_use_iterator uit;
+
+        FOR_EACH_IMM_USE_STMT (stmt, uit, var)
+          {
+            ssa_op_iter dit;
+            def_operand_p def_p;
+
+            /* We can't propagate PHI nodes into debug stmts.  */
+            if (gimple_code (stmt) == GIMPLE_PHI
+                || is_gimple_debug (stmt))
+              continue;
+
+            /* If we find another definition to remove that uses
+               the one we're looking at, defer the removal of this
+               one, so that it can be propagated into debug stmts
+               after the other is.  */
+            FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
+              {
+                tree odef = DEF_FROM_PTR (def_p);
+
+                if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
+                  {
+                    remove_now = false;
+                    break;
+                  }
+              }
+
+            if (!remove_now)
+              BREAK_FROM_IMM_USE_STMT (uit);
+          }
+
+        if (remove_now)
+          {
+            gimple def = SSA_NAME_DEF_STMT (var);
+            gimple_stmt_iterator gsi = gsi_for_stmt (def);
+
+            if (gimple_code (def) == GIMPLE_PHI)
+              remove_phi_node (&gsi, true);
+            else
+              {
+                gsi_remove (&gsi, true);
+                release_defs (def);
+              }
+
+            bitmap_clear_bit (toremove, j);
+          }
+      }
 }
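Note: a hypothetical usage sketch for release_defs_bitset (variable names
illustrative): callers batch up the SSA versions of the definitions to delete and
let the loop above order the removals so that values can still be propagated into
debug binds:

    bitmap toremove = BITMAP_ALLOC (NULL);
    bitmap_set_bit (toremove, SSA_NAME_VERSION (name1));
    bitmap_set_bit (toremove, SSA_NAME_VERSION (name2));
    release_defs_bitset (toremove);
    BITMAP_FREE (toremove);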
 
 /* Return true if SSA_NAME is malformed and mark it visited.
 
    IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
@@ -266,10 +595,16 @@
     }
 
   if (is_virtual && is_gimple_reg (ssa_name))
     {
       error ("found a virtual definition for a GIMPLE register");
+      return true;
+    }
+
+  if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
+    {
+      error ("virtual SSA name for non-VOP decl");
       return true;
     }
 
   if (!is_virtual && !is_gimple_reg (ssa_name))
     {
@@ -392,11 +727,11 @@
     {
      error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
      err = true;
     }
 
  /* Make sure the use is in an appropriate list by checking the previous
     element to make sure it's the same.  */
  if (use_p->prev == NULL)
    {
      error ("no immediate_use list");
      err = true;
@@ -476,10 +811,25 @@
          err = verify_ssa_name (op, !is_gimple_reg (gimple_phi_result (phi)));
          err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
                             op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
        }
 
+      if (TREE_CODE (op) == ADDR_EXPR)
+        {
+          tree base = TREE_OPERAND (op, 0);
+          while (handled_component_p (base))
+            base = TREE_OPERAND (base, 0);
+          if ((TREE_CODE (base) == VAR_DECL
+               || TREE_CODE (base) == PARM_DECL
+               || TREE_CODE (base) == RESULT_DECL)
+              && !TREE_ADDRESSABLE (base))
+            {
+              error ("address taken, but ADDRESSABLE bit not set");
+              err = true;
+            }
+        }
+
       if (e->dest != bb)
        {
          error ("wrong edge %d->%d for PHI argument",
                 e->src->index, e->dest->index);
          err = true;
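Note: what the new ADDR_EXPR check enforces, on a hypothetical fragment: when a
PHI argument takes the address of (a component of) a local decl, that decl must
be flagged addressable, otherwise later passes could wrongly rewrite it into a
register:

    struct S s;                      /* must have TREE_ADDRESSABLE set */
    # p_1 = PHI <&s.f(3), q_2(4)>    /* this PHI arg is an ADDR_EXPR over s */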
@@ -500,232 +850,6 @@
       print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
     }
 
 
   return err;
-}
-
-
-static void
-verify_flow_insensitive_alias_info (void)
-{
-  tree var;
-  referenced_var_iterator rvi;
-
-  FOR_EACH_REFERENCED_VAR (var, rvi)
-    {
-      unsigned int j;
-      bitmap aliases;
-      tree alias;
-      bitmap_iterator bi;
-
-      if (!MTAG_P (var) || !MTAG_ALIASES (var))
-        continue;
-
-      aliases = MTAG_ALIASES (var);
-
-      EXECUTE_IF_SET_IN_BITMAP (aliases, 0, j, bi)
-        {
-          alias = referenced_var (j);
-
-          if (TREE_CODE (alias) != MEMORY_PARTITION_TAG
-              && !may_be_aliased (alias))
-            {
-              error ("non-addressable variable inside an alias set");
-              debug_variable (alias);
-              goto err;
-            }
-        }
-    }
-
-  return;
-
-err:
-  debug_variable (var);
-  internal_error ("verify_flow_insensitive_alias_info failed");
-}
-
-
-static void
-verify_flow_sensitive_alias_info (void)
-{
-  size_t i;
-  tree ptr;
-
-  for (i = 1; i < num_ssa_names; i++)
-    {
-      tree var;
-      var_ann_t ann;
-      struct ptr_info_def *pi;
-
-
-      ptr = ssa_name (i);
-      if (!ptr)
-        continue;
-
-      /* We only care for pointers that are actually referenced in the
-         program.  */
-      if (!POINTER_TYPE_P (TREE_TYPE (ptr)) || !TREE_VISITED (ptr))
-        continue;
-
-      /* RESULT_DECL is special.  If it's a GIMPLE register, then it
-         is only written-to only once in the return statement.
-         Otherwise, aggregate RESULT_DECLs may be written-to more than
-         once in virtual operands.  */
-      var = SSA_NAME_VAR (ptr);
-      if (TREE_CODE (var) == RESULT_DECL
-          && is_gimple_reg (ptr))
-        continue;
-
-      pi = SSA_NAME_PTR_INFO (ptr);
-      if (pi == NULL)
-        continue;
-
-      ann = var_ann (var);
-      if (pi->memory_tag_needed && !pi->name_mem_tag && !ann->symbol_mem_tag)
-        {
-          error ("dereferenced pointers should have a name or a symbol tag");
-          goto err;
-        }
-
-      if (pi->name_mem_tag
-          && (pi->pt_vars == NULL || bitmap_empty_p (pi->pt_vars)))
-        {
-          error ("pointers with a memory tag, should have points-to sets");
-          goto err;
-        }
-
-      if (pi->value_escapes_p
-          && pi->escape_mask & ~ESCAPE_TO_RETURN
-          && pi->name_mem_tag)
-        {
-          tree t = memory_partition (pi->name_mem_tag);
-          if (t == NULL_TREE)
-            t = pi->name_mem_tag;
-
-          if (!is_call_clobbered (t))
-            {
-              error ("pointer escapes but its name tag is not call-clobbered");
-              goto err;
-            }
-        }
-    }
-
-  return;
-
-err:
-  debug_variable (ptr);
-  internal_error ("verify_flow_sensitive_alias_info failed");
-}
-
-
-/* Verify the consistency of call clobbering information.  */
-
-static void
-verify_call_clobbering (void)
-{
-  unsigned int i;
-  bitmap_iterator bi;
-  tree var;
-  referenced_var_iterator rvi;
-
-  /* At all times, the result of the call_clobbered flag should
-     match the result of the call_clobbered_vars bitmap.  Verify both
-     that everything in call_clobbered_vars is marked
-     call_clobbered, and that everything marked
-     call_clobbered is in call_clobbered_vars.  */
-  EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
-    {
-      var = referenced_var (i);
-
-      if (memory_partition (var))
-        var = memory_partition (var);
-
-      if (!MTAG_P (var) && !var_ann (var)->call_clobbered)
-        {
-          error ("variable in call_clobbered_vars but not marked "
-                 "call_clobbered");
-          debug_variable (var);
-          goto err;
-        }
-    }
-
-  FOR_EACH_REFERENCED_VAR (var, rvi)
-    {
-      if (is_gimple_reg (var))
-        continue;
-
-      if (memory_partition (var))
-        var = memory_partition (var);
-
-      if (!MTAG_P (var)
-          && var_ann (var)->call_clobbered
-          && !bitmap_bit_p (gimple_call_clobbered_vars (cfun), DECL_UID (var)))
-        {
-          error ("variable marked call_clobbered but not in "
-                 "call_clobbered_vars bitmap.");
-          debug_variable (var);
-          goto err;
-        }
-    }
-
-  return;
-
-err:
-  internal_error ("verify_call_clobbering failed");
-}
-
-
-/* Verify invariants in memory partitions.  */
-
-static void
-verify_memory_partitions (void)
-{
-  unsigned i;
-  tree mpt;
-  VEC(tree,heap) *mpt_table = gimple_ssa_operands (cfun)->mpt_table;
-  struct pointer_set_t *partitioned_syms = pointer_set_create ();
-
-  for (i = 0; VEC_iterate (tree, mpt_table, i, mpt); i++)
-    {
-      unsigned j;
-      bitmap_iterator bj;
-
-      if (MPT_SYMBOLS (mpt) == NULL)
-        {
-          error ("Memory partitions should have at least one symbol");
-          debug_variable (mpt);
-          goto err;
-        }
-
-      EXECUTE_IF_SET_IN_BITMAP (MPT_SYMBOLS (mpt), 0, j, bj)
-        {
-          tree var = referenced_var (j);
-          if (pointer_set_insert (partitioned_syms, var))
-            {
-              error ("Partitioned symbols should belong to exactly one "
-                     "partition");
-              debug_variable (var);
-              goto err;
-            }
-        }
-    }
-
-  pointer_set_destroy (partitioned_syms);
-
-  return;
-
-err:
-  internal_error ("verify_memory_partitions failed");
-}
-
-
-/* Verify the consistency of aliasing information.  */
-
-static void
-verify_alias_info (void)
-{
-  verify_flow_sensitive_alias_info ();
-  verify_call_clobbering ();
-  verify_flow_insensitive_alias_info ();
-  verify_memory_partitions ();
 }
@@ -732,4 +856,4 @@
 
 
 /* Verify common invariants in the SSA web.
    TODO: verify the variable annotations.  */
@@ -743,11 +867,11 @@
   ssa_op_iter iter;
   tree op;
   enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
   bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);
 
-  gcc_assert (!need_ssa_update_p ());
+  gcc_assert (!need_ssa_update_p (cfun));
 
   verify_stmts ();
 
   timevar_push (TV_TREE_SSA_VERIFY);
 
@@ -807,10 +931,11 @@
   /* Now verify all the uses and vuses in every statement of the block.  */
   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
     {
       gimple stmt = gsi_stmt (gsi);
       use_operand_p use_p;
+      bool has_err;
 
       if (check_modified_stmt && gimple_modified_p (stmt))
        {
          error ("stmt (%p) marked modified after optimization pass: ",
                 (void *)stmt);
@@ -825,29 +950,59 @@
 
          lhs = gimple_assign_lhs (stmt);
          base_address = get_base_address (lhs);
 
          if (base_address
-             && gimple_aliases_computed_p (cfun)
              && SSA_VAR_P (base_address)
-             && !gimple_has_volatile_ops (stmt)
-             && ZERO_SSA_OPERANDS (stmt, SSA_OP_VDEF))
+             && !gimple_vdef (stmt)
+             && optimize > 0)
            {
              error ("statement makes a memory store, but has no VDEFS");
              print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
              goto err;
            }
        }
-
-      FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_VIRTUALS)
+      else if (gimple_debug_bind_p (stmt)
+               && !gimple_debug_bind_has_value_p (stmt))
+       continue;
+
+      /* Verify the single virtual operand and its constraints.  */
+      has_err = false;
+      if (gimple_vdef (stmt))
        {
-         if (verify_ssa_name (op, true))
+         if (gimple_vdef_op (stmt) == NULL_DEF_OPERAND_P)
            {
-             error ("in statement");
-             print_gimple_stmt (stderr, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
-             goto err;
+             error ("statement has VDEF operand not in defs list");
+             has_err = true;
            }
+         if (!gimple_vuse (stmt))
+           {
+             error ("statement has VDEF but no VUSE operand");
+             has_err = true;
+           }
+         else if (SSA_NAME_VAR (gimple_vdef (stmt))
+                  != SSA_NAME_VAR (gimple_vuse (stmt)))
+           {
+             error ("VDEF and VUSE do not use the same symbol");
+             has_err = true;
+           }
+         has_err |= verify_ssa_name (gimple_vdef (stmt), true);
+       }
+      if (gimple_vuse (stmt))
+       {
+         if (gimple_vuse_op (stmt) == NULL_USE_OPERAND_P)
+           {
+             error ("statement has VUSE operand not in uses list");
+             has_err = true;
+           }
+         has_err |= verify_ssa_name (gimple_vuse (stmt), true);
+       }
+      if (has_err)
+       {
+         error ("in statement");
+         print_gimple_stmt (stderr, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
+         goto err;
        }
 
       FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE|SSA_OP_DEF)
       {
         if (verify_ssa_name (op, false))
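Note: background for the rewritten virtual-operand checks.  In the 4.5 memory-SSA
scheme a statement carries at most one virtual definition and one virtual use,
both SSA names of the single artificial VOP symbol (conventionally printed as
.MEM), e.g.

    # .MEM_3 = VDEF <.MEM_2>
    a = b;

so the verifier can insist that gimple_vdef and gimple_vuse name the same
underlying symbol instead of iterating over SSA_OP_ALL_VIRTUALS.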
@@ -865,29 +1020,37 @@
                          use_p, stmt, false, names_defined_in_bb))
            goto err;
        }
 
       FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
-       bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
+       {
+         if (SSA_NAME_DEF_STMT (op) != stmt)
+           {
+             error ("SSA_NAME_DEF_STMT is wrong");
+             fprintf (stderr, "Expected definition statement:\n");
+             print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
+             fprintf (stderr, "\nActual definition statement:\n");
+             print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
+                                4, TDF_VOPS);
+             goto err;
+           }
+         bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
+       }
     }
 
   bitmap_clear (names_defined_in_bb);
 }
-
-  /* Finally, verify alias information.  */
-  if (gimple_aliases_computed_p (cfun))
-    verify_alias_info ();
 
   free (definition_block);
 
   /* Restore the dominance information to its prior known state, so
      that we do not perturb the compiler's subsequent behavior.  */
   if (orig_dom_state == DOM_NONE)
     free_dominance_info (CDI_DOMINATORS);
   else
     set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);
 
   BITMAP_FREE (names_defined_in_bb);
   timevar_pop (TV_TREE_SSA_VERIFY);
   return;
 
 err:
@@ -953,88 +1116,39 @@
 
 void
 init_tree_ssa (struct function *fn)
 {
   fn->gimple_df = GGC_CNEW (struct gimple_df);
   fn->gimple_df->referenced_vars = htab_create_ggc (20, uid_decl_map_hash,
                                                     uid_decl_map_eq, NULL);
   fn->gimple_df->default_defs = htab_create_ggc (20, uid_ssaname_map_hash,
                                                  uid_ssaname_map_eq, NULL);
-  fn->gimple_df->call_clobbered_vars = BITMAP_GGC_ALLOC ();
-  fn->gimple_df->call_used_vars = BITMAP_GGC_ALLOC ();
-  fn->gimple_df->addressable_vars = BITMAP_GGC_ALLOC ();
+  pt_solution_reset (&fn->gimple_df->escaped);
+  pt_solution_reset (&fn->gimple_df->callused);
   init_ssanames (fn, 0);
   init_phinodes ();
 }
 
 
 /* Deallocate memory associated with SSA data structures for FNDECL.  */
 
 void
 delete_tree_ssa (void)
 {
-  size_t i;
-  basic_block bb;
-  gimple_stmt_iterator gsi;
   referenced_var_iterator rvi;
   tree var;
 
-  /* Release any ssa_names still in use.  */
-  for (i = 0; i < num_ssa_names; i++)
-    {
-      tree var = ssa_name (i);
-      if (var && TREE_CODE (var) == SSA_NAME)
-        {
-          SSA_NAME_IMM_USE_NODE (var).prev = &(SSA_NAME_IMM_USE_NODE (var));
-          SSA_NAME_IMM_USE_NODE (var).next = &(SSA_NAME_IMM_USE_NODE (var));
-        }
-      release_ssa_name (var);
-    }
-
-  /* FIXME.  This may not be necessary.  We will release all this
-     memory en masse in free_ssa_operands.  This clearing used to be
-     necessary to avoid problems with the inliner, but it may not be
-     needed anymore.  */
-  FOR_EACH_BB (bb)
-    {
-      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
-        {
-          gimple stmt = gsi_stmt (gsi);
-
-          if (gimple_has_ops (stmt))
-            {
-              gimple_set_def_ops (stmt, NULL);
-              gimple_set_use_ops (stmt, NULL);
-              gimple_set_addresses_taken (stmt, NULL);
-            }
-
-          if (gimple_has_mem_ops (stmt))
-            {
-              gimple_set_vdef_ops (stmt, NULL);
-              gimple_set_vuse_ops (stmt, NULL);
-              BITMAP_FREE (stmt->gsmem.membase.stores);
-              BITMAP_FREE (stmt->gsmem.membase.loads);
-            }
-
-          gimple_set_modified (stmt, true);
-        }
-      set_phi_nodes (bb, NULL);
-    }
-
   /* Remove annotations from every referenced local variable.  */
   FOR_EACH_REFERENCED_VAR (var, rvi)
     {
-      if (!MTAG_P (var)
-          && (TREE_STATIC (var) || DECL_EXTERNAL (var)))
+      if (is_global_var (var))
+        continue;
+      if (var_ann (var))
        {
-         var_ann (var)->mpt = NULL_TREE;
-         var_ann (var)->symbol_mem_tag = NULL_TREE;
-         continue;
+         ggc_free (var_ann (var));
+         *DECL_VAR_ANN_PTR (var) = NULL;
        }
-      if (var->base.ann)
-       ggc_free (var->base.ann);
-      var->base.ann = NULL;
     }
   htab_delete (gimple_referenced_vars (cfun));
   cfun->gimple_df->referenced_vars = NULL;
 
   fini_ssanames ();
@@ -1042,26 +1156,20 @@
 
   /* We no longer maintain the SSA operand cache at this point.  */
   if (ssa_operands_active ())
     fini_ssa_operands ();
 
-  cfun->gimple_df->global_var = NULL_TREE;
+  delete_alias_heapvars ();
 
   htab_delete (cfun->gimple_df->default_defs);
   cfun->gimple_df->default_defs = NULL;
-  cfun->gimple_df->call_clobbered_vars = NULL;
-  cfun->gimple_df->call_used_vars = NULL;
-  cfun->gimple_df->addressable_vars = NULL;
+  pt_solution_reset (&cfun->gimple_df->escaped);
+  pt_solution_reset (&cfun->gimple_df->callused);
+  if (cfun->gimple_df->decls_to_pointers != NULL)
+    pointer_map_destroy (cfun->gimple_df->decls_to_pointers);
+  cfun->gimple_df->decls_to_pointers = NULL;
   cfun->gimple_df->modified_noreturn_calls = NULL;
-  if (gimple_aliases_computed_p (cfun))
-    {
-      delete_alias_heapvars ();
-      gcc_assert (!need_ssa_update_p ());
-    }
-  cfun->gimple_df->aliases_computed_p = false;
-  delete_mem_ref_stats (cfun);
-
   cfun->gimple_df = NULL;
 
   /* We no longer need the edge variable maps.  */
   redirect_edge_var_map_destroy ();
 }
@@ -1068,15 +1176,54 @@
 
-/* Helper function for useless_type_conversion_p.  */
-
-static bool
-useless_type_conversion_p_1 (tree outer_type, tree inner_type)
+/* Return true if the conversion from INNER_TYPE to OUTER_TYPE is a
+   useless type conversion, otherwise return false.
+
+   This function implicitly defines the middle-end type system.  With
+   the notion of 'a < b' meaning that useless_type_conversion_p (a, b)
+   holds and 'a > b' meaning that useless_type_conversion_p (b, a) holds,
+   the following invariants shall be fulfilled:
+
+   1) useless_type_conversion_p is transitive.
+      If a < b and b < c then a < c.
+
+   2) useless_type_conversion_p is not symmetric.
+      From a < b does not follow a > b.
+
+   3) Types define the available set of operations applicable to values.
+      A type conversion is useless if the operations for the target type
+      is a subset of the operations for the source type.  For example
+      casts to void* are useless, casts from void* are not (void* can't
+      be dereferenced or offsetted, but copied, hence its set of operations
+      is a strict subset of that of all other data pointer types).  Casts
+      to const T* are useless (can't be written to), casts from const T*
+      to T* are not.  */
+
+bool
+useless_type_conversion_p (tree outer_type, tree inner_type)
 {
   /* Do the following before stripping toplevel qualifiers.  */
   if (POINTER_TYPE_P (inner_type)
       && POINTER_TYPE_P (outer_type))
     {
+      /* Do not lose casts between pointers to different address spaces.  */
+      if (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
+          != TYPE_ADDR_SPACE (TREE_TYPE (inner_type)))
+        return false;
+
+      /* If the outer type is (void *) or a pointer to an incomplete
+         record type or a pointer to an unprototyped function,
+         then the conversion is not necessary.  */
+      if (VOID_TYPE_P (TREE_TYPE (outer_type))
+          || ((TREE_CODE (TREE_TYPE (outer_type)) == FUNCTION_TYPE
+               || TREE_CODE (TREE_TYPE (outer_type)) == METHOD_TYPE)
+              && (TREE_CODE (TREE_TYPE (outer_type))
+                  == TREE_CODE (TREE_TYPE (inner_type)))
+              && !TYPE_ARG_TYPES (TREE_TYPE (outer_type))
+              && useless_type_conversion_p (TREE_TYPE (TREE_TYPE (outer_type)),
+                                            TREE_TYPE (TREE_TYPE (inner_type)))))
+        return true;
+
       /* Do not lose casts to restrict qualified pointers.  */
       if ((TYPE_RESTRICT (outer_type)
            != TYPE_RESTRICT (inner_type))
          && TYPE_RESTRICT (outer_type))
        return false;
@@ -1092,12 +1239,14 @@
   /* If we know the canonical types, compare them.  */
   if (TYPE_CANONICAL (inner_type)
       && TYPE_CANONICAL (inner_type) == TYPE_CANONICAL (outer_type))
     return true;
 
-  /* Changes in machine mode are never useless conversions.  */
-  if (TYPE_MODE (inner_type) != TYPE_MODE (outer_type))
+  /* Changes in machine mode are never useless conversions unless we
+     deal with aggregate types in which case we defer to later checks.  */
+  if (TYPE_MODE (inner_type) != TYPE_MODE (outer_type)
+      && !AGGREGATE_TYPE_P (inner_type))
     return false;
 
   /* If both the inner and outer types are integral types, then the
      conversion is not necessary if they have the same mode and
     signedness and precision, and both or neither are boolean.  */
@@ -1107,29 +1256,24 @@
       /* Preserve changes in signedness or precision.  */
       if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
          || TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
        return false;
 
-      /* Conversions from a non-base to a base type are not useless.
-         This way we preserve the invariant to do arithmetic in
-         base types only.  */
-      if (TREE_TYPE (inner_type)
-          && TREE_TYPE (inner_type) != inner_type
-          && (TREE_TYPE (outer_type) == outer_type
-              || TREE_TYPE (outer_type) == NULL_TREE))
-        return false;
-
       /* We don't need to preserve changes in the types minimum or
         maximum value in general as these do not generate code
        unless the types precisions are different.  */
-
       return true;
     }
 
   /* Scalar floating point types with the same mode are compatible.  */
   else if (SCALAR_FLOAT_TYPE_P (inner_type)
           && SCALAR_FLOAT_TYPE_P (outer_type))
+    return true;
+
+  /* Fixed point types with the same mode are compatible.  */
+  else if (FIXED_POINT_TYPE_P (inner_type)
+           && FIXED_POINT_TYPE_P (outer_type))
     return true;
 
   /* We need to take special care recursing to pointed-to types.  */
   else if (POINTER_TYPE_P (inner_type)
          && POINTER_TYPE_P (outer_type))
@@ -1145,27 +1289,32 @@
           && (TYPE_VOLATILE (TREE_TYPE (outer_type))
               != TYPE_VOLATILE (TREE_TYPE (inner_type)))
          && TYPE_VOLATILE (TREE_TYPE (outer_type)))
        return false;
 
-      /* Do not lose casts between pointers with different
-         TYPE_REF_CAN_ALIAS_ALL setting or alias sets.  */
-      if ((TYPE_REF_CAN_ALIAS_ALL (inner_type)
-           != TYPE_REF_CAN_ALIAS_ALL (outer_type))
-          || (get_alias_set (TREE_TYPE (inner_type))
-              != get_alias_set (TREE_TYPE (outer_type))))
+      /* We require explicit conversions from incomplete target types.  */
+      if (!COMPLETE_TYPE_P (TREE_TYPE (inner_type))
+          && COMPLETE_TYPE_P (TREE_TYPE (outer_type)))
+        return false;
+
+      /* Do not lose casts between pointers that when dereferenced access
+         memory with different alias sets.  */
+      if (get_deref_alias_set (inner_type) != get_deref_alias_set (outer_type))
        return false;
 
       /* We do not care for const qualification of the pointed-to types
         as const qualification has no semantic value to the middle-end.  */
 
       /* Otherwise pointers/references are equivalent if their pointed
         to types are effectively the same.  We can strip qualifiers
        on pointed-to types for further comparison, which is done in
-       the callee.  */
-      return useless_type_conversion_p_1 (TREE_TYPE (outer_type),
-                                          TREE_TYPE (inner_type));
+       the callee.  Note we have to use true compatibility here
+       because addresses are subject to propagation into dereferences
+       and thus might get the original type exposed which is equivalent
+       to a reverse conversion.  */
+      return types_compatible_p (TREE_TYPE (outer_type),
+                                 TREE_TYPE (inner_type));
     }
 
   /* Recurse for complex types.  */
   else if (TREE_CODE (inner_type) == COMPLEX_TYPE
          && TREE_CODE (outer_type) == COMPLEX_TYPE)
@@ -1177,69 +1326,132 @@
           && TREE_CODE (outer_type) == VECTOR_TYPE
           && TYPE_PRECISION (inner_type) == TYPE_PRECISION (outer_type))
     return useless_type_conversion_p (TREE_TYPE (outer_type),
                                      TREE_TYPE (inner_type));
 
-  /* For aggregates we may need to fall back to structural equality
-     checks.  */
+  else if (TREE_CODE (inner_type) == ARRAY_TYPE
+           && TREE_CODE (outer_type) == ARRAY_TYPE)
+    {
+      /* Preserve string attributes.  */
+      if (TYPE_STRING_FLAG (inner_type) != TYPE_STRING_FLAG (outer_type))
+        return false;
+
+      /* Conversions from array types with unknown extent to
+         array types with known extent are not useless.  */
+      if (!TYPE_DOMAIN (inner_type)
+          && TYPE_DOMAIN (outer_type))
+        return false;
+
+      /* Nor are conversions from array types with non-constant size to
+         array types with constant size or to different size.  */
+      if (TYPE_SIZE (outer_type)
+          && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST
+          && (!TYPE_SIZE (inner_type)
+              || TREE_CODE (TYPE_SIZE (inner_type)) != INTEGER_CST
+              || !tree_int_cst_equal (TYPE_SIZE (outer_type),
+                                      TYPE_SIZE (inner_type))))
+        return false;
+
+      /* Check conversions between arrays with partially known extents.
+         If the array min/max values are constant they have to match.
+         Otherwise allow conversions to unknown and variable extents.
+         In particular this declares conversions that may change the
+         mode to BLKmode as useless.  */
+      if (TYPE_DOMAIN (inner_type)
+          && TYPE_DOMAIN (outer_type)
+          && TYPE_DOMAIN (inner_type) != TYPE_DOMAIN (outer_type))
+        {
+          tree inner_min = TYPE_MIN_VALUE (TYPE_DOMAIN (inner_type));
+          tree outer_min = TYPE_MIN_VALUE (TYPE_DOMAIN (outer_type));
+          tree inner_max = TYPE_MAX_VALUE (TYPE_DOMAIN (inner_type));
+          tree outer_max = TYPE_MAX_VALUE (TYPE_DOMAIN (outer_type));
+
+          /* After gimplification a variable min/max value carries no
+             additional information compared to a NULL value.  All that
+             matters has been lowered to be part of the IL.  */
+          if (inner_min && TREE_CODE (inner_min) != INTEGER_CST)
+            inner_min = NULL_TREE;
+          if (outer_min && TREE_CODE (outer_min) != INTEGER_CST)
+            outer_min = NULL_TREE;
+          if (inner_max && TREE_CODE (inner_max) != INTEGER_CST)
+            inner_max = NULL_TREE;
+          if (outer_max && TREE_CODE (outer_max) != INTEGER_CST)
+            outer_max = NULL_TREE;
+
+          /* Conversions NULL / variable <- cst are useless, but not
+             the other way around.  */
+          if (outer_min
+              && (!inner_min
+                  || !tree_int_cst_equal (inner_min, outer_min)))
+            return false;
+          if (outer_max
+              && (!inner_max
+                  || !tree_int_cst_equal (inner_max, outer_max)))
+            return false;
+        }
+
+      /* Recurse on the element check.  */
+      return useless_type_conversion_p (TREE_TYPE (outer_type),
+                                        TREE_TYPE (inner_type));
+    }
+
+  else if ((TREE_CODE (inner_type) == FUNCTION_TYPE
+            || TREE_CODE (inner_type) == METHOD_TYPE)
+           && TREE_CODE (inner_type) == TREE_CODE (outer_type))
+    {
+      tree outer_parm, inner_parm;
+
+      /* If the return types are not compatible bail out.  */
+      if (!useless_type_conversion_p (TREE_TYPE (outer_type),
+                                      TREE_TYPE (inner_type)))
+        return false;
+
+      /* Method types should belong to a compatible base class.  */
+      if (TREE_CODE (inner_type) == METHOD_TYPE
+          && !useless_type_conversion_p (TYPE_METHOD_BASETYPE (outer_type),
+                                         TYPE_METHOD_BASETYPE (inner_type)))
+        return false;
+
+      /* A conversion to an unprototyped argument list is ok.  */
+      if (!TYPE_ARG_TYPES (outer_type))
+        return true;
+
+      /* If the unqualified argument types are compatible the conversion
+         is useless.  */
+      if (TYPE_ARG_TYPES (outer_type) == TYPE_ARG_TYPES (inner_type))
+        return true;
+
+      for (outer_parm = TYPE_ARG_TYPES (outer_type),
+           inner_parm = TYPE_ARG_TYPES (inner_type);
+           outer_parm && inner_parm;
+           outer_parm = TREE_CHAIN (outer_parm),
+           inner_parm = TREE_CHAIN (inner_parm))
+        if (!useless_type_conversion_p
+              (TYPE_MAIN_VARIANT (TREE_VALUE (outer_parm)),
+               TYPE_MAIN_VARIANT (TREE_VALUE (inner_parm))))
+          return false;
+
+      /* If there is a mismatch in the number of arguments the functions
+         are not compatible.  */
+      if (outer_parm || inner_parm)
+        return false;
+
+      /* Defer to the target if necessary.  */
+      if (TYPE_ATTRIBUTES (inner_type) || TYPE_ATTRIBUTES (outer_type))
+        return targetm.comp_type_attributes (outer_type, inner_type) != 0;
+
+      return true;
+    }
+
+  /* For aggregates we rely on TYPE_CANONICAL exclusively and require
+     explicit conversions for types involving to be structurally
+     compared types.  */
   else if (AGGREGATE_TYPE_P (inner_type)
-           && AGGREGATE_TYPE_P (outer_type))
-    {
-      /* Different types of aggregates are incompatible.  */
-      if (TREE_CODE (inner_type) != TREE_CODE (outer_type))
-        return false;
-
-      /* ??? This seems to be necessary even for aggregates that don't
-         have TYPE_STRUCTURAL_EQUALITY_P set.  */
-
-      /* ??? This should eventually just return false.  */
-      return lang_hooks.types_compatible_p (inner_type, outer_type);
-    }
-  /* Also for functions and possibly other types with
-     TYPE_STRUCTURAL_EQUALITY_P set.  */
-  else if (TYPE_STRUCTURAL_EQUALITY_P (inner_type)
-           && TYPE_STRUCTURAL_EQUALITY_P (outer_type))
-    return lang_hooks.types_compatible_p (inner_type, outer_type);
+           && TREE_CODE (inner_type) == TREE_CODE (outer_type))
+    return false;
 
   return false;
-}
-
-/* Return true if the conversion from INNER_TYPE to OUTER_TYPE is a
-   useless type conversion, otherwise return false.
-
-   This function implicitly defines the middle-end type system.  With
-   the notion of 'a < b' meaning that useless_type_conversion_p (a, b)
-   holds and 'a > b' meaning that useless_type_conversion_p (b, a) holds,
-   the following invariants shall be fulfilled:
-
-   1) useless_type_conversion_p is transitive.
-      If a < b and b < c then a < c.
-
-   2) useless_type_conversion_p is not symmetric.
-      From a < b does not follow a > b.
-
-   3) Types define the available set of operations applicable to values.
-      A type conversion is useless if the operations for the target type
-      is a subset of the operations for the source type.  For example
-      casts to void* are useless, casts from void* are not (void* can't
-      be dereferenced or offsetted, but copied, hence its set of operations
-      is a strict subset of that of all other data pointer types).  Casts
-      to const T* are useless (can't be written to), casts from const T*
-      to T* are not.  */
-
-bool
-useless_type_conversion_p (tree outer_type, tree inner_type)
-{
-  /* If the outer type is (void *), then the conversion is not
-     necessary.  We have to make sure to not apply this while
-     recursing though.  */
-  if (POINTER_TYPE_P (inner_type)
-      && POINTER_TYPE_P (outer_type)
-      && TREE_CODE (TREE_TYPE (outer_type)) == VOID_TYPE)
-    return true;
-
-  return useless_type_conversion_p_1 (outer_type, inner_type);
 }
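Note: illustrative behavior of the merged predicate on hypothetical tree nodes
(not part of the changeset), matching the ordering axioms in the new head
comment:

    tree vp = build_pointer_type (void_type_node);
    tree ip = build_pointer_type (integer_type_node);

    useless_type_conversion_p (vp, ip);   /* true: cast to void *  */
    useless_type_conversion_p (ip, vp);   /* false: cast from void *  */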
 
 /* Return true if a conversion from either type of TYPE1 and TYPE2
    to the other is not required.  Otherwise return false.  */
 
@@ -1269,14 +1481,26 @@
                                    TREE_TYPE (TREE_OPERAND (expr, 0)));
 
   return false;
 }
 
+/* Strip conversions from EXP according to
+   tree_ssa_useless_type_conversion and return the resulting
+   expression.  */
+
+tree
+tree_ssa_strip_useless_type_conversions (tree exp)
+{
+  while (tree_ssa_useless_type_conversion (exp))
+    exp = TREE_OPERAND (exp, 0);
+  return exp;
+}
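Note: a hypothetical use of the new helper: canonicalize operands before a
structural comparison, peeling however many useless conversions are stacked on
top:

    op0 = tree_ssa_strip_useless_type_conversions (op0);
    op1 = tree_ssa_strip_useless_type_conversions (op1);
    cmp = operand_equal_p (op0, op1, 0);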
+
 
 /* Internal helper for walk_use_def_chains.  VAR, FN and DATA are as
    described in walk_use_def_chains.
 
    VISITED is a pointer set used to mark visited SSA_NAMEs to avoid
    infinite loops.  We used to have a bitmap for this to just mark
    SSA versions we had visited.  But non-sparse bitmaps are way too
    expensive, while sparse bitmaps may cause quadratic behavior.
 
@@ -1330,22 +1554,22 @@
       if (is_dfs)
        for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          if (fn (gimple_phi_arg_def (def_stmt, i), def_stmt, data))
            return true;
     }
 
   return false;
 }
 
 
 
 /* Walk use-def chains starting at the SSA variable VAR.  Call
    function FN at each reaching definition found.  FN takes three
    arguments: VAR, its defining statement (DEF_STMT) and a generic
    pointer to whatever state information that FN may want to maintain
    (DATA).  FN is able to stop the walk by returning true, otherwise
    in order to continue the walk, FN should return false.
 
    Note, that if DEF_STMT is a PHI node, the semantics are slightly
    different.  The first argument to FN is no longer the original
    variable VAR, but the PHI argument currently being examined.  If FN
    wants to get at VAR, it should call PHI_RESULT (PHI).
@@ -1435,11 +1659,11 @@
       return;
 
   /* Do not warn if it can be initialized outside this module.  */
   if (is_global_var (var))
     return;
 
   location = (context != NULL && gimple_has_location (context))
             ? gimple_location (context)
             : DECL_SOURCE_LOCATION (var);
   xloc = expand_location (location);
   floc = expand_location (DECL_SOURCE_LOCATION (cfun->decl));
@@ -1448,11 +1672,11 @@
       TREE_NO_WARNING (var) = 1;
 
       if (xloc.file != floc.file
          || xloc.line < floc.line
          || xloc.line > LOCATION_LINE (cfun->function_end_locus))
-       inform (input_location, "%J%qD was declared here", var, var);
+       inform (DECL_SOURCE_LOCATION (var), "%qD was declared here", var);
     }
 }
 
 struct walk_data {
   gimple stmt;
@@ -1492,27 +1716,26 @@
       /* A VAR_DECL in the RHS of a gimple statement may mean that
         this variable is loaded from memory.  */
       use_operand_p vuse;
       tree op;
 
       /* If there is not gimple stmt,
         or alias information has not been computed,
        then we cannot check VUSE ops.  */
-      if (data->stmt == NULL
-          || !gimple_aliases_computed_p (cfun))
+      if (data->stmt == NULL)
        return NULL_TREE;
 
       /* If the load happens as part of a call do not warn about it.  */
       if (is_gimple_call (data->stmt))
        return NULL_TREE;
 
-      vuse = SINGLE_SSA_USE_OPERAND (data->stmt, SSA_OP_VUSE);
+      vuse = gimple_vuse_op (data->stmt);
       if (vuse == NULL_USE_OPERAND_P)
        return NULL_TREE;
 
       op = USE_FROM_PTR (vuse);
       if (t != SSA_NAME_VAR (op)
          || !SSA_NAME_IS_DEFAULT_DEF (op))
        return NULL_TREE;
       /* If this is a VUSE of t and it is the default definition,
         then warn about op.  */
       t = op;
@@ -1587,10 +1810,12 @@
                                    single_succ (ENTRY_BLOCK_PTR), bb);
       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          struct walk_stmt_info wi;
          data.stmt = gsi_stmt (gsi);
+         if (is_gimple_debug (data.stmt))
+           continue;
          memset (&wi, 0, sizeof (wi));
          wi.info = &data;
          walk_gimple_op (gsi_stmt (gsi), warn_uninitialized_var, &wi);
        }
     }
@@ -1641,17 +1866,17 @@
 
 struct gimple_opt_pass pass_early_warn_uninitialized =
 {
  {
   GIMPLE_PASS,
-  NULL,                                 /* name */
+  "*early_warn_uninitialized",          /* name */
   gate_warn_uninitialized,              /* gate */
   execute_early_warn_uninitialized,     /* execute */
   NULL,                                 /* sub */
   NULL,                                 /* next */
   0,                                    /* static_pass_number */
-  0,                                    /* tv_id */
+  TV_NONE,                              /* tv_id */
   PROP_ssa,                             /* properties_required */
   0,                                    /* properties_provided */
   0,                                    /* properties_destroyed */
   0,                                    /* todo_flags_start */
   0                                     /* todo_flags_finish */
1660 1885
1661 struct gimple_opt_pass pass_late_warn_uninitialized = 1886 struct gimple_opt_pass pass_late_warn_uninitialized =
1662 { 1887 {
1663 { 1888 {
1664 GIMPLE_PASS, 1889 GIMPLE_PASS,
1665 NULL, /* name */ 1890 "*late_warn_uninitialized", /* name */
1666 gate_warn_uninitialized, /* gate */ 1891 gate_warn_uninitialized, /* gate */
1667 execute_late_warn_uninitialized, /* execute */ 1892 execute_late_warn_uninitialized, /* execute */
1668 NULL, /* sub */ 1893 NULL, /* sub */
1669 NULL, /* next */ 1894 NULL, /* next */
1670 0, /* static_pass_number */ 1895 0, /* static_pass_number */
1671 0, /* tv_id */ 1896 TV_NONE, /* tv_id */
1672 PROP_ssa, /* properties_required */ 1897 PROP_ssa, /* properties_required */
1673 0, /* properties_provided */ 1898 0, /* properties_provided */
1674 0, /* properties_destroyed */ 1899 0, /* properties_destroyed */
1675 0, /* todo_flags_start */ 1900 0, /* todo_flags_start */
1676 0 /* todo_flags_finish */ 1901 0 /* todo_flags_finish */
1677 } 1902 }
1678 }; 1903 };
1679 1904
1680 /* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables. */ 1905 /* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables. */
1681 1906
1682 static unsigned int 1907 void
1683 execute_update_addresses_taken (void) 1908 execute_update_addresses_taken (bool do_optimize)
1684 { 1909 {
1685 tree var; 1910 tree var;
1686 referenced_var_iterator rvi; 1911 referenced_var_iterator rvi;
1687 gimple_stmt_iterator gsi; 1912 gimple_stmt_iterator gsi;
1688 basic_block bb; 1913 basic_block bb;
1689 bitmap addresses_taken = BITMAP_ALLOC (NULL); 1914 bitmap addresses_taken = BITMAP_ALLOC (NULL);
1690 bitmap not_reg_needs = BITMAP_ALLOC (NULL); 1915 bitmap not_reg_needs = BITMAP_ALLOC (NULL);
1691 bitmap vars_updated = BITMAP_ALLOC (NULL);
1692 bool update_vops = false; 1916 bool update_vops = false;
1693 1917
1694 /* Collect into ADDRESSES_TAKEN all variables whose address is taken within 1918 /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
1695 the function body. */ 1919 the function body. */
1696 FOR_EACH_BB (bb) 1920 FOR_EACH_BB (bb)
1697 { 1921 {
1698 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) 1922 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1699 { 1923 {
1700 const_gimple stmt = gsi_stmt (gsi); 1924 gimple stmt = gsi_stmt (gsi);
1701 enum gimple_code code = gimple_code (stmt); 1925 enum gimple_code code = gimple_code (stmt);
1702 bitmap taken = gimple_addresses_taken (stmt); 1926
1703 1927 /* Note all addresses taken by the stmt. */
1704 if (taken) 1928 gimple_ior_addresses_taken (addresses_taken, stmt);
1705 bitmap_ior_into (addresses_taken, taken); 1929
1706
1707 /* If we have a call or an assignment, see if the lhs contains 1930 /* If we have a call or an assignment, see if the lhs contains
1708 a local decl that must not be a gimple register. */ 1931 a local decl that must not be a gimple register. */
1709 if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL) 1932 if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
1710 { 1933 {
1711 tree lhs = gimple_get_lhs (stmt); 1934 tree lhs = gimple_get_lhs (stmt);
1712 /* A plain decl does not need it set. */ 1935
1713 if (lhs && handled_component_p (lhs)) 1936 /* We may not rewrite TMR_SYMBOL to SSA. */
1714 { 1937 if (lhs && TREE_CODE (lhs) == TARGET_MEM_REF
1715 var = get_base_address (lhs); 1938 && TMR_SYMBOL (lhs))
1716 if (DECL_P (var)) 1939 bitmap_set_bit (not_reg_needs, DECL_UID (TMR_SYMBOL (lhs)));
1717 bitmap_set_bit (not_reg_needs, DECL_UID (var)); 1940
1718 } 1941 /* A plain decl does not need it set. */
1942 else if (lhs && handled_component_p (lhs))
1943 {
1944 var = get_base_address (lhs);
1945 if (DECL_P (var))
1946 bitmap_set_bit (not_reg_needs, DECL_UID (var));
1947 }
1719 } 1948 }
1720 } 1949 }
1721 1950
1722 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi)) 1951 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1723 { 1952 {
1735 } 1964 }
1736 } 1965 }
1737 1966
1738 /* When possible, clear the ADDRESSABLE bit or set the REGISTER bit 1967 /* When possible, clear the ADDRESSABLE bit or set the REGISTER bit
1739 and mark the variable for conversion into SSA. */ 1968 and mark the variable for conversion into SSA. */
1740 FOR_EACH_REFERENCED_VAR (var, rvi) 1969 if (optimize && do_optimize)
1741 { 1970 FOR_EACH_REFERENCED_VAR (var, rvi)
1742 /* Global variables and result decls cannot be changed. */ 1971 {
1743 if (is_global_var (var) 1972 /* Global variables and result decls cannot be changed. */
1744 || TREE_CODE (var) == RESULT_DECL 1973 if (is_global_var (var)
1745 || bitmap_bit_p (addresses_taken, DECL_UID (var))) 1974 || TREE_CODE (var) == RESULT_DECL
1746 continue; 1975 || bitmap_bit_p (addresses_taken, DECL_UID (var)))
1747 1976 continue;
1748 if (TREE_ADDRESSABLE (var) 1977
1749 /* Do not change TREE_ADDRESSABLE if we need to preserve var as 1978 if (TREE_ADDRESSABLE (var)
1750 a non-register. Otherwise we are confused and forget to 1979 /* Do not change TREE_ADDRESSABLE if we need to preserve var as
1751 add virtual operands for it. */ 1980 a non-register. Otherwise we are confused and forget to
1752 && (!is_gimple_reg_type (TREE_TYPE (var)) 1981 add virtual operands for it. */
1753 || !bitmap_bit_p (not_reg_needs, DECL_UID (var)))) 1982 && (!is_gimple_reg_type (TREE_TYPE (var))
1754 { 1983 || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
1755 TREE_ADDRESSABLE (var) = 0; 1984 {
1756 if (is_gimple_reg (var)) 1985 TREE_ADDRESSABLE (var) = 0;
1986 if (is_gimple_reg (var))
1987 mark_sym_for_renaming (var);
1988 update_vops = true;
1989 if (dump_file)
1990 {
1991 fprintf (dump_file, "No longer having address taken ");
1992 print_generic_expr (dump_file, var, 0);
1993 fprintf (dump_file, "\n");
1994 }
1995 }
1996 if (!DECL_GIMPLE_REG_P (var)
1997 && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
1998 && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1999 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
2000 && !TREE_THIS_VOLATILE (var)
2001 && (TREE_CODE (var) != VAR_DECL || !DECL_HARD_REGISTER (var)))
2002 {
2003 DECL_GIMPLE_REG_P (var) = 1;
1757 mark_sym_for_renaming (var); 2004 mark_sym_for_renaming (var);
1758 update_vops = true; 2005 update_vops = true;
1759 bitmap_set_bit (vars_updated, DECL_UID (var)); 2006 if (dump_file)
1760 if (dump_file) 2007 {
1761 { 2008 fprintf (dump_file, "Decl is now a gimple register ");
1762 fprintf (dump_file, "No longer having address taken "); 2009 print_generic_expr (dump_file, var, 0);
1763 print_generic_expr (dump_file, var, 0); 2010 fprintf (dump_file, "\n");
1764 fprintf (dump_file, "\n"); 2011 }
1765 } 2012 }
1766 }
1767 if (!DECL_GIMPLE_REG_P (var)
1768 && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
1769 && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1770 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE))
1771 {
1772 DECL_GIMPLE_REG_P (var) = 1;
1773 mark_sym_for_renaming (var);
1774 update_vops = true;
1775 bitmap_set_bit (vars_updated, DECL_UID (var));
1776 if (dump_file)
1777 {
1778 fprintf (dump_file, "Decl is now a gimple register ");
1779 print_generic_expr (dump_file, var, 0);
1780 fprintf (dump_file, "\n");
1781 }
1782 }
1783 } 2013 }
1784 2014
1785 /* Operand caches need to be recomputed for operands referencing the updated 2015 /* Operand caches need to be recomputed for operands referencing the updated
1786 variables. */ 2016 variables. */
1787 if (update_vops) 2017 if (update_vops)
1788 FOR_EACH_BB (bb) 2018 {
1789 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) 2019 FOR_EACH_BB (bb)
1790 { 2020 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1791 gimple stmt = gsi_stmt (gsi); 2021 {
1792 2022 gimple stmt = gsi_stmt (gsi);
1793 if ((gimple_loaded_syms (stmt) 2023
1794 && bitmap_intersect_p (gimple_loaded_syms (stmt), vars_updated)) 2024 if (gimple_references_memory_p (stmt)
1795 || (gimple_stored_syms (stmt) 2025 || is_gimple_debug (stmt))
1796 && bitmap_intersect_p (gimple_stored_syms (stmt), vars_updated))) 2026 update_stmt (stmt);
1797 update_stmt (stmt); 2027 }
1798 } 2028
2029 /* Update SSA form here, we are called as non-pass as well. */
2030 update_ssa (TODO_update_ssa);
2031 }
2032
1799 BITMAP_FREE (not_reg_needs); 2033 BITMAP_FREE (not_reg_needs);
1800 BITMAP_FREE (addresses_taken); 2034 BITMAP_FREE (addresses_taken);
1801 BITMAP_FREE (vars_updated);
1802 return 0;
1803 } 2035 }
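As an illustration (assumed source, not taken from this change) of what execute_update_addresses_taken achieves when do_optimize is true: once earlier passes delete dead code that was the only place a local's address was taken, the pass clears TREE_ADDRESSABLE so the variable can be rewritten into SSA form, and it sets DECL_GIMPLE_REG_P on complex or vector locals that are never address-taken or written component-wise:

int
f (int n)
{
  int t = n + 1;
  if (0)
    {
      int *p = &t;      /* dead; once removed, &t no longer occurs, so
                           TREE_ADDRESSABLE (t) can be cleared */
      *p = 0;
    }
  return t * 2;         /* t can now become a gimple register */
}

_Complex double
g (_Complex double a)
{
  _Complex double c = a;        /* never address-taken and never written
                                   through REALPART_EXPR/IMAGPART_EXPR,
                                   so DECL_GIMPLE_REG_P (c) can be set */
  return c + a;
}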
1804 2036
1805 struct gimple_opt_pass pass_update_address_taken = 2037 struct gimple_opt_pass pass_update_address_taken =
1806 { 2038 {
1807 { 2039 {
1808 GIMPLE_PASS, 2040 GIMPLE_PASS,
1809 "addressables", /* name */ 2041 "addressables", /* name */
1810 NULL, /* gate */ 2042 NULL, /* gate */
1811 execute_update_addresses_taken, /* execute */ 2043 NULL, /* execute */
1812 NULL, /* sub */ 2044 NULL, /* sub */
1813 NULL, /* next */ 2045 NULL, /* next */
1814 0, /* static_pass_number */ 2046 0, /* static_pass_number */
1815 0, /* tv_id */ 2047 TV_NONE, /* tv_id */
1816 PROP_ssa, /* properties_required */ 2048 PROP_ssa, /* properties_required */
1817 0, /* properties_provided */ 2049 0, /* properties_provided */
1818 0, /* properties_destroyed */ 2050 0, /* properties_destroyed */
1819 0, /* todo_flags_start */ 2051 0, /* todo_flags_start */
1820 TODO_update_ssa /* todo_flags_finish */ 2052 TODO_update_address_taken
2053 | TODO_dump_func /* todo_flags_finish */
1821 } 2054 }
1822 }; 2055 };
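Note that the execute hook is now NULL: the recomputation is driven through todo_flags_finish instead, and the function itself is exported (its new void signature appears above, and the comment in its body notes it is called as a non-pass as well). A hypothetical direct caller, with the declaration repeated here only to keep the sketch self-contained:

/* Direct invocation outside the pass manager (hypothetical caller).
   The do_optimize argument gates the TREE_ADDRESSABLE /
   DECL_GIMPLE_REG_P adjustments.  */
extern void execute_update_addresses_taken (bool);

static void
after_my_transform (void)
{
  execute_update_addresses_taken (true);
}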