Mercurial > hg > CbC > CbC_gcc
comparison gcc/tree-ssa-pre.c @ 67:f6334be47118
update gcc from gcc-4.6-20100522 to gcc-4.6-20110318
author | nobuyasu <dimolto@cr.ie.u-ryukyu.ac.jp> |
---|---|
date | Tue, 22 Mar 2011 17:18:12 +0900 |
parents | b7f97abdc517 |
children | 04ced10e8804 |
comparison
equal
deleted
inserted
replaced
65:65488c3d617d | 67:f6334be47118 |
---|---|
24 #include "system.h" | 24 #include "system.h" |
25 #include "coretypes.h" | 25 #include "coretypes.h" |
26 #include "tm.h" | 26 #include "tm.h" |
27 #include "tree.h" | 27 #include "tree.h" |
28 #include "basic-block.h" | 28 #include "basic-block.h" |
29 #include "diagnostic.h" | |
30 #include "tree-pretty-print.h" | 29 #include "tree-pretty-print.h" |
31 #include "gimple-pretty-print.h" | 30 #include "gimple-pretty-print.h" |
32 #include "tree-inline.h" | 31 #include "tree-inline.h" |
33 #include "tree-flow.h" | 32 #include "tree-flow.h" |
34 #include "gimple.h" | 33 #include "gimple.h" |
355 | 354 |
356 /* An unordered bitmap set. One bitmap tracks values, the other, | 355 /* An unordered bitmap set. One bitmap tracks values, the other, |
357 expressions. */ | 356 expressions. */ |
358 typedef struct bitmap_set | 357 typedef struct bitmap_set |
359 { | 358 { |
360 bitmap expressions; | 359 bitmap_head expressions; |
361 bitmap values; | 360 bitmap_head values; |
362 } *bitmap_set_t; | 361 } *bitmap_set_t; |
363 | 362 |
364 #define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \ | 363 #define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \ |
365 EXECUTE_IF_SET_IN_BITMAP((set)->expressions, 0, (id), (bi)) | 364 EXECUTE_IF_SET_IN_BITMAP(&(set)->expressions, 0, (id), (bi)) |
366 | 365 |
367 #define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \ | 366 #define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \ |
368 EXECUTE_IF_SET_IN_BITMAP((set)->values, 0, (id), (bi)) | 367 EXECUTE_IF_SET_IN_BITMAP(&(set)->values, 0, (id), (bi)) |
369 | 368 |
370 /* Mapping from value id to expressions with that value_id. */ | 369 /* Mapping from value id to expressions with that value_id. */ |
371 DEF_VEC_P (bitmap_set_t); | 370 DEF_VEC_P (bitmap_set_t); |
372 DEF_VEC_ALLOC_P (bitmap_set_t, heap); | 371 DEF_VEC_ALLOC_P (bitmap_set_t, heap); |
373 static VEC(bitmap_set_t, heap) *value_expressions; | 372 static VEC(bitmap_set_t, heap) *value_expressions; |
483 match the current variable's type. */ | 482 match the current variable's type. */ |
484 static tree pretemp; | 483 static tree pretemp; |
485 static tree storetemp; | 484 static tree storetemp; |
486 static tree prephitemp; | 485 static tree prephitemp; |
487 | 486 |
488 /* Set of blocks with statements that have had its EH information | 487 /* Set of blocks with statements that have had their EH properties changed. */ |
489 cleaned up. */ | |
490 static bitmap need_eh_cleanup; | 488 static bitmap need_eh_cleanup; |
489 | |
490 /* Set of blocks with statements that have had their AB properties changed. */ | |
491 static bitmap need_ab_cleanup; | |
491 | 492 |
492 /* The phi_translate_table caches phi translations for a given | 493 /* The phi_translate_table caches phi translations for a given |
493 expression and predecessor. */ | 494 expression and predecessor. */ |
494 | 495 |
495 static htab_t phi_translate_table; | 496 static htab_t phi_translate_table; |
614 | 615 |
615 static bitmap_set_t | 616 static bitmap_set_t |
616 bitmap_set_new (void) | 617 bitmap_set_new (void) |
617 { | 618 { |
618 bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool); | 619 bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool); |
619 ret->expressions = BITMAP_ALLOC (&grand_bitmap_obstack); | 620 bitmap_initialize (&ret->expressions, &grand_bitmap_obstack); |
620 ret->values = BITMAP_ALLOC (&grand_bitmap_obstack); | 621 bitmap_initialize (&ret->values, &grand_bitmap_obstack); |
621 return ret; | 622 return ret; |
622 } | 623 } |
623 | 624 |
624 /* Return the value id for a PRE expression EXPR. */ | 625 /* Return the value id for a PRE expression EXPR. */ |
625 | 626 |
656 bitmap_remove_from_set (bitmap_set_t set, pre_expr expr) | 657 bitmap_remove_from_set (bitmap_set_t set, pre_expr expr) |
657 { | 658 { |
658 unsigned int val = get_expr_value_id (expr); | 659 unsigned int val = get_expr_value_id (expr); |
659 if (!value_id_constant_p (val)) | 660 if (!value_id_constant_p (val)) |
660 { | 661 { |
661 bitmap_clear_bit (set->values, val); | 662 bitmap_clear_bit (&set->values, val); |
662 bitmap_clear_bit (set->expressions, get_expression_id (expr)); | 663 bitmap_clear_bit (&set->expressions, get_expression_id (expr)); |
663 } | 664 } |
664 } | 665 } |
665 | 666 |
666 static void | 667 static void |
667 bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr, | 668 bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr, |
669 { | 670 { |
670 if (allow_constants || !value_id_constant_p (val)) | 671 if (allow_constants || !value_id_constant_p (val)) |
671 { | 672 { |
672 /* We specifically expect this and only this function to be able to | 673 /* We specifically expect this and only this function to be able to |
673 insert constants into a set. */ | 674 insert constants into a set. */ |
674 bitmap_set_bit (set->values, val); | 675 bitmap_set_bit (&set->values, val); |
675 bitmap_set_bit (set->expressions, get_or_alloc_expression_id (expr)); | 676 bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr)); |
676 } | 677 } |
677 } | 678 } |
678 | 679 |
679 /* Insert an expression EXPR into a bitmapped set. */ | 680 /* Insert an expression EXPR into a bitmapped set. */ |
680 | 681 |
687 /* Copy a bitmapped set ORIG, into bitmapped set DEST. */ | 688 /* Copy a bitmapped set ORIG, into bitmapped set DEST. */ |
688 | 689 |
689 static void | 690 static void |
690 bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig) | 691 bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig) |
691 { | 692 { |
692 bitmap_copy (dest->expressions, orig->expressions); | 693 bitmap_copy (&dest->expressions, &orig->expressions); |
693 bitmap_copy (dest->values, orig->values); | 694 bitmap_copy (&dest->values, &orig->values); |
694 } | 695 } |
695 | 696 |
696 | 697 |
697 /* Free memory used up by SET. */ | 698 /* Free memory used up by SET. */ |
698 static void | 699 static void |
699 bitmap_set_free (bitmap_set_t set) | 700 bitmap_set_free (bitmap_set_t set) |
700 { | 701 { |
701 BITMAP_FREE (set->expressions); | 702 bitmap_clear (&set->expressions); |
702 BITMAP_FREE (set->values); | 703 bitmap_clear (&set->values); |
703 } | 704 } |
704 | 705 |
705 | 706 |
706 /* Generate a topologically-ordered array of bitmap set SET. */ | 707 /* Generate a topologically-ordered array of bitmap set SET. */ |
707 | 708 |
711 unsigned int i, j; | 712 unsigned int i, j; |
712 bitmap_iterator bi, bj; | 713 bitmap_iterator bi, bj; |
713 VEC(pre_expr, heap) *result; | 714 VEC(pre_expr, heap) *result; |
714 | 715 |
715 /* Pre-allocate roughly enough space for the array. */ | 716 /* Pre-allocate roughly enough space for the array. */ |
716 result = VEC_alloc (pre_expr, heap, bitmap_count_bits (set->values)); | 717 result = VEC_alloc (pre_expr, heap, bitmap_count_bits (&set->values)); |
717 | 718 |
718 FOR_EACH_VALUE_ID_IN_SET (set, i, bi) | 719 FOR_EACH_VALUE_ID_IN_SET (set, i, bi) |
719 { | 720 { |
720 /* The number of expressions having a given value is usually | 721 /* The number of expressions having a given value is usually |
721 relatively small. Thus, rather than making a vector of all | 722 relatively small. Thus, rather than making a vector of all |
728 If this is somehow a significant loss for some cases, we can | 729 If this is somehow a significant loss for some cases, we can |
729 choose which set to walk based on the set size. */ | 730 choose which set to walk based on the set size. */ |
730 bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, i); | 731 bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, i); |
731 FOR_EACH_EXPR_ID_IN_SET (exprset, j, bj) | 732 FOR_EACH_EXPR_ID_IN_SET (exprset, j, bj) |
732 { | 733 { |
733 if (bitmap_bit_p (set->expressions, j)) | 734 if (bitmap_bit_p (&set->expressions, j)) |
734 VEC_safe_push (pre_expr, heap, result, expression_for_id (j)); | 735 VEC_safe_push (pre_expr, heap, result, expression_for_id (j)); |
735 } | 736 } |
736 } | 737 } |
737 | 738 |
738 return result; | 739 return result; |
746 bitmap_iterator bi; | 747 bitmap_iterator bi; |
747 unsigned int i; | 748 unsigned int i; |
748 | 749 |
749 if (dest != orig) | 750 if (dest != orig) |
750 { | 751 { |
751 bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack); | 752 bitmap_head temp; |
752 | 753 bitmap_initialize (&temp, &grand_bitmap_obstack); |
753 bitmap_and_into (dest->values, orig->values); | 754 |
754 bitmap_copy (temp, dest->expressions); | 755 bitmap_and_into (&dest->values, &orig->values); |
755 EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi) | 756 bitmap_copy (&temp, &dest->expressions); |
757 EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi) | |
756 { | 758 { |
757 pre_expr expr = expression_for_id (i); | 759 pre_expr expr = expression_for_id (i); |
758 unsigned int value_id = get_expr_value_id (expr); | 760 unsigned int value_id = get_expr_value_id (expr); |
759 if (!bitmap_bit_p (dest->values, value_id)) | 761 if (!bitmap_bit_p (&dest->values, value_id)) |
760 bitmap_clear_bit (dest->expressions, i); | 762 bitmap_clear_bit (&dest->expressions, i); |
761 } | 763 } |
762 BITMAP_FREE (temp); | 764 bitmap_clear (&temp); |
763 } | 765 } |
764 } | 766 } |
765 | 767 |
766 /* Subtract all values and expressions contained in ORIG from DEST. */ | 768 /* Subtract all values and expressions contained in ORIG from DEST. */ |
767 | 769 |
770 { | 772 { |
771 bitmap_set_t result = bitmap_set_new (); | 773 bitmap_set_t result = bitmap_set_new (); |
772 bitmap_iterator bi; | 774 bitmap_iterator bi; |
773 unsigned int i; | 775 unsigned int i; |
774 | 776 |
775 bitmap_and_compl (result->expressions, dest->expressions, | 777 bitmap_and_compl (&result->expressions, &dest->expressions, |
776 orig->expressions); | 778 &orig->expressions); |
777 | 779 |
778 FOR_EACH_EXPR_ID_IN_SET (result, i, bi) | 780 FOR_EACH_EXPR_ID_IN_SET (result, i, bi) |
779 { | 781 { |
780 pre_expr expr = expression_for_id (i); | 782 pre_expr expr = expression_for_id (i); |
781 unsigned int value_id = get_expr_value_id (expr); | 783 unsigned int value_id = get_expr_value_id (expr); |
782 bitmap_set_bit (result->values, value_id); | 784 bitmap_set_bit (&result->values, value_id); |
783 } | 785 } |
784 | 786 |
785 return result; | 787 return result; |
786 } | 788 } |
787 | 789 |
790 static void | 792 static void |
791 bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b) | 793 bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b) |
792 { | 794 { |
793 unsigned int i; | 795 unsigned int i; |
794 bitmap_iterator bi; | 796 bitmap_iterator bi; |
795 bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack); | 797 bitmap_head temp; |
796 | 798 |
797 bitmap_copy (temp, a->expressions); | 799 bitmap_initialize (&temp, &grand_bitmap_obstack); |
798 EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi) | 800 |
801 bitmap_copy (&temp, &a->expressions); | |
802 EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi) | |
799 { | 803 { |
800 pre_expr expr = expression_for_id (i); | 804 pre_expr expr = expression_for_id (i); |
801 if (bitmap_set_contains_value (b, get_expr_value_id (expr))) | 805 if (bitmap_set_contains_value (b, get_expr_value_id (expr))) |
802 bitmap_remove_from_set (a, expr); | 806 bitmap_remove_from_set (a, expr); |
803 } | 807 } |
804 BITMAP_FREE (temp); | 808 bitmap_clear (&temp); |
805 } | 809 } |
806 | 810 |
807 | 811 |
808 /* Return true if bitmapped set SET contains the value VALUE_ID. */ | 812 /* Return true if bitmapped set SET contains the value VALUE_ID. */ |
809 | 813 |
811 bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id) | 815 bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id) |
812 { | 816 { |
813 if (value_id_constant_p (value_id)) | 817 if (value_id_constant_p (value_id)) |
814 return true; | 818 return true; |
815 | 819 |
816 if (!set || bitmap_empty_p (set->expressions)) | 820 if (!set || bitmap_empty_p (&set->expressions)) |
817 return false; | 821 return false; |
818 | 822 |
819 return bitmap_bit_p (set->values, value_id); | 823 return bitmap_bit_p (&set->values, value_id); |
820 } | 824 } |
821 | 825 |
822 static inline bool | 826 static inline bool |
823 bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr) | 827 bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr) |
824 { | 828 { |
825 return bitmap_bit_p (set->expressions, get_expression_id (expr)); | 829 return bitmap_bit_p (&set->expressions, get_expression_id (expr)); |
826 } | 830 } |
827 | 831 |
828 /* Replace an instance of value LOOKFOR with expression EXPR in SET. */ | 832 /* Replace an instance of value LOOKFOR with expression EXPR in SET. */ |
829 | 833 |
830 static void | 834 static void |
851 significant loss for some cases, we can choose which set to walk | 855 significant loss for some cases, we can choose which set to walk |
852 based on the set size. */ | 856 based on the set size. */ |
853 exprset = VEC_index (bitmap_set_t, value_expressions, lookfor); | 857 exprset = VEC_index (bitmap_set_t, value_expressions, lookfor); |
854 FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi) | 858 FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi) |
855 { | 859 { |
856 if (bitmap_bit_p (set->expressions, i)) | 860 if (bitmap_clear_bit (&set->expressions, i)) |
857 { | 861 { |
858 bitmap_clear_bit (set->expressions, i); | 862 bitmap_set_bit (&set->expressions, get_expression_id (expr)); |
859 bitmap_set_bit (set->expressions, get_expression_id (expr)); | |
860 return; | 863 return; |
861 } | 864 } |
862 } | 865 } |
863 } | 866 } |
864 | 867 |
865 /* Return true if two bitmap sets are equal. */ | 868 /* Return true if two bitmap sets are equal. */ |
866 | 869 |
867 static bool | 870 static bool |
868 bitmap_set_equal (bitmap_set_t a, bitmap_set_t b) | 871 bitmap_set_equal (bitmap_set_t a, bitmap_set_t b) |
869 { | 872 { |
870 return bitmap_equal_p (a->values, b->values); | 873 return bitmap_equal_p (&a->values, &b->values); |
871 } | 874 } |
872 | 875 |
873 /* Replace an instance of EXPR's VALUE with EXPR in SET if it exists, | 876 /* Replace an instance of EXPR's VALUE with EXPR in SET if it exists, |
874 and add it otherwise. */ | 877 and add it otherwise. */ |
875 | 878 |
890 static void | 893 static void |
891 bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr) | 894 bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr) |
892 { | 895 { |
893 unsigned int val = get_expr_value_id (expr); | 896 unsigned int val = get_expr_value_id (expr); |
894 | 897 |
895 #ifdef ENABLE_CHECKING | 898 gcc_checking_assert (expr->id == get_or_alloc_expression_id (expr)); |
896 gcc_assert (expr->id == get_or_alloc_expression_id (expr)); | |
897 #endif | |
898 | 899 |
899 /* Constant values are always considered to be part of the set. */ | 900 /* Constant values are always considered to be part of the set. */ |
900 if (value_id_constant_p (val)) | 901 if (value_id_constant_p (val)) |
901 return; | 902 return; |
902 | 903 |
903 /* If the value membership changed, add the expression. */ | 904 /* If the value membership changed, add the expression. */ |
904 if (bitmap_set_bit (set->values, val)) | 905 if (bitmap_set_bit (&set->values, val)) |
905 bitmap_set_bit (set->expressions, expr->id); | 906 bitmap_set_bit (&set->expressions, expr->id); |
906 } | 907 } |
907 | 908 |
908 /* Print out EXPR to outfile. */ | 909 /* Print out EXPR to outfile. */ |
909 | 910 |
910 static void | 911 static void |
984 } | 985 } |
985 } | 986 } |
986 void debug_pre_expr (pre_expr); | 987 void debug_pre_expr (pre_expr); |
987 | 988 |
988 /* Like print_pre_expr but always prints to stderr. */ | 989 /* Like print_pre_expr but always prints to stderr. */ |
989 void | 990 DEBUG_FUNCTION void |
990 debug_pre_expr (pre_expr e) | 991 debug_pre_expr (pre_expr e) |
991 { | 992 { |
992 print_pre_expr (stderr, e); | 993 print_pre_expr (stderr, e); |
993 fprintf (stderr, "\n"); | 994 fprintf (stderr, "\n"); |
994 } | 995 } |
1021 fprintf (outfile, " }\n"); | 1022 fprintf (outfile, " }\n"); |
1022 } | 1023 } |
1023 | 1024 |
1024 void debug_bitmap_set (bitmap_set_t); | 1025 void debug_bitmap_set (bitmap_set_t); |
1025 | 1026 |
1026 void | 1027 DEBUG_FUNCTION void |
1027 debug_bitmap_set (bitmap_set_t set) | 1028 debug_bitmap_set (bitmap_set_t set) |
1028 { | 1029 { |
1029 print_bitmap_set (stderr, set, "debug", 0); | 1030 print_bitmap_set (stderr, set, "debug", 0); |
1030 } | 1031 } |
1031 | 1032 |
1042 print_bitmap_set (outfile, set, s, 0); | 1043 print_bitmap_set (outfile, set, s, 0); |
1043 } | 1044 } |
1044 } | 1045 } |
1045 | 1046 |
1046 | 1047 |
1047 void | 1048 DEBUG_FUNCTION void |
1048 debug_value_expressions (unsigned int val) | 1049 debug_value_expressions (unsigned int val) |
1049 { | 1050 { |
1050 print_value_expressions (stderr, val); | 1051 print_value_expressions (stderr, val); |
1051 } | 1052 } |
1052 | 1053 |
1625 newop.opcode = TREE_CODE (op0); | 1626 newop.opcode = TREE_CODE (op0); |
1626 newop.type = type; | 1627 newop.type = type; |
1627 newop.op0 = op0; | 1628 newop.op0 = op0; |
1628 newop.op1 = op1; | 1629 newop.op1 = op1; |
1629 newop.op2 = op2; | 1630 newop.op2 = op2; |
1631 /* If it transforms a non-constant ARRAY_REF into a constant | |
1632 one, adjust the constant offset. */ | |
1633 if (newop.opcode == ARRAY_REF | |
1634 && newop.off == -1 | |
1635 && TREE_CODE (op0) == INTEGER_CST | |
1636 && TREE_CODE (op1) == INTEGER_CST | |
1637 && TREE_CODE (op2) == INTEGER_CST) | |
1638 { | |
1639 double_int off = tree_to_double_int (op0); | |
1640 off = double_int_add (off, | |
1641 double_int_neg | |
1642 (tree_to_double_int (op1))); | |
1643 off = double_int_mul (off, tree_to_double_int (op2)); | |
1644 if (double_int_fits_in_shwi_p (off)) | |
1645 newop.off = off.low; | |
1646 } | |
1630 VEC_replace (vn_reference_op_s, newoperands, j, &newop); | 1647 VEC_replace (vn_reference_op_s, newoperands, j, &newop); |
1631 /* If it transforms from an SSA_NAME to an address, fold with | 1648 /* If it transforms from an SSA_NAME to an address, fold with |
1632 a preceding indirect reference. */ | 1649 a preceding indirect reference. */ |
1633 if (j > 0 && op0 && TREE_CODE (op0) == ADDR_EXPR | 1650 if (j > 0 && op0 && TREE_CODE (op0) == ADDR_EXPR |
1634 && VEC_index (vn_reference_op_s, | 1651 && VEC_index (vn_reference_op_s, |
1635 newoperands, j - 1)->opcode == INDIRECT_REF) | 1652 newoperands, j - 1)->opcode == MEM_REF) |
1636 vn_reference_fold_indirect (&newoperands, &j); | 1653 vn_reference_fold_indirect (&newoperands, &j); |
1637 } | 1654 } |
1638 if (i != VEC_length (vn_reference_op_s, operands)) | 1655 if (i != VEC_length (vn_reference_op_s, operands)) |
1639 { | 1656 { |
1640 if (newoperands) | 1657 if (newoperands) |
1657 | 1674 |
1658 if (changed || newvuse != vuse) | 1675 if (changed || newvuse != vuse) |
1659 { | 1676 { |
1660 unsigned int new_val_id; | 1677 unsigned int new_val_id; |
1661 pre_expr constant; | 1678 pre_expr constant; |
1679 bool converted = false; | |
1662 | 1680 |
1663 tree result = vn_reference_lookup_pieces (newvuse, ref->set, | 1681 tree result = vn_reference_lookup_pieces (newvuse, ref->set, |
1664 ref->type, | 1682 ref->type, |
1665 newoperands, | 1683 newoperands, |
1666 &newref, true); | 1684 &newref, VN_WALK); |
1667 if (result) | 1685 if (result) |
1668 VEC_free (vn_reference_op_s, heap, newoperands); | 1686 VEC_free (vn_reference_op_s, heap, newoperands); |
1687 | |
1688 if (result | |
1689 && !useless_type_conversion_p (ref->type, TREE_TYPE (result))) | |
1690 { | |
1691 result = fold_build1 (VIEW_CONVERT_EXPR, ref->type, result); | |
1692 converted = true; | |
1693 } | |
1694 else if (!result && newref | |
1695 && !useless_type_conversion_p (ref->type, newref->type)) | |
1696 { | |
1697 VEC_free (vn_reference_op_s, heap, newoperands); | |
1698 return NULL; | |
1699 } | |
1669 | 1700 |
1670 if (result && is_gimple_min_invariant (result)) | 1701 if (result && is_gimple_min_invariant (result)) |
1671 { | 1702 { |
1672 gcc_assert (!newoperands); | 1703 gcc_assert (!newoperands); |
1673 return get_or_alloc_expr_for_constant (result); | 1704 return get_or_alloc_expr_for_constant (result); |
1675 | 1706 |
1676 expr = (pre_expr) pool_alloc (pre_expr_pool); | 1707 expr = (pre_expr) pool_alloc (pre_expr_pool); |
1677 expr->kind = REFERENCE; | 1708 expr->kind = REFERENCE; |
1678 expr->id = 0; | 1709 expr->id = 0; |
1679 | 1710 |
1680 if (newref) | 1711 if (converted) |
1712 { | |
1713 vn_nary_op_t nary; | |
1714 tree nresult; | |
1715 | |
1716 gcc_assert (CONVERT_EXPR_P (result) | |
1717 || TREE_CODE (result) == VIEW_CONVERT_EXPR); | |
1718 | |
1719 nresult = vn_nary_op_lookup_pieces (1, TREE_CODE (result), | |
1720 TREE_TYPE (result), | |
1721 TREE_OPERAND (result, 0), | |
1722 NULL_TREE, NULL_TREE, | |
1723 NULL_TREE, | |
1724 &nary); | |
1725 if (nresult && is_gimple_min_invariant (nresult)) | |
1726 return get_or_alloc_expr_for_constant (nresult); | |
1727 | |
1728 expr->kind = NARY; | |
1729 if (nary) | |
1730 { | |
1731 PRE_EXPR_NARY (expr) = nary; | |
1732 constant = fully_constant_expression (expr); | |
1733 if (constant != expr) | |
1734 return constant; | |
1735 | |
1736 new_val_id = nary->value_id; | |
1737 get_or_alloc_expression_id (expr); | |
1738 } | |
1739 else | |
1740 { | |
1741 new_val_id = get_next_value_id (); | |
1742 VEC_safe_grow_cleared (bitmap_set_t, heap, | |
1743 value_expressions, | |
1744 get_max_value_id() + 1); | |
1745 nary = vn_nary_op_insert_pieces (1, TREE_CODE (result), | |
1746 TREE_TYPE (result), | |
1747 TREE_OPERAND (result, 0), | |
1748 NULL_TREE, NULL_TREE, | |
1749 NULL_TREE, NULL_TREE, | |
1750 new_val_id); | |
1751 PRE_EXPR_NARY (expr) = nary; | |
1752 constant = fully_constant_expression (expr); | |
1753 if (constant != expr) | |
1754 return constant; | |
1755 get_or_alloc_expression_id (expr); | |
1756 } | |
1757 } | |
1758 else if (newref) | |
1681 { | 1759 { |
1682 PRE_EXPR_REFERENCE (expr) = newref; | 1760 PRE_EXPR_REFERENCE (expr) = newref; |
1683 constant = fully_constant_expression (expr); | 1761 constant = fully_constant_expression (expr); |
1684 if (constant != expr) | 1762 if (constant != expr) |
1685 return constant; | 1763 return constant; |
1812 bitmap_set_copy (dest, set); | 1890 bitmap_set_copy (dest, set); |
1813 return; | 1891 return; |
1814 } | 1892 } |
1815 | 1893 |
1816 exprs = sorted_array_from_bitmap_set (set); | 1894 exprs = sorted_array_from_bitmap_set (set); |
1817 for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++) | 1895 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr) |
1818 { | 1896 { |
1819 pre_expr translated; | 1897 pre_expr translated; |
1820 translated = phi_translate (expr, set, NULL, pred, phiblock); | 1898 translated = phi_translate (expr, set, NULL, pred, phiblock); |
1821 if (!translated) | 1899 if (!translated) |
1822 continue; | 1900 continue; |
1869 choose which set to walk based on which set is smaller. */ | 1947 choose which set to walk based on which set is smaller. */ |
1870 unsigned int i; | 1948 unsigned int i; |
1871 bitmap_iterator bi; | 1949 bitmap_iterator bi; |
1872 bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, val); | 1950 bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, val); |
1873 | 1951 |
1874 EXECUTE_IF_AND_IN_BITMAP (exprset->expressions, | 1952 EXECUTE_IF_AND_IN_BITMAP (&exprset->expressions, |
1875 set->expressions, 0, i, bi) | 1953 &set->expressions, 0, i, bi) |
1876 { | 1954 { |
1877 pre_expr val = expression_for_id (i); | 1955 pre_expr val = expression_for_id (i); |
1878 /* At the point where stmt is not null, there should always | 1956 /* At the point where stmt is not null, there should always |
1879 be an SSA_NAME first in the list of expressions. */ | 1957 be an SSA_NAME first in the list of expressions. */ |
1880 if (stmt) | 1958 if (stmt) |
1881 { | 1959 { |
1882 gimple def_stmt = SSA_NAME_DEF_STMT (PRE_EXPR_NAME (val)); | 1960 gimple def_stmt = SSA_NAME_DEF_STMT (PRE_EXPR_NAME (val)); |
1883 if (gimple_code (def_stmt) != GIMPLE_PHI | 1961 if (gimple_code (def_stmt) != GIMPLE_PHI |
1884 && gimple_bb (def_stmt) == gimple_bb (stmt) | 1962 && gimple_bb (def_stmt) == gimple_bb (stmt) |
1885 && gimple_uid (def_stmt) >= gimple_uid (stmt)) | 1963 /* PRE insertions are at the end of the basic-block |
1964 and have UID 0. */ | |
1965 && (gimple_uid (def_stmt) == 0 | |
1966 || gimple_uid (def_stmt) >= gimple_uid (stmt))) | |
1886 continue; | 1967 continue; |
1887 } | 1968 } |
1888 return val; | 1969 return val; |
1889 } | 1970 } |
1890 } | 1971 } |
2073 { | 2154 { |
2074 vn_reference_t ref = PRE_EXPR_REFERENCE (expr); | 2155 vn_reference_t ref = PRE_EXPR_REFERENCE (expr); |
2075 vn_reference_op_t vro; | 2156 vn_reference_op_t vro; |
2076 unsigned int i; | 2157 unsigned int i; |
2077 | 2158 |
2078 for (i = 0; VEC_iterate (vn_reference_op_s, ref->operands, i, vro); i++) | 2159 FOR_EACH_VEC_ELT (vn_reference_op_s, ref->operands, i, vro) |
2079 { | 2160 { |
2080 if (!vro_valid_in_sets (set1, set2, vro)) | 2161 if (!vro_valid_in_sets (set1, set2, vro)) |
2081 return false; | 2162 return false; |
2082 } | 2163 } |
2083 if (ref->vuse) | 2164 if (ref->vuse) |
2107 { | 2188 { |
2108 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set1); | 2189 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set1); |
2109 pre_expr expr; | 2190 pre_expr expr; |
2110 int i; | 2191 int i; |
2111 | 2192 |
2112 for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++) | 2193 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr) |
2113 { | 2194 { |
2114 if (!valid_in_sets (set1, set2, expr, block)) | 2195 if (!valid_in_sets (set1, set2, expr, block)) |
2115 bitmap_remove_from_set (set1, expr); | 2196 bitmap_remove_from_set (set1, expr); |
2116 } | 2197 } |
2117 VEC_free (pre_expr, heap, exprs); | 2198 VEC_free (pre_expr, heap, exprs); |
2126 { | 2207 { |
2127 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set); | 2208 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set); |
2128 pre_expr expr; | 2209 pre_expr expr; |
2129 int i; | 2210 int i; |
2130 | 2211 |
2131 for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++) | 2212 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr) |
2132 { | 2213 { |
2133 if (!valid_in_sets (set, NULL, expr, block)) | 2214 if (!valid_in_sets (set, NULL, expr, block)) |
2134 bitmap_remove_from_set (set, expr); | 2215 bitmap_remove_from_set (set, expr); |
2135 } | 2216 } |
2136 VEC_free (pre_expr, heap, exprs); | 2217 VEC_free (pre_expr, heap, exprs); |
2260 if (!gimple_seq_empty_p (phi_nodes (first))) | 2341 if (!gimple_seq_empty_p (phi_nodes (first))) |
2261 phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first); | 2342 phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first); |
2262 else | 2343 else |
2263 bitmap_set_copy (ANTIC_OUT, ANTIC_IN (first)); | 2344 bitmap_set_copy (ANTIC_OUT, ANTIC_IN (first)); |
2264 | 2345 |
2265 for (i = 0; VEC_iterate (basic_block, worklist, i, bprime); i++) | 2346 FOR_EACH_VEC_ELT (basic_block, worklist, i, bprime) |
2266 { | 2347 { |
2267 if (!gimple_seq_empty_p (phi_nodes (bprime))) | 2348 if (!gimple_seq_empty_p (phi_nodes (bprime))) |
2268 { | 2349 { |
2269 bitmap_set_t tmp = bitmap_set_new (); | 2350 bitmap_set_t tmp = bitmap_set_new (); |
2270 phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime); | 2351 phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime); |
2290 bitmap_value_insert_into_set (ANTIC_IN (block), | 2371 bitmap_value_insert_into_set (ANTIC_IN (block), |
2291 expression_for_id (bii)); | 2372 expression_for_id (bii)); |
2292 | 2373 |
2293 clean (ANTIC_IN (block), block); | 2374 clean (ANTIC_IN (block), block); |
2294 | 2375 |
2295 /* !old->expressions can happen when we deferred a block. */ | 2376 if (!bitmap_set_equal (old, ANTIC_IN (block))) |
2296 if (!old->expressions || !bitmap_set_equal (old, ANTIC_IN (block))) | |
2297 { | 2377 { |
2298 changed = true; | 2378 changed = true; |
2299 SET_BIT (changed_blocks, block->index); | 2379 SET_BIT (changed_blocks, block->index); |
2300 FOR_EACH_EDGE (e, ei, block->preds) | 2380 FOR_EACH_EDGE (e, ei, block->preds) |
2301 SET_BIT (changed_blocks, e->src->index); | 2381 SET_BIT (changed_blocks, e->src->index); |
2366 /* If there are too many partially anticipatable values in the | 2446 /* If there are too many partially anticipatable values in the |
2367 block, phi_translate_set can take an exponential time: stop | 2447 block, phi_translate_set can take an exponential time: stop |
2368 before the translation starts. */ | 2448 before the translation starts. */ |
2369 if (max_pa | 2449 if (max_pa |
2370 && single_succ_p (block) | 2450 && single_succ_p (block) |
2371 && bitmap_count_bits (PA_IN (single_succ (block))->values) > max_pa) | 2451 && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa) |
2372 goto maybe_dump_sets; | 2452 goto maybe_dump_sets; |
2373 | 2453 |
2374 old_PA_IN = PA_IN (block); | 2454 old_PA_IN = PA_IN (block); |
2375 PA_OUT = bitmap_set_new (); | 2455 PA_OUT = bitmap_set_new (); |
2376 | 2456 |
2404 continue; | 2484 continue; |
2405 VEC_quick_push (basic_block, worklist, e->dest); | 2485 VEC_quick_push (basic_block, worklist, e->dest); |
2406 } | 2486 } |
2407 if (VEC_length (basic_block, worklist) > 0) | 2487 if (VEC_length (basic_block, worklist) > 0) |
2408 { | 2488 { |
2409 for (i = 0; VEC_iterate (basic_block, worklist, i, bprime); i++) | 2489 FOR_EACH_VEC_ELT (basic_block, worklist, i, bprime) |
2410 { | 2490 { |
2411 unsigned int i; | 2491 unsigned int i; |
2412 bitmap_iterator bi; | 2492 bitmap_iterator bi; |
2413 | 2493 |
2414 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi) | 2494 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi) |
2436 Then we subtract things from ANTIC_IN. */ | 2516 Then we subtract things from ANTIC_IN. */ |
2437 PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block)); | 2517 PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block)); |
2438 | 2518 |
2439 /* For partial antic, we want to put back in the phi results, since | 2519 /* For partial antic, we want to put back in the phi results, since |
2440 we will properly avoid making them partially antic over backedges. */ | 2520 we will properly avoid making them partially antic over backedges. */ |
2441 bitmap_ior_into (PA_IN (block)->values, PHI_GEN (block)->values); | 2521 bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values); |
2442 bitmap_ior_into (PA_IN (block)->expressions, PHI_GEN (block)->expressions); | 2522 bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions); |
2443 | 2523 |
2444 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */ | 2524 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */ |
2445 bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block)); | 2525 bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block)); |
2446 | 2526 |
2447 dependent_clean (PA_IN (block), ANTIC_IN (block), block); | 2527 dependent_clean (PA_IN (block), ANTIC_IN (block), block); |
2518 sbitmap_ones (changed_blocks); | 2598 sbitmap_ones (changed_blocks); |
2519 while (changed) | 2599 while (changed) |
2520 { | 2600 { |
2521 if (dump_file && (dump_flags & TDF_DETAILS)) | 2601 if (dump_file && (dump_flags & TDF_DETAILS)) |
2522 fprintf (dump_file, "Starting iteration %d\n", num_iterations); | 2602 fprintf (dump_file, "Starting iteration %d\n", num_iterations); |
2603 /* ??? We need to clear our PHI translation cache here as the | |
2604 ANTIC sets shrink and we restrict valid translations to | |
2605 those having operands with leaders in ANTIC. Same below | |
2606 for PA ANTIC computation. */ | |
2523 num_iterations++; | 2607 num_iterations++; |
2524 changed = false; | 2608 changed = false; |
2525 for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1; i >= 0; i--) | 2609 for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1; i >= 0; i--) |
2526 { | 2610 { |
2527 if (TEST_BIT (changed_blocks, postorder[i])) | 2611 if (TEST_BIT (changed_blocks, postorder[i])) |
2530 changed |= compute_antic_aux (block, | 2614 changed |= compute_antic_aux (block, |
2531 TEST_BIT (has_abnormal_preds, | 2615 TEST_BIT (has_abnormal_preds, |
2532 block->index)); | 2616 block->index)); |
2533 } | 2617 } |
2534 } | 2618 } |
2535 #ifdef ENABLE_CHECKING | |
2536 /* Theoretically possible, but *highly* unlikely. */ | 2619 /* Theoretically possible, but *highly* unlikely. */ |
2537 gcc_assert (num_iterations < 500); | 2620 gcc_checking_assert (num_iterations < 500); |
2538 #endif | |
2539 } | 2621 } |
2540 | 2622 |
2541 statistics_histogram_event (cfun, "compute_antic iterations", | 2623 statistics_histogram_event (cfun, "compute_antic iterations", |
2542 num_iterations); | 2624 num_iterations); |
2543 | 2625 |
2562 |= compute_partial_antic_aux (block, | 2644 |= compute_partial_antic_aux (block, |
2563 TEST_BIT (has_abnormal_preds, | 2645 TEST_BIT (has_abnormal_preds, |
2564 block->index)); | 2646 block->index)); |
2565 } | 2647 } |
2566 } | 2648 } |
2567 #ifdef ENABLE_CHECKING | |
2568 /* Theoretically possible, but *highly* unlikely. */ | 2649 /* Theoretically possible, but *highly* unlikely. */ |
2569 gcc_assert (num_iterations < 500); | 2650 gcc_checking_assert (num_iterations < 500); |
2570 #endif | |
2571 } | 2651 } |
2572 statistics_histogram_event (cfun, "compute_partial_antic iterations", | 2652 statistics_histogram_event (cfun, "compute_partial_antic iterations", |
2573 num_iterations); | 2653 num_iterations); |
2574 } | 2654 } |
2575 sbitmap_free (has_abnormal_preds); | 2655 sbitmap_free (has_abnormal_preds); |
2595 can_PRE_operation (tree op) | 2675 can_PRE_operation (tree op) |
2596 { | 2676 { |
2597 return UNARY_CLASS_P (op) | 2677 return UNARY_CLASS_P (op) |
2598 || BINARY_CLASS_P (op) | 2678 || BINARY_CLASS_P (op) |
2599 || COMPARISON_CLASS_P (op) | 2679 || COMPARISON_CLASS_P (op) |
2600 || TREE_CODE (op) == INDIRECT_REF | 2680 || TREE_CODE (op) == MEM_REF |
2601 || TREE_CODE (op) == COMPONENT_REF | 2681 || TREE_CODE (op) == COMPONENT_REF |
2602 || TREE_CODE (op) == VIEW_CONVERT_EXPR | 2682 || TREE_CODE (op) == VIEW_CONVERT_EXPR |
2603 || TREE_CODE (op) == CALL_EXPR | 2683 || TREE_CODE (op) == CALL_EXPR |
2604 || TREE_CODE (op) == ARRAY_REF; | 2684 || TREE_CODE (op) == ARRAY_REF; |
2605 } | 2685 } |
2671 if (sc) | 2751 if (sc) |
2672 CALL_EXPR_STATIC_CHAIN (folded) = sc; | 2752 CALL_EXPR_STATIC_CHAIN (folded) = sc; |
2673 return folded; | 2753 return folded; |
2674 } | 2754 } |
2675 break; | 2755 break; |
2756 case MEM_REF: | |
2757 { | |
2758 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand, | |
2759 stmts, domstmt); | |
2760 tree offset = currop->op0; | |
2761 if (!baseop) | |
2762 return NULL_TREE; | |
2763 if (TREE_CODE (baseop) == ADDR_EXPR | |
2764 && handled_component_p (TREE_OPERAND (baseop, 0))) | |
2765 { | |
2766 HOST_WIDE_INT off; | |
2767 tree base; | |
2768 base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0), | |
2769 &off); | |
2770 gcc_assert (base); | |
2771 offset = int_const_binop (PLUS_EXPR, offset, | |
2772 build_int_cst (TREE_TYPE (offset), | |
2773 off), 0); | |
2774 baseop = build_fold_addr_expr (base); | |
2775 } | |
2776 return fold_build2 (MEM_REF, currop->type, baseop, offset); | |
2777 } | |
2778 break; | |
2676 case TARGET_MEM_REF: | 2779 case TARGET_MEM_REF: |
2677 { | 2780 { |
2781 pre_expr op0expr, op1expr; | |
2782 tree genop0 = NULL_TREE, genop1 = NULL_TREE; | |
2678 vn_reference_op_t nextop = VEC_index (vn_reference_op_s, ref->operands, | 2783 vn_reference_op_t nextop = VEC_index (vn_reference_op_s, ref->operands, |
2679 *operand); | 2784 ++*operand); |
2680 pre_expr op0expr; | |
2681 tree genop0 = NULL_TREE; | |
2682 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand, | 2785 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand, |
2683 stmts, domstmt); | 2786 stmts, domstmt); |
2684 if (!baseop) | 2787 if (!baseop) |
2685 return NULL_TREE; | 2788 return NULL_TREE; |
2686 if (currop->op0) | 2789 if (currop->op0) |
2689 genop0 = find_or_generate_expression (block, op0expr, | 2792 genop0 = find_or_generate_expression (block, op0expr, |
2690 stmts, domstmt); | 2793 stmts, domstmt); |
2691 if (!genop0) | 2794 if (!genop0) |
2692 return NULL_TREE; | 2795 return NULL_TREE; |
2693 } | 2796 } |
2694 if (DECL_P (baseop)) | 2797 if (nextop->op0) |
2695 return build6 (TARGET_MEM_REF, currop->type, | 2798 { |
2696 baseop, NULL_TREE, | 2799 op1expr = get_or_alloc_expr_for (nextop->op0); |
2697 genop0, currop->op1, currop->op2, | 2800 genop1 = find_or_generate_expression (block, op1expr, |
2698 unshare_expr (nextop->op1)); | 2801 stmts, domstmt); |
2699 else | 2802 if (!genop1) |
2700 return build6 (TARGET_MEM_REF, currop->type, | 2803 return NULL_TREE; |
2701 NULL_TREE, baseop, | 2804 } |
2702 genop0, currop->op1, currop->op2, | 2805 return build5 (TARGET_MEM_REF, currop->type, |
2703 unshare_expr (nextop->op1)); | 2806 baseop, currop->op2, genop0, currop->op1, genop1); |
2704 } | 2807 } |
2705 break; | 2808 break; |
2706 case ADDR_EXPR: | 2809 case ADDR_EXPR: |
2707 if (currop->op0) | 2810 if (currop->op0) |
2708 { | 2811 { |
2720 stmts, domstmt); | 2823 stmts, domstmt); |
2721 if (!genop0) | 2824 if (!genop0) |
2722 return NULL_TREE; | 2825 return NULL_TREE; |
2723 folded = fold_build1 (currop->opcode, currop->type, | 2826 folded = fold_build1 (currop->opcode, currop->type, |
2724 genop0); | 2827 genop0); |
2725 return folded; | |
2726 } | |
2727 break; | |
2728 case ALIGN_INDIRECT_REF: | |
2729 case MISALIGNED_INDIRECT_REF: | |
2730 case INDIRECT_REF: | |
2731 { | |
2732 tree folded; | |
2733 tree genop1 = create_component_ref_by_pieces_1 (block, ref, | |
2734 operand, | |
2735 stmts, domstmt); | |
2736 if (!genop1) | |
2737 return NULL_TREE; | |
2738 genop1 = fold_convert (build_pointer_type (currop->type), | |
2739 genop1); | |
2740 | |
2741 if (currop->opcode == MISALIGNED_INDIRECT_REF) | |
2742 folded = fold_build2 (currop->opcode, currop->type, | |
2743 genop1, currop->op1); | |
2744 else | |
2745 folded = fold_build1 (currop->opcode, currop->type, | |
2746 genop1); | |
2747 return folded; | 2828 return folded; |
2748 } | 2829 } |
2749 break; | 2830 break; |
2750 case BIT_FIELD_REF: | 2831 case BIT_FIELD_REF: |
2751 { | 2832 { |
2877 gcc_unreachable (); | 2958 gcc_unreachable (); |
2878 } | 2959 } |
2879 } | 2960 } |
2880 | 2961 |
2881 /* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the | 2962 /* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the |
2882 COMPONENT_REF or INDIRECT_REF or ARRAY_REF portion, because we'd end up with | 2963 COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up with |
2883 trying to rename aggregates into ssa form directly, which is a no no. | 2964 trying to rename aggregates into ssa form directly, which is a no no. |
2884 | 2965 |
2885 Thus, this routine doesn't create temporaries, it just builds a | 2966 Thus, this routine doesn't create temporaries, it just builds a |
2886 single access expression for the array, calling | 2967 single access expression for the array, calling |
2887 find_or_generate_expression to build the innermost pieces. | 2968 find_or_generate_expression to build the innermost pieces. |
2925 else if (leader->kind == CONSTANT) | 3006 else if (leader->kind == CONSTANT) |
2926 genop = PRE_EXPR_CONSTANT (leader); | 3007 genop = PRE_EXPR_CONSTANT (leader); |
2927 } | 3008 } |
2928 | 3009 |
2929 /* If it's still NULL, it must be a complex expression, so generate | 3010 /* If it's still NULL, it must be a complex expression, so generate |
2930 it recursively. Not so for FRE though. */ | 3011 it recursively. Not so if inserting expressions for values generated |
3012 by SCCVN. */ | |
2931 if (genop == NULL | 3013 if (genop == NULL |
2932 && !in_fre) | 3014 && !domstmt) |
2933 { | 3015 { |
2934 bitmap_set_t exprset; | 3016 bitmap_set_t exprset; |
2935 unsigned int lookfor = get_expr_value_id (expr); | 3017 unsigned int lookfor = get_expr_value_id (expr); |
2936 bool handled = false; | 3018 bool handled = false; |
2937 bitmap_iterator bi; | 3019 bitmap_iterator bi; |
3128 VN_INFO_GET (name)->valnum = name; | 3210 VN_INFO_GET (name)->valnum = name; |
3129 value_id = get_expr_value_id (expr); | 3211 value_id = get_expr_value_id (expr); |
3130 VN_INFO (name)->value_id = value_id; | 3212 VN_INFO (name)->value_id = value_id; |
3131 nameexpr = get_or_alloc_expr_for_name (name); | 3213 nameexpr = get_or_alloc_expr_for_name (name); |
3132 add_to_value (value_id, nameexpr); | 3214 add_to_value (value_id, nameexpr); |
3133 if (!in_fre) | 3215 if (NEW_SETS (block)) |
3134 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr); | 3216 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr); |
3135 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr); | 3217 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr); |
3136 | 3218 |
3137 pre_stats.insertions++; | 3219 pre_stats.insertions++; |
3138 if (dump_file && (dump_flags & TDF_DETAILS)) | 3220 if (dump_file && (dump_flags & TDF_DETAILS)) |
3165 | 3247 |
3166 /* Otherwise we inhibit the insertion when the address of the | 3248 /* Otherwise we inhibit the insertion when the address of the |
3167 memory reference is a simple induction variable. In other | 3249 memory reference is a simple induction variable. In other |
3168 cases the vectorizer won't do anything anyway (either it's | 3250 cases the vectorizer won't do anything anyway (either it's |
3169 loop invariant or a complicated expression). */ | 3251 loop invariant or a complicated expression). */ |
3170 for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i) | 3252 FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op) |
3171 { | 3253 { |
3172 switch (op->opcode) | 3254 switch (op->opcode) |
3173 { | 3255 { |
3174 case ARRAY_REF: | 3256 case ARRAY_REF: |
3175 case ARRAY_RANGE_REF: | 3257 case ARRAY_RANGE_REF: |
3307 gsi_insert_seq_on_edge (pred, stmts); | 3389 gsi_insert_seq_on_edge (pred, stmts); |
3308 } | 3390 } |
3309 avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr); | 3391 avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr); |
3310 } | 3392 } |
3311 } | 3393 } |
3394 else | |
3395 avail[bprime->index] = get_or_alloc_expr_for_constant (builtexpr); | |
3312 } | 3396 } |
3313 } | 3397 } |
3314 else if (eprime->kind == NAME) | 3398 else if (eprime->kind == NAME) |
3315 { | 3399 { |
3316 /* We may have to do a conversion because our value | 3400 /* We may have to do a conversion because our value |
3450 bool new_stuff = false; | 3534 bool new_stuff = false; |
3451 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (ANTIC_IN (block)); | 3535 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (ANTIC_IN (block)); |
3452 pre_expr expr; | 3536 pre_expr expr; |
3453 int i; | 3537 int i; |
3454 | 3538 |
3455 for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++) | 3539 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr) |
3456 { | 3540 { |
3457 if (expr->kind != NAME) | 3541 if (expr->kind != NAME) |
3458 { | 3542 { |
3459 pre_expr *avail; | 3543 pre_expr *avail; |
3460 unsigned int val; | 3544 unsigned int val; |
3531 } | 3615 } |
3532 /* If we can insert it, it's not the same value | 3616 /* If we can insert it, it's not the same value |
3533 already existing along every predecessor, and | 3617 already existing along every predecessor, and |
3534 it's defined by some predecessor, it is | 3618 it's defined by some predecessor, it is |
3535 partially redundant. */ | 3619 partially redundant. */ |
3536 if (!cant_insert && !all_same && by_some && do_insertion | 3620 if (!cant_insert && !all_same && by_some) |
3537 && dbg_cnt (treepre_insert)) | |
3538 { | 3621 { |
3539 if (insert_into_preds_of_block (block, get_expression_id (expr), | 3622 if (!do_insertion) |
3540 avail)) | 3623 { |
3624 if (dump_file && (dump_flags & TDF_DETAILS)) | |
3625 { | |
3626 fprintf (dump_file, "Skipping partial redundancy for " | |
3627 "expression "); | |
3628 print_pre_expr (dump_file, expr); | |
3629 fprintf (dump_file, " (%04d), no redundancy on to be " | |
3630 "optimized for speed edge\n", val); | |
3631 } | |
3632 } | |
3633 else if (dbg_cnt (treepre_insert) | |
3634 && insert_into_preds_of_block (block, | |
3635 get_expression_id (expr), | |
3636 avail)) | |
3541 new_stuff = true; | 3637 new_stuff = true; |
3542 } | 3638 } |
3543 /* If all edges produce the same value and that value is | 3639 /* If all edges produce the same value and that value is |
3544 an invariant, then the PHI has the same value on all | 3640 an invariant, then the PHI has the same value on all |
3545 edges. Note this. */ | 3641 edges. Note this. */ |
3596 bool new_stuff = false; | 3692 bool new_stuff = false; |
3597 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (PA_IN (block)); | 3693 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (PA_IN (block)); |
3598 pre_expr expr; | 3694 pre_expr expr; |
3599 int i; | 3695 int i; |
3600 | 3696 |
3601 for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++) | 3697 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr) |
3602 { | 3698 { |
3603 if (expr->kind != NAME) | 3699 if (expr->kind != NAME) |
3604 { | 3700 { |
3605 pre_expr *avail; | 3701 pre_expr *avail; |
3606 unsigned int val; | 3702 unsigned int val; |
3923 continue; | 4019 continue; |
3924 | 4020 |
3925 copy_reference_ops_from_call (stmt, &ops); | 4021 copy_reference_ops_from_call (stmt, &ops); |
3926 vn_reference_lookup_pieces (gimple_vuse (stmt), 0, | 4022 vn_reference_lookup_pieces (gimple_vuse (stmt), 0, |
3927 gimple_expr_type (stmt), | 4023 gimple_expr_type (stmt), |
3928 ops, &ref, false); | 4024 ops, &ref, VN_NOWALK); |
3929 VEC_free (vn_reference_op_s, heap, ops); | 4025 VEC_free (vn_reference_op_s, heap, ops); |
3930 if (!ref) | 4026 if (!ref) |
3931 continue; | 4027 continue; |
3932 | 4028 |
3933 for (i = 0; VEC_iterate (vn_reference_op_s, | 4029 for (i = 0; VEC_iterate (vn_reference_op_s, |
3993 unsigned int i; | 4089 unsigned int i; |
3994 vn_reference_op_t vro; | 4090 vn_reference_op_t vro; |
3995 | 4091 |
3996 vn_reference_lookup (gimple_assign_rhs1 (stmt), | 4092 vn_reference_lookup (gimple_assign_rhs1 (stmt), |
3997 gimple_vuse (stmt), | 4093 gimple_vuse (stmt), |
3998 true, &ref); | 4094 VN_WALK, &ref); |
3999 if (!ref) | 4095 if (!ref) |
4000 continue; | 4096 continue; |
4001 | 4097 |
4002 for (i = 0; VEC_iterate (vn_reference_op_s, | 4098 for (i = 0; VEC_iterate (vn_reference_op_s, |
4003 ref->operands, i, | 4099 ref->operands, i, |
4173 && sprime != lhs | 4269 && sprime != lhs |
4174 && (rhs == NULL_TREE | 4270 && (rhs == NULL_TREE |
4175 || TREE_CODE (rhs) != SSA_NAME | 4271 || TREE_CODE (rhs) != SSA_NAME |
4176 || may_propagate_copy (rhs, sprime))) | 4272 || may_propagate_copy (rhs, sprime))) |
4177 { | 4273 { |
4274 bool can_make_abnormal_goto | |
4275 = is_gimple_call (stmt) | |
4276 && stmt_can_make_abnormal_goto (stmt); | |
4277 | |
4178 gcc_assert (sprime != rhs); | 4278 gcc_assert (sprime != rhs); |
4179 | 4279 |
4180 if (dump_file && (dump_flags & TDF_DETAILS)) | 4280 if (dump_file && (dump_flags & TDF_DETAILS)) |
4181 { | 4281 { |
4182 fprintf (dump_file, "Replaced "); | 4282 fprintf (dump_file, "Replaced "); |
4201 pre_stats.eliminations++; | 4301 pre_stats.eliminations++; |
4202 propagate_tree_value_into_stmt (&gsi, sprime); | 4302 propagate_tree_value_into_stmt (&gsi, sprime); |
4203 stmt = gsi_stmt (gsi); | 4303 stmt = gsi_stmt (gsi); |
4204 update_stmt (stmt); | 4304 update_stmt (stmt); |
4205 | 4305 |
4206 /* If we removed EH side effects from the statement, clean | 4306 /* If we removed EH side-effects from the statement, clean |
4207 its EH information. */ | 4307 its EH information. */ |
4208 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)) | 4308 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)) |
4209 { | 4309 { |
4210 bitmap_set_bit (need_eh_cleanup, | 4310 bitmap_set_bit (need_eh_cleanup, |
4211 gimple_bb (stmt)->index); | 4311 gimple_bb (stmt)->index); |
4212 if (dump_file && (dump_flags & TDF_DETAILS)) | 4312 if (dump_file && (dump_flags & TDF_DETAILS)) |
4213 fprintf (dump_file, " Removed EH side effects.\n"); | 4313 fprintf (dump_file, " Removed EH side-effects.\n"); |
4314 } | |
4315 | |
4316 /* Likewise for AB side-effects. */ | |
4317 if (can_make_abnormal_goto | |
4318 && !stmt_can_make_abnormal_goto (stmt)) | |
4319 { | |
4320 bitmap_set_bit (need_ab_cleanup, | |
4321 gimple_bb (stmt)->index); | |
4322 if (dump_file && (dump_flags & TDF_DETAILS)) | |
4323 fprintf (dump_file, " Removed AB side-effects.\n"); | |
4214 } | 4324 } |
4215 } | 4325 } |
4216 } | 4326 } |
4217 /* If the statement is a scalar store, see if the expression | 4327 /* If the statement is a scalar store, see if the expression |
4218 has the same value number as its rhs. If so, the store is | 4328 has the same value number as its rhs. If so, the store is |
4223 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))) | 4333 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))) |
4224 { | 4334 { |
4225 tree rhs = gimple_assign_rhs1 (stmt); | 4335 tree rhs = gimple_assign_rhs1 (stmt); |
4226 tree val; | 4336 tree val; |
4227 val = vn_reference_lookup (gimple_assign_lhs (stmt), | 4337 val = vn_reference_lookup (gimple_assign_lhs (stmt), |
4228 gimple_vuse (stmt), true, NULL); | 4338 gimple_vuse (stmt), VN_WALK, NULL); |
4229 if (TREE_CODE (rhs) == SSA_NAME) | 4339 if (TREE_CODE (rhs) == SSA_NAME) |
4230 rhs = VN_INFO (rhs)->valnum; | 4340 rhs = VN_INFO (rhs)->valnum; |
4231 if (val | 4341 if (val |
4232 && operand_equal_p (val, rhs, 0)) | 4342 && operand_equal_p (val, rhs, 0)) |
4233 { | 4343 { |
4265 todo = TODO_cleanup_cfg; | 4375 todo = TODO_cleanup_cfg; |
4266 } | 4376 } |
4267 } | 4377 } |
4268 /* Visit indirect calls and turn them into direct calls if | 4378 /* Visit indirect calls and turn them into direct calls if |
4269 possible. */ | 4379 possible. */ |
4270 if (gimple_code (stmt) == GIMPLE_CALL | 4380 if (is_gimple_call (stmt) |
4271 && TREE_CODE (gimple_call_fn (stmt)) == SSA_NAME) | 4381 && TREE_CODE (gimple_call_fn (stmt)) == SSA_NAME) |
4272 { | 4382 { |
4273 tree fn = VN_INFO (gimple_call_fn (stmt))->valnum; | 4383 tree fn = VN_INFO (gimple_call_fn (stmt))->valnum; |
4274 if (TREE_CODE (fn) == ADDR_EXPR | 4384 if (TREE_CODE (fn) == ADDR_EXPR |
4275 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL) | 4385 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL) |
4276 { | 4386 { |
4387 bool can_make_abnormal_goto | |
4388 = stmt_can_make_abnormal_goto (stmt); | |
4389 bool was_noreturn = gimple_call_noreturn_p (stmt); | |
4390 | |
4277 if (dump_file && (dump_flags & TDF_DETAILS)) | 4391 if (dump_file && (dump_flags & TDF_DETAILS)) |
4278 { | 4392 { |
4279 fprintf (dump_file, "Replacing call target with "); | 4393 fprintf (dump_file, "Replacing call target with "); |
4280 print_generic_expr (dump_file, fn, 0); | 4394 print_generic_expr (dump_file, fn, 0); |
4281 fprintf (dump_file, " in "); | 4395 fprintf (dump_file, " in "); |
4282 print_gimple_stmt (dump_file, stmt, 0, 0); | 4396 print_gimple_stmt (dump_file, stmt, 0, 0); |
4283 } | 4397 } |
4284 | 4398 |
4285 gimple_call_set_fn (stmt, fn); | 4399 gimple_call_set_fn (stmt, fn); |
4286 update_stmt (stmt); | 4400 update_stmt (stmt); |
4401 | |
4402 /* When changing a call into a noreturn call, cfg cleanup | |
4403 is needed to fix up the noreturn call. */ | |
4404 if (!was_noreturn && gimple_call_noreturn_p (stmt)) | |
4405 todo |= TODO_cleanup_cfg; | |
4406 | |
4407 /* If we removed EH side-effects from the statement, clean | |
4408 its EH information. */ | |
4287 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)) | 4409 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)) |
4288 { | 4410 { |
4289 bitmap_set_bit (need_eh_cleanup, | 4411 bitmap_set_bit (need_eh_cleanup, |
4290 gimple_bb (stmt)->index); | 4412 gimple_bb (stmt)->index); |
4291 if (dump_file && (dump_flags & TDF_DETAILS)) | 4413 if (dump_file && (dump_flags & TDF_DETAILS)) |
4292 fprintf (dump_file, " Removed EH side effects.\n"); | 4414 fprintf (dump_file, " Removed EH side-effects.\n"); |
4415 } | |
4416 | |
4417 /* Likewise for AB side-effects. */ | |
4418 if (can_make_abnormal_goto | |
4419 && !stmt_can_make_abnormal_goto (stmt)) | |
4420 { | |
4421 bitmap_set_bit (need_ab_cleanup, | |
4422 gimple_bb (stmt)->index); | |
4423 if (dump_file && (dump_flags & TDF_DETAILS)) | |
4424 fprintf (dump_file, " Removed AB side-effects.\n"); | |
4293 } | 4425 } |
4294 | 4426 |
4295 /* Changing an indirect call to a direct call may | 4427 /* Changing an indirect call to a direct call may |
4296 have exposed different semantics. This may | 4428 have exposed different semantics. This may |
4297 require an SSA update. */ | 4429 require an SSA update. */ |
4327 else if (sprimeexpr->kind == NAME) | 4459 else if (sprimeexpr->kind == NAME) |
4328 sprime = PRE_EXPR_NAME (sprimeexpr); | 4460 sprime = PRE_EXPR_NAME (sprimeexpr); |
4329 else | 4461 else |
4330 gcc_unreachable (); | 4462 gcc_unreachable (); |
4331 } | 4463 } |
4332 if (!sprimeexpr | 4464 if (!sprime && is_gimple_min_invariant (VN_INFO (res)->valnum)) |
4465 { | |
4466 sprime = VN_INFO (res)->valnum; | |
4467 if (!useless_type_conversion_p (TREE_TYPE (res), | |
4468 TREE_TYPE (sprime))) | |
4469 sprime = fold_convert (TREE_TYPE (res), sprime); | |
4470 } | |
4471 if (!sprime | |
4333 || sprime == res) | 4472 || sprime == res) |
4334 { | 4473 { |
4335 gsi_next (&gsi); | 4474 gsi_next (&gsi); |
4336 continue; | 4475 continue; |
4337 } | 4476 } |
4370 } | 4509 } |
4371 | 4510 |
4372 /* We cannot remove stmts during BB walk, especially not release SSA | 4511 /* We cannot remove stmts during BB walk, especially not release SSA |
4373 names there as this confuses the VN machinery. The stmts ending | 4512 names there as this confuses the VN machinery. The stmts ending |
4374 up in to_remove are either stores or simple copies. */ | 4513 up in to_remove are either stores or simple copies. */ |
4375 for (i = 0; VEC_iterate (gimple, to_remove, i, stmt); ++i) | 4514 FOR_EACH_VEC_ELT (gimple, to_remove, i, stmt) |
4376 { | 4515 { |
4377 tree lhs = gimple_assign_lhs (stmt); | 4516 tree lhs = gimple_assign_lhs (stmt); |
4378 tree rhs = gimple_assign_rhs1 (stmt); | 4517 tree rhs = gimple_assign_rhs1 (stmt); |
4379 use_operand_p use_p; | 4518 use_operand_p use_p; |
4380 gimple use_stmt; | 4519 gimple use_stmt; |
4395 | 4534 |
4396 /* If this is a store or a now unused copy, remove it. */ | 4535 /* If this is a store or a now unused copy, remove it. */ |
4397 if (TREE_CODE (lhs) != SSA_NAME | 4536 if (TREE_CODE (lhs) != SSA_NAME |
4398 || has_zero_uses (lhs)) | 4537 || has_zero_uses (lhs)) |
4399 { | 4538 { |
4539 basic_block bb = gimple_bb (stmt); | |
4400 gsi = gsi_for_stmt (stmt); | 4540 gsi = gsi_for_stmt (stmt); |
4401 unlink_stmt_vdef (stmt); | 4541 unlink_stmt_vdef (stmt); |
4402 gsi_remove (&gsi, true); | 4542 gsi_remove (&gsi, true); |
4543 if (gimple_purge_dead_eh_edges (bb)) | |
4544 todo |= TODO_cleanup_cfg; | |
4403 if (TREE_CODE (lhs) == SSA_NAME) | 4545 if (TREE_CODE (lhs) == SSA_NAME) |
4404 bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs)); | 4546 bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs)); |
4405 release_defs (stmt); | 4547 release_defs (stmt); |
4406 } | 4548 } |
4407 } | 4549 } |
4632 | 4774 |
4633 | 4775 |
4634 postorder = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS); | 4776 postorder = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS); |
4635 my_rev_post_order_compute (postorder, false); | 4777 my_rev_post_order_compute (postorder, false); |
4636 | 4778 |
4637 FOR_ALL_BB (bb) | 4779 alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets)); |
4638 bb->aux = XCNEWVEC (struct bb_bitmap_sets, 1); | |
4639 | 4780 |
4640 calculate_dominance_info (CDI_POST_DOMINATORS); | 4781 calculate_dominance_info (CDI_POST_DOMINATORS); |
4641 calculate_dominance_info (CDI_DOMINATORS); | 4782 calculate_dominance_info (CDI_DOMINATORS); |
4642 | 4783 |
4643 bitmap_obstack_initialize (&grand_bitmap_obstack); | 4784 bitmap_obstack_initialize (&grand_bitmap_obstack); |
4657 TMP_GEN (bb) = bitmap_set_new (); | 4798 TMP_GEN (bb) = bitmap_set_new (); |
4658 AVAIL_OUT (bb) = bitmap_set_new (); | 4799 AVAIL_OUT (bb) = bitmap_set_new (); |
4659 } | 4800 } |
4660 | 4801 |
4661 need_eh_cleanup = BITMAP_ALLOC (NULL); | 4802 need_eh_cleanup = BITMAP_ALLOC (NULL); |
4803 need_ab_cleanup = BITMAP_ALLOC (NULL); | |
4662 } | 4804 } |
4663 | 4805 |
4664 | 4806 |
4665 /* Deallocate data structures used by PRE. */ | 4807 /* Deallocate data structures used by PRE. */ |
4666 | 4808 |
4667 static void | 4809 static void |
4668 fini_pre (bool do_fre) | 4810 fini_pre (bool do_fre) |
4669 { | 4811 { |
4670 basic_block bb; | |
4671 | |
4672 free (postorder); | 4812 free (postorder); |
4673 VEC_free (bitmap_set_t, heap, value_expressions); | 4813 VEC_free (bitmap_set_t, heap, value_expressions); |
4674 BITMAP_FREE (inserted_exprs); | 4814 BITMAP_FREE (inserted_exprs); |
4675 VEC_free (gimple, heap, need_creation); | 4815 VEC_free (gimple, heap, need_creation); |
4676 bitmap_obstack_release (&grand_bitmap_obstack); | 4816 bitmap_obstack_release (&grand_bitmap_obstack); |
4678 free_alloc_pool (pre_expr_pool); | 4818 free_alloc_pool (pre_expr_pool); |
4679 htab_delete (phi_translate_table); | 4819 htab_delete (phi_translate_table); |
4680 htab_delete (expression_to_id); | 4820 htab_delete (expression_to_id); |
4681 VEC_free (unsigned, heap, name_to_id); | 4821 VEC_free (unsigned, heap, name_to_id); |
4682 | 4822 |
4683 FOR_ALL_BB (bb) | 4823 free_aux_for_blocks (); |
4684 { | |
4685 free (bb->aux); | |
4686 bb->aux = NULL; | |
4687 } | |
4688 | 4824 |
4689 free_dominance_info (CDI_POST_DOMINATORS); | 4825 free_dominance_info (CDI_POST_DOMINATORS); |
4690 | 4826 |
4691 if (!bitmap_empty_p (need_eh_cleanup)) | 4827 if (!bitmap_empty_p (need_eh_cleanup)) |
4692 { | 4828 { |
4693 gimple_purge_all_dead_eh_edges (need_eh_cleanup); | 4829 gimple_purge_all_dead_eh_edges (need_eh_cleanup); |
4694 cleanup_tree_cfg (); | 4830 cleanup_tree_cfg (); |
4695 } | 4831 } |
4696 | 4832 |
4697 BITMAP_FREE (need_eh_cleanup); | 4833 BITMAP_FREE (need_eh_cleanup); |
4834 | |
4835 if (!bitmap_empty_p (need_ab_cleanup)) | |
4836 { | |
4837 gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup); | |
4838 cleanup_tree_cfg (); | |
4839 } | |
4840 | |
4841 BITMAP_FREE (need_ab_cleanup); | |
4698 | 4842 |
4699 if (!do_fre) | 4843 if (!do_fre) |
4700 loop_optimizer_finalize (); | 4844 loop_optimizer_finalize (); |
4701 } | 4845 } |
4702 | 4846 |
4713 /* This has to happen before SCCVN runs because | 4857 /* This has to happen before SCCVN runs because |
4714 loop_optimizer_init may create new phis, etc. */ | 4858 loop_optimizer_init may create new phis, etc. */ |
4715 if (!do_fre) | 4859 if (!do_fre) |
4716 loop_optimizer_init (LOOPS_NORMAL); | 4860 loop_optimizer_init (LOOPS_NORMAL); |
4717 | 4861 |
4718 if (!run_scc_vn (do_fre)) | 4862 if (!run_scc_vn (do_fre ? VN_WALKREWRITE : VN_WALK)) |
4719 { | 4863 { |
4720 if (!do_fre) | 4864 if (!do_fre) |
4721 loop_optimizer_finalize (); | 4865 loop_optimizer_finalize (); |
4722 | 4866 |
4723 return 0; | 4867 return 0; |
4769 statistics_counter_event (cfun, "Constified", pre_stats.constified); | 4913 statistics_counter_event (cfun, "Constified", pre_stats.constified); |
4770 | 4914 |
4771 clear_expression_ids (); | 4915 clear_expression_ids (); |
4772 free_scc_vn (); | 4916 free_scc_vn (); |
4773 if (!do_fre) | 4917 if (!do_fre) |
4774 remove_dead_inserted_code (); | 4918 { |
4919 remove_dead_inserted_code (); | |
4920 todo |= TODO_verify_flow; | |
4921 } | |
4775 | 4922 |
4776 scev_finalize (); | 4923 scev_finalize (); |
4777 fini_pre (do_fre); | 4924 fini_pre (do_fre); |
4778 | 4925 |
4779 return todo; | 4926 return todo; |