comparison gcc/gimple-fold.c @ 131:84e7813d76e9

gcc-8.2
author mir3636
date Thu, 25 Oct 2018 07:37:49 +0900
parents 04ced10e8804
children 1830386684a0
left column: 111:04ced10e8804   right column: 131:84e7813d76e9
1 /* Statement simplification on GIMPLE. 1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2017 Free Software Foundation, Inc. 2 Copyright (C) 2010-2018 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c. 3 Split out from tree-ssa-ccp.c.
4 4
5 This file is part of GCC. 5 This file is part of GCC.
6 6
7 GCC is free software; you can redistribute it and/or modify it 7 GCC is free software; you can redistribute it and/or modify it
28 #include "gimple.h" 28 #include "gimple.h"
29 #include "predict.h" 29 #include "predict.h"
30 #include "ssa.h" 30 #include "ssa.h"
31 #include "cgraph.h" 31 #include "cgraph.h"
32 #include "gimple-pretty-print.h" 32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
33 #include "fold-const.h" 34 #include "fold-const.h"
34 #include "stmt.h" 35 #include "stmt.h"
35 #include "expr.h" 36 #include "expr.h"
36 #include "stor-layout.h" 37 #include "stor-layout.h"
37 #include "dumpfile.h" 38 #include "dumpfile.h"
38 #include "gimple-fold.h" 39 #include "gimple-fold.h"
39 #include "gimplify.h" 40 #include "gimplify.h"
40 #include "gimple-iterator.h" 41 #include "gimple-iterator.h"
41 #include "tree-into-ssa.h" 42 #include "tree-into-ssa.h"
42 #include "tree-dfa.h" 43 #include "tree-dfa.h"
44 #include "tree-object-size.h"
43 #include "tree-ssa.h" 45 #include "tree-ssa.h"
44 #include "tree-ssa-propagate.h" 46 #include "tree-ssa-propagate.h"
45 #include "ipa-utils.h" 47 #include "ipa-utils.h"
46 #include "tree-ssa-address.h" 48 #include "tree-ssa-address.h"
47 #include "langhooks.h" 49 #include "langhooks.h"
51 #include "tree-eh.h" 53 #include "tree-eh.h"
52 #include "gimple-match.h" 54 #include "gimple-match.h"
53 #include "gomp-constants.h" 55 #include "gomp-constants.h"
54 #include "optabs-query.h" 56 #include "optabs-query.h"
55 #include "omp-general.h" 57 #include "omp-general.h"
56 #include "ipa-chkp.h"
57 #include "tree-cfg.h" 58 #include "tree-cfg.h"
58 #include "fold-const-call.h" 59 #include "fold-const-call.h"
59 #include "stringpool.h" 60 #include "stringpool.h"
60 #include "attribs.h" 61 #include "attribs.h"
61 #include "asan.h" 62 #include "asan.h"
63 #include "diagnostic-core.h"
64 #include "intl.h"
65 #include "calls.h"
66 #include "tree-vector-builder.h"
67 #include "tree-ssa-strlen.h"
62 68
63 /* Return true when DECL can be referenced from current unit. 69 /* Return true when DECL can be referenced from current unit.
64 FROM_DECL (if non-null) specify constructor of variable DECL was taken from. 70 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
65 We can get declarations that are not possible to reference for various 71 We can get declarations that are not possible to reference for various
66 reasons: 72 reasons:
339 = possible_polymorphic_call_targets (rhs, stmt, &final); 345 = possible_polymorphic_call_targets (rhs, stmt, &final);
340 if (final && targets.length () <= 1 && dbg_cnt (devirt)) 346 if (final && targets.length () <= 1 && dbg_cnt (devirt))
341 { 347 {
342 if (dump_enabled_p ()) 348 if (dump_enabled_p ())
343 { 349 {
344 location_t loc = gimple_location_safe (stmt); 350 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
345 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
346 "resolving virtual function address " 351 "resolving virtual function address "
347 "reference to function %s\n", 352 "reference to function %s\n",
348 targets.length () == 1 353 targets.length () == 1
349 ? targets[0]->name () 354 ? targets[0]->name ()
350 : "NULL"); 355 : "NULL");
623 var_decl_component_p (tree var) 628 var_decl_component_p (tree var)
624 { 629 {
625 tree inner = var; 630 tree inner = var;
626 while (handled_component_p (inner)) 631 while (handled_component_p (inner))
627 inner = TREE_OPERAND (inner, 0); 632 inner = TREE_OPERAND (inner, 0);
628 return SSA_VAR_P (inner); 633 return (DECL_P (inner)
634 || (TREE_CODE (inner) == MEM_REF
635 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
629 } 636 }
630 637
631 /* If the SIZE argument representing the size of an object is in a range 638 /* If the SIZE argument representing the size of an object is in a range
632 of values of which exactly one is valid (and that is zero), return 639 of values of which exactly one is valid (and that is zero), return
633 true, otherwise false. */ 640 true, otherwise false. */
636 size_must_be_zero_p (tree size) 643 size_must_be_zero_p (tree size)
637 { 644 {
638 if (integer_zerop (size)) 645 if (integer_zerop (size))
639 return true; 646 return true;
640 647
641 if (TREE_CODE (size) != SSA_NAME) 648 if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
642 return false; 649 return false;
643 650
644 wide_int min, max; 651 wide_int min, max;
645 enum value_range_type rtype = get_range_info (size, &min, &max); 652 enum value_range_kind rtype = get_range_info (size, &min, &max);
646 if (rtype != VR_ANTI_RANGE) 653 if (rtype != VR_ANTI_RANGE)
647 return false; 654 return false;
648 655
649 tree type = TREE_TYPE (size); 656 tree type = TREE_TYPE (size);
650 int prec = TYPE_PRECISION (type); 657 int prec = TYPE_PRECISION (type);
656 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1; 663 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
657 664
658 return wi::eq_p (min, wone) && wi::geu_p (max, ssize_max); 665 return wi::eq_p (min, wone) && wi::geu_p (max, ssize_max);
659 } 666 }
660 667
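
size_must_be_zero_p accepts a size whose value-range information is an anti-range starting at 1 and reaching at least the signed-size maximum, i.e. a value that is either zero or too large to name a valid object. A rough user-level sketch of such a situation (the function name is invented, and PTRDIFF_MAX merely stands in for the precision-based bound the code actually computes):

#include <stdint.h>
#include <string.h>

void
copy_guarded (char *d, const char *s, size_t n)
{
  /* Illustrative only: on the path below, n is known to lie outside
     [1, PTRDIFF_MAX], so zero is the only value that could name a
     valid object size and the memcpy can be folded away to D.  */
  if (n >= 1 && n <= PTRDIFF_MAX)
    return;
  memcpy (d, s, n);
}
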
661 /* Fold function call to builtin mem{{,p}cpy,move}. Return 668 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
662 false if no simplification can be made. 669 diagnose (otherwise undefined) overlapping copies without preventing
663 If ENDP is 0, return DEST (like memcpy). 670 folding. When folded, GCC guarantees that overlapping memcpy has
664 If ENDP is 1, return DEST+LEN (like mempcpy). 671 the same semantics as memmove. Call to the library memcpy need not
665 If ENDP is 2, return DEST+LEN-1 (like stpcpy). 672 provide the same guarantee. Return false if no simplification can
666 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap 673 be made. */
667 (memmove). */
668 674
669 static bool 675 static bool
670 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi, 676 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
671 tree dest, tree src, int endp) 677 tree dest, tree src, int endp)
672 { 678 {
673 gimple *stmt = gsi_stmt (*gsi); 679 gimple *stmt = gsi_stmt (*gsi);
674 tree lhs = gimple_call_lhs (stmt); 680 tree lhs = gimple_call_lhs (stmt);
675 tree len = gimple_call_arg (stmt, 2); 681 tree len = gimple_call_arg (stmt, 2);
676 tree destvar, srcvar; 682 tree destvar, srcvar;
677 location_t loc = gimple_location (stmt); 683 location_t loc = gimple_location (stmt);
684
685 bool nowarn = gimple_no_warning_p (stmt);
678 686
679 /* If the LEN parameter is a constant zero or in range where 687 /* If the LEN parameter is a constant zero or in range where
680 the only valid value is zero, return DEST. */ 688 the only valid value is zero, return DEST. */
681 if (size_must_be_zero_p (len)) 689 if (size_must_be_zero_p (len))
682 { 690 {
697 705
698 /* If SRC and DEST are the same (and not volatile), return 706 /* If SRC and DEST are the same (and not volatile), return
699 DEST{,+LEN,+LEN-1}. */ 707 DEST{,+LEN,+LEN-1}. */
700 if (operand_equal_p (src, dest, 0)) 708 if (operand_equal_p (src, dest, 0))
701 { 709 {
710 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
711 It's safe and may even be emitted by GCC itself (see bug
712 32667). */
702 unlink_stmt_vdef (stmt); 713 unlink_stmt_vdef (stmt);
703 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME) 714 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
704 release_ssa_name (gimple_vdef (stmt)); 715 release_ssa_name (gimple_vdef (stmt));
705 if (!lhs) 716 if (!lhs)
706 { 717 {
712 else 723 else
713 { 724 {
714 tree srctype, desttype; 725 tree srctype, desttype;
715 unsigned int src_align, dest_align; 726 unsigned int src_align, dest_align;
716 tree off0; 727 tree off0;
717 728 const char *tmp_str;
718 /* Inlining of memcpy/memmove may cause bounds lost (if we copy 729 unsigned HOST_WIDE_INT tmp_len;
719 pointers as wide integer) and also may result in huge function
720 size because of inlined bounds copy. Thus don't inline for
721 functions we want to instrument. */
722 if (flag_check_pointer_bounds
723 && chkp_instrumentable_p (cfun->decl)
724 /* Even if data may contain pointers we can inline if copy
725 less than a pointer size. */
726 && (!tree_fits_uhwi_p (len)
727 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
728 return false;
729 730
730 /* Build accesses at offset zero with a ref-all character type. */ 731 /* Build accesses at offset zero with a ref-all character type. */
731 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node, 732 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
732 ptr_mode, true), 0); 733 ptr_mode, true), 0);
733 734
741 && compare_tree_int (len, MOVE_MAX) <= 0 742 && compare_tree_int (len, MOVE_MAX) <= 0
742 /* ??? Don't transform copies from strings with known length this 743 /* ??? Don't transform copies from strings with known length this
743 confuses the tree-ssa-strlen.c. This doesn't handle 744 confuses the tree-ssa-strlen.c. This doesn't handle
744 the case in gcc.dg/strlenopt-8.c which is XFAILed for that 745 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
745 reason. */ 746 reason. */
746 && !c_strlen (src, 2)) 747 && !c_strlen (src, 2)
748 && !((tmp_str = c_getstr (src, &tmp_len)) != NULL
749 && memchr (tmp_str, 0, tmp_len) == NULL))
747 { 750 {
748 unsigned ilen = tree_to_uhwi (len); 751 unsigned ilen = tree_to_uhwi (len);
749 if (pow2p_hwi (ilen)) 752 if (pow2p_hwi (ilen))
750 { 753 {
754 /* Detect invalid bounds and overlapping copies and issue
755 either -Warray-bounds or -Wrestrict. */
756 if (!nowarn
757 && check_bounds_or_overlap (as_a <gcall *>(stmt),
758 dest, src, len, len))
759 gimple_set_no_warning (stmt, true);
760
751 scalar_int_mode mode; 761 scalar_int_mode mode;
752 tree type = lang_hooks.types.type_for_size (ilen * 8, 1); 762 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
753 if (type 763 if (type
754 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode) 764 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
755 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8 765 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
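
The check_bounds_or_overlap call inserted above runs before the small power-of-two copy is turned into a single load/store pair, so an overlapping copy of the shape below should still be diagnosed (with -Wrestrict, or -Warray-bounds for invalid bounds) even though the call ends up folded; this fragment is only an illustration, not a testsuite excerpt.

#include <string.h>

char buf[8];

void
shift_prefix (void)
{
  /* Source and destination overlap by three bytes; memcpy makes this
     undefined, so the new check is expected to warn before the call
     is folded into a 4-byte load and store.  */
  memcpy (buf + 1, buf, 4);
}
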
836 /* If *src and *dest can't overlap, optimize into memcpy as well. */ 846 /* If *src and *dest can't overlap, optimize into memcpy as well. */
837 if (TREE_CODE (src) == ADDR_EXPR 847 if (TREE_CODE (src) == ADDR_EXPR
838 && TREE_CODE (dest) == ADDR_EXPR) 848 && TREE_CODE (dest) == ADDR_EXPR)
839 { 849 {
840 tree src_base, dest_base, fn; 850 tree src_base, dest_base, fn;
841 HOST_WIDE_INT src_offset = 0, dest_offset = 0; 851 poly_int64 src_offset = 0, dest_offset = 0;
842 HOST_WIDE_INT maxsize; 852 poly_uint64 maxsize;
843 853
844 srcvar = TREE_OPERAND (src, 0); 854 srcvar = TREE_OPERAND (src, 0);
845 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset); 855 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
846 if (src_base == NULL) 856 if (src_base == NULL)
847 src_base = srcvar; 857 src_base = srcvar;
848 destvar = TREE_OPERAND (dest, 0); 858 destvar = TREE_OPERAND (dest, 0);
849 dest_base = get_addr_base_and_unit_offset (destvar, 859 dest_base = get_addr_base_and_unit_offset (destvar,
850 &dest_offset); 860 &dest_offset);
851 if (dest_base == NULL) 861 if (dest_base == NULL)
852 dest_base = destvar; 862 dest_base = destvar;
853 if (tree_fits_uhwi_p (len)) 863 if (!poly_int_tree_p (len, &maxsize))
854 maxsize = tree_to_uhwi (len);
855 else
856 maxsize = -1; 864 maxsize = -1;
857 if (SSA_VAR_P (src_base) 865 if (SSA_VAR_P (src_base)
858 && SSA_VAR_P (dest_base)) 866 && SSA_VAR_P (dest_base))
859 { 867 {
860 if (operand_equal_p (src_base, dest_base, 0) 868 if (operand_equal_p (src_base, dest_base, 0)
861 && ranges_overlap_p (src_offset, maxsize, 869 && ranges_maybe_overlap_p (src_offset, maxsize,
862 dest_offset, maxsize)) 870 dest_offset, maxsize))
863 return false; 871 return false;
864 } 872 }
865 else if (TREE_CODE (src_base) == MEM_REF 873 else if (TREE_CODE (src_base) == MEM_REF
866 && TREE_CODE (dest_base) == MEM_REF) 874 && TREE_CODE (dest_base) == MEM_REF)
867 { 875 {
868 if (! operand_equal_p (TREE_OPERAND (src_base, 0), 876 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
869 TREE_OPERAND (dest_base, 0), 0)) 877 TREE_OPERAND (dest_base, 0), 0))
870 return false; 878 return false;
871 offset_int off = mem_ref_offset (src_base) + src_offset; 879 poly_offset_int full_src_offset
872 if (!wi::fits_shwi_p (off)) 880 = mem_ref_offset (src_base) + src_offset;
873 return false; 881 poly_offset_int full_dest_offset
874 src_offset = off.to_shwi (); 882 = mem_ref_offset (dest_base) + dest_offset;
875 883 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
876 off = mem_ref_offset (dest_base) + dest_offset; 884 full_dest_offset, maxsize))
877 if (!wi::fits_shwi_p (off))
878 return false;
879 dest_offset = off.to_shwi ();
880 if (ranges_overlap_p (src_offset, maxsize,
881 dest_offset, maxsize))
882 return false; 885 return false;
883 } 886 }
884 else 887 else
885 return false; 888 return false;
886 889
921 return false; 924 return false;
922 } 925 }
923 926
924 if (!tree_fits_shwi_p (len)) 927 if (!tree_fits_shwi_p (len))
925 return false; 928 return false;
926 /* FIXME:
927 This logic lose for arguments like (type *)malloc (sizeof (type)),
928 since we strip the casts of up to VOID return value from malloc.
929 Perhaps we ought to inherit type from non-VOID argument here? */
930 STRIP_NOPS (src);
931 STRIP_NOPS (dest);
932 if (!POINTER_TYPE_P (TREE_TYPE (src)) 929 if (!POINTER_TYPE_P (TREE_TYPE (src))
933 || !POINTER_TYPE_P (TREE_TYPE (dest))) 930 || !POINTER_TYPE_P (TREE_TYPE (dest)))
934 return false; 931 return false;
935 /* In the following try to find a type that is most natural to be 932 /* In the following try to find a type that is most natural to be
936 used for the memcpy source and destination and that allows 933 used for the memcpy source and destination and that allows
937 the most optimization when memcpy is turned into a plain assignment 934 the most optimization when memcpy is turned into a plain assignment
938 using that type. In theory we could always use a char[len] type 935 using that type. In theory we could always use a char[len] type
939 but that only gains us that the destination and source possibly 936 but that only gains us that the destination and source possibly
940 no longer will have their address taken. */ 937 no longer will have their address taken. */
941 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
942 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
943 {
944 tree tem = TREE_OPERAND (src, 0);
945 STRIP_NOPS (tem);
946 if (tem != TREE_OPERAND (src, 0))
947 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
948 }
949 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
950 {
951 tree tem = TREE_OPERAND (dest, 0);
952 STRIP_NOPS (tem);
953 if (tem != TREE_OPERAND (dest, 0))
954 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
955 }
956 srctype = TREE_TYPE (TREE_TYPE (src)); 938 srctype = TREE_TYPE (TREE_TYPE (src));
957 if (TREE_CODE (srctype) == ARRAY_TYPE 939 if (TREE_CODE (srctype) == ARRAY_TYPE
958 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)) 940 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
959 { 941 srctype = TREE_TYPE (srctype);
960 srctype = TREE_TYPE (srctype);
961 STRIP_NOPS (src);
962 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
963 }
964 desttype = TREE_TYPE (TREE_TYPE (dest)); 942 desttype = TREE_TYPE (TREE_TYPE (dest));
965 if (TREE_CODE (desttype) == ARRAY_TYPE 943 if (TREE_CODE (desttype) == ARRAY_TYPE
966 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)) 944 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
967 { 945 desttype = TREE_TYPE (desttype);
968 desttype = TREE_TYPE (desttype);
969 STRIP_NOPS (dest);
970 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
971 }
972 if (TREE_ADDRESSABLE (srctype) 946 if (TREE_ADDRESSABLE (srctype)
973 || TREE_ADDRESSABLE (desttype)) 947 || TREE_ADDRESSABLE (desttype))
974 return false; 948 return false;
975 949
976 /* Make sure we are not copying using a floating-point mode or 950 /* Make sure we are not copying using a floating-point mode or
994 dest_align = get_pointer_alignment (dest); 968 dest_align = get_pointer_alignment (dest);
995 if (dest_align < TYPE_ALIGN (desttype) 969 if (dest_align < TYPE_ALIGN (desttype)
996 || src_align < TYPE_ALIGN (srctype)) 970 || src_align < TYPE_ALIGN (srctype))
997 return false; 971 return false;
998 972
999 destvar = dest; 973 destvar = NULL_TREE;
1000 STRIP_NOPS (destvar); 974 if (TREE_CODE (dest) == ADDR_EXPR
1001 if (TREE_CODE (destvar) == ADDR_EXPR 975 && var_decl_component_p (TREE_OPERAND (dest, 0))
1002 && var_decl_component_p (TREE_OPERAND (destvar, 0))
1003 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)) 976 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1004 destvar = fold_build2 (MEM_REF, desttype, destvar, off0); 977 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1005 else 978
1006 destvar = NULL_TREE; 979 srcvar = NULL_TREE;
1007 980 if (TREE_CODE (src) == ADDR_EXPR
1008 srcvar = src; 981 && var_decl_component_p (TREE_OPERAND (src, 0))
1009 STRIP_NOPS (srcvar);
1010 if (TREE_CODE (srcvar) == ADDR_EXPR
1011 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
1012 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)) 982 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1013 { 983 {
1014 if (!destvar 984 if (!destvar
1015 || src_align >= TYPE_ALIGN (desttype)) 985 || src_align >= TYPE_ALIGN (desttype))
1016 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype, 986 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1017 srcvar, off0); 987 src, off0);
1018 else if (!STRICT_ALIGNMENT) 988 else if (!STRICT_ALIGNMENT)
1019 { 989 {
1020 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype), 990 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1021 src_align); 991 src_align);
1022 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0); 992 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1023 } 993 }
1024 else 994 }
1025 srcvar = NULL_TREE;
1026 }
1027 else
1028 srcvar = NULL_TREE;
1029 995
1030 if (srcvar == NULL_TREE && destvar == NULL_TREE) 996 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1031 return false; 997 return false;
1032 998
1033 if (srcvar == NULL_TREE) 999 if (srcvar == NULL_TREE)
1034 { 1000 {
1035 STRIP_NOPS (src);
1036 if (src_align >= TYPE_ALIGN (desttype)) 1001 if (src_align >= TYPE_ALIGN (desttype))
1037 srcvar = fold_build2 (MEM_REF, desttype, src, off0); 1002 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1038 else 1003 else
1039 { 1004 {
1040 if (STRICT_ALIGNMENT) 1005 if (STRICT_ALIGNMENT)
1044 srcvar = fold_build2 (MEM_REF, srctype, src, off0); 1009 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1045 } 1010 }
1046 } 1011 }
1047 else if (destvar == NULL_TREE) 1012 else if (destvar == NULL_TREE)
1048 { 1013 {
1049 STRIP_NOPS (dest);
1050 if (dest_align >= TYPE_ALIGN (srctype)) 1014 if (dest_align >= TYPE_ALIGN (srctype))
1051 destvar = fold_build2 (MEM_REF, srctype, dest, off0); 1015 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1052 else 1016 else
1053 { 1017 {
1054 if (STRICT_ALIGNMENT) 1018 if (STRICT_ALIGNMENT)
1056 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype), 1020 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1057 dest_align); 1021 dest_align);
1058 destvar = fold_build2 (MEM_REF, desttype, dest, off0); 1022 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1059 } 1023 }
1060 } 1024 }
1025
1026 /* Detect invalid bounds and overlapping copies and issue either
1027 -Warray-bounds or -Wrestrict. */
1028 if (!nowarn)
1029 check_bounds_or_overlap (as_a <gcall *>(stmt), dest, src, len, len);
1061 1030
1062 gimple *new_stmt; 1031 gimple *new_stmt;
1063 if (is_gimple_reg_type (TREE_TYPE (srcvar))) 1032 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1064 { 1033 {
1065 tree tem = fold_const_aggregate_ref (srcvar); 1034 tree tem = fold_const_aggregate_ref (srcvar);
1072 new_stmt); 1041 new_stmt);
1073 gimple_assign_set_lhs (new_stmt, srcvar); 1042 gimple_assign_set_lhs (new_stmt, srcvar);
1074 gimple_set_vuse (new_stmt, gimple_vuse (stmt)); 1043 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1075 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT); 1044 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1076 } 1045 }
1077 } 1046 new_stmt = gimple_build_assign (destvar, srcvar);
1078 new_stmt = gimple_build_assign (destvar, srcvar); 1047 goto set_vop_and_replace;
1048 }
1049
1050 /* We get an aggregate copy. Use an unsigned char[] type to
1051 perform the copying to preserve padding and to avoid any issues
1052 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1053 desttype = build_array_type_nelts (unsigned_char_type_node,
1054 tree_to_uhwi (len));
1055 srctype = desttype;
1056 if (src_align > TYPE_ALIGN (srctype))
1057 srctype = build_aligned_type (srctype, src_align);
1058 if (dest_align > TYPE_ALIGN (desttype))
1059 desttype = build_aligned_type (desttype, dest_align);
1060 new_stmt
1061 = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1062 fold_build2 (MEM_REF, srctype, src, off0));
1063 set_vop_and_replace:
1079 gimple_set_vuse (new_stmt, gimple_vuse (stmt)); 1064 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1080 gimple_set_vdef (new_stmt, gimple_vdef (stmt)); 1065 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1081 if (gimple_vdef (new_stmt) 1066 if (gimple_vdef (new_stmt)
1082 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME) 1067 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1083 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt; 1068 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
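
When neither side can be treated as a scalar register value, the new code copies through an unsigned char[len] view of both objects so that padding is preserved and TREE_ADDRESSABLE or floating-point-mode concerns do not apply. A hypothetical call that would take this aggregate path (struct layout and names are made up):

#include <string.h>

struct pair
{
  char tag;      /* padding typically follows on common ABIs */
  long value;
} a, b;

void
clone_pair (void)
{
  /* Folded to an assignment between unsigned char[sizeof a] views of
     B and A rather than to a scalar move, copying the padding too.  */
  memcpy (&b, &a, sizeof a);
}
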
1279 1264
1280 /* Obtain the minimum and maximum string length or minimum and maximum 1265 /* Obtain the minimum and maximum string length or minimum and maximum
1281 value of ARG in LENGTH[0] and LENGTH[1], respectively. 1266 value of ARG in LENGTH[0] and LENGTH[1], respectively.
1282 If ARG is an SSA name variable, follow its use-def chains. When 1267 If ARG is an SSA name variable, follow its use-def chains. When
1283 TYPE == 0, if LENGTH[1] is not equal to the length we determine or 1268 TYPE == 0, if LENGTH[1] is not equal to the length we determine or
1284 if we are unable to determine the length or value, return False. 1269 if we are unable to determine the length or value, return false.
1285 VISITED is a bitmap of visited variables. 1270 VISITED is a bitmap of visited variables.
1286 TYPE is 0 if string length should be obtained, 1 for maximum string 1271 TYPE is 0 if string length should be obtained, 1 for maximum string
1287 length and 2 for maximum value ARG can have. 1272 length and 2 for maximum value ARG can have.
1288 When FUZZY is set and the length of a string cannot be determined, 1273 When FUZZY is non-zero and the length of a string cannot be determined,
1289 the function instead considers as the maximum possible length the 1274 the function instead considers as the maximum possible length the
1290 size of a character array it may refer to. 1275 size of a character array it may refer to. If FUZZY is 2, it will handle
1276 PHIs and COND_EXPRs optimistically, if we can determine string length
1277 minimum and maximum, it will use the minimum from the ones where it
1278 can be determined.
1291 Set *FLEXP to true if the range of the string lengths has been 1279 Set *FLEXP to true if the range of the string lengths has been
1292 obtained from the upper bound of an array at the end of a struct. 1280 obtained from the upper bound of an array at the end of a struct.
1293 Such an array may hold a string that's longer than its upper bound 1281 Such an array may hold a string that's longer than its upper bound
1294 due to it being used as a poor-man's flexible array member. */ 1282 due to it being used as a poor-man's flexible array member.
1283 Pass NONSTR through to children.
1284 ELTSIZE is 1 for normal single byte character strings, and 2 or
1285 4 for wide character strings. ELTSIZE is by default 1. */
1295 1286
1296 static bool 1287 static bool
1297 get_range_strlen (tree arg, tree length[2], bitmap *visited, int type, 1288 get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
1298 bool fuzzy, bool *flexp) 1289 int fuzzy, bool *flexp, unsigned eltsize, tree *nonstr)
1299 { 1290 {
1300 tree var, val; 1291 tree var, val = NULL_TREE;
1301 gimple *def_stmt; 1292 gimple *def_stmt;
1302 1293
1303 /* The minimum and maximum length. The MAXLEN pointer stays unchanged 1294 /* The minimum and maximum length. */
1304 but MINLEN may be cleared during the execution of the function. */ 1295 tree *const minlen = length;
1305 tree *minlen = length;
1306 tree *const maxlen = length + 1; 1296 tree *const maxlen = length + 1;
1307 1297
1308 if (TREE_CODE (arg) != SSA_NAME) 1298 if (TREE_CODE (arg) != SSA_NAME)
1309 { 1299 {
1310 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */ 1300 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1311 if (TREE_CODE (arg) == ADDR_EXPR 1301 if (TREE_CODE (arg) == ADDR_EXPR
1312 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF 1302 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
1313 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1))) 1303 {
1314 { 1304 tree op = TREE_OPERAND (arg, 0);
1315 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0); 1305 if (integer_zerop (TREE_OPERAND (op, 1)))
1316 if (TREE_CODE (aop0) == INDIRECT_REF 1306 {
1317 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME) 1307 tree aop0 = TREE_OPERAND (op, 0);
1318 return get_range_strlen (TREE_OPERAND (aop0, 0), 1308 if (TREE_CODE (aop0) == INDIRECT_REF
1319 length, visited, type, fuzzy, flexp); 1309 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1310 return get_range_strlen (TREE_OPERAND (aop0, 0), length,
1311 visited, type, fuzzy, flexp,
1312 eltsize, nonstr);
1313 }
1314 else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF && fuzzy)
1315 {
1316 /* Fail if an array is the last member of a struct object
1317 since it could be treated as a (fake) flexible array
1318 member. */
1319 tree idx = TREE_OPERAND (op, 1);
1320
1321 arg = TREE_OPERAND (op, 0);
1322 tree optype = TREE_TYPE (arg);
1323 if (tree dom = TYPE_DOMAIN (optype))
1324 if (tree bound = TYPE_MAX_VALUE (dom))
1325 if (TREE_CODE (bound) == INTEGER_CST
1326 && TREE_CODE (idx) == INTEGER_CST
1327 && tree_int_cst_lt (bound, idx))
1328 return false;
1329 }
1320 } 1330 }
1321 1331
1322 if (type == 2) 1332 if (type == 2)
1323 { 1333 {
1324 val = arg; 1334 val = arg;
1325 if (TREE_CODE (val) != INTEGER_CST 1335 if (TREE_CODE (val) != INTEGER_CST
1326 || tree_int_cst_sgn (val) < 0) 1336 || tree_int_cst_sgn (val) < 0)
1327 return false; 1337 return false;
1328 } 1338 }
1329 else 1339 else
1330 val = c_strlen (arg, 1); 1340 {
1341 c_strlen_data data;
1342 memset (&data, 0, sizeof (c_strlen_data));
1343 val = c_strlen (arg, 1, &data, eltsize);
1344
1345 /* If we potentially had a non-terminated string, then
1346 bubble that information up to the caller. */
1347 if (!val && data.decl)
1348 {
1349 *nonstr = data.decl;
1350 *minlen = data.len;
1351 *maxlen = data.len;
1352 return type == 0 ? false : true;
1353 }
1354 }
1331 1355
1332 if (!val && fuzzy) 1356 if (!val && fuzzy)
1333 { 1357 {
1334 if (TREE_CODE (arg) == ADDR_EXPR) 1358 if (TREE_CODE (arg) == ADDR_EXPR)
1335 return get_range_strlen (TREE_OPERAND (arg, 0), length, 1359 return get_range_strlen (TREE_OPERAND (arg, 0), length,
1336 visited, type, fuzzy, flexp); 1360 visited, type, fuzzy, flexp,
1337 1361 eltsize, nonstr);
1338 if (TREE_CODE (arg) == COMPONENT_REF 1362
1339 && TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1))) == ARRAY_TYPE) 1363 if (TREE_CODE (arg) == ARRAY_REF)
1364 {
1365 tree type = TREE_TYPE (TREE_OPERAND (arg, 0));
1366
1367 /* Determine the "innermost" array type. */
1368 while (TREE_CODE (type) == ARRAY_TYPE
1369 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
1370 type = TREE_TYPE (type);
1371
1372 /* Avoid arrays of pointers. */
1373 tree eltype = TREE_TYPE (type);
1374 if (TREE_CODE (type) != ARRAY_TYPE
1375 || !INTEGRAL_TYPE_P (eltype))
1376 return false;
1377
1378 val = TYPE_SIZE_UNIT (type);
1379 if (!val || integer_zerop (val))
1380 return false;
1381
1382 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1383 integer_one_node);
1384 /* Set the minimum size to zero since the string in
1385 the array could have zero length. */
1386 *minlen = ssize_int (0);
1387
1388 if (TREE_CODE (TREE_OPERAND (arg, 0)) == COMPONENT_REF
1389 && type == TREE_TYPE (TREE_OPERAND (arg, 0))
1390 && array_at_struct_end_p (TREE_OPERAND (arg, 0)))
1391 *flexp = true;
1392 }
1393 else if (TREE_CODE (arg) == COMPONENT_REF
1394 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
1395 == ARRAY_TYPE))
1340 { 1396 {
1341 /* Use the type of the member array to determine the upper 1397 /* Use the type of the member array to determine the upper
1342 bound on the length of the array. This may be overly 1398 bound on the length of the array. This may be overly
1343 optimistic if the array itself isn't NUL-terminated and 1399 optimistic if the array itself isn't NUL-terminated and
1344 the caller relies on the subsequent member to contain 1400 the caller relies on the subsequent member to contain
1345 the NUL. 1401 the NUL but that would only be considered valid if
1402 the array were the last member of a struct.
1346 Set *FLEXP to true if the array whose bound is being 1403 Set *FLEXP to true if the array whose bound is being
1347 used is at the end of a struct. */ 1404 used is at the end of a struct. */
1348 if (array_at_struct_end_p (arg)) 1405 if (array_at_struct_end_p (arg))
1349 *flexp = true; 1406 *flexp = true;
1350 1407
1351 arg = TREE_OPERAND (arg, 1); 1408 arg = TREE_OPERAND (arg, 1);
1352 val = TYPE_SIZE_UNIT (TREE_TYPE (arg)); 1409
1410 tree type = TREE_TYPE (arg);
1411
1412 while (TREE_CODE (type) == ARRAY_TYPE
1413 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
1414 type = TREE_TYPE (type);
1415
1416 /* Fail when the array bound is unknown or zero. */
1417 val = TYPE_SIZE_UNIT (type);
1353 if (!val || integer_zerop (val)) 1418 if (!val || integer_zerop (val))
1354 return false; 1419 return false;
1355 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val, 1420 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1356 integer_one_node); 1421 integer_one_node);
1357 /* Set the minimum size to zero since the string in 1422 /* Set the minimum size to zero since the string in
1358 the array could have zero length. */ 1423 the array could have zero length. */
1359 *minlen = ssize_int (0); 1424 *minlen = ssize_int (0);
1360 } 1425 }
1426
1427 if (VAR_P (arg))
1428 {
1429 tree type = TREE_TYPE (arg);
1430 if (POINTER_TYPE_P (type))
1431 type = TREE_TYPE (type);
1432
1433 if (TREE_CODE (type) == ARRAY_TYPE)
1434 {
1435 val = TYPE_SIZE_UNIT (type);
1436 if (!val
1437 || TREE_CODE (val) != INTEGER_CST
1438 || integer_zerop (val))
1439 return false;
1440 val = wide_int_to_tree (TREE_TYPE (val),
1441 wi::sub (wi::to_wide (val), 1));
1442 /* Set the minimum size to zero since the string in
1443 the array could have zero length. */
1444 *minlen = ssize_int (0);
1445 }
1446 }
1361 } 1447 }
1362 1448
1363 if (!val) 1449 if (!val)
1364 return false; 1450 return false;
1365 1451
1366 if (minlen 1452 if (!*minlen
1367 && (!*minlen 1453 || (type > 0
1368 || (type > 0 1454 && TREE_CODE (*minlen) == INTEGER_CST
1369 && TREE_CODE (*minlen) == INTEGER_CST 1455 && TREE_CODE (val) == INTEGER_CST
1370 && TREE_CODE (val) == INTEGER_CST 1456 && tree_int_cst_lt (val, *minlen)))
1371 && tree_int_cst_lt (val, *minlen))))
1372 *minlen = val; 1457 *minlen = val;
1373 1458
1374 if (*maxlen) 1459 if (*maxlen)
1375 { 1460 {
1376 if (type > 0) 1461 if (type > 0)
1413 length. */ 1498 length. */
1414 if (gimple_assign_single_p (def_stmt) 1499 if (gimple_assign_single_p (def_stmt)
1415 || gimple_assign_unary_nop_p (def_stmt)) 1500 || gimple_assign_unary_nop_p (def_stmt))
1416 { 1501 {
1417 tree rhs = gimple_assign_rhs1 (def_stmt); 1502 tree rhs = gimple_assign_rhs1 (def_stmt);
1418 return get_range_strlen (rhs, length, visited, type, fuzzy, flexp); 1503 return get_range_strlen (rhs, length, visited, type, fuzzy, flexp,
1504 eltsize, nonstr);
1419 } 1505 }
1420 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR) 1506 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1421 { 1507 {
1422 tree op2 = gimple_assign_rhs2 (def_stmt); 1508 tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1423 tree op3 = gimple_assign_rhs3 (def_stmt); 1509 gimple_assign_rhs3 (def_stmt) };
1424 return get_range_strlen (op2, length, visited, type, fuzzy, flexp) 1510
1425 && get_range_strlen (op3, length, visited, type, fuzzy, flexp); 1511 for (unsigned int i = 0; i < 2; i++)
1426 } 1512 if (!get_range_strlen (ops[i], length, visited, type, fuzzy,
1513 flexp, eltsize, nonstr))
1514 {
1515 if (fuzzy == 2)
1516 *maxlen = build_all_ones_cst (size_type_node);
1517 else
1518 return false;
1519 }
1520 return true;
1521 }
1427 return false; 1522 return false;
1428 1523
1429 case GIMPLE_PHI: 1524 case GIMPLE_PHI:
1430 { 1525 /* All the arguments of the PHI node must have the same constant
1431 /* All the arguments of the PHI node must have the same constant 1526 length. */
1432 length. */ 1527 for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
1433 unsigned i;
1434
1435 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1436 { 1528 {
1437 tree arg = gimple_phi_arg (def_stmt, i)->def; 1529 tree arg = gimple_phi_arg (def_stmt, i)->def;
1438 1530
1439 /* If this PHI has itself as an argument, we cannot 1531 /* If this PHI has itself as an argument, we cannot
1440 determine the string length of this argument. However, 1532 determine the string length of this argument. However,
1443 constant string length. So be optimistic and just 1535 constant string length. So be optimistic and just
1444 continue with the next argument. */ 1536 continue with the next argument. */
1445 if (arg == gimple_phi_result (def_stmt)) 1537 if (arg == gimple_phi_result (def_stmt))
1446 continue; 1538 continue;
1447 1539
1448 if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp)) 1540 if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp,
1541 eltsize, nonstr))
1449 { 1542 {
1450 if (fuzzy) 1543 if (fuzzy == 2)
1451 *maxlen = build_all_ones_cst (size_type_node); 1544 *maxlen = build_all_ones_cst (size_type_node);
1452 else 1545 else
1453 return false; 1546 return false;
1454 } 1547 }
1455 } 1548 }
1456 }
1457 return true; 1549 return true;
1458 1550
1459 default: 1551 default:
1460 return false; 1552 return false;
1461 } 1553 }
1465 refers to and store each in the first two elements of MINMAXLEN. 1557 refers to and store each in the first two elements of MINMAXLEN.
1466 For expressions that point to strings of unknown lengths that are 1558 For expressions that point to strings of unknown lengths that are
1467 character arrays, use the upper bound of the array as the maximum 1559 character arrays, use the upper bound of the array as the maximum
1468 length. For example, given an expression like 'x ? array : "xyz"' 1560 length. For example, given an expression like 'x ? array : "xyz"'
1469 and array declared as 'char array[8]', MINMAXLEN[0] will be set 1561 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1470 to 3 and MINMAXLEN[1] to 7, the longest string that could be 1562 to 0 and MINMAXLEN[1] to 7, the longest string that could be
1471 stored in array. 1563 stored in array.
1472 Return true if the range of the string lengths has been obtained 1564 Return true if the range of the string lengths has been obtained
1473 from the upper bound of an array at the end of a struct. Such 1565 from the upper bound of an array at the end of a struct. Such
1474 an array may hold a string that's longer than its upper bound 1566 an array may hold a string that's longer than its upper bound
1475 due to it being used as a poor-man's flexible array member. */ 1567 due to it being used as a poor-man's flexible array member.
1568
1569 STRICT is true if it will handle PHIs and COND_EXPRs conservatively
1570 and false if PHIs and COND_EXPRs are to be handled optimistically,
1571 if we can determine string length minimum and maximum; it will use
1572 the minimum from the ones where it can be determined.
1573 STRICT false should be only used for warning code.
1574 When non-null, clear *NONSTR if ARG refers to a constant array
1575 that is known not to be nul-terminated. Otherwise set it to
1576 the declaration of the constant non-terminated array.
1577
1578 ELTSIZE is 1 for normal single byte character strings, and 2 or
1579 4 for wide character strings. ELTSIZE is by default 1. */
1476 1580
1477 bool 1581 bool
1478 get_range_strlen (tree arg, tree minmaxlen[2]) 1582 get_range_strlen (tree arg, tree minmaxlen[2], unsigned eltsize,
1583 bool strict, tree *nonstr /* = NULL */)
1479 { 1584 {
1480 bitmap visited = NULL; 1585 bitmap visited = NULL;
1481 1586
1482 minmaxlen[0] = NULL_TREE; 1587 minmaxlen[0] = NULL_TREE;
1483 minmaxlen[1] = NULL_TREE; 1588 minmaxlen[1] = NULL_TREE;
1484 1589
1590 tree nonstrbuf;
1591 if (!nonstr)
1592 nonstr = &nonstrbuf;
1593 *nonstr = NULL_TREE;
1594
1485 bool flexarray = false; 1595 bool flexarray = false;
1486 get_range_strlen (arg, minmaxlen, &visited, 1, true, &flexarray); 1596 if (!get_range_strlen (arg, minmaxlen, &visited, 1, strict ? 1 : 2,
1597 &flexarray, eltsize, nonstr))
1598 {
1599 minmaxlen[0] = NULL_TREE;
1600 minmaxlen[1] = NULL_TREE;
1601 }
1487 1602
1488 if (visited) 1603 if (visited)
1489 BITMAP_FREE (visited); 1604 BITMAP_FREE (visited);
1490 1605
1491 return flexarray; 1606 return flexarray;
1492 } 1607 }
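
The 'x ? array : "xyz"' case from the comment above corresponds to source like the following; the minimum length 0 reflects that the array may hold an empty string and the maximum 7 comes from its capacity (the function and variable names are invented for the example):

char array[8];

const char *
pick (int x)
{
  /* get_range_strlen on the returned pointer yields MINMAXLEN[0] == 0
     and MINMAXLEN[1] == 7, the longest string array could hold.  */
  return x ? array : "xyz";
}
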
1493 1608
1609 /* Return the maximum string length for ARG, counting by TYPE
1610 (1, 2 or 4 for normal or wide chars). NONSTR indicates
1611 if the caller is prepared to handle unterminated strings.
1612
1613 If an unterminated string is discovered and our caller handles
1614 unterminated strings, then bubble up the offending DECL and
1615 return the maximum size. Otherwise return NULL. */
1616
1494 tree 1617 tree
1495 get_maxval_strlen (tree arg, int type) 1618 get_maxval_strlen (tree arg, int type, tree *nonstr /* = NULL */)
1496 { 1619 {
1497 bitmap visited = NULL; 1620 bitmap visited = NULL;
1498 tree len[2] = { NULL_TREE, NULL_TREE }; 1621 tree len[2] = { NULL_TREE, NULL_TREE };
1499 1622
1500 bool dummy; 1623 bool dummy;
1501 if (!get_range_strlen (arg, len, &visited, type, false, &dummy)) 1624 /* Set to non-null if ARG refers to an unterminated array. */
1625 tree mynonstr = NULL_TREE;
1626 if (!get_range_strlen (arg, len, &visited, type, 0, &dummy, 1, &mynonstr))
1502 len[1] = NULL_TREE; 1627 len[1] = NULL_TREE;
1503 if (visited) 1628 if (visited)
1504 BITMAP_FREE (visited); 1629 BITMAP_FREE (visited);
1505 1630
1506 return len[1]; 1631 if (nonstr)
1632 {
1633 /* For callers prepared to handle unterminated arrays set
1634 *NONSTR to point to the declaration of the array and return
1635 the maximum length/size. */
1636 *nonstr = mynonstr;
1637 return len[1];
1638 }
1639
1640 /* Fail if the constant array isn't nul-terminated. */
1641 return mynonstr ? NULL_TREE : len[1];
1507 } 1642 }
1508 1643
1509 1644
1510 /* Fold function call to builtin strcpy with arguments DEST and SRC. 1645 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1511 If LEN is not NULL, it represents the length of the string to be 1646 If LEN is not NULL, it represents the length of the string to be
1513 1648
1514 static bool 1649 static bool
1515 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi, 1650 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1516 tree dest, tree src) 1651 tree dest, tree src)
1517 { 1652 {
1518 location_t loc = gimple_location (gsi_stmt (*gsi)); 1653 gimple *stmt = gsi_stmt (*gsi);
1654 location_t loc = gimple_location (stmt);
1519 tree fn; 1655 tree fn;
1520 1656
1521 /* If SRC and DEST are the same (and not volatile), return DEST. */ 1657 /* If SRC and DEST are the same (and not volatile), return DEST. */
1522 if (operand_equal_p (src, dest, 0)) 1658 if (operand_equal_p (src, dest, 0))
1523 { 1659 {
1660 /* Issue -Wrestrict unless the pointers are null (those do
1661 not point to objects and so do not indicate an overlap;
1662 such calls could be the result of sanitization and jump
1663 threading). */
1664 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
1665 {
1666 tree func = gimple_call_fndecl (stmt);
1667
1668 warning_at (loc, OPT_Wrestrict,
1669 "%qD source argument is the same as destination",
1670 func);
1671 }
1672
1524 replace_call_with_value (gsi, dest); 1673 replace_call_with_value (gsi, dest);
1525 return true; 1674 return true;
1526 } 1675 }
1527 1676
1528 if (optimize_function_for_size_p (cfun)) 1677 if (optimize_function_for_size_p (cfun))
1530 1679
1531 fn = builtin_decl_implicit (BUILT_IN_MEMCPY); 1680 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1532 if (!fn) 1681 if (!fn)
1533 return false; 1682 return false;
1534 1683
1535 tree len = get_maxval_strlen (src, 0); 1684 /* Set to non-null if ARG refers to an unterminated array. */
1685 tree nonstr = NULL;
1686 tree len = get_maxval_strlen (src, 0, &nonstr);
1687
1688 if (nonstr)
1689 {
1690 /* Avoid folding calls with unterminated arrays. */
1691 if (!gimple_no_warning_p (stmt))
1692 warn_string_no_nul (loc, "strcpy", src, nonstr);
1693 gimple_set_no_warning (stmt, true);
1694 return false;
1695 }
1696
1536 if (!len) 1697 if (!len)
1537 return false; 1698 return false;
1538 1699
1539 len = fold_convert_loc (loc, size_type_node, len); 1700 len = fold_convert_loc (loc, size_type_node, len);
1540 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1)); 1701 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
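
The two diagnostics added to gimple_fold_builtin_strcpy cover calls like the ones sketched below: the first is expected to draw -Wrestrict because source and destination are the same pointer, and the second to be reported by warn_string_no_nul and left unfolded because the constant source array has no terminating nul. This is only an illustration of the intent, not a testsuite excerpt.

#include <string.h>

const char abc[3] = "abc";   /* the nul does not fit; unterminated */
char out[8];

void
demo (char *p)
{
  strcpy (p, p);       /* same source and destination: -Wrestrict */
  strcpy (out, abc);   /* unterminated source: diagnosed, not folded */
}
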
1551 1712
1552 static bool 1713 static bool
1553 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi, 1714 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1554 tree dest, tree src, tree len) 1715 tree dest, tree src, tree len)
1555 { 1716 {
1556 location_t loc = gimple_location (gsi_stmt (*gsi)); 1717 gimple *stmt = gsi_stmt (*gsi);
1557 tree fn; 1718 location_t loc = gimple_location (stmt);
1719 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
1558 1720
1559 /* If the LEN parameter is zero, return DEST. */ 1721 /* If the LEN parameter is zero, return DEST. */
1560 if (integer_zerop (len)) 1722 if (integer_zerop (len))
1561 { 1723 {
1724 /* Avoid warning if the destination refers to an array/pointer
1725 decorated with attribute nonstring. */
1726 if (!nonstring)
1727 {
1728 tree fndecl = gimple_call_fndecl (stmt);
1729
1730 /* Warn about the lack of nul termination: the result is not
1731 a (nul-terminated) string. */
1732 tree slen = get_maxval_strlen (src, 0);
1733 if (slen && !integer_zerop (slen))
1734 warning_at (loc, OPT_Wstringop_truncation,
1735 "%G%qD destination unchanged after copying no bytes "
1736 "from a string of length %E",
1737 stmt, fndecl, slen);
1738 else
1739 warning_at (loc, OPT_Wstringop_truncation,
1740 "%G%qD destination unchanged after copying no bytes",
1741 stmt, fndecl);
1742 }
1743
1562 replace_call_with_value (gsi, dest); 1744 replace_call_with_value (gsi, dest);
1563 return true; 1745 return true;
1564 } 1746 }
1565 1747
1566 /* We can't compare slen with len as constants below if len is not a 1748 /* We can't compare slen with len as constants below if len is not a
1571 /* Now, we must be passed a constant src ptr parameter. */ 1753 /* Now, we must be passed a constant src ptr parameter. */
1572 tree slen = get_maxval_strlen (src, 0); 1754 tree slen = get_maxval_strlen (src, 0);
1573 if (!slen || TREE_CODE (slen) != INTEGER_CST) 1755 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1574 return false; 1756 return false;
1575 1757
1576 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1)); 1758 /* The size of the source string including the terminating nul. */
1759 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1577 1760
1578 /* We do not support simplification of this case, though we do 1761 /* We do not support simplification of this case, though we do
1579 support it when expanding trees into RTL. */ 1762 support it when expanding trees into RTL. */
1580 /* FIXME: generate a call to __builtin_memset. */ 1763 /* FIXME: generate a call to __builtin_memset. */
1581 if (tree_int_cst_lt (slen, len)) 1764 if (tree_int_cst_lt (ssize, len))
1582 return false; 1765 return false;
1583 1766
1767 /* Diagnose truncation that leaves the copy unterminated. */
1768 maybe_diag_stxncpy_trunc (*gsi, src, len);
1769
1584 /* OK transform into builtin memcpy. */ 1770 /* OK transform into builtin memcpy. */
1585 fn = builtin_decl_implicit (BUILT_IN_MEMCPY); 1771 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1586 if (!fn) 1772 if (!fn)
1587 return false; 1773 return false;
1588 1774
1589 len = fold_convert_loc (loc, size_type_node, len); 1775 len = fold_convert_loc (loc, size_type_node, len);
1590 len = force_gimple_operand_gsi (gsi, len, true, 1776 len = force_gimple_operand_gsi (gsi, len, true,
1591 NULL_TREE, true, GSI_SAME_STMT); 1777 NULL_TREE, true, GSI_SAME_STMT);
1592 gimple *repl = gimple_build_call (fn, 3, dest, src, len); 1778 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1593 replace_call_with_call_and_fold (gsi, repl); 1779 replace_call_with_call_and_fold (gsi, repl);
1780
1594 return true; 1781 return true;
1595 } 1782 }
1596 1783
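
The zero-length branch of gimple_fold_builtin_strncpy now warns (unless the destination carries attribute nonstring) instead of silently replacing the call with DEST. The kind of call it targets, with made-up names:

#include <string.h>

char dst[8];

void
truncate_to_nothing (void)
{
  /* Copies no bytes, so dst is left unchanged and not nul-terminated
     by this call; expected to trigger -Wstringop-truncation,
     "destination unchanged after copying no bytes from a string of
     length 3".  */
  strncpy (dst, "abc", 0);
}
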
1597 /* Fold function call to builtin strchr or strrchr. 1784 /* Fold function call to builtin strchr or strrchr.
1598 If both arguments are constant, evaluate and fold the result, 1785 If both arguments are constant, evaluate and fold the result,
1870 /* Simplify a call to the strncat builtin. */ 2057 /* Simplify a call to the strncat builtin. */
1871 2058
1872 static bool 2059 static bool
1873 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi) 2060 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1874 { 2061 {
1875 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi)); 2062 gimple *stmt = gsi_stmt (*gsi);
1876 tree dst = gimple_call_arg (stmt, 0); 2063 tree dst = gimple_call_arg (stmt, 0);
1877 tree src = gimple_call_arg (stmt, 1); 2064 tree src = gimple_call_arg (stmt, 1);
1878 tree len = gimple_call_arg (stmt, 2); 2065 tree len = gimple_call_arg (stmt, 2);
1879 2066
1880 const char *p = c_getstr (src); 2067 const char *p = c_getstr (src);
1885 { 2072 {
1886 replace_call_with_value (gsi, dst); 2073 replace_call_with_value (gsi, dst);
1887 return true; 2074 return true;
1888 } 2075 }
1889 2076
1890 /* If the requested len is greater than or equal to the string 2077 if (TREE_CODE (len) != INTEGER_CST || !p)
1891 length, call strcat. */ 2078 return false;
1892 if (TREE_CODE (len) == INTEGER_CST && p 2079
1893 && compare_tree_int (len, strlen (p)) >= 0) 2080 unsigned srclen = strlen (p);
1894 { 2081
1895 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT); 2082 int cmpsrc = compare_tree_int (len, srclen);
1896 2083
1897 /* If the replacement _DECL isn't initialized, don't do the 2084 /* Return early if the requested len is less than the string length.
1898 transformation. */ 2085 Warnings will be issued elsewhere later. */
1899 if (!fn) 2086 if (cmpsrc < 0)
1900 return false; 2087 return false;
1901 2088
1902 gcall *repl = gimple_build_call (fn, 2, dst, src); 2089 unsigned HOST_WIDE_INT dstsize;
1903 replace_call_with_call_and_fold (gsi, repl); 2090
1904 return true; 2091 bool nowarn = gimple_no_warning_p (stmt);
1905 } 2092
1906 2093 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
1907 return false; 2094 {
2095 int cmpdst = compare_tree_int (len, dstsize);
2096
2097 if (cmpdst >= 0)
2098 {
2099 tree fndecl = gimple_call_fndecl (stmt);
2100
2101 /* Strncat copies (at most) LEN bytes and always appends
2102 the terminating NUL so the specified bound should never
2103 be equal to (or greater than) the size of the destination.
2104 If it is, the copy could overflow. */
2105 location_t loc = gimple_location (stmt);
2106 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2107 cmpdst == 0
2108 ? G_("%G%qD specified bound %E equals "
2109 "destination size")
2110 : G_("%G%qD specified bound %E exceeds "
2111 "destination size %wu"),
2112 stmt, fndecl, len, dstsize);
2113 if (nowarn)
2114 gimple_set_no_warning (stmt, true);
2115 }
2116 }
2117
2118 if (!nowarn && cmpsrc == 0)
2119 {
2120 tree fndecl = gimple_call_fndecl (stmt);
2121 location_t loc = gimple_location (stmt);
2122
2123 /* To avoid possible overflow the specified bound should also
2124 not be equal to the length of the source, even when the size
2125 of the destination is unknown (it's not an uncommon mistake
2126 to specify as the bound to strncpy the length of the source). */
2127 if (warning_at (loc, OPT_Wstringop_overflow_,
2128 "%G%qD specified bound %E equals source length",
2129 stmt, fndecl, len))
2130 gimple_set_no_warning (stmt, true);
2131 }
2132
2133 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2134
2135 /* If the replacement _DECL isn't initialized, don't do the
2136 transformation. */
2137 if (!fn)
2138 return false;
2139
2140 /* Otherwise, emit a call to strcat. */
2141 gcall *repl = gimple_build_call (fn, 2, dst, src);
2142 replace_call_with_call_and_fold (gsi, repl);
2143 return true;
1908 } 2144 }
1909 2145
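
Before rewriting strncat as strcat, the new code compares the constant bound with both the source length and the destination size and issues -Wstringop-overflow when they collide. Two illustrative calls with invented names, matching the two warning messages quoted in the hunk:

#include <string.h>

char dst[8];

void
append (void)
{
  /* Bound equals strlen ("abc"); strncat still appends a nul beyond
     the bound, so this is expected to draw "specified bound 3 equals
     source length".  */
  strncat (dst, "abc", 3);

  /* Bound equals the destination size, so the appended nul could land
     one past the end of dst: "specified bound 8 equals destination
     size".  */
  strncat (dst, "abcdefgh", sizeof dst);
}
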
1910 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC, 2146 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1911 LEN, and SIZE. */ 2147 LEN, and SIZE. */
1912 2148
2035 bool known_result = false; 2271 bool known_result = false;
2036 2272
2037 switch (fcode) 2273 switch (fcode)
2038 { 2274 {
2039 case BUILT_IN_STRCMP: 2275 case BUILT_IN_STRCMP:
2276 case BUILT_IN_STRCMP_EQ:
2040 { 2277 {
2041 r = strcmp (p1, p2); 2278 r = strcmp (p1, p2);
2042 known_result = true; 2279 known_result = true;
2043 break; 2280 break;
2044 } 2281 }
2045 case BUILT_IN_STRNCMP: 2282 case BUILT_IN_STRNCMP:
2283 case BUILT_IN_STRNCMP_EQ:
2046 { 2284 {
2047 if (length == -1) 2285 if (length == -1)
2048 break; 2286 break;
2049 r = strncmp (p1, p2, length); 2287 r = strncmp (p1, p2, length);
2050 known_result = true; 2288 known_result = true;
2059 if (length == -1) 2297 if (length == -1)
2060 break; 2298 break;
2061 r = strncmp (p1, p2, length); 2299 r = strncmp (p1, p2, length);
2062 if (r == 0) 2300 if (r == 0)
2063 known_result = true; 2301 known_result = true;
2064 break;; 2302 break;
2065 } 2303 }
2066 default: 2304 default:
2067 gcc_unreachable (); 2305 gcc_unreachable ();
2068 } 2306 }
2069 2307
2074 } 2312 }
2075 } 2313 }
2076 2314
2077 bool nonzero_length = length >= 1 2315 bool nonzero_length = length >= 1
2078 || fcode == BUILT_IN_STRCMP 2316 || fcode == BUILT_IN_STRCMP
2317 || fcode == BUILT_IN_STRCMP_EQ
2079 || fcode == BUILT_IN_STRCASECMP; 2318 || fcode == BUILT_IN_STRCASECMP;
2080 2319
2081 location_t loc = gimple_location (stmt); 2320 location_t loc = gimple_location (stmt);
2082 2321
2083 /* If the second arg is "", return *(const unsigned char*)arg1. */ 2322 /* If the second arg is "", return *(const unsigned char*)arg1. */
2136 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2); 2375 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2137 gimple_seq_add_stmt_without_update (&stmts, stmt); 2376 gimple_seq_add_stmt_without_update (&stmts, stmt);
2138 } 2377 }
2139 2378
2140 gsi_replace_with_seq_vops (gsi, stmts); 2379 gsi_replace_with_seq_vops (gsi, stmts);
2380 return true;
2381 }
2382
2383 /* If length is larger than the length of one constant string,
2384 replace strncmp with corresponding strcmp */
2385 if (fcode == BUILT_IN_STRNCMP
2386 && length > 0
2387 && ((p2 && (size_t) length > strlen (p2))
2388 || (p1 && (size_t) length > strlen (p1))))
2389 {
2390 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2391 if (!fn)
2392 return false;
2393 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2394 replace_call_with_call_and_fold (gsi, repl);
2141 return true; 2395 return true;
2142 } 2396 }
2143 2397
2144 return false; 2398 return false;
2145 } 2399 }
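
The hunk added at the end of this function replaces strncmp by strcmp whenever the bound already exceeds the length of one constant argument, since the comparison stops at that string's terminating nul anyway. For example:

#include <string.h>

int
is_word (const char *s)
{
  /* The bound 16 is larger than strlen ("word") == 4, so this call
     can be rewritten as strcmp (s, "word") without changing the
     result.  */
  return strncmp (s, "word", 16) == 0;
}
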
2402 tree len, fn; 2656 tree len, fn;
2403 2657
2404 /* If SRC and DEST are the same (and not volatile), return DEST. */ 2658 /* If SRC and DEST are the same (and not volatile), return DEST. */
2405 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0)) 2659 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2406 { 2660 {
2661 /* Issue -Wrestrict unless the pointers are null (those do
2662 not point to objects and so do not indicate an overlap;
2663 such calls could be the result of sanitization and jump
2664 threading). */
2665 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
2666 {
2667 tree func = gimple_call_fndecl (stmt);
2668
2669 warning_at (loc, OPT_Wrestrict,
2670 "%qD source argument is the same as destination",
2671 func);
2672 }
2673
2407 replace_call_with_value (gsi, dest); 2674 replace_call_with_value (gsi, dest);
2408 return true; 2675 return true;
2409 } 2676 }
2410 2677
2411 if (! tree_fits_uhwi_p (size)) 2678 if (! tree_fits_uhwi_p (size))
2543 { 2810 {
2544 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi)); 2811 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2545 location_t loc = gimple_location (stmt); 2812 location_t loc = gimple_location (stmt);
2546 tree dest = gimple_call_arg (stmt, 0); 2813 tree dest = gimple_call_arg (stmt, 0);
2547 tree src = gimple_call_arg (stmt, 1); 2814 tree src = gimple_call_arg (stmt, 1);
2548 tree fn, len, lenp1; 2815 tree fn, lenp1;
2549 2816
2550 /* If the result is unused, replace stpcpy with strcpy. */ 2817 /* If the result is unused, replace stpcpy with strcpy. */
2551 if (gimple_call_lhs (stmt) == NULL_TREE) 2818 if (gimple_call_lhs (stmt) == NULL_TREE)
2552 { 2819 {
2553 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY); 2820 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2556 gimple_call_set_fndecl (stmt, fn); 2823 gimple_call_set_fndecl (stmt, fn);
2557 fold_stmt (gsi); 2824 fold_stmt (gsi);
2558 return true; 2825 return true;
2559 } 2826 }
2560 2827
2561 len = c_strlen (src, 1); 2828 /* Set to non-null if ARG refers to an unterminated array. */
2829 c_strlen_data data;
2830 memset (&data, 0, sizeof (c_strlen_data));
2831 tree len = c_strlen (src, 1, &data, 1);
2562 if (!len 2832 if (!len
2563 || TREE_CODE (len) != INTEGER_CST) 2833 || TREE_CODE (len) != INTEGER_CST)
2564 return false; 2834 {
2835 data.decl = unterminated_array (src);
2836 if (!data.decl)
2837 return false;
2838 }
2839
2840 if (data.decl)
2841 {
2842 /* Avoid folding calls with unterminated arrays. */
2843 if (!gimple_no_warning_p (stmt))
2844 warn_string_no_nul (loc, "stpcpy", src, data.decl);
2845 gimple_set_no_warning (stmt, true);
2846 return false;
2847 }
2565 2848
2566 if (optimize_function_for_size_p (cfun) 2849 if (optimize_function_for_size_p (cfun)
2567 /* If length is zero it's small enough. */ 2850 /* If length is zero it's small enough. */
2568 && !integer_zerop (len)) 2851 && !integer_zerop (len))
2569 return false; 2852 return false;
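
gimple_fold_builtin_stpcpy now queries c_strlen_data and unterminated_array before folding, so a call whose constant source lacks a terminating nul is diagnosed via warn_string_no_nul and left as a library call. A hypothetical example of that situation:

#include <string.h>

const char tag[3] = "abc";   /* no room for the terminating nul */
char out[8];

char *
copy_tag (void)
{
  /* Expected to be diagnosed as an unterminated "stpcpy" source and
     left unfolded.  */
  return stpcpy (out, tag);
}
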
2822 3105
2823 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when 3106 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2824 'format' is known to contain no % formats. */ 3107 'format' is known to contain no % formats. */
2825 gimple_seq stmts = NULL; 3108 gimple_seq stmts = NULL;
2826 gimple *repl = gimple_build_call (fn, 2, dest, fmt); 3109 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3110
3111 /* Propagate the NO_WARNING bit to avoid issuing the same
3112 warning more than once. */
3113 if (gimple_no_warning_p (stmt))
3114 gimple_set_no_warning (repl, true);
3115
2827 gimple_seq_add_stmt_without_update (&stmts, repl); 3116 gimple_seq_add_stmt_without_update (&stmts, repl);
2828 if (gimple_call_lhs (stmt)) 3117 if (gimple_call_lhs (stmt))
2829 { 3118 {
2830 repl = gimple_build_assign (gimple_call_lhs (stmt), 3119 repl = gimple_build_assign (gimple_call_lhs (stmt),
2831 build_int_cst (integer_type_node, 3120 build_int_cst (integer_type_node,
2870 } 3159 }
2871 3160
2872 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */ 3161 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2873 gimple_seq stmts = NULL; 3162 gimple_seq stmts = NULL;
2874 gimple *repl = gimple_build_call (fn, 2, dest, orig); 3163 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3164
3165 /* Propagate the NO_WARNING bit to avoid issuing the same
3166 warning more than once. */
3167 if (gimple_no_warning_p (stmt))
3168 gimple_set_no_warning (repl, true);
3169
2875 gimple_seq_add_stmt_without_update (&stmts, repl); 3170 gimple_seq_add_stmt_without_update (&stmts, repl);
2876 if (gimple_call_lhs (stmt)) 3171 if (gimple_call_lhs (stmt))
2877 { 3172 {
2878 if (!useless_type_conversion_p (integer_type_node, 3173 if (!useless_type_conversion_p (integer_type_node,
2879 TREE_TYPE (orig_len))) 3174 TREE_TYPE (orig_len)))
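Illustration (editor's sketch, not part of the patch): the two sprintf folds handled above, written as equivalent source. The functions and names are hypothetical, and the sprintf return value is assumed unused here (when it is used, the fold also assigns the known length to it, as the surrounding code shows).

#include <stdio.h>
#include <string.h>

void
before (char *dst, const char *s)
{
  sprintf (dst, "hello");   /* format contains no '%' directive */
  sprintf (dst, "%s", s);   /* format is exactly "%s" */
}

void
after (char *dst, const char *s)   /* what the folds produce */
{
  strcpy (dst, "hello");
  strcpy (dst, s);
}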
3232 if ((unsigned char)str[len - 1] == target_newline 3527 if ((unsigned char)str[len - 1] == target_newline
3233 && (size_t) (int) len == len 3528 && (size_t) (int) len == len
3234 && (int) len > 0) 3529 && (int) len > 0)
3235 { 3530 {
3236 char *newstr; 3531 char *newstr;
3237 tree offset_node, string_cst;
3238 3532
3239 /* Create a NUL-terminated string that's one char shorter 3533 /* Create a NUL-terminated string that's one char shorter
3240 than the original, stripping off the trailing '\n'. */ 3534 than the original, stripping off the trailing '\n'. */
3241 newarg = build_string_literal (len, str); 3535 newstr = xstrdup (str);
3242 string_cst = string_constant (newarg, &offset_node);
3243 gcc_checking_assert (string_cst
3244 && (TREE_STRING_LENGTH (string_cst)
3245 == (int) len)
3246 && integer_zerop (offset_node)
3247 && (unsigned char)
3248 TREE_STRING_POINTER (string_cst)[len - 1]
3249 == target_newline);
3250 /* build_string_literal creates a new STRING_CST,
3251 modify it in place to avoid double copying. */
3252 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
3253 newstr[len - 1] = '\0'; 3536 newstr[len - 1] = '\0';
3537 newarg = build_string_literal (len, newstr);
3538 free (newstr);
3254 if (fn_puts) 3539 if (fn_puts)
3255 { 3540 {
3256 gcall *repl = gimple_build_call (fn_puts, 1, newarg); 3541 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3257 replace_call_with_call_and_fold (gsi, repl); 3542 replace_call_with_call_and_fold (gsi, repl);
3258 return true; 3543 return true;
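Illustration (editor's sketch, not part of the patch): the newline-stripping fold above at the source level, with a hypothetical constant format string.

#include <stdio.h>

void
before (void)
{
  printf ("hello world\n");   /* constant format ending in '\n', no '%' */
}

void
after (void)                  /* what the fold produces */
{
  puts ("hello world");       /* puts appends the newline itself */
}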
3305 3590
3306 static bool 3591 static bool
3307 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi) 3592 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3308 { 3593 {
3309 gimple *stmt = gsi_stmt (*gsi); 3594 gimple *stmt = gsi_stmt (*gsi);
3310 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0); 3595 tree arg = gimple_call_arg (stmt, 0);
3311 if (!len) 3596
3312 return false; 3597 wide_int minlen;
3313 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT); 3598 wide_int maxlen;
3314 replace_call_with_value (gsi, len); 3599
3315 return true; 3600 /* Set to non-null if ARG refers to an unterminated array. */
3601 tree nonstr;
3602 tree lenrange[2];
3603 if (!get_range_strlen (arg, lenrange, 1, true, &nonstr)
3604 && !nonstr
3605 && lenrange[0] && TREE_CODE (lenrange[0]) == INTEGER_CST
3606 && lenrange[1] && TREE_CODE (lenrange[1]) == INTEGER_CST)
3607 {
3608 /* The range of lengths refers to either a single constant
3609 string or to the longest and shortest constant string
3610 referenced by the argument of the strlen() call, or to
3611 the strings that can possibly be stored in the arrays
3612 the argument refers to. */
3613 minlen = wi::to_wide (lenrange[0]);
3614 maxlen = wi::to_wide (lenrange[1]);
3615 }
3616 else
3617 {
3618 unsigned prec = TYPE_PRECISION (sizetype);
3619
3620 minlen = wi::shwi (0, prec);
3621 maxlen = wi::to_wide (max_object_size (), prec) - 2;
3622 }
3623
3624 if (minlen == maxlen)
3625 {
3626 lenrange[0] = force_gimple_operand_gsi (gsi, lenrange[0], true, NULL,
3627 true, GSI_SAME_STMT);
3628 replace_call_with_value (gsi, lenrange[0]);
3629 return true;
3630 }
3631
3632 if (tree lhs = gimple_call_lhs (stmt))
3633 if (TREE_CODE (lhs) == SSA_NAME
3634 && INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
3635 set_range_info (lhs, VR_RANGE, minlen, maxlen);
3636
3637 return false;
3316 } 3638 }
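Illustration (editor's sketch, not part of the patch): with the get_range_strlen-based code above, strlen is folded to a constant only when the minimum and maximum lengths coincide; otherwise the call is kept and its SSA result may be given a value range. The functions below are hypothetical.

#include <string.h>

size_t
known (void)
{
  return strlen ("abc");      /* minlen == maxlen == 3: folded to 3 */
}

size_t
ranged (int sel)
{
  const char *s = sel ? "ab" : "wxyz";
  return strlen (s);          /* not folded; the lhs can get the range [2, 4] */
}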
3317 3639
3318 /* Fold a call to __builtin_acc_on_device. */ 3640 /* Fold a call to __builtin_acc_on_device. */
3319 3641
3320 static bool 3642 static bool
3447 case BUILT_IN_STRRCHR: 3769 case BUILT_IN_STRRCHR:
3448 return gimple_fold_builtin_strchr (gsi, true); 3770 return gimple_fold_builtin_strchr (gsi, true);
3449 case BUILT_IN_STRSTR: 3771 case BUILT_IN_STRSTR:
3450 return gimple_fold_builtin_strstr (gsi); 3772 return gimple_fold_builtin_strstr (gsi);
3451 case BUILT_IN_STRCMP: 3773 case BUILT_IN_STRCMP:
3774 case BUILT_IN_STRCMP_EQ:
3452 case BUILT_IN_STRCASECMP: 3775 case BUILT_IN_STRCASECMP:
3453 case BUILT_IN_STRNCMP: 3776 case BUILT_IN_STRNCMP:
3777 case BUILT_IN_STRNCMP_EQ:
3454 case BUILT_IN_STRNCASECMP: 3778 case BUILT_IN_STRNCASECMP:
3455 return gimple_fold_builtin_string_compare (gsi); 3779 return gimple_fold_builtin_string_compare (gsi);
3456 case BUILT_IN_MEMCHR: 3780 case BUILT_IN_MEMCHR:
3457 return gimple_fold_builtin_memchr (gsi); 3781 return gimple_fold_builtin_memchr (gsi);
3458 case BUILT_IN_FPUTS: 3782 case BUILT_IN_FPUTS:
3564 static tree 3888 static tree
3565 fold_internal_goacc_dim (const gimple *call) 3889 fold_internal_goacc_dim (const gimple *call)
3566 { 3890 {
3567 int axis = oacc_get_ifn_dim_arg (call); 3891 int axis = oacc_get_ifn_dim_arg (call);
3568 int size = oacc_get_fn_dim_size (current_function_decl, axis); 3892 int size = oacc_get_fn_dim_size (current_function_decl, axis);
3569 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
3570 tree result = NULL_TREE; 3893 tree result = NULL_TREE;
3571 3894 tree type = TREE_TYPE (gimple_call_lhs (call));
3572 /* If the size is 1, or we only want the size and it is not dynamic, 3895
3573 we know the answer. */ 3896 switch (gimple_call_internal_fn (call))
3574 if (size == 1 || (!is_pos && size)) 3897 {
3575 { 3898 case IFN_GOACC_DIM_POS:
3576 tree type = TREE_TYPE (gimple_call_lhs (call)); 3899 /* If the size is 1, we know the answer. */
3577 result = build_int_cst (type, size - is_pos); 3900 if (size == 1)
3901 result = build_int_cst (type, 0);
3902 break;
3903 case IFN_GOACC_DIM_SIZE:
3904 /* If the size is not dynamic, we know the answer. */
3905 if (size)
3906 result = build_int_cst (type, size);
3907 break;
3908 default:
3909 break;
3578 } 3910 }
3579 3911
3580 return result; 3912 return result;
3581 } 3913 }
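Illustration (editor's sketch, not part of the patch): a standalone model of the new switch above. FN stands for the internal function, SIZE for the compile-time dimension size (0 when dynamic), and -1 means the call is left alone.

enum dim_fn { DIM_POS, DIM_SIZE };

static int
fold_dim (enum dim_fn fn, int size)
{
  switch (fn)
    {
    case DIM_POS:
      /* A dimension of size 1 has only position 0.  */
      return size == 1 ? 0 : -1;
    case DIM_SIZE:
      /* A non-dynamic size is known at compile time.  */
      return size ? size : -1;
    }
  return -1;
}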
3582 3914
3621 || VECTOR_TYPE_P (etype) 3953 || VECTOR_TYPE_P (etype)
3622 || TREE_CODE (etype) == COMPLEX_TYPE 3954 || TREE_CODE (etype) == COMPLEX_TYPE
3623 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs 3955 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3624 might not preserve all the bits. See PR71716. */ 3956 might not preserve all the bits. See PR71716. */
3625 || SCALAR_FLOAT_TYPE_P (etype) 3957 || SCALAR_FLOAT_TYPE_P (etype)
3626 || TYPE_PRECISION (etype) != GET_MODE_BITSIZE (TYPE_MODE (etype))) 3958 || maybe_ne (TYPE_PRECISION (etype),
3959 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3627 return false; 3960 return false;
3628 3961
3629 tree weak = gimple_call_arg (stmt, 3); 3962 tree weak = gimple_call_arg (stmt, 3);
3630 if (!integer_zerop (weak) && !integer_onep (weak)) 3963 if (!integer_zerop (weak) && !integer_onep (weak))
3631 return false; 3964 return false;
3637 if (direct_optab_handler (atomic_compare_and_swap_optab, mode) 3970 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3638 == CODE_FOR_nothing 3971 == CODE_FOR_nothing
3639 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing) 3972 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3640 return false; 3973 return false;
3641 3974
3642 if (int_size_in_bytes (etype) != GET_MODE_SIZE (mode)) 3975 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
3643 return false; 3976 return false;
3644 3977
3645 return true; 3978 return true;
3646 } 3979 }
3647 3980
3688 gimple_call_set_lhs (g, lhs); 4021 gimple_call_set_lhs (g, lhs);
3689 gimple_set_vdef (g, gimple_vdef (stmt)); 4022 gimple_set_vdef (g, gimple_vdef (stmt));
3690 gimple_set_vuse (g, gimple_vuse (stmt)); 4023 gimple_set_vuse (g, gimple_vuse (stmt));
3691 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g; 4024 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
3692 tree oldlhs = gimple_call_lhs (stmt); 4025 tree oldlhs = gimple_call_lhs (stmt);
3693 if (stmt_can_throw_internal (stmt)) 4026 if (stmt_can_throw_internal (cfun, stmt))
3694 { 4027 {
3695 throws = true; 4028 throws = true;
3696 e = find_fallthru_edge (gsi_bb (*gsi)->succs); 4029 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
3697 } 4030 }
3698 gimple_call_set_nothrow (as_a <gcall *> (g), 4031 gimple_call_set_nothrow (as_a <gcall *> (g),
3741 4074
3742 bool 4075 bool
3743 arith_overflowed_p (enum tree_code code, const_tree type, 4076 arith_overflowed_p (enum tree_code code, const_tree type,
3744 const_tree arg0, const_tree arg1) 4077 const_tree arg0, const_tree arg1)
3745 { 4078 {
3746 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3747 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3748 widest2_int_cst;
3749 widest2_int warg0 = widest2_int_cst (arg0); 4079 widest2_int warg0 = widest2_int_cst (arg0);
3750 widest2_int warg1 = widest2_int_cst (arg1); 4080 widest2_int warg1 = widest2_int_cst (arg1);
3751 widest2_int wres; 4081 widest2_int wres;
3752 switch (code) 4082 switch (code)
3753 { 4083 {
3817 if (final && targets.length () <= 1 && dbg_cnt (devirt)) 4147 if (final && targets.length () <= 1 && dbg_cnt (devirt))
3818 { 4148 {
3819 tree lhs = gimple_call_lhs (stmt); 4149 tree lhs = gimple_call_lhs (stmt);
3820 if (dump_enabled_p ()) 4150 if (dump_enabled_p ())
3821 { 4151 {
3822 location_t loc = gimple_location_safe (stmt); 4152 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
3823 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3824 "folding virtual function call to %s\n", 4153 "folding virtual function call to %s\n",
3825 targets.length () == 1 4154 targets.length () == 1
3826 ? targets[0]->name () 4155 ? targets[0]->name ()
3827 : "__builtin_unreachable"); 4156 : "__builtin_unreachable");
3828 } 4157 }
3925 { 4254 {
3926 case IFN_BUILTIN_EXPECT: 4255 case IFN_BUILTIN_EXPECT:
3927 result = fold_builtin_expect (gimple_location (stmt), 4256 result = fold_builtin_expect (gimple_location (stmt),
3928 gimple_call_arg (stmt, 0), 4257 gimple_call_arg (stmt, 0),
3929 gimple_call_arg (stmt, 1), 4258 gimple_call_arg (stmt, 1),
3930 gimple_call_arg (stmt, 2)); 4259 gimple_call_arg (stmt, 2),
4260 NULL_TREE);
3931 break; 4261 break;
3932 case IFN_UBSAN_OBJECT_SIZE: 4262 case IFN_UBSAN_OBJECT_SIZE:
3933 { 4263 {
3934 tree offset = gimple_call_arg (stmt, 1); 4264 tree offset = gimple_call_arg (stmt, 1);
3935 tree objsize = gimple_call_arg (stmt, 2); 4265 tree objsize = gimple_call_arg (stmt, 2);
4111 and the associated statements in *SEQ. Does the replacement 4441 and the associated statements in *SEQ. Does the replacement
4112 according to INPLACE and returns true if the operation succeeded. */ 4442 according to INPLACE and returns true if the operation succeeded. */
4113 4443
4114 static bool 4444 static bool
4115 replace_stmt_with_simplification (gimple_stmt_iterator *gsi, 4445 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
4116 code_helper rcode, tree *ops, 4446 gimple_match_op *res_op,
4117 gimple_seq *seq, bool inplace) 4447 gimple_seq *seq, bool inplace)
4118 { 4448 {
4119 gimple *stmt = gsi_stmt (*gsi); 4449 gimple *stmt = gsi_stmt (*gsi);
4450 tree *ops = res_op->ops;
4451 unsigned int num_ops = res_op->num_ops;
4120 4452
4121 /* Play safe and do not allow abnormals to be mentioned in 4453 /* Play safe and do not allow abnormals to be mentioned in
4122 newly created statements. See also maybe_push_res_to_seq. 4454 newly created statements. See also maybe_push_res_to_seq.
4123 As an exception allow such uses if there was a use of the 4455 As an exception allow such uses if there was a use of the
4124 same SSA name on the old stmt. */ 4456 same SSA name on the old stmt. */
4125 if ((TREE_CODE (ops[0]) == SSA_NAME 4457 for (unsigned int i = 0; i < num_ops; ++i)
4126 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]) 4458 if (TREE_CODE (ops[i]) == SSA_NAME
4127 && !has_use_on_stmt (ops[0], stmt)) 4459 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
4128 || (ops[1] 4460 && !has_use_on_stmt (ops[i], stmt))
4129 && TREE_CODE (ops[1]) == SSA_NAME 4461 return false;
4130 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]) 4462
4131 && !has_use_on_stmt (ops[1], stmt)) 4463 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
4132 || (ops[2] 4464 for (unsigned int i = 0; i < 2; ++i)
4133 && TREE_CODE (ops[2]) == SSA_NAME 4465 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
4134 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2]) 4466 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
4135 && !has_use_on_stmt (ops[2], stmt)) 4467 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
4136 || (COMPARISON_CLASS_P (ops[0]) 4468 return false;
4137 && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
4138 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
4139 && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
4140 || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
4141 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
4142 && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
4143 return false;
4144 4469
4145 /* Don't insert new statements when INPLACE is true, even if we could 4470 /* Don't insert new statements when INPLACE is true, even if we could
4146 reuse STMT for the final statement. */ 4471 reuse STMT for the final statement. */
4147 if (inplace && !gimple_seq_empty_p (*seq)) 4472 if (inplace && !gimple_seq_empty_p (*seq))
4148 return false; 4473 return false;
4149 4474
4150 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt)) 4475 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
4151 { 4476 {
4152 gcc_assert (rcode.is_tree_code ()); 4477 gcc_assert (res_op->code.is_tree_code ());
4153 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison 4478 if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
4154 /* GIMPLE_CONDs condition may not throw. */ 4479 /* GIMPLE_CONDs condition may not throw. */
4155 && (!flag_exceptions 4480 && (!flag_exceptions
4156 || !cfun->can_throw_non_call_exceptions 4481 || !cfun->can_throw_non_call_exceptions
4157 || !operation_could_trap_p (rcode, 4482 || !operation_could_trap_p (res_op->code,
4158 FLOAT_TYPE_P (TREE_TYPE (ops[0])), 4483 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4159 false, NULL_TREE))) 4484 false, NULL_TREE)))
4160 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]); 4485 gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
4161 else if (rcode == SSA_NAME) 4486 else if (res_op->code == SSA_NAME)
4162 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0], 4487 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
4163 build_zero_cst (TREE_TYPE (ops[0]))); 4488 build_zero_cst (TREE_TYPE (ops[0])));
4164 else if (rcode == INTEGER_CST) 4489 else if (res_op->code == INTEGER_CST)
4165 { 4490 {
4166 if (integer_zerop (ops[0])) 4491 if (integer_zerop (ops[0]))
4167 gimple_cond_make_false (cond_stmt); 4492 gimple_cond_make_false (cond_stmt);
4168 else 4493 else
4169 gimple_cond_make_true (cond_stmt); 4494 gimple_cond_make_true (cond_stmt);
4170 } 4495 }
4171 else if (!inplace) 4496 else if (!inplace)
4172 { 4497 {
4173 tree res = maybe_push_res_to_seq (rcode, boolean_type_node, 4498 tree res = maybe_push_res_to_seq (res_op, seq);
4174 ops, seq);
4175 if (!res) 4499 if (!res)
4176 return false; 4500 return false;
4177 gimple_cond_set_condition (cond_stmt, NE_EXPR, res, 4501 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
4178 build_zero_cst (TREE_TYPE (res))); 4502 build_zero_cst (TREE_TYPE (res)));
4179 } 4503 }
4189 } 4513 }
4190 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT); 4514 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4191 return true; 4515 return true;
4192 } 4516 }
4193 else if (is_gimple_assign (stmt) 4517 else if (is_gimple_assign (stmt)
4194 && rcode.is_tree_code ()) 4518 && res_op->code.is_tree_code ())
4195 { 4519 {
4196 if (!inplace 4520 if (!inplace
4197 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode)) 4521 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
4198 { 4522 {
4199 maybe_build_generic_op (rcode, 4523 maybe_build_generic_op (res_op);
4200 TREE_TYPE (gimple_assign_lhs (stmt)), ops); 4524 gimple_assign_set_rhs_with_ops (gsi, res_op->code,
4201 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]); 4525 res_op->op_or_null (0),
4526 res_op->op_or_null (1),
4527 res_op->op_or_null (2));
4202 if (dump_file && (dump_flags & TDF_DETAILS)) 4528 if (dump_file && (dump_flags & TDF_DETAILS))
4203 { 4529 {
4204 fprintf (dump_file, "gimple_simplified to "); 4530 fprintf (dump_file, "gimple_simplified to ");
4205 if (!gimple_seq_empty_p (*seq)) 4531 if (!gimple_seq_empty_p (*seq))
4206 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM); 4532 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4209 } 4535 }
4210 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT); 4536 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4211 return true; 4537 return true;
4212 } 4538 }
4213 } 4539 }
4214 else if (rcode.is_fn_code () 4540 else if (res_op->code.is_fn_code ()
4215 && gimple_call_combined_fn (stmt) == rcode) 4541 && gimple_call_combined_fn (stmt) == res_op->code)
4216 { 4542 {
4217 unsigned i; 4543 gcc_assert (num_ops == gimple_call_num_args (stmt));
4218 for (i = 0; i < gimple_call_num_args (stmt); ++i) 4544 for (unsigned int i = 0; i < num_ops; ++i)
4219 { 4545 gimple_call_set_arg (stmt, i, ops[i]);
4220 gcc_assert (ops[i] != NULL_TREE);
4221 gimple_call_set_arg (stmt, i, ops[i]);
4222 }
4223 if (i < 3)
4224 gcc_assert (ops[i] == NULL_TREE);
4225 if (dump_file && (dump_flags & TDF_DETAILS)) 4546 if (dump_file && (dump_flags & TDF_DETAILS))
4226 { 4547 {
4227 fprintf (dump_file, "gimple_simplified to "); 4548 fprintf (dump_file, "gimple_simplified to ");
4228 if (!gimple_seq_empty_p (*seq)) 4549 if (!gimple_seq_empty_p (*seq))
4229 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM); 4550 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4235 else if (!inplace) 4556 else if (!inplace)
4236 { 4557 {
4237 if (gimple_has_lhs (stmt)) 4558 if (gimple_has_lhs (stmt))
4238 { 4559 {
4239 tree lhs = gimple_get_lhs (stmt); 4560 tree lhs = gimple_get_lhs (stmt);
4240 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs), 4561 if (!maybe_push_res_to_seq (res_op, seq, lhs))
4241 ops, seq, lhs))
4242 return false; 4562 return false;
4243 if (dump_file && (dump_flags & TDF_DETAILS)) 4563 if (dump_file && (dump_flags & TDF_DETAILS))
4244 { 4564 {
4245 fprintf (dump_file, "gimple_simplified to "); 4565 fprintf (dump_file, "gimple_simplified to ");
4246 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM); 4566 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4315 if (TREE_CODE (addr) == ADDR_EXPR 4635 if (TREE_CODE (addr) == ADDR_EXPR
4316 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF 4636 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4317 || handled_component_p (TREE_OPERAND (addr, 0)))) 4637 || handled_component_p (TREE_OPERAND (addr, 0))))
4318 { 4638 {
4319 tree base; 4639 tree base;
4320 HOST_WIDE_INT coffset; 4640 poly_int64 coffset;
4321 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0), 4641 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4322 &coffset); 4642 &coffset);
4323 if (!base) 4643 if (!base)
4324 gcc_unreachable (); 4644 gcc_unreachable ();
4325 4645
4502 if (!inplace 4822 if (!inplace
4503 || is_gimple_assign (stmt) 4823 || is_gimple_assign (stmt)
4504 || gimple_code (stmt) == GIMPLE_COND) 4824 || gimple_code (stmt) == GIMPLE_COND)
4505 { 4825 {
4506 gimple_seq seq = NULL; 4826 gimple_seq seq = NULL;
4507 code_helper rcode; 4827 gimple_match_op res_op;
4508 tree ops[3] = {}; 4828 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
4509 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
4510 valueize, valueize)) 4829 valueize, valueize))
4511 { 4830 {
4512 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace)) 4831 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
4513 changed = true; 4832 changed = true;
4514 else 4833 else
4515 gimple_seq_discard (seq); 4834 gimple_seq_discard (seq);
4516 } 4835 }
4517 } 4836 }
4715 follow_single_use_edges (tree val) 5034 follow_single_use_edges (tree val)
4716 { 5035 {
4717 if (TREE_CODE (val) == SSA_NAME 5036 if (TREE_CODE (val) == SSA_NAME
4718 && !has_single_use (val)) 5037 && !has_single_use (val))
4719 return NULL_TREE; 5038 return NULL_TREE;
5039 return val;
5040 }
5041
5042 /* Valueization callback that follows all SSA edges. */
5043
5044 tree
5045 follow_all_ssa_edges (tree val)
5046 {
4720 return val; 5047 return val;
4721 } 5048 }
4722 5049
4723 /* Fold the statement pointed to by GSI. In some cases, this function may 5050 /* Fold the statement pointed to by GSI. In some cases, this function may
4724 replace the whole statement with a new one. Returns true iff folding 5051 replace the whole statement with a new one. Returns true iff folding
5849 6176
5850 tree 6177 tree
5851 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree), 6178 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
5852 tree (*gvalueize) (tree)) 6179 tree (*gvalueize) (tree))
5853 { 6180 {
5854 code_helper rcode; 6181 gimple_match_op res_op;
5855 tree ops[3] = {};
5856 /* ??? The SSA propagators do not correctly deal with following SSA use-def 6182 /* ??? The SSA propagators do not correctly deal with following SSA use-def
5857 edges if there are intermediate VARYING defs. For this reason 6183 edges if there are intermediate VARYING defs. For this reason
5858 do not follow SSA edges here even though SCCVN can technically 6184 do not follow SSA edges here even though SCCVN can technically
5859 just deal fine with that. */ 6185 just deal fine with that. */
5860 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize)) 6186 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
5861 { 6187 {
5862 tree res = NULL_TREE; 6188 tree res = NULL_TREE;
5863 if (gimple_simplified_result_is_gimple_val (rcode, ops)) 6189 if (gimple_simplified_result_is_gimple_val (&res_op))
5864 res = ops[0]; 6190 res = res_op.ops[0];
5865 else if (mprts_hook) 6191 else if (mprts_hook)
5866 res = mprts_hook (rcode, gimple_expr_type (stmt), ops); 6192 res = mprts_hook (&res_op);
5867 if (res) 6193 if (res)
5868 { 6194 {
5869 if (dump_file && dump_flags & TDF_DETAILS) 6195 if (dump_file && dump_flags & TDF_DETAILS)
5870 { 6196 {
5871 fprintf (dump_file, "Match-and-simplified "); 6197 fprintf (dump_file, "Match-and-simplified ");
5901 /* Handle propagating invariant addresses into address 6227 /* Handle propagating invariant addresses into address
5902 operations. */ 6228 operations. */
5903 else if (TREE_CODE (rhs) == ADDR_EXPR 6229 else if (TREE_CODE (rhs) == ADDR_EXPR
5904 && !is_gimple_min_invariant (rhs)) 6230 && !is_gimple_min_invariant (rhs))
5905 { 6231 {
5906 HOST_WIDE_INT offset = 0; 6232 poly_int64 offset = 0;
5907 tree base; 6233 tree base;
5908 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0), 6234 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
5909 &offset, 6235 &offset,
5910 valueize); 6236 valueize);
5911 if (base 6237 if (base
5914 return build_invariant_address (TREE_TYPE (rhs), 6240 return build_invariant_address (TREE_TYPE (rhs),
5915 base, offset); 6241 base, offset);
5916 } 6242 }
5917 else if (TREE_CODE (rhs) == CONSTRUCTOR 6243 else if (TREE_CODE (rhs) == CONSTRUCTOR
5918 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE 6244 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
5919 && (CONSTRUCTOR_NELTS (rhs) 6245 && known_eq (CONSTRUCTOR_NELTS (rhs),
5920 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)))) 6246 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
5921 { 6247 {
5922 unsigned i, nelts; 6248 unsigned i, nelts;
5923 tree val; 6249 tree val;
5924 6250
5925 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)); 6251 nelts = CONSTRUCTOR_NELTS (rhs);
5926 auto_vec<tree, 32> vec (nelts); 6252 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
5927 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val) 6253 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
5928 { 6254 {
5929 val = (*valueize) (val); 6255 val = (*valueize) (val);
5930 if (TREE_CODE (val) == INTEGER_CST 6256 if (TREE_CODE (val) == INTEGER_CST
5931 || TREE_CODE (val) == REAL_CST 6257 || TREE_CODE (val) == REAL_CST
5933 vec.quick_push (val); 6259 vec.quick_push (val);
5934 else 6260 else
5935 return NULL_TREE; 6261 return NULL_TREE;
5936 } 6262 }
5937 6263
5938 return build_vector (TREE_TYPE (rhs), vec); 6264 return vec.build ();
5939 } 6265 }
5940 if (subcode == OBJ_TYPE_REF) 6266 if (subcode == OBJ_TYPE_REF)
5941 { 6267 {
5942 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs)); 6268 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
5943 /* If callee is constant, we can fold away the wrapper. */ 6269 /* If callee is constant, we can fold away the wrapper. */
6113 return NULL_TREE; 6439 return NULL_TREE;
6114 } 6440 }
6115 6441
6116 fn = (*valueize) (gimple_call_fn (stmt)); 6442 fn = (*valueize) (gimple_call_fn (stmt));
6117 if (TREE_CODE (fn) == ADDR_EXPR 6443 if (TREE_CODE (fn) == ADDR_EXPR
6118 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL 6444 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
6119 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
6120 && gimple_builtin_call_types_compatible_p (stmt, 6445 && gimple_builtin_call_types_compatible_p (stmt,
6121 TREE_OPERAND (fn, 0))) 6446 TREE_OPERAND (fn, 0)))
6122 { 6447 {
6123 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt)); 6448 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
6124 tree retval; 6449 tree retval;
6169 6494
6170 As a special case, return error_mark_node when constructor 6495 As a special case, return error_mark_node when constructor
6171 is not explicitly available, but it is known to be zero 6496 is not explicitly available, but it is known to be zero
6172 such as 'static const int a;'. */ 6497 such as 'static const int a;'. */
6173 static tree 6498 static tree
6174 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset, 6499 get_base_constructor (tree base, poly_int64_pod *bit_offset,
6175 tree (*valueize)(tree)) 6500 tree (*valueize)(tree))
6176 { 6501 {
6177 HOST_WIDE_INT bit_offset2, size, max_size; 6502 poly_int64 bit_offset2, size, max_size;
6178 bool reverse; 6503 bool reverse;
6179 6504
6180 if (TREE_CODE (base) == MEM_REF) 6505 if (TREE_CODE (base) == MEM_REF)
6181 { 6506 {
6182 if (!integer_zerop (TREE_OPERAND (base, 1))) 6507 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
6183 { 6508 if (!boff.to_shwi (bit_offset))
6184 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1))) 6509 return NULL_TREE;
6185 return NULL_TREE;
6186 *bit_offset += (mem_ref_offset (base).to_short_addr ()
6187 * BITS_PER_UNIT);
6188 }
6189 6510
6190 if (valueize 6511 if (valueize
6191 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME) 6512 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6192 base = valueize (TREE_OPERAND (base, 0)); 6513 base = valueize (TREE_OPERAND (base, 0));
6193 if (!base || TREE_CODE (base) != ADDR_EXPR) 6514 if (!base || TREE_CODE (base) != ADDR_EXPR)
6224 6545
6225 case ARRAY_REF: 6546 case ARRAY_REF:
6226 case COMPONENT_REF: 6547 case COMPONENT_REF:
6227 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size, 6548 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6228 &reverse); 6549 &reverse);
6229 if (max_size == -1 || size != max_size) 6550 if (!known_size_p (max_size) || maybe_ne (size, max_size))
6230 return NULL_TREE; 6551 return NULL_TREE;
6231 *bit_offset += bit_offset2; 6552 *bit_offset += bit_offset2;
6232 return get_base_constructor (base, bit_offset, valueize); 6553 return get_base_constructor (base, bit_offset, valueize);
6233 6554
6234 case CONSTRUCTOR: 6555 case CONSTRUCTOR:
6240 6561
6241 return NULL_TREE; 6562 return NULL_TREE;
6242 } 6563 }
6243 } 6564 }
6244 6565
6245 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size 6566 /* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
6246 SIZE to the memory at bit OFFSET. */ 6567 to the memory at bit OFFSET. When non-null, TYPE is the expected
6568 type of the reference; otherwise the type of the referenced element
6569 is used instead. When SIZE is zero, attempt to fold a reference to
6570 the entire element which OFFSET refers to. Increment *SUBOFF by
6571 the bit offset of the accessed element. */
6247 6572
6248 static tree 6573 static tree
6249 fold_array_ctor_reference (tree type, tree ctor, 6574 fold_array_ctor_reference (tree type, tree ctor,
6250 unsigned HOST_WIDE_INT offset, 6575 unsigned HOST_WIDE_INT offset,
6251 unsigned HOST_WIDE_INT size, 6576 unsigned HOST_WIDE_INT size,
6252 tree from_decl) 6577 tree from_decl,
6578 unsigned HOST_WIDE_INT *suboff)
6253 { 6579 {
6254 offset_int low_bound; 6580 offset_int low_bound;
6255 offset_int elt_size; 6581 offset_int elt_size;
6256 offset_int access_index; 6582 offset_int access_index;
6257 tree domain_type = NULL_TREE; 6583 tree domain_type = NULL_TREE;
6272 /* Static constructors for variably sized objects make no sense. */ 6598 /* Static constructors for variably sized objects make no sense. */
6273 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST) 6599 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6274 return NULL_TREE; 6600 return NULL_TREE;
6275 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))); 6601 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
6276 6602
6277 /* We can handle only constantly sized accesses that are known to not 6603 /* When TYPE is non-null, verify that it specifies a constant-sized
6278 be larger than size of array element. */ 6604 access not larger than the size of an array element. */
6279 if (!TYPE_SIZE_UNIT (type) 6605 if (type
6280 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST 6606 && (!TYPE_SIZE_UNIT (type)
6281 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type)) 6607 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
6282 || elt_size == 0) 6608 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
6609 || elt_size == 0))
6283 return NULL_TREE; 6610 return NULL_TREE;
6284 6611
6285 /* Compute the array index we look for. */ 6612 /* Compute the array index we look for. */
6286 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT), 6613 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6287 elt_size); 6614 elt_size);
6293 /* See if the array field is large enough to span whole access. We do not 6620 /* See if the array field is large enough to span whole access. We do not
6294 care to fold accesses spanning multiple array indexes. */ 6621 care to fold accesses spanning multiple array indexes. */
6295 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT) 6622 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
6296 return NULL_TREE; 6623 return NULL_TREE;
6297 if (tree val = get_array_ctor_element_at_index (ctor, access_index)) 6624 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
6298 return fold_ctor_reference (type, val, inner_offset, size, from_decl); 6625 {
6299 6626 if (!size && TREE_CODE (val) != CONSTRUCTOR)
6300 /* When memory is not explicitly mentioned in constructor, 6627 {
6301 it is 0 (or out of range). */ 6628 /* For the final reference to the entire accessed element
6302 return build_zero_cst (type); 6629 (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
6303 } 6630 may be null) in favor of the type of the element, and set
6304 6631 SIZE to the size of the accessed element. */
6305 /* CTOR is CONSTRUCTOR of an aggregate or vector. 6632 inner_offset = 0;
6306 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */ 6633 type = TREE_TYPE (val);
6634 size = elt_size.to_uhwi () * BITS_PER_UNIT;
6635 }
6636
6637 *suboff += (access_index * elt_size * BITS_PER_UNIT).to_uhwi ();
6638 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
6639 suboff);
6640 }
6641
6642 /* Memory not explicitly mentioned in constructor is 0 (or
6643 the reference is out of range). */
6644 return type ? build_zero_cst (type) : NULL_TREE;
6645 }
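Illustration (editor's sketch, not part of the patch): the kind of access the routine above resolves; the declarations are hypothetical.

static const int a[4] = { 10, 20, 30, 40 };
static const int b[4] = { 10 };     /* elements 1..3 not in the CONSTRUCTOR */

int f (void) { return a[2]; }       /* folds to 30 */
int g (void) { return b[3]; }       /* folds to 0: memory not mentioned is 0 */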
6646
6647 /* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
6648 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
6649 is the expected type of the reference; otherwise the type of
6650 the referenced member is used instead. When SIZE is zero,
6651 attempt to fold a reference to the entire member which OFFSET
6652 refers to; in this case. Increment *SUBOFF by the bit offset
6653 of the accessed member. */
6307 6654
6308 static tree 6655 static tree
6309 fold_nonarray_ctor_reference (tree type, tree ctor, 6656 fold_nonarray_ctor_reference (tree type, tree ctor,
6310 unsigned HOST_WIDE_INT offset, 6657 unsigned HOST_WIDE_INT offset,
6311 unsigned HOST_WIDE_INT size, 6658 unsigned HOST_WIDE_INT size,
6312 tree from_decl) 6659 tree from_decl,
6660 unsigned HOST_WIDE_INT *suboff)
6313 { 6661 {
6314 unsigned HOST_WIDE_INT cnt; 6662 unsigned HOST_WIDE_INT cnt;
6315 tree cfield, cval; 6663 tree cfield, cval;
6316 6664
6317 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, 6665 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6318 cval) 6666 cval)
6319 { 6667 {
6320 tree byte_offset = DECL_FIELD_OFFSET (cfield); 6668 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6321 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield); 6669 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6322 tree field_size = DECL_SIZE (cfield); 6670 tree field_size = DECL_SIZE (cfield);
6323 offset_int bitoffset; 6671
6324 offset_int bitoffset_end, access_end; 6672 if (!field_size)
6673 {
6674 /* Determine the size of the flexible array member from
6675 the size of the initializer provided for it. */
6676 field_size = TYPE_SIZE (TREE_TYPE (cval));
6677 }
6325 6678
6326 /* Variable sized objects in static constructors make no sense, 6679 /* Variable sized objects in static constructors make no sense,
6327 but field_size can be NULL for flexible array members. */ 6680 but field_size can be NULL for flexible array members. */
6328 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST 6681 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6329 && TREE_CODE (byte_offset) == INTEGER_CST 6682 && TREE_CODE (byte_offset) == INTEGER_CST
6330 && (field_size != NULL_TREE 6683 && (field_size != NULL_TREE
6331 ? TREE_CODE (field_size) == INTEGER_CST 6684 ? TREE_CODE (field_size) == INTEGER_CST
6332 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE)); 6685 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6333 6686
6334 /* Compute bit offset of the field. */ 6687 /* Compute bit offset of the field. */
6335 bitoffset = (wi::to_offset (field_offset) 6688 offset_int bitoffset
6336 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT)); 6689 = (wi::to_offset (field_offset)
6690 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
6337 /* Compute bit offset where the field ends. */ 6691 /* Compute bit offset where the field ends. */
6692 offset_int bitoffset_end;
6338 if (field_size != NULL_TREE) 6693 if (field_size != NULL_TREE)
6339 bitoffset_end = bitoffset + wi::to_offset (field_size); 6694 bitoffset_end = bitoffset + wi::to_offset (field_size);
6340 else 6695 else
6341 bitoffset_end = 0; 6696 bitoffset_end = 0;
6342 6697
6343 access_end = offset_int (offset) + size; 6698 /* Compute the bit offset of the end of the desired access.
6344 6699 As a special case, if the size of the desired access is
6345 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and 6700 zero, assume the access is to the entire field (and let
6346 [BITOFFSET, BITOFFSET_END)? */ 6701 the caller make any necessary adjustments by storing
6702 the actual bounds of the field in FIELDBOUNDS). */
6703 offset_int access_end = offset_int (offset);
6704 if (size)
6705 access_end += size;
6706 else
6707 access_end = bitoffset_end;
6708
6709 /* Is there any overlap between the desired access at
6710 [OFFSET, OFFSET+SIZE) and the offset of the field within
6711 the object at [BITOFFSET, BITOFFSET_END)? */
6347 if (wi::cmps (access_end, bitoffset) > 0 6712 if (wi::cmps (access_end, bitoffset) > 0
6348 && (field_size == NULL_TREE 6713 && (field_size == NULL_TREE
6349 || wi::lts_p (offset, bitoffset_end))) 6714 || wi::lts_p (offset, bitoffset_end)))
6350 { 6715 {
6351 offset_int inner_offset = offset_int (offset) - bitoffset; 6716 *suboff += bitoffset.to_uhwi ();
6352 /* We do have overlap. Now see if field is large enough to 6717
6353 cover the access. Give up for accesses spanning multiple 6718 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
6354 fields. */ 6719 {
6720 /* For the final reference to the entire accessed member
6721 (SIZE is zero), reset OFFSET, disregard TYPE (which may
6722 be null) in favor of the type of the member, and set
6723 SIZE to the size of the accessed member. */
6724 offset = bitoffset.to_uhwi ();
6725 type = TREE_TYPE (cval);
6726 size = (bitoffset_end - bitoffset).to_uhwi ();
6727 }
6728
6729 /* We do have overlap. Now see if the field is large enough
6730 to cover the access. Give up for accesses that extend
6731 beyond the end of the object or that span multiple fields. */
6355 if (wi::cmps (access_end, bitoffset_end) > 0) 6732 if (wi::cmps (access_end, bitoffset_end) > 0)
6356 return NULL_TREE; 6733 return NULL_TREE;
6357 if (offset < bitoffset) 6734 if (offset < bitoffset)
6358 return NULL_TREE; 6735 return NULL_TREE;
6736
6737 offset_int inner_offset = offset_int (offset) - bitoffset;
6359 return fold_ctor_reference (type, cval, 6738 return fold_ctor_reference (type, cval,
6360 inner_offset.to_uhwi (), size, 6739 inner_offset.to_uhwi (), size,
6361 from_decl); 6740 from_decl, suboff);
6362 } 6741 }
6363 } 6742 }
6364 /* When memory is not explicitly mentioned in constructor, it is 0. */ 6743 /* Memory not explicitly mentioned in constructor is 0. */
6365 return build_zero_cst (type); 6744 return type ? build_zero_cst (type) : NULL_TREE;
6366 } 6745 }
6367 6746
6368 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE 6747 /* CTOR is value initializing memory. Fold a reference of TYPE and
6369 to the memory at bit OFFSET. */ 6748 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When SIZE
6749 is zero, attempt to fold a reference to the entire subobject
6750 which OFFSET refers to. This is used when folding accesses to
6751 string members of aggregates. When non-null, set *SUBOFF to
6752 the bit offset of the accessed subobject. */
6370 6753
6371 tree 6754 tree
6372 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset, 6755 fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
6373 unsigned HOST_WIDE_INT size, tree from_decl) 6756 const poly_uint64 &poly_size, tree from_decl,
6757 unsigned HOST_WIDE_INT *suboff /* = NULL */)
6374 { 6758 {
6375 tree ret; 6759 tree ret;
6376 6760
6377 /* We found the field with exact match. */ 6761 /* We found the field with exact match. */
6378 if (useless_type_conversion_p (type, TREE_TYPE (ctor)) 6762 if (type
6379 && !offset) 6763 && useless_type_conversion_p (type, TREE_TYPE (ctor))
6764 && known_eq (poly_offset, 0U))
6380 return canonicalize_constructor_val (unshare_expr (ctor), from_decl); 6765 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6766
6767 /* The remaining optimizations need a constant size and offset. */
6768 unsigned HOST_WIDE_INT size, offset;
6769 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
6770 return NULL_TREE;
6381 6771
6382 /* We are at the end of walk, see if we can view convert the 6772 /* We are at the end of walk, see if we can view convert the
6383 result. */ 6773 result. */
6384 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset 6774 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6385 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */ 6775 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
6409 if (len > 0) 6799 if (len > 0)
6410 return native_interpret_expr (type, buf, len); 6800 return native_interpret_expr (type, buf, len);
6411 } 6801 }
6412 if (TREE_CODE (ctor) == CONSTRUCTOR) 6802 if (TREE_CODE (ctor) == CONSTRUCTOR)
6413 { 6803 {
6804 unsigned HOST_WIDE_INT dummy = 0;
6805 if (!suboff)
6806 suboff = &dummy;
6414 6807
6415 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE 6808 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6416 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE) 6809 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
6417 return fold_array_ctor_reference (type, ctor, offset, size, 6810 return fold_array_ctor_reference (type, ctor, offset, size,
6418 from_decl); 6811 from_decl, suboff);
6419 else 6812
6420 return fold_nonarray_ctor_reference (type, ctor, offset, size, 6813 return fold_nonarray_ctor_reference (type, ctor, offset, size,
6421 from_decl); 6814 from_decl, suboff);
6422 } 6815 }
6423 6816
6424 return NULL_TREE; 6817 return NULL_TREE;
6425 } 6818 }
6426 6819
6430 6823
6431 tree 6824 tree
6432 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree)) 6825 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6433 { 6826 {
6434 tree ctor, idx, base; 6827 tree ctor, idx, base;
6435 HOST_WIDE_INT offset, size, max_size; 6828 poly_int64 offset, size, max_size;
6436 tree tem; 6829 tree tem;
6437 bool reverse; 6830 bool reverse;
6438 6831
6439 if (TREE_THIS_VOLATILE (t)) 6832 if (TREE_THIS_VOLATILE (t))
6440 return NULL_TREE; 6833 return NULL_TREE;
6456 (they will be handled only by iteration of ccp). Perhaps we can bring 6849 (they will be handled only by iteration of ccp). Perhaps we can bring
6457 get_ref_base_and_extent here and make it use a valueize callback. */ 6850 get_ref_base_and_extent here and make it use a valueize callback. */
6458 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME 6851 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6459 && valueize 6852 && valueize
6460 && (idx = (*valueize) (TREE_OPERAND (t, 1))) 6853 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
6461 && TREE_CODE (idx) == INTEGER_CST) 6854 && poly_int_tree_p (idx))
6462 { 6855 {
6463 tree low_bound, unit_size; 6856 tree low_bound, unit_size;
6464 6857
6465 /* If the resulting bit-offset is constant, track it. */ 6858 /* If the resulting bit-offset is constant, track it. */
6466 if ((low_bound = array_ref_low_bound (t), 6859 if ((low_bound = array_ref_low_bound (t),
6467 TREE_CODE (low_bound) == INTEGER_CST) 6860 poly_int_tree_p (low_bound))
6468 && (unit_size = array_ref_element_size (t), 6861 && (unit_size = array_ref_element_size (t),
6469 tree_fits_uhwi_p (unit_size))) 6862 tree_fits_uhwi_p (unit_size)))
6470 { 6863 {
6471 offset_int woffset 6864 poly_offset_int woffset
6472 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound), 6865 = wi::sext (wi::to_poly_offset (idx)
6866 - wi::to_poly_offset (low_bound),
6473 TYPE_PRECISION (TREE_TYPE (idx))); 6867 TYPE_PRECISION (TREE_TYPE (idx)));
6474 6868
6475 if (wi::fits_shwi_p (woffset)) 6869 if (woffset.to_shwi (&offset))
6476 { 6870 {
6477 offset = woffset.to_shwi ();
6478 /* TODO: This code seems wrong, multiply then check 6871 /* TODO: This code seems wrong, multiply then check
6479 to see if it fits. */ 6872 to see if it fits. */
6480 offset *= tree_to_uhwi (unit_size); 6873 offset *= tree_to_uhwi (unit_size);
6481 offset *= BITS_PER_UNIT; 6874 offset *= BITS_PER_UNIT;
6482 6875
6485 /* Empty constructor. Always fold to 0. */ 6878 /* Empty constructor. Always fold to 0. */
6486 if (ctor == error_mark_node) 6879 if (ctor == error_mark_node)
6487 return build_zero_cst (TREE_TYPE (t)); 6880 return build_zero_cst (TREE_TYPE (t));
6488 /* Out of bound array access. Value is undefined, 6881 /* Out of bound array access. Value is undefined,
6489 but don't fold. */ 6882 but don't fold. */
6490 if (offset < 0) 6883 if (maybe_lt (offset, 0))
6491 return NULL_TREE; 6884 return NULL_TREE;
6492 /* We can not determine ctor. */ 6885 /* We can not determine ctor. */
6493 if (!ctor) 6886 if (!ctor)
6494 return NULL_TREE; 6887 return NULL_TREE;
6495 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, 6888 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
6510 6903
6511 /* Empty constructor. Always fold to 0. */ 6904 /* Empty constructor. Always fold to 0. */
6512 if (ctor == error_mark_node) 6905 if (ctor == error_mark_node)
6513 return build_zero_cst (TREE_TYPE (t)); 6906 return build_zero_cst (TREE_TYPE (t));
6514 /* We do not know precise address. */ 6907 /* We do not know precise address. */
6515 if (max_size == -1 || max_size != size) 6908 if (!known_size_p (max_size) || maybe_ne (max_size, size))
6516 return NULL_TREE; 6909 return NULL_TREE;
6517 /* We can not determine ctor. */ 6910 /* We can not determine ctor. */
6518 if (!ctor) 6911 if (!ctor)
6519 return NULL_TREE; 6912 return NULL_TREE;
6520 6913
6521 /* Out of bound array access. Value is undefined, but don't fold. */ 6914 /* Out of bound array access. Value is undefined, but don't fold. */
6522 if (offset < 0) 6915 if (maybe_lt (offset, 0))
6523 return NULL_TREE; 6916 return NULL_TREE;
6524 6917
6525 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size, 6918 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
6526 base); 6919 base);
6527 6920
6584 folding. At the moment we do not stream them in all cases, 6977 folding. At the moment we do not stream them in all cases,
6585 but it should never happen that the ctor seems unreachable. */ 6978
6586 gcc_assert (init); 6979 gcc_assert (init);
6587 if (init == error_mark_node) 6980 if (init == error_mark_node)
6588 { 6981 {
6589 gcc_assert (in_lto_p);
6590 /* Pass down that we lost track of the target. */ 6982 /* Pass down that we lost track of the target. */
6591 if (can_refer) 6983 if (can_refer)
6592 *can_refer = false; 6984 *can_refer = false;
6593 return NULL_TREE; 6985 return NULL_TREE;
6594 } 6986 }
6610 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init)))); 7002 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
6611 7003
6612 access_index = offset / BITS_PER_UNIT / elt_size; 7004 access_index = offset / BITS_PER_UNIT / elt_size;
6613 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0); 7005 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
6614 7006
6615 /* This code makes an assumption that there are no 7007 /* The C++ FE can now produce indexed fields, and we check if the indexes
6616 indexed fields produced by C++ FE, so we can directly index the array. */ 7008 match. */
6617 if (access_index < CONSTRUCTOR_NELTS (init)) 7009 if (access_index < CONSTRUCTOR_NELTS (init))
6618 { 7010 {
6619 fn = CONSTRUCTOR_ELT (init, access_index)->value; 7011 fn = CONSTRUCTOR_ELT (init, access_index)->value;
6620 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index); 7012 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
7013 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
6621 STRIP_NOPS (fn); 7014 STRIP_NOPS (fn);
6622 } 7015 }
6623 else 7016 else
6624 fn = NULL; 7017 fn = NULL;
6625 7018
6761 tree part_width = TYPE_SIZE (type); 7154 tree part_width = TYPE_SIZE (type);
6762 unsigned HOST_WIDE_INT part_widthi 7155 unsigned HOST_WIDE_INT part_widthi
6763 = tree_to_shwi (part_width) / BITS_PER_UNIT; 7156 = tree_to_shwi (part_width) / BITS_PER_UNIT;
6764 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT; 7157 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
6765 tree index = bitsize_int (indexi); 7158 tree index = bitsize_int (indexi);
6766 if (offset / part_widthi 7159 if (known_lt (offset / part_widthi,
6767 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))) 7160 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
6768 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0), 7161 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
6769 part_width, index); 7162 part_width, index);
6770 } 7163 }
6771 7164
6772 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */ 7165 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
6955 simplifying it first if possible. Returns the built 7348 simplifying it first if possible. Returns the built
6956 expression value (or NULL_TREE if TYPE is void) and appends 7349 expression value (or NULL_TREE if TYPE is void) and appends
6957 statements possibly defining it to SEQ. */ 7350 statements possibly defining it to SEQ. */
6958 7351
6959 tree 7352 tree
6960 gimple_build (gimple_seq *seq, location_t loc, 7353 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
6961 enum built_in_function fn, tree type, tree arg0) 7354 tree type, tree arg0)
6962 { 7355 {
6963 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize); 7356 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
6964 if (!res) 7357 if (!res)
6965 { 7358 {
6966 tree decl = builtin_decl_implicit (fn); 7359 gcall *stmt;
6967 gimple *stmt = gimple_build_call (decl, 1, arg0); 7360 if (internal_fn_p (fn))
7361 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
7362 else
7363 {
7364 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7365 stmt = gimple_build_call (decl, 1, arg0);
7366 }
6968 if (!VOID_TYPE_P (type)) 7367 if (!VOID_TYPE_P (type))
6969 { 7368 {
6970 res = create_tmp_reg_or_ssa_name (type); 7369 res = create_tmp_reg_or_ssa_name (type);
6971 gimple_call_set_lhs (stmt, res); 7370 gimple_call_set_lhs (stmt, res);
6972 } 7371 }
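Usage sketch (editor's addition, not part of the patch; GCC-internal, assuming this file's usual includes and a caller that already has GSI, LOC and X): the combined_fn overload lets a caller request a function without caring whether it is emitted as a builtin or as an internal call. CFN_BUILT_IN_SQRT is just one example value.

static tree
emit_sqrt (gimple_stmt_iterator *gsi, location_t loc, tree x)
{
  gimple_seq seq = NULL;
  tree res = gimple_build (&seq, loc, CFN_BUILT_IN_SQRT,
                           TREE_TYPE (x), x);
  gsi_insert_seq_before (gsi, seq, GSI_SAME_STMT);
  return res;
}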
6981 simplifying it first if possible. Returns the built 7380 simplifying it first if possible. Returns the built
6982 expression value (or NULL_TREE if TYPE is void) and appends 7381 expression value (or NULL_TREE if TYPE is void) and appends
6983 statements possibly defining it to SEQ. */ 7382 statements possibly defining it to SEQ. */
6984 7383
6985 tree 7384 tree
6986 gimple_build (gimple_seq *seq, location_t loc, 7385 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
6987 enum built_in_function fn, tree type, tree arg0, tree arg1) 7386 tree type, tree arg0, tree arg1)
6988 { 7387 {
6989 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize); 7388 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
6990 if (!res) 7389 if (!res)
6991 { 7390 {
6992 tree decl = builtin_decl_implicit (fn); 7391 gcall *stmt;
6993 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1); 7392 if (internal_fn_p (fn))
7393 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
7394 else
7395 {
7396 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7397 stmt = gimple_build_call (decl, 2, arg0, arg1);
7398 }
6994 if (!VOID_TYPE_P (type)) 7399 if (!VOID_TYPE_P (type))
6995 { 7400 {
6996 res = create_tmp_reg_or_ssa_name (type); 7401 res = create_tmp_reg_or_ssa_name (type);
6997 gimple_call_set_lhs (stmt, res); 7402 gimple_call_set_lhs (stmt, res);
6998 } 7403 }
7007 simplifying it first if possible. Returns the built 7412 simplifying it first if possible. Returns the built
7008 expression value (or NULL_TREE if TYPE is void) and appends 7413 expression value (or NULL_TREE if TYPE is void) and appends
7009 statements possibly defining it to SEQ. */ 7414 statements possibly defining it to SEQ. */
7010 7415
7011 tree 7416 tree
7012 gimple_build (gimple_seq *seq, location_t loc, 7417 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7013 enum built_in_function fn, tree type, 7418 tree type, tree arg0, tree arg1, tree arg2)
7014 tree arg0, tree arg1, tree arg2)
7015 { 7419 {
7016 tree res = gimple_simplify (fn, type, arg0, arg1, arg2, 7420 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7017 seq, gimple_build_valueize); 7421 seq, gimple_build_valueize);
7018 if (!res) 7422 if (!res)
7019 { 7423 {
7020 tree decl = builtin_decl_implicit (fn); 7424 gcall *stmt;
7021 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2); 7425 if (internal_fn_p (fn))
7426 stmt = gimple_build_call_internal (as_internal_fn (fn),
7427 3, arg0, arg1, arg2);
7428 else
7429 {
7430 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7431 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7432 }
7022 if (!VOID_TYPE_P (type)) 7433 if (!VOID_TYPE_P (type))
7023 { 7434 {
7024 res = create_tmp_reg_or_ssa_name (type); 7435 res = create_tmp_reg_or_ssa_name (type);
7025 gimple_call_set_lhs (stmt, res); 7436 gimple_call_set_lhs (stmt, res);
7026 } 7437 }
7064 7475
7065 tree 7476 tree
7066 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type, 7477 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7067 tree op) 7478 tree op)
7068 { 7479 {
7480 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7481 && !CONSTANT_CLASS_P (op))
7482 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7483
7069 tree res, vec = build_vector_from_val (type, op); 7484 tree res, vec = build_vector_from_val (type, op);
7070 if (is_gimple_val (vec)) 7485 if (is_gimple_val (vec))
7071 return vec; 7486 return vec;
7072 if (gimple_in_ssa_p (cfun)) 7487 if (gimple_in_ssa_p (cfun))
7073 res = make_ssa_name (type); 7488 res = make_ssa_name (type);
7077 gimple_set_location (stmt, loc); 7492 gimple_set_location (stmt, loc);
7078 gimple_seq_add_stmt_without_update (seq, stmt); 7493 gimple_seq_add_stmt_without_update (seq, stmt);
7079 return res; 7494 return res;
7080 } 7495 }
7081 7496
7082 /* Build a vector of type TYPE in which the elements have the values 7497 /* Build a vector from BUILDER, handling the case in which some elements
7083 given by ELTS. Return a gimple value for the result, appending any 7498 are non-constant. Return a gimple value for the result, appending any
7084 new instructions to SEQ. */ 7499 new instructions to SEQ.
7500
7501 BUILDER must not have a stepped encoding on entry. This is because
7502 the function is not geared up to handle the arithmetic that would
7503 be needed in the variable case, and any code building a vector that
7504 is known to be constant should use BUILDER->build () directly. */
7085 7505
7086 tree 7506 tree
7087 gimple_build_vector (gimple_seq *seq, location_t loc, tree type, 7507 gimple_build_vector (gimple_seq *seq, location_t loc,
7088 vec<tree> elts) 7508 tree_vector_builder *builder)
7089 { 7509 {
7090 unsigned int nelts = elts.length (); 7510 gcc_assert (builder->nelts_per_pattern () <= 2);
7091 gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type)); 7511 unsigned int encoded_nelts = builder->encoded_nelts ();
7092 for (unsigned int i = 0; i < nelts; ++i) 7512 for (unsigned int i = 0; i < encoded_nelts; ++i)
7093 if (!TREE_CONSTANT (elts[i])) 7513 if (!TREE_CONSTANT ((*builder)[i]))
7094 { 7514 {
7515 tree type = builder->type ();
7516 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
7095 vec<constructor_elt, va_gc> *v; 7517 vec<constructor_elt, va_gc> *v;
7096 vec_alloc (v, nelts); 7518 vec_alloc (v, nelts);
7097 for (i = 0; i < nelts; ++i) 7519 for (i = 0; i < nelts; ++i)
7098 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[i]); 7520 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
7099 7521
7100 tree res; 7522 tree res;
7101 if (gimple_in_ssa_p (cfun)) 7523 if (gimple_in_ssa_p (cfun))
7102 res = make_ssa_name (type); 7524 res = make_ssa_name (type);
7103 else 7525 else
7105 gimple *stmt = gimple_build_assign (res, build_constructor (type, v)); 7527 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7106 gimple_set_location (stmt, loc); 7528 gimple_set_location (stmt, loc);
7107 gimple_seq_add_stmt_without_update (seq, stmt); 7529 gimple_seq_add_stmt_without_update (seq, stmt);
7108 return res; 7530 return res;
7109 } 7531 }
7110 return build_vector (type, elts); 7532 return builder->build ();
7111 } 7533 }
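Usage sketch (editor's addition, not part of the patch; GCC-internal): callers now fill a tree_vector_builder and pass its address instead of a vec<tree>. The helper below and its assumption of a two-lane VECTYPE are hypothetical.

static tree
build_pair_vector (gimple_seq *seq, location_t loc, tree vectype,
                   tree elt0, tree elt1)
{
  /* Full encoding: 2 patterns of 1 element each, so no stepped
     encoding, as gimple_build_vector requires.  */
  tree_vector_builder builder (vectype, 2, 1);
  builder.quick_push (elt0);
  builder.quick_push (elt1);
  return gimple_build_vector (seq, loc, &builder);
}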
7112 7534
7113 /* Return true if the result of assignment STMT is known to be non-negative. 7535 /* Return true if the result of assignment STMT is known to be non-negative.
7114 If the return value is based on the assumption that signed overflow is 7536 If the return value is based on the assumption that signed overflow is
7115 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change 7537 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change