comparison gcc/fold-const.c @ 132:d34655255c78

update gcc-8.2
author mir3636
date Thu, 25 Oct 2018 10:21:07 +0900
parents 84e7813d76e9
children 1830386684a0
comparing 130:e108057fa461 with 132:d34655255c78
1 /* Fold a constant sub-tree into a single node for C-compiler 1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2017 Free Software Foundation, Inc. 2 Copyright (C) 1987-2018 Free Software Foundation, Inc.
3 3
4 This file is part of GCC. 4 This file is part of GCC.
5 5
6 GCC is free software; you can redistribute it and/or modify it under 6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free 7 the terms of the GNU General Public License as published by the Free
79 #include "tree-vrp.h" 79 #include "tree-vrp.h"
80 #include "tree-ssanames.h" 80 #include "tree-ssanames.h"
81 #include "selftest.h" 81 #include "selftest.h"
82 #include "stringpool.h" 82 #include "stringpool.h"
83 #include "attribs.h" 83 #include "attribs.h"
84 #include "tree-vector-builder.h"
85 #include "vec-perm-indices.h"
84 86
85 /* Nonzero if we are folding constants inside an initializer; zero 87 /* Nonzero if we are folding constants inside an initializer; zero
86 otherwise. */ 88 otherwise. */
87 int folding_initializer = 0; 89 int folding_initializer = 0;
88 90
111 static bool negate_expr_p (tree); 113 static bool negate_expr_p (tree);
112 static tree negate_expr (tree); 114 static tree negate_expr (tree);
113 static tree associate_trees (location_t, tree, tree, enum tree_code, tree); 115 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
114 static enum comparison_code comparison_to_compcode (enum tree_code); 116 static enum comparison_code comparison_to_compcode (enum tree_code);
115 static enum tree_code compcode_to_comparison (enum comparison_code); 117 static enum tree_code compcode_to_comparison (enum comparison_code);
116 static int twoval_comparison_p (tree, tree *, tree *, int *); 118 static int twoval_comparison_p (tree, tree *, tree *);
117 static tree eval_subst (location_t, tree, tree, tree, tree, tree); 119 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
118 static tree optimize_bit_field_compare (location_t, enum tree_code, 120 static tree optimize_bit_field_compare (location_t, enum tree_code,
119 tree, tree, tree); 121 tree, tree, tree);
120 static int simple_operand_p (const_tree); 122 static int simple_operand_p (const_tree);
121 static bool simple_operand_p_2 (tree); 123 static bool simple_operand_p_2 (tree);
407 case VECTOR_CST: 409 case VECTOR_CST:
408 { 410 {
409 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type)) 411 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
410 return true; 412 return true;
411 413
412 int count = VECTOR_CST_NELTS (t), i; 414 /* Steps don't prevent negation. */
413 415 unsigned int count = vector_cst_encoded_nelts (t);
414 for (i = 0; i < count; i++) 416 for (unsigned int i = 0; i < count; ++i)
415 if (!negate_expr_p (VECTOR_CST_ELT (t, i))) 417 if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
416 return false; 418 return false;
417 419
418 return true; 420 return true;
419 } 421 }
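
A note on the new encoding walked by this loop: a VECTOR_CST now stores only an encoded prefix of its elements. With one pattern and three elements per pattern, the encoding a1, a2, a3 stands for a1 followed by the series a2, a2 + S, a2 + 2S, ... with S = a3 - a2, so negating the encoded elements negates every element of the full vector — which is why checking up to vector_cst_encoded_nelts is enough. A standalone sketch (plain longs instead of trees, not part of the patch):

#include <assert.h>

/* encoded[] holds a1, a2, a3 of a single stepped pattern; elt() expands
   element i of the full vector from the encoding.  */
static long
elt (const long encoded[3], unsigned int i)
{
  if (i == 0)
    return encoded[0];
  return encoded[1] + (long) (i - 1) * (encoded[2] - encoded[1]);
}

int
main (void)
{
  long e[3] = { 1, 11, 21 };     /* encodes { 1, 11, 21, 31, 41, ... } */
  long n[3] = { -1, -11, -21 };  /* the element-wise negated encoding */
  for (unsigned int i = 0; i < 8; i++)
    assert (elt (n, i) == -elt (e, i));  /* negating the encoding negates all */
  return 0;
}
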
420 422
426 return negate_expr_p (TREE_OPERAND (t, 0)); 428 return negate_expr_p (TREE_OPERAND (t, 0));
427 429
428 case PLUS_EXPR: 430 case PLUS_EXPR:
429 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)) 431 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
430 || HONOR_SIGNED_ZEROS (element_mode (type)) 432 || HONOR_SIGNED_ZEROS (element_mode (type))
431 || (INTEGRAL_TYPE_P (type) 433 || (ANY_INTEGRAL_TYPE_P (type)
432 && ! TYPE_OVERFLOW_WRAPS (type))) 434 && ! TYPE_OVERFLOW_WRAPS (type)))
433 return false; 435 return false;
434 /* -(A + B) -> (-B) - A. */ 436 /* -(A + B) -> (-B) - A. */
435 if (negate_expr_p (TREE_OPERAND (t, 1))) 437 if (negate_expr_p (TREE_OPERAND (t, 1)))
436 return true; 438 return true;
439 441
440 case MINUS_EXPR: 442 case MINUS_EXPR:
441 /* We can't turn -(A-B) into B-A when we honor signed zeros. */ 443 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
442 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)) 444 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
443 && !HONOR_SIGNED_ZEROS (element_mode (type)) 445 && !HONOR_SIGNED_ZEROS (element_mode (type))
444 && (! INTEGRAL_TYPE_P (type) 446 && (! ANY_INTEGRAL_TYPE_P (type)
445 || TYPE_OVERFLOW_WRAPS (type)); 447 || TYPE_OVERFLOW_WRAPS (type));
446 448
447 case MULT_EXPR: 449 case MULT_EXPR:
448 if (TYPE_UNSIGNED (type)) 450 if (TYPE_UNSIGNED (type))
449 break; 451 break;
470 case TRUNC_DIV_EXPR: 472 case TRUNC_DIV_EXPR:
471 case ROUND_DIV_EXPR: 473 case ROUND_DIV_EXPR:
472 case EXACT_DIV_EXPR: 474 case EXACT_DIV_EXPR:
473 if (TYPE_UNSIGNED (type)) 475 if (TYPE_UNSIGNED (type))
474 break; 476 break;
475 if (negate_expr_p (TREE_OPERAND (t, 0))) 477 /* In general we can't negate A in A / B, because if A is INT_MIN and
478 B is not 1 we change the sign of the result. */
479 if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
480 && negate_expr_p (TREE_OPERAND (t, 0)))
476 return true; 481 return true;
477 /* In general we can't negate B in A / B, because if A is INT_MIN and 482 /* In general we can't negate B in A / B, because if A is INT_MIN and
478 B is 1, we may turn this into INT_MIN / -1 which is undefined 483 B is 1, we may turn this into INT_MIN / -1 which is undefined
479 and actually traps on some architectures. */ 484 and actually traps on some architectures. */
480 if (! INTEGRAL_TYPE_P (TREE_TYPE (t)) 485 if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
481 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t)) 486 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
482 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST 487 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
483 && ! integer_onep (TREE_OPERAND (t, 1)))) 488 && ! integer_onep (TREE_OPERAND (t, 1))))
484 return negate_expr_p (TREE_OPERAND (t, 1)); 489 return negate_expr_p (TREE_OPERAND (t, 1));
485 break; 490 break;
544 && TYPE_OVERFLOW_WRAPS (type)) 549 && TYPE_OVERFLOW_WRAPS (type))
545 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0) 550 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
546 return tem; 551 return tem;
547 break; 552 break;
548 553
554 case POLY_INT_CST:
549 case REAL_CST: 555 case REAL_CST:
550 tem = fold_negate_const (t, type);
551 return tem;
552
553 case FIXED_CST: 556 case FIXED_CST:
554 tem = fold_negate_const (t, type); 557 tem = fold_negate_const (t, type);
555 return tem; 558 return tem;
556 559
557 case COMPLEX_CST: 560 case COMPLEX_CST:
563 } 566 }
564 break; 567 break;
565 568
566 case VECTOR_CST: 569 case VECTOR_CST:
567 { 570 {
568 int count = VECTOR_CST_NELTS (t), i; 571 tree_vector_builder elts;
569 572 elts.new_unary_operation (type, t, true);
570 auto_vec<tree, 32> elts (count); 573 unsigned int count = elts.encoded_nelts ();
571 for (i = 0; i < count; i++) 574 for (unsigned int i = 0; i < count; ++i)
572 { 575 {
573 tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i)); 576 tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
574 if (elt == NULL_TREE) 577 if (elt == NULL_TREE)
575 return NULL_TREE; 578 return NULL_TREE;
576 elts.quick_push (elt); 579 elts.quick_push (elt);
577 } 580 }
578 581
579 return build_vector (type, elts); 582 return elts.build ();
580 } 583 }
581 584
582 case COMPLEX_EXPR: 585 case COMPLEX_EXPR:
583 if (negate_expr_p (t)) 586 if (negate_expr_p (t))
584 return fold_build2_loc (loc, COMPLEX_EXPR, type, 587 return fold_build2_loc (loc, COMPLEX_EXPR, type,
650 case TRUNC_DIV_EXPR: 653 case TRUNC_DIV_EXPR:
651 case ROUND_DIV_EXPR: 654 case ROUND_DIV_EXPR:
652 case EXACT_DIV_EXPR: 655 case EXACT_DIV_EXPR:
653 if (TYPE_UNSIGNED (type)) 656 if (TYPE_UNSIGNED (type))
654 break; 657 break;
655 if (negate_expr_p (TREE_OPERAND (t, 0))) 658 /* In general we can't negate A in A / B, because if A is INT_MIN and
659 B is not 1 we change the sign of the result. */
660 if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
661 && negate_expr_p (TREE_OPERAND (t, 0)))
656 return fold_build2_loc (loc, TREE_CODE (t), type, 662 return fold_build2_loc (loc, TREE_CODE (t), type,
657 negate_expr (TREE_OPERAND (t, 0)), 663 negate_expr (TREE_OPERAND (t, 0)),
658 TREE_OPERAND (t, 1)); 664 TREE_OPERAND (t, 1));
659 /* In general we can't negate B in A / B, because if A is INT_MIN and 665 /* In general we can't negate B in A / B, because if A is INT_MIN and
660 B is 1, we may turn this into INT_MIN / -1 which is undefined 666 B is 1, we may turn this into INT_MIN / -1 which is undefined
661 and actually traps on some architectures. */ 667 and actually traps on some architectures. */
662 if ((! INTEGRAL_TYPE_P (TREE_TYPE (t)) 668 if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
663 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t)) 669 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
664 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST 670 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
665 && ! integer_onep (TREE_OPERAND (t, 1)))) 671 && ! integer_onep (TREE_OPERAND (t, 1))))
666 && negate_expr_p (TREE_OPERAND (t, 1))) 672 && negate_expr_p (TREE_OPERAND (t, 1)))
667 return fold_build2_loc (loc, TREE_CODE (t), type, 673 return fold_build2_loc (loc, TREE_CODE (t), type,
958 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2) 964 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
959 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2) 965 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
960 && TYPE_MODE (type1) == TYPE_MODE (type2); 966 && TYPE_MODE (type1) == TYPE_MODE (type2);
961 } 967 }
962 968
963 969 /* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
964 /* Combine two integer constants PARG1 and PARG2 under operation CODE 970 a new constant in RES. Return FALSE if we don't know how to
965 to produce a new constant. Return NULL_TREE if we don't know how 971 evaluate CODE at compile-time. */
966 to evaluate CODE at compile-time. */ 972
967 973 bool
968 static tree 974 wide_int_binop (wide_int &res,
969 int_const_binop_1 (enum tree_code code, const_tree parg1, const_tree parg2, 975 enum tree_code code, const wide_int &arg1, const wide_int &arg2,
970 int overflowable) 976 signop sign, wi::overflow_type *overflow)
971 { 977 {
972 wide_int res; 978 wide_int tmp;
973 tree t; 979 *overflow = wi::OVF_NONE;
974 tree type = TREE_TYPE (parg1);
975 signop sign = TYPE_SIGN (type);
976 bool overflow = false;
977
978 wi::tree_to_wide_ref arg1 = wi::to_wide (parg1);
979 wide_int arg2 = wi::to_wide (parg2, TYPE_PRECISION (type));
980
981 switch (code) 980 switch (code)
982 { 981 {
983 case BIT_IOR_EXPR: 982 case BIT_IOR_EXPR:
984 res = wi::bit_or (arg1, arg2); 983 res = wi::bit_or (arg1, arg2);
985 break; 984 break;
994 993
995 case RSHIFT_EXPR: 994 case RSHIFT_EXPR:
996 case LSHIFT_EXPR: 995 case LSHIFT_EXPR:
997 if (wi::neg_p (arg2)) 996 if (wi::neg_p (arg2))
998 { 997 {
999 arg2 = -arg2; 998 tmp = -arg2;
1000 if (code == RSHIFT_EXPR) 999 if (code == RSHIFT_EXPR)
1001 code = LSHIFT_EXPR; 1000 code = LSHIFT_EXPR;
1002 else 1001 else
1003 code = RSHIFT_EXPR; 1002 code = RSHIFT_EXPR;
1004 } 1003 }
1004 else
1005 tmp = arg2;
1005 1006
1006 if (code == RSHIFT_EXPR) 1007 if (code == RSHIFT_EXPR)
1007 /* It's unclear from the C standard whether shifts can overflow. 1008 /* It's unclear from the C standard whether shifts can overflow.
1008 The following code ignores overflow; perhaps a C standard 1009 The following code ignores overflow; perhaps a C standard
1009 interpretation ruling is needed. */ 1010 interpretation ruling is needed. */
1010 res = wi::rshift (arg1, arg2, sign); 1011 res = wi::rshift (arg1, tmp, sign);
1011 else 1012 else
1012 res = wi::lshift (arg1, arg2); 1013 res = wi::lshift (arg1, tmp);
1013 break; 1014 break;
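
A negative shift count is canonicalized above by flipping the direction before calling into wi::. A standalone illustration (not GCC code) of the tree-level convention being implemented, where x >> -n means x << n:

#include <assert.h>

int
main (void)
{
  unsigned int x = 0xf0;
  int count = -4;                                 /* tree-level x >> -4 */
  unsigned int r = count < 0 ? x << -count : x >> count;
  assert (r == 0xf00);                            /* canonicalized to x << 4 */
  return 0;
}
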
1014 1015
1015 case RROTATE_EXPR: 1016 case RROTATE_EXPR:
1016 case LROTATE_EXPR: 1017 case LROTATE_EXPR:
1017 if (wi::neg_p (arg2)) 1018 if (wi::neg_p (arg2))
1018 { 1019 {
1019 arg2 = -arg2; 1020 tmp = -arg2;
1020 if (code == RROTATE_EXPR) 1021 if (code == RROTATE_EXPR)
1021 code = LROTATE_EXPR; 1022 code = LROTATE_EXPR;
1022 else 1023 else
1023 code = RROTATE_EXPR; 1024 code = RROTATE_EXPR;
1024 } 1025 }
1026 else
1027 tmp = arg2;
1025 1028
1026 if (code == RROTATE_EXPR) 1029 if (code == RROTATE_EXPR)
1027 res = wi::rrotate (arg1, arg2); 1030 res = wi::rrotate (arg1, tmp);
1028 else 1031 else
1029 res = wi::lrotate (arg1, arg2); 1032 res = wi::lrotate (arg1, tmp);
1030 break; 1033 break;
1031 1034
1032 case PLUS_EXPR: 1035 case PLUS_EXPR:
1033 res = wi::add (arg1, arg2, sign, &overflow); 1036 res = wi::add (arg1, arg2, sign, overflow);
1034 break; 1037 break;
1035 1038
1036 case MINUS_EXPR: 1039 case MINUS_EXPR:
1037 res = wi::sub (arg1, arg2, sign, &overflow); 1040 res = wi::sub (arg1, arg2, sign, overflow);
1038 break; 1041 break;
1039 1042
1040 case MULT_EXPR: 1043 case MULT_EXPR:
1041 res = wi::mul (arg1, arg2, sign, &overflow); 1044 res = wi::mul (arg1, arg2, sign, overflow);
1042 break; 1045 break;
1043 1046
1044 case MULT_HIGHPART_EXPR: 1047 case MULT_HIGHPART_EXPR:
1045 res = wi::mul_high (arg1, arg2, sign); 1048 res = wi::mul_high (arg1, arg2, sign);
1046 break; 1049 break;
1047 1050
1048 case TRUNC_DIV_EXPR: 1051 case TRUNC_DIV_EXPR:
1049 case EXACT_DIV_EXPR: 1052 case EXACT_DIV_EXPR:
1050 if (arg2 == 0) 1053 if (arg2 == 0)
1051 return NULL_TREE; 1054 return false;
1052 res = wi::div_trunc (arg1, arg2, sign, &overflow); 1055 res = wi::div_trunc (arg1, arg2, sign, overflow);
1053 break; 1056 break;
1054 1057
1055 case FLOOR_DIV_EXPR: 1058 case FLOOR_DIV_EXPR:
1056 if (arg2 == 0) 1059 if (arg2 == 0)
1057 return NULL_TREE; 1060 return false;
1058 res = wi::div_floor (arg1, arg2, sign, &overflow); 1061 res = wi::div_floor (arg1, arg2, sign, overflow);
1059 break; 1062 break;
1060 1063
1061 case CEIL_DIV_EXPR: 1064 case CEIL_DIV_EXPR:
1062 if (arg2 == 0) 1065 if (arg2 == 0)
1063 return NULL_TREE; 1066 return false;
1064 res = wi::div_ceil (arg1, arg2, sign, &overflow); 1067 res = wi::div_ceil (arg1, arg2, sign, overflow);
1065 break; 1068 break;
1066 1069
1067 case ROUND_DIV_EXPR: 1070 case ROUND_DIV_EXPR:
1068 if (arg2 == 0) 1071 if (arg2 == 0)
1069 return NULL_TREE; 1072 return false;
1070 res = wi::div_round (arg1, arg2, sign, &overflow); 1073 res = wi::div_round (arg1, arg2, sign, overflow);
1071 break; 1074 break;
1072 1075
1073 case TRUNC_MOD_EXPR: 1076 case TRUNC_MOD_EXPR:
1074 if (arg2 == 0) 1077 if (arg2 == 0)
1075 return NULL_TREE; 1078 return false;
1076 res = wi::mod_trunc (arg1, arg2, sign, &overflow); 1079 res = wi::mod_trunc (arg1, arg2, sign, overflow);
1077 break; 1080 break;
1078 1081
1079 case FLOOR_MOD_EXPR: 1082 case FLOOR_MOD_EXPR:
1080 if (arg2 == 0) 1083 if (arg2 == 0)
1081 return NULL_TREE; 1084 return false;
1082 res = wi::mod_floor (arg1, arg2, sign, &overflow); 1085 res = wi::mod_floor (arg1, arg2, sign, overflow);
1083 break; 1086 break;
1084 1087
1085 case CEIL_MOD_EXPR: 1088 case CEIL_MOD_EXPR:
1086 if (arg2 == 0) 1089 if (arg2 == 0)
1087 return NULL_TREE; 1090 return false;
1088 res = wi::mod_ceil (arg1, arg2, sign, &overflow); 1091 res = wi::mod_ceil (arg1, arg2, sign, overflow);
1089 break; 1092 break;
1090 1093
1091 case ROUND_MOD_EXPR: 1094 case ROUND_MOD_EXPR:
1092 if (arg2 == 0) 1095 if (arg2 == 0)
1093 return NULL_TREE; 1096 return false;
1094 res = wi::mod_round (arg1, arg2, sign, &overflow); 1097 res = wi::mod_round (arg1, arg2, sign, overflow);
1095 break; 1098 break;
1096 1099
1097 case MIN_EXPR: 1100 case MIN_EXPR:
1098 res = wi::min (arg1, arg2, sign); 1101 res = wi::min (arg1, arg2, sign);
1099 break; 1102 break;
1101 case MAX_EXPR: 1104 case MAX_EXPR:
1102 res = wi::max (arg1, arg2, sign); 1105 res = wi::max (arg1, arg2, sign);
1103 break; 1106 break;
1104 1107
1105 default: 1108 default:
1106 return NULL_TREE; 1109 return false;
1107 } 1110 }
1108 1111 return true;
1109 t = force_fit_type (type, res, overflowable, 1112 }
1110 (((sign == SIGNED || overflowable == -1) 1113
1111 && overflow) 1114 /* Combine two poly int's ARG1 and ARG2 under operation CODE to
1112 | TREE_OVERFLOW (parg1) | TREE_OVERFLOW (parg2))); 1115 produce a new constant in RES. Return FALSE if we don't know how
1113 1116 to evaluate CODE at compile-time. */
1114 return t; 1117
1115 } 1118 static bool
1119 poly_int_binop (poly_wide_int &res, enum tree_code code,
1120 const_tree arg1, const_tree arg2,
1121 signop sign, wi::overflow_type *overflow)
1122 {
1123 gcc_assert (NUM_POLY_INT_COEFFS != 1);
1124 gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
1125 switch (code)
1126 {
1127 case PLUS_EXPR:
1128 res = wi::add (wi::to_poly_wide (arg1),
1129 wi::to_poly_wide (arg2), sign, overflow);
1130 break;
1131
1132 case MINUS_EXPR:
1133 res = wi::sub (wi::to_poly_wide (arg1),
1134 wi::to_poly_wide (arg2), sign, overflow);
1135 break;
1136
1137 case MULT_EXPR:
1138 if (TREE_CODE (arg2) == INTEGER_CST)
1139 res = wi::mul (wi::to_poly_wide (arg1),
1140 wi::to_wide (arg2), sign, overflow);
1141 else if (TREE_CODE (arg1) == INTEGER_CST)
1142 res = wi::mul (wi::to_poly_wide (arg2),
1143 wi::to_wide (arg1), sign, overflow);
1144 else
1145 return false;
1146 break;
1147
1148 case LSHIFT_EXPR:
1149 if (TREE_CODE (arg2) == INTEGER_CST)
1150 res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
1151 else
1152 return false;
1153 break;
1154
1155 case BIT_IOR_EXPR:
1156 if (TREE_CODE (arg2) != INTEGER_CST
1157 || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
1158 &res))
1159 return false;
1160 break;
1161
1162 default:
1163 return false;
1164 }
1165 return true;
1166 }
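
For context, a degree-1 poly_int represents c0 + c1 * x for a runtime quantity x (for example a scalable vector length). Scaling by a compile-time constant keeps the value degree 1, but multiplying two such values would need an x*x term that the representation cannot express — hence the INTEGER_CST requirement on one MULT_EXPR operand above. A standalone sketch, not the GCC poly_int API:

struct poly1 { long c0, c1; };            /* value = c0 + c1 * x */

static struct poly1
poly1_scale (struct poly1 p, long k)      /* ok: still degree 1 */
{
  struct poly1 r = { p.c0 * k, p.c1 * k };
  return r;
}

int
main (void)
{
  struct poly1 vl_bytes = { 0, 16 };               /* 16 * x bytes */
  struct poly1 two_vl = poly1_scale (vl_bytes, 2); /* 32 * x bytes */
  return !(two_vl.c0 == 0 && two_vl.c1 == 32);
}
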
1167
1168 /* Combine two integer constants ARG1 and ARG2 under operation CODE to
1169 produce a new constant. Return NULL_TREE if we don't know how to
1170 evaluate CODE at compile-time. */
1116 1171
1117 tree 1172 tree
1118 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2) 1173 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
1119 { 1174 int overflowable)
1120 return int_const_binop_1 (code, arg1, arg2, 1); 1175 {
1176 bool success = false;
1177 poly_wide_int poly_res;
1178 tree type = TREE_TYPE (arg1);
1179 signop sign = TYPE_SIGN (type);
1180 wi::overflow_type overflow = wi::OVF_NONE;
1181
1182 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1183 {
1184 wide_int warg1 = wi::to_wide (arg1), res;
1185 wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
1186 success = wide_int_binop (res, code, warg1, warg2, sign, &overflow);
1187 poly_res = res;
1188 }
1189 else if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1190 success = poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow);
1191 if (success)
1192 return force_fit_type (type, poly_res, overflowable,
1193 (((sign == SIGNED || overflowable == -1)
1194 && overflow)
1195 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
1196 return NULL_TREE;
1197 }
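
With int_const_binop_1 folded into int_const_binop, callers choose the overflow behaviour directly; size_binop below passes -1 so sizetype arithmetic always records overflow. A minimal usage sketch, assuming the usual GCC internal headers are in scope (fold_sum_sketch is a hypothetical helper):

/* Fold 7 + 35 at compile time into an INTEGER_CST 42.  Passing -1 for
   OVERFLOWABLE, as size_binop does, makes any overflow set
   TREE_OVERFLOW even for unsigned types.  */
tree
fold_sum_sketch (void)
{
  tree a = build_int_cst (integer_type_node, 7);
  tree b = build_int_cst (integer_type_node, 35);
  return int_const_binop (PLUS_EXPR, a, b, -1);
}
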
1198
1199 /* Return true if binary operation OP distributes over addition in operand
1200 OPNO, with the other operand being held constant. OPNO counts from 1. */
1201
1202 static bool
1203 distributes_over_addition_p (tree_code op, int opno)
1204 {
1205 switch (op)
1206 {
1207 case PLUS_EXPR:
1208 case MINUS_EXPR:
1209 case MULT_EXPR:
1210 return true;
1211
1212 case LSHIFT_EXPR:
1213 return opno == 1;
1214
1215 default:
1216 return false;
1217 }
1121 } 1218 }
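
The opno distinction exists because left shift distributes over addition only in its first operand. A standalone check of the two properties (modulo the usual 2^N wrap of unsigned arithmetic), not part of the patch:

#include <assert.h>

int
main (void)
{
  unsigned int a = 5, b = 7, c = 3;
  assert (((a + b) << c) == ((a << c) + (b << c)));  /* opno == 1: distributes */
  assert ((c << (a + b)) != ((c << a) + (c << b)));  /* opno == 2: does not */
  return 0;
}
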
1122 1219
1123 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new 1220 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1124 constant. We assume ARG1 and ARG2 have the same data type, or at least 1221 constant. We assume ARG1 and ARG2 have the same data type, or at least
1125 are the same kind of constant and the same machine mode. Return zero if 1222 are the same kind of constant and the same machine mode. Return zero if
1133 return NULL_TREE; 1230 return NULL_TREE;
1134 1231
1135 STRIP_NOPS (arg1); 1232 STRIP_NOPS (arg1);
1136 STRIP_NOPS (arg2); 1233 STRIP_NOPS (arg2);
1137 1234
1138 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST) 1235 if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1139 { 1236 {
1140 if (code == POINTER_PLUS_EXPR) 1237 if (code == POINTER_PLUS_EXPR)
1141 return int_const_binop (PLUS_EXPR, 1238 return int_const_binop (PLUS_EXPR,
1142 arg1, fold_convert (TREE_TYPE (arg1), arg2)); 1239 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1143 1240
1411 if (real && imag) 1508 if (real && imag)
1412 return build_complex (type, real, imag); 1509 return build_complex (type, real, imag);
1413 } 1510 }
1414 1511
1415 if (TREE_CODE (arg1) == VECTOR_CST 1512 if (TREE_CODE (arg1) == VECTOR_CST
1416 && TREE_CODE (arg2) == VECTOR_CST) 1513 && TREE_CODE (arg2) == VECTOR_CST
1514 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
1515 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
1417 { 1516 {
1418 tree type = TREE_TYPE (arg1); 1517 tree type = TREE_TYPE (arg1);
1419 int count = VECTOR_CST_NELTS (arg1), i; 1518 bool step_ok_p;
1420 1519 if (VECTOR_CST_STEPPED_P (arg1)
1421 auto_vec<tree, 32> elts (count); 1520 && VECTOR_CST_STEPPED_P (arg2))
1422 for (i = 0; i < count; i++) 1521 /* We can operate directly on the encoding if:
1522
1523 a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
1524 implies
1525 (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)
1526
1527 Addition and subtraction are the supported operators
1528 for which this is true. */
1529 step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
1530 else if (VECTOR_CST_STEPPED_P (arg1))
1531 /* We can operate directly on stepped encodings if:
1532
1533 a3 - a2 == a2 - a1
1534 implies:
1535 (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)
1536
1537 which is true if (x -> x op c) distributes over addition. */
1538 step_ok_p = distributes_over_addition_p (code, 1);
1539 else
1540 /* Similarly in reverse. */
1541 step_ok_p = distributes_over_addition_p (code, 2);
1542 tree_vector_builder elts;
1543 if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
1544 return NULL_TREE;
1545 unsigned int count = elts.encoded_nelts ();
1546 for (unsigned int i = 0; i < count; ++i)
1423 { 1547 {
1424 tree elem1 = VECTOR_CST_ELT (arg1, i); 1548 tree elem1 = VECTOR_CST_ELT (arg1, i);
1425 tree elem2 = VECTOR_CST_ELT (arg2, i); 1549 tree elem2 = VECTOR_CST_ELT (arg2, i);
1426 1550
1427 tree elt = const_binop (code, elem1, elem2); 1551 tree elt = const_binop (code, elem1, elem2);
1431 if (elt == NULL_TREE) 1555 if (elt == NULL_TREE)
1432 return NULL_TREE; 1556 return NULL_TREE;
1433 elts.quick_push (elt); 1557 elts.quick_push (elt);
1434 } 1558 }
1435 1559
1436 return build_vector (type, elts); 1560 return elts.build ();
1437 } 1561 }
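
The step condition above boils down to arithmetic on series: adding two series element-wise gives another series with the steps added, so PLUS_EXPR and MINUS_EXPR can work on the encodings alone. A standalone check (not part of the patch):

#include <assert.h>

int
main (void)
{
  int a1 = 1, step_a = 10;   /* a_i = 1 + 10 * i */
  int b1 = 4, step_b = 3;    /* b_i = 4 + 3 * i  */
  for (int i = 0; i < 8; i++)
    {
      int sum = (a1 + step_a * i) + (b1 + step_b * i);
      assert (sum == (a1 + b1) + (step_a + step_b) * i);  /* still a series */
    }
  return 0;
}
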
1438 1562
1439 /* Shifts allow a scalar offset for a vector. */ 1563 /* Shifts allow a scalar offset for a vector. */
1440 if (TREE_CODE (arg1) == VECTOR_CST 1564 if (TREE_CODE (arg1) == VECTOR_CST
1441 && TREE_CODE (arg2) == INTEGER_CST) 1565 && TREE_CODE (arg2) == INTEGER_CST)
1442 { 1566 {
1443 tree type = TREE_TYPE (arg1); 1567 tree type = TREE_TYPE (arg1);
1444 int count = VECTOR_CST_NELTS (arg1), i; 1568 bool step_ok_p = distributes_over_addition_p (code, 1);
1445 1569 tree_vector_builder elts;
1446 auto_vec<tree, 32> elts (count); 1570 if (!elts.new_unary_operation (type, arg1, step_ok_p))
1447 for (i = 0; i < count; i++) 1571 return NULL_TREE;
1572 unsigned int count = elts.encoded_nelts ();
1573 for (unsigned int i = 0; i < count; ++i)
1448 { 1574 {
1449 tree elem1 = VECTOR_CST_ELT (arg1, i); 1575 tree elem1 = VECTOR_CST_ELT (arg1, i);
1450 1576
1451 tree elt = const_binop (code, elem1, arg2); 1577 tree elt = const_binop (code, elem1, arg2);
1452 1578
1455 if (elt == NULL_TREE) 1581 if (elt == NULL_TREE)
1456 return NULL_TREE; 1582 return NULL_TREE;
1457 elts.quick_push (elt); 1583 elts.quick_push (elt);
1458 } 1584 }
1459 1585
1460 return build_vector (type, elts); 1586 return elts.build ();
1461 } 1587 }
1462 return NULL_TREE; 1588 return NULL_TREE;
1463 } 1589 }
1464 1590
1465 /* Overload that adds a TYPE parameter to be able to dispatch 1591 /* Overload that adds a TYPE parameter to be able to dispatch
1473 1599
1474 /* ??? Until we make the const_binop worker take the type of the 1600 /* ??? Until we make the const_binop worker take the type of the
1475 result as argument put those cases that need it here. */ 1601 result as argument put those cases that need it here. */
1476 switch (code) 1602 switch (code)
1477 { 1603 {
1604 case VEC_SERIES_EXPR:
1605 if (CONSTANT_CLASS_P (arg1)
1606 && CONSTANT_CLASS_P (arg2))
1607 return build_vec_series (type, arg1, arg2);
1608 return NULL_TREE;
1609
1478 case COMPLEX_EXPR: 1610 case COMPLEX_EXPR:
1479 if ((TREE_CODE (arg1) == REAL_CST 1611 if ((TREE_CODE (arg1) == REAL_CST
1480 && TREE_CODE (arg2) == REAL_CST) 1612 && TREE_CODE (arg2) == REAL_CST)
1481 || (TREE_CODE (arg1) == INTEGER_CST 1613 || (TREE_CODE (arg1) == INTEGER_CST
1482 && TREE_CODE (arg2) == INTEGER_CST)) 1614 && TREE_CODE (arg2) == INTEGER_CST))
1483 return build_complex (type, arg1, arg2); 1615 return build_complex (type, arg1, arg2);
1484 return NULL_TREE; 1616 return NULL_TREE;
1485 1617
1618 case POINTER_DIFF_EXPR:
1619 if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1620 {
1621 poly_offset_int res = (wi::to_poly_offset (arg1)
1622 - wi::to_poly_offset (arg2));
1623 return force_fit_type (type, res, 1,
1624 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1625 }
1626 return NULL_TREE;
1627
1486 case VEC_PACK_TRUNC_EXPR: 1628 case VEC_PACK_TRUNC_EXPR:
1487 case VEC_PACK_FIX_TRUNC_EXPR: 1629 case VEC_PACK_FIX_TRUNC_EXPR:
1630 case VEC_PACK_FLOAT_EXPR:
1488 { 1631 {
1489 unsigned int out_nelts, in_nelts, i; 1632 unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
1490 1633
1491 if (TREE_CODE (arg1) != VECTOR_CST 1634 if (TREE_CODE (arg1) != VECTOR_CST
1492 || TREE_CODE (arg2) != VECTOR_CST) 1635 || TREE_CODE (arg2) != VECTOR_CST)
1493 return NULL_TREE; 1636 return NULL_TREE;
1494 1637
1495 in_nelts = VECTOR_CST_NELTS (arg1); 1638 if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1639 return NULL_TREE;
1640
1496 out_nelts = in_nelts * 2; 1641 out_nelts = in_nelts * 2;
1497 gcc_assert (in_nelts == VECTOR_CST_NELTS (arg2) 1642 gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1498 && out_nelts == TYPE_VECTOR_SUBPARTS (type)); 1643 && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1499 1644
1500 auto_vec<tree, 32> elts (out_nelts); 1645 tree_vector_builder elts (type, out_nelts, 1);
1501 for (i = 0; i < out_nelts; i++) 1646 for (i = 0; i < out_nelts; i++)
1502 { 1647 {
1503 tree elt = (i < in_nelts 1648 tree elt = (i < in_nelts
1504 ? VECTOR_CST_ELT (arg1, i) 1649 ? VECTOR_CST_ELT (arg1, i)
1505 : VECTOR_CST_ELT (arg2, i - in_nelts)); 1650 : VECTOR_CST_ELT (arg2, i - in_nelts));
1506 elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR 1651 elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1507 ? NOP_EXPR : FIX_TRUNC_EXPR, 1652 ? NOP_EXPR
1653 : code == VEC_PACK_FLOAT_EXPR
1654 ? FLOAT_EXPR : FIX_TRUNC_EXPR,
1508 TREE_TYPE (type), elt); 1655 TREE_TYPE (type), elt);
1509 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt)) 1656 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1510 return NULL_TREE; 1657 return NULL_TREE;
1511 elts.quick_push (elt); 1658 elts.quick_push (elt);
1512 } 1659 }
1513 1660
1514 return build_vector (type, elts); 1661 return elts.build ();
1515 } 1662 }
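
A standalone sketch (not part of the patch) of the VEC_PACK_TRUNC_EXPR folding above, with plain arrays in place of VECTOR_CSTs: the two inputs are concatenated and each element is narrowed to the output element type, relying on GCC's modulo behaviour for out-of-range signed conversions:

#include <assert.h>
#include <stdint.h>

int
main (void)
{
  /* Two v2si inputs packed into one v4hi output, mirroring the loop
     above: element i comes from in1 if i < in_nelts, else from in2.  */
  int32_t in1[2] = { 1, 70000 }, in2[2] = { 3, -4 };
  int16_t out[4];
  for (int i = 0; i < 4; i++)
    out[i] = (int16_t) (i < 2 ? in1[i] : in2[i - 2]);  /* NOP_EXPR-style truncation */
  assert (out[0] == 1 && out[2] == 3 && out[3] == -4);
  assert (out[1] == 4464);  /* 70000 wraps modulo 2^16 on truncation */
  return 0;
}
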
1516 1663
1517 case VEC_WIDEN_MULT_LO_EXPR: 1664 case VEC_WIDEN_MULT_LO_EXPR:
1518 case VEC_WIDEN_MULT_HI_EXPR: 1665 case VEC_WIDEN_MULT_HI_EXPR:
1519 case VEC_WIDEN_MULT_EVEN_EXPR: 1666 case VEC_WIDEN_MULT_EVEN_EXPR:
1520 case VEC_WIDEN_MULT_ODD_EXPR: 1667 case VEC_WIDEN_MULT_ODD_EXPR:
1521 { 1668 {
1522 unsigned int out_nelts, in_nelts, out, ofs, scale; 1669 unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;
1523 1670
1524 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST) 1671 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1525 return NULL_TREE; 1672 return NULL_TREE;
1526 1673
1527 in_nelts = VECTOR_CST_NELTS (arg1); 1674 if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1675 return NULL_TREE;
1528 out_nelts = in_nelts / 2; 1676 out_nelts = in_nelts / 2;
1529 gcc_assert (in_nelts == VECTOR_CST_NELTS (arg2) 1677 gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1530 && out_nelts == TYPE_VECTOR_SUBPARTS (type)); 1678 && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1531 1679
1532 if (code == VEC_WIDEN_MULT_LO_EXPR) 1680 if (code == VEC_WIDEN_MULT_LO_EXPR)
1533 scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0; 1681 scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
1534 else if (code == VEC_WIDEN_MULT_HI_EXPR) 1682 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1535 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts; 1683 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
1536 else if (code == VEC_WIDEN_MULT_EVEN_EXPR) 1684 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1537 scale = 1, ofs = 0; 1685 scale = 1, ofs = 0;
1538 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */ 1686 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1539 scale = 1, ofs = 1; 1687 scale = 1, ofs = 1;
1540 1688
1541 auto_vec<tree, 32> elts (out_nelts); 1689 tree_vector_builder elts (type, out_nelts, 1);
1542 for (out = 0; out < out_nelts; out++) 1690 for (out = 0; out < out_nelts; out++)
1543 { 1691 {
1544 unsigned int in = (out << scale) + ofs; 1692 unsigned int in = (out << scale) + ofs;
1545 tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), 1693 tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1546 VECTOR_CST_ELT (arg1, in)); 1694 VECTOR_CST_ELT (arg1, in));
1553 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt)) 1701 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1554 return NULL_TREE; 1702 return NULL_TREE;
1555 elts.quick_push (elt); 1703 elts.quick_push (elt);
1556 } 1704 }
1557 1705
1558 return build_vector (type, elts); 1706 return elts.build ();
1559 } 1707 }
1560 1708
1561 default:; 1709 default:;
1562 } 1710 }
1563 1711
1581 flag_signaling_nans is on and the operand is a signaling NaN. */ 1729 flag_signaling_nans is on and the operand is a signaling NaN. */
1582 if (TREE_CODE (arg0) == REAL_CST 1730 if (TREE_CODE (arg0) == REAL_CST
1583 && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))) 1731 && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
1584 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0)) 1732 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1585 && code != NEGATE_EXPR 1733 && code != NEGATE_EXPR
1586 && code != ABS_EXPR) 1734 && code != ABS_EXPR
1735 && code != ABSU_EXPR)
1587 return NULL_TREE; 1736 return NULL_TREE;
1588 1737
1589 switch (code) 1738 switch (code)
1590 { 1739 {
1591 CASE_CONVERT: 1740 CASE_CONVERT:
1616 return tem; 1765 return tem;
1617 break; 1766 break;
1618 } 1767 }
1619 1768
1620 case ABS_EXPR: 1769 case ABS_EXPR:
1770 case ABSU_EXPR:
1621 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST) 1771 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1622 return fold_abs_const (arg0, type); 1772 return fold_abs_const (arg0, type);
1623 break; 1773 break;
1624 1774
1625 case CONJ_EXPR: 1775 case CONJ_EXPR:
1632 break; 1782 break;
1633 1783
1634 case BIT_NOT_EXPR: 1784 case BIT_NOT_EXPR:
1635 if (TREE_CODE (arg0) == INTEGER_CST) 1785 if (TREE_CODE (arg0) == INTEGER_CST)
1636 return fold_not_const (arg0, type); 1786 return fold_not_const (arg0, type);
1787 else if (POLY_INT_CST_P (arg0))
1788 return wide_int_to_tree (type, -poly_int_cst_value (arg0));
1637 /* Perform BIT_NOT_EXPR on each element individually. */ 1789 /* Perform BIT_NOT_EXPR on each element individually. */
1638 else if (TREE_CODE (arg0) == VECTOR_CST) 1790 else if (TREE_CODE (arg0) == VECTOR_CST)
1639 { 1791 {
1640 tree elem; 1792 tree elem;
1641 unsigned count = VECTOR_CST_NELTS (arg0), i; 1793
1642 1794 /* This can cope with stepped encodings because ~x == -1 - x. */
1643 auto_vec<tree, 32> elements (count); 1795 tree_vector_builder elements;
1644 for (i = 0; i < count; i++) 1796 elements.new_unary_operation (type, arg0, true);
1797 unsigned int i, count = elements.encoded_nelts ();
1798 for (i = 0; i < count; ++i)
1645 { 1799 {
1646 elem = VECTOR_CST_ELT (arg0, i); 1800 elem = VECTOR_CST_ELT (arg0, i);
1647 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem); 1801 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1648 if (elem == NULL_TREE) 1802 if (elem == NULL_TREE)
1649 break; 1803 break;
1650 elements.quick_push (elem); 1804 elements.quick_push (elem);
1651 } 1805 }
1652 if (i == count) 1806 if (i == count)
1653 return build_vector (type, elements); 1807 return elements.build ();
1654 } 1808 }
1655 break; 1809 break;
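
The "~x == -1 - x" identity in the comment is what makes stepped encodings safe for BIT_NOT_EXPR: complementing a series a + s*i gives the series (-1 - a) + (-s)*i. A standalone check (not part of the patch):

#include <assert.h>

int
main (void)
{
  int a = 3, s = 5;  /* a stepped series x_i = 3 + 5 * i */
  for (int i = 0; i < 8; i++)
    assert (~(a + s * i) == (-1 - a) + (-s) * i);  /* again a series */
  return 0;
}
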
1656 1810
1657 case TRUTH_NOT_EXPR: 1811 case TRUTH_NOT_EXPR:
1658 if (TREE_CODE (arg0) == INTEGER_CST) 1812 if (TREE_CODE (arg0) == INTEGER_CST)
1671 1825
1672 case VEC_UNPACK_LO_EXPR: 1826 case VEC_UNPACK_LO_EXPR:
1673 case VEC_UNPACK_HI_EXPR: 1827 case VEC_UNPACK_HI_EXPR:
1674 case VEC_UNPACK_FLOAT_LO_EXPR: 1828 case VEC_UNPACK_FLOAT_LO_EXPR:
1675 case VEC_UNPACK_FLOAT_HI_EXPR: 1829 case VEC_UNPACK_FLOAT_HI_EXPR:
1830 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
1831 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
1676 { 1832 {
1677 unsigned int out_nelts, in_nelts, i; 1833 unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
1678 enum tree_code subcode; 1834 enum tree_code subcode;
1679 1835
1680 if (TREE_CODE (arg0) != VECTOR_CST) 1836 if (TREE_CODE (arg0) != VECTOR_CST)
1681 return NULL_TREE; 1837 return NULL_TREE;
1682 1838
1683 in_nelts = VECTOR_CST_NELTS (arg0); 1839 if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
1840 return NULL_TREE;
1684 out_nelts = in_nelts / 2; 1841 out_nelts = in_nelts / 2;
1685 gcc_assert (out_nelts == TYPE_VECTOR_SUBPARTS (type)); 1842 gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1686 1843
1687 unsigned int offset = 0; 1844 unsigned int offset = 0;
1688 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR 1845 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1689 || code == VEC_UNPACK_FLOAT_LO_EXPR)) 1846 || code == VEC_UNPACK_FLOAT_LO_EXPR
1847 || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
1690 offset = out_nelts; 1848 offset = out_nelts;
1691 1849
1692 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR) 1850 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1693 subcode = NOP_EXPR; 1851 subcode = NOP_EXPR;
1852 else if (code == VEC_UNPACK_FLOAT_LO_EXPR
1853 || code == VEC_UNPACK_FLOAT_HI_EXPR)
1854 subcode = FLOAT_EXPR;
1694 else 1855 else
1695 subcode = FLOAT_EXPR; 1856 subcode = FIX_TRUNC_EXPR;
1696 1857
1697 auto_vec<tree, 32> elts (out_nelts); 1858 tree_vector_builder elts (type, out_nelts, 1);
1698 for (i = 0; i < out_nelts; i++) 1859 for (i = 0; i < out_nelts; i++)
1699 { 1860 {
1700 tree elt = fold_convert_const (subcode, TREE_TYPE (type), 1861 tree elt = fold_convert_const (subcode, TREE_TYPE (type),
1701 VECTOR_CST_ELT (arg0, i + offset)); 1862 VECTOR_CST_ELT (arg0, i + offset));
1702 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt)) 1863 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1703 return NULL_TREE; 1864 return NULL_TREE;
1704 elts.quick_push (elt); 1865 elts.quick_push (elt);
1705 } 1866 }
1706 1867
1707 return build_vector (type, elts); 1868 return elts.build ();
1708 } 1869 }
1709 1870
1710 case REDUC_MIN_EXPR: 1871 case VEC_DUPLICATE_EXPR:
1711 case REDUC_MAX_EXPR: 1872 if (CONSTANT_CLASS_P (arg0))
1712 case REDUC_PLUS_EXPR: 1873 return build_vector_from_val (type, arg0);
1713 { 1874 return NULL_TREE;
1714 unsigned int nelts, i;
1715 enum tree_code subcode;
1716
1717 if (TREE_CODE (arg0) != VECTOR_CST)
1718 return NULL_TREE;
1719 nelts = VECTOR_CST_NELTS (arg0);
1720
1721 switch (code)
1722 {
1723 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
1724 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
1725 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
1726 default: gcc_unreachable ();
1727 }
1728
1729 tree res = VECTOR_CST_ELT (arg0, 0);
1730 for (i = 1; i < nelts; i++)
1731 {
1732 res = const_binop (subcode, res, VECTOR_CST_ELT (arg0, i));
1733 if (res == NULL_TREE || !CONSTANT_CLASS_P (res))
1734 return NULL_TREE;
1735 }
1736
1737 return res;
1738 }
1739 1875
1740 default: 1876 default:
1741 break; 1877 break;
1742 } 1878 }
1743 1879
1746 1882
1747 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND 1883 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1748 indicates which particular sizetype to create. */ 1884 indicates which particular sizetype to create. */
1749 1885
1750 tree 1886 tree
1751 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind) 1887 size_int_kind (poly_int64 number, enum size_type_kind kind)
1752 { 1888 {
1753 return build_int_cst (sizetype_tab[(int) kind], number); 1889 return build_int_cst (sizetype_tab[(int) kind], number);
1754 } 1890 }
1755 1891
1756 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE 1892 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1767 return error_mark_node; 1903 return error_mark_node;
1768 1904
1769 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0), 1905 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1770 TREE_TYPE (arg1))); 1906 TREE_TYPE (arg1)));
1771 1907
1772 /* Handle the special case of two integer constants faster. */ 1908 /* Handle the special case of two poly_int constants faster. */
1773 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST) 1909 if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
1774 { 1910 {
1775 /* And some specific cases even faster than that. */ 1911 /* And some specific cases even faster than that. */
1776 if (code == PLUS_EXPR) 1912 if (code == PLUS_EXPR)
1777 { 1913 {
1778 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0)) 1914 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1792 } 1928 }
1793 1929
1794 /* Handle general case of two integer constants. For sizetype 1930 /* Handle general case of two integer constants. For sizetype
1795 constant calculations we always want to know about overflow, 1931 constant calculations we always want to know about overflow,
1796 even in the unsigned case. */ 1932 even in the unsigned case. */
1797 return int_const_binop_1 (code, arg0, arg1, -1); 1933 tree res = int_const_binop (code, arg0, arg1, -1);
1934 if (res != NULL_TREE)
1935 return res;
1798 } 1936 }
1799 1937
1800 return fold_build2_loc (loc, code, type, arg0, arg1); 1938 return fold_build2_loc (loc, code, type, arg0, arg1);
1801 } 1939 }
1802 1940
2116 type TYPE. If no simplification can be done return NULL_TREE. */ 2254 type TYPE. If no simplification can be done return NULL_TREE. */
2117 2255
2118 static tree 2256 static tree
2119 fold_convert_const (enum tree_code code, tree type, tree arg1) 2257 fold_convert_const (enum tree_code code, tree type, tree arg1)
2120 { 2258 {
2121 if (TREE_TYPE (arg1) == type) 2259 tree arg_type = TREE_TYPE (arg1);
2260 if (arg_type == type)
2122 return arg1; 2261 return arg1;
2262
2263 /* We can't widen types, since the runtime value could overflow the
2264 original type before being extended to the new type. */
2265 if (POLY_INT_CST_P (arg1)
2266 && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2267 && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2268 return build_poly_int_cst (type,
2269 poly_wide_int::from (poly_int_cst_value (arg1),
2270 TYPE_PRECISION (type),
2271 TYPE_SIGN (arg_type)));
2123 2272
2124 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type) 2273 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2125 || TREE_CODE (type) == OFFSET_TYPE) 2274 || TREE_CODE (type) == OFFSET_TYPE)
2126 { 2275 {
2127 if (TREE_CODE (arg1) == INTEGER_CST) 2276 if (TREE_CODE (arg1) == INTEGER_CST)
2150 return fold_convert_const_fixed_from_real (type, arg1); 2299 return fold_convert_const_fixed_from_real (type, arg1);
2151 } 2300 }
2152 else if (TREE_CODE (type) == VECTOR_TYPE) 2301 else if (TREE_CODE (type) == VECTOR_TYPE)
2153 { 2302 {
2154 if (TREE_CODE (arg1) == VECTOR_CST 2303 if (TREE_CODE (arg1) == VECTOR_CST
2155 && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1)) 2304 && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2156 { 2305 {
2157 int len = VECTOR_CST_NELTS (arg1);
2158 tree elttype = TREE_TYPE (type); 2306 tree elttype = TREE_TYPE (type);
2159 auto_vec<tree, 32> v (len); 2307 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2160 for (int i = 0; i < len; ++i) 2308 /* We can't handle steps directly when extending, since the
2309 values need to wrap at the original precision first. */
2310 bool step_ok_p
2311 = (INTEGRAL_TYPE_P (elttype)
2312 && INTEGRAL_TYPE_P (arg1_elttype)
2313 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2314 tree_vector_builder v;
2315 if (!v.new_unary_operation (type, arg1, step_ok_p))
2316 return NULL_TREE;
2317 unsigned int len = v.encoded_nelts ();
2318 for (unsigned int i = 0; i < len; ++i)
2161 { 2319 {
2162 tree elt = VECTOR_CST_ELT (arg1, i); 2320 tree elt = VECTOR_CST_ELT (arg1, i);
2163 tree cvt = fold_convert_const (code, elttype, elt); 2321 tree cvt = fold_convert_const (code, elttype, elt);
2164 if (cvt == NULL_TREE) 2322 if (cvt == NULL_TREE)
2165 return NULL_TREE; 2323 return NULL_TREE;
2166 v.quick_push (cvt); 2324 v.quick_push (cvt);
2167 } 2325 }
2168 return build_vector (type, v); 2326 return v.build ();
2169 } 2327 }
2170 } 2328 }
2171 return NULL_TREE; 2329 return NULL_TREE;
2172 } 2330 }
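
The step_ok_p restriction above — no steps when extending — exists because a stepped narrow series may wrap before it is widened, and the wrapped values no longer form a series. A standalone illustration (not part of the patch) with uint8_t widened to int:

#include <assert.h>
#include <stdint.h>

int
main (void)
{
  /* Narrow series 254, 255, 0, 1 (step 1 modulo 256).  */
  int widened[4];
  for (int i = 0; i < 4; i++)
    widened[i] = (uint8_t) (254 + i);  /* wrap first, then zero-extend */
  assert (widened[2] - widened[1] != widened[1] - widened[0]);  /* not a series */
  return 0;
}
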
2173 2331
2203 switch (TREE_CODE (type)) 2361 switch (TREE_CODE (type))
2204 { 2362 {
2205 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE: 2363 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2206 case POINTER_TYPE: case REFERENCE_TYPE: 2364 case POINTER_TYPE: case REFERENCE_TYPE:
2207 case OFFSET_TYPE: 2365 case OFFSET_TYPE:
2208 return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig) 2366 return (INTEGRAL_TYPE_P (orig)
2367 || (POINTER_TYPE_P (orig)
2368 && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2209 || TREE_CODE (orig) == OFFSET_TYPE); 2369 || TREE_CODE (orig) == OFFSET_TYPE);
2210 2370
2211 case REAL_TYPE: 2371 case REAL_TYPE:
2212 case FIXED_POINT_TYPE: 2372 case FIXED_POINT_TYPE:
2213 case VECTOR_TYPE: 2373 case VECTOR_TYPE:
2630 default: 2790 default:
2631 gcc_unreachable (); 2791 gcc_unreachable ();
2632 } 2792 }
2633 } 2793 }
2634 2794
2795 /* Return true if COND1 tests the opposite condition of COND2. */
2796
2797 bool
2798 inverse_conditions_p (const_tree cond1, const_tree cond2)
2799 {
2800 return (COMPARISON_CLASS_P (cond1)
2801 && COMPARISON_CLASS_P (cond2)
2802 && (invert_tree_comparison
2803 (TREE_CODE (cond1),
2804 HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
2805 && operand_equal_p (TREE_OPERAND (cond1, 0),
2806 TREE_OPERAND (cond2, 0), 0)
2807 && operand_equal_p (TREE_OPERAND (cond1, 1),
2808 TREE_OPERAND (cond2, 1), 0));
2809 }
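
The HONOR_NANS argument matters because a < b and a >= b are not inverses under IEEE semantics: both are false when either operand is NaN, so invert_tree_comparison must produce the unordered variant (UNGE_EXPR) instead. A standalone illustration (not part of the patch):

#include <assert.h>
#include <math.h>

int
main (void)
{
  double a = nan (""), b = 1.0;
  /* With a NaN operand, a < b and a >= b are both false, so the true
     inverse of LT_EXPR is the unordered UNGE_EXPR.  */
  assert (!(a < b) && !(a >= b));
  return 0;
}
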
2810
2635 /* Return a tree for the comparison which is the combination of 2811 /* Return a tree for the comparison which is the combination of
2636 doing the AND or OR (depending on CODE) of the two operations LCODE 2812 doing the AND or OR (depending on CODE) of the two operations LCODE
2637 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account 2813 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2638 the possibility of trapping if the mode has NaNs, and return NULL_TREE 2814 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2639 if this makes the transformation invalid. */ 2815 if this makes the transformation invalid. */
2937 } 3113 }
2938 return 0; 3114 return 0;
2939 3115
2940 case VECTOR_CST: 3116 case VECTOR_CST:
2941 { 3117 {
2942 unsigned i; 3118 if (VECTOR_CST_LOG2_NPATTERNS (arg0)
2943 3119 != VECTOR_CST_LOG2_NPATTERNS (arg1))
2944 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2945 return 0; 3120 return 0;
2946 3121
2947 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i) 3122 if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
2948 { 3123 != VECTOR_CST_NELTS_PER_PATTERN (arg1))
2949 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i), 3124 return 0;
2950 VECTOR_CST_ELT (arg1, i), flags)) 3125
2951 return 0; 3126 unsigned int count = vector_cst_encoded_nelts (arg0);
2952 } 3127 for (unsigned int i = 0; i < count; ++i)
3128 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3129 VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3130 return 0;
2953 return 1; 3131 return 1;
2954 } 3132 }
2955 3133
2956 case COMPLEX_CST: 3134 case COMPLEX_CST:
2957 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1), 3135 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3151 3329
3152 case TRUTH_ANDIF_EXPR: 3330 case TRUTH_ANDIF_EXPR:
3153 case TRUTH_ORIF_EXPR: 3331 case TRUTH_ORIF_EXPR:
3154 return OP_SAME (0) && OP_SAME (1); 3332 return OP_SAME (0) && OP_SAME (1);
3155 3333
3156 case FMA_EXPR:
3157 case WIDEN_MULT_PLUS_EXPR: 3334 case WIDEN_MULT_PLUS_EXPR:
3158 case WIDEN_MULT_MINUS_EXPR: 3335 case WIDEN_MULT_MINUS_EXPR:
3159 if (!OP_SAME (2)) 3336 if (!OP_SAME (2))
3160 return 0; 3337 return 0;
3161 /* The multiplication operands are commutative. */ 3338 /* The multiplication operands are commutative. */
3204 return OP_SAME (0) && OP_SAME (1); 3381 return OP_SAME (0) && OP_SAME (1);
3205 return 0; 3382 return 0;
3206 3383
3207 case CLEANUP_POINT_EXPR: 3384 case CLEANUP_POINT_EXPR:
3208 case EXPR_STMT: 3385 case EXPR_STMT:
3386 case SAVE_EXPR:
3209 if (flags & OEP_LEXICOGRAPHIC) 3387 if (flags & OEP_LEXICOGRAPHIC)
3210 return OP_SAME (0); 3388 return OP_SAME (0);
3211 return 0; 3389 return 0;
3212 3390
3213 default: 3391 default:
3271 } 3449 }
3272 3450
3273 case tcc_declaration: 3451 case tcc_declaration:
3274 /* Consider __builtin_sqrt equal to sqrt. */ 3452 /* Consider __builtin_sqrt equal to sqrt. */
3275 return (TREE_CODE (arg0) == FUNCTION_DECL 3453 return (TREE_CODE (arg0) == FUNCTION_DECL
3276 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1) 3454 && fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3277 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1) 3455 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3278 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1)); 3456 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3279 3457
3280 case tcc_exceptional: 3458 case tcc_exceptional:
3281 if (TREE_CODE (arg0) == CONSTRUCTOR) 3459 if (TREE_CODE (arg0) == CONSTRUCTOR)
3293 3471
3294 /* Be sure that vectors constructed have the same representation. 3472 /* Be sure that vectors constructed have the same representation.
3295 We only tested element precision and modes to match. 3473 We only tested element precision and modes to match.
3296 Vectors may be BLKmode and thus also check that the number of 3474 Vectors may be BLKmode and thus also check that the number of
3297 parts match. */ 3475 parts match. */
3298 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) 3476 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3299 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))) 3477 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3300 return 0; 3478 return 0;
3301 3479
3302 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0); 3480 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3303 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1); 3481 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3304 unsigned int len = vec_safe_length (v0); 3482 unsigned int len = vec_safe_length (v0);
3339 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2)) 3517 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3340 return 0; 3518 return 0;
3341 if (tsi_end_p (tsi1) && tsi_end_p (tsi2)) 3519 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3342 return 1; 3520 return 1;
3343 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2), 3521 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3344 OEP_LEXICOGRAPHIC)) 3522 flags & (OEP_LEXICOGRAPHIC
3523 | OEP_NO_HASH_CHECK)))
3345 return 0; 3524 return 0;
3346 } 3525 }
3347 } 3526 }
3348 return 0; 3527 return 0;
3349 3528
3351 switch (TREE_CODE (arg0)) 3530 switch (TREE_CODE (arg0))
3352 { 3531 {
3353 case RETURN_EXPR: 3532 case RETURN_EXPR:
3354 if (flags & OEP_LEXICOGRAPHIC) 3533 if (flags & OEP_LEXICOGRAPHIC)
3355 return OP_SAME_WITH_NULL (0); 3534 return OP_SAME_WITH_NULL (0);
3535 return 0;
3536 case DEBUG_BEGIN_STMT:
3537 if (flags & OEP_LEXICOGRAPHIC)
3538 return 1;
3356 return 0; 3539 return 0;
3357 default: 3540 default:
3358 return 0; 3541 return 0;
3359 } 3542 }
3360 3543
3404 /* See if ARG is an expression that is either a comparison or is performing 3587 /* See if ARG is an expression that is either a comparison or is performing
3405 arithmetic on comparisons. The comparisons must only be comparing 3588 arithmetic on comparisons. The comparisons must only be comparing
3406 two different values, which will be stored in *CVAL1 and *CVAL2; if 3589 two different values, which will be stored in *CVAL1 and *CVAL2; if
3407 they are nonzero it means that some operands have already been found. 3590 they are nonzero it means that some operands have already been found.
3408 No variables may be used anywhere else in the expression except in the 3591 No variables may be used anywhere else in the expression except in the
3409 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around 3592 comparisons.
3410 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3411 3593
3412 If this is true, return 1. Otherwise, return zero. */ 3594 If this is true, return 1. Otherwise, return zero. */
3413 3595
3414 static int 3596 static int
3415 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p) 3597 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
3416 { 3598 {
3417 enum tree_code code = TREE_CODE (arg); 3599 enum tree_code code = TREE_CODE (arg);
3418 enum tree_code_class tclass = TREE_CODE_CLASS (code); 3600 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3419 3601
3420 /* We can handle some of the tcc_expression cases here. */ 3602 /* We can handle some of the tcc_expression cases here. */
3423 else if (tclass == tcc_expression 3605 else if (tclass == tcc_expression
3424 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR 3606 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3425 || code == COMPOUND_EXPR)) 3607 || code == COMPOUND_EXPR))
3426 tclass = tcc_binary; 3608 tclass = tcc_binary;
3427 3609
3428 else if (tclass == tcc_expression && code == SAVE_EXPR
3429 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3430 {
3431 /* If we've already found a CVAL1 or CVAL2, this expression is
3432 too complex to handle. */
3433 if (*cval1 || *cval2)
3434 return 0;
3435
3436 tclass = tcc_unary;
3437 *save_p = 1;
3438 }
3439
3440 switch (tclass) 3610 switch (tclass)
3441 { 3611 {
3442 case tcc_unary: 3612 case tcc_unary:
3443 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p); 3613 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
3444 3614
3445 case tcc_binary: 3615 case tcc_binary:
3446 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p) 3616 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
3447 && twoval_comparison_p (TREE_OPERAND (arg, 1), 3617 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
3448 cval1, cval2, save_p));
3449 3618
3450 case tcc_constant: 3619 case tcc_constant:
3451 return 1; 3620 return 1;
3452 3621
3453 case tcc_expression: 3622 case tcc_expression:
3454 if (code == COND_EXPR) 3623 if (code == COND_EXPR)
3455 return (twoval_comparison_p (TREE_OPERAND (arg, 0), 3624 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
3456 cval1, cval2, save_p) 3625 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
3457 && twoval_comparison_p (TREE_OPERAND (arg, 1), 3626 && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
3458 cval1, cval2, save_p)
3459 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3460 cval1, cval2, save_p));
3461 return 0; 3627 return 0;
3462 3628
3463 case tcc_comparison: 3629 case tcc_comparison:
3464 /* First see if we can handle the first operand, then the second. For 3630 /* First see if we can handle the first operand, then the second. For
3465 the second operand, we know *CVAL1 can't be zero. It must be that 3631 the second operand, we know *CVAL1 can't be zero. It must be that
3815 is the original memory reference used to preserve the alias set of 3981 is the original memory reference used to preserve the alias set of
3816 the access. */ 3982 the access. */
3817 3983
3818 static tree 3984 static tree
3819 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type, 3985 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
3820 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, 3986 HOST_WIDE_INT bitsize, poly_int64 bitpos,
3821 int unsignedp, int reversep) 3987 int unsignedp, int reversep)
3822 { 3988 {
3823 tree result, bftype; 3989 tree result, bftype;
3824 3990
3825 /* Attempt not to lose the access path if possible. */ 3991 /* Attempt not to lose the access path if possible. */
3826 if (TREE_CODE (orig_inner) == COMPONENT_REF) 3992 if (TREE_CODE (orig_inner) == COMPONENT_REF)
3827 { 3993 {
3828 tree ninner = TREE_OPERAND (orig_inner, 0); 3994 tree ninner = TREE_OPERAND (orig_inner, 0);
3829 machine_mode nmode; 3995 machine_mode nmode;
3830 HOST_WIDE_INT nbitsize, nbitpos; 3996 poly_int64 nbitsize, nbitpos;
3831 tree noffset; 3997 tree noffset;
3832 int nunsignedp, nreversep, nvolatilep = 0; 3998 int nunsignedp, nreversep, nvolatilep = 0;
3833 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos, 3999 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
3834 &noffset, &nmode, &nunsignedp, 4000 &noffset, &nmode, &nunsignedp,
3835 &nreversep, &nvolatilep); 4001 &nreversep, &nvolatilep);
3836 if (base == inner 4002 if (base == inner
3837 && noffset == NULL_TREE 4003 && noffset == NULL_TREE
3838 && nbitsize >= bitsize 4004 && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
3839 && nbitpos <= bitpos
3840 && bitpos + bitsize <= nbitpos + nbitsize
3841 && !reversep 4005 && !reversep
3842 && !nreversep 4006 && !nreversep
3843 && !nvolatilep) 4007 && !nvolatilep)
3844 { 4008 {
3845 inner = ninner; 4009 inner = ninner;
3851 if (iset == 0 && get_alias_set (inner) != iset) 4015 if (iset == 0 && get_alias_set (inner) != iset)
3852 inner = fold_build2 (MEM_REF, TREE_TYPE (inner), 4016 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
3853 build_fold_addr_expr (inner), 4017 build_fold_addr_expr (inner),
3854 build_int_cst (ptr_type_node, 0)); 4018 build_int_cst (ptr_type_node, 0));
3855 4019
3856 if (bitpos == 0 && !reversep) 4020 if (known_eq (bitpos, 0) && !reversep)
3857 { 4021 {
3858 tree size = TYPE_SIZE (TREE_TYPE (inner)); 4022 tree size = TYPE_SIZE (TREE_TYPE (inner));
3859 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner)) 4023 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3860 || POINTER_TYPE_P (TREE_TYPE (inner))) 4024 || POINTER_TYPE_P (TREE_TYPE (inner)))
3861 && tree_fits_shwi_p (size) 4025 && tree_fits_shwi_p (size)
3900 4064
3901 static tree 4065 static tree
3902 optimize_bit_field_compare (location_t loc, enum tree_code code, 4066 optimize_bit_field_compare (location_t loc, enum tree_code code,
3903 tree compare_type, tree lhs, tree rhs) 4067 tree compare_type, tree lhs, tree rhs)
3904 { 4068 {
3905 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize; 4069 poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4070 HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
3906 tree type = TREE_TYPE (lhs); 4071 tree type = TREE_TYPE (lhs);
3907 tree unsigned_type; 4072 tree unsigned_type;
3908 int const_p = TREE_CODE (rhs) == INTEGER_CST; 4073 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3909 machine_mode lmode, rmode; 4074 machine_mode lmode, rmode;
3910 scalar_int_mode nmode; 4075 scalar_int_mode nmode;
3914 tree linner, rinner = NULL_TREE; 4079 tree linner, rinner = NULL_TREE;
3915 tree mask; 4080 tree mask;
3916 tree offset; 4081 tree offset;
3917 4082
3918 /* Get all the information about the extractions being done. If the bit size 4083 /* Get all the information about the extractions being done. If the bit size
3919 if the same as the size of the underlying object, we aren't doing an 4084 is the same as the size of the underlying object, we aren't doing an
3920 extraction at all and so can do nothing. We also don't want to 4085 extraction at all and so can do nothing. We also don't want to
3921 do anything if the inner expression is a PLACEHOLDER_EXPR since we 4086 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3922 then will no longer be able to replace it. */ 4087 then will no longer be able to replace it. */
3923 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode, 4088 linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
3924 &lunsignedp, &lreversep, &lvolatilep); 4089 &lunsignedp, &lreversep, &lvolatilep);
3925 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0 4090 if (linner == lhs
3926 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep) 4091 || !known_size_p (plbitsize)
4092 || !plbitsize.is_constant (&lbitsize)
4093 || !plbitpos.is_constant (&lbitpos)
4094 || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4095 || offset != 0
4096 || TREE_CODE (linner) == PLACEHOLDER_EXPR
4097 || lvolatilep)
3927 return 0; 4098 return 0;
3928 4099
3929 if (const_p) 4100 if (const_p)
3930 rreversep = lreversep; 4101 rreversep = lreversep;
3931 else 4102 else
3934 sizes, signedness and storage order are the same. */ 4105 sizes, signedness and storage order are the same. */
3935 rinner 4106 rinner
3936 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode, 4107 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3937 &runsignedp, &rreversep, &rvolatilep); 4108 &runsignedp, &rreversep, &rvolatilep);
3938 4109
3939 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize 4110 if (rinner == rhs
3940 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0 4111 || maybe_ne (lbitpos, rbitpos)
3941 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep) 4112 || maybe_ne (lbitsize, rbitsize)
4113 || lunsignedp != runsignedp
4114 || lreversep != rreversep
4115 || offset != 0
4116 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4117 || rvolatilep)
3942 return 0; 4118 return 0;
3943 } 4119 }
3944 4120
3945 /* Honor the C++ memory model and mimic what RTL expansion does. */ 4121 /* Honor the C++ memory model and mimic what RTL expansion does. */
3946 unsigned HOST_WIDE_INT bitstart = 0; 4122 poly_uint64 bitstart = 0;
3947 unsigned HOST_WIDE_INT bitend = 0; 4123 poly_uint64 bitend = 0;
3948 if (TREE_CODE (lhs) == COMPONENT_REF) 4124 if (TREE_CODE (lhs) == COMPONENT_REF)
3949 { 4125 {
3950 get_bit_range (&bitstart, &bitend, lhs, &lbitpos, &offset); 4126 get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
3951 if (offset != NULL_TREE) 4127 if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
3952 return 0; 4128 return 0;
3953 } 4129 }
3954 4130
3955 /* See if we can find a mode to refer to this field. We should be able to, 4131 /* See if we can find a mode to refer to this field. We should be able to,
3956 but fail if we can't. */ 4132 but fail if we can't. */
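A recurring pattern in this revision: get_inner_reference now reports positions and sizes as poly_int64, and callers that can only cope with compile-time constants extract a scalar via is_constant and bail out otherwise. A self-contained sketch of that idiom, using a toy degree-1 polynomial (value = a + b * N for a runtime length N; a stand-in, not GCC's poly_int):

  #include <cstdint>

  struct poly1
  {
    int64_t a, b;          /* represents a + b * N, N >= 0 */
    bool
    is_constant (int64_t *out) const
    {
      if (b != 0)
        return false;      /* depends on N: no compile-time value */
      *out = a;
      return true;
    }
  };

  /* Caller shape mirroring optimize_bit_field_compare: punt on
     anything that is not a compile-time constant.  */
  static bool
  try_fold (const poly1 &bitpos)
  {
    int64_t c;
    if (!bitpos.is_constant (&c))
      return false;
    /* ... use c as an ordinary HOST_WIDE_INT-style scalar ... */
    return true;
  }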
4114 STRIP_NOPS (exp); STRIP_NOPS (and_mask); 4290 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4115 if (TREE_CODE (and_mask) != INTEGER_CST) 4291 if (TREE_CODE (and_mask) != INTEGER_CST)
4116 return 0; 4292 return 0;
4117 } 4293 }
4118 4294
4119 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode, 4295 poly_int64 poly_bitsize, poly_bitpos;
4120 punsignedp, preversep, pvolatilep); 4296 inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4297 pmode, punsignedp, preversep, pvolatilep);
4121 if ((inner == exp && and_mask == 0) 4298 if ((inner == exp && and_mask == 0)
4122 || *pbitsize < 0 || offset != 0 4299 || !poly_bitsize.is_constant (pbitsize)
4300 || !poly_bitpos.is_constant (pbitpos)
4301 || *pbitsize < 0
4302 || offset != 0
4123 || TREE_CODE (inner) == PLACEHOLDER_EXPR 4303 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4124 /* Reject out-of-bound accesses (PR79731). */ 4304 /* Reject out-of-bound accesses (PR79731). */
4125 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner)) 4305 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4126 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)), 4306 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4127 *pbitpos + *pbitsize) < 0)) 4307 *pbitpos + *pbitsize) < 0))
4774 tree etype = TREE_TYPE (exp), mask, value; 4954 tree etype = TREE_TYPE (exp), mask, value;
4775 4955
4776 /* Disable this optimization for function pointer expressions 4956 /* Disable this optimization for function pointer expressions
4777 on targets that require function pointer canonicalization. */ 4957 on targets that require function pointer canonicalization. */
4778 if (targetm.have_canonicalize_funcptr_for_compare () 4958 if (targetm.have_canonicalize_funcptr_for_compare ()
4779 && TREE_CODE (etype) == POINTER_TYPE 4959 && POINTER_TYPE_P (etype)
4780 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE) 4960 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
4781 return NULL_TREE; 4961 return NULL_TREE;
4782 4962
4783 if (! in_p) 4963 if (! in_p)
4784 { 4964 {
4785 value = build_range_check (loc, type, exp, 1, low, high); 4965 value = build_range_check (loc, type, exp, 1, low, high);
4926 high1, 1, high0, 1)))) 5106 high1, 1, high0, 1))))
4927 { 5107 {
4928 temp = in0_p, in0_p = in1_p, in1_p = temp; 5108 temp = in0_p, in0_p = in1_p, in1_p = temp;
4929 tem = low0, low0 = low1, low1 = tem; 5109 tem = low0, low0 = low1, low1 = tem;
4930 tem = high0, high0 = high1, high1 = tem; 5110 tem = high0, high0 = high1, high1 = tem;
5111 }
5112
5113 /* If the second range is != high1 where high1 is the maximum value of
5114 the type, try first merging with the < high1 range. */
5115 if (low1
5116 && high1
5117 && TREE_CODE (low1) == INTEGER_CST
5118 && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5119 || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5120 && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5121 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5122 && operand_equal_p (low1, high1, 0))
5123 {
5124 if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5125 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5126 !in1_p, NULL_TREE, range_predecessor (low1)))
5127 return true;
5128 /* Similarly, if the second range is != low1 where low1 is the minimum
5129 value of the type, try first merging with the > low1 range. */
5130 if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5131 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5132 !in1_p, range_successor (low1), NULL_TREE))
5133 return true;
4931 } 5134 }
4932 5135
4933 /* Now flag two cases, whether the ranges are disjoint or whether the 5136 /* Now flag two cases, whether the ranges are disjoint or whether the
4934 second range is totally subsumed in the first. Note that the tests 5137 second range is totally subsumed in the first. Note that the tests
4935 below are simplified by the ones above. */ 5138 below are simplified by the ones above. */
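The added block gives the range merger a second chance when one operand is a != test against the type's extreme value, by first viewing x != MAX as x < MAX (and x != MIN as x > MIN). A worked instance of the payoff, assuming 8-bit unsigned char:

  #include <cassert>

  /* "c < 5 || c != 255" is not two mergeable ranges as written, but
     once "c != 255" is read as "c <= 254" the union of [0,4] and
     [0,254] is just [0,254], i.e. "c != 255".  */
  static bool orig_expr (unsigned char c) { return c < 5 || c != 255; }
  static bool folded_expr (unsigned char c) { return c != 255; }

  int
  main ()
  {
    for (int i = 0; i < 256; ++i)
      assert (orig_expr (i) == folded_expr (i));
  }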
5036 /* Canonicalize - [min, x] into - [-, x]. */ 5239 /* Canonicalize - [min, x] into - [-, x]. */
5037 if (low0 && TREE_CODE (low0) == INTEGER_CST) 5240 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5038 switch (TREE_CODE (TREE_TYPE (low0))) 5241 switch (TREE_CODE (TREE_TYPE (low0)))
5039 { 5242 {
5040 case ENUMERAL_TYPE: 5243 case ENUMERAL_TYPE:
5041 if (TYPE_PRECISION (TREE_TYPE (low0)) 5244 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5042 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0)))) 5245 GET_MODE_BITSIZE
5246 (TYPE_MODE (TREE_TYPE (low0)))))
5043 break; 5247 break;
5044 /* FALLTHROUGH */ 5248 /* FALLTHROUGH */
5045 case INTEGER_TYPE: 5249 case INTEGER_TYPE:
5046 if (tree_int_cst_equal (low0, 5250 if (tree_int_cst_equal (low0,
5047 TYPE_MIN_VALUE (TREE_TYPE (low0)))) 5251 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5059 /* Canonicalize - [x, max] into - [x, -]. */ 5263 /* Canonicalize - [x, max] into - [x, -]. */
5060 if (high1 && TREE_CODE (high1) == INTEGER_CST) 5264 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5061 switch (TREE_CODE (TREE_TYPE (high1))) 5265 switch (TREE_CODE (TREE_TYPE (high1)))
5062 { 5266 {
5063 case ENUMERAL_TYPE: 5267 case ENUMERAL_TYPE:
5064 if (TYPE_PRECISION (TREE_TYPE (high1)) 5268 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5065 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1)))) 5269 GET_MODE_BITSIZE
5270 (TYPE_MODE (TREE_TYPE (high1)))))
5066 break; 5271 break;
5067 /* FALLTHROUGH */ 5272 /* FALLTHROUGH */
5068 case INTEGER_TYPE: 5273 case INTEGER_TYPE:
5069 if (tree_int_cst_equal (high1, 5274 if (tree_int_cst_equal (high1,
5070 TYPE_MAX_VALUE (TREE_TYPE (high1)))) 5275 TYPE_MAX_VALUE (TREE_TYPE (high1))))
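Both canonicalizations drop a bound that coincides with the type's minimum or maximum, so the rest of merge_ranges only reasons about genuinely constraining endpoints; the enumeration check (now spelled with maybe_ne, since GET_MODE_BITSIZE can be polynomial) skips enums whose precision does not fill the mode. The effect, for unsigned char:

  /* [10, 255] is canonicalized to [10, -]: the upper bound equals the
     type maximum and is therefore always satisfied.  */
  static bool before_fold (unsigned char c) { return c >= 10 && c <= 255; }
  static bool after_fold (unsigned char c) { return c >= 10; }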
5352 ranges is always true or always false, consider it to be the same 5557 ranges is always true or always false, consider it to be the same
5353 expression as the other. */ 5558 expression as the other. */
5354 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0)) 5559 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5355 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0, 5560 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5356 in1_p, low1, high1) 5561 in1_p, low1, high1)
5357 && 0 != (tem = (build_range_check (loc, type, 5562 && (tem = (build_range_check (loc, type,
5358 lhs != 0 ? lhs 5563 lhs != 0 ? lhs
5359 : rhs != 0 ? rhs : integer_zero_node, 5564 : rhs != 0 ? rhs : integer_zero_node,
5360 in_p, low, high)))) 5565 in_p, low, high))) != 0)
5361 { 5566 {
5362 if (strict_overflow_p) 5567 if (strict_overflow_p)
5363 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON); 5568 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5364 return or_op ? invert_truthvalue_loc (loc, tem) : tem; 5569 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5365 } 5570 }
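This is the entry point that turns a pair of comparisons into one range check. The classic instance (two point ranges merging into one interval, then a single unsigned comparison):

  #include <cassert>

  /* [0,0] and [1,1] merge into [0,1]; build_range_check then emits a
     single unsigned comparison.  */
  static bool orig_test (int x) { return x == 0 || x == 1; }
  static bool folded_test (int x) { return (unsigned) x <= 1u; }

  int
  main ()
  {
    for (int x = -4; x <= 4; ++x)
      assert (orig_test (x) == folded_test (x));
  }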
5385 else if (!lang_hooks.decls.global_bindings_p () 5590 else if (!lang_hooks.decls.global_bindings_p ()
5386 && !CONTAINS_PLACEHOLDER_P (lhs)) 5591 && !CONTAINS_PLACEHOLDER_P (lhs))
5387 { 5592 {
5388 tree common = save_expr (lhs); 5593 tree common = save_expr (lhs);
5389 5594
5390 if (0 != (lhs = build_range_check (loc, type, common, 5595 if ((lhs = build_range_check (loc, type, common,
5391 or_op ? ! in0_p : in0_p, 5596 or_op ? ! in0_p : in0_p,
5392 low0, high0)) 5597 low0, high0)) != 0
5393 && (0 != (rhs = build_range_check (loc, type, common, 5598 && (rhs = build_range_check (loc, type, common,
5394 or_op ? ! in1_p : in1_p, 5599 or_op ? ! in1_p : in1_p,
5395 low1, high1)))) 5600 low1, high1)) != 0)
5396 { 5601 {
5397 if (strict_overflow_p) 5602 if (strict_overflow_p)
5398 fold_overflow_warning (warnmsg, 5603 fold_overflow_warning (warnmsg,
5399 WARN_STRICT_OVERFLOW_COMPARISON); 5604 WARN_STRICT_OVERFLOW_COMPARISON);
5400 return build2_loc (loc, code == TRUTH_ANDIF_EXPR 5605 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5802 return constant_boolean_node (wanted_code == NE_EXPR, truth_type); 6007 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5803 } 6008 }
5804 } 6009 }
5805 6010
5806 /* If the right sides are not constant, do the same for them. Also, 6011
5807 disallow this optimization if a size or signedness mismatch occurs 6012 disallow this optimization if a size, signedness or storage order
5808 between the left and right sides. */ 6013 mismatch occurs between the left and right sides. */
5809 if (l_const == 0) 6014 if (l_const == 0)
5810 { 6015 {
5811 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize 6016 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5812 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp 6017 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6018 || ll_reversep != lr_reversep
5813 /* Make sure the two fields on the right 6019 /* Make sure the two fields on the right
5814 correspond to the left without being swapped. */ 6020 correspond to the left without being swapped. */
5815 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos) 6021 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5816 return 0; 6022 return 0;
5817 6023
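fold_truth_andor_1 merges comparisons of adjacent fields into a single masked word comparison; the added ll_reversep != lr_reversep test refuses the merge when the two sides disagree on storage order, for the same reason a size or signedness mismatch already did. A rough picture of the transformation being guarded (the struct layout here is an assumption for illustration):

  #include <cstdint>
  #include <cstring>
  #include <cassert>

  struct S { uint8_t x; uint8_t y; uint16_t rest; };

  /* "a.x == b.x && a.y == b.y" can become one 16-bit comparison when
     both fields occupy the same word with the same storage order.  */
  static bool
  merged_compare (const S &a, const S &b)
  {
    uint16_t wa, wb;
    std::memcpy (&wa, &a, 2);
    std::memcpy (&wb, &b, 2);
    return wa == wb;
  }

  int
  main ()
  {
    S a = {1, 2, 0}, b = {1, 2, 9};
    assert (merged_compare (a, b) == (a.x == b.x && a.y == b.y));
  }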
6089 we can, replace this expression with the inner simplification for 6295 we can, replace this expression with the inner simplification for
6090 possible later conversion to our type or some other type. */ 6296
6091 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0 6297 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6092 && TREE_CODE (t2) == INTEGER_CST 6298 && TREE_CODE (t2) == INTEGER_CST
6093 && !TREE_OVERFLOW (t2) 6299 && !TREE_OVERFLOW (t2)
6094 && (0 != (t1 = extract_muldiv (op0, t2, code, 6300 && (t1 = extract_muldiv (op0, t2, code,
6095 code == MULT_EXPR 6301 code == MULT_EXPR ? ctype : NULL_TREE,
6096 ? ctype : NULL_TREE, 6302 strict_overflow_p)) != 0)
6097 strict_overflow_p))))
6098 return t1; 6303 return t1;
6099 break; 6304 break;
6100 6305
6101 case ABS_EXPR: 6306 case ABS_EXPR:
6102 /* If widening the type changes it from signed to unsigned, then we 6307 /* If widening the type changes it from signed to unsigned, then we
6160 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0))) 6365 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6161 /* const_binop may not detect overflow correctly, 6366 /* const_binop may not detect overflow correctly,
6162 so check for it explicitly here. */ 6367 so check for it explicitly here. */
6163 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), 6368 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6164 wi::to_wide (op1)) 6369 wi::to_wide (op1))
6165 && 0 != (t1 = fold_convert (ctype, 6370 && (t1 = fold_convert (ctype,
6166 const_binop (LSHIFT_EXPR, 6371 const_binop (LSHIFT_EXPR, size_one_node,
6167 size_one_node, 6372 op1))) != 0
6168 op1)))
6169 && !TREE_OVERFLOW (t1)) 6373 && !TREE_OVERFLOW (t1))
6170 return extract_muldiv (build2 (tcode == LSHIFT_EXPR 6374 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6171 ? MULT_EXPR : FLOOR_DIV_EXPR, 6375 ? MULT_EXPR : FLOOR_DIV_EXPR,
6172 ctype, 6376 ctype,
6173 fold_convert (ctype, op0), 6377 fold_convert (ctype, op0),
6302 /* If these are the same operation types, we can associate them 6506 /* If these are the same operation types, we can associate them
6303 assuming no overflow. */ 6507 assuming no overflow. */
6304 if (tcode == code) 6508 if (tcode == code)
6305 { 6509 {
6306 bool overflow_p = false; 6510 bool overflow_p = false;
6307 bool overflow_mul_p; 6511 wi::overflow_type overflow_mul;
6308 signop sign = TYPE_SIGN (ctype); 6512 signop sign = TYPE_SIGN (ctype);
6309 unsigned prec = TYPE_PRECISION (ctype); 6513 unsigned prec = TYPE_PRECISION (ctype);
6310 wide_int mul = wi::mul (wi::to_wide (op1, prec), 6514 wide_int mul = wi::mul (wi::to_wide (op1, prec),
6311 wi::to_wide (c, prec), 6515 wi::to_wide (c, prec),
6312 sign, &overflow_mul_p); 6516 sign, &overflow_mul);
6313 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1); 6517 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6314 if (overflow_mul_p 6518 if (overflow_mul
6315 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED)) 6519 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6316 overflow_p = true; 6520 overflow_p = true;
6317 if (!overflow_p) 6521 if (!overflow_p)
6318 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), 6522 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6319 wide_int_to_tree (ctype, mul)); 6523 wide_int_to_tree (ctype, mul));
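wi::overflow_type replaces the old bool so the multiplication reports how it overflowed, but the decision is unchanged: the two constants may only be combined when their product is exact in the type (or the type wraps and the operation is a multiplication). An 8-bit illustration of the hazard for division, with assumed wrapping constants:

  #include <cstdint>
  #include <cassert>

  /* (x / c1) / c2 may become x / (c1 * c2) only when c1 * c2 is exact.
     In 8 bits, 16 * 17 = 272 wraps to 16, and x / 16 is not
     (x / 16) / 17.  */
  int
  main ()
  {
    uint8_t x = 255;
    uint8_t two_step = (x / 16) / 17;          /* 15 / 17 = 0 */
    uint8_t bogus = x / (uint8_t) (16 * 17);   /* 255 / 16 = 15 */
    assert (two_step != bogus);
  }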
6403 tree test, true_value, false_value; 6607 tree test, true_value, false_value;
6404 tree lhs = NULL_TREE; 6608 tree lhs = NULL_TREE;
6405 tree rhs = NULL_TREE; 6609 tree rhs = NULL_TREE;
6406 enum tree_code cond_code = COND_EXPR; 6610 enum tree_code cond_code = COND_EXPR;
6407 6611
6612 /* Do not move possibly trapping operations into the conditional as this
6613 pessimizes code and causes gimplification issues when applied late. */
6614 if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
6615 ANY_INTEGRAL_TYPE_P (type)
6616 && TYPE_OVERFLOW_TRAPS (type), op1))
6617 return NULL_TREE;
6618
6408 if (TREE_CODE (cond) == COND_EXPR 6619 if (TREE_CODE (cond) == COND_EXPR
6409 || TREE_CODE (cond) == VEC_COND_EXPR) 6620 || TREE_CODE (cond) == VEC_COND_EXPR)
6410 { 6621 {
6411 test = TREE_OPERAND (cond, 0); 6622 test = TREE_OPERAND (cond, 0);
6412 true_value = TREE_OPERAND (cond, 1); 6623 true_value = TREE_OPERAND (cond, 1);
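The added guard keeps fold_binary_op_with_conditional_arg from distributing an operation that can trap into the arms of the conditional, which previously produced worse code and confused late gimplification. A sketch of the kind of predicate consulted (this is not operation_could_trap_p's real signature, just its shape):

  /* Signed division can trap on x / 0 or INT_MIN / -1, so
     "(b ? x : y) / z" is now left alone; a plain addition may still be
     distributed into "b ? x + z : y + z".  */
  static bool
  op_could_trap (char op, bool trapping_overflow)
  {
    switch (op)
      {
      case '/':
      case '%':
        return true;               /* divide-by-zero, INT_MIN / -1 */
      case '+': case '-': case '*':
        return trapping_overflow;  /* e.g. -ftrapv integer arithmetic */
      default:
        return false;
      }
  }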
6521 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo, 6732 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
6522 tree *hi, bool *neg_overflow) 6733 tree *hi, bool *neg_overflow)
6523 { 6734 {
6524 tree prod, tmp, type = TREE_TYPE (c1); 6735 tree prod, tmp, type = TREE_TYPE (c1);
6525 signop sign = TYPE_SIGN (type); 6736 signop sign = TYPE_SIGN (type);
6526 bool overflow; 6737 wi::overflow_type overflow;
6527 6738
6528 /* We have to do this the hard way to detect unsigned overflow. 6739 /* We have to do this the hard way to detect unsigned overflow.
6529 prod = int_const_binop (MULT_EXPR, c1, c2); */ 6740 prod = int_const_binop (MULT_EXPR, c1, c2); */
6530 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow); 6741 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
6531 prod = force_fit_type (type, val, -1, overflow); 6742 prod = force_fit_type (type, val, -1, overflow);
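fold_div_compare turns a division compared against a constant into a range test on the dividend; prod and the explicit overflow flag give the low bound, and hi = prod + (c1 - 1) the high one. Worked instance for unsigned x with c1 = 4, c2 = 3:

  #include <cassert>

  /* "x / 4 == 3" holds exactly for x in [12, 15]:
     lo = c1 * c2 = 12, hi = lo + (c1 - 1) = 15.  */
  int
  main ()
  {
    for (unsigned x = 0; x < 64; ++x)
      assert ((x / 4 == 3) == (x >= 12 && x <= 15));
  }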
6931 } 7142 }
6932 7143
6933 if (!same) 7144 if (!same)
6934 return NULL_TREE; 7145 return NULL_TREE;
6935 7146
6936 if (! INTEGRAL_TYPE_P (type) 7147 if (! ANY_INTEGRAL_TYPE_P (type)
6937 || TYPE_OVERFLOW_WRAPS (type) 7148 || TYPE_OVERFLOW_WRAPS (type)
6938 /* We are neither factoring zero nor minus one. */ 7149 /* We are neither factoring zero nor minus one. */
6939 || TREE_CODE (same) == INTEGER_CST) 7150 || TREE_CODE (same) == INTEGER_CST)
6940 return fold_build2_loc (loc, MULT_EXPR, type, 7151 return fold_build2_loc (loc, MULT_EXPR, type,
6941 fold_build2_loc (loc, code, type, 7152 fold_build2_loc (loc, code, type,
6943 fold_convert_loc (loc, type, alt1)), 7154 fold_convert_loc (loc, type, alt1)),
6944 fold_convert_loc (loc, type, same)); 7155 fold_convert_loc (loc, type, same));
6945 7156
6946 /* Same may be zero and thus the operation 'code' may overflow. Likewise 7157 /* Same may be zero and thus the operation 'code' may overflow. Likewise
6947 same may be minus one and thus the multiplication may overflow. Perform 7158 same may be minus one and thus the multiplication may overflow. Perform
6948 the operations in an unsigned type. */ 7159 the sum operation in an unsigned type. */
6949 tree utype = unsigned_type_for (type); 7160 tree utype = unsigned_type_for (type);
6950 tree tem = fold_build2_loc (loc, code, utype, 7161 tree tem = fold_build2_loc (loc, code, utype,
6951 fold_convert_loc (loc, utype, alt0), 7162 fold_convert_loc (loc, utype, alt0),
6952 fold_convert_loc (loc, utype, alt1)); 7163 fold_convert_loc (loc, utype, alt1));
6953 /* If the sum evaluated to a constant that is not -INF, the multiplication 7164
6956 && (wi::to_wide (tem) 7167 && (wi::to_wide (tem)
6957 != wi::min_value (TYPE_PRECISION (utype), SIGNED))) 7168 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
6958 return fold_build2_loc (loc, MULT_EXPR, type, 7169 return fold_build2_loc (loc, MULT_EXPR, type,
6959 fold_convert (type, tem), same); 7170 fold_convert (type, tem), same);
6960 7171
6961 return fold_convert_loc (loc, type, 7172 /* Do not resort to unsigned multiplication because
6962 fold_build2_loc (loc, MULT_EXPR, utype, tem, 7173 we lose the no-overflow property of the expression. */
6963 fold_convert_loc (loc, utype, same))); 7174 return NULL_TREE;
6964 } 7175 }
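The tail of fold_plusminus_mult_expr factors a common non-constant multiplier out of a sum. Because that multiplier might be 0 or -1, the addition is performed in the unsigned counterpart of the type, and the fold now only completes when that sum collapses to a constant other than the minimum value; the old fallback of doing the multiplication unsigned as well is gone, since it destroyed the expression's no-overflow property. The surviving shape, for signed int and a variable common factor s:

  /* "3 * s + 5 * s": the constants are summed in unsigned arithmetic,
     fold to 8 (which is not INT_MIN), and the result is "8 * s".  Had
     the sum not folded to a constant, the transform would now be
     abandoned rather than done with an unsigned multiplication.  */
  static int
  folded (int s)
  {
    return (int) ((unsigned) 3 + (unsigned) 5) * s;   /* i.e. 8 * s */
  }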
6965 7176
6966 /* Subroutine of native_encode_expr. Encode the INTEGER_CST 7177 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6967 specified by EXPR into the buffer PTR of length LEN bytes. 7178 specified by EXPR into the buffer PTR of length LEN bytes.
6968 Return the number of bytes placed in the buffer, or zero 7179 Return the number of bytes placed in the buffer, or zero
7142 upon failure. */ 7353 upon failure. */
7143 7354
7144 static int 7355 static int
7145 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off) 7356 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7146 { 7357 {
7147 unsigned i, count; 7358 unsigned HOST_WIDE_INT i, count;
7148 int size, offset; 7359 int size, offset;
7149 tree itype, elem; 7360 tree itype, elem;
7150 7361
7151 offset = 0; 7362 offset = 0;
7152 count = VECTOR_CST_NELTS (expr); 7363 if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7364 return 0;
7153 itype = TREE_TYPE (TREE_TYPE (expr)); 7365 itype = TREE_TYPE (TREE_TYPE (expr));
7154 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype)); 7366 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7155 for (i = 0; i < count; i++) 7367 for (i = 0; i < count; i++)
7156 { 7368 {
7157 if (off >= size) 7369 if (off >= size)
7164 len - offset, off); 7376 len - offset, off);
7165 if ((off == -1 && res != size) || res == 0) 7377 if ((off == -1 && res != size) || res == 0)
7166 return 0; 7378 return 0;
7167 offset += res; 7379 offset += res;
7168 if (offset >= len) 7380 if (offset >= len)
7169 return offset; 7381 return (off == -1 && i < count - 1) ? 0 : offset;
7170 if (off != -1) 7382 if (off != -1)
7171 off = 0; 7383 off = 0;
7172 } 7384 }
7173 return offset; 7385 return offset;
7174 } 7386 }
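native_encode_vector serializes a constant vector element by element; with variable-length vectors the element count may have no compile-time value, so encoding now fails up front, and the early `offset >= len` exit additionally refuses a whole-value encoding (off == -1) that filled the buffer before reaching the last element. A byte-level sketch of the fixed-length success path (host byte order, illustrative only):

  #include <cstdint>
  #include <cstring>

  /* Encode a 4 x int32 constant vector into PTR/LEN.  Returns the
     bytes written, or 0 when the whole value does not fit -- mirroring
     the "all elements or nothing" rule, not GCC's exact code.  */
  static int
  encode_v4si (const int32_t elts[4], unsigned char *ptr, int len)
  {
    int offset = 0;
    for (int i = 0; i < 4; i++)
      {
        if (offset + 4 > len)
          return 0;                     /* out of room: fail the encode */
        std::memcpy (ptr + offset, &elts[i], 4);
        offset += 4;
      }
    return offset;
  }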
7384 /* Subroutine of native_interpret_expr. Interpret the contents of 7596 /* Subroutine of native_interpret_expr. Interpret the contents of
7385 the buffer PTR of length LEN as a VECTOR_CST of type TYPE. 7597 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7386 If the buffer cannot be interpreted, return NULL_TREE. */ 7598 If the buffer cannot be interpreted, return NULL_TREE. */
7387 7599
7388 static tree 7600 static tree
7389 native_interpret_vector (tree type, const unsigned char *ptr, int len) 7601 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
7390 { 7602 {
7391 tree etype, elem; 7603 tree etype, elem;
7392 int i, size, count; 7604 unsigned int i, size;
7605 unsigned HOST_WIDE_INT count;
7393 7606
7394 etype = TREE_TYPE (type); 7607 etype = TREE_TYPE (type);
7395 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype)); 7608 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7396 count = TYPE_VECTOR_SUBPARTS (type); 7609 if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
7397 if (size * count > len) 7610 || size * count > len)
7398 return NULL_TREE; 7611 return NULL_TREE;
7399 7612
7400 auto_vec<tree, 32> elements (count); 7613 tree_vector_builder elements (type, count, 1);
7401 for (i = 0; i < count; ++i) 7614 for (i = 0; i < count; ++i)
7402 { 7615 {
7403 elem = native_interpret_expr (etype, ptr+(i*size), size); 7616 elem = native_interpret_expr (etype, ptr+(i*size), size);
7404 if (!elem) 7617 if (!elem)
7405 return NULL_TREE; 7618 return NULL_TREE;
7406 elements.quick_push (elem); 7619 elements.quick_push (elem);
7407 } 7620 }
7408 return build_vector (type, elements); 7621 return elements.build ();
7409 } 7622 }
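The interpret side now fills a tree_vector_builder created with count patterns of one element each, i.e. every lane spelled out explicitly; the builder exists because constant vectors can also be stored as a few stepped patterns. A toy model of that encoding idea (illustrative, not the real representation):

  #include <cassert>

  /* With (npatterns, nelts_per_pattern) = (count, 1) all elements are
     explicit.  A stepped constant like {0, 1, 2, 3, ...} could instead
     be encoded as one pattern {e0, e1, e2}, later lanes extrapolating
     with the step e2 - e1.  */
  static int
  stepped_elt (int e0, int e1, int e2, unsigned i)
  {
    if (i == 0)
      return e0;
    return e1 + (int) (i - 1) * (e2 - e1);
  }

  int
  main ()
  {
    for (unsigned i = 0; i < 8; ++i)
      assert (stepped_elt (0, 1, 2, i) == (int) i);
  }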
7410 7623
7411 7624
7412 /* Subroutine of fold_view_convert_expr. Interpret the contents of 7625 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7413 the buffer PTR of length LEN as a constant of type TYPE. For 7626 the buffer PTR of length LEN as a constant of type TYPE. For
7685 C++ upcasting and then accessing the base. */ 7898 C++ upcasting and then accessing the base. */
7686 if (TREE_CODE (op0) == ADDR_EXPR 7899 if (TREE_CODE (op0) == ADDR_EXPR
7687 && POINTER_TYPE_P (type) 7900 && POINTER_TYPE_P (type)
7688 && handled_component_p (TREE_OPERAND (op0, 0))) 7901 && handled_component_p (TREE_OPERAND (op0, 0)))
7689 { 7902 {
7690 HOST_WIDE_INT bitsize, bitpos; 7903 poly_int64 bitsize, bitpos;
7691 tree offset; 7904 tree offset;
7692 machine_mode mode; 7905 machine_mode mode;
7693 int unsignedp, reversep, volatilep; 7906 int unsignedp, reversep, volatilep;
7694 tree base 7907 tree base
7695 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos, 7908 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7696 &offset, &mode, &unsignedp, &reversep, 7909 &offset, &mode, &unsignedp, &reversep,
7697 &volatilep); 7910 &volatilep);
7698 /* If the reference was to a (constant) zero offset, we can use 7911 /* If the reference was to a (constant) zero offset, we can use
7699 the address of the base if it has the same base type 7912 the address of the base if it has the same base type
7700 as the result type and the pointer type is unqualified. */ 7913 as the result type and the pointer type is unqualified. */
7701 if (! offset && bitpos == 0 7914 if (!offset
7915 && known_eq (bitpos, 0)
7702 && (TYPE_MAIN_VARIANT (TREE_TYPE (type)) 7916 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7703 == TYPE_MAIN_VARIANT (TREE_TYPE (base))) 7917 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7704 && TYPE_QUALS (type) == TYPE_UNQUALIFIED) 7918 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7705 return fold_convert_loc (loc, type, 7919 return fold_convert_loc (loc, type,
7706 build_fold_addr_expr_loc (loc, base)); 7920 build_fold_addr_expr_loc (loc, base));
7983 fold_build2_loc (loc, code, type, a00, a10), 8197 fold_build2_loc (loc, code, type, a00, a10),
7984 a01); 8198 a01);
7985 } 8199 }
7986 8200
7987 /* See if we can build a range comparison. */ 8201 /* See if we can build a range comparison. */
7988 if (0 != (tem = fold_range_test (loc, code, type, op0, op1))) 8202 if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
7989 return tem; 8203 return tem;
7990 8204
7991 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR) 8205 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
7992 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR)) 8206 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
7993 { 8207 {
8006 8220
8007 /* Check for the possibility of merging component references. If our 8221 /* Check for the possibility of merging component references. If our
8008 lhs is another similar operation, try to merge its rhs with our 8222 lhs is another similar operation, try to merge its rhs with our
8009 rhs. Then try to merge our lhs and rhs. */ 8223 rhs. Then try to merge our lhs and rhs. */
8010 if (TREE_CODE (arg0) == code 8224 if (TREE_CODE (arg0) == code
8011 && 0 != (tem = fold_truth_andor_1 (loc, code, type, 8225 && (tem = fold_truth_andor_1 (loc, code, type,
8012 TREE_OPERAND (arg0, 1), arg1))) 8226 TREE_OPERAND (arg0, 1), arg1)) != 0)
8013 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem); 8227 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8014 8228
8015 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0) 8229 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8016 return tem; 8230 return tem;
8017 8231
8192 /* Return whether BASE + OFFSET + BITPOS may wrap around the address 8406 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8193 space. This is used to avoid issuing overflow warnings for 8407 space. This is used to avoid issuing overflow warnings for
8194 expressions like &p->x which cannot wrap. */ 8408
8195 8409
8196 static bool 8410 static bool
8197 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos) 8411 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
8198 { 8412 {
8199 if (!POINTER_TYPE_P (TREE_TYPE (base))) 8413 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8200 return true; 8414 return true;
8201 8415
8202 if (bitpos < 0) 8416 if (maybe_lt (bitpos, 0))
8203 return true; 8417 return true;
8204 8418
8205 wide_int wi_offset; 8419 poly_wide_int wi_offset;
8206 int precision = TYPE_PRECISION (TREE_TYPE (base)); 8420 int precision = TYPE_PRECISION (TREE_TYPE (base));
8207 if (offset == NULL_TREE) 8421 if (offset == NULL_TREE)
8208 wi_offset = wi::zero (precision); 8422 wi_offset = wi::zero (precision);
8209 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset)) 8423 else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
8210 return true; 8424 return true;
8211 else 8425 else
8212 wi_offset = wi::to_wide (offset); 8426 wi_offset = wi::to_poly_wide (offset);
8213 8427
8214 bool overflow; 8428 wi::overflow_type overflow;
8215 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision); 8429 poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
8216 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow); 8430 precision);
8431 poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8217 if (overflow) 8432 if (overflow)
8218 return true; 8433 return true;
8219 8434
8220 if (!wi::fits_uhwi_p (total)) 8435 poly_uint64 total_hwi, size;
8436 if (!total.to_uhwi (&total_hwi)
8437 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
8438 &size)
8439 || known_eq (size, 0U))
8221 return true; 8440 return true;
8222 8441
8223 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base))); 8442 if (known_le (total_hwi, size))
8224 if (size <= 0) 8443 return false;
8225 return true;
8226 8444
8227 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an 8445 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8228 array. */ 8446 array. */
8229 if (TREE_CODE (base) == ADDR_EXPR) 8447 if (TREE_CODE (base) == ADDR_EXPR
8230 { 8448 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
8231 HOST_WIDE_INT base_size; 8449 &size)
8232 8450 && maybe_ne (size, 0U)
8233 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0))); 8451 && known_le (total_hwi, size))
8234 if (base_size > 0 && size < base_size) 8452 return false;
8235 size = base_size; 8453
8236 } 8454 return true;
8237
8238 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8239 } 8455 }
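The rewrite computes total = offset + bitpos / BITS_PER_UNIT in polynomial wide arithmetic and declares wraparound impossible as soon as the total provably stays within the pointed-to type's size, falling back to the (possibly larger) object behind an ADDR_EXPR. A scalar sketch of the decision (all names illustrative; the real code works on poly_wide_ints):

  #include <cstdint>

  /* BASE + OFFSET + BITPOS cannot wrap the address space when the byte
     total stays inside the object.  Conservative: unknown sizes and
     negative positions report "may wrap".  */
  static bool
  may_wrap (uint64_t offset_bytes, int64_t bitpos, uint64_t object_size)
  {
    if (bitpos < 0 || object_size == 0)
      return true;
    uint64_t total = offset_bytes + (uint64_t) bitpos / 8;
    if (total < offset_bytes)
      return true;                      /* the addition itself wrapped */
    return total > object_size;
  }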
8240 8456
8241 /* Return a positive integer when the symbol DECL is known to have 8457 /* Return a positive integer when the symbol DECL is known to have
8242 a nonzero address, zero when it's known not to (e.g., it's a weak 8458 a nonzero address, zero when it's known not to (e.g., it's a weak
8243 symbol), and a negative integer when the symbol is not yet in the 8459 symbol), and a negative integer when the symbol is not yet in the
8290 || TREE_CODE (arg1) == ADDR_EXPR 8506 || TREE_CODE (arg1) == ADDR_EXPR
8291 || TREE_CODE (arg0) == POINTER_PLUS_EXPR 8507 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8292 || TREE_CODE (arg1) == POINTER_PLUS_EXPR)) 8508 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8293 { 8509 {
8294 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE; 8510 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8295 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0; 8511 poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
8296 machine_mode mode; 8512 machine_mode mode;
8297 int volatilep, reversep, unsignedp; 8513 int volatilep, reversep, unsignedp;
8298 bool indirect_base0 = false, indirect_base1 = false; 8514 bool indirect_base0 = false, indirect_base1 = false;
8299 8515
8300 /* Get base and offset for the access. Strip ADDR_EXPR for 8516 /* Get base and offset for the access. Strip ADDR_EXPR for
8331 if (offset0 == NULL_TREE || integer_zerop (offset0)) 8547 if (offset0 == NULL_TREE || integer_zerop (offset0))
8332 offset0 = TREE_OPERAND (arg0, 1); 8548 offset0 = TREE_OPERAND (arg0, 1);
8333 else 8549 else
8334 offset0 = size_binop (PLUS_EXPR, offset0, 8550 offset0 = size_binop (PLUS_EXPR, offset0,
8335 TREE_OPERAND (arg0, 1)); 8551 TREE_OPERAND (arg0, 1));
8336 if (TREE_CODE (offset0) == INTEGER_CST) 8552 if (poly_int_tree_p (offset0))
8337 { 8553 {
8338 offset_int tem = wi::sext (wi::to_offset (offset0), 8554 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
8339 TYPE_PRECISION (sizetype)); 8555 TYPE_PRECISION (sizetype));
8340 tem <<= LOG2_BITS_PER_UNIT; 8556 tem <<= LOG2_BITS_PER_UNIT;
8341 tem += bitpos0; 8557 tem += bitpos0;
8342 if (wi::fits_shwi_p (tem)) 8558 if (tem.to_shwi (&bitpos0))
8343 { 8559 offset0 = NULL_TREE;
8344 bitpos0 = tem.to_shwi ();
8345 offset0 = NULL_TREE;
8346 }
8347 } 8560 }
8348 } 8561 }
8349 8562
8350 base1 = arg1; 8563 base1 = arg1;
8351 if (TREE_CODE (arg1) == ADDR_EXPR) 8564 if (TREE_CODE (arg1) == ADDR_EXPR)
8377 if (offset1 == NULL_TREE || integer_zerop (offset1)) 8590 if (offset1 == NULL_TREE || integer_zerop (offset1))
8378 offset1 = TREE_OPERAND (arg1, 1); 8591 offset1 = TREE_OPERAND (arg1, 1);
8379 else 8592 else
8380 offset1 = size_binop (PLUS_EXPR, offset1, 8593 offset1 = size_binop (PLUS_EXPR, offset1,
8381 TREE_OPERAND (arg1, 1)); 8594 TREE_OPERAND (arg1, 1));
8382 if (TREE_CODE (offset1) == INTEGER_CST) 8595 if (poly_int_tree_p (offset1))
8383 { 8596 {
8384 offset_int tem = wi::sext (wi::to_offset (offset1), 8597 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
8385 TYPE_PRECISION (sizetype)); 8598 TYPE_PRECISION (sizetype));
8386 tem <<= LOG2_BITS_PER_UNIT; 8599 tem <<= LOG2_BITS_PER_UNIT;
8387 tem += bitpos1; 8600 tem += bitpos1;
8388 if (wi::fits_shwi_p (tem)) 8601 if (tem.to_shwi (&bitpos1))
8389 { 8602 offset1 = NULL_TREE;
8390 bitpos1 = tem.to_shwi ();
8391 offset1 = NULL_TREE;
8392 }
8393 } 8603 }
8394 } 8604 }
8395 8605
8396 /* If we have equivalent bases we might be able to simplify. */ 8606 /* If we have equivalent bases we might be able to simplify. */
8397 if (indirect_base0 == indirect_base1 8607 if (indirect_base0 == indirect_base1
8398 && operand_equal_p (base0, base1, 8608 && operand_equal_p (base0, base1,
8399 indirect_base0 ? OEP_ADDRESS_OF : 0)) 8609 indirect_base0 ? OEP_ADDRESS_OF : 0))
8400 { 8610 {
8401 /* We can fold this expression to a constant if the non-constant 8611 /* We can fold this expression to a constant if the non-constant
8402 offset parts are equal. */ 8612 offset parts are equal. */
8403 if (offset0 == offset1 8613 if ((offset0 == offset1
8404 || (offset0 && offset1 8614 || (offset0 && offset1
8405 && operand_equal_p (offset0, offset1, 0))) 8615 && operand_equal_p (offset0, offset1, 0)))
8616 && (equality_code
8617 || (indirect_base0
8618 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
8619 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8406 { 8620 {
8407 if (!equality_code 8621 if (!equality_code
8408 && bitpos0 != bitpos1 8622 && maybe_ne (bitpos0, bitpos1)
8409 && (pointer_may_wrap_p (base0, offset0, bitpos0) 8623 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8410 || pointer_may_wrap_p (base1, offset1, bitpos1))) 8624 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8411 fold_overflow_warning (("assuming pointer wraparound does not " 8625 fold_overflow_warning (("assuming pointer wraparound does not "
8412 "occur when comparing P +- C1 with " 8626 "occur when comparing P +- C1 with "
8413 "P +- C2"), 8627 "P +- C2"),
8414 WARN_STRICT_OVERFLOW_CONDITIONAL); 8628 WARN_STRICT_OVERFLOW_CONDITIONAL);
8415 8629
8416 switch (code) 8630 switch (code)
8417 { 8631 {
8418 case EQ_EXPR: 8632 case EQ_EXPR:
8419 return constant_boolean_node (bitpos0 == bitpos1, type); 8633 if (known_eq (bitpos0, bitpos1))
8634 return constant_boolean_node (true, type);
8635 if (known_ne (bitpos0, bitpos1))
8636 return constant_boolean_node (false, type);
8637 break;
8420 case NE_EXPR: 8638 case NE_EXPR:
8421 return constant_boolean_node (bitpos0 != bitpos1, type); 8639 if (known_ne (bitpos0, bitpos1))
8640 return constant_boolean_node (true, type);
8641 if (known_eq (bitpos0, bitpos1))
8642 return constant_boolean_node (false, type);
8643 break;
8422 case LT_EXPR: 8644 case LT_EXPR:
8423 return constant_boolean_node (bitpos0 < bitpos1, type); 8645 if (known_lt (bitpos0, bitpos1))
8646 return constant_boolean_node (true, type);
8647 if (known_ge (bitpos0, bitpos1))
8648 return constant_boolean_node (false, type);
8649 break;
8424 case LE_EXPR: 8650 case LE_EXPR:
8425 return constant_boolean_node (bitpos0 <= bitpos1, type); 8651 if (known_le (bitpos0, bitpos1))
8652 return constant_boolean_node (true, type);
8653 if (known_gt (bitpos0, bitpos1))
8654 return constant_boolean_node (false, type);
8655 break;
8426 case GE_EXPR: 8656 case GE_EXPR:
8427 return constant_boolean_node (bitpos0 >= bitpos1, type); 8657 if (known_ge (bitpos0, bitpos1))
8658 return constant_boolean_node (true, type);
8659 if (known_lt (bitpos0, bitpos1))
8660 return constant_boolean_node (false, type);
8661 break;
8428 case GT_EXPR: 8662 case GT_EXPR:
8429 return constant_boolean_node (bitpos0 > bitpos1, type); 8663 if (known_gt (bitpos0, bitpos1))
8664 return constant_boolean_node (true, type);
8665 if (known_le (bitpos0, bitpos1))
8666 return constant_boolean_node (false, type);
8667 break;
8430 default:; 8668 default:;
8431 } 8669 }
8432 } 8670 }
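With polynomial bit positions every comparison in the switch above now has three possible outcomes, so each case folds to a constant only when the relation is known to hold (or known to fail) for all runtime vector lengths, and otherwise falls through without folding. In the toy degree-1 model from earlier (value = a + b * N, N >= 0; a sketch, not GCC's implementation):

  #include <cstdint>

  struct poly1 { int64_t a, b; };       /* a + b * N, N >= 0 */

  static bool
  known_lt (poly1 x, poly1 y) { return x.a < y.a && x.b <= y.b; }

  static bool
  known_ge (poly1 x, poly1 y) { return x.a >= y.a && x.b >= y.b; }

  /* Mirrors the new LT_EXPR case: 1 = fold to true, 0 = fold to false,
     -1 = keep the comparison.  */
  static int
  fold_lt (poly1 p0, poly1 p1)
  {
    if (known_lt (p0, p1))
      return 1;
    if (known_ge (p0, p1))
      return 0;
    return -1;   /* e.g. 4 vs 2 + N: the answer depends on N */
  }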
8433 /* We can simplify the comparison to a comparison of the variable 8671 /* We can simplify the comparison to a comparison of the variable
8434 offset parts if the constant offset parts are equal. 8672 offset parts if the constant offset parts are equal.
8435 Be careful to use signed sizetype here because otherwise we 8673 Be careful to use signed sizetype here because otherwise we
8436 mess with array offsets in the wrong way. This is possible 8674 mess with array offsets in the wrong way. This is possible
8437 because pointer arithmetic is restricted to remain within an 8675
8438 object and overflow on pointer differences is undefined as of 8676 object and overflow on pointer differences is undefined as of
8439 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */ 8677 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8440 else if (bitpos0 == bitpos1) 8678 else if (known_eq (bitpos0, bitpos1)
8679 && (equality_code
8680 || (indirect_base0
8681 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
8682 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8441 { 8683 {
8442 /* By converting to signed sizetype we cover middle-end pointer 8684 /* By converting to signed sizetype we cover middle-end pointer
8443 arithmetic which operates on unsigned pointer types of size 8685 arithmetic which operates on unsigned pointer types of size
8444 type size and ARRAY_REF offsets which are properly sign or 8686 type size and ARRAY_REF offsets which are properly sign or
8445 zero extended from their type in case it is narrower than 8687 zero extended from their type in case it is narrower than
8464 return fold_build2_loc (loc, code, type, offset0, offset1); 8706 return fold_build2_loc (loc, code, type, offset0, offset1);
8465 } 8707 }
8466 } 8708 }
8467 /* For equal offsets we can simplify to a comparison of the 8709 /* For equal offsets we can simplify to a comparison of the
8468 base addresses. */ 8710 base addresses. */
8469 else if (bitpos0 == bitpos1 8711 else if (known_eq (bitpos0, bitpos1)
8470 && (indirect_base0 8712 && (indirect_base0
8471 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0) 8713 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8472 && (indirect_base1 8714 && (indirect_base1
8473 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1) 8715 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8474 && ((offset0 == offset1) 8716 && ((offset0 == offset1)
8493 /* Avoid folding references to struct members at offset 0 to 8735 /* Avoid folding references to struct members at offset 0 to
8494 prevent tests like '&ptr->firstmember == 0' from getting 8736 prevent tests like '&ptr->firstmember == 0' from getting
8495 eliminated. When ptr is null, although the -> expression 8737 eliminated. When ptr is null, although the -> expression
8496 is strictly speaking invalid, GCC retains it as a matter 8738 is strictly speaking invalid, GCC retains it as a matter
8497 of QoI. See PR c/44555. */ 8739 of QoI. See PR c/44555. */
8498 && (offset0 == NULL_TREE && bitpos0 != 0)) 8740 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
8499 || CONSTANT_CLASS_P (base0)) 8741 || CONSTANT_CLASS_P (base0))
8500 && indirect_base0 8742 && indirect_base0
8501 /* The caller guarantees that when one of the arguments is 8743 /* The caller guarantees that when one of the arguments is
8502 constant (i.e., null in this case) it is second. */ 8744 constant (i.e., null in this case) it is second. */
8503 && integer_zerop (arg1)) 8745 && integer_zerop (arg1))
8591 occur in macroized code. */ 8833 occur in macroized code. */
8592 8834
8593 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST) 8835 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8594 { 8836 {
8595 tree cval1 = 0, cval2 = 0; 8837 tree cval1 = 0, cval2 = 0;
8596 int save_p = 0; 8838
8597 8839 if (twoval_comparison_p (arg0, &cval1, &cval2)
8598 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8599 /* Don't handle degenerate cases here; they should already 8840 /* Don't handle degenerate cases here; they should already
8600 have been handled anyway. */ 8841 have been handled anyway. */
8601 && cval1 != 0 && cval2 != 0 8842 && cval1 != 0 && cval2 != 0
8602 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2)) 8843 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8603 && TREE_TYPE (cval1) == TREE_TYPE (cval2) 8844 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8666 case 7: 8907 case 7:
8667 /* Always true. */ 8908 /* Always true. */
8668 return omit_one_operand_loc (loc, type, integer_one_node, arg0); 8909 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8669 } 8910 }
8670 8911
8671 if (save_p)
8672 {
8673 tem = save_expr (build2 (code, type, cval1, cval2));
8674 protected_set_expr_location (tem, loc);
8675 return tem;
8676 }
8677 return fold_build2_loc (loc, code, type, cval1, cval2); 8912 return fold_build2_loc (loc, code, type, cval1, cval2);
8678 } 8913 }
8679 } 8914 }
8680 } 8915 }
8681 8916
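When both subexpressions compare the same two values, fold collects the admitted orderings into a 3-bit EQ/LT/GT mask; mask 7 admits every ordering and is always true, as in the case above. With the save_p path gone, the surviving results are always rebuilt via fold_build2_loc. The classic "case 7" input:

  #include <cassert>

  /* "a < b || a == b || a > b" covers LT | EQ | GT = 7 and is always
     true for totally ordered operands.  */
  int
  main ()
  {
    int vals[] = { -2, 0, 3 };
    for (int a : vals)
      for (int b : vals)
        assert (a < b || a == b || a > b);
  }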
8725 true if successful. */ 8960 true if successful. */
8726 8961
8727 static bool 8962 static bool
8728 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts) 8963 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
8729 { 8964 {
8730 unsigned int i; 8965 unsigned HOST_WIDE_INT i, nunits;
8731 8966
8732 if (TREE_CODE (arg) == VECTOR_CST) 8967 if (TREE_CODE (arg) == VECTOR_CST
8733 { 8968 && VECTOR_CST_NELTS (arg).is_constant (&nunits))
8734 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i) 8969 {
8970 for (i = 0; i < nunits; ++i)
8735 elts[i] = VECTOR_CST_ELT (arg, i); 8971 elts[i] = VECTOR_CST_ELT (arg, i);
8736 } 8972 }
8737 else if (TREE_CODE (arg) == CONSTRUCTOR) 8973 else if (TREE_CODE (arg) == CONSTRUCTOR)
8738 { 8974 {
8739 constructor_elt *elt; 8975 constructor_elt *elt;
8755 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL 8991 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8756 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful, 8992 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8757 NULL_TREE otherwise. */ 8993 NULL_TREE otherwise. */
8758 8994
8759 static tree 8995 static tree
8760 fold_vec_perm (tree type, tree arg0, tree arg1, vec_perm_indices sel) 8996 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
8761 { 8997 {
8762 unsigned int i; 8998 unsigned int i;
8999 unsigned HOST_WIDE_INT nelts;
8763 bool need_ctor = false; 9000 bool need_ctor = false;
8764 9001
8765 unsigned int nelts = sel.length (); 9002 if (!sel.length ().is_constant (&nelts))
8766 gcc_assert (TYPE_VECTOR_SUBPARTS (type) == nelts 9003 return NULL_TREE;
8767 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts 9004 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
8768 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts); 9005 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
9006 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
8769 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type) 9007 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8770 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type)) 9008 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8771 return NULL_TREE; 9009 return NULL_TREE;
8772 9010
8773 tree *in_elts = XALLOCAVEC (tree, nelts * 2); 9011 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
8774 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts) 9012 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
8775 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts)) 9013 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
8776 return NULL_TREE; 9014 return NULL_TREE;
8777 9015
8778 auto_vec<tree, 32> out_elts (nelts); 9016 tree_vector_builder out_elts (type, nelts, 1);
8779 for (i = 0; i < nelts; i++) 9017 for (i = 0; i < nelts; i++)
8780 { 9018 {
8781 if (!CONSTANT_CLASS_P (in_elts[sel[i]])) 9019 HOST_WIDE_INT index;
9020 if (!sel[i].is_constant (&index))
9021 return NULL_TREE;
9022 if (!CONSTANT_CLASS_P (in_elts[index]))
8782 need_ctor = true; 9023 need_ctor = true;
8783 out_elts.quick_push (unshare_expr (in_elts[sel[i]])); 9024 out_elts.quick_push (unshare_expr (in_elts[index]));
8784 } 9025 }
8785 9026
8786 if (need_ctor) 9027 if (need_ctor)
8787 { 9028 {
8788 vec<constructor_elt, va_gc> *v; 9029 vec<constructor_elt, va_gc> *v;
8790 for (i = 0; i < nelts; i++) 9031 for (i = 0; i < nelts; i++)
8791 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]); 9032 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
8792 return build_constructor (type, v); 9033 return build_constructor (type, v);
8793 } 9034 }
8794 else 9035 else
8795 return build_vector (type, out_elts); 9036 return out_elts.build ();
8796 } 9037 }
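fold_vec_perm applies a constant selector to two constant inputs; it now declines unless the element count and every selector index are compile-time constants. The underlying permute semantics, as a plain sketch:

  #include <cassert>

  /* result[i] = in[sel[i]], where "in" is the 2 * nelts concatenation
     of arg0 and arg1.  */
  int
  main ()
  {
    const int nelts = 4;
    int arg0[nelts] = { 10, 11, 12, 13 };
    int arg1[nelts] = { 20, 21, 22, 23 };
    int in[2 * nelts], out[nelts];
    for (int i = 0; i < nelts; ++i)
      {
        in[i] = arg0[i];
        in[nelts + i] = arg1[i];
      }
    unsigned sel[nelts] = { 0, 4, 1, 5 };   /* interleave low halves */
    for (int i = 0; i < nelts; ++i)
      out[i] = in[sel[i]];
    assert (out[0] == 10 && out[1] == 20 && out[2] == 11 && out[3] == 21);
  }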
8797 9038
8798 /* Try to fold a pointer difference of type TYPE between two address expressions of 9039
8799 array references AREF0 and AREF1 using location LOC. Return a 9040 array references AREF0 and AREF1 using location LOC. Return a
8800 simplified expression for the difference or NULL_TREE. */ 9041 simplified expression for the difference or NULL_TREE. */
8801 9042
8802 static tree 9043 static tree
8803 fold_addr_of_array_ref_difference (location_t loc, tree type, 9044 fold_addr_of_array_ref_difference (location_t loc, tree type,
8804 tree aref0, tree aref1) 9045 tree aref0, tree aref1,
9046 bool use_pointer_diff)
8805 { 9047 {
8806 tree base0 = TREE_OPERAND (aref0, 0); 9048 tree base0 = TREE_OPERAND (aref0, 0);
8807 tree base1 = TREE_OPERAND (aref1, 0); 9049 tree base1 = TREE_OPERAND (aref1, 0);
8808 tree base_offset = build_int_cst (type, 0); 9050 tree base_offset = build_int_cst (type, 0);
8809 9051
8811 are pointer indirections compute the difference of the pointers. 9053 are pointer indirections compute the difference of the pointers.
8812 If the bases are equal, we are set. */ 9054 If the bases are equal, we are set. */
8813 if ((TREE_CODE (base0) == ARRAY_REF 9055 if ((TREE_CODE (base0) == ARRAY_REF
8814 && TREE_CODE (base1) == ARRAY_REF 9056 && TREE_CODE (base1) == ARRAY_REF
8815 && (base_offset 9057 && (base_offset
8816 = fold_addr_of_array_ref_difference (loc, type, base0, base1))) 9058 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
9059 use_pointer_diff)))
8817 || (INDIRECT_REF_P (base0) 9060 || (INDIRECT_REF_P (base0)
8818 && INDIRECT_REF_P (base1) 9061 && INDIRECT_REF_P (base1)
8819 && (base_offset 9062 && (base_offset
8820 = fold_binary_loc (loc, MINUS_EXPR, type, 9063 = use_pointer_diff
8821 fold_convert (type, TREE_OPERAND (base0, 0)), 9064 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
8822 fold_convert (type, 9065 TREE_OPERAND (base0, 0),
8823 TREE_OPERAND (base1, 0))))) 9066 TREE_OPERAND (base1, 0))
9067 : fold_binary_loc (loc, MINUS_EXPR, type,
9068 fold_convert (type,
9069 TREE_OPERAND (base0, 0)),
9070 fold_convert (type,
9071 TREE_OPERAND (base1, 0)))))
8824 || operand_equal_p (base0, base1, OEP_ADDRESS_OF)) 9072 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
8825 { 9073 {
8826 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1)); 9074 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8827 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1)); 9075 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8828 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0)); 9076 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
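The use_pointer_diff flag threads through the recursion so that, for the new POINTER_DIFF_EXPR, the difference of the two base pointers is itself taken as a pointer difference instead of via conversions to the integer type. The fold being performed:

  #include <cassert>
  #include <cstddef>

  /* "&a[i] - &a[j]" folds to the index difference "i - j".  */
  int
  main ()
  {
    int a[16];
    int i = 9, j = 4;
    ptrdiff_t diff = &a[i] - &a[j];
    assert (diff == i - j);
  }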
8842 exact_inverse (tree type, tree cst) 9090 exact_inverse (tree type, tree cst)
8843 { 9091 {
8844 REAL_VALUE_TYPE r; 9092 REAL_VALUE_TYPE r;
8845 tree unit_type; 9093 tree unit_type;
8846 machine_mode mode; 9094 machine_mode mode;
8847 unsigned vec_nelts, i;
8848 9095
8849 switch (TREE_CODE (cst)) 9096 switch (TREE_CODE (cst))
8850 { 9097 {
8851 case REAL_CST: 9098 case REAL_CST:
8852 r = TREE_REAL_CST (cst); 9099 r = TREE_REAL_CST (cst);
8856 9103
8857 return NULL_TREE; 9104 return NULL_TREE;
8858 9105
8859 case VECTOR_CST: 9106 case VECTOR_CST:
8860 { 9107 {
8861 vec_nelts = VECTOR_CST_NELTS (cst);
8862 unit_type = TREE_TYPE (type); 9108 unit_type = TREE_TYPE (type);
8863 mode = TYPE_MODE (unit_type); 9109 mode = TYPE_MODE (unit_type);
8864 9110
8865 auto_vec<tree, 32> elts (vec_nelts); 9111 tree_vector_builder elts;
8866 for (i = 0; i < vec_nelts; i++) 9112 if (!elts.new_unary_operation (type, cst, false))
9113 return NULL_TREE;
9114 unsigned int count = elts.encoded_nelts ();
9115 for (unsigned int i = 0; i < count; ++i)
8867 { 9116 {
8868 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i)); 9117 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
8869 if (!exact_real_inverse (mode, &r)) 9118 if (!exact_real_inverse (mode, &r))
8870 return NULL_TREE; 9119 return NULL_TREE;
8871 elts.quick_push (build_real (unit_type, r)); 9120 elts.quick_push (build_real (unit_type, r));
8872 } 9121 }
8873 9122
8874 return build_vector (type, elts); 9123 return elts.build ();
8875 } 9124 }
8876 9125
8877 default: 9126 default:
8878 return NULL_TREE; 9127 return NULL_TREE;
8879 } 9128 }
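exact_inverse only produces a constant when the reciprocal is exactly representable, which is what allows x / c to become x * (1 / c) with bit-identical results; the vector case now iterates over the builder's encoded elements instead of every lane. Which constants qualify:

  #include <cassert>

  /* 1/4 = 0.25 is an exact binary fraction, so "x / 4.0" may become
     "x * 0.25"; 1/3 has no exact double representation, so "x / 3.0"
     is left alone.  */
  int
  main ()
  {
    double x = 7.0;
    assert (x / 4.0 == x * 0.25);   /* exact reciprocal: always equal */
    /* x / 3.0 == x * (1.0 / 3.0) need not hold for every x.  */
  }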
9005 9254
9006 bool 9255 bool
9007 expr_not_equal_to (tree t, const wide_int &w) 9256 expr_not_equal_to (tree t, const wide_int &w)
9008 { 9257 {
9009 wide_int min, max, nz; 9258 wide_int min, max, nz;
9010 value_range_type rtype; 9259 value_range_kind rtype;
9011 switch (TREE_CODE (t)) 9260 switch (TREE_CODE (t))
9012 { 9261 {
9013 case INTEGER_CST: 9262 case INTEGER_CST:
9014 return wi::to_wide (t) != w; 9263 return wi::to_wide (t) != w;
9015 9264
9044 OP0 and OP1. LOC is the location of the resulting expression. 9293 OP0 and OP1. LOC is the location of the resulting expression.
9045 Return the folded expression if folding is successful. Otherwise, 9294 Return the folded expression if folding is successful. Otherwise,
9046 return NULL_TREE. */ 9295 return NULL_TREE. */
9047 9296
9048 tree 9297 tree
9049 fold_binary_loc (location_t loc, 9298 fold_binary_loc (location_t loc, enum tree_code code, tree type,
9050 enum tree_code code, tree type, tree op0, tree op1) 9299 tree op0, tree op1)
9051 { 9300 {
9052 enum tree_code_class kind = TREE_CODE_CLASS (code); 9301 enum tree_code_class kind = TREE_CODE_CLASS (code);
9053 tree arg0, arg1, tem; 9302 tree arg0, arg1, tem;
9054 tree t1 = NULL_TREE; 9303 tree t1 = NULL_TREE;
9055 bool strict_overflow_p; 9304 bool strict_overflow_p;
9132 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to 9381 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9133 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */ 9382 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9134 9383
9135 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR 9384 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9136 || code == EQ_EXPR || code == NE_EXPR) 9385 || code == EQ_EXPR || code == NE_EXPR)
9137 && TREE_CODE (type) != VECTOR_TYPE 9386 && !VECTOR_TYPE_P (TREE_TYPE (arg0))
9138 && ((truth_value_p (TREE_CODE (arg0)) 9387 && ((truth_value_p (TREE_CODE (arg0))
9139 && (truth_value_p (TREE_CODE (arg1)) 9388 && (truth_value_p (TREE_CODE (arg1))
9140 || (TREE_CODE (arg1) == BIT_AND_EXPR 9389 || (TREE_CODE (arg1) == BIT_AND_EXPR
9141 && integer_onep (TREE_OPERAND (arg1, 1))))) 9390 && integer_onep (TREE_OPERAND (arg1, 1)))))
9142 || (truth_value_p (TREE_CODE (arg1)) 9391 || (truth_value_p (TREE_CODE (arg1))
9217 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */ 9466 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9218 if (TREE_CODE (arg0) == ADDR_EXPR 9467 if (TREE_CODE (arg0) == ADDR_EXPR
9219 && handled_component_p (TREE_OPERAND (arg0, 0))) 9468 && handled_component_p (TREE_OPERAND (arg0, 0)))
9220 { 9469 {
9221 tree base; 9470 tree base;
9222 HOST_WIDE_INT coffset; 9471 poly_int64 coffset;
9223 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0), 9472 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9224 &coffset); 9473 &coffset);
9225 if (!base) 9474 if (!base)
9226 return NULL_TREE; 9475 return NULL_TREE;
9227 return fold_build2 (MEM_REF, type, 9476 return fold_build2 (MEM_REF, type,
9461 tree110 = TREE_OPERAND (tree11, 0); 9710 tree110 = TREE_OPERAND (tree11, 0);
9462 tree111 = TREE_OPERAND (tree11, 1); 9711 tree111 = TREE_OPERAND (tree11, 1);
9463 STRIP_NOPS (tree110); 9712 STRIP_NOPS (tree110);
9464 STRIP_NOPS (tree111); 9713 STRIP_NOPS (tree111);
9465 if (TREE_CODE (tree110) == INTEGER_CST 9714 if (TREE_CODE (tree110) == INTEGER_CST
9466 && 0 == compare_tree_int (tree110, 9715 && compare_tree_int (tree110,
9467 element_precision (rtype)) 9716 element_precision (rtype)) == 0
9468 && operand_equal_p (tree01, tree111, 0)) 9717 && operand_equal_p (tree01, tree111, 0))
9469 { 9718 {
9470 tem = build2_loc (loc, (code0 == LSHIFT_EXPR 9719 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
9471 ? LROTATE_EXPR : RROTATE_EXPR), 9720 ? LROTATE_EXPR : RROTATE_EXPR),
9472 rtype, TREE_OPERAND (arg0, 0), 9721 rtype, TREE_OPERAND (arg0, 0),
9483 tree111 = TREE_OPERAND (tree11, 1); 9732 tree111 = TREE_OPERAND (tree11, 1);
9484 STRIP_NOPS (tree110); 9733 STRIP_NOPS (tree110);
9485 STRIP_NOPS (tree111); 9734 STRIP_NOPS (tree111);
9486 if (TREE_CODE (tree110) == NEGATE_EXPR 9735 if (TREE_CODE (tree110) == NEGATE_EXPR
9487 && TREE_CODE (tree111) == INTEGER_CST 9736 && TREE_CODE (tree111) == INTEGER_CST
9488 && 0 == compare_tree_int (tree111, 9737 && compare_tree_int (tree111,
9489 element_precision (rtype) - 1) 9738 element_precision (rtype) - 1) == 0
9490 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0)) 9739 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
9491 { 9740 {
9492 tem = build2_loc (loc, (code0 == LSHIFT_EXPR 9741 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
9493 ? LROTATE_EXPR : RROTATE_EXPR), 9742 ? LROTATE_EXPR : RROTATE_EXPR),
9494 rtype, TREE_OPERAND (arg0, 0), 9743 rtype, TREE_OPERAND (arg0, 0),
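These two blocks recognize the open-coded rotate idioms: a shift pair whose counts sum to the precision, and (in the second block) the branchless variant that masks a negated count with precision - 1. The source-level idiom being matched, for 32 bits:

  #include <cstdint>
  #include <cassert>

  /* Open-coded rotate-left; the "-n & 31" form is the negated-count
     variant matched by the second block and is well defined even for
     n == 0.  */
  static uint32_t
  rotl32 (uint32_t x, unsigned n)
  {
    return (x << (n & 31)) | (x >> (-n & 31));
  }

  int
  main ()
  {
    assert (rotl32 (0x80000001u, 1) == 0x00000003u);
    assert (rotl32 (0x12345678u, 0) == 0x12345678u);
  }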
9530 if (code == MINUS_EXPR) 9779 if (code == MINUS_EXPR)
9531 code = PLUS_EXPR; 9780 code = PLUS_EXPR;
9532 9781
9533 /* With undefined overflow prefer doing association in a type 9782 /* With undefined overflow prefer doing association in a type
9534 which wraps on overflow, if that is one of the operand types. */ 9783 which wraps on overflow, if that is one of the operand types. */
9535 if (POINTER_TYPE_P (type) 9784 if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
9536 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type))) 9785 && !TYPE_OVERFLOW_WRAPS (type))
9537 { 9786 {
9538 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0)) 9787 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9539 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))) 9788 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9540 atype = TREE_TYPE (arg0); 9789 atype = TREE_TYPE (arg0);
9541 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1)) 9790 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9544 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type)); 9793 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9545 } 9794 }
9546 9795
9547 /* With undefined overflow we can only associate constants with one 9796 /* With undefined overflow we can only associate constants with one
9548 variable, and constants whose association doesn't overflow. */ 9797 variable, and constants whose association doesn't overflow. */
9549 if (POINTER_TYPE_P (atype) 9798 if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
9550 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype))) 9799 && !TYPE_OVERFLOW_WRAPS (atype))
9551 { 9800 {
9552 if ((var0 && var1) || (minus_var0 && minus_var1)) 9801 if ((var0 && var1) || (minus_var0 && minus_var1))
9553 { 9802 {
9554 /* ??? If split_tree would handle NEGATE_EXPR we could 9803 /* ??? If split_tree would handle NEGATE_EXPR we could
9555 simply reject these cases and the allowed cases would 9804 simply reject these cases and the allowed cases would
9592 } 9841 }
9593 9842
9594 /* Only do something if we found more than two objects. Otherwise, 9843 /* Only do something if we found more than two objects. Otherwise,
9595 nothing has changed and we risk infinite recursion. */ 9844 nothing has changed and we risk infinite recursion. */
9596 if (ok 9845 if (ok
9597 && (2 < ((var0 != 0) + (var1 != 0) 9846 && ((var0 != 0) + (var1 != 0)
9598 + (minus_var0 != 0) + (minus_var1 != 0) 9847 + (minus_var0 != 0) + (minus_var1 != 0)
9599 + (con0 != 0) + (con1 != 0) 9848 + (con0 != 0) + (con1 != 0)
9600 + (minus_con0 != 0) + (minus_con1 != 0) 9849 + (minus_con0 != 0) + (minus_con1 != 0)
9601 + (lit0 != 0) + (lit1 != 0) 9850 + (lit0 != 0) + (lit1 != 0)
9602 + (minus_lit0 != 0) + (minus_lit1 != 0)))) 9851 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
9603 { 9852 {
9604 var0 = associate_trees (loc, var0, var1, code, atype); 9853 var0 = associate_trees (loc, var0, var1, code, atype);
9605 minus_var0 = associate_trees (loc, minus_var0, minus_var1, 9854 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
9606 code, atype); 9855 code, atype);
9607 con0 = associate_trees (loc, con0, con1, code, atype); 9856 con0 = associate_trees (loc, con0, con1, code, atype);
9692 } 9941 }
9693 } 9942 }
9694 9943
9695 return NULL_TREE; 9944 return NULL_TREE;
9696 9945
9946 case POINTER_DIFF_EXPR:
9697 case MINUS_EXPR: 9947 case MINUS_EXPR:
9948 /* Fold &a[i] - &a[j] to i-j. */
9949 if (TREE_CODE (arg0) == ADDR_EXPR
9950 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9951 && TREE_CODE (arg1) == ADDR_EXPR
9952 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9953 {
9954 tree tem = fold_addr_of_array_ref_difference (loc, type,
9955 TREE_OPERAND (arg0, 0),
9956 TREE_OPERAND (arg1, 0),
9957 code
9958 == POINTER_DIFF_EXPR);
9959 if (tem)
9960 return tem;
9961 }
9962
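The &a[i] - &a[j] fold now runs for POINTER_DIFF_EXPR as well, with the new flag telling fold_addr_of_array_ref_difference which flavour it is handling. A concrete instance (hypothetical function, not from the patch):

    long
    elt_distance (long i, long j)
    {
      static int a[16];
      /* &a[i] - &a[j] folds to i - j; no addresses are materialized.
         (Valid C only while both indexes stay within the array.)  */
      return &a[i] - &a[j];
    }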
9963 /* Further transformations are not for pointers. */
9964 if (code == POINTER_DIFF_EXPR)
9965 return NULL_TREE;
9966
9698 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */ 9967 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9699 if (TREE_CODE (arg0) == NEGATE_EXPR 9968 if (TREE_CODE (arg0) == NEGATE_EXPR
9700 && negate_expr_p (op1)) 9969 && negate_expr_p (op1)
9701 return fold_build2_loc (loc, MINUS_EXPR, type, 9970 /* If arg0 is e.g. unsigned int and type is int, then this could
9702 negate_expr (op1), 9971 introduce UB, because if A is INT_MIN at runtime, the original
9703 fold_convert_loc (loc, type, 9972 expression can be well defined while the latter is not.
9973 See PR83269. */
9974 && !(ANY_INTEGRAL_TYPE_P (type)
9975 && TYPE_OVERFLOW_UNDEFINED (type)
9976 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9977 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
9978 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
9979 fold_convert_loc (loc, type,
9704 TREE_OPERAND (arg0, 0))); 9980 TREE_OPERAND (arg0, 0)));
9705 9981
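The new ANY_INTEGRAL_TYPE_P guard is the PR83269 fix referenced in the comment: the swap must not move a well-defined unsigned negation into a signed subtraction. A sketch of the problematic shape, assuming 32-bit int (illustrative function):

    int
    mixed (unsigned int a, int b)
    {
      /* With a == 0x80000000 and b == 0, (int) -a - b evaluates to
         INT_MIN without overflow, but the old rewrite (-b) - (int) a
         would compute 0 - INT_MIN, which overflows.  The fold is now
         suppressed for this signedness mix.  */
      return (int) -a - b;
    }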
9706 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to 9982 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9707 __complex__ ( x, -y ). This is not the same for SNaNs or if 9983 __complex__ ( x, -y ). This is not the same for SNaNs or if
9708 signed zeros are involved. */ 9984 signed zeros are involved. */
9750 || INTEGRAL_TYPE_P (type))) 10026 || INTEGRAL_TYPE_P (type)))
9751 return fold_build2_loc (loc, PLUS_EXPR, type, 10027 return fold_build2_loc (loc, PLUS_EXPR, type,
9752 fold_convert_loc (loc, type, arg0), 10028 fold_convert_loc (loc, type, arg0),
9753 negate_expr (op1)); 10029 negate_expr (op1));
9754 10030
9755 /* Fold &a[i] - &a[j] to i-j. */
9756 if (TREE_CODE (arg0) == ADDR_EXPR
9757 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9758 && TREE_CODE (arg1) == ADDR_EXPR
9759 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9760 {
9761 tree tem = fold_addr_of_array_ref_difference (loc, type,
9762 TREE_OPERAND (arg0, 0),
9763 TREE_OPERAND (arg1, 0));
9764 if (tem)
9765 return tem;
9766 }
9767
9768 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or 10031 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9769 one. Make sure the type is not saturating and has the signedness of 10032 one. Make sure the type is not saturating and has the signedness of
9770 the stripped operands, as fold_plusminus_mult_expr will re-associate. 10033 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9771 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */ 10034 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9772 if ((TREE_CODE (arg0) == MULT_EXPR 10035 if ((TREE_CODE (arg0) == MULT_EXPR
9797 fold_convert_loc (loc, type, 10060 fold_convert_loc (loc, type,
9798 negate_expr (op0)), tem); 10061 negate_expr (op0)), tem);
9799 10062
9800 strict_overflow_p = false; 10063 strict_overflow_p = false;
9801 if (TREE_CODE (arg1) == INTEGER_CST 10064 if (TREE_CODE (arg1) == INTEGER_CST
9802 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE, 10065 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9803 &strict_overflow_p))) 10066 &strict_overflow_p)) != 0)
9804 { 10067 {
9805 if (strict_overflow_p) 10068 if (strict_overflow_p)
9806 fold_overflow_warning (("assuming signed overflow does not " 10069 fold_overflow_warning (("assuming signed overflow does not "
9807 "occur when simplifying " 10070 "occur when simplifying "
9808 "multiplication"), 10071 "multiplication"),
10000 return fold_build2_loc (loc, code, type, op0, 10263 return fold_build2_loc (loc, code, type, op0,
10001 wide_int_to_tree (type, masked)); 10264 wide_int_to_tree (type, masked));
10002 } 10265 }
10003 } 10266 }
10004 10267
10005 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10006 ((A & N) + B) & M -> (A + B) & M
10007 Similarly if (N & M) == 0,
10008 ((A | N) + B) & M -> (A + B) & M
10009 and for - instead of + (or unary - instead of +)
10010 and/or ^ instead of |.
10011 If B is constant and (B & M) == 0, fold into A & M. */
10012 if (TREE_CODE (arg1) == INTEGER_CST)
10013 {
10014 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
10015 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10016 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10017 && (TREE_CODE (arg0) == PLUS_EXPR
10018 || TREE_CODE (arg0) == MINUS_EXPR
10019 || TREE_CODE (arg0) == NEGATE_EXPR)
10020 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10021 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10022 {
10023 tree pmop[2];
10024 int which = 0;
10025 wide_int cst0;
10026
10027 /* Now we know that arg0 is (C + D) or (C - D) or
10028 -C and arg1 (M) is == (1LL << cst) - 1.
10029 Store C into PMOP[0] and D into PMOP[1]. */
10030 pmop[0] = TREE_OPERAND (arg0, 0);
10031 pmop[1] = NULL;
10032 if (TREE_CODE (arg0) != NEGATE_EXPR)
10033 {
10034 pmop[1] = TREE_OPERAND (arg0, 1);
10035 which = 1;
10036 }
10037
10038 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10039 which = -1;
10040
10041 for (; which >= 0; which--)
10042 switch (TREE_CODE (pmop[which]))
10043 {
10044 case BIT_AND_EXPR:
10045 case BIT_IOR_EXPR:
10046 case BIT_XOR_EXPR:
10047 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10048 != INTEGER_CST)
10049 break;
10050 cst0 = wi::to_wide (TREE_OPERAND (pmop[which], 1)) & cst1;
10051 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10052 {
10053 if (cst0 != cst1)
10054 break;
10055 }
10056 else if (cst0 != 0)
10057 break;
10058 /* If C or D is of the form (A & N) where
10059 (N & M) == M, or of the form (A | N) or
10060 (A ^ N) where (N & M) == 0, replace it with A. */
10061 pmop[which] = TREE_OPERAND (pmop[which], 0);
10062 break;
10063 case INTEGER_CST:
10064 /* If C or D is a N where (N & M) == 0, it can be
10065 omitted (assumed 0). */
10066 if ((TREE_CODE (arg0) == PLUS_EXPR
10067 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10068 && (cst1 & wi::to_wide (pmop[which])) == 0)
10069 pmop[which] = NULL;
10070 break;
10071 default:
10072 break;
10073 }
10074
10075 /* Only build anything new if we optimized one or both arguments
10076 above. */
10077 if (pmop[0] != TREE_OPERAND (arg0, 0)
10078 || (TREE_CODE (arg0) != NEGATE_EXPR
10079 && pmop[1] != TREE_OPERAND (arg0, 1)))
10080 {
10081 tree utype = TREE_TYPE (arg0);
10082 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10083 {
10084 /* Perform the operations in a type that has defined
10085 overflow behavior. */
10086 utype = unsigned_type_for (TREE_TYPE (arg0));
10087 if (pmop[0] != NULL)
10088 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10089 if (pmop[1] != NULL)
10090 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10091 }
10092
10093 if (TREE_CODE (arg0) == NEGATE_EXPR)
10094 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10095 else if (TREE_CODE (arg0) == PLUS_EXPR)
10096 {
10097 if (pmop[0] != NULL && pmop[1] != NULL)
10098 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10099 pmop[0], pmop[1]);
10100 else if (pmop[0] != NULL)
10101 tem = pmop[0];
10102 else if (pmop[1] != NULL)
10103 tem = pmop[1];
10104 else
10105 return build_int_cst (type, 0);
10106 }
10107 else if (pmop[0] == NULL)
10108 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10109 else
10110 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10111 pmop[0], pmop[1]);
10112 /* TEM is now the new binary +, - or unary - replacement. */
10113 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10114 fold_convert_loc (loc, utype, arg1));
10115 return fold_convert_loc (loc, type, tem);
10116 }
10117 }
10118 }
10119
10120 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */ 10268 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10121 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR 10269 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10122 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0)))) 10270 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10123 { 10271 {
10124 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0))); 10272 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10231 fold_convert (type, arg0), 10379 fold_convert (type, arg0),
10232 fold_convert (type, arg1)); 10380 fold_convert (type, arg1));
10233 10381
10234 strict_overflow_p = false; 10382 strict_overflow_p = false;
10235 if (TREE_CODE (arg1) == INTEGER_CST 10383 if (TREE_CODE (arg1) == INTEGER_CST
10236 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE, 10384 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10237 &strict_overflow_p))) 10385 &strict_overflow_p)) != 0)
10238 { 10386 {
10239 if (strict_overflow_p) 10387 if (strict_overflow_p)
10240 fold_overflow_warning (("assuming signed overflow does not occur " 10388 fold_overflow_warning (("assuming signed overflow does not occur "
10241 "when simplifying division"), 10389 "when simplifying division"),
10242 WARN_STRICT_OVERFLOW_MISC); 10390 WARN_STRICT_OVERFLOW_MISC);
10249 case FLOOR_MOD_EXPR: 10397 case FLOOR_MOD_EXPR:
10250 case ROUND_MOD_EXPR: 10398 case ROUND_MOD_EXPR:
10251 case TRUNC_MOD_EXPR: 10399 case TRUNC_MOD_EXPR:
10252 strict_overflow_p = false; 10400 strict_overflow_p = false;
10253 if (TREE_CODE (arg1) == INTEGER_CST 10401 if (TREE_CODE (arg1) == INTEGER_CST
10254 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE, 10402 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10255 &strict_overflow_p))) 10403 &strict_overflow_p)) != 0)
10256 { 10404 {
10257 if (strict_overflow_p) 10405 if (strict_overflow_p)
10258 fold_overflow_warning (("assuming signed overflow does not occur " 10406 fold_overflow_warning (("assuming signed overflow does not occur "
10259 "when simplifying modulus"), 10407 "when simplifying modulus"),
10260 WARN_STRICT_OVERFLOW_MISC); 10408 WARN_STRICT_OVERFLOW_MISC);
10510 build_int_cst (TREE_TYPE (arg0), 1)); 10658 build_int_cst (TREE_TYPE (arg0), 1));
10511 return fold_build2_loc (loc, code, type, 10659 return fold_build2_loc (loc, code, type,
10512 fold_convert_loc (loc, TREE_TYPE (arg1), tem), 10660 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10513 arg1); 10661 arg1);
10514 } 10662 }
10515 }
10516
10517 /* If this is an NE or EQ comparison of zero against the result of a
10518 signed MOD operation whose second operand is a power of 2, make
10519 the MOD operation unsigned since it is simpler and equivalent. */
10520 if (integer_zerop (arg1)
10521 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10522 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10523 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10524 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10525 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10526 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10527 {
10528 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10529 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10530 fold_convert_loc (loc, newtype,
10531 TREE_OPERAND (arg0, 0)),
10532 fold_convert_loc (loc, newtype,
10533 TREE_OPERAND (arg0, 1)));
10534
10535 return fold_build2_loc (loc, code, type, newmod,
10536 fold_convert_loc (loc, newtype, arg1));
10537 } 10663 }
10538 10664
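The block deleted above rewrote a signed MOD compared against zero into an unsigned one; the diff only shows its removal from fold_binary (presumably the equivalent simplification lives elsewhere, e.g. in match.pd). What it did, in source terms (illustrative function):

    int
    divisible_by_4 (int x)
    {
      /* For a power-of-two divisor compared against zero, the sign of
         the remainder is irrelevant, so x % 4 == 0 can become the
         cheaper (unsigned) x % 4U == 0, i.e. ((unsigned) x & 3) == 0.  */
      return x % 4 == 0;
    }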
10539 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where 10665 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10540 C1 is a valid shift constant, and C2 is a power of two, i.e. 10666 C1 is a valid shift constant, and C2 is a power of two, i.e.
10541 a single bit. */ 10667 a single bit. */
10604 && integer_zerop (arg1)) 10730 && integer_zerop (arg1))
10605 { 10731 {
10606 tree fndecl = get_callee_fndecl (arg0); 10732 tree fndecl = get_callee_fndecl (arg0);
10607 10733
10608 if (fndecl 10734 if (fndecl
10609 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL 10735 && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
10610 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10611 && call_expr_nargs (arg0) == 1 10736 && call_expr_nargs (arg0) == 1
10612 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE) 10737 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10613 { 10738 {
10614 tree iref = build_fold_indirect_ref_loc (loc, 10739 tree iref = build_fold_indirect_ref_loc (loc,
10615 CALL_EXPR_ARG (arg0, 0)); 10740 CALL_EXPR_ARG (arg0, 0));
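The hunk is cut short by the diff view, but the surrounding fold turns a strlen result compared against zero into a single character load; the old DECL_BUILT_IN_CLASS/DECL_FUNCTION_CODE pair becomes the one-call fndecl_built_in_p. In source terms (illustrative function):

    #include <string.h>

    int
    is_empty (const char *s)
    {
      /* strlen (s) == 0 holds exactly when *s is the terminating NUL,
         so the library call folds to *s == '\0'.  */
      return strlen (s) == 0;
    }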
10903 and a comparison, and is probably faster. */ 11028 and a comparison, and is probably faster. */
10904 if (code == LE_EXPR 11029 if (code == LE_EXPR
10905 && TREE_CODE (arg1) == INTEGER_CST 11030 && TREE_CODE (arg1) == INTEGER_CST
10906 && TREE_CODE (arg0) == ABS_EXPR 11031 && TREE_CODE (arg0) == ABS_EXPR
10907 && ! TREE_SIDE_EFFECTS (arg0) 11032 && ! TREE_SIDE_EFFECTS (arg0)
10908 && (0 != (tem = negate_expr (arg1))) 11033 && (tem = negate_expr (arg1)) != 0
10909 && TREE_CODE (tem) == INTEGER_CST 11034 && TREE_CODE (tem) == INTEGER_CST
10910 && !TREE_OVERFLOW (tem)) 11035 && !TREE_OVERFLOW (tem))
10911 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type, 11036 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
10912 build2 (GE_EXPR, type, 11037 build2 (GE_EXPR, type,
10913 TREE_OPERAND (arg0, 0), tem), 11038 TREE_OPERAND (arg0, 0), tem),
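Here only the `0 != (tem = ...)` style is modernized to `(tem = ...) != 0`; the fold itself rewrites an abs comparison as the range test the comment describes (illustrative function):

    #include <stdlib.h>

    int
    in_range (int x)
    {
      /* abs (x) <= 31 becomes x >= -31 && x <= 31, which needs no
         conditional negation and, as the comment says, is probably
         faster.  */
      return abs (x) <= 31;
    }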
11028 default: 11153 default:
11029 return NULL_TREE; 11154 return NULL_TREE;
11030 } /* switch (code) */ 11155 } /* switch (code) */
11031 } 11156 }
11032 11157
11158 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11159 ((A & N) + B) & M -> (A + B) & M
11160 Similarly if (N & M) == 0,
11161 ((A | N) + B) & M -> (A + B) & M
11162 and for - instead of + (or unary - instead of +)
11163 and/or ^ instead of |.
11164 If B is constant and (B & M) == 0, fold into A & M.
11165
11166 This function is a helper for match.pd patterns. Return non-NULL
11167 type in which the simplified operation should be performed only
11168 if any optimization is possible.
11169
11170 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
11171 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
11172 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
11173 +/-. */
11174 tree
11175 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
11176 tree arg00, enum tree_code code00, tree arg000, tree arg001,
11177 tree arg01, enum tree_code code01, tree arg010, tree arg011,
11178 tree *pmop)
11179 {
11180 gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
11181 gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
11182 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11183 if (~cst1 == 0
11184 || (cst1 & (cst1 + 1)) != 0
11185 || !INTEGRAL_TYPE_P (type)
11186 || (!TYPE_OVERFLOW_WRAPS (type)
11187 && TREE_CODE (type) != INTEGER_TYPE)
11188 || (wi::max_value (type) & cst1) != cst1)
11189 return NULL_TREE;
11190
11191 enum tree_code codes[2] = { code00, code01 };
11192 tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
11193 int which = 0;
11194 wide_int cst0;
11195
11196 /* Now we know that arg0 is (C + D) or (C - D) or -C and
11197 arg1 (M) is == (1LL << cst) - 1.
11198 Store C into PMOP[0] and D into PMOP[1]. */
11199 pmop[0] = arg00;
11200 pmop[1] = arg01;
11201 which = code != NEGATE_EXPR;
11202
11203 for (; which >= 0; which--)
11204 switch (codes[which])
11205 {
11206 case BIT_AND_EXPR:
11207 case BIT_IOR_EXPR:
11208 case BIT_XOR_EXPR:
11209 gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
11210 cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
11211 if (codes[which] == BIT_AND_EXPR)
11212 {
11213 if (cst0 != cst1)
11214 break;
11215 }
11216 else if (cst0 != 0)
11217 break;
11218 /* If C or D is of the form (A & N) where
11219 (N & M) == M, or of the form (A | N) or
11220 (A ^ N) where (N & M) == 0, replace it with A. */
11221 pmop[which] = arg0xx[2 * which];
11222 break;
11223 case ERROR_MARK:
11224 if (TREE_CODE (pmop[which]) != INTEGER_CST)
11225 break;
11226 /* If C or D is a N where (N & M) == 0, it can be
11227 omitted (replaced with 0). */
11228 if ((code == PLUS_EXPR
11229 || (code == MINUS_EXPR && which == 0))
11230 && (cst1 & wi::to_wide (pmop[which])) == 0)
11231 pmop[which] = build_int_cst (type, 0);
11232 /* Similarly, with C - N where (-N & M) == 0. */
11233 if (code == MINUS_EXPR
11234 && which == 1
11235 && (cst1 & -wi::to_wide (pmop[which])) == 0)
11236 pmop[which] = build_int_cst (type, 0);
11237 break;
11238 default:
11239 gcc_unreachable ();
11240 }
11241
11242 /* Only build anything new if we optimized one or both arguments above. */
11243 if (pmop[0] == arg00 && pmop[1] == arg01)
11244 return NULL_TREE;
11245
11246 if (TYPE_OVERFLOW_WRAPS (type))
11247 return type;
11248 else
11249 return unsigned_type_for (type);
11250 }
11251
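fold_bit_and_mask is the old inline BIT_AND_EXPR logic (removed earlier in this diff) refactored so match.pd patterns can call it. An input it accepts, assuming 32-bit unsigned arithmetic (illustrative function):

    unsigned int
    low_bits (unsigned int a, unsigned int b)
    {
      /* M == 0xf is (1 << 4) - 1 and N == 0xff satisfies (N & M) == M,
         so the inner mask cannot change the low four bits of the sum:
         ((a & 0xff) + b) & 0xf folds to (a + b) & 0xf.  */
      return ((a & 0xff) + b) & 0xf;
    }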
11252 /* Used by contains_label_[p1]. */
11253
11254 struct contains_label_data
11255 {
11256 hash_set<tree> *pset;
11257 bool inside_switch_p;
11258 };
11259
11033 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is 11260 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11034 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees 11261 a LABEL_EXPR or CASE_LABEL_EXPR not inside another SWITCH_EXPR; otherwise
11035 of GOTO_EXPR. */ 11262 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
11036 11263
11037 static tree 11264 static tree
11038 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED) 11265 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
11039 { 11266 {
11267 contains_label_data *d = (contains_label_data *) data;
11040 switch (TREE_CODE (*tp)) 11268 switch (TREE_CODE (*tp))
11041 { 11269 {
11042 case LABEL_EXPR: 11270 case LABEL_EXPR:
11043 return *tp; 11271 return *tp;
11044 11272
11273 case CASE_LABEL_EXPR:
11274 if (!d->inside_switch_p)
11275 return *tp;
11276 return NULL_TREE;
11277
11278 case SWITCH_EXPR:
11279 if (!d->inside_switch_p)
11280 {
11281 if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
11282 return *tp;
11283 d->inside_switch_p = true;
11284 if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
11285 return *tp;
11286 d->inside_switch_p = false;
11287 *walk_subtrees = 0;
11288 }
11289 return NULL_TREE;
11290
11045 case GOTO_EXPR: 11291 case GOTO_EXPR:
11046 *walk_subtrees = 0; 11292 *walk_subtrees = 0;
11047 11293 return NULL_TREE;
11048 /* fall through */
11049 11294
11050 default: 11295 default:
11051 return NULL_TREE; 11296 return NULL_TREE;
11052 } 11297 }
11053 } 11298 }
11056 outside the sub-tree. */ 11301 outside the sub-tree. */
11057 11302
11058 static bool 11303 static bool
11059 contains_label_p (tree st) 11304 contains_label_p (tree st)
11060 { 11305 {
11061 return 11306 hash_set<tree> pset;
11062 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE); 11307 contains_label_data data = { &pset, false };
11308 return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
11063 } 11309 }
11064 11310
11065 /* Fold a ternary expression of code CODE and type TYPE with operands 11311 /* Fold a ternary expression of code CODE and type TYPE with operands
11066 OP0, OP1, and OP2. Return the folded expression if folding is 11312 OP0, OP1, and OP2. Return the folded expression if folding is
11067 successful. Otherwise, return NULL_TREE. */ 11313 successful. Otherwise, return NULL_TREE. */
11148 return pedantic_non_lvalue_loc (loc, tem); 11394 return pedantic_non_lvalue_loc (loc, tem);
11149 return NULL_TREE; 11395 return NULL_TREE;
11150 } 11396 }
11151 else if (TREE_CODE (arg0) == VECTOR_CST) 11397 else if (TREE_CODE (arg0) == VECTOR_CST)
11152 { 11398 {
11399 unsigned HOST_WIDE_INT nelts;
11153 if ((TREE_CODE (arg1) == VECTOR_CST 11400 if ((TREE_CODE (arg1) == VECTOR_CST
11154 || TREE_CODE (arg1) == CONSTRUCTOR) 11401 || TREE_CODE (arg1) == CONSTRUCTOR)
11155 && (TREE_CODE (arg2) == VECTOR_CST 11402 && (TREE_CODE (arg2) == VECTOR_CST
11156 || TREE_CODE (arg2) == CONSTRUCTOR)) 11403 || TREE_CODE (arg2) == CONSTRUCTOR)
11404 && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
11157 { 11405 {
11158 unsigned int nelts = VECTOR_CST_NELTS (arg0), i; 11406 vec_perm_builder sel (nelts, nelts, 1);
11159 gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type)); 11407 for (unsigned int i = 0; i < nelts; i++)
11160 auto_vec_perm_indices sel (nelts);
11161 for (i = 0; i < nelts; i++)
11162 { 11408 {
11163 tree val = VECTOR_CST_ELT (arg0, i); 11409 tree val = VECTOR_CST_ELT (arg0, i);
11164 if (integer_all_onesp (val)) 11410 if (integer_all_onesp (val))
11165 sel.quick_push (i); 11411 sel.quick_push (i);
11166 else if (integer_zerop (val)) 11412 else if (integer_zerop (val))
11167 sel.quick_push (nelts + i); 11413 sel.quick_push (nelts + i);
11168 else /* Currently unreachable. */ 11414 else /* Currently unreachable. */
11169 return NULL_TREE; 11415 return NULL_TREE;
11170 } 11416 }
11171 tree t = fold_vec_perm (type, arg1, arg2, sel); 11417 vec_perm_indices indices (sel, 2, nelts);
11418 tree t = fold_vec_perm (type, arg1, arg2, indices);
11172 if (t != NULL_TREE) 11419 if (t != NULL_TREE)
11173 return t; 11420 return t;
11174 } 11421 }
11175 } 11422 }
11176 11423
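With a constant all-ones/zeros mask, the vector conditional is just a permutation of its two operands: for the mask { -1, 0, -1, 0 } the loop above builds the selector { 0, 5, 2, 7 } (lanes 0 and 2 from the first operand, 1 and 3 from the second) and hands it to fold_vec_perm. The equivalent source-level shuffle, using the GCC vector extension (illustrative):

    typedef int v4si __attribute__ ((vector_size (16)));

    v4si
    blend (v4si a, v4si b)
    {
      /* Same selection as a VEC_COND_EXPR with mask { -1, 0, -1, 0 }:
         indices 0-3 pick from A, 4-7 from B.  */
      return __builtin_shuffle (a, b, (v4si) { 0, 5, 2, 7 });
    }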
11337 && TREE_CODE (arg0) == NE_EXPR 11584 && TREE_CODE (arg0) == NE_EXPR
11338 && integer_zerop (TREE_OPERAND (arg0, 1)) 11585 && integer_zerop (TREE_OPERAND (arg0, 1))
11339 && integer_pow2p (arg1) 11586 && integer_pow2p (arg1)
11340 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR 11587 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11341 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1), 11588 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11342 arg1, OEP_ONLY_CONST)) 11589 arg1, OEP_ONLY_CONST)
11590 /* operand_equal_p compares just value, not precision, so e.g.
11591 arg1 could be 8-bit -128 and be a power of two, but BIT_AND_EXPR
11592 second operand 32-bit -128, which is not a power of two (or vice
11593 versa). */
11594 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
11343 return pedantic_non_lvalue_loc (loc, 11595 return pedantic_non_lvalue_loc (loc,
11344 fold_convert_loc (loc, type, 11596 fold_convert_loc (loc, type,
11345 TREE_OPERAND (arg0, 0))); 11597 TREE_OPERAND (arg0,
11598 0)));
11346 11599
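The extra integer_pow2p check closes a precision hole in this COND_EXPR fold; the underlying simplification, sketched (illustrative function):

    int
    bit_select (int x)
    {
      /* (x & 8) != 0 ? 8 : 0 is just x & 8.  The new check ensures the
         BIT_AND mask really is a power of two in its own precision,
         not merely equal in value to ARG1 (the 8-bit vs. 32-bit -128
         case from the comment above).  */
      return (x & 8) != 0 ? 8 : 0;
    }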
11347 /* Disable the transformations below for vectors, since 11600 /* Disable the transformations below for vectors, since
11348 fold_binary_op_with_conditional_arg may undo them immediately, 11601 fold_binary_op_with_conditional_arg may undo them immediately,
11349 yielding an infinite loop. */ 11602 yielding an infinite loop. */
11350 if (code == VEC_COND_EXPR) 11603 if (code == VEC_COND_EXPR)
11409 gcc_unreachable (); 11662 gcc_unreachable ();
11410 11663
11411 case BIT_FIELD_REF: 11664 case BIT_FIELD_REF:
11412 if (TREE_CODE (arg0) == VECTOR_CST 11665 if (TREE_CODE (arg0) == VECTOR_CST
11413 && (type == TREE_TYPE (TREE_TYPE (arg0)) 11666 && (type == TREE_TYPE (TREE_TYPE (arg0))
11414 || (TREE_CODE (type) == VECTOR_TYPE 11667 || (VECTOR_TYPE_P (type)
11415 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))) 11668 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
11669 && tree_fits_uhwi_p (op1)
11670 && tree_fits_uhwi_p (op2))
11416 { 11671 {
11417 tree eltype = TREE_TYPE (TREE_TYPE (arg0)); 11672 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11418 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype)); 11673 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11419 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1); 11674 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11420 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2); 11675 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11421 11676
11422 if (n != 0 11677 if (n != 0
11423 && (idx % width) == 0 11678 && (idx % width) == 0
11424 && (n % width) == 0 11679 && (n % width) == 0
11425 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))) 11680 && known_le ((idx + n) / width,
11681 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
11426 { 11682 {
11427 idx = idx / width; 11683 idx = idx / width;
11428 n = n / width; 11684 n = n / width;
11429 11685
11430 if (TREE_CODE (arg0) == VECTOR_CST) 11686 if (TREE_CODE (arg0) == VECTOR_CST)
11431 { 11687 {
11432 if (n == 1) 11688 if (n == 1)
11433 return VECTOR_CST_ELT (arg0, idx); 11689 {
11434 11690 tem = VECTOR_CST_ELT (arg0, idx);
11435 auto_vec<tree, 32> vals (n); 11691 if (VECTOR_TYPE_P (type))
11692 tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
11693 return tem;
11694 }
11695
11696 tree_vector_builder vals (type, n, 1);
11436 for (unsigned i = 0; i < n; ++i) 11697 for (unsigned i = 0; i < n; ++i)
11437 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i)); 11698 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
11438 return build_vector (type, vals); 11699 return vals.build ();
11439 } 11700 }
11440 } 11701 }
11441 } 11702 }
11442 11703
11443 /* On constants we can use native encode/interpret to constant 11704 /* On constants we can use native encode/interpret to constant
11444 fold (nearly) all BIT_FIELD_REFs. */ 11705 fold (nearly) all BIT_FIELD_REFs. */
11445 if (CONSTANT_CLASS_P (arg0) 11706 if (CONSTANT_CLASS_P (arg0)
11446 && can_native_interpret_type_p (type) 11707 && can_native_interpret_type_p (type)
11447 && BITS_PER_UNIT == 8) 11708 && BITS_PER_UNIT == 8
11709 && tree_fits_uhwi_p (op1)
11710 && tree_fits_uhwi_p (op2))
11448 { 11711 {
11449 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2); 11712 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11450 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1); 11713 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11451 /* Limit us to a reasonable amount of work. To relax the 11714 /* Limit us to a reasonable amount of work. To relax the
11452 other limitations we need bit-shifting of the buffer 11715 other limitations we need bit-shifting of the buffer
11470 } 11733 }
11471 } 11734 }
11472 11735
11473 return NULL_TREE; 11736 return NULL_TREE;
11474 11737
11475 case FMA_EXPR:
11476 /* For integers we can decompose the FMA if possible. */
11477 if (TREE_CODE (arg0) == INTEGER_CST
11478 && TREE_CODE (arg1) == INTEGER_CST)
11479 return fold_build2_loc (loc, PLUS_EXPR, type,
11480 const_binop (MULT_EXPR, arg0, arg1), arg2);
11481 if (integer_zerop (arg2))
11482 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11483
11484 return fold_fma (loc, type, arg0, arg1, arg2);
11485
11486 case VEC_PERM_EXPR: 11738 case VEC_PERM_EXPR:
11487 if (TREE_CODE (arg2) == VECTOR_CST) 11739 if (TREE_CODE (arg2) == VECTOR_CST)
11488 { 11740 {
11489 unsigned int nelts = VECTOR_CST_NELTS (arg2), i, mask, mask2; 11741 /* Build a vector of integers from the tree mask. */
11490 bool need_mask_canon = false; 11742 vec_perm_builder builder;
11491 bool need_mask_canon2 = false; 11743 if (!tree_to_vec_perm_builder (&builder, arg2))
11492 bool all_in_vec0 = true; 11744 return NULL_TREE;
11493 bool all_in_vec1 = true; 11745
11494 bool maybe_identity = true; 11746 /* Create a vec_perm_indices for the integer vector. */
11747 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
11495 bool single_arg = (op0 == op1); 11748 bool single_arg = (op0 == op1);
11496 bool changed = false; 11749 vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
11497 11750
11498 mask2 = 2 * nelts - 1; 11751 /* Check for cases that fold to OP0 or OP1 in their original
11499 mask = single_arg ? (nelts - 1) : mask2; 11752 element order. */
11500 gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type)); 11753 if (sel.series_p (0, 1, 0, 1))
11501 auto_vec_perm_indices sel (nelts); 11754 return op0;
11502 auto_vec_perm_indices sel2 (nelts); 11755 if (sel.series_p (0, 1, nelts, 1))
11503 for (i = 0; i < nelts; i++) 11756 return op1;
11757
11758 if (!single_arg)
11504 { 11759 {
11505 tree val = VECTOR_CST_ELT (arg2, i); 11760 if (sel.all_from_input_p (0))
11506 if (TREE_CODE (val) != INTEGER_CST) 11761 op1 = op0;
11507 return NULL_TREE; 11762 else if (sel.all_from_input_p (1))
11508 11763 {
11509 /* Make sure that the perm value is in an acceptable 11764 op0 = op1;
11510 range. */ 11765 sel.rotate_inputs (1);
11511 wi::tree_to_wide_ref t = wi::to_wide (val); 11766 }
11512 need_mask_canon |= wi::gtu_p (t, mask);
11513 need_mask_canon2 |= wi::gtu_p (t, mask2);
11514 unsigned int elt = t.to_uhwi () & mask;
11515 unsigned int elt2 = t.to_uhwi () & mask2;
11516
11517 if (elt < nelts)
11518 all_in_vec1 = false;
11519 else
11520 all_in_vec0 = false;
11521
11522 if ((elt & (nelts - 1)) != i)
11523 maybe_identity = false;
11524
11525 sel.quick_push (elt);
11526 sel2.quick_push (elt2);
11527 }
11528
11529 if (maybe_identity)
11530 {
11531 if (all_in_vec0)
11532 return op0;
11533 if (all_in_vec1)
11534 return op1;
11535 }
11536
11537 if (all_in_vec0)
11538 op1 = op0;
11539 else if (all_in_vec1)
11540 {
11541 op0 = op1;
11542 for (i = 0; i < nelts; i++)
11543 sel[i] -= nelts;
11544 need_mask_canon = true;
11545 } 11767 }
11546 11768
11547 if ((TREE_CODE (op0) == VECTOR_CST 11769 if ((TREE_CODE (op0) == VECTOR_CST
11548 || TREE_CODE (op0) == CONSTRUCTOR) 11770 || TREE_CODE (op0) == CONSTRUCTOR)
11549 && (TREE_CODE (op1) == VECTOR_CST 11771 && (TREE_CODE (op1) == VECTOR_CST
11552 tree t = fold_vec_perm (type, op0, op1, sel); 11774 tree t = fold_vec_perm (type, op0, op1, sel);
11553 if (t != NULL_TREE) 11775 if (t != NULL_TREE)
11554 return t; 11776 return t;
11555 } 11777 }
11556 11778
11557 if (op0 == op1 && !single_arg) 11779 bool changed = (op0 == op1 && !single_arg);
11558 changed = true; 11780
11559 11781 /* Generate a canonical form of the selector. */
11560 /* Some targets are deficient and fail to expand a single 11782 if (arg2 == op2 && sel.encoding () != builder)
11561 argument permutation while still allowing an equivalent
11562 2-argument version. */
11563 if (need_mask_canon && arg2 == op2
11564 && !can_vec_perm_p (TYPE_MODE (type), false, &sel)
11565 && can_vec_perm_p (TYPE_MODE (type), false, &sel2))
11566 { 11783 {
11567 need_mask_canon = need_mask_canon2; 11784 /* Some targets are deficient and fail to expand a single
11568 sel = sel2; 11785 argument permutation while still allowing an equivalent
11569 } 11786 2-argument version. */
11570 11787 if (sel.ninputs () == 2
11571 if (need_mask_canon && arg2 == op2) 11788 || can_vec_perm_const_p (TYPE_MODE (type), sel, false))
11572 { 11789 op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel);
11573 tree eltype = TREE_TYPE (TREE_TYPE (arg2)); 11790 else
11574 auto_vec<tree, 32> tsel (nelts); 11791 {
11575 for (i = 0; i < nelts; i++) 11792 vec_perm_indices sel2 (builder, 2, nelts);
11576 tsel.quick_push (build_int_cst (eltype, sel[i])); 11793 if (can_vec_perm_const_p (TYPE_MODE (type), sel2, false))
11577 op2 = build_vector (TREE_TYPE (arg2), tsel); 11794 op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel2);
11795 else
11796 /* Not directly supported with either encoding,
11797 so use the preferred form. */
11798 op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel);
11799 }
11578 changed = true; 11800 changed = true;
11579 } 11801 }
11580 11802
11581 if (changed) 11803 if (changed)
11582 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2); 11804 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
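The rewritten VEC_PERM_EXPR handling replaces the hand-rolled mask scan with vec_perm_builder/vec_perm_indices; the series_p tests spot selectors that are an identity over one input. For example, with the GCC vector extension (illustrative):

    typedef int v4si __attribute__ ((vector_size (16)));

    v4si
    identity (v4si a, v4si b)
    {
      /* sel.series_p (0, 1, 0, 1) matches the selector { 0, 1, 2, 3 },
         so this shuffle folds straight to A; { 4, 5, 6, 7 } would fold
         to B via series_p (0, 1, nelts, 1).  */
      return __builtin_shuffle (a, b, (v4si) { 0, 1, 2, 3 });
    }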
11607 unsigned HOST_WIDE_INT elsize 11829 unsigned HOST_WIDE_INT elsize
11608 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1))); 11830 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
11609 if (bitpos % elsize == 0) 11831 if (bitpos % elsize == 0)
11610 { 11832 {
11611 unsigned k = bitpos / elsize; 11833 unsigned k = bitpos / elsize;
11834 unsigned HOST_WIDE_INT nelts;
11612 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0)) 11835 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
11613 return arg0; 11836 return arg0;
11614 else 11837 else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
11615 { 11838 {
11616 unsigned int nelts = VECTOR_CST_NELTS (arg0); 11839 tree_vector_builder elts (type, nelts, 1);
11617 auto_vec<tree, 32> elts (nelts);
11618 elts.quick_grow (nelts); 11840 elts.quick_grow (nelts);
11619 memcpy (&elts[0], VECTOR_CST_ELTS (arg0), 11841 for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
11620 sizeof (tree) * nelts); 11842 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
11621 elts[k] = arg1; 11843 return elts.build ();
11622 return build_vector (type, elts);
11623 } 11844 }
11624 } 11845 }
11625 } 11846 }
11626 return NULL_TREE; 11847 return NULL_TREE;
11627 11848
11934 case COMPLEX_CST: 12155 case COMPLEX_CST:
11935 fold_checksum_tree (TREE_REALPART (expr), ctx, ht); 12156 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
11936 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht); 12157 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
11937 break; 12158 break;
11938 case VECTOR_CST: 12159 case VECTOR_CST:
11939 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i) 12160 len = vector_cst_encoded_nelts (expr);
11940 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht); 12161 for (i = 0; i < len; ++i)
12162 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
11941 break; 12163 break;
11942 default: 12164 default:
11943 break; 12165 break;
11944 } 12166 }
11945 break; 12167 break;
12379 case BIT_AND_EXPR: 12601 case BIT_AND_EXPR:
12380 /* Bitwise and provides a power of two multiple. If the mask is 12602 /* Bitwise and provides a power of two multiple. If the mask is
12381 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */ 12603 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12382 if (!integer_pow2p (bottom)) 12604 if (!integer_pow2p (bottom))
12383 return 0; 12605 return 0;
12384 /* FALLTHRU */ 12606 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12607 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12385 12608
12386 case MULT_EXPR: 12609 case MULT_EXPR:
12610 if (TREE_CODE (bottom) == INTEGER_CST)
12611 {
12612 op1 = TREE_OPERAND (top, 0);
12613 op2 = TREE_OPERAND (top, 1);
12614 if (TREE_CODE (op1) == INTEGER_CST)
12615 std::swap (op1, op2);
12616 if (TREE_CODE (op2) == INTEGER_CST)
12617 {
12618 if (multiple_of_p (type, op2, bottom))
12619 return 1;
12620 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
12621 if (multiple_of_p (type, bottom, op2))
12622 {
12623 widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
12624 wi::to_widest (op2));
12625 if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
12626 {
12627 op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
12628 return multiple_of_p (type, op1, op2);
12629 }
12630 }
12631 return multiple_of_p (type, op1, bottom);
12632 }
12633 }
12387 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom) 12634 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12388 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom)); 12635 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12389 12636
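The new MULT_EXPR case also divides BOTTOM by a constant factor of TOP, which is what makes the example in the comment work (illustrative function):

    unsigned int
    aligned_size (unsigned int x)
    {
      /* multiple_of_p ((x * 2 + 2) * 4, 8): 8 is a multiple of the
         constant factor 4 (8 / 4 == 2), so the question reduces to
         whether x * 2 + 2 is a multiple of 2, which it trivially is;
         the whole value is therefore known to be a multiple of 8.  */
      return (x * 2 + 2) * 4;
    }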
12390 case MINUS_EXPR: 12637 case MINUS_EXPR:
12391 /* It is impossible to prove if op0 - op1 is multiple of bottom 12638 /* It is impossible to prove if op0 - op1 is multiple of bottom
12412 op1 = TREE_OPERAND (top, 1); 12659 op1 = TREE_OPERAND (top, 1);
12413 /* const_binop may not detect overflow correctly, 12660 /* const_binop may not detect overflow correctly,
12414 so check for it explicitly here. */ 12661 so check for it explicitly here. */
12415 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), 12662 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
12416 wi::to_wide (op1)) 12663 wi::to_wide (op1))
12417 && 0 != (t1 = fold_convert (type, 12664 && (t1 = fold_convert (type,
12418 const_binop (LSHIFT_EXPR, 12665 const_binop (LSHIFT_EXPR, size_one_node,
12419 size_one_node, 12666 op1))) != 0
12420 op1)))
12421 && !TREE_OVERFLOW (t1)) 12667 && !TREE_OVERFLOW (t1))
12422 return multiple_of_p (type, t1, bottom); 12668 return multiple_of_p (type, t1, bottom);
12423 } 12669 }
12424 return 0; 12670 return 0;
12425 12671
12486 } 12732 }
12487 12733
12488 /* fall through */ 12734 /* fall through */
12489 12735
12490 default: 12736 default:
12737 if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
12738 return multiple_p (wi::to_poly_widest (top),
12739 wi::to_poly_widest (bottom));
12740
12491 return 0; 12741 return 0;
12492 } 12742 }
12493 } 12743 }
12494 12744
12495 #define tree_expr_nonnegative_warnv_p(X, Y) \ 12745 #define tree_expr_nonnegative_warnv_p(X, Y) \
12771 case CFN_BUILT_IN_BSWAP64: 13021 case CFN_BUILT_IN_BSWAP64:
12772 /* Always true. */ 13022 /* Always true. */
12773 return true; 13023 return true;
12774 13024
12775 CASE_CFN_SQRT: 13025 CASE_CFN_SQRT:
13026 CASE_CFN_SQRT_FN:
12776 /* sqrt(-0.0) is -0.0. */ 13027 /* sqrt(-0.0) is -0.0. */
12777 if (!HONOR_SIGNED_ZEROS (element_mode (type))) 13028 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
12778 return true; 13029 return true;
12779 return RECURSE (arg0); 13030 return RECURSE (arg0);
12780 13031
12781 CASE_CFN_ASINH: 13032 CASE_CFN_ASINH:
12782 CASE_CFN_ATAN: 13033 CASE_CFN_ATAN:
12783 CASE_CFN_ATANH: 13034 CASE_CFN_ATANH:
12784 CASE_CFN_CBRT: 13035 CASE_CFN_CBRT:
12785 CASE_CFN_CEIL: 13036 CASE_CFN_CEIL:
13037 CASE_CFN_CEIL_FN:
12786 CASE_CFN_ERF: 13038 CASE_CFN_ERF:
12787 CASE_CFN_EXPM1: 13039 CASE_CFN_EXPM1:
12788 CASE_CFN_FLOOR: 13040 CASE_CFN_FLOOR:
13041 CASE_CFN_FLOOR_FN:
12789 CASE_CFN_FMOD: 13042 CASE_CFN_FMOD:
12790 CASE_CFN_FREXP: 13043 CASE_CFN_FREXP:
12791 CASE_CFN_ICEIL: 13044 CASE_CFN_ICEIL:
12792 CASE_CFN_IFLOOR: 13045 CASE_CFN_IFLOOR:
12793 CASE_CFN_IRINT: 13046 CASE_CFN_IRINT:
12801 CASE_CFN_LLROUND: 13054 CASE_CFN_LLROUND:
12802 CASE_CFN_LRINT: 13055 CASE_CFN_LRINT:
12803 CASE_CFN_LROUND: 13056 CASE_CFN_LROUND:
12804 CASE_CFN_MODF: 13057 CASE_CFN_MODF:
12805 CASE_CFN_NEARBYINT: 13058 CASE_CFN_NEARBYINT:
13059 CASE_CFN_NEARBYINT_FN:
12806 CASE_CFN_RINT: 13060 CASE_CFN_RINT:
13061 CASE_CFN_RINT_FN:
12807 CASE_CFN_ROUND: 13062 CASE_CFN_ROUND:
13063 CASE_CFN_ROUND_FN:
12808 CASE_CFN_SCALB: 13064 CASE_CFN_SCALB:
12809 CASE_CFN_SCALBLN: 13065 CASE_CFN_SCALBLN:
12810 CASE_CFN_SCALBN: 13066 CASE_CFN_SCALBN:
12811 CASE_CFN_SIGNBIT: 13067 CASE_CFN_SIGNBIT:
12812 CASE_CFN_SIGNIFICAND: 13068 CASE_CFN_SIGNIFICAND:
12813 CASE_CFN_SINH: 13069 CASE_CFN_SINH:
12814 CASE_CFN_TANH: 13070 CASE_CFN_TANH:
12815 CASE_CFN_TRUNC: 13071 CASE_CFN_TRUNC:
13072 CASE_CFN_TRUNC_FN:
12816 /* True if the 1st argument is nonnegative. */ 13073 /* True if the 1st argument is nonnegative. */
12817 return RECURSE (arg0); 13074 return RECURSE (arg0);
12818 13075
12819 CASE_CFN_FMAX: 13076 CASE_CFN_FMAX:
13077 CASE_CFN_FMAX_FN:
12820 /* True if the 1st OR 2nd arguments are nonnegative. */ 13078 /* True if the 1st OR 2nd arguments are nonnegative. */
12821 return RECURSE (arg0) || RECURSE (arg1); 13079 return RECURSE (arg0) || RECURSE (arg1);
12822 13080
12823 CASE_CFN_FMIN: 13081 CASE_CFN_FMIN:
13082 CASE_CFN_FMIN_FN:
12824 /* True if the 1st AND 2nd arguments are nonnegative. */ 13083 /* True if the 1st AND 2nd arguments are nonnegative. */
12825 return RECURSE (arg0) && RECURSE (arg1); 13084 return RECURSE (arg0) && RECURSE (arg1);
12826 13085
12827 CASE_CFN_COPYSIGN: 13086 CASE_CFN_COPYSIGN:
13087 CASE_CFN_COPYSIGN_FN:
12828 /* True if the 2nd argument is nonnegative. */ 13088 /* True if the 2nd argument is nonnegative. */
12829 return RECURSE (arg1); 13089 return RECURSE (arg1);
12830 13090
12831 CASE_CFN_POWI: 13091 CASE_CFN_POWI:
12832 /* True if the 1st argument is nonnegative or the second 13092 /* True if the 1st argument is nonnegative or the second
13313 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth) 13573 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13314 { 13574 {
13315 switch (fn) 13575 switch (fn)
13316 { 13576 {
13317 CASE_CFN_CEIL: 13577 CASE_CFN_CEIL:
13578 CASE_CFN_CEIL_FN:
13318 CASE_CFN_FLOOR: 13579 CASE_CFN_FLOOR:
13580 CASE_CFN_FLOOR_FN:
13319 CASE_CFN_NEARBYINT: 13581 CASE_CFN_NEARBYINT:
13582 CASE_CFN_NEARBYINT_FN:
13320 CASE_CFN_RINT: 13583 CASE_CFN_RINT:
13584 CASE_CFN_RINT_FN:
13321 CASE_CFN_ROUND: 13585 CASE_CFN_ROUND:
13586 CASE_CFN_ROUND_FN:
13322 CASE_CFN_TRUNC: 13587 CASE_CFN_TRUNC:
13588 CASE_CFN_TRUNC_FN:
13323 return true; 13589 return true;
13324 13590
13325 CASE_CFN_FMIN: 13591 CASE_CFN_FMIN:
13592 CASE_CFN_FMIN_FN:
13326 CASE_CFN_FMAX: 13593 CASE_CFN_FMAX:
13594 CASE_CFN_FMAX_FN:
13327 return RECURSE (arg0) && RECURSE (arg1); 13595 return RECURSE (arg0) && RECURSE (arg1);
13328 13596
13329 default: 13597 default:
13330 break; 13598 break;
13331 } 13599 }
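The _FN entries extend integer_valued_real_call_p to the type-generic internal-function forms of these built-ins. The property itself is what enables folds such as dropping a redundant rounding call (hypothetical example; the exact pattern lives elsewhere, presumably in match.pd):

    #include <math.h>

    double
    redundant_trunc (double x)
    {
      /* floor (x) is already integer-valued, so the outer trunc can
         fold away, leaving just floor (x).  */
      return trunc (floor (x));
    }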
13494 tree index; 13762 tree index;
13495 tree string; 13763 tree string;
13496 location_t loc = EXPR_LOCATION (exp); 13764 location_t loc = EXPR_LOCATION (exp);
13497 13765
13498 if (TREE_CODE (exp) == INDIRECT_REF) 13766 if (TREE_CODE (exp) == INDIRECT_REF)
13499 string = string_constant (exp1, &index); 13767 string = string_constant (exp1, &index, NULL, NULL);
13500 else 13768 else
13501 { 13769 {
13502 tree low_bound = array_ref_low_bound (exp); 13770 tree low_bound = array_ref_low_bound (exp);
13503 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1)); 13771 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13504 13772
13542 { 13810 {
13543 tree t = NULL_TREE; 13811 tree t = NULL_TREE;
13544 13812
13545 switch (TREE_CODE (arg0)) 13813 switch (TREE_CODE (arg0))
13546 { 13814 {
13547 case INTEGER_CST:
13548 {
13549 bool overflow;
13550 wide_int val = wi::neg (wi::to_wide (arg0), &overflow);
13551 t = force_fit_type (type, val, 1,
13552 (overflow && ! TYPE_UNSIGNED (type))
13553 || TREE_OVERFLOW (arg0));
13554 break;
13555 }
13556
13557 case REAL_CST: 13815 case REAL_CST:
13558 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0))); 13816 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13559 break; 13817 break;
13560 13818
13561 case FIXED_CST: 13819 case FIXED_CST:
13570 TREE_OVERFLOW (t) = 1; 13828 TREE_OVERFLOW (t) = 1;
13571 break; 13829 break;
13572 } 13830 }
13573 13831
13574 default: 13832 default:
13833 if (poly_int_tree_p (arg0))
13834 {
13835 wi::overflow_type overflow;
13836 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
13837 t = force_fit_type (type, res, 1,
13838 (overflow && ! TYPE_UNSIGNED (type))
13839 || TREE_OVERFLOW (arg0));
13840 break;
13841 }
13842
13575 gcc_unreachable (); 13843 gcc_unreachable ();
13576 } 13844 }
13577 13845
13578 return t; 13846 return t;
13579 } 13847 }
13592 { 13860 {
13593 case INTEGER_CST: 13861 case INTEGER_CST:
13594 { 13862 {
13595 /* If the value is unsigned or non-negative, then the absolute value 13863 /* If the value is unsigned or non-negative, then the absolute value
13596 is the same as the ordinary value. */ 13864 is the same as the ordinary value. */
13597 if (!wi::neg_p (wi::to_wide (arg0), TYPE_SIGN (type))) 13865 wide_int val = wi::to_wide (arg0);
13598 t = arg0; 13866 wi::overflow_type overflow = wi::OVF_NONE;
13867 if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
13868 ;
13599 13869
13600 /* If the value is negative, then the absolute value is 13870 /* If the value is negative, then the absolute value is
13601 its negation. */ 13871 its negation. */
13602 else 13872 else
13603 { 13873 val = wi::neg (val, &overflow);
13604 bool overflow; 13874
13605 wide_int val = wi::neg (wi::to_wide (arg0), &overflow); 13875 /* Force to the destination type, set TREE_OVERFLOW for signed
13606 t = force_fit_type (type, val, -1, 13876 TYPE only. */
13607 overflow | TREE_OVERFLOW (arg0)); 13877 t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
13608 }
13609 } 13878 }
13610 break; 13879 break;
13611 13880
13612 case REAL_CST: 13881 case REAL_CST:
13613 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0))) 13882 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13614 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0))); 13883 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13615 else 13884 else
13720 { 13989 {
13721 if (!VECTOR_TYPE_P (type)) 13990 if (!VECTOR_TYPE_P (type))
13722 { 13991 {
13723 /* Have vector comparison with scalar boolean result. */ 13992 /* Have vector comparison with scalar boolean result. */
13724 gcc_assert ((code == EQ_EXPR || code == NE_EXPR) 13993 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
13725 && VECTOR_CST_NELTS (op0) == VECTOR_CST_NELTS (op1)); 13994 && known_eq (VECTOR_CST_NELTS (op0),
13726 for (unsigned i = 0; i < VECTOR_CST_NELTS (op0); i++) 13995 VECTOR_CST_NELTS (op1)));
13996 unsigned HOST_WIDE_INT nunits;
13997 if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
13998 return NULL_TREE;
13999 for (unsigned i = 0; i < nunits; i++)
13727 { 14000 {
13728 tree elem0 = VECTOR_CST_ELT (op0, i); 14001 tree elem0 = VECTOR_CST_ELT (op0, i);
13729 tree elem1 = VECTOR_CST_ELT (op1, i); 14002 tree elem1 = VECTOR_CST_ELT (op1, i);
13730 tree tmp = fold_relational_const (code, type, elem0, elem1); 14003 tree tmp = fold_relational_const (code, type, elem0, elem1);
13731 if (tmp == NULL_TREE) 14004 if (tmp == NULL_TREE)
13733 if (integer_zerop (tmp)) 14006 if (integer_zerop (tmp))
13734 return constant_boolean_node (false, type); 14007 return constant_boolean_node (false, type);
13735 } 14008 }
13736 return constant_boolean_node (true, type); 14009 return constant_boolean_node (true, type);
13737 } 14010 }
13738 unsigned count = VECTOR_CST_NELTS (op0); 14011 tree_vector_builder elts;
13739 gcc_assert (VECTOR_CST_NELTS (op1) == count 14012 if (!elts.new_binary_operation (type, op0, op1, false))
13740 && TYPE_VECTOR_SUBPARTS (type) == count); 14013 return NULL_TREE;
13741 14014 unsigned int count = elts.encoded_nelts ();
13742 auto_vec<tree, 32> elts (count);
13743 for (unsigned i = 0; i < count; i++) 14015 for (unsigned i = 0; i < count; i++)
13744 { 14016 {
13745 tree elem_type = TREE_TYPE (type); 14017 tree elem_type = TREE_TYPE (type);
13746 tree elem0 = VECTOR_CST_ELT (op0, i); 14018 tree elem0 = VECTOR_CST_ELT (op0, i);
13747 tree elem1 = VECTOR_CST_ELT (op1, i); 14019 tree elem1 = VECTOR_CST_ELT (op1, i);
13754 14026
13755 elts.quick_push (build_int_cst (elem_type, 14027 elts.quick_push (build_int_cst (elem_type,
13756 integer_zerop (tem) ? 0 : -1)); 14028 integer_zerop (tem) ? 0 : -1));
13757 } 14029 }
13758 14030
13759 return build_vector (type, elts); 14031 return elts.build ();
13760 } 14032 }
13761 14033
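fold_relational_const now builds its result through tree_vector_builder and compares only the encoded elements, which copes with variable-length vectors. The constant fold it performs, at source level with the GCC vector extension (illustrative):

    typedef int v4si __attribute__ ((vector_size (16)));

    v4si
    cmp_const (void)
    {
      v4si a = { 1, 2, 3, 4 };
      v4si b = { 1, 0, 3, 0 };
      /* Element-wise comparison of constant vectors folds to a
         constant mask of -1 (true) and 0 (false) lanes:
         { -1, 0, -1, 0 }.  */
      return a == b;
    }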
13762 /* From here on we only handle LT, LE, GT, GE, EQ and NE. 14034 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13763 14035
13764 To compute GT, swap the arguments and do LT. 14036 To compute GT, swap the arguments and do LT.
13838 tree 14110 tree
13839 fold_indirect_ref_1 (location_t loc, tree type, tree op0) 14111 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
13840 { 14112 {
13841 tree sub = op0; 14113 tree sub = op0;
13842 tree subtype; 14114 tree subtype;
14115 poly_uint64 const_op01;
13843 14116
13844 STRIP_NOPS (sub); 14117 STRIP_NOPS (sub);
13845 subtype = TREE_TYPE (sub); 14118 subtype = TREE_TYPE (sub);
13846 if (!POINTER_TYPE_P (subtype) 14119 if (!POINTER_TYPE_P (subtype)
13847 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0))) 14120 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
13849 14122
13850 if (TREE_CODE (sub) == ADDR_EXPR) 14123 if (TREE_CODE (sub) == ADDR_EXPR)
13851 { 14124 {
13852 tree op = TREE_OPERAND (sub, 0); 14125 tree op = TREE_OPERAND (sub, 0);
13853 tree optype = TREE_TYPE (op); 14126 tree optype = TREE_TYPE (op);
14127
13854 /* *&CONST_DECL -> to the value of the const decl. */ 14128 /* *&CONST_DECL -> to the value of the const decl. */
13855 if (TREE_CODE (op) == CONST_DECL) 14129 if (TREE_CODE (op) == CONST_DECL)
13856 return DECL_INITIAL (op); 14130 return DECL_INITIAL (op);
13857 /* *&p => p; make sure to handle *&"str"[cst] here. */ 14131 /* *&p => p; make sure to handle *&"str"[cst] here. */
13858 if (type == optype) 14132 if (type == optype)
13882 /* *(foo *)&complexfoo => __real__ complexfoo */ 14156 /* *(foo *)&complexfoo => __real__ complexfoo */
13883 else if (TREE_CODE (optype) == COMPLEX_TYPE 14157 else if (TREE_CODE (optype) == COMPLEX_TYPE
13884 && type == TREE_TYPE (optype)) 14158 && type == TREE_TYPE (optype))
13885 return fold_build1_loc (loc, REALPART_EXPR, type, op); 14159 return fold_build1_loc (loc, REALPART_EXPR, type, op);
13886 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */ 14160 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
13887 else if (TREE_CODE (optype) == VECTOR_TYPE 14161 else if (VECTOR_TYPE_P (optype)
13888 && type == TREE_TYPE (optype)) 14162 && type == TREE_TYPE (optype))
13889 { 14163 {
13890 tree part_width = TYPE_SIZE (type); 14164 tree part_width = TYPE_SIZE (type);
13891 tree index = bitsize_int (0); 14165 tree index = bitsize_int (0);
13892 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index); 14166 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
14167 index);
13893 } 14168 }
13894 } 14169 }
13895 14170
13896 if (TREE_CODE (sub) == POINTER_PLUS_EXPR 14171 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
13897 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST) 14172 && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
13898 { 14173 {
13899 tree op00 = TREE_OPERAND (sub, 0); 14174 tree op00 = TREE_OPERAND (sub, 0);
13900 tree op01 = TREE_OPERAND (sub, 1); 14175 tree op01 = TREE_OPERAND (sub, 1);
13901 14176
13902 STRIP_NOPS (op00); 14177 STRIP_NOPS (op00);
13905 tree op00type; 14180 tree op00type;
13906 op00 = TREE_OPERAND (op00, 0); 14181 op00 = TREE_OPERAND (op00, 0);
13907 op00type = TREE_TYPE (op00); 14182 op00type = TREE_TYPE (op00);
13908 14183
13909 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */ 14184 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
13910 if (TREE_CODE (op00type) == VECTOR_TYPE 14185 if (VECTOR_TYPE_P (op00type)
13911 && type == TREE_TYPE (op00type)) 14186 && type == TREE_TYPE (op00type)
14187 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
14188 but we want to treat offsets with MSB set as negative.
14189 For the code below negative offsets are invalid and
14190 TYPE_SIZE of the element is something unsigned, so
14191 check whether op01 fits into poly_int64, which implies
14192 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
14193 then just use poly_uint64 because we want to treat the
14194 value as unsigned. */
14195 && tree_fits_poly_int64_p (op01))
13912 { 14196 {
13913 tree part_width = TYPE_SIZE (type); 14197 tree part_width = TYPE_SIZE (type);
13914 unsigned HOST_WIDE_INT max_offset 14198 poly_uint64 max_offset
13915 = (tree_to_uhwi (part_width) / BITS_PER_UNIT 14199 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
13916 * TYPE_VECTOR_SUBPARTS (op00type)); 14200 * TYPE_VECTOR_SUBPARTS (op00type));
13917 if (tree_int_cst_sign_bit (op01) == 0 14201 if (known_lt (const_op01, max_offset))
13918 && compare_tree_int (op01, max_offset) == -1)
13919 { 14202 {
13920 unsigned HOST_WIDE_INT offset = tree_to_uhwi (op01); 14203 tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
13921 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
13922 tree index = bitsize_int (indexi);
13923 return fold_build3_loc (loc, 14204 return fold_build3_loc (loc,
13924 BIT_FIELD_REF, type, op00, 14205 BIT_FIELD_REF, type, op00,
13925 part_width, index); 14206 part_width, index);
13926 } 14207 }
13927 } 14208 }
13928 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */ 14209 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13929 else if (TREE_CODE (op00type) == COMPLEX_TYPE 14210 else if (TREE_CODE (op00type) == COMPLEX_TYPE
13930 && type == TREE_TYPE (op00type)) 14211 && type == TREE_TYPE (op00type))
13931 { 14212 {
13932 tree size = TYPE_SIZE_UNIT (type); 14213 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
13933 if (tree_int_cst_equal (size, op01)) 14214 const_op01))
13934 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00); 14215 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
13935 } 14216 }
13936 /* ((foo *)&fooarray)[1] => fooarray[1] */ 14217 /* ((foo *)&fooarray)[1] => fooarray[1] */
13937 else if (TREE_CODE (op00type) == ARRAY_TYPE 14218 else if (TREE_CODE (op00type) == ARRAY_TYPE
13938 && type == TREE_TYPE (op00type)) 14219 && type == TREE_TYPE (op00type))
13939 { 14220 {
13940 tree type_domain = TYPE_DOMAIN (op00type); 14221 tree type_domain = TYPE_DOMAIN (op00type);
13941 tree min = size_zero_node; 14222 tree min_val = size_zero_node;
13942 if (type_domain && TYPE_MIN_VALUE (type_domain)) 14223 if (type_domain && TYPE_MIN_VALUE (type_domain))
13943 min = TYPE_MIN_VALUE (type_domain); 14224 min_val = TYPE_MIN_VALUE (type_domain);
13944 offset_int off = wi::to_offset (op01); 14225 poly_uint64 type_size, index;
13945 offset_int el_sz = wi::to_offset (TYPE_SIZE_UNIT (type)); 14226 if (poly_int_tree_p (min_val)
13946 offset_int remainder; 14227 && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
13947 off = wi::divmod_trunc (off, el_sz, SIGNED, &remainder); 14228 && multiple_p (const_op01, type_size, &index))
13948 if (remainder == 0 && TREE_CODE (min) == INTEGER_CST)
13949 { 14229 {
13950 off = off + wi::to_offset (min); 14230 poly_offset_int off = index + wi::to_poly_offset (min_val);
13951 op01 = wide_int_to_tree (sizetype, off); 14231 op01 = wide_int_to_tree (sizetype, off);
13952 return build4_loc (loc, ARRAY_REF, type, op00, op01, 14232 return build4_loc (loc, ARRAY_REF, type, op00, op01,
13953 NULL_TREE, NULL_TREE); 14233 NULL_TREE, NULL_TREE);
13954 } 14234 }
13955 } 14235 }
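fold_indirect_ref_1 now accepts poly_int offsets (for variable-length vectors), but the folds themselves are the familiar ones; two of them written out in C (illustrative functions, not from the patch):

    double
    imag_part (_Complex double z)
    {
      /* ((foo *) &complexfoo)[1] -> __imag__ complexfoo, matched by
         the TYPE_SIZE_UNIT comparison above.  */
      return ((double *) &z)[1];
    }

    int
    second_elt (void)
    {
      static int a[4] = { 1, 2, 3, 4 };
      /* ((foo *) &fooarray)[1] -> fooarray[1], via the multiple_p
         division of the byte offset by the element size.  */
      return ((int *) &a)[1];
    }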
14167 extracts the information about the offset of the access, storing it 14447 extracts the information about the offset of the access, storing it
14168 to PBITPOS and POFFSET. */ 14448 to PBITPOS and POFFSET. */
14169 14449
14170 static tree 14450 static tree
14171 split_address_to_core_and_offset (tree exp, 14451 split_address_to_core_and_offset (tree exp,
14172 HOST_WIDE_INT *pbitpos, tree *poffset) 14452 poly_int64_pod *pbitpos, tree *poffset)
14173 { 14453 {
14174 tree core; 14454 tree core;
14175 machine_mode mode; 14455 machine_mode mode;
14176 int unsignedp, reversep, volatilep; 14456 int unsignedp, reversep, volatilep;
14177 HOST_WIDE_INT bitsize; 14457 poly_int64 bitsize;
14178 location_t loc = EXPR_LOCATION (exp); 14458 location_t loc = EXPR_LOCATION (exp);
14179 14459
14180 if (TREE_CODE (exp) == ADDR_EXPR) 14460 if (TREE_CODE (exp) == ADDR_EXPR)
14181 { 14461 {
14182 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos, 14462 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14188 { 14468 {
14189 core = TREE_OPERAND (exp, 0); 14469 core = TREE_OPERAND (exp, 0);
14190 STRIP_NOPS (core); 14470 STRIP_NOPS (core);
14191 *pbitpos = 0; 14471 *pbitpos = 0;
14192 *poffset = TREE_OPERAND (exp, 1); 14472 *poffset = TREE_OPERAND (exp, 1);
14193 if (TREE_CODE (*poffset) == INTEGER_CST) 14473 if (poly_int_tree_p (*poffset))
14194 { 14474 {
14195 offset_int tem = wi::sext (wi::to_offset (*poffset), 14475 poly_offset_int tem
14196 TYPE_PRECISION (TREE_TYPE (*poffset))); 14476 = wi::sext (wi::to_poly_offset (*poffset),
14477 TYPE_PRECISION (TREE_TYPE (*poffset)));
14197 tem <<= LOG2_BITS_PER_UNIT; 14478 tem <<= LOG2_BITS_PER_UNIT;
14198 if (wi::fits_shwi_p (tem)) 14479 if (tem.to_shwi (pbitpos))
14199 { 14480 *poffset = NULL_TREE;
14200 *pbitpos = tem.to_shwi ();
14201 *poffset = NULL_TREE;
14202 }
14203 } 14481 }
14204 } 14482 }
14205 else 14483 else
14206 { 14484 {
14207 core = exp; 14485 core = exp;
14214 14492
14215 /* Returns true if addresses of E1 and E2 differ by a constant, false 14493 /* Returns true if addresses of E1 and E2 differ by a constant, false
14216 otherwise. If they do, E1 - E2 is stored in *DIFF. */ 14494 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14217 14495
14218 bool 14496 bool
14219 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff) 14497 ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
14220 { 14498 {
14221 tree core1, core2; 14499 tree core1, core2;
14222 HOST_WIDE_INT bitpos1, bitpos2; 14500 poly_int64 bitpos1, bitpos2;
14223 tree toffset1, toffset2, tdiff, type; 14501 tree toffset1, toffset2, tdiff, type;
14224 14502
14225 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1); 14503 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14226 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2); 14504 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14227 14505
14228 if (bitpos1 % BITS_PER_UNIT != 0 14506 poly_int64 bytepos1, bytepos2;
14229 || bitpos2 % BITS_PER_UNIT != 0 14507 if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
14508 || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
14230 || !operand_equal_p (core1, core2, 0)) 14509 || !operand_equal_p (core1, core2, 0))
14231 return false; 14510 return false;
14232 14511
14233 if (toffset1 && toffset2) 14512 if (toffset1 && toffset2)
14234 { 14513 {
14249 return false; 14528 return false;
14250 } 14529 }
14251 else 14530 else
14252 *diff = 0; 14531 *diff = 0;
14253 14532
14254 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT; 14533 *diff += bytepos1 - bytepos2;
14255 return true; 14534 return true;
14256 } 14535 }
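ptr_difference_const succeeds exactly when both addresses decompose to
the same core object, so their distance is a compile-time constant
number of bytes. A standalone C illustration of that property (not GCC
code; the concrete value depends on sizeof (int)):

#include <stdio.h>

int main (void)
{
  int a[10];
  long diff = (char *) &a[5] - (char *) &a[2];
  printf ("%ld\n", diff);   /* 3 * sizeof (int), i.e. 12 on most targets */
  return 0;
}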
14257 14536
14258 /* Return OFF converted to a pointer offset type suitable as offset for 14537 /* Return OFF converted to a pointer offset type suitable as offset for
14259 POINTER_PLUS_EXPR. Use location LOC for this conversion. */ 14538 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14277 { 14556 {
14278 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr), 14557 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14279 ptr, size_int (off)); 14558 ptr, size_int (off));
14280 } 14559 }
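The builder above wraps the offset in size_int and emits a
POINTER_PLUS_EXPR, whose offset operand is always counted in bytes
regardless of the pointee type. A standalone sketch of the same
semantics at the C level (illustrative, not GCC internals):

#include <stdio.h>

int main (void)
{
  int a[4] = { 10, 20, 30, 40 };
  int *p = (int *) ((char *) a + 8);   /* byte offset 8 == &a[2] */
  printf ("%d\n", *p);                 /* prints 30 */
  return 0;
}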
14281 14560
14282 /* Return a char pointer for a C string if it is a string constant 14561 /* Return a pointer P to a NUL-terminated string representing the sequence
14283 or sum of string constant and integer constant. We only support 14562 of constant characters referred to by SRC (or a subsequence of such
14284 string constants properly terminated with '\0' character. 14563 characters within it if SRC is a reference to a string plus some
14285 If STRLEN is a valid pointer, length (including terminating character) 14564 constant offset). If STRLEN is non-null, store the number of bytes
14286 of returned string is stored to the argument. */ 14565 in the string constant including the terminating NUL char. *STRLEN is
14566 typically strlen(P) + 1 in the absence of embedded NUL characters. */
14287 14567
14288 const char * 14568 const char *
14289 c_getstr (tree src, unsigned HOST_WIDE_INT *strlen) 14569 c_getstr (tree src, unsigned HOST_WIDE_INT *strlen /* = NULL */)
14290 { 14570 {
14291 tree offset_node; 14571 tree offset_node;
14572 tree mem_size;
14292 14573
14293 if (strlen) 14574 if (strlen)
14294 *strlen = 0; 14575 *strlen = 0;
14295 14576
14296 src = string_constant (src, &offset_node); 14577 src = string_constant (src, &offset_node, &mem_size, NULL);
14297 if (src == 0) 14578 if (src == 0)
14298 return NULL; 14579 return NULL;
14299 14580
14300 unsigned HOST_WIDE_INT offset = 0; 14581 unsigned HOST_WIDE_INT offset = 0;
14301 if (offset_node != NULL_TREE) 14582 if (offset_node != NULL_TREE)
14302 { 14583 {
14303 if (!tree_fits_uhwi_p (offset_node)) 14584 if (!tree_fits_uhwi_p (offset_node))
14304 return NULL; 14585 return NULL;
14305 else 14586 else
14306 offset = tree_to_uhwi (offset_node); 14587 offset = tree_to_uhwi (offset_node);
14307 } 14588 }
14308 14589
14590 if (!tree_fits_uhwi_p (mem_size))
14591 return NULL;
14592
14593 /* STRING_LENGTH is the size of the string literal, including any
14594 embedded NULs. STRING_SIZE is the size of the array the string
14595 literal is stored in. */
14309 unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src); 14596 unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
14597 unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size);
14598
14599 /* Ideally this would turn into a gcc_checking_assert over time. */
14600 if (string_length > string_size)
14601 string_length = string_size;
14602
14310 const char *string = TREE_STRING_POINTER (src); 14603 const char *string = TREE_STRING_POINTER (src);
14311 14604
14312 /* Support only properly null-terminated strings. */
14313 if (string_length == 0 14609 if (string_length == 0
14314 || string[string_length - 1] != '\0' 14610 || offset >= string_size)
14315 || offset >= string_length)
14316 return NULL; 14611 return NULL;
14317 14612
14318 if (strlen) 14613 if (strlen)
14319 *strlen = string_length - offset; 14614 {
14320 return string + offset; 14615 /* Compute and store the length of the substring at OFFSET.
14616 All offsets past the initial length refer to null strings. */
14617 if (offset < string_length)
14618 *strlen = string_length - offset;
14619 else
14620 *strlen = 1;
14621 }
14622 else
14623 {
14624 tree eltype = TREE_TYPE (TREE_TYPE (src));
14625 /* Support only properly NUL-terminated single byte strings. */
14626 if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
14627 return NULL;
14628 if (string[string_length - 1] != '\0')
14629 return NULL;
14630 }
14631
14632 return offset < string_length ? string + offset : "";
14633 }
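The net effect of the new c_getstr: the returned pointer addresses the
character at OFFSET within the literal, any offset at or past the
NUL-terminated length yields the empty string, and *STRLEN counts the
remaining bytes including the NUL (so it is 1 for the empty tail). A
standalone model of that behaviour with made-up sizes, not GCC
internals:

#include <stdio.h>

int main (void)
{
  const char string[4] = "ab";     /* 'a' 'b' '\0' '\0'            */
  unsigned long string_length = 3; /* TREE_STRING_LENGTH analogue  */
  unsigned long string_size = 4;   /* mem_size analogue            */
  for (unsigned long offset = 0; offset < string_size; offset++)
    {
      const char *p = offset < string_length ? string + offset : "";
      unsigned long len
        = offset < string_length ? string_length - offset : 1;
      printf ("offset %lu -> \"%s\", *strlen = %lu\n", offset, p, len);
    }
  return 0;
}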
14634
14635 /* Given a tree T, compute which bits in T may be nonzero. */
14636
14637 wide_int
14638 tree_nonzero_bits (const_tree t)
14639 {
14640 switch (TREE_CODE (t))
14641 {
14642 case INTEGER_CST:
14643 return wi::to_wide (t);
14644 case SSA_NAME:
14645 return get_nonzero_bits (t);
14646 case NON_LVALUE_EXPR:
14647 case SAVE_EXPR:
14648 return tree_nonzero_bits (TREE_OPERAND (t, 0));
14649 case BIT_AND_EXPR:
14650 return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
14651 tree_nonzero_bits (TREE_OPERAND (t, 1)));
14652 case BIT_IOR_EXPR:
14653 case BIT_XOR_EXPR:
14654 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
14655 tree_nonzero_bits (TREE_OPERAND (t, 1)));
14656 case COND_EXPR:
14657 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
14658 tree_nonzero_bits (TREE_OPERAND (t, 2)));
14659 CASE_CONVERT:
14660 return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
14661 TYPE_PRECISION (TREE_TYPE (t)),
14662 TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
14663 case PLUS_EXPR:
14664 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
14665 {
14666 wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
14667 wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
14668 if (wi::bit_and (nzbits1, nzbits2) == 0)
14669 return wi::bit_or (nzbits1, nzbits2);
14670 }
14671 break;
14672 case LSHIFT_EXPR:
14673 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
14674 {
14675 tree type = TREE_TYPE (t);
14676 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
14677 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
14678 TYPE_PRECISION (type));
14679 return wi::neg_p (arg1)
14680 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
14681 : wi::lshift (nzbits, arg1);
14682 }
14683 break;
14684 case RSHIFT_EXPR:
14685 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
14686 {
14687 tree type = TREE_TYPE (t);
14688 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
14689 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
14690 TYPE_PRECISION (type));
14691 return wi::neg_p (arg1)
14692 ? wi::lshift (nzbits, -arg1)
14693 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
14694 }
14695 break;
14696 default:
14697 break;
14698 }
14699
14700 return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
14321 } 14701 }
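The PLUS_EXPR case above relies on a carry argument: if the possible
nonzero bits of the two operands are disjoint, no bit position can
generate a carry, so x + y == (x | y) and the union of the masks is a
safe (in fact exact) answer. A standalone check of that rule with fixed
masks (illustrative values):

#include <stdio.h>

int main (void)
{
  unsigned nz1 = 0xF0, nz2 = 0x0F;   /* disjoint nonzero-bit masks */
  if ((nz1 & nz2) == 0)
    printf ("result mask = 0x%X\n", nz1 | nz2);   /* prints 0xFF */
  return 0;
}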
14322 14702
14323 #if CHECKING_P 14703 #if CHECKING_P
14324 14704
14325 namespace selftest { 14705 namespace selftest {
14404 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero))); 14784 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
14405 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one))); 14785 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
14406 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one))); 14786 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
14407 } 14787 }
14408 14788
14789 /* Verify folding of VEC_DUPLICATE_EXPRs. */
14790
14791 static void
14792 test_vec_duplicate_folding ()
14793 {
14794 scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
14795 machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
14796 /* This will be 1 if VEC_MODE isn't a vector mode. */
14797 poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);
14798
14799 tree type = build_vector_type (ssizetype, nunits);
14800 tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
14801 tree dup5_cst = build_vector_from_val (type, ssize_int (5));
14802 ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
14803 }
14804
14409 /* Run all of the selftests within this file. */ 14805 /* Run all of the selftests within this file. */
14410 14806
14411 void 14807 void
14412 fold_const_c_tests () 14808 fold_const_c_tests ()
14413 { 14809 {
14414 test_arithmetic_folding (); 14810 test_arithmetic_folding ();
14415 test_vector_folding (); 14811 test_vector_folding ();
14812 test_vec_duplicate_folding ();
14416 } 14813 }
14417 14814
14418 } // namespace selftest 14815 } // namespace selftest
14419 14816
14420 #endif /* CHECKING_P */ 14817 #endif /* CHECKING_P */