diff gcc/fold-const.c @ 131:84e7813d76e9

gcc-8.2
author mir3636
date Thu, 25 Oct 2018 07:37:49 +0900
parents 04ced10e8804
children 1830386684a0
--- a/gcc/fold-const.c	Fri Oct 27 22:46:09 2017 +0900
+++ b/gcc/fold-const.c	Thu Oct 25 07:37:49 2018 +0900
@@ -1,5 +1,5 @@
 /* Fold a constant sub-tree into a single node for C-compiler
-   Copyright (C) 1987-2017 Free Software Foundation, Inc.
+   Copyright (C) 1987-2018 Free Software Foundation, Inc.
 
 This file is part of GCC.
 
@@ -81,6 +81,8 @@
 #include "selftest.h"
 #include "stringpool.h"
 #include "attribs.h"
+#include "tree-vector-builder.h"
+#include "vec-perm-indices.h"
 
 /* Nonzero if we are folding constants inside an initializer; zero
    otherwise.  */
@@ -113,7 +115,7 @@
 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
 static enum comparison_code comparison_to_compcode (enum tree_code);
 static enum tree_code compcode_to_comparison (enum comparison_code);
-static int twoval_comparison_p (tree, tree *, tree *, int *);
+static int twoval_comparison_p (tree, tree *, tree *);
 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
 static tree optimize_bit_field_compare (location_t, enum tree_code,
 					tree, tree, tree);
@@ -409,10 +411,10 @@
 	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
 	  return true;
 
-	int count = VECTOR_CST_NELTS (t), i;
-
-	for (i = 0; i < count; i++)
-	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
+	/* Steps don't prevent negation.  */
+	unsigned int count = vector_cst_encoded_nelts (t);
+	for (unsigned int i = 0; i < count; ++i)
+	  if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
 	    return false;
 
 	return true;
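
A note on the hunk above: it now visits only the encoded elements.  The
sketch below is a standalone model of the VECTOR_CST encoding (illustrative
C++, not GCC code): a stepped constant stores up to three leading elements
per pattern and extrapolates the rest linearly, so negating just the encoded
elements negates every element.

    #include <cstdio>
    #include <vector>

    /* Model of VECTOR_CST extrapolation: NPATTERNS interleaved series,
       each encoded by its leading elements; later elements continue with
       the step implied by the last two encoded ones.  */
    static long
    elt (const std::vector<long> &enc, unsigned npatterns, unsigned i)
    {
      unsigned pat = i % npatterns, idx = i / npatterns;
      unsigned count = enc.size () / npatterns;
      if (idx < count)
        return enc[idx * npatterns + pat];
      long a = enc[(count - 2) * npatterns + pat];
      long b = enc[(count - 1) * npatterns + pat];
      return b + (long) (idx - (count - 1)) * (b - a);
    }

    int main ()
    {
      std::vector<long> enc = {0, 1, 2};   /* encodes {0, 1, 2, 3, ...} */
      for (unsigned i = 0; i < 8; ++i)
        printf ("%ld ", -elt (enc, 1, i)); /* 0 -1 -2 -3 -4 -5 -6 -7 */
      printf ("\n");
    }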
@@ -428,7 +430,7 @@
     case PLUS_EXPR:
       if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
 	  || HONOR_SIGNED_ZEROS (element_mode (type))
-	  || (INTEGRAL_TYPE_P (type)
+	  || (ANY_INTEGRAL_TYPE_P (type)
 	      && ! TYPE_OVERFLOW_WRAPS (type)))
 	return false;
       /* -(A + B) -> (-B) - A.  */
@@ -441,7 +443,7 @@
       /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
       return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
 	     && !HONOR_SIGNED_ZEROS (element_mode (type))
-	     && (! INTEGRAL_TYPE_P (type)
+	     && (! ANY_INTEGRAL_TYPE_P (type)
 		 || TYPE_OVERFLOW_WRAPS (type));
 
     case MULT_EXPR:
@@ -472,12 +474,15 @@
     case EXACT_DIV_EXPR:
       if (TYPE_UNSIGNED (type))
 	break;
-      if (negate_expr_p (TREE_OPERAND (t, 0)))
+      /* In general we can't negate A in A / B, because if A is INT_MIN and
+	 B is not 1, we change the sign of the result.  */
+      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
+	  && negate_expr_p (TREE_OPERAND (t, 0)))
 	return true;
       /* In general we can't negate B in A / B, because if A is INT_MIN and
 	 B is 1, we may turn this into INT_MIN / -1 which is undefined
 	 and actually traps on some architectures.  */
-      if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
+      if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
 	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
 	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
 	      && ! integer_onep (TREE_OPERAND (t, 1))))
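
The INT_MIN caveat in the hunk above (and in its twin below in
fold_negate_expr) is easy to see with concrete 32-bit values.  A standalone
demonstration; wrapping negation is modelled with unsigned arithmetic to
keep the example free of undefined behaviour:

    #include <cstdint>
    #include <cstdio>

    int main ()
    {
      int32_t a = INT32_MIN, b = 2;
      /* The fold wants -(a / b), which is 1073741824 ...  */
      int64_t want = -(int64_t) (a / b);
      /* ... but negating A first wraps INT32_MIN back onto itself, so
         (-a) / b is -1073741824: the sign of the result has changed.  */
      int32_t neg_a = (int32_t) (0u - (uint32_t) a);
      printf ("want %lld, got %d\n", (long long) want, neg_a / b);
    }

Hence the new TREE_CODE check: A must be a literal constant that
negate_expr_p can vouch for, rather than an arbitrary expression.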
@@ -546,10 +551,8 @@
 	return tem;
       break;
 
+    case POLY_INT_CST:
     case REAL_CST:
-      tem = fold_negate_const (t, type);
-      return tem;
-
     case FIXED_CST:
       tem = fold_negate_const (t, type);
       return tem;
@@ -565,10 +568,10 @@
 
     case VECTOR_CST:
       {
-	int count = VECTOR_CST_NELTS (t), i;
-
-	auto_vec<tree, 32> elts (count);
-	for (i = 0; i < count; i++)
+	tree_vector_builder elts;
+	elts.new_unary_operation (type, t, true);
+	unsigned int count = elts.encoded_nelts ();
+	for (unsigned int i = 0; i < count; ++i)
 	  {
 	    tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
 	    if (elt == NULL_TREE)
@@ -576,7 +579,7 @@
 	    elts.quick_push (elt);
 	  }
 
-	return build_vector (type, elts);
+	return elts.build ();
       }
 
     case COMPLEX_EXPR:
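
For reference, the builder idiom that replaces build_vector throughout this
patch, shown in isolation.  This is a minimal sketch against GCC's internal
API exactly as the hunks above use it; map_vector_elts and f are
hypothetical names, not part of fold-const.c:

    /* Transform a VECTOR_CST elementwise via the encoding: only the
       encoded elements are touched, and build () re-expands the series
       they imply.  */
    static tree
    map_vector_elts (tree type, tree t, tree (*f) (tree), bool stepped_ok)
    {
      tree_vector_builder elts;
      if (!elts.new_unary_operation (type, t, stepped_ok))
        return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
        {
          tree elt = f (VECTOR_CST_ELT (t, i));
          if (elt == NULL_TREE)
            return NULL_TREE;
          elts.quick_push (elt);
        }
      return elts.build ();
    }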
@@ -652,14 +655,17 @@
     case EXACT_DIV_EXPR:
       if (TYPE_UNSIGNED (type))
 	break;
-      if (negate_expr_p (TREE_OPERAND (t, 0)))
+      /* In general we can't negate A in A / B, because if A is INT_MIN and
+	 B is not 1, we change the sign of the result.  */
+      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
+	  && negate_expr_p (TREE_OPERAND (t, 0)))
 	return fold_build2_loc (loc, TREE_CODE (t), type,
 				negate_expr (TREE_OPERAND (t, 0)),
 				TREE_OPERAND (t, 1));
       /* In general we can't negate B in A / B, because if A is INT_MIN and
 	 B is 1, we may turn this into INT_MIN / -1 which is undefined
 	 and actually traps on some architectures.  */
-      if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
+      if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
 	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
 	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
 	       && ! integer_onep (TREE_OPERAND (t, 1))))
@@ -960,24 +966,17 @@
 	 && TYPE_MODE (type1) == TYPE_MODE (type2);
 }
 
-
-/* Combine two integer constants PARG1 and PARG2 under operation CODE
-   to produce a new constant.  Return NULL_TREE if we don't know how
-   to evaluate CODE at compile-time.  */
-
-static tree
-int_const_binop_1 (enum tree_code code, const_tree parg1, const_tree parg2,
-		   int overflowable)
-{
-  wide_int res;
-  tree t;
-  tree type = TREE_TYPE (parg1);
-  signop sign = TYPE_SIGN (type);
-  bool overflow = false;
-
-  wi::tree_to_wide_ref arg1 = wi::to_wide (parg1);
-  wide_int arg2 = wi::to_wide (parg2, TYPE_PRECISION (type));
-
+/* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
+   a new constant in RES.  Return FALSE if we don't know how to
+   evaluate CODE at compile-time.  */
+
+bool
+wide_int_binop (wide_int &res,
+		enum tree_code code, const wide_int &arg1, const wide_int &arg2,
+		signop sign, wi::overflow_type *overflow)
+{
+  wide_int tmp;
+  *overflow = wi::OVF_NONE;
   switch (code)
     {
     case BIT_IOR_EXPR:
@@ -996,49 +995,53 @@
     case LSHIFT_EXPR:
       if (wi::neg_p (arg2))
 	{
-	  arg2 = -arg2;
+	  tmp = -arg2;
 	  if (code == RSHIFT_EXPR)
 	    code = LSHIFT_EXPR;
 	  else
 	    code = RSHIFT_EXPR;
 	}
+      else
+	tmp = arg2;
 
       if (code == RSHIFT_EXPR)
 	/* It's unclear from the C standard whether shifts can overflow.
 	   The following code ignores overflow; perhaps a C standard
 	   interpretation ruling is needed.  */
-	res = wi::rshift (arg1, arg2, sign);
+	res = wi::rshift (arg1, tmp, sign);
       else
-	res = wi::lshift (arg1, arg2);
+	res = wi::lshift (arg1, tmp);
       break;
 
     case RROTATE_EXPR:
     case LROTATE_EXPR:
       if (wi::neg_p (arg2))
 	{
-	  arg2 = -arg2;
+	  tmp = -arg2;
 	  if (code == RROTATE_EXPR)
 	    code = LROTATE_EXPR;
 	  else
 	    code = RROTATE_EXPR;
 	}
+      else
+	tmp = arg2;
 
       if (code == RROTATE_EXPR)
-	res = wi::rrotate (arg1, arg2);
+	res = wi::rrotate (arg1, tmp);
       else
-	res = wi::lrotate (arg1, arg2);
+	res = wi::lrotate (arg1, tmp);
       break;
 
     case PLUS_EXPR:
-      res = wi::add (arg1, arg2, sign, &overflow);
+      res = wi::add (arg1, arg2, sign, overflow);
       break;
 
     case MINUS_EXPR:
-      res = wi::sub (arg1, arg2, sign, &overflow);
+      res = wi::sub (arg1, arg2, sign, overflow);
       break;
 
     case MULT_EXPR:
-      res = wi::mul (arg1, arg2, sign, &overflow);
+      res = wi::mul (arg1, arg2, sign, overflow);
       break;
 
     case MULT_HIGHPART_EXPR:
@@ -1048,50 +1051,50 @@
     case TRUNC_DIV_EXPR:
     case EXACT_DIV_EXPR:
       if (arg2 == 0)
-	return NULL_TREE;
-      res = wi::div_trunc (arg1, arg2, sign, &overflow);
+	return false;
+      res = wi::div_trunc (arg1, arg2, sign, overflow);
       break;
 
     case FLOOR_DIV_EXPR:
       if (arg2 == 0)
-	return NULL_TREE;
-      res = wi::div_floor (arg1, arg2, sign, &overflow);
+	return false;
+      res = wi::div_floor (arg1, arg2, sign, overflow);
       break;
 
     case CEIL_DIV_EXPR:
       if (arg2 == 0)
-	return NULL_TREE;
-      res = wi::div_ceil (arg1, arg2, sign, &overflow);
+	return false;
+      res = wi::div_ceil (arg1, arg2, sign, overflow);
       break;
 
     case ROUND_DIV_EXPR:
       if (arg2 == 0)
-	return NULL_TREE;
-      res = wi::div_round (arg1, arg2, sign, &overflow);
+	return false;
+      res = wi::div_round (arg1, arg2, sign, overflow);
       break;
 
     case TRUNC_MOD_EXPR:
       if (arg2 == 0)
-	return NULL_TREE;
-      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
+	return false;
+      res = wi::mod_trunc (arg1, arg2, sign, overflow);
       break;
 
     case FLOOR_MOD_EXPR:
       if (arg2 == 0)
-	return NULL_TREE;
-      res = wi::mod_floor (arg1, arg2, sign, &overflow);
+	return false;
+      res = wi::mod_floor (arg1, arg2, sign, overflow);
       break;
 
     case CEIL_MOD_EXPR:
       if (arg2 == 0)
-	return NULL_TREE;
-      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
+	return false;
+      res = wi::mod_ceil (arg1, arg2, sign, overflow);
       break;
 
     case ROUND_MOD_EXPR:
       if (arg2 == 0)
-	return NULL_TREE;
-      res = wi::mod_round (arg1, arg2, sign, &overflow);
+	return false;
+      res = wi::mod_round (arg1, arg2, sign, overflow);
       break;
 
     case MIN_EXPR:
@@ -1103,21 +1106,115 @@
       break;
 
     default:
-      return NULL_TREE;
-    }
-
-  t = force_fit_type (type, res, overflowable,
-		      (((sign == SIGNED || overflowable == -1)
-			&& overflow)
-		       | TREE_OVERFLOW (parg1) | TREE_OVERFLOW (parg2)));
-
-  return t;
-}
+      return false;
+    }
+  return true;
+}
+
+/* Combine two poly int's ARG1 and ARG2 under operation CODE to
+   produce a new constant in RES.  Return FALSE if we don't know how
+   to evaluate CODE at compile-time.  */
+
+static bool
+poly_int_binop (poly_wide_int &res, enum tree_code code,
+		const_tree arg1, const_tree arg2,
+		signop sign, wi::overflow_type *overflow)
+{
+  gcc_assert (NUM_POLY_INT_COEFFS != 1);
+  gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
+  switch (code)
+    {
+    case PLUS_EXPR:
+      res = wi::add (wi::to_poly_wide (arg1),
+		     wi::to_poly_wide (arg2), sign, overflow);
+      break;
+
+    case MINUS_EXPR:
+      res = wi::sub (wi::to_poly_wide (arg1),
+		     wi::to_poly_wide (arg2), sign, overflow);
+      break;
+
+    case MULT_EXPR:
+      if (TREE_CODE (arg2) == INTEGER_CST)
+	res = wi::mul (wi::to_poly_wide (arg1),
+		       wi::to_wide (arg2), sign, overflow);
+      else if (TREE_CODE (arg1) == INTEGER_CST)
+	res = wi::mul (wi::to_poly_wide (arg2),
+		       wi::to_wide (arg1), sign, overflow);
+      else
+	return false;
+      break;
+
+    case LSHIFT_EXPR:
+      if (TREE_CODE (arg2) == INTEGER_CST)
+	res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
+      else
+	return false;
+      break;
+
+    case BIT_IOR_EXPR:
+      if (TREE_CODE (arg2) != INTEGER_CST
+	  || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
+			 &res))
+	return false;
+      break;
+
+    default:
+      return false;
+    }
+  return true;
+}
+
+/* Combine two integer constants ARG1 and ARG2 under operation CODE to
+   produce a new constant.  Return NULL_TREE if we don't know how to
+   evaluate CODE at compile-time.  */
 
 tree
-int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
-{
-  return int_const_binop_1 (code, arg1, arg2, 1);
+int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
+		 int overflowable)
+{
+  bool success = false;
+  poly_wide_int poly_res;
+  tree type = TREE_TYPE (arg1);
+  signop sign = TYPE_SIGN (type);
+  wi::overflow_type overflow = wi::OVF_NONE;
+
+  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
+    {
+      wide_int warg1 = wi::to_wide (arg1), res;
+      wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
+      success = wide_int_binop (res, code, warg1, warg2, sign, &overflow);
+      poly_res = res;
+    }
+  else if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
+    success = poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow);
+  if (success)
+    return force_fit_type (type, poly_res, overflowable,
+			   (((sign == SIGNED || overflowable == -1)
+			     && overflow)
+			    | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
+  return NULL_TREE;
+}
+
+/* Return true if binary operation OP distributes over addition in operand
+   OPNO, with the other operand being held constant.  OPNO counts from 1.  */
+
+static bool
+distributes_over_addition_p (tree_code op, int opno)
+{
+  switch (op)
+    {
+    case PLUS_EXPR:
+    case MINUS_EXPR:
+    case MULT_EXPR:
+      return true;
+
+    case LSHIFT_EXPR:
+      return opno == 1;
+
+    default:
+      return false;
+    }
 }
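
A sketch of calling the refactored entry points above (GCC-internal types;
a and b stand for INTEGER_CST trees and are hypothetical):

    wide_int res;
    wi::overflow_type ovf;
    /* Low level: combine two wide_ints, reporting overflow out of band
       instead of folding it into a tree flag.  */
    if (wide_int_binop (res, PLUS_EXPR, wi::to_wide (a), wi::to_wide (b),
                        TYPE_SIGN (TREE_TYPE (a)), &ovf))
      {
        /* res is a + b; ovf records whether it wrapped.  */
      }

    /* Tree level: now also accepts POLY_INT_CSTs, and returns NULL_TREE
       when the combination cannot be evaluated at compile time.  */
    tree sum = int_const_binop (PLUS_EXPR, a, b, /*overflowable=*/1);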
 
 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
@@ -1135,7 +1232,7 @@
   STRIP_NOPS (arg1);
   STRIP_NOPS (arg2);
 
-  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
+  if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
     {
       if (code == POINTER_PLUS_EXPR)
 	return int_const_binop (PLUS_EXPR,
@@ -1413,13 +1510,40 @@
     }
 
   if (TREE_CODE (arg1) == VECTOR_CST
-      && TREE_CODE (arg2) == VECTOR_CST)
+      && TREE_CODE (arg2) == VECTOR_CST
+      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
+		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
     {
       tree type = TREE_TYPE (arg1);
-      int count = VECTOR_CST_NELTS (arg1), i;
-
-      auto_vec<tree, 32> elts (count);
-      for (i = 0; i < count; i++)
+      bool step_ok_p;
+      if (VECTOR_CST_STEPPED_P (arg1)
+	  && VECTOR_CST_STEPPED_P (arg2))
+	/* We can operate directly on the encoding if:
+
+	      a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
+	    implies
+	      (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)
+
+	   Addition and subtraction are the supported operators
+	   for which this is true.  */
+	step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
+      else if (VECTOR_CST_STEPPED_P (arg1))
+	/* We can operate directly on stepped encodings if:
+
+	     a3 - a2 == a2 - a1
+	   implies:
+	     (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)
+
+	   which is true if (x -> x op c) distributes over addition.  */
+	step_ok_p = distributes_over_addition_p (code, 1);
+      else
+	/* Similarly in reverse.  */
+	step_ok_p = distributes_over_addition_p (code, 2);
+      tree_vector_builder elts;
+      if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
+	return NULL_TREE;
+      unsigned int count = elts.encoded_nelts ();
+      for (unsigned int i = 0; i < count; ++i)
 	{
 	  tree elem1 = VECTOR_CST_ELT (arg1, i);
 	  tree elem2 = VECTOR_CST_ELT (arg2, i);
@@ -1433,7 +1557,7 @@
 	  elts.quick_push (elt);
 	}
 
-      return build_vector (type, elts);
+      return elts.build ();
     }
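
Why distributes_over_addition_p is the right gate for stepped operands: if
a_i = base + i * step, then (a_i op c) keeps a constant difference exactly
when x -> (x op c) preserves differences, i.e. distributes over addition in
that operand.  A standalone demonstration:

    #include <cstdio>

    int main ()
    {
      long base = 2, step = 3, c = 4;
      for (int i = 0; i < 4; ++i)
        {
          long a = base + i * step;  /* 2, 5, 8, 11 */
          printf ("%ld %ld %ld\n",
                  a * c,   /* 8, 20, 32, 44: still an arithmetic series */
                  a << c,  /* 32, 80, 128, 176: still a series (opno 1) */
                  c << a); /* 16, 128, 1024, 8192: geometric, not a
                              series, so opno 2 must reject LSHIFT_EXPR */
        }
    }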
 
   /* Shifts allow a scalar offset for a vector.  */
@@ -1441,10 +1565,12 @@
       && TREE_CODE (arg2) == INTEGER_CST)
     {
       tree type = TREE_TYPE (arg1);
-      int count = VECTOR_CST_NELTS (arg1), i;
-
-      auto_vec<tree, 32> elts (count);
-      for (i = 0; i < count; i++)
+      bool step_ok_p = distributes_over_addition_p (code, 1);
+      tree_vector_builder elts;
+      if (!elts.new_unary_operation (type, arg1, step_ok_p))
+	return NULL_TREE;
+      unsigned int count = elts.encoded_nelts ();
+      for (unsigned int i = 0; i < count; ++i)
 	{
 	  tree elem1 = VECTOR_CST_ELT (arg1, i);
 
@@ -1457,7 +1583,7 @@
 	  elts.quick_push (elt);
 	}
 
-      return build_vector (type, elts);
+      return elts.build ();
     }
   return NULL_TREE;
 }
@@ -1475,6 +1601,12 @@
      result as argument put those cases that need it here.  */
   switch (code)
     {
+    case VEC_SERIES_EXPR:
+      if (CONSTANT_CLASS_P (arg1)
+	  && CONSTANT_CLASS_P (arg2))
+	return build_vec_series (type, arg1, arg2);
+      return NULL_TREE;
+
     case COMPLEX_EXPR:
       if ((TREE_CODE (arg1) == REAL_CST
 	   && TREE_CODE (arg2) == REAL_CST)
@@ -1483,35 +1615,50 @@
 	return build_complex (type, arg1, arg2);
       return NULL_TREE;
 
+    case POINTER_DIFF_EXPR:
+      if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
+	{
+	  poly_offset_int res = (wi::to_poly_offset (arg1)
+				 - wi::to_poly_offset (arg2));
+	  return force_fit_type (type, res, 1,
+				 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
+	}
+      return NULL_TREE;
+
     case VEC_PACK_TRUNC_EXPR:
     case VEC_PACK_FIX_TRUNC_EXPR:
+    case VEC_PACK_FLOAT_EXPR:
       {
-	unsigned int out_nelts, in_nelts, i;
+	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
 
 	if (TREE_CODE (arg1) != VECTOR_CST
 	    || TREE_CODE (arg2) != VECTOR_CST)
 	  return NULL_TREE;
 
-	in_nelts = VECTOR_CST_NELTS (arg1);
+	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
+	  return NULL_TREE;
+
 	out_nelts = in_nelts * 2;
-	gcc_assert (in_nelts == VECTOR_CST_NELTS (arg2)
-		    && out_nelts == TYPE_VECTOR_SUBPARTS (type));
-
-	auto_vec<tree, 32> elts (out_nelts);
+	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
+		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
+
+	tree_vector_builder elts (type, out_nelts, 1);
 	for (i = 0; i < out_nelts; i++)
 	  {
 	    tree elt = (i < in_nelts
 			? VECTOR_CST_ELT (arg1, i)
 			: VECTOR_CST_ELT (arg2, i - in_nelts));
 	    elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
-				      ? NOP_EXPR : FIX_TRUNC_EXPR,
+				      ? NOP_EXPR
+				      : code == VEC_PACK_FLOAT_EXPR
+				      ? FLOAT_EXPR : FIX_TRUNC_EXPR,
 				      TREE_TYPE (type), elt);
 	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
 	      return NULL_TREE;
 	    elts.quick_push (elt);
 	  }
 
-	return build_vector (type, elts);
+	return elts.build ();
       }
 
     case VEC_WIDEN_MULT_LO_EXPR:
@@ -1519,15 +1666,16 @@
     case VEC_WIDEN_MULT_EVEN_EXPR:
     case VEC_WIDEN_MULT_ODD_EXPR:
       {
-	unsigned int out_nelts, in_nelts, out, ofs, scale;
+	unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;
 
 	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
 	  return NULL_TREE;
 
-	in_nelts = VECTOR_CST_NELTS (arg1);
+	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
+	  return NULL_TREE;
 	out_nelts = in_nelts / 2;
-	gcc_assert (in_nelts == VECTOR_CST_NELTS (arg2)
-		    && out_nelts == TYPE_VECTOR_SUBPARTS (type));
+	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
+		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
 
 	if (code == VEC_WIDEN_MULT_LO_EXPR)
 	  scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
@@ -1538,7 +1686,7 @@
 	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
 	  scale = 1, ofs = 1;
 
-	auto_vec<tree, 32> elts (out_nelts);
+	tree_vector_builder elts (type, out_nelts, 1);
 	for (out = 0; out < out_nelts; out++)
 	  {
 	    unsigned int in = (out << scale) + ofs;
@@ -1555,7 +1703,7 @@
 	    elts.quick_push (elt);
 	  }
 
-	return build_vector (type, elts);
+	return elts.build ();
       }
 
     default:;
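
The known_eq/maybe_ne/is_constant calls this patch introduces are GCC's
poly_int predicates: a poly_uint64 such as an SVE element count compares
definitively only when the answer holds for every runtime vector length.
A fragment sketching the idiom (GCC-internal API; type and other_type are
hypothetical):

    poly_uint64 n = TYPE_VECTOR_SUBPARTS (type);  /* e.g. 4 + 4x */
    unsigned HOST_WIDE_INT nelts;
    if (!n.is_constant (&nelts))
      return NULL_TREE;  /* variable length: punt, as the hunks above do */
    if (known_eq (n, TYPE_VECTOR_SUBPARTS (other_type)))
      ;  /* equal for every vector length */
    if (maybe_ne (n, 4U))
      ;  /* might differ for some length: a fold requiring n == 4 stops */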
@@ -1583,7 +1731,8 @@
       && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
       && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
       && code != NEGATE_EXPR
-      && code != ABS_EXPR)
+      && code != ABS_EXPR
+      && code != ABSU_EXPR)
     return NULL_TREE;
 
   switch (code)
@@ -1618,6 +1767,7 @@
       }
 
     case ABS_EXPR:
+    case ABSU_EXPR:
       if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
 	return fold_abs_const (arg0, type);
       break;
@@ -1634,14 +1784,18 @@
     case BIT_NOT_EXPR:
       if (TREE_CODE (arg0) == INTEGER_CST)
 	return fold_not_const (arg0, type);
+      else if (POLY_INT_CST_P (arg0))
+	return wide_int_to_tree (type, -poly_int_cst_value (arg0));
       /* Perform BIT_NOT_EXPR on each element individually.  */
       else if (TREE_CODE (arg0) == VECTOR_CST)
 	{
 	  tree elem;
-	  unsigned count = VECTOR_CST_NELTS (arg0), i;
-
-	  auto_vec<tree, 32> elements (count);
-	  for (i = 0; i < count; i++)
+
+	  /* This can cope with stepped encodings because ~x == -1 - x.  */
+	  tree_vector_builder elements;
+	  elements.new_unary_operation (type, arg0, true);
+	  unsigned int i, count = elements.encoded_nelts ();
+	  for (i = 0; i < count; ++i)
 	    {
 	      elem = VECTOR_CST_ELT (arg0, i);
 	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
@@ -1650,7 +1804,7 @@
 	      elements.quick_push (elem);
 	    }
 	  if (i == count)
-	    return build_vector (type, elements);
+	    return elements.build ();
 	}
       break;
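
The "~x == -1 - x" remark is the whole justification for passing true for
stepped encodings here: BIT_NOT maps base + i * step to the stepped series
(-1 - base) + i * (-step).  A standalone check of the identity:

    #include <cstdint>
    #include <cstdio>

    int main ()
    {
      for (int32_t x = -2; x <= 2; ++x)
        printf ("%d %d\n", ~x, -1 - x);  /* the two columns always agree */
    }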
 
@@ -1673,28 +1827,35 @@
     case VEC_UNPACK_HI_EXPR:
     case VEC_UNPACK_FLOAT_LO_EXPR:
     case VEC_UNPACK_FLOAT_HI_EXPR:
+    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
+    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
       {
-	unsigned int out_nelts, in_nelts, i;
+	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
 	enum tree_code subcode;
 
 	if (TREE_CODE (arg0) != VECTOR_CST)
 	  return NULL_TREE;
 
-	in_nelts = VECTOR_CST_NELTS (arg0);
+	if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
+	  return NULL_TREE;
 	out_nelts = in_nelts / 2;
-	gcc_assert (out_nelts == TYPE_VECTOR_SUBPARTS (type));
+	gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
 
 	unsigned int offset = 0;
 	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
-				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
+				   || code == VEC_UNPACK_FLOAT_LO_EXPR
+				   || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
 	  offset = out_nelts;
 
 	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
 	  subcode = NOP_EXPR;
-	else
+	else if (code == VEC_UNPACK_FLOAT_LO_EXPR
+		 || code == VEC_UNPACK_FLOAT_HI_EXPR)
 	  subcode = FLOAT_EXPR;
-
-	auto_vec<tree, 32> elts (out_nelts);
+	else
+	  subcode = FIX_TRUNC_EXPR;
+
+	tree_vector_builder elts (type, out_nelts, 1);
 	for (i = 0; i < out_nelts; i++)
 	  {
 	    tree elt = fold_convert_const (subcode, TREE_TYPE (type),
@@ -1704,38 +1865,13 @@
 	    elts.quick_push (elt);
 	  }
 
-	return build_vector (type, elts);
+	return elts.build ();
       }
 
-    case REDUC_MIN_EXPR:
-    case REDUC_MAX_EXPR:
-    case REDUC_PLUS_EXPR:
-      {
-	unsigned int nelts, i;
-	enum tree_code subcode;
-
-	if (TREE_CODE (arg0) != VECTOR_CST)
-	  return NULL_TREE;
-	nelts = VECTOR_CST_NELTS (arg0);
-
-	switch (code)
-	  {
-	  case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
-	  case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
-	  case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
-	  default: gcc_unreachable ();
-	  }
-
-	tree res = VECTOR_CST_ELT (arg0, 0);
-	for (i = 1; i < nelts; i++)
-	  {
-	    res = const_binop (subcode, res, VECTOR_CST_ELT (arg0, i));
-	    if (res == NULL_TREE || !CONSTANT_CLASS_P (res))
-	      return NULL_TREE;
-	  }
-
-	return res;
-      }
+    case VEC_DUPLICATE_EXPR:
+      if (CONSTANT_CLASS_P (arg0))
+	return build_vector_from_val (type, arg0);
+      return NULL_TREE;
 
     default:
       break;
@@ -1748,7 +1884,7 @@
    indicates which particular sizetype to create.  */
 
 tree
-size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
+size_int_kind (poly_int64 number, enum size_type_kind kind)
 {
   return build_int_cst (sizetype_tab[(int) kind], number);
 }
@@ -1769,8 +1905,8 @@
   gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                        TREE_TYPE (arg1)));
 
-  /* Handle the special case of two integer constants faster.  */
-  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
+  /* Handle the special case of two poly_int constants faster.  */
+  if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
     {
       /* And some specific cases even faster than that.  */
       if (code == PLUS_EXPR)
@@ -1794,7 +1930,9 @@
       /* Handle general case of two integer constants.  For sizetype
          constant calculations we always want to know about overflow,
 	 even in the unsigned case.  */
-      return int_const_binop_1 (code, arg0, arg1, -1);
+      tree res = int_const_binop (code, arg0, arg1, -1);
+      if (res != NULL_TREE)
+	return res;
     }
 
   return fold_build2_loc (loc, code, type, arg0, arg1);
@@ -2118,9 +2256,20 @@
 static tree
 fold_convert_const (enum tree_code code, tree type, tree arg1)
 {
-  if (TREE_TYPE (arg1) == type)
+  tree arg_type = TREE_TYPE (arg1);
+  if (arg_type == type)
     return arg1;
 
+  /* We can't widen types, since the runtime value could overflow the
+     original type before being extended to the new type.  */
+  if (POLY_INT_CST_P (arg1)
+      && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
+      && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
+    return build_poly_int_cst (type,
+			       poly_wide_int::from (poly_int_cst_value (arg1),
+						    TYPE_PRECISION (type),
+						    TYPE_SIGN (arg_type)));
+
   if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
       || TREE_CODE (type) == OFFSET_TYPE)
     {
@@ -2152,12 +2301,21 @@
   else if (TREE_CODE (type) == VECTOR_TYPE)
     {
       if (TREE_CODE (arg1) == VECTOR_CST
-	  && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
-	{
-	  int len = VECTOR_CST_NELTS (arg1);
+	  && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
+	{
 	  tree elttype = TREE_TYPE (type);
-	  auto_vec<tree, 32> v (len);
-	  for (int i = 0; i < len; ++i)
+	  tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
+	  /* We can't handle steps directly when extending, since the
+	     values need to wrap at the original precision first.  */
+	  bool step_ok_p
+	    = (INTEGRAL_TYPE_P (elttype)
+	       && INTEGRAL_TYPE_P (arg1_elttype)
+	       && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
+	  tree_vector_builder v;
+	  if (!v.new_unary_operation (type, arg1, step_ok_p))
+	    return NULL_TREE;
+	  unsigned int len = v.encoded_nelts ();
+	  for (unsigned int i = 0; i < len; ++i)
 	    {
 	      tree elt = VECTOR_CST_ELT (arg1, i);
 	      tree cvt = fold_convert_const (code, elttype, elt);
@@ -2165,7 +2323,7 @@
 		return NULL_TREE;
 	      v.quick_push (cvt);
 	    }
-	  return build_vector (type, v);
+	  return v.build ();
 	}
     }
   return NULL_TREE;
@@ -2205,7 +2363,9 @@
     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
     case POINTER_TYPE: case REFERENCE_TYPE:
     case OFFSET_TYPE:
-      return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
+      return (INTEGRAL_TYPE_P (orig)
+	      || (POINTER_TYPE_P (orig)
+		  && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
 	      || TREE_CODE (orig) == OFFSET_TYPE);
 
     case REAL_TYPE:
@@ -2632,6 +2792,22 @@
     }
 }
 
+/* Return true if COND1 tests the opposite condition of COND2.  */
+
+bool
+inverse_conditions_p (const_tree cond1, const_tree cond2)
+{
+  return (COMPARISON_CLASS_P (cond1)
+	  && COMPARISON_CLASS_P (cond2)
+	  && (invert_tree_comparison
+	      (TREE_CODE (cond1),
+	       HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
+	  && operand_equal_p (TREE_OPERAND (cond1, 0),
+			      TREE_OPERAND (cond2, 0), 0)
+	  && operand_equal_p (TREE_OPERAND (cond1, 1),
+			      TREE_OPERAND (cond2, 1), 0));
+}
+
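+
+   Usage sketch for the new predicate (GCC-internal trees; a and b are
+   hypothetical operands):
+
+       tree c1 = build2 (LT_EXPR, boolean_type_node, a, b);
+       tree c2 = build2 (GE_EXPR, boolean_type_node, a, b);
+       bool inv = inverse_conditions_p (c1, c2);  // true
+
+   For floating point, invert_tree_comparison refuses the inversion when
+   NaNs are possible, since !(a < b) is not a >= b there.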
 /* Return a tree for the comparison which is the combination of
    doing the AND or OR (depending on CODE) of the two operations LCODE
    and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
@@ -2939,17 +3115,19 @@
 
       case VECTOR_CST:
 	{
-	  unsigned i;
-
-	  if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
+	  if (VECTOR_CST_LOG2_NPATTERNS (arg0)
+	      != VECTOR_CST_LOG2_NPATTERNS (arg1))
+	    return 0;
+
+	  if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
+	      != VECTOR_CST_NELTS_PER_PATTERN (arg1))
 	    return 0;
 
-	  for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
-	    {
-	      if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
-				    VECTOR_CST_ELT (arg1, i), flags))
-		return 0;
-	    }
+	  unsigned int count = vector_cst_encoded_nelts (arg0);
+	  for (unsigned int i = 0; i < count; ++i)
+	    if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
+				  VECTOR_CST_ENCODED_ELT (arg1, i), flags))
+	      return 0;
 	  return 1;
 	}
 
@@ -3153,7 +3331,6 @@
 	case TRUTH_ORIF_EXPR:
 	  return OP_SAME (0) && OP_SAME (1);
 
-	case FMA_EXPR:
 	case WIDEN_MULT_PLUS_EXPR:
 	case WIDEN_MULT_MINUS_EXPR:
 	  if (!OP_SAME (2))
@@ -3206,6 +3383,7 @@
 
 	case CLEANUP_POINT_EXPR:
 	case EXPR_STMT:
+	case SAVE_EXPR:
 	  if (flags & OEP_LEXICOGRAPHIC)
 	    return OP_SAME (0);
 	  return 0;
@@ -3273,7 +3451,7 @@
     case tcc_declaration:
       /* Consider __builtin_sqrt equal to sqrt.  */
       return (TREE_CODE (arg0) == FUNCTION_DECL
-	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
+	      && fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
 	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
 	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
 
@@ -3295,8 +3473,8 @@
 	     We only tested element precision and modes to match.
 	     Vectors may be BLKmode and thus also check that the number of
 	     parts match.  */
-	  if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
-	      != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
+	  if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
+			TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
 	    return 0;
 
 	  vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
@@ -3341,7 +3519,8 @@
 	      if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
 		return 1;
 	      if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
-				    OEP_LEXICOGRAPHIC))
+				    flags & (OEP_LEXICOGRAPHIC
+					     | OEP_NO_HASH_CHECK)))
 		return 0;
 	    }
 	}
@@ -3354,6 +3533,10 @@
 	  if (flags & OEP_LEXICOGRAPHIC)
 	    return OP_SAME_WITH_NULL (0);
 	  return 0;
+	case DEBUG_BEGIN_STMT:
+	  if (flags & OEP_LEXICOGRAPHIC)
+	    return 1;
+	  return 0;
 	default:
 	  return 0;
 	 }
@@ -3406,13 +3589,12 @@
    two different values, which will be stored in *CVAL1 and *CVAL2; if
    they are nonzero it means that some operands have already been found.
    No variables may be used anywhere else in the expression except in the
-   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
-   the expression and save_expr needs to be called with CVAL1 and CVAL2.
+   comparisons.
 
    If this is true, return 1.  Otherwise, return zero.  */
 
 static int
-twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
+twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
 {
   enum tree_code code = TREE_CODE (arg);
   enum tree_code_class tclass = TREE_CODE_CLASS (code);
@@ -3425,39 +3607,23 @@
 	       || code == COMPOUND_EXPR))
     tclass = tcc_binary;
 
-  else if (tclass == tcc_expression && code == SAVE_EXPR
-	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
-    {
-      /* If we've already found a CVAL1 or CVAL2, this expression is
-	 two complex to handle.  */
-      if (*cval1 || *cval2)
-	return 0;
-
-      tclass = tcc_unary;
-      *save_p = 1;
-    }
-
   switch (tclass)
     {
     case tcc_unary:
-      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
+      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
 
     case tcc_binary:
-      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
-	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
-				      cval1, cval2, save_p));
+      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
+	      && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
 
     case tcc_constant:
       return 1;
 
     case tcc_expression:
       if (code == COND_EXPR)
-	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
-				     cval1, cval2, save_p)
-		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
-					cval1, cval2, save_p)
-		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
-					cval1, cval2, save_p));
+	return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
+		&& twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
+		&& twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
       return 0;
 
     case tcc_comparison:
@@ -3817,7 +3983,7 @@
 
 static tree
 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
-		    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
+		    HOST_WIDE_INT bitsize, poly_int64 bitpos,
 		    int unsignedp, int reversep)
 {
   tree result, bftype;
@@ -3827,7 +3993,7 @@
     {
       tree ninner = TREE_OPERAND (orig_inner, 0);
       machine_mode nmode;
-      HOST_WIDE_INT nbitsize, nbitpos;
+      poly_int64 nbitsize, nbitpos;
       tree noffset;
       int nunsignedp, nreversep, nvolatilep = 0;
       tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
@@ -3835,9 +4001,7 @@
 				       &nreversep, &nvolatilep);
       if (base == inner
 	  && noffset == NULL_TREE
-	  && nbitsize >= bitsize
-	  && nbitpos <= bitpos
-	  && bitpos + bitsize <= nbitpos + nbitsize
+	  && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
 	  && !reversep
 	  && !nreversep
 	  && !nvolatilep)
@@ -3853,7 +4017,7 @@
 			 build_fold_addr_expr (inner),
 			 build_int_cst (ptr_type_node, 0));
 
-  if (bitpos == 0 && !reversep)
+  if (known_eq (bitpos, 0) && !reversep)
     {
       tree size = TYPE_SIZE (TREE_TYPE (inner));
       if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
@@ -3902,7 +4066,8 @@
 optimize_bit_field_compare (location_t loc, enum tree_code code,
 			    tree compare_type, tree lhs, tree rhs)
 {
-  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
+  poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
+  HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
   tree type = TREE_TYPE (lhs);
   tree unsigned_type;
   int const_p = TREE_CODE (rhs) == INTEGER_CST;
@@ -3916,14 +4081,20 @@
   tree offset;
 
   /* Get all the information about the extractions being done.  If the bit size
-     if the same as the size of the underlying object, we aren't doing an
+     is the same as the size of the underlying object, we aren't doing an
      extraction at all and so can do nothing.  We also don't want to
      do anything if the inner expression is a PLACEHOLDER_EXPR since we
      then will no longer be able to replace it.  */
-  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
+  linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
 				&lunsignedp, &lreversep, &lvolatilep);
-  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
-      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
+  if (linner == lhs
+      || !known_size_p (plbitsize)
+      || !plbitsize.is_constant (&lbitsize)
+      || !plbitpos.is_constant (&lbitpos)
+      || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
+      || offset != 0
+      || TREE_CODE (linner) == PLACEHOLDER_EXPR
+      || lvolatilep)
     return 0;
 
   if (const_p)
@@ -3936,19 +4107,24 @@
        = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
 			      &runsignedp, &rreversep, &rvolatilep);
 
-     if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
-	 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
-	 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
+     if (rinner == rhs
+	 || maybe_ne (lbitpos, rbitpos)
+	 || maybe_ne (lbitsize, rbitsize)
+	 || lunsignedp != runsignedp
+	 || lreversep != rreversep
+	 || offset != 0
+	 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
+	 || rvolatilep)
        return 0;
    }
 
   /* Honor the C++ memory model and mimic what RTL expansion does.  */
-  unsigned HOST_WIDE_INT bitstart = 0;
-  unsigned HOST_WIDE_INT bitend = 0;
+  poly_uint64 bitstart = 0;
+  poly_uint64 bitend = 0;
   if (TREE_CODE (lhs) == COMPONENT_REF)
     {
-      get_bit_range (&bitstart, &bitend, lhs, &lbitpos, &offset);
-      if (offset != NULL_TREE)
+      get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
+      if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
 	return 0;
     }
 
@@ -4116,10 +4292,14 @@
 	return 0;
     }
 
-  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
-			       punsignedp, preversep, pvolatilep);
+  poly_int64 poly_bitsize, poly_bitpos;
+  inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
+			       pmode, punsignedp, preversep, pvolatilep);
   if ((inner == exp && and_mask == 0)
-      || *pbitsize < 0 || offset != 0
+      || !poly_bitsize.is_constant (pbitsize)
+      || !poly_bitpos.is_constant (pbitpos)
+      || *pbitsize < 0
+      || offset != 0
       || TREE_CODE (inner) == PLACEHOLDER_EXPR
       /* Reject out-of-bound accesses (PR79731).  */
       || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
@@ -4776,8 +4956,8 @@
   /* Disable this optimization for function pointer expressions
      on targets that require function pointer canonicalization.  */
   if (targetm.have_canonicalize_funcptr_for_compare ()
-      && TREE_CODE (etype) == POINTER_TYPE
-      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
+      && POINTER_TYPE_P (etype)
+      && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
     return NULL_TREE;
 
   if (! in_p)
@@ -4930,6 +5110,29 @@
       tem = high0, high0 = high1, high1 = tem;
     }
 
+  /* If the second range is != high1, where high1 is the maximum value
+     of the type, try first merging with the < high1 range.  */
+  if (low1
+      && high1
+      && TREE_CODE (low1) == INTEGER_CST
+      && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
+	  || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
+	      && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
+			   GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
+      && operand_equal_p (low1, high1, 0))
+    {
+      if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
+	  && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
+			   !in1_p, NULL_TREE, range_predecessor (low1)))
+	return true;
+      /* Similarly, if the second range is != low1, where low1 is the
+	 minimum value of the type, try first merging with the > low1
+	 range.  */
+      if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
+	  && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
+			   !in1_p, range_successor (low1), NULL_TREE))
+	return true;
+    }
+
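
The new block rewrites "x != MAX" as "x < MAX" (and "x != MIN" as
"x > MIN") before retrying the merge.  For unsigned char the equivalence
can be checked exhaustively (standalone):

    #include <cstdio>

    int main ()
    {
      for (unsigned c = 0; c <= 255; ++c)
        if ((c != 255) != (c < 255))
          return printf ("mismatch at %u\n", c), 1;
      printf ("c != 255 and c < 255 agree for every unsigned char\n");
    }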
   /* Now flag two cases, whether the ranges are disjoint or whether the
      second range is totally subsumed in the first.  Note that the tests
      below are simplified by the ones above.  */
@@ -5038,8 +5241,9 @@
 		switch (TREE_CODE (TREE_TYPE (low0)))
 		  {
 		  case ENUMERAL_TYPE:
-		    if (TYPE_PRECISION (TREE_TYPE (low0))
-			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
+		    if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
+				  GET_MODE_BITSIZE
+				    (TYPE_MODE (TREE_TYPE (low0)))))
 		      break;
 		    /* FALLTHROUGH */
 		  case INTEGER_TYPE:
@@ -5061,8 +5265,9 @@
 		switch (TREE_CODE (TREE_TYPE (high1)))
 		  {
 		  case ENUMERAL_TYPE:
-		    if (TYPE_PRECISION (TREE_TYPE (high1))
-			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
+		    if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
+				  GET_MODE_BITSIZE
+				    (TYPE_MODE (TREE_TYPE (high1)))))
 		      break;
 		    /* FALLTHROUGH */
 		  case INTEGER_TYPE:
@@ -5354,10 +5559,10 @@
   if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
       && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
 		       in1_p, low1, high1)
-      && 0 != (tem = (build_range_check (loc, type,
-					 lhs != 0 ? lhs
-					 : rhs != 0 ? rhs : integer_zero_node,
-					 in_p, low, high))))
+      && (tem = (build_range_check (loc, type,
+				    lhs != 0 ? lhs
+				    : rhs != 0 ? rhs : integer_zero_node,
+				    in_p, low, high))) != 0)
     {
       if (strict_overflow_p)
 	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
@@ -5387,12 +5592,12 @@
 	{
 	  tree common = save_expr (lhs);
 
-	  if (0 != (lhs = build_range_check (loc, type, common,
-					     or_op ? ! in0_p : in0_p,
-					     low0, high0))
-	      && (0 != (rhs = build_range_check (loc, type, common,
-						 or_op ? ! in1_p : in1_p,
-						 low1, high1))))
+	  if ((lhs = build_range_check (loc, type, common,
+					or_op ? ! in0_p : in0_p,
+					low0, high0)) != 0
+	      && (rhs = build_range_check (loc, type, common,
+					   or_op ? ! in1_p : in1_p,
+					   low1, high1)) != 0)
 	    {
 	      if (strict_overflow_p)
 		fold_overflow_warning (warnmsg,
@@ -5804,12 +6009,13 @@
     }
 
   /* If the right sides are not constant, do the same for it.  Also,
-     disallow this optimization if a size or signedness mismatch occurs
-     between the left and right sides.  */
+     disallow this optimization if a size, signedness or storage order
+     mismatch occurs between the left and right sides.  */
   if (l_const == 0)
     {
       if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
 	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
+	  || ll_reversep != lr_reversep
 	  /* Make sure the two fields on the right
 	     correspond to the left without being swapped.  */
 	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
@@ -6091,10 +6297,9 @@
       if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
 	  && TREE_CODE (t2) == INTEGER_CST
 	  && !TREE_OVERFLOW (t2)
-	  && (0 != (t1 = extract_muldiv (op0, t2, code,
-					 code == MULT_EXPR
-					 ? ctype : NULL_TREE,
-					 strict_overflow_p))))
+	  && (t1 = extract_muldiv (op0, t2, code,
+				   code == MULT_EXPR ? ctype : NULL_TREE,
+				   strict_overflow_p)) != 0)
 	return t1;
       break;
 
@@ -6162,10 +6367,9 @@
 	     so check for it explicitly here.  */
 	  && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
 			wi::to_wide (op1))
-	  && 0 != (t1 = fold_convert (ctype,
-				      const_binop (LSHIFT_EXPR,
-						   size_one_node,
-						   op1)))
+	  && (t1 = fold_convert (ctype,
+				 const_binop (LSHIFT_EXPR, size_one_node,
+					      op1))) != 0
 	  && !TREE_OVERFLOW (t1))
 	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
 				       ? MULT_EXPR : FLOOR_DIV_EXPR,
@@ -6304,14 +6508,14 @@
       if (tcode == code)
 	{
 	  bool overflow_p = false;
-	  bool overflow_mul_p;
+	  wi::overflow_type overflow_mul;
 	  signop sign = TYPE_SIGN (ctype);
 	  unsigned prec = TYPE_PRECISION (ctype);
 	  wide_int mul = wi::mul (wi::to_wide (op1, prec),
 				  wi::to_wide (c, prec),
-				  sign, &overflow_mul_p);
+				  sign, &overflow_mul);
 	  overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
-	  if (overflow_mul_p
+	  if (overflow_mul
 	      && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
 	    overflow_p = true;
 	  if (!overflow_p)
@@ -6405,6 +6609,13 @@
   tree rhs = NULL_TREE;
   enum tree_code cond_code = COND_EXPR;
 
+  /* Do not move possibly trapping operations into the conditional as this
+     pessimizes code and causes gimplification issues when applied late.  */
+  if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
+			      ANY_INTEGRAL_TYPE_P (type)
+			      && TYPE_OVERFLOW_TRAPS (type), op1))
+    return NULL_TREE;
+
   if (TREE_CODE (cond) == COND_EXPR
       || TREE_CODE (cond) == VEC_COND_EXPR)
     {
@@ -6523,7 +6734,7 @@
 {
   tree prod, tmp, type = TREE_TYPE (c1);
   signop sign = TYPE_SIGN (type);
-  bool overflow;
+  wi::overflow_type overflow;
 
   /* We have to do this the hard way to detect unsigned overflow.
      prod = int_const_binop (MULT_EXPR, c1, c2);  */
@@ -6933,7 +7144,7 @@
   if (!same)
     return NULL_TREE;
 
-  if (! INTEGRAL_TYPE_P (type)
+  if (! ANY_INTEGRAL_TYPE_P (type)
       || TYPE_OVERFLOW_WRAPS (type)
       /* We are neither factoring zero nor minus one.  */
       || TREE_CODE (same) == INTEGER_CST)
@@ -6945,7 +7156,7 @@
 
   /* Same may be zero and thus the operation 'code' may overflow.  Likewise
      same may be minus one and thus the multiplication may overflow.  Perform
-     the operations in an unsigned type.  */
+     the sum operation in an unsigned type.  */
   tree utype = unsigned_type_for (type);
   tree tem = fold_build2_loc (loc, code, utype,
 			      fold_convert_loc (loc, utype, alt0),
@@ -6958,9 +7169,9 @@
     return fold_build2_loc (loc, MULT_EXPR, type,
 			    fold_convert (type, tem), same);
 
-  return fold_convert_loc (loc, type,
-			   fold_build2_loc (loc, MULT_EXPR, utype, tem,
-					    fold_convert_loc (loc, utype, same)));
+  /* Do not resort to unsigned multiplication because
+     we lose the no-overflow property of the expression.  */
+  return NULL_TREE;
 }
 
 /* Subroutine of native_encode_expr.  Encode the INTEGER_CST
@@ -7144,12 +7355,13 @@
 static int
 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
 {
-  unsigned i, count;
+  unsigned HOST_WIDE_INT i, count;
   int size, offset;
   tree itype, elem;
 
   offset = 0;
-  count = VECTOR_CST_NELTS (expr);
+  if (!VECTOR_CST_NELTS (expr).is_constant (&count))
+    return 0;
   itype = TREE_TYPE (TREE_TYPE (expr));
   size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
   for (i = 0; i < count; i++)
@@ -7166,7 +7378,7 @@
 	return 0;
       offset += res;
       if (offset >= len)
-	return offset;
+	return (off == -1 && i < count - 1) ? 0 : offset;
       if (off != -1)
 	off = 0;
     }
@@ -7386,18 +7598,19 @@
    If the buffer cannot be interpreted, return NULL_TREE.  */
 
 static tree
-native_interpret_vector (tree type, const unsigned char *ptr, int len)
+native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
 {
   tree etype, elem;
-  int i, size, count;
+  unsigned int i, size;
+  unsigned HOST_WIDE_INT count;
 
   etype = TREE_TYPE (type);
   size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
-  count = TYPE_VECTOR_SUBPARTS (type);
-  if (size * count > len)
+  if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
+      || size * count > len)
     return NULL_TREE;
 
-  auto_vec<tree, 32> elements (count);
+  tree_vector_builder elements (type, count, 1);
   for (i = 0; i < count; ++i)
     {
       elem = native_interpret_expr (etype, ptr+(i*size), size);
@@ -7405,7 +7618,7 @@
 	return NULL_TREE;
       elements.quick_push (elem);
     }
-  return build_vector (type, elements);
+  return elements.build ();
 }
 
 
@@ -7687,7 +7900,7 @@
 	  && POINTER_TYPE_P (type)
 	  && handled_component_p (TREE_OPERAND (op0, 0)))
         {
-	  HOST_WIDE_INT bitsize, bitpos;
+	  poly_int64 bitsize, bitpos;
 	  tree offset;
 	  machine_mode mode;
 	  int unsignedp, reversep, volatilep;
@@ -7698,7 +7911,8 @@
 	  /* If the reference was to a (constant) zero offset, we can use
 	     the address of the base if it has the same base type
 	     as the result type and the pointer type is unqualified.  */
-	  if (! offset && bitpos == 0
+	  if (!offset
+	      && known_eq (bitpos, 0)
 	      && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
 		  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
 	      && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
@@ -7985,7 +8199,7 @@
     }
 
   /* See if we can build a range comparison.  */
-  if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
+  if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
     return tem;
 
   if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
@@ -8008,8 +8222,8 @@
      lhs is another similar operation, try to merge its rhs with our
      rhs.  Then try to merge our lhs and rhs.  */
   if (TREE_CODE (arg0) == code
-      && 0 != (tem = fold_truth_andor_1 (loc, code, type,
-					 TREE_OPERAND (arg0, 1), arg1)))
+      && (tem = fold_truth_andor_1 (loc, code, type,
+				    TREE_OPERAND (arg0, 1), arg1)) != 0)
     return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
 
   if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
@@ -8194,48 +8408,50 @@
    expressions like &p->x which can not wrap.  */
 
 static bool
-pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
+pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
 {
   if (!POINTER_TYPE_P (TREE_TYPE (base)))
     return true;
 
-  if (bitpos < 0)
+  if (maybe_lt (bitpos, 0))
     return true;
 
-  wide_int wi_offset;
+  poly_wide_int wi_offset;
   int precision = TYPE_PRECISION (TREE_TYPE (base));
   if (offset == NULL_TREE)
     wi_offset = wi::zero (precision);
-  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
+  else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
     return true;
   else
-    wi_offset = wi::to_wide (offset);
-
-  bool overflow;
-  wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
-  wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
+    wi_offset = wi::to_poly_wide (offset);
+
+  wi::overflow_type overflow;
+  poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
+				  precision);
+  poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
   if (overflow)
     return true;
 
-  if (!wi::fits_uhwi_p (total))
+  poly_uint64 total_hwi, size;
+  if (!total.to_uhwi (&total_hwi)
+      || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
+			   &size)
+      || known_eq (size, 0U))
     return true;
 
-  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
-  if (size <= 0)
-    return true;
+  if (known_le (total_hwi, size))
+    return false;
 
   /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
      array.  */
-  if (TREE_CODE (base) == ADDR_EXPR)
-    {
-      HOST_WIDE_INT base_size;
-
-      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
-      if (base_size > 0 && size < base_size)
-	size = base_size;
-    }
-
-  return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
+  if (TREE_CODE (base) == ADDR_EXPR
+      && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
+			  &size)
+      && maybe_ne (size, 0U)
+      && known_le (total_hwi, size))
+    return false;
+
+  return true;
 }
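
A standalone model of the check this function now performs (all sizes in
bytes; the poly_int version above additionally keeps the answer exact for
runtime-sized objects such as SVE vectors):

    #include <cstdint>
    #include <cstdio>

    static bool
    may_wrap (uint64_t offset, int64_t bitpos, uint64_t obj_size)
    {
      if (bitpos < 0 || obj_size == 0)
        return true;               /* no information: assume it may wrap */
      uint64_t total = offset + (uint64_t) bitpos / 8;
      if (total < offset)
        return true;               /* the byte offset itself wrapped */
      return total > obj_size;     /* lands past the object's end */
    }

    int main ()
    {
      printf ("%d\n", may_wrap (4, 16, 8));  /* 0: 4 + 2 stays within 8 */
      printf ("%d\n", may_wrap (16, 0, 8));  /* 1: outside the object */
    }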
 
 /* Return a positive integer when the symbol DECL is known to have
@@ -8292,7 +8508,7 @@
 	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
     {
       tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
-      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
+      poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
       machine_mode mode;
       int volatilep, reversep, unsignedp;
       bool indirect_base0 = false, indirect_base1 = false;
@@ -8333,17 +8549,14 @@
 	  else
 	    offset0 = size_binop (PLUS_EXPR, offset0,
 				  TREE_OPERAND (arg0, 1));
-	  if (TREE_CODE (offset0) == INTEGER_CST)
-	    {
-	      offset_int tem = wi::sext (wi::to_offset (offset0),
-					 TYPE_PRECISION (sizetype));
+	  if (poly_int_tree_p (offset0))
+	    {
+	      poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
+					      TYPE_PRECISION (sizetype));
 	      tem <<= LOG2_BITS_PER_UNIT;
 	      tem += bitpos0;
-	      if (wi::fits_shwi_p (tem))
-		{
-		  bitpos0 = tem.to_shwi ();
-		  offset0 = NULL_TREE;
-		}
+	      if (tem.to_shwi (&bitpos0))
+		offset0 = NULL_TREE;
 	    }
 	}
 
@@ -8379,17 +8592,14 @@
 	  else
 	    offset1 = size_binop (PLUS_EXPR, offset1,
 				  TREE_OPERAND (arg1, 1));
-	  if (TREE_CODE (offset1) == INTEGER_CST)
-	    {
-	      offset_int tem = wi::sext (wi::to_offset (offset1),
-					 TYPE_PRECISION (sizetype));
+	  if (poly_int_tree_p (offset1))
+	    {
+	      poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
+					      TYPE_PRECISION (sizetype));
 	      tem <<= LOG2_BITS_PER_UNIT;
 	      tem += bitpos1;
-	      if (wi::fits_shwi_p (tem))
-		{
-		  bitpos1 = tem.to_shwi ();
-		  offset1 = NULL_TREE;
-		}
+	      if (tem.to_shwi (&bitpos1))
+		offset1 = NULL_TREE;
 	    }
 	}
 
@@ -8400,12 +8610,16 @@
 	{
 	  /* We can fold this expression to a constant if the non-constant
 	     offset parts are equal.  */
-	  if (offset0 == offset1
-	      || (offset0 && offset1
-		  && operand_equal_p (offset0, offset1, 0)))
+	  if ((offset0 == offset1
+	       || (offset0 && offset1
+		   && operand_equal_p (offset0, offset1, 0)))
+	      && (equality_code
+		  || (indirect_base0
+		      && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
+		  || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
 	    {
 	      if (!equality_code
-		  && bitpos0 != bitpos1
+		  && maybe_ne (bitpos0, bitpos1)
 		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
 		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
 		fold_overflow_warning (("assuming pointer wraparound does not "
@@ -8416,17 +8630,41 @@
 	      switch (code)
 		{
 		case EQ_EXPR:
-		  return constant_boolean_node (bitpos0 == bitpos1, type);
+		  if (known_eq (bitpos0, bitpos1))
+		    return constant_boolean_node (true, type);
+		  if (known_ne (bitpos0, bitpos1))
+		    return constant_boolean_node (false, type);
+		  break;
 		case NE_EXPR:
-		  return constant_boolean_node (bitpos0 != bitpos1, type);
+		  if (known_ne (bitpos0, bitpos1))
+		    return constant_boolean_node (true, type);
+		  if (known_eq (bitpos0, bitpos1))
+		    return constant_boolean_node (false, type);
+		  break;
 		case LT_EXPR:
-		  return constant_boolean_node (bitpos0 < bitpos1, type);
+		  if (known_lt (bitpos0, bitpos1))
+		    return constant_boolean_node (true, type);
+		  if (known_ge (bitpos0, bitpos1))
+		    return constant_boolean_node (false, type);
+		  break;
 		case LE_EXPR:
-		  return constant_boolean_node (bitpos0 <= bitpos1, type);
+		  if (known_le (bitpos0, bitpos1))
+		    return constant_boolean_node (true, type);
+		  if (known_gt (bitpos0, bitpos1))
+		    return constant_boolean_node (false, type);
+		  break;
 		case GE_EXPR:
-		  return constant_boolean_node (bitpos0 >= bitpos1, type);
+		  if (known_ge (bitpos0, bitpos1))
+		    return constant_boolean_node (true, type);
+		  if (known_lt (bitpos0, bitpos1))
+		    return constant_boolean_node (false, type);
+		  break;
 		case GT_EXPR:
-		  return constant_boolean_node (bitpos0 > bitpos1, type);
+		  if (known_gt (bitpos0, bitpos1))
+		    return constant_boolean_node (true, type);
+		  if (known_le (bitpos0, bitpos1))
+		    return constant_boolean_node (false, type);
+		  break;
 		default:;
 		}
 	    }
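
With poly_int64 bit positions a comparison has three outcomes, which is why
each case above needs a pair of tests plus a fallthrough break.  A fragment
(assuming a target with NUM_POLY_INT_COEFFS == 2, e.g. AArch64 SVE, where x
is the unknown runtime coefficient):

    poly_int64 p0 (0, 4);  /* 0 + 4x */
    poly_int64 p1 (8, 4);  /* 8 + 4x */
    known_lt (p0, p1);     /* true: holds for every x */
    known_lt (p0, 8);      /* false: fails once x >= 2 */
    known_ge (p0, 8);      /* also false: fails for x < 2 */
    /* Both predicates false: neither constant_boolean_node is returned
       and the switch falls through to break.  */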
@@ -8437,7 +8675,11 @@
 	     because pointer arithmetic is restricted to retain within an
 	     object and overflow on pointer differences is undefined as of
 	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
-	  else if (bitpos0 == bitpos1)
+	  else if (known_eq (bitpos0, bitpos1)
+		   && (equality_code
+		       || (indirect_base0
+			   && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
+		       || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
 	    {
 	      /* By converting to signed sizetype we cover middle-end pointer
 	         arithmetic which operates on unsigned pointer types of size
@@ -8466,7 +8708,7 @@
 	}
       /* For equal offsets we can simplify to a comparison of the
 	 base addresses.  */
-      else if (bitpos0 == bitpos1
+      else if (known_eq (bitpos0, bitpos1)
 	       && (indirect_base0
 		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
 	       && (indirect_base1
@@ -8495,7 +8737,7 @@
 		    eliminated.  When ptr is null, although the -> expression
 		    is strictly speaking invalid, GCC retains it as a matter
 		    of QoI.  See PR c/44555. */
-		 && (offset0 == NULL_TREE && bitpos0 != 0))
+		 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
 		|| CONSTANT_CLASS_P (base0))
 	       && indirect_base0
 	       /* The caller guarantees that when one of the arguments is
@@ -8593,9 +8835,8 @@
   if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
     {
       tree cval1 = 0, cval2 = 0;
-      int save_p = 0;
-
-      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
+
+      if (twoval_comparison_p (arg0, &cval1, &cval2)
 	  /* Don't handle degenerate cases here; they should already
 	     have been handled anyway.  */
 	  && cval1 != 0 && cval2 != 0
@@ -8668,12 +8909,6 @@
 		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
 		}
 
-	      if (save_p)
-		{
-		  tem = save_expr (build2 (code, type, cval1, cval2));
-		  protected_set_expr_location (tem, loc);
-		  return tem;
-		}
 	      return fold_build2_loc (loc, code, type, cval1, cval2);
 	    }
 	}
@@ -8727,11 +8962,12 @@
 static bool
 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
 {
-  unsigned int i;
-
-  if (TREE_CODE (arg) == VECTOR_CST)
-    {
-      for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
+  unsigned HOST_WIDE_INT i, nunits;
+
+  if (TREE_CODE (arg) == VECTOR_CST
+      && VECTOR_CST_NELTS (arg).is_constant (&nunits))
+    {
+      for (i = 0; i < nunits; ++i)
 	elts[i] = VECTOR_CST_ELT (arg, i);
     }
   else if (TREE_CODE (arg) == CONSTRUCTOR)
@@ -8757,15 +8993,17 @@
    NULL_TREE otherwise.  */
 
 static tree
-fold_vec_perm (tree type, tree arg0, tree arg1, vec_perm_indices sel)
+fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
 {
   unsigned int i;
+  unsigned HOST_WIDE_INT nelts;
   bool need_ctor = false;
 
-  unsigned int nelts = sel.length ();
-  gcc_assert (TYPE_VECTOR_SUBPARTS (type) == nelts
-	      && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
-	      && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
+  if (!sel.length ().is_constant (&nelts))
+    return NULL_TREE;
+  gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
+	      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
+	      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
   if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
       || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
     return NULL_TREE;
@@ -8775,12 +9013,15 @@
       || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
     return NULL_TREE;
 
-  auto_vec<tree, 32> out_elts (nelts);
+  tree_vector_builder out_elts (type, nelts, 1);
   for (i = 0; i < nelts; i++)
     {
-      if (!CONSTANT_CLASS_P (in_elts[sel[i]]))
+      HOST_WIDE_INT index;
+      if (!sel[i].is_constant (&index))
+	return NULL_TREE;
+      if (!CONSTANT_CLASS_P (in_elts[index]))
 	need_ctor = true;
-      out_elts.quick_push (unshare_expr (in_elts[sel[i]]));
+      out_elts.quick_push (unshare_expr (in_elts[index]));
     }
 
   if (need_ctor)
@@ -8792,7 +9033,7 @@
       return build_constructor (type, v);
     }
   else
-    return build_vector (type, out_elts);
+    return out_elts.build ();
 }
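A minimal usage sketch under the new interface, assuming hypothetical V4SI constants cst0/cst1; the builder arguments mirror the vec_perm_builder (nelts, nelts, 1) form used elsewhere in this patch:

    vec_perm_builder builder (4, 4, 1);   /* 4 elements, all encoded explicitly */
    for (unsigned int i = 0; i < 4; ++i)
      builder.quick_push (i < 2 ? i : i + 2);   /* selector {0, 1, 4, 5} */
    vec_perm_indices sel (builder, 2, 4);       /* two inputs of 4 elements */
    tree t = fold_vec_perm (v4si_type, cst0, cst1, sel);
    /* t is a VECTOR_CST with elements {cst0[0], cst0[1], cst1[0], cst1[1]},
       or NULL_TREE if the selector length or indices were non-constant.  */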
 
 /* Try to fold a pointer difference of type TYPE between two address expressions of
@@ -8801,7 +9042,8 @@
 
 static tree
 fold_addr_of_array_ref_difference (location_t loc, tree type,
-				   tree aref0, tree aref1)
+				   tree aref0, tree aref1,
+				   bool use_pointer_diff)
 {
   tree base0 = TREE_OPERAND (aref0, 0);
   tree base1 = TREE_OPERAND (aref1, 0);
@@ -8813,14 +9055,20 @@
   if ((TREE_CODE (base0) == ARRAY_REF
        && TREE_CODE (base1) == ARRAY_REF
        && (base_offset
-	   = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
+	   = fold_addr_of_array_ref_difference (loc, type, base0, base1,
+						use_pointer_diff)))
       || (INDIRECT_REF_P (base0)
 	  && INDIRECT_REF_P (base1)
 	  && (base_offset
-	        = fold_binary_loc (loc, MINUS_EXPR, type,
-				   fold_convert (type, TREE_OPERAND (base0, 0)),
-				   fold_convert (type,
-						 TREE_OPERAND (base1, 0)))))
+	        = use_pointer_diff
+		  ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
+				     TREE_OPERAND (base0, 0),
+				     TREE_OPERAND (base1, 0))
+		  : fold_binary_loc (loc, MINUS_EXPR, type,
+				     fold_convert (type,
+						   TREE_OPERAND (base0, 0)),
+				     fold_convert (type,
+						   TREE_OPERAND (base1, 0)))))
       || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
     {
       tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
@@ -8844,7 +9092,6 @@
   REAL_VALUE_TYPE r;
   tree unit_type;
   machine_mode mode;
-  unsigned vec_nelts, i;
 
   switch (TREE_CODE (cst))
     {
@@ -8858,12 +9105,14 @@
 
     case VECTOR_CST:
       {
-	vec_nelts = VECTOR_CST_NELTS (cst);
 	unit_type = TREE_TYPE (type);
 	mode = TYPE_MODE (unit_type);
 
-	auto_vec<tree, 32> elts (vec_nelts);
-	for (i = 0; i < vec_nelts; i++)
+	tree_vector_builder elts;
+	if (!elts.new_unary_operation (type, cst, false))
+	  return NULL_TREE;
+	unsigned int count = elts.encoded_nelts ();
+	for (unsigned int i = 0; i < count; ++i)
 	  {
 	    r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
 	    if (!exact_real_inverse (mode, &r))
@@ -8871,7 +9120,7 @@
 	    elts.quick_push (build_real (unit_type, r));
 	  }
 
-	return build_vector (type, elts);
+	return elts.build ();
       }
 
     default:
@@ -9007,7 +9256,7 @@
 expr_not_equal_to (tree t, const wide_int &w)
 {
   wide_int min, max, nz;
-  value_range_type rtype;
+  value_range_kind rtype;
   switch (TREE_CODE (t))
     {
     case INTEGER_CST:
@@ -9046,8 +9295,8 @@
    return NULL_TREE.  */
 
 tree
-fold_binary_loc (location_t loc,
-	     enum tree_code code, tree type, tree op0, tree op1)
+fold_binary_loc (location_t loc, enum tree_code code, tree type,
+		 tree op0, tree op1)
 {
   enum tree_code_class kind = TREE_CODE_CLASS (code);
   tree arg0, arg1, tem;
@@ -9134,7 +9383,7 @@
 
   if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
        || code == EQ_EXPR || code == NE_EXPR)
-      && TREE_CODE (type) != VECTOR_TYPE
+      && !VECTOR_TYPE_P (TREE_TYPE (arg0))
       && ((truth_value_p (TREE_CODE (arg0))
 	   && (truth_value_p (TREE_CODE (arg1))
 	       || (TREE_CODE (arg1) == BIT_AND_EXPR
@@ -9219,7 +9468,7 @@
 	  && handled_component_p (TREE_OPERAND (arg0, 0)))
 	{
 	  tree base;
-	  HOST_WIDE_INT coffset;
+	  poly_int64 coffset;
 	  base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
 						&coffset);
 	  if (!base)
@@ -9463,8 +9712,8 @@
 		STRIP_NOPS (tree110);
 		STRIP_NOPS (tree111);
 		if (TREE_CODE (tree110) == INTEGER_CST
-		    && 0 == compare_tree_int (tree110,
-					      element_precision (rtype))
+		    && compare_tree_int (tree110,
+					 element_precision (rtype)) == 0
 		    && operand_equal_p (tree01, tree111, 0))
 		  {
 		    tem = build2_loc (loc, (code0 == LSHIFT_EXPR
@@ -9485,8 +9734,8 @@
 		STRIP_NOPS (tree111);
 		if (TREE_CODE (tree110) == NEGATE_EXPR
 		    && TREE_CODE (tree111) == INTEGER_CST
-		    && 0 == compare_tree_int (tree111,
-					      element_precision (rtype) - 1)
+		    && compare_tree_int (tree111,
+					 element_precision (rtype) - 1) == 0
 		    && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
 		  {
 		    tem = build2_loc (loc, (code0 == LSHIFT_EXPR
@@ -9532,8 +9781,8 @@
 
 	  /* With undefined overflow prefer doing association in a type
 	     which wraps on overflow, if that is one of the operand types.  */
-	  if (POINTER_TYPE_P (type)
-	      || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
+	  if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
+	      && !TYPE_OVERFLOW_WRAPS (type))
 	    {
 	      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
 		  && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
@@ -9546,8 +9795,8 @@
 
 	  /* With undefined overflow we can only associate constants with one
 	     variable, and constants whose association doesn't overflow.  */
-	  if (POINTER_TYPE_P (atype)
-	      || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
+	  if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
+	      && !TYPE_OVERFLOW_WRAPS (atype))
 	    {
 	      if ((var0 && var1) || (minus_var0 && minus_var1))
 		{
@@ -9594,12 +9843,12 @@
 	  /* Only do something if we found more than two objects.  Otherwise,
 	     nothing has changed and we risk infinite recursion.  */
 	  if (ok
-	      && (2 < ((var0 != 0) + (var1 != 0)
-		       + (minus_var0 != 0) + (minus_var1 != 0)
-		       + (con0 != 0) + (con1 != 0)
-		       + (minus_con0 != 0) + (minus_con1 != 0)
-		       + (lit0 != 0) + (lit1 != 0)
-		       + (minus_lit0 != 0) + (minus_lit1 != 0))))
+	      && ((var0 != 0) + (var1 != 0)
+		  + (minus_var0 != 0) + (minus_var1 != 0)
+		  + (con0 != 0) + (con1 != 0)
+		  + (minus_con0 != 0) + (minus_con1 != 0)
+		  + (lit0 != 0) + (lit1 != 0)
+		  + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
 	    {
 	      var0 = associate_trees (loc, var0, var1, code, atype);
 	      minus_var0 = associate_trees (loc, minus_var0, minus_var1,
@@ -9694,13 +9943,40 @@
 
       return NULL_TREE;
 
+    case POINTER_DIFF_EXPR:
     case MINUS_EXPR:
+      /* Fold &a[i] - &a[j] to i-j.  */
+      if (TREE_CODE (arg0) == ADDR_EXPR
+	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
+	  && TREE_CODE (arg1) == ADDR_EXPR
+	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
+        {
+	  tree tem = fold_addr_of_array_ref_difference (loc, type,
+							TREE_OPERAND (arg0, 0),
+							TREE_OPERAND (arg1, 0),
+							code
+							== POINTER_DIFF_EXPR);
+	  if (tem)
+	    return tem;
+	}
+
+      /* Further transformations are not for pointers.  */
+      if (code == POINTER_DIFF_EXPR)
+	return NULL_TREE;
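At the source level, the hoisted fold now covers both the C pointer subtraction (POINTER_DIFF_EXPR) and the older MINUS_EXPR form; an illustrative example:

    int a[10];
    ptrdiff_t f (int i, int j)
    {
      return &a[i] - &a[j];   /* folded to (ptrdiff_t) (i - j) */
    }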
+
       /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
       if (TREE_CODE (arg0) == NEGATE_EXPR
-	  && negate_expr_p (op1))
-	return fold_build2_loc (loc, MINUS_EXPR, type,
-				negate_expr (op1),
-				fold_convert_loc (loc, type,
+	  && negate_expr_p (op1)
+	  /* If arg0 is e.g. unsigned int and type is int, then this could
+	     introduce UB, because if A is INT_MIN at runtime, the original
+	     expression can be well defined while the transformed one is not.
+	     See PR83269.  */
+	  && !(ANY_INTEGRAL_TYPE_P (type)
+	       && TYPE_OVERFLOW_UNDEFINED (type)
+	       && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
+	       && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
+	return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
+			        fold_convert_loc (loc, type,
 						  TREE_OPERAND (arg0, 0)));
 
       /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
@@ -9752,19 +10028,6 @@
 				fold_convert_loc (loc, type, arg0),
 				negate_expr (op1));
 
-      /* Fold &a[i] - &a[j] to i-j.  */
-      if (TREE_CODE (arg0) == ADDR_EXPR
-	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
-	  && TREE_CODE (arg1) == ADDR_EXPR
-	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
-        {
-	  tree tem = fold_addr_of_array_ref_difference (loc, type,
-							TREE_OPERAND (arg0, 0),
-							TREE_OPERAND (arg1, 0));
-	  if (tem)
-	    return tem;
-	}
-
       /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
 	 one.  Make sure the type is not saturating and has the signedness of
 	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
@@ -9799,8 +10062,8 @@
 
 	  strict_overflow_p = false;
 	  if (TREE_CODE (arg1) == INTEGER_CST
-	      && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
-					     &strict_overflow_p)))
+	      && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
+					&strict_overflow_p)) != 0)
 	    {
 	      if (strict_overflow_p)
 		fold_overflow_warning (("assuming signed overflow does not "
@@ -10002,121 +10265,6 @@
 	    }
 	}
 
-      /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
-	 ((A & N) + B) & M -> (A + B) & M
-	 Similarly if (N & M) == 0,
-	 ((A | N) + B) & M -> (A + B) & M
-	 and for - instead of + (or unary - instead of +)
-	 and/or ^ instead of |.
-	 If B is constant and (B & M) == 0, fold into A & M.  */
-      if (TREE_CODE (arg1) == INTEGER_CST)
-	{
-	  wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
-	  if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
-	      && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
-	      && (TREE_CODE (arg0) == PLUS_EXPR
-		  || TREE_CODE (arg0) == MINUS_EXPR
-		  || TREE_CODE (arg0) == NEGATE_EXPR)
-	      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
-		  || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
-	    {
-	      tree pmop[2];
-	      int which = 0;
-	      wide_int cst0;
-
-	      /* Now we know that arg0 is (C + D) or (C - D) or
-		 -C and arg1 (M) is == (1LL << cst) - 1.
-		 Store C into PMOP[0] and D into PMOP[1].  */
-	      pmop[0] = TREE_OPERAND (arg0, 0);
-	      pmop[1] = NULL;
-	      if (TREE_CODE (arg0) != NEGATE_EXPR)
-		{
-		  pmop[1] = TREE_OPERAND (arg0, 1);
-		  which = 1;
-		}
-
-	      if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
-		which = -1;
-
-	      for (; which >= 0; which--)
-		switch (TREE_CODE (pmop[which]))
-		  {
-		  case BIT_AND_EXPR:
-		  case BIT_IOR_EXPR:
-		  case BIT_XOR_EXPR:
-		    if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
-			!= INTEGER_CST)
-		      break;
-		    cst0 = wi::to_wide (TREE_OPERAND (pmop[which], 1)) & cst1;
-		    if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
-		      {
-			if (cst0 != cst1)
-			  break;
-		      }
-		    else if (cst0 != 0)
-		      break;
-		    /* If C or D is of the form (A & N) where
-		       (N & M) == M, or of the form (A | N) or
-		       (A ^ N) where (N & M) == 0, replace it with A.  */
-		    pmop[which] = TREE_OPERAND (pmop[which], 0);
-		    break;
-		  case INTEGER_CST:
-		    /* If C or D is a N where (N & M) == 0, it can be
-		       omitted (assumed 0).  */
-		    if ((TREE_CODE (arg0) == PLUS_EXPR
-			 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
-			&& (cst1 & wi::to_wide (pmop[which])) == 0)
-		      pmop[which] = NULL;
-		    break;
-		  default:
-		    break;
-		  }
-
-	      /* Only build anything new if we optimized one or both arguments
-		 above.  */
-	      if (pmop[0] != TREE_OPERAND (arg0, 0)
-		  || (TREE_CODE (arg0) != NEGATE_EXPR
-		      && pmop[1] != TREE_OPERAND (arg0, 1)))
-		{
-		  tree utype = TREE_TYPE (arg0);
-		  if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
-		    {
-		      /* Perform the operations in a type that has defined
-			 overflow behavior.  */
-		      utype = unsigned_type_for (TREE_TYPE (arg0));
-		      if (pmop[0] != NULL)
-			pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
-		      if (pmop[1] != NULL)
-			pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
-		    }
-
-		  if (TREE_CODE (arg0) == NEGATE_EXPR)
-		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
-		  else if (TREE_CODE (arg0) == PLUS_EXPR)
-		    {
-		      if (pmop[0] != NULL && pmop[1] != NULL)
-			tem = fold_build2_loc (loc, PLUS_EXPR, utype,
-					       pmop[0], pmop[1]);
-		      else if (pmop[0] != NULL)
-			tem = pmop[0];
-		      else if (pmop[1] != NULL)
-			tem = pmop[1];
-		      else
-			return build_int_cst (type, 0);
-		    }
-		  else if (pmop[0] == NULL)
-		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
-		  else
-		    tem = fold_build2_loc (loc, MINUS_EXPR, utype,
-					   pmop[0], pmop[1]);
-		  /* TEM is now the new binary +, - or unary - replacement.  */
-		  tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
-					 fold_convert_loc (loc, utype, arg1));
-		  return fold_convert_loc (loc, type, tem);
-		}
-	    }
-	}
-
       /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
       if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
 	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
@@ -10233,8 +10381,8 @@
 
       strict_overflow_p = false;
       if (TREE_CODE (arg1) == INTEGER_CST
-	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
-					 &strict_overflow_p)))
+	  && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
+				    &strict_overflow_p)) != 0)
 	{
 	  if (strict_overflow_p)
 	    fold_overflow_warning (("assuming signed overflow does not occur "
@@ -10251,8 +10399,8 @@
     case TRUNC_MOD_EXPR:
       strict_overflow_p = false;
       if (TREE_CODE (arg1) == INTEGER_CST
-	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
-					 &strict_overflow_p)))
+	  && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
+				    &strict_overflow_p)) != 0)
 	{
 	  if (strict_overflow_p)
 	    fold_overflow_warning (("assuming signed overflow does not occur "
@@ -10514,28 +10662,6 @@
 	    }
 	}
 
-      /* If this is an NE or EQ comparison of zero against the result of a
-	 signed MOD operation whose second operand is a power of 2, make
-	 the MOD operation unsigned since it is simpler and equivalent.  */
-      if (integer_zerop (arg1)
-	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
-	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
-	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
-	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
-	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
-	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
-	{
-	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
-	  tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
-				     fold_convert_loc (loc, newtype,
-						       TREE_OPERAND (arg0, 0)),
-				     fold_convert_loc (loc, newtype,
-						       TREE_OPERAND (arg0, 1)));
-
-	  return fold_build2_loc (loc, code, type, newmod,
-			      fold_convert_loc (loc, newtype, arg1));
-	}
-
       /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
 	 C1 is a valid shift constant, and C2 is a power of two, i.e.
 	 a single bit.  */
@@ -10606,8 +10732,7 @@
 	  tree fndecl = get_callee_fndecl (arg0);
 
 	  if (fndecl
-	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
-	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
+	      && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
 	      && call_expr_nargs (arg0) == 1
 	      && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
 	    {
@@ -10905,7 +11030,7 @@
 	  && TREE_CODE (arg1) == INTEGER_CST
 	  && TREE_CODE (arg0) == ABS_EXPR
 	  && ! TREE_SIDE_EFFECTS (arg0)
-	  && (0 != (tem = negate_expr (arg1)))
+	  && (tem = negate_expr (arg1)) != 0
 	  && TREE_CODE (tem) == INTEGER_CST
 	  && !TREE_OVERFLOW (tem))
 	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
@@ -11030,22 +11155,142 @@
     } /* switch (code) */
 }
 
+/* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
+   ((A & N) + B) & M -> (A + B) & M
+   Similarly if (N & M) == 0,
+   ((A | N) + B) & M -> (A + B) & M
+   and for - instead of + (or unary - instead of +)
+   and/or ^ instead of |.
+   If B is constant and (B & M) == 0, fold into A & M.
+
+   This function is a helper for match.pd patterns.  If any optimization
+   is possible, return the non-NULL type in which the simplified operation
+   should be performed; otherwise return NULL_TREE.
+
+   ARG1 is M above; ARG00 is the left operand of +/-.  If CODE00 is
+   BIT_*_EXPR, then ARG00{0,1} are the operands of that bitop, otherwise
+   CODE00 is ERROR_MARK.  Similarly for ARG01, CODE01 and ARG01{0,1},
+   just for the right operand of +/-.  */
+tree
+fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
+		   tree arg00, enum tree_code code00, tree arg000, tree arg001,
+		   tree arg01, enum tree_code code01, tree arg010, tree arg011,
+		   tree *pmop)
+{
+  gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
+  gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
+  wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
+  if (~cst1 == 0
+      || (cst1 & (cst1 + 1)) != 0
+      || !INTEGRAL_TYPE_P (type)
+      || (!TYPE_OVERFLOW_WRAPS (type)
+	  && TREE_CODE (type) != INTEGER_TYPE)
+      || (wi::max_value (type) & cst1) != cst1)
+    return NULL_TREE;
+
+  enum tree_code codes[2] = { code00, code01 };
+  tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
+  int which = 0;
+  wide_int cst0;
+
+  /* Now we know that arg0 is (C + D) or (C - D) or -C and
+     arg1 (M) is == (1LL << cst) - 1.
+     Store C into PMOP[0] and D into PMOP[1].  */
+  pmop[0] = arg00;
+  pmop[1] = arg01;
+  which = code != NEGATE_EXPR;
+
+  for (; which >= 0; which--)
+    switch (codes[which])
+      {
+      case BIT_AND_EXPR:
+      case BIT_IOR_EXPR:
+      case BIT_XOR_EXPR:
+	gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
+	cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
+	if (codes[which] == BIT_AND_EXPR)
+	  {
+	    if (cst0 != cst1)
+	      break;
+	  }
+	else if (cst0 != 0)
+	  break;
+	/* If C or D is of the form (A & N) where
+	   (N & M) == M, or of the form (A | N) or
+	   (A ^ N) where (N & M) == 0, replace it with A.  */
+	pmop[which] = arg0xx[2 * which];
+	break;
+      case ERROR_MARK:
+	if (TREE_CODE (pmop[which]) != INTEGER_CST)
+	  break;
+	/* If C or D is a N where (N & M) == 0, it can be
+	   omitted (replaced with 0).  */
+	if ((code == PLUS_EXPR
+	     || (code == MINUS_EXPR && which == 0))
+	    && (cst1 & wi::to_wide (pmop[which])) == 0)
+	  pmop[which] = build_int_cst (type, 0);
+	/* Similarly, with C - N where (-N & M) == 0.  */
+	if (code == MINUS_EXPR
+	    && which == 1
+	    && (cst1 & -wi::to_wide (pmop[which])) == 0)
+	  pmop[which] = build_int_cst (type, 0);
+	break;
+      default:
+	gcc_unreachable ();
+      }
+
+  /* Only build anything new if we optimized one or both arguments above.  */
+  if (pmop[0] == arg00 && pmop[1] == arg01)
+    return NULL_TREE;
+
+  if (TYPE_OVERFLOW_WRAPS (type))
+    return type;
+  else
+    return unsigned_type_for (type);
+}
+
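A worked instance of the first rule, taking M = 0xff (so M == (1 << 8) - 1) and N = 0x1ff (so (N & M) == M); illustrative C only:

    unsigned f (unsigned a, unsigned b)
    {
      return ((a & 0x1ff) + b) & 0xff;   /* -> (a + b) & 0xff: the extra
					    high bit kept by N cannot affect
					    the low eight bits of the sum */
    }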
+/* Used by contains_label_[p1].  */
+
+struct contains_label_data
+{
+  hash_set<tree> *pset;
+  bool inside_switch_p;
+};
+
 /* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
-   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
-   of GOTO_EXPR.  */
+   a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
+   return NULL_TREE.  Do not check the subtrees of GOTO_EXPR.  */
 
 static tree
-contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
-{
+contains_label_1 (tree *tp, int *walk_subtrees, void *data)
+{
+  contains_label_data *d = (contains_label_data *) data;
   switch (TREE_CODE (*tp))
     {
     case LABEL_EXPR:
       return *tp;
 
+    case CASE_LABEL_EXPR:
+      if (!d->inside_switch_p)
+	return *tp;
+      return NULL_TREE;
+
+    case SWITCH_EXPR:
+      if (!d->inside_switch_p)
+	{
+	  if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
+	    return *tp;
+	  d->inside_switch_p = true;
+	  if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
+	    return *tp;
+	  d->inside_switch_p = false;
+	  *walk_subtrees = 0;
+	}
+      return NULL_TREE;
+
     case GOTO_EXPR:
       *walk_subtrees = 0;
-
-      /* fall through */
+      return NULL_TREE;
 
     default:
       return NULL_TREE;
@@ -11058,8 +11303,9 @@
 static bool
 contains_label_p (tree st)
 {
-  return
-   (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
+  hash_set<tree> pset;
+  contains_label_data data = { &pset, false };
+  return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
 }
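An illustrative case the new CASE_LABEL_EXPR handling guards against: a statically dead arm may still be entered through a case label of an enclosing switch, so it must not be folded away:

    switch (x)
      {
      case 1:
	if (0)
	  {
      case 2:   /* reachable via the switch despite the if (0) */
	    y = 1;
	  }
	break;
      }

A case label belonging to a switch nested entirely inside the dead arm carries no such incoming edge, which is why inside_switch_p suppresses the match there.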
 
 /* Fold a ternary expression of code CODE and type TYPE with operands
@@ -11150,15 +11396,15 @@
 	}
       else if (TREE_CODE (arg0) == VECTOR_CST)
 	{
+	  unsigned HOST_WIDE_INT nelts;
 	  if ((TREE_CODE (arg1) == VECTOR_CST
 	       || TREE_CODE (arg1) == CONSTRUCTOR)
 	      && (TREE_CODE (arg2) == VECTOR_CST
-		  || TREE_CODE (arg2) == CONSTRUCTOR))
-	    {
-	      unsigned int nelts = VECTOR_CST_NELTS (arg0), i;
-	      gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type));
-	      auto_vec_perm_indices sel (nelts);
-	      for (i = 0; i < nelts; i++)
+		  || TREE_CODE (arg2) == CONSTRUCTOR)
+	      && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
+	    {
+	      vec_perm_builder sel (nelts, nelts, 1);
+	      for (unsigned int i = 0; i < nelts; i++)
 		{
 		  tree val = VECTOR_CST_ELT (arg0, i);
 		  if (integer_all_onesp (val))
@@ -11168,7 +11414,8 @@
 		  else /* Currently unreachable.  */
 		    return NULL_TREE;
 		}
-	      tree t = fold_vec_perm (type, arg1, arg2, sel);
+	      vec_perm_indices indices (sel, 2, nelts);
+	      tree t = fold_vec_perm (type, arg1, arg2, indices);
 	      if (t != NULL_TREE)
 		return t;
 	    }
@@ -11339,10 +11586,16 @@
 	  && integer_pow2p (arg1)
 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
-			      arg1, OEP_ONLY_CONST))
+			      arg1, OEP_ONLY_CONST)
+	  /* operand_equal_p compares just value, not precision, so e.g.
+	     arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
+	     second operand 32-bit -128, which is not a power of two (or vice
+	     versa.  */
+	     versa).  */
 	return pedantic_non_lvalue_loc (loc,
-				    fold_convert_loc (loc, type,
-						      TREE_OPERAND (arg0, 0)));
+					fold_convert_loc (loc, type,
+							  TREE_OPERAND (arg0,
+									0)));
 
       /* Disable the transformations below for vectors, since
 	 fold_binary_op_with_conditional_arg may undo them immediately,
@@ -11411,8 +11664,10 @@
     case BIT_FIELD_REF:
       if (TREE_CODE (arg0) == VECTOR_CST
 	  && (type == TREE_TYPE (TREE_TYPE (arg0))
-	      || (TREE_CODE (type) == VECTOR_TYPE
-		  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
+	      || (VECTOR_TYPE_P (type)
+		  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
+	  && tree_fits_uhwi_p (op1)
+	  && tree_fits_uhwi_p (op2))
 	{
 	  tree eltype = TREE_TYPE (TREE_TYPE (arg0));
 	  unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
@@ -11422,7 +11677,8 @@
 	  if (n != 0
 	      && (idx % width) == 0
 	      && (n % width) == 0
-	      && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
+	      && known_le ((idx + n) / width,
+			   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
 	    {
 	      idx = idx / width;
 	      n = n / width;
@@ -11430,12 +11686,17 @@
 	      if (TREE_CODE (arg0) == VECTOR_CST)
 		{
 		  if (n == 1)
-		    return VECTOR_CST_ELT (arg0, idx);
-
-		  auto_vec<tree, 32> vals (n);
+		    {
+		      tem = VECTOR_CST_ELT (arg0, idx);
+		      if (VECTOR_TYPE_P (type))
+			tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
+		      return tem;
+		    }
+
+		  tree_vector_builder vals (type, n, 1);
 		  for (unsigned i = 0; i < n; ++i)
 		    vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
-		  return build_vector (type, vals);
+		  return vals.build ();
 		}
 	    }
 	}
@@ -11444,7 +11705,9 @@
          fold (nearly) all BIT_FIELD_REFs.  */
       if (CONSTANT_CLASS_P (arg0)
 	  && can_native_interpret_type_p (type)
-	  && BITS_PER_UNIT == 8)
+	  && BITS_PER_UNIT == 8
+	  && tree_fits_uhwi_p (op1)
+	  && tree_fits_uhwi_p (op2))
 	{
 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
 	  unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
@@ -11472,76 +11735,35 @@
 
       return NULL_TREE;
 
-    case FMA_EXPR:
-      /* For integers we can decompose the FMA if possible.  */
-      if (TREE_CODE (arg0) == INTEGER_CST
-	  && TREE_CODE (arg1) == INTEGER_CST)
-	return fold_build2_loc (loc, PLUS_EXPR, type,
-				const_binop (MULT_EXPR, arg0, arg1), arg2);
-      if (integer_zerop (arg2))
-	return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
-
-      return fold_fma (loc, type, arg0, arg1, arg2);
-
     case VEC_PERM_EXPR:
       if (TREE_CODE (arg2) == VECTOR_CST)
 	{
-	  unsigned int nelts = VECTOR_CST_NELTS (arg2), i, mask, mask2;
-	  bool need_mask_canon = false;
-	  bool need_mask_canon2 = false;
-	  bool all_in_vec0 = true;
-	  bool all_in_vec1 = true;
-	  bool maybe_identity = true;
+	  /* Build a vector of integers from the tree mask.  */
+	  vec_perm_builder builder;
+	  if (!tree_to_vec_perm_builder (&builder, arg2))
+	    return NULL_TREE;
+
+	  /* Create a vec_perm_indices for the integer vector.  */
+	  poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
 	  bool single_arg = (op0 == op1);
-	  bool changed = false;
-
-	  mask2 = 2 * nelts - 1;
-	  mask = single_arg ? (nelts - 1) : mask2;
-	  gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type));
-	  auto_vec_perm_indices sel (nelts);
-	  auto_vec_perm_indices sel2 (nelts);
-	  for (i = 0; i < nelts; i++)
-	    {
-	      tree val = VECTOR_CST_ELT (arg2, i);
-	      if (TREE_CODE (val) != INTEGER_CST)
-		return NULL_TREE;
-
-	      /* Make sure that the perm value is in an acceptable
-		 range.  */
-	      wi::tree_to_wide_ref t = wi::to_wide (val);
-	      need_mask_canon |= wi::gtu_p (t, mask);
-	      need_mask_canon2 |= wi::gtu_p (t, mask2);
-	      unsigned int elt = t.to_uhwi () & mask;
-	      unsigned int elt2 = t.to_uhwi () & mask2;
-
-	      if (elt < nelts)
-		all_in_vec1 = false;
-	      else
-		all_in_vec0 = false;
-
-	      if ((elt & (nelts - 1)) != i)
-		maybe_identity = false;
-
-	      sel.quick_push (elt);
-	      sel2.quick_push (elt2);
-	    }
-
-	  if (maybe_identity)
-	    {
-	      if (all_in_vec0)
-		return op0;
-	      if (all_in_vec1)
-		return op1;
-	    }
-
-	  if (all_in_vec0)
-	    op1 = op0;
-	  else if (all_in_vec1)
-	    {
-	      op0 = op1;
-	      for (i = 0; i < nelts; i++)
-		sel[i] -= nelts;
-	      need_mask_canon = true;
+	  vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
+
+	  /* Check for cases that fold to OP0 or OP1 in their original
+	     element order.  */
+	  if (sel.series_p (0, 1, 0, 1))
+	    return op0;
+	  if (sel.series_p (0, 1, nelts, 1))
+	    return op1;
+
+	  if (!single_arg)
+	    {
+	      if (sel.all_from_input_p (0))
+		op1 = op0;
+	      else if (sel.all_from_input_p (1))
+		{
+		  op0 = op1;
+		  sel.rotate_inputs (1);
+		}
 	    }
 
 	  if ((TREE_CODE (op0) == VECTOR_CST
@@ -11554,27 +11776,27 @@
 		return t;
 	    }
 
-	  if (op0 == op1 && !single_arg)
-	    changed = true;
-
-	  /* Some targets are deficient and fail to expand a single
-	     argument permutation while still allowing an equivalent
-	     2-argument version.  */
-	  if (need_mask_canon && arg2 == op2
-	      && !can_vec_perm_p (TYPE_MODE (type), false, &sel)
-	      && can_vec_perm_p (TYPE_MODE (type), false, &sel2))
-	    {
-	      need_mask_canon = need_mask_canon2;
-	      sel = sel2;
-	    }
-
-	  if (need_mask_canon && arg2 == op2)
-	    {
-	      tree eltype = TREE_TYPE (TREE_TYPE (arg2));
-	      auto_vec<tree, 32> tsel (nelts);
-	      for (i = 0; i < nelts; i++)
-		tsel.quick_push (build_int_cst (eltype, sel[i]));
-	      op2 = build_vector (TREE_TYPE (arg2), tsel);
+	  bool changed = (op0 == op1 && !single_arg);
+
+	  /* Generate a canonical form of the selector.  */
+	  if (arg2 == op2 && sel.encoding () != builder)
+	    {
+	      /* Some targets are deficient and fail to expand a single
+		 argument permutation while still allowing an equivalent
+		 2-argument version.  */
+	      if (sel.ninputs () == 2
+		  || can_vec_perm_const_p (TYPE_MODE (type), sel, false))
+		op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel);
+	      else
+		{
+		  vec_perm_indices sel2 (builder, 2, nelts);
+		  if (can_vec_perm_const_p (TYPE_MODE (type), sel2, false))
+		    op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel2);
+		  else
+		    /* Not directly supported with either encoding,
+		       so use the preferred form.  */
+		    op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel);
+		}
 	      changed = true;
 	    }
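A sketch of how the series_p/all_from_input_p queries behave for nelts = 4, with hypothetical selector values:

    /* sel = {0, 1, 2, 3}: series_p (0, 1, 0, 1)      -> fold to op0.
       sel = {4, 5, 6, 7}: series_p (0, 1, nelts, 1)  -> fold to op1.
       sel = {5, 7, 4, 6} with op0 != op1: all_from_input_p (1) holds,
       so op0 = op1 and rotate_inputs (1) renumbers the selector to
       refer to the first input, i.e. {1, 3, 0, 2}.  */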
 
@@ -11609,17 +11831,16 @@
 	  if (bitpos % elsize == 0)
 	    {
 	      unsigned k = bitpos / elsize;
+	      unsigned HOST_WIDE_INT nelts;
 	      if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
 		return arg0;
-	      else
+	      else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
 		{
-		  unsigned int nelts = VECTOR_CST_NELTS (arg0);
-		  auto_vec<tree, 32> elts (nelts);
+		  tree_vector_builder elts (type, nelts, 1);
 		  elts.quick_grow (nelts);
-		  memcpy (&elts[0], VECTOR_CST_ELTS (arg0),
-			  sizeof (tree) * nelts);
-		  elts[k] = arg1;
-		  return build_vector (type, elts);
+		  for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
+		    elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
+		  return elts.build ();
 		}
 	    }
 	}
@@ -11936,8 +12157,9 @@
 	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
 	  break;
 	case VECTOR_CST:
-	  for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
-	    fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
+	  len = vector_cst_encoded_nelts (expr);
+	  for (i = 0; i < len; ++i)
+	    fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
 	  break;
 	default:
 	  break;
@@ -12381,9 +12603,34 @@
 	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
       if (!integer_pow2p (bottom))
 	return 0;
-      /* FALLTHRU */
+      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
+	      || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
 
     case MULT_EXPR:
+      if (TREE_CODE (bottom) == INTEGER_CST)
+	{
+	  op1 = TREE_OPERAND (top, 0);
+	  op2 = TREE_OPERAND (top, 1);
+	  if (TREE_CODE (op1) == INTEGER_CST)
+	    std::swap (op1, op2);
+	  if (TREE_CODE (op2) == INTEGER_CST)
+	    {
+	      if (multiple_of_p (type, op2, bottom))
+		return 1;
+	      /* Handle multiple_of_p ((x * 2 + 2) * 4, 8).  */
+	      if (multiple_of_p (type, bottom, op2))
+		{
+		  widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
+						 wi::to_widest (op2));
+		  if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
+		    {
+		      op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
+		      return multiple_of_p (type, op1, op2);
+		    }
+		}
+	      return multiple_of_p (type, op1, bottom);
+	    }
+	}
       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
 	      || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
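A trace of the new MULT_EXPR case on the comment's own example:

    /* multiple_of_p ((x * 2 + 2) * 4, 8):
       op1 = x * 2 + 2, op2 = 4.  4 is not a multiple of 8, but 8 is
       a multiple of 4, so w = 8 / 4 = 2 and the query becomes
       multiple_of_p (x * 2 + 2, 2), which holds for both addends.  */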
 
@@ -12414,10 +12661,9 @@
 	     so check for it explicitly here.  */
 	  if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
 			 wi::to_wide (op1))
-	      && 0 != (t1 = fold_convert (type,
-					  const_binop (LSHIFT_EXPR,
-						       size_one_node,
-						       op1)))
+	      && (t1 = fold_convert (type,
+				     const_binop (LSHIFT_EXPR, size_one_node,
+						  op1))) != 0
 	      && !TREE_OVERFLOW (t1))
 	    return multiple_of_p (type, t1, bottom);
 	}
@@ -12488,6 +12734,10 @@
       /* fall through */
 
     default:
+      if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
+	return multiple_p (wi::to_poly_widest (top),
+			   wi::to_poly_widest (bottom));
+
       return 0;
     }
 }
@@ -12773,6 +13023,7 @@
       return true;
 
     CASE_CFN_SQRT:
+    CASE_CFN_SQRT_FN:
       /* sqrt(-0.0) is -0.0.  */
       if (!HONOR_SIGNED_ZEROS (element_mode (type)))
 	return true;
@@ -12783,9 +13034,11 @@
     CASE_CFN_ATANH:
     CASE_CFN_CBRT:
     CASE_CFN_CEIL:
+    CASE_CFN_CEIL_FN:
     CASE_CFN_ERF:
     CASE_CFN_EXPM1:
     CASE_CFN_FLOOR:
+    CASE_CFN_FLOOR_FN:
     CASE_CFN_FMOD:
     CASE_CFN_FREXP:
     CASE_CFN_ICEIL:
@@ -12803,8 +13056,11 @@
     CASE_CFN_LROUND:
     CASE_CFN_MODF:
     CASE_CFN_NEARBYINT:
+    CASE_CFN_NEARBYINT_FN:
     CASE_CFN_RINT:
+    CASE_CFN_RINT_FN:
     CASE_CFN_ROUND:
+    CASE_CFN_ROUND_FN:
     CASE_CFN_SCALB:
     CASE_CFN_SCALBLN:
     CASE_CFN_SCALBN:
@@ -12813,18 +13069,22 @@
     CASE_CFN_SINH:
     CASE_CFN_TANH:
     CASE_CFN_TRUNC:
+    CASE_CFN_TRUNC_FN:
       /* True if the 1st argument is nonnegative.  */
       return RECURSE (arg0);
 
     CASE_CFN_FMAX:
+    CASE_CFN_FMAX_FN:
       /* True if the 1st OR 2nd arguments are nonnegative.  */
       return RECURSE (arg0) || RECURSE (arg1);
 
     CASE_CFN_FMIN:
+    CASE_CFN_FMIN_FN:
       /* True if the 1st AND 2nd arguments are nonnegative.  */
       return RECURSE (arg0) && RECURSE (arg1);
 
     CASE_CFN_COPYSIGN:
+    CASE_CFN_COPYSIGN_FN:
       /* True if the 2nd argument is nonnegative.  */
       return RECURSE (arg1);
 
@@ -13315,15 +13575,23 @@
   switch (fn)
     {
     CASE_CFN_CEIL:
+    CASE_CFN_CEIL_FN:
     CASE_CFN_FLOOR:
+    CASE_CFN_FLOOR_FN:
     CASE_CFN_NEARBYINT:
+    CASE_CFN_NEARBYINT_FN:
     CASE_CFN_RINT:
+    CASE_CFN_RINT_FN:
     CASE_CFN_ROUND:
+    CASE_CFN_ROUND_FN:
     CASE_CFN_TRUNC:
+    CASE_CFN_TRUNC_FN:
       return true;
 
     CASE_CFN_FMIN:
+    CASE_CFN_FMIN_FN:
     CASE_CFN_FMAX:
+    CASE_CFN_FMAX_FN:
       return RECURSE (arg0) && RECURSE (arg1);
 
     default:
@@ -13496,7 +13764,7 @@
       location_t loc = EXPR_LOCATION (exp);
 
       if (TREE_CODE (exp) == INDIRECT_REF)
-	string = string_constant (exp1, &index);
+	string = string_constant (exp1, &index, NULL, NULL);
       else
 	{
 	  tree low_bound = array_ref_low_bound (exp);
@@ -13544,16 +13812,6 @@
 
   switch (TREE_CODE (arg0))
     {
-    case INTEGER_CST:
-      {
-	bool overflow;
-	wide_int val = wi::neg (wi::to_wide (arg0), &overflow);
-	t = force_fit_type (type, val, 1,
-			    (overflow && ! TYPE_UNSIGNED (type))
-			    || TREE_OVERFLOW (arg0));
-	break;
-      }
-
     case REAL_CST:
       t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
       break;
@@ -13572,6 +13830,16 @@
       }
 
     default:
+      if (poly_int_tree_p (arg0))
+	{
+	  wi::overflow_type overflow;
+	  poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
+	  t = force_fit_type (type, res, 1,
+			      (overflow && ! TYPE_UNSIGNED (type))
+			      || TREE_OVERFLOW (arg0));
+	  break;
+	}
+
       gcc_unreachable ();
     }
 
@@ -13594,20 +13862,21 @@
       {
         /* If the value is unsigned or non-negative, then the absolute value
 	   is the same as the ordinary value.  */
-	if (!wi::neg_p (wi::to_wide (arg0), TYPE_SIGN (type)))
-	  t = arg0;
+	wide_int val = wi::to_wide (arg0);
+	wi::overflow_type overflow = wi::OVF_NONE;
+	if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
+	  ;
 
 	/* If the value is negative, then the absolute value is
 	   its negation.  */
 	else
-	  {
-	    bool overflow;
-	    wide_int val = wi::neg (wi::to_wide (arg0), &overflow);
-	    t = force_fit_type (type, val, -1,
-				overflow | TREE_OVERFLOW (arg0));
-	  }
+	  val = wi::neg (val, &overflow);
+
+	/* Force to the destination type, set TREE_OVERFLOW for signed
+	   TYPE only.  */
+	t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
       }
-      break;
+    break;
 
     case REAL_CST:
       if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
@@ -13722,8 +13991,12 @@
 	{
 	  /* Have vector comparison with scalar boolean result.  */
 	  gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
-		      && VECTOR_CST_NELTS (op0) == VECTOR_CST_NELTS (op1));
-	  for (unsigned i = 0; i < VECTOR_CST_NELTS (op0); i++)
+		      && known_eq (VECTOR_CST_NELTS (op0),
+				   VECTOR_CST_NELTS (op1)));
+	  unsigned HOST_WIDE_INT nunits;
+	  if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
+	    return NULL_TREE;
+	  for (unsigned i = 0; i < nunits; i++)
 	    {
 	      tree elem0 = VECTOR_CST_ELT (op0, i);
 	      tree elem1 = VECTOR_CST_ELT (op1, i);
@@ -13735,11 +14008,10 @@
 	    }
 	  return constant_boolean_node (true, type);
 	}
-      unsigned count = VECTOR_CST_NELTS (op0);
-      gcc_assert (VECTOR_CST_NELTS (op1) == count
-		  && TYPE_VECTOR_SUBPARTS (type) == count);
-
-      auto_vec<tree, 32> elts (count);
+      tree_vector_builder elts;
+      if (!elts.new_binary_operation (type, op0, op1, false))
+	return NULL_TREE;
+      unsigned int count = elts.encoded_nelts ();
       for (unsigned i = 0; i < count; i++)
 	{
 	  tree elem_type = TREE_TYPE (type);
@@ -13756,7 +14028,7 @@
 					  integer_zerop (tem) ? 0 : -1));
 	}
 
-      return build_vector (type, elts);
+      return elts.build ();
     }
 
   /* From here on we only handle LT, LE, GT, GE, EQ and NE.
@@ -13840,6 +14112,7 @@
 {
   tree sub = op0;
   tree subtype;
+  poly_uint64 const_op01;
 
   STRIP_NOPS (sub);
   subtype = TREE_TYPE (sub);
@@ -13851,6 +14124,7 @@
     {
       tree op = TREE_OPERAND (sub, 0);
       tree optype = TREE_TYPE (op);
+
       /* *&CONST_DECL -> to the value of the const decl.  */
       if (TREE_CODE (op) == CONST_DECL)
 	return DECL_INITIAL (op);
@@ -13884,17 +14158,18 @@
 	       && type == TREE_TYPE (optype))
 	return fold_build1_loc (loc, REALPART_EXPR, type, op);
       /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
-      else if (TREE_CODE (optype) == VECTOR_TYPE
+      else if (VECTOR_TYPE_P (optype)
 	       && type == TREE_TYPE (optype))
 	{
 	  tree part_width = TYPE_SIZE (type);
 	  tree index = bitsize_int (0);
-	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
+	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
+				  index);
 	}
     }
 
   if (TREE_CODE (sub) == POINTER_PLUS_EXPR
-      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
+      && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
     {
       tree op00 = TREE_OPERAND (sub, 0);
       tree op01 = TREE_OPERAND (sub, 1);
@@ -13907,19 +14182,25 @@
 	  op00type = TREE_TYPE (op00);
 
 	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
-	  if (TREE_CODE (op00type) == VECTOR_TYPE
-	      && type == TREE_TYPE (op00type))
+	  if (VECTOR_TYPE_P (op00type)
+	      && type == TREE_TYPE (op00type)
+	      /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
+		 but we want to treat offsets with MSB set as negative.
+		 For the code below negative offsets are invalid and
+		 TYPE_SIZE of the element is something unsigned, so
+		 check whether op01 fits into poly_int64, which implies
+		 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
+		 then just use poly_uint64 because we want to treat the
+		 value as unsigned.  */
+	      && tree_fits_poly_int64_p (op01))
 	    {
 	      tree part_width = TYPE_SIZE (type);
-	      unsigned HOST_WIDE_INT max_offset
+	      poly_uint64 max_offset
 		= (tree_to_uhwi (part_width) / BITS_PER_UNIT
 		   * TYPE_VECTOR_SUBPARTS (op00type));
-	      if (tree_int_cst_sign_bit (op01) == 0
-		  && compare_tree_int (op01, max_offset) == -1)
+	      if (known_lt (const_op01, max_offset))
 		{
-		  unsigned HOST_WIDE_INT offset = tree_to_uhwi (op01);
-		  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
-		  tree index = bitsize_int (indexi);
+		  tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
 		  return fold_build3_loc (loc,
 					  BIT_FIELD_REF, type, op00,
 					  part_width, index);
@@ -13929,8 +14210,8 @@
 	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
 		   && type == TREE_TYPE (op00type))
 	    {
-	      tree size = TYPE_SIZE_UNIT (type);
-	      if (tree_int_cst_equal (size, op01))
+	      if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
+			    const_op01))
 		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
 	    }
 	  /* ((foo *)&fooarray)[1] => fooarray[1] */
@@ -13938,16 +14219,15 @@
 		   && type == TREE_TYPE (op00type))
 	    {
 	      tree type_domain = TYPE_DOMAIN (op00type);
-	      tree min = size_zero_node;
+	      tree min_val = size_zero_node;
 	      if (type_domain && TYPE_MIN_VALUE (type_domain))
-		min = TYPE_MIN_VALUE (type_domain);
-	      offset_int off = wi::to_offset (op01);
-	      offset_int el_sz = wi::to_offset (TYPE_SIZE_UNIT (type));
-	      offset_int remainder;
-	      off = wi::divmod_trunc (off, el_sz, SIGNED, &remainder);
-	      if (remainder == 0 && TREE_CODE (min) == INTEGER_CST)
+		min_val = TYPE_MIN_VALUE (type_domain);
+	      poly_uint64 type_size, index;
+	      if (poly_int_tree_p (min_val)
+		  && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
+		  && multiple_p (const_op01, type_size, &index))
 		{
-		  off = off + wi::to_offset (min);
+		  poly_offset_int off = index + wi::to_poly_offset (min_val);
 		  op01 = wide_int_to_tree (sizetype, off);
 		  return build4_loc (loc, ARRAY_REF, type, op00, op01,
 				     NULL_TREE, NULL_TREE);
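In source terms, the POINTER_PLUS_EXPR cases above implement folds such as (illustrative C):

    double _Complex c;
    double im = *((double *) &c + 1);   /* -> __imag__ c */

    int v[4];
    int x = *((int *) &v + 2);          /* -> v[2] */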
@@ -14169,12 +14449,12 @@
 
 static tree
 split_address_to_core_and_offset (tree exp,
-				  HOST_WIDE_INT *pbitpos, tree *poffset)
+				  poly_int64_pod *pbitpos, tree *poffset)
 {
   tree core;
   machine_mode mode;
   int unsignedp, reversep, volatilep;
-  HOST_WIDE_INT bitsize;
+  poly_int64 bitsize;
   location_t loc = EXPR_LOCATION (exp);
 
   if (TREE_CODE (exp) == ADDR_EXPR)
@@ -14190,16 +14470,14 @@
       STRIP_NOPS (core);
       *pbitpos = 0;
       *poffset = TREE_OPERAND (exp, 1);
-      if (TREE_CODE (*poffset) == INTEGER_CST)
-	{
-	  offset_int tem = wi::sext (wi::to_offset (*poffset),
-				     TYPE_PRECISION (TREE_TYPE (*poffset)));
+      if (poly_int_tree_p (*poffset))
+	{
+	  poly_offset_int tem
+	    = wi::sext (wi::to_poly_offset (*poffset),
+			TYPE_PRECISION (TREE_TYPE (*poffset)));
 	  tem <<= LOG2_BITS_PER_UNIT;
-	  if (wi::fits_shwi_p (tem))
-	    {
-	      *pbitpos = tem.to_shwi ();
-	      *poffset = NULL_TREE;
-	    }
+	  if (tem.to_shwi (pbitpos))
+	    *poffset = NULL_TREE;
 	}
     }
   else
@@ -14216,17 +14494,18 @@
    otherwise.  If they do, E1 - E2 is stored in *DIFF.  */
 
 bool
-ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
+ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
 {
   tree core1, core2;
-  HOST_WIDE_INT bitpos1, bitpos2;
+  poly_int64 bitpos1, bitpos2;
   tree toffset1, toffset2, tdiff, type;
 
   core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
   core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
 
-  if (bitpos1 % BITS_PER_UNIT != 0
-      || bitpos2 % BITS_PER_UNIT != 0
+  poly_int64 bytepos1, bytepos2;
+  if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
+      || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
       || !operand_equal_p (core1, core2, 0))
     return false;
 
@@ -14251,7 +14530,7 @@
   else
     *diff = 0;
 
-  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
+  *diff += bytepos1 - bytepos2;
   return true;
 }
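A minimal usage sketch, assuming e1 and e2 are ADDR_EXPRs of elements of int a[]:

    /* e1 = &a[4], e2 = &a[1]: both split to core &a with bit positions
       128 and 32, so *diff receives the byte difference 16 - 4 = 12.
       The poly_int64 result means scalable offsets are handled too.  */
    poly_int64 diff;
    if (ptr_difference_const (e1, e2, &diff))
      gcc_checking_assert (known_eq (diff, 12));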
 
@@ -14279,21 +14558,23 @@
 			  ptr, size_int (off));
 }
 
-/* Return a char pointer for a C string if it is a string constant
-   or sum of string constant and integer constant.  We only support
-   string constants properly terminated with '\0' character.
-   If STRLEN is a valid pointer, length (including terminating character)
-   of returned string is stored to the argument.  */
+/* Return a pointer P to a NUL-terminated string representing the sequence
+   of constant characters referred to by SRC (or a subsequence of such
+   characters within it if SRC is a reference to a string plus some
+   constant offset).  If STRLEN is non-null, store the number of bytes
+   in the string constant including the terminating NUL char.  *STRLEN is
+   typically strlen(P) + 1 in the absence of embedded NUL characters.  */
 
 const char *
-c_getstr (tree src, unsigned HOST_WIDE_INT *strlen)
+c_getstr (tree src, unsigned HOST_WIDE_INT *strlen /* = NULL */)
 {
   tree offset_node;
+  tree mem_size;
 
   if (strlen)
     *strlen = 0;
 
-  src = string_constant (src, &offset_node);
+  src = string_constant (src, &offset_node, &mem_size, NULL);
   if (src == 0)
     return NULL;
 
@@ -14306,18 +14587,117 @@
 	offset = tree_to_uhwi (offset_node);
     }
 
+  if (!tree_fits_uhwi_p (mem_size))
+    return NULL;
+
+  /* STRING_LENGTH is the size of the string literal, including any
+     embedded NULs.  STRING_SIZE is the size of the array the string
+     literal is stored in.  */
   unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
+  unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size);
+
+  /* Ideally this would turn into a gcc_checking_assert over time.  */
+  if (string_length > string_size)
+    string_length = string_size;
+
   const char *string = TREE_STRING_POINTER (src);
 
-  /* Support only properly null-terminated strings.  */
+
   if (string_length == 0
-      || string[string_length - 1] != '\0'
-      || offset >= string_length)
+      || offset >= string_size)
     return NULL;
 
   if (strlen)
-    *strlen = string_length - offset;
-  return string + offset;
+    {
+      /* Compute and store the length of the substring at OFFSET.
+	 All offsets past the initial length refer to null strings.  */
+      if (offset < string_length)
+	*strlen = string_length - offset;
+      else
+	*strlen = 1;
+    }
+  else
+    {
+      tree eltype = TREE_TYPE (TREE_TYPE (src));
+      /* Support only properly NUL-terminated single byte strings.  */
+      if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
+	return NULL;
+      if (string[string_length - 1] != '\0')
+	return NULL;
+    }
+
+  return offset < string_length ? string + offset : "";
+}
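A minimal caller sketch, assuming SRC is the address of "hello" plus a constant offset of 1:

    unsigned HOST_WIDE_INT len;
    const char *p = c_getstr (src, &len);
    /* p == "ello", len == 5: four characters plus the terminating NUL.
       When STRLEN is omitted, SRC must additionally be a properly
       NUL-terminated single-byte string, or NULL is returned.  */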
+
+/* Given a tree T, compute which bits in T may be nonzero.  */
+
+wide_int
+tree_nonzero_bits (const_tree t)
+{
+  switch (TREE_CODE (t))
+    {
+    case INTEGER_CST:
+      return wi::to_wide (t);
+    case SSA_NAME:
+      return get_nonzero_bits (t);
+    case NON_LVALUE_EXPR:
+    case SAVE_EXPR:
+      return tree_nonzero_bits (TREE_OPERAND (t, 0));
+    case BIT_AND_EXPR:
+      return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
+			  tree_nonzero_bits (TREE_OPERAND (t, 1)));
+    case BIT_IOR_EXPR:
+    case BIT_XOR_EXPR:
+      return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
+			 tree_nonzero_bits (TREE_OPERAND (t, 1)));
+    case COND_EXPR:
+      return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
+			 tree_nonzero_bits (TREE_OPERAND (t, 2)));
+    CASE_CONVERT:
+      return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
+			     TYPE_PRECISION (TREE_TYPE (t)),
+			     TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
+    case PLUS_EXPR:
+      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
+	{
+	  wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
+	  wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
+	  if (wi::bit_and (nzbits1, nzbits2) == 0)
+	    return wi::bit_or (nzbits1, nzbits2);
+	}
+      break;
+    case LSHIFT_EXPR:
+      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
+	{
+	  tree type = TREE_TYPE (t);
+	  wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
+	  wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
+				       TYPE_PRECISION (type));
+	  return wi::neg_p (arg1)
+		 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
+		 : wi::lshift (nzbits, arg1);
+	}
+      break;
+    case RSHIFT_EXPR:
+      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
+        {
+	  tree type = TREE_TYPE (t);
+	  wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
+	  wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
+				       TYPE_PRECISION (type));
+	  return wi::neg_p (arg1)
+		 ? wi::lshift (nzbits, -arg1)
+		 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
+        }
+      break;
+    default:
+      break;
+    }
+
+  return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
 }
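A few illustrative evaluations, assuming hypothetical 32-bit variables x and y:

    /* x & 0xf0        -> nonzero bits 0xf0
       (x & 0xf0) | 3  -> nonzero bits 0xf3
       (x & 0xf0) << 2 -> nonzero bits 0x3c0
       x + y with nonzero bits 0x0f and 0xf0: the sets are disjoint, so
       no addition can carry and the result is the union 0xff; if the
       sets overlap, the code falls back to the all-ones mask.  */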
 
 #if CHECKING_P
@@ -14406,6 +14786,22 @@
   ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
 }
 
+/* Verify folding of VEC_DUPLICATE_EXPRs.  */
+
+static void
+test_vec_duplicate_folding ()
+{
+  scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
+  machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
+  /* This will be 1 if VEC_MODE isn't a vector mode.  */
+  poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);
+
+  tree type = build_vector_type (ssizetype, nunits);
+  tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
+  tree dup5_cst = build_vector_from_val (type, ssize_int (5));
+  ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
+}
+
 /* Run all of the selftests within this file.  */
 
 void
@@ -14413,6 +14809,7 @@
 {
   test_arithmetic_folding ();
   test_vector_folding ();
+  test_vec_duplicate_folding ();
 }
 
 } // namespace selftest