diff gcc/simplify-rtx.c @ 67:f6334be47118

update gcc from gcc-4.6-20100522 to gcc-4.6-20110318
author:   nobuyasu <dimolto@cr.ie.u-ryukyu.ac.jp>
date:     Tue, 22 Mar 2011 17:18:12 +0900
parents:  b7f97abdc517
children: 04ced10e8804
--- a/gcc/simplify-rtx.c	Tue May 25 18:58:51 2010 +0900
+++ b/gcc/simplify-rtx.c	Tue Mar 22 17:18:12 2011 +0900
@@ -34,7 +34,7 @@
 #include "recog.h"
 #include "function.h"
 #include "expr.h"
-#include "toplev.h"
+#include "diagnostic-core.h"
 #include "output.h"
 #include "ggc.h"
 #include "target.h"
@@ -208,10 +208,11 @@
 rtx
 delegitimize_mem_from_attrs (rtx x)
 {
+  /* MEMs without MEM_OFFSETs may have been offset, so we can't just
+     use their base addresses as equivalent.  */
   if (MEM_P (x)
       && MEM_EXPR (x)
-      && (!MEM_OFFSET (x)
-	  || GET_CODE (MEM_OFFSET (x)) == CONST_INT))
+      && MEM_OFFSET (x))
     {
       tree decl = MEM_EXPR (x);
       enum machine_mode mode = GET_MODE (x);
@@ -264,8 +265,7 @@
 	{
 	  rtx newx;
 
-	  if (MEM_OFFSET (x))
-	    offset += INTVAL (MEM_OFFSET (x));
+	  offset += INTVAL (MEM_OFFSET (x));
 
 	  newx = DECL_RTL (decl);
 
@@ -812,7 +812,7 @@
          than HOST_BITS_PER_WIDE_INT.  */
       if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
 	  && COMPARISON_P (op)
-	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0)
+	  && (STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0)
 	return rtl_hooks.gen_lowpart_no_emit (mode, op);
       break;
 
@@ -911,7 +911,7 @@
 	  || ((GET_MODE_BITSIZE (GET_MODE (op))
 	       <= HOST_BITS_PER_WIDE_INT)
 	      && ((nonzero_bits (op, GET_MODE (op))
-		   & ((HOST_WIDE_INT) 1
+		   & ((unsigned HOST_WIDE_INT) 1
 		      << (GET_MODE_BITSIZE (GET_MODE (op)) - 1)))
 		  == 0)))
 	return op;
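
A recurring change throughout this patch (first visible in the two hunks above) replaces shifts on signed HOST_WIDE_INT with shifts on unsigned HOST_WIDE_INT. Left-shifting a 1 into the sign bit of a signed type is undefined behaviour in C, while the unsigned form is well defined. A minimal standalone sketch, assuming HOST_WIDE_INT is a 64-bit long long (an assumption of this note, not part of the patch):

    #include <stdio.h>

    /* Sketch only: long long stands in for HOST_WIDE_INT.  */
    typedef unsigned long long UHWI;

    int main (void)
    {
      int bits = 64;
      /* ((long long) 1) << (bits - 1) would shift a 1 into the sign bit
         of a signed type, which is undefined behaviour; the unsigned
         form is well defined and yields the intended single-bit mask.  */
      UHWI sign_bit = (UHWI) 1 << (bits - 1);
      printf ("%016llx\n", sign_bit);       /* prints 8000000000000000 */
      return 0;
    }
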
@@ -1009,6 +1009,42 @@
 	  && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (XEXP (op, 0))))
 	return rtl_hooks.gen_lowpart_no_emit (mode, op);
 
+      /* (sign_extend:M (sign_extend:N <X>)) is (sign_extend:M <X>).
+	 (sign_extend:M (zero_extend:N <X>)) is (zero_extend:M <X>).  */
+      if (GET_CODE (op) == SIGN_EXTEND || GET_CODE (op) == ZERO_EXTEND)
+	{
+	  gcc_assert (GET_MODE_BITSIZE (mode)
+		      > GET_MODE_BITSIZE (GET_MODE (op)));
+	  return simplify_gen_unary (GET_CODE (op), mode, XEXP (op, 0),
+				     GET_MODE (XEXP (op, 0)));
+	}
+
+      /* (sign_extend:M (ashiftrt:N (ashift <X> (const_int I)) (const_int I)))
+	 is (sign_extend:M (subreg:O <X>)) if there is mode with
+	 GET_MODE_BITSIZE (N) - I bits.
+	 (sign_extend:M (lshiftrt:N (ashift <X> (const_int I)) (const_int I)))
+	 is similarly (zero_extend:M (subreg:O <X>)).  */
+      if ((GET_CODE (op) == ASHIFTRT || GET_CODE (op) == LSHIFTRT)
+	  && GET_CODE (XEXP (op, 0)) == ASHIFT
+	  && CONST_INT_P (XEXP (op, 1))
+	  && XEXP (XEXP (op, 0), 1) == XEXP (op, 1)
+	  && GET_MODE_BITSIZE (GET_MODE (op)) > INTVAL (XEXP (op, 1)))
+	{
+	  enum machine_mode tmode
+	    = mode_for_size (GET_MODE_BITSIZE (GET_MODE (op))
+			     - INTVAL (XEXP (op, 1)), MODE_INT, 1);
+	  gcc_assert (GET_MODE_BITSIZE (mode)
+		      > GET_MODE_BITSIZE (GET_MODE (op)));
+	  if (tmode != BLKmode)
+	    {
+	      rtx inner =
+		rtl_hooks.gen_lowpart_no_emit (tmode, XEXP (XEXP (op, 0), 0));
+	      return simplify_gen_unary (GET_CODE (op) == ASHIFTRT
+					 ? SIGN_EXTEND : ZERO_EXTEND,
+					 mode, inner, tmode);
+	    }
+	}
+
 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
       /* As we do not know which address space the pointer is refering to,
 	 we can do this only if the target does not support different pointer
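
The transformation added above folds (ashiftrt (ashift X I) I) under a sign_extend into a narrower sign extension. The integer identity it relies on, shown as a standalone C sketch rather than RTL (assuming the usual two's-complement conversions and arithmetic right shift of negative values, as on GCC targets):

    #include <assert.h>
    #include <stdint.h>

    int main (void)
    {
      int32_t x = 0x1234ABCD;
      /* Shifting left by 24 and arithmetically right by 24 keeps only the
         low 8 bits and sign-extends them -- the same effect as a narrower
         sign extension, which is what the new code emits instead.  */
      int32_t via_shifts = (int32_t) ((uint32_t) x << 24) >> 24;
      int32_t via_extend = (int32_t) (int8_t) x;
      assert (via_shifts == via_extend);    /* both equal -51 (0xffffffcd) */
      return 0;
    }
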
@@ -1035,6 +1071,31 @@
 	  && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (XEXP (op, 0))))
 	return rtl_hooks.gen_lowpart_no_emit (mode, op);
 
+      /* (zero_extend:M (zero_extend:N <X>)) is (zero_extend:M <X>).  */
+      if (GET_CODE (op) == ZERO_EXTEND)
+	return simplify_gen_unary (ZERO_EXTEND, mode, XEXP (op, 0),
+				   GET_MODE (XEXP (op, 0)));
+
+      /* (zero_extend:M (lshiftrt:N (ashift <X> (const_int I)) (const_int I)))
+	 is (zero_extend:M (subreg:O <X>)) if there is mode with
+	 GET_MODE_BITSIZE (N) - I bits.  */
+      if (GET_CODE (op) == LSHIFTRT
+	  && GET_CODE (XEXP (op, 0)) == ASHIFT
+	  && CONST_INT_P (XEXP (op, 1))
+	  && XEXP (XEXP (op, 0), 1) == XEXP (op, 1)
+	  && GET_MODE_BITSIZE (GET_MODE (op)) > INTVAL (XEXP (op, 1)))
+	{
+	  enum machine_mode tmode
+	    = mode_for_size (GET_MODE_BITSIZE (GET_MODE (op))
+			     - INTVAL (XEXP (op, 1)), MODE_INT, 1);
+	  if (tmode != BLKmode)
+	    {
+	      rtx inner =
+		rtl_hooks.gen_lowpart_no_emit (tmode, XEXP (XEXP (op, 0), 0));
+	      return simplify_gen_unary (ZERO_EXTEND, mode, inner, tmode);
+	    }
+	}
+
 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
       /* As we do not know which address space the pointer is refering to,
 	 we can do this only if the target does not support different pointer
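
The zero_extend case added above is the logical-shift counterpart of the previous one: (lshiftrt (ashift X I) I) keeps only the low bits, exactly like a mask or a narrower zero extension. A one-assert standalone illustration:

    #include <assert.h>
    #include <stdint.h>

    int main (void)
    {
      uint32_t x = 0x1234ABCD;
      /* Left shift then logical right shift by 24 keeps the low 8 bits,
         the same as zero-extending the low byte.  */
      assert (((x << 24) >> 24) == (uint32_t) (uint8_t) x);   /* 0xcd */
      return 0;
    }
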
@@ -1196,10 +1257,8 @@
 	  break;
 
 	case FFS:
-	  /* Don't use ffs here.  Instead, get low order bit and then its
-	     number.  If arg0 is zero, this will return 0, as desired.  */
 	  arg0 &= GET_MODE_MASK (mode);
-	  val = exact_log2 (arg0 & (- arg0)) + 1;
+	  val = ffs_hwi (arg0);
 	  break;
 
 	case CLZ:
@@ -1220,7 +1279,7 @@
 		val = GET_MODE_BITSIZE (mode);
 	    }
 	  else
-	    val = exact_log2 (arg0 & -arg0);
+	    val = ctz_hwi (arg0);
 	  break;
 
 	case POPCOUNT:
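
The FFS and CTZ hunks above replace the open-coded exact_log2 (x & -x) idiom with the newer ffs_hwi and ctz_hwi helpers. The identity behind both versions is that x & -x isolates the lowest set bit, so its log2 is the number of trailing zeros, and one more than that is the 1-based find-first-set. A standalone check using compiler builtins in place of the GCC-internal helpers (an assumption of this sketch, not part of the patch):

    #include <assert.h>

    int main (void)
    {
      unsigned long long x = 0x50;           /* lowest set bit is bit 4 */
      unsigned long long lowbit = x & -x;    /* isolates that bit: 0x10 */
      assert (lowbit == 1ULL << 4);
      assert (__builtin_ctzll (x) == 4);     /* what ctz_hwi computes */
      assert (__builtin_ffsll (x) == 5);     /* what ffs_hwi computes (ctz + 1) */
      return 0;
    }
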
@@ -1270,7 +1329,8 @@
 	      val = arg0;
 	    }
 	  else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
-	    val = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
+	    val = arg0 & ~((unsigned HOST_WIDE_INT) (-1)
+			   << GET_MODE_BITSIZE (op_mode));
 	  else
 	    return 0;
 	  break;
@@ -1289,10 +1349,12 @@
 	  else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
 	    {
 	      val
-		= arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
-	      if (val
-		  & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (op_mode) - 1)))
-		val -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
+		= arg0 & ~((unsigned HOST_WIDE_INT) (-1)
+			   << GET_MODE_BITSIZE (op_mode));
+	      if (val & ((unsigned HOST_WIDE_INT) 1
+			 << (GET_MODE_BITSIZE (op_mode) - 1)))
+		val
+		  -= (unsigned HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
 	    }
 	  else
 	    return 0;
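
The SIGN_EXTEND constant folding above is the standard "mask to N bits, then subtract 2^N if the field's sign bit is set" trick, now written with unsigned shifts. The same steps in isolation (illustrative names, not GCC's; the final conversion back to a signed type assumes two's-complement behaviour, as on GCC hosts):

    #include <assert.h>

    /* Sign-extend the low `bits` bits of `val`, 0 < bits < 64.  */
    static long long
    sign_extend_field (unsigned long long val, int bits)
    {
      val &= ~((unsigned long long) -1 << bits);          /* keep low bits */
      if (val & ((unsigned long long) 1 << (bits - 1)))   /* field sign bit */
        val -= (unsigned long long) 1 << bits;            /* wraps; becomes negative */
      return (long long) val;
    }

    int main (void)
    {
      assert (sign_extend_field (0x7F, 8) == 127);
      assert (sign_extend_field (0x80, 8) == -128);
      assert (sign_extend_field (0xFF, 8) == -1);
      return 0;
    }
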
@@ -1350,15 +1412,12 @@
 
 	case FFS:
 	  hv = 0;
-	  if (l1 == 0)
-	    {
-	      if (h1 == 0)
-		lv = 0;
-	      else
-		lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & -h1) + 1;
-	    }
+	  if (l1 != 0)
+	    lv = ffs_hwi (l1);
+	  else if (h1 != 0)
+	    lv = HOST_BITS_PER_WIDE_INT + ffs_hwi (h1);
 	  else
-	    lv = exact_log2 (l1 & -l1) + 1;
+	    lv = 0;
 	  break;
 
 	case CLZ:
@@ -1375,9 +1434,9 @@
 	case CTZ:
 	  hv = 0;
 	  if (l1 != 0)
-	    lv = exact_log2 (l1 & -l1);
+	    lv = ctz_hwi (l1);
 	  else if (h1 != 0)
-	    lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & -h1);
+	    lv = HOST_BITS_PER_WIDE_INT + ctz_hwi (h1);
 	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (mode, lv))
 	    lv = GET_MODE_BITSIZE (mode);
 	  break;
@@ -1448,9 +1507,9 @@
 	    {
 	      lv = l1 & GET_MODE_MASK (op_mode);
 	      if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
-		  && (lv & ((HOST_WIDE_INT) 1
+		  && (lv & ((unsigned HOST_WIDE_INT) 1
 			    << (GET_MODE_BITSIZE (op_mode) - 1))) != 0)
-		lv -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
+		lv -= (unsigned HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
 
 	      hv = HWI_SIGN_EXTEND (lv);
 	    }
@@ -1556,13 +1615,14 @@
 	  /* Test against the signed lower bound.  */
 	  if (width > HOST_BITS_PER_WIDE_INT)
 	    {
-	      th = (HOST_WIDE_INT) -1 << (width - HOST_BITS_PER_WIDE_INT - 1);
+	      th = (unsigned HOST_WIDE_INT) (-1)
+		   << (width - HOST_BITS_PER_WIDE_INT - 1);
 	      tl = 0;
 	    }
 	  else
 	    {
 	      th = -1;
-	      tl = (HOST_WIDE_INT) -1 << (width - 1);
+	      tl = (unsigned HOST_WIDE_INT) (-1) << (width - 1);
 	    }
 	  real_from_integer (&t, VOIDmode, tl, th, 0);
 	  if (REAL_VALUES_LESS (x, t))
@@ -2108,6 +2168,19 @@
       if (trueop1 == constm1_rtx)
 	return simplify_gen_unary (NEG, mode, op0, mode);
 
+      if (GET_CODE (op0) == NEG)
+	{
+	  rtx temp = simplify_unary_operation (NEG, mode, op1, mode);
+	  if (temp)
+	    return simplify_gen_binary (MULT, mode, XEXP (op0, 0), temp);
+	}
+      if (GET_CODE (op1) == NEG)
+	{
+	  rtx temp = simplify_unary_operation (NEG, mode, op0, mode);
+	  if (temp)
+	    return simplify_gen_binary (MULT, mode, temp, XEXP (op1, 0));
+	}
+
       /* Maybe simplify x * 0 to 0.  The reduction is not valid if
 	 x is NaN, since x * 0 is then also NaN.  Nor is it valid
 	 when the mode has signed zeros, since multiplying a negative
@@ -2127,7 +2200,7 @@
       /* Convert multiply by constant power of two into shift unless
 	 we are still generating RTL.  This test is a kludge.  */
       if (CONST_INT_P (trueop1)
-	  && (val = exact_log2 (INTVAL (trueop1))) >= 0
+	  && (val = exact_log2 (UINTVAL (trueop1))) >= 0
 	  /* If the mode is larger than the host word size, and the
 	     uppermost bit is set, then this isn't a power of two due
 	     to implicit sign extension.  */
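
The hunk above makes the constant handling unsigned while keeping the multiply-by-power-of-two-to-shift rewrite. The identity itself, plus the top-bit case the in-tree comment warns about, as a standalone sketch:

    #include <assert.h>

    int main (void)
    {
      unsigned long long x = 123;
      /* Multiplying by a power of two is the same as shifting left by its
         log2; exact_log2 supplies the shift count in the hunk above.  */
      assert (x * 8 == x << 3);

      /* Read as a signed 64-bit value, 0x8000000000000000 is negative,
         but as an unsigned value it is a clean power of two (bit 63).  */
      unsigned long long c = 0x8000000000000000ULL;
      assert (c != 0 && (c & (c - 1)) == 0);
      return 0;
    }
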
@@ -2190,10 +2263,10 @@
       break;
 
     case IOR:
-      if (trueop1 == const0_rtx)
+      if (trueop1 == CONST0_RTX (mode))
 	return op0;
       if (CONST_INT_P (trueop1)
-	  && ((INTVAL (trueop1) & GET_MODE_MASK (mode))
+	  && ((UINTVAL (trueop1) & GET_MODE_MASK (mode))
 	      == GET_MODE_MASK (mode)))
 	return op1;
       if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0))
@@ -2208,7 +2281,7 @@
       /* (ior A C) is C if all bits of A that might be nonzero are on in C.  */
       if (CONST_INT_P (op1)
 	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
-	  && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
+	  && (nonzero_bits (op0, mode) & ~UINTVAL (op1)) == 0)
 	return op1;
 
       /* Canonicalize (X & C1) | C2.  */
@@ -2297,12 +2370,12 @@
 	  && GET_CODE (op0) == AND
 	  && CONST_INT_P (XEXP (op0, 1))
 	  && CONST_INT_P (op1)
-	  && (INTVAL (XEXP (op0, 1)) & INTVAL (op1)) != 0)
+	  && (UINTVAL (XEXP (op0, 1)) & UINTVAL (op1)) != 0)
 	return simplify_gen_binary (IOR, mode,
 				    simplify_gen_binary
 					  (AND, mode, XEXP (op0, 0),
-					   GEN_INT (INTVAL (XEXP (op0, 1))
-						    & ~INTVAL (op1))),
+					   GEN_INT (UINTVAL (XEXP (op0, 1))
+						    & ~UINTVAL (op1))),
 				    op1);
 
       /* If OP0 is (ashiftrt (plus ...) C), it might actually be
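
The (X & C1) | C2 canonicalization above now compares the constants as unsigned values. The rewrite it performs drops from C1 any bits that C2 already forces to 1, i.e. (X & C1) | C2 == (X & (C1 & ~C2)) | C2. An exhaustive 16-bit check of that identity:

    #include <assert.h>

    int main (void)
    {
      unsigned int c1 = 0xF0F0, c2 = 0x00FF;
      for (unsigned int x = 0; x <= 0xFFFF; x++)
        /* Bits of C1 that are already set in C2 cannot affect the result,
           so they may be cleared from the AND mask.  */
        assert (((x & c1) | c2) == ((x & (c1 & ~c2)) | c2));
      return 0;
    }
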
@@ -2332,10 +2405,10 @@
       break;
 
     case XOR:
-      if (trueop1 == const0_rtx)
+      if (trueop1 == CONST0_RTX (mode))
 	return op0;
       if (CONST_INT_P (trueop1)
-	  && ((INTVAL (trueop1) & GET_MODE_MASK (mode))
+	  && ((UINTVAL (trueop1) & GET_MODE_MASK (mode))
 	      == GET_MODE_MASK (mode)))
 	return simplify_gen_unary (NOT, mode, op0, mode);
       if (rtx_equal_p (trueop0, trueop1)
@@ -2479,7 +2552,7 @@
 	  && CONST_INT_P (trueop1)
 	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
 	  && (~GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))
-	      & INTVAL (trueop1)) == 0)
+	      & UINTVAL (trueop1)) == 0)
 	{
 	  enum machine_mode imode = GET_MODE (XEXP (op0, 0));
 	  tem = simplify_gen_binary (AND, imode, XEXP (op0, 0),
@@ -2560,8 +2633,8 @@
 	 (A +- N) & M -> A & M.  */
       if (CONST_INT_P (trueop1)
 	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
-	  && ~INTVAL (trueop1)
-	  && (INTVAL (trueop1) & (INTVAL (trueop1) + 1)) == 0
+	  && ~UINTVAL (trueop1)
+	  && (UINTVAL (trueop1) & (UINTVAL (trueop1) + 1)) == 0
 	  && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS))
 	{
 	  rtx pmop[2];
@@ -2571,7 +2644,7 @@
 	  pmop[1] = XEXP (op0, 1);
 
 	  if (CONST_INT_P (pmop[1])
-	      && (INTVAL (pmop[1]) & INTVAL (trueop1)) == 0)
+	      && (UINTVAL (pmop[1]) & UINTVAL (trueop1)) == 0)
 	    return simplify_gen_binary (AND, mode, pmop[0], op1);
 
 	  for (which = 0; which < 2; which++)
@@ -2581,14 +2654,14 @@
 		{
 		case AND:
 		  if (CONST_INT_P (XEXP (tem, 1))
-		      && (INTVAL (XEXP (tem, 1)) & INTVAL (trueop1))
-		      == INTVAL (trueop1))
+		      && (UINTVAL (XEXP (tem, 1)) & UINTVAL (trueop1))
+		      == UINTVAL (trueop1))
 		    pmop[which] = XEXP (tem, 0);
 		  break;
 		case IOR:
 		case XOR:
 		  if (CONST_INT_P (XEXP (tem, 1))
-		      && (INTVAL (XEXP (tem, 1)) & INTVAL (trueop1)) == 0)
+		      && (UINTVAL (XEXP (tem, 1)) & UINTVAL (trueop1)) == 0)
 		    pmop[which] = XEXP (tem, 0);
 		  break;
 		default:
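
The two hunks above move the (A +- N) & M simplification to unsigned constants. The guard (UINTVAL (trueop1) & (UINTVAL (trueop1) + 1)) == 0 checks that M is a contiguous low-bit mask (a power of two minus one); a constant term that shares no bits with such a mask cannot influence the masked result, so it can be dropped. A small standalone check:

    #include <assert.h>

    int main (void)
    {
      unsigned int m = 0xFF;              /* low-bit mask: m & (m + 1) == 0 */
      assert ((m & (m + 1)) == 0);
      for (unsigned int a = 0; a < 0x1000; a++)
        /* 0x300 has no bits in common with the mask, so adding it only
           touches bits above the mask and drops out of the AND.  */
        assert (((a + 0x300) & m) == (a & m));
      return 0;
    }
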
@@ -2634,7 +2707,7 @@
 	return rtl_hooks.gen_lowpart_no_emit (mode, op0);
       /* Convert divide by power of two into shift.  */
       if (CONST_INT_P (trueop1)
-	  && (val = exact_log2 (INTVAL (trueop1))) > 0)
+	  && (val = exact_log2 (UINTVAL (trueop1))) > 0)
 	return simplify_gen_binary (LSHIFTRT, mode, op0, GEN_INT (val));
       break;
 
@@ -2681,7 +2754,8 @@
       else
 	{
 	  /* 0/x is 0 (or x&0 if x has side-effects).  */
-	  if (trueop0 == CONST0_RTX (mode))
+	  if (trueop0 == CONST0_RTX (mode)
+	      && !cfun->can_throw_non_call_exceptions)
 	    {
 	      if (side_effects_p (op1))
 		return simplify_gen_binary (AND, mode, op1, trueop0);
@@ -2716,7 +2790,7 @@
 	}
       /* Implement modulus by power of two as AND.  */
       if (CONST_INT_P (trueop1)
-	  && exact_log2 (INTVAL (trueop1)) > 0)
+	  && exact_log2 (UINTVAL (trueop1)) > 0)
 	return simplify_gen_binary (AND, mode, op0,
 				    GEN_INT (INTVAL (op1) - 1));
       break;
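
The UDIV and UMOD hunks above likewise read the constant as unsigned before exact_log2. The rewrites themselves are the standard unsigned identities: divide by 2^k is a logical right shift, modulus by 2^k is a mask:

    #include <assert.h>

    int main (void)
    {
      unsigned int x = 12345;
      assert (x / 8 == x >> 3);       /* UDIV by a power of two -> LSHIFTRT */
      assert (x % 8 == (x & 7));      /* UMOD by a power of two -> AND */
      return 0;
    }
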
@@ -2747,7 +2821,7 @@
 	return op0;
       /* Rotating ~0 always results in ~0.  */
       if (CONST_INT_P (trueop0) && width <= HOST_BITS_PER_WIDE_INT
-	  && (unsigned HOST_WIDE_INT) INTVAL (trueop0) == GET_MODE_MASK (mode)
+	  && UINTVAL (trueop0) == GET_MODE_MASK (mode)
 	  && ! side_effects_p (op1))
 	return op0;
     canonicalize_shift:
@@ -2793,7 +2867,7 @@
     case SMIN:
       if (width <= HOST_BITS_PER_WIDE_INT
 	  && CONST_INT_P (trueop1)
-	  && INTVAL (trueop1) == (HOST_WIDE_INT) 1 << (width -1)
+	  && UINTVAL (trueop1) == (unsigned HOST_WIDE_INT) 1 << (width -1)
 	  && ! side_effects_p (op0))
 	return op1;
       if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0))
@@ -2806,8 +2880,7 @@
     case SMAX:
       if (width <= HOST_BITS_PER_WIDE_INT
 	  && CONST_INT_P (trueop1)
-	  && ((unsigned HOST_WIDE_INT) INTVAL (trueop1)
-	      == (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode) >> 1)
+	  && (UINTVAL (trueop1) == GET_MODE_MASK (mode) >> 1)
 	  && ! side_effects_p (op0))
 	return op1;
       if (rtx_equal_p (trueop0, trueop1) && ! side_effects_p (op0))
@@ -3268,141 +3341,124 @@
 
   /* We can fold some multi-word operations.  */
   if (GET_MODE_CLASS (mode) == MODE_INT
-      && width == HOST_BITS_PER_WIDE_INT * 2
-      && (GET_CODE (op0) == CONST_DOUBLE || CONST_INT_P (op0))
-      && (GET_CODE (op1) == CONST_DOUBLE || CONST_INT_P (op1)))
+      && width == HOST_BITS_PER_DOUBLE_INT
+      && (CONST_DOUBLE_P (op0) || CONST_INT_P (op0))
+      && (CONST_DOUBLE_P (op1) || CONST_INT_P (op1)))
     {
-      unsigned HOST_WIDE_INT l1, l2, lv, lt;
-      HOST_WIDE_INT h1, h2, hv, ht;
-
-      if (GET_CODE (op0) == CONST_DOUBLE)
-	l1 = CONST_DOUBLE_LOW (op0), h1 = CONST_DOUBLE_HIGH (op0);
-      else
-	l1 = INTVAL (op0), h1 = HWI_SIGN_EXTEND (l1);
-
-      if (GET_CODE (op1) == CONST_DOUBLE)
-	l2 = CONST_DOUBLE_LOW (op1), h2 = CONST_DOUBLE_HIGH (op1);
-      else
-	l2 = INTVAL (op1), h2 = HWI_SIGN_EXTEND (l2);
+      double_int o0, o1, res, tmp;
+
+      o0 = rtx_to_double_int (op0);
+      o1 = rtx_to_double_int (op1);
 
       switch (code)
 	{
 	case MINUS:
 	  /* A - B == A + (-B).  */
-	  neg_double (l2, h2, &lv, &hv);
-	  l2 = lv, h2 = hv;
+	  o1 = double_int_neg (o1);
 
 	  /* Fall through....  */
 
 	case PLUS:
-	  add_double (l1, h1, l2, h2, &lv, &hv);
+	  res = double_int_add (o0, o1);
 	  break;
 
 	case MULT:
-	  mul_double (l1, h1, l2, h2, &lv, &hv);
+	  res = double_int_mul (o0, o1);
 	  break;
 
 	case DIV:
-	  if (div_and_round_double (TRUNC_DIV_EXPR, 0, l1, h1, l2, h2,
-				    &lv, &hv, &lt, &ht))
+	  if (div_and_round_double (TRUNC_DIV_EXPR, 0,
+				    o0.low, o0.high, o1.low, o1.high,
+				    &res.low, &res.high,
+				    &tmp.low, &tmp.high))
 	    return 0;
 	  break;
 
 	case MOD:
-	  if (div_and_round_double (TRUNC_DIV_EXPR, 0, l1, h1, l2, h2,
-				    &lt, &ht, &lv, &hv))
+	  if (div_and_round_double (TRUNC_DIV_EXPR, 0,
+				    o0.low, o0.high, o1.low, o1.high,
+				    &tmp.low, &tmp.high,
+				    &res.low, &res.high))
 	    return 0;
 	  break;
 
 	case UDIV:
-	  if (div_and_round_double (TRUNC_DIV_EXPR, 1, l1, h1, l2, h2,
-				    &lv, &hv, &lt, &ht))
+	  if (div_and_round_double (TRUNC_DIV_EXPR, 1,
+				    o0.low, o0.high, o1.low, o1.high,
+				    &res.low, &res.high,
+				    &tmp.low, &tmp.high))
 	    return 0;
 	  break;
 
 	case UMOD:
-	  if (div_and_round_double (TRUNC_DIV_EXPR, 1, l1, h1, l2, h2,
-				    &lt, &ht, &lv, &hv))
+	  if (div_and_round_double (TRUNC_DIV_EXPR, 1,
+				    o0.low, o0.high, o1.low, o1.high,
+				    &tmp.low, &tmp.high,
+				    &res.low, &res.high))
 	    return 0;
 	  break;
 
 	case AND:
-	  lv = l1 & l2, hv = h1 & h2;
+	  res = double_int_and (o0, o1);
 	  break;
 
 	case IOR:
-	  lv = l1 | l2, hv = h1 | h2;
+	  res = double_int_ior (o0, o1);
 	  break;
 
 	case XOR:
-	  lv = l1 ^ l2, hv = h1 ^ h2;
+	  res = double_int_xor (o0, o1);
 	  break;
 
 	case SMIN:
-	  if (h1 < h2
-	      || (h1 == h2
-		  && ((unsigned HOST_WIDE_INT) l1
-		      < (unsigned HOST_WIDE_INT) l2)))
-	    lv = l1, hv = h1;
-	  else
-	    lv = l2, hv = h2;
+	  res = double_int_smin (o0, o1);
 	  break;
 
 	case SMAX:
-	  if (h1 > h2
-	      || (h1 == h2
-		  && ((unsigned HOST_WIDE_INT) l1
-		      > (unsigned HOST_WIDE_INT) l2)))
-	    lv = l1, hv = h1;
-	  else
-	    lv = l2, hv = h2;
+	  res = double_int_smax (o0, o1);
 	  break;
 
 	case UMIN:
-	  if ((unsigned HOST_WIDE_INT) h1 < (unsigned HOST_WIDE_INT) h2
-	      || (h1 == h2
-		  && ((unsigned HOST_WIDE_INT) l1
-		      < (unsigned HOST_WIDE_INT) l2)))
-	    lv = l1, hv = h1;
-	  else
-	    lv = l2, hv = h2;
+	  res = double_int_umin (o0, o1);
 	  break;
 
 	case UMAX:
-	  if ((unsigned HOST_WIDE_INT) h1 > (unsigned HOST_WIDE_INT) h2
-	      || (h1 == h2
-		  && ((unsigned HOST_WIDE_INT) l1
-		      > (unsigned HOST_WIDE_INT) l2)))
-	    lv = l1, hv = h1;
-	  else
-	    lv = l2, hv = h2;
+	  res = double_int_umax (o0, o1);
 	  break;
 
 	case LSHIFTRT:   case ASHIFTRT:
 	case ASHIFT:
 	case ROTATE:     case ROTATERT:
-	  if (SHIFT_COUNT_TRUNCATED)
-	    l2 &= (GET_MODE_BITSIZE (mode) - 1), h2 = 0;
-
-	  if (h2 != 0 || l2 >= GET_MODE_BITSIZE (mode))
-	    return 0;
-
-	  if (code == LSHIFTRT || code == ASHIFTRT)
-	    rshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
-			   code == ASHIFTRT);
-	  else if (code == ASHIFT)
-	    lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv, 1);
-	  else if (code == ROTATE)
-	    lrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
-	  else /* code == ROTATERT */
-	    rrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
+	  {
+	    unsigned HOST_WIDE_INT cnt;
+
+	    if (SHIFT_COUNT_TRUNCATED)
+	      o1 = double_int_zext (o1, GET_MODE_BITSIZE (mode));
+
+	    if (!double_int_fits_in_uhwi_p (o1)
+	        || double_int_to_uhwi (o1) >= GET_MODE_BITSIZE (mode))
+	      return 0;
+
+	    cnt = double_int_to_uhwi (o1);
+
+	    if (code == LSHIFTRT || code == ASHIFTRT)
+	      res = double_int_rshift (o0, cnt, GET_MODE_BITSIZE (mode),
+				       code == ASHIFTRT);
+	    else if (code == ASHIFT)
+	      res = double_int_lshift (o0, cnt, GET_MODE_BITSIZE (mode),
+				       true);
+	    else if (code == ROTATE)
+	      res = double_int_lrotate (o0, cnt, GET_MODE_BITSIZE (mode));
+	    else /* code == ROTATERT */
+	      res = double_int_rrotate (o0, cnt, GET_MODE_BITSIZE (mode));
+	  }
 	  break;
 
 	default:
 	  return 0;
 	}
 
-      return immed_double_const (lv, hv, mode);
+      return immed_double_int_const (res, mode);
     }
 
   if (CONST_INT_P (op0) && CONST_INT_P (op1)
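
The large hunk above replaces open-coded two-word (low/high HOST_WIDE_INT) arithmetic with GCC's double_int helpers. As a rough picture of what a call such as double_int_add does on that representation, here is a hedged sketch of double-word addition with carry; the struct and function names below are invented for the illustration, the real ones live in double-int.h:

    #include <assert.h>

    /* Simplified stand-in for double_int: an unsigned low word plus a
       signed high word, as in the code the hunk removes.  */
    struct dwint { unsigned long long low; long long high; };

    static struct dwint
    dwint_add (struct dwint a, struct dwint b)
    {
      struct dwint r;
      r.low = a.low + b.low;                         /* wraps modulo 2^64 */
      r.high = a.high + b.high + (r.low < a.low);    /* add the carry out */
      return r;
    }

    int main (void)
    {
      struct dwint a = { 0xFFFFFFFFFFFFFFFFULL, 0 }; /* 2^64 - 1 */
      struct dwint b = { 1, 0 };
      struct dwint r = dwint_add (a, b);             /* expect 2^64 */
      assert (r.low == 0 && r.high == 1);
      return 0;
    }
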
@@ -3416,16 +3472,16 @@
 
       if (width < HOST_BITS_PER_WIDE_INT)
         {
-          arg0 &= ((HOST_WIDE_INT) 1 << width) - 1;
-          arg1 &= ((HOST_WIDE_INT) 1 << width) - 1;
+          arg0 &= ((unsigned HOST_WIDE_INT) 1 << width) - 1;
+          arg1 &= ((unsigned HOST_WIDE_INT) 1 << width) - 1;
 
           arg0s = arg0;
-          if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1)))
-	    arg0s |= ((HOST_WIDE_INT) (-1) << width);
+          if (arg0s & ((unsigned HOST_WIDE_INT) 1 << (width - 1)))
+	    arg0s |= ((unsigned HOST_WIDE_INT) (-1) << width);
 
 	  arg1s = arg1;
-	  if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1)))
-	    arg1s |= ((HOST_WIDE_INT) (-1) << width);
+	  if (arg1s & ((unsigned HOST_WIDE_INT) 1 << (width - 1)))
+	    arg1s |= ((unsigned HOST_WIDE_INT) (-1) << width);
 	}
       else
 	{
@@ -3451,7 +3507,8 @@
 
 	case DIV:
 	  if (arg1s == 0
-	      || (arg0s == (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)
+	      || ((unsigned HOST_WIDE_INT) arg0s
+		  == (unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)
 		  && arg1s == -1))
 	    return 0;
 	  val = arg0s / arg1s;
@@ -3459,7 +3516,8 @@
 
 	case MOD:
 	  if (arg1s == 0
-	      || (arg0s == (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)
+	      || ((unsigned HOST_WIDE_INT) arg0s
+		  == (unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)
 		  && arg1s == -1))
 	    return 0;
 	  val = arg0s % arg1s;
@@ -3467,7 +3525,8 @@
 
 	case UDIV:
 	  if (arg1 == 0
-	      || (arg0s == (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)
+	      || ((unsigned HOST_WIDE_INT) arg0s
+		  == (unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)
 		  && arg1s == -1))
 	    return 0;
 	  val = (unsigned HOST_WIDE_INT) arg0 / arg1;
@@ -3475,7 +3534,8 @@
 
 	case UMOD:
 	  if (arg1 == 0
-	      || (arg0s == (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)
+	      || ((unsigned HOST_WIDE_INT) arg0s
+		  == (unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)
 		  && arg1s == -1))
 	    return 0;
 	  val = (unsigned HOST_WIDE_INT) arg0 % arg1;
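
The four division hunks above make the overflow check compare unsigned values. The case being screened out is the one signed division whose result cannot be represented: the most negative value divided by -1. A host-side illustration of why the folder returns 0 rather than evaluating it:

    #include <limits.h>
    #include <stdio.h>

    int main (void)
    {
      long long a = LLONG_MIN, b = -1;
      /* a / b would be 2^63, which does not fit in a signed 64-bit value;
         evaluating it is undefined behaviour and typically raises SIGFPE
         on x86.  That is exactly the operand pair the guards above refuse
         to constant-fold.  */
      if (a == LLONG_MIN && b == -1)
        puts ("refusing to fold LLONG_MIN / -1");
      else
        printf ("%lld\n", a / b);
      return 0;
    }
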
@@ -3514,7 +3574,7 @@
 
 	  /* Sign-extend the result for arithmetic right shifts.  */
 	  if (code == ASHIFTRT && arg0s < 0 && arg1 > 0)
-	    val |= ((HOST_WIDE_INT) -1) << (width - arg1);
+	    val |= ((unsigned HOST_WIDE_INT) (-1)) << (width - arg1);
 	  break;
 
 	case ROTATERT:
@@ -4394,14 +4454,14 @@
 	 we have to sign or zero-extend the values.  */
       if (width != 0 && width < HOST_BITS_PER_WIDE_INT)
 	{
-	  l0u &= ((HOST_WIDE_INT) 1 << width) - 1;
-	  l1u &= ((HOST_WIDE_INT) 1 << width) - 1;
-
-	  if (l0s & ((HOST_WIDE_INT) 1 << (width - 1)))
-	    l0s |= ((HOST_WIDE_INT) (-1) << width);
-
-	  if (l1s & ((HOST_WIDE_INT) 1 << (width - 1)))
-	    l1s |= ((HOST_WIDE_INT) (-1) << width);
+	  l0u &= ((unsigned HOST_WIDE_INT) 1 << width) - 1;
+	  l1u &= ((unsigned HOST_WIDE_INT) 1 << width) - 1;
+
+	  if (l0s & ((unsigned HOST_WIDE_INT) 1 << (width - 1)))
+	    l0s |= ((unsigned HOST_WIDE_INT) (-1) << width);
+
+	  if (l1s & ((unsigned HOST_WIDE_INT) 1 << (width - 1)))
+	    l1s |= ((unsigned HOST_WIDE_INT) (-1) << width);
 	}
       if (width != 0 && width <= HOST_BITS_PER_WIDE_INT)
 	h0u = h1u = 0, h0s = HWI_SIGN_EXTEND (l0s), h1s = HWI_SIGN_EXTEND (l1s);
@@ -4554,8 +4614,9 @@
 	    {
 	      int sign_bitnum = GET_MODE_BITSIZE (mode) - 1;
 	      int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
-			      && (INTVAL (inner_const)
-				  & ((HOST_WIDE_INT) 1 << sign_bitnum)));
+			      && (UINTVAL (inner_const)
+				  & ((unsigned HOST_WIDE_INT) 1
+				     << sign_bitnum)));
 
 	      switch (code)
 		{
@@ -4644,6 +4705,8 @@
 			    rtx op2)
 {
   unsigned int width = GET_MODE_BITSIZE (mode);
+  bool any_change = false;
+  rtx tem;
 
   /* VOIDmode means "infinite" precision.  */
   if (width == 0)
@@ -4651,6 +4714,31 @@
 
   switch (code)
     {
+    case FMA:
+      /* Simplify negations around the multiplication.  */
+      /* -a * -b + c  =>  a * b + c.  */
+      if (GET_CODE (op0) == NEG)
+	{
+	  tem = simplify_unary_operation (NEG, mode, op1, mode);
+	  if (tem)
+	    op1 = tem, op0 = XEXP (op0, 0), any_change = true;
+	}
+      else if (GET_CODE (op1) == NEG)
+	{
+	  tem = simplify_unary_operation (NEG, mode, op0, mode);
+	  if (tem)
+	    op0 = tem, op1 = XEXP (op1, 0), any_change = true;
+	}
+
+      /* Canonicalize the two multiplication operands.  */
+      /* a * -b + c  =>  -b * a + c.  */
+      if (swap_commutative_operands_p (op0, op1))
+	tem = op0, op0 = op1, op1 = tem, any_change = true;
+
+      if (any_change)
+	return gen_rtx_FMA (mode, op0, op1, op2);
+      return NULL_RTX;
+
     case SIGN_EXTRACT:
     case ZERO_EXTRACT:
       if (CONST_INT_P (op0)
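
The new FMA case mirrors the MULT change earlier in this patch: a negation on one multiplication operand is folded into the other when that negation simplifies, and the operands are then put into canonical order, using (-a) * (-b) + c == a * b + c and a * (-b) + c == (-b) * a + c. A quick numeric check of those identities with plain doubles (the RTL code of course manipulates rtx operands, not values; link with -lm):

    #include <assert.h>
    #include <math.h>

    int main (void)
    {
      double a = 3.0, b = 4.0, c = 0.5;
      assert (fma (-a, -b, c) == fma (a, b, c));   /* -a * -b + c == a * b + c */
      assert (fma (a, -b, c) == fma (-b, a, c));   /* commuted operand order */
      return 0;
    }
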
@@ -4660,22 +4748,22 @@
 	  && width <= (unsigned) HOST_BITS_PER_WIDE_INT)
 	{
 	  /* Extracting a bit-field from a constant */
-	  HOST_WIDE_INT val = INTVAL (op0);
+	  unsigned HOST_WIDE_INT val = UINTVAL (op0);
 
 	  if (BITS_BIG_ENDIAN)
-	    val >>= (GET_MODE_BITSIZE (op0_mode)
-		     - INTVAL (op2) - INTVAL (op1));
+	    val >>= GET_MODE_BITSIZE (op0_mode) - INTVAL (op2) - INTVAL (op1);
 	  else
 	    val >>= INTVAL (op2);
 
 	  if (HOST_BITS_PER_WIDE_INT != INTVAL (op1))
 	    {
 	      /* First zero-extend.  */
-	      val &= ((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1;
+	      val &= ((unsigned HOST_WIDE_INT) 1 << INTVAL (op1)) - 1;
 	      /* If desired, propagate sign bit.  */
 	      if (code == SIGN_EXTRACT
-		  && (val & ((HOST_WIDE_INT) 1 << (INTVAL (op1) - 1))))
-		val |= ~ (((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1);
+		  && (val & ((unsigned HOST_WIDE_INT) 1 << (INTVAL (op1) - 1)))
+		     != 0)
+		val |= ~ (((unsigned HOST_WIDE_INT) 1 << INTVAL (op1)) - 1);
 	    }
 
 	  /* Clear the bits that don't belong in our mode,
@@ -4683,9 +4771,9 @@
 	     So we get either a reasonable negative value or a reasonable
 	     unsigned value for this mode.  */
 	  if (width < HOST_BITS_PER_WIDE_INT
-	      && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
-		  != ((HOST_WIDE_INT) (-1) << (width - 1))))
-	    val &= ((HOST_WIDE_INT) 1 << width) - 1;
+	      && ((val & ((unsigned HOST_WIDE_INT) (-1) << (width - 1)))
+		  != ((unsigned HOST_WIDE_INT) (-1) << (width - 1))))
+	    val &= ((unsigned HOST_WIDE_INT) 1 << width) - 1;
 
 	  return gen_int_mode (val, mode);
 	}
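
The bit-field hunk above moves the extraction arithmetic to unsigned types. The steps are: shift the constant so the field starts at bit 0, mask it to the field width, and for SIGN_EXTRACT propagate the field's top bit. The same steps as a self-contained sketch (illustrative names; the final signed conversion assumes two's complement, as on GCC hosts):

    #include <assert.h>

    /* Extract `len` bits starting at bit `pos` of `x`, zero- or
       sign-extended.  Assumes 0 < len < 64 and pos + len <= 64.  */
    static long long
    extract_field (unsigned long long x, int pos, int len, int sign_extract)
    {
      unsigned long long val = x >> pos;                      /* align the field */
      val &= ((unsigned long long) 1 << len) - 1;             /* zero-extend */
      if (sign_extract && (val & ((unsigned long long) 1 << (len - 1))))
        val |= ~(((unsigned long long) 1 << len) - 1);        /* propagate sign */
      return (long long) val;
    }

    int main (void)
    {
      assert (extract_field (0xABCD, 4, 8, 0) == 0xBC);  /* ZERO_EXTRACT */
      assert (extract_field (0xABCD, 4, 8, 1) == -68);   /* SIGN_EXTRACT: 0xBC -> -68 */
      return 0;
    }
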
@@ -5043,10 +5131,10 @@
 	    for (i = 0;
 		 i < HOST_BITS_PER_WIDE_INT && i < elem_bitsize;
 		 i += value_bit)
-	      lo |= (HOST_WIDE_INT)(*vp++ & value_mask) << i;
+	      lo |= (unsigned HOST_WIDE_INT)(*vp++ & value_mask) << i;
 	    for (; i < elem_bitsize; i += value_bit)
-	      hi |= ((HOST_WIDE_INT)(*vp++ & value_mask)
-		     << (i - HOST_BITS_PER_WIDE_INT));
+	      hi |= (unsigned HOST_WIDE_INT)(*vp++ & value_mask)
+		     << (i - HOST_BITS_PER_WIDE_INT);
 
 	    /* immed_double_const doesn't call trunc_int_for_mode.  I don't
 	       know why.  */
@@ -5099,9 +5187,9 @@
 	    for (i = 0;
 		 i < HOST_BITS_PER_WIDE_INT && i < elem_bitsize;
 		 i += value_bit)
-	      f.data.low |= (HOST_WIDE_INT)(*vp++ & value_mask) << i;
+	      f.data.low |= (unsigned HOST_WIDE_INT)(*vp++ & value_mask) << i;
 	    for (; i < elem_bitsize; i += value_bit)
-	      f.data.high |= ((HOST_WIDE_INT)(*vp++ & value_mask)
+	      f.data.high |= ((unsigned HOST_WIDE_INT)(*vp++ & value_mask)
 			     << (i - HOST_BITS_PER_WIDE_INT));
 
 	    elems[elem] = CONST_FIXED_FROM_FIXED_VALUE (f, outer_submode);
@@ -5429,6 +5517,31 @@
 				   : byte + shifted_bytes));
     }
 
+  /* If we have a lowpart SUBREG of a right shift of MEM, make a new MEM
+     and try replacing the SUBREG and shift with it.  Don't do this if
+     the MEM has a mode-dependent address or if we would be widening it.  */
+
+  if ((GET_CODE (op) == LSHIFTRT
+       || GET_CODE (op) == ASHIFTRT)
+      && MEM_P (XEXP (op, 0))
+      && CONST_INT_P (XEXP (op, 1))
+      && GET_MODE_SIZE (outermode) < GET_MODE_SIZE (GET_MODE (op))
+      && (INTVAL (XEXP (op, 1)) % GET_MODE_BITSIZE (outermode)) == 0
+      && INTVAL (XEXP (op, 1)) > 0
+      && INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (innermode)
+      && ! mode_dependent_address_p (XEXP (XEXP (op, 0), 0))
+      && ! MEM_VOLATILE_P (XEXP (op, 0))
+      && byte == subreg_lowpart_offset (outermode, innermode)
+      && (GET_MODE_SIZE (outermode) >= UNITS_PER_WORD
+	  || WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN))
+    {
+      int shifted_bytes = INTVAL (XEXP (op, 1)) / BITS_PER_UNIT;
+      return adjust_address_nv (XEXP (op, 0), outermode,
+				(WORDS_BIG_ENDIAN
+				 ? byte - shifted_bytes
+				 : byte + shifted_bytes));
+    }
+
   return NULL_RTX;
 }
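
The SUBREG code added at the end observes that the low part of a right-shifted MEM is just the same memory at a byte offset, so it builds an adjusted MEM instead of keeping the shift. In plain C the observation looks like this (valid on a little-endian host; big-endian needs the opposite offset adjustment, as the WORDS_BIG_ENDIAN test above reflects):

    #include <assert.h>
    #include <stdint.h>
    #include <string.h>

    int main (void)
    {
      uint64_t x = 0x1122334455667788ULL;
      /* On a little-endian host, the low 32 bits of (x >> 32) are simply
         the four bytes stored at offset 4 -- the same fact the new code
         uses to turn (subreg (lshiftrt (mem) 32)) into an offset MEM.  */
      uint32_t via_shift = (uint32_t) (x >> 32);
      uint32_t via_bytes;
      memcpy (&via_bytes, (unsigned char *) &x + 4, sizeof via_bytes);
      assert (via_shift == via_bytes);    /* 0x11223344 on little-endian */
      return 0;
    }
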