diff gcc/tree-affine.c @ 16:04ced10e8804

gcc 7
author kono
date Fri, 27 Oct 2017 22:46:09 +0900
parents f6334be47118
children 84e7813d76e9
line wrap: on
line diff
--- a/gcc/tree-affine.c	Sun Aug 21 07:07:55 2011 +0900
+++ b/gcc/tree-affine.c	Fri Oct 27 22:46:09 2017 +0900
@@ -1,5 +1,5 @@
 /* Operations with affine combinations of trees.
-   Copyright (C) 2005, 2007, 2008, 2010 Free Software Foundation, Inc.
+   Copyright (C) 2005-2017 Free Software Foundation, Inc.
 
 This file is part of GCC.
 
@@ -20,21 +20,24 @@
 #include "config.h"
 #include "system.h"
 #include "coretypes.h"
+#include "backend.h"
+#include "rtl.h"
 #include "tree.h"
-#include "output.h"
+#include "gimple.h"
+#include "ssa.h"
 #include "tree-pretty-print.h"
-#include "tree-dump.h"
-#include "pointer-set.h"
+#include "fold-const.h"
 #include "tree-affine.h"
-#include "gimple.h"
-#include "flags.h"
+#include "gimplify.h"
+#include "dumpfile.h"
+#include "cfgexpand.h"
 
 /* Extends CST as appropriate for the affine combinations COMB.  */
 
-double_int
-double_int_ext_for_comb (double_int cst, aff_tree *comb)
+widest_int
+wide_int_ext_for_comb (const widest_int &cst, tree type)
 {
-  return double_int_sext (cst, TYPE_PRECISION (comb->type));
+  return wi::sext (cst, TYPE_PRECISION (type));
 }
 
 /* Initializes affine combination COMB so that its value is zero in TYPE.  */
@@ -42,19 +45,22 @@
 static void
 aff_combination_zero (aff_tree *comb, tree type)
 {
+  int i;
   comb->type = type;
-  comb->offset = double_int_zero;
+  comb->offset = 0;
   comb->n = 0;
+  for (i = 0; i < MAX_AFF_ELTS; i++)
+    comb->elts[i].coef = 0;
   comb->rest = NULL_TREE;
 }
 
 /* Sets COMB to CST.  */
 
 void
-aff_combination_const (aff_tree *comb, tree type, double_int cst)
+aff_combination_const (aff_tree *comb, tree type, const widest_int &cst)
 {
   aff_combination_zero (comb, type);
-  comb->offset = double_int_ext_for_comb (cst, comb);
+  comb->offset = wide_int_ext_for_comb (cst, comb->type);
 }
 
 /* Sets COMB to single element ELT.  */
@@ -66,38 +72,34 @@
 
   comb->n = 1;
   comb->elts[0].val = elt;
-  comb->elts[0].coef = double_int_one;
+  comb->elts[0].coef = 1;
 }
 
 /* Scales COMB by SCALE.  */
 
 void
-aff_combination_scale (aff_tree *comb, double_int scale)
+aff_combination_scale (aff_tree *comb, const widest_int &scale_in)
 {
   unsigned i, j;
 
-  scale = double_int_ext_for_comb (scale, comb);
-  if (double_int_one_p (scale))
+  widest_int scale = wide_int_ext_for_comb (scale_in, comb->type);
+  if (scale == 1)
     return;
 
-  if (double_int_zero_p (scale))
+  if (scale == 0)
     {
       aff_combination_zero (comb, comb->type);
       return;
     }
 
-  comb->offset
-    = double_int_ext_for_comb (double_int_mul (scale, comb->offset), comb);
+  comb->offset = wide_int_ext_for_comb (scale * comb->offset, comb->type);
   for (i = 0, j = 0; i < comb->n; i++)
     {
-      double_int new_coef;
-
-      new_coef
-	= double_int_ext_for_comb (double_int_mul (scale, comb->elts[i].coef),
-				   comb);
+      widest_int new_coef
+	= wide_int_ext_for_comb (scale * comb->elts[i].coef, comb->type);
       /* A coefficient may become zero due to overflow.  Remove the zero
 	 elements.  */
-      if (double_int_zero_p (new_coef))
+      if (new_coef == 0)
 	continue;
       comb->elts[j].coef = new_coef;
       comb->elts[j].val = comb->elts[i].val;
@@ -119,30 +121,28 @@
 	}
       else
 	comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
-				  double_int_to_tree (type, scale));
+				  wide_int_to_tree (type, scale));
     }
 }
 
 /* Adds ELT * SCALE to COMB.  */
 
 void
-aff_combination_add_elt (aff_tree *comb, tree elt, double_int scale)
+aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
 {
   unsigned i;
   tree type;
 
-  scale = double_int_ext_for_comb (scale, comb);
-  if (double_int_zero_p (scale))
+  widest_int scale = wide_int_ext_for_comb (scale_in, comb->type);
+  if (scale == 0)
     return;
 
   for (i = 0; i < comb->n; i++)
     if (operand_equal_p (comb->elts[i].val, elt, 0))
       {
-	double_int new_coef;
-
-	new_coef = double_int_add (comb->elts[i].coef, scale);
-	new_coef = double_int_ext_for_comb (new_coef, comb);
-	if (!double_int_zero_p (new_coef))
+	widest_int new_coef
+	  = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb->type);
+	if (new_coef != 0)
 	  {
 	    comb->elts[i].coef = new_coef;
 	    return;
@@ -154,7 +154,7 @@
 	if (comb->rest)
 	  {
 	    gcc_assert (comb->n == MAX_AFF_ELTS - 1);
-	    comb->elts[comb->n].coef = double_int_one;
+	    comb->elts[comb->n].coef = 1;
 	    comb->elts[comb->n].val = comb->rest;
 	    comb->rest = NULL_TREE;
 	    comb->n++;
@@ -173,12 +173,12 @@
   if (POINTER_TYPE_P (type))
     type = sizetype;
 
-  if (double_int_one_p (scale))
+  if (scale == 1)
     elt = fold_convert (type, elt);
   else
     elt = fold_build2 (MULT_EXPR, type,
 		       fold_convert (type, elt),
-		       double_int_to_tree (type, scale));
+		       wide_int_to_tree (type, scale));
 
   if (comb->rest)
     comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
@@ -190,9 +190,9 @@
 /* Adds CST to C.  */
 
 static void
-aff_combination_add_cst (aff_tree *c, double_int cst)
+aff_combination_add_cst (aff_tree *c, const widest_int &cst)
 {
-  c->offset = double_int_ext_for_comb (double_int_add (c->offset, cst), c);
+  c->offset = wide_int_ext_for_comb (c->offset + cst, c->type);
 }
 
 /* Adds COMB2 to COMB1.  */
@@ -206,7 +206,7 @@
   for (i = 0; i < comb2->n; i++)
     aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
   if (comb2->rest)
-    aff_combination_add_elt (comb1, comb2->rest, double_int_one);
+    aff_combination_add_elt (comb1, comb2->rest, 1);
 }
 
 /* Converts affine combination COMB to TYPE.  */
@@ -231,13 +231,12 @@
   if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
     return;
 
-  comb->offset = double_int_ext_for_comb (comb->offset, comb);
+  comb->offset = wide_int_ext_for_comb (comb->offset, comb->type);
   for (i = j = 0; i < comb->n; i++)
     {
-      double_int new_coef = double_int_ext_for_comb (comb->elts[i].coef, comb);
-      if (double_int_zero_p (new_coef))
+      if (comb->elts[i].coef == 0)
 	continue;
-      comb->elts[j].coef = new_coef;
+      comb->elts[j].coef = comb->elts[i].coef;
       comb->elts[j].val = fold_convert (type, comb->elts[i].val);
       j++;
     }
@@ -245,7 +244,7 @@
   comb->n = j;
   if (comb->n < MAX_AFF_ELTS && comb->rest)
     {
-      comb->elts[comb->n].coef = double_int_one;
+      comb->elts[comb->n].coef = 1;
       comb->elts[comb->n].val = comb->rest;
       comb->rest = NULL_TREE;
       comb->n++;
@@ -261,8 +260,8 @@
   enum tree_code code;
   tree cst, core, toffset;
   HOST_WIDE_INT bitpos, bitsize;
-  enum machine_mode mode;
-  int unsignedp, volatilep;
+  machine_mode mode;
+  int unsignedp, reversep, volatilep;
 
   STRIP_NOPS (expr);
 
@@ -270,7 +269,7 @@
   switch (code)
     {
     case INTEGER_CST:
-      aff_combination_const (comb, type, tree_to_double_int (expr));
+      aff_combination_const (comb, type, wi::to_widest (expr));
       return;
 
     case POINTER_PLUS_EXPR:
@@ -284,7 +283,7 @@
       tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
       tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
       if (code == MINUS_EXPR)
-	aff_combination_scale (&tmp, double_int_minus_one);
+	aff_combination_scale (&tmp, -1);
       aff_combination_add (comb, &tmp);
       return;
 
@@ -293,19 +292,19 @@
       if (TREE_CODE (cst) != INTEGER_CST)
 	break;
       tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
-      aff_combination_scale (comb, tree_to_double_int (cst));
+      aff_combination_scale (comb, wi::to_widest (cst));
       return;
 
     case NEGATE_EXPR:
       tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
-      aff_combination_scale (comb, double_int_minus_one);
+      aff_combination_scale (comb, -1);
       return;
 
     case BIT_NOT_EXPR:
       /* ~x = -x - 1 */
       tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
-      aff_combination_scale (comb, double_int_minus_one);
-      aff_combination_add_cst (comb, double_int_minus_one);
+      aff_combination_scale (comb, -1);
+      aff_combination_add_cst (comb, -1);
       return;
 
     case ADDR_EXPR:
@@ -319,15 +318,21 @@
 	  return;
 	}
       core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
-				  &toffset, &mode, &unsignedp, &volatilep,
-				  false);
+				  &toffset, &mode, &unsignedp, &reversep,
+				  &volatilep);
       if (bitpos % BITS_PER_UNIT != 0)
 	break;
-      aff_combination_const (comb, type,
-			     uhwi_to_double_int (bitpos / BITS_PER_UNIT));
-      core = build_fold_addr_expr (core);
+      aff_combination_const (comb, type, bitpos / BITS_PER_UNIT);
+      if (TREE_CODE (core) == MEM_REF)
+	{
+	  aff_combination_add_cst (comb, wi::to_widest (TREE_OPERAND (core, 1)));
+	  core = TREE_OPERAND (core, 0);
+	}
+      else
+	core = build_fold_addr_expr (core);
+
       if (TREE_CODE (core) == ADDR_EXPR)
-	aff_combination_add_elt (comb, core, double_int_one);
+	aff_combination_add_elt (comb, core, 1);
       else
 	{
 	  tree_to_aff_combination (core, type, &tmp);
@@ -359,6 +364,64 @@
       aff_combination_add (comb, &tmp);
       return;
 
+    CASE_CONVERT:
+      {
+	tree otype = TREE_TYPE (expr);
+	tree inner = TREE_OPERAND (expr, 0);
+	tree itype = TREE_TYPE (inner);
+	enum tree_code icode = TREE_CODE (inner);
+
+	/* In principle this is a valid folding, but it isn't necessarily
+	   an optimization, so do it here and not in fold_unary.  */
+	if ((icode == PLUS_EXPR || icode == MINUS_EXPR || icode == MULT_EXPR)
+	    && TREE_CODE (itype) == INTEGER_TYPE
+	    && TREE_CODE (otype) == INTEGER_TYPE
+	    && TYPE_PRECISION (otype) > TYPE_PRECISION (itype))
+	  {
+	    tree op0 = TREE_OPERAND (inner, 0), op1 = TREE_OPERAND (inner, 1);
+
+	    /* If inner type has undefined overflow behavior, fold conversion
+	       for below two cases:
+		 (T1)(X *+- CST) -> (T1)X *+- (T1)CST
+		 (T1)(X + X)     -> (T1)X + (T1)X.  */
+	    if (TYPE_OVERFLOW_UNDEFINED (itype)
+		&& (TREE_CODE (op1) == INTEGER_CST
+		    || (icode == PLUS_EXPR && operand_equal_p (op0, op1, 0))))
+	      {
+		op0 = fold_convert (otype, op0);
+		op1 = fold_convert (otype, op1);
+		expr = fold_build2 (icode, otype, op0, op1);
+		tree_to_aff_combination (expr, type, comb);
+		return;
+	      }
+	    wide_int minv, maxv;
+	    /* If inner type has wrapping overflow behavior, fold conversion
+	       for below case:
+		 (T1)(X - CST) -> (T1)X - (T1)CST
+	       if X - CST doesn't overflow by range information.  Also handle
+	       (T1)(X + CST) as (T1)(X - (-CST)).  */
+	    if (TYPE_UNSIGNED (itype)
+		&& TYPE_OVERFLOW_WRAPS (itype)
+		&& TREE_CODE (op0) == SSA_NAME
+		&& TREE_CODE (op1) == INTEGER_CST
+		&& icode != MULT_EXPR
+		&& get_range_info (op0, &minv, &maxv) == VR_RANGE)
+	      {
+		if (icode == PLUS_EXPR)
+		  op1 = wide_int_to_tree (itype, -wi::to_wide (op1));
+		if (wi::geu_p (minv, wi::to_wide (op1)))
+		  {
+		    op0 = fold_convert (otype, op0);
+		    op1 = fold_convert (otype, op1);
+		    expr = fold_build2 (MINUS_EXPR, otype, op0, op1);
+		    tree_to_aff_combination (expr, type, comb);
+		    return;
+		  }
+	      }
+	  }
+      }
+      break;
+
     default:
       break;
     }
@@ -370,61 +433,41 @@
    combination COMB.  */
 
 static tree
-add_elt_to_tree (tree expr, tree type, tree elt, double_int scale,
-		 aff_tree *comb)
+add_elt_to_tree (tree expr, tree type, tree elt, const widest_int &scale_in)
 {
   enum tree_code code;
-  tree type1 = type;
-  if (POINTER_TYPE_P (type))
-    type1 = sizetype;
+
+  widest_int scale = wide_int_ext_for_comb (scale_in, type);
 
-  scale = double_int_ext_for_comb (scale, comb);
-  elt = fold_convert (type1, elt);
-
-  if (double_int_one_p (scale))
+  elt = fold_convert (type, elt);
+  if (scale == 1)
     {
       if (!expr)
-	return fold_convert (type, elt);
+	return elt;
 
-      if (POINTER_TYPE_P (type))
-        return fold_build2 (POINTER_PLUS_EXPR, type, expr, elt);
       return fold_build2 (PLUS_EXPR, type, expr, elt);
     }
 
-  if (double_int_minus_one_p (scale))
+  if (scale == -1)
     {
       if (!expr)
-	return fold_convert (type, fold_build1 (NEGATE_EXPR, type1, elt));
+	return fold_build1 (NEGATE_EXPR, type, elt);
 
-      if (POINTER_TYPE_P (type))
-	{
-	  elt = fold_build1 (NEGATE_EXPR, type1, elt);
-	  return fold_build2 (POINTER_PLUS_EXPR, type, expr, elt);
-	}
       return fold_build2 (MINUS_EXPR, type, expr, elt);
     }
 
   if (!expr)
-    return fold_convert (type,
-			 fold_build2 (MULT_EXPR, type1, elt,
-				      double_int_to_tree (type1, scale)));
+    return fold_build2 (MULT_EXPR, type, elt, wide_int_to_tree (type, scale));
 
-  if (double_int_negative_p (scale))
+  if (wi::neg_p (scale))
     {
       code = MINUS_EXPR;
-      scale = double_int_neg (scale);
+      scale = -scale;
     }
   else
     code = PLUS_EXPR;
 
-  elt = fold_build2 (MULT_EXPR, type1, elt,
-		     double_int_to_tree (type1, scale));
-  if (POINTER_TYPE_P (type))
-    {
-      if (code == MINUS_EXPR)
-        elt = fold_build1 (NEGATE_EXPR, type1, elt);
-      return fold_build2 (POINTER_PLUS_EXPR, type, expr, elt);
-    }
+  elt = fold_build2 (MULT_EXPR, type, elt, wide_int_to_tree (type, scale));
   return fold_build2 (code, type, expr, elt);
 }
 
@@ -433,37 +476,48 @@
 tree
 aff_combination_to_tree (aff_tree *comb)
 {
-  tree type = comb->type;
-  tree expr = NULL_TREE;
+  tree type = comb->type, base = NULL_TREE, expr = NULL_TREE;
   unsigned i;
-  double_int off, sgn;
-  tree type1 = type;
-  if (POINTER_TYPE_P (type))
-    type1 = sizetype;
+  widest_int off, sgn;
 
   gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);
 
-  for (i = 0; i < comb->n; i++)
-    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
-			    comb);
+  i = 0;
+  if (POINTER_TYPE_P (type))
+    {
+      type = sizetype;
+      if (comb->n > 0 && comb->elts[0].coef == 1
+	  && POINTER_TYPE_P (TREE_TYPE (comb->elts[0].val)))
+	{
+	  base = comb->elts[0].val;
+	  ++i;
+	}
+    }
+
+  for (; i < comb->n; i++)
+    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef);
 
   if (comb->rest)
-    expr = add_elt_to_tree (expr, type, comb->rest, double_int_one, comb);
+    expr = add_elt_to_tree (expr, type, comb->rest, 1);
 
   /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
      unsigned.  */
-  if (double_int_negative_p (comb->offset))
+  if (wi::neg_p (comb->offset))
     {
-      off = double_int_neg (comb->offset);
-      sgn = double_int_minus_one;
+      off = -comb->offset;
+      sgn = -1;
     }
   else
     {
       off = comb->offset;
-      sgn = double_int_one;
+      sgn = 1;
     }
-  return add_elt_to_tree (expr, type, double_int_to_tree (type1, off), sgn,
-			  comb);
+  expr = add_elt_to_tree (expr, type, wide_int_to_tree (type, off), sgn);
+
+  if (base)
+    return fold_build_pointer_plus (base, expr);
+  else
+    return fold_convert (comb->type, expr);
 }
 
 /* Copies the tree elements of COMB to ensure that they are not shared.  */
@@ -489,7 +543,7 @@
     comb->elts[m] = comb->elts[comb->n];
   if (comb->rest)
     {
-      comb->elts[comb->n].coef = double_int_one;
+      comb->elts[comb->n].coef = 1;
       comb->elts[comb->n].val = comb->rest;
       comb->rest = NULL_TREE;
       comb->n++;
@@ -501,7 +555,7 @@
 
 
 static void
-aff_combination_add_product (aff_tree *c, double_int coef, tree val,
+aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
 			     aff_tree *r)
 {
   unsigned i;
@@ -517,8 +571,7 @@
 			      fold_convert (type, val));
 	}
 
-      aff_combination_add_elt (r, aval,
-			       double_int_mul (coef, c->elts[i].coef));
+      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
     }
 
   if (c->rest)
@@ -535,10 +588,9 @@
     }
 
   if (val)
-    aff_combination_add_elt (r, val,
-			     double_int_mul (coef, c->offset));
+    aff_combination_add_elt (r, val, coef * c->offset);
   else
-    aff_combination_add_cst (r, double_int_mul (coef, c->offset));
+    aff_combination_add_cst (r, coef * c->offset);
 }
 
 /* Multiplies C1 by C2, storing the result to R  */
@@ -554,7 +606,7 @@
   for (i = 0; i < c2->n; i++)
     aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
   if (c2->rest)
-    aff_combination_add_product (c1, double_int_one, c2->rest, r);
+    aff_combination_add_product (c1, 1, c2->rest, r);
   aff_combination_add_product (c1, c2->offset, NULL, r);
 }
 
@@ -595,14 +647,13 @@
 
 void
 aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
-			struct pointer_map_t **cache ATTRIBUTE_UNUSED)
+			hash_map<tree, name_expansion *> **cache)
 {
   unsigned i;
   aff_tree to_add, current, curre;
   tree e, rhs;
-  gimple def;
-  double_int scale;
-  void **slot;
+  gimple *def;
+  widest_int scale;
   struct name_expansion *exp;
 
   aff_combination_zero (&to_add, comb->type);
@@ -615,7 +666,7 @@
       type = TREE_TYPE (e);
       name = e;
       /* Look through some conversions.  */
-      if (TREE_CODE (e) == NOP_EXPR
+      if (CONVERT_EXPR_P (e)
           && (TYPE_PRECISION (type)
 	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
 	name = TREE_OPERAND (e, 0);
@@ -638,37 +689,19 @@
 	continue;
 
       if (!*cache)
-	*cache = pointer_map_create ();
-      slot = pointer_map_insert (*cache, e);
-      exp = (struct name_expansion *) *slot;
+	*cache = new hash_map<tree, name_expansion *>;
+      name_expansion **slot = &(*cache)->get_or_insert (e);
+      exp = *slot;
 
       if (!exp)
 	{
 	  exp = XNEW (struct name_expansion);
 	  exp->in_progress = 1;
 	  *slot = exp;
-	  /* In principle this is a generally valid folding, but
-	     it is not unconditionally an optimization, so do it
-	     here and not in fold_unary.  */
-	  /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
-	     than the type of X and overflow for the type of X is
-	     undefined.  */
-	  if (e != name
-	      && INTEGRAL_TYPE_P (type)
-	      && INTEGRAL_TYPE_P (TREE_TYPE (name))
-	      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
-	      && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (name))
-	      && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
-	      && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
-	    rhs = fold_build2 (code, type,
-			       fold_convert (type, gimple_assign_rhs1 (def)),
-			       fold_convert (type, gimple_assign_rhs2 (def)));
-	  else
-	    {
-	      rhs = gimple_assign_rhs_to_tree (def);
-	      if (e != name)
-		rhs = fold_convert (type, rhs);
-	    }
+	  rhs = gimple_assign_rhs_to_tree (def);
+	  if (e != name)
+	    rhs = fold_convert (type, rhs);
+
 	  tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
 	  exp->expansion = current;
 	  exp->in_progress = 0;
@@ -686,7 +719,7 @@
          it from COMB.  */
       scale = comb->elts[i].coef;
       aff_combination_zero (&curre, comb->type);
-      aff_combination_add_elt (&curre, e, double_int_neg (scale));
+      aff_combination_add_elt (&curre, e, -scale);
       aff_combination_scale (&current, scale);
       aff_combination_add (&to_add, &current);
       aff_combination_add (&to_add, &curre);
@@ -706,22 +739,19 @@
 
 void
 tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
-				struct pointer_map_t **cache)
+				hash_map<tree, name_expansion *> **cache)
 {
   tree_to_aff_combination (expr, type, comb);
   aff_combination_expand (comb, cache);
 }
 
 /* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
-   pointer_map_traverse.  */
+   hash_map::traverse.  */
 
-static bool
-free_name_expansion (const void *key ATTRIBUTE_UNUSED, void **value,
-		     void *data ATTRIBUTE_UNUSED)
+bool
+free_name_expansion (tree const &, name_expansion **value, void *)
 {
-  struct name_expansion *const exp = (struct name_expansion *) *value;
-
-  free (exp);
+  free (*value);
   return true;
 }
 
@@ -729,40 +759,44 @@
    tree_to_aff_combination_expand.  */
 
 void
-free_affine_expand_cache (struct pointer_map_t **cache)
+free_affine_expand_cache (hash_map<tree, name_expansion *> **cache)
 {
   if (!*cache)
     return;
 
-  pointer_map_traverse (*cache, free_name_expansion, NULL);
-  pointer_map_destroy (*cache);
+  (*cache)->traverse<void *, free_name_expansion> (NULL);
+  delete (*cache);
   *cache = NULL;
 }
 
 /* If VAL != CST * DIV for any constant CST, returns false.
-   Otherwise, if VAL != 0 (and hence CST != 0), and *MULT_SET is true,
-   additionally compares CST and MULT, and if they are different,
-   returns false.  Finally, if neither of these two cases occur,
-   true is returned, and if CST != 0, CST is stored to MULT and
-   MULT_SET is set to true.  */
+   Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
+   and if they are different, returns false.  Finally, if neither of these
+   two cases occur, true is returned, and CST is stored to MULT and MULT_SET
+   is set to true.  */
 
 static bool
-double_int_constant_multiple_p (double_int val, double_int div,
-				bool *mult_set, double_int *mult)
+wide_int_constant_multiple_p (const widest_int &val, const widest_int &div,
+			      bool *mult_set, widest_int *mult)
 {
-  double_int rem, cst;
+  widest_int rem, cst;
 
-  if (double_int_zero_p (val))
-    return true;
+  if (val == 0)
+    {
+      if (*mult_set && *mult != 0)
+	return false;
+      *mult_set = true;
+      *mult = 0;
+      return true;
+    }
 
-  if (double_int_zero_p (div))
+  if (div == 0)
     return false;
 
-  cst = double_int_sdivmod (val, div, FLOOR_DIV_EXPR, &rem);
-  if (!double_int_zero_p (rem))
+  if (!wi::multiple_of_p (val, div, SIGNED, &cst))
     return false;
 
-  if (*mult_set && !double_int_equal_p (*mult, cst))
+  if (*mult_set && *mult != cst)
     return false;
 
   *mult_set = true;
@@ -775,14 +809,14 @@
 
 bool
 aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
-				     double_int *mult)
+				     widest_int *mult)
 {
   bool mult_set = false;
   unsigned i;
 
-  if (val->n == 0 && double_int_zero_p (val->offset))
+  if (val->n == 0 && val->offset == 0)
     {
-      *mult = double_int_zero;
+      *mult = 0;
       return true;
     }
   if (val->n != div->n)
@@ -791,8 +825,8 @@
   if (val->rest || div->rest)
     return false;
 
-  if (!double_int_constant_multiple_p (val->offset, div->offset,
-				       &mult_set, mult))
+  if (!wide_int_constant_multiple_p (val->offset, div->offset,
+				     &mult_set, mult))
     return false;
 
   for (i = 0; i < div->n; i++)
@@ -801,8 +835,8 @@
 	      = aff_combination_find_elt (val, div->elts[i].val, NULL);
       if (!elt)
 	return false;
-      if (!double_int_constant_multiple_p (elt->coef, div->elts[i].coef,
-					   &mult_set, mult))
+      if (!wide_int_constant_multiple_p (elt->coef, div->elts[i].coef,
+					 &mult_set, mult))
 	return false;
     }
 
@@ -812,17 +846,17 @@
 
 /* Prints the affine VAL to the FILE. */
 
-void
+static void
 print_aff (FILE *file, aff_tree *val)
 {
   unsigned i;
-  bool uns = TYPE_UNSIGNED (val->type);
+  signop sgn = TYPE_SIGN (val->type);
   if (POINTER_TYPE_P (val->type))
-    uns = false;
+    sgn = SIGNED;
   fprintf (file, "{\n  type = ");
   print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
   fprintf (file, "\n  offset = ");
-  dump_double_int (file, val->offset, uns);
+  print_dec (val->offset, file, sgn);
   if (val->n > 0)
     {
       fprintf (file, "\n  elements = {\n");
@@ -832,7 +866,7 @@
 	  print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);
 
 	  fprintf (file, " * ");
-	  dump_double_int (file, val->elts[i].coef, uns);
+	  print_dec (val->elts[i].coef, file, sgn);
 	  if (i != val->n - 1)
 	    fprintf (file, ", \n");
 	}
@@ -855,19 +889,20 @@
   fprintf (stderr, "\n");
 }
 
-/* Returns address of the reference REF in ADDR.  The size of the accessed
-   location is stored to SIZE.  */
+/* Computes address of the reference REF in ADDR.  The size of the accessed
+   location is stored to SIZE.  Returns the ultimate containing object to
+   which REF refers.  */
 
-void
-get_inner_reference_aff (tree ref, aff_tree *addr, double_int *size)
+tree
+get_inner_reference_aff (tree ref, aff_tree *addr, widest_int *size)
 {
   HOST_WIDE_INT bitsize, bitpos;
   tree toff;
-  enum machine_mode mode;
-  int uns, vol;
+  machine_mode mode;
+  int uns, rev, vol;
   aff_tree tmp;
   tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
-				   &uns, &vol, false);
+				   &uns, &rev, &vol);
   tree base_addr = build_fold_addr_expr (base);
 
   /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */
@@ -880,10 +915,35 @@
       aff_combination_add (addr, &tmp);
     }
 
-  aff_combination_const (&tmp, sizetype,
-			 shwi_to_double_int (bitpos / BITS_PER_UNIT));
+  aff_combination_const (&tmp, sizetype, bitpos / BITS_PER_UNIT);
   aff_combination_add (addr, &tmp);
 
-  *size = shwi_to_double_int ((bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
+  *size = (bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
+
+  return base;
 }
 
+/* Returns true if a region of size SIZE1 at position 0 and a region of
+   size SIZE2 at position DIFF cannot overlap.  */
+
+bool
+aff_comb_cannot_overlap_p (aff_tree *diff, const widest_int &size1,
+			   const widest_int &size2)
+{
+  /* Unless the difference is a constant, we fail.  */
+  if (diff->n != 0)
+    return false;
+
+  if (wi::neg_p (diff->offset))
+    {
+      /* The second object is before the first one, we succeed if the last
+	 element of the second object is before the start of the first one.  */
+      return wi::neg_p (diff->offset + size2 - 1);
+    }
+  else
+    {
+      /* We succeed if the second object starts after the first one ends.  */
+      return size1 <= diff->offset;
+    }
+}
+