diff gcc/gimple-fold.c @ 67:f6334be47118

update gcc from gcc-4.6-20100522 to gcc-4.6-20110318
author nobuyasu <dimolto@cr.ie.u-ryukyu.ac.jp>
date Tue, 22 Mar 2011 17:18:12 +0900
parents b7f97abdc517
children 04ced10e8804
--- a/gcc/gimple-fold.c	Tue May 25 18:58:51 2010 +0900
+++ b/gcc/gimple-fold.c	Tue Mar 22 17:18:12 2011 +0900
@@ -24,23 +24,121 @@
 #include "tm.h"
 #include "tree.h"
 #include "flags.h"
-#include "rtl.h"
-#include "tm_p.h"
-#include "ggc.h"
-#include "basic-block.h"
-#include "output.h"
-#include "expr.h"
 #include "function.h"
-#include "diagnostic.h"
-#include "timevar.h"
 #include "tree-dump.h"
 #include "tree-flow.h"
 #include "tree-pass.h"
 #include "tree-ssa-propagate.h"
-#include "value-prof.h"
-#include "langhooks.h"
 #include "target.h"
 
+/* Return true when DECL can be referenced from the current unit.
+   We can get declarations that cannot be referenced for
+   various reasons:
+
+     1) When analyzing C++ virtual tables.
+	C++ virtual tables do have known constructors even
+	when they are keyed to another compilation unit.
+	Those tables can contain pointers to methods and vars
+	in other units.  Those methods have both STATIC and EXTERNAL
+	set.
+     2) In WHOPR mode devirtualization might lead to a reference
+	to a method that was partitioned elsewhere.
+	In this case we have a static VAR_DECL or FUNCTION_DECL
+	that has no corresponding callgraph/varpool node
+	declaring the body.
+     3) COMDAT functions referred to by external vtables that
+        we devirtualize only during the final compilation stage.
+        At this point we have already decided not to output
+        the function body, and thus we cannot reference the symbol
+        directly.  */
+
+static bool
+can_refer_decl_in_current_unit_p (tree decl)
+{
+  struct varpool_node *vnode;
+  struct cgraph_node *node;
+
+  if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
+    return true;
+  /* The external flag is set, so we are dealing with a C++
+     reference to a static object from another file.  */
+  if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
+      && TREE_CODE (decl) == VAR_DECL)
+    {
+      /* Just be sure this is not a bug in the frontend setting
+	 the flags incorrectly.  Such variables should never
+	 be finalized.  */
+      gcc_checking_assert (!(vnode = varpool_get_node (decl))
+			   || !vnode->finalized);
+      return false;
+    }
+  /* When a function is public, we can always introduce a new reference.
+     The exception is COMDAT functions, where introducing a direct
+     reference implies the need to include the function body in the
+     current unit.  */
+  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
+    return true;
+  /* We are not at the ltrans stage, so don't worry about WHOPR.
+     Also, while we are still gimplifying, all referred COMDAT functions
+     will be produced.  */
+  if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
+    return true;
+  /* If we already output the function body, we are safe.  */
+  if (TREE_ASM_WRITTEN (decl))
+    return true;
+  if (TREE_CODE (decl) == FUNCTION_DECL)
+    {
+      node = cgraph_get_node (decl);
+      /* Check that we still have the function body and that we have not
+         yet decided to eliminate the offline copy of the function.
+         The second check is important when devirtualization happens during
+         the final compilation stage, when making a new reference no longer
+         causes the callee to be compiled.  */
+      if (!node || !node->analyzed || node->global.inlined_to)
+	return false;
+    }
+  else if (TREE_CODE (decl) == VAR_DECL)
+    {
+      vnode = varpool_get_node (decl);
+      if (!vnode || !vnode->finalized)
+	return false;
+    }
+  return true;
+}
+
+/* CVAL is a value taken from the DECL_INITIAL of a variable.  Try to
+   transform it into a form acceptable to is_gimple_min_invariant.  */
+
+tree
+canonicalize_constructor_val (tree cval)
+{
+  STRIP_NOPS (cval);
+  if (TREE_CODE (cval) == POINTER_PLUS_EXPR)
+    {
+      tree t = maybe_fold_offset_to_address (EXPR_LOCATION (cval),
+					     TREE_OPERAND (cval, 0),
+					     TREE_OPERAND (cval, 1),
+					     TREE_TYPE (cval));
+      if (t)
+	cval = t;
+    }
+  if (TREE_CODE (cval) == ADDR_EXPR)
+    {
+      tree base = get_base_address (TREE_OPERAND (cval, 0));
+
+      if (base
+	  && (TREE_CODE (base) == VAR_DECL
+	      || TREE_CODE (base) == FUNCTION_DECL)
+	  && !can_refer_decl_in_current_unit_p (base))
+	return NULL_TREE;
+      if (base && TREE_CODE (base) == VAR_DECL)
+	add_referenced_var (base);
+      /* We never have the chance to fixup types in global initializers
+         during gimplification.  Do so here.  */
+      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
+	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
+    }
+  return cval;
+}
 
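What canonicalize_constructor_val buys us, sketched as a hypothetical C test case (not part of this change): an initializer that the frontend encodes as pointer arithmetic is rewritten into a plain ADDR_EXPR that is_gimple_min_invariant accepts, so the next hunk's get_symbol_constant_value can fold loads through it.

    /* Hypothetical illustration: the initializer of P may reach the
       middle end as roughly "(int *) &a + 4" (a POINTER_PLUS_EXPR).
       canonicalize_constructor_val rewrites it to "&a[1]", a form
       is_gimple_min_invariant accepts.  */
    static int a[4];
    static int *const p = &a[1];

    int
    read_through_p (void)
    {
      return *p;   /* foldable once the constructor value is canonical */
    }
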
 /* If SYM is a constant variable with known value, return the value.
    NULL_TREE is returned otherwise.  */
@@ -48,38 +146,24 @@
 tree
 get_symbol_constant_value (tree sym)
 {
-  if (TREE_STATIC (sym)
-      && (TREE_READONLY (sym)
-	  || TREE_CODE (sym) == CONST_DECL))
+  if (const_value_known_p (sym))
     {
       tree val = DECL_INITIAL (sym);
       if (val)
 	{
-	  STRIP_NOPS (val);
-	  if (is_gimple_min_invariant (val))
-	    {
-	      if (TREE_CODE (val) == ADDR_EXPR)
-		{
-		  tree base = get_base_address (TREE_OPERAND (val, 0));
-		  if (base && TREE_CODE (base) == VAR_DECL)
-		    {
-		      TREE_ADDRESSABLE (base) = 1;
-		      if (gimple_referenced_vars (cfun))
-			add_referenced_var (base);
-		    }
-		}
-	      return val;
-	    }
+	  val = canonicalize_constructor_val (val);
+	  if (val && is_gimple_min_invariant (val))
+	    return val;
+	  else
+	    return NULL_TREE;
 	}
       /* Variables declared 'const' without an initializer
 	 have zero as the initializer if they may not be
 	 overridden at link or run time.  */
       if (!val
-	  && !DECL_EXTERNAL (sym)
-	  && targetm.binds_local_p (sym)
           && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
 	       || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
-	return fold_convert (TREE_TYPE (sym), integer_zero_node);
+	return build_zero_cst (TREE_TYPE (sym));
     }
 
   return NULL_TREE;
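
Both paths of the rewritten get_symbol_constant_value, as a hedged C example (assumed behavior, not a testcase shipped with the patch): a const with a known initializer folds to that value, and a locally bound const without one folds to the zero built by build_zero_cst.

    static const int x = 42;   /* const_value_known_p, initializer present */
    static const int y;        /* no initializer: assumed zero, cannot be
                                  overridden at link or run time */

    int
    sum (void)
    {
      return x + y;            /* foldable to 42 */
    }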
@@ -92,7 +176,7 @@
 bool
 may_propagate_address_into_dereference (tree addr, tree deref)
 {
-  gcc_assert (INDIRECT_REF_P (deref)
+  gcc_assert (TREE_CODE (deref) == MEM_REF
 	      && TREE_CODE (addr) == ADDR_EXPR);
 
   /* Don't propagate if ADDR's operand has incomplete type.  */
@@ -118,15 +202,12 @@
 
 
 /* A subroutine of fold_stmt.  Attempts to fold *(A+O) to A[X].
-   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
-   is the desired result type.
+   BASE is an array type.  OFFSET is a byte displacement.
 
    LOC is the location of the original expression.  */
 
 static tree
-maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset,
-				tree orig_type,
-				bool allow_negative_idx)
+maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset)
 {
   tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
   tree array_type, elt_type, elt_size;
@@ -155,8 +236,6 @@
   if (TREE_CODE (array_type) != ARRAY_TYPE)
     return NULL_TREE;
   elt_type = TREE_TYPE (array_type);
-  if (!useless_type_conversion_p (orig_type, elt_type))
-    return NULL_TREE;
 
   /* Use signed size type for intermediate computation on the index.  */
   idx_type = ssizetype;
@@ -229,34 +308,22 @@
        char *(c[4]);
        c[3][2];
      should not be simplified into (*c)[14] or tree-vrp will
-     give false warnings.  The same is true for
-       struct A { long x; char d[0]; } *a;
-       (char *)a - 4;
-     which should be not folded to &a->d[-8].  */
-  if (domain_type
-      && TYPE_MAX_VALUE (domain_type)
-      && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
+     give false warnings.
+     This is only an issue for multi-dimensional arrays.  */
+  if (TREE_CODE (elt_type) == ARRAY_TYPE
+      && domain_type)
     {
-      tree up_bound = TYPE_MAX_VALUE (domain_type);
-
-      if (tree_int_cst_lt (up_bound, idx)
-	  /* Accesses after the end of arrays of size 0 (gcc
-	     extension) and 1 are likely intentional ("struct
-	     hack").  */
-	  && compare_tree_int (up_bound, 1) > 0)
+      if (TYPE_MAX_VALUE (domain_type)
+	  && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST
+	  && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type), idx))
+	return NULL_TREE;
+      else if (TYPE_MIN_VALUE (domain_type)
+	       && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
+	       && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
+	return NULL_TREE;
+      else if (compare_tree_int (idx, 0) < 0)
 	return NULL_TREE;
     }
-  if (domain_type
-      && TYPE_MIN_VALUE (domain_type))
-    {
-      if (!allow_negative_idx
-	  && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
-	  && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
-	return NULL_TREE;
-    }
-  else if (!allow_negative_idx
-	   && compare_tree_int (idx, 0) < 0)
-    return NULL_TREE;
 
   {
     tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
@@ -266,340 +333,55 @@
 }
 
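The multi-dimensional restriction above, written out as a compilable illustration (assumed example): with a true two-dimensional array, collapsing the two-level access into one index would step outside the inner dimension's declared bounds and provoke false tree-vrp warnings.

    char c[4][4];

    char
    pick (void)
    {
      /* c[3][2] sits at byte offset 14, but folding it to the
         single-index form (*c)[14] would index the inner char[4]
         out of bounds, so the fold is refused when the element
         type is itself an array.  */
      return c[3][2];
    }
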
 
-/* Attempt to fold *(S+O) to S.X.
-   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
-   is the desired result type.
-
-   LOC is the location of the original expression.  */
-
-static tree
-maybe_fold_offset_to_component_ref (location_t loc, tree record_type,
-				    tree base, tree offset, tree orig_type)
-{
-  tree f, t, field_type, tail_array_field, field_offset;
-  tree ret;
-  tree new_base;
-
-  if (TREE_CODE (record_type) != RECORD_TYPE
-      && TREE_CODE (record_type) != UNION_TYPE
-      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
-    return NULL_TREE;
-
-  /* Short-circuit silly cases.  */
-  if (useless_type_conversion_p (record_type, orig_type))
-    return NULL_TREE;
-
-  tail_array_field = NULL_TREE;
-  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
-    {
-      int cmp;
-
-      if (TREE_CODE (f) != FIELD_DECL)
-	continue;
-      if (DECL_BIT_FIELD (f))
-	continue;
-
-      if (!DECL_FIELD_OFFSET (f))
-	continue;
-      field_offset = byte_position (f);
-      if (TREE_CODE (field_offset) != INTEGER_CST)
-	continue;
-
-      /* ??? Java creates "interesting" fields for representing base classes.
-	 They have no name, and have no context.  With no context, we get into
-	 trouble with nonoverlapping_component_refs_p.  Skip them.  */
-      if (!DECL_FIELD_CONTEXT (f))
-	continue;
-
-      /* The previous array field isn't at the end.  */
-      tail_array_field = NULL_TREE;
-
-      /* Check to see if this offset overlaps with the field.  */
-      cmp = tree_int_cst_compare (field_offset, offset);
-      if (cmp > 0)
-	continue;
-
-      field_type = TREE_TYPE (f);
-
-      /* Here we exactly match the offset being checked.  If the types match,
-	 then we can return that field.  */
-      if (cmp == 0
-	  && useless_type_conversion_p (orig_type, field_type))
-	{
-	  t = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
-	  return t;
-	}
-
-      /* Don't care about offsets into the middle of scalars.  */
-      if (!AGGREGATE_TYPE_P (field_type))
-	continue;
-
-      /* Check for array at the end of the struct.  This is often
-	 used as for flexible array members.  We should be able to
-	 turn this into an array access anyway.  */
-      if (TREE_CODE (field_type) == ARRAY_TYPE)
-	tail_array_field = f;
-
-      /* Check the end of the field against the offset.  */
-      if (!DECL_SIZE_UNIT (f)
-	  || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
-	continue;
-      t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
-      if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
-	continue;
-
-      /* If we matched, then set offset to the displacement into
-	 this field.  */
-      new_base = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
-      SET_EXPR_LOCATION (new_base, loc);
-
-      /* Recurse to possibly find the match.  */
-      ret = maybe_fold_offset_to_array_ref (loc, new_base, t, orig_type,
-					    f == TYPE_FIELDS (record_type));
-      if (ret)
-	return ret;
-      ret = maybe_fold_offset_to_component_ref (loc, field_type, new_base, t,
-						orig_type);
-      if (ret)
-	return ret;
-    }
-
-  if (!tail_array_field)
-    return NULL_TREE;
-
-  f = tail_array_field;
-  field_type = TREE_TYPE (f);
-  offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);
-
-  /* If we get here, we've got an aggregate field, and a possibly
-     nonzero offset into them.  Recurse and hope for a valid match.  */
-  base = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
-  SET_EXPR_LOCATION (base, loc);
-
-  t = maybe_fold_offset_to_array_ref (loc, base, offset, orig_type,
-				      f == TYPE_FIELDS (record_type));
-  if (t)
-    return t;
-  return maybe_fold_offset_to_component_ref (loc, field_type, base, offset,
-					     orig_type);
-}
-
-/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
-   or BASE[index] or by combination of those.
-
+/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index].
    LOC is the location of original expression.
 
-   Before attempting the conversion strip off existing ADDR_EXPRs and
-   handled component refs.  */
+   Before attempting the conversion strip off existing ADDR_EXPRs.  */
 
 tree
 maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
 				tree orig_type)
 {
   tree ret;
-  tree type;
 
   STRIP_NOPS (base);
   if (TREE_CODE (base) != ADDR_EXPR)
     return NULL_TREE;
 
   base = TREE_OPERAND (base, 0);
-
-  /* Handle case where existing COMPONENT_REF pick e.g. wrong field of union,
-     so it needs to be removed and new COMPONENT_REF constructed.
-     The wrong COMPONENT_REF are often constructed by folding the
-     (type *)&object within the expression (type *)&object+offset  */
-  if (handled_component_p (base))
-    {
-      HOST_WIDE_INT sub_offset, size, maxsize;
-      tree newbase;
-      newbase = get_ref_base_and_extent (base, &sub_offset,
-					 &size, &maxsize);
-      gcc_assert (newbase);
-      if (size == maxsize
-	  && size != -1
-	  && !(sub_offset & (BITS_PER_UNIT - 1)))
-	{
-	  base = newbase;
-	  if (sub_offset)
-	    offset = int_const_binop (PLUS_EXPR, offset,
-				      build_int_cst (TREE_TYPE (offset),
-						     sub_offset / BITS_PER_UNIT), 1);
-	}
-    }
-  if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
+  if (types_compatible_p (orig_type, TREE_TYPE (base))
       && integer_zerop (offset))
     return base;
-  type = TREE_TYPE (base);
 
-  ret = maybe_fold_offset_to_component_ref (loc, type, base, offset, orig_type);
-  if (!ret)
-    ret = maybe_fold_offset_to_array_ref (loc, base, offset, orig_type, true);
-
-  return ret;
+  ret = maybe_fold_offset_to_array_ref (loc, base, offset);
+  if (ret && types_compatible_p (orig_type, TREE_TYPE (ret)))
+    return ret;
+  return NULL_TREE;
 }
 
-/* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
-   or &BASE[index] or by combination of those.
-
-   LOC is the location of the original expression.
-
-   Before attempting the conversion strip off existing component refs.  */
+/* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index].
+   LOC is the location of the original expression.  */
 
 tree
 maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
 			      tree orig_type)
 {
-  tree t;
-
-  gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
-	      && POINTER_TYPE_P (orig_type));
-
-  t = maybe_fold_offset_to_reference (loc, addr, offset,
-				      TREE_TYPE (orig_type));
-  if (t != NULL_TREE)
-    {
-      tree orig = addr;
-      tree ptr_type;
-
-      /* For __builtin_object_size to function correctly we need to
-         make sure not to fold address arithmetic so that we change
-	 reference from one array to another.  This would happen for
-	 example for
-
-	   struct X { char s1[10]; char s2[10] } s;
-	   char *foo (void) { return &s.s2[-4]; }
+  tree base, ret;
 
-	 where we need to avoid generating &s.s1[6].  As the C and
-	 C++ frontends create different initial trees
-	 (char *) &s.s1 + -4  vs.  &s.s1[-4]  we have to do some
-	 sophisticated comparisons here.  Note that checking for the
-	 condition after the fact is easier than trying to avoid doing
-	 the folding.  */
-      STRIP_NOPS (orig);
-      if (TREE_CODE (orig) == ADDR_EXPR)
-	orig = TREE_OPERAND (orig, 0);
-      if ((TREE_CODE (orig) == ARRAY_REF
-	   || (TREE_CODE (orig) == COMPONENT_REF
-	       && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
-	  && (TREE_CODE (t) == ARRAY_REF
-	      || TREE_CODE (t) == COMPONENT_REF)
-	  && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
-			       ? TREE_OPERAND (orig, 0) : orig,
-			       TREE_CODE (t) == ARRAY_REF
-			       ? TREE_OPERAND (t, 0) : t, 0))
+  STRIP_NOPS (addr);
+  if (TREE_CODE (addr) != ADDR_EXPR)
+    return NULL_TREE;
+  base = TREE_OPERAND (addr, 0);
+  ret = maybe_fold_offset_to_array_ref (loc, base, offset);
+  if (ret)
+    {
+      ret = build_fold_addr_expr (ret);
+      if (!useless_type_conversion_p (orig_type, TREE_TYPE (ret)))
 	return NULL_TREE;
-
-      ptr_type = build_pointer_type (TREE_TYPE (t));
-      if (!useless_type_conversion_p (orig_type, ptr_type))
-	return NULL_TREE;
-      return build_fold_addr_expr_with_type_loc (loc, t, ptr_type);
+      SET_EXPR_LOCATION (ret, loc);
     }
 
-  return NULL_TREE;
-}
-
-/* A subroutine of fold_stmt.  Attempt to simplify *(BASE+OFFSET).
-   Return the simplified expression, or NULL if nothing could be done.  */
-
-static tree
-maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
-{
-  tree t;
-  bool volatile_p = TREE_THIS_VOLATILE (expr);
-  location_t loc = EXPR_LOCATION (expr);
-
-  /* We may well have constructed a double-nested PLUS_EXPR via multiple
-     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
-     are sometimes added.  */
-  base = fold (base);
-  STRIP_TYPE_NOPS (base);
-  TREE_OPERAND (expr, 0) = base;
-
-  /* One possibility is that the address reduces to a string constant.  */
-  t = fold_read_from_constant_string (expr);
-  if (t)
-    return t;
-
-  /* Add in any offset from a POINTER_PLUS_EXPR.  */
-  if (TREE_CODE (base) == POINTER_PLUS_EXPR)
-    {
-      tree offset2;
-
-      offset2 = TREE_OPERAND (base, 1);
-      if (TREE_CODE (offset2) != INTEGER_CST)
-	return NULL_TREE;
-      base = TREE_OPERAND (base, 0);
-
-      offset = fold_convert (sizetype,
-			     int_const_binop (PLUS_EXPR, offset, offset2, 1));
-    }
-
-  if (TREE_CODE (base) == ADDR_EXPR)
-    {
-      tree base_addr = base;
-
-      /* Strip the ADDR_EXPR.  */
-      base = TREE_OPERAND (base, 0);
-
-      /* Fold away CONST_DECL to its value, if the type is scalar.  */
-      if (TREE_CODE (base) == CONST_DECL
-	  && is_gimple_min_invariant (DECL_INITIAL (base)))
-	return DECL_INITIAL (base);
-
-      /* If there is no offset involved simply return the folded base.  */
-      if (integer_zerop (offset))
-	return base;
-
-      /* Try folding *(&B+O) to B.X.  */
-      t = maybe_fold_offset_to_reference (loc, base_addr, offset,
-					  TREE_TYPE (expr));
-      if (t)
-	{
-	  /* Preserve volatileness of the original expression.
-	     We can end up with a plain decl here which is shared
-	     and we shouldn't mess with its flags.  */
-	  if (!SSA_VAR_P (t))
-	    TREE_THIS_VOLATILE (t) = volatile_p;
-	  return t;
-	}
-    }
-  else
-    {
-      /* We can get here for out-of-range string constant accesses,
-	 such as "_"[3].  Bail out of the entire substitution search
-	 and arrange for the entire statement to be replaced by a
-	 call to __builtin_trap.  In all likelihood this will all be
-	 constant-folded away, but in the meantime we can't leave with
-	 something that get_expr_operands can't understand.  */
-
-      t = base;
-      STRIP_NOPS (t);
-      if (TREE_CODE (t) == ADDR_EXPR
-	  && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
-	{
-	  /* FIXME: Except that this causes problems elsewhere with dead
-	     code not being deleted, and we die in the rtl expanders
-	     because we failed to remove some ssa_name.  In the meantime,
-	     just return zero.  */
-	  /* FIXME2: This condition should be signaled by
-	     fold_read_from_constant_string directly, rather than
-	     re-checking for it here.  */
-	  return integer_zero_node;
-	}
-
-      /* Try folding *(B+O) to B->X.  Still an improvement.  */
-      if (POINTER_TYPE_P (TREE_TYPE (base)))
-	{
-          t = maybe_fold_offset_to_reference (loc, base, offset,
-				              TREE_TYPE (expr));
-	  if (t)
-	    return t;
-	}
-    }
-
-  /* Otherwise we had an offset that we could not simplify.  */
-  return NULL_TREE;
+  return ret;
 }
 
 
@@ -632,25 +414,35 @@
       /* Or op0 should now be A[0] and the non-constant offset defined
 	 via a multiplication by the array element size.  */
       if (TREE_CODE (op0) == ARRAY_REF
+	  /* As we will end up creating a variable-index array access
+	     in the outermost array dimension, make sure there isn't
+	     an inner array dimension that the index could overflow into.  */
+	  && TREE_CODE (TREE_OPERAND (op0, 0)) != ARRAY_REF
 	  && integer_zerop (TREE_OPERAND (op0, 1))
-	  && TREE_CODE (op1) == SSA_NAME
-	  && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (op0)), 1))
+	  && TREE_CODE (op1) == SSA_NAME)
 	{
 	  gimple offset_def = SSA_NAME_DEF_STMT (op1);
-	  if (!is_gimple_assign (offset_def))
+	  tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (op0));
+	  if (!host_integerp (elsz, 1)
+	      || !is_gimple_assign (offset_def))
+	    return NULL_TREE;
+
+	  /* Do not build array references of something that we can't
+	     see the true number of array dimensions for.  */
+	  if (!DECL_P (TREE_OPERAND (op0, 0))
+	      && !handled_component_p (TREE_OPERAND (op0, 0)))
 	    return NULL_TREE;
 
 	  if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
 	      && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
-	      && tree_int_cst_equal (gimple_assign_rhs2 (offset_def),
-				     TYPE_SIZE_UNIT (TREE_TYPE (op0))))
+	      && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
 	    return build_fold_addr_expr
 			  (build4 (ARRAY_REF, TREE_TYPE (op0),
 				   TREE_OPERAND (op0, 0),
 				   gimple_assign_rhs1 (offset_def),
 				   TREE_OPERAND (op0, 2),
 				   TREE_OPERAND (op0, 3)));
-	  else if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (op0)))
+	  else if (integer_onep (elsz)
 		   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
 	    return build_fold_addr_expr
 			  (build4 (ARRAY_REF, TREE_TYPE (op0),
@@ -659,6 +451,38 @@
 				   TREE_OPERAND (op0, 2),
 				   TREE_OPERAND (op0, 3)));
 	}
+      else if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE
+	       /* Ditto.  */
+	       && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) != ARRAY_TYPE
+	       && TREE_CODE (op1) == SSA_NAME)
+	{
+	  gimple offset_def = SSA_NAME_DEF_STMT (op1);
+	  tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0)));
+	  if (!host_integerp (elsz, 1)
+	      || !is_gimple_assign (offset_def))
+	    return NULL_TREE;
+
+	  /* Do not build array references of something that we can't
+	     see the true number of array dimensions for.  */
+	  if (!DECL_P (op0)
+	      && !handled_component_p (op0))
+	    return NULL_TREE;
+
+	  if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
+	      && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
+	      && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
+	    return build_fold_addr_expr
+			  (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
+				   op0, gimple_assign_rhs1 (offset_def),
+				   integer_zero_node, NULL_TREE));
+	  else if (integer_onep (elsz)
+		   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
+	    return build_fold_addr_expr
+			  (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
+				   op0, op1,
+				   integer_zero_node, NULL_TREE));
+	}
+
       return NULL_TREE;
     }
 
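The pattern these two branches match, viewed at source level (a hypothetical rendering, not from the patch): an address computed as the start of an array plus a scaled SSA offset is rebuilt as a variable-index ARRAY_REF, but only in the outermost dimension and only when all array dimensions are visible.

    int a[16];

    int *
    nth (long i)
    {
      /* GIMPLE sees "&a[0] + i * 4" (a POINTER_PLUS_EXPR with a
         MULT_EXPR-defined offset); the fold rebuilds it as "&a[i]".  */
      return &a[0] + i;
    }
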
@@ -713,13 +537,12 @@
     ptd_type = TREE_TYPE (TREE_TYPE (op0));
 
   /* At which point we can try some of the same things as for indirects.  */
-  t = maybe_fold_offset_to_array_ref (loc, op0, op1, ptd_type, true);
-  if (!t)
-    t = maybe_fold_offset_to_component_ref (loc, TREE_TYPE (op0), op0, op1,
-					    ptd_type);
+  t = maybe_fold_offset_to_array_ref (loc, op0, op1);
   if (t)
     {
-      t = build1 (ADDR_EXPR, res_type, t);
+      t = build_fold_addr_expr (t);
+      if (!useless_type_conversion_p (res_type, TREE_TYPE (t)))
+	return NULL_TREE;
       SET_EXPR_LOCATION (t, loc);
     }
 
@@ -735,14 +558,12 @@
 maybe_fold_reference (tree expr, bool is_lhs)
 {
   tree *t = &expr;
+  tree result;
 
-  if (TREE_CODE (expr) == ARRAY_REF
-      && !is_lhs)
-    {
-      tree tem = fold_read_from_constant_string (expr);
-      if (tem)
-	return tem;
-    }
+  if (!is_lhs
+      && (result = fold_const_aggregate_ref (expr))
+      && is_gimple_min_invariant (result))
+    return result;
 
   /* ???  We might want to open-code the relevant remaining cases
      to avoid using the generic fold.  */
@@ -757,19 +578,51 @@
   while (handled_component_p (*t))
     t = &TREE_OPERAND (*t, 0);
 
-  if (TREE_CODE (*t) == INDIRECT_REF)
+  /* Fold back MEM_REFs to reference trees.  */
+  if (TREE_CODE (*t) == MEM_REF
+      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
+      && integer_zerop (TREE_OPERAND (*t, 1))
+      && (TREE_THIS_VOLATILE (*t)
+	  == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
+      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
+      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
+	  == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
+      /* We have to look out here to not drop a required conversion
+	 from the rhs to the lhs if is_lhs, but we don't have the
+	 rhs here to verify that.  Thus require strict type
+	 compatibility.  */
+      && types_compatible_p (TREE_TYPE (*t),
+			     TREE_TYPE (TREE_OPERAND
+					  (TREE_OPERAND (*t, 0), 0))))
     {
-      tree tem = maybe_fold_stmt_indirect (*t, TREE_OPERAND (*t, 0),
-					   integer_zero_node);
-      /* Avoid folding *"abc" = 5 into 'a' = 5.  */
-      if (is_lhs && tem && CONSTANT_CLASS_P (tem))
-	tem = NULL_TREE;
-      if (!tem
-	  && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR)
-	/* If we had a good reason for propagating the address here,
-	   make sure we end up with valid gimple.  See PR34989.  */
-	tem = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
-
+      tree tem;
+      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
+      tem = maybe_fold_reference (expr, is_lhs);
+      if (tem)
+	return tem;
+      return expr;
+    }
+  /* Canonicalize MEM_REFs invariant address operand.  */
+  else if (TREE_CODE (*t) == MEM_REF
+	   && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
+    {
+      bool volatile_p = TREE_THIS_VOLATILE (*t);
+      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
+			      TREE_OPERAND (*t, 0),
+			      TREE_OPERAND (*t, 1));
+      if (tem)
+	{
+	  TREE_THIS_VOLATILE (tem) = volatile_p;
+	  *t = tem;
+	  tem = maybe_fold_reference (expr, is_lhs);
+	  if (tem)
+	    return tem;
+	  return expr;
+	}
+    }
+  else if (TREE_CODE (*t) == TARGET_MEM_REF)
+    {
+      tree tem = maybe_fold_tmr (*t);
       if (tem)
 	{
 	  *t = tem;
@@ -853,18 +706,23 @@
 				    COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
           }
 
-	else if (TREE_CODE (rhs) == TARGET_MEM_REF)
-	  return maybe_fold_tmr (rhs);
-
 	else if (REFERENCE_CLASS_P (rhs))
 	  return maybe_fold_reference (rhs, false);
 
 	else if (TREE_CODE (rhs) == ADDR_EXPR)
 	  {
-	    tree tem = maybe_fold_reference (TREE_OPERAND (rhs, 0), true);
-	    if (tem)
+	    tree ref = TREE_OPERAND (rhs, 0);
+	    tree tem = maybe_fold_reference (ref, true);
+	    if (tem
+		&& TREE_CODE (tem) == MEM_REF
+		&& integer_zerop (TREE_OPERAND (tem, 1)))
+	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
+	    else if (tem)
 	      result = fold_convert (TREE_TYPE (rhs),
 				     build_fold_addr_expr_loc (loc, tem));
+	    else if (TREE_CODE (ref) == MEM_REF
+		     && integer_zerop (TREE_OPERAND (ref, 1)))
+	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
 	  }
 
 	else if (TREE_CODE (rhs) == CONSTRUCTOR
@@ -984,6 +842,33 @@
         }
       break;
 
+    case GIMPLE_TERNARY_RHS:
+      result = fold_ternary_loc (loc, subcode,
+				 TREE_TYPE (gimple_assign_lhs (stmt)),
+				 gimple_assign_rhs1 (stmt),
+				 gimple_assign_rhs2 (stmt),
+				 gimple_assign_rhs3 (stmt));
+
+      if (result)
+        {
+          STRIP_USELESS_TYPE_CONVERSION (result);
+          if (valid_gimple_rhs_p (result))
+	    return result;
+
+	  /* Fold might have produced non-GIMPLE, so if we trust it blindly
+	     we lose canonicalization opportunities.  Do not go again
+	     through fold here though, or the same non-GIMPLE will be
+	     produced.  */
+          if (commutative_ternary_tree_code (subcode)
+              && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
+                                       gimple_assign_rhs2 (stmt), false))
+            return build3 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
+			   gimple_assign_rhs2 (stmt),
+			   gimple_assign_rhs1 (stmt),
+			   gimple_assign_rhs3 (stmt));
+        }
+      break;
+
     case GIMPLE_INVALID_RHS:
       gcc_unreachable ();
     }
@@ -1024,7 +909,9 @@
    is replaced.  If the call is expected to produce a result, then it
    is replaced by an assignment of the new RHS to the result variable.
    If the result is to be ignored, then the call is replaced by a
-   GIMPLE_NOP.  */
+   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
+   VUSE and the last VDEF of the whole sequence the same as those of
+   the replaced statement, and by using new SSA names for stores in
+   between.  */
 
 void
 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
@@ -1036,17 +923,35 @@
   gimple_seq stmts = gimple_seq_alloc();
   struct gimplify_ctx gctx;
   gimple last = NULL;
+  gimple laststore = NULL;
+  tree reaching_vuse;
 
   stmt = gsi_stmt (*si_p);
 
   gcc_assert (is_gimple_call (stmt));
 
   lhs = gimple_call_lhs (stmt);
+  reaching_vuse = gimple_vuse (stmt);
 
   push_gimplify_context (&gctx);
 
   if (lhs == NULL_TREE)
-    gimplify_and_add (expr, &stmts);
+    {
+      gimplify_and_add (expr, &stmts);
+      /* We can end up folding a memcpy into an empty-class assignment,
+	 which gets optimized away by C++ gimplification.  */
+      if (gimple_seq_empty_p (stmts))
+	{
+	  pop_gimplify_context (NULL);
+	  if (gimple_in_ssa_p (cfun))
+	    {
+	      unlink_stmt_vdef (stmt);
+	      release_defs (stmt);
+	    }
+	  gsi_remove (si_p, true);
+	  return;
+	}
+    }
   else
     tmp = get_initialized_tmp_var (expr, &stmts, NULL);
 
@@ -1064,15 +969,53 @@
 	  gsi_next (si_p);
 	}
       new_stmt = gsi_stmt (i);
-      find_new_referenced_vars (new_stmt);
-      mark_symbols_for_renaming (new_stmt);
+      if (gimple_in_ssa_p (cfun))
+	{
+	  find_new_referenced_vars (new_stmt);
+	  mark_symbols_for_renaming (new_stmt);
+	}
+      /* If the new statement has a VUSE, update it with the exact SSA
+         name we know will reach this one.  */
+      if (gimple_vuse (new_stmt))
+	{
+	  /* If we've also seen a previous store, create a new VDEF for
+	     it, and make that the new reaching VUSE.  */
+	  if (laststore)
+	    {
+	      reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
+	      gimple_set_vdef (laststore, reaching_vuse);
+	      update_stmt (laststore);
+	      laststore = NULL;
+	    }
+	  gimple_set_vuse (new_stmt, reaching_vuse);
+	  gimple_set_modified (new_stmt, true);
+	}
+      if (gimple_assign_single_p (new_stmt)
+	  && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
+	{
+	  laststore = new_stmt;
+	}
       last = new_stmt;
     }
 
   if (lhs == NULL_TREE)
     {
-      unlink_stmt_vdef (stmt);
-      release_defs (stmt);
+      /* If we replace a call without an LHS that has a VDEF and our new
+         sequence ends with a store, we must make that store have the same
+	 VDEF in order not to break the sequencing.  This can happen
+	 for instance when folding memcpy calls into assignments.  */
+      if (gimple_vdef (stmt) && laststore)
+	{
+	  gimple_set_vdef (laststore, gimple_vdef (stmt));
+	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
+	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
+	  update_stmt (laststore);
+	}
+      else if (gimple_in_ssa_p (cfun))
+	{
+	  unlink_stmt_vdef (stmt);
+	  release_defs (stmt);
+	}
       new_stmt = last;
     }
   else
@@ -1082,10 +1025,32 @@
 	  gsi_insert_before (si_p, last, GSI_NEW_STMT);
 	  gsi_next (si_p);
 	}
+      if (laststore && is_gimple_reg (lhs))
+	{
+	  gimple_set_vdef (laststore, gimple_vdef (stmt));
+	  update_stmt (laststore);
+	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
+	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
+	  laststore = NULL;
+	}
+      else if (laststore)
+	{
+	  reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
+	  gimple_set_vdef (laststore, reaching_vuse);
+	  update_stmt (laststore);
+	  laststore = NULL;
+	}
       new_stmt = gimple_build_assign (lhs, tmp);
-      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
-      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
-      move_ssa_defining_stmt_for_defs (new_stmt, stmt);
+      if (!is_gimple_reg (tmp))
+	gimple_set_vuse (new_stmt, reaching_vuse);
+      if (!is_gimple_reg (lhs))
+	{
+	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
+	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
+	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
+	}
+      else if (reaching_vuse == gimple_vuse (stmt))
+	unlink_stmt_vdef (stmt);
     }
 
   gimple_set_location (new_stmt, gimple_location (stmt));
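
A caller-visible case the new VDEF bookkeeping exists for (illustrative, assuming the usual builtin folding): when a memcpy call is replaced by an assignment, the replacement store must take over the call's VDEF so later loads remain sequenced after it.

    #include <string.h>

    struct S { int i; };

    int
    copy_and_read (struct S *d, const struct S *s)
    {
      /* Assumed folding: the call becomes "*d = *s;".  The new store
         inherits the call's VDEF, so the load of d->i below still sees
         the store.  */
      memcpy (d, s, sizeof (struct S));
      return d->i;
    }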
@@ -1157,9 +1122,8 @@
     }
 
   /* If we were already here, break the infinite cycle.  */
-  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
+  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
     return true;
-  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));
 
   var = arg;
   def_stmt = SSA_NAME_DEF_STMT (var);
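
The hunk above leans on bitmap_set_bit reporting whether the bit was newly set, folding the old test-then-set pair into one call. A generic C sketch of the same idiom (hypothetical helpers, not GCC's bitmap API):

    #include <stdbool.h>
    #include <stdint.h>

    /* Set bit N in a flat word array and report whether it was newly
       set, mirroring the test-and-set contract of bitmap_set_bit.  */
    static bool
    set_bit (uint64_t *words, unsigned n)
    {
      uint64_t mask = (uint64_t) 1 << (n % 64);
      bool was_clear = (words[n / 64] & mask) == 0;
      words[n / 64] |= mask;
      return was_clear;
    }

    /* In a visited-set walk, "already seen" is simply !set_bit, which is
       exactly how the patched code breaks the infinite cycle.  */
    static bool
    already_visited (uint64_t *visited, unsigned version)
    {
      return !set_bit (visited, version);
    }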
@@ -1312,9 +1276,9 @@
               fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
 
 	  /* If the result is not a valid gimple value, or not a cast
-	     of a valid gimple value, then we can not use the result.  */
+	     of a valid gimple value, then we cannot use the result.  */
 	  if (is_gimple_val (new_val)
-	      || (is_gimple_cast (new_val)
+	      || (CONVERT_EXPR_P (new_val)
 		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
 	    return new_val;
 	}
@@ -1401,98 +1365,26 @@
   return result;
 }
 
-/* Search for a base binfo of BINFO that corresponds to TYPE and return it if
-   it is found or NULL_TREE if it is not.  */
-
-static tree
-get_base_binfo_for_type (tree binfo, tree type)
-{
-  int i;
-  tree base_binfo;
-  tree res = NULL_TREE;
-
-  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
-    if (TREE_TYPE (base_binfo) == type)
-      {
-	gcc_assert (!res);
-	res = base_binfo;
-      }
-
-  return res;
-}
-
-/* Return a binfo describing the part of object referenced by expression REF.
-   Return NULL_TREE if it cannot be determined.  REF can consist of a series of
-   COMPONENT_REFs of a declaration or of an INDIRECT_REF or it can also be just
-   a simple declaration, indirect reference or an SSA_NAME.  If the function
-   discovers an INDIRECT_REF or an SSA_NAME, it will assume that the
-   encapsulating type is described by KNOWN_BINFO, if it is not NULL_TREE.
-   Otherwise the first non-artificial field declaration or the base declaration
-   will be examined to get the encapsulating type. */
+/* Return a declaration of the function which an OBJ_TYPE_REF references.
+   TOKEN is the integer form of the OBJ_TYPE_REF_TOKEN of the reference
+   expression.  KNOWN_BINFO carries the binfo describing the true type of
+   OBJ_TYPE_REF_OBJECT(REF).  If a call to the function must be accompanied
+   by a this-pointer adjustment, the constant that should be added to the
+   this pointer is stored in *DELTA.  If REFUSE_THUNKS is true, return NULL
+   if the function is a thunk (other than a this adjustment, which is dealt
+   with via DELTA).  */
 
 tree
-gimple_get_relevant_ref_binfo (tree ref, tree known_binfo)
-{
-  while (true)
-    {
-      if (TREE_CODE (ref) == COMPONENT_REF)
-	{
-	  tree par_type;
-	  tree binfo, base_binfo;
-	  tree field = TREE_OPERAND (ref, 1);
-
-	  if (!DECL_ARTIFICIAL (field))
-	    {
-	      tree type = TREE_TYPE (field);
-	      if (TREE_CODE (type) == RECORD_TYPE)
-		return TYPE_BINFO (type);
-	      else
-		return NULL_TREE;
-	    }
-
-	  par_type = TREE_TYPE (TREE_OPERAND (ref, 0));
-	  binfo = TYPE_BINFO (par_type);
-	  if (!binfo
-	      || BINFO_N_BASE_BINFOS (binfo) == 0)
-	    return NULL_TREE;
-
-	  base_binfo = BINFO_BASE_BINFO (binfo, 0);
-	  if (BINFO_TYPE (base_binfo) != TREE_TYPE (field))
-	    {
-	      tree d_binfo;
-
-	      d_binfo = gimple_get_relevant_ref_binfo (TREE_OPERAND (ref, 0),
-						       known_binfo);
-	      /* Get descendant binfo. */
-	      if (!d_binfo)
-		return NULL_TREE;
-	      return get_base_binfo_for_type (d_binfo, TREE_TYPE (field));
-	    }
-
-	  ref = TREE_OPERAND (ref, 0);
-	}
-      else if (DECL_P (ref) && TREE_CODE (TREE_TYPE (ref)) == RECORD_TYPE)
-	return TYPE_BINFO (TREE_TYPE (ref));
-      else if (known_binfo
-	       && (TREE_CODE (ref) == SSA_NAME
-		   || TREE_CODE (ref) == INDIRECT_REF))
-	return known_binfo;
-      else
-	return NULL_TREE;
-    }
-}
-
-/* Fold a OBJ_TYPE_REF expression to the address of a function. TOKEN is
-   integer form of OBJ_TYPE_REF_TOKEN of the reference expression.  KNOWN_BINFO
-   carries the binfo describing the true type of OBJ_TYPE_REF_OBJECT(REF).  */
-
-tree
-gimple_fold_obj_type_ref_known_binfo (HOST_WIDE_INT token, tree known_binfo)
+gimple_get_virt_mehtod_for_binfo (HOST_WIDE_INT token, tree known_binfo,
+				  tree *delta, bool refuse_thunks)
 {
   HOST_WIDE_INT i;
   tree v, fndecl;
+  struct cgraph_node *node;
 
   v = BINFO_VIRTUALS (known_binfo);
+  /* If there are no virtual methods, leave the OBJ_TYPE_REF alone.  */
+  if (!v)
+    return NULL_TREE;
   i = 0;
   while (i != token)
     {
@@ -1502,34 +1394,53 @@
     }
 
   fndecl = TREE_VALUE (v);
-  return build_fold_addr_expr (fndecl);
+  node = cgraph_get_node_or_alias (fndecl);
+  if (refuse_thunks
+      && (!node
+    /* Bail out if it is a thunk declaration.  Since simple this_adjusting
+       thunks are represented by a constant in TREE_PURPOSE of items in
+       BINFO_VIRTUALS, this is a more complicated kind which we cannot
+       handle yet.
+
+       FIXME: Remove the following condition once we are able to represent
+       thunk information on call graph edges.  */
+	  || (node->same_body_alias && node->thunk.thunk_p)))
+    return NULL_TREE;
+
+  /* When the cgraph node is missing and the function is not public, we
+     cannot devirtualize.  This can happen in WHOPR when the actual method
+     ends up in another partition, because we found the devirtualization
+     possibility too late.  */
+  if (!can_refer_decl_in_current_unit_p (TREE_VALUE (v)))
+    return NULL_TREE;
+
+  *delta = TREE_PURPOSE (v);
+  gcc_checking_assert (host_integerp (*delta, 0));
+  return fndecl;
 }
 
+/* Generate code that adjusts the first parameter of the call statement
+   pointed to by GSI by DELTA.  */
 
-/* Fold a OBJ_TYPE_REF expression to the address of a function.  If KNOWN_TYPE
-   is not NULL_TREE, it is the true type of the outmost encapsulating object if
-   that comes from a pointer SSA_NAME.  If the true outmost encapsulating type
-   can be determined from a declaration OBJ_TYPE_REF_OBJECT(REF), it is used
-   regardless of KNOWN_TYPE (which thus can be NULL_TREE).  */
-
-tree
-gimple_fold_obj_type_ref (tree ref, tree known_type)
+void
+gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
 {
-  tree obj = OBJ_TYPE_REF_OBJECT (ref);
-  tree known_binfo = known_type ? TYPE_BINFO (known_type) : NULL_TREE;
-  tree binfo;
-
-  if (TREE_CODE (obj) == ADDR_EXPR)
-    obj = TREE_OPERAND (obj, 0);
+  gimple call_stmt = gsi_stmt (*gsi);
+  tree parm, tmp;
+  gimple new_stmt;
 
-  binfo = gimple_get_relevant_ref_binfo (obj, known_binfo);
-  if (binfo)
-    {
-      HOST_WIDE_INT token = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
-      return gimple_fold_obj_type_ref_known_binfo (token, binfo);
-    }
-  else
-    return NULL_TREE;
+  delta = fold_convert (sizetype, delta);
+  gcc_assert (gimple_call_num_args (call_stmt) >= 1);
+  parm = gimple_call_arg (call_stmt, 0);
+  gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
+  tmp = create_tmp_var (TREE_TYPE (parm), NULL);
+  add_referenced_var (tmp);
+
+  tmp = make_ssa_name (tmp, NULL);
+  new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta);
+  SSA_NAME_DEF_STMT (tmp) = new_stmt;
+  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
+  gimple_call_set_arg (call_stmt, 0, tmp);
 }
 
 /* Attempt to fold a call statement referenced by the statement iterator GSI.
@@ -1537,8 +1448,8 @@
    simplifies to a constant value. Return true if any changes were made.
    It is assumed that the operands have been previously folded.  */
 
-static bool
-fold_gimple_call (gimple_stmt_iterator *gsi)
+bool
+gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
 {
   gimple stmt = gsi_stmt (*gsi);
 
@@ -1546,7 +1457,7 @@
 
   /* Check for builtins that CCP can handle using information not
      available in the generic fold routines.  */
-  if (callee && DECL_BUILT_IN (callee))
+  if (!inplace && callee && DECL_BUILT_IN (callee))
     {
       tree result = gimple_fold_builtin (stmt);
 
@@ -1557,28 +1468,6 @@
 	  return true;
 	}
     }
-  else
-    {
-      /* ??? Should perhaps do this in fold proper.  However, doing it
-         there requires that we create a new CALL_EXPR, and that requires
-         copying EH region info to the new node.  Easier to just do it
-         here where we can just smash the call operand.  */
-      /* ??? Is there a good reason not to do this in fold_stmt_inplace?  */
-      callee = gimple_call_fn (stmt);
-      if (TREE_CODE (callee) == OBJ_TYPE_REF
-          && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR)
-        {
-          tree t;
-
-          t = gimple_fold_obj_type_ref (callee, NULL_TREE);
-          if (t)
-            {
-              gimple_call_set_fn (stmt, t);
-              return true;
-            }
-        }
-    }
-
   return false;
 }
 
@@ -1630,9 +1519,7 @@
 		changed = true;
 	      }
 	  }
-      /* The entire statement may be replaced in this case.  */
-      if (!inplace)
-	changed |= fold_gimple_call (gsi);
+      changed |= gimple_fold_call (gsi, inplace);
       break;
 
     case GIMPLE_ASM:
@@ -1661,6 +1548,23 @@
 	}
       break;
 
+    case GIMPLE_DEBUG:
+      if (gimple_debug_bind_p (stmt))
+	{
+	  tree val = gimple_debug_bind_get_value (stmt);
+	  if (val
+	      && REFERENCE_CLASS_P (val))
+	    {
+	      tree tem = maybe_fold_reference (val, false);
+	      if (tem)
+		{
+		  gimple_debug_bind_set_value (stmt, tem);
+		  changed = true;
+		}
+	    }
+	}
+      break;
+
     default:;
     }
 
@@ -1714,3 +1618,1055 @@
   return changed;
 }
 
+/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE 
+   if EXPR is null or we don't know how.
+   If non-null, the result always has boolean type.  */
+
+static tree
+canonicalize_bool (tree expr, bool invert)
+{
+  if (!expr)
+    return NULL_TREE;
+  else if (invert)
+    {
+      if (integer_nonzerop (expr))
+	return boolean_false_node;
+      else if (integer_zerop (expr))
+	return boolean_true_node;
+      else if (TREE_CODE (expr) == SSA_NAME)
+	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
+			    build_int_cst (TREE_TYPE (expr), 0));
+      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
+	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
+			    boolean_type_node,
+			    TREE_OPERAND (expr, 0),
+			    TREE_OPERAND (expr, 1));
+      else
+	return NULL_TREE;
+    }
+  else
+    {
+      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
+	return expr;
+      if (integer_nonzerop (expr))
+	return boolean_true_node;
+      else if (integer_zerop (expr))
+	return boolean_false_node;
+      else if (TREE_CODE (expr) == SSA_NAME)
+	return fold_build2 (NE_EXPR, boolean_type_node, expr,
+			    build_int_cst (TREE_TYPE (expr), 0));
+      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
+	return fold_build2 (TREE_CODE (expr),
+			    boolean_type_node,
+			    TREE_OPERAND (expr, 0),
+			    TREE_OPERAND (expr, 1));
+      else
+	return NULL_TREE;
+    }
+}
+
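The inversion half of canonicalize_bool relies on invert_tree_comparison; for ordered integer comparisons the mapping is the familiar one, shown here as a standalone sketch (an assumed miniature; the real function also handles floating point and trapping comparisons):

    enum cmp { EQ, NE, LT, GT, LE, GE };

    /* Map each comparison code to its logical negation, as the invert
       path of canonicalize_bool builds it.  */
    static enum cmp
    invert_cmp (enum cmp c)
    {
      switch (c)
        {
        case EQ: return NE;
        case NE: return EQ;
        case LT: return GE;
        case GT: return LE;
        case LE: return GT;
        case GE: return LT;
        }
      return c;   /* not reached for valid inputs */
    }
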
+/* Check to see if a boolean expression EXPR is logically equivalent to the
+   comparison (OP1 CODE OP2).  Check for various identities involving
+   SSA_NAMEs.  */
+
+static bool
+same_bool_comparison_p (const_tree expr, enum tree_code code,
+			const_tree op1, const_tree op2)
+{
+  gimple s;
+
+  /* The obvious case.  */
+  if (TREE_CODE (expr) == code
+      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
+      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
+    return true;
+
+  /* Check for comparing (name, name != 0) and the case where expr
+     is an SSA_NAME with a definition matching the comparison.  */
+  if (TREE_CODE (expr) == SSA_NAME
+      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
+    {
+      if (operand_equal_p (expr, op1, 0))
+	return ((code == NE_EXPR && integer_zerop (op2))
+		|| (code == EQ_EXPR && integer_nonzerop (op2)));
+      s = SSA_NAME_DEF_STMT (expr);
+      if (is_gimple_assign (s)
+	  && gimple_assign_rhs_code (s) == code
+	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
+	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
+	return true;
+    }
+
+  /* If op1 is of the form (name != 0) or (name == 0), and the definition
+     of name is a comparison, recurse.  */
+  if (TREE_CODE (op1) == SSA_NAME
+      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
+    {
+      s = SSA_NAME_DEF_STMT (op1);
+      if (is_gimple_assign (s)
+	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
+	{
+	  enum tree_code c = gimple_assign_rhs_code (s);
+	  if ((c == NE_EXPR && integer_zerop (op2))
+	      || (c == EQ_EXPR && integer_nonzerop (op2)))
+	    return same_bool_comparison_p (expr, c,
+					   gimple_assign_rhs1 (s),
+					   gimple_assign_rhs2 (s));
+	  if ((c == EQ_EXPR && integer_zerop (op2))
+	      || (c == NE_EXPR && integer_nonzerop (op2)))
+	    return same_bool_comparison_p (expr,
+					   invert_tree_comparison (c, false),
+					   gimple_assign_rhs1 (s),
+					   gimple_assign_rhs2 (s));
+	}
+    }
+  return false;
+}
+
+/* Check to see if two boolean expressions OP1 and OP2 are logically
+   equivalent.  */
+
+static bool
+same_bool_result_p (const_tree op1, const_tree op2)
+{
+  /* Simple cases first.  */
+  if (operand_equal_p (op1, op2, 0))
+    return true;
+
+  /* Check the cases where at least one of the operands is a comparison.
+     These are a bit smarter than operand_equal_p in that they apply some
+     identities on SSA_NAMEs.  */
+  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
+      && same_bool_comparison_p (op1, TREE_CODE (op2),
+				 TREE_OPERAND (op2, 0),
+				 TREE_OPERAND (op2, 1)))
+    return true;
+  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
+      && same_bool_comparison_p (op2, TREE_CODE (op1),
+				 TREE_OPERAND (op1, 0),
+				 TREE_OPERAND (op1, 1)))
+    return true;
+
+  /* Default case.  */
+  return false;
+}
+
+/* Forward declarations for some mutually recursive functions.  */
+
+static tree
+and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
+		   enum tree_code code2, tree op2a, tree op2b);
+static tree
+and_var_with_comparison (tree var, bool invert,
+			 enum tree_code code2, tree op2a, tree op2b);
+static tree
+and_var_with_comparison_1 (gimple stmt, 
+			   enum tree_code code2, tree op2a, tree op2b);
+static tree
+or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
+		  enum tree_code code2, tree op2a, tree op2b);
+static tree
+or_var_with_comparison (tree var, bool invert,
+			enum tree_code code2, tree op2a, tree op2b);
+static tree
+or_var_with_comparison_1 (gimple stmt, 
+			  enum tree_code code2, tree op2a, tree op2b);
+
+/* Helper function for and_comparisons_1:  try to simplify the AND of the
+   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
+   If INVERT is true, invert the value of the VAR before doing the AND.
+   Return NULL_TREE if we can't simplify this to a single expression.  */
+
+static tree
+and_var_with_comparison (tree var, bool invert,
+			 enum tree_code code2, tree op2a, tree op2b)
+{
+  tree t;
+  gimple stmt = SSA_NAME_DEF_STMT (var);
+
+  /* We can only deal with variables whose definitions are assignments.  */
+  if (!is_gimple_assign (stmt))
+    return NULL_TREE;
+  
+  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
+     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
+     Then we only have to consider the simpler non-inverted cases.  */
+  if (invert)
+    t = or_var_with_comparison_1 (stmt, 
+				  invert_tree_comparison (code2, false),
+				  op2a, op2b);
+  else
+    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
+  return canonicalize_bool (t, invert);
+}
+
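The DeMorgan step above in miniature: a brute-force check (hypothetical, over plain C booleans) that !var AND c equals !(var OR !c), which is why only the non-inverted cases need direct handling.

    #include <stdbool.h>

    static bool
    demorgan_holds (void)
    {
      /* Verify !v && c == !(v || !c) for all boolean v, c.  */
      for (int v = 0; v <= 1; v++)
        for (int c = 0; c <= 1; c++)
          if ((!v && c) != !(v || !c))
            return false;
      return true;
    }
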
+/* Try to simplify the AND of the ssa variable defined by the assignment
+   STMT with the comparison specified by (OP2A CODE2 OP2B).
+   Return NULL_TREE if we can't simplify this to a single expression.  */
+
+static tree
+and_var_with_comparison_1 (gimple stmt,
+			   enum tree_code code2, tree op2a, tree op2b)
+{
+  tree var = gimple_assign_lhs (stmt);
+  tree true_test_var = NULL_TREE;
+  tree false_test_var = NULL_TREE;
+  enum tree_code innercode = gimple_assign_rhs_code (stmt);
+
+  /* Check for identities like (var AND (var == 0)) => false.  */
+  if (TREE_CODE (op2a) == SSA_NAME
+      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
+    {
+      if ((code2 == NE_EXPR && integer_zerop (op2b))
+	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
+	{
+	  true_test_var = op2a;
+	  if (var == true_test_var)
+	    return var;
+	}
+      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
+	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
+	{
+	  false_test_var = op2a;
+	  if (var == false_test_var)
+	    return boolean_false_node;
+	}
+    }
+
+  /* If the definition is a comparison, recurse on it.  */
+  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
+    {
+      tree t = and_comparisons_1 (innercode,
+				  gimple_assign_rhs1 (stmt),
+				  gimple_assign_rhs2 (stmt),
+				  code2,
+				  op2a,
+				  op2b);
+      if (t)
+	return t;
+    }
+
+  /* If the definition is an AND or OR expression, we may be able to
+     simplify by reassociating.  */
+  if (innercode == TRUTH_AND_EXPR
+      || innercode == TRUTH_OR_EXPR
+      || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
+	  && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
+    {
+      tree inner1 = gimple_assign_rhs1 (stmt);
+      tree inner2 = gimple_assign_rhs2 (stmt);
+      gimple s;
+      tree t;
+      tree partial = NULL_TREE;
+      bool is_and = (innercode == TRUTH_AND_EXPR || innercode == BIT_AND_EXPR);
+      
+      /* Check for boolean identities that don't require recursive examination
+	 of inner1/inner2:
+	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
+	 inner1 AND (inner1 OR inner2) => inner1
+	 !inner1 AND (inner1 AND inner2) => false
+	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
+         Likewise for similar cases involving inner2.  */
+      if (inner1 == true_test_var)
+	return (is_and ? var : inner1);
+      else if (inner2 == true_test_var)
+	return (is_and ? var : inner2);
+      else if (inner1 == false_test_var)
+	return (is_and
+		? boolean_false_node
+		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
+      else if (inner2 == false_test_var)
+	return (is_and
+		? boolean_false_node
+		: and_var_with_comparison (inner1, false, code2, op2a, op2b));
+
+      /* Next, redistribute/reassociate the AND across the inner tests.
+	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
+      if (TREE_CODE (inner1) == SSA_NAME
+	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
+	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
+	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
+					      gimple_assign_rhs1 (s),
+					      gimple_assign_rhs2 (s),
+					      code2, op2a, op2b)))
+	{
+	  /* Handle the AND case, where we are reassociating:
+	     (inner1 AND inner2) AND (op2a code2 op2b)
+	     => (t AND inner2)
+	     If the partial result t is a constant, we win.  Otherwise
+	     continue on to try reassociating with the other inner test.  */
+	  if (is_and)
+	    {
+	      if (integer_onep (t))
+		return inner2;
+	      else if (integer_zerop (t))
+		return boolean_false_node;
+	    }
+
+	  /* Handle the OR case, where we are redistributing:
+	     (inner1 OR inner2) AND (op2a code2 op2b)
+	     => (t OR (inner2 AND (op2a code2 op2b)))  */
+	  else if (integer_onep (t))
+	    return boolean_true_node;
+
+	  /* Save partial result for later.  */
+	  partial = t;
+	}
+      
+      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
+      if (TREE_CODE (inner2) == SSA_NAME
+	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
+	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
+	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
+					      gimple_assign_rhs1 (s),
+					      gimple_assign_rhs2 (s),
+					      code2, op2a, op2b)))
+	{
+	  /* Handle the AND case, where we are reassociating:
+	     (inner1 AND inner2) AND (op2a code2 op2b)
+	     => (inner1 AND t)  */
+	  if (is_and)
+	    {
+	      if (integer_onep (t))
+		return inner1;
+	      else if (integer_zerop (t))
+		return boolean_false_node;
+	      /* If both are the same, we can apply the identity
+		 (x AND x) == x.  */
+	      else if (partial && same_bool_result_p (t, partial))
+		return t;
+	    }
+
+	  /* Handle the OR case, where we are redistributing:
+	     (inner1 OR inner2) AND (op2a code2 op2b)
+	     => (t OR (inner1 AND (op2a code2 op2b)))
+	     => (t OR partial)  */
+	  else
+	    {
+	      if (integer_onep (t))
+		return boolean_true_node;
+	      else if (partial)
+		{
+		  /* We already got a simplification for the other
+		     operand to the redistributed OR expression.  The
+		     interesting case is when at least one is false.
+		     Or, if both are the same, we can apply the identity
+		     (x OR x) == x.  */
+		  if (integer_zerop (partial))
+		    return t;
+		  else if (integer_zerop (t))
+		    return partial;
+		  else if (same_bool_result_p (t, partial))
+		    return t;
+		}
+	    }
+	}
+    }
+  return NULL_TREE;
+}
+
+/* Try to simplify the AND of two comparisons defined by
+   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
+   If this can be done without constructing an intermediate value,
+   return the resulting tree; otherwise NULL_TREE is returned.
+   This function is deliberately asymmetric as it recurses on SSA_DEFs
+   in the first comparison but not the second.  */
+
+static tree
+and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
+		   enum tree_code code2, tree op2a, tree op2b)
+{
+  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
+  if (operand_equal_p (op1a, op2a, 0)
+      && operand_equal_p (op1b, op2b, 0))
+    {
+      tree t = combine_comparisons (UNKNOWN_LOCATION,
+				    TRUTH_ANDIF_EXPR, code1, code2,
+				    boolean_type_node, op1a, op1b);
+      if (t)
+	return t;
+    }
+
+  /* Likewise the swapped case of the above.  */
+  if (operand_equal_p (op1a, op2b, 0)
+      && operand_equal_p (op1b, op2a, 0))
+    {
+      tree t = combine_comparisons (UNKNOWN_LOCATION,
+				    TRUTH_ANDIF_EXPR, code1,
+				    swap_tree_comparison (code2),
+				    boolean_type_node, op1a, op1b);
+      if (t)
+	return t;
+    }
+
+  /* If both comparisons are of the same value against constants, we might
+     be able to merge them.  */
+  if (operand_equal_p (op1a, op2a, 0)
+      && TREE_CODE (op1b) == INTEGER_CST
+      && TREE_CODE (op2b) == INTEGER_CST)
+    {
+      int cmp = tree_int_cst_compare (op1b, op2b);
+
+      /* If we have (op1a == op1b), we should either be able to
+	 return that or FALSE, depending on whether the constant op1b
+	 also satisfies the other comparison against op2b.  */
+      if (code1 == EQ_EXPR)
+	{
+	  bool done = true;
+	  bool val;
+	  switch (code2)
+	    {
+	    case EQ_EXPR: val = (cmp == 0); break;
+	    case NE_EXPR: val = (cmp != 0); break;
+	    case LT_EXPR: val = (cmp < 0); break;
+	    case GT_EXPR: val = (cmp > 0); break;
+	    case LE_EXPR: val = (cmp <= 0); break;
+	    case GE_EXPR: val = (cmp >= 0); break;
+	    default: done = false;
+	    }
+	  if (done)
+	    {
+	      if (val)
+		return fold_build2 (code1, boolean_type_node, op1a, op1b);
+	      else
+		return boolean_false_node;
+	    }
+	}
+      /* Likewise if the second comparison is an == comparison.  */
+      else if (code2 == EQ_EXPR)
+	{
+	  bool done = true;
+	  bool val;
+	  switch (code1)
+	    {
+	    case EQ_EXPR: val = (cmp == 0); break;
+	    case NE_EXPR: val = (cmp != 0); break;
+	    case LT_EXPR: val = (cmp > 0); break;
+	    case GT_EXPR: val = (cmp < 0); break;
+	    case LE_EXPR: val = (cmp >= 0); break;
+	    case GE_EXPR: val = (cmp <= 0); break;
+	    default: done = false;
+	    }
+	  if (done)
+	    {
+	      if (val)
+		return fold_build2 (code2, boolean_type_node, op2a, op2b);
+	      else
+		return boolean_false_node;
+	    }
+	}
+
+      /* Same business with inequality tests.  */
+      else if (code1 == NE_EXPR)
+	{
+	  bool val;
+	  switch (code2)
+	    {
+	    case EQ_EXPR: val = (cmp != 0); break;
+	    case NE_EXPR: val = (cmp == 0); break;
+	    case LT_EXPR: val = (cmp >= 0); break;
+	    case GT_EXPR: val = (cmp <= 0); break;
+	    case LE_EXPR: val = (cmp > 0); break;
+	    case GE_EXPR: val = (cmp < 0); break;
+	    default:
+	      val = false;
+	    }
+	  if (val)
+	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
+	}
+      else if (code2 == NE_EXPR)
+	{
+	  bool val;
+	  switch (code1)
+	    {
+	    case EQ_EXPR: val = (cmp == 0); break;
+	    case NE_EXPR: val = (cmp != 0); break;
+	    case LT_EXPR: val = (cmp <= 0); break;
+	    case GT_EXPR: val = (cmp >= 0); break;
+	    case LE_EXPR: val = (cmp < 0); break;
+	    case GE_EXPR: val = (cmp > 0); break;
+	    default:
+	      val = false;
+	    }
+	  if (val)
+	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
+	}
+
+      /* Choose the more restrictive of two < or <= comparisons.  */
+      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
+	       && (code2 == LT_EXPR || code2 == LE_EXPR))
+	{
+	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
+	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
+	  else
+	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
+	}
+
+      /* Likewise choose the more restrictive of two > or >= comparisons.  */
+      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
+	       && (code2 == GT_EXPR || code2 == GE_EXPR))
+	{
+	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
+	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
+	  else
+	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
+	}
+
+      /* Check for singleton ranges.  */
+      else if (cmp == 0
+	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
+		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
+	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
+
+      /* Check for disjoint ranges.  */
+      else if (cmp <= 0
+	       && (code1 == LT_EXPR || code1 == LE_EXPR)
+	       && (code2 == GT_EXPR || code2 == GE_EXPR))
+	return boolean_false_node;
+      else if (cmp >= 0
+	       && (code1 == GT_EXPR || code1 == GE_EXPR)
+	       && (code2 == LT_EXPR || code2 == LE_EXPR))
+	return boolean_false_node;
+    }
+
+  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
+     NAME's definition is a truth value.  See if there are any simplifications
+     that can be done against the NAME's definition.  */
+  if (TREE_CODE (op1a) == SSA_NAME
+      && (code1 == NE_EXPR || code1 == EQ_EXPR)
+      && (integer_zerop (op1b) || integer_onep (op1b)))
+    {
+      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
+		     || (code1 == NE_EXPR && integer_onep (op1b)));
+      gimple stmt = SSA_NAME_DEF_STMT (op1a);
+      switch (gimple_code (stmt))
+	{
+	case GIMPLE_ASSIGN:
+	  /* Try to simplify by copy-propagating the definition.  */
+	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
+
+	case GIMPLE_PHI:
+	  /* If every argument to the PHI produces the same result when
+	     ANDed with the second comparison, we win.
+	     Do not do this unless the type is bool since we need a bool
+	     result here anyway.  */
+	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
+	    {
+	      tree result = NULL_TREE;
+	      unsigned i;
+	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
+		{
+		  tree arg = gimple_phi_arg_def (stmt, i);
+		  
+		  /* If this PHI has itself as an argument, ignore it.
+		     If all the other args produce the same result,
+		     we're still OK.  */
+		  if (arg == gimple_phi_result (stmt))
+		    continue;
+		  else if (TREE_CODE (arg) == INTEGER_CST)
+		    {
+		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
+			{
+			  if (!result)
+			    result = boolean_false_node;
+			  else if (!integer_zerop (result))
+			    return NULL_TREE;
+			}
+		      else if (!result)
+			result = fold_build2 (code2, boolean_type_node,
+					      op2a, op2b);
+		      else if (!same_bool_comparison_p (result,
+							code2, op2a, op2b))
+			return NULL_TREE;
+		    }
+		  else if (TREE_CODE (arg) == SSA_NAME)
+		    {
+		      tree temp = and_var_with_comparison (arg, invert,
+							   code2, op2a, op2b);
+		      if (!temp)
+			return NULL_TREE;
+		      else if (!result)
+			result = temp;
+		      else if (!same_bool_result_p (result, temp))
+			return NULL_TREE;
+		    }
+		  else
+		    return NULL_TREE;
+		}
+	      return result;
+	    }
+
+	default:
+	  break;
+	}
+    }
+  return NULL_TREE;
+}
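+
+/* For illustration, suppose a is an SSA name defined as a = (x > 0).
+   Then and_comparisons_1 folds (a != 0) AND (x > 2) by following a's
+   definition, reducing the problem to (x > 0) AND (x > 2), which
+   merges into the single test (x > 2).  */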
+
+/* Try to simplify the AND of two comparisons, specified by
+   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
+   If this can be simplified to a single expression (without requiring
+   introducing more SSA variables to hold intermediate values),
+   return the resulting tree.  Otherwise return NULL_TREE.
+   If the result expression is non-null, it has boolean type.  */
+
+tree
+maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
+			    enum tree_code code2, tree op2a, tree op2b)
+{
+  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
+  if (t)
+    return t;
+  else
+    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
+}
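+
+/* A few concrete folds performed by maybe_fold_and_comparisons, for
+   an arbitrary integer operand x:
+     (x > 2) AND (x > 5)    =>  x > 5    (more restrictive bound)
+     (x == 3) AND (x > 5)   =>  false    (constant fails second test)
+     (x <= 5) AND (x >= 5)  =>  x == 5   (singleton range)
+     (x < 3) AND (x > 5)    =>  false    (disjoint ranges)
+   When no single comparison captures the conjunction, the result is
+   NULL_TREE.  */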
+
+/* Helper function for or_comparisons_1:  try to simplify the OR of the
+   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
+   If INVERT is true, invert the value of VAR before doing the OR.
+   Return NULL_TREE if we can't simplify this to a single expression.  */
+
+static tree
+or_var_with_comparison (tree var, bool invert,
+			enum tree_code code2, tree op2a, tree op2b)
+{
+  tree t;
+  gimple stmt = SSA_NAME_DEF_STMT (var);
+
+  /* We can only deal with variables whose definitions are assignments.  */
+  if (!is_gimple_assign (stmt))
+    return NULL_TREE;
+  
+  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
+     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
+     Then we only have to consider the simpler non-inverted cases.  */
+  if (invert)
+    t = and_var_with_comparison_1 (stmt, 
+				   invert_tree_comparison (code2, false),
+				   op2a, op2b);
+  else
+    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
+  return canonicalize_bool (t, invert);
+}
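+
+/* Example of the inverted case, for an integer SSA name x: if VAR is
+   defined as (x > 7), then !VAR OR (x < 5) is rewritten as
+   !((x > 7) AND (x >= 5)); the inner AND folds to (x > 7), which
+   canonicalize_bool then inverts, yielding the single test
+   (x <= 7).  */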
+
+/* Try to simplify the OR of the ssa variable defined by the assignment
+   STMT with the comparison specified by (OP2A CODE2 OP2B).
+   Return NULL_TREE if we can't simplify this to a single expression.  */
+
+static tree
+or_var_with_comparison_1 (gimple stmt,
+			  enum tree_code code2, tree op2a, tree op2b)
+{
+  tree var = gimple_assign_lhs (stmt);
+  tree true_test_var = NULL_TREE;
+  tree false_test_var = NULL_TREE;
+  enum tree_code innercode = gimple_assign_rhs_code (stmt);
+
+  /* Check for identities like (var OR (var != 0)) => true.  */
+  if (TREE_CODE (op2a) == SSA_NAME
+      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
+    {
+      if ((code2 == NE_EXPR && integer_zerop (op2b))
+	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
+	{
+	  true_test_var = op2a;
+	  if (var == true_test_var)
+	    return var;
+	}
+      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
+	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
+	{
+	  false_test_var = op2a;
+	  if (var == false_test_var)
+	    return boolean_true_node;
+	}
+    }
+
+  /* If the definition is a comparison, recurse on it.  */
+  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
+    {
+      tree t = or_comparisons_1 (innercode,
+				 gimple_assign_rhs1 (stmt),
+				 gimple_assign_rhs2 (stmt),
+				 code2,
+				 op2a,
+				 op2b);
+      if (t)
+	return t;
+    }
+  
+  /* If the definition is an AND or OR expression, we may be able to
+     simplify by reassociating.  */
+  if (innercode == TRUTH_AND_EXPR
+      || innercode == TRUTH_OR_EXPR
+      || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
+	  && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
+    {
+      tree inner1 = gimple_assign_rhs1 (stmt);
+      tree inner2 = gimple_assign_rhs2 (stmt);
+      gimple s;
+      tree t;
+      tree partial = NULL_TREE;
+      bool is_or = (innercode == TRUTH_OR_EXPR || innercode == BIT_IOR_EXPR);
+      
+      /* Check for boolean identities that don't require recursive examination
+	 of inner1/inner2:
+	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
+	 inner1 OR (inner1 AND inner2) => inner1
+	 !inner1 OR (inner1 OR inner2) => true
+	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
+      */
+      if (inner1 == true_test_var)
+	return (is_or ? var : inner1);
+      else if (inner2 == true_test_var)
+	return (is_or ? var : inner2);
+      else if (inner1 == false_test_var)
+	return (is_or
+		? boolean_true_node
+		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
+      else if (inner2 == false_test_var)
+	return (is_or
+		? boolean_true_node
+		: or_var_with_comparison (inner1, false, code2, op2a, op2b));
+      
+      /* Next, redistribute/reassociate the OR across the inner tests.
+	 Compute the first partial result, (inner1 OR (op2a code2 op2b)).  */
+      if (TREE_CODE (inner1) == SSA_NAME
+	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
+	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
+	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
+					     gimple_assign_rhs1 (s),
+					     gimple_assign_rhs2 (s),
+					     code2, op2a, op2b)))
+	{
+	  /* Handle the OR case, where we are reassociating:
+	     (inner1 OR inner2) OR (op2a code2 op2b)
+	     => (t OR inner2)
+	     If the partial result t is a constant, we win.  Otherwise
+	     continue on to try reassociating with the other inner test.  */
+	  if (is_or)
+	    {
+	      if (integer_onep (t))
+		return boolean_true_node;
+	      else if (integer_zerop (t))
+		return inner2;
+	    }
+	  
+	  /* Handle the AND case, where we are redistributing:
+	     (inner1 AND inner2) OR (op2a code2 op2b)
+	     => (t AND (inner2 OR (op2a code2 op2b)))  */
+	  else if (integer_zerop (t))
+	    return boolean_false_node;
+
+	  /* Save partial result for later.  */
+	  partial = t;
+	}
+      
+      /* Compute the second partial result, (inner2 OR (op2a code2 op2b)).  */
+      if (TREE_CODE (inner2) == SSA_NAME
+	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
+	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
+	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
+					     gimple_assign_rhs1 (s),
+					     gimple_assign_rhs2 (s),
+					     code2, op2a, op2b)))
+	{
+	  /* Handle the OR case, where we are reassociating:
+	     (inner1 OR inner2) OR (op2a code2 op2b)
+	     => (inner1 OR t)
+	     => (t OR partial)  */
+	  if (is_or)
+	    {
+	      if (integer_zerop (t))
+		return inner1;
+	      else if (integer_onep (t))
+		return boolean_true_node;
+	      /* If both are the same, we can apply the identity
+		 (x OR x) == x.  */
+	      else if (partial && same_bool_result_p (t, partial))
+		return t;
+	    }
+	  
+	  /* Handle the AND case, where we are redistributing:
+	     (inner1 AND inner2) OR (op2a code2 op2b)
+	     => (t AND (inner1 OR (op2a code2 op2b)))
+	     => (t AND partial)  */
+	  else 
+	    {
+	      if (integer_zerop (t))
+		return boolean_false_node;
+	      else if (partial)
+		{
+		  /* We already got a simplification for the other
+		     operand to the redistributed AND expression.  The
+		     interesting case is when at least one is true.
+		     Or, if both are the same, we can apply the identity
+		     (x AND x) == x.  */
+		  if (integer_onep (partial))
+		    return t;
+		  else if (integer_onep (t))
+		    return partial;
+		  else if (same_bool_result_p (t, partial))
+		    return t;
+		}
+	    }
+	}
+    }
+  return NULL_TREE;
+}
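+
+/* As a concrete instance of the redistribution above: if the variable
+   is defined as (a OR b) and a is in turn defined as (x < 1), then
+   folding it against (x >= 0) recurses into (x < 1) OR (x >= 0),
+   which is true, so the whole disjunction folds to true regardless
+   of b.  */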
+
+/* Try to simplify the OR of two comparisons defined by
+   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
+   If this can be done without constructing an intermediate value,
+   return the resulting tree; otherwise NULL_TREE is returned.
+   This function is deliberately asymmetric as it recurses on SSA_DEFs
+   in the first comparison but not the second.  */
+
+static tree
+or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
+		  enum tree_code code2, tree op2a, tree op2b)
+{
+  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
+  if (operand_equal_p (op1a, op2a, 0)
+      && operand_equal_p (op1b, op2b, 0))
+    {
+      tree t = combine_comparisons (UNKNOWN_LOCATION,
+				    TRUTH_ORIF_EXPR, code1, code2,
+				    boolean_type_node, op1a, op1b);
+      if (t)
+	return t;
+    }
+
+  /* Likewise the swapped case of the above.  */
+  if (operand_equal_p (op1a, op2b, 0)
+      && operand_equal_p (op1b, op2a, 0))
+    {
+      tree t = combine_comparisons (UNKNOWN_LOCATION,
+				    TRUTH_ORIF_EXPR, code1,
+				    swap_tree_comparison (code2),
+				    boolean_type_node, op1a, op1b);
+      if (t)
+	return t;
+    }
+
+  /* If both comparisons are of the same value against constants, we might
+     be able to merge them.  */
+  if (operand_equal_p (op1a, op2a, 0)
+      && TREE_CODE (op1b) == INTEGER_CST
+      && TREE_CODE (op2b) == INTEGER_CST)
+    {
+      int cmp = tree_int_cst_compare (op1b, op2b);
+
+      /* If we have (op1a != op1b), we should either be able to
+	 return that or TRUE, depending on whether the constant op1b
+	 also satisfies the other comparison against op2b.  */
+      if (code1 == NE_EXPR)
+	{
+	  bool done = true;
+	  bool val;
+	  switch (code2)
+	    {
+	    case EQ_EXPR: val = (cmp == 0); break;
+	    case NE_EXPR: val = (cmp != 0); break;
+	    case LT_EXPR: val = (cmp < 0); break;
+	    case GT_EXPR: val = (cmp > 0); break;
+	    case LE_EXPR: val = (cmp <= 0); break;
+	    case GE_EXPR: val = (cmp >= 0); break;
+	    default: done = false;
+	    }
+	  if (done)
+	    {
+	      if (val)
+		return boolean_true_node;
+	      else
+		return fold_build2 (code1, boolean_type_node, op1a, op1b);
+	    }
+	}
+      /* Likewise if the second comparison is a != comparison.  */
+      else if (code2 == NE_EXPR)
+	{
+	  bool done = true;
+	  bool val;
+	  switch (code1)
+	    {
+	    case EQ_EXPR: val = (cmp == 0); break;
+	    case NE_EXPR: val = (cmp != 0); break;
+	    case LT_EXPR: val = (cmp > 0); break;
+	    case GT_EXPR: val = (cmp < 0); break;
+	    case LE_EXPR: val = (cmp >= 0); break;
+	    case GE_EXPR: val = (cmp <= 0); break;
+	    default: done = false;
+	    }
+	  if (done)
+	    {
+	      if (val)
+		return boolean_true_node;
+	      else
+		return fold_build2 (code2, boolean_type_node, op2a, op2b);
+	    }
+	}
+
+      /* See if an equality test is redundant with the other comparison.  */
+      else if (code1 == EQ_EXPR)
+	{
+	  bool val;
+	  switch (code2)
+	    {
+	    case EQ_EXPR: val = (cmp == 0); break;
+	    case NE_EXPR: val = (cmp != 0); break;
+	    case LT_EXPR: val = (cmp < 0); break;
+	    case GT_EXPR: val = (cmp > 0); break;
+	    case LE_EXPR: val = (cmp <= 0); break;
+	    case GE_EXPR: val = (cmp >= 0); break;
+	    default:
+	      val = false;
+	    }
+	  if (val)
+	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
+	}
+      else if (code2 == EQ_EXPR)
+	{
+	  bool val;
+	  switch (code1)
+	    {
+	    case EQ_EXPR: val = (cmp == 0); break;
+	    case NE_EXPR: val = (cmp != 0); break;
+	    case LT_EXPR: val = (cmp > 0); break;
+	    case GT_EXPR: val = (cmp < 0); break;
+	    case LE_EXPR: val = (cmp >= 0); break;
+	    case GE_EXPR: val = (cmp <= 0); break;
+	    default:
+	      val = false;
+	    }
+	  if (val)
+	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
+	}
+
+      /* Choose the less restrictive of two < or <= comparisons.  */
+      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
+	       && (code2 == LT_EXPR || code2 == LE_EXPR))
+	{
+	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
+	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
+	  else
+	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
+	}
+
+      /* Likewise choose the less restrictive of two > or >= comparisons.  */
+      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
+	       && (code2 == GT_EXPR || code2 == GE_EXPR))
+	{
+	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
+	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
+	  else
+	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
+	}
+
+      /* Check for singleton ranges.  */
+      else if (cmp == 0
+	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
+		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
+	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
+
+      /* Check for less/greater pairs that don't restrict the range at all.  */
+      else if (cmp >= 0
+	       && (code1 == LT_EXPR || code1 == LE_EXPR)
+	       && (code2 == GT_EXPR || code2 == GE_EXPR))
+	return boolean_true_node;
+      else if (cmp <= 0
+	       && (code1 == GT_EXPR || code1 == GE_EXPR)
+	       && (code2 == LT_EXPR || code2 == LE_EXPR))
+	return boolean_true_node;
+    }
+
+  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
+     NAME's definition is a truth value.  See if there are any simplifications
+     that can be done against the NAME's definition.  */
+  if (TREE_CODE (op1a) == SSA_NAME
+      && (code1 == NE_EXPR || code1 == EQ_EXPR)
+      && (integer_zerop (op1b) || integer_onep (op1b)))
+    {
+      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
+		     || (code1 == NE_EXPR && integer_onep (op1b)));
+      gimple stmt = SSA_NAME_DEF_STMT (op1a);
+      switch (gimple_code (stmt))
+	{
+	case GIMPLE_ASSIGN:
+	  /* Try to simplify by copy-propagating the definition.  */
+	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
+
+	case GIMPLE_PHI:
+	  /* If every argument to the PHI produces the same result when
+	     ORed with the second comparison, we win.
+	     Do not do this unless the type is bool since we need a bool
+	     result here anyway.  */
+	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
+	    {
+	      tree result = NULL_TREE;
+	      unsigned i;
+	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
+		{
+		  tree arg = gimple_phi_arg_def (stmt, i);
+		  
+		  /* If this PHI has itself as an argument, ignore it.
+		     If all the other args produce the same result,
+		     we're still OK.  */
+		  if (arg == gimple_phi_result (stmt))
+		    continue;
+		  else if (TREE_CODE (arg) == INTEGER_CST)
+		    {
+		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
+			{
+			  if (!result)
+			    result = boolean_true_node;
+			  else if (!integer_onep (result))
+			    return NULL_TREE;
+			}
+		      else if (!result)
+			result = fold_build2 (code2, boolean_type_node,
+					      op2a, op2b);
+		      else if (!same_bool_comparison_p (result,
+							code2, op2a, op2b))
+			return NULL_TREE;
+		    }
+		  else if (TREE_CODE (arg) == SSA_NAME)
+		    {
+		      tree temp = or_var_with_comparison (arg, invert,
+							  code2, op2a, op2b);
+		      if (!temp)
+			return NULL_TREE;
+		      else if (!result)
+			result = temp;
+		      else if (!same_bool_result_p (result, temp))
+			return NULL_TREE;
+		    }
+		  else
+		    return NULL_TREE;
+		}
+	      return result;
+	    }
+
+	default:
+	  break;
+	}
+    }
+  return NULL_TREE;
+}
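+
+/* For illustration, suppose a is an SSA name defined as a = (x > 5).
+   Then or_comparisons_1 folds (a != 0) OR (x > 2) by following a's
+   definition, reducing the problem to (x > 5) OR (x > 2), which
+   merges into the weaker test (x > 2).  */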
+
+/* Try to simplify the OR of two comparisons, specified by
+   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
+   If this can be simplified to a single expression (without requiring
+   introducing more SSA variables to hold intermediate values),
+   return the resulting tree.  Otherwise return NULL_TREE.
+   If the result expression is non-null, it has boolean type.  */
+
+tree
+maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
+			   enum tree_code code2, tree op2a, tree op2b)
+{
+  tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
+  if (t)
+    return t;
+  else
+    return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
+}
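+
+/* A few concrete folds performed by maybe_fold_or_comparisons, for an
+   arbitrary integer operand x:
+     (x < 2) OR (x < 5)     =>  x < 5    (less restrictive bound)
+     (x < 5) OR (x > 5)     =>  x != 5   (complement of a singleton)
+     (x < 5) OR (x >= 5)    =>  true     (covers the whole range)
+   When no single comparison captures the disjunction, the result is
+   NULL_TREE.  */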