diff gcc/tree-ssa-operands.c @ 111:04ced10e8804

gcc 7
author: kono
date: Fri, 27 Oct 2017 22:46:09 +0900
parents: f6334be47118
children: ab0bcb71f44d 84e7813d76e9
--- a/gcc/tree-ssa-operands.c	Sun Aug 21 07:07:55 2011 +0900
+++ b/gcc/tree-ssa-operands.c	Fri Oct 27 22:46:09 2017 +0900
@@ -1,6 +1,5 @@
 /* SSA operands management for trees.
-   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
-   Free Software Foundation, Inc.
+   Copyright (C) 2003-2017 Free Software Foundation, Inc.
 
 This file is part of GCC.
 
@@ -21,19 +20,17 @@
 #include "config.h"
 #include "system.h"
 #include "coretypes.h"
-#include "tm.h"
+#include "backend.h"
 #include "tree.h"
-#include "flags.h"
-#include "function.h"
-#include "tree-pretty-print.h"
+#include "gimple.h"
+#include "timevar.h"
+#include "ssa.h"
 #include "gimple-pretty-print.h"
-#include "tree-flow.h"
-#include "tree-inline.h"
-#include "tree-pass.h"
-#include "ggc.h"
-#include "timevar.h"
-#include "langhooks.h"
-#include "ipa-reference.h"
+#include "diagnostic-core.h"
+#include "stmt.h"
+#include "print-tree.h"
+#include "dumpfile.h"
+
 
 /* This file contains the code required to manage the operands cache of the
    SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
@@ -75,34 +72,6 @@
    operand vector for VUSE, then the new vector will also be modified
    such that it contains 'a_5' rather than 'a'.  */
 
-/* Structure storing statistics on how many call clobbers we have, and
-   how many where avoided.  */
-
-static struct
-{
-  /* Number of call-clobbered ops we attempt to add to calls in
-     add_call_clobbered_mem_symbols.  */
-  unsigned int clobbered_vars;
-
-  /* Number of write-clobbers (VDEFs) avoided by using
-     not_written information.  */
-  unsigned int static_write_clobbers_avoided;
-
-  /* Number of reads (VUSEs) avoided by using not_read information.  */
-  unsigned int static_read_clobbers_avoided;
-
-  /* Number of write-clobbers avoided because the variable can't escape to
-     this call.  */
-  unsigned int unescapable_clobbers_avoided;
-
-  /* Number of read-only uses we attempt to add to calls in
-     add_call_read_mem_symbols.  */
-  unsigned int readonly_clobbers;
-
-  /* Number of read-only uses we avoid using not_read information.  */
-  unsigned int static_readonly_clobbers_avoided;
-} clobber_stats;
-
 
 /* Flags to describe operand properties in helpers.  */
 
@@ -121,22 +90,17 @@
    VUSE for 'b'.  */
 #define opf_no_vops 	(1 << 1)
 
-/* Operand is an implicit reference.  This is used to distinguish
-   explicit assignments in the form of MODIFY_EXPR from
-   clobbering sites like function calls or ASM_EXPRs.  */
-#define opf_implicit	(1 << 2)
-
 /* Operand is in a place where address-taken does not imply addressable.  */
 #define opf_non_addressable (1 << 3)
 
 /* Operand is in a place where opf_non_addressable does not apply.  */
 #define opf_not_non_addressable (1 << 4)
 
-/* Array for building all the def operands.  */
-static VEC(tree,heap) *build_defs;
+/* Operand is having its address taken.  */
+#define opf_address_taken (1 << 5)
 
 /* Array for building all the use operands.  */
-static VEC(tree,heap) *build_uses;
+static vec<tree *> build_uses;
 
 /* The built VDEF operand.  */
 static tree build_vdef;
@@ -148,36 +112,28 @@
    compilations of multiple functions.  */
 static bitmap_obstack operands_bitmap_obstack;
 
-static void get_expr_operands (gimple, tree *, int);
+static void get_expr_operands (struct function *, gimple *, tree *, int);
 
 /* Number of functions with initialized ssa_operands.  */
 static int n_initialized = 0;
 
-/* Return the DECL_UID of the base variable of T.  */
-
-static inline unsigned
-get_name_decl (const_tree t)
+/* Accessor to tree-ssa-operands.c caches.  */
+static inline struct ssa_operands *
+gimple_ssa_operands (const struct function *fun)
 {
-  if (TREE_CODE (t) != SSA_NAME)
-    return DECL_UID (t);
-  else
-    return DECL_UID (SSA_NAME_VAR (t));
+  return &fun->gimple_df->ssa_operands;
 }
 
 
 /*  Return true if the SSA operands cache is active.  */
 
 bool
-ssa_operands_active (void)
+ssa_operands_active (struct function *fun)
 {
-  /* This function may be invoked from contexts where CFUN is NULL
-     (IPA passes), return false for now.  FIXME: operands may be
-     active in each individual function, maybe this function should
-     take CFUN as a parameter.  */
-  if (cfun == NULL)
+  if (fun == NULL)
     return false;
 
-  return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
+  return fun->gimple_df && gimple_ssa_operands (fun)->ops_active;
 }
 
 
@@ -185,16 +141,17 @@
    representative of all of the virtual operands FUD chain.  */
 
 static void
-create_vop_var (void)
+create_vop_var (struct function *fn)
 {
   tree global_var;
 
-  gcc_assert (cfun->gimple_df->vop == NULL_TREE);
+  gcc_assert (fn->gimple_df->vop == NULL_TREE);
 
   global_var = build_decl (BUILTINS_LOCATION, VAR_DECL,
 			   get_identifier (".MEM"),
 			   void_type_node);
   DECL_ARTIFICIAL (global_var) = 1;
+  DECL_IGNORED_P (global_var) = 1;
   TREE_READONLY (global_var) = 0;
   DECL_EXTERNAL (global_var) = 1;
   TREE_STATIC (global_var) = 1;
@@ -202,10 +159,9 @@
   DECL_CONTEXT (global_var) = NULL_TREE;
   TREE_THIS_VOLATILE (global_var) = 0;
   TREE_ADDRESSABLE (global_var) = 0;
+  VAR_DECL_IS_VIRTUAL_OPERAND (global_var) = 1;
 
-  create_var_ann (global_var);
-  add_referenced_var (global_var);
-  cfun->gimple_df->vop = global_var;
+  fn->gimple_df->vop = global_var;
 }
 
 /* These are the sizes of the operand memory buffer in bytes which gets
@@ -223,185 +179,130 @@
 /* Initialize the operand cache routines.  */
 
 void
-init_ssa_operands (void)
+init_ssa_operands (struct function *fn)
 {
   if (!n_initialized++)
     {
-      build_defs = VEC_alloc (tree, heap, 5);
-      build_uses = VEC_alloc (tree, heap, 10);
+      build_uses.create (10);
       build_vuse = NULL_TREE;
       build_vdef = NULL_TREE;
       bitmap_obstack_initialize (&operands_bitmap_obstack);
     }
 
-  gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
-  gimple_ssa_operands (cfun)->operand_memory_index
-     = gimple_ssa_operands (cfun)->ssa_operand_mem_size;
-  gimple_ssa_operands (cfun)->ops_active = true;
-  memset (&clobber_stats, 0, sizeof (clobber_stats));
-  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_INIT;
-  create_vop_var ();
+  gcc_assert (gimple_ssa_operands (fn)->operand_memory == NULL);
+  gimple_ssa_operands (fn)->operand_memory_index
+     = gimple_ssa_operands (fn)->ssa_operand_mem_size;
+  gimple_ssa_operands (fn)->ops_active = true;
+  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_INIT;
+  create_vop_var (fn);
 }
 
 
 /* Dispose of anything required by the operand routines.  */
 
 void
-fini_ssa_operands (void)
+fini_ssa_operands (struct function *fn)
 {
   struct ssa_operand_memory_d *ptr;
 
   if (!--n_initialized)
     {
-      VEC_free (tree, heap, build_defs);
-      VEC_free (tree, heap, build_uses);
+      build_uses.release ();
       build_vdef = NULL_TREE;
       build_vuse = NULL_TREE;
     }
 
-  gimple_ssa_operands (cfun)->free_defs = NULL;
-  gimple_ssa_operands (cfun)->free_uses = NULL;
+  gimple_ssa_operands (fn)->free_uses = NULL;
 
-  while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
+  while ((ptr = gimple_ssa_operands (fn)->operand_memory) != NULL)
     {
-      gimple_ssa_operands (cfun)->operand_memory
-	= gimple_ssa_operands (cfun)->operand_memory->next;
+      gimple_ssa_operands (fn)->operand_memory
+	= gimple_ssa_operands (fn)->operand_memory->next;
       ggc_free (ptr);
     }
 
-  gimple_ssa_operands (cfun)->ops_active = false;
+  gimple_ssa_operands (fn)->ops_active = false;
 
   if (!n_initialized)
     bitmap_obstack_release (&operands_bitmap_obstack);
 
-  cfun->gimple_df->vop = NULL_TREE;
-
-  if (dump_file && (dump_flags & TDF_STATS))
-    {
-      fprintf (dump_file, "Original clobbered vars:           %d\n",
-	       clobber_stats.clobbered_vars);
-      fprintf (dump_file, "Static write clobbers avoided:     %d\n",
-	       clobber_stats.static_write_clobbers_avoided);
-      fprintf (dump_file, "Static read clobbers avoided:      %d\n",
-	       clobber_stats.static_read_clobbers_avoided);
-      fprintf (dump_file, "Unescapable clobbers avoided:      %d\n",
-	       clobber_stats.unescapable_clobbers_avoided);
-      fprintf (dump_file, "Original read-only clobbers:       %d\n",
-	       clobber_stats.readonly_clobbers);
-      fprintf (dump_file, "Static read-only clobbers avoided: %d\n",
-	       clobber_stats.static_readonly_clobbers_avoided);
-    }
+  fn->gimple_df->vop = NULL_TREE;
 }
 
 
 /* Return memory for an operand of size SIZE.  */
 
 static inline void *
-ssa_operand_alloc (unsigned size)
+ssa_operand_alloc (struct function *fn, unsigned size)
 {
   char *ptr;
 
-  gcc_assert (size == sizeof (struct use_optype_d)
-	      || size == sizeof (struct def_optype_d));
+  gcc_assert (size == sizeof (struct use_optype_d));
 
-  if (gimple_ssa_operands (cfun)->operand_memory_index + size
-      >= gimple_ssa_operands (cfun)->ssa_operand_mem_size)
+  if (gimple_ssa_operands (fn)->operand_memory_index + size
+      >= gimple_ssa_operands (fn)->ssa_operand_mem_size)
     {
       struct ssa_operand_memory_d *ptr;
 
-      switch (gimple_ssa_operands (cfun)->ssa_operand_mem_size)
+      switch (gimple_ssa_operands (fn)->ssa_operand_mem_size)
 	{
 	case OP_SIZE_INIT:
-	  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_1;
+	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_1;
 	  break;
 	case OP_SIZE_1:
-	  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_2;
+	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_2;
 	  break;
 	case OP_SIZE_2:
 	case OP_SIZE_3:
-	  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_3;
+	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_3;
 	  break;
 	default:
 	  gcc_unreachable ();
 	}
 
 
-      ptr = ggc_alloc_ssa_operand_memory_d (sizeof (void *)
-                        + gimple_ssa_operands (cfun)->ssa_operand_mem_size);
+      ptr = (ssa_operand_memory_d *) ggc_internal_alloc
+	(sizeof (void *) + gimple_ssa_operands (fn)->ssa_operand_mem_size);
 
-      ptr->next = gimple_ssa_operands (cfun)->operand_memory;
-      gimple_ssa_operands (cfun)->operand_memory = ptr;
-      gimple_ssa_operands (cfun)->operand_memory_index = 0;
+      ptr->next = gimple_ssa_operands (fn)->operand_memory;
+      gimple_ssa_operands (fn)->operand_memory = ptr;
+      gimple_ssa_operands (fn)->operand_memory_index = 0;
     }
 
-  ptr = &(gimple_ssa_operands (cfun)->operand_memory
-	  ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
-  gimple_ssa_operands (cfun)->operand_memory_index += size;
+  ptr = &(gimple_ssa_operands (fn)->operand_memory
+	  ->mem[gimple_ssa_operands (fn)->operand_memory_index]);
+  gimple_ssa_operands (fn)->operand_memory_index += size;
   return ptr;
 }
 
 
-/* Allocate a DEF operand.  */
-
-static inline struct def_optype_d *
-alloc_def (void)
-{
-  struct def_optype_d *ret;
-  if (gimple_ssa_operands (cfun)->free_defs)
-    {
-      ret = gimple_ssa_operands (cfun)->free_defs;
-      gimple_ssa_operands (cfun)->free_defs
-	= gimple_ssa_operands (cfun)->free_defs->next;
-    }
-  else
-    ret = (struct def_optype_d *)
-	  ssa_operand_alloc (sizeof (struct def_optype_d));
-  return ret;
-}
-
-
 /* Allocate a USE operand.  */
 
 static inline struct use_optype_d *
-alloc_use (void)
+alloc_use (struct function *fn)
 {
   struct use_optype_d *ret;
-  if (gimple_ssa_operands (cfun)->free_uses)
+  if (gimple_ssa_operands (fn)->free_uses)
     {
-      ret = gimple_ssa_operands (cfun)->free_uses;
-      gimple_ssa_operands (cfun)->free_uses
-	= gimple_ssa_operands (cfun)->free_uses->next;
+      ret = gimple_ssa_operands (fn)->free_uses;
+      gimple_ssa_operands (fn)->free_uses
+	= gimple_ssa_operands (fn)->free_uses->next;
     }
   else
     ret = (struct use_optype_d *)
-          ssa_operand_alloc (sizeof (struct use_optype_d));
+          ssa_operand_alloc (fn, sizeof (struct use_optype_d));
   return ret;
 }
 
 
-/* Adds OP to the list of defs after LAST.  */
-
-static inline def_optype_p
-add_def_op (tree *op, def_optype_p last)
-{
-  def_optype_p new_def;
-
-  new_def = alloc_def ();
-  DEF_OP_PTR (new_def) = op;
-  last->next = new_def;
-  new_def->next = NULL;
-  return new_def;
-}
-
-
 /* Adds OP to the list of uses of statement STMT after LAST.  */
 
 static inline use_optype_p
-add_use_op (gimple stmt, tree *op, use_optype_p last)
+add_use_op (struct function *fn, gimple *stmt, tree *op, use_optype_p last)
 {
   use_optype_p new_use;
 
-  new_use = alloc_use ();
+  new_use = alloc_use (fn);
   USE_OP_PTR (new_use)->use = op;
   link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt);
   last->next = new_use;
@@ -412,19 +313,11 @@
 
 
 /* Takes elements from build_defs and turns them into def operands of STMT.
-   TODO -- Make build_defs VEC of tree *.  */
+   TODO -- Make build_defs vec of tree *.  */
 
 static inline void
-finalize_ssa_defs (gimple stmt)
+finalize_ssa_defs (struct function *fn, gimple *stmt)
 {
-  unsigned new_i;
-  struct def_optype_d new_list;
-  def_optype_p old_ops, last;
-  unsigned int num = VEC_length (tree, build_defs);
-
-  /* There should only be a single real definition per assignment.  */
-  gcc_assert ((stmt && gimple_code (stmt) != GIMPLE_ASSIGN) || num <= 1);
-
   /* Pre-pend the vdef we may have built.  */
   if (build_vdef != NULL_TREE)
     {
@@ -434,17 +327,8 @@
 	oldvdef = SSA_NAME_VAR (oldvdef);
       if (oldvdef != build_vdef)
 	gimple_set_vdef (stmt, build_vdef);
-      VEC_safe_insert (tree, heap, build_defs, 0, (tree)gimple_vdef_ptr (stmt));
-      ++num;
     }
 
-  new_list.next = NULL;
-  last = &new_list;
-
-  old_ops = gimple_def_ops (stmt);
-
-  new_i = 0;
-
   /* Clear and unlink a no longer necessary VDEF.  */
   if (build_vdef == NULL_TREE
       && gimple_vdef (stmt) != NULL_TREE)
@@ -452,7 +336,7 @@
       if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
 	{
 	  unlink_stmt_vdef (stmt);
-	  release_ssa_name (gimple_vdef (stmt));
+	  release_ssa_name_fn (fn, gimple_vdef (stmt));
 	}
       gimple_set_vdef (stmt, NULL_TREE);
     }
@@ -460,34 +344,17 @@
   /* If we have a non-SSA_NAME VDEF, mark it for renaming.  */
   if (gimple_vdef (stmt)
       && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
-    mark_sym_for_renaming (gimple_vdef (stmt));
-
-  /* Check for the common case of 1 def that hasn't changed.  */
-  if (old_ops && old_ops->next == NULL && num == 1
-      && (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops))
-    return;
-
-  /* If there is anything in the old list, free it.  */
-  if (old_ops)
     {
-      old_ops->next = gimple_ssa_operands (cfun)->free_defs;
-      gimple_ssa_operands (cfun)->free_defs = old_ops;
+      fn->gimple_df->rename_vops = 1;
+      fn->gimple_df->ssa_renaming_needed = 1;
     }
-
-  /* If there is anything remaining in the build_defs list, simply emit it.  */
-  for ( ; new_i < num; new_i++)
-    last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last);
-
-  /* Now set the stmt's operands.  */
-  gimple_set_def_ops (stmt, new_list.next);
 }
 
 
-/* Takes elements from build_uses and turns them into use operands of STMT.
-   TODO -- Make build_uses VEC of tree *.  */
+/* Takes elements from build_uses and turns them into use operands of STMT.  */
 
 static inline void
-finalize_ssa_uses (gimple stmt)
+finalize_ssa_uses (struct function *fn, gimple *stmt)
 {
   unsigned new_i;
   struct use_optype_d new_list;
@@ -503,7 +370,7 @@
       if (oldvuse != (build_vuse != NULL_TREE
 		      ? build_vuse : build_vdef))
 	gimple_set_vuse (stmt, NULL_TREE);
-      VEC_safe_insert (tree, heap, build_uses, 0, (tree)gimple_vuse_ptr (stmt));
+      build_uses.safe_insert (0, gimple_vuse_ptr (stmt));
     }
 
   new_list.next = NULL;
@@ -519,10 +386,11 @@
   /* If there is anything in the old list, free it.  */
   if (old_ops)
     {
-      for (ptr = old_ops; ptr; ptr = ptr->next)
+      for (ptr = old_ops; ptr->next; ptr = ptr->next)
 	delink_imm_use (USE_OP_PTR (ptr));
-      old_ops->next = gimple_ssa_operands (cfun)->free_uses;
-      gimple_ssa_operands (cfun)->free_uses = old_ops;
+      delink_imm_use (USE_OP_PTR (ptr));
+      ptr->next = gimple_ssa_operands (fn)->free_uses;
+      gimple_ssa_operands (fn)->free_uses = old_ops;
     }
 
   /* If we added a VUSE, make sure to set the operand if it is not already
@@ -530,15 +398,17 @@
   if (build_vuse != NULL_TREE
       && gimple_vuse (stmt) == NULL_TREE)
     {
-      gimple_set_vuse (stmt, gimple_vop (cfun));
-      mark_sym_for_renaming (gimple_vop (cfun));
+      gimple_set_vuse (stmt, gimple_vop (fn));
+      fn->gimple_df->rename_vops = 1;
+      fn->gimple_df->ssa_renaming_needed = 1;
     }
 
   /* Now create nodes for all the new nodes.  */
-  for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
-    last = add_use_op (stmt,
-		       (tree *) VEC_index (tree, build_uses, new_i),
-		       last);
+  for (new_i = 0; new_i < build_uses.length (); new_i++)
+    {
+      tree *op = build_uses[new_i];
+      last = add_use_op (fn, stmt, op, last);
+    }
 
   /* Now set the stmt's operands.  */
   gimple_set_use_ops (stmt, new_list.next);
@@ -553,18 +423,17 @@
 {
   build_vdef = NULL_TREE;
   build_vuse = NULL_TREE;
-  VEC_truncate (tree, build_defs, 0);
-  VEC_truncate (tree, build_uses, 0);
+  build_uses.truncate (0);
 }
 
 
 /* Finalize all the build vectors, fill the new ones into INFO.  */
 
 static inline void
-finalize_ssa_stmt_operands (gimple stmt)
+finalize_ssa_stmt_operands (struct function *fn, gimple *stmt)
 {
-  finalize_ssa_defs (stmt);
-  finalize_ssa_uses (stmt);
+  finalize_ssa_defs (fn, stmt);
+  finalize_ssa_uses (fn, stmt);
   cleanup_build_arrays ();
 }
 
@@ -574,28 +443,18 @@
 static inline void
 start_ssa_stmt_operands (void)
 {
-  gcc_assert (VEC_length (tree, build_defs) == 0);
-  gcc_assert (VEC_length (tree, build_uses) == 0);
+  gcc_assert (build_uses.length () == 0);
   gcc_assert (build_vuse == NULL_TREE);
   gcc_assert (build_vdef == NULL_TREE);
 }
 
 
-/* Add DEF_P to the list of pointers to operands.  */
-
-static inline void
-append_def (tree *def_p)
-{
-  VEC_safe_push (tree, heap, build_defs, (tree) def_p);
-}
-
-
 /* Add USE_P to the list of pointers to operands.  */
 
 static inline void
 append_use (tree *use_p)
 {
-  VEC_safe_push (tree, heap, build_uses, (tree) use_p);
+  build_uses.safe_push (use_p);
 }
 
 
@@ -604,9 +463,6 @@
 static inline void
 append_vdef (tree var)
 {
-  if (!optimize)
-    return;
-
   gcc_assert ((build_vdef == NULL_TREE
 	       || build_vdef == var)
 	      && (build_vuse == NULL_TREE
@@ -622,9 +478,6 @@
 static inline void
 append_vuse (tree var)
 {
-  if (!optimize)
-    return;
-
   gcc_assert (build_vuse == NULL_TREE
 	      || build_vuse == var);
 
@@ -634,7 +487,8 @@
 /* Add virtual operands for STMT.  FLAGS is as in get_expr_operands.  */
 
 static void
-add_virtual_operand (gimple stmt ATTRIBUTE_UNUSED, int flags)
+add_virtual_operand (struct function *fn,
+		     gimple *stmt ATTRIBUTE_UNUSED, int flags)
 {
   /* Add virtual operands to the stmt, unless the caller has specifically
      requested not to do that (used when adding operands inside an
@@ -645,9 +499,9 @@
   gcc_assert (!is_gimple_debug (stmt));
 
   if (flags & opf_def)
-    append_vdef (gimple_vop (cfun));
+    append_vdef (gimple_vop (fn));
   else
-    append_vuse (gimple_vop (cfun));
+    append_vuse (gimple_vop (fn));
 }
 
 
@@ -657,29 +511,32 @@
    added to virtual operands.  */
 
 static void
-add_stmt_operand (tree *var_p, gimple stmt, int flags)
+add_stmt_operand (struct function *fn, tree *var_p, gimple *stmt, int flags)
 {
-  tree var, sym;
+  tree var = *var_p;
 
   gcc_assert (SSA_VAR_P (*var_p));
 
-  var = *var_p;
-  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
-
-  /* Mark statements with volatile operands.  */
-  if (TREE_THIS_VOLATILE (sym))
-    gimple_set_has_volatile_ops (stmt, true);
-
-  if (is_gimple_reg (sym))
+  if (is_gimple_reg (var))
     {
       /* The variable is a GIMPLE register.  Add it to real operands.  */
       if (flags & opf_def)
-	append_def (var_p);
+	;
       else
 	append_use (var_p);
+      if (DECL_P (*var_p))
+	fn->gimple_df->ssa_renaming_needed = 1;
     }
   else
-    add_virtual_operand (stmt, flags);
+    {
+      /* Mark statements with volatile operands.  */
+      if (!(flags & opf_no_vops)
+	  && TREE_THIS_VOLATILE (var))
+	gimple_set_has_volatile_ops (stmt, true);
+
+      /* The variable is a memory access.  Add virtual operands.  */
+      add_virtual_operand (fn, stmt, flags);
+    }
 }
 
 /* Mark the base address of REF as having its address taken.
@@ -715,46 +572,46 @@
    STMT is the statement being processed, EXPR is the MEM_REF
       that got us here.
 
-   FLAGS is as in get_expr_operands.
-
-   RECURSE_ON_BASE should be set to true if we want to continue
-      calling get_expr_operands on the base pointer, and false if
-      something else will do it for us.  */
+   FLAGS is as in get_expr_operands.  */
 
 static void
-get_indirect_ref_operands (gimple stmt, tree expr, int flags,
-			   bool recurse_on_base)
+get_mem_ref_operands (struct function *fn,
+		      gimple *stmt, tree expr, int flags)
 {
   tree *pptr = &TREE_OPERAND (expr, 0);
 
-  if (TREE_THIS_VOLATILE (expr))
+  if (!(flags & opf_no_vops)
+      && TREE_THIS_VOLATILE (expr))
     gimple_set_has_volatile_ops (stmt, true);
 
   /* Add the VOP.  */
-  add_virtual_operand (stmt, flags);
+  add_virtual_operand (fn, stmt, flags);
 
   /* If requested, add a USE operand for the base pointer.  */
-  if (recurse_on_base)
-    get_expr_operands (stmt, pptr,
-		       opf_non_addressable | opf_use
-		       | (flags & (opf_no_vops|opf_not_non_addressable)));
+  get_expr_operands (fn, stmt, pptr,
+		     opf_non_addressable | opf_use
+		     | (flags & (opf_no_vops|opf_not_non_addressable)));
 }
 
 
 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */
 
 static void
-get_tmr_operands (gimple stmt, tree expr, int flags)
+get_tmr_operands (struct function *fn, gimple *stmt, tree expr, int flags)
 {
-  if (TREE_THIS_VOLATILE (expr))
+  if (!(flags & opf_no_vops)
+      && TREE_THIS_VOLATILE (expr))
     gimple_set_has_volatile_ops (stmt, true);
 
   /* First record the real operands.  */
-  get_expr_operands (stmt, &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
-  get_expr_operands (stmt, &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
-  get_expr_operands (stmt, &TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops));
+  get_expr_operands (fn, stmt,
+		     &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
+  get_expr_operands (fn, stmt,
+		     &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
+  get_expr_operands (fn, stmt,
+		     &TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops));
 
-  add_virtual_operand (stmt, flags);
+  add_virtual_operand (fn, stmt, flags);
 }
 
 
@@ -762,7 +619,7 @@
    escape, add them to the VDEF/VUSE lists for it.  */
 
 static void
-maybe_add_call_vops (gimple stmt)
+maybe_add_call_vops (struct function *fn, gcall *stmt)
 {
   int call_flags = gimple_call_flags (stmt);
 
@@ -771,13 +628,11 @@
      call-clobbered.  */
   if (!(call_flags & ECF_NOVOPS))
     {
-      /* A 'pure' or a 'const' function never call-clobbers anything.
-	 A 'noreturn' function might, but since we don't return anyway
-	 there is no point in recording that.  */
-      if (!(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
-	add_virtual_operand (stmt, opf_def);
+      /* A 'pure' or a 'const' function never call-clobbers anything.  */
+      if (!(call_flags & (ECF_PURE | ECF_CONST)))
+	add_virtual_operand (fn, stmt, opf_def);
       else if (!(call_flags & ECF_CONST))
-	add_virtual_operand (stmt, opf_use);
+	add_virtual_operand (fn, stmt, opf_use);
     }
 }
 
@@ -785,7 +640,7 @@
 /* Scan operands in the ASM_EXPR stmt referred to in INFO.  */
 
 static void
-get_asm_expr_operands (gimple stmt)
+get_asm_stmt_operands (struct function *fn, gasm *stmt)
 {
   size_t i, noutputs;
   const char **oconstraints;
@@ -812,7 +667,8 @@
       if (!allows_reg && allows_mem)
 	mark_address_taken (TREE_VALUE (link));
 
-      get_expr_operands (stmt, &TREE_VALUE (link), opf_def | opf_not_non_addressable);
+      get_expr_operands (fn, stmt,
+			 &TREE_VALUE (link), opf_def | opf_not_non_addressable);
     }
 
   /* Gather all input operands.  */
@@ -828,19 +684,12 @@
       if (!allows_reg && allows_mem)
 	mark_address_taken (TREE_VALUE (link));
 
-      get_expr_operands (stmt, &TREE_VALUE (link), opf_not_non_addressable);
+      get_expr_operands (fn, stmt, &TREE_VALUE (link), opf_not_non_addressable);
     }
 
   /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
-  for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
-    {
-      tree link = gimple_asm_clobber_op (stmt, i);
-      if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
-	{
-	  add_virtual_operand (stmt, opf_def);
-	  break;
-	}
-    }
+  if (gimple_asm_clobbers_memory_p (stmt))
+    add_virtual_operand (fn, stmt, opf_def);
 }
 
 
@@ -849,7 +698,7 @@
    interpret the operands found.  */
 
 static void
-get_expr_operands (gimple stmt, tree *expr_p, int flags)
+get_expr_operands (struct function *fn, gimple *stmt, tree *expr_p, int flags)
 {
   enum tree_code code;
   enum tree_code_class codeclass;
@@ -877,29 +726,22 @@
 	  && !is_gimple_debug (stmt))
 	mark_address_taken (TREE_OPERAND (expr, 0));
 
-      /* If the address is invariant, there may be no interesting
-	 variable references inside.  */
-      if (is_gimple_min_invariant (expr))
-	return;
-
       /* Otherwise, there may be variables referenced inside but there
 	 should be no VUSEs created, since the referenced objects are
 	 not really accessed.  The only operands that we should find
 	 here are ARRAY_REF indices which will always be real operands
 	 (GIMPLE does not allow non-registers as array indices).  */
       flags |= opf_no_vops;
-      get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
-			 flags | opf_not_non_addressable);
+      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0),
+			 flags | opf_not_non_addressable | opf_address_taken);
       return;
 
     case SSA_NAME:
-     add_stmt_operand (expr_p, stmt, flags);
-     return;
-
     case VAR_DECL:
     case PARM_DECL:
     case RESULT_DECL:
-      add_stmt_operand (expr_p, stmt, flags);
+      if (!(flags & opf_address_taken))
+	add_stmt_operand (fn, expr_p, stmt, flags);
       return;
 
     case DEBUG_EXPR_DECL:
@@ -907,11 +749,11 @@
       return;
 
     case MEM_REF:
-      get_indirect_ref_operands (stmt, expr, flags, true);
+      get_mem_ref_operands (fn, stmt, expr, flags);
       return;
 
     case TARGET_MEM_REF:
-      get_tmr_operands (stmt, expr, flags);
+      get_tmr_operands (fn, stmt, expr, flags);
       return;
 
     case ARRAY_REF:
@@ -920,22 +762,24 @@
     case REALPART_EXPR:
     case IMAGPART_EXPR:
       {
-	if (TREE_THIS_VOLATILE (expr))
+	if (!(flags & opf_no_vops)
+	    && TREE_THIS_VOLATILE (expr))
 	  gimple_set_has_volatile_ops (stmt, true);
 
-	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
+	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
 
 	if (code == COMPONENT_REF)
 	  {
-	    if (TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
+	    if (!(flags & opf_no_vops)
+		&& TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
 	      gimple_set_has_volatile_ops (stmt, true);
-	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
+	    get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
 	  }
 	else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
 	  {
-            get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
-            get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
-            get_expr_operands (stmt, &TREE_OPERAND (expr, 3), uflags);
+            get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
+            get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
+            get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 3), uflags);
 	  }
 
 	return;
@@ -944,15 +788,16 @@
     case WITH_SIZE_EXPR:
       /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
 	 and an rvalue reference to its second argument.  */
-      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
-      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
+      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
+      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
       return;
 
     case COND_EXPR:
     case VEC_COND_EXPR:
-      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), uflags);
-      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
-      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
+    case VEC_PERM_EXPR:
+      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), uflags);
+      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
+      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
       return;
 
     case CONSTRUCTOR:
@@ -962,47 +807,53 @@
 	constructor_elt *ce;
 	unsigned HOST_WIDE_INT idx;
 
+	/* A volatile constructor is actually TREE_CLOBBER_P, transfer
+	   the volatility to the statement, don't use TREE_CLOBBER_P for
+	   mirroring the other uses of THIS_VOLATILE in this file.  */
+	if (!(flags & opf_no_vops)
+	    && TREE_THIS_VOLATILE (expr))
+	  gimple_set_has_volatile_ops (stmt, true);
+
 	for (idx = 0;
-	     VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
+	     vec_safe_iterate (CONSTRUCTOR_ELTS (expr), idx, &ce);
 	     idx++)
-	  get_expr_operands (stmt, &ce->value, uflags);
+	  get_expr_operands (fn, stmt, &ce->value, uflags);
 
 	return;
       }
 
     case BIT_FIELD_REF:
-      if (TREE_THIS_VOLATILE (expr))
+      if (!(flags & opf_no_vops)
+	  && TREE_THIS_VOLATILE (expr))
 	gimple_set_has_volatile_ops (stmt, true);
       /* FALLTHRU */
 
-    case TRUTH_NOT_EXPR:
     case VIEW_CONVERT_EXPR:
     do_unary:
-      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
+      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
       return;
 
-    case TRUTH_AND_EXPR:
-    case TRUTH_OR_EXPR:
-    case TRUTH_XOR_EXPR:
+    case BIT_INSERT_EXPR:
     case COMPOUND_EXPR:
     case OBJ_TYPE_REF:
     case ASSERT_EXPR:
     do_binary:
       {
-	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
-	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
+	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
+	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), flags);
 	return;
       }
 
     case DOT_PROD_EXPR:
+    case SAD_EXPR:
     case REALIGN_LOAD_EXPR:
     case WIDEN_MULT_PLUS_EXPR:
     case WIDEN_MULT_MINUS_EXPR:
     case FMA_EXPR:
       {
-	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
-	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
-	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
+	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
+	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), flags);
+	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), flags);
 	return;
       }
 
@@ -1023,12 +874,13 @@
     }
 
   /* If we get here, something has gone wrong.  */
-#ifdef ENABLE_CHECKING
-  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
-  debug_tree (expr);
-  fputs ("\n", stderr);
-#endif
-  gcc_unreachable ();
+  if (flag_checking)
+    {
+      fprintf (stderr, "unhandled expression in get_expr_operands():\n");
+      debug_tree (expr);
+      fputs ("\n", stderr);
+      gcc_unreachable ();
+    }
 }
 
 
@@ -1036,35 +888,49 @@
    build_* operand vectors will have potential operands in them.  */
 
 static void
-parse_ssa_operands (gimple stmt)
+parse_ssa_operands (struct function *fn, gimple *stmt)
 {
   enum gimple_code code = gimple_code (stmt);
+  size_t i, n, start = 0;
 
-  if (code == GIMPLE_ASM)
-    get_asm_expr_operands (stmt);
-  else if (is_gimple_debug (stmt))
+  switch (code)
     {
+    case GIMPLE_ASM:
+      get_asm_stmt_operands (fn, as_a <gasm *> (stmt));
+      break;
+
+    case GIMPLE_TRANSACTION:
+      /* The start of a transaction is a memory barrier.  */
+      add_virtual_operand (fn, stmt, opf_def | opf_use);
+      break;
+
+    case GIMPLE_DEBUG:
       if (gimple_debug_bind_p (stmt)
 	  && gimple_debug_bind_has_value_p (stmt))
-	get_expr_operands (stmt, gimple_debug_bind_get_value_ptr (stmt),
+	get_expr_operands (fn, stmt, gimple_debug_bind_get_value_ptr (stmt),
 			   opf_use | opf_no_vops);
-    }
-  else
-    {
-      size_t i, start = 0;
+      break;
+
+    case GIMPLE_RETURN:
+      append_vuse (gimple_vop (fn));
+      goto do_default;
 
-      if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
-	{
-	  get_expr_operands (stmt, gimple_op_ptr (stmt, 0), opf_def);
-	  start = 1;
-	}
+    case GIMPLE_CALL:
+      /* Add call-clobbered operands, if needed.  */
+      maybe_add_call_vops (fn, as_a <gcall *> (stmt));
+      /* FALLTHRU */
 
-      for (i = start; i < gimple_num_ops (stmt); i++)
-	get_expr_operands (stmt, gimple_op_ptr (stmt, i), opf_use);
+    case GIMPLE_ASSIGN:
+      get_expr_operands (fn, stmt, gimple_op_ptr (stmt, 0), opf_def);
+      start = 1;
+      /* FALLTHRU */
 
-      /* Add call-clobbered operands, if needed.  */
-      if (code == GIMPLE_CALL)
-	maybe_add_call_vops (stmt);
+    default:
+    do_default:
+      n = gimple_num_ops (stmt);
+      for (i = start; i < n; i++)
+	get_expr_operands (fn, stmt, gimple_op_ptr (stmt, i), opf_use);
+      break;
     }
 }
 
@@ -1072,14 +938,104 @@
 /* Create an operands cache for STMT.  */
 
 static void
-build_ssa_operands (gimple stmt)
+build_ssa_operands (struct function *fn, gimple *stmt)
 {
   /* Initially assume that the statement has no volatile operands.  */
   gimple_set_has_volatile_ops (stmt, false);
 
   start_ssa_stmt_operands ();
-  parse_ssa_operands (stmt);
-  finalize_ssa_stmt_operands (stmt);
+  parse_ssa_operands (fn, stmt);
+  finalize_ssa_stmt_operands (fn, stmt);
+}
+
+/* Verifies SSA statement operands.  */
+
+DEBUG_FUNCTION bool
+verify_ssa_operands (struct function *fn, gimple *stmt)
+{
+  use_operand_p use_p;
+  def_operand_p def_p;
+  ssa_op_iter iter;
+  unsigned i;
+  tree def;
+  bool volatile_p = gimple_has_volatile_ops (stmt);
+
+  /* build_ssa_operands w/o finalizing them.  */
+  gimple_set_has_volatile_ops (stmt, false);
+  start_ssa_stmt_operands ();
+  parse_ssa_operands (fn, stmt);
+
+  /* Now verify the built operands are the same as present in STMT.  */
+  def = gimple_vdef (stmt);
+  if (def
+      && TREE_CODE (def) == SSA_NAME)
+    def = SSA_NAME_VAR (def);
+  if (build_vdef != def)
+    {
+      error ("virtual definition of statement not up-to-date");
+      return true;
+    }
+  if (gimple_vdef (stmt)
+      && ((def_p = gimple_vdef_op (stmt)) == NULL_DEF_OPERAND_P
+	  || DEF_FROM_PTR (def_p) != gimple_vdef (stmt)))
+    {
+      error ("virtual def operand missing for stmt");
+      return true;
+    }
+
+  tree use = gimple_vuse (stmt);
+  if (use
+      && TREE_CODE (use) == SSA_NAME)
+    use = SSA_NAME_VAR (use);
+  if (build_vuse != use)
+    {
+      error ("virtual use of statement not up-to-date");
+      return true;
+    }
+  if (gimple_vuse (stmt)
+      && ((use_p = gimple_vuse_op (stmt)) == NULL_USE_OPERAND_P
+	  || USE_FROM_PTR (use_p) != gimple_vuse (stmt)))
+    {
+      error ("virtual use operand missing for stmt");
+      return true;
+    }
+
+  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
+    {
+      tree *op;
+      FOR_EACH_VEC_ELT (build_uses, i, op)
+	{
+	  if (use_p->use == op)
+	    {
+	      build_uses[i] = NULL;
+	      break;
+	    }
+	}
+      if (i == build_uses.length ())
+	{
+	  error ("excess use operand for stmt");
+	  debug_generic_expr (USE_FROM_PTR (use_p));
+	  return true;
+	}
+    }
+
+  tree *op;
+  FOR_EACH_VEC_ELT (build_uses, i, op)
+    if (op != NULL)
+      {
+	error ("use operand missing for stmt");
+	debug_generic_expr (*op);
+	return true;
+      }
+
+  if (gimple_has_volatile_ops (stmt) != volatile_p)
+    {
+      error ("stmt volatile flag not up-to-date");
+      return true;
+    }
+
+  cleanup_build_arrays ();
+  return false;
 }
 
 
@@ -1087,27 +1043,17 @@
    the stmt operand lists.  */
 
 void
-free_stmt_operands (gimple stmt)
+free_stmt_operands (struct function *fn, gimple *stmt)
 {
-  def_optype_p defs = gimple_def_ops (stmt), last_def;
   use_optype_p uses = gimple_use_ops (stmt), last_use;
 
-  if (defs)
-    {
-      for (last_def = defs; last_def->next; last_def = last_def->next)
-	continue;
-      last_def->next = gimple_ssa_operands (cfun)->free_defs;
-      gimple_ssa_operands (cfun)->free_defs = defs;
-      gimple_set_def_ops (stmt, NULL);
-    }
-
   if (uses)
     {
       for (last_use = uses; last_use->next; last_use = last_use->next)
 	delink_imm_use (USE_OP_PTR (last_use));
       delink_imm_use (USE_OP_PTR (last_use));
-      last_use->next = gimple_ssa_operands (cfun)->free_uses;
-      gimple_ssa_operands (cfun)->free_uses = uses;
+      last_use->next = gimple_ssa_operands (fn)->free_uses;
+      gimple_ssa_operands (fn)->free_uses = uses;
       gimple_set_use_ops (stmt, NULL);
     }
 
@@ -1122,17 +1068,17 @@
 /* Get the operands of statement STMT.  */
 
 void
-update_stmt_operands (gimple stmt)
+update_stmt_operands (struct function *fn, gimple *stmt)
 {
   /* If update_stmt_operands is called before SSA is initialized, do
      nothing.  */
-  if (!ssa_operands_active ())
+  if (!ssa_operands_active (fn))
     return;
 
   timevar_push (TV_TREE_OPS);
 
   gcc_assert (gimple_modified_p (stmt));
-  build_ssa_operands (stmt);
+  build_ssa_operands (fn, stmt);
   gimple_set_modified (stmt, false);
 
   timevar_pop (TV_TREE_OPS);
@@ -1143,17 +1089,17 @@
    to test the validity of the swap operation.  */
 
 void
-swap_tree_operands (gimple stmt, tree *exp0, tree *exp1)
+swap_ssa_operands (gimple *stmt, tree *exp0, tree *exp1)
 {
   tree op0, op1;
   op0 = *exp0;
   op1 = *exp1;
 
-  /* If the operand cache is active, attempt to preserve the relative
-     positions of these two operands in their respective immediate use
-     lists.  */
-  if (ssa_operands_active () && op0 != op1)
+  if (op0 != op1)
     {
+      /* Attempt to preserve the relative positions of these two operands in
+	 their * respective immediate use lists by adjusting their use pointer
+	 to point to the new operand position.  */
       use_optype_p use0, use1, ptr;
       use0 = use1 = NULL;
 
@@ -1172,19 +1118,17 @@
 	    break;
 	  }
 
-      /* If both uses don't have operand entries, there isn't much we can do
-         at this point.  Presumably we don't need to worry about it.  */
-      if (use0 && use1)
-        {
-	  tree *tmp = USE_OP_PTR (use1)->use;
-	  USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
-	  USE_OP_PTR (use0)->use = tmp;
-	}
+      /* And adjust their location to point to the new position of the
+         operand.  */
+      if (use0)
+	USE_OP_PTR (use0)->use = exp1;
+      if (use1)
+	USE_OP_PTR (use1)->use = exp0;
+
+      /* Now swap the data.  */
+      *exp0 = op1;
+      *exp1 = op0;
     }
-
-  /* Now swap the data.  */
-  *exp0 = op1;
-  *exp1 = op0;
 }
 
 
@@ -1195,7 +1139,7 @@
 verify_imm_links (FILE *f, tree var)
 {
   use_operand_p ptr, prev, list;
-  int count;
+  unsigned int count;
 
   gcc_assert (TREE_CODE (var) == SSA_NAME);
 
@@ -1213,20 +1157,31 @@
   for (ptr = list->next; ptr != list; )
     {
       if (prev != ptr->prev)
-	goto error;
+	{
+	  fprintf (f, "prev != ptr->prev\n");
+	  goto error;
+	}
 
       if (ptr->use == NULL)
-	goto error; /* 2 roots, or SAFE guard node.  */
+	{
+	  fprintf (f, "ptr->use == NULL\n");
+	  goto error; /* 2 roots, or SAFE guard node.  */
+	}
       else if (*(ptr->use) != var)
-	goto error;
+	{
+	  fprintf (f, "*(ptr->use) != var\n");
+	  goto error;
+	}
 
       prev = ptr;
       ptr = ptr->next;
 
-      /* Avoid infinite loops.  50,000,000 uses probably indicates a
-	 problem.  */
-      if (count++ > 50000000)
-	goto error;
+      count++;
+      if (count == 0)
+	{
+	  fprintf (f, "number of immediate uses doesn't fit unsigned int\n");
+	  goto error;
+	}
     }
 
   /* Verify list in the other direction.  */
@@ -1234,15 +1189,25 @@
   for (ptr = list->prev; ptr != list; )
     {
       if (prev != ptr->next)
-	goto error;
+	{
+	  fprintf (f, "prev != ptr->next\n");
+	  goto error;
+	}
       prev = ptr;
       ptr = ptr->prev;
-      if (count-- < 0)
-	goto error;
+      if (count == 0)
+	{
+	  fprintf (f, "count-- < 0\n");
+	  goto error;
+	}
+      count--;
     }
 
   if (count != 0)
-    goto error;
+    {
+      fprintf (f, "count != 0\n");
+      goto error;
+    }
 
   return false;
 
@@ -1255,7 +1220,7 @@
   fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
 	   (void *)ptr->use);
   print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
-  fprintf(f, "\n");
+  fprintf (f, "\n");
   return true;
 }
 
@@ -1290,7 +1255,7 @@
 	else
 	  print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM);
     }
-  fprintf(file, "\n");
+  fprintf (file, "\n");
 }
 
 
@@ -1303,11 +1268,8 @@
   unsigned int x;
 
   fprintf (file, "Immediate_uses: \n\n");
-  for (x = 1; x < num_ssa_names; x++)
+  FOR_EACH_SSA_NAME (x, var, cfun)
     {
-      var = ssa_name(x);
-      if (!var)
-        continue;
       dump_immediate_uses_for (file, var);
     }
 }
@@ -1334,24 +1296,54 @@
 /* Unlink STMTs virtual definition from the IL by propagating its use.  */
 
 void
-unlink_stmt_vdef (gimple stmt)
+unlink_stmt_vdef (gimple *stmt)
 {
   use_operand_p use_p;
   imm_use_iterator iter;
-  gimple use_stmt;
+  gimple *use_stmt;
   tree vdef = gimple_vdef (stmt);
+  tree vuse = gimple_vuse (stmt);
 
   if (!vdef
       || TREE_CODE (vdef) != SSA_NAME)
     return;
 
-  FOR_EACH_IMM_USE_STMT (use_stmt, iter, gimple_vdef (stmt))
+  FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
     {
       FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
-	SET_USE (use_p, gimple_vuse (stmt));
+	SET_USE (use_p, vuse);
     }
 
-  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (stmt)))
-    SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vuse (stmt)) = 1;
+  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef))
+    SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
 }
 
+/* Return true if the var whose chain of uses starts at PTR has a
+   single nondebug use.  Set USE_P and STMT to that single nondebug
+   use, if so, or to NULL otherwise.  */
+bool
+single_imm_use_1 (const ssa_use_operand_t *head,
+		  use_operand_p *use_p, gimple **stmt)
+{
+  ssa_use_operand_t *ptr, *single_use = 0;
+
+  for (ptr = head->next; ptr != head; ptr = ptr->next)
+    if (USE_STMT(ptr) && !is_gimple_debug (USE_STMT (ptr)))
+      {
+	if (single_use)
+	  {
+	    single_use = NULL;
+	    break;
+	  }
+	single_use = ptr;
+      }
+
+  if (use_p)
+    *use_p = single_use;
+
+  if (stmt)
+    *stmt = single_use ? single_use->loc.stmt : NULL;
+
+  return single_use;
+}
+
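
The single_imm_use_1 helper added at the end of this patch scans an SSA name's immediate-use list, which is kept as a circular doubly linked ring anchored at a sentinel node, and reports whether exactly one non-debug use remains. Below is a minimal, self-contained sketch of that scan in plain C; the types and helpers (use_node, link_use, single_nondebug_use) are invented for illustration and are not GCC's actual data structures or API.

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

/* Simplified stand-in for ssa_use_operand_t: a node in a circular,
   doubly linked ring whose head is a sentinel carrying no use.  */
struct use_node
{
  struct use_node *prev, *next;
  int stmt_id;      /* stand-in for the using statement */
  bool is_debug;    /* stand-in for is_gimple_debug ()  */
};

/* Initialize HEAD as an empty ring (the sentinel points to itself).  */
static void
init_ring (struct use_node *head)
{
  head->prev = head->next = head;
}

/* Link NODE immediately after HEAD, roughly how link_imm_use_stmt
   threads a new use onto the list.  */
static void
link_use (struct use_node *head, struct use_node *node)
{
  node->next = head->next;
  node->prev = head;
  head->next->prev = node;
  head->next = node;
}

/* Analogue of single_imm_use_1: return true iff the ring rooted at
   HEAD contains exactly one non-debug use, storing it in *USE.  */
static bool
single_nondebug_use (const struct use_node *head, const struct use_node **use)
{
  const struct use_node *ptr, *single = NULL;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (!ptr->is_debug)
      {
	if (single)
	  {
	    single = NULL;	/* A second real use: give up.  */
	    break;
	  }
	single = ptr;
      }

  if (use)
    *use = single;
  return single != NULL;
}

int
main (void)
{
  struct use_node head, a, b;
  const struct use_node *only;

  init_ring (&head);

  a.stmt_id = 1; a.is_debug = false;
  b.stmt_id = 2; b.is_debug = true;	/* Debug uses are skipped.  */
  link_use (&head, &a);
  link_use (&head, &b);

  if (single_nondebug_use (&head, &only))
    printf ("single non-debug use in stmt %d\n", only->stmt_id);
  return 0;
}

The same ring layout is what verify_imm_links walks in both directions in the hunk above, which is why its forward and backward counts must agree.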