diff gcc/gimple.c @ 57:326d9e06c2e3

modify c-parser.c
author ryoma <e075725@ie.u-ryukyu.ac.jp>
date Mon, 15 Feb 2010 00:54:17 +0900
parents 959d4c8c8abc 77e2b8dfacca
children 5b5b9ea5b220
--- a/gcc/gimple.c	Sun Feb 07 18:30:54 2010 +0900
+++ b/gcc/gimple.c	Mon Feb 15 00:54:17 2010 +0900
@@ -1,6 +1,6 @@
 /* Gimple IR support functions.
 
-   Copyright 2007, 2008 Free Software Foundation, Inc.
+   Copyright 2007, 2008, 2009 Free Software Foundation, Inc.
    Contributed by Aldy Hernandez <aldyh@redhat.com>
 
 This file is part of GCC.
@@ -23,12 +23,13 @@
 #include "system.h"
 #include "coretypes.h"
 #include "tm.h"
+#include "target.h"
 #include "tree.h"
 #include "ggc.h"
-#include "errors.h"
 #include "hard-reg-set.h"
 #include "basic-block.h"
 #include "gimple.h"
+#include "toplev.h"
 #include "diagnostic.h"
 #include "tree-flow.h"
 #include "value-prof.h"
@@ -36,19 +37,51 @@
 #ifndef noCbC
 #include "cbc-tree.h"
 #endif
-
-#define DEFGSCODE(SYM, NAME, STRUCT)	NAME,
-const char *const gimple_code_name[] = {
-#include "gimple.def"
-};
-#undef DEFGSCODE
+#include "alias.h"
+#include "demangle.h"
 
-/* All the tuples have their operand vector at the very bottom
+/* Global type table.  FIXME lto, it should be possible to re-use some
+   of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
+   etc), but those assume that types were built with the various
+   build_*_type routines which is not the case with the streamer.  */
+static htab_t gimple_types;
+static struct pointer_map_t *type_hash_cache;
+
+/* Global type comparison cache.  */
+static htab_t gtc_visited;
+static struct obstack gtc_ob;
+
+/* All the tuples have their operand vector (if present) at the very bottom
    of the structure.  Therefore, the offset required to find the
    operands vector is the size of the structure minus the size of the 1
    element tree array at the end (see gimple_ops).  */
-#define DEFGSCODE(SYM, NAME, STRUCT)	(sizeof (STRUCT) - sizeof (tree)),
-const size_t gimple_ops_offset_[] = {
+#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
+    (HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
+EXPORTED_CONST size_t gimple_ops_offset_[] = {
+#include "gsstruct.def"
+};
+#undef DEFGSSTRUCT
+
+#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof(struct STRUCT),
+static const size_t gsstruct_code_size[] = {
+#include "gsstruct.def"
+};
+#undef DEFGSSTRUCT
+
+#define DEFGSCODE(SYM, NAME, GSSCODE)   NAME,
+const char *const gimple_code_name[] = {
+#include "gimple.def"
+};
+#undef DEFGSCODE
+
+#define DEFGSCODE(SYM, NAME, GSSCODE)   GSSCODE,
+EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
 #include "gimple.def"
 };
 #undef DEFGSCODE
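
The table definitions above rely on the X-macro idiom: gimple.def and gsstruct.def contain one DEFGSCODE/DEFGSSTRUCT invocation per statement kind, and each parallel table is generated by redefining the macro and re-including the .def file. A minimal standalone sketch of the idiom follows; STMT_LIST is a hypothetical stand-in for the re-included .def file, and all names are illustrative, not GCC's.

    /* X-macro sketch: one list, three generated definitions.  */
    #include <stdio.h>
    #include <stddef.h>

    #define STMT_LIST                               \
      DEFCODE (K_NOP,    "nop",    sizeof (int))    \
      DEFCODE (K_ASSIGN, "assign", sizeof (long))   \
      DEFCODE (K_CALL,   "call",   sizeof (double))

    /* First expansion: the enum of statement codes.  */
    #define DEFCODE(SYM, NAME, SIZE) SYM,
    enum stmt_code { STMT_LIST LAST_STMT_CODE };
    #undef DEFCODE

    /* Second expansion: a parallel table of printable names.  */
    #define DEFCODE(SYM, NAME, SIZE) NAME,
    static const char *const stmt_code_name[] = { STMT_LIST };
    #undef DEFCODE

    /* Third expansion: a parallel table of structure sizes.  */
    #define DEFCODE(SYM, NAME, SIZE) SIZE,
    static const size_t stmt_code_size[] = { STMT_LIST };
    #undef DEFCODE

    int main (void)
    {
      printf ("%s: %zu bytes\n",
              stmt_code_name[K_CALL], stmt_code_size[K_CALL]);
      return 0;
    }
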
@@ -91,125 +124,19 @@
   g->gsbase.code = code;
 }
 
-
-/* Return the GSS_* identifier for the given GIMPLE statement CODE.  */
-
-static enum gimple_statement_structure_enum
-gss_for_code (enum gimple_code code)
-{
-  switch (code)
-    {
-    case GIMPLE_ASSIGN:
-    case GIMPLE_CALL:
-    case GIMPLE_RETURN:			return GSS_WITH_MEM_OPS;
-    case GIMPLE_COND:
-    case GIMPLE_GOTO:
-    case GIMPLE_LABEL:
-    case GIMPLE_CHANGE_DYNAMIC_TYPE:
-    case GIMPLE_SWITCH:			return GSS_WITH_OPS;
-    case GIMPLE_ASM:			return GSS_ASM;
-    case GIMPLE_BIND:			return GSS_BIND;
-    case GIMPLE_CATCH:			return GSS_CATCH;
-    case GIMPLE_EH_FILTER:		return GSS_EH_FILTER;
-    case GIMPLE_NOP:			return GSS_BASE;
-    case GIMPLE_PHI:			return GSS_PHI;
-    case GIMPLE_RESX:			return GSS_RESX;
-    case GIMPLE_TRY:			return GSS_TRY;
-    case GIMPLE_WITH_CLEANUP_EXPR:	return GSS_WCE;
-    case GIMPLE_OMP_CRITICAL:		return GSS_OMP_CRITICAL;
-    case GIMPLE_OMP_FOR:		return GSS_OMP_FOR;
-    case GIMPLE_OMP_MASTER:		
-    case GIMPLE_OMP_ORDERED:
-    case GIMPLE_OMP_SECTION:		return GSS_OMP;
-    case GIMPLE_OMP_RETURN:
-    case GIMPLE_OMP_SECTIONS_SWITCH:    return GSS_BASE;
-    case GIMPLE_OMP_CONTINUE:		return GSS_OMP_CONTINUE;
-    case GIMPLE_OMP_PARALLEL:		return GSS_OMP_PARALLEL;
-    case GIMPLE_OMP_TASK:		return GSS_OMP_TASK;
-    case GIMPLE_OMP_SECTIONS:		return GSS_OMP_SECTIONS;
-    case GIMPLE_OMP_SINGLE:		return GSS_OMP_SINGLE;
-    case GIMPLE_OMP_ATOMIC_LOAD:	return GSS_OMP_ATOMIC_LOAD;
-    case GIMPLE_OMP_ATOMIC_STORE:	return GSS_OMP_ATOMIC_STORE;
-    case GIMPLE_PREDICT:		return GSS_BASE;
-    default:				gcc_unreachable ();
-    }
-}
-
-
 /* Return the number of bytes needed to hold a GIMPLE statement with
    code CODE.  */
 
-static size_t
+static inline size_t
 gimple_size (enum gimple_code code)
 {
-  enum gimple_statement_structure_enum gss = gss_for_code (code);
-
-  if (gss == GSS_WITH_OPS)
-    return sizeof (struct gimple_statement_with_ops);
-  else if (gss == GSS_WITH_MEM_OPS)
-    return sizeof (struct gimple_statement_with_memory_ops);
-
-  switch (code)
-    {
-    case GIMPLE_ASM:
-      return sizeof (struct gimple_statement_asm);
-    case GIMPLE_NOP:
-      return sizeof (struct gimple_statement_base);
-    case GIMPLE_BIND:
-      return sizeof (struct gimple_statement_bind);
-    case GIMPLE_CATCH:
-      return sizeof (struct gimple_statement_catch);
-    case GIMPLE_EH_FILTER:
-      return sizeof (struct gimple_statement_eh_filter);
-    case GIMPLE_TRY:
-      return sizeof (struct gimple_statement_try);
-    case GIMPLE_RESX:
-      return sizeof (struct gimple_statement_resx);
-    case GIMPLE_OMP_CRITICAL:
-      return sizeof (struct gimple_statement_omp_critical);
-    case GIMPLE_OMP_FOR:
-      return sizeof (struct gimple_statement_omp_for);
-    case GIMPLE_OMP_PARALLEL:
-      return sizeof (struct gimple_statement_omp_parallel);
-    case GIMPLE_OMP_TASK:
-      return sizeof (struct gimple_statement_omp_task);
-    case GIMPLE_OMP_SECTION:
-    case GIMPLE_OMP_MASTER:
-    case GIMPLE_OMP_ORDERED:
-      return sizeof (struct gimple_statement_omp);
-    case GIMPLE_OMP_RETURN:
-      return sizeof (struct gimple_statement_base);
-    case GIMPLE_OMP_CONTINUE:
-      return sizeof (struct gimple_statement_omp_continue);
-    case GIMPLE_OMP_SECTIONS:
-      return sizeof (struct gimple_statement_omp_sections);
-    case GIMPLE_OMP_SECTIONS_SWITCH:
-      return sizeof (struct gimple_statement_base);
-    case GIMPLE_OMP_SINGLE:
-      return sizeof (struct gimple_statement_omp_single);
-    case GIMPLE_OMP_ATOMIC_LOAD:
-      return sizeof (struct gimple_statement_omp_atomic_load);
-    case GIMPLE_OMP_ATOMIC_STORE:
-      return sizeof (struct gimple_statement_omp_atomic_store);
-    case GIMPLE_WITH_CLEANUP_EXPR:
-      return sizeof (struct gimple_statement_wce);
-    case GIMPLE_CHANGE_DYNAMIC_TYPE:
-      return sizeof (struct gimple_statement_with_ops);
-    case GIMPLE_PREDICT:
-      return sizeof (struct gimple_statement_base);
-    default:
-      break;
-    }
-
-  gcc_unreachable ();
+  return gsstruct_code_size[gss_for_code (code)];
 }
 
-
 /* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
    operands.  */
 
-#define gimple_alloc(c, n) gimple_alloc_stat (c, n MEM_STAT_INFO)
-static gimple
+gimple
 gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
 {
   size_t size;
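
With those generated tables, this hunk deletes the hand-maintained gss_for_code and gimple_size switches: gss_for_code is now backed by the gss_for_code_[] array, and gimple_size reduces to one lookup in gsstruct_code_size[]. A hedged before/after sketch, with toy enums and sizes standing in for the real GSS structures:

    #include <assert.h>
    #include <stddef.h>

    enum stmt_code   { K_NOP, K_ASSIGN, K_CALL, K_LAST };
    enum stmt_struct { SS_BASE, SS_WITH_OPS, SS_LAST };

    /* Tables that the .def files would generate.  */
    static const enum stmt_struct struct_for_code[K_LAST] =
      { SS_BASE, SS_WITH_OPS, SS_WITH_OPS };
    static const size_t struct_size[SS_LAST] = { 16, 32 };

    /* Old shape: a hand-maintained switch per query.  */
    static size_t stmt_size_switch (enum stmt_code c)
    {
      switch (c)
        {
        case K_NOP:    return 16;
        case K_ASSIGN:
        case K_CALL:   return 32;
        default:       return 0;
        }
    }

    /* New shape: two O(1) lookups into the generated tables.  */
    static inline size_t stmt_size (enum stmt_code c)
    {
      return struct_size[struct_for_code[c]];
    }

    int main (void)
    {
      for (int c = K_NOP; c < K_LAST; c++)
        assert (stmt_size ((enum stmt_code) c)
                == stmt_size_switch ((enum stmt_code) c));
      return 0;
    }
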
@@ -253,14 +180,14 @@
 
 /* Build a tuple with operands.  CODE is the statement to build (which
    must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the sub-code
-   for the new tuple.  NUM_OPS is the number of operands to allocate.  */ 
+   for the new tuple.  NUM_OPS is the number of operands to allocate.  */
 
 #define gimple_build_with_ops(c, s, n) \
   gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)
 
 static gimple
-gimple_build_with_ops_stat (enum gimple_code code, enum tree_code subcode,
-		            unsigned num_ops MEM_STAT_DECL)
+gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
+                    unsigned num_ops MEM_STAT_DECL)
 {
   gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
   gimple_set_subcode (s, subcode);
@@ -274,7 +201,7 @@
 gimple
 gimple_build_return (tree retval)
 {
-  gimple s = gimple_build_with_ops (GIMPLE_RETURN, 0, 1);
+  gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1);
   if (retval)
     gimple_return_set_retval (s, retval);
   return s;
@@ -287,7 +214,7 @@
 static inline gimple
 gimple_build_call_1 (tree fn, unsigned nargs)
 {
-  gimple s = gimple_build_with_ops (GIMPLE_CALL, 0, nargs + 3);
+  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
   if (TREE_CODE (fn) == FUNCTION_DECL)
     fn = build_fold_addr_expr (fn);
   gimple_set_op (s, 1, fn);
@@ -366,6 +293,7 @@
 #ifndef noCbC
   gimple_call_set_cbc_goto (call, CALL_EXPR_CbC_GOTO (t));
 #endif
+  gimple_set_no_warning (call, TREE_NO_WARNING (t));
 
   return call;
 }
@@ -376,7 +304,7 @@
 
 void
 extract_ops_from_tree (tree expr, enum tree_code *subcode_p, tree *op1_p,
-		       tree *op2_p)
+               tree *op2_p)
 {
   enum gimple_rhs_class grhs_class;
 
@@ -416,7 +344,7 @@
 
   extract_ops_from_tree (rhs, &subcode, &op1, &op2);
   return gimple_build_assign_with_ops_stat (subcode, lhs, op1, op2
-  					    PASS_MEM_STAT);
+                        PASS_MEM_STAT);
 }
 
 
@@ -434,9 +362,9 @@
   /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
      code).  */
   num_ops = get_gimple_rhs_num_ops (subcode) + 1;
-  
-  p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, subcode, num_ops
-  			          PASS_MEM_STAT);
+
+  p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
+                      PASS_MEM_STAT);
   gimple_assign_set_lhs (p, lhs);
   gimple_assign_set_rhs1 (p, op1);
   if (op2)
@@ -457,9 +385,9 @@
 
    This function returns the newly created GIMPLE_ASSIGN tuple.  */
 
-inline gimple
+gimple
 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
-{ 
+{
   tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
   gimplify_and_add (t, seq_p);
   ggc_free (t);
@@ -475,7 +403,7 @@
 
 gimple
 gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
-		   tree t_label, tree f_label)
+           tree t_label, tree f_label)
 {
   gimple p;
 
@@ -495,10 +423,11 @@
 gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
                                tree *lhs_p, tree *rhs_p)
 {
+  location_t loc = EXPR_LOCATION (cond);
   gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
-	      || TREE_CODE (cond) == TRUTH_NOT_EXPR
-	      || is_gimple_min_invariant (cond)
-	      || SSA_VAR_P (cond));
+          || TREE_CODE (cond) == TRUTH_NOT_EXPR
+          || is_gimple_min_invariant (cond)
+          || SSA_VAR_P (cond));
 
   extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);
 
@@ -507,14 +436,14 @@
     {
       *code_p = EQ_EXPR;
       gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
-      *rhs_p = fold_convert (TREE_TYPE (*lhs_p), integer_zero_node);
+      *rhs_p = fold_convert_loc (loc, TREE_TYPE (*lhs_p), integer_zero_node);
     }
   /* Canonicalize conditionals of the form 'if (VAL)'  */
   else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
     {
       *code_p = NE_EXPR;
       gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
-      *rhs_p = fold_convert (TREE_TYPE (*lhs_p), integer_zero_node);
+      *rhs_p = fold_convert_loc (loc, TREE_TYPE (*lhs_p), integer_zero_node);
     }
 }
 
@@ -550,7 +479,7 @@
 gimple
 gimple_build_label (tree label)
 {
-  gimple p = gimple_build_with_ops (GIMPLE_LABEL, 0, 1);
+  gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
   gimple_label_set_label (p, label);
   return p;
 }
@@ -560,7 +489,7 @@
 gimple
 gimple_build_goto (tree dest)
 {
-  gimple p = gimple_build_with_ops (GIMPLE_GOTO, 0, 1);
+  gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
   gimple_goto_set_dest (p, dest);
   return p;
 }
@@ -568,7 +497,7 @@
 
 /* Build a GIMPLE_NOP statement.  */
 
-gimple 
+gimple
 gimple_build_nop (void)
 {
   return gimple_alloc (GIMPLE_NOP, 0);
@@ -600,23 +529,29 @@
    */
 
 static inline gimple
-gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs, 
-                    unsigned nclobbers)
+gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
+                    unsigned nclobbers, unsigned nlabels)
 {
   gimple p;
   int size = strlen (string);
 
-  p = gimple_build_with_ops (GIMPLE_ASM, 0, ninputs + noutputs + nclobbers);
+  /* ASMs with labels cannot have outputs.  This should have been
+     enforced by the front end.  */
+  gcc_assert (nlabels == 0 || noutputs == 0);
+
+  p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
+                 ninputs + noutputs + nclobbers + nlabels);
 
   p->gimple_asm.ni = ninputs;
   p->gimple_asm.no = noutputs;
   p->gimple_asm.nc = nclobbers;
+  p->gimple_asm.nl = nlabels;
   p->gimple_asm.string = ggc_alloc_string (string, size);
 
 #ifdef GATHER_STATISTICS
   gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
 #endif
-  
+
   return p;
 }
 
@@ -628,20 +563,23 @@
    NCLOBBERS is the number of clobbered registers.
    INPUTS is a vector of the input register parameters.
    OUTPUTS is a vector of the output register parameters.
-   CLOBBERS is a vector of the clobbered register parameters.  */
+   CLOBBERS is a vector of the clobbered register parameters.
+   LABELS is a vector of destination labels.  */
 
 gimple
-gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs, 
-                      VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers)
+gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs,
+                      VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers,
+              VEC(tree,gc)* labels)
 {
   gimple p;
   unsigned i;
 
   p = gimple_build_asm_1 (string,
                           VEC_length (tree, inputs),
-                          VEC_length (tree, outputs), 
-                          VEC_length (tree, clobbers));
-  
+                          VEC_length (tree, outputs),
+                          VEC_length (tree, clobbers),
+              VEC_length (tree, labels));
+
   for (i = 0; i < VEC_length (tree, inputs); i++)
     gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i));
 
@@ -650,41 +588,10 @@
 
   for (i = 0; i < VEC_length (tree, clobbers); i++)
     gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i));
-  
-  return p;
-}
-
-/* Build a GIMPLE_ASM statement.
-
-   STRING is the assembly code.
-   NINPUT is the number of register inputs.
-   NOUTPUT is the number of register outputs.
-   NCLOBBERS is the number of clobbered registers.
-   ... are trees for each input, output and clobbered register.  */
-
-gimple
-gimple_build_asm (const char *string, unsigned ninputs, unsigned noutputs, 
-		  unsigned nclobbers, ...)
-{
-  gimple p;
-  unsigned i;
-  va_list ap;
-  
-  p = gimple_build_asm_1 (string, ninputs, noutputs, nclobbers);
-  
-  va_start (ap, nclobbers);
-
-  for (i = 0; i < ninputs; i++)
-    gimple_asm_set_input_op (p, i, va_arg (ap, tree));
-
-  for (i = 0; i < noutputs; i++)
-    gimple_asm_set_output_op (p, i, va_arg (ap, tree));
-
-  for (i = 0; i < nclobbers; i++)
-    gimple_asm_set_clobber_op (p, i, va_arg (ap, tree));
-
-  va_end (ap);
-  
+
+  for (i = 0; i < VEC_length (tree, labels); i++)
+    gimple_asm_set_label_op (p, i, VEC_index (tree, labels, i));
+
   return p;
 }
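
This tracks upstream's asm-goto support: the GIMPLE_ASM tuple gains a label count (nl), all four operand groups share the one flat operand vector, and the new assert encodes the rule that an asm with labels cannot have outputs. A sketch of that layout's index arithmetic; the struct, the accessor, and the inputs/outputs/clobbers/labels ordering are illustrative assumptions, not the real gimple.h API:

    #include <assert.h>

    /* Counts plus a flat operand vector, as in the gimple_asm tuple.  */
    struct asm_stmt
    {
      unsigned ni, no, nc, nl;
      const char *ops[16];
    };

    /* Label operands live after the input, output and clobber groups
       (ordering assumed for this sketch).  */
    static const char *
    asm_label_op (const struct asm_stmt *s, unsigned i)
    {
      assert (i < s->nl);
      return s->ops[s->ni + s->no + s->nc + i];
    }

    int main (void)
    {
      /* Shape of an asm goto: 1 input, 0 outputs, 1 clobber, 1 label.  */
      struct asm_stmt s = { 1, 0, 1, 1, { "x", "memory", "Lerror" } };
      assert (s.nl == 0 || s.no == 0);  /* labels exclude outputs */
      assert (asm_label_op (&s, 0) == s.ops[2]);
      return 0;
    }
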
 
@@ -720,6 +627,20 @@
   return p;
 }
 
+/* Build a GIMPLE_EH_MUST_NOT_THROW statement.  */
+
+gimple
+gimple_build_eh_must_not_throw (tree decl)
+{
+  gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 1);
+
+  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
+  gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
+  gimple_eh_must_not_throw_set_fndecl (p, decl);
+
+  return p;
+}
+
 /* Build a GIMPLE_TRY statement.
 
    EVAL is the expression to evaluate.
@@ -729,7 +650,7 @@
 
 gimple
 gimple_build_try (gimple_seq eval, gimple_seq cleanup,
-    		  enum gimple_try_flags kind)
+              enum gimple_try_flags kind)
 {
   gimple p;
 
@@ -759,16 +680,13 @@
 }
 
 
-/* Build a GIMPLE_RESX statement.
-
-   REGION is the region number from which this resx causes control flow to 
-   leave.  */
+/* Build a GIMPLE_RESX statement.  */
 
 gimple
 gimple_build_resx (int region)
 {
-  gimple p = gimple_alloc (GIMPLE_RESX, 0);
-  gimple_resx_set_region (p, region);
+  gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0);
+  p->gimple_eh_ctrl.region = region;
   return p;
 }
 
@@ -778,13 +696,15 @@
    NLABELS is the number of labels in the switch excluding the default.
    DEFAULT_LABEL is the default label for the switch statement.  */
 
-static inline gimple 
-gimple_build_switch_1 (unsigned nlabels, tree index, tree default_label)
+gimple
+gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
 {
   /* nlabels + 1 default label + 1 index.  */
-  gimple p = gimple_build_with_ops (GIMPLE_SWITCH, 0, nlabels + 1 + 1);
+  gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
+                    1 + (default_label != NULL) + nlabels);
   gimple_switch_set_index (p, index);
-  gimple_switch_set_default_label (p, default_label);
+  if (default_label)
+    gimple_switch_set_default_label (p, default_label);
   return p;
 }
 
@@ -792,22 +712,21 @@
 /* Build a GIMPLE_SWITCH statement.
 
    INDEX is the switch's index.
-   NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL. 
+   NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL.
    ... are the labels excluding the default.  */
 
-gimple 
+gimple
 gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...)
 {
   va_list al;
-  unsigned i;
-  gimple p;
-  
-  p = gimple_build_switch_1 (nlabels, index, default_label);
+  unsigned i, offset;
+  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);
 
   /* Store the rest of the labels.  */
   va_start (al, default_label);
-  for (i = 1; i <= nlabels; i++)
-    gimple_switch_set_label (p, i, va_arg (al, tree));
+  offset = (default_label != NULL);
+  for (i = 0; i < nlabels; i++)
+    gimple_switch_set_label (p, i + offset, va_arg (al, tree));
   va_end (al);
 
   return p;
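
gimple_build_switch_nlabels now tolerates a NULL default label: it allocates 1 + (default_label != NULL) + nlabels operands, and the callers shift every case label by offset = (default_label != NULL). A small standalone sketch of the resulting operand layout:

    #include <stdio.h>

    /* Print the operand slots of a sketched GIMPLE_SWITCH: op[0] is the
       index expression, then the optional default label, then the cases.  */
    static void
    layout_switch (const char *def, const char **cases, unsigned n)
    {
      unsigned offset = (def != NULL);
      printf ("op[0] = <index>\n");
      if (def)
        printf ("op[1] = %s (default)\n", def);
      for (unsigned i = 0; i < n; i++)
        printf ("op[%u] = %s\n", 1 + offset + i, cases[i]);
    }

    int main (void)
    {
      const char *cases[] = { "case_1", "case_2" };
      layout_switch ("dflt", cases, 2);  /* 1 + 1 + 2 operand slots */
      layout_switch (NULL, cases, 2);    /* 1 + 0 + 2 operand slots */
      return 0;
    }
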
@@ -823,14 +742,45 @@
 gimple
 gimple_build_switch_vec (tree index, tree default_label, VEC(tree, heap) *args)
 {
-  unsigned i;
-  unsigned nlabels = VEC_length (tree, args);
-  gimple p = gimple_build_switch_1 (nlabels, index, default_label);
-
-  /*  Put labels in labels[1 - (nlabels + 1)].
-     Default label is in labels[0].  */
-  for (i = 1; i <= nlabels; i++)
-    gimple_switch_set_label (p, i, VEC_index (tree, args, i - 1));
+  unsigned i, offset, nlabels = VEC_length (tree, args);
+  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);
+
+  /* Copy the labels from the vector to the switch statement.  */
+  offset = (default_label != NULL);
+  for (i = 0; i < nlabels; i++)
+    gimple_switch_set_label (p, i + offset, VEC_index (tree, args, i));
+
+  return p;
+}
+
+/* Build a GIMPLE_EH_DISPATCH statement.  */
+
+gimple
+gimple_build_eh_dispatch (int region)
+{
+  gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0);
+  p->gimple_eh_ctrl.region = region;
+  return p;
+}
+
+/* Build a new GIMPLE_DEBUG_BIND statement.
+
+   VAR is bound to VALUE; block and location are taken from STMT.  */
+
+gimple
+gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
+{
+  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
+                     (unsigned)GIMPLE_DEBUG_BIND, 2
+                     PASS_MEM_STAT);
+
+  gimple_debug_bind_set_var (p, var);
+  gimple_debug_bind_set_value (p, value);
+  if (stmt)
+    {
+      gimple_set_block (p, gimple_block (stmt));
+      gimple_set_location (p, gimple_location (stmt));
+    }
 
   return p;
 }
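
gimple_build_debug_bind_stat belongs to the var-tracking-assignments work: a GIMPLE_DEBUG statement binds a user variable to a value and inherits block and location from the statement it annotates. It is also why gimple_build_with_ops_stat's SUBCODE parameter was widened from enum tree_code to plain unsigned earlier in this diff: the subcode slot can now hold values from more than one enumeration, hence casts like (unsigned) GIMPLE_DEBUG_BIND. A toy illustration of that widening, with hypothetical names throughout:

    #include <assert.h>

    enum toy_tree_code     { TOY_PLUS_EXPR, TOY_MULT_EXPR };
    enum toy_debug_subcode { TOY_DEBUG_BIND = 100 };

    /* The subcode slot stores a plain unsigned so it can carry either
       a tree code (assignments) or a debug subcode (GIMPLE_DEBUG).  */
    struct toy_stmt { unsigned code; unsigned subcode; };

    static struct toy_stmt
    toy_build_with_ops (unsigned code, unsigned subcode)
    {
      struct toy_stmt s = { code, subcode };
      return s;
    }

    int main (void)
    {
      struct toy_stmt a = toy_build_with_ops (0, (unsigned) TOY_PLUS_EXPR);
      struct toy_stmt d = toy_build_with_ops (1, (unsigned) TOY_DEBUG_BIND);
      assert (a.subcode == TOY_PLUS_EXPR && d.subcode == TOY_DEBUG_BIND);
      return 0;
    }
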
@@ -841,7 +791,7 @@
    BODY is the sequence of statements for which only one thread can execute.
    NAME is optional identifier for this critical block.  */
 
-gimple 
+gimple
 gimple_build_omp_critical (gimple_seq body, tree name)
 {
   gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
@@ -855,14 +805,14 @@
 /* Build a GIMPLE_OMP_FOR statement.
 
    BODY is sequence of statements inside the for loop.
-   CLAUSES, are any of the OMP loop construct's clauses: private, firstprivate, 
+   CLAUSES are any of the OMP loop construct's clauses: private, firstprivate,
    lastprivate, reductions, ordered, schedule, and nowait.
    COLLAPSE is the collapse count.
    PRE_BODY is the sequence of statements that are loop invariant.  */
 
 gimple
 gimple_build_omp_for (gimple_seq body, tree clauses, size_t collapse,
-		      gimple_seq pre_body)
+              gimple_seq pre_body)
 {
   gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
   if (body)
@@ -884,9 +834,9 @@
    CHILD_FN is the function created for the parallel threads to execute.
    DATA_ARG are the shared data argument(s).  */
 
-gimple 
-gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn, 
-			   tree data_arg)
+gimple
+gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
+               tree data_arg)
 {
   gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
   if (body)
@@ -908,10 +858,10 @@
    COPY_FN is the optional function for firstprivate initialization.
    ARG_SIZE and ARG_ALIGN are size and alignment of the data block.  */
 
-gimple 
+gimple
 gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
-		       tree data_arg, tree copy_fn, tree arg_size,
-		       tree arg_align)
+               tree data_arg, tree copy_fn, tree arg_size,
+               tree arg_align)
 {
   gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
   if (body)
@@ -946,7 +896,7 @@
 
    BODY is the sequence of statements to be executed by just the master.  */
 
-gimple 
+gimple
 gimple_build_omp_master (gimple_seq body)
 {
   gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
@@ -962,7 +912,7 @@
    CONTROL_DEF is the definition of the control variable.
    CONTROL_USE is the use of the control variable.  */
 
-gimple 
+gimple
 gimple_build_omp_continue (tree control_def, tree control_use)
 {
   gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
@@ -976,7 +926,7 @@
    BODY is the sequence of statements inside a loop that will be executed in
    sequence.  */
 
-gimple 
+gimple
 gimple_build_omp_ordered (gimple_seq body)
 {
   gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
@@ -990,7 +940,7 @@
 /* Build a GIMPLE_OMP_RETURN statement.
    WAIT_P is true if this is a non-waiting return.  */
 
-gimple 
+gimple
 gimple_build_omp_return (bool wait_p)
 {
   gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
@@ -1007,7 +957,7 @@
    CLAUSES are any of the OMP sections construct's clauses: private,
    firstprivate, lastprivate, reduction, and nowait.  */
 
-gimple 
+gimple
 gimple_build_omp_sections (gimple_seq body, tree clauses)
 {
   gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
@@ -1034,7 +984,7 @@
    CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
    copyprivate, nowait.  */
 
-gimple 
+gimple
 gimple_build_omp_single (gimple_seq body, tree clauses)
 {
   gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
@@ -1046,20 +996,6 @@
 }
 
 
-/* Build a GIMPLE_CHANGE_DYNAMIC_TYPE statement.  TYPE is the new type
-   for the location PTR.  */
-
-gimple
-gimple_build_cdt (tree type, tree ptr)
-{
-  gimple p = gimple_build_with_ops (GIMPLE_CHANGE_DYNAMIC_TYPE, 0, 2);
-  gimple_cdt_set_new_type (p, type);
-  gimple_cdt_set_location (p, ptr);
-
-  return p;
-}
-
-
 /* Build a GIMPLE_OMP_ATOMIC_LOAD statement.  */
 
 gimple
@@ -1097,31 +1033,22 @@
   return p;
 }
 
-/* Return which gimple structure is used by T.  The enums here are defined
-   in gsstruct.def.  */
-
-enum gimple_statement_structure_enum
-gimple_statement_structure (gimple gs)
-{
-  return gss_for_code (gimple_code (gs));
-}
-
 #if defined ENABLE_GIMPLE_CHECKING
 /* Complain of a gimple type mismatch and die.  */
 
 void
 gimple_check_failed (const_gimple gs, const char *file, int line,
-		     const char *function, enum gimple_code code,
-		     enum tree_code subcode)
+             const char *function, enum gimple_code code,
+             enum tree_code subcode)
 {
   internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
-      		  gimple_code_name[code],
-		  tree_code_name[subcode],
-		  gimple_code_name[gimple_code (gs)],
-		  gs->gsbase.subcode > 0
-		    ? tree_code_name[gs->gsbase.subcode]
-		    : "",
-		  function, trim_filename (file), line);
+              gimple_code_name[code],
+          tree_code_name[subcode],
+          gimple_code_name[gimple_code (gs)],
+          gs->gsbase.subcode > 0
+            ? tree_code_name[gs->gsbase.subcode]
+            : "",
+          function, trim_filename (file), line);
 }
 #endif /* ENABLE_GIMPLE_CHECKING */
 
@@ -1166,7 +1093,7 @@
   /* If this triggers, it's a sign that the same list is being freed
      twice.  */
   gcc_assert (seq != gimple_seq_cache || gimple_seq_cache == NULL);
-  
+
   /* Add SEQ to the pool of free sequences.  */
   seq->next_free = gimple_seq_cache;
   gimple_seq_cache = seq;
@@ -1232,11 +1159,11 @@
 {
   gimple_stmt_iterator i;
 
-
   if (gimple_seq_empty_p (body))
     return true;
   for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
-    if (!empty_stmt_p (gsi_stmt (i)))
+    if (!empty_stmt_p (gsi_stmt (i))
+    && !is_gimple_debug (gsi_stmt (i)))
       return false;
 
   return true;
@@ -1264,7 +1191,7 @@
 
 /* Walk all the statements in the sequence SEQ calling walk_gimple_stmt
    on each one.  WI is as in walk_gimple_stmt.
-   
+
    If walk_gimple_stmt returns non-NULL, the walk is stopped, the
    value is stored in WI->CALLBACK_RESULT and the statement that
    produced the value is returned.
@@ -1273,7 +1200,7 @@
 
 gimple
 walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
-		 walk_tree_fn callback_op, struct walk_stmt_info *wi)
+         walk_tree_fn callback_op, struct walk_stmt_info *wi)
 {
   gimple_stmt_iterator gsi;
 
@@ -1281,13 +1208,13 @@
     {
       tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
       if (ret)
-	{
-	  /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
-	     to hold it.  */
-	  gcc_assert (wi);
-	  wi->callback_result = ret;
-	  return gsi_stmt (gsi);
-	}
+    {
+      /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
+         to hold it.  */
+      gcc_assert (wi);
+      wi->callback_result = ret;
+      return gsi_stmt (gsi);
+    }
     }
 
   if (wi)
@@ -1301,12 +1228,12 @@
 
 static tree
 walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
-		 struct walk_stmt_info *wi)
+         struct walk_stmt_info *wi)
 {
-  tree ret;
+  tree ret, op;
   unsigned noutputs;
   const char **oconstraints;
-  unsigned i;
+  unsigned i, n;
   const char *constraint;
   bool allows_mem, allows_reg, is_inout;
 
@@ -1318,33 +1245,34 @@
 
   for (i = 0; i < noutputs; i++)
     {
-      tree op = gimple_asm_output_op (stmt, i);
+      op = gimple_asm_output_op (stmt, i);
       constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
       oconstraints[i] = constraint;
       parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
-	                       &is_inout);
+                           &is_inout);
       if (wi)
-	wi->val_only = (allows_reg || !allows_mem);
+    wi->val_only = (allows_reg || !allows_mem);
       ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
       if (ret)
-	return ret;
+    return ret;
     }
 
-  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
+  n = gimple_asm_ninputs (stmt);
+  for (i = 0; i < n; i++)
     {
-      tree op = gimple_asm_input_op (stmt, i);
+      op = gimple_asm_input_op (stmt, i);
       constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
       parse_input_constraint (&constraint, 0, 0, noutputs, 0,
-			      oconstraints, &allows_mem, &allows_reg);
+                  oconstraints, &allows_mem, &allows_reg);
       if (wi)
-	wi->val_only = (allows_reg || !allows_mem);
-
-      /* Although input "m" is not really a LHS, we need a lvalue.  */
-      if (wi)
-	wi->is_lhs = !wi->val_only;
+    {
+      wi->val_only = (allows_reg || !allows_mem);
+          /* Although input "m" is not really an LHS, we need an lvalue.  */
+      wi->is_lhs = !wi->val_only;
+    }
       ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
       if (ret)
-	return ret;
+    return ret;
     }
 
   if (wi)
@@ -1353,6 +1281,15 @@
       wi->val_only = true;
     }
 
+  n = gimple_asm_nlabels (stmt);
+  for (i = 0; i < n; i++)
+    {
+      op = gimple_asm_label_op (stmt, i);
+      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
+      if (ret)
+    return ret;
+    }
+
   return NULL_TREE;
 }
 
@@ -1364,7 +1301,7 @@
    Additional parameters to walk_tree must be stored in WI.  For each operand
    OP, walk_tree is called as:
 
-	walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)
+    walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)
 
    If CALLBACK_OP returns non-NULL for an operand, the remaining
    operands are not scanned.
@@ -1374,7 +1311,7 @@
 
 inline tree
 walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
-		struct walk_stmt_info *wi)
+        struct walk_stmt_info *wi)
 {
   struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
   unsigned i;
@@ -1384,43 +1321,44 @@
     {
     case GIMPLE_ASSIGN:
       /* Walk the RHS operands.  A formal temporary LHS may use a
-	 COMPONENT_REF RHS.  */
+     COMPONENT_REF RHS.  */
       if (wi)
-	wi->val_only = !is_gimple_formal_tmp_var (gimple_assign_lhs (stmt));
+    wi->val_only = !is_gimple_reg (gimple_assign_lhs (stmt))
+                       || !gimple_assign_single_p (stmt);
 
       for (i = 1; i < gimple_num_ops (stmt); i++)
-	{
-	  ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
-			   pset);
-	  if (ret)
-	    return ret;
-	}
+    {
+      ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
+               pset);
+      if (ret)
+        return ret;
+    }
 
       /* Walk the LHS.  If the RHS is appropriate for a memory, we
-	 may use a COMPONENT_REF on the LHS.  */
+     may use a COMPONENT_REF on the LHS.  */
       if (wi)
-	{
+    {
           /* If the RHS has more than 1 operand, it is not appropriate
              for the memory.  */
-	  wi->val_only = !is_gimple_mem_rhs (gimple_assign_rhs1 (stmt))
+      wi->val_only = !is_gimple_mem_rhs (gimple_assign_rhs1 (stmt))
                          || !gimple_assign_single_p (stmt);
-	  wi->is_lhs = true;
-	}
+      wi->is_lhs = true;
+    }
 
       ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
       if (ret)
-	return ret;
+    return ret;
 
       if (wi)
-	{
-	  wi->val_only = true;
-	  wi->is_lhs = false;
-	}
+    {
+      wi->val_only = true;
+      wi->is_lhs = false;
+    }
       break;
 
     case GIMPLE_CALL:
       if (wi)
-	wi->is_lhs = false;
+    wi->is_lhs = false;
 
       ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
       if (ret)
@@ -1431,178 +1369,168 @@
         return ret;
 
       for (i = 0; i < gimple_call_num_args (stmt); i++)
-	{
-	  ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
-			   pset);
-	  if (ret)
-	    return ret;
-	}
+    {
+      ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
+               pset);
+      if (ret)
+        return ret;
+    }
 
       if (wi)
-	wi->is_lhs = true;
+    wi->is_lhs = true;
 
       ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
       if (ret)
-	return ret;
+    return ret;
 
       if (wi)
-	wi->is_lhs = false;
+    wi->is_lhs = false;
       break;
 
     case GIMPLE_CATCH:
       ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
-		       pset);
+               pset);
       if (ret)
-	return ret;
+    return ret;
       break;
 
     case GIMPLE_EH_FILTER:
       ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
-		       pset);
+               pset);
       if (ret)
-	return ret;
-      break;
-
-    case GIMPLE_CHANGE_DYNAMIC_TYPE:
-      ret = walk_tree (gimple_cdt_location_ptr (stmt), callback_op, wi, pset);
-      if (ret)
-	return ret;
-
-      ret = walk_tree (gimple_cdt_new_type_ptr (stmt), callback_op, wi, pset);
-      if (ret)
-	return ret;
+    return ret;
       break;
 
     case GIMPLE_ASM:
       ret = walk_gimple_asm (stmt, callback_op, wi);
       if (ret)
-	return ret;
+    return ret;
       break;
 
     case GIMPLE_OMP_CONTINUE:
       ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
-	  	       callback_op, wi, pset);
+               callback_op, wi, pset);
       if (ret)
-	return ret;
+    return ret;
 
       ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
-	  	       callback_op, wi, pset);
+               callback_op, wi, pset);
       if (ret)
-	return ret;
+    return ret;
       break;
 
     case GIMPLE_OMP_CRITICAL:
       ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
-		       pset);
+               pset);
       if (ret)
-	return ret;
+    return ret;
       break;
 
     case GIMPLE_OMP_FOR:
       ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
-		       pset);
+               pset);
       if (ret)
-	return ret;
+    return ret;
       for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
-	{
-	  ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
-			   wi, pset);
-	  if (ret)
-	    return ret;
-	  ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
-			   wi, pset);
-	  if (ret)
-	    return ret;
-	  ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
-			   wi, pset);
-	  if (ret)
-	    return ret;
-	  ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
-			   wi, pset);
-	}
+    {
+      ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
+               wi, pset);
+      if (ret)
+        return ret;
+      ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
+               wi, pset);
       if (ret)
-	return ret;
+        return ret;
+      ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
+               wi, pset);
+      if (ret)
+        return ret;
+      ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
+               wi, pset);
+    }
+      if (ret)
+    return ret;
       break;
 
     case GIMPLE_OMP_PARALLEL:
       ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
-		       wi, pset);
+               wi, pset);
       if (ret)
-	return ret;
+    return ret;
       ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
-		       wi, pset);
+               wi, pset);
       if (ret)
-	return ret;
+    return ret;
       ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
-		       wi, pset);
+               wi, pset);
       if (ret)
-	return ret;
+    return ret;
       break;
 
     case GIMPLE_OMP_TASK:
       ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
-		       wi, pset);
+               wi, pset);
       if (ret)
-	return ret;
+    return ret;
       ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
-		       wi, pset);
+               wi, pset);
       if (ret)
-	return ret;
+    return ret;
       ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
-		       wi, pset);
+               wi, pset);
       if (ret)
-	return ret;
+    return ret;
       ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
-		       wi, pset);
+               wi, pset);
       if (ret)
-	return ret;
+    return ret;
       ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
-		       wi, pset);
+               wi, pset);
       if (ret)
-	return ret;
+    return ret;
       ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
-		       wi, pset);
+               wi, pset);
       if (ret)
-	return ret;
+    return ret;
       break;
 
     case GIMPLE_OMP_SECTIONS:
       ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
-		       wi, pset);
+               wi, pset);
       if (ret)
-	return ret;
+    return ret;
 
       ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
-		       wi, pset);
+               wi, pset);
       if (ret)
-	return ret;
+    return ret;
 
       break;
 
     case GIMPLE_OMP_SINGLE:
       ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
-		       pset);
+               pset);
       if (ret)
-	return ret;
+    return ret;
       break;
 
     case GIMPLE_OMP_ATOMIC_LOAD:
       ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
-		       pset);
+               pset);
       if (ret)
-	return ret;
+    return ret;
 
       ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
-		       pset);
+               pset);
       if (ret)
-	return ret;
+    return ret;
       break;
 
     case GIMPLE_OMP_ATOMIC_STORE:
       ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
-		       wi, pset);
+               wi, pset);
       if (ret)
-	return ret;
+    return ret;
       break;
 
       /* Tuples that do not have operands.  */
@@ -1614,15 +1542,15 @@
 
     default:
       {
-	enum gimple_statement_structure_enum gss;
-	gss = gimple_statement_structure (stmt);
-	if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
-	  for (i = 0; i < gimple_num_ops (stmt); i++)
-	    {
-	      ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
-	      if (ret)
-		return ret;
-	    }
+    enum gimple_statement_structure_enum gss;
+    gss = gimple_statement_structure (stmt);
+    if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
+      for (i = 0; i < gimple_num_ops (stmt); i++)
+        {
+          ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
+          if (ret)
+        return ret;
+        }
       }
       break;
     }
@@ -1648,7 +1576,7 @@
 
 tree
 walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
-		  walk_tree_fn callback_op, struct walk_stmt_info *wi)
+          walk_tree_fn callback_op, struct walk_stmt_info *wi)
 {
   gimple ret;
   tree tree_ret;
@@ -1669,10 +1597,10 @@
       bool handled_ops = false;
       tree_ret = callback_stmt (gsi, &handled_ops, wi);
       if (handled_ops)
-	return tree_ret;
+    return tree_ret;
 
       /* If CALLBACK_STMT did not handle operands, it should not have
-	 a value to return.  */
+     a value to return.  */
       gcc_assert (tree_ret == NULL);
 
       /* Re-read stmt in case the callback changed it.  */
@@ -1684,7 +1612,7 @@
     {
       tree_ret = walk_gimple_op (stmt, callback_op, wi);
       if (tree_ret)
-	return tree_ret;
+    return tree_ret;
     }
 
   /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.  */
@@ -1692,42 +1620,42 @@
     {
     case GIMPLE_BIND:
       ret = walk_gimple_seq (gimple_bind_body (stmt), callback_stmt,
-	                     callback_op, wi);
+                         callback_op, wi);
       if (ret)
-	return wi->callback_result;
+    return wi->callback_result;
       break;
 
     case GIMPLE_CATCH:
       ret = walk_gimple_seq (gimple_catch_handler (stmt), callback_stmt,
-	                     callback_op, wi);
+                         callback_op, wi);
       if (ret)
-	return wi->callback_result;
+    return wi->callback_result;
       break;
 
     case GIMPLE_EH_FILTER:
       ret = walk_gimple_seq (gimple_eh_filter_failure (stmt), callback_stmt,
-		             callback_op, wi);
+                     callback_op, wi);
       if (ret)
-	return wi->callback_result;
+    return wi->callback_result;
       break;
 
     case GIMPLE_TRY:
       ret = walk_gimple_seq (gimple_try_eval (stmt), callback_stmt, callback_op,
-	                     wi);
+                         wi);
       if (ret)
-	return wi->callback_result;
+    return wi->callback_result;
 
       ret = walk_gimple_seq (gimple_try_cleanup (stmt), callback_stmt,
-	                     callback_op, wi);
+                         callback_op, wi);
       if (ret)
-	return wi->callback_result;
+    return wi->callback_result;
       break;
 
     case GIMPLE_OMP_FOR:
       ret = walk_gimple_seq (gimple_omp_for_pre_body (stmt), callback_stmt,
-		             callback_op, wi);
+                     callback_op, wi);
       if (ret)
-	return wi->callback_result;
+    return wi->callback_result;
 
       /* FALL THROUGH.  */
     case GIMPLE_OMP_CRITICAL:
@@ -1739,16 +1667,16 @@
     case GIMPLE_OMP_SECTIONS:
     case GIMPLE_OMP_SINGLE:
       ret = walk_gimple_seq (gimple_omp_body (stmt), callback_stmt, callback_op,
-	                     wi);
+                         wi);
       if (ret)
-	return wi->callback_result;
+    return wi->callback_result;
       break;
 
     case GIMPLE_WITH_CLEANUP_EXPR:
       ret = walk_gimple_seq (gimple_wce_cleanup (stmt), callback_stmt,
-			     callback_op, wi);
+                 callback_op, wi);
       if (ret)
-	return wi->callback_result;
+    return wi->callback_result;
       break;
 
     default:
@@ -1769,8 +1697,8 @@
   if (fn == NULL)
     {
       /* If FNDECL still does not have a function structure associated
-	 with it, then it does not make sense for it to receive a
-	 GIMPLE body.  */
+     with it, then it does not make sense for it to receive a
+     GIMPLE body.  */
       gcc_assert (seq == NULL);
     }
   else
@@ -1812,9 +1740,9 @@
     {
       t = TREE_TYPE (gimple_call_fn (stmt));
       if (t && TREE_CODE (t) == POINTER_TYPE)
-	flags = flags_from_decl_or_type (TREE_TYPE (t));
+    flags = flags_from_decl_or_type (TREE_TYPE (t));
       else
-	flags = 0;
+    flags = 0;
     }
 
   return flags;
@@ -1828,8 +1756,8 @@
 {
   return gimple_code (gs) == GIMPLE_ASSIGN
          && get_gimple_rhs_class (gimple_assign_rhs_code (gs))
-	    == GIMPLE_SINGLE_RHS
-	 && is_gimple_val (gimple_op (gs, 1));
+        == GIMPLE_SINGLE_RHS
+     && is_gimple_val (gimple_op (gs, 1));
 }
 
 
@@ -1839,10 +1767,10 @@
 gimple_assign_ssa_name_copy_p (gimple gs)
 {
   return (gimple_code (gs) == GIMPLE_ASSIGN
-	  && (get_gimple_rhs_class (gimple_assign_rhs_code (gs))
-	      == GIMPLE_SINGLE_RHS)
-	  && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
-	  && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
+      && (get_gimple_rhs_class (gimple_assign_rhs_code (gs))
+          == GIMPLE_SINGLE_RHS)
+      && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
+      && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
 }
 
 
@@ -1857,7 +1785,7 @@
 {
   return (gimple_code (gs) == GIMPLE_ASSIGN
           && get_gimple_rhs_class (gimple_assign_rhs_code (gs))
-	     == GIMPLE_SINGLE_RHS);
+         == GIMPLE_SINGLE_RHS);
 }
 
 /* Return true if GS is an assignment with a unary RHS, but the
@@ -1873,7 +1801,7 @@
    assignment.  I suspect there may be cases where gimple_assign_copy_p,
    gimple_assign_single_p, or equivalent logic is used where a similar
    treatment of unary NOPs is appropriate.  */
-   
+
 bool
 gimple_assign_unary_nop_p (gimple gs)
 {
@@ -1902,17 +1830,17 @@
       t = gimple_label_label (stmt);
       uid = LABEL_DECL_UID (t);
       if (uid == -1)
-	{
-	  unsigned old_len = VEC_length (basic_block, label_to_block_map);
-	  LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
-	  if (old_len <= (unsigned) uid)
-	    {
-	      unsigned new_len = 3 * uid / 2;
-
-	      VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
-				     new_len);
-	    }
-	}
+    {
+      unsigned old_len = VEC_length (basic_block, label_to_block_map);
+      LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
+      if (old_len <= (unsigned) uid)
+        {
+          unsigned new_len = 3 * uid / 2 + 1;
+
+          VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
+                     new_len);
+        }
+    }
 
       VEC_replace (basic_block, label_to_block_map, uid, bb);
     }
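
The + 1 fixes an off-by-one in the growth formula: for uid 0 and uid 1 the old 3 * uid / 2 evaluates to 0 and 1 respectively, which is not greater than uid, so growing to that length would still leave index uid out of range. A quick standalone check of the corrected formula:

    #include <assert.h>

    int main (void)
    {
      for (unsigned uid = 0; uid < 1000; uid++)
        {
          /* Old: 3 * uid / 2 is 0 for uid 0 and 1 for uid 1 -- not
             enough to make index uid valid after growing.  */
          unsigned new_len = 3 * uid / 2 + 1;
          assert (new_len > uid);
        }
      return 0;
    }
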
@@ -1926,30 +1854,31 @@
 tree
 gimple_fold (const_gimple stmt)
 {
+  location_t loc = gimple_location (stmt);
   switch (gimple_code (stmt))
     {
     case GIMPLE_COND:
-      return fold_binary (gimple_cond_code (stmt),
-			  boolean_type_node,
-			  gimple_cond_lhs (stmt),
-			  gimple_cond_rhs (stmt));
+      return fold_binary_loc (loc, gimple_cond_code (stmt),
+              boolean_type_node,
+              gimple_cond_lhs (stmt),
+              gimple_cond_rhs (stmt));
 
     case GIMPLE_ASSIGN:
       switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
-	{
-	case GIMPLE_UNARY_RHS:
-	  return fold_unary (gimple_assign_rhs_code (stmt),
-			     TREE_TYPE (gimple_assign_lhs (stmt)),
-			     gimple_assign_rhs1 (stmt));
-	case GIMPLE_BINARY_RHS:
-	  return fold_binary (gimple_assign_rhs_code (stmt),
-			      TREE_TYPE (gimple_assign_lhs (stmt)),
-			      gimple_assign_rhs1 (stmt),
-			      gimple_assign_rhs2 (stmt));
-	case GIMPLE_SINGLE_RHS:
-	  return fold (gimple_assign_rhs1 (stmt));
-	default:;
-	}
+    {
+    case GIMPLE_UNARY_RHS:
+      return fold_unary_loc (loc, gimple_assign_rhs_code (stmt),
+                 TREE_TYPE (gimple_assign_lhs (stmt)),
+                 gimple_assign_rhs1 (stmt));
+    case GIMPLE_BINARY_RHS:
+      return fold_binary_loc (loc, gimple_assign_rhs_code (stmt),
+                  TREE_TYPE (gimple_assign_lhs (stmt)),
+                  gimple_assign_rhs1 (stmt),
+                  gimple_assign_rhs2 (stmt));
+    case GIMPLE_SINGLE_RHS:
+      return fold (gimple_assign_rhs1 (stmt));
+    default:;
+    }
       break;
 
     case GIMPLE_SWITCH:
@@ -2002,7 +1931,7 @@
 
 void
 gimple_assign_set_rhs_with_ops (gimple_stmt_iterator *gsi, enum tree_code code,
-				tree op1, tree op2)
+                tree op1, tree op2)
 {
   unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
   gimple stmt = gsi_stmt (*gsi);
@@ -2017,7 +1946,7 @@
       stmt = new_stmt;
 
       /* The LHS needs to be reset as this also changes the SSA name
-	 on the LHS.  */
+     on the LHS.  */
       gimple_assign_set_lhs (stmt, lhs);
     }
 
@@ -2064,6 +1993,39 @@
     gcc_unreachable();
 }
 
+/* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
+   GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
+   expression with a different value.
+
+   This will update any annotations (say debug bind stmts) referring
+   to the original LHS, so that they use the RHS instead.  This is
+   done even if NLHS and LHS are the same, for it is understood that
+   the RHS will be modified afterwards, and NLHS will not be assigned
+   an equivalent value.
+
+   Adjusting any non-annotation uses of the LHS, if needed, is a
+   responsibility of the caller.
+
+   The effect of this call should be pretty much the same as that of
+   inserting a copy of STMT before STMT, and then removing the
+   original stmt, at which time gsi_remove() would have updated
+   annotations, but using this function saves all the inserting,
+   copying and removing.  */
+
+void
+gimple_replace_lhs (gimple stmt, tree nlhs)
+{
+  if (MAY_HAVE_DEBUG_STMTS)
+    {
+      tree lhs = gimple_get_lhs (stmt);
+
+      gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
+
+      insert_debug_temp_for_var_def (NULL, lhs);
+    }
+
+  gimple_set_lhs (stmt, nlhs);
+}
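
gimple_replace_lhs is another var-tracking-assignments hook: before the definition is retargeted, insert_debug_temp_for_var_def rewrites any debug binds that referenced the old LHS so they keep tracking the old value expression rather than the renamed result. A toy model of that pattern, with strings standing in for trees and debug bind statements:

    #include <stdio.h>
    #include <string.h>

    struct dbg_bind { char var[8]; char value[16]; };

    /* Rewrite debug binds that referenced LHS to bind OLD_RHS instead,
       then retarget LHS to NLHS.  */
    static void
    replace_lhs (char *lhs, size_t lhs_size, const char *old_rhs,
                 struct dbg_bind *binds, unsigned n, const char *nlhs)
    {
      for (unsigned i = 0; i < n; i++)
        if (strcmp (binds[i].value, lhs) == 0)
          snprintf (binds[i].value, sizeof binds[i].value, "%s", old_rhs);
      snprintf (lhs, lhs_size, "%s", nlhs);
    }

    int main (void)
    {
      char lhs[8] = "x_1";                 /* x_1 = a + b;     */
      struct dbg_bind b = { "x", "x_1" };  /* # DEBUG x => x_1 */
      replace_lhs (lhs, sizeof lhs, "a + b", &b, 1, "t_2");
      printf ("# DEBUG %s => %s\n", b.var, b.value);  /* => a + b */
      return 0;
    }
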
 
 /* Return a deep copy of statement STMT.  All the operands from STMT
    are reallocated and copied using unshare_expr.  The DEF, USE, VDEF
@@ -2087,140 +2049,134 @@
       tree t;
 
       switch (gimple_code (stmt))
-	{
-	case GIMPLE_BIND:
-	  new_seq = gimple_seq_copy (gimple_bind_body (stmt));
-	  gimple_bind_set_body (copy, new_seq);
-	  gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
-	  gimple_bind_set_block (copy, gimple_bind_block (stmt));
-	  break;
-
-	case GIMPLE_CATCH:
-	  new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
-	  gimple_catch_set_handler (copy, new_seq);
-	  t = unshare_expr (gimple_catch_types (stmt));
-	  gimple_catch_set_types (copy, t);
-	  break;
-
-	case GIMPLE_EH_FILTER:
-	  new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
-	  gimple_eh_filter_set_failure (copy, new_seq);
-	  t = unshare_expr (gimple_eh_filter_types (stmt));
-	  gimple_eh_filter_set_types (copy, t);
-	  break;
-
-	case GIMPLE_TRY:
-	  new_seq = gimple_seq_copy (gimple_try_eval (stmt));
-	  gimple_try_set_eval (copy, new_seq);
-	  new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
-	  gimple_try_set_cleanup (copy, new_seq);
-	  break;
-
-	case GIMPLE_OMP_FOR:
-	  new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
-	  gimple_omp_for_set_pre_body (copy, new_seq);
-	  t = unshare_expr (gimple_omp_for_clauses (stmt));
-	  gimple_omp_for_set_clauses (copy, t);
-	  copy->gimple_omp_for.iter
-	    = GGC_NEWVEC (struct gimple_omp_for_iter,
-			  gimple_omp_for_collapse (stmt));
-	  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
-	    {
-	      gimple_omp_for_set_cond (copy, i,
-				       gimple_omp_for_cond (stmt, i));
-	      gimple_omp_for_set_index (copy, i,
-					gimple_omp_for_index (stmt, i));
-	      t = unshare_expr (gimple_omp_for_initial (stmt, i));
-	      gimple_omp_for_set_initial (copy, i, t);
-	      t = unshare_expr (gimple_omp_for_final (stmt, i));
-	      gimple_omp_for_set_final (copy, i, t);
-	      t = unshare_expr (gimple_omp_for_incr (stmt, i));
-	      gimple_omp_for_set_incr (copy, i, t);
-	    }
-	  goto copy_omp_body;
-
-	case GIMPLE_OMP_PARALLEL:
-	  t = unshare_expr (gimple_omp_parallel_clauses (stmt));
-	  gimple_omp_parallel_set_clauses (copy, t);
-	  t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
-	  gimple_omp_parallel_set_child_fn (copy, t);
-	  t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
-	  gimple_omp_parallel_set_data_arg (copy, t);
-	  goto copy_omp_body;
-
-	case GIMPLE_OMP_TASK:
-	  t = unshare_expr (gimple_omp_task_clauses (stmt));
-	  gimple_omp_task_set_clauses (copy, t);
-	  t = unshare_expr (gimple_omp_task_child_fn (stmt));
-	  gimple_omp_task_set_child_fn (copy, t);
-	  t = unshare_expr (gimple_omp_task_data_arg (stmt));
-	  gimple_omp_task_set_data_arg (copy, t);
-	  t = unshare_expr (gimple_omp_task_copy_fn (stmt));
-	  gimple_omp_task_set_copy_fn (copy, t);
-	  t = unshare_expr (gimple_omp_task_arg_size (stmt));
-	  gimple_omp_task_set_arg_size (copy, t);
-	  t = unshare_expr (gimple_omp_task_arg_align (stmt));
-	  gimple_omp_task_set_arg_align (copy, t);
-	  goto copy_omp_body;
-
-	case GIMPLE_OMP_CRITICAL:
-	  t = unshare_expr (gimple_omp_critical_name (stmt));
-	  gimple_omp_critical_set_name (copy, t);
-	  goto copy_omp_body;
-
-	case GIMPLE_OMP_SECTIONS:
-	  t = unshare_expr (gimple_omp_sections_clauses (stmt));
-	  gimple_omp_sections_set_clauses (copy, t);
-	  t = unshare_expr (gimple_omp_sections_control (stmt));
-	  gimple_omp_sections_set_control (copy, t);
-	  /* FALLTHRU  */
-
-	case GIMPLE_OMP_SINGLE:
-	case GIMPLE_OMP_SECTION:
-	case GIMPLE_OMP_MASTER:
-	case GIMPLE_OMP_ORDERED:
-	copy_omp_body:
-	  new_seq = gimple_seq_copy (gimple_omp_body (stmt));
-	  gimple_omp_set_body (copy, new_seq);
-	  break;
-
-	case GIMPLE_WITH_CLEANUP_EXPR:
-	  new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
-	  gimple_wce_set_cleanup (copy, new_seq);
-	  break;
-
-	default:
-	  gcc_unreachable ();
-	}
+    {
+    case GIMPLE_BIND:
+      new_seq = gimple_seq_copy (gimple_bind_body (stmt));
+      gimple_bind_set_body (copy, new_seq);
+      gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
+      gimple_bind_set_block (copy, gimple_bind_block (stmt));
+      break;
+
+    case GIMPLE_CATCH:
+      new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
+      gimple_catch_set_handler (copy, new_seq);
+      t = unshare_expr (gimple_catch_types (stmt));
+      gimple_catch_set_types (copy, t);
+      break;
+
+    case GIMPLE_EH_FILTER:
+      new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
+      gimple_eh_filter_set_failure (copy, new_seq);
+      t = unshare_expr (gimple_eh_filter_types (stmt));
+      gimple_eh_filter_set_types (copy, t);
+      break;
+
+    case GIMPLE_TRY:
+      new_seq = gimple_seq_copy (gimple_try_eval (stmt));
+      gimple_try_set_eval (copy, new_seq);
+      new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
+      gimple_try_set_cleanup (copy, new_seq);
+      break;
+
+    case GIMPLE_OMP_FOR:
+      new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
+      gimple_omp_for_set_pre_body (copy, new_seq);
+      t = unshare_expr (gimple_omp_for_clauses (stmt));
+      gimple_omp_for_set_clauses (copy, t);
+      copy->gimple_omp_for.iter
+        = GGC_NEWVEC (struct gimple_omp_for_iter,
+              gimple_omp_for_collapse (stmt));
+      for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
+        {
+          gimple_omp_for_set_cond (copy, i,
+                       gimple_omp_for_cond (stmt, i));
+          gimple_omp_for_set_index (copy, i,
+                    gimple_omp_for_index (stmt, i));
+          t = unshare_expr (gimple_omp_for_initial (stmt, i));
+          gimple_omp_for_set_initial (copy, i, t);
+          t = unshare_expr (gimple_omp_for_final (stmt, i));
+          gimple_omp_for_set_final (copy, i, t);
+          t = unshare_expr (gimple_omp_for_incr (stmt, i));
+          gimple_omp_for_set_incr (copy, i, t);
+        }
+      goto copy_omp_body;
+
+    case GIMPLE_OMP_PARALLEL:
+      t = unshare_expr (gimple_omp_parallel_clauses (stmt));
+      gimple_omp_parallel_set_clauses (copy, t);
+      t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
+      gimple_omp_parallel_set_child_fn (copy, t);
+      t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
+      gimple_omp_parallel_set_data_arg (copy, t);
+      goto copy_omp_body;
+
+    case GIMPLE_OMP_TASK:
+      t = unshare_expr (gimple_omp_task_clauses (stmt));
+      gimple_omp_task_set_clauses (copy, t);
+      t = unshare_expr (gimple_omp_task_child_fn (stmt));
+      gimple_omp_task_set_child_fn (copy, t);
+      t = unshare_expr (gimple_omp_task_data_arg (stmt));
+      gimple_omp_task_set_data_arg (copy, t);
+      t = unshare_expr (gimple_omp_task_copy_fn (stmt));
+      gimple_omp_task_set_copy_fn (copy, t);
+      t = unshare_expr (gimple_omp_task_arg_size (stmt));
+      gimple_omp_task_set_arg_size (copy, t);
+      t = unshare_expr (gimple_omp_task_arg_align (stmt));
+      gimple_omp_task_set_arg_align (copy, t);
+      goto copy_omp_body;
+
+    case GIMPLE_OMP_CRITICAL:
+      t = unshare_expr (gimple_omp_critical_name (stmt));
+      gimple_omp_critical_set_name (copy, t);
+      goto copy_omp_body;
+
+    case GIMPLE_OMP_SECTIONS:
+      t = unshare_expr (gimple_omp_sections_clauses (stmt));
+      gimple_omp_sections_set_clauses (copy, t);
+      t = unshare_expr (gimple_omp_sections_control (stmt));
+      gimple_omp_sections_set_control (copy, t);
+      /* FALLTHRU  */
+
+    case GIMPLE_OMP_SINGLE:
+    case GIMPLE_OMP_SECTION:
+    case GIMPLE_OMP_MASTER:
+    case GIMPLE_OMP_ORDERED:
+    copy_omp_body:
+      new_seq = gimple_seq_copy (gimple_omp_body (stmt));
+      gimple_omp_set_body (copy, new_seq);
+      break;
+
+    case GIMPLE_WITH_CLEANUP_EXPR:
+      new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
+      gimple_wce_set_cleanup (copy, new_seq);
+      break;
+
+    default:
+      gcc_unreachable ();
+    }
     }
 
   /* Make copy of operands.  */
   if (num_ops > 0)
     {
       for (i = 0; i < num_ops; i++)
-	gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));
-
-      /* Clear out SSA operand vectors on COPY.  Note that we cannot
-	 call the API functions for setting addresses_taken, stores
-	 and loads.  These functions free the previous values, and we
-	 cannot do that on COPY as it will affect the original
-	 statement.  */
+    gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));
+
+      /* Clear out SSA operand vectors on COPY.  */
       if (gimple_has_ops (stmt))
-	{
-	  gimple_set_def_ops (copy, NULL);
-	  gimple_set_use_ops (copy, NULL);
-	  copy->gsops.opbase.addresses_taken = NULL;
-	}
+    {
+      gimple_set_def_ops (copy, NULL);
+      gimple_set_use_ops (copy, NULL);
+    }
 
       if (gimple_has_mem_ops (stmt))
-	{
-	  gimple_set_vdef_ops (copy, NULL);
-	  gimple_set_vuse_ops (copy, NULL);
-	  copy->gsmem.membase.stores = NULL;
-	  copy->gsmem.membase.loads = NULL;
-	}
-
-      update_stmt (copy);
+    {
+      gimple_set_vdef (copy, gimple_vdef (stmt));
+      gimple_set_vuse (copy, gimple_vuse (stmt));
+    }
+
+      /* SSA operands need to be updated.  */
+      gimple_set_modified (copy, true);
     }
 
   return copy;
@@ -2238,10 +2194,10 @@
       s->gsbase.modified = (unsigned) modifiedp;
 
       if (modifiedp
-	  && cfun->gimple_df
-	  && is_gimple_call (s)
-	  && gimple_call_noreturn_p (s))
-	VEC_safe_push (gimple, gc, MODIFIED_NORETURN_CALLS (cfun), s);
+	  && cfun->gimple_df
+	  && is_gimple_call (s)
+	  && gimple_call_noreturn_p (s))
+	VEC_safe_push (gimple, gc, MODIFIED_NORETURN_CALLS (cfun), s);
     }
 }
 
@@ -2257,6 +2213,9 @@
 {
   unsigned i;
 
+  if (is_gimple_debug (s))
+    return false;
+
   /* We don't have to scan the arguments to check for
      volatile arguments, though, at present, we still
      do a scan to check for TREE_SIDE_EFFECTS.  */
@@ -2270,36 +2229,36 @@
       if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
         return true;
       else if (gimple_call_flags (s) & ECF_LOOPING_CONST_OR_PURE)
-	/* An infinite loop is considered a side effect.  */
-	return true;
+	/* An infinite loop is considered a side effect.  */
+	return true;
 
       if (gimple_call_lhs (s)
           && TREE_SIDE_EFFECTS (gimple_call_lhs (s)))
-	{
-	  gcc_assert (gimple_has_volatile_ops (s));
-	  return true;
-	}
+	{
+	  gcc_assert (gimple_has_volatile_ops (s));
+	  return true;
+	}
 
       if (TREE_SIDE_EFFECTS (gimple_call_fn (s)))
         return true;
 
       for (i = 0; i < nargs; i++)
         if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i)))
-	  {
-	    gcc_assert (gimple_has_volatile_ops (s));
-	    return true;
-	  }
+	  {
+	    gcc_assert (gimple_has_volatile_ops (s));
+	    return true;
+	  }
 
       return false;
     }
   else
     {
       for (i = 0; i < gimple_num_ops (s); i++)
-	if (TREE_SIDE_EFFECTS (gimple_op (s, i)))
-	  {
-	    gcc_assert (gimple_has_volatile_ops (s));
-	    return true;
-	  }
+	if (TREE_SIDE_EFFECTS (gimple_op (s, i)))
+	  {
+	    gcc_assert (gimple_has_volatile_ops (s));
+	    return true;
+	  }
     }
 
   return false;
@@ -2327,10 +2286,10 @@
          because we must ignore a volatile LHS.  */
       if (TREE_SIDE_EFFECTS (gimple_call_fn (s))
           || TREE_THIS_VOLATILE (gimple_call_fn (s)))
-	{
-	  gcc_assert (gimple_has_volatile_ops (s));
-	  return true;
-	}
+	{
+	  gcc_assert (gimple_has_volatile_ops (s));
+	  return true;
+	}
 
       for (i = 0; i < nargs; i++)
         if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i))
@@ -2343,23 +2302,25 @@
     {
       /* Skip the first operand, the LHS. */
       for (i = 1; i < gimple_num_ops (s); i++)
-	if (TREE_SIDE_EFFECTS (gimple_op (s, i))
+	if (TREE_SIDE_EFFECTS (gimple_op (s, i))
             || TREE_THIS_VOLATILE (gimple_op (s, i)))
-	  {
-	    gcc_assert (gimple_has_volatile_ops (s));
-	    return true;
-	  }
+	  {
+	    gcc_assert (gimple_has_volatile_ops (s));
+	    return true;
+	  }
     }
+  else if (is_gimple_debug (s))
+    return false;
   else
     {
       /* For statements without an LHS, examine all arguments.  */
       for (i = 0; i < gimple_num_ops (s); i++)
-	if (TREE_SIDE_EFFECTS (gimple_op (s, i))
+	if (TREE_SIDE_EFFECTS (gimple_op (s, i))
             || TREE_THIS_VOLATILE (gimple_op (s, i)))
-	  {
-	    gcc_assert (gimple_has_volatile_ops (s));
-	    return true;
-	  }
+	  {
+	    gcc_assert (gimple_has_volatile_ops (s));
+	    return true;
+	  }
     }
 
   return false;
@@ -2393,18 +2354,18 @@
       t = gimple_call_fndecl (s);
       /* Assume that calls to weak functions may trap.  */
       if (!t || !DECL_P (t) || DECL_WEAK (t))
-	return true;
+	return true;
       return false;
 
     case GIMPLE_ASSIGN:
       t = gimple_expr_type (s);
       op = gimple_assign_rhs_code (s);
       if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
-	div = gimple_assign_rhs2 (s);
+	div = gimple_assign_rhs2 (s);
       return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
-				      (INTEGRAL_TYPE_P (t)
-				       && TYPE_OVERFLOW_TRAPS (t)),
-				      div));
+				      (INTEGRAL_TYPE_P (t)
+				       && TYPE_OVERFLOW_TRAPS (t)),
+				      div));
 
     default:
       break;
@@ -2448,7 +2409,7 @@
   for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
     {
       fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
-	  gimple_alloc_counts[i], gimple_alloc_sizes[i]);
+	  gimple_alloc_counts[i], gimple_alloc_sizes[i]);
       total_tuples += gimple_alloc_counts[i];
       total_bytes += gimple_alloc_sizes[i];
     }
@@ -2461,46 +2422,6 @@
 }
 
 
-/* Deep copy SYMS into the set of symbols stored by STMT.  If SYMS is
-   NULL or empty, the storage used is freed up.  */
-
-void
-gimple_set_stored_syms (gimple stmt, bitmap syms, bitmap_obstack *obs)
-{
-  gcc_assert (gimple_has_mem_ops (stmt));
-
-  if (syms == NULL || bitmap_empty_p (syms))
-    BITMAP_FREE (stmt->gsmem.membase.stores);
-  else
-    {
-      if (stmt->gsmem.membase.stores == NULL)
-	stmt->gsmem.membase.stores = BITMAP_ALLOC (obs);
-
-      bitmap_copy (stmt->gsmem.membase.stores, syms);
-    }
-}
-
-
-/* Deep copy SYMS into the set of symbols loaded by STMT.  If SYMS is
-   NULL or empty, the storage used is freed up.  */
-
-void
-gimple_set_loaded_syms (gimple stmt, bitmap syms, bitmap_obstack *obs)
-{
-  gcc_assert (gimple_has_mem_ops (stmt));
-
-  if (syms == NULL || bitmap_empty_p (syms))
-    BITMAP_FREE (stmt->gsmem.membase.loads);
-  else
-    {
-      if (stmt->gsmem.membase.loads == NULL)
-	stmt->gsmem.membase.loads = BITMAP_ALLOC (obs);
-
-      bitmap_copy (stmt->gsmem.membase.loads, syms);
-    }
-}
-
-
 /* Return the number of operands needed on the RHS of a GIMPLE
    assignment for an expression with tree code CODE.  */
 
@@ -2517,31 +2438,29 @@
     gcc_unreachable ();
 }
 
-#define DEFTREECODE(SYM, STRING, TYPE, NARGS)   			    \
-  (unsigned char)							    \
-  ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS				    \
-   : ((TYPE) == tcc_binary						    \
-      || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS   		    \
-   : ((TYPE) == tcc_constant						    \
-      || (TYPE) == tcc_declaration					    \
-      || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS			    \
-   : ((SYM) == TRUTH_AND_EXPR						    \
-      || (SYM) == TRUTH_OR_EXPR						    \
-      || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS			    \
-   : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS				    \
-   : ((SYM) == COND_EXPR						    \
-      || (SYM) == CONSTRUCTOR						    \
-      || (SYM) == OBJ_TYPE_REF						    \
-      || (SYM) == ASSERT_EXPR						    \
-      || (SYM) == ADDR_EXPR						    \
-      || (SYM) == WITH_SIZE_EXPR					    \
-      || (SYM) == EXC_PTR_EXPR						    \
-      || (SYM) == SSA_NAME						    \
-      || (SYM) == FILTER_EXPR						    \
-      || (SYM) == POLYNOMIAL_CHREC					    \
-      || (SYM) == DOT_PROD_EXPR						    \
-      || (SYM) == VEC_COND_EXPR						    \
-      || (SYM) == REALIGN_LOAD_EXPR) ? GIMPLE_SINGLE_RHS		    \
+#define DEFTREECODE(SYM, STRING, TYPE, NARGS)   			    \
+  (unsigned char)							    \
+  ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS				    \
+   : ((TYPE) == tcc_binary						    \
+      || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS			    \
+   : ((TYPE) == tcc_constant						    \
+      || (TYPE) == tcc_declaration					    \
+      || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS			    \
+   : ((SYM) == TRUTH_AND_EXPR						    \
+      || (SYM) == TRUTH_OR_EXPR						    \
+      || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS			    \
+   : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS				    \
+   : ((SYM) == COND_EXPR						    \
+      || (SYM) == CONSTRUCTOR						    \
+      || (SYM) == OBJ_TYPE_REF						    \
+      || (SYM) == ASSERT_EXPR						    \
+      || (SYM) == ADDR_EXPR						    \
+      || (SYM) == WITH_SIZE_EXPR					    \
+      || (SYM) == SSA_NAME						    \
+      || (SYM) == POLYNOMIAL_CHREC					    \
+      || (SYM) == DOT_PROD_EXPR						    \
+      || (SYM) == VEC_COND_EXPR						    \
+      || (SYM) == REALIGN_LOAD_EXPR) ? GIMPLE_SINGLE_RHS		    \
    : GIMPLE_INVALID_RHS),
 #define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,
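+
+/* Editor's note (illustrative, not part of the original change): the
+   mapping above classifies each tree code by the shape of GIMPLE
+   assignment it may appear in.  For example:
+
+     NEGATE_EXPR (tcc_unary)    -> GIMPLE_UNARY_RHS:   a = -b;
+     PLUS_EXPR (tcc_binary)     -> GIMPLE_BINARY_RHS:  a = b + c;
+     INTEGER_CST (tcc_constant) -> GIMPLE_SINGLE_RHS:  a = 42;
+
+   Codes mapped to GIMPLE_INVALID_RHS cannot appear as the RHS of a
+   GIMPLE_ASSIGN at all and must be gimplified away first.  */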
 
@@ -2565,37 +2484,13 @@
   return op && get_gimple_rhs_class (TREE_CODE (op)) == GIMPLE_SINGLE_RHS;
 }
 
-
-/* Return true if T is a GIMPLE RHS for an assignment to a temporary.  */
-
-bool
-is_gimple_formal_tmp_rhs (tree t)
-{
-  if (is_gimple_lvalue (t) || is_gimple_val (t))
-    return true;
-
-  return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
-}
-
 /* Returns true iff T is a valid RHS for an assignment to a renamed
    user -- or front-end generated artificial -- variable.  */
 
 bool
 is_gimple_reg_rhs (tree t)
 {
-  /* If the RHS of the MODIFY_EXPR may throw or make a nonlocal goto
-     and the LHS is a user variable, then we need to introduce a formal
-     temporary.  This way the optimizers can determine that the user
-     variable is only modified if evaluation of the RHS does not throw.
-
-     Don't force a temp of a non-renamable type; the copy could be
-     arbitrarily expensive.  Instead we will generate a VDEF for
-     the assignment.  */
-
-  if (is_gimple_reg_type (TREE_TYPE (t)) && tree_could_throw_p (t))
-    return false;
-
-  return is_gimple_formal_tmp_rhs (t);
+  return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
 }
 
 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
@@ -2609,7 +2504,7 @@
   if (is_gimple_reg_type (TREE_TYPE (t)))
     return is_gimple_val (t);
   else
-    return is_gimple_formal_tmp_rhs (t);
+    return is_gimple_val (t) || is_gimple_lvalue (t);
 }
 
 /*  Return true if T is a valid LHS for a GIMPLE assignment expression.  */
@@ -2618,10 +2513,10 @@
 is_gimple_lvalue (tree t)
 {
   return (is_gimple_addressable (t)
-	  || TREE_CODE (t) == WITH_SIZE_EXPR
-	  /* These are complex lvalues, but don't have addresses, so they
-	     go here.  */
-	  || TREE_CODE (t) == BIT_FIELD_REF);
+	  || TREE_CODE (t) == WITH_SIZE_EXPR
+	  /* These are complex lvalues, but don't have addresses, so they
+	     go here.  */
+	  || TREE_CODE (t) == BIT_FIELD_REF);
 }
 
 /*  Return true if T is a GIMPLE condition.  */
@@ -2630,9 +2525,9 @@
 is_gimple_condexpr (tree t)
 {
   return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
-				&& !tree_could_trap_p (t)
-				&& is_gimple_val (TREE_OPERAND (t, 0))
-				&& is_gimple_val (TREE_OPERAND (t, 1))));
+				&& !tree_could_trap_p (t)
+				&& is_gimple_val (TREE_OPERAND (t, 0))
+				&& is_gimple_val (TREE_OPERAND (t, 1))));
 }
 
 /*  Return true if T is something whose address can be taken.  */
@@ -2661,9 +2556,9 @@
     /* Vector constant constructors are gimple invariant.  */
     case CONSTRUCTOR:
       if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
-	return TREE_CONSTANT (t);
+	return TREE_CONSTANT (t);
       else
-	return false;
+	return false;
 
     default:
       return false;
@@ -2684,9 +2579,9 @@
   while (handled_component_p (op))
     {
       if ((TREE_CODE (op) == ARRAY_REF
-	   || TREE_CODE (op) == ARRAY_RANGE_REF)
-	  && !is_gimple_val (TREE_OPERAND (op, 1)))
-	    return false;
+	   || TREE_CODE (op) == ARRAY_RANGE_REF)
+	  && !is_gimple_val (TREE_OPERAND (op, 1)))
+	    return false;
 
       op = TREE_OPERAND (op, 0);
     }
@@ -2718,22 +2613,22 @@
   while (handled_component_p (op))
     {
       switch (TREE_CODE (op))
-	{
-	case ARRAY_REF:
-	case ARRAY_RANGE_REF:
-	  if (!is_gimple_constant (TREE_OPERAND (op, 1))
-	      || TREE_OPERAND (op, 2) != NULL_TREE
-	      || TREE_OPERAND (op, 3) != NULL_TREE)
-	    return NULL;
-	  break;
-
-	case COMPONENT_REF:
-	  if (TREE_OPERAND (op, 2) != NULL_TREE)
-	    return NULL;
-	  break;
-
-	default:;
-	}
+	{
+	case ARRAY_REF:
+	case ARRAY_RANGE_REF:
+	  if (!is_gimple_constant (TREE_OPERAND (op, 1))
+	      || TREE_OPERAND (op, 2) != NULL_TREE
+	      || TREE_OPERAND (op, 3) != NULL_TREE)
+	    return NULL;
+	  break;
+
+	case COMPONENT_REF:
+	  if (TREE_OPERAND (op, 2) != NULL_TREE)
+	    return NULL;
+	  break;
+
+	default:;
+	}
       op = TREE_OPERAND (op, 0);
     }
 
@@ -2822,9 +2717,7 @@
     case TRY_FINALLY_EXPR:
     case EH_FILTER_EXPR:
     case CATCH_EXPR:
-    case CHANGE_DYNAMIC_TYPE_EXPR:
     case ASM_EXPR:
-    case RESX_EXPR:
     case STATEMENT_LIST:
     case OMP_PARALLEL:
     case OMP_FOR:
@@ -2855,9 +2748,9 @@
 is_gimple_variable (tree t)
 {
   return (TREE_CODE (t) == VAR_DECL
-	  || TREE_CODE (t) == PARM_DECL
-	  || TREE_CODE (t) == RESULT_DECL
-	  || TREE_CODE (t) == SSA_NAME);
+	  || TREE_CODE (t) == PARM_DECL
+	  || TREE_CODE (t) == RESULT_DECL
+	  || TREE_CODE (t) == SSA_NAME);
 }
 
 /*  Return true if T is a GIMPLE identifier (something with an address).  */
@@ -2866,11 +2759,11 @@
 is_gimple_id (tree t)
 {
   return (is_gimple_variable (t)
-	  || TREE_CODE (t) == FUNCTION_DECL
-	  || TREE_CODE (t) == LABEL_DECL
-	  || TREE_CODE (t) == CONST_DECL
-	  /* Allow string constants, since they are addressable.  */
-	  || TREE_CODE (t) == STRING_CST);
+	  || TREE_CODE (t) == FUNCTION_DECL
+	  || TREE_CODE (t) == LABEL_DECL
+	  || TREE_CODE (t) == CONST_DECL
+	  /* Allow string constants, since they are addressable.  */
+	  || TREE_CODE (t) == STRING_CST);
 }
 
 /* Return true if TYPE is a suitable type for a scalar register variable.  */
@@ -2878,13 +2771,7 @@
 bool
 is_gimple_reg_type (tree type)
 {
-  /* In addition to aggregate types, we also exclude complex types if not
-     optimizing because they can be subject to partial stores in GNU C by
-     means of the __real__ and __imag__ operators and we cannot promote
-     them to total stores (see gimplify_modify_expr_complex_part).  */
-  return !(AGGREGATE_TYPE_P (type)
-	   || (TREE_CODE (type) == COMPLEX_TYPE && !optimize));
-
+  return !AGGREGATE_TYPE_P (type);
 }
 
 /* Return true if T is a non-aggregate register variable.  */
@@ -2895,9 +2782,6 @@
   if (TREE_CODE (t) == SSA_NAME)
     t = SSA_NAME_VAR (t);
 
-  if (MTAG_P (t))
-    return false;
-
   if (!is_gimple_variable (t))
     return false;
 
@@ -2937,35 +2821,6 @@
 }
 
 
-/* Returns true if T is a GIMPLE formal temporary variable.  */
-
-bool
-is_gimple_formal_tmp_var (tree t)
-{
-  if (TREE_CODE (t) == SSA_NAME)
-    return true;
-
-  return TREE_CODE (t) == VAR_DECL && DECL_GIMPLE_FORMAL_TEMP_P (t);
-}
-
-/* Returns true if T is a GIMPLE formal temporary register variable.  */
-
-bool
-is_gimple_formal_tmp_reg (tree t)
-{
-  /* The intent of this is to get hold of a value that won't change.
-     An SSA_NAME qualifies no matter if its of a user variable or not.  */
-  if (TREE_CODE (t) == SSA_NAME)
-    return true;
-
-  /* We don't know the lifetime characteristics of user variables.  */
-  if (!is_gimple_formal_tmp_var (t))
-    return false;
-
-  /* Finally, it must be capable of being placed in a register.  */
-  return is_gimple_reg (t);
-}
-
 /* Return true if T is a GIMPLE variable whose address is not needed.  */
 
 bool
@@ -2988,11 +2843,6 @@
       && !is_gimple_reg (t))
     return false;
 
-  /* FIXME make these decls.  That can happen only when we expose the
-     entire landing-pad construct at the tree level.  */
-  if (TREE_CODE (t) == EXC_PTR_EXPR || TREE_CODE (t) == FILTER_EXPR)
-    return true;
-
   return (is_gimple_variable (t) || is_gimple_min_invariant (t));
 }
 
@@ -3012,6 +2862,8 @@
 bool
 is_gimple_min_lval (tree t)
 {
+  if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
+    return false;
   return (is_gimple_id (t) || TREE_CODE (t) == INDIRECT_REF);
 }
 
@@ -3062,7 +2914,7 @@
 {
   while (handled_component_p (t))
     t = TREE_OPERAND (t, 0);
-  
+
   if (SSA_VAR_P (t)
       || TREE_CODE (t) == STRING_CST
       || TREE_CODE (t) == CONSTRUCTOR
@@ -3083,21 +2935,21 @@
     {
     case tcc_expression:
       switch (code)
-	{
-	case INIT_EXPR:
-	case MODIFY_EXPR:
-	case VA_ARG_EXPR:
-	case PREDECREMENT_EXPR:
-	case PREINCREMENT_EXPR:
-	case POSTDECREMENT_EXPR:
-	case POSTINCREMENT_EXPR:
-	  /* All of these have side-effects, no matter what their
-	     operands are.  */
-	  return;
-
-	default:
-	  break;
-	}
+	{
+	case INIT_EXPR:
+	case MODIFY_EXPR:
+	case VA_ARG_EXPR:
+	case PREDECREMENT_EXPR:
+	case PREINCREMENT_EXPR:
+	case POSTDECREMENT_EXPR:
+	case POSTINCREMENT_EXPR:
+	  /* All of these have side-effects, no matter what their
+	     operands are.  */
+	  return;
+
+	default:
+	  break;
+	}
       /* Fall through.  */
 
     case tcc_comparison:  /* a comparison expression */
@@ -3107,11 +2959,11 @@
     case tcc_vl_exp:        /* a function call */
       TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
       for (i = 0; i < len; ++i)
-	{
-	  tree op = TREE_OPERAND (t, i);
-	  if (op && TREE_SIDE_EFFECTS (op))
-	    TREE_SIDE_EFFECTS (t) = 1;
-	}
+	{
+	  tree op = TREE_OPERAND (t, i);
+	  if (op && TREE_SIDE_EFFECTS (op))
+	    TREE_SIDE_EFFECTS (t) = 1;
+	}
       break;
 
     case tcc_constant:
@@ -3130,30 +2982,35 @@
 tree
 canonicalize_cond_expr_cond (tree t)
 {
+  /* Strip conversions around boolean operations.  */
+  if (CONVERT_EXPR_P (t)
+      && truth_value_p (TREE_CODE (TREE_OPERAND (t, 0))))
+    t = TREE_OPERAND (t, 0);
+
   /* For (bool)x use x != 0.  */
-  if (TREE_CODE (t) == NOP_EXPR
-      && TREE_TYPE (t) == boolean_type_node)
+  if (CONVERT_EXPR_P (t)
+      && TREE_CODE (TREE_TYPE (t)) == BOOLEAN_TYPE)
     {
       tree top0 = TREE_OPERAND (t, 0);
       t = build2 (NE_EXPR, TREE_TYPE (t),
-		  top0, build_int_cst (TREE_TYPE (top0), 0));
+		  top0, build_int_cst (TREE_TYPE (top0), 0));
     }
   /* For !x use x == 0.  */
   else if (TREE_CODE (t) == TRUTH_NOT_EXPR)
     {
       tree top0 = TREE_OPERAND (t, 0);
       t = build2 (EQ_EXPR, TREE_TYPE (t),
-		  top0, build_int_cst (TREE_TYPE (top0), 0));
+		  top0, build_int_cst (TREE_TYPE (top0), 0));
     }
   /* For cmp ? 1 : 0 use cmp.  */
   else if (TREE_CODE (t) == COND_EXPR
-	   && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
-	   && integer_onep (TREE_OPERAND (t, 1))
-	   && integer_zerop (TREE_OPERAND (t, 2)))
+	   && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
+	   && integer_onep (TREE_OPERAND (t, 1))
+	   && integer_zerop (TREE_OPERAND (t, 2)))
     {
       tree top0 = TREE_OPERAND (t, 0);
       t = build2 (TREE_CODE (top0), TREE_TYPE (t),
-		  TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
+		  TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
     }
 
   if (is_gimple_condexpr (t))
@@ -3183,6 +3040,9 @@
   if (gimple_call_lhs (stmt))
     gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
 
+  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
+  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
+
   gimple_set_block (new_stmt, gimple_block (stmt));
   if (gimple_has_location (stmt))
     gimple_set_location (new_stmt, gimple_location (stmt));
@@ -3197,7 +3057,1628 @@
 #ifndef noCbC
   gimple_call_set_cbc_goto (new_stmt, gimple_call_cbc_goto_p (stmt));
 #endif
+
+  gimple_set_modified (new_stmt, true);
   return new_stmt;
 }
 
+
+static hashval_t gimple_type_hash (const void *);
+
+/* Structure used to maintain a cache of some type pairs compared by
+   gimple_types_compatible_p when comparing aggregate types.  There are
+   four possible values for SAME_P:
+
+    -2: The pair (T1, T2) has just been inserted in the table.
+    -1: The pair (T1, T2) is currently being compared.
+     0: T1 and T2 are different types.
+     1: T1 and T2 are the same type.
+
+   This table is only used when comparing aggregate types to avoid
+   infinite recursion due to self-referential types.  */
+struct type_pair_d
+{
+  unsigned int uid1;
+  unsigned int uid2;
+  int same_p;
+};
+typedef struct type_pair_d *type_pair_t;
+
+/* Return a hash value for the type pair pointed-to by P.  */
+
+static hashval_t
+type_pair_hash (const void *p)
+{
+  const struct type_pair_d *pair = (const struct type_pair_d *) p;
+  hashval_t val1 = pair->uid1;
+  hashval_t val2 = pair->uid2;
+  return (iterative_hash_hashval_t (val2, val1)
+	  ^ iterative_hash_hashval_t (val1, val2));
+}
+
+/* Compare two type pairs pointed-to by P1 and P2.  */
+
+static int
+type_pair_eq (const void *p1, const void *p2)
+{
+  const struct type_pair_d *pair1 = (const struct type_pair_d *) p1;
+  const struct type_pair_d *pair2 = (const struct type_pair_d *) p2;
+  return ((pair1->uid1 == pair2->uid1 && pair1->uid2 == pair2->uid2)
+	  || (pair1->uid1 == pair2->uid2 && pair1->uid2 == pair2->uid1));
+}
+
+/* Lookup the pair of types T1 and T2 in *VISITED_P.  Insert a new
+   entry if none existed.  */
+
+static type_pair_t
+lookup_type_pair (tree t1, tree t2, htab_t *visited_p, struct obstack *ob_p)
+{
+  struct type_pair_d pair;
+  type_pair_t p;
+  void **slot;
+
+  if (*visited_p == NULL)
+    {
+      *visited_p = htab_create (251, type_pair_hash, type_pair_eq, NULL);
+      gcc_obstack_init (ob_p);
+    }
+
+  pair.uid1 = TYPE_UID (t1);
+  pair.uid2 = TYPE_UID (t2);
+  slot = htab_find_slot (*visited_p, &pair, INSERT);
+
+  if (*slot)
+    p = *((type_pair_t *) slot);
+  else
+    {
+      p = XOBNEW (ob_p, struct type_pair_d);
+      p->uid1 = TYPE_UID (t1);
+      p->uid2 = TYPE_UID (t2);
+      p->same_p = -2;
+      *slot = (void *) p;
+    }
+
+  return p;
+}
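+
+/* Editor's sketch (not part of the original change) of how this cache
+   breaks recursion: for a self-referential type such as
+
+     struct list { struct list *next; };
+
+   comparing two copies L1 and L2 recurses through the pointed-to types
+   back to the pair (L1, L2) itself.  That inner lookup finds the pair
+   with SAME_P == -1 ("being compared"), optimistically answers "same",
+   and the outermost call records the final verdict in SAME_P.  */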
+
+
+/* Return true if T1 and T2 have the same name.  If FOR_COMPLETION_P is
+   true, treat an anonymous type as never matching anything, not even
+   another anonymous type; otherwise two anonymous types compare equal.  */
+
+static bool
+compare_type_names_p (tree t1, tree t2, bool for_completion_p)
+{
+  tree name1 = TYPE_NAME (t1);
+  tree name2 = TYPE_NAME (t2);
+
+  /* Consider anonymous types all unique for completion.  */
+  if (for_completion_p
+      && (!name1 || !name2))
+    return false;
+
+  if (name1 && TREE_CODE (name1) == TYPE_DECL)
+    {
+      name1 = DECL_NAME (name1);
+      if (for_completion_p
+	  && !name1)
+	return false;
+    }
+  gcc_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE);
+
+  if (name2 && TREE_CODE (name2) == TYPE_DECL)
+    {
+      name2 = DECL_NAME (name2);
+      if (for_completion_p
+	  && !name2)
+	return false;
+    }
+  gcc_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE);
+
+  /* Identifiers can be compared with pointer equality rather
+     than a string comparison.  */
+  if (name1 == name2)
+    return true;
+
+  return false;
+}
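+
+/* Editor's note (illustrative): the pointer comparison above is enough
+   because IDENTIFIER_NODEs are interned in a single hash table via
+   get_identifier, so two types named "foo" coming from different TUs
+   share one IDENTIFIER_NODE; no string comparison is needed.  */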
+
+/* Return true if the field decls F1 and F2 are at the same offset.  */
+
+bool
+compare_field_offset (tree f1, tree f2)
+{
+  if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
+    return (operand_equal_p (DECL_FIELD_OFFSET (f1),
+			     DECL_FIELD_OFFSET (f2), 0)
+	    && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
+				   DECL_FIELD_BIT_OFFSET (f2)));
+
+  /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
+     should be, so handle differing ones specially by decomposing
+     the offset into a byte and bit offset manually.  */
+  if (host_integerp (DECL_FIELD_OFFSET (f1), 0)
+      && host_integerp (DECL_FIELD_OFFSET (f2), 0))
+    {
+      unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
+      unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
+      bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
+      byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
+		      + bit_offset1 / BITS_PER_UNIT);
+      bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
+      byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
+		      + bit_offset2 / BITS_PER_UNIT);
+      if (byte_offset1 != byte_offset2)
+	return false;
+      return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
+    }
+
+  return false;
+}
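+
+/* Worked example (editor's addition): a field at byte 5 can be
+   represented as DECL_FIELD_OFFSET == 4 with DECL_FIELD_BIT_OFFSET == 8,
+   or as DECL_FIELD_OFFSET == 5 with DECL_FIELD_BIT_OFFSET == 0.  The
+   fallback above normalizes both to byte_offset == 5 (4 + 8/8 and
+   5 + 0/8) and to equal sub-byte offsets (8 % 8 == 0 % 8), so the two
+   representations compare equal.  */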
+
+/* Return 1 iff T1 and T2 are structurally identical.
+   Otherwise, return 0.  */
+
+static int
+gimple_types_compatible_p (tree t1, tree t2)
+{
+  type_pair_t p = NULL;
+
+  /* Check first for the obvious case of pointer identity.  */
+  if (t1 == t2)
+    return 1;
+
+  /* Check that we have two types to compare.  */
+  if (t1 == NULL_TREE || t2 == NULL_TREE)
+    return 0;
+
+  /* Can't be the same type if the types don't have the same code.  */
+  if (TREE_CODE (t1) != TREE_CODE (t2))
+    return 0;
+
+  /* Can't be the same type if they have different CV qualifiers.  */
+  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
+    return 0;
+
+  /* Void types are always the same.  */
+  if (TREE_CODE (t1) == VOID_TYPE)
+    return 1;
+
+  /* For numerical types do some simple checks before doing three
+     hashtable queries.  */
+  if (INTEGRAL_TYPE_P (t1)
+      || SCALAR_FLOAT_TYPE_P (t1)
+      || FIXED_POINT_TYPE_P (t1)
+      || TREE_CODE (t1) == VECTOR_TYPE
+      || TREE_CODE (t1) == COMPLEX_TYPE
+      || TREE_CODE (t1) == OFFSET_TYPE)
+    {
+      /* Can't be the same type if they have different alignment,
+	 sign, precision or mode.  */
+      if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
+	  || TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
+	  || TYPE_MODE (t1) != TYPE_MODE (t2)
+	  || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
+	return 0;
+
+      if (TREE_CODE (t1) == INTEGER_TYPE
+	  && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
+	      || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
+	return 0;
+
+      /* That's all we need to check for float and fixed-point types.  */
+      if (SCALAR_FLOAT_TYPE_P (t1)
+	  || FIXED_POINT_TYPE_P (t1))
+	return 1;
+
+      /* Perform cheap tail-recursion for vector and complex types.  */
+      if (TREE_CODE (t1) == VECTOR_TYPE
+	  || TREE_CODE (t1) == COMPLEX_TYPE)
+	return gimple_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2));
+
+      /* For integral types fall thru to more complex checks.  */
+    }
+
+  /* If the hash values of t1 and t2 are different the types can't
+     possibly be the same.  This helps keeping the type-pair hashtable
+     small, only tracking comparisons for hash collisions.  */
+  if (gimple_type_hash (t1) != gimple_type_hash (t2))
+    return 0;
+
+  /* If we've visited this type pair before (in the case of aggregates
+     with self-referential types), and we made a decision, return it.  */
+  p = lookup_type_pair (t1, t2, &gtc_visited, &gtc_ob);
+  if (p->same_p == 0 || p->same_p == 1)
+    {
+      /* We have already decided whether T1 and T2 are the
+	 same; return the cached result.  */
+      return p->same_p == 1;
+    }
+  else if (p->same_p == -1)
+    {
+      /* We are currently comparing this pair of types; assume
+	 they are the same and let the caller decide.  */
+      return 1;
+    }
+
+  gcc_assert (p->same_p == -2);
+
+  /* Mark the (T1, T2) comparison in progress.  */
+  p->same_p = -1;
+
+  /* If their attributes are not the same they can't be the same type.  */
+  if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
+    goto different_types;
+
+  /* Do type-specific comparisons.  */
+  switch (TREE_CODE (t1))
+    {
+    case ARRAY_TYPE:
+      /* Array types are the same if the element types are the same and
+     the number of elements are the same.  */
+      if (!gimple_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))
+      || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
+      || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
+    goto different_types;
+      else
+    {
+      tree i1 = TYPE_DOMAIN (t1);
+      tree i2 = TYPE_DOMAIN (t2);
+
+      /* For an incomplete external array, the type domain can be
+         NULL_TREE.  Check this condition also.  */
+      if (i1 == NULL_TREE && i2 == NULL_TREE)
+        goto same_types;
+      else if (i1 == NULL_TREE || i2 == NULL_TREE)
+        goto different_types;
+      /* If for a complete array type the possibly gimplified sizes
+         are different the types are different.  */
+      else if (((TYPE_SIZE (i1) != NULL) ^ (TYPE_SIZE (i2) != NULL))
+           || (TYPE_SIZE (i1)
+               && TYPE_SIZE (i2)
+               && !operand_equal_p (TYPE_SIZE (i1), TYPE_SIZE (i2), 0)))
+        goto different_types;
+      else
+        {
+          tree min1 = TYPE_MIN_VALUE (i1);
+          tree min2 = TYPE_MIN_VALUE (i2);
+          tree max1 = TYPE_MAX_VALUE (i1);
+          tree max2 = TYPE_MAX_VALUE (i2);
+
+          /* The minimum/maximum values have to be the same.  */
+          if ((min1 == min2
+           || (min1 && min2 && operand_equal_p (min1, min2, 0)))
+          && (max1 == max2
+              || (max1 && max2 && operand_equal_p (max1, max2, 0))))
+        goto same_types;
+          else
+        goto different_types;
+        }
+    }
+
+    case METHOD_TYPE:
+      /* Method types should belong to the same class.  */
+      if (!gimple_types_compatible_p (TYPE_METHOD_BASETYPE (t1),
+                 TYPE_METHOD_BASETYPE (t2)))
+    goto different_types;
+
+      /* Fallthru  */
+
+    case FUNCTION_TYPE:
+      /* Function types are the same if the return type and arguments types
+     are the same.  */
+      if (!gimple_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))
+    goto different_types;
+      else
+    {
+      if (!targetm.comp_type_attributes (t1, t2))
+        goto different_types;
+
+      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
+        goto same_types;
+      else
+        {
+          tree parms1, parms2;
+
+          for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
+           parms1 && parms2;
+           parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
+        {
+          if (!gimple_types_compatible_p (TREE_VALUE (parms1),
+                         TREE_VALUE (parms2)))
+            goto different_types;
+        }
+
+          if (parms1 || parms2)
+        goto different_types;
+
+          goto same_types;
+        }
+    }
+
+    case OFFSET_TYPE:
+      {
+    if (!gimple_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))
+        || !gimple_types_compatible_p (TYPE_OFFSET_BASETYPE (t1),
+                       TYPE_OFFSET_BASETYPE (t2)))
+      goto different_types;
+
+    goto same_types;
+      }
+
+    case POINTER_TYPE:
+    case REFERENCE_TYPE:
+      {
+    /* If the two pointers have different ref-all attributes,
+       they can't be the same type.  */
+    if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
+      goto different_types;
+
+    /* If one pointer points to an incomplete type variant of
+       the other pointed-to type they are the same.  */
+    if (TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
+        && RECORD_OR_UNION_TYPE_P (TREE_TYPE (t1))
+        && (!COMPLETE_TYPE_P (TREE_TYPE (t1))
+        || !COMPLETE_TYPE_P (TREE_TYPE (t2)))
+        && compare_type_names_p (TYPE_MAIN_VARIANT (TREE_TYPE (t1)),
+                     TYPE_MAIN_VARIANT (TREE_TYPE (t2)), true))
+      {
+        /* Replace the pointed-to incomplete type with the
+           complete one.  */
+        if (COMPLETE_TYPE_P (TREE_TYPE (t2)))
+          TREE_TYPE (t1) = TREE_TYPE (t2);
+        else
+          TREE_TYPE (t2) = TREE_TYPE (t1);
+        goto same_types;
+      }
+
+    /* Otherwise, pointer and reference types are the same if the
+       pointed-to types are the same.  */
+    if (gimple_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))
+      goto same_types;
+
+    goto different_types;
+      }
+
+    case INTEGER_TYPE:
+    case BOOLEAN_TYPE:
+      {
+    tree min1 = TYPE_MIN_VALUE (t1);
+    tree max1 = TYPE_MAX_VALUE (t1);
+    tree min2 = TYPE_MIN_VALUE (t2);
+    tree max2 = TYPE_MAX_VALUE (t2);
+    bool min_equal_p = false;
+    bool max_equal_p = false;
+
+    /* If either type has a minimum value, the other type must
+       have the same.  */
+    if (min1 == NULL_TREE && min2 == NULL_TREE)
+      min_equal_p = true;
+    else if (min1 && min2 && operand_equal_p (min1, min2, 0))
+      min_equal_p = true;
+
+    /* Likewise, if either type has a maximum value, the other
+       type must have the same.  */
+    if (max1 == NULL_TREE && max2 == NULL_TREE)
+      max_equal_p = true;
+    else if (max1 && max2 && operand_equal_p (max1, max2, 0))
+      max_equal_p = true;
+
+    if (!min_equal_p || !max_equal_p)
+      goto different_types;
+
+    goto same_types;
+      }
+
+    case ENUMERAL_TYPE:
+      {
+    /* FIXME lto, we cannot check bounds on enumeral types because
+       different front ends will produce different values.
+       In C, enumeral types are integers, while in C++ each element
+       will have its own symbolic value.  We should decide how enums
+       are to be represented in GIMPLE and have each front end lower
+       to that.  */
+    tree v1, v2;
+
+    /* For enumeral types, all the values must be the same.  */
+    if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
+      goto same_types;
+
+    for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
+         v1 && v2;
+         v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
+      {
+        tree c1 = TREE_VALUE (v1);
+        tree c2 = TREE_VALUE (v2);
+
+        if (TREE_CODE (c1) == CONST_DECL)
+          c1 = DECL_INITIAL (c1);
+
+        if (TREE_CODE (c2) == CONST_DECL)
+          c2 = DECL_INITIAL (c2);
+
+        if (tree_int_cst_equal (c1, c2) != 1)
+          goto different_types;
+      }
+
+    /* If one enumeration has more values than the other, they
+       are not the same.  */
+    if (v1 || v2)
+      goto different_types;
+
+    goto same_types;
+      }
+
+    case RECORD_TYPE:
+    case UNION_TYPE:
+    case QUAL_UNION_TYPE:
+      {
+    tree f1, f2;
+
+    /* If one type requires structural equality checks and the
+       other doesn't, do not merge the types.  */
+    if (TYPE_STRUCTURAL_EQUALITY_P (t1)
+        != TYPE_STRUCTURAL_EQUALITY_P (t2))
+      goto different_types;
+
+    /* The struct tags shall compare equal.  */
+    if (!compare_type_names_p (TYPE_MAIN_VARIANT (t1),
+                   TYPE_MAIN_VARIANT (t2), false))
+      goto different_types;
+
+    /* For aggregate types, all the fields must be the same.  */
+    for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
+         f1 && f2;
+         f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
+      {
+        /* The fields must have the same name, offset and type.  */
+        if (DECL_NAME (f1) != DECL_NAME (f2)
+        || DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
+        || !compare_field_offset (f1, f2)
+        || !gimple_types_compatible_p (TREE_TYPE (f1),
+                           TREE_TYPE (f2)))
+          goto different_types;
+      }
+
+    /* If one aggregate has more fields than the other, they
+       are not the same.  */
+    if (f1 || f2)
+      goto different_types;
+
+    goto same_types;
+      }
+
+    default:
+      gcc_unreachable ();
+    }
+
+  /* Common exit path for types that are not compatible.  */
+different_types:
+  p->same_p = 0;
+  return 0;
+
+  /* Common exit path for types that are compatible.  */
+same_types:
+  p->same_p = 1;
+  return 1;
+}
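+
+/* Editor's note (illustrative): under this predicate two definitions of
+
+     struct point { int x; int y; };
+
+   read from different translation units compare equal: same tree code
+   and qualifiers, matching struct tags, and fields that match pairwise
+   in name, offset and (recursively compatible) type.  This is what
+   allows LTO to merge them into a single canonical type.  */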
+
+
+
+
+/* Per-pointer state for the SCC finding.  The on_sccstack flag
+   is not strictly required; it is true when there is no hash value
+   recorded for the type and false otherwise.  But querying that
+   is slower.  */
+
+struct sccs
+{
+  unsigned int dfsnum;
+  unsigned int low;
+  bool on_sccstack;
+  hashval_t hash;
+};
+
+static unsigned int next_dfs_num;
+
+static hashval_t
+iterative_hash_gimple_type (tree, hashval_t, VEC(tree, heap) **,
+                struct pointer_map_t *, struct obstack *);
+
+/* DFS visit the edge from the caller's type, with state *STATE, to T.
+   Update the caller's type hash V with the hash for T if T is not part
+   of the SCC containing the caller's type, and return the result.
+   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK carry the state of the DFS walk.  */
+
+static hashval_t
+visit (tree t, struct sccs *state, hashval_t v,
+       VEC (tree, heap) **sccstack,
+       struct pointer_map_t *sccstate,
+       struct obstack *sccstate_obstack)
+{
+  struct sccs *cstate = NULL;
+  void **slot;
+
+  /* If there is a hash value recorded for this type then it can't
+     possibly be part of our parent SCC.  Simply mix in its hash.  */
+  if ((slot = pointer_map_contains (type_hash_cache, t)))
+    return iterative_hash_hashval_t ((hashval_t) (size_t) *slot, v);
+
+  if ((slot = pointer_map_contains (sccstate, t)) != NULL)
+    cstate = (struct sccs *)*slot;
+  if (!cstate)
+    {
+      hashval_t tem;
+      /* Not yet visited.  DFS recurse.  */
+      tem = iterative_hash_gimple_type (t, v,
+					sccstack, sccstate, sccstate_obstack);
+      if (!cstate)
+	cstate = (struct sccs *)* pointer_map_contains (sccstate, t);
+      state->low = MIN (state->low, cstate->low);
+      /* If the type is no longer on the SCC stack and thus is not part
+	 of the parent's SCC, mix in its hash value.  Otherwise ignore
+	 the type for hashing purposes and return the unaltered hash
+	 value.  */
+      if (!cstate->on_sccstack)
+	return tem;
+    }
+  if (cstate->dfsnum < state->dfsnum
+      && cstate->on_sccstack)
+    state->low = MIN (cstate->dfsnum, state->low);
+
+  /* We are part of our parent's SCC; skip this type during hashing
+     and return the unaltered hash value.  */
+  return v;
+}
+
+/* Hash NAME with the previous hash value V and return it.  */
+
+static hashval_t
+iterative_hash_name (tree name, hashval_t v)
+{
+  if (!name)
+    return v;
+  if (TREE_CODE (name) == TYPE_DECL)
+    name = DECL_NAME (name);
+  if (!name)
+    return v;
+  gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE);
+  return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v);
+}
+
+/* Return a hash value for gimple type TYPE combined with VAL.
+   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK carry the state of the DFS walk.
+
+   To hash a type we end up hashing in all the types reachable from it.
+   Through pointers we can end up with cycles, which mess up the
+   required property that we compute the same hash value for
+   structurally equivalent types.  To avoid this we hash all the
+   types in a cycle (the SCC) in a commutative way; the easiest way
+   is to not mix in the hashes of the SCC members at all.  To make
+   this work we have to delay setting the hash values of the SCC
+   until it is complete.  */
+
+static hashval_t
+iterative_hash_gimple_type (tree type, hashval_t val,
+                VEC(tree, heap) **sccstack,
+                struct pointer_map_t *sccstate,
+                struct obstack *sccstate_obstack)
+{
+  hashval_t v;
+  void **slot;
+  struct sccs *state;
+
+#ifdef ENABLE_CHECKING
+  /* Not visited during this DFS walk nor during previous walks.  */
+  gcc_assert (!pointer_map_contains (type_hash_cache, type)
+          && !pointer_map_contains (sccstate, type));
+#endif
+  state = XOBNEW (sccstate_obstack, struct sccs);
+  *pointer_map_insert (sccstate, type) = state;
+
+  VEC_safe_push (tree, heap, *sccstack, type);
+  state->dfsnum = next_dfs_num++;
+  state->low = state->dfsnum;
+  state->on_sccstack = true;
+
+  /* Combine a few common features of types so that types are grouped into
+     smaller sets; when searching for existing matching types to merge,
+     only existing types having the same features as the new type will be
+     checked.  */
+  v = iterative_hash_hashval_t (TREE_CODE (type), 0);
+  v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
+  v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
+
+  /* Do not hash the type's size as this will cause differences in
+     hash values for the complete vs. the incomplete type variant.  */
+
+  /* Incorporate common features of numerical types.  */
+  if (INTEGRAL_TYPE_P (type)
+      || SCALAR_FLOAT_TYPE_P (type)
+      || FIXED_POINT_TYPE_P (type))
+    {
+      v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
+      v = iterative_hash_hashval_t (TYPE_MODE (type), v);
+      v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
+    }
+
+  /* For pointer and reference types, fold in information about the type
+     pointed to but do not recurse into possibly incomplete types to
+     avoid hash differences for complete vs. incomplete types.  */
+  if (POINTER_TYPE_P (type))
+    {
+      if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (type)))
+    {
+      v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
+      v = iterative_hash_name
+          (TYPE_NAME (TYPE_MAIN_VARIANT (TREE_TYPE (type))), v);
+    }
+      else
+    v = visit (TREE_TYPE (type), state, v,
+           sccstack, sccstate, sccstate_obstack);
+    }
+
+  /* For integer types hash the type's min/max values and the string flag.  */
+  if (TREE_CODE (type) == INTEGER_TYPE)
+    {
+      v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
+      v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
+      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
+    }
+
+  /* For array types hash their domain and the string flag.  */
+  if (TREE_CODE (type) == ARRAY_TYPE
+      && TYPE_DOMAIN (type))
+    {
+      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
+      v = visit (TYPE_DOMAIN (type), state, v,
+         sccstack, sccstate, sccstate_obstack);
+    }
+
+  /* Recurse for aggregates with a single element type.  */
+  if (TREE_CODE (type) == ARRAY_TYPE
+      || TREE_CODE (type) == COMPLEX_TYPE
+      || TREE_CODE (type) == VECTOR_TYPE)
+    v = visit (TREE_TYPE (type), state, v,
+           sccstack, sccstate, sccstate_obstack);
+
+  /* Incorporate function return and argument types.  */
+  if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
+    {
+      unsigned na;
+      tree p;
+
+      /* For method types also incorporate their parent class.  */
+      if (TREE_CODE (type) == METHOD_TYPE)
+    v = visit (TYPE_METHOD_BASETYPE (type), state, v,
+           sccstack, sccstate, sccstate_obstack);
+
+      v = visit (TREE_TYPE (type), state, v,
+         sccstack, sccstate, sccstate_obstack);
+
+      for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
+    {
+      v = visit (TREE_VALUE (p), state, v,
+             sccstack, sccstate, sccstate_obstack);
+      na++;
+    }
+
+      v = iterative_hash_hashval_t (na, v);
+    }
+
+  if (TREE_CODE (type) == RECORD_TYPE
+      || TREE_CODE (type) == UNION_TYPE
+      || TREE_CODE (type) == QUAL_UNION_TYPE)
+    {
+      unsigned nf;
+      tree f;
+
+      v = iterative_hash_name (TYPE_NAME (TYPE_MAIN_VARIANT (type)), v);
+
+      for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
+    {
+      v = iterative_hash_name (DECL_NAME (f), v);
+      v = visit (TREE_TYPE (f), state, v,
+             sccstack, sccstate, sccstate_obstack);
+      nf++;
+    }
+
+      v = iterative_hash_hashval_t (nf, v);
+    }
+
+  /* Record hash for us.  */
+  state->hash = v;
+
+  /* See if we found an SCC.  */
+  if (state->low == state->dfsnum)
+    {
+      tree x;
+
+      /* Pop off the SCC and set its hash values.  */
+      do
+    {
+      struct sccs *cstate;
+      x = VEC_pop (tree, *sccstack);
+      gcc_assert (!pointer_map_contains (type_hash_cache, x));
+      cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
+      cstate->on_sccstack = false;
+      slot = pointer_map_insert (type_hash_cache, x);
+      *slot = (void *) (size_t) cstate->hash;
+    }
+      while (x != type);
+    }
+
+  return iterative_hash_hashval_t (v, val);
+}
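+
+/* Editor's sketch (not part of the original change): a cycle that
+   survives the name-based shortcut for pointers to structs is
+
+     struct A { void (*cb) (struct A); };
+
+   Hashing A walks A -> pointer -> FUNCTION_TYPE -> argument A.  The
+   inner visit finds A still on the SCC stack, so its hash is not mixed
+   in and the result is independent of where the cycle was entered.
+   Back at A, state->low == state->dfsnum, and the whole SCC {A, the
+   pointer, the function type} is popped with each member's hash
+   recorded in type_hash_cache.  */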
+
+
+/* Returns a hash value for P (assumed to be a type).  The hash value
+   is computed using some distinguishing features of the type.  Note
+   that we cannot use pointer hashing here as we may be dealing with
+   two distinct instances of the same type.
+
+   This function should produce the same hash value for two compatible
+   types according to gimple_types_compatible_p.  */
+
+static hashval_t
+gimple_type_hash (const void *p)
+{
+  const_tree t = (const_tree) p;
+  VEC(tree, heap) *sccstack = NULL;
+  struct pointer_map_t *sccstate;
+  struct obstack sccstate_obstack;
+  hashval_t val;
+  void **slot;
+
+  if (type_hash_cache == NULL)
+    type_hash_cache = pointer_map_create ();
+
+  if ((slot = pointer_map_contains (type_hash_cache, p)) != NULL)
+    return iterative_hash_hashval_t ((hashval_t) (size_t) *slot, 0);
+
+  /* Perform a DFS walk and pre-hash all reachable types.  */
+  next_dfs_num = 1;
+  sccstate = pointer_map_create ();
+  gcc_obstack_init (&sccstate_obstack);
+  val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
+                    &sccstack, sccstate, &sccstate_obstack);
+  VEC_free (tree, heap, sccstack);
+  pointer_map_destroy (sccstate);
+  obstack_free (&sccstate_obstack, NULL);
+
+  return val;
+}
+
+
+/* Returns nonzero if P1 and P2 are equal.  */
+
+static int
+gimple_type_eq (const void *p1, const void *p2)
+{
+  const_tree t1 = (const_tree) p1;
+  const_tree t2 = (const_tree) p2;
+  return gimple_types_compatible_p (CONST_CAST_TREE (t1), CONST_CAST_TREE (t2));
+}
+
+
+/* Register type T in the global type table gimple_types.
+   If another type T', compatible with T, already existed in
+   gimple_types then return T', otherwise return T.  This is used by
+   LTO to merge identical types read from different TUs.  */
+
+tree
+gimple_register_type (tree t)
+{
+  void **slot;
+
+  gcc_assert (TYPE_P (t));
+
+  /* Always register the main variant first.  This is important so we
+     pick up the non-typedef variants as canonical; otherwise we'll end
+     up taking typedef ids for structure tags during comparison.  */
+  if (TYPE_MAIN_VARIANT (t) != t)
+    gimple_register_type (TYPE_MAIN_VARIANT (t));
+
+  if (gimple_types == NULL)
+    gimple_types = htab_create (16381, gimple_type_hash, gimple_type_eq, 0);
+
+  slot = htab_find_slot (gimple_types, t, INSERT);
+  if (*slot
+      && *(tree *)slot != t)
+    {
+      tree new_type = (tree) *((tree *) slot);
+
+      /* Do not merge types with different addressability.  */
+      gcc_assert (TREE_ADDRESSABLE (t) == TREE_ADDRESSABLE (new_type));
+
+      /* If t is not its main variant then make t unreachable from its
+     main variant list.  Otherwise we'd queue up a lot of duplicates
+     there.  */
+      if (t != TYPE_MAIN_VARIANT (t))
+    {
+      tree tem = TYPE_MAIN_VARIANT (t);
+      while (tem && TYPE_NEXT_VARIANT (tem) != t)
+        tem = TYPE_NEXT_VARIANT (tem);
+      if (tem)
+        TYPE_NEXT_VARIANT (tem) = TYPE_NEXT_VARIANT (t);
+      TYPE_NEXT_VARIANT (t) = NULL_TREE;
+    }
+
+      /* If we are a pointer then remove us from the pointer-to or
+     reference-to chain.  Otherwise we'd queue up a lot of duplicates
+     there.  */
+      if (TREE_CODE (t) == POINTER_TYPE)
+    {
+      if (TYPE_POINTER_TO (TREE_TYPE (t)) == t)
+        TYPE_POINTER_TO (TREE_TYPE (t)) = TYPE_NEXT_PTR_TO (t);
+      else
+        {
+          tree tem = TYPE_POINTER_TO (TREE_TYPE (t));
+          while (tem && TYPE_NEXT_PTR_TO (tem) != t)
+        tem = TYPE_NEXT_PTR_TO (tem);
+          if (tem)
+        TYPE_NEXT_PTR_TO (tem) = TYPE_NEXT_PTR_TO (t);
+        }
+      TYPE_NEXT_PTR_TO (t) = NULL_TREE;
+    }
+      else if (TREE_CODE (t) == REFERENCE_TYPE)
+    {
+      if (TYPE_REFERENCE_TO (TREE_TYPE (t)) == t)
+        TYPE_REFERENCE_TO (TREE_TYPE (t)) = TYPE_NEXT_REF_TO (t);
+      else
+        {
+          tree tem = TYPE_REFERENCE_TO (TREE_TYPE (t));
+          while (tem && TYPE_NEXT_REF_TO (tem) != t)
+        tem = TYPE_NEXT_REF_TO (tem);
+          if (tem)
+        TYPE_NEXT_REF_TO (tem) = TYPE_NEXT_REF_TO (t);
+        }
+      TYPE_NEXT_REF_TO (t) = NULL_TREE;
+    }
+
+      t = new_type;
+    }
+  else
+    *slot = (void *) t;
+
+  return t;
+}
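+
+/* Illustrative usage (an assumption about the caller, not shown in this
+   change): the LTO reader would canonicalize every streamed-in type with
+
+     type = gimple_register_type (type);
+
+   so that the second and later TUs defining an identical `struct S'
+   end up pointing at the first registered copy.  */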
+
+
+/* Show statistics on references to the global type table gimple_types.  */
+
+void
+print_gimple_types_stats (void)
+{
+  if (gimple_types)
+    fprintf (stderr, "GIMPLE type table: size %ld, %ld elements, "
+         "%ld searches, %ld collisions (ratio: %f)\n",
+         (long) htab_size (gimple_types),
+         (long) htab_elements (gimple_types),
+         (long) gimple_types->searches,
+         (long) gimple_types->collisions,
+         htab_collisions (gimple_types));
+  else
+    fprintf (stderr, "GIMPLE type table is empty\n");
+  if (gtc_visited)
+    fprintf (stderr, "GIMPLE type comparison table: size %ld, %ld "
+         "elements, %ld searches, %ld collisions (ratio: %f)\n",
+         (long) htab_size (gtc_visited),
+         (long) htab_elements (gtc_visited),
+         (long) gtc_visited->searches,
+         (long) gtc_visited->collisions,
+         htab_collisions (gtc_visited));
+  else
+    fprintf (stderr, "GIMPLE type comparison table is empty\n");
+}
+
+/* Free the gimple type hashtables used for LTO type merging.  */
+
+void
+free_gimple_type_tables (void)
+{
+  /* Last chance to print stats for the tables.  */
+  if (flag_lto_report)
+    print_gimple_types_stats ();
+
+  if (gimple_types)
+    {
+      htab_delete (gimple_types);
+      gimple_types = NULL;
+    }
+  if (type_hash_cache)
+    {
+      pointer_map_destroy (type_hash_cache);
+      type_hash_cache = NULL;
+    }
+  if (gtc_visited)
+    {
+      htab_delete (gtc_visited);
+      obstack_free (&gtc_ob, NULL);
+      gtc_visited = NULL;
+    }
+}
+
+
+/* Return a type the same as TYPE except unsigned or
+   signed according to UNSIGNEDP.  */
+
+static tree
+gimple_signed_or_unsigned_type (bool unsignedp, tree type)
+{
+  tree type1;
+
+  type1 = TYPE_MAIN_VARIANT (type);
+  if (type1 == signed_char_type_node
+      || type1 == char_type_node
+      || type1 == unsigned_char_type_node)
+    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
+  if (type1 == integer_type_node || type1 == unsigned_type_node)
+    return unsignedp ? unsigned_type_node : integer_type_node;
+  if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
+    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
+  if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
+    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
+  if (type1 == long_long_integer_type_node
+      || type1 == long_long_unsigned_type_node)
+    return unsignedp
+           ? long_long_unsigned_type_node
+       : long_long_integer_type_node;
+#if HOST_BITS_PER_WIDE_INT >= 64
+  if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
+    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
+#endif
+  if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
+    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
+  if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
+    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
+  if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
+    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
+  if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
+    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
+
+#define GIMPLE_FIXED_TYPES(NAME)        \
+  if (type1 == short_ ## NAME ## _type_node \
+      || type1 == unsigned_short_ ## NAME ## _type_node) \
+    return unsignedp ? unsigned_short_ ## NAME ## _type_node \
+             : short_ ## NAME ## _type_node; \
+  if (type1 == NAME ## _type_node \
+      || type1 == unsigned_ ## NAME ## _type_node) \
+    return unsignedp ? unsigned_ ## NAME ## _type_node \
+             : NAME ## _type_node; \
+  if (type1 == long_ ## NAME ## _type_node \
+      || type1 == unsigned_long_ ## NAME ## _type_node) \
+    return unsignedp ? unsigned_long_ ## NAME ## _type_node \
+             : long_ ## NAME ## _type_node; \
+  if (type1 == long_long_ ## NAME ## _type_node \
+      || type1 == unsigned_long_long_ ## NAME ## _type_node) \
+    return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
+             : long_long_ ## NAME ## _type_node;
+
+#define GIMPLE_FIXED_MODE_TYPES(NAME) \
+  if (type1 == NAME ## _type_node \
+      || type1 == u ## NAME ## _type_node) \
+    return unsignedp ? u ## NAME ## _type_node \
+             : NAME ## _type_node;
+
+#define GIMPLE_FIXED_TYPES_SAT(NAME) \
+  if (type1 == sat_ ## short_ ## NAME ## _type_node \
+      || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
+    return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
+             : sat_ ## short_ ## NAME ## _type_node; \
+  if (type1 == sat_ ## NAME ## _type_node \
+      || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
+    return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
+             : sat_ ## NAME ## _type_node; \
+  if (type1 == sat_ ## long_ ## NAME ## _type_node \
+      || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
+    return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
+             : sat_ ## long_ ## NAME ## _type_node; \
+  if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
+      || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
+    return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
+             : sat_ ## long_long_ ## NAME ## _type_node;
+
+#define GIMPLE_FIXED_MODE_TYPES_SAT(NAME)   \
+  if (type1 == sat_ ## NAME ## _type_node \
+      || type1 == sat_ ## u ## NAME ## _type_node) \
+    return unsignedp ? sat_ ## u ## NAME ## _type_node \
+             : sat_ ## NAME ## _type_node;
+
+  GIMPLE_FIXED_TYPES (fract);
+  GIMPLE_FIXED_TYPES_SAT (fract);
+  GIMPLE_FIXED_TYPES (accum);
+  GIMPLE_FIXED_TYPES_SAT (accum);
+
+  GIMPLE_FIXED_MODE_TYPES (qq);
+  GIMPLE_FIXED_MODE_TYPES (hq);
+  GIMPLE_FIXED_MODE_TYPES (sq);
+  GIMPLE_FIXED_MODE_TYPES (dq);
+  GIMPLE_FIXED_MODE_TYPES (tq);
+  GIMPLE_FIXED_MODE_TYPES_SAT (qq);
+  GIMPLE_FIXED_MODE_TYPES_SAT (hq);
+  GIMPLE_FIXED_MODE_TYPES_SAT (sq);
+  GIMPLE_FIXED_MODE_TYPES_SAT (dq);
+  GIMPLE_FIXED_MODE_TYPES_SAT (tq);
+  GIMPLE_FIXED_MODE_TYPES (ha);
+  GIMPLE_FIXED_MODE_TYPES (sa);
+  GIMPLE_FIXED_MODE_TYPES (da);
+  GIMPLE_FIXED_MODE_TYPES (ta);
+  GIMPLE_FIXED_MODE_TYPES_SAT (ha);
+  GIMPLE_FIXED_MODE_TYPES_SAT (sa);
+  GIMPLE_FIXED_MODE_TYPES_SAT (da);
+  GIMPLE_FIXED_MODE_TYPES_SAT (ta);
+
+  /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
+     the precision; they have precision set to match their range, but
+     may use a wider mode to match an ABI.  If we change modes, we may
+     wind up with bad conversions.  For INTEGER_TYPEs in C, must check
+     the precision as well, so as to yield correct results for
+     bit-field types.  C++ does not have these separate bit-field
+     types, and producing a signed or unsigned variant of an
+     ENUMERAL_TYPE may cause other problems as well.  */
+  if (!INTEGRAL_TYPE_P (type)
+      || TYPE_UNSIGNED (type) == unsignedp)
+    return type;
+
+#define TYPE_OK(node)                               \
+  (TYPE_MODE (type) == TYPE_MODE (node)                     \
+   && TYPE_PRECISION (type) == TYPE_PRECISION (node))
+  if (TYPE_OK (signed_char_type_node))
+    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
+  if (TYPE_OK (integer_type_node))
+    return unsignedp ? unsigned_type_node : integer_type_node;
+  if (TYPE_OK (short_integer_type_node))
+    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
+  if (TYPE_OK (long_integer_type_node))
+    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
+  if (TYPE_OK (long_long_integer_type_node))
+    return (unsignedp
+        ? long_long_unsigned_type_node
+        : long_long_integer_type_node);
+
+#if HOST_BITS_PER_WIDE_INT >= 64
+  if (TYPE_OK (intTI_type_node))
+    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
+#endif
+  if (TYPE_OK (intDI_type_node))
+    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
+  if (TYPE_OK (intSI_type_node))
+    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
+  if (TYPE_OK (intHI_type_node))
+    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
+  if (TYPE_OK (intQI_type_node))
+    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
+
+#undef GIMPLE_FIXED_TYPES
+#undef GIMPLE_FIXED_MODE_TYPES
+#undef GIMPLE_FIXED_TYPES_SAT
+#undef GIMPLE_FIXED_MODE_TYPES_SAT
+#undef TYPE_OK
+
+  return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
+}
+
+
+/* Return an unsigned type the same as TYPE in other respects.  */
+
+tree
+gimple_unsigned_type (tree type)
+{
+  return gimple_signed_or_unsigned_type (true, type);
+}
+
+
+/* Return a signed type the same as TYPE in other respects.  */
+
+tree
+gimple_signed_type (tree type)
+{
+  return gimple_signed_or_unsigned_type (false, type);
+}
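+
+/* Usage sketch (illustrative): for the standard integer nodes the
+   mapping is exact, e.g.
+
+     gimple_unsigned_type (integer_type_node) == unsigned_type_node
+     gimple_signed_type (unsigned_char_type_node) == signed_char_type_node
+
+   while a precision matching no standard node falls through to
+   build_nonstandard_integer_type.  */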
+
+
+/* Return the type-based alias set for T, which may be an expression
+   or a type.  Return -1 if we don't do anything special.  */
+
+alias_set_type
+gimple_get_alias_set (tree t)
+{
+  tree u;
+
+  /* Permit type-punning when accessing a union, provided the access
+     is directly through the union.  For example, this code does not
+     permit taking the address of a union member and then storing
+     through it.  Even the type-punning allowed here is a GCC
+     extension, albeit a common and useful one; the C standard says
+     that such accesses have implementation-defined behavior.  */
+  for (u = t;
+       TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
+       u = TREE_OPERAND (u, 0))
+    if (TREE_CODE (u) == COMPONENT_REF
+        && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
+      return 0;
+
+  /* That's all the expressions we handle specially.  */
+  if (!TYPE_P (t))
+    return -1;
+
+  /* For convenience, follow the C standard when dealing with
+     character types.  Any object may be accessed via an lvalue that
+     has character type.  */
+  if (t == char_type_node
+      || t == signed_char_type_node
+      || t == unsigned_char_type_node)
+    return 0;
+
+  /* Allow aliasing between signed and unsigned variants of the same
+     type.  We treat the signed variant as canonical.  */
+  if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
+    {
+      tree t1 = gimple_signed_type (t);
+
+      /* t1 == t can happen for boolean nodes which are always unsigned.  */
+      if (t1 != t)
+        return get_alias_set (t1);
+    }
+  else if (POINTER_TYPE_P (t))
+    {
+      /* From the common C and C++ langhook implementation:
+
+         Unfortunately, there is no canonical form of a pointer type.
+         In particular, if we have `typedef int I', then `int *', and
+         `I *' are different types.  So, we have to pick a canonical
+         representative.  We do this below.
+
+         Technically, this approach is actually more conservative than
+         it needs to be.  In particular, `const int *' and `int *'
+         should be in different alias sets, according to the C and C++
+         standards, since their types are not the same, and so,
+         technically, an `int **' and a `const int **' cannot point at
+         the same thing.
+
+         But the standard is wrong.  In particular, this code is
+         legal C++:
+
+           int *ip;
+           int **ipp = &ip;
+           const int* const* cipp = ipp;
+
+         and it doesn't make sense for that to be legal unless you
+         can dereference IPP and CIPP.  So, we ignore cv-qualifiers on
+         the pointed-to types.  This issue has been reported to the
+         C++ committee.  */
+
+      /* In addition to the above canonicalization issue, with LTO
+         we should also canonicalize `T (*)[]' to `T *', avoiding
+         alias issues between pointer-to-element types and
+         pointer-to-array types.
+
+         Likewise we need to deal with the situation of incomplete
+         pointed-to types and make `*(struct X **)&a' and
+         `*(struct X {} **)&a' alias.  Otherwise we will have to
+         guarantee that all pointer-to incomplete type variants
+         will be replaced by pointer-to complete type variants if
+         they are available.
+
+         With LTO the convenient situation of using `void *' to
+         access and store any pointer type will also become
+         more apparent (and `void *' is just another pointer-to
+         incomplete type).  Assigning alias-set zero to `void *'
+         and all pointer-to incomplete types is not an appealing
+         solution.  Assigning an effective alias-set zero that
+         affects only pointers might be - by recording proper subset
+         relationships of all pointer alias-sets.
+
+         Pointer-to-function types are another grey area which
+         needs caution.  Globbing them all into one alias-set
+         or the above effective zero set would work.  */
+
+      /* For now just assign the same alias-set to all pointers.
+         That's simple and avoids all the above problems.  */
+      if (t != ptr_type_node)
+        return get_alias_set (ptr_type_node);
+    }
+
+  return -1;
+}
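+
+/* Illustrative sketch of the rules above: a direct union access gets
+   alias-set zero, while an access through a saved pointer does not:
+
+     union u { int i; float f; } x;
+     x.f = 1.0f;           - COMPONENT_REF off a UNION_TYPE: set 0
+     float *p = &x.f;
+     *p = 2.0f;            - plain pointer access: no special casing
+
+   and every pointer type other than ptr_type_node itself is funneled
+   into get_alias_set (ptr_type_node).  */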
+
+
+/* Data structure used to count the number of dereferences to PTR
+   inside an expression.  */
+struct count_ptr_d
+{
+  tree ptr;
+  unsigned num_stores;
+  unsigned num_loads;
+};
+
+/* Helper for count_uses_and_derefs.  Called by walk_tree to look for
+   (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA.  */
+
+static tree
+count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
+{
+  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
+  struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;
+
+  /* Do not walk inside ADDR_EXPR nodes.  In the expression &ptr->fld,
+     the pointer 'ptr' is *not* dereferenced; it is simply used to compute
+     the address of 'fld' as 'ptr + offsetof(fld)'.  */
+  if (TREE_CODE (*tp) == ADDR_EXPR)
+    {
+      *walk_subtrees = 0;
+      return NULL_TREE;
+    }
+
+  if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
+    {
+      if (wi_p->is_lhs)
+        count_p->num_stores++;
+      else
+        count_p->num_loads++;
+    }
+
+  return NULL_TREE;
+}
+
+/* Count the number of direct and indirect uses for pointer PTR in
+   statement STMT.  The number of direct uses is stored in
+   *NUM_USES_P.  Indirect references are counted separately depending
+   on whether they are store or load operations.  The counts are
+   stored in *NUM_STORES_P and *NUM_LOADS_P.  */
+
+void
+count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
+                       unsigned *num_loads_p, unsigned *num_stores_p)
+{
+  ssa_op_iter i;
+  tree use;
+
+  *num_uses_p = 0;
+  *num_loads_p = 0;
+  *num_stores_p = 0;
+
+  /* Find out the total number of uses of PTR in STMT.  */
+  FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
+    if (use == ptr)
+      (*num_uses_p)++;
+
+  /* Now count the number of indirect references to PTR.  This is
+     truly awful, but we don't have much choice.  There are no parent
+     pointers inside INDIRECT_REFs, so an expression like
+     '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
+     find all the indirect and direct uses of x_1 inside.  The only
+     shortcut we can take is the fact that GIMPLE only allows
+     INDIRECT_REFs inside the expressions below.  */
+  if (is_gimple_assign (stmt)
+      || gimple_code (stmt) == GIMPLE_RETURN
+      || gimple_code (stmt) == GIMPLE_ASM
+      || is_gimple_call (stmt))
+    {
+      struct walk_stmt_info wi;
+      struct count_ptr_d count;
+
+      count.ptr = ptr;
+      count.num_stores = 0;
+      count.num_loads = 0;
+
+      memset (&wi, 0, sizeof (wi));
+      wi.info = &count;
+      walk_gimple_op (stmt, count_ptr_derefs, &wi);
+
+      *num_stores_p = count.num_stores;
+      *num_loads_p = count.num_loads;
+    }
+
+  gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
+}
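+
+/* Worked example (illustrative): for '*x_1 = foo (x_1, *x_1)' the SSA
+   name x_1 occurs three times, so *NUM_USES_P becomes 3; the LHS
+   '*x_1' counts as one indirect store and the argument '*x_1' as one
+   indirect load, so *NUM_STORES_P == 1 and *NUM_LOADS_P == 1, and the
+   final assertion 3 >= 1 + 1 holds.  */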
+
+/* From a tree operand OP return the base of a load or store operation
+   or NULL_TREE if OP is not a load or a store.  */
+
+static tree
+get_base_loadstore (tree op)
+{
+  while (handled_component_p (op))
+    op = TREE_OPERAND (op, 0);
+  if (DECL_P (op)
+      || INDIRECT_REF_P (op)
+      || TREE_CODE (op) == TARGET_MEM_REF)
+    return op;
+  return NULL_TREE;
+}
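+
+/* For example (illustrative): for 'a.b[i].c' the stripping loop peels
+   the COMPONENT_REFs and the ARRAY_REF and returns the decl 'a'; for
+   '*p' the INDIRECT_REF itself is returned; a bare SSA name or
+   constant yields NULL_TREE.  */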
+
+/* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
+   VISIT_ADDR if non-NULL on loads, stores and address-taken operands
+   passing the STMT, the base of the operand and DATA to it.  The base
+   will be either a decl, an indirect reference (including TARGET_MEM_REF)
+   or the argument of an address expression.
+   Returns the results of these callbacks or'ed.  */
+
+bool
+walk_stmt_load_store_addr_ops (gimple stmt, void *data,
+                               bool (*visit_load)(gimple, tree, void *),
+                               bool (*visit_store)(gimple, tree, void *),
+                               bool (*visit_addr)(gimple, tree, void *))
+{
+  bool ret = false;
+  unsigned i;
+  if (gimple_assign_single_p (stmt))
+    {
+      tree lhs, rhs;
+      if (visit_store)
+        {
+          lhs = get_base_loadstore (gimple_assign_lhs (stmt));
+          if (lhs)
+            ret |= visit_store (stmt, lhs, data);
+        }
+      rhs = gimple_assign_rhs1 (stmt);
+      while (handled_component_p (rhs))
+        rhs = TREE_OPERAND (rhs, 0);
+      if (visit_addr)
+        {
+          if (TREE_CODE (rhs) == ADDR_EXPR)
+            ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
+          else if (TREE_CODE (rhs) == TARGET_MEM_REF
+                   && TMR_BASE (rhs) != NULL_TREE
+                   && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
+            ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
+          else if (TREE_CODE (rhs) == OBJ_TYPE_REF
+                   && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
+            ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
+                                                   0), data);
+          lhs = gimple_assign_lhs (stmt);
+          if (TREE_CODE (lhs) == TARGET_MEM_REF
+              && TMR_BASE (lhs) != NULL_TREE
+              && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
+            ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
+        }
+      if (visit_load)
+        {
+          rhs = get_base_loadstore (rhs);
+          if (rhs)
+            ret |= visit_load (stmt, rhs, data);
+        }
+    }
+  else if (visit_addr
+           && (is_gimple_assign (stmt)
+               || gimple_code (stmt) == GIMPLE_COND))
+    {
+      for (i = 0; i < gimple_num_ops (stmt); ++i)
+        if (gimple_op (stmt, i)
+            && TREE_CODE (gimple_op (stmt, i)) == ADDR_EXPR)
+          ret |= visit_addr (stmt, TREE_OPERAND (gimple_op (stmt, i), 0), data);
+    }
+  else if (is_gimple_call (stmt))
+    {
+      if (visit_store)
+        {
+          tree lhs = gimple_call_lhs (stmt);
+          if (lhs)
+            {
+              lhs = get_base_loadstore (lhs);
+              if (lhs)
+                ret |= visit_store (stmt, lhs, data);
+            }
+        }
+      if (visit_load || visit_addr)
+        for (i = 0; i < gimple_call_num_args (stmt); ++i)
+          {
+            tree rhs = gimple_call_arg (stmt, i);
+            if (visit_addr
+                && TREE_CODE (rhs) == ADDR_EXPR)
+              ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
+            else if (visit_load)
+              {
+                rhs = get_base_loadstore (rhs);
+                if (rhs)
+                  ret |= visit_load (stmt, rhs, data);
+              }
+          }
+      if (visit_addr
+          && gimple_call_chain (stmt)
+          && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
+        ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
+                           data);
+      if (visit_addr
+          && gimple_call_return_slot_opt_p (stmt)
+          && gimple_call_lhs (stmt) != NULL_TREE
+          && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
+        ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
+    }
+  else if (gimple_code (stmt) == GIMPLE_ASM)
+    {
+      unsigned noutputs;
+      const char *constraint;
+      const char **oconstraints;
+      bool allows_mem, allows_reg, is_inout;
+      noutputs = gimple_asm_noutputs (stmt);
+      oconstraints = XALLOCAVEC (const char *, noutputs);
+      if (visit_store || visit_addr)
+        for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
+          {
+            tree link = gimple_asm_output_op (stmt, i);
+            tree op = get_base_loadstore (TREE_VALUE (link));
+            if (op && visit_store)
+              ret |= visit_store (stmt, op, data);
+            if (visit_addr)
+              {
+                constraint = TREE_STRING_POINTER
+                    (TREE_VALUE (TREE_PURPOSE (link)));
+                oconstraints[i] = constraint;
+                parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
+                                         &allows_reg, &is_inout);
+                if (op && !allows_reg && allows_mem)
+                  ret |= visit_addr (stmt, op, data);
+              }
+          }
+      if (visit_load || visit_addr)
+        for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
+          {
+            tree link = gimple_asm_input_op (stmt, i);
+            tree op = TREE_VALUE (link);
+            if (visit_addr
+                && TREE_CODE (op) == ADDR_EXPR)
+              ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
+            else if (visit_load || visit_addr)
+              {
+                op = get_base_loadstore (op);
+                if (op)
+                  {
+                    if (visit_load)
+                      ret |= visit_load (stmt, op, data);
+                    if (visit_addr)
+                      {
+                        constraint = TREE_STRING_POINTER
+                            (TREE_VALUE (TREE_PURPOSE (link)));
+                        parse_input_constraint (&constraint, 0, 0, noutputs,
+                                                0, oconstraints,
+                                                &allows_mem, &allows_reg);
+                        if (!allows_reg && allows_mem)
+                          ret |= visit_addr (stmt, op, data);
+                      }
+                  }
+              }
+          }
+    }
+  else if (gimple_code (stmt) == GIMPLE_RETURN)
+    {
+      tree op = gimple_return_retval (stmt);
+      if (op)
+        {
+          if (visit_addr
+              && TREE_CODE (op) == ADDR_EXPR)
+            ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
+          else if (visit_load)
+            {
+              op = get_base_loadstore (op);
+              if (op)
+                ret |= visit_load (stmt, op, data);
+            }
+        }
+    }
+  else if (visit_addr
+           && gimple_code (stmt) == GIMPLE_PHI)
+    {
+      for (i = 0; i < gimple_phi_num_args (stmt); ++i)
+        {
+          tree op = PHI_ARG_DEF (stmt, i);
+          if (TREE_CODE (op) == ADDR_EXPR)
+            ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
+        }
+    }
+
+  return ret;
+}
+
+/* Like walk_stmt_load_store_addr_ops but with NULL visit_addr.  IPA-CP
+   should make a faster clone for this case.  */
+
+bool
+walk_stmt_load_store_ops (gimple stmt, void *data,
+                          bool (*visit_load)(gimple, tree, void *),
+                          bool (*visit_store)(gimple, tree, void *))
+{
+  return walk_stmt_load_store_addr_ops (stmt, data,
+                                        visit_load, visit_store, NULL);
+}
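+
+/* Usage sketch (illustrative; the callback name is made up): record
+   every decl whose base is loaded by STMT in a bitmap passed as DATA:
+
+     static bool
+     note_load (gimple stmt ATTRIBUTE_UNUSED, tree base, void *data)
+     {
+       if (DECL_P (base))
+         bitmap_set_bit ((bitmap) data, DECL_UID (base));
+       return false;
+     }
+
+     walk_stmt_load_store_ops (stmt, loaded, note_load, NULL);
+
+   Since the walker or's the callback results together, returning false
+   everywhere keeps the overall result false.  */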
+
+/* Helper for gimple_ior_addresses_taken_1.  */
+
+static bool
+gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
+                              tree addr, void *data)
+{
+  bitmap addresses_taken = (bitmap)data;
+  while (handled_component_p (addr))
+    addr = TREE_OPERAND (addr, 0);
+  if (DECL_P (addr))
+    {
+      bitmap_set_bit (addresses_taken, DECL_UID (addr));
+      return true;
+    }
+  return false;
+}
+
+/* Set the bit for the uid of all decls that have their address taken
+   in STMT in the ADDRESSES_TAKEN bitmap.  Returns true if there
+   were any in this stmt.  */
+
+bool
+gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
+{
+  return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
+                                        gimple_ior_addresses_taken_1);
+}
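+
+/* Usage sketch (illustrative): to collect the address-taken decls of a
+   basic block BB:
+
+     bitmap taken = BITMAP_ALLOC (NULL);
+     gimple_stmt_iterator gsi;
+     for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+       gimple_ior_addresses_taken (taken, gsi_stmt (gsi));
+
+   afterwards bit DECL_UID (d) is set for each decl d whose address is
+   taken in BB.  */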
+
+
+/* Return a printable name for symbol DECL.  */
+
+const char *
+gimple_decl_printable_name (tree decl, int verbosity)
+{
+  gcc_assert (decl && DECL_NAME (decl));
+
+  if (DECL_ASSEMBLER_NAME_SET_P (decl))
+    {
+      const char *str, *mangled_str;
+      int dmgl_opts = DMGL_NO_OPTS;
+
+      if (verbosity >= 2)
+        {
+          dmgl_opts = DMGL_VERBOSE
+                      | DMGL_ANSI
+                      | DMGL_GNU_V3
+                      | DMGL_RET_POSTFIX;
+          if (TREE_CODE (decl) == FUNCTION_DECL)
+            dmgl_opts |= DMGL_PARAMS;
+        }
+
+      mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
+      str = cplus_demangle_v3 (mangled_str, dmgl_opts);
+      return (str) ? str : mangled_str;
+    }
+
+  return IDENTIFIER_POINTER (DECL_NAME (decl));
+}
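+
+/* Example (illustrative): for a C++ decl whose assembler name is
+   "_Z3fooi", a verbosity of 2 or more demangles it through
+   cplus_demangle_v3 to something like "foo(int)"; a decl without an
+   assembler name simply yields its DECL_NAME identifier.  */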
+
+
+/* Fold an OBJ_TYPE_REF expression to the address of a function.
+   KNOWN_TYPE carries the true type of OBJ_TYPE_REF_OBJECT(REF).  Adapted
+   from cp_fold_obj_type_ref, but it tolerates types with no binfo
+   data.  */
+
+tree
+gimple_fold_obj_type_ref (tree ref, tree known_type)
+{
+  HOST_WIDE_INT index;
+  HOST_WIDE_INT i;
+  tree v;
+  tree fndecl;
+
+  if (TYPE_BINFO (known_type) == NULL_TREE)
+    return NULL_TREE;
+
+  v = BINFO_VIRTUALS (TYPE_BINFO (known_type));
+  index = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
+  i = 0;
+  while (i != index)
+    {
+      i += (TARGET_VTABLE_USES_DESCRIPTORS
+            ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
+      v = TREE_CHAIN (v);
+    }
+
+  fndecl = TREE_VALUE (v);
+
+#ifdef ENABLE_CHECKING
+  gcc_assert (tree_int_cst_equal (OBJ_TYPE_REF_TOKEN (ref),
+                                  DECL_VINDEX (fndecl)));
+#endif
+
+  cgraph_node (fndecl)->local.vtable_method = true;
+
+  return build_fold_addr_expr (fndecl);
+}
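+
+/* Illustration (assuming TARGET_VTABLE_USES_DESCRIPTORS == 0): for
+   OBJ_TYPE_REF_TOKEN (ref) == 2 the loop above steps twice along
+   BINFO_VIRTUALS, so the third virtual method of KNOWN_TYPE is
+   selected and its address returned; with descriptor-based vtables
+   the step size is TARGET_VTABLE_USES_DESCRIPTORS instead of 1.  */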
+
 #include "gt-gimple.h"