diff gcc/gimplify.c @ 69:1b10fe6932e1

merge 69
author Nobuyasu Oshiro <dimolto@cr.ie.u-ryukyu.ac.jp>
date Sun, 21 Aug 2011 07:53:12 +0900
parents 326d9e06c2e3 f6334be47118
children b81903832de2
--- a/gcc/gimplify.c	Tue Dec 14 03:58:33 2010 +0900
+++ b/gcc/gimplify.c	Sun Aug 21 07:53:12 2011 +0900
@@ -1,6 +1,6 @@
 /* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
    tree representation into the GIMPLE form.
-   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
+   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
    Free Software Foundation, Inc.
    Major work done by Sebastian Pop <s.pop@laposte.net>,
    Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
@@ -26,28 +26,21 @@
 #include "coretypes.h"
 #include "tm.h"
 #include "tree.h"
-#include "rtl.h"
-#include "varray.h"
 #include "gimple.h"
 #include "tree-iterator.h"
 #include "tree-inline.h"
-#include "diagnostic.h"
+#include "tree-pretty-print.h"
 #include "langhooks.h"
-#include "langhooks-def.h"
 #include "tree-flow.h"
 #include "cgraph.h"
 #include "timevar.h"
-#include "except.h"
 #include "hashtab.h"
 #include "flags.h"
-#include "real.h"
 #include "function.h"
 #include "output.h"
-#include "expr.h"
 #include "ggc.h"
-#include "toplev.h"
+#include "diagnostic-core.h"
 #include "target.h"
-#include "optabs.h"
 #include "pointer-set.h"
 #include "splay-tree.h"
 #include "vec.h"
@@ -58,6 +51,10 @@
 
 #include "tree-pass.h"
 
+#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name.  */
+#include "expr.h"		/* FIXME: for can_move_by_pieces
+				   and STACK_CHECK_MAX_VAR_SIZE.  */
+
 enum gimplify_omp_var_data
 {
   GOVD_SEEN = 1,
@@ -78,9 +75,10 @@
 enum omp_region_type
 {
   ORT_WORKSHARE = 0,
-  ORT_TASK = 1,
   ORT_PARALLEL = 2,
-  ORT_COMBINED_PARALLEL = 3
+  ORT_COMBINED_PARALLEL = 3,
+  ORT_TASK = 4,
+  ORT_UNTIED_TASK = 5
 };
 
 struct gimplify_omp_ctx
@@ -116,10 +114,13 @@
 {
   while (handled_component_p (x))
     x = TREE_OPERAND (x, 0);
+  if (TREE_CODE (x) == MEM_REF
+      && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
+    x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
   if (TREE_CODE (x) != VAR_DECL
       && TREE_CODE (x) != PARM_DECL
       && TREE_CODE (x) != RESULT_DECL)
-    return ;
+    return;
   TREE_ADDRESSABLE (x) = 1;
 }
 
@@ -148,9 +149,11 @@
   if (!operand_equal_p (t1, t2, 0))
     return 0;
 
+#ifdef ENABLE_CHECKING
   /* Only allow them to compare equal if they also hash equal; otherwise
      results are nondeterminate, and we fail bootstrap comparison.  */
   gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2));
+#endif
 
   return 1;
 }
@@ -161,7 +164,7 @@
    During gimplification, we need to manipulate statement sequences
    before the def/use vectors have been constructed.  */
 
-static void
+void
 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
 {
   gimple_stmt_iterator si;
@@ -322,7 +325,7 @@
   c->privatized_types = pointer_set_create ();
   c->location = input_location;
   c->region_type = region_type;
-  if (region_type != ORT_TASK)
+  if ((region_type & ORT_TASK) == 0)
     c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
   else
     c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
@@ -343,47 +346,6 @@
 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
 
-/* A subroutine of append_to_statement_list{,_force}.  T is not NULL.  */
-
-static void
-append_to_statement_list_1 (tree t, tree *list_p)
-{
-  tree list = *list_p;
-  tree_stmt_iterator i;
-
-  if (!list)
-    {
-      if (t && TREE_CODE (t) == STATEMENT_LIST)
-    {
-      *list_p = t;
-      return;
-    }
-      *list_p = list = alloc_stmt_list ();
-    }
-
-  i = tsi_last (list);
-  tsi_link_after (&i, t, TSI_CONTINUE_LINKING);
-}
-
-/* Add T to the end of the list container pointed to by LIST_P.
-   If T is an expression with no effects, it is ignored.  */
-
-void
-append_to_statement_list (tree t, tree *list_p)
-{
-  if (t && TREE_SIDE_EFFECTS (t))
-    append_to_statement_list_1 (t, list_p);
-}
-
-/* Similar, but the statement is always added, regardless of side effects.  */
-
-void
-append_to_statement_list_force (tree t, tree *list_p)
-{
-  if (t != NULL_TREE)
-    append_to_statement_list_1 (t, list_p);
-}
-
 /* Both gimplify the statement T and append it to *SEQ_P.  This function
    behaves exactly as gimplify_stmt, but you don't have to pass T as a
    reference.  */
@@ -511,6 +473,23 @@
   return tmp_var;
 }
 
+/* Create a new temporary variable declaration of type TYPE by calling
+   create_tmp_var and if TYPE is a vector or a complex number, mark the new
+   temporary as gimple register.  */
+
+tree
+create_tmp_reg (tree type, const char *prefix)
+{
+  tree tmp;
+
+  tmp = create_tmp_var (type, prefix);
+  if (TREE_CODE (type) == COMPLEX_TYPE
+      || TREE_CODE (type) == VECTOR_TYPE)
+    DECL_GIMPLE_REG_P (tmp) = 1;
+
+  return tmp;
+}
+
 /* Create a temporary with a name derived from VAL.  Subroutine of
    lookup_tmp_var; nobody else should call this function.  */
 
@@ -564,7 +543,7 @@
 
 
 /* Return true if T is a CALL_EXPR or an expression that can be
-   assignmed to a temporary.  Note that this predicate should only be
+   assigned to a temporary.  Note that this predicate should only be
    used during gimplification.  See the rationale for this in
    gimplify_modify_expr.  */
 
@@ -613,10 +592,7 @@
 
   mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
 
-  if (EXPR_HAS_LOCATION (val))
-    SET_EXPR_LOCATION (mod, EXPR_LOCATION (val));
-  else
-    SET_EXPR_LOCATION (mod, input_location);
+  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_HERE (val));
 
   /* gimplify_modify_expr might want to reduce this further.  */
   gimplify_and_add (mod, pre_p);
@@ -678,10 +654,10 @@
       block = gimple_bind_block (scope);
       gcc_assert (!block || TREE_CODE (block) == BLOCK);
       if (!block || !debug_info)
-    {
-      TREE_CHAIN (last) = gimple_bind_vars (scope);
-      gimple_bind_set_vars (scope, temps);
-    }
+	{
+	  DECL_CHAIN (last) = gimple_bind_vars (scope);
+	  gimple_bind_set_vars (scope, temps);
+	}
       else
     {
       /* We need to attach the nodes both to the BIND_EXPR and to its
@@ -727,7 +703,7 @@
 void
 gimple_add_tmp_var (tree tmp)
 {
-  gcc_assert (!TREE_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
+  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
 
   /* Later processing assumes that the object size is constant, which might
      not be true at this point.  Force the use of a constant upper bound in
@@ -740,7 +716,7 @@
 
   if (gimplify_ctxp)
     {
-      TREE_CHAIN (tmp) = gimplify_ctxp->temps;
+      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
       gimplify_ctxp->temps = tmp;
 
       /* Mark temporaries local within the nearest enclosing parallel.  */
@@ -847,9 +823,44 @@
       annotate_one_with_location (gs, location);
     }
 }
-
-
-/* Similar to copy_tree_r() but do not copy SAVE_EXPR or TARGET_EXPR nodes.
+
+/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
+   nodes that are referenced more than once in GENERIC functions.  This is
+   necessary because gimplification (translation into GIMPLE) is performed
+   by modifying tree nodes in-place, so gimplification of a shared node in a
+   first context could generate an invalid GIMPLE form in a second context.
+
+   This is achieved with a simple mark/copy/unmark algorithm that walks the
+   GENERIC representation top-down, marks nodes with TREE_VISITED the first
+   time it encounters them, duplicates them if they already have TREE_VISITED
+   set, and finally removes the TREE_VISITED marks it has set.
+
+   The algorithm works only at the function level, i.e. it generates a GENERIC
+   representation of a function with no nodes shared within the function when
+   passed a GENERIC function (except for nodes that are allowed to be shared).
+
+   At the global level, it is also necessary to unshare tree nodes that are
+   referenced in more than one function, for the same aforementioned reason.
+   This requires some cooperation from the front-end.  There are 2 strategies:
+
+     1. Manual unsharing.  The front-end needs to call unshare_expr on every
+        expression that might end up being shared across functions.
+
+     2. Deep unsharing.  This is an extension of regular unsharing.  Instead
+        of calling unshare_expr on expressions that might be shared across
+        functions, the front-end pre-marks them with TREE_VISITED.  This will
+        ensure that they are unshared on the first reference within functions
+        when the regular unsharing algorithm runs.  The counterpart is that
+        this algorithm must look deeper than for manual unsharing, which is
+        specified by LANG_HOOKS_DEEP_UNSHARING.
+
+  If there are only a few specific cases of node sharing across functions, it is
+  probably easier for a front-end to unshare the expressions manually.  On the
+  contrary, if the expressions generated at the global level are as widespread
+  as expressions generated within functions, deep unsharing is very likely the
+  way to go.  */
+
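/* Illustration (not part of this changeset): a minimal, self-contained sketch
   of the mark/copy/unmark idea described above, using a hypothetical
   "struct node" instead of GCC's tree type and plain recursion instead of
   walk_tree.  A node is marked on its first visit; a second visit means it is
   shared, so that reference is replaced by a deep copy; a final walk clears
   the marks.  The deep-unsharing refinement with a pointer set is omitted.  */

#include <stdlib.h>

struct node { struct node *kid[2]; int visited; };

static struct node *
deep_copy (struct node *n)              /* plays the role of copy_tree_r */
{
  struct node *c;
  if (!n)
    return NULL;
  c = malloc (sizeof *c);
  *c = *n;
  c->visited = 0;
  c->kid[0] = deep_copy (n->kid[0]);
  c->kid[1] = deep_copy (n->kid[1]);
  return c;
}

static void
unshare_walk (struct node **np)         /* like copy_if_shared_r */
{
  struct node *n = *np;
  if (!n)
    return;
  if (n->visited)
    {
      *np = deep_copy (n);              /* shared: unshare, don't look deeper */
      return;
    }
  n->visited = 1;                       /* first visit: mark and keep looking */
  unshare_walk (&n->kid[0]);
  unshare_walk (&n->kid[1]);
}

static void
unmark_walk (struct node *n)            /* like unmark_visited_r */
{
  if (!n || !n->visited)
    return;
  n->visited = 0;
  unmark_walk (n->kid[0]);
  unmark_walk (n->kid[1]);
}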
+/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
    These nodes model computations that should only be done once.  If we
    were to unshare something like SAVE_EXPR(i++), the gimplification
    process would create wrong code.  */
@@ -857,38 +868,47 @@
 static tree
 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
 {
-  enum tree_code code = TREE_CODE (*tp);
-  /* Don't unshare types, decls, constants and SAVE_EXPR nodes.  */
-  if (TREE_CODE_CLASS (code) == tcc_type
-      || TREE_CODE_CLASS (code) == tcc_declaration
-      || TREE_CODE_CLASS (code) == tcc_constant
-      || code == SAVE_EXPR || code == TARGET_EXPR
-      /* We can't do anything sensible with a BLOCK used as an expression,
-     but we also can't just die when we see it because of non-expression
-     uses.  So just avert our eyes and cross our fingers.  Silly Java.  */
-      || code == BLOCK)
+  tree t = *tp;
+  enum tree_code code = TREE_CODE (t);
+
+  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
+     copy their subtrees if we can make sure to do it only once.  */
+  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
+    {
+      if (data && !pointer_set_insert ((struct pointer_set_t *)data, t))
+	;
+      else
+	*walk_subtrees = 0;
+    }
+
+  /* Stop at types, decls, constants like copy_tree_r.  */
+  else if (TREE_CODE_CLASS (code) == tcc_type
+	   || TREE_CODE_CLASS (code) == tcc_declaration
+	   || TREE_CODE_CLASS (code) == tcc_constant
+	   /* We can't do anything sensible with a BLOCK used as an
+	      expression, but we also can't just die when we see it
+	      because of non-expression uses.  So we avert our eyes
+	      and cross our fingers.  Silly Java.  */
+	   || code == BLOCK)
     *walk_subtrees = 0;
+
+  /* Cope with the statement expression extension.  */
+  else if (code == STATEMENT_LIST)
+    ;
+
+  /* Leave the bulk of the work to copy_tree_r itself.  */
   else
-    {
-      gcc_assert (code != BIND_EXPR);
-      copy_tree_r (tp, walk_subtrees, data);
-    }
+    copy_tree_r (tp, walk_subtrees, NULL);
 
   return NULL_TREE;
 }
 
 /* Callback for walk_tree to unshare most of the shared trees rooted at
    *TP.  If *TP has been visited already (i.e., TREE_VISITED (*TP) == 1),
-   then *TP is deep copied by calling copy_tree_r.
-
-   This unshares the same trees as copy_tree_r with the exception of
-   SAVE_EXPR nodes.  These nodes model computations that should only be
-   done once.  If we were to unshare something like SAVE_EXPR(i++), the
-   gimplification process would create wrong code.  */
+   then *TP is deep copied by calling mostly_copy_tree_r.  */
 
 static tree
-copy_if_shared_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
-          void *data ATTRIBUTE_UNUSED)
+copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
 {
   tree t = *tp;
   enum tree_code code = TREE_CODE (t);
@@ -911,27 +931,29 @@
      any deeper.  */
   else if (TREE_VISITED (t))
     {
-      walk_tree (tp, mostly_copy_tree_r, NULL, NULL);
+      walk_tree (tp, mostly_copy_tree_r, data, NULL);
       *walk_subtrees = 0;
     }
 
-  /* Otherwise, mark the tree as visited and keep looking.  */
+  /* Otherwise, mark the node as visited and keep looking.  */
   else
     TREE_VISITED (t) = 1;
 
   return NULL_TREE;
 }
 
-static tree
-unmark_visited_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
-          void *data ATTRIBUTE_UNUSED)
-{
-  if (TREE_VISITED (*tp))
-    TREE_VISITED (*tp) = 0;
-  else
-    *walk_subtrees = 0;
-
-  return NULL_TREE;
+/* Unshare most of the shared trees rooted at *TP. */
+
+static inline void
+copy_if_shared (tree *tp)
+{
+  /* If the language requires deep unsharing, we need a pointer set to make
+     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
+  struct pointer_set_t *visited
+    = lang_hooks.deep_unsharing ? pointer_set_create () : NULL;
+  walk_tree (tp, copy_if_shared_r, visited, NULL);
+  if (visited)
+    pointer_set_destroy (visited);
 }
 
 /* Unshare all the trees in BODY_P, a pointer into the body of FNDECL, and the
@@ -943,12 +965,40 @@
 {
   struct cgraph_node *cgn = cgraph_node (fndecl);
 
-  walk_tree (body_p, copy_if_shared_r, NULL, NULL);
+  copy_if_shared (body_p);
+
   if (body_p == &DECL_SAVED_TREE (fndecl))
     for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
       unshare_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
 }
 
+/* Callback for walk_tree to unmark the visited trees rooted at *TP.
+   Subtrees are walked until the first unvisited node is encountered.  */
+
+static tree
+unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
+{
+  tree t = *tp;
+
+  /* If this node has been visited, unmark it and keep looking.  */
+  if (TREE_VISITED (t))
+    TREE_VISITED (t) = 0;
+
+  /* Otherwise, don't look any deeper.  */
+  else
+    *walk_subtrees = 0;
+
+  return NULL_TREE;
+}
+
+/* Unmark the visited trees rooted at *TP.  */
+
+static inline void
+unmark_visited (tree *tp)
+{
+  walk_tree (tp, unmark_visited_r, NULL, NULL);
+}
+
 /* Likewise, but mark all trees as not visited.  */
 
 static void
@@ -956,7 +1006,8 @@
 {
   struct cgraph_node *cgn = cgraph_node (fndecl);
 
-  walk_tree (body_p, unmark_visited_r, NULL, NULL);
+  unmark_visited (body_p);
+
   if (body_p == &DECL_SAVED_TREE (fndecl))
     for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
       unvisit_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
@@ -1084,7 +1135,7 @@
   tree temp = voidify_wrapper_expr (bind_expr, NULL);
 
   /* Mark variables seen in this bind expr.  */
-  for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t))
+  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
     {
       if (TREE_CODE (t) == VAR_DECL)
     {
@@ -1104,19 +1155,14 @@
     }
 
       /* Preliminarily mark non-addressed complex variables as eligible
-     for promotion to gimple registers.  We'll transform their uses
-     as we find them.
-     We exclude complex types if not optimizing because they can be
-     subject to partial stores in GNU C by means of the __real__ and
-     __imag__ operators and we cannot promote them to total stores
-     (see gimplify_modify_expr_complex_part).  */
-      if (optimize
-      && (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
-          || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
-      && !TREE_THIS_VOLATILE (t)
-      && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
-      && !needs_to_live_in_memory (t))
-    DECL_GIMPLE_REG_P (t) = 1;
+	 for promotion to gimple registers.  We'll transform their uses
+	 as we find them.  */
+      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
+	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
+	  && !TREE_THIS_VOLATILE (t)
+	  && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
+	  && !needs_to_live_in_memory (t))
+	DECL_GIMPLE_REG_P (t) = 1;
     }
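/* Illustration (not part of this changeset): the GNU partial stores the
   deleted part of the comment above referred to.  The rewritten check no
   longer excludes the non-optimizing case, so a complex local such as "c" is
   marked eligible for promotion regardless of the -O level.  */

_Complex double
make_complex (double r, double i)
{
  _Complex double c;
  __real__ c = r;               /* partial store to the real part */
  __imag__ c = i;               /* partial store to the imaginary part */
  return c;
}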
 
   gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
@@ -1224,17 +1270,27 @@
      hard_function_value generates a PARALLEL, we'll die during normal
      expansion of structure assignments; there's special code in expand_return
      to handle this case that does not exist in expand_expr.  */
-  if (!result_decl
-      || aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
-    result = result_decl;
+  if (!result_decl)
+    result = NULL_TREE;
+  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
+    {
+      if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
+	{
+	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
+	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
+	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
+	     should be effectively allocated by the caller, i.e. all calls to
+	     this function must be subject to the Return Slot Optimization.  */
+	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
+	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
+	}
+      result = result_decl;
+    }
   else if (gimplify_ctxp->return_temp)
     result = gimplify_ctxp->return_temp;
   else
     {
-      result = create_tmp_var (TREE_TYPE (result_decl), NULL);
-      if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
-          || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
-        DECL_GIMPLE_REG_P (result) = 1;
+      result = create_tmp_reg (TREE_TYPE (result_decl), NULL);
 
       /* ??? With complex control flow (usually involving abnormal edges),
      we can wind up warning about an uninitialized value for this.  Due
@@ -1284,6 +1340,8 @@
 
   t = built_in_decls[BUILT_IN_ALLOCA];
   t = build_call_expr (t, 1, DECL_SIZE_UNIT (decl));
+  /* The call has been built for a variable-sized object.  */
+  ALLOCA_FOR_VAR_P (t) = 1;
   t = fold_convert (ptr_type, t);
   t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
 
@@ -1325,28 +1383,28 @@
                    STACK_CHECK_MAX_VAR_SIZE) > 0))
     gimplify_vla_decl (decl, seq_p);
 
+      /* Some front ends do not explicitly declare all anonymous
+	 artificial variables.  We compensate here by declaring the
+	 variables, though it would be better if the front ends would
+	 explicitly declare them.  */
+      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
+	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
+	gimple_add_tmp_var (decl);
+
       if (init && init != error_mark_node)
-    {
-      if (!TREE_STATIC (decl))
-        {
-          DECL_INITIAL (decl) = NULL_TREE;
-          init = build2 (INIT_EXPR, void_type_node, decl, init);
-          gimplify_and_add (init, seq_p);
-          ggc_free (init);
-        }
-      else
-        /* We must still examine initializers for static variables
-           as they may contain a label address.  */
-        walk_tree (&init, force_labels_r, NULL, NULL);
-    }
-
-      /* Some front ends do not explicitly declare all anonymous
-     artificial variables.  We compensate here by declaring the
-     variables, though it would be better if the front ends would
-     explicitly declare them.  */
-      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
-      && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
-    gimple_add_tmp_var (decl);
+	{
+	  if (!TREE_STATIC (decl))
+	    {
+	      DECL_INITIAL (decl) = NULL_TREE;
+	      init = build2 (INIT_EXPR, void_type_node, decl, init);
+	      gimplify_and_add (init, seq_p);
+	      ggc_free (init);
+	    }
+	  else
+	    /* We must still examine initializers for static variables
+	       as they may contain a label address.  */
+	    walk_tree (&init, force_labels_r, NULL, NULL);
+	}
     }
 
   return GS_ALL_DONE;
@@ -1429,9 +1487,7 @@
 void
 sort_case_labels (VEC(tree,heap)* label_vec)
 {
-  size_t len = VEC_length (tree, label_vec);
-  qsort (VEC_address (tree, label_vec), len, sizeof (tree),
-         compare_case_labels);
+  VEC_qsort (tree, label_vec, compare_case_labels);
 }
 
 
@@ -1824,7 +1880,7 @@
       && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
       && decl_function_context (decl) == current_function_decl)
     {
-      gcc_assert (errorcount || sorrycount);
+      gcc_assert (seen_error ());
       return GS_ERROR;
     }
 
@@ -1840,29 +1896,29 @@
       /* For referenced nonlocal VLAs add a decl for debugging purposes
      to the current function.  */
       if (TREE_CODE (decl) == VAR_DECL
-      && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
-      && nonlocal_vlas != NULL
-      && TREE_CODE (value_expr) == INDIRECT_REF
-      && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
-      && decl_function_context (decl) != current_function_decl)
-    {
-      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
-      while (ctx && ctx->region_type == ORT_WORKSHARE)
-        ctx = ctx->outer_context;
-      if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
-        {
-          tree copy = copy_node (decl), block;
-
-          lang_hooks.dup_lang_specific_decl (copy);
-          SET_DECL_RTL (copy, NULL_RTX);
-          TREE_USED (copy) = 1;
-          block = DECL_INITIAL (current_function_decl);
-          TREE_CHAIN (copy) = BLOCK_VARS (block);
-          BLOCK_VARS (block) = copy;
-          SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
-          DECL_HAS_VALUE_EXPR_P (copy) = 1;
-        }
-    }
+	  && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
+	  && nonlocal_vlas != NULL
+	  && TREE_CODE (value_expr) == INDIRECT_REF
+	  && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
+	  && decl_function_context (decl) != current_function_decl)
+	{
+	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
+	  while (ctx && ctx->region_type == ORT_WORKSHARE)
+	    ctx = ctx->outer_context;
+	  if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
+	    {
+	      tree copy = copy_node (decl), block;
+
+	      lang_hooks.dup_lang_specific_decl (copy);
+	      SET_DECL_RTL (copy, 0);
+	      TREE_USED (copy) = 1;
+	      block = DECL_INITIAL (current_function_decl);
+	      DECL_CHAIN (copy) = BLOCK_VARS (block);
+	      BLOCK_VARS (block) = copy;
+	      SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
+	      DECL_HAS_VALUE_EXPR_P (copy) = 1;
+	    }
+	}
 
       *expr_p = unshare_expr (value_expr);
       return GS_OK;
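/* Illustration (not part of this changeset): a referenced nonlocal VLA of the
   kind the block above handles, written with the GNU C nested-function
   extension.  "buf" belongs to outer (), so when the nested inner () is
   gimplified, a debug-only copy of the declaration is chained into the
   current function's outermost BLOCK as shown above.  */

int
outer (int n)
{
  char buf[n];

  int inner (int i)             /* GNU C nested function */
  {
    return buf[i];              /* reference to the enclosing function's VLA */
  }

  buf[0] = 42;
  return inner (0);
}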
@@ -1900,9 +1956,10 @@
 {
   tree *p;
   VEC(tree,heap) *stack;
-  enum gimplify_status ret = GS_OK, tret;
+  enum gimplify_status ret = GS_ALL_DONE, tret;
   int i;
   location_t loc = EXPR_LOCATION (*expr_p);
+  tree expr = *expr_p;
 
   /* Create a stack of the subexpressions so later we can walk them in
      order from inner to outer.  */
@@ -2056,11 +2113,12 @@
   if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
     {
       canonicalize_component_ref (expr_p);
-      ret = MIN (ret, GS_OK);
     }
 
   VEC_free (tree, heap, stack);
 
+  gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
+
   return ret;
 }
 
@@ -2429,9 +2487,12 @@
   if (!want_value)
     {
       /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
-     have to do is replicate it as a GIMPLE_CALL tuple.  */
+	 have to do is replicate it as a GIMPLE_CALL tuple.  */
+      gimple_stmt_iterator gsi;
       call = gimple_build_call_from_tree (*expr_p);
       gimplify_seq_add_stmt (pre_p, call);
+      gsi = gsi_last (*pre_p);
+      fold_stmt (&gsi);
       *expr_p = NULL_TREE;
     }
 
@@ -2568,20 +2629,19 @@
      into
        if (a) if (b) then c.  */
       while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
-    {
-      /* Keep the original source location on the first 'if'.  */
-      location_t locus = EXPR_HAS_LOCATION (expr)
-                 ? EXPR_LOCATION (expr) : input_location;
-      TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
-      /* Set the source location of the && on the second 'if'.  */
-      if (EXPR_HAS_LOCATION (pred))
-        SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
-      then_ = shortcut_cond_expr (expr);
-      then_se = then_ && TREE_SIDE_EFFECTS (then_);
-      pred = TREE_OPERAND (pred, 0);
-      expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
-      SET_EXPR_LOCATION (expr, locus);
-    }
+	{
+	  /* Keep the original source location on the first 'if'.  */
+	  location_t locus = EXPR_LOC_OR_HERE (expr);
+	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
+	  /* Set the source location of the && on the second 'if'.  */
+	  if (EXPR_HAS_LOCATION (pred))
+	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
+	  then_ = shortcut_cond_expr (expr);
+	  then_se = then_ && TREE_SIDE_EFFECTS (then_);
+	  pred = TREE_OPERAND (pred, 0);
+	  expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
+	  SET_EXPR_LOCATION (expr, locus);
+	}
     }
 
   if (!then_se)
@@ -2591,20 +2651,19 @@
      into
        if (a); else if (b); else d.  */
       while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
-    {
-      /* Keep the original source location on the first 'if'.  */
-      location_t locus = EXPR_HAS_LOCATION (expr)
-                 ? EXPR_LOCATION (expr) : input_location;
-      TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
-      /* Set the source location of the || on the second 'if'.  */
-      if (EXPR_HAS_LOCATION (pred))
-        SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
-      else_ = shortcut_cond_expr (expr);
-      else_se = else_ && TREE_SIDE_EFFECTS (else_);
-      pred = TREE_OPERAND (pred, 0);
-      expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
-      SET_EXPR_LOCATION (expr, locus);
-    }
+	{
+	  /* Keep the original source location on the first 'if'.  */
+	  location_t locus = EXPR_LOC_OR_HERE (expr);
+	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
+	  /* Set the source location of the || on the second 'if'.  */
+	  if (EXPR_HAS_LOCATION (pred))
+	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
+	  else_ = shortcut_cond_expr (expr);
+	  else_se = else_ && TREE_SIDE_EFFECTS (else_);
+	  pred = TREE_OPERAND (pred, 0);
+	  expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
+	  SET_EXPR_LOCATION (expr, locus);
+	}
     }
 
   /* If we're done, great.  */
@@ -2658,8 +2717,7 @@
   /* If there was nothing else in our arms, just forward the label(s).  */
   if (!then_se && !else_se)
     return shortcut_cond_r (pred, true_label_p, false_label_p,
-                EXPR_HAS_LOCATION (expr)
-                ? EXPR_LOCATION (expr) : input_location);
+			    EXPR_LOC_OR_HERE (expr));
 
   /* If our last subexpression already has a terminal label, reuse it.  */
   if (else_se)
@@ -2691,8 +2749,7 @@
   jump_over_else = block_may_fallthru (then_);
 
   pred = shortcut_cond_r (pred, true_label_p, false_label_p,
-              EXPR_HAS_LOCATION (expr)
-              ? EXPR_LOCATION (expr) : input_location);
+			  EXPR_LOC_OR_HERE (expr));
 
   expr = NULL;
   append_to_statement_list (pred, &expr);
@@ -2732,6 +2789,36 @@
   tree type = TREE_TYPE (expr);
   location_t loc = EXPR_LOCATION (expr);
 
+  if (TREE_CODE (expr) == NE_EXPR
+      && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
+      && integer_zerop (TREE_OPERAND (expr, 1)))
+    {
+      tree call = TREE_OPERAND (expr, 0);
+      tree fn = get_callee_fndecl (call);
+
+      /* For __builtin_expect ((long) (x), y) recurse into x as well
+	 if x is truth_value_p.  */
+      if (fn
+	  && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
+	  && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
+	  && call_expr_nargs (call) == 2)
+	{
+	  tree arg = CALL_EXPR_ARG (call, 0);
+	  if (arg)
+	    {
+	      if (TREE_CODE (arg) == NOP_EXPR
+		  && TREE_TYPE (arg) == TREE_TYPE (call))
+		arg = TREE_OPERAND (arg, 0);
+	      if (truth_value_p (TREE_CODE (arg)))
+		{
+		  arg = gimple_boolify (arg);
+		  CALL_EXPR_ARG (call, 0)
+		    = fold_convert_loc (loc, TREE_TYPE (call), arg);
+		}
+	    }
+	}
+    }
+
   if (TREE_CODE (type) == BOOLEAN_TYPE)
     return expr;
 
@@ -2835,71 +2922,67 @@
 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
 {
   tree expr = *expr_p;
-  tree tmp, type, arm1, arm2;
+  tree type = TREE_TYPE (expr);
+  location_t loc = EXPR_LOCATION (expr);
+  tree tmp, arm1, arm2;
   enum gimplify_status ret;
   tree label_true, label_false, label_cont;
   bool have_then_clause_p, have_else_clause_p;
   gimple gimple_cond;
   enum tree_code pred_code;
   gimple_seq seq = NULL;
-  location_t loc = EXPR_LOCATION (*expr_p);
-
-  type = TREE_TYPE (expr);
 
   /* If this COND_EXPR has a value, copy the values into a temporary within
      the arms.  */
-  if (! VOID_TYPE_P (type))
-    {
+  if (!VOID_TYPE_P (type))
+    {
+      tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
       tree result;
 
-      /* If an rvalue is ok or we do not require an lvalue, avoid creating
-     an addressable temporary.  */
-      if (((fallback & fb_rvalue)
-       || !(fallback & fb_lvalue))
-      && !TREE_ADDRESSABLE (type))
-    {
-      if (gimplify_ctxp->allow_rhs_cond_expr
-          /* If either branch has side effects or could trap, it can't be
-         evaluated unconditionally.  */
-          && !TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1))
-          && !generic_expr_could_trap_p (TREE_OPERAND (*expr_p, 1))
-          && !TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 2))
-          && !generic_expr_could_trap_p (TREE_OPERAND (*expr_p, 2)))
-        return gimplify_pure_cond_expr (expr_p, pre_p);
-
-      result = tmp = create_tmp_var (TREE_TYPE (expr), "iftmp");
-      ret = GS_ALL_DONE;
-    }
+      /* If either an rvalue is ok or we do not require an lvalue, create the
+	 temporary.  But we cannot do that if the type is addressable.  */
+      if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
+	  && !TREE_ADDRESSABLE (type))
+	{
+	  if (gimplify_ctxp->allow_rhs_cond_expr
+	      /* If either branch has side effects or could trap, it can't be
+		 evaluated unconditionally.  */
+	      && !TREE_SIDE_EFFECTS (then_)
+	      && !generic_expr_could_trap_p (then_)
+	      && !TREE_SIDE_EFFECTS (else_)
+	      && !generic_expr_could_trap_p (else_))
+	    return gimplify_pure_cond_expr (expr_p, pre_p);
+
+	  tmp = create_tmp_var (type, "iftmp");
+	  result = tmp;
+	}
+
+      /* Otherwise, only create and copy references to the values.  */
       else
-    {
-      tree type = build_pointer_type (TREE_TYPE (expr));
-
-      if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
-        TREE_OPERAND (expr, 1) =
-          build_fold_addr_expr_loc (loc, TREE_OPERAND (expr, 1));
-
-      if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
-        TREE_OPERAND (expr, 2) =
-          build_fold_addr_expr_loc (loc, TREE_OPERAND (expr, 2));
-
-      tmp = create_tmp_var (type, "iftmp");
-
-      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (expr, 0),
-             TREE_OPERAND (expr, 1), TREE_OPERAND (expr, 2));
-
-      result = build_fold_indirect_ref_loc (loc, tmp);
-    }
-
-      /* Build the then clause, 't1 = a;'.  But don't build an assignment
-     if this branch is void; in C++ it can be, if it's a throw.  */
-      if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
-    TREE_OPERAND (expr, 1)
-      = build2 (MODIFY_EXPR, TREE_TYPE (tmp), tmp, TREE_OPERAND (expr, 1));
-
-      /* Build the else clause, 't1 = b;'.  */
-      if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
-    TREE_OPERAND (expr, 2)
-      = build2 (MODIFY_EXPR, TREE_TYPE (tmp), tmp, TREE_OPERAND (expr, 2));
+	{
+	  type = build_pointer_type (type);
+
+	  if (!VOID_TYPE_P (TREE_TYPE (then_)))
+	    then_ = build_fold_addr_expr_loc (loc, then_);
+
+	  if (!VOID_TYPE_P (TREE_TYPE (else_)))
+	    else_ = build_fold_addr_expr_loc (loc, else_);
+ 
+	  expr
+	    = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
+
+	  tmp = create_tmp_var (type, "iftmp");
+	  result = build_simple_mem_ref_loc (loc, tmp);
+	}
+
+      /* Build the new then clause, `tmp = then_;'.  But don't build the
+	 assignment if the value is void; in C++ it can be if it's a throw.  */
+      if (!VOID_TYPE_P (TREE_TYPE (then_)))
+	TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
+
+      /* Similarly, build the new else clause, `tmp = else_;'.  */
+      if (!VOID_TYPE_P (TREE_TYPE (else_)))
+	TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
 
       TREE_TYPE (expr) = void_type_node;
       recalculate_side_effects (expr);
@@ -3113,7 +3196,7 @@
       gimple_call_set_lhs (gs, t);
       gimplify_seq_add_stmt (seq_p, gs);
 
-      *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
+      *expr_p = build_simple_mem_ref (t);
       return GS_ALL_DONE;
     }
 
@@ -3197,13 +3280,16 @@
   /* If the constructor component is indirect, determine if we have a
      potential overlap with the lhs.  The only bits of information we
      have to go on at this point are addressability and alias sets.  */
-  if (TREE_CODE (t) == INDIRECT_REF
+  if ((INDIRECT_REF_P (t)
+       || TREE_CODE (t) == MEM_REF)
       && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
       && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
     return t;
 
   /* If the constructor component is a call, determine if it can hide a
-     potential overlap with the lhs through an INDIRECT_REF like above.  */
+     potential overlap with the lhs through an INDIRECT_REF like above.
+     ??? Ugh - this is completely broken.  In fact this whole analysis
+     doesn't look conservative.  */
   if (TREE_CODE (t) == CALL_EXPR)
     {
       tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
@@ -3252,8 +3338,8 @@
       constructor_elt *ce;
       VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p);
 
-      for (ix = 0; VEC_iterate (constructor_elt, v, ix, ce); ix++)
-    gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
+      FOR_EACH_VEC_ELT (constructor_elt, v, ix, ce)
+	gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
 
       return;
     }
@@ -3614,279 +3700,284 @@
     case QUAL_UNION_TYPE:
     case ARRAY_TYPE:
       {
-    struct gimplify_init_ctor_preeval_data preeval_data;
-    HOST_WIDE_INT num_type_elements, num_ctor_elements;
-    HOST_WIDE_INT num_nonzero_elements;
-    bool cleared, valid_const_initializer;
-
-    /* Aggregate types must lower constructors to initialization of
-       individual elements.  The exception is that a CONSTRUCTOR node
-       with no elements indicates zero-initialization of the whole.  */
-    if (VEC_empty (constructor_elt, elts))
-      {
-        if (notify_temp_creation)
-          return GS_OK;
-        break;
-      }
-
-    /* Fetch information about the constructor to direct later processing.
-       We might want to make static versions of it in various cases, and
-       can only do so if it is known to be a valid constant initializer.  */
-    valid_const_initializer
-      = categorize_ctor_elements (ctor, &num_nonzero_elements,
-                      &num_ctor_elements, &cleared);
-
-    /* If a const aggregate variable is being initialized, then it
-       should never be a lose to promote the variable to be static.  */
-    if (valid_const_initializer
-        && num_nonzero_elements > 1
-        && TREE_READONLY (object)
-        && TREE_CODE (object) == VAR_DECL
-        && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
-      {
-        if (notify_temp_creation)
-          return GS_ERROR;
-        DECL_INITIAL (object) = ctor;
-        TREE_STATIC (object) = 1;
-        if (!DECL_NAME (object))
-          DECL_NAME (object) = create_tmp_var_name ("C");
-        walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
-
-        /* ??? C++ doesn't automatically append a .<number> to the
-           assembler name, and even when it does, it looks at FE private
-           data structures to figure out what that number should be,
-           which are not set for this variable.  I suppose this is
-           important for local statics for inline functions, which aren't
-           "local" in the object file sense.  So in order to get a unique
-           TU-local symbol, we must invoke the lhd version now.  */
-        lhd_set_decl_assembler_name (object);
-
-        *expr_p = NULL_TREE;
-        break;
-      }
-
-    /* If there are "lots" of initialized elements, even discounting
-       those that are not address constants (and thus *must* be
-       computed at runtime), then partition the constructor into
-       constant and non-constant parts.  Block copy the constant
-       parts in, then generate code for the non-constant parts.  */
-    /* TODO.  There's code in cp/typeck.c to do this.  */
-
-    num_type_elements = count_type_elements (type, true);
-
-    /* If count_type_elements could not determine number of type elements
-       for a constant-sized object, assume clearing is needed.
-       Don't do this for variable-sized objects, as store_constructor
-       will ignore the clearing of variable-sized objects.  */
-    if (num_type_elements < 0 && int_size_in_bytes (type) >= 0)
-      cleared = true;
-    /* If there are "lots" of zeros, then block clear the object first.  */
-    else if (num_type_elements - num_nonzero_elements
-         > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
-         && num_nonzero_elements < num_type_elements/4)
-      cleared = true;
-    /* ??? This bit ought not be needed.  For any element not present
-       in the initializer, we should simply set them to zero.  Except
-       we'd need to *find* the elements that are not present, and that
-       requires trickery to avoid quadratic compile-time behavior in
-       large cases or excessive memory use in small cases.  */
-    else if (num_ctor_elements < num_type_elements)
-      cleared = true;
-
-    /* If there are "lots" of initialized elements, and all of them
-       are valid address constants, then the entire initializer can
-       be dropped to memory, and then memcpy'd out.  Don't do this
-       for sparse arrays, though, as it's more efficient to follow
-       the standard CONSTRUCTOR behavior of memset followed by
-       individual element initialization.  Also don't do this for small
-       all-zero initializers (which aren't big enough to merit
-       clearing), and don't try to make bitwise copies of
-       TREE_ADDRESSABLE types.  */
-    if (valid_const_initializer
-        && !(cleared || num_nonzero_elements == 0)
-        && !TREE_ADDRESSABLE (type))
-      {
-        HOST_WIDE_INT size = int_size_in_bytes (type);
-        unsigned int align;
-
-        /* ??? We can still get unbounded array types, at least
-           from the C++ front end.  This seems wrong, but attempt
-           to work around it for now.  */
-        if (size < 0)
-          {
-        size = int_size_in_bytes (TREE_TYPE (object));
-        if (size >= 0)
-          TREE_TYPE (ctor) = type = TREE_TYPE (object);
-          }
-
-        /* Find the maximum alignment we can assume for the object.  */
-        /* ??? Make use of DECL_OFFSET_ALIGN.  */
-        if (DECL_P (object))
-          align = DECL_ALIGN (object);
-        else
-          align = TYPE_ALIGN (type);
-
-        if (size > 0
-        && num_nonzero_elements > 1
-        && !can_move_by_pieces (size, align))
-          {
-        tree new_tree;
-
-        if (notify_temp_creation)
-          return GS_ERROR;
-
-        new_tree = create_tmp_var_raw (type, "C");
-
-        gimple_add_tmp_var (new_tree);
-        TREE_STATIC (new_tree) = 1;
-        TREE_READONLY (new_tree) = 1;
-        DECL_INITIAL (new_tree) = ctor;
-        if (align > DECL_ALIGN (new_tree))
-          {
-            DECL_ALIGN (new_tree) = align;
-            DECL_USER_ALIGN (new_tree) = 1;
-          }
-            walk_tree (&DECL_INITIAL (new_tree), force_labels_r, NULL, NULL);
-
-        TREE_OPERAND (*expr_p, 1) = new_tree;
-
-        /* This is no longer an assignment of a CONSTRUCTOR, but
-           we still may have processing to do on the LHS.  So
-           pretend we didn't do anything here to let that happen.  */
-        return GS_UNHANDLED;
-          }
-      }
-
-    if (notify_temp_creation)
-      return GS_OK;
-
-    /* If there are nonzero elements, pre-evaluate to capture elements
-       overlapping with the lhs into temporaries.  We must do this before
-       clearing to fetch the values before they are zeroed-out.  */
-    if (num_nonzero_elements > 0)
-      {
-        preeval_data.lhs_base_decl = get_base_address (object);
-        if (!DECL_P (preeval_data.lhs_base_decl))
-          preeval_data.lhs_base_decl = NULL;
-        preeval_data.lhs_alias_set = get_alias_set (object);
-
-        gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
-                    pre_p, post_p, &preeval_data);
-      }
-
-    if (cleared)
-      {
-        /* Zap the CONSTRUCTOR element list, which simplifies this case.
-           Note that we still have to gimplify, in order to handle the
-           case of variable sized types.  Avoid shared tree structures.  */
-        CONSTRUCTOR_ELTS (ctor) = NULL;
-        TREE_SIDE_EFFECTS (ctor) = 0;
-        object = unshare_expr (object);
-        gimplify_stmt (expr_p, pre_p);
-      }
-
-    /* If we have not block cleared the object, or if there are nonzero
-       elements in the constructor, add assignments to the individual
-       scalar fields of the object.  */
-    if (!cleared || num_nonzero_elements > 0)
-      gimplify_init_ctor_eval (object, elts, pre_p, cleared);
-
-    *expr_p = NULL_TREE;
+	struct gimplify_init_ctor_preeval_data preeval_data;
+	HOST_WIDE_INT num_type_elements, num_ctor_elements;
+	HOST_WIDE_INT num_nonzero_elements;
+	bool cleared, valid_const_initializer;
+
+	/* Aggregate types must lower constructors to initialization of
+	   individual elements.  The exception is that a CONSTRUCTOR node
+	   with no elements indicates zero-initialization of the whole.  */
+	if (VEC_empty (constructor_elt, elts))
+	  {
+	    if (notify_temp_creation)
+	      return GS_OK;
+	    break;
+	  }
+
+	/* Fetch information about the constructor to direct later processing.
+	   We might want to make static versions of it in various cases, and
+	   can only do so if it is known to be a valid constant initializer.  */
+	valid_const_initializer
+	  = categorize_ctor_elements (ctor, &num_nonzero_elements,
+				      &num_ctor_elements, &cleared);
+
+	/* If a const aggregate variable is being initialized, then it
+	   should never be a lose to promote the variable to be static.  */
+	if (valid_const_initializer
+	    && num_nonzero_elements > 1
+	    && TREE_READONLY (object)
+	    && TREE_CODE (object) == VAR_DECL
+	    && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
+	  {
+	    if (notify_temp_creation)
+	      return GS_ERROR;
+	    DECL_INITIAL (object) = ctor;
+	    TREE_STATIC (object) = 1;
+	    if (!DECL_NAME (object))
+	      DECL_NAME (object) = create_tmp_var_name ("C");
+	    walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
+
+	    /* ??? C++ doesn't automatically append a .<number> to the
+	       assembler name, and even when it does, it looks at FE private
+	       data structures to figure out what that number should be,
+	       which are not set for this variable.  I suppose this is
+	       important for local statics for inline functions, which aren't
+	       "local" in the object file sense.  So in order to get a unique
+	       TU-local symbol, we must invoke the lhd version now.  */
+	    lhd_set_decl_assembler_name (object);
+
+	    *expr_p = NULL_TREE;
+	    break;
+	  }
+
+	/* If there are "lots" of initialized elements, even discounting
+	   those that are not address constants (and thus *must* be
+	   computed at runtime), then partition the constructor into
+	   constant and non-constant parts.  Block copy the constant
+	   parts in, then generate code for the non-constant parts.  */
+	/* TODO.  There's code in cp/typeck.c to do this.  */
+
+	num_type_elements = count_type_elements (type, true);
+
+	/* If count_type_elements could not determine number of type elements
+	   for a constant-sized object, assume clearing is needed.
+	   Don't do this for variable-sized objects, as store_constructor
+	   will ignore the clearing of variable-sized objects.  */
+	if (num_type_elements < 0 && int_size_in_bytes (type) >= 0)
+	  cleared = true;
+	/* If there are "lots" of zeros, then block clear the object first.  */
+	else if (num_type_elements - num_nonzero_elements
+		 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
+		 && num_nonzero_elements < num_type_elements/4)
+	  cleared = true;
+	/* ??? This bit ought not be needed.  For any element not present
+	   in the initializer, we should simply set them to zero.  Except
+	   we'd need to *find* the elements that are not present, and that
+	   requires trickery to avoid quadratic compile-time behavior in
+	   large cases or excessive memory use in small cases.  */
+	else if (num_ctor_elements < num_type_elements)
+	  cleared = true;
+
+	/* If there are "lots" of initialized elements, and all of them
+	   are valid address constants, then the entire initializer can
+	   be dropped to memory, and then memcpy'd out.  Don't do this
+	   for sparse arrays, though, as it's more efficient to follow
+	   the standard CONSTRUCTOR behavior of memset followed by
+	   individual element initialization.  Also don't do this for small
+	   all-zero initializers (which aren't big enough to merit
+	   clearing), and don't try to make bitwise copies of
+	   TREE_ADDRESSABLE types.  */
+	if (valid_const_initializer
+	    && !(cleared || num_nonzero_elements == 0)
+	    && !TREE_ADDRESSABLE (type))
+	  {
+	    HOST_WIDE_INT size = int_size_in_bytes (type);
+	    unsigned int align;
+
+	    /* ??? We can still get unbounded array types, at least
+	       from the C++ front end.  This seems wrong, but attempt
+	       to work around it for now.  */
+	    if (size < 0)
+	      {
+		size = int_size_in_bytes (TREE_TYPE (object));
+		if (size >= 0)
+		  TREE_TYPE (ctor) = type = TREE_TYPE (object);
+	      }
+
+	    /* Find the maximum alignment we can assume for the object.  */
+	    /* ??? Make use of DECL_OFFSET_ALIGN.  */
+	    if (DECL_P (object))
+	      align = DECL_ALIGN (object);
+	    else
+	      align = TYPE_ALIGN (type);
+
+	    if (size > 0
+		&& num_nonzero_elements > 1
+		&& !can_move_by_pieces (size, align))
+	      {
+		if (notify_temp_creation)
+		  return GS_ERROR;
+
+		walk_tree (&ctor, force_labels_r, NULL, NULL);
+		ctor = tree_output_constant_def (ctor);
+		if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
+		  ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
+		TREE_OPERAND (*expr_p, 1) = ctor;
+
+		/* This is no longer an assignment of a CONSTRUCTOR, but
+		   we still may have processing to do on the LHS.  So
+		   pretend we didn't do anything here to let that happen.  */
+		return GS_UNHANDLED;
+	      }
+	  }
+
+	/* If the target is volatile, we have non-zero elements and more than
+	   one field to assign, initialize the target from a temporary.  */
+	if (TREE_THIS_VOLATILE (object)
+	    && !TREE_ADDRESSABLE (type)
+	    && num_nonzero_elements > 0
+	    && VEC_length (constructor_elt, elts) > 1)
+	  {
+	    tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type), NULL);
+	    TREE_OPERAND (*expr_p, 0) = temp;
+	    *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
+			      *expr_p,
+			      build2 (MODIFY_EXPR, void_type_node,
+				      object, temp));
+	    return GS_OK;
+	  }
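/* Illustration (not part of this changeset): an initialization of the shape
   the volatile check above is about.  With several nonzero fields, the
   aggregate is now built in an ordinary temporary and then copied to the
   volatile object with one assignment, rather than being cleared and filled
   field by field in place.  */

struct triple { int a; int b; int c; };

void
init_volatile (void)
{
  volatile struct triple t = { 1, 0, 2 };       /* built via a temporary */
  (void) t.a;
}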
+
+	if (notify_temp_creation)
+	  return GS_OK;
+
+	/* If there are nonzero elements and if needed, pre-evaluate to capture
+	   elements overlapping with the lhs into temporaries.  We must do this
+	   before clearing to fetch the values before they are zeroed-out.  */
+	if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
+	  {
+	    preeval_data.lhs_base_decl = get_base_address (object);
+	    if (!DECL_P (preeval_data.lhs_base_decl))
+	      preeval_data.lhs_base_decl = NULL;
+	    preeval_data.lhs_alias_set = get_alias_set (object);
+
+	    gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
+					pre_p, post_p, &preeval_data);
+	  }
+
+	if (cleared)
+	  {
+	    /* Zap the CONSTRUCTOR element list, which simplifies this case.
+	       Note that we still have to gimplify, in order to handle the
+	       case of variable sized types.  Avoid shared tree structures.  */
+	    CONSTRUCTOR_ELTS (ctor) = NULL;
+	    TREE_SIDE_EFFECTS (ctor) = 0;
+	    object = unshare_expr (object);
+	    gimplify_stmt (expr_p, pre_p);
+	  }
+
+	/* If we have not block cleared the object, or if there are nonzero
+	   elements in the constructor, add assignments to the individual
+	   scalar fields of the object.  */
+	if (!cleared || num_nonzero_elements > 0)
+	  gimplify_init_ctor_eval (object, elts, pre_p, cleared);
+
+	*expr_p = NULL_TREE;
       }
       break;
 
     case COMPLEX_TYPE:
       {
-    tree r, i;
-
-    if (notify_temp_creation)
-      return GS_OK;
-
-    /* Extract the real and imaginary parts out of the ctor.  */
-    gcc_assert (VEC_length (constructor_elt, elts) == 2);
-    r = VEC_index (constructor_elt, elts, 0)->value;
-    i = VEC_index (constructor_elt, elts, 1)->value;
-    if (r == NULL || i == NULL)
-      {
-        tree zero = fold_convert (TREE_TYPE (type), integer_zero_node);
-        if (r == NULL)
-          r = zero;
-        if (i == NULL)
-          i = zero;
-      }
-
-    /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
-       represent creation of a complex value.  */
-    if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
-      {
-        ctor = build_complex (type, r, i);
-        TREE_OPERAND (*expr_p, 1) = ctor;
-      }
-    else
-      {
-        ctor = build2 (COMPLEX_EXPR, type, r, i);
-        TREE_OPERAND (*expr_p, 1) = ctor;
-        ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
-                 pre_p,
-                 post_p,
-                 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
-                 fb_rvalue);
-      }
+	tree r, i;
+
+	if (notify_temp_creation)
+	  return GS_OK;
+
+	/* Extract the real and imaginary parts out of the ctor.  */
+	gcc_assert (VEC_length (constructor_elt, elts) == 2);
+	r = VEC_index (constructor_elt, elts, 0)->value;
+	i = VEC_index (constructor_elt, elts, 1)->value;
+	if (r == NULL || i == NULL)
+	  {
+	    tree zero = build_zero_cst (TREE_TYPE (type));
+	    if (r == NULL)
+	      r = zero;
+	    if (i == NULL)
+	      i = zero;
+	  }
+
+	/* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
+	   represent creation of a complex value.  */
+	if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
+	  {
+	    ctor = build_complex (type, r, i);
+	    TREE_OPERAND (*expr_p, 1) = ctor;
+	  }
+	else
+	  {
+	    ctor = build2 (COMPLEX_EXPR, type, r, i);
+	    TREE_OPERAND (*expr_p, 1) = ctor;
+	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
+				 pre_p,
+				 post_p,
+				 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
+				 fb_rvalue);
+	  }
       }
       break;
 
     case VECTOR_TYPE:
       {
-    unsigned HOST_WIDE_INT ix;
-    constructor_elt *ce;
-
-    if (notify_temp_creation)
-      return GS_OK;
-
-    /* Go ahead and simplify constant constructors to VECTOR_CST.  */
-    if (TREE_CONSTANT (ctor))
-      {
-        bool constant_p = true;
-        tree value;
-
-        /* Even when ctor is constant, it might contain non-*_CST
-           elements, such as addresses or trapping values like
-           1.0/0.0 - 1.0/0.0.  Such expressions don't belong
-           in VECTOR_CST nodes.  */
-        FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
-          if (!CONSTANT_CLASS_P (value))
-        {
-          constant_p = false;
-          break;
-        }
-
-        if (constant_p)
-          {
-        TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
-        break;
-          }
-
-        /* Don't reduce an initializer constant even if we can't
-           make a VECTOR_CST.  It won't do anything for us, and it'll
-           prevent us from representing it as a single constant.  */
-        if (initializer_constant_valid_p (ctor, type))
-          break;
-
-        TREE_CONSTANT (ctor) = 0;
-      }
-
-    /* Vector types use CONSTRUCTOR all the way through gimple
-      compilation as a general initializer.  */
-    for (ix = 0; VEC_iterate (constructor_elt, elts, ix, ce); ix++)
-      {
-        enum gimplify_status tret;
-        tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
-                  fb_rvalue);
-        if (tret == GS_ERROR)
-          ret = GS_ERROR;
-      }
-    if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
-      TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
+	unsigned HOST_WIDE_INT ix;
+	constructor_elt *ce;
+
+	if (notify_temp_creation)
+	  return GS_OK;
+
+	/* Go ahead and simplify constant constructors to VECTOR_CST.  */
+	if (TREE_CONSTANT (ctor))
+	  {
+	    bool constant_p = true;
+	    tree value;
+
+	    /* Even when ctor is constant, it might contain non-*_CST
+	       elements, such as addresses or trapping values like
+	       1.0/0.0 - 1.0/0.0.  Such expressions don't belong
+	       in VECTOR_CST nodes.  */
+	    FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
+	      if (!CONSTANT_CLASS_P (value))
+		{
+		  constant_p = false;
+		  break;
+		}
+
+	    if (constant_p)
+	      {
+		TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
+		break;
+	      }
+
+	    /* Don't reduce an initializer constant even if we can't
+	       make a VECTOR_CST.  It won't do anything for us, and it'll
+	       prevent us from representing it as a single constant.  */
+	    if (initializer_constant_valid_p (ctor, type))
+	      break;
+
+	    TREE_CONSTANT (ctor) = 0;
+	  }
+
+	/* Vector types use CONSTRUCTOR all the way through gimple
+	  compilation as a general initializer.  */
+	FOR_EACH_VEC_ELT (constructor_elt, elts, ix, ce)
+	  {
+	    enum gimplify_status tret;
+	    tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
+				  fb_rvalue);
+	    if (tret == GS_ERROR)
+	      ret = GS_ERROR;
+	  }
+	if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
+	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
       }
       break;
 
@@ -3928,11 +4019,11 @@
 tree
 gimple_fold_indirect_ref (tree t)
 {
-  tree type = TREE_TYPE (TREE_TYPE (t));
+  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
   tree sub = t;
   tree subtype;
 
-  STRIP_USELESS_TYPE_CONVERSION (sub);
+  STRIP_NOPS (sub);
   subtype = TREE_TYPE (sub);
   if (!POINTER_TYPE_P (subtype))
     return NULL_TREE;
@@ -3947,18 +4038,81 @@
 
       /* *(foo *)&fooarray => fooarray[0] */
       if (TREE_CODE (optype) == ARRAY_TYPE
-      && useless_type_conversion_p (type, TREE_TYPE (optype)))
+	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
+	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
-         return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
+	 if (TREE_CODE (min_val) == INTEGER_CST)
+	   return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
        }
+      /* *(foo *)&complexfoo => __real__ complexfoo */
+      else if (TREE_CODE (optype) == COMPLEX_TYPE
+               && useless_type_conversion_p (type, TREE_TYPE (optype)))
+        return fold_build1 (REALPART_EXPR, type, op);
+      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
+      else if (TREE_CODE (optype) == VECTOR_TYPE
+               && useless_type_conversion_p (type, TREE_TYPE (optype)))
+        {
+          tree part_width = TYPE_SIZE (type);
+          tree index = bitsize_int (0);
+          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
+        }
+    }
+
+  /* *(p + CST) -> ...  */
+  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
+      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
+    {
+      tree addr = TREE_OPERAND (sub, 0);
+      tree off = TREE_OPERAND (sub, 1);
+      tree addrtype;
+
+      STRIP_NOPS (addr);
+      addrtype = TREE_TYPE (addr);
+
+      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
+      if (TREE_CODE (addr) == ADDR_EXPR
+	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
+	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
+	{
+          HOST_WIDE_INT offset = tree_low_cst (off, 0);
+          tree part_width = TYPE_SIZE (type);
+          unsigned HOST_WIDE_INT part_widthi
+            = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
+          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
+          tree index = bitsize_int (indexi);
+          if (offset / part_widthi
+              <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
+            return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
+                                part_width, index);
+	}
+
+      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
+      if (TREE_CODE (addr) == ADDR_EXPR
+	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
+	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
+        {
+          tree size = TYPE_SIZE_UNIT (type);
+          if (tree_int_cst_equal (size, off))
+            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
+        }
+
+      /* *(p + CST) -> MEM_REF <p, CST>.  */
+      if (TREE_CODE (addr) != ADDR_EXPR
+	  || DECL_P (TREE_OPERAND (addr, 0)))
+	return fold_build2 (MEM_REF, type,
+			    addr,
+			    build_int_cst_wide (ptype,
+						TREE_INT_CST_LOW (off),
+						TREE_INT_CST_HIGH (off)));
     }
 
   /* *(foo *)fooarrptr => (*fooarrptr)[0] */
   if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
+      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
       && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
     {
       tree type_domain;
@@ -3970,7 +4124,8 @@
       type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
       if (type_domain && TYPE_MIN_VALUE (type_domain))
         min_val = TYPE_MIN_VALUE (type_domain);
-      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
+      if (TREE_CODE (min_val) == INTEGER_CST)
+	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
     }
 
   return NULL_TREE;
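
To illustrate (a hypothetical translation unit, assuming 32-bit float and a
16-byte vector; none of these declarations come from the patch), the folds
above rewrite casted dereferences such as:

int arr[4];
_Complex float c;
typedef float v4sf __attribute__ ((vector_size (16)));
v4sf v;

int   f1 (void) { return *(int *) &arr; }      /* => arr[0]                    */
float f2 (void) { return *(float *) &c; }      /* => __real__ c                */
float f3 (void) { return ((float *) &c)[1]; }  /* => __imag__ c                */
float f4 (void) { return ((float *) &v)[2]; }  /* => BIT_FIELD_REF <v, 32, 64> */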
@@ -3998,241 +4153,268 @@
               gimple_seq *pre_p, gimple_seq *post_p,
               bool want_value)
 {
-  enum gimplify_status ret = GS_OK;
-
-  while (ret != GS_UNHANDLED)
-    switch (TREE_CODE (*from_p))
-      {
-      case VAR_DECL:
-    /* If we're assigning from a read-only variable initialized with
-       a constructor, do the direct assignment from the constructor,
-       but only if neither source nor target are volatile since this
-       latter assignment might end up being done on a per-field basis.  */
-    if (DECL_INITIAL (*from_p)
-        && TREE_READONLY (*from_p)
-        && !TREE_THIS_VOLATILE (*from_p)
-        && !TREE_THIS_VOLATILE (*to_p)
-        && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
-      {
-        tree old_from = *from_p;
-
-        /* Move the constructor into the RHS.  */
-        *from_p = unshare_expr (DECL_INITIAL (*from_p));
-
-        /* Let's see if gimplify_init_constructor will need to put
-           it in memory.  If so, revert the change.  */
-        ret = gimplify_init_constructor (expr_p, NULL, NULL, false, true);
-        if (ret == GS_ERROR)
-          {
-        *from_p = old_from;
-        /* Fall through.  */
-          }
-        else
-          {
-        ret = GS_OK;
-        break;
-          }
-      }
-    ret = GS_UNHANDLED;
-    break;
-      case INDIRECT_REF:
-    {
-      /* If we have code like
-
-            *(const A*)(A*)&x
-
-         where the type of "x" is a (possibly cv-qualified variant
-         of "A"), treat the entire expression as identical to "x".
-         This kind of code arises in C++ when an object is bound
-         to a const reference, and if "x" is a TARGET_EXPR we want
-         to take advantage of the optimization below.  */
-      tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
-      if (t)
-        {
-          *from_p = t;
-          ret = GS_OK;
-        }
-      else
-        ret = GS_UNHANDLED;
-      break;
-    }
-
-      case TARGET_EXPR:
-    {
-      /* If we are initializing something from a TARGET_EXPR, strip the
-         TARGET_EXPR and initialize it directly, if possible.  This can't
-         be done if the initializer is void, since that implies that the
-         temporary is set in some non-trivial way.
-
-         ??? What about code that pulls out the temp and uses it
-         elsewhere? I think that such code never uses the TARGET_EXPR as
-         an initializer.  If I'm wrong, we'll die because the temp won't
-         have any RTL.  In that case, I guess we'll need to replace
-         references somehow.  */
-      tree init = TARGET_EXPR_INITIAL (*from_p);
-
-      if (init
-          && !VOID_TYPE_P (TREE_TYPE (init)))
-        {
-          *from_p = init;
-          ret = GS_OK;
-        }
-      else
-        ret = GS_UNHANDLED;
-    }
-    break;
-
-      case COMPOUND_EXPR:
-    /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
-       caught.  */
-    gimplify_compound_expr (from_p, pre_p, true);
-    ret = GS_OK;
-    break;
-
-      case CONSTRUCTOR:
-    /* If we're initializing from a CONSTRUCTOR, break this into
-       individual MODIFY_EXPRs.  */
-    return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
-                      false);
-
-      case COND_EXPR:
-    /* If we're assigning to a non-register type, push the assignment
-       down into the branches.  This is mandatory for ADDRESSABLE types,
-       since we cannot generate temporaries for such, but it saves a
-       copy in other cases as well.  */
-    if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
-      {
-        /* This code should mirror the code in gimplify_cond_expr. */
-        enum tree_code code = TREE_CODE (*expr_p);
-        tree cond = *from_p;
-        tree result = *to_p;
-
-        ret = gimplify_expr (&result, pre_p, post_p,
-                 is_gimple_lvalue, fb_lvalue);
-        if (ret != GS_ERROR)
-          ret = GS_OK;
-
-        if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
-          TREE_OPERAND (cond, 1)
-        = build2 (code, void_type_node, result,
-              TREE_OPERAND (cond, 1));
-        if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
-          TREE_OPERAND (cond, 2)
-        = build2 (code, void_type_node, unshare_expr (result),
-              TREE_OPERAND (cond, 2));
-
-        TREE_TYPE (cond) = void_type_node;
-        recalculate_side_effects (cond);
-
-        if (want_value)
-          {
-        gimplify_and_add (cond, pre_p);
-        *expr_p = unshare_expr (result);
-          }
-        else
-          *expr_p = cond;
-        return ret;
-      }
-    else
-      ret = GS_UNHANDLED;
-    break;
-
-      case CALL_EXPR:
-    /* For calls that return in memory, give *to_p as the CALL_EXPR's
-       return slot so that we don't generate a temporary.  */
-    if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
-        && aggregate_value_p (*from_p, *from_p))
-      {
-        bool use_target;
-
-        if (!(rhs_predicate_for (*to_p))(*from_p))
-          /* If we need a temporary, *to_p isn't accurate.  */
-          use_target = false;
-        else if (TREE_CODE (*to_p) == RESULT_DECL
-             && DECL_NAME (*to_p) == NULL_TREE
-             && needs_to_live_in_memory (*to_p))
-          /* It's OK to use the return slot directly unless it's an NRV. */
-          use_target = true;
-        else if (is_gimple_reg_type (TREE_TYPE (*to_p))
-             || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
-          /* Don't force regs into memory.  */
-          use_target = false;
-        else if (TREE_CODE (*expr_p) == INIT_EXPR)
-          /* It's OK to use the target directly if it's being
-         initialized. */
-          use_target = true;
-        else if (!is_gimple_non_addressable (*to_p))
-          /* Don't use the original target if it's already addressable;
-         if its address escapes, and the called function uses the
-         NRV optimization, a conforming program could see *to_p
-         change before the called function returns; see c++/19317.
-         When optimizing, the return_slot pass marks more functions
-         as safe after we have escape info.  */
-          use_target = false;
-        else
-          use_target = true;
-
-        if (use_target)
-          {
-        CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
-        mark_addressable (*to_p);
-          }
-      }
-
-    ret = GS_UNHANDLED;
-    break;
-
-    /* If we're initializing from a container, push the initialization
-       inside it.  */
-      case CLEANUP_POINT_EXPR:
-      case BIND_EXPR:
-      case STATEMENT_LIST:
-    {
-      tree wrap = *from_p;
-      tree t;
-
-      ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
-                   fb_lvalue);
-      if (ret != GS_ERROR)
-        ret = GS_OK;
-
-      t = voidify_wrapper_expr (wrap, *expr_p);
-      gcc_assert (t == *expr_p);
-
-      if (want_value)
-        {
-          gimplify_and_add (wrap, pre_p);
-          *expr_p = unshare_expr (*to_p);
-        }
-      else
-        *expr_p = wrap;
-      return GS_OK;
-    }
-
-      case COMPOUND_LITERAL_EXPR:
-    {
-      tree complit = TREE_OPERAND (*expr_p, 1);
-      tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
-      tree decl = DECL_EXPR_DECL (decl_s);
-      tree init = DECL_INITIAL (decl);
-
-      /* struct T x = (struct T) { 0, 1, 2 } can be optimized
-         into struct T x = { 0, 1, 2 } if the address of the
-         compound literal has never been taken.  */
-      if (!TREE_ADDRESSABLE (complit)
-          && !TREE_ADDRESSABLE (decl)
-          && init)
-        {
-          *expr_p = copy_node (*expr_p);
-          TREE_OPERAND (*expr_p, 1) = init;
-          return GS_OK;
-        }
-    }
-
-      default:
-    ret = GS_UNHANDLED;
-    break;
-      }
+  enum gimplify_status ret = GS_UNHANDLED;
+  bool changed;
+
+  do
+    {
+      changed = false;
+      switch (TREE_CODE (*from_p))
+	{
+	case VAR_DECL:
+	  /* If we're assigning from a read-only variable initialized with
+	     a constructor, do the direct assignment from the constructor,
+	     but only if neither source nor target are volatile since this
+	     latter assignment might end up being done on a per-field basis.  */
+	  if (DECL_INITIAL (*from_p)
+	      && TREE_READONLY (*from_p)
+	      && !TREE_THIS_VOLATILE (*from_p)
+	      && !TREE_THIS_VOLATILE (*to_p)
+	      && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
+	    {
+	      tree old_from = *from_p;
+	      enum gimplify_status subret;
+
+	      /* Move the constructor into the RHS.  */
+	      *from_p = unshare_expr (DECL_INITIAL (*from_p));
+
+	      /* Let's see if gimplify_init_constructor will need to put
+		 it in memory.  */
+	      subret = gimplify_init_constructor (expr_p, NULL, NULL,
+						  false, true);
+	      if (subret == GS_ERROR)
+		{
+		  /* If so, revert the change.  */
+		  *from_p = old_from;
+		}
+	      else
+		{
+		  ret = GS_OK;
+		  changed = true;
+		}
+	    }
+	  break;
+	case INDIRECT_REF:
+	  {
+	    /* If we have code like
+
+	     *(const A*)(A*)&x
+
+	     where the type of "x" is a (possibly cv-qualified variant
+	     of "A"), treat the entire expression as identical to "x".
+	     This kind of code arises in C++ when an object is bound
+	     to a const reference, and if "x" is a TARGET_EXPR we want
+	     to take advantage of the optimization below.  */
+	    bool volatile_p = TREE_THIS_VOLATILE (*from_p);
+	    tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
+	    if (t)
+	      {
+		if (TREE_THIS_VOLATILE (t) != volatile_p)
+		  {
+		    if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
+		      t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
+						    build_fold_addr_expr (t));
+		    if (REFERENCE_CLASS_P (t))
+		      TREE_THIS_VOLATILE (t) = volatile_p;
+		  }
+		*from_p = t;
+		ret = GS_OK;
+		changed = true;
+	      }
+	    break;
+	  }
+
+	case TARGET_EXPR:
+	  {
+	    /* If we are initializing something from a TARGET_EXPR, strip the
+	       TARGET_EXPR and initialize it directly, if possible.  This can't
+	       be done if the initializer is void, since that implies that the
+	       temporary is set in some non-trivial way.
+
+	       ??? What about code that pulls out the temp and uses it
+	       elsewhere? I think that such code never uses the TARGET_EXPR as
+	       an initializer.  If I'm wrong, we'll die because the temp won't
+	       have any RTL.  In that case, I guess we'll need to replace
+	       references somehow.  */
+	    tree init = TARGET_EXPR_INITIAL (*from_p);
+
+	    if (init
+		&& !VOID_TYPE_P (TREE_TYPE (init)))
+	      {
+		*from_p = init;
+		ret = GS_OK;
+		changed = true;
+	      }
+	  }
+	  break;
+
+	case COMPOUND_EXPR:
+	  /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
+	     caught.  */
+	  gimplify_compound_expr (from_p, pre_p, true);
+	  ret = GS_OK;
+	  changed = true;
+	  break;
+
+	case CONSTRUCTOR:
+	  /* If we already made some changes, let the front end have a
+	     crack at this before we break it down.  */
+	  if (ret != GS_UNHANDLED)
+	    break;
+	  /* If we're initializing from a CONSTRUCTOR, break this into
+	     individual MODIFY_EXPRs.  */
+	  return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
+					    false);
+
+	case COND_EXPR:
+	  /* If we're assigning to a non-register type, push the assignment
+	     down into the branches.  This is mandatory for ADDRESSABLE types,
+	     since we cannot generate temporaries for such, but it saves a
+	     copy in other cases as well.  */
+	  if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
+	    {
+	      /* This code should mirror the code in gimplify_cond_expr. */
+	      enum tree_code code = TREE_CODE (*expr_p);
+	      tree cond = *from_p;
+	      tree result = *to_p;
+
+	      ret = gimplify_expr (&result, pre_p, post_p,
+				   is_gimple_lvalue, fb_lvalue);
+	      if (ret != GS_ERROR)
+		ret = GS_OK;
+
+	      if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
+		TREE_OPERAND (cond, 1)
+		  = build2 (code, void_type_node, result,
+			    TREE_OPERAND (cond, 1));
+	      if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
+		TREE_OPERAND (cond, 2)
+		  = build2 (code, void_type_node, unshare_expr (result),
+			    TREE_OPERAND (cond, 2));
+
+	      TREE_TYPE (cond) = void_type_node;
+	      recalculate_side_effects (cond);
+
+	      if (want_value)
+		{
+		  gimplify_and_add (cond, pre_p);
+		  *expr_p = unshare_expr (result);
+		}
+	      else
+		*expr_p = cond;
+	      return ret;
+	    }
+	  break;
+
+	case CALL_EXPR:
+	  /* For calls that return in memory, give *to_p as the CALL_EXPR's
+	     return slot so that we don't generate a temporary.  */
+	  if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
+	      && aggregate_value_p (*from_p, *from_p))
+	    {
+	      bool use_target;
+
+	      if (!(rhs_predicate_for (*to_p))(*from_p))
+		/* If we need a temporary, *to_p isn't accurate.  */
+		use_target = false;
+	      else if (TREE_CODE (*to_p) == RESULT_DECL
+		       && DECL_NAME (*to_p) == NULL_TREE
+		       && needs_to_live_in_memory (*to_p))
+		/* It's OK to use the return slot directly unless it's an NRV. */
+		use_target = true;
+	      else if (is_gimple_reg_type (TREE_TYPE (*to_p))
+		       || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
+		/* Don't force regs into memory.  */
+		use_target = false;
+	      else if (TREE_CODE (*expr_p) == INIT_EXPR)
+		/* It's OK to use the target directly if it's being
+		   initialized. */
+		use_target = true;
+	      else if (!is_gimple_non_addressable (*to_p))
+		/* Don't use the original target if it's already addressable;
+		   if its address escapes, and the called function uses the
+		   NRV optimization, a conforming program could see *to_p
+		   change before the called function returns; see c++/19317.
+		   When optimizing, the return_slot pass marks more functions
+		   as safe after we have escape info.  */
+		use_target = false;
+	      else
+		use_target = true;
+
+	      if (use_target)
+		{
+		  CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
+		  mark_addressable (*to_p);
+		}
+	    }
+	  break;
+
+	case WITH_SIZE_EXPR:
+	  /* Likewise for calls that return an aggregate of non-constant size,
+	     since we would not be able to generate a temporary at all.  */
+	  if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
+	    {
+	      *from_p = TREE_OPERAND (*from_p, 0);
+	      /* We don't change ret in this case because the
+		 WITH_SIZE_EXPR might have been added in
+		 gimplify_modify_expr, so returning GS_OK would lead to an
+		 infinite loop.  */
+	      changed = true;
+	    }
+	  break;
+
+	  /* If we're initializing from a container, push the initialization
+	     inside it.  */
+	case CLEANUP_POINT_EXPR:
+	case BIND_EXPR:
+	case STATEMENT_LIST:
+	  {
+	    tree wrap = *from_p;
+	    tree t;
+
+	    ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
+				 fb_lvalue);
+	    if (ret != GS_ERROR)
+	      ret = GS_OK;
+
+	    t = voidify_wrapper_expr (wrap, *expr_p);
+	    gcc_assert (t == *expr_p);
+
+	    if (want_value)
+	      {
+		gimplify_and_add (wrap, pre_p);
+		*expr_p = unshare_expr (*to_p);
+	      }
+	    else
+	      *expr_p = wrap;
+	    return GS_OK;
+	  }
+
+	case COMPOUND_LITERAL_EXPR:
+	  {
+	    tree complit = TREE_OPERAND (*expr_p, 1);
+	    tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
+	    tree decl = DECL_EXPR_DECL (decl_s);
+	    tree init = DECL_INITIAL (decl);
+
+	    /* struct T x = (struct T) { 0, 1, 2 } can be optimized
+	       into struct T x = { 0, 1, 2 } if the address of the
+	       compound literal has never been taken.  */
+	    if (!TREE_ADDRESSABLE (complit)
+		&& !TREE_ADDRESSABLE (decl)
+		&& init)
+	      {
+		*expr_p = copy_node (*expr_p);
+		TREE_OPERAND (*expr_p, 1) = init;
+		return GS_OK;
+	      }
+	  }
+
+	default:
+	  break;
+	}
+    }
+  while (changed);
 
   return ret;
 }
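
A small, invented C example of the RHS rewrites handled above (what actually
happens still depends on gimplify_init_constructor, so this is only a sketch):

struct T { int a, b, c; };
static const struct T ro = { 4, 5, 6 };

struct T
rhs_rewrites (int flag)
{
  struct T x = (struct T) { 0, 1, 2 };  /* COMPOUND_LITERAL_EXPR whose address
                                           is never taken: rewritten into
                                           struct T x = { 0, 1, 2 }  */
  struct T y = ro;                      /* read-only VAR_DECL with a
                                           CONSTRUCTOR DECL_INITIAL: candidate
                                           for initializing straight from the
                                           constructor (reverted if it must
                                           go to memory)  */
  return flag ? x : y;                  /* COND_EXPR of non-register type: the
                                           assignment to the return value is
                                           pushed into both branches  */
}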
@@ -4402,7 +4584,7 @@
   /* Try to alleviate the effects of the gimplification creating artificial
      temporaries (see for example is_gimple_reg_rhs) on the debug info.  */
   if (!gimplify_ctxp->into_ssa
-      && DECL_P (*from_p)
+      && TREE_CODE (*from_p) == VAR_DECL
       && DECL_IGNORED_P (*from_p)
       && DECL_P (*to_p)
       && !DECL_IGNORED_P (*to_p))
@@ -4414,12 +4596,16 @@
       SET_DECL_DEBUG_EXPR (*from_p, *to_p);
    }
 
+  if (want_value && TREE_THIS_VOLATILE (*to_p))
+    *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
+
   if (TREE_CODE (*from_p) == CALL_EXPR)
     {
       /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
      instead of a GIMPLE_ASSIGN.  */
       assign = gimple_build_call_from_tree (*from_p);
-      gimple_call_set_lhs (assign, *to_p);
+      if (!gimple_call_noreturn_p (assign))
+	gimple_call_set_lhs (assign, *to_p);
     }
   else
     {
@@ -4440,7 +4626,7 @@
 
   if (want_value)
     {
-      *expr_p = unshare_expr (*to_p);
+      *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
       return GS_OK;
     }
   else
@@ -4455,10 +4641,10 @@
 static enum gimplify_status
 gimplify_variable_sized_compare (tree *expr_p)
 {
+  location_t loc = EXPR_LOCATION (*expr_p);
   tree op0 = TREE_OPERAND (*expr_p, 0);
   tree op1 = TREE_OPERAND (*expr_p, 1);
-  tree t, arg, dest, src;
-  location_t loc = EXPR_LOCATION (*expr_p);
+  tree t, arg, dest, src, expr;
 
   arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
   arg = unshare_expr (arg);
@@ -4467,8 +4653,11 @@
   dest = build_fold_addr_expr_loc (loc, op0);
   t = implicit_built_in_decls[BUILT_IN_MEMCMP];
   t = build_call_expr_loc (loc, t, 3, dest, src, arg);
-  *expr_p
+
+  expr
     = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
+  SET_EXPR_LOCATION (expr, loc);
+  *expr_p = expr;
 
   return GS_OK;
 }
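
Equality tests on variable-sized objects cannot stay as a plain comparison in
GIMPLE; a C-level rendering of the lowering built above (hypothetical names,
not generated output) is a memcmp of the two addresses tested against zero:

#include <string.h>

int
lowered_compare (const void *op0, const void *op1, size_t size)
{
  return memcmp (op0, op1, size) == 0;   /* BUILT_IN_MEMCMP call compared
                                            against integer_zero_node  */
}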
@@ -4622,7 +4811,6 @@
   switch (TREE_CODE (op0))
     {
     case INDIRECT_REF:
-    case MISALIGNED_INDIRECT_REF:
     do_indirect_ref:
       /* Check if we are dealing with an expression of the form '&*ptr'.
      While the front end folds away '&*ptr' into 'ptr', these
@@ -4891,18 +5079,25 @@
 
       /* If the operand is a memory input, it should be an lvalue.  */
       if (!allows_reg && allows_mem)
-    {
-      tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
-                is_gimple_lvalue, fb_lvalue | fb_mayfail);
-      mark_addressable (TREE_VALUE (link));
-      if (tret == GS_ERROR)
-        {
-          if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
-            input_location = EXPR_LOCATION (TREE_VALUE (link));
-          error ("memory input %d is not directly addressable", i);
-          ret = tret;
-        }
-    }
+	{
+	  tree inputv = TREE_VALUE (link);
+	  STRIP_NOPS (inputv);
+	  if (TREE_CODE (inputv) == PREDECREMENT_EXPR
+	      || TREE_CODE (inputv) == PREINCREMENT_EXPR
+	      || TREE_CODE (inputv) == POSTDECREMENT_EXPR
+	      || TREE_CODE (inputv) == POSTINCREMENT_EXPR)
+	    TREE_VALUE (link) = error_mark_node;
+	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
+				is_gimple_lvalue, fb_lvalue | fb_mayfail);
+	  mark_addressable (TREE_VALUE (link));
+	  if (tret == GS_ERROR)
+	    {
+	      if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
+	        input_location = EXPR_LOCATION (TREE_VALUE (link));
+	      error ("memory input %d is not directly addressable", i);
+	      ret = tret;
+	    }
+	}
       else
     {
       tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
@@ -4981,25 +5176,26 @@
         {
               /* Note that gsi_insert_seq_before and gsi_remove do not
                  scan operands, unlike some other sequence mutators.  */
-          gsi_insert_seq_before_without_update (&iter,
-                                                    gimple_wce_cleanup (wce),
-                                                    GSI_SAME_STMT);
-          gsi_remove (&iter, true);
-          break;
-        }
-      else
-        {
-          gimple gtry;
-          gimple_seq seq;
-          enum gimple_try_flags kind;
-
-          if (gimple_wce_cleanup_eh_only (wce))
-        kind = GIMPLE_TRY_CATCH;
-          else
-        kind = GIMPLE_TRY_FINALLY;
-          seq = gsi_split_seq_after (iter);
-
-          gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
+	      if (!gimple_wce_cleanup_eh_only (wce))
+		gsi_insert_seq_before_without_update (&iter,
+						      gimple_wce_cleanup (wce),
+						      GSI_SAME_STMT);
+	      gsi_remove (&iter, true);
+	      break;
+	    }
+	  else
+	    {
+	      gimple gtry;
+	      gimple_seq seq;
+	      enum gimple_try_flags kind;
+
+	      if (gimple_wce_cleanup_eh_only (wce))
+		kind = GIMPLE_TRY_CATCH;
+	      else
+		kind = GIMPLE_TRY_FINALLY;
+	      seq = gsi_split_seq_after (iter);
+
+	      gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
               /* Do not use gsi_replace here, as it may scan operands.
                  We want to do a simple structural modification only.  */
               *gsi_stmt_ptr (&iter) = gtry;
@@ -5035,7 +5231,7 @@
 
   /* Errors can result in improperly nested cleanups.  Which results in
      confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
-  if (errorcount || sorrycount)
+  if (seen_error ())
     return;
 
   if (gimple_conditional_context ())
@@ -5231,13 +5427,13 @@
     case UNION_TYPE:
     case QUAL_UNION_TYPE:
       {
-    tree field;
-    for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
-      if (TREE_CODE (field) == FIELD_DECL)
-        {
-          omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
-          omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
-        }
+	tree field;
+	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
+	  if (TREE_CODE (field) == FIELD_DECL)
+	    {
+	      omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
+	      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
+	    }
       }
       break;
 
@@ -5322,14 +5518,15 @@
         | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
 
       /* We're going to make use of the TYPE_SIZE_UNIT at least in the
-     alloca statement we generate for the variable, so make sure it
-     is available.  This isn't automatically needed for the SHARED
-     case, since we won't be allocating local storage then.
-     For local variables TYPE_SIZE_UNIT might not be gimplified yet,
-     in this case omp_notice_variable will be called later
-     on when it is gimplified.  */
-      else if (! (flags & GOVD_LOCAL))
-    omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
+	 alloca statement we generate for the variable, so make sure it
+	 is available.  This isn't automatically needed for the SHARED
+	 case, since we won't be allocating local storage then.
+	 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
+	 in this case omp_notice_variable will be called later
+	 on when it is gimplified.  */
+      else if (! (flags & GOVD_LOCAL)
+	       && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
+	omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
     }
   else if (lang_hooks.decls.omp_privatize_by_reference (decl))
     {
@@ -5349,6 +5546,31 @@
   splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
 }
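
An invented example of why TYPE_SIZE_UNIT is noticed here: privatizing a
variable-sized object means each thread allocates its own copy, so the size
expression must be usable inside the region as well.

void
use_vla (int n)
{
  int a[n];
#pragma omp parallel private(a)
  {
    a[0] = 0;   /* the private copy is allocated with the runtime size of
                   int[n]; that size expression is what omp_notice_variable
                   is called on above  */
  }
}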
 
+/* Notice a threadprivate variable DECL used in OpenMP context CTX.
+   This just prints out diagnostics about threadprivate variable uses
+   in untied tasks.  If DECL2 is non-NULL, prevent this warning
+   on that variable.  */
+
+static bool
+omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
+				   tree decl2)
+{
+  splay_tree_node n;
+
+  if (ctx->region_type != ORT_UNTIED_TASK)
+    return false;
+  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
+  if (n == NULL)
+    {
+      error ("threadprivate variable %qE used in untied task", DECL_NAME (decl));
+      error_at (ctx->location, "enclosing task");
+      splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
+    }
+  if (decl2)
+    splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
+  return false;
+}
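
A minimal (invented) test case for the new diagnostic: an untied task may be
resumed by a different thread, so touching a threadprivate variable inside it
is reported.

int counter;
#pragma omp threadprivate (counter)

void
bump (void)
{
#pragma omp task untied
  counter++;   /* error: threadprivate variable 'counter' used in untied task,
                  followed by the "enclosing task" error at the task's
                  location  */
}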
+
 /* Record the fact that DECL was used within the OpenMP context CTX.
    IN_CODE is true when real code uses DECL, and false when we should
    merely emit default(none) errors.  Return true if DECL is going to
@@ -5369,15 +5591,15 @@
   if (is_global_var (decl))
     {
       if (DECL_THREAD_LOCAL_P (decl))
-    return false;
+	return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
 
       if (DECL_HAS_VALUE_EXPR_P (decl))
-    {
-      tree value = get_base_address (DECL_VALUE_EXPR (decl));
-
-      if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
-        return false;
-    }
+	{
+	  tree value = get_base_address (DECL_VALUE_EXPR (decl));
+
+	  if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
+	    return omp_notice_threadprivate_variable (ctx, decl, value);
+	}
     }
 
   n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
@@ -5398,54 +5620,57 @@
     default_kind = kind;
 
       switch (default_kind)
-    {
-    case OMP_CLAUSE_DEFAULT_NONE:
-      error ("%qE not specified in enclosing parallel",
-         DECL_NAME (decl));
-      error_at (ctx->location, "enclosing parallel");
-      /* FALLTHRU */
-    case OMP_CLAUSE_DEFAULT_SHARED:
-      flags |= GOVD_SHARED;
-      break;
-    case OMP_CLAUSE_DEFAULT_PRIVATE:
-      flags |= GOVD_PRIVATE;
-      break;
-    case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
-      flags |= GOVD_FIRSTPRIVATE;
-      break;
-    case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
-      /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
-      gcc_assert (ctx->region_type == ORT_TASK);
-      if (ctx->outer_context)
-        omp_notice_variable (ctx->outer_context, decl, in_code);
-      for (octx = ctx->outer_context; octx; octx = octx->outer_context)
-        {
-          splay_tree_node n2;
-
-          n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
-          if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
-        {
-          flags |= GOVD_FIRSTPRIVATE;
-          break;
-        }
-          if ((octx->region_type & ORT_PARALLEL) != 0)
-        break;
-        }
-      if (flags & GOVD_FIRSTPRIVATE)
-        break;
-      if (octx == NULL
-          && (TREE_CODE (decl) == PARM_DECL
-          || (!is_global_var (decl)
-              && DECL_CONTEXT (decl) == current_function_decl)))
-        {
-          flags |= GOVD_FIRSTPRIVATE;
-          break;
-        }
-      flags |= GOVD_SHARED;
-      break;
-    default:
-      gcc_unreachable ();
-    }
+	{
+	case OMP_CLAUSE_DEFAULT_NONE:
+	  error ("%qE not specified in enclosing parallel",
+		 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
+	  if ((ctx->region_type & ORT_TASK) != 0)
+	    error_at (ctx->location, "enclosing task");
+	  else
+	    error_at (ctx->location, "enclosing parallel");
+	  /* FALLTHRU */
+	case OMP_CLAUSE_DEFAULT_SHARED:
+	  flags |= GOVD_SHARED;
+	  break;
+	case OMP_CLAUSE_DEFAULT_PRIVATE:
+	  flags |= GOVD_PRIVATE;
+	  break;
+	case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
+	  flags |= GOVD_FIRSTPRIVATE;
+	  break;
+	case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
+	  /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
+	  gcc_assert ((ctx->region_type & ORT_TASK) != 0);
+	  if (ctx->outer_context)
+	    omp_notice_variable (ctx->outer_context, decl, in_code);
+	  for (octx = ctx->outer_context; octx; octx = octx->outer_context)
+	    {
+	      splay_tree_node n2;
+
+	      n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
+	      if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
+		{
+		  flags |= GOVD_FIRSTPRIVATE;
+		  break;
+		}
+	      if ((octx->region_type & ORT_PARALLEL) != 0)
+		break;
+	    }
+	  if (flags & GOVD_FIRSTPRIVATE)
+	    break;
+	  if (octx == NULL
+	      && (TREE_CODE (decl) == PARM_DECL
+		  || (!is_global_var (decl)
+		      && DECL_CONTEXT (decl) == current_function_decl)))
+	    {
+	      flags |= GOVD_FIRSTPRIVATE;
+	      break;
+	    }
+	  flags |= GOVD_SHARED;
+	  break;
+	default:
+	  gcc_unreachable ();
+	}
 
       if ((flags & GOVD_PRIVATE)
       && lang_hooks.decls.omp_private_outer_ref (decl))
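
A sketch (invented example) of the OMP_CLAUSE_DEFAULT_UNSPECIFIED handling
above: in a task, a variable that is not shared in some enclosing context
defaults to firstprivate rather than shared.

void consume (int v);

void
spawn (int x)
{
#pragma omp task        /* no data-sharing clause for x: as a PARM_DECL not
                           shared in any enclosing context, it becomes
                           firstprivate via the code above  */
  consume (x);
}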
@@ -5918,7 +6143,10 @@
   gimple_seq body = NULL;
   struct gimplify_ctx gctx;
 
-  gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p, ORT_TASK);
+  gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
+			     find_omp_clause (OMP_TASK_CLAUSES (expr),
+					      OMP_CLAUSE_UNTIED)
+			     ? ORT_UNTIED_TASK : ORT_TASK);
 
   push_gimplify_context (&gctx);
 
@@ -6204,13 +6432,21 @@
       break;
     case tcc_expression:
       switch (TREE_CODE (expr))
-    {
-    case TRUTH_ANDIF_EXPR:
-    case TRUTH_ORIF_EXPR:
-      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
-                     lhs_addr, lhs_var);
-      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
-                     lhs_addr, lhs_var);
+	{
+	case TRUTH_ANDIF_EXPR:
+	case TRUTH_ORIF_EXPR:
+	case TRUTH_AND_EXPR:
+	case TRUTH_OR_EXPR:
+	case TRUTH_XOR_EXPR:
+	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
+					 lhs_addr, lhs_var);
+	case TRUTH_NOT_EXPR:
+	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
+					 lhs_addr, lhs_var);
+	  break;
+	default:
+	  break;
+	}
       break;
     default:
       break;
@@ -6242,9 +6478,7 @@
   tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
   tree tmp_load;
 
-   tmp_load = create_tmp_var (type, NULL);
-   if (TREE_CODE (type) == COMPLEX_TYPE || TREE_CODE (type) == VECTOR_TYPE)
-     DECL_GIMPLE_REG_P (tmp_load) = 1;
+   tmp_load = create_tmp_reg (type, NULL);
    if (goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
      return GS_ERROR;
 
@@ -6325,7 +6559,7 @@
 
    GIMPLE_TEST_F points to a function that takes a tree T and
        returns nonzero if T is in the GIMPLE form requested by the
-       caller.  The GIMPLE predicates are in tree-gimple.c.
+       caller.  The GIMPLE predicates are in gimple.c.
 
    FALLBACK tells the function what sort of a temporary we want if
        gimplification cannot produce an expression that complies with
@@ -6374,7 +6608,8 @@
            || gimple_test_f == is_gimple_mem_rhs_or_call
            || gimple_test_f == is_gimple_reg_rhs
            || gimple_test_f == is_gimple_reg_rhs_or_call
-           || gimple_test_f == is_gimple_asm_val)
+           || gimple_test_f == is_gimple_asm_val
+	   || gimple_test_f == is_gimple_mem_ref_addr)
     gcc_assert (fallback & fb_rvalue);
   else if (gimple_test_f == is_gimple_min_lval
        || gimple_test_f == is_gimple_lvalue)
@@ -6450,570 +6685,637 @@
       else if (ret != GS_UNHANDLED)
     break;
 
-      ret = GS_OK;
+      /* Make sure that all the cases set 'ret' appropriately.  */
+      ret = GS_UNHANDLED;
       switch (TREE_CODE (*expr_p))
-    {
-      /* First deal with the special cases.  */
-
-    case POSTINCREMENT_EXPR:
-    case POSTDECREMENT_EXPR:
-    case PREINCREMENT_EXPR:
-    case PREDECREMENT_EXPR:
-      ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
-                    fallback != fb_none);
-      break;
-
-    case ARRAY_REF:
-    case ARRAY_RANGE_REF:
-    case REALPART_EXPR:
-    case IMAGPART_EXPR:
-    case COMPONENT_REF:
-    case VIEW_CONVERT_EXPR:
-      ret = gimplify_compound_lval (expr_p, pre_p, post_p,
-                    fallback ? fallback : fb_rvalue);
-      break;
-
-    case COND_EXPR:
-      ret = gimplify_cond_expr (expr_p, pre_p, fallback);
-
-      /* C99 code may assign to an array in a structure value of a
-         conditional expression, and this has undefined behavior
-         only on execution, so create a temporary if an lvalue is
-         required.  */
-      if (fallback == fb_lvalue)
-        {
-          *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
-          mark_addressable (*expr_p);
-        }
-      break;
-
-    case CALL_EXPR:
-      ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
-
-      /* C99 code may assign to an array in a structure returned
-         from a function, and this has undefined behavior only on
-         execution, so create a temporary if an lvalue is
-         required.  */
-      if (fallback == fb_lvalue)
-        {
-          *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
-          mark_addressable (*expr_p);
-        }
-      break;
-
-    case TREE_LIST:
-      gcc_unreachable ();
-
-    case COMPOUND_EXPR:
-      ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
-      break;
-
-    case COMPOUND_LITERAL_EXPR:
-      ret = gimplify_compound_literal_expr (expr_p, pre_p);
-      break;
-
-    case MODIFY_EXPR:
-    case INIT_EXPR:
-      ret = gimplify_modify_expr (expr_p, pre_p, post_p,
-                      fallback != fb_none);
-      break;
-
-    case TRUTH_ANDIF_EXPR:
-    case TRUTH_ORIF_EXPR:
-      /* Pass the source location of the outer expression.  */
-      ret = gimplify_boolean_expr (expr_p, saved_location);
-      break;
-
-    case TRUTH_NOT_EXPR:
-      if (TREE_CODE (TREE_TYPE (*expr_p)) != BOOLEAN_TYPE)
-        {
-          tree type = TREE_TYPE (*expr_p);
-          *expr_p = fold_convert (type, gimple_boolify (*expr_p));
-          ret = GS_OK;
-          break;
-        }
-
-      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
-                   is_gimple_val, fb_rvalue);
-      recalculate_side_effects (*expr_p);
-      break;
-
-    case ADDR_EXPR:
-      ret = gimplify_addr_expr (expr_p, pre_p, post_p);
-      break;
-
-    case VA_ARG_EXPR:
-      ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
-      break;
-
-    CASE_CONVERT:
-      if (IS_EMPTY_STMT (*expr_p))
-        {
-          ret = GS_ALL_DONE;
-          break;
-        }
-
-      if (VOID_TYPE_P (TREE_TYPE (*expr_p))
-          || fallback == fb_none)
-        {
-          /* Just strip a conversion to void (or in void context) and
-         try again.  */
-          *expr_p = TREE_OPERAND (*expr_p, 0);
-          break;
-        }
-
-      ret = gimplify_conversion (expr_p);
-      if (ret == GS_ERROR)
-        break;
-      if (*expr_p != save_expr)
-        break;
-      /* FALLTHRU */
-
-    case FIX_TRUNC_EXPR:
-      /* unary_expr: ... | '(' cast ')' val | ...  */
-      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
-                   is_gimple_val, fb_rvalue);
-      recalculate_side_effects (*expr_p);
-      break;
-
-    case INDIRECT_REF:
-      *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
-      if (*expr_p != save_expr)
-        break;
-      /* else fall through.  */
-    case ALIGN_INDIRECT_REF:
-    case MISALIGNED_INDIRECT_REF:
-      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
-                   is_gimple_reg, fb_rvalue);
-      recalculate_side_effects (*expr_p);
-      break;
-
-      /* Constants need not be gimplified.  */
-    case INTEGER_CST:
-    case REAL_CST:
-    case FIXED_CST:
-    case STRING_CST:
-    case COMPLEX_CST:
-    case VECTOR_CST:
-      ret = GS_ALL_DONE;
-      break;
-
-    case CONST_DECL:
-      /* If we require an lvalue, such as for ADDR_EXPR, retain the
-         CONST_DECL node.  Otherwise the decl is replaceable by its
-         value.  */
-      /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either.  */
-      if (fallback & fb_lvalue)
-        ret = GS_ALL_DONE;
-      else
-        *expr_p = DECL_INITIAL (*expr_p);
-      break;
-
-    case DECL_EXPR:
-      ret = gimplify_decl_expr (expr_p, pre_p);
-      break;
-
-    case BIND_EXPR:
-      ret = gimplify_bind_expr (expr_p, pre_p);
-      break;
-
-    case LOOP_EXPR:
-      ret = gimplify_loop_expr (expr_p, pre_p);
-      break;
-
-    case SWITCH_EXPR:
-      ret = gimplify_switch_expr (expr_p, pre_p);
-      break;
-
-    case EXIT_EXPR:
-      ret = gimplify_exit_expr (expr_p);
-      break;
-
-    case GOTO_EXPR:
-      /* If the target is not LABEL, then it is a computed jump
-         and the target needs to be gimplified.  */
-      if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
-        {
-          ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
-                   NULL, is_gimple_val, fb_rvalue);
-          if (ret == GS_ERROR)
-        break;
-        }
-      gimplify_seq_add_stmt (pre_p,
-              gimple_build_goto (GOTO_DESTINATION (*expr_p)));
-      break;
-
-    case PREDICT_EXPR:
-      gimplify_seq_add_stmt (pre_p,
-            gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
-                          PREDICT_EXPR_OUTCOME (*expr_p)));
-      ret = GS_ALL_DONE;
-      break;
-
-    case LABEL_EXPR:
-      ret = GS_ALL_DONE;
-      gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
-              == current_function_decl);
-      gimplify_seq_add_stmt (pre_p,
-              gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
-      break;
-
-    case CASE_LABEL_EXPR:
-      ret = gimplify_case_label_expr (expr_p, pre_p);
-      break;
-
-    case RETURN_EXPR:
-      ret = gimplify_return_expr (*expr_p, pre_p);
-      break;
-
-    case CONSTRUCTOR:
-      /* Don't reduce this in place; let gimplify_init_constructor work its
-         magic.  Buf if we're just elaborating this for side effects, just
-         gimplify any element that has side-effects.  */
-      if (fallback == fb_none)
-        {
-          unsigned HOST_WIDE_INT ix;
-          constructor_elt *ce;
-          tree temp = NULL_TREE;
-          for (ix = 0;
-           VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (*expr_p),
-                ix, ce);
-           ix++)
-        if (TREE_SIDE_EFFECTS (ce->value))
-          append_to_statement_list (ce->value, &temp);
-
-          *expr_p = temp;
-          ret = GS_OK;
-        }
-      /* C99 code may assign to an array in a constructed
-         structure or union, and this has undefined behavior only
-         on execution, so create a temporary if an lvalue is
-         required.  */
-      else if (fallback == fb_lvalue)
-        {
-          *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
-          mark_addressable (*expr_p);
-        }
-      else
-        ret = GS_ALL_DONE;
-      break;
-
-      /* The following are special cases that are not handled by the
-         original GIMPLE grammar.  */
-
-      /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
-         eliminated.  */
-    case SAVE_EXPR:
-      ret = gimplify_save_expr (expr_p, pre_p, post_p);
-      break;
-
-    case BIT_FIELD_REF:
-      {
-        enum gimplify_status r0, r1, r2;
-
-        r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
-                post_p, is_gimple_lvalue, fb_either);
-        r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
-                post_p, is_gimple_val, fb_rvalue);
-        r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
-                post_p, is_gimple_val, fb_rvalue);
-        recalculate_side_effects (*expr_p);
-
-        ret = MIN (r0, MIN (r1, r2));
-      }
-      break;
-
-    case TARGET_MEM_REF:
-      {
-        enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
-
-        if (TMR_SYMBOL (*expr_p))
-          r0 = gimplify_expr (&TMR_SYMBOL (*expr_p), pre_p,
-                  post_p, is_gimple_lvalue, fb_either);
-        else if (TMR_BASE (*expr_p))
-          r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
-                  post_p, is_gimple_val, fb_either);
-        if (TMR_INDEX (*expr_p))
-          r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
-                  post_p, is_gimple_val, fb_rvalue);
-        /* TMR_STEP and TMR_OFFSET are always integer constants.  */
-        ret = MIN (r0, r1);
-      }
-      break;
-
-    case NON_LVALUE_EXPR:
-      /* This should have been stripped above.  */
-      gcc_unreachable ();
-
-    case ASM_EXPR:
-      ret = gimplify_asm_expr (expr_p, pre_p, post_p);
-      break;
-
-    case TRY_FINALLY_EXPR:
-    case TRY_CATCH_EXPR:
-      {
-        gimple_seq eval, cleanup;
-        gimple try_;
-
-        eval = cleanup = NULL;
-        gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
-        gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
-        /* Don't create bogus GIMPLE_TRY with empty cleanup.  */
-        if (gimple_seq_empty_p (cleanup))
-          {
-        gimple_seq_add_seq (pre_p, eval);
-        ret = GS_ALL_DONE;
-        break;
-          }
-        try_ = gimple_build_try (eval, cleanup,
-                     TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
-                     ? GIMPLE_TRY_FINALLY
-                     : GIMPLE_TRY_CATCH);
-        if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
-          gimple_try_set_catch_is_cleanup (try_,
-                           TRY_CATCH_IS_CLEANUP (*expr_p));
-        gimplify_seq_add_stmt (pre_p, try_);
-        ret = GS_ALL_DONE;
-        break;
-      }
-
-    case CLEANUP_POINT_EXPR:
-      ret = gimplify_cleanup_point_expr (expr_p, pre_p);
-      break;
-
-    case TARGET_EXPR:
-      ret = gimplify_target_expr (expr_p, pre_p, post_p);
-      break;
-
-    case CATCH_EXPR:
-      {
-        gimple c;
-        gimple_seq handler = NULL;
-        gimplify_and_add (CATCH_BODY (*expr_p), &handler);
-        c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
-        gimplify_seq_add_stmt (pre_p, c);
-        ret = GS_ALL_DONE;
-        break;
-      }
-
-    case EH_FILTER_EXPR:
-      {
-        gimple ehf;
-        gimple_seq failure = NULL;
-
-        gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
-        ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
-        gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
-        gimplify_seq_add_stmt (pre_p, ehf);
-        ret = GS_ALL_DONE;
-        break;
-      }
-
-    case OBJ_TYPE_REF:
-      {
-        enum gimplify_status r0, r1;
-        r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
-                post_p, is_gimple_val, fb_rvalue);
-        r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
-                post_p, is_gimple_val, fb_rvalue);
-        TREE_SIDE_EFFECTS (*expr_p) = 0;
-        ret = MIN (r0, r1);
-      }
-      break;
-
-    case LABEL_DECL:
-      /* We get here when taking the address of a label.  We mark
-         the label as "forced"; meaning it can never be removed and
-         it is a potential target for any computed goto.  */
-      FORCED_LABEL (*expr_p) = 1;
-      ret = GS_ALL_DONE;
-      break;
-
-    case STATEMENT_LIST:
-      ret = gimplify_statement_list (expr_p, pre_p);
-      break;
-
-    case WITH_SIZE_EXPR:
-      {
-        gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
-               post_p == &internal_post ? NULL : post_p,
-               gimple_test_f, fallback);
-        gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
-               is_gimple_val, fb_rvalue);
-      }
-      break;
-
-    case VAR_DECL:
-    case PARM_DECL:
-      ret = gimplify_var_or_parm_decl (expr_p);
-      break;
-
-    case RESULT_DECL:
-      /* When within an OpenMP context, notice uses of variables.  */
-      if (gimplify_omp_ctxp)
-        omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
-      ret = GS_ALL_DONE;
-      break;
-
-    case SSA_NAME:
-      /* Allow callbacks into the gimplifier during optimization.  */
-      ret = GS_ALL_DONE;
-      break;
-
-    case OMP_PARALLEL:
-      gimplify_omp_parallel (expr_p, pre_p);
-      ret = GS_ALL_DONE;
-      break;
-
-    case OMP_TASK:
-      gimplify_omp_task (expr_p, pre_p);
-      ret = GS_ALL_DONE;
-      break;
-
-    case OMP_FOR:
-      ret = gimplify_omp_for (expr_p, pre_p);
-      break;
-
-    case OMP_SECTIONS:
-    case OMP_SINGLE:
-      gimplify_omp_workshare (expr_p, pre_p);
-      ret = GS_ALL_DONE;
-      break;
-
-    case OMP_SECTION:
-    case OMP_MASTER:
-    case OMP_ORDERED:
-    case OMP_CRITICAL:
-      {
-        gimple_seq body = NULL;
-        gimple g;
-
-        gimplify_and_add (OMP_BODY (*expr_p), &body);
-        switch (TREE_CODE (*expr_p))
-          {
-          case OMP_SECTION:
-            g = gimple_build_omp_section (body);
-            break;
-          case OMP_MASTER:
-            g = gimple_build_omp_master (body);
-        break;
-          case OMP_ORDERED:
-        g = gimple_build_omp_ordered (body);
-        break;
-          case OMP_CRITICAL:
-        g = gimple_build_omp_critical (body,
-                               OMP_CRITICAL_NAME (*expr_p));
-        break;
-          default:
-        gcc_unreachable ();
-          }
-        gimplify_seq_add_stmt (pre_p, g);
-        ret = GS_ALL_DONE;
-        break;
-      }
-
-    case OMP_ATOMIC:
-      ret = gimplify_omp_atomic (expr_p, pre_p);
-      break;
-
-    case POINTER_PLUS_EXPR:
+	{
+	  /* First deal with the special cases.  */
+
+	case POSTINCREMENT_EXPR:
+	case POSTDECREMENT_EXPR:
+	case PREINCREMENT_EXPR:
+	case PREDECREMENT_EXPR:
+	  ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
+					fallback != fb_none);
+	  break;
+
+	case ARRAY_REF:
+	case ARRAY_RANGE_REF:
+	case REALPART_EXPR:
+	case IMAGPART_EXPR:
+	case COMPONENT_REF:
+	case VIEW_CONVERT_EXPR:
+	  ret = gimplify_compound_lval (expr_p, pre_p, post_p,
+					fallback ? fallback : fb_rvalue);
+	  break;
+
+	case COND_EXPR:
+	  ret = gimplify_cond_expr (expr_p, pre_p, fallback);
+
+	  /* C99 code may assign to an array in a structure value of a
+	     conditional expression, and this has undefined behavior
+	     only on execution, so create a temporary if an lvalue is
+	     required.  */
+	  if (fallback == fb_lvalue)
+	    {
+	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
+	      mark_addressable (*expr_p);
+	      ret = GS_OK;
+	    }
+	  break;
+
+	case CALL_EXPR:
+	  ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
+
+	  /* C99 code may assign to an array in a structure returned
+	     from a function, and this has undefined behavior only on
+	     execution, so create a temporary if an lvalue is
+	     required.  */
+	  if (fallback == fb_lvalue)
+	    {
+	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
+	      mark_addressable (*expr_p);
+	      ret = GS_OK;
+	    }
+	  break;
+
+	case TREE_LIST:
+	  gcc_unreachable ();
+
+	case COMPOUND_EXPR:
+	  ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
+	  break;
+
+	case COMPOUND_LITERAL_EXPR:
+	  ret = gimplify_compound_literal_expr (expr_p, pre_p);
+	  break;
+
+	case MODIFY_EXPR:
+	case INIT_EXPR:
+	  ret = gimplify_modify_expr (expr_p, pre_p, post_p,
+				      fallback != fb_none);
+	  break;
+
+	case TRUTH_ANDIF_EXPR:
+	case TRUTH_ORIF_EXPR:
+	  /* Pass the source location of the outer expression.  */
+	  ret = gimplify_boolean_expr (expr_p, saved_location);
+	  break;
+
+	case TRUTH_NOT_EXPR:
+	  if (TREE_CODE (TREE_TYPE (*expr_p)) != BOOLEAN_TYPE)
+	    {
+	      tree type = TREE_TYPE (*expr_p);
+	      *expr_p = fold_convert (type, gimple_boolify (*expr_p));
+	      ret = GS_OK;
+	      break;
+	    }
+
+	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
+			       is_gimple_val, fb_rvalue);
+	  recalculate_side_effects (*expr_p);
+	  break;
+
+	case ADDR_EXPR:
+	  ret = gimplify_addr_expr (expr_p, pre_p, post_p);
+	  break;
+
+	case VA_ARG_EXPR:
+	  ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
+	  break;
+
+	CASE_CONVERT:
+	  if (IS_EMPTY_STMT (*expr_p))
+	    {
+	      ret = GS_ALL_DONE;
+	      break;
+	    }
+
+	  if (VOID_TYPE_P (TREE_TYPE (*expr_p))
+	      || fallback == fb_none)
+	    {
+	      /* Just strip a conversion to void (or in void context) and
+		 try again.  */
+	      *expr_p = TREE_OPERAND (*expr_p, 0);
+	      ret = GS_OK;
+	      break;
+	    }
+
+	  ret = gimplify_conversion (expr_p);
+	  if (ret == GS_ERROR)
+	    break;
+	  if (*expr_p != save_expr)
+	    break;
+	  /* FALLTHRU */
+
+	case FIX_TRUNC_EXPR:
+	  /* unary_expr: ... | '(' cast ')' val | ...  */
+	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
+			       is_gimple_val, fb_rvalue);
+	  recalculate_side_effects (*expr_p);
+	  break;
+
+	case INDIRECT_REF:
+	  {
+	    bool volatilep = TREE_THIS_VOLATILE (*expr_p);
+	    bool notrap = TREE_THIS_NOTRAP (*expr_p);
+	    tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
+
+	    *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
+	    if (*expr_p != save_expr)
+	      {
+		ret = GS_OK;
+		break;
+	      }
+
+	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
+				 is_gimple_reg, fb_rvalue);
+	    if (ret == GS_ERROR)
+	      break;
+
+	    recalculate_side_effects (*expr_p);
+	    *expr_p = fold_build2_loc (input_location, MEM_REF,
+				       TREE_TYPE (*expr_p),
+				       TREE_OPERAND (*expr_p, 0),
+				       build_int_cst (saved_ptr_type, 0));
+	    TREE_THIS_VOLATILE (*expr_p) = volatilep;
+	    TREE_THIS_NOTRAP (*expr_p) = notrap;
+	    ret = GS_OK;
+	    break;
+	  }
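
/* An invented example of the lowering in this case: a dereference that cannot
   be folded away is rewritten into a MEM_REF with a zero offset, keeping the
   volatility and no-trap flags saved above.  */

int
load_through_pointer (int *p)
{
  return *p;   /* becomes MEM_REF <p, 0> once p has been gimplified
                  to a register  */
}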
+
+	/* We arrive here through the various re-gimplification paths.  */
+	case MEM_REF:
+	  /* First try re-folding the whole thing.  */
+	  tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
+			     TREE_OPERAND (*expr_p, 0),
+			     TREE_OPERAND (*expr_p, 1));
+	  if (tmp)
+	    {
+	      *expr_p = tmp;
+	      recalculate_side_effects (*expr_p);
+	      ret = GS_OK;
+	      break;
+	    }
+	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
+			       is_gimple_mem_ref_addr, fb_rvalue);
+	  if (ret == GS_ERROR)
+	    break;
+	  recalculate_side_effects (*expr_p);
+	  ret = GS_ALL_DONE;
+	  break;
+
+	  /* Constants need not be gimplified.  */
+	case INTEGER_CST:
+	case REAL_CST:
+	case FIXED_CST:
+	case STRING_CST:
+	case COMPLEX_CST:
+	case VECTOR_CST:
+	  ret = GS_ALL_DONE;
+	  break;
+
+	case CONST_DECL:
+	  /* If we require an lvalue, such as for ADDR_EXPR, retain the
+	     CONST_DECL node.  Otherwise the decl is replaceable by its
+	     value.  */
+	  /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either.  */
+	  if (fallback & fb_lvalue)
+	    ret = GS_ALL_DONE;
+	  else
+	    {
+	      *expr_p = DECL_INITIAL (*expr_p);
+	      ret = GS_OK;
+	    }
+	  break;
+
+	case DECL_EXPR:
+	  ret = gimplify_decl_expr (expr_p, pre_p);
+	  break;
+
+	case BIND_EXPR:
+	  ret = gimplify_bind_expr (expr_p, pre_p);
+	  break;
+
+	case LOOP_EXPR:
+	  ret = gimplify_loop_expr (expr_p, pre_p);
+	  break;
+
+	case SWITCH_EXPR:
+	  ret = gimplify_switch_expr (expr_p, pre_p);
+	  break;
+
+	case EXIT_EXPR:
+	  ret = gimplify_exit_expr (expr_p);
+	  break;
+
+	case GOTO_EXPR:
+	  /* If the target is not LABEL, then it is a computed jump
+	     and the target needs to be gimplified.  */
+	  if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
+	    {
+	      ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
+				   NULL, is_gimple_val, fb_rvalue);
+	      if (ret == GS_ERROR)
+		break;
+	    }
+	  gimplify_seq_add_stmt (pre_p,
+			  gimple_build_goto (GOTO_DESTINATION (*expr_p)));
+	  ret = GS_ALL_DONE;
+	  break;
+
+	case PREDICT_EXPR:
+	  gimplify_seq_add_stmt (pre_p,
+			gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
+					      PREDICT_EXPR_OUTCOME (*expr_p)));
+	  ret = GS_ALL_DONE;
+	  break;
+
+	case LABEL_EXPR:
+	  ret = GS_ALL_DONE;
+	  gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
+		      == current_function_decl);
+	  gimplify_seq_add_stmt (pre_p,
+			  gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
+	  break;
+
+	case CASE_LABEL_EXPR:
+	  ret = gimplify_case_label_expr (expr_p, pre_p);
+	  break;
+
+	case RETURN_EXPR:
+	  ret = gimplify_return_expr (*expr_p, pre_p);
+	  break;
+
+	case CONSTRUCTOR:
+	  /* Don't reduce this in place; let gimplify_init_constructor work its
+	     magic.  But if we're just elaborating this for side effects, just
+	     gimplify any element that has side-effects.  */
+	  if (fallback == fb_none)
+	    {
+	      unsigned HOST_WIDE_INT ix;
+	      tree val;
+	      tree temp = NULL_TREE;
+	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
+		if (TREE_SIDE_EFFECTS (val))
+		  append_to_statement_list (val, &temp);
+
+	      *expr_p = temp;
+	      ret = temp ? GS_OK : GS_ALL_DONE;
+	    }
+	  /* C99 code may assign to an array in a constructed
+	     structure or union, and this has undefined behavior only
+	     on execution, so create a temporary if an lvalue is
+	     required.  */
+	  else if (fallback == fb_lvalue)
+	    {
+	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
+	      mark_addressable (*expr_p);
+	      ret = GS_OK;
+	    }
+	  else
+	    ret = GS_ALL_DONE;
+	  break;
+
+	  /* The following are special cases that are not handled by the
+	     original GIMPLE grammar.  */
+
+	  /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
+	     eliminated.  */
+	case SAVE_EXPR:
+	  ret = gimplify_save_expr (expr_p, pre_p, post_p);
+	  break;
+
+	case BIT_FIELD_REF:
+	  {
+	    enum gimplify_status r0, r1, r2;
+
+	    r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
+				post_p, is_gimple_lvalue, fb_either);
+	    r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
+				post_p, is_gimple_val, fb_rvalue);
+	    r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
+				post_p, is_gimple_val, fb_rvalue);
+	    recalculate_side_effects (*expr_p);
+
+	    ret = MIN (r0, MIN (r1, r2));
+	  }
+	  break;
+
+	case TARGET_MEM_REF:
+	  {
+	    enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
+
+	    if (TMR_BASE (*expr_p))
+	      r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
+				  post_p, is_gimple_mem_ref_addr, fb_either);
+	    if (TMR_INDEX (*expr_p))
+	      r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
+				  post_p, is_gimple_val, fb_rvalue);
+	    if (TMR_INDEX2 (*expr_p))
+	      r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
+				  post_p, is_gimple_val, fb_rvalue);
+	    /* TMR_STEP and TMR_OFFSET are always integer constants.  */
+	    ret = MIN (r0, r1);
+	  }
+	  break;
+
+	case NON_LVALUE_EXPR:
+	  /* This should have been stripped above.  */
+	  gcc_unreachable ();
+
+	case ASM_EXPR:
+	  ret = gimplify_asm_expr (expr_p, pre_p, post_p);
+	  break;
+
+	case TRY_FINALLY_EXPR:
+	case TRY_CATCH_EXPR:
+	  {
+	    gimple_seq eval, cleanup;
+	    gimple try_;
+
+	    eval = cleanup = NULL;
+	    gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
+	    gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
+	    /* Don't create bogus GIMPLE_TRY with empty cleanup.  */
+	    if (gimple_seq_empty_p (cleanup))
+	      {
+		gimple_seq_add_seq (pre_p, eval);
+		ret = GS_ALL_DONE;
+		break;
+	      }
+	    try_ = gimple_build_try (eval, cleanup,
+				     TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
+				     ? GIMPLE_TRY_FINALLY
+				     : GIMPLE_TRY_CATCH);
+	    if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
+	      gimple_try_set_catch_is_cleanup (try_,
+					       TRY_CATCH_IS_CLEANUP (*expr_p));
+	    gimplify_seq_add_stmt (pre_p, try_);
+	    ret = GS_ALL_DONE;
+	    break;
+	  }
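
A hedged source-level sketch (assumed illustration, not from the patch) of one GNU C construct that arrives here as a TRY_FINALLY_EXPR: the cleanup attribute wraps the variable's scope in a cleanup, which becomes a GIMPLE_TRY_FINALLY unless the cleanup sequence gimplifies to nothing.

    static void unlock (int *p)
    {
      (void) p;                    /* e.g. release a lock here */
    }

    void with_cleanup (void)
    {
      int lock __attribute__ ((cleanup (unlock))) = 0;
      /* unlock (&lock) runs on every exit path from this scope */
    }
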
+
+	case CLEANUP_POINT_EXPR:
+	  ret = gimplify_cleanup_point_expr (expr_p, pre_p);
+	  break;
+
+	case TARGET_EXPR:
+	  ret = gimplify_target_expr (expr_p, pre_p, post_p);
+	  break;
+
+	case CATCH_EXPR:
+	  {
+	    gimple c;
+	    gimple_seq handler = NULL;
+	    gimplify_and_add (CATCH_BODY (*expr_p), &handler);
+	    c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
+	    gimplify_seq_add_stmt (pre_p, c);
+	    ret = GS_ALL_DONE;
+	    break;
+	  }
+
+	case EH_FILTER_EXPR:
+	  {
+	    gimple ehf;
+	    gimple_seq failure = NULL;
+
+	    gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
+	    ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
+	    gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
+	    gimplify_seq_add_stmt (pre_p, ehf);
+	    ret = GS_ALL_DONE;
+	    break;
+	  }
+
+	case OBJ_TYPE_REF:
+	  {
+	    enum gimplify_status r0, r1;
+	    r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
+				post_p, is_gimple_val, fb_rvalue);
+	    r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
+				post_p, is_gimple_val, fb_rvalue);
+	    TREE_SIDE_EFFECTS (*expr_p) = 0;
+	    ret = MIN (r0, r1);
+	  }
+	  break;
+
+	case LABEL_DECL:
+	  /* We get here when taking the address of a label.  We mark
+	     the label as "forced", meaning it can never be removed and
+	     it is a potential target for any computed goto.  */
+	  FORCED_LABEL (*expr_p) = 1;
+	  ret = GS_ALL_DONE;
+	  break;
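
For illustration only (assumed example): the GNU C label-address extension is the usual way this case is reached; once `&&` is applied to a label, it must survive as a FORCED_LABEL because any computed goto might target it.

    void dispatch (int cond)
    {
      void *target = cond ? &&yes : &&no;   /* label addresses taken */
      goto *target;                         /* computed goto */
    yes:
      return;
    no:
      return;
    }
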
+
+	case STATEMENT_LIST:
+	  ret = gimplify_statement_list (expr_p, pre_p);
+	  break;
+
+	case WITH_SIZE_EXPR:
+	  {
+	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
+			   post_p == &internal_post ? NULL : post_p,
+			   gimple_test_f, fallback);
+	    gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
+			   is_gimple_val, fb_rvalue);
+	    ret = GS_ALL_DONE;
+	  }
+	  break;
+
+	case VAR_DECL:
+	case PARM_DECL:
+	  ret = gimplify_var_or_parm_decl (expr_p);
+	  break;
+
+	case RESULT_DECL:
+	  /* When within an OpenMP context, notice uses of variables.  */
+	  if (gimplify_omp_ctxp)
+	    omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
+	  ret = GS_ALL_DONE;
+	  break;
+
+	case SSA_NAME:
+	  /* Allow callbacks into the gimplifier during optimization.  */
+	  ret = GS_ALL_DONE;
+	  break;
+
+	case OMP_PARALLEL:
+	  gimplify_omp_parallel (expr_p, pre_p);
+	  ret = GS_ALL_DONE;
+	  break;
+
+	case OMP_TASK:
+	  gimplify_omp_task (expr_p, pre_p);
+	  ret = GS_ALL_DONE;
+	  break;
+
+	case OMP_FOR:
+	  ret = gimplify_omp_for (expr_p, pre_p);
+	  break;
+
+	case OMP_SECTIONS:
+	case OMP_SINGLE:
+	  gimplify_omp_workshare (expr_p, pre_p);
+	  ret = GS_ALL_DONE;
+	  break;
+
+	case OMP_SECTION:
+	case OMP_MASTER:
+	case OMP_ORDERED:
+	case OMP_CRITICAL:
+	  {
+	    gimple_seq body = NULL;
+	    gimple g;
+
+	    gimplify_and_add (OMP_BODY (*expr_p), &body);
+	    switch (TREE_CODE (*expr_p))
+	      {
+	      case OMP_SECTION:
+	        g = gimple_build_omp_section (body);
+	        break;
+	      case OMP_MASTER:
+	        g = gimple_build_omp_master (body);
+		break;
+	      case OMP_ORDERED:
+		g = gimple_build_omp_ordered (body);
+		break;
+	      case OMP_CRITICAL:
+		g = gimple_build_omp_critical (body,
+					       OMP_CRITICAL_NAME (*expr_p));
+		break;
+	      default:
+		gcc_unreachable ();
+	      }
+	    gimplify_seq_add_stmt (pre_p, g);
+	    ret = GS_ALL_DONE;
+	    break;
+	  }
+
+	case OMP_ATOMIC:
+	  ret = gimplify_omp_atomic (expr_p, pre_p);
+	  break;
+
+	case TRUTH_AND_EXPR:
+	case TRUTH_OR_EXPR:
+	case TRUTH_XOR_EXPR:
+	  /* Classified as tcc_expression.  */
+	  goto expr_2;
+
+	case FMA_EXPR:
+	  /* Classified as tcc_expression.  */
+	  goto expr_3;
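
An FMA_EXPR is a three-operand fused multiply-add node; the expr_3 path below gimplifies all three operands to values. As a hedged illustration (not part of the patch), later passes may contract an expression like the one here into an FMA_EXPR on targets with a fused multiply-add instruction.

    double muladd (double a, double b, double c)
    {
      return a * b + c;   /* may be contracted into a single FMA_EXPR */
    }
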
+
+	case POINTER_PLUS_EXPR:
           /* Convert ((type *)A)+offset into &A->field_of_type_and_offset.
-         The second is gimple immediate saving a need for extra statement.
-       */
-      if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
-          && (tmp = maybe_fold_offset_to_address
-          (EXPR_LOCATION (*expr_p),
-           TREE_OPERAND (*expr_p, 0), TREE_OPERAND (*expr_p, 1),
-           TREE_TYPE (*expr_p))))
-        {
-          *expr_p = tmp;
-          break;
-        }
-      /* Convert (void *)&a + 4 into (void *)&a[1].  */
-      if (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == NOP_EXPR
-          && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
-          && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p,
-                                    0),0)))
-          && (tmp = maybe_fold_offset_to_address
-          (EXPR_LOCATION (*expr_p),
-           TREE_OPERAND (TREE_OPERAND (*expr_p, 0), 0),
-           TREE_OPERAND (*expr_p, 1),
-           TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p, 0),
-                        0)))))
-         {
+	     The second form is a GIMPLE immediate, which saves the need for
+	     an extra statement.  */
+	  if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
+	      && (tmp = maybe_fold_offset_to_address
+		  (EXPR_LOCATION (*expr_p),
+		   TREE_OPERAND (*expr_p, 0), TREE_OPERAND (*expr_p, 1),
+		   TREE_TYPE (*expr_p))))
+	    {
+	      *expr_p = tmp;
+	      ret = GS_OK;
+	      break;
+	    }
+	  /* Convert (void *)&a + 4 into (void *)&a[1].  */
+	  if (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == NOP_EXPR
+	      && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
+	      && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p,
+									0),0)))
+	      && (tmp = maybe_fold_offset_to_address
+		  (EXPR_LOCATION (*expr_p),
+		   TREE_OPERAND (TREE_OPERAND (*expr_p, 0), 0),
+		   TREE_OPERAND (*expr_p, 1),
+		   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p, 0),
+					    0)))))
+	     {
                *expr_p = fold_convert (TREE_TYPE (*expr_p), tmp);
-           break;
-         }
+	       ret = GS_OK;
+	       break;
+	     }
           /* FALLTHRU */
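
A hedged sketch of the source pattern the second fold targets (assumed example, using the GNU void-pointer arithmetic extension and a 4-byte int): the constant byte offset is rewritten as an array reference, which is already a GIMPLE invariant.

    int a[2];

    void *second_element (void)
    {
      return (void *) &a + 4;   /* folded to (void *)&a[1]; no extra stmt */
    }
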
 
-    default:
-      switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
-        {
-        case tcc_comparison:
-          /* Handle comparison of objects of non scalar mode aggregates
-             with a call to memcmp.  It would be nice to only have to do
-             this for variable-sized objects, but then we'd have to allow
-             the same nest of reference nodes we allow for MODIFY_EXPR and
-             that's too complex.
-
-         Compare scalar mode aggregates as scalar mode values.  Using
-         memcmp for them would be very inefficient at best, and is
-         plain wrong if bitfields are involved.  */
-        {
-          tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
-
-          if (!AGGREGATE_TYPE_P (type))
-            goto expr_2;
-          else if (TYPE_MODE (type) != BLKmode)
-            ret = gimplify_scalar_mode_aggregate_compare (expr_p);
-          else
-            ret = gimplify_variable_sized_compare (expr_p);
-
-          break;
-        }
-
-        /* If *EXPR_P does not need to be special-cased, handle it
-           according to its class.  */
-        case tcc_unary:
-          ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
-                   post_p, is_gimple_val, fb_rvalue);
-          break;
-
-        case tcc_binary:
-        expr_2:
-          {
-        enum gimplify_status r0, r1;
-
-        r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
-                            post_p, is_gimple_val, fb_rvalue);
-        r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
-                    post_p, is_gimple_val, fb_rvalue);
-
-        ret = MIN (r0, r1);
-        break;
-          }
-
-        case tcc_declaration:
-        case tcc_constant:
-          ret = GS_ALL_DONE;
-          goto dont_recalculate;
-
-        default:
-          gcc_assert (TREE_CODE (*expr_p) == TRUTH_AND_EXPR
-              || TREE_CODE (*expr_p) == TRUTH_OR_EXPR
-              || TREE_CODE (*expr_p) == TRUTH_XOR_EXPR);
-          goto expr_2;
-        }
-
-      recalculate_side_effects (*expr_p);
-
-    dont_recalculate:
-      break;
-    }
-
-      /* If we replaced *expr_p, gimplify again.  */
-      if (ret == GS_OK && (*expr_p == NULL || *expr_p == save_expr))
-    ret = GS_ALL_DONE;
+	default:
+	  switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
+	    {
+	    case tcc_comparison:
+	      /* Handle comparison of objects of non scalar mode aggregates
+	     	 with a call to memcmp.  It would be nice to only have to do
+	     	 this for variable-sized objects, but then we'd have to allow
+	     	 the same nest of reference nodes we allow for MODIFY_EXPR and
+	     	 that's too complex.
+
+		 Compare scalar mode aggregates as scalar mode values.  Using
+		 memcmp for them would be very inefficient at best, and is
+		 plain wrong if bitfields are involved.  */
+		{
+		  tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
+
+		  if (!AGGREGATE_TYPE_P (type))
+		    goto expr_2;
+		  else if (TYPE_MODE (type) != BLKmode)
+		    ret = gimplify_scalar_mode_aggregate_compare (expr_p);
+		  else
+		    ret = gimplify_variable_sized_compare (expr_p);
+
+		  break;
+		}
+
+	    /* If *EXPR_P does not need to be special-cased, handle it
+	       according to its class.  */
+	    case tcc_unary:
+	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
+				   post_p, is_gimple_val, fb_rvalue);
+	      break;
+
+	    case tcc_binary:
+	    expr_2:
+	      {
+		enum gimplify_status r0, r1;
+
+		r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
+		                    post_p, is_gimple_val, fb_rvalue);
+		r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
+				    post_p, is_gimple_val, fb_rvalue);
+
+		ret = MIN (r0, r1);
+		break;
+	      }
+
+	    expr_3:
+	      {
+		enum gimplify_status r0, r1, r2;
+
+		r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
+		                    post_p, is_gimple_val, fb_rvalue);
+		r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
+				    post_p, is_gimple_val, fb_rvalue);
+		r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
+				    post_p, is_gimple_val, fb_rvalue);
+
+		ret = MIN (MIN (r0, r1), r2);
+		break;
+	      }
+
+	    case tcc_declaration:
+	    case tcc_constant:
+	      ret = GS_ALL_DONE;
+	      goto dont_recalculate;
+
+	    default:
+	      gcc_unreachable ();
+	    }
+
+	  recalculate_side_effects (*expr_p);
+
+	dont_recalculate:
+	  break;
+	}
+
+      gcc_assert (*expr_p || ret != GS_OK);
     }
   while (ret == GS_OK);
 
@@ -7176,7 +7478,7 @@
      that temporary.  */
       tmp = build_fold_addr_expr_loc (input_location, *expr_p);
       gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
-      *expr_p = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (tmp)), tmp);
+      *expr_p = build_simple_mem_ref (tmp);
     }
   else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
     {
@@ -7274,31 +7576,35 @@
       /* These types may not have declarations, so handle them here.  */
       gimplify_type_sizes (TREE_TYPE (type), list_p);
       gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
-      /* When not optimizing, ensure VLA bounds aren't removed.  */
-      if (!optimize
-      && TYPE_DOMAIN (type)
-      && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
-    {
-      t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
-      if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
-        DECL_IGNORED_P (t) = 0;
-      t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
-      if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
-        DECL_IGNORED_P (t) = 0;
-    }
+      /* Ensure VLA bounds aren't removed, for -O0 they should be variables
+	 with assigned stack slots, for -O1+ -g they should be tracked
+	 by VTA.  */
+      if (!(TYPE_NAME (type)
+	    && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
+	    && DECL_IGNORED_P (TYPE_NAME (type)))
+	  && TYPE_DOMAIN (type)
+	  && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
+	{
+	  t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
+	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
+	    DECL_IGNORED_P (t) = 0;
+	  t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
+	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
+	    DECL_IGNORED_P (t) = 0;
+	}
       break;
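
A minimal sketch of why the bounds are kept (assumed illustration, not from the patch): for a VLA parameterised by `n`, the gimplified bound lives in an artificial variable; clearing DECL_IGNORED_P on it keeps it visible to the debugger, as a stack slot at -O0 or via VTA location tracking at -O1+ with -g.

    void use_vla (int n)
    {
      int a[n];        /* bound "n" is gimplified into an artificial var */
      a[0] = n;
    }
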
 
     case RECORD_TYPE:
     case UNION_TYPE:
     case QUAL_UNION_TYPE:
-      for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
-    if (TREE_CODE (field) == FIELD_DECL)
-      {
-        gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
-        gimplify_one_sizepos (&DECL_SIZE (field), list_p);
-        gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
-        gimplify_type_sizes (TREE_TYPE (field), list_p);
-      }
+      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
+	if (TREE_CODE (field) == FIELD_DECL)
+	  {
+	    gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
+	    gimplify_one_sizepos (&DECL_SIZE (field), list_p);
+	    gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
+	    gimplify_type_sizes (TREE_TYPE (field), list_p);
+	  }
       break;
 
     case POINTER_TYPE:
@@ -7373,10 +7679,7 @@
       *expr_p = create_tmp_var (type, NULL);
       tmp = build1 (NOP_EXPR, type, expr);
       stmt = gimplify_assign (*expr_p, tmp, stmt_p);
-      if (EXPR_HAS_LOCATION (expr))
-    gimple_set_location (stmt, EXPR_LOCATION (expr));
-      else
-    gimple_set_location (stmt, input_location);
+      gimple_set_location (stmt, EXPR_LOC_OR_HERE (expr));
     }
 }
 
@@ -7441,11 +7744,21 @@
   *body_p = NULL_TREE;
 
   /* If we had callee-copies statements, insert them at the beginning
-     of the function.  */
+     of the function and clear DECL_HAS_VALUE_EXPR_P on the parameters.  */
   if (!gimple_seq_empty_p (parm_stmts))
     {
+      tree parm;
+
       gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
       gimple_bind_set_body (outer_bind, parm_stmts);
+
+      for (parm = DECL_ARGUMENTS (current_function_decl);
+	   parm; parm = DECL_CHAIN (parm))
+	if (DECL_HAS_VALUE_EXPR_P (parm))
+	  {
+	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
+	    DECL_IGNORED_P (parm) = 0;
+	  }
     }
 
   if (nonlocal_vlas)
@@ -7458,7 +7771,7 @@
   gcc_assert (gimplify_ctxp == NULL);
 
 #ifdef ENABLE_TYPES_CHECKING
-  if (!errorcount && !sorrycount)
+  if (!seen_error ())
     verify_types_in_gimple_seq (gimple_bind_body (outer_bind));
 #endif
 
@@ -7468,6 +7781,46 @@
   return outer_bind;
 }
 
+typedef char *char_p; /* For DEF_VEC_P.  */
+DEF_VEC_P(char_p);
+DEF_VEC_ALLOC_P(char_p,heap);
+
+/* Return whether we should exclude FNDECL from instrumentation.  */
+
+static bool
+flag_instrument_functions_exclude_p (tree fndecl)
+{
+  VEC(char_p,heap) *vec;
+
+  vec = (VEC(char_p,heap) *) flag_instrument_functions_exclude_functions;
+  if (VEC_length (char_p, vec) > 0)
+    {
+      const char *name;
+      int i;
+      char *s;
+
+      name = lang_hooks.decl_printable_name (fndecl, 0);
+      FOR_EACH_VEC_ELT (char_p, vec, i, s)
+	if (strstr (name, s) != NULL)
+	  return true;
+    }
+
+  vec = (VEC(char_p,heap) *) flag_instrument_functions_exclude_files;
+  if (VEC_length (char_p, vec) > 0)
+    {
+      const char *name;
+      int i;
+      char *s;
+
+      name = DECL_SOURCE_FILE (fndecl);
+      FOR_EACH_VEC_ELT (char_p, vec, i, s)
+	if (strstr (name, s) != NULL)
+	  return true;
+    }
+
+  return false;
+}
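
Assumed usage sketch, not part of the patch: a function stays uninstrumented either through the attribute below or when its printable name or source file contains one of the substrings given to -finstrument-functions-exclude-function-list= / -finstrument-functions-exclude-file-list=, which is the strstr matching implemented above.

    void __attribute__ ((no_instrument_function))
    helper (void)
    {
      /* never receives the enter/exit profiling calls */
    }
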
+
 /* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
    node for the function we want to gimplify.
 
@@ -7490,7 +7843,7 @@
   else
     push_struct_function (fndecl);
 
-  for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = TREE_CHAIN (parm))
+  for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
     {
       /* Preliminarily mark non-addressed complex variables as eligible
          for promotion to gimple registers.  We'll transform their uses
@@ -7528,13 +7881,31 @@
       gimple new_bind;
       gimple tf;
       gimple_seq cleanup = NULL, body = NULL;
-
+      tree tmp_var;
+      gimple call;
+
+      x = implicit_built_in_decls[BUILT_IN_RETURN_ADDRESS];
+      call = gimple_build_call (x, 1, integer_zero_node);
+      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
+      gimple_call_set_lhs (call, tmp_var);
+      gimplify_seq_add_stmt (&cleanup, call);
       x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_EXIT];
-      gimplify_seq_add_stmt (&cleanup, gimple_build_call (x, 0));
+      call = gimple_build_call (x, 2,
+				build_fold_addr_expr (current_function_decl),
+				tmp_var);
+      gimplify_seq_add_stmt (&cleanup, call);
       tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
 
+      x = implicit_built_in_decls[BUILT_IN_RETURN_ADDRESS];
+      call = gimple_build_call (x, 1, integer_zero_node);
+      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
+      gimple_call_set_lhs (call, tmp_var);
+      gimplify_seq_add_stmt (&body, call);
       x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_ENTER];
-      gimplify_seq_add_stmt (&body, gimple_build_call (x, 0));
+      call = gimple_build_call (x, 2,
+				build_fold_addr_expr (current_function_decl),
+				tmp_var);
+      gimplify_seq_add_stmt (&body, call);
       gimplify_seq_add_stmt (&body, tf);
       new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
       /* Clear the block for BIND, since it is no longer directly inside
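
As a hedged illustration of what the two-argument calls built above bind to at the source level (assumed, not from the patch): the documented -finstrument-functions hooks, which now receive the instrumented function's address and the value of __builtin_return_address (0).

    void __attribute__ ((no_instrument_function))
    __cyg_profile_func_enter (void *this_fn, void *call_site)
    {
      (void) this_fn; (void) call_site;   /* e.g. record function entry */
    }

    void __attribute__ ((no_instrument_function))
    __cyg_profile_func_exit (void *this_fn, void *call_site)
    {
      (void) this_fn; (void) call_site;   /* e.g. record function exit */
    }
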
@@ -7662,68 +8033,66 @@
       /* If the LHS changed it in a way that requires a simple RHS,
      create temporary.  */
       if (lhs && !is_gimple_reg (lhs))
-    {
-      bool need_temp = false;
-
-      if (is_gimple_assign (stmt)
-          && num_ops == 2
-          && get_gimple_rhs_class (gimple_expr_code (stmt))
-         == GIMPLE_SINGLE_RHS)
-        gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
-               rhs_predicate_for (gimple_assign_lhs (stmt)),
-               fb_rvalue);
-      else if (is_gimple_reg (lhs))
-        {
-          if (is_gimple_reg_type (TREE_TYPE (lhs)))
-        {
-          if (is_gimple_call (stmt))
-            {
-              i = gimple_call_flags (stmt);
-              if ((i & ECF_LOOPING_CONST_OR_PURE)
-              || !(i & (ECF_CONST | ECF_PURE)))
-            need_temp = true;
-            }
-          if (stmt_can_throw_internal (stmt))
-            need_temp = true;
-        }
-        }
-      else
-        {
-          if (is_gimple_reg_type (TREE_TYPE (lhs)))
-        need_temp = true;
-          else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
-        {
-          if (is_gimple_call (stmt))
-            {
-              tree fndecl = gimple_call_fndecl (stmt);
-
-              if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
-              && !(fndecl && DECL_RESULT (fndecl)
-                   && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
-            need_temp = true;
-            }
-          else
-            need_temp = true;
-        }
-        }
-      if (need_temp)
-        {
-          tree temp = create_tmp_var (TREE_TYPE (lhs), NULL);
-
-          if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
-          || TREE_CODE (TREE_TYPE (lhs)) == VECTOR_TYPE)
-        DECL_GIMPLE_REG_P (temp) = 1;
-          if (TREE_CODE (orig_lhs) == SSA_NAME)
-        orig_lhs = SSA_NAME_VAR (orig_lhs);
-
-          if (gimple_in_ssa_p (cfun))
-        temp = make_ssa_name (temp, NULL);
-          gimple_set_lhs (stmt, temp);
-          post_stmt = gimple_build_assign (lhs, temp);
-          if (TREE_CODE (lhs) == SSA_NAME)
-        SSA_NAME_DEF_STMT (lhs) = post_stmt;
-        }
-    }
+	{
+	  bool need_temp = false;
+
+	  if (is_gimple_assign (stmt)
+	      && num_ops == 2
+	      && get_gimple_rhs_class (gimple_expr_code (stmt))
+		 == GIMPLE_SINGLE_RHS)
+	    gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
+			   rhs_predicate_for (gimple_assign_lhs (stmt)),
+			   fb_rvalue);
+	  else if (is_gimple_reg (lhs))
+	    {
+	      if (is_gimple_reg_type (TREE_TYPE (lhs)))
+		{
+		  if (is_gimple_call (stmt))
+		    {
+		      i = gimple_call_flags (stmt);
+		      if ((i & ECF_LOOPING_CONST_OR_PURE)
+			  || !(i & (ECF_CONST | ECF_PURE)))
+			need_temp = true;
+		    }
+		  if (stmt_can_throw_internal (stmt))
+		    need_temp = true;
+		}
+	    }
+	  else
+	    {
+	      if (is_gimple_reg_type (TREE_TYPE (lhs)))
+		need_temp = true;
+	      else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
+		{
+		  if (is_gimple_call (stmt))
+		    {
+		      tree fndecl = gimple_call_fndecl (stmt);
+
+		      if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
+			  && !(fndecl && DECL_RESULT (fndecl)
+			       && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
+			need_temp = true;
+		    }
+		  else
+		    need_temp = true;
+		}
+	    }
+	  if (need_temp)
+	    {
+	      tree temp = create_tmp_reg (TREE_TYPE (lhs), NULL);
+
+	      if (TREE_CODE (orig_lhs) == SSA_NAME)
+		orig_lhs = SSA_NAME_VAR (orig_lhs);
+
+	      if (gimple_in_ssa_p (cfun))
+		temp = make_ssa_name (temp, NULL);
+	      gimple_set_lhs (stmt, temp);
+	      post_stmt = gimple_build_assign (lhs, temp);
+	      if (TREE_CODE (lhs) == SSA_NAME)
+		SSA_NAME_DEF_STMT (lhs) = post_stmt;
+	    }
+	}
       break;
     }
 
@@ -7749,26 +8118,27 @@
 }
 
 
-/* Expands EXPR to list of gimple statements STMTS.  If SIMPLE is true,
-   force the result to be either ssa_name or an invariant, otherwise
-   just force it to be a rhs expression.  If VAR is not NULL, make the
+/* Expands EXPR to list of gimple statements STMTS.  GIMPLE_TEST_F specifies
+   the predicate that will hold for the result.  If VAR is not NULL, make the
    base variable of the final destination be VAR if suitable.  */
 
 tree
-force_gimple_operand (tree expr, gimple_seq *stmts, bool simple, tree var)
+force_gimple_operand_1 (tree expr, gimple_seq *stmts,
+			gimple_predicate gimple_test_f, tree var)
 {
   tree t;
   enum gimplify_status ret;
-  gimple_predicate gimple_test_f;
   struct gimplify_ctx gctx;
 
   *stmts = NULL;
 
-  if (is_gimple_val (expr))
+  /* gimple_test_f might be more strict than is_gimple_val, make
+     sure we pass both.  Just checking gimple_test_f doesn't work
+     because most gimple predicates do not work recursively.  */
+  if (is_gimple_val (expr)
+      && (*gimple_test_f) (expr))
     return expr;
 
-  gimple_test_f = simple ? is_gimple_val : is_gimple_reg_rhs;
-
   push_gimplify_context (&gctx);
   gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
   gimplify_ctxp->allow_rhs_cond_expr = true;
@@ -7789,7 +8159,7 @@
     }
 
   if (gimple_referenced_vars (cfun))
-    for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
+    for (t = gimplify_ctxp->temps; t ; t = DECL_CHAIN (t))
       add_referenced_var (t);
 
   pop_gimplify_context (NULL);
@@ -7797,38 +8167,72 @@
   return expr;
 }
 
-/* Invokes force_gimple_operand for EXPR with parameters SIMPLE_P and VAR.  If
-   some statements are produced, emits them at GSI.  If BEFORE is true.
-   the statements are appended before GSI, otherwise they are appended after
-   it.  M specifies the way GSI moves after insertion (GSI_SAME_STMT or
-   GSI_CONTINUE_LINKING are the usual values).  */
+/* Expands EXPR to list of gimple statements STMTS.  If SIMPLE is true,
+   force the result to be either ssa_name or an invariant, otherwise
+   just force it to be a rhs expression.  If VAR is not NULL, make the
+   base variable of the final destination be VAR if suitable.  */
+
+tree
+force_gimple_operand (tree expr, gimple_seq *stmts, bool simple, tree var)
+{
+  return force_gimple_operand_1 (expr, stmts,
+				 simple ? is_gimple_val : is_gimple_reg_rhs,
+				 var);
+}
+
+/* Invokes force_gimple_operand_1 for EXPR with parameters GIMPLE_TEST_F
+   and VAR.  If some statements are produced, emits them at GSI.
+   If BEFORE is true, the statements are appended before GSI, otherwise
+   they are appended after it.  M specifies the way GSI moves after
+   insertion (GSI_SAME_STMT or GSI_CONTINUE_LINKING are the usual values).  */
+
+tree
+force_gimple_operand_gsi_1 (gimple_stmt_iterator *gsi, tree expr,
+			    gimple_predicate gimple_test_f,
+			    tree var, bool before,
+			    enum gsi_iterator_update m)
+{
+  gimple_seq stmts;
+
+  expr = force_gimple_operand_1 (expr, &stmts, gimple_test_f, var);
+
+  if (!gimple_seq_empty_p (stmts))
+    {
+      if (gimple_in_ssa_p (cfun))
+	{
+	  gimple_stmt_iterator i;
+
+	  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
+	    mark_symbols_for_renaming (gsi_stmt (i));
+	}
+
+      if (before)
+	gsi_insert_seq_before (gsi, stmts, m);
+      else
+	gsi_insert_seq_after (gsi, stmts, m);
+    }
+
+  return expr;
+}
+
+/* Invokes force_gimple_operand_1 for EXPR with parameter VAR.
+   If SIMPLE is true, force the result to be either ssa_name or an invariant,
+   otherwise just force it to be a rhs expression.  If some statements are
+   produced, emits them at GSI.  If BEFORE is true, the statements are
+   appended before GSI, otherwise they are appended after it.  M specifies
+   the way GSI moves after insertion (GSI_SAME_STMT or GSI_CONTINUE_LINKING
+   are the usual values).  */
 
 tree
 force_gimple_operand_gsi (gimple_stmt_iterator *gsi, tree expr,
               bool simple_p, tree var, bool before,
               enum gsi_iterator_update m)
 {
-  gimple_seq stmts;
-
-  expr = force_gimple_operand (expr, &stmts, simple_p, var);
-
-  if (!gimple_seq_empty_p (stmts))
-    {
-      if (gimple_in_ssa_p (cfun))
-    {
-      gimple_stmt_iterator i;
-
-      for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
-        mark_symbols_for_renaming (gsi_stmt (i));
-    }
-
-      if (before)
-    gsi_insert_seq_before (gsi, stmts, m);
-      else
-    gsi_insert_seq_after (gsi, stmts, m);
-    }
-
-  return expr;
-}
+  return force_gimple_operand_gsi_1 (gsi, expr,
+				     simple_p
+				     ? is_gimple_val : is_gimple_reg_rhs,
+				     var, before, m);
+}
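
A hedged usage sketch (hypothetical helper, not part of the patch), relying only on the signatures introduced above: a pass that needs an operand satisfying a specific predicate, here is_gimple_mem_ref_addr, can now pass that predicate directly instead of the old SIMPLE_P boolean.

    static tree
    force_mem_ref_addr_at (gimple_stmt_iterator *gsi, tree expr)
    {
      /* Insert any needed statements before GSI and return an operand
         valid as a MEM_REF address.  */
      return force_gimple_operand_gsi_1 (gsi, expr, is_gimple_mem_ref_addr,
                                         NULL_TREE, true, GSI_SAME_STMT);
    }
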
+
 
 #include "gt-gimplify.h"