diff gcc/except.c @ 111:04ced10e8804

gcc 7
author kono
date Fri, 27 Oct 2017 22:46:09 +0900
parents f6334be47118
children 84e7813d76e9
--- a/gcc/except.c	Sun Aug 21 07:07:55 2011 +0900
+++ b/gcc/except.c	Fri Oct 27 22:46:09 2017 +0900
@@ -1,7 +1,5 @@
 /* Implements exception handling.
-   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
-   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
-   Free Software Foundation, Inc.
+   Copyright (C) 1989-2017 Free Software Foundation, Inc.
    Contributed by Mike Stump <mrs@cygnus.com>.
 
 This file is part of GCC.
@@ -81,7 +79,7 @@
    gimple to eh_region mapping that had been recorded in the
    THROW_STMT_TABLE.
 
-   During pass_rtl_eh (except.c), we generate the real landing pads
+   Then, via finish_eh_generation, we generate the real landing pads
    to which the runtime will actually transfer control.  These new
    landing pads perform whatever bookkeeping is needed by the target
    backend in order to resume execution within the current function.
@@ -114,45 +112,44 @@
 #include "config.h"
 #include "system.h"
 #include "coretypes.h"
-#include "tm.h"
+#include "backend.h"
+#include "target.h"
 #include "rtl.h"
 #include "tree.h"
-#include "flags.h"
-#include "function.h"
+#include "cfghooks.h"
+#include "tree-pass.h"
+#include "memmodel.h"
+#include "tm_p.h"
+#include "stringpool.h"
+#include "expmed.h"
+#include "optabs.h"
+#include "emit-rtl.h"
+#include "cgraph.h"
+#include "diagnostic.h"
+#include "fold-const.h"
+#include "stor-layout.h"
+#include "explow.h"
+#include "stmt.h"
 #include "expr.h"
+#include "calls.h"
 #include "libfuncs.h"
-#include "insn-config.h"
 #include "except.h"
-#include "integrate.h"
-#include "hard-reg-set.h"
-#include "basic-block.h"
 #include "output.h"
 #include "dwarf2asm.h"
 #include "dwarf2out.h"
-#include "dwarf2.h"
-#include "toplev.h"
-#include "hashtab.h"
-#include "intl.h"
-#include "ggc.h"
-#include "tm_p.h"
-#include "target.h"
+#include "common/common-target.h"
 #include "langhooks.h"
-#include "cgraph.h"
-#include "diagnostic.h"
+#include "cfgrtl.h"
 #include "tree-pretty-print.h"
-#include "tree-pass.h"
-#include "timevar.h"
-#include "tree-flow.h"
-
-/* Provide defaults for stuff that may not be defined when using
-   sjlj exceptions.  */
-#ifndef EH_RETURN_DATA_REGNO
-#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
-#endif
+#include "cfgloop.h"
+#include "builtins.h"
+#include "tree-hash-traits.h"
 
 static GTY(()) int call_site_base;
-static GTY ((param_is (union tree_node)))
-  htab_t type_to_runtime_map;
+
+static GTY(()) hash_map<tree_hash, tree> *type_to_runtime_map;
+
+static GTY(()) tree setjmp_fn;
 
 /* Describe the SjLj_Function_Context structure.  */
 static GTY(()) tree sjlj_fc_type_node;
@@ -168,33 +165,61 @@
   rtx landing_pad;
   int action;
 };
+
+/* In the following structure and associated functions,
+   we represent entries in the action table as 1-based indices.
+   Special cases are:
+
+	 0:	null action record, non-null landing pad; implies cleanups
+	-1:	null action record, null landing pad; implies no action
+	-2:	no call-site entry; implies must_not_throw
+	-3:	we have yet to process outer regions
+
+   Further, no special cases apply to the "next" field of the record.
+   For next, 0 means end of list.  */
+
+struct action_record
+{
+  int offset;
+  int filter;
+  int next;
+};
+
+/* Hashtable helpers.  */
+
+struct action_record_hasher : free_ptr_hash <action_record>
+{
+  static inline hashval_t hash (const action_record *);
+  static inline bool equal (const action_record *, const action_record *);
+};
+
+inline hashval_t
+action_record_hasher::hash (const action_record *entry)
+{
+  return entry->next * 1009 + entry->filter;
+}
+
+inline bool
+action_record_hasher::equal (const action_record *entry,
+			     const action_record *data)
+{
+  return entry->filter == data->filter && entry->next == data->next;
+}
+
+typedef hash_table<action_record_hasher> action_hash_type;
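
The sentinel values described in the comment above are easy to misread; as a purely illustrative sketch (this helper is hypothetical and not part of the patch), they decode as:

    /* Hypothetical helper: spell out the special action-table indices.  */
    static const char *
    describe_action_index (int action)
    {
      switch (action)
        {
        case 0:   return "null action record, non-null landing pad: cleanups";
        case -1:  return "null action record, null landing pad: no action";
        case -2:  return "no call-site entry: must_not_throw";
        case -3:  return "outer regions not yet processed";
        default:  return "1-based index of an action record";
        }
    }
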
 
 static bool get_eh_region_and_lp_from_rtx (const_rtx, eh_region *,
 					   eh_landing_pad *);
 
-static int t2r_eq (const void *, const void *);
-static hashval_t t2r_hash (const void *);
-
-static int ttypes_filter_eq (const void *, const void *);
-static hashval_t ttypes_filter_hash (const void *);
-static int ehspec_filter_eq (const void *, const void *);
-static hashval_t ehspec_filter_hash (const void *);
-static int add_ttypes_entry (htab_t, tree);
-static int add_ehspec_entry (htab_t, htab_t, tree);
 static void dw2_build_landing_pads (void);
 
-static int action_record_eq (const void *, const void *);
-static hashval_t action_record_hash (const void *);
-static int add_action_record (htab_t, int, int);
-static int collect_one_action_chain (htab_t, eh_region);
+static int collect_one_action_chain (action_hash_type *, eh_region);
 static int add_call_site (rtx, int, int);
 
-static void push_uleb128 (VEC (uchar, gc) **, unsigned int);
-static void push_sleb128 (VEC (uchar, gc) **, int);
-#ifndef HAVE_AS_LEB128
+static void push_uleb128 (vec<uchar, va_gc> **, unsigned int);
+static void push_sleb128 (vec<uchar, va_gc> **, int);
 static int dw2_size_of_call_site_table (int);
 static int sjlj_size_of_call_site_table (void);
-#endif
 static void dw2_output_call_site_table (int, int);
 static void sjlj_output_call_site_table (void);
 
@@ -205,11 +230,11 @@
   if (! flag_exceptions)
     return;
 
-  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);
+  type_to_runtime_map = hash_map<tree_hash, tree>::create_ggc (31);
 
   /* Create the SjLj_Function_Context structure.  This should match
      the definition in unwind-sjlj.c.  */
-  if (targetm.except_unwind_info (&global_options) == UI_SJLJ)
+  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
     {
       tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
 
@@ -225,7 +250,7 @@
 			 integer_type_node);
       DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
 
-      tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
+      tmp = build_index_type (size_int (4 - 1));
       tmp = build_array_type (lang_hooks.types.type_for_mode
 				(targetm.unwind_word_mode (), 1),
 			      tmp);
@@ -245,18 +270,31 @@
 
 #ifdef DONT_USE_BUILTIN_SETJMP
 #ifdef JMP_BUF_SIZE
-      tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
+      tmp = size_int (JMP_BUF_SIZE - 1);
 #else
       /* Should be large enough for most systems, if it is not,
 	 JMP_BUF_SIZE should be defined with the proper value.  It will
 	 also tend to be larger than necessary for most systems, a more
 	 optimal port will define JMP_BUF_SIZE.  */
-      tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
+      tmp = size_int (FIRST_PSEUDO_REGISTER + 2 - 1);
 #endif
 #else
-      /* builtin_setjmp takes a pointer to 5 words.  */
-      tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
+      /* Compute a minimally sized jump buffer.  We need room to store at
+	 least 3 pointers - stack pointer, frame pointer and return address.
+	 Plus for some targets we need room for an extra pointer - in the
+	 case of MIPS this is the global pointer.  This makes a total of four
+	 pointers, but to be safe we actually allocate room for 5.
+
+	 If pointers are smaller than words then we allocate enough room for
+	 5 words, just in case the backend needs this much room.  For more
+	 discussion on this issue see:
+	 http://gcc.gnu.org/ml/gcc-patches/2014-05/msg00313.html.  */
+      if (POINTER_SIZE > BITS_PER_WORD)
+	tmp = size_int (5 - 1);
+      else
+	tmp = size_int ((5 * BITS_PER_WORD / POINTER_SIZE) - 1);
 #endif
+
       tmp = build_index_type (tmp);
       tmp = build_array_type (ptr_type_node, tmp);
       f_jbuf = build_decl (BUILTINS_LOCATION,
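
A worked example of the jump-buffer sizing above may help; this sketch merely restates the arithmetic with assumed target parameters (not part of the patch):

    /* Number of pointer-sized elements reserved for the builtin-setjmp
       buffer: room for sp, fp, the return address, one target-specific
       extra pointer (e.g. the MIPS global pointer), plus a spare.  */
    static unsigned int
    jbuf_elements (unsigned int pointer_size, unsigned int bits_per_word)
    {
      if (pointer_size > bits_per_word)
        return 5;
      return 5 * bits_per_word / pointer_size;
    }

    /* E.g. 64-bit pointers, 64-bit words -> 5 elements;
       32-bit pointers, 64-bit words (ILP32 ABIs) -> 10 elements.  */
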
@@ -264,7 +302,7 @@
 #ifdef DONT_USE_BUILTIN_SETJMP
       /* We don't know what the alignment requirements of the
 	 runtime's jmp_buf has.  Overestimate.  */
-      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
+      SET_DECL_ALIGN (f_jbuf, BIGGEST_ALIGNMENT);
       DECL_USER_ALIGN (f_jbuf) = 1;
 #endif
       DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
@@ -281,31 +319,41 @@
       /* Cache the interesting field offsets so that we have
 	 easy access from rtl.  */
       sjlj_fc_call_site_ofs
-	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
-	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
+	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_cs))
+	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_cs)) / BITS_PER_UNIT);
       sjlj_fc_data_ofs
-	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
-	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
+	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_data))
+	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_data)) / BITS_PER_UNIT);
       sjlj_fc_personality_ofs
-	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
-	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
+	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_per))
+	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_per)) / BITS_PER_UNIT);
       sjlj_fc_lsda_ofs
-	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
-	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
+	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_lsda))
+	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_lsda)) / BITS_PER_UNIT);
       sjlj_fc_jbuf_ofs
-	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
-	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
+	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_jbuf))
+	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_jbuf)) / BITS_PER_UNIT);
+
+#ifdef DONT_USE_BUILTIN_SETJMP
+      tmp = build_function_type_list (integer_type_node, TREE_TYPE (f_jbuf),
+				      NULL);
+      setjmp_fn = build_decl (BUILTINS_LOCATION, FUNCTION_DECL,
+			      get_identifier ("setjmp"), tmp);
+      TREE_PUBLIC (setjmp_fn) = 1;
+      DECL_EXTERNAL (setjmp_fn) = 1;
+      DECL_ASSEMBLER_NAME (setjmp_fn);
+#endif
     }
 }
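
The field layout built above mirrors the runtime structure in unwind-sjlj.c.  Approximately, and only as an illustration of the fields f_prev, f_cs, f_data, f_per, f_lsda and f_jbuf being constructed (unwind-sjlj.c has the authoritative definition):

    /* Approximate shape of the runtime function context.  */
    struct SjLj_Function_Context
    {
      struct SjLj_Function_Context *prev; /* f_prev: chain of registered contexts */
      int call_site;                      /* f_cs: call-site index, set before calls */
      _Unwind_Word data[4];               /* f_data: values handed back by the personality */
      _Unwind_Personality_Fn personality; /* f_per */
      void *lsda;                         /* f_lsda */
      void *jbuf[];                       /* f_jbuf: jmp_buf or builtin-setjmp buffer,
                                             sized as computed above */
    };
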
 
 void
 init_eh_for_function (void)
 {
-  cfun->eh = ggc_alloc_cleared_eh_status ();
+  cfun->eh = ggc_cleared_alloc<eh_status> ();
 
   /* Make sure zero'th entries are used.  */
-  VEC_safe_push (eh_region, gc, cfun->eh->region_array, NULL);
-  VEC_safe_push (eh_landing_pad, gc, cfun->eh->lp_array, NULL);
+  vec_safe_push (cfun->eh->region_array, (eh_region)0);
+  vec_safe_push (cfun->eh->lp_array, (eh_landing_pad)0);
 }
 
 /* Routines to generate the exception tree somewhat directly.
@@ -318,7 +366,7 @@
   eh_region new_eh;
 
   /* Insert a new blank region as a leaf in the tree.  */
-  new_eh = ggc_alloc_cleared_eh_region_d ();
+  new_eh = ggc_cleared_alloc<eh_region_d> ();
   new_eh->type = type;
   new_eh->outer = outer;
   if (outer)
@@ -332,8 +380,8 @@
       cfun->eh->region_tree = new_eh;
     }
 
-  new_eh->index = VEC_length (eh_region, cfun->eh->region_array);
-  VEC_safe_push (eh_region, gc, cfun->eh->region_array, new_eh);
+  new_eh->index = vec_safe_length (cfun->eh->region_array);
+  vec_safe_push (cfun->eh->region_array, new_eh);
 
   /* Copy the language's notion of whether to use __cxa_end_cleanup.  */
   if (targetm.arm_eabi_unwinder && lang_hooks.eh_use_cxa_end_cleanup)
@@ -375,7 +423,7 @@
 	add_type_for_runtime (TREE_VALUE (type_node));
     }
 
-  c = ggc_alloc_cleared_eh_catch_d ();
+  c = ggc_cleared_alloc<eh_catch_d> ();
   c->type_list = type_list;
   l = t->u.eh_try.last_catch;
   c->prev_catch = l;
@@ -409,14 +457,14 @@
 eh_landing_pad
 gen_eh_landing_pad (eh_region region)
 {
-  eh_landing_pad lp = ggc_alloc_cleared_eh_landing_pad_d ();
+  eh_landing_pad lp = ggc_cleared_alloc<eh_landing_pad_d> ();
 
   lp->next_lp = region->landing_pads;
   lp->region = region;
-  lp->index = VEC_length (eh_landing_pad, cfun->eh->lp_array);
+  lp->index = vec_safe_length (cfun->eh->lp_array);
   region->landing_pads = lp;
 
-  VEC_safe_push (eh_landing_pad, gc, cfun->eh->lp_array, lp);
+  vec_safe_push (cfun->eh->lp_array, lp);
 
   return lp;
 }
@@ -424,7 +472,7 @@
 eh_region
 get_eh_region_from_number_fn (struct function *ifun, int i)
 {
-  return VEC_index (eh_region, ifun->eh->region_array, i);
+  return (*ifun->eh->region_array)[i];
 }
 
 eh_region
@@ -436,7 +484,7 @@
 eh_landing_pad
 get_eh_landing_pad_from_number_fn (struct function *ifun, int i)
 {
-  return VEC_index (eh_landing_pad, ifun->eh->lp_array, i);
+  return (*ifun->eh->lp_array)[i];
 }
 
 eh_landing_pad
@@ -449,13 +497,13 @@
 get_eh_region_from_lp_number_fn (struct function *ifun, int i)
 {
   if (i < 0)
-    return VEC_index (eh_region, ifun->eh->region_array, -i);
+    return (*ifun->eh->region_array)[-i];
   else if (i == 0)
     return NULL;
   else
     {
       eh_landing_pad lp;
-      lp = VEC_index (eh_landing_pad, ifun->eh->lp_array, i);
+      lp = (*ifun->eh->lp_array)[i];
       return lp->region;
     }
 }
@@ -481,7 +529,7 @@
 {
   duplicate_eh_regions_map label_map;
   void *label_map_data;
-  struct pointer_map_t *eh_map;
+  hash_map<void *, void *> *eh_map;
 };
 
 static void
@@ -490,12 +538,9 @@
 {
   eh_landing_pad old_lp, new_lp;
   eh_region new_r;
-  void **slot;
 
   new_r = gen_eh_region (old_r->type, outer);
-  slot = pointer_map_insert (data->eh_map, (void *)old_r);
-  gcc_assert (*slot == NULL);
-  *slot = (void *)new_r;
+  gcc_assert (!data->eh_map->put (old_r, new_r));
 
   switch (old_r->type)
     {
@@ -526,7 +571,10 @@
       break;
 
     case ERT_MUST_NOT_THROW:
-      new_r->u.must_not_throw = old_r->u.must_not_throw;
+      new_r->u.must_not_throw.failure_loc =
+	LOCATION_LOCUS (old_r->u.must_not_throw.failure_loc);
+      new_r->u.must_not_throw.failure_decl =
+	old_r->u.must_not_throw.failure_decl;
       break;
     }
 
@@ -537,9 +585,7 @@
 	continue;
 
       new_lp = gen_eh_landing_pad (new_r);
-      slot = pointer_map_insert (data->eh_map, (void *)old_lp);
-      gcc_assert (*slot == NULL);
-      *slot = (void *)new_lp;
+      gcc_assert (!data->eh_map->put (old_lp, new_lp));
 
       new_lp->post_landing_pad
 	= data->label_map (old_lp->post_landing_pad, data->label_map_data);
@@ -560,7 +606,7 @@
    that allows the caller to remap uses of both EH regions and
    EH landing pads.  */
 
-struct pointer_map_t *
+hash_map<void *, void *> *
 duplicate_eh_regions (struct function *ifun,
 		      eh_region copy_region, int outer_lp,
 		      duplicate_eh_regions_map map, void *map_data)
@@ -568,15 +614,14 @@
   struct duplicate_eh_regions_data data;
   eh_region outer_region;
 
-#ifdef ENABLE_CHECKING
-  verify_eh_tree (ifun);
-#endif
+  if (flag_checking)
+    verify_eh_tree (ifun);
 
   data.label_map = map;
   data.label_map_data = map_data;
-  data.eh_map = pointer_map_create ();
-
-  outer_region = get_eh_region_from_lp_number (outer_lp);
+  data.eh_map = new hash_map<void *, void *>;
+
+  outer_region = get_eh_region_from_lp_number_fn (cfun, outer_lp);
 
   /* Copy all the regions in the subtree.  */
   if (copy_region)
@@ -588,9 +633,8 @@
 	duplicate_eh_regions_1 (&data, r, outer_region);
     }
 
-#ifdef ENABLE_CHECKING
-  verify_eh_tree (cfun);
-#endif
+  if (flag_checking)
+    verify_eh_tree (cfun);
 
   return data.eh_map;
 }
@@ -601,81 +645,52 @@
 eh_region_outermost (struct function *ifun, eh_region region_a,
 		     eh_region region_b)
 {
-  sbitmap b_outer;
-
   gcc_assert (ifun->eh->region_array);
   gcc_assert (ifun->eh->region_tree);
 
-  b_outer = sbitmap_alloc (VEC_length (eh_region, ifun->eh->region_array));
-  sbitmap_zero (b_outer);
+  auto_sbitmap b_outer (ifun->eh->region_array->length ());
+  bitmap_clear (b_outer);
 
   do
     {
-      SET_BIT (b_outer, region_b->index);
+      bitmap_set_bit (b_outer, region_b->index);
       region_b = region_b->outer;
     }
   while (region_b);
 
   do
     {
-      if (TEST_BIT (b_outer, region_a->index))
+      if (bitmap_bit_p (b_outer, region_a->index))
 	break;
       region_a = region_a->outer;
     }
   while (region_a);
 
-  sbitmap_free (b_outer);
   return region_a;
 }
 
-static int
-t2r_eq (const void *pentry, const void *pdata)
-{
-  const_tree const entry = (const_tree) pentry;
-  const_tree const data = (const_tree) pdata;
-
-  return TREE_PURPOSE (entry) == data;
-}
-
-static hashval_t
-t2r_hash (const void *pentry)
-{
-  const_tree const entry = (const_tree) pentry;
-  return TREE_HASH (TREE_PURPOSE (entry));
-}
-
 void
 add_type_for_runtime (tree type)
 {
-  tree *slot;
-
   /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
   if (TREE_CODE (type) == NOP_EXPR)
     return;
 
-  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
-					    TREE_HASH (type), INSERT);
-  if (*slot == NULL)
-    {
-      tree runtime = lang_hooks.eh_runtime_type (type);
-      *slot = tree_cons (type, runtime, NULL_TREE);
-    }
+  bool existed = false;
+  tree *slot = &type_to_runtime_map->get_or_insert (type, &existed);
+  if (!existed)
+    *slot = lang_hooks.eh_runtime_type (type);
 }
 
 tree
 lookup_type_for_runtime (tree type)
 {
-  tree *slot;
-
   /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
   if (TREE_CODE (type) == NOP_EXPR)
     return type;
 
-  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
-					    TREE_HASH (type), NO_INSERT);
-
   /* We should have always inserted the data earlier.  */
-  return TREE_VALUE (*slot);
+  return *type_to_runtime_map->get (type);
 }
 
 
@@ -686,46 +701,57 @@
   int filter;
 };
 
+/* Helper for ttypes_filter hashing.  */
+
+struct ttypes_filter_hasher : free_ptr_hash <ttypes_filter>
+{
+  typedef tree_node *compare_type;
+  static inline hashval_t hash (const ttypes_filter *);
+  static inline bool equal (const ttypes_filter *, const tree_node *);
+};
+
 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
    (a tree) for a @TTypes type node we are thinking about adding.  */
 
-static int
-ttypes_filter_eq (const void *pentry, const void *pdata)
+inline bool
+ttypes_filter_hasher::equal (const ttypes_filter *entry, const tree_node *data)
 {
-  const struct ttypes_filter *const entry
-    = (const struct ttypes_filter *) pentry;
-  const_tree const data = (const_tree) pdata;
-
   return entry->t == data;
 }
 
-static hashval_t
-ttypes_filter_hash (const void *pentry)
+inline hashval_t
+ttypes_filter_hasher::hash (const ttypes_filter *entry)
 {
-  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
   return TREE_HASH (entry->t);
 }
 
+typedef hash_table<ttypes_filter_hasher> ttypes_hash_type;
+
+
+/* Helper for ehspec hashing.  */
+
+struct ehspec_hasher : free_ptr_hash <ttypes_filter>
+{
+  static inline hashval_t hash (const ttypes_filter *);
+  static inline bool equal (const ttypes_filter *, const ttypes_filter *);
+};
+
 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
    exception specification list we are thinking about adding.  */
 /* ??? Currently we use the type lists in the order given.  Someone
    should put these in some canonical order.  */
 
-static int
-ehspec_filter_eq (const void *pentry, const void *pdata)
+inline bool
+ehspec_hasher::equal (const ttypes_filter *entry, const ttypes_filter *data)
 {
-  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
-  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
-
   return type_list_equal (entry->t, data->t);
 }
 
 /* Hash function for exception specification lists.  */
 
-static hashval_t
-ehspec_filter_hash (const void *pentry)
+inline hashval_t
+ehspec_hasher::hash (const ttypes_filter *entry)
 {
-  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
   hashval_t h = 0;
   tree list;
 
@@ -734,16 +760,19 @@
   return h;
 }
 
+typedef hash_table<ehspec_hasher> ehspec_hash_type;
+
+
 /* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
    to speed up the search.  Return the filter value to be used.  */
 
 static int
-add_ttypes_entry (htab_t ttypes_hash, tree type)
+add_ttypes_entry (ttypes_hash_type *ttypes_hash, tree type)
 {
   struct ttypes_filter **slot, *n;
 
-  slot = (struct ttypes_filter **)
-    htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);
+  slot = ttypes_hash->find_slot_with_hash (type, (hashval_t) TREE_HASH (type),
+					  INSERT);
 
   if ((n = *slot) == NULL)
     {
@@ -751,10 +780,10 @@
 
       n = XNEW (struct ttypes_filter);
       n->t = type;
-      n->filter = VEC_length (tree, cfun->eh->ttype_data) + 1;
+      n->filter = vec_safe_length (cfun->eh->ttype_data) + 1;
       *slot = n;
 
-      VEC_safe_push (tree, gc, cfun->eh->ttype_data, type);
+      vec_safe_push (cfun->eh->ttype_data, type);
     }
 
   return n->filter;
@@ -764,23 +793,23 @@
    to speed up the search.  Return the filter value to be used.  */
 
 static int
-add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
+add_ehspec_entry (ehspec_hash_type *ehspec_hash, ttypes_hash_type *ttypes_hash,
+		  tree list)
 {
   struct ttypes_filter **slot, *n;
   struct ttypes_filter dummy;
 
   dummy.t = list;
-  slot = (struct ttypes_filter **)
-    htab_find_slot (ehspec_hash, &dummy, INSERT);
+  slot = ehspec_hash->find_slot (&dummy, INSERT);
 
   if ((n = *slot) == NULL)
     {
       int len;
 
       if (targetm.arm_eabi_unwinder)
-	len = VEC_length (tree, cfun->eh->ehspec_data.arm_eabi);
+	len = vec_safe_length (cfun->eh->ehspec_data.arm_eabi);
       else
-	len = VEC_length (uchar, cfun->eh->ehspec_data.other);
+	len = vec_safe_length (cfun->eh->ehspec_data.other);
 
       /* Filter value is a -1 based byte index into a uleb128 buffer.  */
 
@@ -793,8 +822,7 @@
       for (; list ; list = TREE_CHAIN (list))
 	{
 	  if (targetm.arm_eabi_unwinder)
-	    VEC_safe_push (tree, gc, cfun->eh->ehspec_data.arm_eabi,
-			   TREE_VALUE (list));
+	    vec_safe_push (cfun->eh->ehspec_data.arm_eabi, TREE_VALUE (list));
 	  else
 	    {
 	      /* Look up each type in the list and encode its filter
@@ -804,9 +832,9 @@
 	    }
 	}
       if (targetm.arm_eabi_unwinder)
-	VEC_safe_push (tree, gc, cfun->eh->ehspec_data.arm_eabi, NULL_TREE);
+	vec_safe_push (cfun->eh->ehspec_data.arm_eabi, NULL_TREE);
       else
-	VEC_safe_push (uchar, gc, cfun->eh->ehspec_data.other, 0);
+	vec_safe_push (cfun->eh->ehspec_data.other, (uchar)0);
     }
 
   return n->filter;
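
Since the filter returned above is a -1 based byte index into a uleb128 buffer, a sketch of the LEB128 encoding used for cfun->eh->ehspec_data.other may be useful.  This paraphrases push_uleb128 (declared near the top of this file); treat it as an illustration rather than the exact body:

    /* Unsigned LEB128: 7 value bits per byte, high bit set on all but
       the final byte.  */
    static void
    push_uleb128_sketch (vec<uchar, va_gc> **data_area, unsigned int value)
    {
      do
        {
          uchar byte = value & 0x7f;
          value >>= 7;
          if (value)
            byte |= 0x80;       /* more bytes follow */
          vec_safe_push (*data_area, byte);
        }
      while (value);
    }

    /* E.g. 624485 encodes as the three bytes 0xe5 0x8e 0x26.  */
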
@@ -821,20 +849,19 @@
 assign_filter_values (void)
 {
   int i;
-  htab_t ttypes, ehspec;
   eh_region r;
   eh_catch c;
 
-  cfun->eh->ttype_data = VEC_alloc (tree, gc, 16);
+  vec_alloc (cfun->eh->ttype_data, 16);
   if (targetm.arm_eabi_unwinder)
-    cfun->eh->ehspec_data.arm_eabi = VEC_alloc (tree, gc, 64);
+    vec_alloc (cfun->eh->ehspec_data.arm_eabi, 64);
   else
-    cfun->eh->ehspec_data.other = VEC_alloc (uchar, gc, 64);
-
-  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
-  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
-
-  for (i = 1; VEC_iterate (eh_region, cfun->eh->region_array, i, r); ++i)
+    vec_alloc (cfun->eh->ehspec_data.other, 64);
+
+  ehspec_hash_type ehspec (31);
+  ttypes_hash_type ttypes (31);
+
+  for (i = 1; vec_safe_iterate (cfun->eh->region_array, i, &r); ++i)
     {
       if (r == NULL)
 	continue;
@@ -856,8 +883,9 @@
 
 		  for ( ; tp_node; tp_node = TREE_CHAIN (tp_node))
 		    {
-		      int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
-		      tree flt_node = build_int_cst (NULL_TREE, flt);
+		      int flt
+		       	= add_ttypes_entry (&ttypes, TREE_VALUE (tp_node));
+		      tree flt_node = build_int_cst (integer_type_node, flt);
 
 		      c->filter_list
 			= tree_cons (NULL_TREE, flt_node, c->filter_list);
@@ -867,8 +895,8 @@
 		{
 		  /* Get a filter value for the NULL list also since it
 		     will need an action record anyway.  */
-		  int flt = add_ttypes_entry (ttypes, NULL);
-		  tree flt_node = build_int_cst (NULL_TREE, flt);
+		  int flt = add_ttypes_entry (&ttypes, NULL);
+		  tree flt_node = build_int_cst (integer_type_node, flt);
 
 		  c->filter_list
 		    = tree_cons (NULL_TREE, flt_node, NULL);
@@ -878,25 +906,22 @@
 
 	case ERT_ALLOWED_EXCEPTIONS:
 	  r->u.allowed.filter
-	    = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
+	    = add_ehspec_entry (&ehspec, &ttypes, r->u.allowed.type_list);
 	  break;
 
 	default:
 	  break;
 	}
     }
-
-  htab_delete (ttypes);
-  htab_delete (ehspec);
 }
 
 /* Emit SEQ into basic block just before INSN (that is assumed to be
    first instruction of some existing BB and return the newly
    produced block.  */
 static basic_block
-emit_to_new_bb_before (rtx seq, rtx insn)
+emit_to_new_bb_before (rtx_insn *seq, rtx_insn *insn)
 {
-  rtx last;
+  rtx_insn *last;
   basic_block bb;
   edge e;
   edge_iterator ei;
@@ -918,6 +943,29 @@
   return bb;
 }
 
+/* A subroutine of dw2_build_landing_pads, also used for edge splitting
+   at the rtl level.  Emit the code required by the target at a landing
+   pad for the given region.  */
+
+void
+expand_dw2_landing_pad_for_region (eh_region region)
+{
+  if (targetm.have_exception_receiver ())
+    emit_insn (targetm.gen_exception_receiver ());
+  else if (targetm.have_nonlocal_goto_receiver ())
+    emit_insn (targetm.gen_nonlocal_goto_receiver ());
+  else
+    { /* Nothing */ }
+
+  if (region->exc_ptr_reg)
+    emit_move_insn (region->exc_ptr_reg,
+		    gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
+  if (region->filter_reg)
+    emit_move_insn (region->filter_reg,
+		    gen_rtx_REG (targetm.eh_return_filter_mode (),
+				 EH_RETURN_DATA_REGNO (1)));
+}
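
For the rtl-level edge-splitting use mentioned in the comment, a caller would wrap the new function in a sequence and emit it at the head of the freshly created landing-pad block.  A rough sketch, with a hypothetical caller name, following the same start_sequence/end_sequence pattern as dw2_build_landing_pads below:

    /* Hypothetical caller: emit the receiver code for REGION at the head
       of NEW_BB, the block created when an EH edge was split.  */
    static void
    emit_landing_pad_receiver (basic_block new_bb, eh_region region)
    {
      start_sequence ();
      expand_dw2_landing_pad_for_region (region);
      rtx_insn *seq = get_insns ();
      end_sequence ();
      emit_insn_after (seq, BB_HEAD (new_bb));
    }
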
+
 /* Expand the extra code needed at landing pads for dwarf2 unwinding.  */
 
 static void
@@ -925,13 +973,19 @@
 {
   int i;
   eh_landing_pad lp;
-
-  for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
+  int e_flags = EDGE_FALLTHRU;
+
+  /* If we're going to partition blocks, we need to be able to add
+     new landing pads later, which means that we need to hold on to
+     the post-landing-pad block.  Prevent it from being merged away.
+     We'll remove this bit after partitioning.  */
+  if (flag_reorder_blocks_and_partition)
+    e_flags |= EDGE_PRESERVE;
+
+  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
     {
-      eh_region region;
       basic_block bb;
-      rtx seq;
-      edge e;
+      rtx_insn *seq;
 
       if (lp == NULL || lp->post_landing_pad == NULL)
 	continue;
@@ -942,39 +996,30 @@
       emit_label (lp->landing_pad);
       LABEL_PRESERVE_P (lp->landing_pad) = 1;
 
-#ifdef HAVE_exception_receiver
-      if (HAVE_exception_receiver)
-	emit_insn (gen_exception_receiver ());
-      else
-#endif
-#ifdef HAVE_nonlocal_goto_receiver
-	if (HAVE_nonlocal_goto_receiver)
-	  emit_insn (gen_nonlocal_goto_receiver ());
-	else
-#endif
-	  { /* Nothing */ }
-
-      region = lp->region;
-      if (region->exc_ptr_reg)
-	emit_move_insn (region->exc_ptr_reg,
-			gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
-      if (region->filter_reg)
-	emit_move_insn (region->filter_reg,
-			gen_rtx_REG (targetm.eh_return_filter_mode (),
-				     EH_RETURN_DATA_REGNO (1)));
+      expand_dw2_landing_pad_for_region (lp->region);
 
       seq = get_insns ();
       end_sequence ();
 
       bb = emit_to_new_bb_before (seq, label_rtx (lp->post_landing_pad));
-      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
-      e->count = bb->count;
-      e->probability = REG_BR_PROB_BASE;
+      bb->count = bb->next_bb->count;
+      bb->frequency = bb->next_bb->frequency;
+      make_single_succ_edge (bb, bb->next_bb, e_flags);
+      if (current_loops)
+	{
+	  struct loop *loop = bb->next_bb->loop_father;
+	  /* If we created a pre-header block, add the new block to the
+	     outer loop, otherwise to the loop itself.  */
+	  if (bb->next_bb == loop->header)
+	    add_bb_to_loop (bb, loop_outer (loop));
+	  else
+	    add_bb_to_loop (bb, loop);
+	}
     }
 }
 
 
-static VEC (int, heap) *sjlj_lp_call_site_index;
+static vec<int> sjlj_lp_call_site_index;
 
 /* Process all active landing pads.  Assign each one a compact dispatch
    index, and a call-site index.  */
@@ -982,22 +1027,21 @@
 static int
 sjlj_assign_call_site_values (void)
 {
-  htab_t ar_hash;
+  action_hash_type ar_hash (31);
   int i, disp_index;
   eh_landing_pad lp;
 
-  crtl->eh.action_record_data = VEC_alloc (uchar, gc, 64);
-  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
+  vec_alloc (crtl->eh.action_record_data, 64);
 
   disp_index = 0;
   call_site_base = 1;
-  for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
+  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
     if (lp && lp->post_landing_pad)
       {
 	int action, call_site;
 
 	/* First: build the action table.  */
-	action = collect_one_action_chain (ar_hash, lp->region);
+	action = collect_one_action_chain (&ar_hash, lp->region);
 
 	/* Next: assign call-site values.  If dwarf2 terms, this would be
 	   the region number assigned by convert_to_eh_region_ranges, but
@@ -1011,13 +1055,11 @@
 	/* Otherwise, look it up in the table.  */
 	else
 	  call_site = add_call_site (GEN_INT (disp_index), action, 0);
-	VEC_replace (int, sjlj_lp_call_site_index, i, call_site);
+	sjlj_lp_call_site_index[i] = call_site;
 
 	disp_index++;
       }
 
-  htab_delete (ar_hash);
-
   return disp_index;
 }
 
@@ -1028,7 +1070,8 @@
 sjlj_mark_call_sites (void)
 {
   int last_call_site = -2;
-  rtx insn, mem;
+  rtx_insn *insn;
+  rtx mem;
 
   for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
     {
@@ -1036,12 +1079,27 @@
       eh_region r;
       bool nothrow;
       int this_call_site;
-      rtx before, p;
+      rtx_insn *before, *p;
 
       /* Reset value tracking at extended basic block boundaries.  */
       if (LABEL_P (insn))
 	last_call_site = -2;
 
+      /* If the function allocates dynamic stack space, the context must
+	 be updated after every allocation/deallocation accordingly.  */
+      if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_UPDATE_SJLJ_CONTEXT)
+	{
+	  rtx buf_addr;
+
+	  start_sequence ();
+	  buf_addr = plus_constant (Pmode, XEXP (crtl->eh.sjlj_fc, 0),
+				    sjlj_fc_jbuf_ofs);
+	  expand_builtin_update_setjmp_buf (buf_addr);
+	  p = get_insns ();
+	  end_sequence ();
+	  emit_insn_before (p, insn);
+	}
+
       if (! INSN_P (insn))
 	continue;
 
@@ -1049,7 +1107,7 @@
       if (nothrow)
 	continue;
       if (lp)
-	this_call_site = VEC_index (int, sjlj_lp_call_site_index, lp->index);
+	this_call_site = sjlj_lp_call_site_index[lp->index];
       else if (r == NULL)
 	{
 	  /* Calls (and trapping insns) without notes are outside any
@@ -1072,12 +1130,12 @@
       /* Don't separate a call from it's argument loads.  */
       before = insn;
       if (CALL_P (insn))
-	before = find_first_parameter_load (insn, NULL_RTX);
+	before = find_first_parameter_load (insn, NULL);
 
       start_sequence ();
       mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
 			    sjlj_fc_call_site_ofs);
-      emit_move_insn (mem, GEN_INT (this_call_site));
+      emit_move_insn (mem, gen_int_mode (this_call_site, GET_MODE (mem)));
       p = get_insns ();
       end_sequence ();
 
@@ -1089,9 +1147,10 @@
 /* Construct the SjLj_Function_Context.  */
 
 static void
-sjlj_emit_function_enter (rtx dispatch_label)
+sjlj_emit_function_enter (rtx_code_label *dispatch_label)
 {
-  rtx fn_begin, fc, mem, seq;
+  rtx_insn *fn_begin, *seq;
+  rtx fc, mem;
   bool fn_begin_outside_block;
   rtx personality = get_personality_function (current_function_decl);
 
@@ -1122,31 +1181,27 @@
 
   if (dispatch_label)
     {
+      rtx addr = plus_constant (Pmode, XEXP (fc, 0), sjlj_fc_jbuf_ofs);
+
 #ifdef DONT_USE_BUILTIN_SETJMP
-      rtx x, last;
-      x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
-				   TYPE_MODE (integer_type_node), 1,
-				   plus_constant (XEXP (fc, 0),
-						  sjlj_fc_jbuf_ofs), Pmode);
+      addr = copy_addr_to_reg (addr);
+      addr = convert_memory_address (ptr_mode, addr);
+      tree addr_tree = make_tree (ptr_type_node, addr);
+
+      tree call_expr = build_call_expr (setjmp_fn, 1, addr_tree);
+      rtx x = expand_call (call_expr, NULL_RTX, false);
 
       emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
 			       TYPE_MODE (integer_type_node), 0,
-			       dispatch_label);
-      last = get_last_insn ();
-      if (JUMP_P (last) && any_condjump_p (last))
-	{
-	  gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
-	  add_reg_note (last, REG_BR_PROB, GEN_INT (REG_BR_PROB_BASE / 100));
-	}
+			       dispatch_label,
+			       profile_probability::unlikely ());
 #else
-      expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0),
-						  sjlj_fc_jbuf_ofs),
-				   dispatch_label);
+      expand_builtin_setjmp_setup (addr, dispatch_label);
 #endif
     }
 
   emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
-		     1, XEXP (fc, 0), Pmode);
+		     XEXP (fc, 0), Pmode);
 
   seq = get_insns ();
   end_sequence ();
@@ -1165,8 +1220,30 @@
 	  fn_begin_outside_block = false;
       }
 
+#ifdef DONT_USE_BUILTIN_SETJMP
+  if (dispatch_label)
+    {
+      /* The sequence contains a branch in the middle so we need to force
+	 the creation of a new basic block by means of BB_SUPERBLOCK.  */
+      if (fn_begin_outside_block)
+	{
+	  basic_block bb
+	    = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
+	  if (JUMP_P (BB_END (bb)))
+	    emit_insn_before (seq, BB_END (bb));
+	  else
+	    emit_insn_after (seq, BB_END (bb));
+	}
+      else
+	emit_insn_after (seq, fn_begin);
+
+      single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun))->flags |= BB_SUPERBLOCK;
+      return;
+    }
+#endif
+
   if (fn_begin_outside_block)
-    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
+    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
   else
     emit_insn_after (seq, fn_begin);
 }
@@ -1175,7 +1252,7 @@
    the call to unwind_sjlj_unregister_libfunc if needed.  */
 
 void
-sjlj_emit_function_exit_after (rtx after)
+sjlj_emit_function_exit_after (rtx_insn *after)
 {
   crtl->eh.sjlj_exit_after = after;
 }
@@ -1183,12 +1260,12 @@
 static void
 sjlj_emit_function_exit (void)
 {
-  rtx seq, insn;
+  rtx_insn *seq, *insn;
 
   start_sequence ();
 
   emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
-		     1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);
+		     XEXP (crtl->eh.sjlj_fc, 0), Pmode);
 
   seq = get_insns ();
   end_sequence ();
@@ -1205,18 +1282,17 @@
 }
 
 static void
-sjlj_emit_dispatch_table (rtx dispatch_label, int num_dispatch)
+sjlj_emit_dispatch_table (rtx_code_label *dispatch_label, int num_dispatch)
 {
-  enum machine_mode unwind_word_mode = targetm.unwind_word_mode ();
-  enum machine_mode filter_mode = targetm.eh_return_filter_mode ();
+  scalar_int_mode unwind_word_mode = targetm.unwind_word_mode ();
+  scalar_int_mode filter_mode = targetm.eh_return_filter_mode ();
   eh_landing_pad lp;
-  rtx mem, seq, fc, before, exc_ptr_reg, filter_reg;
-  rtx first_reachable_label;
+  rtx mem, fc, exc_ptr_reg, filter_reg;
+  rtx_insn *seq;
   basic_block bb;
   eh_region r;
-  edge e;
   int i, disp_index;
-  gimple switch_stmt;
+  vec<tree> dispatch_labels = vNULL;
 
   fc = crtl->eh.sjlj_fc;
 
@@ -1233,8 +1309,7 @@
      label on the nonlocal_goto_label list.  Since we're modeling these
      CFG edges more exactly, we can use the forced_labels list instead.  */
   LABEL_PRESERVE_P (dispatch_label) = 1;
-  forced_labels
-    = gen_rtx_EXPR_LIST (VOIDmode, dispatch_label, forced_labels);
+  vec_safe_push<rtx_insn *> (forced_labels, dispatch_label);
 #endif
 
   /* Load up exc_ptr and filter values from the function context.  */
@@ -1258,26 +1333,18 @@
   /* Jump to one of the directly reachable regions.  */
 
   disp_index = 0;
-  first_reachable_label = NULL;
+  rtx_code_label *first_reachable_label = NULL;
 
   /* If there's exactly one call site in the function, don't bother
      generating a switch statement.  */
-  switch_stmt = NULL;
   if (num_dispatch > 1)
-    {
-      tree disp;
-
-      mem = adjust_address (fc, TYPE_MODE (integer_type_node),
-			    sjlj_fc_call_site_ofs);
-      disp = make_tree (integer_type_node, mem);
-
-      switch_stmt = gimple_build_switch_nlabels (num_dispatch, disp, NULL);
-    }
-
-  for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
+    dispatch_labels.create (num_dispatch);
+
+  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
     if (lp && lp->post_landing_pad)
       {
-	rtx seq2, label;
+	rtx_insn *seq2;
+	rtx_code_label *label;
 
 	start_sequence ();
 
@@ -1285,15 +1352,13 @@
 
 	if (num_dispatch > 1)
 	  {
-	    tree t_label, case_elt;
+	    tree t_label, case_elt, t;
 
 	    t_label = create_artificial_label (UNKNOWN_LOCATION);
-	    case_elt = build3 (CASE_LABEL_EXPR, void_type_node,
-			       build_int_cst (NULL, disp_index),
-			       NULL, t_label);
-	    gimple_switch_set_label (switch_stmt, disp_index, case_elt);
-
-	    label = label_rtx (t_label);
+	    t = build_int_cst (integer_type_node, disp_index);
+	    case_elt = build_case_label (t, NULL, t_label);
+	    dispatch_labels.quick_push (case_elt);
+	    label = jump_target_rtx (t_label);
 	  }
 	else
 	  label = gen_label_rtx ();
@@ -1311,11 +1376,28 @@
 	seq2 = get_insns ();
 	end_sequence ();
 
-	before = label_rtx (lp->post_landing_pad);
+	rtx_insn *before = label_rtx (lp->post_landing_pad);
 	bb = emit_to_new_bb_before (seq2, before);
-	e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
-	e->count = bb->count;
-	e->probability = REG_BR_PROB_BASE;
+	make_single_succ_edge (bb, bb->next_bb, EDGE_FALLTHRU);
+	if (current_loops)
+	  {
+	    struct loop *loop = bb->next_bb->loop_father;
+	    /* If we created a pre-header block, add the new block to the
+	       outer loop, otherwise to the loop itself.  */
+	    if (bb->next_bb == loop->header)
+	      add_bb_to_loop (bb, loop_outer (loop));
+	    else
+	      add_bb_to_loop (bb, loop);
+	    /* ???  For multiple dispatches we will end up with edges
+	       from the loop tree root into this loop, making it a
+	       multiple-entry loop.  Discard all affected loops.  */
+	    if (num_dispatch > 1)
+	      {
+		for (loop = bb->loop_father;
+		     loop_outer (loop); loop = loop_outer (loop))
+		  mark_loop_for_removal (loop);
+	      }
+	  }
 
 	disp_index++;
       }
@@ -1323,8 +1405,9 @@
 
   if (num_dispatch > 1)
     {
-      expand_case (switch_stmt);
-      expand_builtin_trap ();
+      rtx disp = adjust_address (fc, TYPE_MODE (integer_type_node),
+				 sjlj_fc_call_site_ofs);
+      expand_sjlj_dispatch_table (disp, dispatch_labels);
     }
 
   seq = get_insns ();
@@ -1333,9 +1416,25 @@
   bb = emit_to_new_bb_before (seq, first_reachable_label);
   if (num_dispatch == 1)
     {
-      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
-      e->count = bb->count;
-      e->probability = REG_BR_PROB_BASE;
+      make_single_succ_edge (bb, bb->next_bb, EDGE_FALLTHRU);
+      if (current_loops)
+	{
+	  struct loop *loop = bb->next_bb->loop_father;
+	  /* If we created a pre-header block, add the new block to the
+	     outer loop, otherwise to the loop itself.  */
+	  if (bb->next_bb == loop->header)
+	    add_bb_to_loop (bb, loop_outer (loop));
+	  else
+	    add_bb_to_loop (bb, loop);
+	}
+    }
+  else
+    {
+      /* We are not wiring up edges here, but as the dispatcher call
+         is at function begin simply associate the block with the
+	 outermost (non-)loop.  */
+      if (current_loops)
+	add_bb_to_loop (bb, current_loops->tree_root);
     }
 }
 
@@ -1344,15 +1443,15 @@
 {
   int num_dispatch;
 
-  num_dispatch = VEC_length (eh_landing_pad, cfun->eh->lp_array);
+  num_dispatch = vec_safe_length (cfun->eh->lp_array);
   if (num_dispatch == 0)
     return;
-  VEC_safe_grow (int, heap, sjlj_lp_call_site_index, num_dispatch);
+  sjlj_lp_call_site_index.safe_grow_cleared (num_dispatch);
 
   num_dispatch = sjlj_assign_call_site_values ();
   if (num_dispatch > 0)
     {
-      rtx dispatch_label = gen_label_rtx ();
+      rtx_code_label *dispatch_label = gen_label_rtx ();
       int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
 					TYPE_MODE (sjlj_fc_type_node),
 					TYPE_ALIGN (sjlj_fc_type_node));
@@ -1380,35 +1479,47 @@
 			      align);
 
       sjlj_mark_call_sites ();
-      sjlj_emit_function_enter (NULL_RTX);
+      sjlj_emit_function_enter (NULL);
       sjlj_emit_function_exit ();
     }
 
-  VEC_free (int, heap, sjlj_lp_call_site_index);
+  sjlj_lp_call_site_index.release ();
+}
+
+/* Update the sjlj function context.  This function should be called
+   whenever we allocate or deallocate dynamic stack space.  */
+
+void
+update_sjlj_context (void)
+{
+  if (!flag_exceptions)
+    return;
+
+  emit_note (NOTE_INSN_UPDATE_SJLJ_CONTEXT);
 }
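
A sketch of how this is meant to be used; the caller below is hypothetical, but the note it emits is the one consumed by the new code added to sjlj_mark_call_sites above:

    /* Hypothetical middle-end site that adjusts the stack dynamically;
       NEW_SP is an illustrative rtx.  */
    static void
    adjust_stack_and_note (rtx new_sp)
    {
      emit_move_insn (stack_pointer_rtx, new_sp); /* the allocation/deallocation */
      update_sjlj_context ();   /* emits NOTE_INSN_UPDATE_SJLJ_CONTEXT, later
                                   expanded by sjlj_mark_call_sites into
                                   expand_builtin_update_setjmp_buf on the jbuf */
    }
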
 
 /* After initial rtl generation, call back to finish generating
    exception support code.  */
 
-static void
+void
 finish_eh_generation (void)
 {
   basic_block bb;
 
   /* Construct the landing pads.  */
-  if (targetm.except_unwind_info (&global_options) == UI_SJLJ)
+  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
     sjlj_build_landing_pads ();
   else
     dw2_build_landing_pads ();
   break_superblocks ();
 
-  if (targetm.except_unwind_info (&global_options) == UI_SJLJ
-      /* Kludge for Alpha/Tru64 (see alpha_gp_save_rtx).  */
-      || single_succ_edge (ENTRY_BLOCK_PTR)->insns.r)
+  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
+      /* Kludge for Alpha (see alpha_gp_save_rtx).  */
+      || single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->insns.r)
     commit_edge_insertions ();
 
   /* Redirect all EH edges from the post_landing_pad to the landing pad.  */
-  FOR_EACH_BB (bb)
+  FOR_EACH_BB_FN (bb, cfun)
     {
       eh_landing_pad lp;
       edge_iterator ei;
@@ -1437,41 +1548,6 @@
 	}
     }
 }
-
-static bool
-gate_handle_eh (void)
-{
-  /* Nothing to do if no regions created.  */
-  return cfun->eh->region_tree != NULL;
-}
-
-/* Complete generation of exception handling code.  */
-static unsigned int
-rest_of_handle_eh (void)
-{
-  finish_eh_generation ();
-  cleanup_cfg (CLEANUP_NO_INSN_DEL);
-  return 0;
-}
-
-struct rtl_opt_pass pass_rtl_eh =
-{
- {
-  RTL_PASS,
-  "rtl eh",                             /* name */
-  gate_handle_eh,                       /* gate */
-  rest_of_handle_eh,			/* execute */
-  NULL,                                 /* sub */
-  NULL,                                 /* next */
-  0,                                    /* static_pass_number */
-  TV_JUMP,                              /* tv_id */
-  0,                                    /* properties_required */
-  0,                                    /* properties_provided */
-  0,                                    /* properties_destroyed */
-  0,                                    /* todo_flags_start */
-  TODO_dump_func                        /* todo_flags_finish */
- }
-};
 
 /* This section handles removing dead code for flow.  */
 
@@ -1486,33 +1562,29 @@
 
   if (lp->post_landing_pad)
     EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
-  VEC_replace (eh_landing_pad, cfun->eh->lp_array, lp->index, NULL);
+  (*cfun->eh->lp_array)[lp->index] = NULL;
 }
 
-/* Splice REGION from the region tree.  */
-
-void
-remove_eh_handler (eh_region region)
+/* Splice the EH region at PP from the region tree.  */
+
+static void
+remove_eh_handler_splicer (eh_region *pp)
 {
-  eh_region *pp, *pp_start, p, outer;
+  eh_region region = *pp;
   eh_landing_pad lp;
 
   for (lp = region->landing_pads; lp ; lp = lp->next_lp)
     {
       if (lp->post_landing_pad)
 	EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
-      VEC_replace (eh_landing_pad, cfun->eh->lp_array, lp->index, NULL);
+      (*cfun->eh->lp_array)[lp->index] = NULL;
     }
 
-  outer = region->outer;
-  if (outer)
-    pp_start = &outer->inner;
-  else
-    pp_start = &cfun->eh->region_tree;
-  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
-    continue;
   if (region->inner)
     {
+      eh_region p, outer;
+      outer = region->outer;
+
       *pp = p = region->inner;
       do
 	{
@@ -1524,7 +1596,60 @@
     }
   *pp = region->next_peer;
 
-  VEC_replace (eh_region, cfun->eh->region_array, region->index, NULL);
+  (*cfun->eh->region_array)[region->index] = NULL;
+}
+
+/* Splice a single EH region REGION from the region tree.
+
+   To unlink REGION, we need to find the pointer to it with a relatively
+   expensive search in REGION's outer region.  If you are going to
+   remove a number of handlers, using remove_unreachable_eh_regions may
+   be a better option.  */
+
+void
+remove_eh_handler (eh_region region)
+{
+  eh_region *pp, *pp_start, p, outer;
+
+  outer = region->outer;
+  if (outer)
+    pp_start = &outer->inner;
+  else
+    pp_start = &cfun->eh->region_tree;
+  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
+    continue;
+
+  remove_eh_handler_splicer (pp);
+}
+
+/* Worker for remove_unreachable_eh_regions.
+   PP is a pointer to the region to start a region tree depth-first
+   search from.  R_REACHABLE is the set of regions that have to be
+   preserved.  */
+
+static void
+remove_unreachable_eh_regions_worker (eh_region *pp, sbitmap r_reachable)
+{
+  while (*pp)
+    {
+      eh_region region = *pp;
+      remove_unreachable_eh_regions_worker (&region->inner, r_reachable);
+      if (!bitmap_bit_p (r_reachable, region->index))
+	remove_eh_handler_splicer (pp);
+      else
+	pp = &region->next_peer;
+    }
+}
+
+/* Splice all EH regions *not* marked in R_REACHABLE from the region tree.
+   Do this by traversing the EH tree top-down and splice out regions that
+   are not marked.  By removing regions from the leaves, we avoid costly
+   searches in the region tree.  */
+
+void
+remove_unreachable_eh_regions (sbitmap r_reachable)
+{
+  remove_unreachable_eh_regions_worker (&cfun->eh->region_tree, r_reachable);
 }
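
The worker above uses the classic pointer-to-pointer idiom to splice nodes out of a singly linked peer list without re-searching for a predecessor.  A generic sketch with a hypothetical node type (the real splicer, remove_eh_handler_splicer, additionally re-links inner regions into the parent's place):

    struct node { struct node *next; bool keep; };

    /* Remove every node not marked KEEP from the list at *PP in one pass.  */
    static void
    prune_list (struct node **pp)
    {
      while (*pp)
        {
          struct node *n = *pp;
          if (!n->keep)
            *pp = n->next;      /* splice N out; PP still addresses the link */
          else
            pp = &n->next;      /* keep N; advance PP to its link field */
        }
    }
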
 
 /* Invokes CALLBACK for every exception handler landing pad label.
@@ -1536,11 +1661,11 @@
   eh_landing_pad lp;
   int i;
 
-  for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
+  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
     {
       if (lp)
 	{
-	  rtx lab = lp->landing_pad;
+	  rtx_code_label *lab = lp->landing_pad;
 	  if (lab && LABEL_P (lab))
 	    (*callback) (lab);
 	}
@@ -1572,7 +1697,7 @@
   direct call cases) and just pull the data out of the trees.  */
 
 void
-make_reg_eh_region_note (rtx insn, int ecf_flags, int lp_nr)
+make_reg_eh_region_note (rtx_insn *insn, int ecf_flags, int lp_nr)
 {
   rtx value;
   if (ecf_flags & ECF_NOTHROW)
@@ -1589,7 +1714,7 @@
    already exists.  */
 
 void
-make_reg_eh_region_note_nothrow_nononlocal (rtx insn)
+make_reg_eh_region_note_nothrow_nononlocal (rtx_insn *insn)
 {
   rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
   rtx intmin = GEN_INT (INT_MIN);
@@ -1620,9 +1745,10 @@
    to look for a note, or the note itself.  */
 
 void
-copy_reg_eh_region_note_forward (rtx note_or_insn, rtx first, rtx last)
+copy_reg_eh_region_note_forward (rtx note_or_insn, rtx_insn *first, rtx last)
 {
-  rtx insn, note = note_or_insn;
+  rtx_insn *insn;
+  rtx note = note_or_insn;
 
   if (INSN_P (note_or_insn))
     {
@@ -1641,9 +1767,10 @@
 /* Likewise, but iterate backward.  */
 
 void
-copy_reg_eh_region_note_backward (rtx note_or_insn, rtx last, rtx first)
+copy_reg_eh_region_note_backward (rtx note_or_insn, rtx_insn *last, rtx first)
 {
-  rtx insn, note = note_or_insn;
+  rtx_insn *insn;
+  rtx note = note_or_insn;
 
   if (INSN_P (note_or_insn))
     {
@@ -1694,10 +1821,10 @@
     }
 
   if (lp_nr < 0)
-    r = VEC_index (eh_region, cfun->eh->region_array, -lp_nr);
+    r = (*cfun->eh->region_array)[-lp_nr];
   else
     {
-      lp = VEC_index (eh_landing_pad, cfun->eh->lp_array, lp_nr);
+      lp = (*cfun->eh->lp_array)[lp_nr];
       r = lp->region;
     }
 
@@ -1756,11 +1883,11 @@
   if (NONJUMP_INSN_P (insn)
       && GET_CODE (PATTERN (insn)) == SEQUENCE)
     {
-      rtx seq = PATTERN (insn);
-      int i, n = XVECLEN (seq, 0);
+      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
+      int i, n = seq->len ();
 
       for (i = 0; i < n; i++)
-	if (can_throw_external (XVECEXP (seq, 0, i)))
+	if (can_throw_external (seq->element (i)))
 	  return true;
 
       return false;
@@ -1800,11 +1927,11 @@
   if (NONJUMP_INSN_P (insn)
       && GET_CODE (PATTERN (insn)) == SEQUENCE)
     {
-      rtx seq = PATTERN (insn);
-      int i, n = XVECLEN (seq, 0);
+      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
+      int i, n = seq->len ();
 
       for (i = 0; i < n; i++)
-	if (!insn_nothrow_p (XVECEXP (seq, 0, i)))
+	if (!insn_nothrow_p (seq->element (i)))
 	  return false;
 
       return true;
@@ -1817,7 +1944,7 @@
 /* ??? This test is here in this file because it (ab)uses REG_EH_REGION.  */
 
 bool
-can_nonlocal_goto (const_rtx insn)
+can_nonlocal_goto (const rtx_insn *insn)
 {
   if (nonlocal_goto_handler_labels && CALL_P (insn))
     {
@@ -1833,7 +1960,7 @@
 static unsigned int
 set_nothrow_function_flags (void)
 {
-  rtx insn;
+  rtx_insn *insn;
 
   crtl->nothrow = 1;
 
@@ -1866,28 +1993,15 @@
 	  }
       }
 
-  for (insn = crtl->epilogue_delay_list; insn;
-       insn = XEXP (insn, 1))
-    if (can_throw_external (insn))
-      {
-        crtl->nothrow = 0;
-
-	if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
-	  {
-	    crtl->all_throwers_are_sibcalls = 0;
-	    return 0;
-	  }
-      }
   if (crtl->nothrow
-      && (cgraph_function_body_availability (cgraph_node
-					     (current_function_decl))
+      && (cgraph_node::get (current_function_decl)->get_availability ()
           >= AVAIL_AVAILABLE))
     {
-      struct cgraph_node *node = cgraph_node (current_function_decl);
+      struct cgraph_node *node = cgraph_node::get (current_function_decl);
       struct cgraph_edge *e;
       for (e = node->callers; e; e = e->next_caller)
         e->can_throw_external = false;
-      cgraph_set_nothrow_flag (node, true);
+      node->set_nothrow_flag (true);
 
       if (dump_file)
 	fprintf (dump_file, "Marking function nothrow: %s\n\n",
@@ -1896,24 +2010,43 @@
   return 0;
 }
 
-struct rtl_opt_pass pass_set_nothrow_function_flags =
+namespace {
+
+const pass_data pass_data_set_nothrow_function_flags =
+{
+  RTL_PASS, /* type */
+  "nothrow", /* name */
+  OPTGROUP_NONE, /* optinfo_flags */
+  TV_NONE, /* tv_id */
+  0, /* properties_required */
+  0, /* properties_provided */
+  0, /* properties_destroyed */
+  0, /* todo_flags_start */
+  0, /* todo_flags_finish */
+};
+
+class pass_set_nothrow_function_flags : public rtl_opt_pass
 {
- {
-  RTL_PASS,
-  "nothrow",                            /* name */
-  NULL,                                 /* gate */
-  set_nothrow_function_flags,           /* execute */
-  NULL,                                 /* sub */
-  NULL,                                 /* next */
-  0,                                    /* static_pass_number */
-  TV_NONE,                              /* tv_id */
-  0,                                    /* properties_required */
-  0,                                    /* properties_provided */
-  0,                                    /* properties_destroyed */
-  0,                                    /* todo_flags_start */
-  TODO_dump_func,                       /* todo_flags_finish */
- }
-};
+public:
+  pass_set_nothrow_function_flags (gcc::context *ctxt)
+    : rtl_opt_pass (pass_data_set_nothrow_function_flags, ctxt)
+  {}
+
+  /* opt_pass methods: */
+  virtual unsigned int execute (function *)
+    {
+      return set_nothrow_function_flags ();
+    }
+
+}; // class pass_set_nothrow_function_flags
+
+} // anon namespace
+
+rtl_opt_pass *
+make_pass_set_nothrow_function_flags (gcc::context *ctxt)
+{
+  return new pass_set_nothrow_function_flags (ctxt);
+}
 
 
 /* Various hooks for unwind library.  */
@@ -1927,10 +2060,10 @@
   HOST_WIDE_INT region_nr;
   eh_region region;
 
-  gcc_assert (host_integerp (region_nr_t, 0));
-  region_nr = tree_low_cst (region_nr_t, 0);
-
-  region = VEC_index (eh_region, cfun->eh->region_array, region_nr);
+  gcc_assert (tree_fits_shwi_p (region_nr_t));
+  region_nr = tree_to_shwi (region_nr_t);
+
+  region = (*cfun->eh->region_array)[region_nr];
 
   /* ??? We shouldn't have been able to delete a eh region without
      deleting all the code that depended on it.  */
@@ -1973,7 +2106,7 @@
     = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
   eh_region src
     = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 1));
-  enum machine_mode fmode = targetm.eh_return_filter_mode ();
+  scalar_int_mode fmode = targetm.eh_return_filter_mode ();
 
   if (dst->exc_ptr_reg == NULL)
     dst->exc_ptr_reg = gen_reg_rtx (ptr_mode);
@@ -2001,9 +2134,7 @@
      able to copy the saved values for any registers from frames we unwind.  */
   crtl->saves_all_registers = 1;
 
-#ifdef SETUP_FRAME_ADDRESSES
   SETUP_FRAME_ADDRESSES ();
-#endif
 }
 
 /* Map a non-negative number to an eh return data register number; expands
@@ -2022,7 +2153,7 @@
       return constm1_rtx;
     }
 
-  iwhich = tree_low_cst (which, 1);
+  iwhich = tree_to_uhwi (which);
   iwhich = EH_RETURN_DATA_REGNO (iwhich);
   if (iwhich == INVALID_REGNUM)
     return constm1_rtx;
@@ -2055,14 +2186,13 @@
     }
 
   /* First mask out any unwanted bits.  */
-#ifdef MASK_RETURN_ADDR
-  expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
-#endif
+  rtx mask = MASK_RETURN_ADDR;
+  if (mask)
+    expand_and (Pmode, addr, mask, addr);
 
   /* Then adjust to find the real return address.  */
-#if defined (RETURN_ADDR_OFFSET)
-  addr = plus_constant (addr, RETURN_ADDR_OFFSET);
-#endif
+  if (RETURN_ADDR_OFFSET)
+    addr = plus_constant (Pmode, addr, RETURN_ADDR_OFFSET);
 
   return addr;
 }
@@ -2078,10 +2208,11 @@
 
   addr = convert_memory_address (Pmode, addr);
 
-#ifdef RETURN_ADDR_OFFSET
-  addr = force_reg (Pmode, addr);
-  addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
-#endif
+  if (RETURN_ADDR_OFFSET)
+    {
+      addr = force_reg (Pmode, addr);
+      addr = plus_constant (Pmode, addr, -RETURN_ADDR_OFFSET);
+    }
 
   return addr;
 }
@@ -2100,7 +2231,7 @@
 		     VOIDmode, EXPAND_NORMAL);
   tmp = convert_memory_address (Pmode, tmp);
   if (!crtl->eh.ehr_stackadj)
-    crtl->eh.ehr_stackadj = copy_to_reg (tmp);
+    crtl->eh.ehr_stackadj = copy_addr_to_reg (tmp);
   else if (tmp != crtl->eh.ehr_stackadj)
     emit_move_insn (crtl->eh.ehr_stackadj, tmp);
 #endif
@@ -2109,7 +2240,7 @@
 		     VOIDmode, EXPAND_NORMAL);
   tmp = convert_memory_address (Pmode, tmp);
   if (!crtl->eh.ehr_handler)
-    crtl->eh.ehr_handler = copy_to_reg (tmp);
+    crtl->eh.ehr_handler = copy_addr_to_reg (tmp);
   else if (tmp != crtl->eh.ehr_handler)
     emit_move_insn (crtl->eh.ehr_handler, tmp);
 
@@ -2125,7 +2256,7 @@
 void
 expand_eh_return (void)
 {
-  rtx around_label;
+  rtx_code_label *around_label;
 
   if (! crtl->eh.ehr_label)
     return;
@@ -2146,17 +2277,14 @@
   emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
 #endif
 
-#ifdef HAVE_eh_return
-  if (HAVE_eh_return)
-    emit_insn (gen_eh_return (crtl->eh.ehr_handler));
+  if (targetm.have_eh_return ())
+    emit_insn (targetm.gen_eh_return (crtl->eh.ehr_handler));
   else
-#endif
     {
-#ifdef EH_RETURN_HANDLER_RTX
-      emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
-#else
-      error ("__builtin_eh_return not supported on this target");
-#endif
+      if (rtx handler = EH_RETURN_HANDLER_RTX)
+	emit_move_insn (handler, crtl->eh.ehr_handler);
+      else
+	error ("__builtin_eh_return not supported on this target");
     }
 
   emit_label (around_label);
@@ -2182,52 +2310,19 @@
   return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
 }
 
-/* In the following functions, we represent entries in the action table
-   as 1-based indices.  Special cases are:
-
-	 0:	null action record, non-null landing pad; implies cleanups
-	-1:	null action record, null landing pad; implies no action
-	-2:	no call-site entry; implies must_not_throw
-	-3:	we have yet to process outer regions
-
-   Further, no special cases apply to the "next" field of the record.
-   For next, 0 means end of list.  */
-
-struct action_record
-{
-  int offset;
-  int filter;
-  int next;
-};
-
 static int
-action_record_eq (const void *pentry, const void *pdata)
-{
-  const struct action_record *entry = (const struct action_record *) pentry;
-  const struct action_record *data = (const struct action_record *) pdata;
-  return entry->filter == data->filter && entry->next == data->next;
-}
-
-static hashval_t
-action_record_hash (const void *pentry)
-{
-  const struct action_record *entry = (const struct action_record *) pentry;
-  return entry->next * 1009 + entry->filter;
-}
-
-static int
-add_action_record (htab_t ar_hash, int filter, int next)
+add_action_record (action_hash_type *ar_hash, int filter, int next)
 {
   struct action_record **slot, *new_ar, tmp;
 
   tmp.filter = filter;
   tmp.next = next;
-  slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
+  slot = ar_hash->find_slot (&tmp, INSERT);
 
   if ((new_ar = *slot) == NULL)
     {
       new_ar = XNEW (struct action_record);
-      new_ar->offset = VEC_length (uchar, crtl->eh.action_record_data) + 1;
+      new_ar->offset = crtl->eh.action_record_data->length () + 1;
       new_ar->filter = filter;
       new_ar->next = next;
       *slot = new_ar;
@@ -2239,7 +2334,7 @@
 
       push_sleb128 (&crtl->eh.action_record_data, filter);
       if (next)
-	next -= VEC_length (uchar, crtl->eh.action_record_data) + 1;
+	next -= crtl->eh.action_record_data->length () + 1;
       push_sleb128 (&crtl->eh.action_record_data, next);
     }
 
@@ -2247,7 +2342,7 @@
 }
 
 static int
-collect_one_action_chain (htab_t ar_hash, eh_region region)
+collect_one_action_chain (action_hash_type *ar_hash, eh_region region)
 {
   int next;
 
@@ -2359,15 +2454,82 @@
 {
   call_site_record record;
 
-  record = ggc_alloc_call_site_record_d ();
+  record = ggc_alloc<call_site_record_d> ();
   record->landing_pad = landing_pad;
   record->action = action;
 
-  VEC_safe_push (call_site_record, gc,
-		 crtl->eh.call_site_record[section], record);
-
-  return call_site_base + VEC_length (call_site_record,
-				      crtl->eh.call_site_record[section]) - 1;
+  vec_safe_push (crtl->eh.call_site_record_v[section], record);
+
+  return call_site_base + crtl->eh.call_site_record_v[section]->length () - 1;
+}
+
+static rtx_note *
+emit_note_eh_region_end (rtx_insn *insn)
+{
+  rtx_insn *next = NEXT_INSN (insn);
+
+  /* Make sure we do not split a call and its corresponding
+     CALL_ARG_LOCATION note.  */
+  if (next && NOTE_P (next)
+      && NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
+    insn = next;
+
+  return emit_note_after (NOTE_INSN_EH_REGION_END, insn);
+}
+
+/* Add a NOP after NOTE_INSN_SWITCH_TEXT_SECTIONS when the cold section starts
+   with a landing pad.
+   If the landing pad were at offset 0 from the start label of the section,
+   we would miss EH delivery because 0 is special and means no landing pad.  */
+
+static bool
+maybe_add_nop_after_section_switch (void)
+{
+  if (!crtl->uses_eh_lsda
+      || !crtl->eh.call_site_record_v[1])
+    return false;
+  int n = vec_safe_length (crtl->eh.call_site_record_v[1]);
+  hash_set<rtx_insn *> visited;
+
+  for (int i = 0; i < n; ++i)
+    {
+      struct call_site_record_d *cs
+	 = (*crtl->eh.call_site_record_v[1])[i];
+      if (cs->landing_pad)
+	{
+	  rtx_insn *insn = as_a <rtx_insn *> (cs->landing_pad);
+	  while (true)
+	    {
+	      /* Landing pads have the LABEL_PRESERVE_P flag set.  This check
+		 makes sure that we do not walk past a landing pad visited
+		 earlier, which would result in possibly quadratic behaviour.  */
+	      if (LABEL_P (insn) && LABEL_PRESERVE_P (insn)
+		  && visited.add (insn))
+		break;
+
+	      /* Conservatively assume that an ASM insn may be empty.  We have
+		 no way to tell what it contains.  */
+	      if (active_insn_p (insn)
+		  && GET_CODE (PATTERN (insn)) != ASM_INPUT
+		  && GET_CODE (PATTERN (insn)) != ASM_OPERANDS)
+		break;
+
+	      /* If we walked back to the section switch note, the landing pad
+		 effectively starts the cold section and a NOP is needed.  */
+	      if (GET_CODE (insn) == NOTE
+		  && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
+		{
+		  emit_insn_after (gen_nop (), insn);
+		  break;
+		}
+
+	      /* We visit only labels from the cold section.  We should never
+		 hit the beginning of the insn stream here.  */
+	      insn = PREV_INSN (insn);
+	    }
+	}
+    }
+  return false;
 }
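
For context on the zero-offset special case handled by the function above: in the
LSDA that dw2_output_call_site_table emits further down, each call-site entry
records its landing pad as an offset from the landing-pad base, and an offset of
0 is reserved to mean "no landing pad".  A minimal sketch of one entry as a reader
of the table might model it; the struct and field names are illustrative only
(not GCC identifiers), and the actual field widths depend on the chosen call-site
encoding (uleb128 or udata4):

    /* Illustrative layout of one LSDA call-site entry, not a GCC type.  */
    struct lsda_call_site_entry
    {
      unsigned long region_start;   /* call-site offset from the landing-pad base */
      unsigned long region_length;  /* length of the call-site region */
      unsigned long landing_pad;    /* landing-pad offset; 0 means "no landing pad",
				       hence the NOP inserted above */
      unsigned long action;         /* 1-based offset into the action table;
				       0 means no action record (cleanups only) */
    };
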
 
 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
@@ -2377,25 +2539,22 @@
 static unsigned int
 convert_to_eh_region_ranges (void)
 {
-  rtx insn, iter, note;
-  htab_t ar_hash;
+  rtx insn;
+  rtx_insn *iter;
+  rtx_note *note;
+  action_hash_type ar_hash (31);
   int last_action = -3;
-  rtx last_action_insn = NULL_RTX;
+  rtx_insn *last_action_insn = NULL;
   rtx last_landing_pad = NULL_RTX;
-  rtx first_no_action_insn = NULL_RTX;
+  rtx_insn *first_no_action_insn = NULL;
   int call_site = 0;
   int cur_sec = 0;
-  rtx section_switch_note = NULL_RTX;
-  rtx first_no_action_insn_before_switch = NULL_RTX;
-  rtx last_no_action_insn_before_switch = NULL_RTX;
-  rtx *pad_map = NULL;
-  sbitmap pad_loc = NULL;
-  int min_labelno = 0, max_labelno = 0;
+  rtx_insn *section_switch_note = NULL;
+  rtx_insn *first_no_action_insn_before_switch = NULL;
+  rtx_insn *last_no_action_insn_before_switch = NULL;
   int saved_call_site_base = call_site_base;
 
-  crtl->eh.action_record_data = VEC_alloc (uchar, gc, 64);
-
-  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
+  vec_alloc (crtl->eh.action_record_data, 64);
 
   for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
     if (INSN_P (iter))
@@ -2404,7 +2563,7 @@
 	eh_region region;
 	bool nothrow;
 	int this_action;
-	rtx this_landing_pad;
+	rtx_code_label *this_landing_pad;
 
 	insn = iter;
 	if (NONJUMP_INSN_P (insn)
@@ -2415,7 +2574,7 @@
 	if (nothrow)
 	  continue;
 	if (region)
-	  this_action = collect_one_action_chain (ar_hash, region);
+	  this_action = collect_one_action_chain (&ar_hash, region);
 	else
 	  this_action = -1;
 
@@ -2435,7 +2594,7 @@
 	if (this_action >= 0)
 	  this_landing_pad = lp->landing_pad;
 	else
-	  this_landing_pad = NULL_RTX;
+	  this_landing_pad = NULL;
 
 	/* Differing actions or landing pads implies a change in call-site
 	   info, which implies some EH_REGION note should be emitted.  */
@@ -2453,14 +2612,14 @@
 		note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
 					 first_no_action_insn_before_switch);
 		NOTE_EH_HANDLER (note) = call_site;
-		note = emit_note_after (NOTE_INSN_EH_REGION_END,
-					last_no_action_insn_before_switch);
+		note
+		  = emit_note_eh_region_end (last_no_action_insn_before_switch);
 		NOTE_EH_HANDLER (note) = call_site;
 		gcc_assert (last_action != -3
 			    || (last_action_insn
 				== last_no_action_insn_before_switch));
-		first_no_action_insn_before_switch = NULL_RTX;
-		last_no_action_insn_before_switch = NULL_RTX;
+		first_no_action_insn_before_switch = NULL;
+		last_no_action_insn_before_switch = NULL;
 		call_site_base++;
 	      }
 	    /* If we'd not seen a previous action (-3) or the previous
@@ -2475,11 +2634,10 @@
 		    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
 					     first_no_action_insn);
 		    NOTE_EH_HANDLER (note) = call_site;
-		    first_no_action_insn = NULL_RTX;
+		    first_no_action_insn = NULL;
 		  }
 
-		note = emit_note_after (NOTE_INSN_EH_REGION_END,
-					last_action_insn);
+		note = emit_note_eh_region_end (last_action_insn);
 		NOTE_EH_HANDLER (note) = call_site;
 	      }
 
@@ -2509,7 +2667,7 @@
 	  {
 	    first_no_action_insn_before_switch = first_no_action_insn;
 	    last_no_action_insn_before_switch = last_action_insn;
-	    first_no_action_insn = NULL_RTX;
+	    first_no_action_insn = NULL;
 	    gcc_assert (last_action == -1);
 	    last_action = -3;
 	  }
@@ -2517,161 +2675,78 @@
 	   opening a new one afterwards.  */
 	else if (last_action != -3)
 	  last_landing_pad = pc_rtx;
-	call_site_base += VEC_length (call_site_record,
-				      crtl->eh.call_site_record[cur_sec]);
+	if (crtl->eh.call_site_record_v[cur_sec])
+	  call_site_base += crtl->eh.call_site_record_v[cur_sec]->length ();
 	cur_sec++;
-	gcc_assert (crtl->eh.call_site_record[cur_sec] == NULL);
-	crtl->eh.call_site_record[cur_sec]
-	  = VEC_alloc (call_site_record, gc, 10);
-	max_labelno = max_label_num ();
-	min_labelno = get_first_label_num ();
-	pad_map = XCNEWVEC (rtx, max_labelno - min_labelno + 1);
-	pad_loc = sbitmap_alloc (max_labelno - min_labelno + 1);
+	gcc_assert (crtl->eh.call_site_record_v[cur_sec] == NULL);
+	vec_alloc (crtl->eh.call_site_record_v[cur_sec], 10);
       }
-    else if (LABEL_P (iter) && pad_map)
-      SET_BIT (pad_loc, CODE_LABEL_NUMBER (iter) - min_labelno);
 
   if (last_action >= -1 && ! first_no_action_insn)
     {
-      note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
+      note = emit_note_eh_region_end (last_action_insn);
       NOTE_EH_HANDLER (note) = call_site;
     }
 
   call_site_base = saved_call_site_base;
 
-  if (pad_map)
-    {
-      /* When doing hot/cold partitioning, ensure landing pads are
-	 always in the same section as the EH region, .gcc_except_table
-	 can't express it otherwise.  */
-      for (cur_sec = 0; cur_sec < 2; cur_sec++)
-	{
-	  int i, idx;
-	  int n = VEC_length (call_site_record,
-			      crtl->eh.call_site_record[cur_sec]);
-	  basic_block prev_bb = NULL, padbb;
-
-	  for (i = 0; i < n; ++i)
-	    {
-	      struct call_site_record_d *cs =
-		VEC_index (call_site_record,
-			   crtl->eh.call_site_record[cur_sec], i);
-	      rtx jump, note;
-
-	      if (cs->landing_pad == NULL_RTX)
-		continue;
-	      idx = CODE_LABEL_NUMBER (cs->landing_pad) - min_labelno;
-	      /* If the landing pad is in the correct section, nothing
-		 is needed.  */
-	      if (TEST_BIT (pad_loc, idx) ^ (cur_sec == 0))
-		continue;
-	      /* Otherwise, if we haven't seen this pad yet, we need to
-		 add a new label and jump to the correct section.  */
-	      if (pad_map[idx] == NULL_RTX)
-		{
-		  pad_map[idx] = gen_label_rtx ();
-		  if (prev_bb == NULL)
-		    for (iter = section_switch_note;
-			 iter; iter = PREV_INSN (iter))
-		      if (NOTE_INSN_BASIC_BLOCK_P (iter))
-			{
-			  prev_bb = NOTE_BASIC_BLOCK (iter);
-			  break;
-			}
-		  if (cur_sec == 0)
-		    {
-		      note = emit_label_before (pad_map[idx],
-						section_switch_note);
-		      jump = emit_jump_insn_before (gen_jump (cs->landing_pad),
-						    section_switch_note);
-		    }
-		  else
-		    {
-		      jump = emit_jump_insn_after (gen_jump (cs->landing_pad),
-						   section_switch_note);
-		      note = emit_label_after (pad_map[idx],
-					       section_switch_note);
-		    }
-		  JUMP_LABEL (jump) = cs->landing_pad;
-		  add_reg_note (jump, REG_CROSSING_JUMP, NULL_RTX);
-		  iter = NEXT_INSN (cs->landing_pad);
-		  if (iter && NOTE_INSN_BASIC_BLOCK_P (iter))
-		    padbb = NOTE_BASIC_BLOCK (iter);
-		  else
-		    padbb = NULL;
-		  if (padbb && prev_bb
-		      && BB_PARTITION (padbb) != BB_UNPARTITIONED)
-		    {
-		      basic_block bb;
-		      int part
-			= BB_PARTITION (padbb) == BB_COLD_PARTITION
-			  ? BB_HOT_PARTITION : BB_COLD_PARTITION;
-		      edge_iterator ei;
-		      edge e;
-
-		      bb = create_basic_block (note, jump, prev_bb);
-		      make_single_succ_edge (bb, padbb, EDGE_CROSSING);
-		      BB_SET_PARTITION (bb, part);
-		      for (ei = ei_start (padbb->preds);
-			   (e = ei_safe_edge (ei)); )
-			{
-			  if ((e->flags & (EDGE_EH|EDGE_CROSSING))
-			      == (EDGE_EH|EDGE_CROSSING))
-			    {
-			      redirect_edge_succ (e, bb);
-			      e->flags &= ~EDGE_CROSSING;
-			    }
-			  else
-			    ei_next (&ei);
-			}
-		      if (cur_sec == 0)
-			prev_bb = bb;
-		    }
-		}
-	      cs->landing_pad = pad_map[idx];
-	    }
-	}
-
-      sbitmap_free (pad_loc);
-      XDELETEVEC (pad_map);
-    }
-
-  htab_delete (ar_hash);
   return 0;
 }
 
-static bool
-gate_convert_to_eh_region_ranges (void)
+namespace {
+
+const pass_data pass_data_convert_to_eh_region_ranges =
+{
+  RTL_PASS, /* type */
+  "eh_ranges", /* name */
+  OPTGROUP_NONE, /* optinfo_flags */
+  TV_NONE, /* tv_id */
+  0, /* properties_required */
+  0, /* properties_provided */
+  0, /* properties_destroyed */
+  0, /* todo_flags_start */
+  0, /* todo_flags_finish */
+};
+
+class pass_convert_to_eh_region_ranges : public rtl_opt_pass
+{
+public:
+  pass_convert_to_eh_region_ranges (gcc::context *ctxt)
+    : rtl_opt_pass (pass_data_convert_to_eh_region_ranges, ctxt)
+  {}
+
+  /* opt_pass methods: */
+  virtual bool gate (function *);
+  virtual unsigned int execute (function *)
+    {
+      int ret = convert_to_eh_region_ranges ();
+      maybe_add_nop_after_section_switch ();
+      return ret;
+    }
+
+}; // class pass_convert_to_eh_region_ranges
+
+bool
+pass_convert_to_eh_region_ranges::gate (function *)
 {
   /* Nothing to do for SJLJ exceptions or if no regions created.  */
   if (cfun->eh->region_tree == NULL)
     return false;
-  if (targetm.except_unwind_info (&global_options) == UI_SJLJ)
+  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
     return false;
   return true;
 }
 
-struct rtl_opt_pass pass_convert_to_eh_region_ranges =
+} // anon namespace
+
+rtl_opt_pass *
+make_pass_convert_to_eh_region_ranges (gcc::context *ctxt)
 {
- {
-  RTL_PASS,
-  "eh_ranges",                          /* name */
-  gate_convert_to_eh_region_ranges,	/* gate */
-  convert_to_eh_region_ranges,          /* execute */
-  NULL,                                 /* sub */
-  NULL,                                 /* next */
-  0,                                    /* static_pass_number */
-  TV_NONE,                              /* tv_id */
-  0,                                    /* properties_required */
-  0,                                    /* properties_provided */
-  0,                                    /* properties_destroyed */
-  0,                                    /* todo_flags_start */
-  TODO_dump_func,			/* todo_flags_finish */
- }
-};
+  return new pass_convert_to_eh_region_ranges (ctxt);
+}
 
 static void
-push_uleb128 (VEC (uchar, gc) **data_area, unsigned int value)
+push_uleb128 (vec<uchar, va_gc> **data_area, unsigned int value)
 {
   do
     {
@@ -2679,13 +2754,13 @@
       value >>= 7;
       if (value)
 	byte |= 0x80;
-      VEC_safe_push (uchar, gc, *data_area, byte);
+      vec_safe_push (*data_area, byte);
     }
   while (value);
 }
 
 static void
-push_sleb128 (VEC (uchar, gc) **data_area, int value)
+push_sleb128 (vec<uchar, va_gc> **data_area, int value)
 {
   unsigned char byte;
   int more;
@@ -2698,24 +2773,23 @@
 		|| (value == -1 && (byte & 0x40) != 0));
       if (more)
 	byte |= 0x80;
-      VEC_safe_push (uchar, gc, *data_area, byte);
+      vec_safe_push (*data_area, byte);
     }
   while (more);
 }
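
push_uleb128 and push_sleb128 above implement the standard LEB128 variable-length
encodings used throughout the exception tables: seven payload bits per byte, with
the high bit marking that more bytes follow.  A minimal standalone sketch of the
unsigned case, assuming nothing beyond the C standard library (the helper name and
buffer are illustrative, not GCC identifiers):

    #include <stdio.h>

    /* Encode VALUE as ULEB128 into OUT; return the number of bytes written.  */
    static int
    uleb128_encode (unsigned int value, unsigned char *out)
    {
      int n = 0;
      do
	{
	  unsigned char byte = value & 0x7f;	/* low seven bits */
	  value >>= 7;
	  if (value)
	    byte |= 0x80;			/* more bytes follow */
	  out[n++] = byte;
	}
      while (value);
      return n;
    }

    int
    main (void)
    {
      unsigned char buf[5];
      int n = uleb128_encode (624485, buf);	/* DWARF's worked example */
      for (int i = 0; i < n; i++)
	printf ("%02x ", buf[i]);		/* prints: e5 8e 26 */
      return 0;
    }
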
 
 
-#ifndef HAVE_AS_LEB128
 static int
 dw2_size_of_call_site_table (int section)
 {
-  int n = VEC_length (call_site_record, crtl->eh.call_site_record[section]);
+  int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
   int size = n * (4 + 4 + 4);
   int i;
 
   for (i = 0; i < n; ++i)
     {
       struct call_site_record_d *cs =
-	VEC_index (call_site_record, crtl->eh.call_site_record[section], i);
+	(*crtl->eh.call_site_record_v[section])[i];
       size += size_of_uleb128 (cs->action);
     }
 
@@ -2725,26 +2799,25 @@
 static int
 sjlj_size_of_call_site_table (void)
 {
-  int n = VEC_length (call_site_record, crtl->eh.call_site_record[0]);
+  int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
   int size = 0;
   int i;
 
   for (i = 0; i < n; ++i)
     {
       struct call_site_record_d *cs =
-	VEC_index (call_site_record, crtl->eh.call_site_record[0], i);
+	(*crtl->eh.call_site_record_v[0])[i];
       size += size_of_uleb128 (INTVAL (cs->landing_pad));
       size += size_of_uleb128 (cs->action);
     }
 
   return size;
 }
-#endif
 
 static void
 dw2_output_call_site_table (int cs_format, int section)
 {
-  int n = VEC_length (call_site_record, crtl->eh.call_site_record[section]);
+  int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
   int i;
   const char *begin;
 
@@ -2757,8 +2830,7 @@
 
   for (i = 0; i < n; ++i)
     {
-      struct call_site_record_d *cs =
-	VEC_index (call_site_record, crtl->eh.call_site_record[section], i);
+      struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[section])[i];
       char reg_start_lab[32];
       char reg_end_lab[32];
       char landing_pad_lab[32];
@@ -2806,13 +2878,12 @@
 static void
 sjlj_output_call_site_table (void)
 {
-  int n = VEC_length (call_site_record, crtl->eh.call_site_record[0]);
+  int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
   int i;
 
   for (i = 0; i < n; ++i)
     {
-      struct call_site_record_d *cs =
-	VEC_index (call_site_record, crtl->eh.call_site_record[0], i);
+      struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[0])[i];
 
       dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
 				   "region %d landing pad", i);
@@ -2833,30 +2904,35 @@
     s = exception_section;
   else
     {
+      int flags;
+
+      if (EH_TABLES_CAN_BE_READ_ONLY)
+	{
+	  int tt_format =
+	    ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
+	  flags = ((! flag_pic
+		    || ((tt_format & 0x70) != DW_EH_PE_absptr
+			&& (tt_format & 0x70) != DW_EH_PE_aligned))
+		   ? 0 : SECTION_WRITE);
+	}
+      else
+	flags = SECTION_WRITE;
+
       /* Compute the section and cache it into exception_section,
 	 unless it depends on the function name.  */
-      if (targetm.have_named_sections)
+      if (targetm_common.have_named_sections)
 	{
-	  int flags;
-
-	  if (EH_TABLES_CAN_BE_READ_ONLY)
-	    {
-	      int tt_format =
-		ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
-	      flags = ((! flag_pic
-			|| ((tt_format & 0x70) != DW_EH_PE_absptr
-			    && (tt_format & 0x70) != DW_EH_PE_aligned))
-		       ? 0 : SECTION_WRITE);
-	    }
-	  else
-	    flags = SECTION_WRITE;
-
 #ifdef HAVE_LD_EH_GC_SECTIONS
-	  if (flag_function_sections)
+	  if (flag_function_sections
+	      || (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP))
 	    {
 	      char *section_name = XNEWVEC (char, strlen (fnname) + 32);
+	      /* The EH table must match the code section, so only mark
+		 it linkonce if we have COMDAT groups to tie them together.  */
+	      if (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP)
+		flags |= SECTION_LINKONCE;
 	      sprintf (section_name, ".gcc_except_table.%s", fnname);
-	      s = get_section (section_name, flags, NULL);
+	      s = get_section (section_name, flags, current_function_decl);
 	      free (section_name);
 	    }
 	  else
@@ -2866,7 +2942,7 @@
 	}
       else
 	exception_section
-	  = s = flag_pic ? data_section : readonly_data_section;
+	  = s = flags == SECTION_WRITE ? data_section : readonly_data_section;
     }
 
   switch_to_section (s);
@@ -2887,8 +2963,6 @@
     value = const0_rtx;
   else
     {
-      struct varpool_node *node;
-
       /* FIXME lto.  pass_ipa_free_lang_data changes all types to
 	 runtime types so TYPE should already be a runtime type
 	 reference.  When pass_ipa_free_lang data is made a default
@@ -2906,13 +2980,8 @@
       if (TREE_CODE (type) == ADDR_EXPR)
 	{
 	  type = TREE_OPERAND (type, 0);
-	  if (TREE_CODE (type) == VAR_DECL)
-	    {
-	      node = varpool_node (type);
-	      if (node)
-		varpool_mark_needed_node (node);
-	      is_public = TREE_PUBLIC (type);
-	    }
+	  if (VAR_P (type))
+	    is_public = TREE_PUBLIC (type);
 	}
       else
 	gcc_assert (TREE_CODE (type) == INTEGER_CST);
@@ -2933,20 +3002,17 @@
 output_one_function_exception_table (int section)
 {
   int tt_format, cs_format, lp_format, i;
-#ifdef HAVE_AS_LEB128
   char ttype_label[32];
   char cs_after_size_label[32];
   char cs_end_label[32];
-#else
   int call_site_len;
-#endif
   int have_tt_data;
   int tt_format_size = 0;
 
-  have_tt_data = (VEC_length (tree, cfun->eh->ttype_data)
+  have_tt_data = (vec_safe_length (cfun->eh->ttype_data)
 		  || (targetm.arm_eabi_unwinder
-		      ? VEC_length (tree, cfun->eh->ehspec_data.arm_eabi)
-		      : VEC_length (uchar, cfun->eh->ehspec_data.other)));
+		      ? vec_safe_length (cfun->eh->ehspec_data.arm_eabi)
+		      : vec_safe_length (cfun->eh->ehspec_data.other)));
 
   /* Indicate the format of the @TType entries.  */
   if (! have_tt_data)
@@ -2954,11 +3020,11 @@
   else
     {
       tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
-#ifdef HAVE_AS_LEB128
-      ASM_GENERATE_INTERNAL_LABEL (ttype_label,
-				   section ? "LLSDATTC" : "LLSDATT",
-				   current_function_funcdef_no);
-#endif
+      if (HAVE_AS_LEB128)
+	ASM_GENERATE_INTERNAL_LABEL (ttype_label,
+				     section ? "LLSDATTC" : "LLSDATT",
+				     current_function_funcdef_no);
+
       tt_format_size = size_of_encoded_value (tt_format);
 
       assemble_align (tt_format_size * BITS_PER_UNIT);
@@ -2984,122 +3050,127 @@
   dw2_asm_output_data (1, tt_format, "@TType format (%s)",
 		       eh_data_format_name (tt_format));
 
-#ifndef HAVE_AS_LEB128
-  if (targetm.except_unwind_info (&global_options) == UI_SJLJ)
-    call_site_len = sjlj_size_of_call_site_table ();
-  else
-    call_site_len = dw2_size_of_call_site_table (section);
-#endif
+  if (!HAVE_AS_LEB128)
+    {
+      if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
+	call_site_len = sjlj_size_of_call_site_table ();
+      else
+	call_site_len = dw2_size_of_call_site_table (section);
+    }
 
   /* A pc-relative 4-byte displacement to the @TType data.  */
   if (have_tt_data)
     {
-#ifdef HAVE_AS_LEB128
-      char ttype_after_disp_label[32];
-      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label,
-				   section ? "LLSDATTDC" : "LLSDATTD",
-				   current_function_funcdef_no);
-      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
-				    "@TType base offset");
-      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
-#else
-      /* Ug.  Alignment queers things.  */
-      unsigned int before_disp, after_disp, last_disp, disp;
-
-      before_disp = 1 + 1;
-      after_disp = (1 + size_of_uleb128 (call_site_len)
-		    + call_site_len
-		    + VEC_length (uchar, crtl->eh.action_record_data)
-		    + (VEC_length (tree, cfun->eh->ttype_data)
-		       * tt_format_size));
-
-      disp = after_disp;
-      do
+      if (HAVE_AS_LEB128)
+	{
+	  char ttype_after_disp_label[32];
+	  ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label,
+				       section ? "LLSDATTDC" : "LLSDATTD",
+				       current_function_funcdef_no);
+	  dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
+					"@TType base offset");
+	  ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
+	}
+      else
 	{
-	  unsigned int disp_size, pad;
-
-	  last_disp = disp;
-	  disp_size = size_of_uleb128 (disp);
-	  pad = before_disp + disp_size + after_disp;
-	  if (pad % tt_format_size)
-	    pad = tt_format_size - (pad % tt_format_size);
-	  else
-	    pad = 0;
-	  disp = after_disp + pad;
+	  /* Ug.  Alignment queers things.  */
+	  unsigned int before_disp, after_disp, last_disp, disp;
+
+	  before_disp = 1 + 1;
+	  after_disp = (1 + size_of_uleb128 (call_site_len)
+			+ call_site_len
+			+ vec_safe_length (crtl->eh.action_record_data)
+			+ (vec_safe_length (cfun->eh->ttype_data)
+			   * tt_format_size));
+
+	  disp = after_disp;
+	  do
+	    {
+	      unsigned int disp_size, pad;
+
+	      last_disp = disp;
+	      disp_size = size_of_uleb128 (disp);
+	      pad = before_disp + disp_size + after_disp;
+	      if (pad % tt_format_size)
+		pad = tt_format_size - (pad % tt_format_size);
+	      else
+		pad = 0;
+	      disp = after_disp + pad;
+	    }
+	  while (disp != last_disp);
+
+	  dw2_asm_output_data_uleb128 (disp, "@TType base offset");
 	}
-      while (disp != last_disp);
-
-      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
-#endif
-    }
+	}
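
The loop above needs a fixed-point iteration because the padding inserted before
the @TType table depends on the encoded size of the displacement, which in turn
depends on the padding.  A small self-contained sketch of the same computation
with made-up byte counts (the numbers and the helper below are illustrative, not
values GCC would necessarily produce):

    #include <stdio.h>

    /* Bytes needed to encode VALUE as ULEB128.  */
    static unsigned int
    uleb128_size (unsigned int value)
    {
      unsigned int size = 0;
      do { size++; value >>= 7; } while (value);
      return size;
    }

    int
    main (void)
    {
      unsigned int before_disp = 2;	/* header bytes already emitted */
      unsigned int after_disp = 301;	/* call-site + action + @TType data */
      unsigned int tt_format_size = 4;	/* alignment of the @TType entries */
      unsigned int disp = after_disp, last_disp, pad;

      do
	{
	  last_disp = disp;
	  pad = before_disp + uleb128_size (disp) + after_disp;
	  pad = (pad % tt_format_size
		 ? tt_format_size - (pad % tt_format_size) : 0);
	  disp = after_disp + pad;
	}
      while (disp != last_disp);

      printf ("@TType base offset = %u\n", disp);	/* 304 for these inputs */
      return 0;
    }
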
 
   /* Indicate the format of the call-site offsets.  */
-#ifdef HAVE_AS_LEB128
-  cs_format = DW_EH_PE_uleb128;
-#else
-  cs_format = DW_EH_PE_udata4;
-#endif
+  if (HAVE_AS_LEB128)
+    cs_format = DW_EH_PE_uleb128;
+  else
+    cs_format = DW_EH_PE_udata4;
+
   dw2_asm_output_data (1, cs_format, "call-site format (%s)",
 		       eh_data_format_name (cs_format));
 
-#ifdef HAVE_AS_LEB128
-  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label,
-			       section ? "LLSDACSBC" : "LLSDACSB",
-			       current_function_funcdef_no);
-  ASM_GENERATE_INTERNAL_LABEL (cs_end_label,
-			       section ? "LLSDACSEC" : "LLSDACSE",
-			       current_function_funcdef_no);
-  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
-				"Call-site table length");
-  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
-  if (targetm.except_unwind_info (&global_options) == UI_SJLJ)
-    sjlj_output_call_site_table ();
+  if (HAVE_AS_LEB128)
+    {
+      ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label,
+				   section ? "LLSDACSBC" : "LLSDACSB",
+				   current_function_funcdef_no);
+      ASM_GENERATE_INTERNAL_LABEL (cs_end_label,
+				   section ? "LLSDACSEC" : "LLSDACSE",
+				   current_function_funcdef_no);
+      dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
+				    "Call-site table length");
+      ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
+      if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
+	sjlj_output_call_site_table ();
+      else
+	dw2_output_call_site_table (cs_format, section);
+      ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
+    }
   else
-    dw2_output_call_site_table (cs_format, section);
-  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
-#else
-  dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
-  if (targetm.except_unwind_info (&global_options) == UI_SJLJ)
-    sjlj_output_call_site_table ();
-  else
-    dw2_output_call_site_table (cs_format, section);
-#endif
+    {
+      dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
+      if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
+	sjlj_output_call_site_table ();
+      else
+	dw2_output_call_site_table (cs_format, section);
+    }
 
   /* ??? Decode and interpret the data for flag_debug_asm.  */
   {
     uchar uc;
-    FOR_EACH_VEC_ELT (uchar, crtl->eh.action_record_data, i, uc)
+    FOR_EACH_VEC_ELT (*crtl->eh.action_record_data, i, uc)
       dw2_asm_output_data (1, uc, i ? NULL : "Action record table");
   }
 
   if (have_tt_data)
     assemble_align (tt_format_size * BITS_PER_UNIT);
 
-  i = VEC_length (tree, cfun->eh->ttype_data);
+  i = vec_safe_length (cfun->eh->ttype_data);
   while (i-- > 0)
     {
-      tree type = VEC_index (tree, cfun->eh->ttype_data, i);
+      tree type = (*cfun->eh->ttype_data)[i];
       output_ttype (type, tt_format, tt_format_size);
     }
 
-#ifdef HAVE_AS_LEB128
-  if (have_tt_data)
-      ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
-#endif
+  if (HAVE_AS_LEB128 && have_tt_data)
+    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
 
   /* ??? Decode and interpret the data for flag_debug_asm.  */
   if (targetm.arm_eabi_unwinder)
     {
       tree type;
       for (i = 0;
-	   VEC_iterate (tree, cfun->eh->ehspec_data.arm_eabi, i, type); ++i)
+	   vec_safe_iterate (cfun->eh->ehspec_data.arm_eabi, i, &type); ++i)
 	output_ttype (type, tt_format, tt_format_size);
     }
   else
     {
       uchar uc;
       for (i = 0;
-	   VEC_iterate (uchar, cfun->eh->ehspec_data.other, i, uc); ++i)
+	   vec_safe_iterate (cfun->eh->ehspec_data.other, i, &uc); ++i)
 	dw2_asm_output_data (1, uc,
 			     i ? NULL : "Exception specification table");
     }
@@ -3128,19 +3199,19 @@
   targetm.asm_out.emit_except_table_label (asm_out_file);
 
   output_one_function_exception_table (0);
-  if (crtl->eh.call_site_record[1] != NULL)
+  if (crtl->eh.call_site_record_v[1])
     output_one_function_exception_table (1);
 
   switch_to_section (current_function_section ());
 }
 
 void
-set_eh_throw_stmt_table (struct function *fun, struct htab *table)
+set_eh_throw_stmt_table (function *fun, hash_map<gimple *, int> *table)
 {
   fun->eh->throw_stmt_table = table;
 }
 
-htab_t
+hash_map<gimple *, int> *
 get_eh_throw_stmt_table (struct function *fun)
 {
   return fun->eh->throw_stmt_table;
@@ -3210,7 +3281,7 @@
 	      for (lp = i->landing_pads; lp ; lp = lp->next_lp)
 		{
 		  fprintf (out, "{%i,", lp->index);
-		  print_generic_expr (out, lp->post_landing_pad, 0);
+		  print_generic_expr (out, lp->post_landing_pad);
 		  fputc ('}', out);
 		  if (lp->next_lp)
 		    fputc (',', out);
@@ -3228,7 +3299,7 @@
 		    fprintf (out, "(nil),");
 		  if (lp->post_landing_pad)
 		    {
-		      rtx lab = label_rtx (lp->post_landing_pad);
+		      rtx_insn *lab = label_rtx (lp->post_landing_pad);
 		      fprintf (out, "%i%s}", INSN_UID (lab),
 			       NOTE_P (lab) ? "(del)" : "");
 		    }
@@ -3256,10 +3327,10 @@
 		if (c->label)
 		  {
 		    fprintf (out, "lab:");
-		    print_generic_expr (out, c->label, 0);
+		    print_generic_expr (out, c->label);
 		    fputc (';', out);
 		  }
-		print_generic_expr (out, c->type_list, 0);
+		print_generic_expr (out, c->type_list);
 		fputc ('}', out);
 		if (c->next_catch)
 		  fputc (',', out);
@@ -3269,7 +3340,7 @@
 
 	case ERT_ALLOWED_EXCEPTIONS:
 	  fprintf (out, " filter :%i types:", i->u.allowed.filter);
-	  print_generic_expr (out, i->u.allowed.type_list, 0);
+	  print_generic_expr (out, i->u.allowed.type_list);
 	  break;
 	}
       fputc ('\n', out);
@@ -3319,7 +3390,7 @@
     return;
 
   count_r = 0;
-  for (i = 1; VEC_iterate (eh_region, fun->eh->region_array, i, r); ++i)
+  for (i = 1; vec_safe_iterate (fun->eh->region_array, i, &r); ++i)
     if (r)
       {
 	if (r->index == i)
@@ -3332,7 +3403,7 @@
       }
 
   count_lp = 0;
-  for (i = 1; VEC_iterate (eh_landing_pad, fun->eh->lp_array, i, lp); ++i)
+  for (i = 1; vec_safe_iterate (fun->eh->lp_array, i, &lp); ++i)
     if (lp)
       {
 	if (lp->index == i)
@@ -3349,7 +3420,7 @@
   r = fun->eh->region_tree;
   while (1)
     {
-      if (VEC_index (eh_region, fun->eh->region_array, r->index) != r)
+      if ((*fun->eh->region_array)[r->index] != r)
 	{
 	  error ("region_array is corrupted for region %i", r->index);
 	  err = true;
@@ -3368,7 +3439,7 @@
 
       for (lp = r->landing_pads; lp ; lp = lp->next_lp)
 	{
-	  if (VEC_index (eh_landing_pad, fun->eh->lp_array, lp->index) != lp)
+	  if ((*fun->eh->lp_array)[lp->index] != lp)
 	    {
 	      error ("lp_array is corrupted for lp %i", lp->index);
 	      err = true;