diff gcc/tree-eh.c @ 145:1830386684a0

gcc-9.2.0
author anatofuz
date Thu, 13 Feb 2020 11:34:05 +0900
parents 84e7813d76e9
--- a/gcc/tree-eh.c	Thu Oct 25 07:37:49 2018 +0900
+++ b/gcc/tree-eh.c	Thu Feb 13 11:34:05 2020 +0900
@@ -1,5 +1,5 @@
 /* Exception handling semantics and decomposition for trees.
-   Copyright (C) 2003-2018 Free Software Foundation, Inc.
+   Copyright (C) 2003-2020 Free Software Foundation, Inc.
 
 This file is part of GCC.
 
@@ -139,19 +139,19 @@
    statement is not recorded in the region table.  */
 
 int
-lookup_stmt_eh_lp_fn (struct function *ifun, gimple *t)
+lookup_stmt_eh_lp_fn (struct function *ifun, const gimple *t)
 {
   if (ifun->eh->throw_stmt_table == NULL)
     return 0;
 
-  int *lp_nr = ifun->eh->throw_stmt_table->get (t);
+  int *lp_nr = ifun->eh->throw_stmt_table->get (const_cast <gimple *> (t));
   return lp_nr ? *lp_nr : 0;
 }
 
 /* Likewise, but always use the current function.  */
 
 int
-lookup_stmt_eh_lp (gimple *t)
+lookup_stmt_eh_lp (const gimple *t)
 {
   /* We can get called from initialized data when -fnon-call-exceptions
      is on; prevent crash.  */
@@ -356,6 +356,9 @@
      split out into a separate structure so that we don't have to
      copy so much when processing other nodes.  */
   struct leh_tf_state *tf;
+
+  /* Outer non-cleanup region.  */
+  eh_region outer_non_cleanup;
 };
 
 struct leh_tf_state
@@ -503,7 +506,11 @@
       seq = find_goto_replacement (tf, temp);
       if (seq)
 	{
-	  gsi_insert_seq_before (gsi, gimple_seq_copy (seq), GSI_SAME_STMT);
+	  gimple_stmt_iterator i;
+	  seq = gimple_seq_copy (seq);
+	  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
+	    gimple_set_location (gsi_stmt (i), gimple_location (stmt));
+	  gsi_insert_seq_before (gsi, seq, GSI_SAME_STMT);
 	  gsi_remove (gsi, false);
 	  return;
 	}
@@ -811,15 +818,6 @@
     record_stmt_eh_region (region->outer, x);
 }
 
-/* Emit an EH_DISPATCH statement into SEQ for REGION.  */
-
-static void
-emit_eh_dispatch (gimple_seq *seq, eh_region region)
-{
-  geh_dispatch *x = gimple_build_eh_dispatch (region->index);
-  gimple_seq_add_stmt (seq, x);
-}
-
 /* Note that the current EH region may contain a throw, or a
    call to a function which itself may contain a throw.  */
 
@@ -1001,11 +999,14 @@
       gimple_try_set_cleanup (tf->top_p, gimple_eh_else_n_body (eh_else));
       finally = gimple_eh_else_e_body (eh_else);
 
-      /* Let the ELSE see the exception that's being processed.  */
-      eh_region save_ehp = this_state->ehp_region;
-      this_state->ehp_region = this_state->cur_region;
-      lower_eh_constructs_1 (this_state, &finally);
-      this_state->ehp_region = save_ehp;
+      /* Let the ELSE see the exception that's being processed, but
+	 since the cleanup is outside the try block, process it with
+	 outer_state, otherwise it may be used as a cleanup for
+	 itself, and Bad Things (TM) ensue.  */
+      eh_region save_ehp = outer_state->ehp_region;
+      outer_state->ehp_region = this_state->cur_region;
+      lower_eh_constructs_1 (outer_state, &finally);
+      outer_state->ehp_region = save_ehp;
     }
   else
     {
@@ -1626,7 +1627,8 @@
     return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
 }
 
-/* REG is the enclosing region for a possible cleanup region, or the region
+/* REG is the current region of a LEH state.  It
+   is the enclosing region for a possible cleanup region, or the region
    itself.  Returns TRUE if such a region would be unreachable.
 
    Cleanup regions within a must-not-throw region aren't actually reachable
@@ -1634,10 +1636,18 @@
    routine will call terminate before unwinding.  */
 
 static bool
-cleanup_is_dead_in (eh_region reg)
+cleanup_is_dead_in (leh_state *state)
 {
-  while (reg && reg->type == ERT_CLEANUP)
-    reg = reg->outer;
+  if (flag_checking)
+    {
+      eh_region reg = state->cur_region;
+      while (reg && reg->type == ERT_CLEANUP)
+	reg = reg->outer;
+
+      gcc_assert (reg == state->outer_non_cleanup);
+    }
+
+  eh_region reg = state->outer_non_cleanup;
   return (reg && reg->type == ERT_MUST_NOT_THROW);
 }
 
@@ -1660,7 +1670,7 @@
   this_tf.try_finally_expr = tp;
   this_tf.top_p = tp;
   this_tf.outer = state;
-  if (using_eh_for_cleanups_p () && !cleanup_is_dead_in (state->cur_region))
+  if (using_eh_for_cleanups_p () && !cleanup_is_dead_in (state))
     {
       this_tf.region = gen_eh_region_cleanup (state->cur_region);
       this_state.cur_region = this_tf.region;
@@ -1671,6 +1681,7 @@
       this_state.cur_region = state->cur_region;
     }
 
+  this_state.outer_non_cleanup = state->outer_non_cleanup;
   this_state.ehp_region = state->ehp_region;
   this_state.tf = &this_tf;
 
@@ -1762,12 +1773,15 @@
   tree out_label;
   gimple_seq new_seq, cleanup;
   gimple *x;
+  geh_dispatch *eh_dispatch;
   location_t try_catch_loc = gimple_location (tp);
+  location_t catch_loc = UNKNOWN_LOCATION;
 
   if (flag_exceptions)
     {
       try_region = gen_eh_region_try (state->cur_region);
       this_state.cur_region = try_region;
+      this_state.outer_non_cleanup = this_state.cur_region;
     }
 
   lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
@@ -1776,10 +1790,12 @@
     return gimple_try_eval (tp);
 
   new_seq = NULL;
-  emit_eh_dispatch (&new_seq, try_region);
+  eh_dispatch = gimple_build_eh_dispatch (try_region->index);
+  gimple_seq_add_stmt (&new_seq, eh_dispatch);
   emit_resx (&new_seq, try_region);
 
   this_state.cur_region = state->cur_region;
+  this_state.outer_non_cleanup = state->outer_non_cleanup;
   this_state.ehp_region = try_region;
 
   /* Add eh_seq from lowering EH in the cleanup sequence after the cleanup
@@ -1799,6 +1815,8 @@
       gimple_seq handler;
 
       catch_stmt = as_a <gcatch *> (gsi_stmt (gsi));
+      if (catch_loc == UNKNOWN_LOCATION)
+	catch_loc = gimple_location (catch_stmt);
       c = gen_eh_region_catch (try_region, gimple_catch_types (catch_stmt));
 
       handler = gimple_catch_handler (catch_stmt);
@@ -1822,6 +1840,10 @@
 	break;
     }
 
+  /* Try to set a location on the dispatching construct to avoid inheriting
+     the location of the previous statement.  */
+  gimple_set_location (eh_dispatch, catch_loc);
+
   gimple_try_set_cleanup (tp, new_seq);
 
   gimple_seq new_eh_seq = eh_seq;
@@ -1850,6 +1872,7 @@
       this_region = gen_eh_region_allowed (state->cur_region,
 				           gimple_eh_filter_types (inner));
       this_state.cur_region = this_region;
+      this_state.outer_non_cleanup = this_state.cur_region;
     }
 
   lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
@@ -1857,11 +1880,13 @@
   if (!eh_region_may_contain_throw (this_region))
     return gimple_try_eval (tp);
 
-  new_seq = NULL;
   this_state.cur_region = state->cur_region;
   this_state.ehp_region = this_region;
 
-  emit_eh_dispatch (&new_seq, this_region);
+  new_seq = NULL;
+  x = gimple_build_eh_dispatch (this_region->index);
+  gimple_set_location (x, gimple_location (tp));
+  gimple_seq_add_stmt (&new_seq, x);
   emit_resx (&new_seq, this_region);
 
   this_region->u.allowed.label = create_artificial_label (UNKNOWN_LOCATION);
@@ -1903,6 +1928,7 @@
       TREE_USED (this_region->u.must_not_throw.failure_decl) = 1;
 
       this_state.cur_region = this_region;
+      this_state.outer_non_cleanup = this_state.cur_region;
     }
 
   lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
@@ -1920,12 +1946,13 @@
   eh_region this_region = NULL;
   struct leh_tf_state fake_tf;
   gimple_seq result;
-  bool cleanup_dead = cleanup_is_dead_in (state->cur_region);
+  bool cleanup_dead = cleanup_is_dead_in (state);
 
   if (flag_exceptions && !cleanup_dead)
     {
       this_region = gen_eh_region_cleanup (state->cur_region);
       this_state.cur_region = this_region;
+      this_state.outer_non_cleanup = state->outer_non_cleanup;
     }
 
   lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
@@ -2301,7 +2328,7 @@
   old_lp = get_eh_landing_pad_from_number (old_lp_nr);
 
   throw_stmt = last_stmt (edge_in->src);
-  gcc_assert (lookup_stmt_eh_lp (throw_stmt) == old_lp_nr);
+  gcc_checking_assert (lookup_stmt_eh_lp (throw_stmt) == old_lp_nr);
 
   new_label = gimple_block_label (new_bb);
 
@@ -2490,6 +2517,14 @@
       /* Constructing an object cannot trap.  */
       return false;
 
+    case COND_EXPR:
+    case VEC_COND_EXPR:
+      /* Whether *COND_EXPR can trap depends on whether the
+	 first argument can trap, so signal it as not handled.
+	 Whether lhs is floating or not doesn't matter.  */
+      *handled = false;
+      return false;
+
     default:
       /* Any floating arithmetic may trap.  */
       if (fp_operation && flag_trapping_math)
@@ -2514,6 +2549,10 @@
   bool honor_snans = fp_operation && flag_signaling_nans != 0;
   bool handled;
 
+  /* This function cannot tell whether or not COND_EXPR and VEC_COND_EXPR could
+     trap, because that depends on the respective condition op.  */
+  gcc_assert (op != COND_EXPR && op != VEC_COND_EXPR);
+
   if (TREE_CODE_CLASS (op) != tcc_comparison
       && TREE_CODE_CLASS (op) != tcc_unary
       && TREE_CODE_CLASS (op) != tcc_binary)
@@ -2601,6 +2640,13 @@
   if (!expr)
     return false;
 
+  /* In COND_EXPR and VEC_COND_EXPR only the condition may trap, but
+     they won't appear as operands in GIMPLE form, so this is just for the
+     GENERIC uses where it needs to recurse on the operands and so
+     *COND_EXPR itself doesn't trap.  */
+  if (TREE_CODE (expr) == COND_EXPR || TREE_CODE (expr) == VEC_COND_EXPR)
+    return false;
+
   code = TREE_CODE (expr);
   t = TREE_TYPE (expr);
 
@@ -2759,27 +2805,9 @@
 
       if (TREE_CODE (*tp) == ABS_EXPR)
 	{
-	  tree op = TREE_OPERAND (*tp, 0);
-	  op = save_expr (op);
-	  /* save_expr skips simple arithmetics, which is undesirable
-	     here, if it might trap due to flag_trapv.  We need to
-	     force a SAVE_EXPR in the COND_EXPR condition, to evaluate
-	     it before the comparison.  */
-	  if (EXPR_P (op)
-	      && TREE_CODE (op) != SAVE_EXPR
-	      && walk_tree (&op, find_trapping_overflow, NULL, NULL))
-	    {
-	      op = build1_loc (EXPR_LOCATION (op), SAVE_EXPR, type, op);
-	      TREE_SIDE_EFFECTS (op) = 1;
-	    }
-	  /* Change abs (op) to op < 0 ? -op : op and handle the NEGATE_EXPR
-	     like other signed integer trapping operations.  */
-	  tree cond = fold_build2 (LT_EXPR, boolean_type_node,
-				   op, build_int_cst (type, 0));
-	  tree neg = fold_build1 (NEGATE_EXPR, utype,
-				  fold_convert (utype, op));
-	  *tp = fold_build3 (COND_EXPR, type, cond,
-			     fold_convert (type, neg), op);
+	  TREE_SET_CODE (*tp, ABSU_EXPR);
+	  TREE_TYPE (*tp) = utype;
+	  *tp = fold_convert (type, *tp);
 	}
       else
 	{
@@ -3540,10 +3568,15 @@
 }
 
 /* Try to sink var = {v} {CLOBBER} stmts followed just by
-   internal throw to successor BB.  */
+   internal throw to successor BB.
+   SUNK, if not NULL, is an array of sequences indexed by basic-block
+   index to sink to and to pick up sinking opportunities from.
+   If FOUND_OPPORTUNITY is not NULL then do not perform the optimization
+   but set *FOUND_OPPORTUNITY to true.  */
 
 static int
-sink_clobbers (basic_block bb)
+sink_clobbers (basic_block bb,
+	       gimple_seq *sunk = NULL, bool *found_opportunity = NULL)
 {
   edge e;
   edge_iterator ei;
@@ -3578,16 +3611,22 @@
 	return 0;
       any_clobbers = true;
     }
-  if (!any_clobbers)
+  if (!any_clobbers && (!sunk || gimple_seq_empty_p (sunk[bb->index])))
     return 0;
 
+  /* If this was a dry run, tell it we found clobbers to sink.  */
+  if (found_opportunity)
+    {
+      *found_opportunity = true;
+      return 0;
+    }
+
   edge succe = single_succ_edge (bb);
   succbb = succe->dest;
 
   /* See if there is a virtual PHI node to take an updated virtual
      operand from.  */
   gphi *vphi = NULL;
-  tree vuse = NULL_TREE;
   for (gphi_iterator gpi = gsi_start_phis (succbb);
        !gsi_end_p (gpi); gsi_next (&gpi))
     {
@@ -3595,12 +3634,16 @@
       if (virtual_operand_p (res))
 	{
 	  vphi = gpi.phi ();
-	  vuse = res;
 	  break;
 	}
     }
 
-  dgsi = gsi_after_labels (succbb);
+  gimple *first_sunk = NULL;
+  gimple *last_sunk = NULL;
+  if (sunk && !(succbb->flags & BB_VISITED))
+    dgsi = gsi_start (sunk[succbb->index]);
+  else
+    dgsi = gsi_after_labels (succbb);
   gsi = gsi_last_bb (bb);
   for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
     {
@@ -3631,36 +3674,46 @@
          forwarder edge we can keep virtual operands in place.  */
       gsi_remove (&gsi, false);
       gsi_insert_before (&dgsi, stmt, GSI_NEW_STMT);
-
-      /* But adjust virtual operands if we sunk across a PHI node.  */
-      if (vuse)
+      if (!first_sunk)
+	first_sunk = stmt;
+      last_sunk = stmt;
+    }
+  if (sunk && !gimple_seq_empty_p (sunk[bb->index]))
+    {
+      if (!first_sunk)
+	first_sunk = gsi_stmt (gsi_last (sunk[bb->index]));
+      last_sunk = gsi_stmt (gsi_start (sunk[bb->index]));
+      gsi_insert_seq_before_without_update (&dgsi,
+					    sunk[bb->index], GSI_NEW_STMT);
+      sunk[bb->index] = NULL;
+    }
+  if (first_sunk)
+    {
+      /* Adjust virtual operands if we sunk across a virtual PHI.  */
+      if (vphi)
 	{
-	  gimple *use_stmt;
 	  imm_use_iterator iter;
 	  use_operand_p use_p;
-	  FOR_EACH_IMM_USE_STMT (use_stmt, iter, vuse)
+	  gimple *use_stmt;
+	  tree phi_def = gimple_phi_result (vphi);
+	  FOR_EACH_IMM_USE_STMT (use_stmt, iter, phi_def)
 	    FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
-	      SET_USE (use_p, gimple_vdef (stmt));
-	  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse))
+              SET_USE (use_p, gimple_vdef (first_sunk));
+	  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (phi_def))
 	    {
-	      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (stmt)) = 1;
-	      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 0;
+	      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (first_sunk)) = 1;
+	      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (phi_def) = 0;
 	    }
-	  /* Adjust the incoming virtual operand.  */
-	  SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (vphi, succe), gimple_vuse (stmt));
-	  SET_USE (gimple_vuse_op (stmt), vuse);
+	  SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (vphi, succe),
+		   gimple_vuse (last_sunk));
+	  SET_USE (gimple_vuse_op (last_sunk), phi_def);
 	}
       /* If there isn't a single predecessor but no virtual PHI node
          arrange for virtual operands to be renamed.  */
-      else if (gimple_vuse_op (stmt) != NULL_USE_OPERAND_P
-	       && !single_pred_p (succbb))
+      else if (!single_pred_p (succbb)
+	       && TREE_CODE (gimple_vuse (last_sunk)) == SSA_NAME)
 	{
-	  /* In this case there will be no use of the VDEF of this stmt. 
-	     ???  Unless this is a secondary opportunity and we have not
-	     removed unreachable blocks yet, so we cannot assert this.  
-	     Which also means we will end up renaming too many times.  */
-	  SET_USE (gimple_vuse_op (stmt), gimple_vop (cfun));
-	  mark_virtual_operands_for_renaming (cfun);
+	  mark_virtual_operand_for_renaming (gimple_vuse (last_sunk));
 	  todo |= TODO_update_ssa_only_virtuals;
 	}
     }
@@ -3770,6 +3823,7 @@
 	    filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)));
 	    filter = make_ssa_name (filter, x);
 	    gimple_call_set_lhs (x, filter);
+	    gimple_set_location (x, gimple_location (stmt));
 	    gsi_insert_before (&gsi, x, GSI_SAME_STMT);
 
 	    /* Turn the default label into a default case.  */
@@ -3777,6 +3831,7 @@
 	    sort_case_labels (labels);
 
 	    x = gimple_build_switch (filter, default_label, labels);
+	    gimple_set_location (x, gimple_location (stmt));
 	    gsi_insert_before (&gsi, x, GSI_SAME_STMT);
 	  }
       }
@@ -3793,6 +3848,7 @@
 	filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)));
 	filter = make_ssa_name (filter, x);
 	gimple_call_set_lhs (x, filter);
+	gimple_set_location (x, gimple_location (stmt));
 	gsi_insert_before (&gsi, x, GSI_SAME_STMT);
 
 	r->u.allowed.label = NULL;
@@ -3850,6 +3906,7 @@
   basic_block bb;
   int flags = 0;
   bool redirected = false;
+  bool any_resx_to_process = false;
 
   assign_filter_values ();
 
@@ -3866,18 +3923,48 @@
 	}
       else if (gimple_code (last) == GIMPLE_RESX)
 	{
-	  if (stmt_can_throw_external (cfun, last))
+	  if (stmt_can_throw_external (fun, last))
 	    optimize_clobbers (bb);
-	  else
-	    flags |= sink_clobbers (bb);
+	  else if (!any_resx_to_process)
+	    sink_clobbers (bb, NULL, &any_resx_to_process);
 	}
+      bb->flags &= ~BB_VISITED;
     }
-
   if (redirected)
     {
       free_dominance_info (CDI_DOMINATORS);
       delete_unreachable_blocks ();
     }
+
+  if (any_resx_to_process)
+    {
+      /* Make sure to catch all secondary sinking opportunities by processing
+	 blocks in RPO order and after all CFG modifications from lowering
+	 and unreachable block removal.  */
+      int *rpo = XNEWVEC  (int, n_basic_blocks_for_fn (fun));
+      int rpo_n = pre_and_rev_post_order_compute_fn (fun, NULL, rpo, false);
+      gimple_seq *sunk = XCNEWVEC (gimple_seq, last_basic_block_for_fn (fun));
+      for (int i = 0; i < rpo_n; ++i)
+	{
+	  bb = BASIC_BLOCK_FOR_FN (fun, rpo[i]);
+	  gimple *last = last_stmt (bb);
+	  if (last
+	      && gimple_code (last) == GIMPLE_RESX
+	      && !stmt_can_throw_external (fun, last))
+	    flags |= sink_clobbers (bb, sunk);
+	  /* If there were any clobbers sunk into this BB, insert them now.  */
+	  if (!gimple_seq_empty_p (sunk[bb->index]))
+	    {
+	      gimple_stmt_iterator gsi = gsi_after_labels (bb);
+	      gsi_insert_seq_before (&gsi, sunk[bb->index], GSI_NEW_STMT);
+	      sunk[bb->index] = NULL;
+	    }
+	  bb->flags |= BB_VISITED;
+	}
+      free (rpo);
+      free (sunk);
+    }
+
   return flags;
 }
 
@@ -4046,15 +4133,14 @@
 
   if (cfun->eh == NULL)
     return;
-           
+
   FOR_EACH_VEC_SAFE_ELT (cfun->eh->lp_array, i, lp)
-    if (lp && lp->post_landing_pad)
+    if (lp
+	&& (lp->post_landing_pad == NULL_TREE
+	    || label_to_block (cfun, lp->post_landing_pad) == NULL))
       {
-	if (label_to_block (cfun, lp->post_landing_pad) == NULL)
-	  {
-	    remove_unreachable_handlers ();
-	    return;
-	  }
+	remove_unreachable_handlers ();
+	return;
       }
 }
 
@@ -4217,6 +4303,27 @@
   return changed;
 }
 
+/* Wrapper around unsplit_all_eh that makes it usable everywhere.  */
+
+void
+unsplit_eh_edges (void)
+{
+  bool changed;
+
+  /* unsplit_all_eh can die looking up unreachable landing pads.  */
+  maybe_remove_unreachable_handlers ();
+
+  changed = unsplit_all_eh ();
+
+  /* If EH edges have been unsplit, delete unreachable forwarder blocks.  */
+  if (changed)
+    {
+      free_dominance_info (CDI_DOMINATORS);
+      free_dominance_info (CDI_POST_DOMINATORS);
+      delete_unreachable_blocks ();
+    }
+}
+
 /* A subroutine of cleanup_empty_eh.  Redirect all EH edges incoming
    to OLD_BB to NEW_BB; return true on success, false on failure.
 
@@ -4245,9 +4352,10 @@
 	|  | EH
 	<..>
      which CFG verification would choke on.  See PR45172 and PR51089.  */
-  FOR_EACH_EDGE (e, ei, old_bb->preds)
-    if (find_edge (e->src, new_bb))
-      return false;
+  if (!single_pred_p (new_bb))
+    FOR_EACH_EDGE (e, ei, old_bb->preds)
+      if (find_edge (e->src, new_bb))
+	return false;
 
   FOR_EACH_EDGE (e, ei, old_bb->preds)
     redirect_edge_var_map_clear (e);
@@ -4636,9 +4744,15 @@
   eh_landing_pad lp;
   int i;
 
-  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
-    if (lp)
-      changed |= cleanup_empty_eh (lp);
+  /* Ideally we'd walk the region tree and process LPs inner to outer
+     to avoid quadraticness in EH redirection.  Walking the LP array
+     in reverse seems to be an approximation of that.  */
+  for (i = vec_safe_length (cfun->eh->lp_array) - 1; i >= 1; --i)
+    {
+      lp = (*cfun->eh->lp_array)[i];
+      if (lp)
+	changed |= cleanup_empty_eh (lp);
+    }
 
   return changed;
 }
@@ -4746,6 +4860,14 @@
   return new pass_cleanup_eh (ctxt);
 }
 
+/* Disable warnings about missing quoting in GCC diagnostics for
+   the verification errors.  Their format strings don't follow GCC
+   diagnostic conventions but are only used for debugging.  */
+#if __GNUC__ >= 10
+#  pragma GCC diagnostic push
+#  pragma GCC diagnostic ignored "-Wformat-diag"
+#endif
+
 /* Verify that BB containing STMT as the last statement, has precisely the
    edge that make_eh_edges would create.  */
 
@@ -4781,7 +4903,7 @@
     {
       if (eh_edge)
 	{
-	  error ("BB %i can not throw but has an EH edge", bb->index);
+	  error ("BB %i cannot throw but has an EH edge", bb->index);
 	  return true;
 	}
       return false;
@@ -4892,3 +5014,7 @@
 
   return false;
 }
+
+#if __GNUC__ >= 10
+#  pragma GCC diagnostic pop
+#endif
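
Note on the ABS_EXPR hunk above (the rewrite_to_non_trapping_overflow change): the old code expanded abs (op) into a COND_EXPR with a forced SAVE_EXPR, while the new code simply retags the node as ABSU_EXPR computed in the unsigned type and converts the result back.  A minimal source-level sketch of the same idea, in plain C rather than GCC internals (the helper name absu is made up for illustration):

    /* Illustration only: compute |x| via unsigned arithmetic so the
       negation of INT_MIN cannot overflow a signed type, which is the
       property ABSU_EXPR provides at the GIMPLE level.  */
    static unsigned int
    absu (int x)
    {
      unsigned int ux = (unsigned int) x;   /* conversion is well defined modulo 2^N */
      return x < 0 ? 0u - ux : ux;          /* unsigned negation never traps */
    }

The caller then converts the unsigned result back to the original signed type, which corresponds to the fold_convert (type, *tp) call in the hunk.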