comparison gcc/tree-eh.c @ 55:77e2b8dfacca gcc-4.4.5

update it from 4.4.3 to 4.5.0
author ryoma <e075725@ie.u-ryukyu.ac.jp>
date Fri, 12 Feb 2010 23:39:51 +0900
parents a06113de4d67
children b7f97abdc517
comparing 52:c156f1bd5cd9 with 55:77e2b8dfacca
36 #include "timevar.h" 36 #include "timevar.h"
37 #include "langhooks.h" 37 #include "langhooks.h"
38 #include "ggc.h" 38 #include "ggc.h"
39 #include "toplev.h" 39 #include "toplev.h"
40 #include "gimple.h" 40 #include "gimple.h"
41 #include "target.h"
41 42
42 /* In some instances a tree and a gimple need to be stored in the same table, 43 /* In some instances a tree and a gimple need to be stored in the same table,
43 i.e. in hash tables. This is a structure to do this. */ 44 i.e. in hash tables. This is a structure to do this. */
44 typedef union {tree *tp; tree t; gimple g;} treemple; 45 typedef union {tree *tp; tree t; gimple g;} treemple;
45 46
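
The union above exists because the finally-tree and goto-queue code must key a single hash table by whichever of a tree pointer, a tree, or a gimple statement is relevant at that point. A minimal standalone C sketch of the idea, with stub types standing in for GCC's tree and gimple (illustrative only, not part of this change):

#include <stdio.h>

/* Stand-ins for GCC's pointer types.  All members are pointers, so
   they share one storage slot, and a table whose hash and equality
   functions look only at the raw pointer value can hold any kind.  */
typedef struct tree_node *tree;
typedef struct gimple_stmt *gimple;

typedef union { tree *tp; tree t; gimple g; } treemple;

int
main (void)
{
  treemple key = { 0 };
  /* Every member aliases the same slot.  */
  printf ("same storage: %d\n", (void *) &key.t == (void *) &key.g);
  return 0;
}
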
72 const void * const * x = (const void * const *) a; 73 const void * const * x = (const void * const *) a;
73 return (size_t)*x >> 4; 74 return (size_t)*x >> 4;
74 } 75 }
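
The shift in the hash function above matters because heap pointers are aligned: their low bits are almost always zero, and keeping them would crowd most keys into a few buckets. A self-contained sketch of the same trick (names are illustrative, not from the patch):

#include <stdio.h>
#include <stdlib.h>

/* malloc results are aligned (16 bytes is typical), so the low 4
   bits of a pointer carry no information; shifting them away gives
   a better-distributed hash value.  */
static size_t
ptr_hash (const void *p)
{
  return (size_t) p >> 4;
}

int
main (void)
{
  void *a = malloc (32), *b = malloc (32);
  printf ("bucket of a: %zu, bucket of b: %zu\n",
          ptr_hash (a) % 31, ptr_hash (b) % 31);
  free (a);
  free (b);
  return 0;
}
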
75 76
76 77
77 /* Remember and lookup EH region data for arbitrary statements. 78 /* Remember and lookup EH landing pad data for arbitrary statements.
78 Really this means any statement that could_throw_p. We could 79 Really this means any statement that could_throw_p. We could
79 stuff this information into the stmt_ann data structure, but: 80 stuff this information into the stmt_ann data structure, but:
80 81
81 (1) We absolutely rely on this information being kept until 82 (1) We absolutely rely on this information being kept until
82 we get to rtl. Once we're done with lowering here, if we lose 83 we get to rtl. Once we're done with lowering here, if we lose
84 85
85 (2) There are many more statements that *cannot* throw as 86 (2) There are many more statements that *cannot* throw as
86 compared to those that can. We should be saving some amount 87 compared to those that can. We should be saving some amount
87 of space by only allocating memory for those that can throw. */ 88 of space by only allocating memory for those that can throw. */
88 89
89 static void 90 /* Add statement T in function IFUN to landing pad NUM. */
90 record_stmt_eh_region (struct eh_region *region, gimple t)
91 {
92 if (!region)
93 return;
94
95 add_stmt_to_eh_region (t, get_eh_region_number (region));
96 }
97
98
99 /* Add statement T in function IFUN to EH region NUM. */
100 91
101 void 92 void
102 add_stmt_to_eh_region_fn (struct function *ifun, gimple t, int num) 93 add_stmt_to_eh_lp_fn (struct function *ifun, gimple t, int num)
103 { 94 {
104 struct throw_stmt_node *n; 95 struct throw_stmt_node *n;
105 void **slot; 96 void **slot;
106 97
107 gcc_assert (num >= 0); 98 gcc_assert (num != 0);
108 gcc_assert (gimple_code (t) != GIMPLE_RESX);
109 99
110 n = GGC_NEW (struct throw_stmt_node); 100 n = GGC_NEW (struct throw_stmt_node);
111 n->stmt = t; 101 n->stmt = t;
112 n->region_nr = num; 102 n->lp_nr = num;
113 103
114 if (!get_eh_throw_stmt_table (ifun)) 104 if (!get_eh_throw_stmt_table (ifun))
115 set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash, 105 set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
116 struct_ptr_eq, 106 struct_ptr_eq,
117 ggc_free)); 107 ggc_free));
119 slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT); 109 slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
120 gcc_assert (!*slot); 110 gcc_assert (!*slot);
121 *slot = n; 111 *slot = n;
122 } 112 }
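
add_stmt_to_eh_lp_fn leans on a common GCC/libiberty hashing idiom: struct_ptr_hash and struct_ptr_eq treat the pointer in the node's first field as the key, which is why stmt must remain the first member of struct throw_stmt_node and why removals and lookups can probe with a stack-allocated dummy node. A standalone model of that idiom (a sketch; the real table is GC-allocated via htab_create_ggc):

#include <stdio.h>

/* The key is the pointer stored in the node's first field.  */
struct node { void *stmt; int lp_nr; };

static size_t
node_hash (const void *a)
{
  return (size_t) *(const void *const *) a >> 4;
}

static int
node_eq (const void *a, const void *b)
{
  return *(const void *const *) a == *(const void *const *) b;
}

int
main (void)
{
  int stmt;                          /* stands in for a gimple */
  struct node real = { &stmt, 7 };
  struct node probe = { &stmt, 0 };  /* dummy node: key field only */
  printf ("eq=%d same_hash=%d\n",
          node_eq (&real, &probe),
          node_hash (&real) == node_hash (&probe));
  return 0;
}
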
123 113
124 114 /* Add statement T in the current function (cfun) to EH landing pad NUM. */
125 /* Add statement T in the current function (cfun) to EH region number
126 NUM. */
127 115
128 void 116 void
129 add_stmt_to_eh_region (gimple t, int num) 117 add_stmt_to_eh_lp (gimple t, int num)
130 { 118 {
131 add_stmt_to_eh_region_fn (cfun, t, num); 119 add_stmt_to_eh_lp_fn (cfun, t, num);
132 } 120 }
133 121
134 122 /* Add statement T to the single EH landing pad in REGION. */
135 /* Remove statement T in function IFUN from the EH region holding it. */ 123
124 static void
125 record_stmt_eh_region (eh_region region, gimple t)
126 {
127 if (region == NULL)
128 return;
129 if (region->type == ERT_MUST_NOT_THROW)
130 add_stmt_to_eh_lp_fn (cfun, t, -region->index);
131 else
132 {
133 eh_landing_pad lp = region->landing_pads;
134 if (lp == NULL)
135 lp = gen_eh_landing_pad (region);
136 else
137 gcc_assert (lp->next_lp == NULL);
138 add_stmt_to_eh_lp_fn (cfun, t, lp->index);
139 }
140 }
141
142
143 /* Remove statement T in function IFUN from its EH landing pad. */
136 144
137 bool 145 bool
138 remove_stmt_from_eh_region_fn (struct function *ifun, gimple t) 146 remove_stmt_from_eh_lp_fn (struct function *ifun, gimple t)
139 { 147 {
140 struct throw_stmt_node dummy; 148 struct throw_stmt_node dummy;
141 void **slot; 149 void **slot;
142 150
143 if (!get_eh_throw_stmt_table (ifun)) 151 if (!get_eh_throw_stmt_table (ifun))
154 else 162 else
155 return false; 163 return false;
156 } 164 }
157 165
158 166
159 /* Remove statement T in the current function (cfun) from the EH 167 /* Remove statement T in the current function (cfun) from its
160 region holding it. */ 168 EH landing pad. */
161 169
162 bool 170 bool
163 remove_stmt_from_eh_region (gimple t) 171 remove_stmt_from_eh_lp (gimple t)
164 { 172 {
165 return remove_stmt_from_eh_region_fn (cfun, t); 173 return remove_stmt_from_eh_lp_fn (cfun, t);
166 } 174 }
167 175
168 /* Determine if statement T is inside an EH region in function IFUN. 176 /* Determine if statement T is inside an EH region in function IFUN.
169 Return the EH region number if found, return -2 if IFUN does not 177 Positive numbers indicate a landing pad index; negative numbers
170 have an EH table and -1 if T could not be found in IFUN's EH region 178 indicate a MUST_NOT_THROW region index; zero indicates that the
171 table. */ 179 statement is not recorded in the region table. */
172 180
173 int 181 int
174 lookup_stmt_eh_region_fn (struct function *ifun, gimple t) 182 lookup_stmt_eh_lp_fn (struct function *ifun, gimple t)
175 { 183 {
176 struct throw_stmt_node *p, n; 184 struct throw_stmt_node *p, n;
177 185
178 if (!get_eh_throw_stmt_table (ifun)) 186 if (ifun->eh->throw_stmt_table == NULL)
179 return -2; 187 return 0;
180 188
181 n.stmt = t; 189 n.stmt = t;
182 p = (struct throw_stmt_node *) htab_find (get_eh_throw_stmt_table (ifun), &n); 190 p = (struct throw_stmt_node *) htab_find (ifun->eh->throw_stmt_table, &n);
183 return (p ? p->region_nr : -1); 191 return p ? p->lp_nr : 0;
184 } 192 }
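
The return convention described above packs everything a caller needs into one int per statement. A tiny standalone decoder for that convention (a sketch; the constants and messages are invented for illustration):

#include <stdio.h>

/* lp_nr as stored by add_stmt_to_eh_lp_fn and returned by
   lookup_stmt_eh_lp_fn:
     > 0   index of the landing pad the statement unwinds to
     < 0   negated index of the enclosing MUST_NOT_THROW region
     == 0  statement is not in the table, i.e. it cannot throw  */
static void
describe (int lp_nr)
{
  if (lp_nr > 0)
    printf ("throws to landing pad %d\n", lp_nr);
  else if (lp_nr < 0)
    printf ("inside must-not-throw region %d\n", -lp_nr);
  else
    printf ("cannot throw\n");
}

int
main (void)
{
  describe (3);
  describe (-2);
  describe (0);
  return 0;
}
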
185 193
186 194 /* Likewise, but always use the current function. */
187 /* Determine if statement T is inside an EH region in the current
188 function (cfun). Return the EH region number if found, return -2
189 if cfun does not have an EH table and -1 if T could not be found in
190 cfun's EH region table. */
191 195
192 int 196 int
193 lookup_stmt_eh_region (gimple t) 197 lookup_stmt_eh_lp (gimple t)
194 { 198 {
195 /* We can get called from initialized data when -fnon-call-exceptions 199 /* We can get called from initialized data when -fnon-call-exceptions
196 is on; prevent crash. */ 200 is on; prevent crash. */
197 if (!cfun) 201 if (!cfun)
198 return -1; 202 return 0;
199 203 return lookup_stmt_eh_lp_fn (cfun, t);
200 return lookup_stmt_eh_region_fn (cfun, t); 204 }
201 }
202
203
204 /* Determine if expression T is inside an EH region in the current
205 function (cfun). Return the EH region number if found, return -2
206 if IFUN does not have an EH table and -1 if T could not be found in
207 IFUN's EH region table. */
208
209 int
210 lookup_expr_eh_region (tree t)
211 {
212 /* We can get called from initialized data when -fnon-call-exceptions
213 is on; prevent crash. */
214 if (!cfun)
215 return -1;
216
217 if (!get_eh_throw_stmt_table (cfun))
218 return -2;
219
220 if (t && EXPR_P (t))
221 {
222 tree_ann_common_t ann = tree_common_ann (t);
223 if (ann)
224 return (int) ann->rn;
225 }
226
227 return -1;
228 }
229
230 205
231 /* First pass of EH node decomposition. Build up a tree of GIMPLE_TRY_FINALLY 206 /* First pass of EH node decomposition. Build up a tree of GIMPLE_TRY_FINALLY
232 nodes and LABEL_DECL nodes. We will use this during the second phase to 207 nodes and LABEL_DECL nodes. We will use this during the second phase to
233 determine if a goto leaves the body of a TRY_FINALLY_EXPR node. */ 208 determine if a goto leaves the body of a TRY_FINALLY_EXPR node. */
234 209
236 { 211 {
237 /* When storing a GIMPLE_TRY, we have to record a gimple. However 212 /* When storing a GIMPLE_TRY, we have to record a gimple. However
238 when deciding whether a GOTO to a certain LABEL_DECL (which is a 213 when deciding whether a GOTO to a certain LABEL_DECL (which is a
239 tree) leaves the TRY block, it's necessary to record a tree in 214 tree) leaves the TRY block, it's necessary to record a tree in
240 this field. Thus a treemple is used. */ 215 this field. Thus a treemple is used. */
241 treemple child; 216 treemple child;
242 gimple parent; 217 gimple parent;
243 }; 218 };
244 219
245 /* Note that this table is *not* marked GTY. It is short-lived. */ 220 /* Note that this table is *not* marked GTY. It is short-lived. */
246 static htab_t finally_tree; 221 static htab_t finally_tree;
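
The finally_tree table built here answers one question in the second phase: does a given goto or return leave the body of some GIMPLE_TRY_FINALLY? Each label and each nested try is mapped to its enclosing try, so the query is a walk up parent links (cf. outside_finally_tree below in this file). A toy standalone version of that walk, with small integer ids in place of treemple/gimple keys (illustrative only):

#include <stdio.h>

#define N 8
static int parent[N];   /* enclosing try of each node; 0 = none */

/* Returns 1 if walking up from NODE never reaches THE_TRY, i.e. a
   branch to NODE escapes THE_TRY.  */
static int
outside_finally (int node, int the_try)
{
  while (node != 0)
    {
      if (node == the_try)
        return 0;
      node = parent[node];
    }
  return 1;
}

int
main (void)
{
  parent[2] = 1;   /* label 2 sits inside try 1 */
  printf ("%d %d\n",
          outside_finally (2, 1),   /* 0: still inside */
          outside_finally (3, 1));  /* 1: escapes      */
  return 0;
}
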
261 } 236 }
262 237
263 static void 238 static void
264 collect_finally_tree (gimple stmt, gimple region); 239 collect_finally_tree (gimple stmt, gimple region);
265 240
266 /* Go through the gimple sequence. Works with collect_finally_tree to 241 /* Go through the gimple sequence. Works with collect_finally_tree to
267 record all GIMPLE_LABEL and GIMPLE_TRY statements. */ 242 record all GIMPLE_LABEL and GIMPLE_TRY statements. */
268 243
269 static void 244 static void
270 collect_finally_tree_1 (gimple_seq seq, gimple region) 245 collect_finally_tree_1 (gimple_seq seq, gimple region)
271 { 246 {
342 /* Second pass of EH node decomposition. Actually transform the GIMPLE_TRY 317 /* Second pass of EH node decomposition. Actually transform the GIMPLE_TRY
343 nodes into a set of gotos, magic labels, and eh regions. 318 nodes into a set of gotos, magic labels, and eh regions.
344 The eh region creation is straightforward, but frobbing all the gotos 319 The eh region creation is straightforward, but frobbing all the gotos
345 and such into shape isn't. */ 320 and such into shape isn't. */
346 321
322 /* The sequence into which we record all EH stuff. This will be
323 placed at the end of the function when we're all done. */
324 static gimple_seq eh_seq;
325
326 /* Record whether an EH region contains something that can throw,
327 indexed by EH region number. */
328 static bitmap eh_region_may_contain_throw_map;
329
330 /* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN
331 statements that are seen to escape this GIMPLE_TRY_FINALLY node.
332 The idea is to record a gimple statement for everything except for
333 the conditionals, which get their labels recorded. Since labels are
334 of type 'tree', we need this node to store both gimple and tree
335 objects. REPL_STMT is the sequence used to replace the goto/return
336 statement. CONT_STMT is used to store the statement that allows
337 the return/goto to jump to the original destination. */
338
339 struct goto_queue_node
340 {
341 treemple stmt;
342 gimple_seq repl_stmt;
343 gimple cont_stmt;
344 int index;
345 /* This is used when index >= 0 to indicate that stmt is a label (as
346 opposed to a goto stmt). */
347 int is_label;
348 };
349
347 /* State of the world while lowering. */ 350 /* State of the world while lowering. */
348 351
349 struct leh_state 352 struct leh_state
350 { 353 {
351 /* What's "current" while constructing the eh region tree. These 354 /* What's "current" while constructing the eh region tree. These
352 correspond to variables of the same name in cfun->eh, which we 355 correspond to variables of the same name in cfun->eh, which we
353 don't have easy access to. */ 356 don't have easy access to. */
354 struct eh_region *cur_region; 357 eh_region cur_region;
355 struct eh_region *prev_try; 358
359 /* What's "current" for the purposes of __builtin_eh_pointer. For
360 a CATCH, this is the associated TRY. For an EH_FILTER, this is
361 the associated ALLOWED_EXCEPTIONS, etc. */
362 eh_region ehp_region;
356 363
357 /* Processing of TRY_FINALLY requires a bit more state. This is 364 /* Processing of TRY_FINALLY requires a bit more state. This is
358 split out into a separate structure so that we don't have to 365 split out into a separate structure so that we don't have to
359 copy so much when processing other nodes. */ 366 copy so much when processing other nodes. */
360 struct leh_tf_state *tf; 367 struct leh_tf_state *tf;
366 try_finally_expr is the original GIMPLE_TRY_FINALLY. We need to retain 373 try_finally_expr is the original GIMPLE_TRY_FINALLY. We need to retain
367 this so that outside_finally_tree can reliably reference the tree used 374 this so that outside_finally_tree can reliably reference the tree used
368 in the collect_finally_tree data structures. */ 375 in the collect_finally_tree data structures. */
369 gimple try_finally_expr; 376 gimple try_finally_expr;
370 gimple top_p; 377 gimple top_p;
378
371 /* While lowering a top_p usually it is expanded into multiple statements, 379 /* While lowering a top_p usually it is expanded into multiple statements,
372 thus we need the following field to store them. */ 380 thus we need the following field to store them. */
373 gimple_seq top_p_seq; 381 gimple_seq top_p_seq;
374 382
375 /* The state outside this try_finally node. */ 383 /* The state outside this try_finally node. */
376 struct leh_state *outer; 384 struct leh_state *outer;
377 385
378 /* The exception region created for it. */ 386 /* The exception region created for it. */
379 struct eh_region *region; 387 eh_region region;
380 388
381 /* The GOTO_QUEUE is is an array of GIMPLE_GOTO and GIMPLE_RETURN statements 389 /* The goto queue. */
382 that are seen to escape this GIMPLE_TRY_FINALLY node. 390 struct goto_queue_node *goto_queue;
383 The idea is to record a gimple statement for everything except for
384 the conditionals, which get their labels recorded. Since labels are of
385 type 'tree', we need this node to store both gimple and tree objects.
386 REPL_STMT is the sequence used to replace the goto/return statement.
387 CONT_STMT is used to store the statement that allows the return/goto to
388 jump to the original destination. */
389 struct goto_queue_node {
390 treemple stmt;
391 gimple_seq repl_stmt;
392 gimple cont_stmt;
393 int index;
394 /* this is used when index >= 0 to indicate that stmt is a label(as
395 opposed to a goto stmt) */
396 int is_label;
397 } *goto_queue;
398 size_t goto_queue_size; 391 size_t goto_queue_size;
399 size_t goto_queue_active; 392 size_t goto_queue_active;
400 393
401 /* Pointer map to help in searching goto_queue when it is large. */ 394 /* Pointer map to help in searching goto_queue when it is large. */
402 struct pointer_map_t *goto_queue_map; 395 struct pointer_map_t *goto_queue_map;
407 /* A label to be added at the end of the completed transformed 400 /* A label to be added at the end of the completed transformed
408 sequence. It will be set if may_fallthru was true *at one time*, 401 sequence. It will be set if may_fallthru was true *at one time*,
409 though subsequent transformations may have cleared that flag. */ 402 though subsequent transformations may have cleared that flag. */
410 tree fallthru_label; 403 tree fallthru_label;
411 404
412 /* A label that has been registered with except.c to be the
413 landing pad for this try block. */
414 tree eh_label;
415
416 /* True if it is possible to fall out the bottom of the try block. 405 /* True if it is possible to fall out the bottom of the try block.
417 Cleared if the fallthru is converted to a goto. */ 406 Cleared if the fallthru is converted to a goto. */
418 bool may_fallthru; 407 bool may_fallthru;
419 408
420 /* True if any entry in goto_queue is a GIMPLE_RETURN. */ 409 /* True if any entry in goto_queue is a GIMPLE_RETURN. */
423 /* True if the finally block can receive an exception edge. 412 /* True if the finally block can receive an exception edge.
424 Cleared if the exception case is handled by code duplication. */ 413 Cleared if the exception case is handled by code duplication. */
425 bool may_throw; 414 bool may_throw;
426 }; 415 };
427 416
428 static gimple_seq lower_eh_filter (struct leh_state *, gimple); 417 static gimple_seq lower_eh_must_not_throw (struct leh_state *, gimple);
429 418
430 /* Search for STMT in the goto queue. Return the replacement, 419 /* Search for STMT in the goto queue. Return the replacement,
431 or null if the statement isn't in the queue. */ 420 or null if the statement isn't in the queue. */
432 421
433 #define LARGE_GOTO_QUEUE 20 422 #define LARGE_GOTO_QUEUE 20
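
LARGE_GOTO_QUEUE is the crossover point between the two lookup strategies used on the goto queue: a plain linear scan while the queue is small, and a pointer map built lazily once it grows past the threshold. A standalone sketch of the small-queue half (the map half is stubbed out; the structure and names are illustrative, not the real API):

#include <stdio.h>
#include <stddef.h>

#define LARGE_GOTO_QUEUE 20

struct qnode { const void *stmt; const char *repl; };

static const char *
find_replacement (struct qnode *q, size_t active, const void *stmt)
{
  size_t i;

  if (active <= LARGE_GOTO_QUEUE)
    {
      for (i = 0; i < active; i++)   /* cheap for small queues */
        if (q[i].stmt == stmt)
          return q[i].repl;
      return NULL;
    }
  /* Past the threshold the real code builds a pointer_map over all
     entries once and reuses it for every later lookup.  */
  return NULL;
}

int
main (void)
{
  int s1, s2;
  struct qnode q[] = { { &s1, "goto finally_label;" },
                       { &s2, "return-redirect sequence" } };
  printf ("%s\n", find_replacement (q, 2, &s2));
  return 0;
}
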
480 gimple_stmt_iterator *gsi) 469 gimple_stmt_iterator *gsi)
481 { 470 {
482 tree label; 471 tree label;
483 gimple_seq new_seq; 472 gimple_seq new_seq;
484 treemple temp; 473 treemple temp;
474 location_t loc = gimple_location (gsi_stmt (*gsi));
485 475
486 temp.tp = tp; 476 temp.tp = tp;
487 new_seq = find_goto_replacement (tf, temp); 477 new_seq = find_goto_replacement (tf, temp);
488 if (!new_seq) 478 if (!new_seq)
489 return; 479 return;
493 { 483 {
494 *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq)); 484 *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq));
495 return; 485 return;
496 } 486 }
497 487
498 label = create_artificial_label (); 488 label = create_artificial_label (loc);
499 /* Set the new label for the GIMPLE_COND */ 489 /* Set the new label for the GIMPLE_COND */
500 *tp = label; 490 *tp = label;
501 491
502 gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING); 492 gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING);
503 gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING); 493 gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING);
803 793
804 gcc_assert (q->is_label); 794 gcc_assert (q->is_label);
805 if (!q->repl_stmt) 795 if (!q->repl_stmt)
806 q->repl_stmt = gimple_seq_alloc (); 796 q->repl_stmt = gimple_seq_alloc ();
807 797
808 q->cont_stmt = gimple_build_goto (VEC_index (tree, tf->dest_array,q->index)); 798 q->cont_stmt = gimple_build_goto (VEC_index (tree, tf->dest_array, q->index));
809 799
810 if (mod) 800 if (mod)
811 gimple_seq_add_seq (&q->repl_stmt, mod); 801 gimple_seq_add_seq (&q->repl_stmt, mod);
812 802
813 x = gimple_build_goto (finlab); 803 x = gimple_build_goto (finlab);
814 gimple_seq_add_stmt (&q->repl_stmt, x); 804 gimple_seq_add_stmt (&q->repl_stmt, x);
805 }
806
807 /* Emit a standard landing pad sequence into SEQ for REGION. */
808
809 static void
810 emit_post_landing_pad (gimple_seq *seq, eh_region region)
811 {
812 eh_landing_pad lp = region->landing_pads;
813 gimple x;
814
815 if (lp == NULL)
816 lp = gen_eh_landing_pad (region);
817
818 lp->post_landing_pad = create_artificial_label (UNKNOWN_LOCATION);
819 EH_LANDING_PAD_NR (lp->post_landing_pad) = lp->index;
820
821 x = gimple_build_label (lp->post_landing_pad);
822 gimple_seq_add_stmt (seq, x);
823 }
824
825 /* Emit a RESX statement into SEQ for REGION. */
826
827 static void
828 emit_resx (gimple_seq *seq, eh_region region)
829 {
830 gimple x = gimple_build_resx (region->index);
831 gimple_seq_add_stmt (seq, x);
832 if (region->outer)
833 record_stmt_eh_region (region->outer, x);
834 }
835
836 /* Emit an EH_DISPATCH statement into SEQ for REGION. */
837
838 static void
839 emit_eh_dispatch (gimple_seq *seq, eh_region region)
840 {
841 gimple x = gimple_build_eh_dispatch (region->index);
842 gimple_seq_add_stmt (seq, x);
843 }
844
845 /* Note that the current EH region may contain a throw, or a
846 call to a function which itself may contain a throw. */
847
848 static void
849 note_eh_region_may_contain_throw (eh_region region)
850 {
851 while (!bitmap_bit_p (eh_region_may_contain_throw_map, region->index))
852 {
853 bitmap_set_bit (eh_region_may_contain_throw_map, region->index);
854 region = region->outer;
855 if (region == NULL)
856 break;
857 }
858 }
859
860 /* Check if REGION has been marked as containing a throw. If REGION is
861 NULL, this predicate is false. */
862
863 static inline bool
864 eh_region_may_contain_throw (eh_region r)
865 {
866 return r && bitmap_bit_p (eh_region_may_contain_throw_map, r->index);
815 } 867 }
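
note_eh_region_may_contain_throw only walks upward until it meets a region that is already marked: if a region is marked, every ancestor was marked by the same earlier call, so the loop can stop at once. A standalone model of that early-exit walk (an int array stands in for the real bitmap; the region ids are invented):

#include <stdio.h>

#define N 8
static int outer[N];       /* enclosing region; 0 = outermost */
static int may_throw[N];

static void
note_may_contain_throw (int region)
{
  while (!may_throw[region])
    {
      may_throw[region] = 1;
      region = outer[region];
      if (region == 0)
        break;
    }
}

int
main (void)
{
  outer[3] = 2;
  outer[2] = 1;
  note_may_contain_throw (3);   /* marks 3, 2 and 1 */
  note_may_contain_throw (2);   /* already marked: stops immediately */
  printf ("%d %d %d\n", may_throw[1], may_throw[2], may_throw[3]);
  return 0;
}
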
816 868
817 /* We want to transform 869 /* We want to transform
818 try { body; } catch { stuff; } 870 try { body; } catch { stuff; }
819 to 871 to
820 body; goto over; lab: stuff; over: 872 normal_sequence:
821 873 body;
822 TP is a GIMPLE_TRY node. LAB is the label that 874 over:
875 eh_sequence:
876 landing_pad:
877 stuff;
878 goto over;
879
880 TP is a GIMPLE_TRY node. REGION is the region whose post_landing_pad
823 should be placed before the second operand, or NULL. OVER is 881 should be placed before the second operand, or NULL. OVER is
824 an existing label that should be put at the exit, or NULL. */ 882 an existing label that should be put at the exit, or NULL. */
825 883
826 static gimple_seq 884 static gimple_seq
827 frob_into_branch_around (gimple tp, tree lab, tree over) 885 frob_into_branch_around (gimple tp, eh_region region, tree over)
828 { 886 {
829 gimple x; 887 gimple x;
830 gimple_seq cleanup, result; 888 gimple_seq cleanup, result;
889 location_t loc = gimple_location (tp);
831 890
832 cleanup = gimple_try_cleanup (tp); 891 cleanup = gimple_try_cleanup (tp);
833 result = gimple_try_eval (tp); 892 result = gimple_try_eval (tp);
834 893
835 if (gimple_seq_may_fallthru (result)) 894 if (region)
895 emit_post_landing_pad (&eh_seq, region);
896
897 if (gimple_seq_may_fallthru (cleanup))
836 { 898 {
837 if (!over) 899 if (!over)
838 over = create_artificial_label (); 900 over = create_artificial_label (loc);
839 x = gimple_build_goto (over); 901 x = gimple_build_goto (over);
840 gimple_seq_add_stmt (&result, x); 902 gimple_seq_add_stmt (&cleanup, x);
841 } 903 }
842 904 gimple_seq_add_seq (&eh_seq, cleanup);
843 if (lab)
844 {
845 x = gimple_build_label (lab);
846 gimple_seq_add_stmt (&result, x);
847 }
848
849 gimple_seq_add_seq (&result, cleanup);
850 905
851 if (over) 906 if (over)
852 { 907 {
853 x = gimple_build_label (over); 908 x = gimple_build_label (over);
854 gimple_seq_add_stmt (&result, x); 909 gimple_seq_add_stmt (&result, x);
884 tree label = tf->fallthru_label; 939 tree label = tf->fallthru_label;
885 treemple temp; 940 treemple temp;
886 941
887 if (!label) 942 if (!label)
888 { 943 {
889 label = create_artificial_label (); 944 label = create_artificial_label (gimple_location (tf->try_finally_expr));
890 tf->fallthru_label = label; 945 tf->fallthru_label = label;
891 if (tf->outer->tf) 946 if (tf->outer->tf)
892 { 947 {
893 temp.t = label; 948 temp.t = label;
894 record_in_finally_tree (temp, tf->outer->tf->try_finally_expr); 949 record_in_finally_tree (temp, tf->outer->tf->try_finally_expr);
920 static void 975 static void
921 honor_protect_cleanup_actions (struct leh_state *outer_state, 976 honor_protect_cleanup_actions (struct leh_state *outer_state,
922 struct leh_state *this_state, 977 struct leh_state *this_state,
923 struct leh_tf_state *tf) 978 struct leh_tf_state *tf)
924 { 979 {
925 gimple protect_cleanup_actions; 980 tree protect_cleanup_actions;
926 gimple_stmt_iterator gsi; 981 gimple_stmt_iterator gsi;
927 bool finally_may_fallthru; 982 bool finally_may_fallthru;
928 gimple_seq finally; 983 gimple_seq finally;
929 gimple x; 984 gimple x;
930 985
931 /* First check for nothing to do. */ 986 /* First check for nothing to do. */
932 if (lang_protect_cleanup_actions) 987 if (lang_protect_cleanup_actions == NULL)
933 protect_cleanup_actions = lang_protect_cleanup_actions (); 988 return;
934 else 989 protect_cleanup_actions = lang_protect_cleanup_actions ();
935 protect_cleanup_actions = NULL; 990 if (protect_cleanup_actions == NULL)
991 return;
936 992
937 finally = gimple_try_cleanup (tf->top_p); 993 finally = gimple_try_cleanup (tf->top_p);
938
939 /* If the EH case of the finally block can fall through, this may be a
940 structure of the form
941 try {
942 try {
943 throw ...;
944 } cleanup {
945 try {
946 throw ...;
947 } catch (...) {
948 }
949 }
950 } catch (...) {
951 yyy;
952 }
953 E.g. with an inline destructor with an embedded try block. In this
954 case we must save the runtime EH data around the nested exception.
955
956 This complication means that any time the previous runtime data might
957 be used (via fallthru from the finally) we handle the eh case here,
958 whether or not protect_cleanup_actions is active. */
959
960 finally_may_fallthru = gimple_seq_may_fallthru (finally); 994 finally_may_fallthru = gimple_seq_may_fallthru (finally);
961 if (!finally_may_fallthru && !protect_cleanup_actions)
962 return;
963 995
964 /* Duplicate the FINALLY block. Only need to do this for try-finally, 996 /* Duplicate the FINALLY block. Only need to do this for try-finally,
965 and not for cleanups. */ 997 and not for cleanups. */
966 if (this_state) 998 if (this_state)
967 finally = lower_try_finally_dup_block (finally, outer_state); 999 finally = lower_try_finally_dup_block (finally, outer_state);
973 cp/decl.c). Since it's logically at an outer level, we should call 1005 cp/decl.c). Since it's logically at an outer level, we should call
974 terminate before we get to it, so strip it away before adding the 1006 terminate before we get to it, so strip it away before adding the
975 MUST_NOT_THROW filter. */ 1007 MUST_NOT_THROW filter. */
976 gsi = gsi_start (finally); 1008 gsi = gsi_start (finally);
977 x = gsi_stmt (gsi); 1009 x = gsi_stmt (gsi);
978 if (protect_cleanup_actions 1010 if (gimple_code (x) == GIMPLE_TRY
979 && gimple_code (x) == GIMPLE_TRY
980 && gimple_try_kind (x) == GIMPLE_TRY_CATCH 1011 && gimple_try_kind (x) == GIMPLE_TRY_CATCH
981 && gimple_try_catch_is_cleanup (x)) 1012 && gimple_try_catch_is_cleanup (x))
982 { 1013 {
983 gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT); 1014 gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT);
984 gsi_remove (&gsi, false); 1015 gsi_remove (&gsi, false);
985 } 1016 }
986 1017
987 /* Resume execution after the exception. Adding this now lets 1018 /* Wrap the block with protect_cleanup_actions as the action. */
988 lower_eh_filter not add unnecessary gotos, as it is clear that 1019 x = gimple_build_eh_must_not_throw (protect_cleanup_actions);
989 we never fallthru from this copy of the finally block. */ 1020 x = gimple_build_try (finally, gimple_seq_alloc_with_stmt (x),
1021 GIMPLE_TRY_CATCH);
1022 finally = lower_eh_must_not_throw (outer_state, x);
1023
1024 /* Drop all of this into the exception sequence. */
1025 emit_post_landing_pad (&eh_seq, tf->region);
1026 gimple_seq_add_seq (&eh_seq, finally);
990 if (finally_may_fallthru) 1027 if (finally_may_fallthru)
991 { 1028 emit_resx (&eh_seq, tf->region);
992 tree save_eptr, save_filt;
993 tree tmp;
994
995 save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
996 save_filt = create_tmp_var (integer_type_node, "save_filt");
997
998 gsi = gsi_start (finally);
999 tmp = build0 (EXC_PTR_EXPR, ptr_type_node);
1000 x = gimple_build_assign (save_eptr, tmp);
1001 gsi_insert_before (&gsi, x, GSI_CONTINUE_LINKING);
1002
1003 tmp = build0 (FILTER_EXPR, integer_type_node);
1004 x = gimple_build_assign (save_filt, tmp);
1005 gsi_insert_before (&gsi, x, GSI_CONTINUE_LINKING);
1006
1007 gsi = gsi_last (finally);
1008 tmp = build0 (EXC_PTR_EXPR, ptr_type_node);
1009 x = gimple_build_assign (tmp, save_eptr);
1010 gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);
1011
1012 tmp = build0 (FILTER_EXPR, integer_type_node);
1013 x = gimple_build_assign (tmp, save_filt);
1014 gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);
1015
1016 x = gimple_build_resx (get_eh_region_number (tf->region));
1017 gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);
1018 }
1019
1020 /* Wrap the block with protect_cleanup_actions as the action. */
1021 if (protect_cleanup_actions)
1022 {
1023 gimple_seq seq = NULL, failure = NULL;
1024
1025 gimple_seq_add_stmt (&failure, protect_cleanup_actions);
1026 x = gimple_build_eh_filter (NULL, failure);
1027 gimple_eh_filter_set_must_not_throw (x, 1);
1028
1029 gimple_seq_add_stmt (&seq, x);
1030 x = gimple_build_try (finally, seq, GIMPLE_TRY_CATCH);
1031 finally = lower_eh_filter (outer_state, x);
1032 }
1033 else
1034 lower_eh_constructs_1 (outer_state, finally);
1035
1036 /* Hook this up to the end of the existing try block. If we
1037 previously fell through the end, we'll have to branch around.
1038 This means adding a new goto, and adding it to the queue. */
1039
1040 gsi = gsi_last (gimple_try_eval (tf->top_p));
1041
1042 if (tf->may_fallthru)
1043 {
1044 tree tmp;
1045 tmp = lower_try_finally_fallthru_label (tf);
1046 x = gimple_build_goto (tmp);
1047 gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);
1048
1049 if (this_state)
1050 maybe_record_in_goto_queue (this_state, x);
1051
1052 tf->may_fallthru = false;
1053 }
1054
1055 x = gimple_build_label (tf->eh_label);
1056 gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);
1057 gsi_insert_seq_after (&gsi, finally, GSI_CONTINUE_LINKING);
1058 1029
1059 /* Having now been handled, EH isn't to be considered with 1030 /* Having now been handled, EH isn't to be considered with
1060 the rest of the outgoing edges. */ 1031 the rest of the outgoing edges. */
1061 tf->may_throw = false; 1032 tf->may_throw = false;
1062 } 1033 }
1073 tree lab, return_val; 1044 tree lab, return_val;
1074 gimple x; 1045 gimple x;
1075 gimple_seq finally; 1046 gimple_seq finally;
1076 struct goto_queue_node *q, *qe; 1047 struct goto_queue_node *q, *qe;
1077 1048
1078 if (tf->may_throw) 1049 lab = create_artificial_label (gimple_location (tf->try_finally_expr));
1079 lab = tf->eh_label;
1080 else
1081 lab = create_artificial_label ();
1082 1050
1083 /* We expect that tf->top_p is a GIMPLE_TRY. */ 1051 /* We expect that tf->top_p is a GIMPLE_TRY. */
1084 finally = gimple_try_cleanup (tf->top_p); 1052 finally = gimple_try_cleanup (tf->top_p);
1085 tf->top_p_seq = gimple_try_eval (tf->top_p); 1053 tf->top_p_seq = gimple_try_eval (tf->top_p);
1086 1054
1098 1066
1099 replace_goto_queue (tf); 1067 replace_goto_queue (tf);
1100 1068
1101 lower_eh_constructs_1 (state, finally); 1069 lower_eh_constructs_1 (state, finally);
1102 gimple_seq_add_seq (&tf->top_p_seq, finally); 1070 gimple_seq_add_seq (&tf->top_p_seq, finally);
1071
1072 if (tf->may_throw)
1073 {
1074 emit_post_landing_pad (&eh_seq, tf->region);
1075
1076 x = gimple_build_goto (lab);
1077 gimple_seq_add_stmt (&eh_seq, x);
1078 }
1103 } 1079 }
1104 1080
1105 /* A subroutine of lower_try_finally. We have determined that there is 1081 /* A subroutine of lower_try_finally. We have determined that there is
1106 exactly one destination of the finally block. Restructure the 1082 exactly one destination of the finally block. Restructure the
1107 try_finally node for this special case. */ 1083 try_finally node for this special case. */
1111 { 1087 {
1112 struct goto_queue_node *q, *qe; 1088 struct goto_queue_node *q, *qe;
1113 gimple x; 1089 gimple x;
1114 gimple_seq finally; 1090 gimple_seq finally;
1115 tree finally_label; 1091 tree finally_label;
1092 location_t loc = gimple_location (tf->try_finally_expr);
1116 1093
1117 finally = gimple_try_cleanup (tf->top_p); 1094 finally = gimple_try_cleanup (tf->top_p);
1118 tf->top_p_seq = gimple_try_eval (tf->top_p); 1095 tf->top_p_seq = gimple_try_eval (tf->top_p);
1119 1096
1120 lower_eh_constructs_1 (state, finally); 1097 lower_eh_constructs_1 (state, finally);
1121 1098
1122 if (tf->may_throw) 1099 if (tf->may_throw)
1123 { 1100 {
1124 /* Only reachable via the exception edge. Add the given label to 1101 /* Only reachable via the exception edge. Add the given label to
1125 the head of the FINALLY block. Append a RESX at the end. */ 1102 the head of the FINALLY block. Append a RESX at the end. */
1126 1103 emit_post_landing_pad (&eh_seq, tf->region);
1127 x = gimple_build_label (tf->eh_label); 1104 gimple_seq_add_seq (&eh_seq, finally);
1128 gimple_seq_add_stmt (&tf->top_p_seq, x); 1105 emit_resx (&eh_seq, tf->region);
1129
1130 gimple_seq_add_seq (&tf->top_p_seq, finally);
1131
1132 x = gimple_build_resx (get_eh_region_number (tf->region));
1133
1134 gimple_seq_add_stmt (&tf->top_p_seq, x);
1135
1136 return; 1106 return;
1137 } 1107 }
1138 1108
1139 if (tf->may_fallthru) 1109 if (tf->may_fallthru)
1140 { 1110 {
1142 the two blocks run together; we'll fall out the bottom. */ 1112 the two blocks run together; we'll fall out the bottom. */
1143 gimple_seq_add_seq (&tf->top_p_seq, finally); 1113 gimple_seq_add_seq (&tf->top_p_seq, finally);
1144 return; 1114 return;
1145 } 1115 }
1146 1116
1147 finally_label = create_artificial_label (); 1117 finally_label = create_artificial_label (loc);
1148 x = gimple_build_label (finally_label); 1118 x = gimple_build_label (finally_label);
1149 gimple_seq_add_stmt (&tf->top_p_seq, x); 1119 gimple_seq_add_stmt (&tf->top_p_seq, x);
1150 1120
1151 gimple_seq_add_seq (&tf->top_p_seq, finally); 1121 gimple_seq_add_seq (&tf->top_p_seq, finally);
1152 1122
1195 gimple_seq finally; 1165 gimple_seq finally;
1196 gimple_seq new_stmt; 1166 gimple_seq new_stmt;
1197 gimple_seq seq; 1167 gimple_seq seq;
1198 gimple x; 1168 gimple x;
1199 tree tmp; 1169 tree tmp;
1170 location_t tf_loc = gimple_location (tf->try_finally_expr);
1200 1171
1201 finally = gimple_try_cleanup (tf->top_p); 1172 finally = gimple_try_cleanup (tf->top_p);
1202 tf->top_p_seq = gimple_try_eval (tf->top_p); 1173 tf->top_p_seq = gimple_try_eval (tf->top_p);
1203 new_stmt = NULL; 1174 new_stmt = NULL;
1204 1175
1213 gimple_seq_add_stmt (&new_stmt, x); 1184 gimple_seq_add_stmt (&new_stmt, x);
1214 } 1185 }
1215 1186
1216 if (tf->may_throw) 1187 if (tf->may_throw)
1217 { 1188 {
1218 x = gimple_build_label (tf->eh_label);
1219 gimple_seq_add_stmt (&new_stmt, x);
1220
1221 seq = lower_try_finally_dup_block (finally, state); 1189 seq = lower_try_finally_dup_block (finally, state);
1222 lower_eh_constructs_1 (state, seq); 1190 lower_eh_constructs_1 (state, seq);
1223 gimple_seq_add_seq (&new_stmt, seq); 1191
1224 1192 emit_post_landing_pad (&eh_seq, tf->region);
1225 x = gimple_build_resx (get_eh_region_number (tf->region)); 1193 gimple_seq_add_seq (&eh_seq, seq);
1226 gimple_seq_add_stmt (&new_stmt, x); 1194 emit_resx (&eh_seq, tf->region);
1227 } 1195 }
1228 1196
1229 if (tf->goto_queue) 1197 if (tf->goto_queue)
1230 { 1198 {
1231 struct goto_queue_node *q, *qe; 1199 struct goto_queue_node *q, *qe;
1256 1224
1257 q = labels[index].q; 1225 q = labels[index].q;
1258 if (! q) 1226 if (! q)
1259 continue; 1227 continue;
1260 1228
1261 lab = labels[index].label = create_artificial_label (); 1229 lab = labels[index].label
1230 = create_artificial_label (tf_loc);
1262 1231
1263 if (index == return_index) 1232 if (index == return_index)
1264 do_return_redirection (q, lab, NULL, &return_val); 1233 do_return_redirection (q, lab, NULL, &return_val);
1265 else 1234 else
1266 do_goto_redirection (q, lab, NULL, tf); 1235 do_goto_redirection (q, lab, NULL, tf);
1290 if (index == return_index) 1259 if (index == return_index)
1291 do_return_redirection (q, lab, NULL, &return_val); 1260 do_return_redirection (q, lab, NULL, &return_val);
1292 else 1261 else
1293 do_goto_redirection (q, lab, NULL, tf); 1262 do_goto_redirection (q, lab, NULL, tf);
1294 } 1263 }
1295 1264
1296 replace_goto_queue (tf); 1265 replace_goto_queue (tf);
1297 free (labels); 1266 free (labels);
1298 } 1267 }
1299 1268
1300 /* Need to link new stmts after running replace_goto_queue due 1269 /* Need to link new stmts after running replace_goto_queue due
1321 gimple x; 1290 gimple x;
1322 tree tmp; 1291 tree tmp;
1323 gimple switch_stmt; 1292 gimple switch_stmt;
1324 gimple_seq finally; 1293 gimple_seq finally;
1325 struct pointer_map_t *cont_map = NULL; 1294 struct pointer_map_t *cont_map = NULL;
1295 /* The location of the TRY_FINALLY stmt. */
1296 location_t tf_loc = gimple_location (tf->try_finally_expr);
1297 /* The location of the finally block. */
1298 location_t finally_loc;
1326 1299
1327 switch_body = gimple_seq_alloc (); 1300 switch_body = gimple_seq_alloc ();
1328 1301
1329 /* Mash the TRY block to the head of the chain. */ 1302 /* Mash the TRY block to the head of the chain. */
1330 finally = gimple_try_cleanup (tf->top_p); 1303 finally = gimple_try_cleanup (tf->top_p);
1331 tf->top_p_seq = gimple_try_eval (tf->top_p); 1304 tf->top_p_seq = gimple_try_eval (tf->top_p);
1305
1306 /* The location of the finally is either the last stmt in the finally
1307 block or the location of the TRY_FINALLY itself. */
1308 finally_loc = gimple_seq_last_stmt (tf->top_p_seq) != NULL ?
1309 gimple_location (gimple_seq_last_stmt (tf->top_p_seq))
1310 : tf_loc;
1332 1311
1333 /* Lower the finally block itself. */ 1312 /* Lower the finally block itself. */
1334 lower_eh_constructs_1 (state, finally); 1313 lower_eh_constructs_1 (state, finally);
1335 1314
1336 /* Prepare for switch statement generation. */ 1315 /* Prepare for switch statement generation. */
1339 eh_index = return_index + tf->may_return; 1318 eh_index = return_index + tf->may_return;
1340 fallthru_index = eh_index + tf->may_throw; 1319 fallthru_index = eh_index + tf->may_throw;
1341 ndests = fallthru_index + tf->may_fallthru; 1320 ndests = fallthru_index + tf->may_fallthru;
1342 1321
1343 finally_tmp = create_tmp_var (integer_type_node, "finally_tmp"); 1322 finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
1344 finally_label = create_artificial_label (); 1323 finally_label = create_artificial_label (finally_loc);
1345 1324
1346 /* We use VEC_quick_push on case_label_vec throughout this function, 1325 /* We use VEC_quick_push on case_label_vec throughout this function,
1347 since we know the size in advance and allocate precisely as much 1326 since we know the size in advance and allocate precisely as much
1348 space as needed. */ 1327 space as needed. */
1349 case_label_vec = VEC_alloc (tree, heap, ndests); 1328 case_label_vec = VEC_alloc (tree, heap, ndests);
1354 are done in this order to correspond to the sequence the code is 1333 are done in this order to correspond to the sequence the code is
1355 laid out. */ 1334 laid out. */
1356 1335
1357 if (tf->may_fallthru) 1336 if (tf->may_fallthru)
1358 { 1337 {
1359 x = gimple_build_assign (finally_tmp, build_int_cst (integer_type_node, 1338 x = gimple_build_assign (finally_tmp,
1360 fallthru_index)); 1339 build_int_cst (NULL, fallthru_index));
1361 gimple_seq_add_stmt (&tf->top_p_seq, x); 1340 gimple_seq_add_stmt (&tf->top_p_seq, x);
1362 1341
1363 if (tf->may_throw)
1364 {
1365 x = gimple_build_goto (finally_label);
1366 gimple_seq_add_stmt (&tf->top_p_seq, x);
1367 }
1368
1369
1370 last_case = build3 (CASE_LABEL_EXPR, void_type_node, 1342 last_case = build3 (CASE_LABEL_EXPR, void_type_node,
1371 build_int_cst (NULL_TREE, fallthru_index), NULL, 1343 build_int_cst (NULL, fallthru_index),
1372 create_artificial_label ()); 1344 NULL, create_artificial_label (tf_loc));
1373 VEC_quick_push (tree, case_label_vec, last_case); 1345 VEC_quick_push (tree, case_label_vec, last_case);
1374 last_case_index++; 1346 last_case_index++;
1375 1347
1376 x = gimple_build_label (CASE_LABEL (last_case)); 1348 x = gimple_build_label (CASE_LABEL (last_case));
1377 gimple_seq_add_stmt (&switch_body, x); 1349 gimple_seq_add_stmt (&switch_body, x);
1381 gimple_seq_add_stmt (&switch_body, x); 1353 gimple_seq_add_stmt (&switch_body, x);
1382 } 1354 }
1383 1355
1384 if (tf->may_throw) 1356 if (tf->may_throw)
1385 { 1357 {
1386 x = gimple_build_label (tf->eh_label); 1358 emit_post_landing_pad (&eh_seq, tf->region);
1387 gimple_seq_add_stmt (&tf->top_p_seq, x); 1359
1388 1360 x = gimple_build_assign (finally_tmp,
1389 x = gimple_build_assign (finally_tmp, build_int_cst (integer_type_node, 1361 build_int_cst (NULL, eh_index));
1390 eh_index)); 1362 gimple_seq_add_stmt (&eh_seq, x);
1391 gimple_seq_add_stmt (&tf->top_p_seq, x); 1363
1364 x = gimple_build_goto (finally_label);
1365 gimple_seq_add_stmt (&eh_seq, x);
1392 1366
1393 last_case = build3 (CASE_LABEL_EXPR, void_type_node, 1367 last_case = build3 (CASE_LABEL_EXPR, void_type_node,
1394 build_int_cst (NULL_TREE, eh_index), NULL, 1368 build_int_cst (NULL, eh_index),
1395 create_artificial_label ()); 1369 NULL, create_artificial_label (tf_loc));
1396 VEC_quick_push (tree, case_label_vec, last_case); 1370 VEC_quick_push (tree, case_label_vec, last_case);
1397 last_case_index++; 1371 last_case_index++;
1398 1372
1399 x = gimple_build_label (CASE_LABEL (last_case)); 1373 x = gimple_build_label (CASE_LABEL (last_case));
1400 gimple_seq_add_stmt (&switch_body, x); 1374 gimple_seq_add_stmt (&eh_seq, x);
1401 x = gimple_build_resx (get_eh_region_number (tf->region)); 1375 emit_resx (&eh_seq, tf->region);
1402 gimple_seq_add_stmt (&switch_body, x);
1403 } 1376 }
1404 1377
1405 x = gimple_build_label (finally_label); 1378 x = gimple_build_label (finally_label);
1406 gimple_seq_add_stmt (&tf->top_p_seq, x); 1379 gimple_seq_add_stmt (&tf->top_p_seq, x);
1407 1380
1422 mod = gimple_seq_alloc (); 1395 mod = gimple_seq_alloc ();
1423 1396
1424 if (q->index < 0) 1397 if (q->index < 0)
1425 { 1398 {
1426 x = gimple_build_assign (finally_tmp, 1399 x = gimple_build_assign (finally_tmp,
1427 build_int_cst (integer_type_node, 1400 build_int_cst (NULL, return_index));
1428 return_index));
1429 gimple_seq_add_stmt (&mod, x); 1401 gimple_seq_add_stmt (&mod, x);
1430 do_return_redirection (q, finally_label, mod, &return_val); 1402 do_return_redirection (q, finally_label, mod, &return_val);
1431 switch_id = return_index; 1403 switch_id = return_index;
1432 } 1404 }
1433 else 1405 else
1434 { 1406 {
1435 x = gimple_build_assign (finally_tmp, 1407 x = gimple_build_assign (finally_tmp,
1436 build_int_cst (integer_type_node, q->index)); 1408 build_int_cst (NULL, q->index));
1437 gimple_seq_add_stmt (&mod, x); 1409 gimple_seq_add_stmt (&mod, x);
1438 do_goto_redirection (q, finally_label, mod, tf); 1410 do_goto_redirection (q, finally_label, mod, tf);
1439 switch_id = q->index; 1411 switch_id = q->index;
1440 } 1412 }
1441 1413
1444 || !VEC_index (tree, case_label_vec, case_index)) 1416 || !VEC_index (tree, case_label_vec, case_index))
1445 { 1417 {
1446 tree case_lab; 1418 tree case_lab;
1447 void **slot; 1419 void **slot;
1448 case_lab = build3 (CASE_LABEL_EXPR, void_type_node, 1420 case_lab = build3 (CASE_LABEL_EXPR, void_type_node,
1449 build_int_cst (NULL_TREE, switch_id), NULL, 1421 build_int_cst (NULL, switch_id),
1450 NULL); 1422 NULL, NULL);
1451 /* We store the cont_stmt in the pointer map, so that we can recover 1423 /* We store the cont_stmt in the pointer map, so that we can recover
1452 it in the loop below. We don't create the new label while 1424 it in the loop below. We don't create the new label while
1453 walking the goto_queue because pointers don't offer a stable 1425 walking the goto_queue because pointers don't offer a stable
1454 order. */ 1426 order. */
1455 if (!cont_map) 1427 if (!cont_map)
1456 cont_map = pointer_map_create (); 1428 cont_map = pointer_map_create ();
1457 slot = pointer_map_insert (cont_map, case_lab); 1429 slot = pointer_map_insert (cont_map, case_lab);
1458 *slot = q->cont_stmt; 1430 *slot = q->cont_stmt;
1474 /* As the comment above suggests, CASE_LABEL (last_case) was just a 1446 /* As the comment above suggests, CASE_LABEL (last_case) was just a
1475 placeholder, it does not store an actual label, yet. */ 1447 placeholder, it does not store an actual label, yet. */
1476 gcc_assert (slot); 1448 gcc_assert (slot);
1477 cont_stmt = *(gimple *) slot; 1449 cont_stmt = *(gimple *) slot;
1478 1450
1479 label = create_artificial_label (); 1451 label = create_artificial_label (tf_loc);
1480 CASE_LABEL (last_case) = label; 1452 CASE_LABEL (last_case) = label;
1481 1453
1482 x = gimple_build_label (label); 1454 x = gimple_build_label (label);
1483 gimple_seq_add_stmt (&switch_body, x); 1455 gimple_seq_add_stmt (&switch_body, x);
1484 gimple_seq_add_stmt (&switch_body, cont_stmt); 1456 gimple_seq_add_stmt (&switch_body, cont_stmt);
1496 1468
1497 /* Build the switch statement, setting last_case to be the default 1469 /* Build the switch statement, setting last_case to be the default
1498 label. */ 1470 label. */
1499 switch_stmt = gimple_build_switch_vec (finally_tmp, last_case, 1471 switch_stmt = gimple_build_switch_vec (finally_tmp, last_case,
1500 case_label_vec); 1472 case_label_vec);
1473 gimple_set_location (switch_stmt, finally_loc);
1501 1474
1502 /* Need to link SWITCH_STMT after running replace_goto_queue 1475 /* Need to link SWITCH_STMT after running replace_goto_queue
1503 due to not wanting to process the same goto stmts twice. */ 1476 due to not wanting to process the same goto stmts twice. */
1504 gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt); 1477 gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt);
1505 gimple_seq_add_seq (&tf->top_p_seq, switch_body); 1478 gimple_seq_add_seq (&tf->top_p_seq, switch_body);
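
Written out at the C level, the switch strategy gives each way of leaving the try body its own selector value, funnels every exit through the single copy of the finally block, and then replays the original exit in the switch. A hand-lowered sketch for a body that can either return or fall through (selector values and names are illustrative; the EH path would add one more case, fed from eh_seq):

#include <stdio.h>

static void cleanup (void) { puts ("finally"); }

/* try { if (c) return 1; } finally { cleanup (); }  */
static int
lowered (int c)
{
  int finally_tmp, retval = 0;

  if (c)
    {
      retval = 1;
      finally_tmp = 0;          /* return_index */
      goto finally_label;
    }
  finally_tmp = 1;              /* fallthru_index */

finally_label:
  cleanup ();                   /* the one copy of the finally block */
  switch (finally_tmp)
    {
    case 0:
      return retval;            /* replay the original return */
    default:
      break;                    /* fall out the bottom */
    }
  return retval;
}

int
main (void)
{
  printf ("%d\n", lowered (1));
  printf ("%d\n", lowered (0));
  return 0;
}
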
1563 memset (&this_tf, 0, sizeof (this_tf)); 1536 memset (&this_tf, 0, sizeof (this_tf));
1564 this_tf.try_finally_expr = tp; 1537 this_tf.try_finally_expr = tp;
1565 this_tf.top_p = tp; 1538 this_tf.top_p = tp;
1566 this_tf.outer = state; 1539 this_tf.outer = state;
1567 if (using_eh_for_cleanups_p) 1540 if (using_eh_for_cleanups_p)
1568 this_tf.region 1541 this_tf.region = gen_eh_region_cleanup (state->cur_region);
1569 = gen_eh_region_cleanup (state->cur_region, state->prev_try);
1570 else 1542 else
1571 this_tf.region = NULL; 1543 this_tf.region = NULL;
1572 1544
1573 this_state.cur_region = this_tf.region; 1545 this_state.cur_region = this_tf.region;
1574 this_state.prev_try = state->prev_try; 1546 this_state.ehp_region = state->ehp_region;
1575 this_state.tf = &this_tf; 1547 this_state.tf = &this_tf;
1576 1548
1577 lower_eh_constructs_1 (&this_state, gimple_try_eval(tp)); 1549 lower_eh_constructs_1 (&this_state, gimple_try_eval(tp));
1578 1550
1579 /* Determine if the try block is escaped through the bottom. */ 1551 /* Determine if the try block is escaped through the bottom. */
1580 this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp)); 1552 this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
1581 1553
1582 /* Determine if any exceptions are possible within the try block. */ 1554 /* Determine if any exceptions are possible within the try block. */
1583 if (using_eh_for_cleanups_p) 1555 if (using_eh_for_cleanups_p)
1584 this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region); 1556 this_tf.may_throw = eh_region_may_contain_throw (this_tf.region);
1585 if (this_tf.may_throw) 1557 if (this_tf.may_throw)
1586 { 1558 honor_protect_cleanup_actions (state, &this_state, &this_tf);
1587 this_tf.eh_label = create_artificial_label ();
1588 set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
1589 honor_protect_cleanup_actions (state, &this_state, &this_tf);
1590 }
1591 1559
1592 /* Determine how many edges (still) reach the finally block. Or rather, 1560 /* Determine how many edges (still) reach the finally block. Or rather,
1593 how many destinations are reached by the finally block. Use this to 1561 how many destinations are reached by the finally block. Use this to
1594 determine how we process the finally block itself. */ 1562 determine how we process the finally block itself. */
1595 1563
1641 exception region trees that records all the magic. */ 1609 exception region trees that records all the magic. */
1642 1610
1643 static gimple_seq 1611 static gimple_seq
1644 lower_catch (struct leh_state *state, gimple tp) 1612 lower_catch (struct leh_state *state, gimple tp)
1645 { 1613 {
1646 struct eh_region *try_region; 1614 eh_region try_region = NULL;
1647 struct leh_state this_state; 1615 struct leh_state this_state = *state;
1648 gimple_stmt_iterator gsi; 1616 gimple_stmt_iterator gsi;
1649 tree out_label; 1617 tree out_label;
1650 1618 gimple_seq new_seq;
1651 try_region = gen_eh_region_try (state->cur_region); 1619 gimple x;
1652 this_state.cur_region = try_region; 1620 location_t try_catch_loc = gimple_location (tp);
1653 this_state.prev_try = try_region; 1621
1654 this_state.tf = state->tf; 1622 if (flag_exceptions)
1623 {
1624 try_region = gen_eh_region_try (state->cur_region);
1625 this_state.cur_region = try_region;
1626 }
1655 1627
1656 lower_eh_constructs_1 (&this_state, gimple_try_eval (tp)); 1628 lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));
1657 1629
1658 if (!get_eh_region_may_contain_throw (try_region)) 1630 if (!eh_region_may_contain_throw (try_region))
1659 { 1631 return gimple_try_eval (tp);
1660 return gimple_try_eval (tp); 1632
1661 } 1633 new_seq = NULL;
1634 emit_eh_dispatch (&new_seq, try_region);
1635 emit_resx (&new_seq, try_region);
1636
1637 this_state.cur_region = state->cur_region;
1638 this_state.ehp_region = try_region;
1662 1639
1663 out_label = NULL; 1640 out_label = NULL;
1664 for (gsi = gsi_start (gimple_try_cleanup (tp)); !gsi_end_p (gsi); ) 1641 for (gsi = gsi_start (gimple_try_cleanup (tp));
1665 { 1642 !gsi_end_p (gsi);
1666 struct eh_region *catch_region; 1643 gsi_next (&gsi))
1667 tree eh_label; 1644 {
1668 gimple x, gcatch; 1645 eh_catch c;
1646 gimple gcatch;
1647 gimple_seq handler;
1669 1648
1670 gcatch = gsi_stmt (gsi); 1649 gcatch = gsi_stmt (gsi);
1671 catch_region = gen_eh_region_catch (try_region, 1650 c = gen_eh_region_catch (try_region, gimple_catch_types (gcatch));
1672 gimple_catch_types (gcatch)); 1651
1673 1652 handler = gimple_catch_handler (gcatch);
1674 this_state.cur_region = catch_region; 1653 lower_eh_constructs_1 (&this_state, handler);
1675 this_state.prev_try = state->prev_try; 1654
1676 lower_eh_constructs_1 (&this_state, gimple_catch_handler (gcatch)); 1655 c->label = create_artificial_label (UNKNOWN_LOCATION);
1677 1656 x = gimple_build_label (c->label);
1678 eh_label = create_artificial_label (); 1657 gimple_seq_add_stmt (&new_seq, x);
1679 set_eh_region_tree_label (catch_region, eh_label); 1658
1680 1659 gimple_seq_add_seq (&new_seq, handler);
1681 x = gimple_build_label (eh_label); 1660
1682 gsi_insert_before (&gsi, x, GSI_SAME_STMT); 1661 if (gimple_seq_may_fallthru (new_seq))
1683
1684 if (gimple_seq_may_fallthru (gimple_catch_handler (gcatch)))
1685 { 1662 {
1686 if (!out_label) 1663 if (!out_label)
1687 out_label = create_artificial_label (); 1664 out_label = create_artificial_label (try_catch_loc);
1688 1665
1689 x = gimple_build_goto (out_label); 1666 x = gimple_build_goto (out_label);
1690 gimple_seq_add_stmt (gimple_catch_handler_ptr (gcatch), x); 1667 gimple_seq_add_stmt (&new_seq, x);
1691 } 1668 }
1692 1669 if (!c->type_list)
1693 gsi_insert_seq_before (&gsi, gimple_catch_handler (gcatch), 1670 break;
1694 GSI_SAME_STMT); 1671 }
1695 gsi_remove (&gsi, false); 1672
1696 } 1673 gimple_try_set_cleanup (tp, new_seq);
1697 1674
1698 return frob_into_branch_around (tp, NULL, out_label); 1675 return frob_into_branch_around (tp, try_region, out_label);
1699 } 1676 }
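
At run time the cleanup sequence built above behaves like a dispatch on the exception's type filter: control enters at the region's landing pad, GIMPLE_EH_DISPATCH jumps to the label of the first matching catch, and if nothing matches the RESX rethrows into the outer region; a catch-all ends the chain, which is why the loop stops at an empty type_list. A loose standalone model of that control flow (the filter values and messages are invented for illustration):

#include <stdio.h>

enum { FILTER_A = 1 };

static void
dispatch (int filter, int have_catch_all)
{
  switch (filter)
    {
    case FILTER_A:
      puts ("catch (A) handler, then goto over");
      return;
    default:
      if (have_catch_all)
        puts ("catch (...) handler, then goto over");
      else
        puts ("resx: rethrow into the outer region");
    }
}

int
main (void)
{
  dispatch (FILTER_A, 0);
  dispatch (99, 0);   /* no match, no catch-all: resx */
  dispatch (99, 1);   /* catch-all swallows it */
  return 0;
}
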
1700 1677
1701 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY with a 1678 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY with a
1702 GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception 1679 GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception
1703 region trees that record all the magic. */ 1680 region trees that record all the magic. */
1704 1681
1705 static gimple_seq 1682 static gimple_seq
1706 lower_eh_filter (struct leh_state *state, gimple tp) 1683 lower_eh_filter (struct leh_state *state, gimple tp)
1707 { 1684 {
1708 struct leh_state this_state; 1685 struct leh_state this_state = *state;
1709 struct eh_region *this_region; 1686 eh_region this_region = NULL;
1710 gimple inner; 1687 gimple inner, x;
1711 tree eh_label; 1688 gimple_seq new_seq;
1712 1689
1713 inner = gimple_seq_first_stmt (gimple_try_cleanup (tp)); 1690 inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
1714 1691
1715 if (gimple_eh_filter_must_not_throw (inner)) 1692 if (flag_exceptions)
1716 this_region = gen_eh_region_must_not_throw (state->cur_region); 1693 {
1717 else 1694 this_region = gen_eh_region_allowed (state->cur_region,
1718 this_region = gen_eh_region_allowed (state->cur_region, 1695 gimple_eh_filter_types (inner));
1719 gimple_eh_filter_types (inner)); 1696 this_state.cur_region = this_region;
1720 this_state = *state; 1697 }
1721 this_state.cur_region = this_region;
1722 /* For must not throw regions any cleanup regions inside it
1723 can't reach outer catch regions. */
1724 if (gimple_eh_filter_must_not_throw (inner))
1725 this_state.prev_try = NULL;
1726 1698
1727 lower_eh_constructs_1 (&this_state, gimple_try_eval (tp)); 1699 lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));
1728 1700
1729 if (!get_eh_region_may_contain_throw (this_region)) 1701 if (!eh_region_may_contain_throw (this_region))
1730 { 1702 return gimple_try_eval (tp);
1731 return gimple_try_eval (tp); 1703
1732 } 1704 new_seq = NULL;
1733 1705 this_state.cur_region = state->cur_region;
1734 lower_eh_constructs_1 (state, gimple_eh_filter_failure (inner)); 1706 this_state.ehp_region = this_region;
1735 gimple_try_set_cleanup (tp, gimple_eh_filter_failure (inner)); 1707
1736 1708 emit_eh_dispatch (&new_seq, this_region);
1737 eh_label = create_artificial_label (); 1709 emit_resx (&new_seq, this_region);
1738 set_eh_region_tree_label (this_region, eh_label); 1710
1739 1711 this_region->u.allowed.label = create_artificial_label (UNKNOWN_LOCATION);
1740 return frob_into_branch_around (tp, eh_label, NULL); 1712 x = gimple_build_label (this_region->u.allowed.label);
1713 gimple_seq_add_stmt (&new_seq, x);
1714
1715 lower_eh_constructs_1 (&this_state, gimple_eh_filter_failure (inner));
1716 gimple_seq_add_seq (&new_seq, gimple_eh_filter_failure (inner));
1717
1718 gimple_try_set_cleanup (tp, new_seq);
1719
1720 return frob_into_branch_around (tp, this_region, NULL);
1721 }
1722
1723 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY with
1724 an GIMPLE_EH_MUST_NOT_THROW to a sequence of labels and blocks,
1725 plus the exception region trees that record all the magic. */
1726
1727 static gimple_seq
1728 lower_eh_must_not_throw (struct leh_state *state, gimple tp)
1729 {
1730 struct leh_state this_state = *state;
1731
1732 if (flag_exceptions)
1733 {
1734 gimple inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
1735 eh_region this_region;
1736
1737 this_region = gen_eh_region_must_not_throw (state->cur_region);
1738 this_region->u.must_not_throw.failure_decl
1739 = gimple_eh_must_not_throw_fndecl (inner);
1740 this_region->u.must_not_throw.failure_loc = gimple_location (tp);
1741
1742 /* In order to get mangling applied to this decl, we must mark it
1743 used now. Otherwise, pass_ipa_free_lang_data won't think it
1744 needs to happen. */
1745 TREE_USED (this_region->u.must_not_throw.failure_decl) = 1;
1746
1747 this_state.cur_region = this_region;
1748 }
1749
1750 lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));
1751
1752 return gimple_try_eval (tp);
1741 } 1753 }
1742 1754
1743 /* Implement a cleanup expression. This is similar to try-finally, 1755 /* Implement a cleanup expression. This is similar to try-finally,
1744 except that we only execute the cleanup block for exception edges. */ 1756 except that we only execute the cleanup block for exception edges. */
1745 1757
1746 static gimple_seq 1758 static gimple_seq
1747 lower_cleanup (struct leh_state *state, gimple tp) 1759 lower_cleanup (struct leh_state *state, gimple tp)
1748 { 1760 {
1749 struct leh_state this_state; 1761 struct leh_state this_state = *state;
1750 struct eh_region *this_region; 1762 eh_region this_region = NULL;
1751 struct leh_tf_state fake_tf; 1763 struct leh_tf_state fake_tf;
1752 gimple_seq result; 1764 gimple_seq result;
1753 1765
1754 /* If not using eh, then exception-only cleanups are no-ops. */ 1766 if (flag_exceptions)
1755 if (!flag_exceptions) 1767 {
1756 { 1768 this_region = gen_eh_region_cleanup (state->cur_region);
1757 result = gimple_try_eval (tp); 1769 this_state.cur_region = this_region;
1758 lower_eh_constructs_1 (state, result); 1770 }
1759 return result;
1760 }
1761
1762 this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
1763 this_state = *state;
1764 this_state.cur_region = this_region;
1765 1771
1766 lower_eh_constructs_1 (&this_state, gimple_try_eval (tp)); 1772 lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));
1767 1773
1768 if (!get_eh_region_may_contain_throw (this_region)) 1774 if (!eh_region_may_contain_throw (this_region))
1769 { 1775 return gimple_try_eval (tp);
1770 return gimple_try_eval (tp);
1771 }
1772 1776
1773 /* Build enough of a try-finally state so that we can reuse 1777 /* Build enough of a try-finally state so that we can reuse
1774 honor_protect_cleanup_actions. */ 1778 honor_protect_cleanup_actions. */
1775 memset (&fake_tf, 0, sizeof (fake_tf)); 1779 memset (&fake_tf, 0, sizeof (fake_tf));
1776 fake_tf.top_p = tp; 1780 fake_tf.top_p = fake_tf.try_finally_expr = tp;
1777 fake_tf.outer = state; 1781 fake_tf.outer = state;
1778 fake_tf.region = this_region; 1782 fake_tf.region = this_region;
1779 fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp)); 1783 fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
1780 fake_tf.may_throw = true; 1784 fake_tf.may_throw = true;
1781 1785
1782 fake_tf.eh_label = create_artificial_label ();
1783 set_eh_region_tree_label (this_region, fake_tf.eh_label);
1784
1785 honor_protect_cleanup_actions (state, NULL, &fake_tf); 1786 honor_protect_cleanup_actions (state, NULL, &fake_tf);
1786 1787
1787 if (fake_tf.may_throw) 1788 if (fake_tf.may_throw)
1788 { 1789 {
1789 /* In this case honor_protect_cleanup_actions had nothing to do, 1790 /* In this case honor_protect_cleanup_actions had nothing to do,
1790 and we should process this normally. */ 1791 and we should process this normally. */
1791 lower_eh_constructs_1 (state, gimple_try_cleanup (tp)); 1792 lower_eh_constructs_1 (state, gimple_try_cleanup (tp));
1792 result = frob_into_branch_around (tp, fake_tf.eh_label, 1793 result = frob_into_branch_around (tp, this_region,
1793 fake_tf.fallthru_label); 1794 fake_tf.fallthru_label);
1794 } 1795 }
1795 else 1796 else
1796 { 1797 {
1797 /* In this case honor_protect_cleanup_actions did nearly all of 1798 /* In this case honor_protect_cleanup_actions did nearly all of
1798 the work. All we have left is to append the fallthru_label. */ 1799 the work. All we have left is to append the fallthru_label. */
1805 } 1806 }
1806 } 1807 }
1807 return result; 1808 return result;
1808 } 1809 }
1809 1810
1810 1811 /* Main loop for lowering eh constructs. Also moves gsi to the next
1811
1812 /* Main loop for lowering eh constructs. Also moves gsi to the next
1813 statement. */ 1812 statement. */
1814 1813
1815 static void 1814 static void
1816 lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi) 1815 lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
1817 { 1816 {
1820 gimple stmt = gsi_stmt (*gsi); 1819 gimple stmt = gsi_stmt (*gsi);
1821 1820
1822 switch (gimple_code (stmt)) 1821 switch (gimple_code (stmt))
1823 { 1822 {
1824 case GIMPLE_CALL: 1823 case GIMPLE_CALL:
1824 {
1825 tree fndecl = gimple_call_fndecl (stmt);
1826 tree rhs, lhs;
1827
1828 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1829 switch (DECL_FUNCTION_CODE (fndecl))
1830 {
1831 case BUILT_IN_EH_POINTER:
1832 /* The front end may have generated a call to
1833 __builtin_eh_pointer (0) within a catch region. Replace
1834 this zero argument with the current catch region number. */
1835 if (state->ehp_region)
1836 {
1837 tree nr = build_int_cst (NULL, state->ehp_region->index);
1838 gimple_call_set_arg (stmt, 0, nr);
1839 }
1840 else
1841 {
1842 /* The user has done something silly. Remove it. */
1843 rhs = build_int_cst (ptr_type_node, 0);
1844 goto do_replace;
1845 }
1846 break;
1847
1848 case BUILT_IN_EH_FILTER:
1849 /* ??? This should never appear, but since it's a builtin it
1850 is accessible to abuse by users. Just remove it and
1851 replace the use with the arbitrary value zero. */
1852 rhs = build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
1853 do_replace:
1854 lhs = gimple_call_lhs (stmt);
1855 x = gimple_build_assign (lhs, rhs);
1856 gsi_insert_before (gsi, x, GSI_SAME_STMT);
1857 /* FALLTHRU */
1858
1859 case BUILT_IN_EH_COPY_VALUES:
1860 /* Likewise this should not appear. Remove it. */
1861 gsi_remove (gsi, true);
1862 return;
1863
1864 default:
1865 break;
1866 }
1867 }
1868 /* FALLTHRU */
1869
1825 case GIMPLE_ASSIGN: 1870 case GIMPLE_ASSIGN:
1871 /* If the stmt can throw, use a new temporary for the assignment
1872 to a LHS. This makes sure the old value of the LHS is
1873 available on the EH edge. Only do so for statements that
1874 potentially fall thru (e.g. not noreturn calls), otherwise
1875 this new assignment might create fake fallthru regions. */
1876 if (stmt_could_throw_p (stmt)
1877 && gimple_has_lhs (stmt)
1878 && gimple_stmt_may_fallthru (stmt)
1879 && !tree_could_throw_p (gimple_get_lhs (stmt))
1880 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
1881 {
1882 tree lhs = gimple_get_lhs (stmt);
1883 tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
1884 gimple s = gimple_build_assign (lhs, tmp);
1885 gimple_set_location (s, gimple_location (stmt));
1886 gimple_set_block (s, gimple_block (stmt));
1887 gimple_set_lhs (stmt, tmp);
1888 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
1889 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
1890 DECL_GIMPLE_REG_P (tmp) = 1;
1891 gsi_insert_after (gsi, s, GSI_SAME_STMT);
1892 }
1826 /* Look for things that can throw exceptions, and record them. */ 1893 /* Look for things that can throw exceptions, and record them. */
1827 if (state->cur_region && stmt_could_throw_p (stmt)) 1894 if (state->cur_region && stmt_could_throw_p (stmt))
1828 { 1895 {
1829 record_stmt_eh_region (state->cur_region, stmt); 1896 record_stmt_eh_region (state->cur_region, stmt);
1830 note_eh_region_may_contain_throw (state->cur_region); 1897 note_eh_region_may_contain_throw (state->cur_region);
1845 if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY) 1912 if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
1846 replace = lower_try_finally (state, stmt); 1913 replace = lower_try_finally (state, stmt);
1847 else 1914 else
1848 { 1915 {
1849 x = gimple_seq_first_stmt (gimple_try_cleanup (stmt)); 1916 x = gimple_seq_first_stmt (gimple_try_cleanup (stmt));
1850 switch (gimple_code (x)) 1917 if (!x)
1851 { 1918 {
1852 case GIMPLE_CATCH: 1919 replace = gimple_try_eval (stmt);
1853 replace = lower_catch (state, stmt); 1920 lower_eh_constructs_1 (state, replace);
1854 break;
1855 case GIMPLE_EH_FILTER:
1856 replace = lower_eh_filter (state, stmt);
1857 break;
1858 default:
1859 replace = lower_cleanup (state, stmt);
1860 break;
1861 } 1921 }
1922 else
1923 switch (gimple_code (x))
1924 {
1925 case GIMPLE_CATCH:
1926 replace = lower_catch (state, stmt);
1927 break;
1928 case GIMPLE_EH_FILTER:
1929 replace = lower_eh_filter (state, stmt);
1930 break;
1931 case GIMPLE_EH_MUST_NOT_THROW:
1932 replace = lower_eh_must_not_throw (state, stmt);
1933 break;
1934 default:
1935 replace = lower_cleanup (state, stmt);
1936 break;
1937 }
1862 } 1938 }
1863 1939
1864 /* Remove the old stmt and insert the transformed sequence 1940 /* Remove the old stmt and insert the transformed sequence
1865 instead. */ 1941 instead. */
1866 gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT); 1942 gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT);
1890 1966
1891 static unsigned int 1967 static unsigned int
1892 lower_eh_constructs (void) 1968 lower_eh_constructs (void)
1893 { 1969 {
1894 struct leh_state null_state; 1970 struct leh_state null_state;
1895 1971 gimple_seq bodyp;
1896 gimple_seq bodyp = gimple_body (current_function_decl); 1972
1973 bodyp = gimple_body (current_function_decl);
1974 if (bodyp == NULL)
1975 return 0;
1897 1976
1898 finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free); 1977 finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
1978 eh_region_may_contain_throw_map = BITMAP_ALLOC (NULL);
1979 memset (&null_state, 0, sizeof (null_state));
1899 1980
1900 collect_finally_tree_1 (bodyp, NULL); 1981 collect_finally_tree_1 (bodyp, NULL);
1901
1902 memset (&null_state, 0, sizeof (null_state));
1903 lower_eh_constructs_1 (&null_state, bodyp); 1982 lower_eh_constructs_1 (&null_state, bodyp);
1904 1983
1984 /* We assume there's a return statement, or something, at the end of
1985 the function, and thus plopping the EH sequence afterward won't
1986 change anything. */
1987 gcc_assert (!gimple_seq_may_fallthru (bodyp));
1988 gimple_seq_add_seq (&bodyp, eh_seq);
1989
1990 /* We assume that since BODYP already existed, adding EH_SEQ to it
1991 didn't change its value, and we don't have to re-set the function. */
1992 gcc_assert (bodyp == gimple_body (current_function_decl));
1993
1905 htab_delete (finally_tree); 1994 htab_delete (finally_tree);
1906 1995 BITMAP_FREE (eh_region_may_contain_throw_map);
1907 collect_eh_region_array (); 1996 eh_seq = NULL;
1997
1998 /* If this function needs a language specific EH personality routine
1999 and the frontend didn't already set one do so now. */
2000 if (function_needs_eh_personality (cfun) == eh_personality_lang
2001 && !DECL_FUNCTION_PERSONALITY (current_function_decl))
2002 DECL_FUNCTION_PERSONALITY (current_function_decl)
2003 = lang_hooks.eh_personality ();
2004
1908 return 0; 2005 return 0;
1909 } 2006 }
1910 2007
1911 struct gimple_opt_pass pass_lower_eh = 2008 struct gimple_opt_pass pass_lower_eh =
1912 { 2009 {
1924 0, /* properties_destroyed */ 2021 0, /* properties_destroyed */
1925 0, /* todo_flags_start */ 2022 0, /* todo_flags_start */
1926 TODO_dump_func /* todo_flags_finish */ 2023 TODO_dump_func /* todo_flags_finish */
1927 } 2024 }
1928 }; 2025 };
1929
1930 2026
1931 /* Construct EH edges for STMT. */ 2027 /* Create the multiple edges from an EH_DISPATCH statement to all of
1932 2028 the possible handlers for its EH region. Return true if there is
1933 static void 2029 a fallthru edge; false if there is not. */
1934 make_eh_edge (struct eh_region *region, void *data) 2030
1935 { 2031 bool
1936 gimple stmt; 2032 make_eh_dispatch_edges (gimple stmt)
1937 tree lab; 2033 {
2034 eh_region r;
2035 eh_catch c;
1938 basic_block src, dst; 2036 basic_block src, dst;
1939 2037
1940 stmt = (gimple) data; 2038 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
1941 lab = get_eh_region_tree_label (region);
1942
1943 src = gimple_bb (stmt); 2039 src = gimple_bb (stmt);
1944 dst = label_to_block (lab); 2040
1945 2041 switch (r->type)
1946 make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH); 2042 {
1947 } 2043 case ERT_TRY:
2044 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
2045 {
2046 dst = label_to_block (c->label);
2047 make_edge (src, dst, 0);
2048
2049 /* A catch-all handler doesn't have a fallthru. */
2050 if (c->type_list == NULL)
2051 return false;
2052 }
2053 break;
2054
2055 case ERT_ALLOWED_EXCEPTIONS:
2056 dst = label_to_block (r->u.allowed.label);
2057 make_edge (src, dst, 0);
2058 break;
2059
2060 default:
2061 gcc_unreachable ();
2062 }
2063
2064 return true;
2065 }
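
An illustrative usage sketch (not part of this change): a CFG builder that
finds an EH_DISPATCH ending a block consumes the return value as its
fallthru flag, matching the catch-all case above that returns false.
BB and FALLTHRU are hypothetical locals here.

    gimple last = last_stmt (bb);
    bool fallthru = false;

    if (last && gimple_code (last) == GIMPLE_EH_DISPATCH)
      /* True means a normal successor edge is still required in
         addition to the handler edges created above.  */
      fallthru = make_eh_dispatch_edges (last);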
2066
2067 /* Create the single EH edge from STMT to its nearest landing pad,
2068 if there is such a landing pad within the current function. */
1948 2069
1949 void 2070 void
1950 make_eh_edges (gimple stmt) 2071 make_eh_edges (gimple stmt)
1951 { 2072 {
1952 int region_nr; 2073 basic_block src, dst;
1953 bool is_resx; 2074 eh_landing_pad lp;
1954 2075 int lp_nr;
1955 if (gimple_code (stmt) == GIMPLE_RESX) 2076
1956 { 2077 lp_nr = lookup_stmt_eh_lp (stmt);
1957 region_nr = gimple_resx_region (stmt); 2078 if (lp_nr <= 0)
1958 is_resx = true; 2079 return;
1959 } 2080
1960 else 2081 lp = get_eh_landing_pad_from_number (lp_nr);
1961 { 2082 gcc_assert (lp != NULL);
1962 region_nr = lookup_stmt_eh_region (stmt); 2083
1963 if (region_nr < 0) 2084 src = gimple_bb (stmt);
1964 return; 2085 dst = label_to_block (lp->post_landing_pad);
1965 is_resx = false; 2086 make_edge (src, dst, EDGE_EH);
1966 } 2087 }
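
A minimal usage sketch, assuming the usual loop over basic blocks during
CFG construction (BB is a hypothetical local):

    gimple last = last_stmt (bb);

    /* A statement that can throw to a landing pad must end its block;
       make_eh_edges looks up that landing pad and adds the single
       EDGE_EH to its post_landing_pad block.  */
    if (last && stmt_can_throw_internal (last))
      make_eh_edges (last);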
1967 2088
1968 foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt); 2089 /* Do the work in redirecting EDGE_IN to NEW_BB within the EH region tree;
1969 } 2090 do not actually perform the final edge redirection.
1970 2091
1971 static bool mark_eh_edge_found_error; 2092 CHANGE_REGION is true when we're being called from cleanup_empty_eh and
1972 2093 we intend to change the destination EH region as well; this means
1973 /* Mark edge make_eh_edge would create for given region by setting it aux 2094 EH_LANDING_PAD_NR must already be set on the destination block label.
1974 field, output error if something goes wrong. */ 2095 If false, we're being called from generic cfg manipulation code and we
2096 should preserve our place within the region tree. */
1975 2097
1976 static void 2098 static void
1977 mark_eh_edge (struct eh_region *region, void *data) 2099 redirect_eh_edge_1 (edge edge_in, basic_block new_bb, bool change_region)
1978 { 2100 {
1979 gimple stmt; 2101 eh_landing_pad old_lp, new_lp;
1980 tree lab; 2102 basic_block old_bb;
1981 basic_block src, dst; 2103 gimple throw_stmt;
1982 edge e; 2104 int old_lp_nr, new_lp_nr;
1983 2105 tree old_label, new_label;
1984 stmt = (gimple) data;
1985 lab = get_eh_region_tree_label (region);
1986
1987 src = gimple_bb (stmt);
1988 dst = label_to_block (lab);
1989
1990 e = find_edge (src, dst);
1991 if (!e)
1992 {
1993 error ("EH edge %i->%i is missing", src->index, dst->index);
1994 mark_eh_edge_found_error = true;
1995 }
1996 else if (!(e->flags & EDGE_EH))
1997 {
1998 error ("EH edge %i->%i miss EH flag", src->index, dst->index);
1999 mark_eh_edge_found_error = true;
2000 }
2001 else if (e->aux)
2002 {
2003 /* ??? might not be mistake. */
2004 error ("EH edge %i->%i has duplicated regions", src->index, dst->index);
2005 mark_eh_edge_found_error = true;
2006 }
2007 else
2008 e->aux = (void *)1;
2009 }
2010
2011 /* Verify that BB containing STMT as the last statement, has precisely the
2012 edges that make_eh_edges would create. */
2013
2014 bool
2015 verify_eh_edges (gimple stmt)
2016 {
2017 int region_nr;
2018 bool is_resx;
2019 basic_block bb = gimple_bb (stmt);
2020 edge_iterator ei; 2106 edge_iterator ei;
2021 edge e; 2107 edge e;
2022 2108
2023 FOR_EACH_EDGE (e, ei, bb->succs) 2109 old_bb = edge_in->dest;
2024 gcc_assert (!e->aux); 2110 old_label = gimple_block_label (old_bb);
2025 mark_eh_edge_found_error = false; 2111 old_lp_nr = EH_LANDING_PAD_NR (old_label);
2026 if (gimple_code (stmt) == GIMPLE_RESX) 2112 gcc_assert (old_lp_nr > 0);
2027 { 2113 old_lp = get_eh_landing_pad_from_number (old_lp_nr);
2028 region_nr = gimple_resx_region (stmt); 2114
2029 is_resx = true; 2115 throw_stmt = last_stmt (edge_in->src);
2116 gcc_assert (lookup_stmt_eh_lp (throw_stmt) == old_lp_nr);
2117
2118 new_label = gimple_block_label (new_bb);
2119
2120 /* Look for an existing region that might be using NEW_BB already. */
2121 new_lp_nr = EH_LANDING_PAD_NR (new_label);
2122 if (new_lp_nr)
2123 {
2124 new_lp = get_eh_landing_pad_from_number (new_lp_nr);
2125 gcc_assert (new_lp);
2126
2127 /* Unless CHANGE_REGION is true, the new and old landing pad
2128 had better be associated with the same EH region. */
2129 gcc_assert (change_region || new_lp->region == old_lp->region);
2030 } 2130 }
2031 else 2131 else
2032 { 2132 {
2033 region_nr = lookup_stmt_eh_region (stmt); 2133 new_lp = NULL;
2034 if (region_nr < 0) 2134 gcc_assert (!change_region);
2135 }
2136
2137 /* Notice when we redirect the last EH edge away from OLD_BB. */
2138 FOR_EACH_EDGE (e, ei, old_bb->preds)
2139 if (e != edge_in && (e->flags & EDGE_EH))
2140 break;
2141
2142 if (new_lp)
2143 {
2144 /* NEW_LP already exists. If there are still edges into OLD_LP,
2145 there's nothing to do with the EH tree. If there are no more
2146 edges into OLD_LP, then we want to remove OLD_LP as it is unused.
2147 If CHANGE_REGION is true, then our caller is expecting to remove
2148 the landing pad. */
2149 if (e == NULL && !change_region)
2150 remove_eh_landing_pad (old_lp);
2151 }
2152 else
2153 {
2154 /* No correct landing pad exists. If there are no more edges
2155 into OLD_LP, then we can simply re-use the existing landing pad.
2156 Otherwise, we have to create a new landing pad. */
2157 if (e == NULL)
2035 { 2158 {
2036 FOR_EACH_EDGE (e, ei, bb->succs) 2159 EH_LANDING_PAD_NR (old_lp->post_landing_pad) = 0;
2037 if (e->flags & EDGE_EH) 2160 new_lp = old_lp;
2038 {
2039 error ("BB %i can not throw but has EH edges", bb->index);
2040 return true;
2041 }
2042 return false;
2043 } 2161 }
2044 if (!stmt_could_throw_p (stmt)) 2162 else
2163 new_lp = gen_eh_landing_pad (old_lp->region);
2164 new_lp->post_landing_pad = new_label;
2165 EH_LANDING_PAD_NR (new_label) = new_lp->index;
2166 }
2167
2168 /* Maybe move the throwing statement to the new region. */
2169 if (old_lp != new_lp)
2170 {
2171 remove_stmt_from_eh_lp (throw_stmt);
2172 add_stmt_to_eh_lp (throw_stmt, new_lp->index);
2173 }
2174 }
2175
2176 /* Redirect EH edge E to NEW_BB. */
2177
2178 edge
2179 redirect_eh_edge (edge edge_in, basic_block new_bb)
2180 {
2181 redirect_eh_edge_1 (edge_in, new_bb, false);
2182 return ssa_redirect_edge (edge_in, new_bb);
2183 }
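
A caller sketch, assuming the generic edge-redirection context of
gimple_redirect_edge_and_branch (E and DEST are hypothetical here):

    if (e->flags & EDGE_EH)
      /* EH edges need the landing-pad bookkeeping done before the
         actual CFG change, so route them through the helper.  */
      return redirect_eh_edge (e, dest);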
2184
2185 /* This is a subroutine of gimple_redirect_edge_and_branch. Update the
2186 labels for redirecting a non-fallthru EH_DISPATCH edge E to NEW_BB.
2187 The actual edge update will happen in the caller. */
2188
2189 void
2190 redirect_eh_dispatch_edge (gimple stmt, edge e, basic_block new_bb)
2191 {
2192 tree new_lab = gimple_block_label (new_bb);
2193 bool any_changed = false;
2194 basic_block old_bb;
2195 eh_region r;
2196 eh_catch c;
2197
2198 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
2199 switch (r->type)
2200 {
2201 case ERT_TRY:
2202 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
2045 { 2203 {
2046 error ("BB %i last statement has incorrectly set region", bb->index); 2204 old_bb = label_to_block (c->label);
2047 return true; 2205 if (old_bb == e->dest)
2206 {
2207 c->label = new_lab;
2208 any_changed = true;
2209 }
2048 } 2210 }
2049 is_resx = false; 2211 break;
2050 } 2212
2051 2213 case ERT_ALLOWED_EXCEPTIONS:
2052 foreach_reachable_handler (region_nr, is_resx, mark_eh_edge, stmt); 2214 old_bb = label_to_block (r->u.allowed.label);
2053 FOR_EACH_EDGE (e, ei, bb->succs) 2215 gcc_assert (old_bb == e->dest);
2054 { 2216 r->u.allowed.label = new_lab;
2055 if ((e->flags & EDGE_EH) && !e->aux) 2217 any_changed = true;
2056 { 2218 break;
2057 error ("unnecessary EH edge %i->%i", bb->index, e->dest->index); 2219
2058 mark_eh_edge_found_error = true; 2220 default:
2059 return true; 2221 gcc_unreachable ();
2060 } 2222 }
2061 e->aux = NULL; 2223
2062 } 2224 gcc_assert (any_changed);
2063 2225 }
2064 return mark_eh_edge_found_error;
2065 }
2066
2067 2226
2068 /* Helper function for operation_could_trap_p and stmt_could_throw_p. */ 2227 /* Helper function for operation_could_trap_p and stmt_could_throw_p. */
2069 2228
2070 bool 2229 bool
2071 operation_could_trap_helper_p (enum tree_code op, 2230 operation_could_trap_helper_p (enum tree_code op,
2185 bool honor_trapv = false; 2344 bool honor_trapv = false;
2186 tree t, base, div = NULL_TREE; 2345 tree t, base, div = NULL_TREE;
2187 2346
2188 if (!expr) 2347 if (!expr)
2189 return false; 2348 return false;
2190 2349
2191 code = TREE_CODE (expr); 2350 code = TREE_CODE (expr);
2192 t = TREE_TYPE (expr); 2351 t = TREE_TYPE (expr);
2193 2352
2194 if (t) 2353 if (t)
2195 { 2354 {
2227 2386
2228 case ARRAY_RANGE_REF: 2387 case ARRAY_RANGE_REF:
2229 base = TREE_OPERAND (expr, 0); 2388 base = TREE_OPERAND (expr, 0);
2230 if (tree_could_trap_p (base)) 2389 if (tree_could_trap_p (base))
2231 return true; 2390 return true;
2232
2233 if (TREE_THIS_NOTRAP (expr)) 2391 if (TREE_THIS_NOTRAP (expr))
2234 return false; 2392 return false;
2235
2236 return !range_in_array_bounds_p (expr); 2393 return !range_in_array_bounds_p (expr);
2237 2394
2238 case ARRAY_REF: 2395 case ARRAY_REF:
2239 base = TREE_OPERAND (expr, 0); 2396 base = TREE_OPERAND (expr, 0);
2240 if (tree_could_trap_p (base)) 2397 if (tree_could_trap_p (base))
2241 return true; 2398 return true;
2242
2243 if (TREE_THIS_NOTRAP (expr)) 2399 if (TREE_THIS_NOTRAP (expr))
2244 return false; 2400 return false;
2245
2246 return !in_array_bounds_p (expr); 2401 return !in_array_bounds_p (expr);
2247 2402
2248 case INDIRECT_REF: 2403 case INDIRECT_REF:
2249 case ALIGN_INDIRECT_REF: 2404 case ALIGN_INDIRECT_REF:
2250 case MISALIGNED_INDIRECT_REF: 2405 case MISALIGNED_INDIRECT_REF:
2251 return !TREE_THIS_NOTRAP (expr); 2406 return !TREE_THIS_NOTRAP (expr);
2252 2407
2253 case ASM_EXPR: 2408 case ASM_EXPR:
2254 return TREE_THIS_VOLATILE (expr); 2409 return TREE_THIS_VOLATILE (expr);
2255
2256 2410
2257 case CALL_EXPR: 2411 case CALL_EXPR:
2258 t = get_callee_fndecl (expr); 2412 t = get_callee_fndecl (expr);
2259 /* Assume that calls to weak functions may trap. */ 2413 /* Assume that calls to weak functions may trap. */
2260 if (!t || !DECL_P (t) || DECL_WEAK (t)) 2414 if (!t || !DECL_P (t) || DECL_WEAK (t))
2318 /* Return true if statement STMT could throw an exception. */ 2472 /* Return true if statement STMT could throw an exception. */
2319 2473
2320 bool 2474 bool
2321 stmt_could_throw_p (gimple stmt) 2475 stmt_could_throw_p (gimple stmt)
2322 { 2476 {
2323 enum gimple_code code;
2324
2325 if (!flag_exceptions) 2477 if (!flag_exceptions)
2326 return false; 2478 return false;
2327 2479
2328 /* The only statements that can throw an exception are assignments, 2480 /* The only statements that can throw an exception are assignments,
2329 conditionals, calls and asms. */ 2481 conditionals, calls, resx, and asms. */
2330 code = gimple_code (stmt); 2482 switch (gimple_code (stmt))
2331 if (code != GIMPLE_ASSIGN 2483 {
2332 && code != GIMPLE_COND 2484 case GIMPLE_RESX:
2333 && code != GIMPLE_CALL 2485 return true;
2334 && code != GIMPLE_ASM) 2486
2335 return false; 2487 case GIMPLE_CALL:
2336 2488 return !gimple_call_nothrow_p (stmt);
2337 /* If exceptions can only be thrown by function calls and STMT is not a 2489
2338 GIMPLE_CALL, the statement cannot throw. */ 2490 case GIMPLE_ASSIGN:
2339 if (!flag_non_call_exceptions && code != GIMPLE_CALL) 2491 case GIMPLE_COND:
2340 return false; 2492 if (!flag_non_call_exceptions)
2341 2493 return false;
2342 if (code == GIMPLE_ASSIGN || code == GIMPLE_COND) 2494 return stmt_could_throw_1_p (stmt);
2343 return stmt_could_throw_1_p (stmt); 2495
2344 else if (is_gimple_call (stmt)) 2496 case GIMPLE_ASM:
2345 { 2497 if (!flag_non_call_exceptions)
2346 tree t = gimple_call_fndecl (stmt); 2498 return false;
2347 2499 return gimple_asm_volatile_p (stmt);
2348 /* Assume that calls to weak functions may trap. */ 2500
2349 if (!t || !DECL_P (t) || DECL_WEAK (t)) 2501 default:
2350 return true; 2502 return false;
2351 2503 }
2352 return (gimple_call_flags (stmt) & ECF_NOTHROW) == 0;
2353 }
2354 else if (gimple_code (stmt) == GIMPLE_ASM)
2355 return (gimple_asm_volatile_p (stmt));
2356 else
2357 gcc_unreachable ();
2358
2359 return false;
2360 } 2504 }
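
A source-level illustration of the classification above (compilable C,
assuming -fexceptions): the call may throw unless the callee is known
nothrow, while the division throws only under -fnon-call-exceptions.

    extern int get_divisor (void);

    int
    quotient (int a)
    {
      int b = get_divisor ();	/* GIMPLE_CALL: could throw.  */
      return a / b;		/* GIMPLE_ASSIGN: throws only with
				   -fnon-call-exceptions.  */
    }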
2361 2505
2362 2506
2363 /* Return true if expression T could throw an exception. */ 2507 /* Return true if expression T could throw an exception. */
2364 2508
2368 if (!flag_exceptions) 2512 if (!flag_exceptions)
2369 return false; 2513 return false;
2370 if (TREE_CODE (t) == MODIFY_EXPR) 2514 if (TREE_CODE (t) == MODIFY_EXPR)
2371 { 2515 {
2372 if (flag_non_call_exceptions 2516 if (flag_non_call_exceptions
2373 && tree_could_trap_p (TREE_OPERAND (t, 0))) 2517 && tree_could_trap_p (TREE_OPERAND (t, 0)))
2374 return true; 2518 return true;
2375 t = TREE_OPERAND (t, 1); 2519 t = TREE_OPERAND (t, 1);
2376 } 2520 }
2377 2521
2378 if (TREE_CODE (t) == WITH_SIZE_EXPR) 2522 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2379 t = TREE_OPERAND (t, 0); 2523 t = TREE_OPERAND (t, 0);
2382 if (flag_non_call_exceptions) 2526 if (flag_non_call_exceptions)
2383 return tree_could_trap_p (t); 2527 return tree_could_trap_p (t);
2384 return false; 2528 return false;
2385 } 2529 }
2386 2530
2531 /* Return true if STMT can throw an exception that is not caught within
2532 the current function (CFUN). */
2533
2534 bool
2535 stmt_can_throw_external (gimple stmt)
2536 {
2537 int lp_nr;
2538
2539 if (!stmt_could_throw_p (stmt))
2540 return false;
2541
2542 lp_nr = lookup_stmt_eh_lp (stmt);
2543 return lp_nr == 0;
2544 }
2387 2545
2388 /* Return true if STMT can throw an exception that is caught within 2546 /* Return true if STMT can throw an exception that is caught within
2389 the current function (CFUN). */ 2547 the current function (CFUN). */
2390 2548
2391 bool 2549 bool
2392 stmt_can_throw_internal (gimple stmt) 2550 stmt_can_throw_internal (gimple stmt)
2393 { 2551 {
2394 int region_nr; 2552 int lp_nr;
2395 bool is_resx = false; 2553
2396 2554 if (!stmt_could_throw_p (stmt))
2397 if (gimple_code (stmt) == GIMPLE_RESX)
2398 {
2399 region_nr = gimple_resx_region (stmt);
2400 is_resx = true;
2401 }
2402 else
2403 region_nr = lookup_stmt_eh_region (stmt);
2404
2405 if (region_nr < 0)
2406 return false; 2555 return false;
2407 2556
2408 return can_throw_internal_1 (region_nr, is_resx); 2557 lp_nr = lookup_stmt_eh_lp (stmt);
2409 } 2558 return lp_nr > 0;
2410 2559 }
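
An illustrative sketch of the landing-pad number convention both
predicates rely on (LP and R are hypothetical locals; the negative case
follows the region_array lookup used by maybe_duplicate_eh_stmt_fn below):

    eh_landing_pad lp = NULL;
    eh_region r = NULL;
    int lp_nr = lookup_stmt_eh_lp (stmt);

    if (lp_nr == 0)
      ;	/* Not in the EH table; a throw escapes the function.  */
    else if (lp_nr > 0)
      /* A true landing pad: the exception is caught in CFUN.  */
      lp = get_eh_landing_pad_from_number (lp_nr);
    else
      /* Negative values denote MUST_NOT_THROW regions, which have
         no landing pad of their own.  */
      r = get_eh_region_from_number (-lp_nr);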
2560
2561 /* Given a statement STMT in IFUN, if STMT can no longer throw, then
2562 remove any entry it might have from the EH table. Return true if
2563 any change was made. */
2564
2565 bool
2566 maybe_clean_eh_stmt_fn (struct function *ifun, gimple stmt)
2567 {
2568 if (stmt_could_throw_p (stmt))
2569 return false;
2570 return remove_stmt_from_eh_lp_fn (ifun, stmt);
2571 }
2572
2573 /* Likewise, but always use the current function. */
2574
2575 bool
2576 maybe_clean_eh_stmt (gimple stmt)
2577 {
2578 return maybe_clean_eh_stmt_fn (cfun, stmt);
2579 }
2411 2580
2412 /* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced 2581 /* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
2413 OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT 2582 OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
2414 in the table if it should be in there. Return TRUE if a replacement was 2583 in the table if it should be in there. Return TRUE if a replacement was
2415 done that may require an EH edge purge. 2584 done that may require an EH edge purge.
2416 2585
2417 bool 2586 bool
2418 maybe_clean_or_replace_eh_stmt (gimple old_stmt, gimple new_stmt) 2587 maybe_clean_or_replace_eh_stmt (gimple old_stmt, gimple new_stmt)
2419 { 2588 {
2420 int region_nr = lookup_stmt_eh_region (old_stmt); 2589 int lp_nr = lookup_stmt_eh_lp (old_stmt);
2421 2590
2422 if (region_nr >= 0) 2591 if (lp_nr != 0)
2423 { 2592 {
2424 bool new_stmt_could_throw = stmt_could_throw_p (new_stmt); 2593 bool new_stmt_could_throw = stmt_could_throw_p (new_stmt);
2425 2594
2426 if (new_stmt == old_stmt && new_stmt_could_throw) 2595 if (new_stmt == old_stmt && new_stmt_could_throw)
2427 return false; 2596 return false;
2428 2597
2429 remove_stmt_from_eh_region (old_stmt); 2598 remove_stmt_from_eh_lp (old_stmt);
2430 if (new_stmt_could_throw) 2599 if (new_stmt_could_throw)
2431 { 2600 {
2432 add_stmt_to_eh_region (new_stmt, region_nr); 2601 add_stmt_to_eh_lp (new_stmt, lp_nr);
2433 return false; 2602 return false;
2434 } 2603 }
2435 else 2604 else
2436 return true; 2605 return true;
2437 } 2606 }
2438 2607
2439 return false; 2608 return false;
2609 }
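
A typical caller sketch, assuming a folding pass that has already built
NEW_STMT at iterator GSI; gsi_replace and gimple_purge_dead_eh_edges are
the usual GSI/CFG helpers from elsewhere in the compiler.

    gimple old_stmt = gsi_stmt (gsi);

    gsi_replace (&gsi, new_stmt, false);
    if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
      /* The replacement can no longer throw, so any EH edges out
         of this block are now dead.  */
      gimple_purge_dead_eh_edges (gimple_bb (new_stmt));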
2610
2611 /* Given a statement OLD_STMT in OLD_FUN and a duplicate statement NEW_STMT
2612 in NEW_FUN, copy the EH table data from OLD_STMT to NEW_STMT. The MAP
2613 operand is the return value of duplicate_eh_regions. */
2614
2615 bool
2616 maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple new_stmt,
2617 struct function *old_fun, gimple old_stmt,
2618 struct pointer_map_t *map, int default_lp_nr)
2619 {
2620 int old_lp_nr, new_lp_nr;
2621 void **slot;
2622
2623 if (!stmt_could_throw_p (new_stmt))
2624 return false;
2625
2626 old_lp_nr = lookup_stmt_eh_lp_fn (old_fun, old_stmt);
2627 if (old_lp_nr == 0)
2628 {
2629 if (default_lp_nr == 0)
2630 return false;
2631 new_lp_nr = default_lp_nr;
2632 }
2633 else if (old_lp_nr > 0)
2634 {
2635 eh_landing_pad old_lp, new_lp;
2636
2637 old_lp = VEC_index (eh_landing_pad, old_fun->eh->lp_array, old_lp_nr);
2638 slot = pointer_map_contains (map, old_lp);
2639 new_lp = (eh_landing_pad) *slot;
2640 new_lp_nr = new_lp->index;
2641 }
2642 else
2643 {
2644 eh_region old_r, new_r;
2645
2646 old_r = VEC_index (eh_region, old_fun->eh->region_array, -old_lp_nr);
2647 slot = pointer_map_contains (map, old_r);
2648 new_r = (eh_region) *slot;
2649 new_lp_nr = -new_r->index;
2650 }
2651
2652 add_stmt_to_eh_lp_fn (new_fun, new_stmt, new_lp_nr);
2653 return true;
2654 }
2655
2656 /* Similar, but both OLD_STMT and NEW_STMT are within the current function,
2657 and thus no remapping is required. */
2658
2659 bool
2660 maybe_duplicate_eh_stmt (gimple new_stmt, gimple old_stmt)
2661 {
2662 int lp_nr;
2663
2664 if (!stmt_could_throw_p (new_stmt))
2665 return false;
2666
2667 lp_nr = lookup_stmt_eh_lp (old_stmt);
2668 if (lp_nr == 0)
2669 return false;
2670
2671 add_stmt_to_eh_lp (new_stmt, lp_nr);
2672 return true;
2440 } 2673 }
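
A sketch of the intra-function duplication path (gimple_copy and
gsi_insert_after are assumed from the gimple API; STMT and GSI are
hypothetical locals):

    gimple copy = gimple_copy (stmt);

    gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
    /* If STMT was in the EH table, register COPY with the same
       landing pad so both statements throw to the same place.  */
    maybe_duplicate_eh_stmt (copy, stmt);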
2441 2674
2442 /* Returns TRUE if oneh and twoh are exception handlers (gimple_try_cleanup of 2675 /* Returns TRUE if oneh and twoh are exception handlers (gimple_try_cleanup of
2443 GIMPLE_TRY) that are similar enough to be considered the same. Currently 2676 GIMPLE_TRY) that are similar enough to be considered the same. Currently
2444 this only handles handlers consisting of a single call, as that's the 2677 this only handles handlers consisting of a single call, as that's the
2472 || gimple_call_num_args (ones) != gimple_call_num_args (twos)) 2705 || gimple_call_num_args (ones) != gimple_call_num_args (twos))
2473 return false; 2706 return false;
2474 2707
2475 for (ai = 0; ai < gimple_call_num_args (ones); ++ai) 2708 for (ai = 0; ai < gimple_call_num_args (ones); ++ai)
2476 if (!operand_equal_p (gimple_call_arg (ones, ai), 2709 if (!operand_equal_p (gimple_call_arg (ones, ai),
2477 gimple_call_arg (twos, ai), 0)) 2710 gimple_call_arg (twos, ai), 0))
2478 return false; 2711 return false;
2479 2712
2480 return true; 2713 return true;
2481 } 2714 }
2482 2715
2571 { 2804 {
2572 refactor_eh_r (gimple_body (current_function_decl)); 2805 refactor_eh_r (gimple_body (current_function_decl));
2573 return 0; 2806 return 0;
2574 } 2807 }
2575 2808
2809 static bool
2810 gate_refactor_eh (void)
2811 {
2812 return flag_exceptions != 0;
2813 }
2814
2576 struct gimple_opt_pass pass_refactor_eh = 2815 struct gimple_opt_pass pass_refactor_eh =
2577 { 2816 {
2578 { 2817 {
2579 GIMPLE_PASS, 2818 GIMPLE_PASS,
2580 "ehopt", /* name */ 2819 "ehopt", /* name */
2581 NULL, /* gate */ 2820 gate_refactor_eh, /* gate */
2582 refactor_eh, /* execute */ 2821 refactor_eh, /* execute */
2583 NULL, /* sub */ 2822 NULL, /* sub */
2584 NULL, /* next */ 2823 NULL, /* next */
2585 0, /* static_pass_number */ 2824 0, /* static_pass_number */
2586 TV_TREE_EH, /* tv_id */ 2825 TV_TREE_EH, /* tv_id */
2589 0, /* properties_destroyed */ 2828 0, /* properties_destroyed */
2590 0, /* todo_flags_start */ 2829 0, /* todo_flags_start */
2591 TODO_dump_func /* todo_flags_finish */ 2830 TODO_dump_func /* todo_flags_finish */
2592 } 2831 }
2593 }; 2832 };
2833
2834 /* At the end of gimple optimization, we can lower RESX. */
2835
2836 static bool
2837 lower_resx (basic_block bb, gimple stmt, struct pointer_map_t *mnt_map)
2838 {
2839 int lp_nr;
2840 eh_region src_r, dst_r;
2841 gimple_stmt_iterator gsi;
2842 gimple x;
2843 tree fn, src_nr;
2844 bool ret = false;
2845
2846 lp_nr = lookup_stmt_eh_lp (stmt);
2847 if (lp_nr != 0)
2848 dst_r = get_eh_region_from_lp_number (lp_nr);
2849 else
2850 dst_r = NULL;
2851
2852 src_r = get_eh_region_from_number (gimple_resx_region (stmt));
2853 gsi = gsi_last_bb (bb);
2854
2855 if (src_r == NULL)
2856 {
2857 /* We can wind up with no source region when pass_cleanup_eh shows
2858 that there are no entries into an eh region and deletes it, but
2859 then the block that contains the resx isn't removed. This can
2860 happen without optimization when the switch statement created by
2861 lower_try_finally_switch isn't simplified to remove the eh case.
2862
2863 Resolve this by expanding the resx node to an abort. */
2864
2865 fn = implicit_built_in_decls[BUILT_IN_TRAP];
2866 x = gimple_build_call (fn, 0);
2867 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
2868
2869 while (EDGE_COUNT (bb->succs) > 0)
2870 remove_edge (EDGE_SUCC (bb, 0));
2871 }
2872 else if (dst_r)
2873 {
2874 /* When we have a destination region, we resolve this by copying
2875 the excptr and filter values into place, and changing the edge
2876 to immediately after the landing pad. */
2877 edge e;
2878
2879 if (lp_nr < 0)
2880 {
2881 basic_block new_bb;
2882 void **slot;
2883 tree lab;
2884
2885 /* We are resuming into a MUST_NOT_THROW region. Expand a call to
2886 the failure decl into a new block, if needed. */
2887 gcc_assert (dst_r->type == ERT_MUST_NOT_THROW);
2888
2889 slot = pointer_map_contains (mnt_map, dst_r);
2890 if (slot == NULL)
2891 {
2892 gimple_stmt_iterator gsi2;
2893
2894 new_bb = create_empty_bb (bb);
2895 lab = gimple_block_label (new_bb);
2896 gsi2 = gsi_start_bb (new_bb);
2897
2898 fn = dst_r->u.must_not_throw.failure_decl;
2899 x = gimple_build_call (fn, 0);
2900 gimple_set_location (x, dst_r->u.must_not_throw.failure_loc);
2901 gsi_insert_after (&gsi2, x, GSI_CONTINUE_LINKING);
2902
2903 slot = pointer_map_insert (mnt_map, dst_r);
2904 *slot = lab;
2905 }
2906 else
2907 {
2908 lab = (tree) *slot;
2909 new_bb = label_to_block (lab);
2910 }
2911
2912 gcc_assert (EDGE_COUNT (bb->succs) == 0);
2913 e = make_edge (bb, new_bb, EDGE_FALLTHRU);
2914 e->count = bb->count;
2915 e->probability = REG_BR_PROB_BASE;
2916 }
2917 else
2918 {
2919 edge_iterator ei;
2920 tree dst_nr = build_int_cst (NULL, dst_r->index);
2921
2922 fn = implicit_built_in_decls[BUILT_IN_EH_COPY_VALUES];
2923 src_nr = build_int_cst (NULL, src_r->index);
2924 x = gimple_build_call (fn, 2, dst_nr, src_nr);
2925 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
2926
2927 /* Update the flags for the outgoing edge. */
2928 e = single_succ_edge (bb);
2929 gcc_assert (e->flags & EDGE_EH);
2930 e->flags = (e->flags & ~EDGE_EH) | EDGE_FALLTHRU;
2931
2932 /* If there are no more EH users of the landing pad, delete it. */
2933 FOR_EACH_EDGE (e, ei, e->dest->preds)
2934 if (e->flags & EDGE_EH)
2935 break;
2936 if (e == NULL)
2937 {
2938 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2939 remove_eh_landing_pad (lp);
2940 }
2941 }
2942
2943 ret = true;
2944 }
2945 else
2946 {
2947 tree var;
2948
2949 /* When we don't have a destination region, this exception escapes
2950 up the call chain. We resolve this by generating a call to the
2951 _Unwind_Resume library function. */
2952
2953 /* The ARM EABI redefines _Unwind_Resume as __cxa_end_cleanup
2954 with no arguments for C++ and Java. Check for that. */
2955 if (src_r->use_cxa_end_cleanup)
2956 {
2957 fn = implicit_built_in_decls[BUILT_IN_CXA_END_CLEANUP];
2958 x = gimple_build_call (fn, 0);
2959 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
2960 }
2961 else
2962 {
2963 fn = implicit_built_in_decls[BUILT_IN_EH_POINTER];
2964 src_nr = build_int_cst (NULL, src_r->index);
2965 x = gimple_build_call (fn, 1, src_nr);
2966 var = create_tmp_var (ptr_type_node, NULL);
2967 var = make_ssa_name (var, x);
2968 gimple_call_set_lhs (x, var);
2969 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
2970
2971 fn = implicit_built_in_decls[BUILT_IN_UNWIND_RESUME];
2972 x = gimple_build_call (fn, 1, var);
2973 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
2974 }
2975
2976 gcc_assert (EDGE_COUNT (bb->succs) == 0);
2977 }
2978
2979 gsi_remove (&gsi, true);
2980
2981 return ret;
2982 }
2983
2984 static unsigned
2985 execute_lower_resx (void)
2986 {
2987 basic_block bb;
2988 struct pointer_map_t *mnt_map;
2989 bool dominance_invalidated = false;
2990 bool any_rewritten = false;
2991
2992 mnt_map = pointer_map_create ();
2993
2994 FOR_EACH_BB (bb)
2995 {
2996 gimple last = last_stmt (bb);
2997 if (last && is_gimple_resx (last))
2998 {
2999 dominance_invalidated |= lower_resx (bb, last, mnt_map);
3000 any_rewritten = true;
3001 }
3002 }
3003
3004 pointer_map_destroy (mnt_map);
3005
3006 if (dominance_invalidated)
3007 {
3008 free_dominance_info (CDI_DOMINATORS);
3009 free_dominance_info (CDI_POST_DOMINATORS);
3010 }
3011
3012 return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
3013 }
3014
3015 static bool
3016 gate_lower_resx (void)
3017 {
3018 return flag_exceptions != 0;
3019 }
3020
3021 struct gimple_opt_pass pass_lower_resx =
3022 {
3023 {
3024 GIMPLE_PASS,
3025 "resx", /* name */
3026 gate_lower_resx, /* gate */
3027 execute_lower_resx, /* execute */
3028 NULL, /* sub */
3029 NULL, /* next */
3030 0, /* static_pass_number */
3031 TV_TREE_EH, /* tv_id */
3032 PROP_gimple_lcf, /* properties_required */
3033 0, /* properties_provided */
3034 0, /* properties_destroyed */
3035 0, /* todo_flags_start */
3036 TODO_dump_func | TODO_verify_flow /* todo_flags_finish */
3037 }
3038 };
3039
3040
3041 /* At the end of inlining, we can lower EH_DISPATCH. */
3042
3043 static void
3044 lower_eh_dispatch (basic_block src, gimple stmt)
3045 {
3046 gimple_stmt_iterator gsi;
3047 int region_nr;
3048 eh_region r;
3049 tree filter, fn;
3050 gimple x;
3051
3052 region_nr = gimple_eh_dispatch_region (stmt);
3053 r = get_eh_region_from_number (region_nr);
3054
3055 gsi = gsi_last_bb (src);
3056
3057 switch (r->type)
3058 {
3059 case ERT_TRY:
3060 {
3061 VEC (tree, heap) *labels = NULL;
3062 tree default_label = NULL;
3063 eh_catch c;
3064 edge_iterator ei;
3065 edge e;
3066
3067 /* Collect the labels for a switch. Zero the post_landing_pad
3068 field because we'll no longer have anything keeping these labels
3069 in existence and the optimizer will be free to merge these
3070 blocks at will. */
3071 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
3072 {
3073 tree tp_node, flt_node, lab = c->label;
3074
3075 c->label = NULL;
3076 tp_node = c->type_list;
3077 flt_node = c->filter_list;
3078
3079 if (tp_node == NULL)
3080 {
3081 default_label = lab;
3082 break;
3083 }
3084 do
3085 {
3086 tree t = build3 (CASE_LABEL_EXPR, void_type_node,
3087 TREE_VALUE (flt_node), NULL, lab);
3088 VEC_safe_push (tree, heap, labels, t);
3089
3090 tp_node = TREE_CHAIN (tp_node);
3091 flt_node = TREE_CHAIN (flt_node);
3092 }
3093 while (tp_node);
3094 }
3095
3096 /* Clean up the edge flags. */
3097 FOR_EACH_EDGE (e, ei, src->succs)
3098 {
3099 if (e->flags & EDGE_FALLTHRU)
3100 {
3101 /* If there was no catch-all, use the fallthru edge. */
3102 if (default_label == NULL)
3103 default_label = gimple_block_label (e->dest);
3104 e->flags &= ~EDGE_FALLTHRU;
3105 }
3106 }
3107 gcc_assert (default_label != NULL);
3108
3109 /* Don't generate a switch if there's only a default case.
3110 This is common in the form of try { A; } catch (...) { B; }. */
3111 if (labels == NULL)
3112 {
3113 e = single_succ_edge (src);
3114 e->flags |= EDGE_FALLTHRU;
3115 }
3116 else
3117 {
3118 fn = implicit_built_in_decls[BUILT_IN_EH_FILTER];
3119 x = gimple_build_call (fn, 1, build_int_cst (NULL, region_nr));
3120 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL);
3121 filter = make_ssa_name (filter, x);
3122 gimple_call_set_lhs (x, filter);
3123 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3124
3125 /* Turn the default label into a default case. */
3126 default_label = build3 (CASE_LABEL_EXPR, void_type_node,
3127 NULL, NULL, default_label);
3128 sort_case_labels (labels);
3129
3130 x = gimple_build_switch_vec (filter, default_label, labels);
3131 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3132
3133 VEC_free (tree, heap, labels);
3134 }
3135 }
3136 break;
3137
3138 case ERT_ALLOWED_EXCEPTIONS:
3139 {
3140 edge b_e = BRANCH_EDGE (src);
3141 edge f_e = FALLTHRU_EDGE (src);
3142
3143 fn = implicit_built_in_decls[BUILT_IN_EH_FILTER];
3144 x = gimple_build_call (fn, 1, build_int_cst (NULL, region_nr));
3145 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL);
3146 filter = make_ssa_name (filter, x);
3147 gimple_call_set_lhs (x, filter);
3148 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3149
3150 r->u.allowed.label = NULL;
3151 x = gimple_build_cond (EQ_EXPR, filter,
3152 build_int_cst (TREE_TYPE (filter),
3153 r->u.allowed.filter),
3154 NULL_TREE, NULL_TREE);
3155 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3156
3157 b_e->flags = b_e->flags | EDGE_TRUE_VALUE;
3158 f_e->flags = (f_e->flags & ~EDGE_FALLTHRU) | EDGE_FALSE_VALUE;
3159 }
3160 break;
3161
3162 default:
3163 gcc_unreachable ();
3164 }
3165
3166 /* Replace the EH_DISPATCH with the SWITCH or COND generated above. */
3167 gsi_remove (&gsi, true);
3168 }
3169
3170 static unsigned
3171 execute_lower_eh_dispatch (void)
3172 {
3173 basic_block bb;
3174 bool any_rewritten = false;
3175
3176 assign_filter_values ();
3177
3178 FOR_EACH_BB (bb)
3179 {
3180 gimple last = last_stmt (bb);
3181 if (last && gimple_code (last) == GIMPLE_EH_DISPATCH)
3182 {
3183 lower_eh_dispatch (bb, last);
3184 any_rewritten = true;
3185 }
3186 }
3187
3188 return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
3189 }
3190
3191 static bool
3192 gate_lower_eh_dispatch (void)
3193 {
3194 return cfun->eh->region_tree != NULL;
3195 }
3196
3197 struct gimple_opt_pass pass_lower_eh_dispatch =
3198 {
3199 {
3200 GIMPLE_PASS,
3201 "ehdisp", /* name */
3202 gate_lower_eh_dispatch, /* gate */
3203 execute_lower_eh_dispatch, /* execute */
3204 NULL, /* sub */
3205 NULL, /* next */
3206 0, /* static_pass_number */
3207 TV_TREE_EH, /* tv_id */
3208 PROP_gimple_lcf, /* properties_required */
3209 0, /* properties_provided */
3210 0, /* properties_destroyed */
3211 0, /* todo_flags_start */
3212 TODO_dump_func | TODO_verify_flow /* todo_flags_finish */
3213 }
3214 };
3215
3216 /* Walk statements, see what regions are really referenced and remove
3217 those that are unused. */
3218
3219 static void
3220 remove_unreachable_handlers (void)
3221 {
3222 sbitmap r_reachable, lp_reachable;
3223 eh_region region;
3224 eh_landing_pad lp;
3225 basic_block bb;
3226 int lp_nr, r_nr;
3227
3228 r_reachable = sbitmap_alloc (VEC_length (eh_region, cfun->eh->region_array));
3229 lp_reachable
3230 = sbitmap_alloc (VEC_length (eh_landing_pad, cfun->eh->lp_array));
3231 sbitmap_zero (r_reachable);
3232 sbitmap_zero (lp_reachable);
3233
3234 FOR_EACH_BB (bb)
3235 {
3236 gimple_stmt_iterator gsi;
3237
3238 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3239 {
3240 gimple stmt = gsi_stmt (gsi);
3241 lp_nr = lookup_stmt_eh_lp (stmt);
3242
3243 /* Negative LP numbers are MUST_NOT_THROW regions which
3244 are not considered BB enders. */
3245 if (lp_nr < 0)
3246 SET_BIT (r_reachable, -lp_nr);
3247
3248 /* Positive LP numbers are real landing pads, and are BB enders. */
3249 else if (lp_nr > 0)
3250 {
3251 gcc_assert (gsi_one_before_end_p (gsi));
3252 region = get_eh_region_from_lp_number (lp_nr);
3253 SET_BIT (r_reachable, region->index);
3254 SET_BIT (lp_reachable, lp_nr);
3255 }
3256 }
3257 }
3258
3259 if (dump_file)
3260 {
3261 fprintf (dump_file, "Before removal of unreachable regions:\n");
3262 dump_eh_tree (dump_file, cfun);
3263 fprintf (dump_file, "Reachable regions: ");
3264 dump_sbitmap_file (dump_file, r_reachable);
3265 fprintf (dump_file, "Reachable landing pads: ");
3266 dump_sbitmap_file (dump_file, lp_reachable);
3267 }
3268
3269 for (r_nr = 1;
3270 VEC_iterate (eh_region, cfun->eh->region_array, r_nr, region); ++r_nr)
3271 if (region && !TEST_BIT (r_reachable, r_nr))
3272 {
3273 if (dump_file)
3274 fprintf (dump_file, "Removing unreachable region %d\n", r_nr);
3275 remove_eh_handler (region);
3276 }
3277
3278 for (lp_nr = 1;
3279 VEC_iterate (eh_landing_pad, cfun->eh->lp_array, lp_nr, lp); ++lp_nr)
3280 if (lp && !TEST_BIT (lp_reachable, lp_nr))
3281 {
3282 if (dump_file)
3283 fprintf (dump_file, "Removing unreachable landing pad %d\n", lp_nr);
3284 remove_eh_landing_pad (lp);
3285 }
3286
3287 if (dump_file)
3288 {
3289 fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n");
3290 dump_eh_tree (dump_file, cfun);
3291 fprintf (dump_file, "\n\n");
3292 }
3293
3294 sbitmap_free (r_reachable);
3295 sbitmap_free (lp_reachable);
3296
3297 #ifdef ENABLE_CHECKING
3298 verify_eh_tree (cfun);
3299 #endif
3300 }
3301
3302 /* Remove regions that do not have landing pads. This assumes
3303 that remove_unreachable_handlers has already been run, and
3304 that we've just manipulated the landing pads since then. */
3305
3306 static void
3307 remove_unreachable_handlers_no_lp (void)
3308 {
3309 eh_region r;
3310 int i;
3311
3312 for (i = 1; VEC_iterate (eh_region, cfun->eh->region_array, i, r); ++i)
3313 if (r && r->landing_pads == NULL && r->type != ERT_MUST_NOT_THROW)
3314 {
3315 if (dump_file)
3316 fprintf (dump_file, "Removing unreachable region %d\n", i);
3317 remove_eh_handler (r);
3318 }
3319 }
3320
3321 /* Undo critical edge splitting on an EH landing pad. Earlier, we
3322 optimistically split all sorts of edges, including EH edges. The
3323 optimization passes in between may not have needed them; if not,
3324 we should undo the split.
3325
3326 Recognize this case by having one EH edge incoming to the BB and
3327 one normal edge outgoing; BB should be empty apart from the
3328 post_landing_pad label.
3329
3330 Note that this is slightly different from the empty handler case
3331 handled by cleanup_empty_eh, in that the actual handler may yet
3332 have actual code but the landing pad has been separated from the
3333 handler. As such, cleanup_empty_eh relies on this transformation
3334 having been done first. */
3335
3336 static bool
3337 unsplit_eh (eh_landing_pad lp)
3338 {
3339 basic_block bb = label_to_block (lp->post_landing_pad);
3340 gimple_stmt_iterator gsi;
3341 edge e_in, e_out;
3342
3343 /* Quickly check the edge counts on BB for singularity. */
3344 if (EDGE_COUNT (bb->preds) != 1 || EDGE_COUNT (bb->succs) != 1)
3345 return false;
3346 e_in = EDGE_PRED (bb, 0);
3347 e_out = EDGE_SUCC (bb, 0);
3348
3349 /* Input edge must be EH and output edge must be normal. */
3350 if ((e_in->flags & EDGE_EH) == 0 || (e_out->flags & EDGE_EH) != 0)
3351 return false;
3352
3353 /* The block must be empty except for the labels. */
3354 if (!gsi_end_p (gsi_after_labels (bb)))
3355 return false;
3356
3357 /* The destination block must not already have a landing pad
3358 for a different region. */
3359 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
3360 {
3361 gimple stmt = gsi_stmt (gsi);
3362 tree lab;
3363 int lp_nr;
3364
3365 if (gimple_code (stmt) != GIMPLE_LABEL)
3366 break;
3367 lab = gimple_label_label (stmt);
3368 lp_nr = EH_LANDING_PAD_NR (lab);
3369 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
3370 return false;
3371 }
3372
3373 /* The new destination block must not already be a destination of
3374 the source block, lest we merge fallthru and eh edges and get
3375 all sorts of confusion. */
3376 if (find_edge (e_in->src, e_out->dest))
3377 return false;
3378
3379 /* ??? We can get degenerate phis due to cfg cleanups. I would have
3380 thought this should have been cleaned up by a phicprop pass, but
3381 that doesn't appear to handle virtuals. Propagate by hand. */
3382 if (!gimple_seq_empty_p (phi_nodes (bb)))
3383 {
3384 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
3385 {
3386 gimple use_stmt, phi = gsi_stmt (gsi);
3387 tree lhs = gimple_phi_result (phi);
3388 tree rhs = gimple_phi_arg_def (phi, 0);
3389 use_operand_p use_p;
3390 imm_use_iterator iter;
3391
3392 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
3393 {
3394 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
3395 SET_USE (use_p, rhs);
3396 }
3397
3398 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3399 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;
3400
3401 remove_phi_node (&gsi, true);
3402 }
3403 }
3404
3405 if (dump_file && (dump_flags & TDF_DETAILS))
3406 fprintf (dump_file, "Unsplit EH landing pad %d to block %i.\n",
3407 lp->index, e_out->dest->index);
3408
3409 /* Redirect the edge. Since redirect_eh_edge_1 expects to be moving
3410 a successor edge, humor it. But do the real CFG change with the
3411 predecessor of E_OUT in order to preserve the ordering of arguments
3412 to the PHI nodes in E_OUT->DEST. */
3413 redirect_eh_edge_1 (e_in, e_out->dest, false);
3414 redirect_edge_pred (e_out, e_in->src);
3415 e_out->flags = e_in->flags;
3416 e_out->probability = e_in->probability;
3417 e_out->count = e_in->count;
3418 remove_edge (e_in);
3419
3420 return true;
3421 }
3422
3423 /* Examine each landing pad block and see if it matches unsplit_eh. */
3424
3425 static bool
3426 unsplit_all_eh (void)
3427 {
3428 bool changed = false;
3429 eh_landing_pad lp;
3430 int i;
3431
3432 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
3433 if (lp)
3434 changed |= unsplit_eh (lp);
3435
3436 return changed;
3437 }
3438
3439 /* A subroutine of cleanup_empty_eh. Redirect all EH edges incoming
3440 to OLD_BB to NEW_BB; return true on success, false on failure.
3441
3442 OLD_BB_OUT is the edge into NEW_BB from OLD_BB, so if we miss any
3443 PHI variables from OLD_BB we can pick them up from OLD_BB_OUT.
3444 Virtual PHIs may be deleted and marked for renaming. */
3445
3446 static bool
3447 cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
3448 edge old_bb_out, bool change_region)
3449 {
3450 gimple_stmt_iterator ngsi, ogsi;
3451 edge_iterator ei;
3452 edge e;
3453 bitmap rename_virts;
3454 bitmap ophi_handled;
3455
3456 FOR_EACH_EDGE (e, ei, old_bb->preds)
3457 redirect_edge_var_map_clear (e);
3458
3459 ophi_handled = BITMAP_ALLOC (NULL);
3460 rename_virts = BITMAP_ALLOC (NULL);
3461
3462 /* First, iterate through the PHIs on NEW_BB and set up the edge_var_map
3463 for the edges we're going to move. */
3464 for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); gsi_next (&ngsi))
3465 {
3466 gimple ophi, nphi = gsi_stmt (ngsi);
3467 tree nresult, nop;
3468
3469 nresult = gimple_phi_result (nphi);
3470 nop = gimple_phi_arg_def (nphi, old_bb_out->dest_idx);
3471
3472 /* Find the corresponding PHI in OLD_BB so we can forward-propagate
3473 the source ssa_name. */
3474 ophi = NULL;
3475 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
3476 {
3477 ophi = gsi_stmt (ogsi);
3478 if (gimple_phi_result (ophi) == nop)
3479 break;
3480 ophi = NULL;
3481 }
3482
3483 /* If we did find the corresponding PHI, copy those inputs. */
3484 if (ophi)
3485 {
3486 bitmap_set_bit (ophi_handled, SSA_NAME_VERSION (nop));
3487 FOR_EACH_EDGE (e, ei, old_bb->preds)
3488 {
3489 location_t oloc;
3490 tree oop;
3491
3492 if ((e->flags & EDGE_EH) == 0)
3493 continue;
3494 oop = gimple_phi_arg_def (ophi, e->dest_idx);
3495 oloc = gimple_phi_arg_location (ophi, e->dest_idx);
3496 redirect_edge_var_map_add (e, nresult, oop, oloc);
3497 }
3498 }
3499 /* If we didn't find the PHI, but it's a VOP, remember to rename
3500 it later, assuming all other tests succeed. */
3501 else if (!is_gimple_reg (nresult))
3502 bitmap_set_bit (rename_virts, SSA_NAME_VERSION (nresult));
3503 /* If we didn't find the PHI, and it's a real variable, we know
3504 from the fact that OLD_BB is tree_empty_eh_handler_p that the
3505 variable is unchanged from input to the block and we can simply
3506 re-use the input to NEW_BB from the OLD_BB_OUT edge. */
3507 else
3508 {
3509 location_t nloc
3510 = gimple_phi_arg_location (nphi, old_bb_out->dest_idx);
3511 FOR_EACH_EDGE (e, ei, old_bb->preds)
3512 redirect_edge_var_map_add (e, nresult, nop, nloc);
3513 }
3514 }
3515
3516 /* Second, verify that all PHIs from OLD_BB have been handled. If not,
3517 we don't know what values from the other edges into NEW_BB to use. */
3518 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
3519 {
3520 gimple ophi = gsi_stmt (ogsi);
3521 tree oresult = gimple_phi_result (ophi);
3522 if (!bitmap_bit_p (ophi_handled, SSA_NAME_VERSION (oresult)))
3523 goto fail;
3524 }
3525
3526 /* At this point we know that the merge will succeed. Remove the PHI
3527 nodes for the virtuals that we want to rename. */
3528 if (!bitmap_empty_p (rename_virts))
3529 {
3530 for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); )
3531 {
3532 gimple nphi = gsi_stmt (ngsi);
3533 tree nresult = gimple_phi_result (nphi);
3534 if (bitmap_bit_p (rename_virts, SSA_NAME_VERSION (nresult)))
3535 {
3536 mark_virtual_phi_result_for_renaming (nphi);
3537 remove_phi_node (&ngsi, true);
3538 }
3539 else
3540 gsi_next (&ngsi);
3541 }
3542 }
3543
3544 /* Finally, move the edges and update the PHIs. */
3545 for (ei = ei_start (old_bb->preds); (e = ei_safe_edge (ei)); )
3546 if (e->flags & EDGE_EH)
3547 {
3548 redirect_eh_edge_1 (e, new_bb, change_region);
3549 redirect_edge_succ (e, new_bb);
3550 flush_pending_stmts (e);
3551 }
3552 else
3553 ei_next (&ei);
3554
3555 BITMAP_FREE (ophi_handled);
3556 BITMAP_FREE (rename_virts);
3557 return true;
3558
3559 fail:
3560 FOR_EACH_EDGE (e, ei, old_bb->preds)
3561 redirect_edge_var_map_clear (e);
3562 BITMAP_FREE (ophi_handled);
3563 BITMAP_FREE (rename_virts);
3564 return false;
3565 }
3566
3567 /* A subroutine of cleanup_empty_eh. Move a landing pad LP from its
3568 old region to NEW_REGION at BB. */
3569
3570 static void
3571 cleanup_empty_eh_move_lp (basic_block bb, edge e_out,
3572 eh_landing_pad lp, eh_region new_region)
3573 {
3574 gimple_stmt_iterator gsi;
3575 eh_landing_pad *pp;
3576
3577 for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
3578 continue;
3579 *pp = lp->next_lp;
3580
3581 lp->region = new_region;
3582 lp->next_lp = new_region->landing_pads;
3583 new_region->landing_pads = lp;
3584
3585 /* Delete the RESX that was matched within the empty handler block. */
3586 gsi = gsi_last_bb (bb);
3587 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
3588 gsi_remove (&gsi, true);
3589
3590 /* Clean up E_OUT for the fallthru. */
3591 e_out->flags = (e_out->flags & ~EDGE_EH) | EDGE_FALLTHRU;
3592 e_out->probability = REG_BR_PROB_BASE;
3593 }
3594
3595 /* A subroutine of cleanup_empty_eh. Handle more complex cases of
3596 unsplitting than unsplit_eh was prepared to handle, e.g. when
3597 multiple incoming edges and phis are involved. */
3598
3599 static bool
3600 cleanup_empty_eh_unsplit (basic_block bb, edge e_out, eh_landing_pad lp)
3601 {
3602 gimple_stmt_iterator gsi;
3603 tree lab;
3604
3605 /* We really ought not have totally lost everything following
3606 a landing pad label. Given that BB is empty, there had better
3607 be a successor. */
3608 gcc_assert (e_out != NULL);
3609
3610 /* The destination block must not already have a landing pad
3611 for a different region. */
3612 lab = NULL;
3613 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
3614 {
3615 gimple stmt = gsi_stmt (gsi);
3616 int lp_nr;
3617
3618 if (gimple_code (stmt) != GIMPLE_LABEL)
3619 break;
3620 lab = gimple_label_label (stmt);
3621 lp_nr = EH_LANDING_PAD_NR (lab);
3622 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
3623 return false;
3624 }
3625
3626 /* Attempt to move the PHIs into the successor block. */
3627 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, false))
3628 {
3629 if (dump_file && (dump_flags & TDF_DETAILS))
3630 fprintf (dump_file,
3631 "Unsplit EH landing pad %d to block %i "
3632 "(via cleanup_empty_eh).\n",
3633 lp->index, e_out->dest->index);
3634 return true;
3635 }
3636
3637 return false;
3638 }
3639
3640 /* Examine the block associated with LP to determine if it's an empty
3641 handler for its EH region. If so, attempt to redirect EH edges to
3642 an outer region. Return true if the CFG was updated in any way. This
3643 is similar to jump forwarding, just across EH edges. */
3644
3645 static bool
3646 cleanup_empty_eh (eh_landing_pad lp)
3647 {
3648 basic_block bb = label_to_block (lp->post_landing_pad);
3649 gimple_stmt_iterator gsi;
3650 gimple resx;
3651 eh_region new_region;
3652 edge_iterator ei;
3653 edge e, e_out;
3654 bool has_non_eh_pred;
3655 int new_lp_nr;
3656
3657 /* There can be zero or one edges out of BB. This is the quickest test. */
3658 switch (EDGE_COUNT (bb->succs))
3659 {
3660 case 0:
3661 e_out = NULL;
3662 break;
3663 case 1:
3664 e_out = EDGE_SUCC (bb, 0);
3665 break;
3666 default:
3667 return false;
3668 }
3669 gsi = gsi_after_labels (bb);
3670
3671 /* Make sure to skip debug statements. */
3672 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
3673 gsi_next_nondebug (&gsi);
3674
3675 /* If the block is totally empty, look for more unsplitting cases. */
3676 if (gsi_end_p (gsi))
3677 return cleanup_empty_eh_unsplit (bb, e_out, lp);
3678
3679 /* The block should consist only of a single RESX statement. */
3680 resx = gsi_stmt (gsi);
3681 if (!is_gimple_resx (resx))
3682 return false;
3683 gcc_assert (gsi_one_before_end_p (gsi));
3684
3685 /* Determine if there are non-EH edges, or resx edges into the handler. */
3686 has_non_eh_pred = false;
3687 FOR_EACH_EDGE (e, ei, bb->preds)
3688 if (!(e->flags & EDGE_EH))
3689 has_non_eh_pred = true;
3690
3691 /* Find the handler enclosing the empty handler by looking at
3692 where the RESX statement was vectored. */
3693 new_lp_nr = lookup_stmt_eh_lp (resx);
3694 new_region = get_eh_region_from_lp_number (new_lp_nr);
3695
3696 /* If there's no destination region within the current function,
3697 redirection is trivial via removing the throwing statements from
3698 the EH region, removing the EH edges, and allowing the block
3699 to go unreachable. */
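/* E.g. (illustrative): a resx that resumes unwinding out of the
   current function has no local destination; the throwers simply
   lose their landing-pad entries and the pad becomes dead code.  */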
3700 if (new_region == NULL)
3701 {
3702 gcc_assert (e_out == NULL);
3703 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
3704 if (e->flags & EDGE_EH)
3705 {
3706 gimple stmt = last_stmt (e->src);
3707 remove_stmt_from_eh_lp (stmt);
3708 remove_edge (e);
3709 }
3710 else
3711 ei_next (&ei);
3712 goto succeed;
3713 }
3714
3715 /* If the destination region is a MUST_NOT_THROW, allow the runtime
3716 to handle the abort and allow the blocks to go unreachable. */
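/* E.g. (illustrative): re-associating the throwers with the
   MUST_NOT_THROW region makes the unwinder invoke its failure routine
   (typically std::terminate for C++) directly, with no local landing
   pad in between.  */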
3717 if (new_region->type == ERT_MUST_NOT_THROW)
3718 {
3719 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
3720 if (e->flags & EDGE_EH)
3721 {
3722 gimple stmt = last_stmt (e->src);
3723 remove_stmt_from_eh_lp (stmt);
3724 add_stmt_to_eh_lp (stmt, new_lp_nr);
3725 remove_edge (e);
3726 }
3727 else
3728 ei_next (&ei);
3729 goto succeed;
3730 }
3731
3732 /* Try to redirect the EH edges and merge the PHIs into the destination
3733 landing pad block. If the merge succeeds, we'll already have redirected
3734 all the EH edges. The handler itself will go unreachable if there were
3735 no normal edges. */
3736 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, true))
3737 goto succeed;
3738
3739 /* Finally, if all input edges are EH edges, then we can (potentially)
3740 reduce the number of transfers from the runtime by moving the landing
3741 pad from the original region to the new region. This is a win when
3742 we remove the last CLEANUP region along a particular exception
3743 propagation path. Since nothing changes except for the region with
3744 which the landing pad is associated, the PHI nodes do not need to be
3745 adjusted at all. */
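/* Illustration (hypothetical nesting): with an empty CLEANUP region
   nested inside a TRY region, re-parenting the landing pad onto the
   TRY region means the unwinder transfers control to the pad once,
   rather than stopping first at the dead cleanup.  */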
3746 if (!has_non_eh_pred)
3747 {
3748 cleanup_empty_eh_move_lp (bb, e_out, lp, new_region);
3749 if (dump_file && (dump_flags & TDF_DETAILS))
3750 fprintf (dump_file, "Empty EH handler %i moved to EH region %i.\n",
3751 lp->index, new_region->index);
3752
3753 /* ??? The CFG didn't change, but we may have rendered the
3754 old EH region unreachable. Trigger a cleanup there. */
3755 return true;
3756 }
3757
3758 return false;
3759
3760 succeed:
3761 if (dump_file && (dump_flags & TDF_DETAILS))
3762 fprintf (dump_file, "Empty EH handler %i removed.\n", lp->index);
3763 remove_eh_landing_pad (lp);
3764 return true;
3765 }
3766
3767 /* Walk the landing pads of the current function. Examine each
3768 post_landing_pad block and see if we can eliminate it as empty. */
3769
3770 static bool
3771 cleanup_all_empty_eh (void)
3772 {
3773 bool changed = false;
3774 eh_landing_pad lp;
3775 int i;
3776
3777 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
3778 if (lp)
3779 changed |= cleanup_empty_eh (lp);
3780
3781 return changed;
3782 }
3783
3784 /* Perform cleanups and lowering of exception handling:
3785 1) cleanup regions whose handlers do nothing are optimized out,
3786 2) MUST_NOT_THROW regions that became dead because of 1) are optimized out,
3787 3) info about regions containing instructions, and about regions
3788 reachable via local EH edges, is collected,
3789 4) the EH tree is pruned of regions that are no longer necessary.
3790
3791 TODO: Push MUST_NOT_THROW regions to the root of the EH tree.
3792 Unify those that have the same failure decl and locus.
3793 */
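/* As an illustration of 1) above (hypothetical source): a C++ object
   whose destructor inlines away,

       { S s; foo (); }   // ~S() becomes a no-op after inlining

   leaves behind a cleanup region with an empty handler, which this
   pass deletes outright.  */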
3794
3795 static unsigned int
3796 execute_cleanup_eh (void)
3797 {
3798 /* Do this first: unsplit_all_eh and cleanup_all_empty_eh can die
3799 looking up unreachable landing pads. */
3800 remove_unreachable_handlers ();
3801
3802 /* Watch out for the region tree vanishing due to all handlers being unreachable. */
3803 if (cfun->eh->region_tree && optimize)
3804 {
3805 bool changed = false;
3806
3807 changed |= unsplit_all_eh ();
3808 changed |= cleanup_all_empty_eh ();
3809
3810 if (changed)
3811 {
3812 free_dominance_info (CDI_DOMINATORS);
3813 free_dominance_info (CDI_POST_DOMINATORS);
3814
3815 /* We delayed all basic block deletion, as we may have performed
3816 cleanups on EH edges while non-EH edges were still present. */
3817 delete_unreachable_blocks ();
3818
3819 /* We manipulated the landing pads. Remove any region that no
3820 longer has a landing pad. */
3821 remove_unreachable_handlers_no_lp ();
3822
3823 return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
3824 }
3825 }
3826
3827 return 0;
3828 }
3829
3830 static bool
3831 gate_cleanup_eh (void)
3832 {
3833 return cfun->eh != NULL && cfun->eh->region_tree != NULL;
3834 }
3835
3836 struct gimple_opt_pass pass_cleanup_eh = {
3837 {
3838 GIMPLE_PASS,
3839 "ehcleanup", /* name */
3840 gate_cleanup_eh, /* gate */
3841 execute_cleanup_eh, /* execute */
3842 NULL, /* sub */
3843 NULL, /* next */
3844 0, /* static_pass_number */
3845 TV_TREE_EH, /* tv_id */
3846 PROP_gimple_lcf, /* properties_required */
3847 0, /* properties_provided */
3848 0, /* properties_destroyed */
3849 0, /* todo_flags_start */
3850 TODO_dump_func /* todo_flags_finish */
3851 }
3852 };
3853
3854 /* Verify that the BB containing STMT as its last statement has precisely
3855 the edge that make_eh_edges would create. */
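/* E.g. (illustrative): a call associated with landing pad 1 must have
   exactly one EDGE_EH successor, and that edge must reach the block
   holding lp1's post_landing_pad label; anything else is an error.  */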
3856
3857 bool
3858 verify_eh_edges (gimple stmt)
3859 {
3860 basic_block bb = gimple_bb (stmt);
3861 eh_landing_pad lp = NULL;
3862 int lp_nr;
3863 edge_iterator ei;
3864 edge e, eh_edge;
3865
3866 lp_nr = lookup_stmt_eh_lp (stmt);
3867 if (lp_nr > 0)
3868 lp = get_eh_landing_pad_from_number (lp_nr);
3869
3870 eh_edge = NULL;
3871 FOR_EACH_EDGE (e, ei, bb->succs)
3872 {
3873 if (e->flags & EDGE_EH)
3874 {
3875 if (eh_edge)
3876 {
3877 error ("BB %i has multiple EH edges", bb->index);
3878 return true;
3879 }
3880 else
3881 eh_edge = e;
3882 }
3883 }
3884
3885 if (lp == NULL)
3886 {
3887 if (eh_edge)
3888 {
3889 error ("BB %i cannot throw but has an EH edge", bb->index);
3890 return true;
3891 }
3892 return false;
3893 }
3894
3895 if (!stmt_could_throw_p (stmt))
3896 {
3897 error ("BB %i last statement has incorrectly set lp", bb->index);
3898 return true;
3899 }
3900
3901 if (eh_edge == NULL)
3902 {
3903 error ("BB %i is missing an EH edge", bb->index);
3904 return true;
3905 }
3906
3907 if (eh_edge->dest != label_to_block (lp->post_landing_pad))
3908 {
3909 error ("Incorrect EH edge %i->%i", bb->index, eh_edge->dest->index);
3910 return true;
3911 }
3912
3913 return false;
3914 }
3915
3916 /* Similarly, but handle GIMPLE_EH_DISPATCH specifically. */
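/* Sketch of the expectation (illustrative): for an ERT_TRY region with
   catches C1 and C2, the dispatch block needs exactly one edge to each
   catch label plus one fallthru for the re-raise path; a catch-all
   among the catches swallows the fallthru.  */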
3917
3918 bool
3919 verify_eh_dispatch_edge (gimple stmt)
3920 {
3921 eh_region r;
3922 eh_catch c;
3923 basic_block src, dst;
3924 bool want_fallthru = true;
3925 edge_iterator ei;
3926 edge e, fall_edge;
3927
3928 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
3929 src = gimple_bb (stmt);
3930
3931 FOR_EACH_EDGE (e, ei, src->succs)
3932 gcc_assert (e->aux == NULL);
3933
3934 switch (r->type)
3935 {
3936 case ERT_TRY:
3937 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
3938 {
3939 dst = label_to_block (c->label);
3940 e = find_edge (src, dst);
3941 if (e == NULL)
3942 {
3943 error ("BB %i is missing an edge", src->index);
3944 return true;
3945 }
3946 e->aux = (void *)e;
3947
3948 /* A catch-all handler doesn't have a fallthru. */
3949 if (c->type_list == NULL)
3950 {
3951 want_fallthru = false;
3952 break;
3953 }
3954 }
3955 break;
3956
3957 case ERT_ALLOWED_EXCEPTIONS:
3958 dst = label_to_block (r->u.allowed.label);
3959 e = find_edge (src, dst);
3960 if (e == NULL)
3961 {
3962 error ("BB %i is missing an edge", src->index);
3963 return true;
3964 }
3965 e->aux = (void *)e;
3966 break;
3967
3968 default:
3969 gcc_unreachable ();
3970 }
3971
3972 fall_edge = NULL;
3973 FOR_EACH_EDGE (e, ei, src->succs)
3974 {
3975 if (e->flags & EDGE_FALLTHRU)
3976 {
3977 if (fall_edge != NULL)
3978 {
3979 error ("BB %i has too many fallthru edges", src->index);
3980 return true;
3981 }
3982 fall_edge = e;
3983 }
3984 else if (e->aux)
3985 e->aux = NULL;
3986 else
3987 {
3988 error ("BB %i has incorrect edge", src->index);
3989 return true;
3990 }
3991 }
3992 if ((fall_edge != NULL) ^ want_fallthru)
3993 {
3994 error ("BB %i has incorrect fallthru edge", src->index);
3995 return true;
3996 }
3997
3998 return false;
3999 }