/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "except.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "timevar.h"
#include "langhooks.h"
#include "ggc.h"
#include "toplev.h"
#include "gimple.h"

/* In some instances a tree and a gimple statement need to be stored in
   the same table, e.g. in hash tables.  This union makes that possible.  */
typedef union {tree *tp; tree t; gimple g;} treemple;

/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}

/* Misc functions used in this file.  */

/* Compare and hash for any structure which begins with a canonical
   pointer.  Assumes all pointers are interchangeable, which is sort
   of already assumed by gcc elsewhere IIRC.  */

static int
struct_ptr_eq (const void *a, const void *b)
{
  const void * const * x = (const void * const *) a;
  const void * const * y = (const void * const *) b;
  return *x == *y;
}
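
/* An explanatory note on the hash below: the low-order bits of a
   pointer carry little information (they are presumably zero because
   of allocation alignment), so struct_ptr_hash shifts them away to
   spread entries across buckets.  */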
static hashval_t
struct_ptr_hash (const void *a)
{
  const void * const * x = (const void * const *) a;
  return (size_t)*x >> 4;
}

/* Remember and lookup EH region data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */

static void
record_stmt_eh_region (struct eh_region *region, gimple t)
{
  if (!region)
    return;

  add_stmt_to_eh_region (t, get_eh_region_number (region));
}

/* Add statement T in function IFUN to EH region NUM.  */

void
add_stmt_to_eh_region_fn (struct function *ifun, gimple t, int num)
{
  struct throw_stmt_node *n;
  void **slot;

  gcc_assert (num >= 0);
  gcc_assert (gimple_code (t) != GIMPLE_RESX);

  n = GGC_NEW (struct throw_stmt_node);
  n->stmt = t;
  n->region_nr = num;

  if (!get_eh_throw_stmt_table (ifun))
    set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
                                                    struct_ptr_eq,
                                                    ggc_free));

  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

/* Add statement T in the current function (cfun) to EH region number
   NUM.  */

void
add_stmt_to_eh_region (gimple t, int num)
{
  add_stmt_to_eh_region_fn (cfun, t, num);
}

/* Remove statement T in function IFUN from the EH region holding it.  */

bool
remove_stmt_from_eh_region_fn (struct function *ifun, gimple t)
{
  struct throw_stmt_node dummy;
  void **slot;

  if (!get_eh_throw_stmt_table (ifun))
    return false;

  dummy.stmt = t;
  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
                         NO_INSERT);
  if (slot)
    {
      htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
      return true;
    }
  else
    return false;
}

/* Remove statement T in the current function (cfun) from the EH
   region holding it.  */

bool
remove_stmt_from_eh_region (gimple t)
{
  return remove_stmt_from_eh_region_fn (cfun, t);
}

/* Determine if statement T is inside an EH region in function IFUN.
   Return the EH region number if found, return -2 if IFUN does not
   have an EH table and -1 if T could not be found in IFUN's EH region
   table.  */

int
lookup_stmt_eh_region_fn (struct function *ifun, gimple t)
{
  struct throw_stmt_node *p, n;

  if (!get_eh_throw_stmt_table (ifun))
    return -2;

  n.stmt = t;
  p = (struct throw_stmt_node *) htab_find (get_eh_throw_stmt_table (ifun), &n);
  return (p ? p->region_nr : -1);
}

/* Determine if statement T is inside an EH region in the current
   function (cfun).  Return the EH region number if found, return -2
   if cfun does not have an EH table and -1 if T could not be found in
   cfun's EH region table.  */

int
lookup_stmt_eh_region (gimple t)
{
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */
  if (!cfun)
    return -1;

  return lookup_stmt_eh_region_fn (cfun, t);
}

/* Determine if expression T is inside an EH region in the current
   function (cfun).  Return the EH region number if found, return -2
   if cfun does not have an EH table and -1 if T could not be found in
   cfun's EH region table.  */

int
lookup_expr_eh_region (tree t)
{
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */
  if (!cfun)
    return -1;

  if (!get_eh_throw_stmt_table (cfun))
    return -2;

  if (t && EXPR_P (t))
    {
      tree_ann_common_t ann = tree_common_ann (t);
      if (ann)
        return (int) ann->rn;
    }

  return -1;
}

/* First pass of EH node decomposition.  Build up a tree of GIMPLE_TRY_FINALLY
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */

struct finally_tree_node
{
  /* When storing a GIMPLE_TRY, we have to record a gimple.  However
     when deciding whether a GOTO to a certain LABEL_DECL (which is a
     tree) leaves the TRY block, it's necessary to record a tree in
     this field.  Thus a treemple is used.  */
  treemple child;
  gimple parent;
};

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static htab_t finally_tree;

static void
record_in_finally_tree (treemple child, gimple parent)
{
  struct finally_tree_node *n;
  void **slot;

  n = XNEW (struct finally_tree_node);
  n->child = child;
  n->parent = parent;

  slot = htab_find_slot (finally_tree, n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

static void
collect_finally_tree (gimple stmt, gimple region);

/* Go through the gimple sequence.  Works with collect_finally_tree to
   record all GIMPLE_LABEL and GIMPLE_TRY statements.  */

static void
collect_finally_tree_1 (gimple_seq seq, gimple region)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    collect_finally_tree (gsi_stmt (gsi), region);
}

static void
collect_finally_tree (gimple stmt, gimple region)
{
  treemple temp;

  switch (gimple_code (stmt))
    {
    case GIMPLE_LABEL:
      temp.t = gimple_label_label (stmt);
      record_in_finally_tree (temp, region);
      break;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
        {
          temp.g = stmt;
          record_in_finally_tree (temp, region);
          collect_finally_tree_1 (gimple_try_eval (stmt), stmt);
          collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
        }
      else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        {
          collect_finally_tree_1 (gimple_try_eval (stmt), region);
          collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
        }
      break;

    case GIMPLE_CATCH:
      collect_finally_tree_1 (gimple_catch_handler (stmt), region);
      break;

    case GIMPLE_EH_FILTER:
      collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region);
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}

/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */
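
/* Illustrative sketch (not from the original comment): given

	try { L1: ...; goto L2; } finally { ... }
	L2: ...

   walking parent links upward from L1 reaches the GIMPLE_TRY, so a
   goto to L1 from inside the try body stays inside; the walk from L2
   never reaches the GIMPLE_TRY, so a goto to L2 escapes and this
   function returns true.  */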

static bool
outside_finally_tree (treemple start, gimple target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = (struct finally_tree_node *) htab_find (finally_tree, &n);
      if (!p)
        return true;
      start.g = p->parent;
    }
  while (start.g != target);

  return false;
}

/* Second pass of EH node decomposition.  Actually transform the GIMPLE_TRY
   nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straightforward, but frobbing all the gotos
   and such into shape isn't.  */

/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  struct eh_region *cur_region;
  struct eh_region *prev_try;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};

struct leh_tf_state
{
  /* Pointer to the GIMPLE_TRY_FINALLY node under discussion.  The
     try_finally_expr is the original GIMPLE_TRY_FINALLY.  We need to retain
     this so that outside_finally_tree can reliably reference the tree used
     in the collect_finally_tree data structures.  */
  gimple try_finally_expr;
  gimple top_p;
  /* While lowering a top_p, it is usually expanded into multiple statements,
     so we need the following field to store them.  */
  gimple_seq top_p_seq;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  struct eh_region *region;

  /* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN statements
     that are seen to escape this GIMPLE_TRY_FINALLY node.
     The idea is to record a gimple statement for everything except for
     the conditionals, which get their labels recorded.  Since labels are of
     type 'tree', we need this node to store both gimple and tree objects.
     REPL_STMT is the sequence used to replace the goto/return statement.
     CONT_STMT is used to store the statement that allows the return/goto to
     jump to the original destination.  */
  struct goto_queue_node {
    treemple stmt;
    gimple_seq repl_stmt;
    gimple cont_stmt;
    int index;
    /* This is used when index >= 0 to indicate that stmt is a label (as
       opposed to a goto stmt).  */
    int is_label;
  } *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* Pointer map to help in searching goto_queue when it is large.  */
  struct pointer_map_t *goto_queue_map;

  /* The set of unique labels seen as entries in the goto queue.  */
  VEC(tree,heap) *dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* A label that has been registered with except.c to be the
     landing pad for this try block.  */
  tree eh_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a GIMPLE_RETURN.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};

static gimple_seq lower_eh_filter (struct leh_state *, gimple);

/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

#define LARGE_GOTO_QUEUE 20
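
/* Explanatory note: goto queues with fewer than LARGE_GOTO_QUEUE
   entries are searched linearly; larger ones get a one-time pointer
   map built by find_goto_replacement.  The cutoff trades map
   construction cost against repeated linear scans; the value appears
   to be a heuristic rather than a tuned constant.  */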

static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq seq);

static gimple_seq
find_goto_replacement (struct leh_tf_state *tf, treemple stmt)
{
  unsigned int i;
  void **slot;

  if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
    {
      for (i = 0; i < tf->goto_queue_active; i++)
        if (tf->goto_queue[i].stmt.g == stmt.g)
          return tf->goto_queue[i].repl_stmt;
      return NULL;
    }

  /* If we have a large number of entries in the goto_queue, create a
     pointer map and use that for searching.  */

  if (!tf->goto_queue_map)
    {
      tf->goto_queue_map = pointer_map_create ();
      for (i = 0; i < tf->goto_queue_active; i++)
        {
          slot = pointer_map_insert (tf->goto_queue_map,
                                     tf->goto_queue[i].stmt.g);
          gcc_assert (*slot == NULL);
          *slot = &tf->goto_queue[i];
        }
    }

  slot = pointer_map_contains (tf->goto_queue_map, stmt.g);
  if (slot != NULL)
    return (((struct goto_queue_node *) *slot)->repl_stmt);

  return NULL;
}

/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered GIMPLE_COND.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the GIMPLE_COND and redirect.  */
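
/* Illustrative sketch of the non-trivial case: replacing the true edge of

	if (c) goto L1; else goto L2;

   with a multi-statement sequence S yields

	if (c) goto Ltmp; else goto L2;
	Ltmp: S

   where Ltmp is a fresh artificial label (names hypothetical).  */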

static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
                                gimple_stmt_iterator *gsi)
{
  tree label;
  gimple_seq new_seq;
  treemple temp;

  temp.tp = tp;
  new_seq = find_goto_replacement (tf, temp);
  if (!new_seq)
    return;

  if (gimple_seq_singleton_p (new_seq)
      && gimple_code (gimple_seq_first_stmt (new_seq)) == GIMPLE_GOTO)
    {
      *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq));
      return;
    }

  label = create_artificial_label ();
  /* Set the new label for the GIMPLE_COND.  */
  *tp = label;

  gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING);
  gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING);
}

/* The real work of replace_goto_queue.  Returns with GSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (gimple_seq, struct leh_tf_state *);

static void
replace_goto_queue_1 (gimple stmt, struct leh_tf_state *tf,
                      gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  treemple temp;
  temp.g = NULL;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
      temp.g = stmt;
      seq = find_goto_replacement (tf, temp);
      if (seq)
        {
          gsi_insert_seq_before (gsi, gimple_seq_copy (seq), GSI_SAME_STMT);
          gsi_remove (gsi, false);
          return;
        }
      break;

    case GIMPLE_COND:
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);
      break;

    case GIMPLE_TRY:
      replace_goto_queue_stmt_list (gimple_try_eval (stmt), tf);
      replace_goto_queue_stmt_list (gimple_try_cleanup (stmt), tf);
      break;
    case GIMPLE_CATCH:
      replace_goto_queue_stmt_list (gimple_catch_handler (stmt), tf);
      break;
    case GIMPLE_EH_FILTER:
      replace_goto_queue_stmt_list (gimple_eh_filter_failure (stmt), tf);
      break;

    default:
      /* These won't have gotos in them.  */
      break;
    }

  gsi_next (gsi);
}

/* A subroutine of replace_goto_queue.  Handles GIMPLE_SEQ.  */

static void
replace_goto_queue_stmt_list (gimple_seq seq, struct leh_tf_state *tf)
{
  gimple_stmt_iterator gsi = gsi_start (seq);

  while (!gsi_end_p (gsi))
    replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi);
}

/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  if (tf->goto_queue_active == 0)
    return;
  replace_goto_queue_stmt_list (tf->top_p_seq, tf);
}

/* Add a new record to the goto queue contained in TF.  NEW_STMT is the
   data to be added, IS_LABEL indicates whether NEW_STMT is a label or
   a gimple return.  */

static void
record_in_goto_queue (struct leh_tf_state *tf,
                      treemple new_stmt,
                      int index,
                      bool is_label)
{
  size_t active, size;
  struct goto_queue_node *q;

  gcc_assert (!tf->goto_queue_map);

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
        = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = new_stmt;
  q->index = index;
  q->is_label = is_label;
}

/* Record the LABEL label in the goto queue contained in TF.
   TF is not null.  */

static void
record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label)
{
  int index;
  treemple temp, new_stmt;

  if (!label)
    return;

  /* Computed and non-local gotos do not get processed.  Given
     their nature we can neither tell whether we've escaped the
     finally block nor redirect them if we knew.  */
  if (TREE_CODE (label) != LABEL_DECL)
    return;

  /* No need to record gotos that don't leave the try block.  */
  temp.t = label;
  if (!outside_finally_tree (temp, tf->try_finally_expr))
    return;

  if (! tf->dest_array)
    {
      tf->dest_array = VEC_alloc (tree, heap, 10);
      VEC_quick_push (tree, tf->dest_array, label);
      index = 0;
    }
  else
    {
      int n = VEC_length (tree, tf->dest_array);
      for (index = 0; index < n; ++index)
        if (VEC_index (tree, tf->dest_array, index) == label)
          break;
      if (index == n)
        VEC_safe_push (tree, heap, tf->dest_array, label);
    }

  /* In the case of a GOTO we want to record the destination label,
     since with a GIMPLE_COND we have easy access to the then/else
     labels.  */
  new_stmt = stmt;
  record_in_goto_queue (tf, new_stmt, index, true);
}

/* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */
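
/* Explanatory note on the asymmetry in what gets queued: for a
   GIMPLE_COND we queue the address of the label operand (a tree *), so
   that replace_goto_queue_cond_clause can rewrite the operand in place,
   while for GIMPLE_GOTO and GIMPLE_RETURN we queue the statement itself
   and replace it wholesale.  */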

static void
maybe_record_in_goto_queue (struct leh_state *state, gimple stmt)
{
  struct leh_tf_state *tf = state->tf;
  treemple new_stmt;

  if (!tf)
    return;

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      new_stmt.tp = gimple_op_ptr (stmt, 2);
      record_in_goto_queue_label (tf, new_stmt, gimple_cond_true_label (stmt));
      new_stmt.tp = gimple_op_ptr (stmt, 3);
      record_in_goto_queue_label (tf, new_stmt, gimple_cond_false_label (stmt));
      break;
    case GIMPLE_GOTO:
      new_stmt.g = stmt;
      record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt));
      break;

    case GIMPLE_RETURN:
      tf->may_return = true;
      new_stmt.g = stmt;
      record_in_goto_queue (tf, new_stmt, -1, false);
      break;

    default:
      gcc_unreachable ();
    }
}

#ifdef ENABLE_CHECKING
/* We do not process GIMPLE_SWITCHes for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer GIMPLE_TRY_FINALLY nodes.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state, gimple switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;

  if (!tf)
    return;

  n = gimple_switch_num_labels (switch_expr);

  for (i = 0; i < n; ++i)
    {
      treemple temp;
      tree lab = CASE_LABEL (gimple_switch_label (switch_expr, i));
      temp.t = lab;
      gcc_assert (!outside_finally_tree (temp, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif

/* Redirect the return statement held in goto queue node Q to FINLAB.
   Store in Q->CONT_STMT whatever is needed to finish the original return.
   If MOD is non-null, insert it before the new branch.  RETURN_VALUE_P
   is a cache containing a temporary variable to be used in manipulating
   the value returned from the function.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
                       tree *return_value_p)
{
  tree ret_expr;
  gimple x;

  /* In the case of a return, the queue node must be a gimple statement.  */
  gcc_assert (!q->is_label);

  ret_expr = gimple_return_retval (q->stmt.g);

  if (ret_expr)
    {
      if (!*return_value_p)
        *return_value_p = ret_expr;
      else
        gcc_assert (*return_value_p == ret_expr);
      q->cont_stmt = q->stmt.g;
      /* The nasty part about redirecting the return value is that the
         return value itself is to be computed before the FINALLY block
         is executed.  e.g.

            int x;
            int foo (void)
            {
              x = 0;
              try {
                return x;
              } finally {
                x++;
              }
            }

         should return 0, not 1.  Arrange for this to happen by copying
         the computed return value into a local temporary.  This also
         allows us to redirect multiple return statements through the
         same destination block; whether this is a net win or not really
         depends, I guess, but it does make generation of the switch in
         lower_try_finally_switch easier.  */

      if (TREE_CODE (ret_expr) == RESULT_DECL)
        {
          if (!*return_value_p)
            *return_value_p = ret_expr;
          else
            gcc_assert (*return_value_p == ret_expr);
          q->cont_stmt = q->stmt.g;
        }
      else
        gcc_unreachable ();
    }
  else
    /* If we don't return a value, all return statements are the same.  */
    q->cont_stmt = q->stmt.g;

  if (!q->repl_stmt)
    q->repl_stmt = gimple_seq_alloc ();

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}

/* Similar, but easier, for GIMPLE_GOTO.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
                     struct leh_tf_state *tf)
{
  gimple x;

  gcc_assert (q->is_label);
  if (!q->repl_stmt)
    q->repl_stmt = gimple_seq_alloc ();

  q->cont_stmt = gimple_build_goto (VEC_index (tree, tf->dest_array, q->index));

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}

/* We want to transform
	try { body; } catch { stuff; }
   to
	body; goto over; lab: stuff; over:

   TP is a GIMPLE_TRY node.  LAB is the label that
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */

static gimple_seq
frob_into_branch_around (gimple tp, tree lab, tree over)
{
  gimple x;
  gimple_seq cleanup, result;

  cleanup = gimple_try_cleanup (tp);
  result = gimple_try_eval (tp);

  if (gimple_seq_may_fallthru (result))
    {
      if (!over)
        over = create_artificial_label ();
      x = gimple_build_goto (over);
      gimple_seq_add_stmt (&result, x);
    }

  if (lab)
    {
      x = gimple_build_label (lab);
      gimple_seq_add_stmt (&result, x);
    }

  gimple_seq_add_seq (&result, cleanup);

  if (over)
    {
      x = gimple_build_label (over);
      gimple_seq_add_stmt (&result, x);
    }
  return result;
}

/* A subroutine of lower_try_finally.  Duplicate the statement sequence SEQ.
   Make sure to record all new labels found.  */

static gimple_seq
lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state)
{
  gimple region = NULL;
  gimple_seq new_seq;

  new_seq = copy_gimple_seq_and_replace_locals (seq);

  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree_1 (new_seq, region);

  return new_seq;
}

/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  treemple temp;

  if (!label)
    {
      label = create_artificial_label ();
      tf->fallthru_label = label;
      if (tf->outer->tf)
        {
          temp.t = label;
          record_in_finally_tree (temp, tf->outer->tf->try_finally_expr);
        }
    }
  return label;
}

/* A subroutine of lower_try_finally.  If lang_protect_cleanup_actions
   returns non-null, then the language requires that the exception path out
   of a try_finally be treated specially.  To wit: the code within the
   finally block may not itself throw an exception.  We have two choices here.
   First we can duplicate the finally block and wrap it in a must_not_throw
   region.  Second, we can generate code like

	try {
	  finally_block;
	} catch {
	  if (fintmp == eh_edge)
	    protect_cleanup_actions;
	}

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */

static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
                               struct leh_state *this_state,
                               struct leh_tf_state *tf)
{
  gimple protect_cleanup_actions;
  gimple_stmt_iterator gsi;
  bool finally_may_fallthru;
  gimple_seq finally;
  gimple x;

  /* First check for nothing to do.  */
  if (lang_protect_cleanup_actions)
    protect_cleanup_actions = lang_protect_cleanup_actions ();
  else
    protect_cleanup_actions = NULL;

  finally = gimple_try_cleanup (tf->top_p);

  /* If the EH case of the finally block can fall through, this may be a
     structure of the form
	try {
	  try {
	    throw ...;
	  } cleanup {
	    try {
	      throw ...;
	    } catch (...) {
	    }
	  }
	} catch (...) {
	  yyy;
	}
     E.g. with an inline destructor with an embedded try block.  In this
     case we must save the runtime EH data around the nested exception.

     This complication means that any time the previous runtime data might
     be used (via fallthru from the finally) we handle the eh case here,
     whether or not protect_cleanup_actions is active.  */

  finally_may_fallthru = gimple_seq_may_fallthru (finally);
  if (!finally_may_fallthru && !protect_cleanup_actions)
    return;

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  */
  if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state);

  /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
     set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
     to be in an enclosing scope, but needs to be implemented at this level
     to avoid a nesting violation (see wrap_temporary_cleanups in
     cp/decl.c).  Since it's logically at an outer level, we should call
     terminate before we get to it, so strip it away before adding the
     MUST_NOT_THROW filter.  */
  gsi = gsi_start (finally);
  x = gsi_stmt (gsi);
  if (protect_cleanup_actions
      && gimple_code (x) == GIMPLE_TRY
      && gimple_try_kind (x) == GIMPLE_TRY_CATCH
      && gimple_try_catch_is_cleanup (x))
    {
      gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT);
      gsi_remove (&gsi, false);
    }

  /* Resume execution after the exception.  Adding this now lets
     lower_eh_filter not add unnecessary gotos, as it is clear that
     we never fallthru from this copy of the finally block.  */
  if (finally_may_fallthru)
    {
      tree save_eptr, save_filt;
      tree tmp;

      save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
      save_filt = create_tmp_var (integer_type_node, "save_filt");

      gsi = gsi_start (finally);
      tmp = build0 (EXC_PTR_EXPR, ptr_type_node);
      x = gimple_build_assign (save_eptr, tmp);
      gsi_insert_before (&gsi, x, GSI_CONTINUE_LINKING);

      tmp = build0 (FILTER_EXPR, integer_type_node);
      x = gimple_build_assign (save_filt, tmp);
      gsi_insert_before (&gsi, x, GSI_CONTINUE_LINKING);

      gsi = gsi_last (finally);
      tmp = build0 (EXC_PTR_EXPR, ptr_type_node);
      x = gimple_build_assign (tmp, save_eptr);
      gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);

      tmp = build0 (FILTER_EXPR, integer_type_node);
      x = gimple_build_assign (tmp, save_filt);
      gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);

      x = gimple_build_resx (get_eh_region_number (tf->region));
      gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  if (protect_cleanup_actions)
    {
      gimple_seq seq = NULL, failure = NULL;

      gimple_seq_add_stmt (&failure, protect_cleanup_actions);
      x = gimple_build_eh_filter (NULL, failure);
      gimple_eh_filter_set_must_not_throw (x, 1);

      gimple_seq_add_stmt (&seq, x);
      x = gimple_build_try (finally, seq, GIMPLE_TRY_CATCH);
      finally = lower_eh_filter (outer_state, x);
    }
  else
    lower_eh_constructs_1 (outer_state, finally);

  /* Hook this up to the end of the existing try block.  If we
     previously fell through the end, we'll have to branch around.
     This means adding a new goto, and adding it to the queue.  */

  gsi = gsi_last (gimple_try_eval (tf->top_p));

  if (tf->may_fallthru)
    {
      tree tmp;
      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);

      if (this_state)
        maybe_record_in_goto_queue (this_state, x);

      tf->may_fallthru = false;
    }

  x = gimple_build_label (tf->eh_label);
  gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);
  gsi_insert_seq_after (&gsi, finally, GSI_CONTINUE_LINKING);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}

/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_nofallthru (struct leh_state *state,
                              struct leh_tf_state *tf)
{
  tree lab, return_val;
  gimple x;
  gimple_seq finally;
  struct goto_queue_node *q, *qe;

  if (tf->may_throw)
    lab = tf->eh_label;
  else
    lab = create_artificial_label ();

  /* We expect that tf->top_p is a GIMPLE_TRY.  */
  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  x = gimple_build_label (lab);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  return_val = NULL;
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL, &return_val);
    else
      do_goto_redirection (q, lab, NULL, tf);

  replace_goto_queue (tf);

  lower_eh_constructs_1 (state, finally);
  gimple_seq_add_seq (&tf->top_p_seq, finally);
}

/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  gimple x;
  gimple_seq finally;
  tree finally_label;

  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  lower_eh_constructs_1 (state, finally);

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
         the head of the FINALLY block.  Append a RESX at the end.  */
      x = gimple_build_label (tf->eh_label);
      gimple_seq_add_stmt (&tf->top_p_seq, x);

      gimple_seq_add_seq (&tf->top_p_seq, finally);

      x = gimple_build_resx (get_eh_region_number (tf->region));

      gimple_seq_add_stmt (&tf->top_p_seq, x);

      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
         the two blocks run together; we'll fall out the bottom.  */
      gimple_seq_add_seq (&tf->top_p_seq, finally);
      return;
    }

  finally_label = create_artificial_label ();
  x = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  gimple_seq_add_seq (&tf->top_p_seq, finally);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      tree return_val = NULL;
      for (; q < qe; ++q)
        do_return_redirection (q, finally_label, NULL, &return_val);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
        do_goto_redirection (q, finally_label, NULL, tf);
      replace_goto_queue (tf);

      if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
        {
          /* Reachable by goto to fallthru label only.  Redirect it
             to the new label (already created, sadly), and do not
             emit the final branch out, or the fallthru label.  */
          tf->fallthru_label = NULL;
          return;
        }
    }

  /* Place the original return/goto to the original destination
     immediately after the finally block.  */
  x = tf->goto_queue[0].cont_stmt;
  gimple_seq_add_stmt (&tf->top_p_seq, x);
  maybe_record_in_goto_queue (state, x);
}

/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */
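
/* Illustrative sketch (labels hypothetical): with goto destinations L1
   and L2 plus a fallthru exit, the single finally body F expands to

	<try body, each escaping branch redirected to its copy>
	F; goto fallthru;
     lab1: F; goto L1;
     lab2: F; goto L2;

   i.e. one copy of F per destination, at the code-growth cost that
   decide_copy_try_finally weighs against the switch strategy.  */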

static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  gimple_seq finally;
  gimple_seq new_stmt;
  gimple_seq seq;
  gimple x;
  tree tmp;

  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);
  new_stmt = NULL;

  if (tf->may_fallthru)
    {
      seq = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, seq);
      gimple_seq_add_seq (&new_stmt, seq);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_seq_add_stmt (&new_stmt, x);
    }

  if (tf->may_throw)
    {
      x = gimple_build_label (tf->eh_label);
      gimple_seq_add_stmt (&new_stmt, x);

      seq = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, seq);
      gimple_seq_add_seq (&new_stmt, seq);

      x = gimple_build_resx (get_eh_region_number (tf->region));
      gimple_seq_add_stmt (&new_stmt, x);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      tree return_val = NULL;
      int return_index, index;
      struct labels_s
      {
        struct goto_queue_node *q;
        tree label;
      } *labels;

      return_index = VEC_length (tree, tf->dest_array);
      labels = XCNEWVEC (struct labels_s, return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
        {
          index = q->index < 0 ? return_index : q->index;

          if (!labels[index].q)
            labels[index].q = q;
        }

      for (index = 0; index < return_index + 1; index++)
        {
          tree lab;

          q = labels[index].q;
          if (! q)
            continue;

          lab = labels[index].label = create_artificial_label ();

          if (index == return_index)
            do_return_redirection (q, lab, NULL, &return_val);
          else
            do_goto_redirection (q, lab, NULL, tf);

          x = gimple_build_label (lab);
          gimple_seq_add_stmt (&new_stmt, x);

          seq = lower_try_finally_dup_block (finally, state);
          lower_eh_constructs_1 (state, seq);
          gimple_seq_add_seq (&new_stmt, seq);

          gimple_seq_add_stmt (&new_stmt, q->cont_stmt);
          maybe_record_in_goto_queue (state, q->cont_stmt);
        }

      for (q = tf->goto_queue; q < qe; q++)
        {
          tree lab;

          index = q->index < 0 ? return_index : q->index;

          if (labels[index].q == q)
            continue;

          lab = labels[index].label;

          if (index == return_index)
            do_return_redirection (q, lab, NULL, &return_val);
          else
            do_goto_redirection (q, lab, NULL, tf);
        }

      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_seq (&tf->top_p_seq, new_stmt);
}

/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */
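
/* Illustrative sketch (indices and labels hypothetical): with one goto
   destination L1 plus a fallthru and an EH edge, the result is roughly

	<try body>            finally_tmp = 2;  goto finally_label;
	<escaping goto L1 ->  finally_tmp = 0;  goto finally_label;>
     eh_label:                finally_tmp = 1;
     finally_label:
	<finally body>
	switch (finally_tmp)
	  { case 0: goto L1;  case 1: resx;  case 2: goto fallthru; }

   with one of the cases additionally serving as the required default.  */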

static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree return_val = NULL;
  tree finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree last_case;
  VEC (tree,heap) *case_label_vec;
  gimple_seq switch_body;
  gimple x;
  tree tmp;
  gimple switch_stmt;
  gimple_seq finally;
  struct pointer_map_t *cont_map = NULL;

  switch_body = gimple_seq_alloc ();

  /* Mash the TRY block to the head of the chain.  */
  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  /* Lower the finally block itself.  */
  lower_eh_constructs_1 (state, finally);

  /* Prepare for switch statement generation.  */
  nlabels = VEC_length (tree, tf->dest_array);
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + tf->may_throw;
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label ();

  /* We use VEC_quick_push on case_label_vec throughout this function,
     since we know the size in advance and allocate precisely as much
     space as needed.  */
  case_label_vec = VEC_alloc (tree, heap, ndests);
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)
    {
      x = gimple_build_assign (finally_tmp, build_int_cst (integer_type_node,
                                                           fallthru_index));
      gimple_seq_add_stmt (&tf->top_p_seq, x);

      if (tf->may_throw)
        {
          x = gimple_build_goto (finally_label);
          gimple_seq_add_stmt (&tf->top_p_seq, x);
        }

      last_case = build3 (CASE_LABEL_EXPR, void_type_node,
                          build_int_cst (NULL_TREE, fallthru_index), NULL,
                          create_artificial_label ());
      VEC_quick_push (tree, case_label_vec, last_case);
      last_case_index++;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_seq_add_stmt (&switch_body, x);
    }

  if (tf->may_throw)
    {
      x = gimple_build_label (tf->eh_label);
      gimple_seq_add_stmt (&tf->top_p_seq, x);

      x = gimple_build_assign (finally_tmp, build_int_cst (integer_type_node,
                                                           eh_index));
      gimple_seq_add_stmt (&tf->top_p_seq, x);

      last_case = build3 (CASE_LABEL_EXPR, void_type_node,
                          build_int_cst (NULL_TREE, eh_index), NULL,
                          create_artificial_label ());
      VEC_quick_push (tree, case_label_vec, last_case);
      last_case_index++;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);
      x = gimple_build_resx (get_eh_region_number (tf->region));
      gimple_seq_add_stmt (&switch_body, x);
    }

  x = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  gimple_seq_add_seq (&tf->top_p_seq, finally);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  /* Prepare the assignments to finally_tmp that are executed upon the
     entrance through a particular edge.  */
  for (; q < qe; ++q)
    {
      gimple_seq mod;
      int switch_id;
      unsigned int case_index;

      mod = gimple_seq_alloc ();

      if (q->index < 0)
        {
          x = gimple_build_assign (finally_tmp,
                                   build_int_cst (integer_type_node,
                                                  return_index));
          gimple_seq_add_stmt (&mod, x);
          do_return_redirection (q, finally_label, mod, &return_val);
          switch_id = return_index;
        }
      else
        {
          x = gimple_build_assign (finally_tmp,
                                   build_int_cst (integer_type_node, q->index));
          gimple_seq_add_stmt (&mod, x);
          do_goto_redirection (q, finally_label, mod, tf);
          switch_id = q->index;
        }

      case_index = j + q->index;
      if (VEC_length (tree, case_label_vec) <= case_index
          || !VEC_index (tree, case_label_vec, case_index))
        {
          tree case_lab;
          void **slot;
          case_lab = build3 (CASE_LABEL_EXPR, void_type_node,
                             build_int_cst (NULL_TREE, switch_id), NULL,
                             NULL);
          /* We store the cont_stmt in the pointer map, so that we can recover
             it in the loop below.  We don't create the new label while
             walking the goto_queue because pointers don't offer a stable
             order.  */
          if (!cont_map)
            cont_map = pointer_map_create ();
          slot = pointer_map_insert (cont_map, case_lab);
          *slot = q->cont_stmt;
          VEC_quick_push (tree, case_label_vec, case_lab);
        }
    }
  for (j = last_case_index; j < last_case_index + nlabels; j++)
    {
      tree label;
      gimple cont_stmt;
      void **slot;

      last_case = VEC_index (tree, case_label_vec, j);

      gcc_assert (last_case);
      gcc_assert (cont_map);

      slot = pointer_map_contains (cont_map, last_case);
      /* As the comment above suggests, CASE_LABEL (last_case) was just a
         placeholder, it does not store an actual label, yet.  */
      gcc_assert (slot);
      cont_stmt = *(gimple *) slot;

      label = create_artificial_label ();
      CASE_LABEL (last_case) = label;

      x = gimple_build_label (label);
      gimple_seq_add_stmt (&switch_body, x);
      gimple_seq_add_stmt (&switch_body, cont_stmt);
      maybe_record_in_goto_queue (state, cont_stmt);
    }
  if (cont_map)
    pointer_map_destroy (cont_map);

  replace_goto_queue (tf);

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  sort_case_labels (case_label_vec);

  /* Build the switch statement, setting last_case to be the default
     label.  */
  switch_stmt = gimple_build_switch_vec (finally_tmp, last_case,
                                         case_label_vec);

  /* Need to link SWITCH_STMT after running replace_goto_queue
     due to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt);
  gimple_seq_add_seq (&tf->top_p_seq, switch_body);
}

/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations.

   First, if this is Java, then the finally block contains code
   written by the user.  It has line numbers associated with it,
   so duplicating the block means it's difficult to set a breakpoint.
   Since controlling code generation via -g is verboten, we simply
   never duplicate code without optimization.

   Second, we'd like to prevent egregious code growth.  One way to
   do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.  */
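
/* Worked example (numbers hypothetical): a finally block estimated at
   12 insns with ndests == 3 gives f_estimate = (12 + 1) * 3 = 39 and
   sw_estimate = 10 + 2 * 3 = 16.  At -O1 we still copy (39 < 40),
   while optimizing for size builds the switch (39 >= 16).  */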

static bool
decide_copy_try_finally (int ndests, gimple_seq finally)
{
  int f_estimate, sw_estimate;

  if (!optimize)
    return false;

  /* The finally block is duplicated for each of the N destinations,
     plus a goto after each copy.  */
  f_estimate = count_insns_seq (finally, &eni_size_weights);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_function_for_size_p (cfun))
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
  if (optimize > 1)
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
  else
    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}

/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_FINALLY node
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */
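
/* Summary note on the strategies chosen below, keyed off how many
   destinations the finally block must reach:
     - 0 destinations: the finally block is unreachable; drop it;
     - the finally block cannot fall through: lower_try_finally_nofallthru;
     - exactly 1 destination: lower_try_finally_onedest;
     - otherwise: copy per destination or build a switch, whichever
       decide_copy_try_finally estimates is cheaper.  */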

static gimple_seq
lower_try_finally (struct leh_state *state, gimple tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  if (using_eh_for_cleanups_p)
    this_tf.region
      = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  else
    this_tf.region = NULL;

  this_state.cur_region = this_tf.region;
  this_state.prev_try = state->prev_try;
  this_state.tf = &this_tf;

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));

  /* Determine if any exceptions are possible within the try block.  */
  if (using_eh_for_cleanups_p)
    this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    {
      this_tf.eh_label = create_artificial_label ();
      set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
      honor_protect_cleanup_actions (state, &this_state, &this_tf);
    }

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  ndests = VEC_length (tree, this_tf.dest_array);
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    {
      gimple_seq_add_seq (&this_tf.top_p_seq, gimple_try_eval (tp));
      gimple_try_set_cleanup (tp, NULL);
    }
  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!gimple_seq_may_fallthru (gimple_try_cleanup (tp)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);
  else if (decide_copy_try_finally (ndests, gimple_try_cleanup (tp)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     block, do so.  */
  if (this_tf.fallthru_label)
    {
      /* This must be reached only if ndests == 0.  */
      gimple x = gimple_build_label (this_tf.fallthru_label);
      gimple_seq_add_stmt (&this_tf.top_p_seq, x);
    }

  VEC_free (tree, heap, this_tf.dest_array);
  if (this_tf.goto_queue)
    free (this_tf.goto_queue);
  if (this_tf.goto_queue_map)
    pointer_map_destroy (this_tf.goto_queue_map);

  return this_tf.top_p_seq;
}

/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_CATCH with a
   list of GIMPLE_CATCH to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */
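
/* Illustrative sketch (labels hypothetical):
   "try { B } catch (T1) { H1 } catch (T2) { H2 }" becomes roughly

	B; goto over;
     lab1: H1; goto over;
     lab2: H2; goto over;
     over:

   with one EH region per handler, labeled lab1/lab2.  */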

static gimple_seq
lower_catch (struct leh_state *state, gimple tp)
{
  struct eh_region *try_region;
  struct leh_state this_state;
  gimple_stmt_iterator gsi;
  tree out_label;

  try_region = gen_eh_region_try (state->cur_region);
  this_state.cur_region = try_region;
  this_state.prev_try = try_region;
  this_state.tf = state->tf;

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  if (!get_eh_region_may_contain_throw (try_region))
    {
      return gimple_try_eval (tp);
    }

  out_label = NULL;
  for (gsi = gsi_start (gimple_try_cleanup (tp)); !gsi_end_p (gsi); )
    {
      struct eh_region *catch_region;
      tree eh_label;
      gimple x, gcatch;

      gcatch = gsi_stmt (gsi);
      catch_region = gen_eh_region_catch (try_region,
                                          gimple_catch_types (gcatch));

      this_state.cur_region = catch_region;
      this_state.prev_try = state->prev_try;
      lower_eh_constructs_1 (&this_state, gimple_catch_handler (gcatch));

      eh_label = create_artificial_label ();
      set_eh_region_tree_label (catch_region, eh_label);

      x = gimple_build_label (eh_label);
      gsi_insert_before (&gsi, x, GSI_SAME_STMT);

      if (gimple_seq_may_fallthru (gimple_catch_handler (gcatch)))
        {
          if (!out_label)
            out_label = create_artificial_label ();

          x = gimple_build_goto (out_label);
          gimple_seq_add_stmt (gimple_catch_handler_ptr (gcatch), x);
        }

      gsi_insert_seq_before (&gsi, gimple_catch_handler (gcatch),
                             GSI_SAME_STMT);
      gsi_remove (&gsi, false);
    }

  return frob_into_branch_around (tp, NULL, out_label);
}
1700
|
|
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with a
   GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

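/* Sketched informally (again not literal dump output),
   "try { BODY; } filter (TYPES) { FAILURE; }" becomes roughly

       BODY;
       goto DONE;
     LAB:
       FAILURE;
     DONE:

   with LAB recorded as the label of the allowed-exceptions or
   must-not-throw region.  */
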
static gimple_seq
lower_eh_filter (struct leh_state *state, gimple tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  gimple inner;
  tree eh_label;

  inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));

  if (gimple_eh_filter_must_not_throw (inner))
    this_region = gen_eh_region_must_not_throw (state->cur_region);
  else
    this_region = gen_eh_region_allowed (state->cur_region,
                                         gimple_eh_filter_types (inner));
  this_state = *state;
  this_state.cur_region = this_region;
  /* For a must-not-throw region, any cleanup regions inside it
     can't reach outer catch regions.  */
  if (gimple_eh_filter_must_not_throw (inner))
    this_state.prev_try = NULL;

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  if (!get_eh_region_may_contain_throw (this_region))
    return gimple_try_eval (tp);

  lower_eh_constructs_1 (state, gimple_eh_filter_failure (inner));
  gimple_try_set_cleanup (tp, gimple_eh_filter_failure (inner));

  eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, eh_label);

  return frob_into_branch_around (tp, eh_label, NULL);
}

/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */

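/* This is reached from lower_eh_constructs_2 for a GIMPLE_TRY_CATCH
   whose cleanup sequence begins with neither a GIMPLE_CATCH nor a
   GIMPLE_EH_FILTER; in C++, for example, this is the form used to run
   a destructor only when an exception escapes the protected region.  */
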
static gimple_seq
lower_cleanup (struct leh_state *state, gimple tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  struct leh_tf_state fake_tf;
  gimple_seq result;

  /* If not using EH, then exception-only cleanups are no-ops.  */
  if (!flag_exceptions)
    {
      result = gimple_try_eval (tp);
      lower_eh_constructs_1 (state, result);
      return result;
    }

  this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  if (!get_eh_region_may_contain_throw (this_region))
    return gimple_try_eval (tp);

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
  fake_tf.may_throw = true;

  fake_tf.eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, fake_tf.eh_label);

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
         and we should process this normally.  */
      lower_eh_constructs_1 (state, gimple_try_cleanup (tp));
      result = frob_into_branch_around (tp, fake_tf.eh_label,
                                        fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
         the work.  All we have left is to append the fallthru_label.  */
      result = gimple_try_eval (tp);
      if (fake_tf.fallthru_label)
        {
          gimple x = gimple_build_label (fake_tf.fallthru_label);
          gimple_seq_add_stmt (&result, x);
        }
    }
  return result;
}


/* Main loop for lowering EH constructs.  Also moves GSI to the next
   statement.  */

static void
lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
{
  gimple_seq replace;
  gimple x;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
    case GIMPLE_ASSIGN:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && stmt_could_throw_p (stmt))
        {
          record_stmt_eh_region (state->cur_region, stmt);
          note_eh_region_may_contain_throw (state->cur_region);
        }
      break;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
      maybe_record_in_goto_queue (state, stmt);
      break;

    case GIMPLE_SWITCH:
      verify_norecord_switch_expr (state, stmt);
      break;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
        replace = lower_try_finally (state, stmt);
      else
        {
          x = gimple_seq_first_stmt (gimple_try_cleanup (stmt));
          switch (gimple_code (x))
            {
            case GIMPLE_CATCH:
              replace = lower_catch (state, stmt);
              break;
            case GIMPLE_EH_FILTER:
              replace = lower_eh_filter (state, stmt);
              break;
            default:
              replace = lower_cleanup (state, stmt);
              break;
            }
        }

      /* Remove the old stmt and insert the transformed sequence
         instead.  */
      gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT);
      gsi_remove (gsi, true);

      /* Return since we don't want gsi_next ().  */
      return;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }

  gsi_next (gsi);
}

/* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2.  */

static void
lower_eh_constructs_1 (struct leh_state *state, gimple_seq seq)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (seq); !gsi_end_p (gsi);)
    lower_eh_constructs_2 (state, &gsi);
}

static unsigned int
lower_eh_constructs (void)
{
  struct leh_state null_state;

  gimple_seq bodyp = gimple_body (current_function_decl);

  finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);

  collect_finally_tree_1 (bodyp, NULL);

  memset (&null_state, 0, sizeof (null_state));
  lower_eh_constructs_1 (&null_state, bodyp);

  htab_delete (finally_tree);

  collect_eh_region_array ();
  return 0;
}

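/* The EH lowering pass.  A NULL gate means the pass always runs; it
   requires GIMPLE with lowered control flow (PROP_gimple_lcf) and
   establishes PROP_gimple_leh for the passes that follow.  */
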
struct gimple_opt_pass pass_lower_eh =
{
 {
  GIMPLE_PASS,
  "eh",                                 /* name */
  NULL,                                 /* gate */
  lower_eh_constructs,                  /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_EH,                           /* tv_id */
  PROP_gimple_lcf,                      /* properties_required */
  PROP_gimple_leh,                      /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};


/* Construct EH edges for STMT.  */

static void
make_eh_edge (struct eh_region *region, void *data)
{
  gimple stmt;
  tree lab;
  basic_block src, dst;

  stmt = (gimple) data;
  lab = get_eh_region_tree_label (region);

  src = gimple_bb (stmt);
  dst = label_to_block (lab);

  make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
}

void
make_eh_edges (gimple stmt)
{
  int region_nr;
  bool is_resx;

  if (gimple_code (stmt) == GIMPLE_RESX)
    {
      region_nr = gimple_resx_region (stmt);
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
        return;
      is_resx = false;
    }

  foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
}


static bool mark_eh_edge_found_error;

/* Mark the edge that make_eh_edge would create for the given region by
   setting its aux field; output an error if something goes wrong.  */

static void
mark_eh_edge (struct eh_region *region, void *data)
{
  gimple stmt;
  tree lab;
  basic_block src, dst;
  edge e;

  stmt = (gimple) data;
  lab = get_eh_region_tree_label (region);

  src = gimple_bb (stmt);
  dst = label_to_block (lab);

  e = find_edge (src, dst);
  if (!e)
    {
      error ("EH edge %i->%i is missing", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else if (!(e->flags & EDGE_EH))
    {
      error ("EH edge %i->%i is missing an EH flag", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else if (e->aux)
    {
      /* ??? This might not be a mistake.  */
      error ("EH edge %i->%i has duplicated regions", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else
    e->aux = (void *) 1;
}


/* Verify that the BB containing STMT as its last statement has precisely
   the edges that make_eh_edges would create.  */

bool
verify_eh_edges (gimple stmt)
{
  int region_nr;
  bool is_resx;
  basic_block bb = gimple_bb (stmt);
  edge_iterator ei;
  edge e;

  FOR_EACH_EDGE (e, ei, bb->succs)
    gcc_assert (!e->aux);
  mark_eh_edge_found_error = false;
  if (gimple_code (stmt) == GIMPLE_RESX)
    {
      region_nr = gimple_resx_region (stmt);
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
        {
          FOR_EACH_EDGE (e, ei, bb->succs)
            if (e->flags & EDGE_EH)
              {
                error ("BB %i cannot throw but has EH edges", bb->index);
                return true;
              }
          return false;
        }
      if (!stmt_could_throw_p (stmt))
        {
          error ("BB %i last statement has incorrectly set region", bb->index);
          return true;
        }
      is_resx = false;
    }

  foreach_reachable_handler (region_nr, is_resx, mark_eh_edge, stmt);
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if ((e->flags & EDGE_EH) && !e->aux)
        {
          error ("unnecessary EH edge %i->%i", bb->index, e->dest->index);
          mark_eh_edge_found_error = true;
          return true;
        }
      e->aux = NULL;
    }

  return mark_eh_edge_found_error;
}



/* Helper function for operation_could_trap_p and stmt_could_throw_p.  */

bool
operation_could_trap_helper_p (enum tree_code op,
                               bool fp_operation,
                               bool honor_trapv,
                               bool honor_nans,
                               bool honor_snans,
                               tree divisor,
                               bool *handled)
{
  *handled = true;
  switch (op)
    {
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      if (honor_snans || honor_trapv)
        return true;
      if (fp_operation)
        return flag_trapping_math;
      if (!TREE_CONSTANT (divisor) || integer_zerop (divisor))
        return true;
      return false;

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case LTGT_EXPR:
      /* Some floating point comparisons may trap.  */
      return honor_nans;

    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      return honor_snans;

    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
      /* Conversion of floating point might trap.  */
      return honor_nans;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case CONJ_EXPR:
      /* These operations don't trap with floating point.  */
      if (honor_trapv)
        return true;
      return false;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
        return true;
      if (honor_trapv)
        return true;
      return false;

    default:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
        return true;

      *handled = false;
      return false;
    }
}

/* Return true if operation OP may trap.  FP_OPERATION is true if OP is
   applied on floating-point values.  HONOR_TRAPV is true if OP is applied
   on integer type operands that may trap.  If OP is a division operator,
   DIVISOR contains the value of the divisor.  */

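/* For instance, an integer TRUNC_DIV_EXPR whose divisor is not a nonzero
   constant could trap, while an integer PLUS_EXPR compiled without
   -ftrapv could not.  */
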
bool
operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv,
                        tree divisor)
{
  bool honor_nans = (fp_operation && flag_trapping_math
                     && !flag_finite_math_only);
  bool honor_snans = fp_operation && flag_signaling_nans != 0;
  bool handled;

  if (TREE_CODE_CLASS (op) != tcc_comparison
      && TREE_CODE_CLASS (op) != tcc_unary
      && TREE_CODE_CLASS (op) != tcc_binary)
    return false;

  return operation_could_trap_helper_p (op, fp_operation, honor_trapv,
                                        honor_nans, honor_snans, divisor,
                                        &handled);
}

/* Return true if EXPR can trap, as in dereferencing an invalid pointer
   location or floating point arithmetic.  C.f. the rtl version, may_trap_p.
   This routine expects only GIMPLE lhs or rhs input.  */

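/* For example, "*p" for an arbitrary pointer could trap unless
   TREE_THIS_NOTRAP is set on the reference, while an ARRAY_REF whose
   index is known to lie within the bounds of the array could not.  */
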
bool
tree_could_trap_p (tree expr)
{
  enum tree_code code;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t, base, div = NULL_TREE;

  if (!expr)
    return false;

  code = TREE_CODE (expr);
  t = TREE_TYPE (expr);

  if (t)
    {
      if (COMPARISON_CLASS_P (expr))
        fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
      else
        fp_operation = FLOAT_TYPE_P (t);
      honor_trapv = INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t);
    }

  if (TREE_CODE_CLASS (code) == tcc_binary)
    div = TREE_OPERAND (expr, 1);
  if (operation_could_trap_p (code, fp_operation, honor_trapv, div))
    return true;

 restart:
  switch (code)
    {
    case TARGET_MEM_REF:
      /* For TARGET_MEM_REFs use the information based on the original
         reference.  */
      expr = TMR_ORIGINAL (expr);
      code = TREE_CODE (expr);
      goto restart;

    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
    case WITH_SIZE_EXPR:
      expr = TREE_OPERAND (expr, 0);
      code = TREE_CODE (expr);
      goto restart;

    case ARRAY_RANGE_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
        return true;

      if (TREE_THIS_NOTRAP (expr))
        return false;

      return !range_in_array_bounds_p (expr);

    case ARRAY_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
        return true;

      if (TREE_THIS_NOTRAP (expr))
        return false;

      return !in_array_bounds_p (expr);

    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
      return !TREE_THIS_NOTRAP (expr);

    case ASM_EXPR:
      return TREE_THIS_VOLATILE (expr);

    case CALL_EXPR:
      t = get_callee_fndecl (expr);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
        return true;
      return false;

    default:
      return false;
    }
}



/* Helper for stmt_could_throw_p.  Return true if STMT (assumed to be an
   assignment or a conditional) may throw.  */

static bool
stmt_could_throw_1_p (gimple stmt)
{
  enum tree_code code = gimple_expr_code (stmt);
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t;
  size_t i;
  bool handled, ret;

  if (TREE_CODE_CLASS (code) == tcc_comparison
      || TREE_CODE_CLASS (code) == tcc_unary
      || TREE_CODE_CLASS (code) == tcc_binary)
    {
      t = gimple_expr_type (stmt);
      fp_operation = FLOAT_TYPE_P (t);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
        honor_trapv = true;
    }

  /* Check if the main expression may trap.  */
  t = is_gimple_assign (stmt) ? gimple_assign_rhs2 (stmt) : NULL;
  ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv,
                                       honor_nans, honor_snans, t,
                                       &handled);
  if (handled)
    return ret;

  /* If the expression does not trap, see if any of the individual operands
     may trap.  */
  for (i = 0; i < gimple_num_ops (stmt); i++)
    if (tree_could_trap_p (gimple_op (stmt, i)))
      return true;

  return false;
}


/* Return true if statement STMT could throw an exception.  */

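/* For example, any call not known to be ECF_NOTHROW is assumed to be
   able to throw, while a simple integer addition can throw only when
   both -fnon-call-exceptions and -ftrapv are in effect.  */
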
bool
stmt_could_throw_p (gimple stmt)
{
  enum gimple_code code;

  if (!flag_exceptions)
    return false;

  /* The only statements that can throw an exception are assignments,
     conditionals, calls and asms.  */
  code = gimple_code (stmt);
  if (code != GIMPLE_ASSIGN
      && code != GIMPLE_COND
      && code != GIMPLE_CALL
      && code != GIMPLE_ASM)
    return false;

  /* If exceptions can only be thrown by function calls and STMT is not a
     GIMPLE_CALL, the statement cannot throw.  */
  if (!flag_non_call_exceptions && code != GIMPLE_CALL)
    return false;

  if (code == GIMPLE_ASSIGN || code == GIMPLE_COND)
    return stmt_could_throw_1_p (stmt);
  else if (is_gimple_call (stmt))
    {
      tree t = gimple_call_fndecl (stmt);

      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
        return true;

      return (gimple_call_flags (stmt) & ECF_NOTHROW) == 0;
    }
  else if (gimple_code (stmt) == GIMPLE_ASM)
    return gimple_asm_volatile_p (stmt);
  else
    gcc_unreachable ();

  return false;
}



/* Return true if expression T could throw an exception.  */

bool
tree_could_throw_p (tree t)
{
  if (!flag_exceptions)
    return false;
  if (TREE_CODE (t) == MODIFY_EXPR)
    {
      if (flag_non_call_exceptions
          && tree_could_trap_p (TREE_OPERAND (t, 0)))
        return true;
      t = TREE_OPERAND (t, 1);
    }

  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);
  if (TREE_CODE (t) == CALL_EXPR)
    return (call_expr_flags (t) & ECF_NOTHROW) == 0;
  if (flag_non_call_exceptions)
    return tree_could_trap_p (t);
  return false;
}


/* Return true if STMT can throw an exception that is caught within
   the current function (CFUN).  */

bool
stmt_can_throw_internal (gimple stmt)
{
  int region_nr;
  bool is_resx = false;

  if (gimple_code (stmt) == GIMPLE_RESX)
    {
      region_nr = gimple_resx_region (stmt);
      is_resx = true;
    }
  else
    region_nr = lookup_stmt_eh_region (stmt);

  if (region_nr < 0)
    return false;

  return can_throw_internal_1 (region_nr, is_resx);
}


/* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
   OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
   in the table if it should be in there.  Return TRUE if a replacement was
   done that may require an EH edge purge.  */

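/* For example, when folding replaces a potentially-throwing call with a
   simple constant assignment, the new statement can no longer throw; a
   TRUE return tells the caller that the EH edges out of the statement's
   block are now dead, typically to be removed with
   gimple_purge_dead_eh_edges.  */
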
bool
maybe_clean_or_replace_eh_stmt (gimple old_stmt, gimple new_stmt)
{
  int region_nr = lookup_stmt_eh_region (old_stmt);

  if (region_nr >= 0)
    {
      bool new_stmt_could_throw = stmt_could_throw_p (new_stmt);

      if (new_stmt == old_stmt && new_stmt_could_throw)
        return false;

      remove_stmt_from_eh_region (old_stmt);
      if (new_stmt_could_throw)
        {
          add_stmt_to_eh_region (new_stmt, region_nr);
          return false;
        }
      else
        return true;
    }

  return false;
}

/* Returns TRUE if ONEH and TWOH are exception handlers (gimple_try_cleanup
   of GIMPLE_TRY) that are similar enough to be considered the same.
   Currently this only handles handlers consisting of a single call, as
   that's the important case for C++: a destructor call for a particular
   object showing up in multiple handlers.  */

static bool
same_handler_p (gimple_seq oneh, gimple_seq twoh)
{
  gimple_stmt_iterator gsi;
  gimple ones, twos;
  unsigned int ai;

  gsi = gsi_start (oneh);
  if (!gsi_one_before_end_p (gsi))
    return false;
  ones = gsi_stmt (gsi);

  gsi = gsi_start (twoh);
  if (!gsi_one_before_end_p (gsi))
    return false;
  twos = gsi_stmt (gsi);

  if (!is_gimple_call (ones)
      || !is_gimple_call (twos)
      || gimple_call_lhs (ones)
      || gimple_call_lhs (twos)
      || gimple_call_chain (ones)
      || gimple_call_chain (twos)
      || !operand_equal_p (gimple_call_fn (ones), gimple_call_fn (twos), 0)
      || gimple_call_num_args (ones) != gimple_call_num_args (twos))
    return false;

  for (ai = 0; ai < gimple_call_num_args (ones); ++ai)
    if (!operand_equal_p (gimple_call_arg (ones, ai),
                          gimple_call_arg (twos, ai), 0))
      return false;

  return true;
}


/* Optimize
    try { A() } finally { try { ~B() } catch { ~A() } }
    try { ... } finally { ~A() }
   into
    try { A() } catch { ~B() }
    try { ~B() ... } finally { ~A() }

   This occurs frequently in C++, where A is a local variable and B is a
   temporary used in the initializer for A.  */

static void
optimize_double_finally (gimple one, gimple two)
{
  gimple oneh;
  gimple_stmt_iterator gsi;

  gsi = gsi_start (gimple_try_cleanup (one));
  if (!gsi_one_before_end_p (gsi))
    return;

  oneh = gsi_stmt (gsi);
  if (gimple_code (oneh) != GIMPLE_TRY
      || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH)
    return;

  if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two)))
    {
      gimple_seq seq = gimple_try_eval (oneh);

      gimple_try_set_cleanup (one, seq);
      gimple_try_set_kind (one, GIMPLE_TRY_CATCH);
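      /* The ~B() sequence is now needed in a second place as well; copy
         it, replacing its local variables and labels, so that the two
         instances do not share decls.  */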
      seq = copy_gimple_seq_and_replace_locals (seq);
      gimple_seq_add_seq (&seq, gimple_try_eval (two));
      gimple_try_set_eval (two, seq);
    }
}

/* Perform EH refactoring optimizations that are simpler to do when code
   flow has been lowered but EH structures haven't.  */

static void
refactor_eh_r (gimple_seq seq)
{
  gimple_stmt_iterator gsi;
  gimple one, two;

  one = NULL;
  two = NULL;
  gsi = gsi_start (seq);
  while (1)
    {
      one = two;
      if (gsi_end_p (gsi))
        two = NULL;
      else
        two = gsi_stmt (gsi);
      if (one
          && two
          && gimple_code (one) == GIMPLE_TRY
          && gimple_code (two) == GIMPLE_TRY
          && gimple_try_kind (one) == GIMPLE_TRY_FINALLY
          && gimple_try_kind (two) == GIMPLE_TRY_FINALLY)
        optimize_double_finally (one, two);
      if (one)
        switch (gimple_code (one))
          {
          case GIMPLE_TRY:
            refactor_eh_r (gimple_try_eval (one));
            refactor_eh_r (gimple_try_cleanup (one));
            break;
          case GIMPLE_CATCH:
            refactor_eh_r (gimple_catch_handler (one));
            break;
          case GIMPLE_EH_FILTER:
            refactor_eh_r (gimple_eh_filter_failure (one));
            break;
          default:
            break;
          }
      if (two)
        gsi_next (&gsi);
      else
        break;
    }
}

static unsigned
refactor_eh (void)
{
  refactor_eh_r (gimple_body (current_function_decl));
  return 0;
}

struct gimple_opt_pass pass_refactor_eh =
{
 {
  GIMPLE_PASS,
  "ehopt",                              /* name */
  NULL,                                 /* gate */
  refactor_eh,                          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_EH,                           /* tv_id */
  PROP_gimple_lcf,                      /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};