Mercurial > hg > CbC > CbC_gcc
comparison gcc/tree-eh.c @ 111:04ced10e8804
gcc 7
author | kono |
---|---|
date | Fri, 27 Oct 2017 22:46:09 +0900 |
parents | f6334be47118 |
children | 84e7813d76e9 |
comparison
equal
deleted
inserted
replaced
68:561a7518be6b | 111:04ced10e8804 |
---|---|
1 /* Exception handling semantics and decomposition for trees. | 1 /* Exception handling semantics and decomposition for trees. |
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 | 2 Copyright (C) 2003-2017 Free Software Foundation, Inc. |
3 Free Software Foundation, Inc. | |
4 | 3 |
5 This file is part of GCC. | 4 This file is part of GCC. |
6 | 5 |
7 GCC is free software; you can redistribute it and/or modify | 6 GCC is free software; you can redistribute it and/or modify |
8 it under the terms of the GNU General Public License as published by | 7 it under the terms of the GNU General Public License as published by |
19 <http://www.gnu.org/licenses/>. */ | 18 <http://www.gnu.org/licenses/>. */ |
20 | 19 |
21 #include "config.h" | 20 #include "config.h" |
22 #include "system.h" | 21 #include "system.h" |
23 #include "coretypes.h" | 22 #include "coretypes.h" |
24 #include "tm.h" | 23 #include "backend.h" |
24 #include "rtl.h" | |
25 #include "tree.h" | 25 #include "tree.h" |
26 #include "flags.h" | 26 #include "gimple.h" |
27 #include "function.h" | 27 #include "cfghooks.h" |
28 #include "tree-pass.h" | |
29 #include "ssa.h" | |
30 #include "cgraph.h" | |
31 #include "diagnostic-core.h" | |
32 #include "fold-const.h" | |
33 #include "calls.h" | |
28 #include "except.h" | 34 #include "except.h" |
29 #include "pointer-set.h" | 35 #include "cfganal.h" |
30 #include "tree-flow.h" | 36 #include "cfgcleanup.h" |
31 #include "tree-dump.h" | 37 #include "tree-eh.h" |
38 #include "gimple-iterator.h" | |
39 #include "tree-cfg.h" | |
40 #include "tree-into-ssa.h" | |
41 #include "tree-ssa.h" | |
32 #include "tree-inline.h" | 42 #include "tree-inline.h" |
33 #include "tree-iterator.h" | |
34 #include "tree-pass.h" | |
35 #include "timevar.h" | |
36 #include "langhooks.h" | 43 #include "langhooks.h" |
37 #include "ggc.h" | 44 #include "cfgloop.h" |
38 #include "diagnostic-core.h" | 45 #include "gimple-low.h" |
39 #include "gimple.h" | 46 #include "stringpool.h" |
40 #include "target.h" | 47 #include "attribs.h" |
48 #include "asan.h" | |
41 | 49 |
42 /* In some instances a tree and a gimple need to be stored in a same table, | 50 /* In some instances a tree and a gimple need to be stored in a same table, |
43 i.e. in hash tables. This is a structure to do this. */ | 51 i.e. in hash tables. This is a structure to do this. */ |
44 typedef union {tree *tp; tree t; gimple g;} treemple; | 52 typedef union {tree *tp; tree t; gimple *g;} treemple; |
45 | |
46 /* Nonzero if we are using EH to handle cleanups. */ | |
47 static int using_eh_for_cleanups_p = 0; | |
48 | |
49 void | |
50 using_eh_for_cleanups (void) | |
51 { | |
52 using_eh_for_cleanups_p = 1; | |
53 } | |
54 | 53 |
55 /* Misc functions used in this file. */ | 54 /* Misc functions used in this file. */ |
56 | |
57 /* Compare and hash for any structure which begins with a canonical | |
58 pointer. Assumes all pointers are interchangeable, which is sort | |
59 of already assumed by gcc elsewhere IIRC. */ | |
60 | |
61 static int | |
62 struct_ptr_eq (const void *a, const void *b) | |
63 { | |
64 const void * const * x = (const void * const *) a; | |
65 const void * const * y = (const void * const *) b; | |
66 return *x == *y; | |
67 } | |
68 | |
69 static hashval_t | |
70 struct_ptr_hash (const void *a) | |
71 { | |
72 const void * const * x = (const void * const *) a; | |
73 return (size_t)*x >> 4; | |
74 } | |
75 | |
76 | 55 |
77 /* Remember and lookup EH landing pad data for arbitrary statements. | 56 /* Remember and lookup EH landing pad data for arbitrary statements. |
78 Really this means any statement that could_throw_p. We could | 57 Really this means any statement that could_throw_p. We could |
79 stuff this information into the stmt_ann data structure, but: | 58 stuff this information into the stmt_ann data structure, but: |
80 | 59 |
86 compared to those that can. We should be saving some amount | 65 compared to those that can. We should be saving some amount |
87 of space by only allocating memory for those that can throw. */ | 66 of space by only allocating memory for those that can throw. */ |
88 | 67 |
89 /* Add statement T in function IFUN to landing pad NUM. */ | 68 /* Add statement T in function IFUN to landing pad NUM. */ |
90 | 69 |
70 static void | |
71 add_stmt_to_eh_lp_fn (struct function *ifun, gimple *t, int num) | |
72 { | |
73 gcc_assert (num != 0); | |
74 | |
75 if (!get_eh_throw_stmt_table (ifun)) | |
76 set_eh_throw_stmt_table (ifun, hash_map<gimple *, int>::create_ggc (31)); | |
77 | |
78 gcc_assert (!get_eh_throw_stmt_table (ifun)->put (t, num)); | |
79 } | |
80 | |
81 /* Add statement T in the current function (cfun) to EH landing pad NUM. */ | |
82 | |
91 void | 83 void |
92 add_stmt_to_eh_lp_fn (struct function *ifun, gimple t, int num) | 84 add_stmt_to_eh_lp (gimple *t, int num) |
93 { | |
94 struct throw_stmt_node *n; | |
95 void **slot; | |
96 | |
97 gcc_assert (num != 0); | |
98 | |
99 n = ggc_alloc_throw_stmt_node (); | |
100 n->stmt = t; | |
101 n->lp_nr = num; | |
102 | |
103 if (!get_eh_throw_stmt_table (ifun)) | |
104 set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash, | |
105 struct_ptr_eq, | |
106 ggc_free)); | |
107 | |
108 slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT); | |
109 gcc_assert (!*slot); | |
110 *slot = n; | |
111 } | |
112 | |
113 /* Add statement T in the current function (cfun) to EH landing pad NUM. */ | |
114 | |
115 void | |
116 add_stmt_to_eh_lp (gimple t, int num) | |
117 { | 85 { |
118 add_stmt_to_eh_lp_fn (cfun, t, num); | 86 add_stmt_to_eh_lp_fn (cfun, t, num); |
119 } | 87 } |
120 | 88 |
121 /* Add statement T to the single EH landing pad in REGION. */ | 89 /* Add statement T to the single EH landing pad in REGION. */ |
122 | 90 |
123 static void | 91 static void |
124 record_stmt_eh_region (eh_region region, gimple t) | 92 record_stmt_eh_region (eh_region region, gimple *t) |
125 { | 93 { |
126 if (region == NULL) | 94 if (region == NULL) |
127 return; | 95 return; |
128 if (region->type == ERT_MUST_NOT_THROW) | 96 if (region->type == ERT_MUST_NOT_THROW) |
129 add_stmt_to_eh_lp_fn (cfun, t, -region->index); | 97 add_stmt_to_eh_lp_fn (cfun, t, -region->index); |
140 | 108 |
141 | 109 |
142 /* Remove statement T in function IFUN from its EH landing pad. */ | 110 /* Remove statement T in function IFUN from its EH landing pad. */ |
143 | 111 |
144 bool | 112 bool |
145 remove_stmt_from_eh_lp_fn (struct function *ifun, gimple t) | 113 remove_stmt_from_eh_lp_fn (struct function *ifun, gimple *t) |
146 { | 114 { |
147 struct throw_stmt_node dummy; | |
148 void **slot; | |
149 | |
150 if (!get_eh_throw_stmt_table (ifun)) | 115 if (!get_eh_throw_stmt_table (ifun)) |
151 return false; | 116 return false; |
152 | 117 |
153 dummy.stmt = t; | 118 if (!get_eh_throw_stmt_table (ifun)->get (t)) |
154 slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy, | 119 return false; |
155 NO_INSERT); | 120 |
156 if (slot) | 121 get_eh_throw_stmt_table (ifun)->remove (t); |
157 { | |
158 htab_clear_slot (get_eh_throw_stmt_table (ifun), slot); | |
159 return true; | 122 return true; |
160 } | |
161 else | |
162 return false; | |
163 } | 123 } |
164 | 124 |
165 | 125 |
166 /* Remove statement T in the current function (cfun) from its | 126 /* Remove statement T in the current function (cfun) from its |
167 EH landing pad. */ | 127 EH landing pad. */ |
168 | 128 |
169 bool | 129 bool |
170 remove_stmt_from_eh_lp (gimple t) | 130 remove_stmt_from_eh_lp (gimple *t) |
171 { | 131 { |
172 return remove_stmt_from_eh_lp_fn (cfun, t); | 132 return remove_stmt_from_eh_lp_fn (cfun, t); |
173 } | 133 } |
174 | 134 |
175 /* Determine if statement T is inside an EH region in function IFUN. | 135 /* Determine if statement T is inside an EH region in function IFUN. |
176 Positive numbers indicate a landing pad index; negative numbers | 136 Positive numbers indicate a landing pad index; negative numbers |
177 indicate a MUST_NOT_THROW region index; zero indicates that the | 137 indicate a MUST_NOT_THROW region index; zero indicates that the |
178 statement is not recorded in the region table. */ | 138 statement is not recorded in the region table. */ |
179 | 139 |
180 int | 140 int |
181 lookup_stmt_eh_lp_fn (struct function *ifun, gimple t) | 141 lookup_stmt_eh_lp_fn (struct function *ifun, gimple *t) |
182 { | 142 { |
183 struct throw_stmt_node *p, n; | |
184 | |
185 if (ifun->eh->throw_stmt_table == NULL) | 143 if (ifun->eh->throw_stmt_table == NULL) |
186 return 0; | 144 return 0; |
187 | 145 |
188 n.stmt = t; | 146 int *lp_nr = ifun->eh->throw_stmt_table->get (t); |
189 p = (struct throw_stmt_node *) htab_find (ifun->eh->throw_stmt_table, &n); | 147 return lp_nr ? *lp_nr : 0; |
190 return p ? p->lp_nr : 0; | |
191 } | 148 } |
192 | 149 |
193 /* Likewise, but always use the current function. */ | 150 /* Likewise, but always use the current function. */ |
194 | 151 |
195 int | 152 int |
196 lookup_stmt_eh_lp (gimple t) | 153 lookup_stmt_eh_lp (gimple *t) |
197 { | 154 { |
198 /* We can get called from initialized data when -fnon-call-exceptions | 155 /* We can get called from initialized data when -fnon-call-exceptions |
199 is on; prevent crash. */ | 156 is on; prevent crash. */ |
200 if (!cfun) | 157 if (!cfun) |
201 return 0; | 158 return 0; |
211 /* When storing a GIMPLE_TRY, we have to record a gimple. However | 168 /* When storing a GIMPLE_TRY, we have to record a gimple. However |
212 when deciding whether a GOTO to a certain LABEL_DECL (which is a | 169 when deciding whether a GOTO to a certain LABEL_DECL (which is a |
213 tree) leaves the TRY block, its necessary to record a tree in | 170 tree) leaves the TRY block, its necessary to record a tree in |
214 this field. Thus a treemple is used. */ | 171 this field. Thus a treemple is used. */ |
215 treemple child; | 172 treemple child; |
216 gimple parent; | 173 gtry *parent; |
217 }; | 174 }; |
218 | 175 |
176 /* Hashtable helpers. */ | |
177 | |
178 struct finally_tree_hasher : free_ptr_hash <finally_tree_node> | |
179 { | |
180 static inline hashval_t hash (const finally_tree_node *); | |
181 static inline bool equal (const finally_tree_node *, | |
182 const finally_tree_node *); | |
183 }; | |
184 | |
185 inline hashval_t | |
186 finally_tree_hasher::hash (const finally_tree_node *v) | |
187 { | |
188 return (intptr_t)v->child.t >> 4; | |
189 } | |
190 | |
191 inline bool | |
192 finally_tree_hasher::equal (const finally_tree_node *v, | |
193 const finally_tree_node *c) | |
194 { | |
195 return v->child.t == c->child.t; | |
196 } | |
197 | |
219 /* Note that this table is *not* marked GTY. It is short-lived. */ | 198 /* Note that this table is *not* marked GTY. It is short-lived. */ |
220 static htab_t finally_tree; | 199 static hash_table<finally_tree_hasher> *finally_tree; |
221 | 200 |
222 static void | 201 static void |
223 record_in_finally_tree (treemple child, gimple parent) | 202 record_in_finally_tree (treemple child, gtry *parent) |
224 { | 203 { |
225 struct finally_tree_node *n; | 204 struct finally_tree_node *n; |
226 void **slot; | 205 finally_tree_node **slot; |
227 | 206 |
228 n = XNEW (struct finally_tree_node); | 207 n = XNEW (struct finally_tree_node); |
229 n->child = child; | 208 n->child = child; |
230 n->parent = parent; | 209 n->parent = parent; |
231 | 210 |
232 slot = htab_find_slot (finally_tree, n, INSERT); | 211 slot = finally_tree->find_slot (n, INSERT); |
233 gcc_assert (!*slot); | 212 gcc_assert (!*slot); |
234 *slot = n; | 213 *slot = n; |
235 } | 214 } |
236 | 215 |
237 static void | 216 static void |
238 collect_finally_tree (gimple stmt, gimple region); | 217 collect_finally_tree (gimple *stmt, gtry *region); |
239 | 218 |
240 /* Go through the gimple sequence. Works with collect_finally_tree to | 219 /* Go through the gimple sequence. Works with collect_finally_tree to |
241 record all GIMPLE_LABEL and GIMPLE_TRY statements. */ | 220 record all GIMPLE_LABEL and GIMPLE_TRY statements. */ |
242 | 221 |
243 static void | 222 static void |
244 collect_finally_tree_1 (gimple_seq seq, gimple region) | 223 collect_finally_tree_1 (gimple_seq seq, gtry *region) |
245 { | 224 { |
246 gimple_stmt_iterator gsi; | 225 gimple_stmt_iterator gsi; |
247 | 226 |
248 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi)) | 227 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi)) |
249 collect_finally_tree (gsi_stmt (gsi), region); | 228 collect_finally_tree (gsi_stmt (gsi), region); |
250 } | 229 } |
251 | 230 |
252 static void | 231 static void |
253 collect_finally_tree (gimple stmt, gimple region) | 232 collect_finally_tree (gimple *stmt, gtry *region) |
254 { | 233 { |
255 treemple temp; | 234 treemple temp; |
256 | 235 |
257 switch (gimple_code (stmt)) | 236 switch (gimple_code (stmt)) |
258 { | 237 { |
259 case GIMPLE_LABEL: | 238 case GIMPLE_LABEL: |
260 temp.t = gimple_label_label (stmt); | 239 temp.t = gimple_label_label (as_a <glabel *> (stmt)); |
261 record_in_finally_tree (temp, region); | 240 record_in_finally_tree (temp, region); |
262 break; | 241 break; |
263 | 242 |
264 case GIMPLE_TRY: | 243 case GIMPLE_TRY: |
265 if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY) | 244 if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY) |
266 { | 245 { |
267 temp.g = stmt; | 246 temp.g = stmt; |
268 record_in_finally_tree (temp, region); | 247 record_in_finally_tree (temp, region); |
269 collect_finally_tree_1 (gimple_try_eval (stmt), stmt); | 248 collect_finally_tree_1 (gimple_try_eval (stmt), |
249 as_a <gtry *> (stmt)); | |
270 collect_finally_tree_1 (gimple_try_cleanup (stmt), region); | 250 collect_finally_tree_1 (gimple_try_cleanup (stmt), region); |
271 } | 251 } |
272 else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH) | 252 else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH) |
273 { | 253 { |
274 collect_finally_tree_1 (gimple_try_eval (stmt), region); | 254 collect_finally_tree_1 (gimple_try_eval (stmt), region); |
275 collect_finally_tree_1 (gimple_try_cleanup (stmt), region); | 255 collect_finally_tree_1 (gimple_try_cleanup (stmt), region); |
276 } | 256 } |
277 break; | 257 break; |
278 | 258 |
279 case GIMPLE_CATCH: | 259 case GIMPLE_CATCH: |
280 collect_finally_tree_1 (gimple_catch_handler (stmt), region); | 260 collect_finally_tree_1 (gimple_catch_handler ( |
261 as_a <gcatch *> (stmt)), | |
262 region); | |
281 break; | 263 break; |
282 | 264 |
283 case GIMPLE_EH_FILTER: | 265 case GIMPLE_EH_FILTER: |
284 collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region); | 266 collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region); |
267 break; | |
268 | |
269 case GIMPLE_EH_ELSE: | |
270 { | |
271 geh_else *eh_else_stmt = as_a <geh_else *> (stmt); | |
272 collect_finally_tree_1 (gimple_eh_else_n_body (eh_else_stmt), region); | |
273 collect_finally_tree_1 (gimple_eh_else_e_body (eh_else_stmt), region); | |
274 } | |
285 break; | 275 break; |
286 | 276 |
287 default: | 277 default: |
288 /* A type, a decl, or some kind of statement that we're not | 278 /* A type, a decl, or some kind of statement that we're not |
289 interested in. Don't walk them. */ | 279 interested in. Don't walk them. */ |
294 | 284 |
295 /* Use the finally tree to determine if a jump from START to TARGET | 285 /* Use the finally tree to determine if a jump from START to TARGET |
296 would leave the try_finally node that START lives in. */ | 286 would leave the try_finally node that START lives in. */ |
297 | 287 |
298 static bool | 288 static bool |
299 outside_finally_tree (treemple start, gimple target) | 289 outside_finally_tree (treemple start, gimple *target) |
300 { | 290 { |
301 struct finally_tree_node n, *p; | 291 struct finally_tree_node n, *p; |
302 | 292 |
303 do | 293 do |
304 { | 294 { |
305 n.child = start; | 295 n.child = start; |
306 p = (struct finally_tree_node *) htab_find (finally_tree, &n); | 296 p = finally_tree->find (&n); |
307 if (!p) | 297 if (!p) |
308 return true; | 298 return true; |
309 start.g = p->parent; | 299 start.g = p->parent; |
310 } | 300 } |
311 while (start.g != target); | 301 while (start.g != target); |
324 | 314 |
325 /* Record whether an EH region contains something that can throw, | 315 /* Record whether an EH region contains something that can throw, |
326 indexed by EH region number. */ | 316 indexed by EH region number. */ |
327 static bitmap eh_region_may_contain_throw_map; | 317 static bitmap eh_region_may_contain_throw_map; |
328 | 318 |
329 /* The GOTO_QUEUE is is an array of GIMPLE_GOTO and GIMPLE_RETURN | 319 /* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN |
330 statements that are seen to escape this GIMPLE_TRY_FINALLY node. | 320 statements that are seen to escape this GIMPLE_TRY_FINALLY node. |
331 The idea is to record a gimple statement for everything except for | 321 The idea is to record a gimple statement for everything except for |
332 the conditionals, which get their labels recorded. Since labels are | 322 the conditionals, which get their labels recorded. Since labels are |
333 of type 'tree', we need this node to store both gimple and tree | 323 of type 'tree', we need this node to store both gimple and tree |
334 objects. REPL_STMT is the sequence used to replace the goto/return | 324 objects. REPL_STMT is the sequence used to replace the goto/return |
336 the return/goto to jump to the original destination. */ | 326 the return/goto to jump to the original destination. */ |
337 | 327 |
338 struct goto_queue_node | 328 struct goto_queue_node |
339 { | 329 { |
340 treemple stmt; | 330 treemple stmt; |
331 location_t location; | |
341 gimple_seq repl_stmt; | 332 gimple_seq repl_stmt; |
342 gimple cont_stmt; | 333 gimple *cont_stmt; |
343 int index; | 334 int index; |
344 /* This is used when index >= 0 to indicate that stmt is a label (as | 335 /* This is used when index >= 0 to indicate that stmt is a label (as |
345 opposed to a goto stmt). */ | 336 opposed to a goto stmt). */ |
346 int is_label; | 337 int is_label; |
347 }; | 338 }; |
370 { | 361 { |
371 /* Pointer to the GIMPLE_TRY_FINALLY node under discussion. The | 362 /* Pointer to the GIMPLE_TRY_FINALLY node under discussion. The |
372 try_finally_expr is the original GIMPLE_TRY_FINALLY. We need to retain | 363 try_finally_expr is the original GIMPLE_TRY_FINALLY. We need to retain |
373 this so that outside_finally_tree can reliably reference the tree used | 364 this so that outside_finally_tree can reliably reference the tree used |
374 in the collect_finally_tree data structures. */ | 365 in the collect_finally_tree data structures. */ |
375 gimple try_finally_expr; | 366 gtry *try_finally_expr; |
376 gimple top_p; | 367 gtry *top_p; |
377 | 368 |
378 /* While lowering a top_p usually it is expanded into multiple statements, | 369 /* While lowering a top_p usually it is expanded into multiple statements, |
379 thus we need the following field to store them. */ | 370 thus we need the following field to store them. */ |
380 gimple_seq top_p_seq; | 371 gimple_seq top_p_seq; |
381 | 372 |
389 struct goto_queue_node *goto_queue; | 380 struct goto_queue_node *goto_queue; |
390 size_t goto_queue_size; | 381 size_t goto_queue_size; |
391 size_t goto_queue_active; | 382 size_t goto_queue_active; |
392 | 383 |
393 /* Pointer map to help in searching goto_queue when it is large. */ | 384 /* Pointer map to help in searching goto_queue when it is large. */ |
394 struct pointer_map_t *goto_queue_map; | 385 hash_map<gimple *, goto_queue_node *> *goto_queue_map; |
395 | 386 |
396 /* The set of unique labels seen as entries in the goto queue. */ | 387 /* The set of unique labels seen as entries in the goto queue. */ |
397 VEC(tree,heap) *dest_array; | 388 vec<tree> dest_array; |
398 | 389 |
399 /* A label to be added at the end of the completed transformed | 390 /* A label to be added at the end of the completed transformed |
400 sequence. It will be set if may_fallthru was true *at one time*, | 391 sequence. It will be set if may_fallthru was true *at one time*, |
401 though subsequent transformations may have cleared that flag. */ | 392 though subsequent transformations may have cleared that flag. */ |
402 tree fallthru_label; | 393 tree fallthru_label; |
411 /* True if the finally block can receive an exception edge. | 402 /* True if the finally block can receive an exception edge. |
412 Cleared if the exception case is handled by code duplication. */ | 403 Cleared if the exception case is handled by code duplication. */ |
413 bool may_throw; | 404 bool may_throw; |
414 }; | 405 }; |
415 | 406 |
416 static gimple_seq lower_eh_must_not_throw (struct leh_state *, gimple); | 407 static gimple_seq lower_eh_must_not_throw (struct leh_state *, gtry *); |
417 | 408 |
418 /* Search for STMT in the goto queue. Return the replacement, | 409 /* Search for STMT in the goto queue. Return the replacement, |
419 or null if the statement isn't in the queue. */ | 410 or null if the statement isn't in the queue. */ |
420 | 411 |
421 #define LARGE_GOTO_QUEUE 20 | 412 #define LARGE_GOTO_QUEUE 20 |
422 | 413 |
423 static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq seq); | 414 static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq *seq); |
424 | 415 |
425 static gimple_seq | 416 static gimple_seq |
426 find_goto_replacement (struct leh_tf_state *tf, treemple stmt) | 417 find_goto_replacement (struct leh_tf_state *tf, treemple stmt) |
427 { | 418 { |
428 unsigned int i; | 419 unsigned int i; |
429 void **slot; | |
430 | 420 |
431 if (tf->goto_queue_active < LARGE_GOTO_QUEUE) | 421 if (tf->goto_queue_active < LARGE_GOTO_QUEUE) |
432 { | 422 { |
433 for (i = 0; i < tf->goto_queue_active; i++) | 423 for (i = 0; i < tf->goto_queue_active; i++) |
434 if ( tf->goto_queue[i].stmt.g == stmt.g) | 424 if ( tf->goto_queue[i].stmt.g == stmt.g) |
439 /* If we have a large number of entries in the goto_queue, create a | 429 /* If we have a large number of entries in the goto_queue, create a |
440 pointer map and use that for searching. */ | 430 pointer map and use that for searching. */ |
441 | 431 |
442 if (!tf->goto_queue_map) | 432 if (!tf->goto_queue_map) |
443 { | 433 { |
444 tf->goto_queue_map = pointer_map_create (); | 434 tf->goto_queue_map = new hash_map<gimple *, goto_queue_node *>; |
445 for (i = 0; i < tf->goto_queue_active; i++) | 435 for (i = 0; i < tf->goto_queue_active; i++) |
446 { | 436 { |
447 slot = pointer_map_insert (tf->goto_queue_map, | 437 bool existed = tf->goto_queue_map->put (tf->goto_queue[i].stmt.g, |
448 tf->goto_queue[i].stmt.g); | 438 &tf->goto_queue[i]); |
449 gcc_assert (*slot == NULL); | 439 gcc_assert (!existed); |
450 *slot = &tf->goto_queue[i]; | 440 } |
451 } | 441 } |
452 } | 442 |
453 | 443 goto_queue_node **slot = tf->goto_queue_map->get (stmt.g); |
454 slot = pointer_map_contains (tf->goto_queue_map, stmt.g); | |
455 if (slot != NULL) | 444 if (slot != NULL) |
456 return (((struct goto_queue_node *) *slot)->repl_stmt); | 445 return ((*slot)->repl_stmt); |
457 | 446 |
458 return NULL; | 447 return NULL; |
459 } | 448 } |
460 | 449 |
461 /* A subroutine of replace_goto_queue_1. Handles the sub-clauses of a | 450 /* A subroutine of replace_goto_queue_1. Handles the sub-clauses of a |
493 } | 482 } |
494 | 483 |
495 /* The real work of replace_goto_queue. Returns with TSI updated to | 484 /* The real work of replace_goto_queue. Returns with TSI updated to |
496 point to the next statement. */ | 485 point to the next statement. */ |
497 | 486 |
498 static void replace_goto_queue_stmt_list (gimple_seq, struct leh_tf_state *); | 487 static void replace_goto_queue_stmt_list (gimple_seq *, struct leh_tf_state *); |
499 | 488 |
500 static void | 489 static void |
501 replace_goto_queue_1 (gimple stmt, struct leh_tf_state *tf, | 490 replace_goto_queue_1 (gimple *stmt, struct leh_tf_state *tf, |
502 gimple_stmt_iterator *gsi) | 491 gimple_stmt_iterator *gsi) |
503 { | 492 { |
504 gimple_seq seq; | 493 gimple_seq seq; |
505 treemple temp; | 494 treemple temp; |
506 temp.g = NULL; | 495 temp.g = NULL; |
523 replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi); | 512 replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi); |
524 replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi); | 513 replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi); |
525 break; | 514 break; |
526 | 515 |
527 case GIMPLE_TRY: | 516 case GIMPLE_TRY: |
528 replace_goto_queue_stmt_list (gimple_try_eval (stmt), tf); | 517 replace_goto_queue_stmt_list (gimple_try_eval_ptr (stmt), tf); |
529 replace_goto_queue_stmt_list (gimple_try_cleanup (stmt), tf); | 518 replace_goto_queue_stmt_list (gimple_try_cleanup_ptr (stmt), tf); |
530 break; | 519 break; |
531 case GIMPLE_CATCH: | 520 case GIMPLE_CATCH: |
532 replace_goto_queue_stmt_list (gimple_catch_handler (stmt), tf); | 521 replace_goto_queue_stmt_list (gimple_catch_handler_ptr ( |
522 as_a <gcatch *> (stmt)), | |
523 tf); | |
533 break; | 524 break; |
534 case GIMPLE_EH_FILTER: | 525 case GIMPLE_EH_FILTER: |
535 replace_goto_queue_stmt_list (gimple_eh_filter_failure (stmt), tf); | 526 replace_goto_queue_stmt_list (gimple_eh_filter_failure_ptr (stmt), tf); |
527 break; | |
528 case GIMPLE_EH_ELSE: | |
529 { | |
530 geh_else *eh_else_stmt = as_a <geh_else *> (stmt); | |
531 replace_goto_queue_stmt_list (gimple_eh_else_n_body_ptr (eh_else_stmt), | |
532 tf); | |
533 replace_goto_queue_stmt_list (gimple_eh_else_e_body_ptr (eh_else_stmt), | |
534 tf); | |
535 } | |
536 break; | 536 break; |
537 | 537 |
538 default: | 538 default: |
539 /* These won't have gotos in them. */ | 539 /* These won't have gotos in them. */ |
540 break; | 540 break; |
544 } | 544 } |
545 | 545 |
546 /* A subroutine of replace_goto_queue. Handles GIMPLE_SEQ. */ | 546 /* A subroutine of replace_goto_queue. Handles GIMPLE_SEQ. */ |
547 | 547 |
548 static void | 548 static void |
549 replace_goto_queue_stmt_list (gimple_seq seq, struct leh_tf_state *tf) | 549 replace_goto_queue_stmt_list (gimple_seq *seq, struct leh_tf_state *tf) |
550 { | 550 { |
551 gimple_stmt_iterator gsi = gsi_start (seq); | 551 gimple_stmt_iterator gsi = gsi_start (*seq); |
552 | 552 |
553 while (!gsi_end_p (gsi)) | 553 while (!gsi_end_p (gsi)) |
554 replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi); | 554 replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi); |
555 } | 555 } |
556 | 556 |
559 static void | 559 static void |
560 replace_goto_queue (struct leh_tf_state *tf) | 560 replace_goto_queue (struct leh_tf_state *tf) |
561 { | 561 { |
562 if (tf->goto_queue_active == 0) | 562 if (tf->goto_queue_active == 0) |
563 return; | 563 return; |
564 replace_goto_queue_stmt_list (tf->top_p_seq, tf); | 564 replace_goto_queue_stmt_list (&tf->top_p_seq, tf); |
565 replace_goto_queue_stmt_list (eh_seq, tf); | 565 replace_goto_queue_stmt_list (&eh_seq, tf); |
566 } | 566 } |
567 | 567 |
568 /* Add a new record to the goto queue contained in TF. NEW_STMT is the | 568 /* Add a new record to the goto queue contained in TF. NEW_STMT is the |
569 data to be added, IS_LABEL indicates whether NEW_STMT is a label or | 569 data to be added, IS_LABEL indicates whether NEW_STMT is a label or |
570 a gimple return. */ | 570 a gimple return. */ |
571 | 571 |
572 static void | 572 static void |
573 record_in_goto_queue (struct leh_tf_state *tf, | 573 record_in_goto_queue (struct leh_tf_state *tf, |
574 treemple new_stmt, | 574 treemple new_stmt, |
575 int index, | 575 int index, |
576 bool is_label) | 576 bool is_label, |
577 location_t location) | |
577 { | 578 { |
578 size_t active, size; | 579 size_t active, size; |
579 struct goto_queue_node *q; | 580 struct goto_queue_node *q; |
580 | 581 |
581 gcc_assert (!tf->goto_queue_map); | 582 gcc_assert (!tf->goto_queue_map); |
594 tf->goto_queue_active = active + 1; | 595 tf->goto_queue_active = active + 1; |
595 | 596 |
596 memset (q, 0, sizeof (*q)); | 597 memset (q, 0, sizeof (*q)); |
597 q->stmt = new_stmt; | 598 q->stmt = new_stmt; |
598 q->index = index; | 599 q->index = index; |
600 q->location = location; | |
599 q->is_label = is_label; | 601 q->is_label = is_label; |
600 } | 602 } |
601 | 603 |
602 /* Record the LABEL label in the goto queue contained in TF. | 604 /* Record the LABEL label in the goto queue contained in TF. |
603 TF is not null. */ | 605 TF is not null. */ |
604 | 606 |
605 static void | 607 static void |
606 record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label) | 608 record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label, |
609 location_t location) | |
607 { | 610 { |
608 int index; | 611 int index; |
609 treemple temp, new_stmt; | 612 treemple temp, new_stmt; |
610 | 613 |
611 if (!label) | 614 if (!label) |
620 /* No need to record gotos that don't leave the try block. */ | 623 /* No need to record gotos that don't leave the try block. */ |
621 temp.t = label; | 624 temp.t = label; |
622 if (!outside_finally_tree (temp, tf->try_finally_expr)) | 625 if (!outside_finally_tree (temp, tf->try_finally_expr)) |
623 return; | 626 return; |
624 | 627 |
625 if (! tf->dest_array) | 628 if (! tf->dest_array.exists ()) |
626 { | 629 { |
627 tf->dest_array = VEC_alloc (tree, heap, 10); | 630 tf->dest_array.create (10); |
628 VEC_quick_push (tree, tf->dest_array, label); | 631 tf->dest_array.quick_push (label); |
629 index = 0; | 632 index = 0; |
630 } | 633 } |
631 else | 634 else |
632 { | 635 { |
633 int n = VEC_length (tree, tf->dest_array); | 636 int n = tf->dest_array.length (); |
634 for (index = 0; index < n; ++index) | 637 for (index = 0; index < n; ++index) |
635 if (VEC_index (tree, tf->dest_array, index) == label) | 638 if (tf->dest_array[index] == label) |
636 break; | 639 break; |
637 if (index == n) | 640 if (index == n) |
638 VEC_safe_push (tree, heap, tf->dest_array, label); | 641 tf->dest_array.safe_push (label); |
639 } | 642 } |
640 | 643 |
641 /* In the case of a GOTO we want to record the destination label, | 644 /* In the case of a GOTO we want to record the destination label, |
642 since with a GIMPLE_COND we have an easy access to the then/else | 645 since with a GIMPLE_COND we have an easy access to the then/else |
643 labels. */ | 646 labels. */ |
644 new_stmt = stmt; | 647 new_stmt = stmt; |
645 record_in_goto_queue (tf, new_stmt, index, true); | 648 record_in_goto_queue (tf, new_stmt, index, true, location); |
646 } | 649 } |
647 | 650 |
648 /* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally | 651 /* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally |
649 node, and if so record that fact in the goto queue associated with that | 652 node, and if so record that fact in the goto queue associated with that |
650 try_finally node. */ | 653 try_finally node. */ |
651 | 654 |
652 static void | 655 static void |
653 maybe_record_in_goto_queue (struct leh_state *state, gimple stmt) | 656 maybe_record_in_goto_queue (struct leh_state *state, gimple *stmt) |
654 { | 657 { |
655 struct leh_tf_state *tf = state->tf; | 658 struct leh_tf_state *tf = state->tf; |
656 treemple new_stmt; | 659 treemple new_stmt; |
657 | 660 |
658 if (!tf) | 661 if (!tf) |
659 return; | 662 return; |
660 | 663 |
661 switch (gimple_code (stmt)) | 664 switch (gimple_code (stmt)) |
662 { | 665 { |
663 case GIMPLE_COND: | 666 case GIMPLE_COND: |
664 new_stmt.tp = gimple_op_ptr (stmt, 2); | 667 { |
665 record_in_goto_queue_label (tf, new_stmt, gimple_cond_true_label (stmt)); | 668 gcond *cond_stmt = as_a <gcond *> (stmt); |
666 new_stmt.tp = gimple_op_ptr (stmt, 3); | 669 new_stmt.tp = gimple_op_ptr (cond_stmt, 2); |
667 record_in_goto_queue_label (tf, new_stmt, gimple_cond_false_label (stmt)); | 670 record_in_goto_queue_label (tf, new_stmt, |
671 gimple_cond_true_label (cond_stmt), | |
672 EXPR_LOCATION (*new_stmt.tp)); | |
673 new_stmt.tp = gimple_op_ptr (cond_stmt, 3); | |
674 record_in_goto_queue_label (tf, new_stmt, | |
675 gimple_cond_false_label (cond_stmt), | |
676 EXPR_LOCATION (*new_stmt.tp)); | |
677 } | |
668 break; | 678 break; |
669 case GIMPLE_GOTO: | 679 case GIMPLE_GOTO: |
670 new_stmt.g = stmt; | 680 new_stmt.g = stmt; |
671 record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt)); | 681 record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt), |
682 gimple_location (stmt)); | |
672 break; | 683 break; |
673 | 684 |
674 case GIMPLE_RETURN: | 685 case GIMPLE_RETURN: |
675 tf->may_return = true; | 686 tf->may_return = true; |
676 new_stmt.g = stmt; | 687 new_stmt.g = stmt; |
677 record_in_goto_queue (tf, new_stmt, -1, false); | 688 record_in_goto_queue (tf, new_stmt, -1, false, gimple_location (stmt)); |
678 break; | 689 break; |
679 | 690 |
680 default: | 691 default: |
681 gcc_unreachable (); | 692 gcc_unreachable (); |
682 } | 693 } |
683 } | 694 } |
684 | 695 |
685 | 696 |
686 #ifdef ENABLE_CHECKING | 697 #if CHECKING_P |
687 /* We do not process GIMPLE_SWITCHes for now. As long as the original source | 698 /* We do not process GIMPLE_SWITCHes for now. As long as the original source |
688 was in fact structured, and we've not yet done jump threading, then none | 699 was in fact structured, and we've not yet done jump threading, then none |
689 of the labels will leave outer GIMPLE_TRY_FINALLY nodes. Verify this. */ | 700 of the labels will leave outer GIMPLE_TRY_FINALLY nodes. Verify this. */ |
690 | 701 |
691 static void | 702 static void |
692 verify_norecord_switch_expr (struct leh_state *state, gimple switch_expr) | 703 verify_norecord_switch_expr (struct leh_state *state, |
704 gswitch *switch_expr) | |
693 { | 705 { |
694 struct leh_tf_state *tf = state->tf; | 706 struct leh_tf_state *tf = state->tf; |
695 size_t i, n; | 707 size_t i, n; |
696 | 708 |
697 if (!tf) | 709 if (!tf) |
709 } | 721 } |
710 #else | 722 #else |
711 #define verify_norecord_switch_expr(state, switch_expr) | 723 #define verify_norecord_switch_expr(state, switch_expr) |
712 #endif | 724 #endif |
713 | 725 |
714 /* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB. Place in CONT_P | 726 /* Redirect a RETURN_EXPR pointed to by Q to FINLAB. If MOD is |
715 whatever is needed to finish the return. If MOD is non-null, insert it | 727 non-null, insert it before the new branch. */ |
716 before the new branch. RETURN_VALUE_P is a cache containing a temporary | |
717 variable to be used in manipulating the value returned from the function. */ | |
718 | 728 |
719 static void | 729 static void |
720 do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod, | 730 do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod) |
721 tree *return_value_p) | 731 { |
722 { | 732 gimple *x; |
723 tree ret_expr; | 733 |
724 gimple x; | 734 /* In the case of a return, the queue node must be a gimple statement. */ |
725 | |
726 /* In the case of a return, the queue node must be a gimple statement. */ | |
727 gcc_assert (!q->is_label); | 735 gcc_assert (!q->is_label); |
728 | 736 |
729 ret_expr = gimple_return_retval (q->stmt.g); | 737 /* Note that the return value may have already been computed, e.g., |
730 | 738 |
731 if (ret_expr) | 739 int x; |
732 { | 740 int foo (void) |
733 if (!*return_value_p) | 741 { |
734 *return_value_p = ret_expr; | 742 x = 0; |
735 else | 743 try { |
736 gcc_assert (*return_value_p == ret_expr); | 744 return x; |
737 q->cont_stmt = q->stmt.g; | 745 } finally { |
738 /* The nasty part about redirecting the return value is that the | 746 x++; |
739 return value itself is to be computed before the FINALLY block | 747 } |
740 is executed. e.g. | 748 } |
741 | 749 |
742 int x; | 750 should return 0, not 1. We don't have to do anything to make |
743 int foo (void) | 751 this happens because the return value has been placed in the |
744 { | 752 RESULT_DECL already. */ |
745 x = 0; | 753 |
746 try { | 754 q->cont_stmt = q->stmt.g; |
747 return x; | |
748 } finally { | |
749 x++; | |
750 } | |
751 } | |
752 | |
753 should return 0, not 1. Arrange for this to happen by copying | |
754 computed the return value into a local temporary. This also | |
755 allows us to redirect multiple return statements through the | |
756 same destination block; whether this is a net win or not really | |
757 depends, I guess, but it does make generation of the switch in | |
758 lower_try_finally_switch easier. */ | |
759 | |
760 if (TREE_CODE (ret_expr) == RESULT_DECL) | |
761 { | |
762 if (!*return_value_p) | |
763 *return_value_p = ret_expr; | |
764 else | |
765 gcc_assert (*return_value_p == ret_expr); | |
766 q->cont_stmt = q->stmt.g; | |
767 } | |
768 else | |
769 gcc_unreachable (); | |
770 } | |
771 else | |
772 /* If we don't return a value, all return statements are the same. */ | |
773 q->cont_stmt = q->stmt.g; | |
774 | |
775 if (!q->repl_stmt) | |
776 q->repl_stmt = gimple_seq_alloc (); | |
777 | 755 |
778 if (mod) | 756 if (mod) |
779 gimple_seq_add_seq (&q->repl_stmt, mod); | 757 gimple_seq_add_seq (&q->repl_stmt, mod); |
780 | 758 |
781 x = gimple_build_goto (finlab); | 759 x = gimple_build_goto (finlab); |
760 gimple_set_location (x, q->location); | |
782 gimple_seq_add_stmt (&q->repl_stmt, x); | 761 gimple_seq_add_stmt (&q->repl_stmt, x); |
783 } | 762 } |
784 | 763 |
785 /* Similar, but easier, for GIMPLE_GOTO. */ | 764 /* Similar, but easier, for GIMPLE_GOTO. */ |
786 | 765 |
787 static void | 766 static void |
788 do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod, | 767 do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod, |
789 struct leh_tf_state *tf) | 768 struct leh_tf_state *tf) |
790 { | 769 { |
791 gimple x; | 770 ggoto *x; |
792 | 771 |
793 gcc_assert (q->is_label); | 772 gcc_assert (q->is_label); |
794 if (!q->repl_stmt) | 773 |
795 q->repl_stmt = gimple_seq_alloc (); | 774 q->cont_stmt = gimple_build_goto (tf->dest_array[q->index]); |
796 | |
797 q->cont_stmt = gimple_build_goto (VEC_index (tree, tf->dest_array, q->index)); | |
798 | 775 |
799 if (mod) | 776 if (mod) |
800 gimple_seq_add_seq (&q->repl_stmt, mod); | 777 gimple_seq_add_seq (&q->repl_stmt, mod); |
801 | 778 |
802 x = gimple_build_goto (finlab); | 779 x = gimple_build_goto (finlab); |
780 gimple_set_location (x, q->location); | |
803 gimple_seq_add_stmt (&q->repl_stmt, x); | 781 gimple_seq_add_stmt (&q->repl_stmt, x); |
804 } | 782 } |
805 | 783 |
806 /* Emit a standard landing pad sequence into SEQ for REGION. */ | 784 /* Emit a standard landing pad sequence into SEQ for REGION. */ |
807 | 785 |
808 static void | 786 static void |
809 emit_post_landing_pad (gimple_seq *seq, eh_region region) | 787 emit_post_landing_pad (gimple_seq *seq, eh_region region) |
810 { | 788 { |
811 eh_landing_pad lp = region->landing_pads; | 789 eh_landing_pad lp = region->landing_pads; |
812 gimple x; | 790 glabel *x; |
813 | 791 |
814 if (lp == NULL) | 792 if (lp == NULL) |
815 lp = gen_eh_landing_pad (region); | 793 lp = gen_eh_landing_pad (region); |
816 | 794 |
817 lp->post_landing_pad = create_artificial_label (UNKNOWN_LOCATION); | 795 lp->post_landing_pad = create_artificial_label (UNKNOWN_LOCATION); |
824 /* Emit a RESX statement into SEQ for REGION. */ | 802 /* Emit a RESX statement into SEQ for REGION. */ |
825 | 803 |
826 static void | 804 static void |
827 emit_resx (gimple_seq *seq, eh_region region) | 805 emit_resx (gimple_seq *seq, eh_region region) |
828 { | 806 { |
829 gimple x = gimple_build_resx (region->index); | 807 gresx *x = gimple_build_resx (region->index); |
830 gimple_seq_add_stmt (seq, x); | 808 gimple_seq_add_stmt (seq, x); |
831 if (region->outer) | 809 if (region->outer) |
832 record_stmt_eh_region (region->outer, x); | 810 record_stmt_eh_region (region->outer, x); |
833 } | 811 } |
834 | 812 |
835 /* Emit an EH_DISPATCH statement into SEQ for REGION. */ | 813 /* Emit an EH_DISPATCH statement into SEQ for REGION. */ |
836 | 814 |
837 static void | 815 static void |
838 emit_eh_dispatch (gimple_seq *seq, eh_region region) | 816 emit_eh_dispatch (gimple_seq *seq, eh_region region) |
839 { | 817 { |
840 gimple x = gimple_build_eh_dispatch (region->index); | 818 geh_dispatch *x = gimple_build_eh_dispatch (region->index); |
841 gimple_seq_add_stmt (seq, x); | 819 gimple_seq_add_stmt (seq, x); |
842 } | 820 } |
843 | 821 |
844 /* Note that the current EH region may contain a throw, or a | 822 /* Note that the current EH region may contain a throw, or a |
845 call to a function which itself may contain a throw. */ | 823 call to a function which itself may contain a throw. */ |
847 static void | 825 static void |
848 note_eh_region_may_contain_throw (eh_region region) | 826 note_eh_region_may_contain_throw (eh_region region) |
849 { | 827 { |
850 while (bitmap_set_bit (eh_region_may_contain_throw_map, region->index)) | 828 while (bitmap_set_bit (eh_region_may_contain_throw_map, region->index)) |
851 { | 829 { |
830 if (region->type == ERT_MUST_NOT_THROW) | |
831 break; | |
852 region = region->outer; | 832 region = region->outer; |
853 if (region == NULL) | 833 if (region == NULL) |
854 break; | 834 break; |
855 } | 835 } |
856 } | 836 } |
865 } | 845 } |
866 | 846 |
867 /* We want to transform | 847 /* We want to transform |
868 try { body; } catch { stuff; } | 848 try { body; } catch { stuff; } |
869 to | 849 to |
870 normal_seqence: | 850 normal_sequence: |
871 body; | 851 body; |
872 over: | 852 over: |
873 eh_seqence: | 853 eh_sequence: |
874 landing_pad: | 854 landing_pad: |
875 stuff; | 855 stuff; |
876 goto over; | 856 goto over; |
877 | 857 |
878 TP is a GIMPLE_TRY node. REGION is the region whose post_landing_pad | 858 TP is a GIMPLE_TRY node. REGION is the region whose post_landing_pad |
879 should be placed before the second operand, or NULL. OVER is | 859 should be placed before the second operand, or NULL. OVER is |
880 an existing label that should be put at the exit, or NULL. */ | 860 an existing label that should be put at the exit, or NULL. */ |
881 | 861 |
882 static gimple_seq | 862 static gimple_seq |
883 frob_into_branch_around (gimple tp, eh_region region, tree over) | 863 frob_into_branch_around (gtry *tp, eh_region region, tree over) |
884 { | 864 { |
885 gimple x; | 865 gimple *x; |
886 gimple_seq cleanup, result; | 866 gimple_seq cleanup, result; |
887 location_t loc = gimple_location (tp); | 867 location_t loc = gimple_location (tp); |
888 | 868 |
889 cleanup = gimple_try_cleanup (tp); | 869 cleanup = gimple_try_cleanup (tp); |
890 result = gimple_try_eval (tp); | 870 result = gimple_try_eval (tp); |
895 if (gimple_seq_may_fallthru (cleanup)) | 875 if (gimple_seq_may_fallthru (cleanup)) |
896 { | 876 { |
897 if (!over) | 877 if (!over) |
898 over = create_artificial_label (loc); | 878 over = create_artificial_label (loc); |
899 x = gimple_build_goto (over); | 879 x = gimple_build_goto (over); |
880 gimple_set_location (x, loc); | |
900 gimple_seq_add_stmt (&cleanup, x); | 881 gimple_seq_add_stmt (&cleanup, x); |
901 } | 882 } |
902 gimple_seq_add_seq (&eh_seq, cleanup); | 883 gimple_seq_add_seq (&eh_seq, cleanup); |
903 | 884 |
904 if (over) | 885 if (over) |
911 | 892 |
912 /* A subroutine of lower_try_finally. Duplicate the tree rooted at T. | 893 /* A subroutine of lower_try_finally. Duplicate the tree rooted at T. |
913 Make sure to record all new labels found. */ | 894 Make sure to record all new labels found. */ |
914 | 895 |
915 static gimple_seq | 896 static gimple_seq |
916 lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state) | 897 lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state, |
917 { | 898 location_t loc) |
918 gimple region = NULL; | 899 { |
900 gtry *region = NULL; | |
919 gimple_seq new_seq; | 901 gimple_seq new_seq; |
902 gimple_stmt_iterator gsi; | |
920 | 903 |
921 new_seq = copy_gimple_seq_and_replace_locals (seq); | 904 new_seq = copy_gimple_seq_and_replace_locals (seq); |
905 | |
906 for (gsi = gsi_start (new_seq); !gsi_end_p (gsi); gsi_next (&gsi)) | |
907 { | |
908 gimple *stmt = gsi_stmt (gsi); | |
909 /* We duplicate __builtin_stack_restore at -O0 in the hope of eliminating | |
910 it on the EH paths. When it is not eliminated, make it transparent in | |
911 the debug info. */ | |
912 if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE)) | |
913 gimple_set_location (stmt, UNKNOWN_LOCATION); | |
914 else if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION) | |
915 { | |
916 tree block = gimple_block (stmt); | |
917 gimple_set_location (stmt, loc); | |
918 gimple_set_block (stmt, block); | |
919 } | |
920 } | |
922 | 921 |
923 if (outer_state->tf) | 922 if (outer_state->tf) |
924 region = outer_state->tf->try_finally_expr; | 923 region = outer_state->tf->try_finally_expr; |
925 collect_finally_tree_1 (new_seq, region); | 924 collect_finally_tree_1 (new_seq, region); |
926 | 925 |
948 } | 947 } |
949 } | 948 } |
950 return label; | 949 return label; |
951 } | 950 } |
952 | 951 |
952 /* A subroutine of lower_try_finally. If FINALLY consits of a | |
953 GIMPLE_EH_ELSE node, return it. */ | |
954 | |
955 static inline geh_else * | |
956 get_eh_else (gimple_seq finally) | |
957 { | |
958 gimple *x = gimple_seq_first_stmt (finally); | |
959 if (gimple_code (x) == GIMPLE_EH_ELSE) | |
960 { | |
961 gcc_assert (gimple_seq_singleton_p (finally)); | |
962 return as_a <geh_else *> (x); | |
963 } | |
964 return NULL; | |
965 } | |
966 | |
953 /* A subroutine of lower_try_finally. If the eh_protect_cleanup_actions | 967 /* A subroutine of lower_try_finally. If the eh_protect_cleanup_actions |
954 langhook returns non-null, then the language requires that the exception | 968 langhook returns non-null, then the language requires that the exception |
955 path out of a try_finally be treated specially. To wit: the code within | 969 path out of a try_finally be treated specially. To wit: the code within |
956 the finally block may not itself throw an exception. We have two choices | 970 the finally block may not itself throw an exception. We have two choices |
957 here. First we can duplicate the finally block and wrap it in a | 971 here. First we can duplicate the finally block and wrap it in a |
973 static void | 987 static void |
974 honor_protect_cleanup_actions (struct leh_state *outer_state, | 988 honor_protect_cleanup_actions (struct leh_state *outer_state, |
975 struct leh_state *this_state, | 989 struct leh_state *this_state, |
976 struct leh_tf_state *tf) | 990 struct leh_tf_state *tf) |
977 { | 991 { |
978 tree protect_cleanup_actions; | 992 gimple_seq finally = gimple_try_cleanup (tf->top_p); |
979 gimple_stmt_iterator gsi; | 993 |
980 bool finally_may_fallthru; | 994 /* EH_ELSE doesn't come from user code; only compiler generated stuff. |
981 gimple_seq finally; | 995 It does need to be handled here, so as to separate the (different) |
982 gimple x; | 996 EH path from the normal path. But we should not attempt to wrap |
983 | 997 it with a must-not-throw node (which indeed gets in the way). */ |
984 /* First check for nothing to do. */ | 998 if (geh_else *eh_else = get_eh_else (finally)) |
985 if (lang_hooks.eh_protect_cleanup_actions == NULL) | 999 { |
986 return; | 1000 gimple_try_set_cleanup (tf->top_p, gimple_eh_else_n_body (eh_else)); |
987 protect_cleanup_actions = lang_hooks.eh_protect_cleanup_actions (); | 1001 finally = gimple_eh_else_e_body (eh_else); |
988 if (protect_cleanup_actions == NULL) | 1002 |
989 return; | 1003 /* Let the ELSE see the exception that's being processed. */ |
990 | 1004 eh_region save_ehp = this_state->ehp_region; |
991 finally = gimple_try_cleanup (tf->top_p); | 1005 this_state->ehp_region = this_state->cur_region; |
992 finally_may_fallthru = gimple_seq_may_fallthru (finally); | 1006 lower_eh_constructs_1 (this_state, &finally); |
993 | 1007 this_state->ehp_region = save_ehp; |
994 /* Duplicate the FINALLY block. Only need to do this for try-finally, | 1008 } |
995 and not for cleanups. */ | 1009 else |
996 if (this_state) | 1010 { |
997 finally = lower_try_finally_dup_block (finally, outer_state); | 1011 /* First check for nothing to do. */ |
998 | 1012 if (lang_hooks.eh_protect_cleanup_actions == NULL) |
999 /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP | 1013 return; |
1000 set, the handler of the TRY_CATCH_EXPR is another cleanup which ought | 1014 tree actions = lang_hooks.eh_protect_cleanup_actions (); |
1001 to be in an enclosing scope, but needs to be implemented at this level | 1015 if (actions == NULL) |
1002 to avoid a nesting violation (see wrap_temporary_cleanups in | 1016 return; |
1003 cp/decl.c). Since it's logically at an outer level, we should call | 1017 |
1004 terminate before we get to it, so strip it away before adding the | 1018 if (this_state) |
1005 MUST_NOT_THROW filter. */ | 1019 finally = lower_try_finally_dup_block (finally, outer_state, |
1006 gsi = gsi_start (finally); | 1020 gimple_location (tf->try_finally_expr)); |
1007 x = gsi_stmt (gsi); | 1021 |
1008 if (gimple_code (x) == GIMPLE_TRY | 1022 /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP |
1009 && gimple_try_kind (x) == GIMPLE_TRY_CATCH | 1023 set, the handler of the TRY_CATCH_EXPR is another cleanup which ought |
1010 && gimple_try_catch_is_cleanup (x)) | 1024 to be in an enclosing scope, but needs to be implemented at this level |
1011 { | 1025 to avoid a nesting violation (see wrap_temporary_cleanups in |
1012 gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT); | 1026 cp/decl.c). Since it's logically at an outer level, we should call |
1013 gsi_remove (&gsi, false); | 1027 terminate before we get to it, so strip it away before adding the |
1014 } | 1028 MUST_NOT_THROW filter. */ |
1015 | 1029 gimple_stmt_iterator gsi = gsi_start (finally); |
1016 /* Wrap the block with protect_cleanup_actions as the action. */ | 1030 gimple *x = gsi_stmt (gsi); |
1017 x = gimple_build_eh_must_not_throw (protect_cleanup_actions); | 1031 if (gimple_code (x) == GIMPLE_TRY |
1018 x = gimple_build_try (finally, gimple_seq_alloc_with_stmt (x), | 1032 && gimple_try_kind (x) == GIMPLE_TRY_CATCH |
1019 GIMPLE_TRY_CATCH); | 1033 && gimple_try_catch_is_cleanup (x)) |
1020 finally = lower_eh_must_not_throw (outer_state, x); | 1034 { |
1035 gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT); | |
1036 gsi_remove (&gsi, false); | |
1037 } | |
1038 | |
1039 /* Wrap the block with protect_cleanup_actions as the action. */ | |
1040 geh_mnt *eh_mnt = gimple_build_eh_must_not_throw (actions); | |
1041 gtry *try_stmt = gimple_build_try (finally, | |
1042 gimple_seq_alloc_with_stmt (eh_mnt), | |
1043 GIMPLE_TRY_CATCH); | |
1044 finally = lower_eh_must_not_throw (outer_state, try_stmt); | |
1045 } | |
1021 | 1046 |
1022 /* Drop all of this into the exception sequence. */ | 1047 /* Drop all of this into the exception sequence. */ |
1023 emit_post_landing_pad (&eh_seq, tf->region); | 1048 emit_post_landing_pad (&eh_seq, tf->region); |
1024 gimple_seq_add_seq (&eh_seq, finally); | 1049 gimple_seq_add_seq (&eh_seq, finally); |
1025 if (finally_may_fallthru) | 1050 if (gimple_seq_may_fallthru (finally)) |
1026 emit_resx (&eh_seq, tf->region); | 1051 emit_resx (&eh_seq, tf->region); |
1027 | 1052 |
1028 /* Having now been handled, EH isn't to be considered with | 1053 /* Having now been handled, EH isn't to be considered with |
1029 the rest of the outgoing edges. */ | 1054 the rest of the outgoing edges. */ |
1030 tf->may_throw = false; | 1055 tf->may_throw = false; |
1037 | 1062 |
1038 static void | 1063 static void |
1039 lower_try_finally_nofallthru (struct leh_state *state, | 1064 lower_try_finally_nofallthru (struct leh_state *state, |
1040 struct leh_tf_state *tf) | 1065 struct leh_tf_state *tf) |
1041 { | 1066 { |
1042 tree lab, return_val; | 1067 tree lab; |
1043 gimple x; | 1068 gimple *x; |
1069 geh_else *eh_else; | |
1044 gimple_seq finally; | 1070 gimple_seq finally; |
1045 struct goto_queue_node *q, *qe; | 1071 struct goto_queue_node *q, *qe; |
1046 | 1072 |
1047 lab = create_artificial_label (gimple_location (tf->try_finally_expr)); | 1073 lab = create_artificial_label (gimple_location (tf->try_finally_expr)); |
1048 | 1074 |
1051 tf->top_p_seq = gimple_try_eval (tf->top_p); | 1077 tf->top_p_seq = gimple_try_eval (tf->top_p); |
1052 | 1078 |
1053 x = gimple_build_label (lab); | 1079 x = gimple_build_label (lab); |
1054 gimple_seq_add_stmt (&tf->top_p_seq, x); | 1080 gimple_seq_add_stmt (&tf->top_p_seq, x); |
1055 | 1081 |
1056 return_val = NULL; | |
1057 q = tf->goto_queue; | 1082 q = tf->goto_queue; |
1058 qe = q + tf->goto_queue_active; | 1083 qe = q + tf->goto_queue_active; |
1059 for (; q < qe; ++q) | 1084 for (; q < qe; ++q) |
1060 if (q->index < 0) | 1085 if (q->index < 0) |
1061 do_return_redirection (q, lab, NULL, &return_val); | 1086 do_return_redirection (q, lab, NULL); |
1062 else | 1087 else |
1063 do_goto_redirection (q, lab, NULL, tf); | 1088 do_goto_redirection (q, lab, NULL, tf); |
1064 | 1089 |
1065 replace_goto_queue (tf); | 1090 replace_goto_queue (tf); |
1066 | 1091 |
1067 lower_eh_constructs_1 (state, finally); | 1092 /* Emit the finally block into the stream. Lower EH_ELSE at this time. */ |
1068 gimple_seq_add_seq (&tf->top_p_seq, finally); | 1093 eh_else = get_eh_else (finally); |
1069 | 1094 if (eh_else) |
1070 if (tf->may_throw) | 1095 { |
1071 { | 1096 finally = gimple_eh_else_n_body (eh_else); |
1072 emit_post_landing_pad (&eh_seq, tf->region); | 1097 lower_eh_constructs_1 (state, &finally); |
1073 | 1098 gimple_seq_add_seq (&tf->top_p_seq, finally); |
1074 x = gimple_build_goto (lab); | 1099 |
1075 gimple_seq_add_stmt (&eh_seq, x); | 1100 if (tf->may_throw) |
1101 { | |
1102 finally = gimple_eh_else_e_body (eh_else); | |
1103 lower_eh_constructs_1 (state, &finally); | |
1104 | |
1105 emit_post_landing_pad (&eh_seq, tf->region); | |
1106 gimple_seq_add_seq (&eh_seq, finally); | |
1107 } | |
1108 } | |
1109 else | |
1110 { | |
1111 lower_eh_constructs_1 (state, &finally); | |
1112 gimple_seq_add_seq (&tf->top_p_seq, finally); | |
1113 | |
1114 if (tf->may_throw) | |
1115 { | |
1116 emit_post_landing_pad (&eh_seq, tf->region); | |
1117 | |
1118 x = gimple_build_goto (lab); | |
1119 gimple_set_location (x, gimple_location (tf->try_finally_expr)); | |
1120 gimple_seq_add_stmt (&eh_seq, x); | |
1121 } | |
1076 } | 1122 } |
1077 } | 1123 } |
1078 | 1124 |
1079 /* A subroutine of lower_try_finally. We have determined that there is | 1125 /* A subroutine of lower_try_finally. We have determined that there is |
1080 exactly one destination of the finally block. Restructure the | 1126 exactly one destination of the finally block. Restructure the |
1082 | 1128 |
1083 static void | 1129 static void |
1084 lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf) | 1130 lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf) |
1085 { | 1131 { |
1086 struct goto_queue_node *q, *qe; | 1132 struct goto_queue_node *q, *qe; |
1087 gimple x; | 1133 geh_else *eh_else; |
1134 glabel *label_stmt; | |
1135 gimple *x; | |
1088 gimple_seq finally; | 1136 gimple_seq finally; |
1137 gimple_stmt_iterator gsi; | |
1089 tree finally_label; | 1138 tree finally_label; |
1090 location_t loc = gimple_location (tf->try_finally_expr); | 1139 location_t loc = gimple_location (tf->try_finally_expr); |
1091 | 1140 |
1092 finally = gimple_try_cleanup (tf->top_p); | 1141 finally = gimple_try_cleanup (tf->top_p); |
1093 tf->top_p_seq = gimple_try_eval (tf->top_p); | 1142 tf->top_p_seq = gimple_try_eval (tf->top_p); |
1094 | 1143 |
1095 lower_eh_constructs_1 (state, finally); | 1144 /* Since there's only one destination, and the destination edge can only |
1145 either be EH or non-EH, that implies that all of our incoming edges | |
1146 are of the same type. Therefore we can lower EH_ELSE immediately. */ | |
1147 eh_else = get_eh_else (finally); | |
1148 if (eh_else) | |
1149 { | |
1150 if (tf->may_throw) | |
1151 finally = gimple_eh_else_e_body (eh_else); | |
1152 else | |
1153 finally = gimple_eh_else_n_body (eh_else); | |
1154 } | |
1155 | |
1156 lower_eh_constructs_1 (state, &finally); | |
1157 | |
1158 for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi)) | |
1159 { | |
1160 gimple *stmt = gsi_stmt (gsi); | |
1161 if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION) | |
1162 { | |
1163 tree block = gimple_block (stmt); | |
1164 gimple_set_location (stmt, gimple_location (tf->try_finally_expr)); | |
1165 gimple_set_block (stmt, block); | |
1166 } | |
1167 } | |
1096 | 1168 |
1097 if (tf->may_throw) | 1169 if (tf->may_throw) |
1098 { | 1170 { |
1099 /* Only reachable via the exception edge. Add the given label to | 1171 /* Only reachable via the exception edge. Add the given label to |
1100 the head of the FINALLY block. Append a RESX at the end. */ | 1172 the head of the FINALLY block. Append a RESX at the end. */ |
1111 gimple_seq_add_seq (&tf->top_p_seq, finally); | 1183 gimple_seq_add_seq (&tf->top_p_seq, finally); |
1112 return; | 1184 return; |
1113 } | 1185 } |
1114 | 1186 |
1115 finally_label = create_artificial_label (loc); | 1187 finally_label = create_artificial_label (loc); |
1116 x = gimple_build_label (finally_label); | 1188 label_stmt = gimple_build_label (finally_label); |
1117 gimple_seq_add_stmt (&tf->top_p_seq, x); | 1189 gimple_seq_add_stmt (&tf->top_p_seq, label_stmt); |
1118 | 1190 |
1119 gimple_seq_add_seq (&tf->top_p_seq, finally); | 1191 gimple_seq_add_seq (&tf->top_p_seq, finally); |
1120 | 1192 |
1121 q = tf->goto_queue; | 1193 q = tf->goto_queue; |
1122 qe = q + tf->goto_queue_active; | 1194 qe = q + tf->goto_queue_active; |
1123 | 1195 |
1124 if (tf->may_return) | 1196 if (tf->may_return) |
1125 { | 1197 { |
1126 /* Reachable by return expressions only. Redirect them. */ | 1198 /* Reachable by return expressions only. Redirect them. */ |
1127 tree return_val = NULL; | |
1128 for (; q < qe; ++q) | 1199 for (; q < qe; ++q) |
1129 do_return_redirection (q, finally_label, NULL, &return_val); | 1200 do_return_redirection (q, finally_label, NULL); |
1130 replace_goto_queue (tf); | 1201 replace_goto_queue (tf); |
1131 } | 1202 } |
1132 else | 1203 else |
1133 { | 1204 { |
1134 /* Reachable by goto expressions only. Redirect them. */ | 1205 /* Reachable by goto expressions only. Redirect them. */ |
1135 for (; q < qe; ++q) | 1206 for (; q < qe; ++q) |
1136 do_goto_redirection (q, finally_label, NULL, tf); | 1207 do_goto_redirection (q, finally_label, NULL, tf); |
1137 replace_goto_queue (tf); | 1208 replace_goto_queue (tf); |
1138 | 1209 |
1139 if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label) | 1210 if (tf->dest_array[0] == tf->fallthru_label) |
1140 { | 1211 { |
1141 /* Reachable by goto to fallthru label only. Redirect it | 1212 /* Reachable by goto to fallthru label only. Redirect it |
1142 to the new label (already created, sadly), and do not | 1213 to the new label (already created, sadly), and do not |
1143 emit the final branch out, or the fallthru label. */ | 1214 emit the final branch out, or the fallthru label. */ |
1144 tf->fallthru_label = NULL; | 1215 tf->fallthru_label = NULL; |
1161 lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf) | 1232 lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf) |
1162 { | 1233 { |
1163 gimple_seq finally; | 1234 gimple_seq finally; |
1164 gimple_seq new_stmt; | 1235 gimple_seq new_stmt; |
1165 gimple_seq seq; | 1236 gimple_seq seq; |
1166 gimple x; | 1237 gimple *x; |
1238 geh_else *eh_else; | |
1167 tree tmp; | 1239 tree tmp; |
1168 location_t tf_loc = gimple_location (tf->try_finally_expr); | 1240 location_t tf_loc = gimple_location (tf->try_finally_expr); |
1169 | 1241 |
1170 finally = gimple_try_cleanup (tf->top_p); | 1242 finally = gimple_try_cleanup (tf->top_p); |
1243 | |
1244 /* Notice EH_ELSE, and simplify some of the remaining code | |
1245 by considering FINALLY to be the normal return path only. */ | |
1246 eh_else = get_eh_else (finally); | |
1247 if (eh_else) | |
1248 finally = gimple_eh_else_n_body (eh_else); | |
1249 | |
1171 tf->top_p_seq = gimple_try_eval (tf->top_p); | 1250 tf->top_p_seq = gimple_try_eval (tf->top_p); |
1172 new_stmt = NULL; | 1251 new_stmt = NULL; |
1173 | 1252 |
1174 if (tf->may_fallthru) | 1253 if (tf->may_fallthru) |
1175 { | 1254 { |
1176 seq = lower_try_finally_dup_block (finally, state); | 1255 seq = lower_try_finally_dup_block (finally, state, tf_loc); |
1177 lower_eh_constructs_1 (state, seq); | 1256 lower_eh_constructs_1 (state, &seq); |
1178 gimple_seq_add_seq (&new_stmt, seq); | 1257 gimple_seq_add_seq (&new_stmt, seq); |
1179 | 1258 |
1180 tmp = lower_try_finally_fallthru_label (tf); | 1259 tmp = lower_try_finally_fallthru_label (tf); |
1181 x = gimple_build_goto (tmp); | 1260 x = gimple_build_goto (tmp); |
1261 gimple_set_location (x, tf_loc); | |
1182 gimple_seq_add_stmt (&new_stmt, x); | 1262 gimple_seq_add_stmt (&new_stmt, x); |
1183 } | 1263 } |
1184 | 1264 |
1185 if (tf->may_throw) | 1265 if (tf->may_throw) |
1186 { | 1266 { |
1187 seq = lower_try_finally_dup_block (finally, state); | 1267 /* We don't need to copy the EH path of EH_ELSE, |
1188 lower_eh_constructs_1 (state, seq); | 1268 since it is only emitted once. */ |
1269 if (eh_else) | |
1270 seq = gimple_eh_else_e_body (eh_else); | |
1271 else | |
1272 seq = lower_try_finally_dup_block (finally, state, tf_loc); | |
1273 lower_eh_constructs_1 (state, &seq); | |
1189 | 1274 |
1190 emit_post_landing_pad (&eh_seq, tf->region); | 1275 emit_post_landing_pad (&eh_seq, tf->region); |
1191 gimple_seq_add_seq (&eh_seq, seq); | 1276 gimple_seq_add_seq (&eh_seq, seq); |
1192 emit_resx (&eh_seq, tf->region); | 1277 emit_resx (&eh_seq, tf->region); |
1193 } | 1278 } |
1194 | 1279 |
1195 if (tf->goto_queue) | 1280 if (tf->goto_queue) |
1196 { | 1281 { |
1197 struct goto_queue_node *q, *qe; | 1282 struct goto_queue_node *q, *qe; |
1198 tree return_val = NULL; | |
1199 int return_index, index; | 1283 int return_index, index; |
1200 struct labels_s | 1284 struct labels_s |
1201 { | 1285 { |
1202 struct goto_queue_node *q; | 1286 struct goto_queue_node *q; |
1203 tree label; | 1287 tree label; |
1204 } *labels; | 1288 } *labels; |
1205 | 1289 |
1206 return_index = VEC_length (tree, tf->dest_array); | 1290 return_index = tf->dest_array.length (); |
1207 labels = XCNEWVEC (struct labels_s, return_index + 1); | 1291 labels = XCNEWVEC (struct labels_s, return_index + 1); |
1208 | 1292 |
1209 q = tf->goto_queue; | 1293 q = tf->goto_queue; |
1210 qe = q + tf->goto_queue_active; | 1294 qe = q + tf->goto_queue_active; |
1211 for (; q < qe; q++) | 1295 for (; q < qe; q++) |
1226 | 1310 |
1227 lab = labels[index].label | 1311 lab = labels[index].label |
1228 = create_artificial_label (tf_loc); | 1312 = create_artificial_label (tf_loc); |
1229 | 1313 |
1230 if (index == return_index) | 1314 if (index == return_index) |
1231 do_return_redirection (q, lab, NULL, &return_val); | 1315 do_return_redirection (q, lab, NULL); |
1232 else | 1316 else |
1233 do_goto_redirection (q, lab, NULL, tf); | 1317 do_goto_redirection (q, lab, NULL, tf); |
1234 | 1318 |
1235 x = gimple_build_label (lab); | 1319 x = gimple_build_label (lab); |
1236 gimple_seq_add_stmt (&new_stmt, x); | 1320 gimple_seq_add_stmt (&new_stmt, x); |
1237 | 1321 |
1238 seq = lower_try_finally_dup_block (finally, state); | 1322 seq = lower_try_finally_dup_block (finally, state, q->location); |
1239 lower_eh_constructs_1 (state, seq); | 1323 lower_eh_constructs_1 (state, &seq); |
1240 gimple_seq_add_seq (&new_stmt, seq); | 1324 gimple_seq_add_seq (&new_stmt, seq); |
1241 | 1325 |
1242 gimple_seq_add_stmt (&new_stmt, q->cont_stmt); | 1326 gimple_seq_add_stmt (&new_stmt, q->cont_stmt); |
1243 maybe_record_in_goto_queue (state, q->cont_stmt); | 1327 maybe_record_in_goto_queue (state, q->cont_stmt); |
1244 } | 1328 } |
1253 continue; | 1337 continue; |
1254 | 1338 |
1255 lab = labels[index].label; | 1339 lab = labels[index].label; |
1256 | 1340 |
1257 if (index == return_index) | 1341 if (index == return_index) |
1258 do_return_redirection (q, lab, NULL, &return_val); | 1342 do_return_redirection (q, lab, NULL); |
1259 else | 1343 else |
1260 do_goto_redirection (q, lab, NULL, tf); | 1344 do_goto_redirection (q, lab, NULL, tf); |
1261 } | 1345 } |
1262 | 1346 |
1263 replace_goto_queue (tf); | 1347 replace_goto_queue (tf); |
1276 | 1360 |
1277 static void | 1361 static void |
1278 lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf) | 1362 lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf) |
1279 { | 1363 { |
1280 struct goto_queue_node *q, *qe; | 1364 struct goto_queue_node *q, *qe; |
1281 tree return_val = NULL; | |
1282 tree finally_tmp, finally_label; | 1365 tree finally_tmp, finally_label; |
1283 int return_index, eh_index, fallthru_index; | 1366 int return_index, eh_index, fallthru_index; |
1284 int nlabels, ndests, j, last_case_index; | 1367 int nlabels, ndests, j, last_case_index; |
1285 tree last_case; | 1368 tree last_case; |
1286 VEC (tree,heap) *case_label_vec; | 1369 auto_vec<tree> case_label_vec; |
1287 gimple_seq switch_body; | 1370 gimple_seq switch_body = NULL; |
1288 gimple x; | 1371 gimple *x; |
1372 geh_else *eh_else; | |
1289 tree tmp; | 1373 tree tmp; |
1290 gimple switch_stmt; | 1374 gimple *switch_stmt; |
1291 gimple_seq finally; | 1375 gimple_seq finally; |
1292 struct pointer_map_t *cont_map = NULL; | 1376 hash_map<tree, gimple *> *cont_map = NULL; |
1293 /* The location of the TRY_FINALLY stmt. */ | 1377 /* The location of the TRY_FINALLY stmt. */ |
1294 location_t tf_loc = gimple_location (tf->try_finally_expr); | 1378 location_t tf_loc = gimple_location (tf->try_finally_expr); |
1295 /* The location of the finally block. */ | 1379 /* The location of the finally block. */ |
1296 location_t finally_loc; | 1380 location_t finally_loc; |
1297 | 1381 |
1298 switch_body = gimple_seq_alloc (); | 1382 finally = gimple_try_cleanup (tf->top_p); |
1383 eh_else = get_eh_else (finally); | |
1299 | 1384 |
1300 /* Mash the TRY block to the head of the chain. */ | 1385 /* Mash the TRY block to the head of the chain. */ |
1301 finally = gimple_try_cleanup (tf->top_p); | |
1302 tf->top_p_seq = gimple_try_eval (tf->top_p); | 1386 tf->top_p_seq = gimple_try_eval (tf->top_p); |
1303 | 1387 |
1304 /* The location of the finally is either the last stmt in the finally | 1388 /* The location of the finally is either the last stmt in the finally |
1305 block or the location of the TRY_FINALLY itself. */ | 1389 block or the location of the TRY_FINALLY itself. */ |
1306 finally_loc = gimple_seq_last_stmt (tf->top_p_seq) != NULL ? | 1390 x = gimple_seq_last_stmt (finally); |
1307 gimple_location (gimple_seq_last_stmt (tf->top_p_seq)) | 1391 finally_loc = x ? gimple_location (x) : tf_loc; |
1308 : tf_loc; | |
1309 | |
1310 /* Lower the finally block itself. */ | |
1311 lower_eh_constructs_1 (state, finally); | |
1312 | 1392 |
1313 /* Prepare for switch statement generation. */ | 1393 /* Prepare for switch statement generation. */ |
1314 nlabels = VEC_length (tree, tf->dest_array); | 1394 nlabels = tf->dest_array.length (); |
1315 return_index = nlabels; | 1395 return_index = nlabels; |
1316 eh_index = return_index + tf->may_return; | 1396 eh_index = return_index + tf->may_return; |
1317 fallthru_index = eh_index + tf->may_throw; | 1397 fallthru_index = eh_index + (tf->may_throw && !eh_else); |
1318 ndests = fallthru_index + tf->may_fallthru; | 1398 ndests = fallthru_index + tf->may_fallthru; |
1319 | 1399 |
1320 finally_tmp = create_tmp_var (integer_type_node, "finally_tmp"); | 1400 finally_tmp = create_tmp_var (integer_type_node, "finally_tmp"); |
1321 finally_label = create_artificial_label (finally_loc); | 1401 finally_label = create_artificial_label (finally_loc); |
1322 | 1402 |
1323 /* We use VEC_quick_push on case_label_vec throughout this function, | 1403 /* We use vec::quick_push on case_label_vec throughout this function, |
1324 since we know the size in advance and allocate precisely as much | 1404 since we know the size in advance and allocate precisely as much |
1325 space as needed. */ | 1405 space as needed. */ |
1326 case_label_vec = VEC_alloc (tree, heap, ndests); | 1406 case_label_vec.create (ndests); |
1327 last_case = NULL; | 1407 last_case = NULL; |
1328 last_case_index = 0; | 1408 last_case_index = 0; |
1329 | 1409 |
1330 /* Begin inserting code for getting to the finally block. Things | 1410 /* Begin inserting code for getting to the finally block. Things |
1331 are done in this order to correspond to the sequence the code is | 1411 are done in this order to correspond to the sequence the code is |
1332 layed out. */ | 1412 laid out. */ |
1333 | 1413 |
1334 if (tf->may_fallthru) | 1414 if (tf->may_fallthru) |
1335 { | 1415 { |
1336 x = gimple_build_assign (finally_tmp, | 1416 x = gimple_build_assign (finally_tmp, |
1337 build_int_cst (NULL, fallthru_index)); | 1417 build_int_cst (integer_type_node, |
1418 fallthru_index)); | |
1419 gimple_set_location (x, finally_loc); | |
1338 gimple_seq_add_stmt (&tf->top_p_seq, x); | 1420 gimple_seq_add_stmt (&tf->top_p_seq, x); |
1339 | 1421 |
1340 last_case = build3 (CASE_LABEL_EXPR, void_type_node, | 1422 tmp = build_int_cst (integer_type_node, fallthru_index); |
1341 build_int_cst (NULL, fallthru_index), | 1423 last_case = build_case_label (tmp, NULL, |
1342 NULL, create_artificial_label (tf_loc)); | 1424 create_artificial_label (finally_loc)); |
1343 VEC_quick_push (tree, case_label_vec, last_case); | 1425 case_label_vec.quick_push (last_case); |
1344 last_case_index++; | 1426 last_case_index++; |
1345 | 1427 |
1346 x = gimple_build_label (CASE_LABEL (last_case)); | 1428 x = gimple_build_label (CASE_LABEL (last_case)); |
1347 gimple_seq_add_stmt (&switch_body, x); | 1429 gimple_seq_add_stmt (&switch_body, x); |
1348 | 1430 |
1349 tmp = lower_try_finally_fallthru_label (tf); | 1431 tmp = lower_try_finally_fallthru_label (tf); |
1350 x = gimple_build_goto (tmp); | 1432 x = gimple_build_goto (tmp); |
1433 gimple_set_location (x, finally_loc); | |
1351 gimple_seq_add_stmt (&switch_body, x); | 1434 gimple_seq_add_stmt (&switch_body, x); |
1352 } | 1435 } |
1353 | 1436 |
1354 if (tf->may_throw) | 1437 /* For EH_ELSE, emit the exception path (plus resx) now, then |
1438 subsequently we only need consider the normal path. */ | |
1439 if (eh_else) | |
1440 { | |
1441 if (tf->may_throw) | |
1442 { | |
1443 finally = gimple_eh_else_e_body (eh_else); | |
1444 lower_eh_constructs_1 (state, &finally); | |
1445 | |
1446 emit_post_landing_pad (&eh_seq, tf->region); | |
1447 gimple_seq_add_seq (&eh_seq, finally); | |
1448 emit_resx (&eh_seq, tf->region); | |
1449 } | |
1450 | |
1451 finally = gimple_eh_else_n_body (eh_else); | |
1452 } | |
1453 else if (tf->may_throw) | |
1355 { | 1454 { |
1356 emit_post_landing_pad (&eh_seq, tf->region); | 1455 emit_post_landing_pad (&eh_seq, tf->region); |
1357 | 1456 |
1358 x = gimple_build_assign (finally_tmp, | 1457 x = gimple_build_assign (finally_tmp, |
1359 build_int_cst (NULL, eh_index)); | 1458 build_int_cst (integer_type_node, eh_index)); |
1360 gimple_seq_add_stmt (&eh_seq, x); | 1459 gimple_seq_add_stmt (&eh_seq, x); |
1361 | 1460 |
1362 x = gimple_build_goto (finally_label); | 1461 x = gimple_build_goto (finally_label); |
1462 gimple_set_location (x, tf_loc); | |
1363 gimple_seq_add_stmt (&eh_seq, x); | 1463 gimple_seq_add_stmt (&eh_seq, x); |
1364 | 1464 |
1365 last_case = build3 (CASE_LABEL_EXPR, void_type_node, | 1465 tmp = build_int_cst (integer_type_node, eh_index); |
1366 build_int_cst (NULL, eh_index), | 1466 last_case = build_case_label (tmp, NULL, |
1367 NULL, create_artificial_label (tf_loc)); | 1467 create_artificial_label (tf_loc)); |
1368 VEC_quick_push (tree, case_label_vec, last_case); | 1468 case_label_vec.quick_push (last_case); |
1369 last_case_index++; | 1469 last_case_index++; |
1370 | 1470 |
1371 x = gimple_build_label (CASE_LABEL (last_case)); | 1471 x = gimple_build_label (CASE_LABEL (last_case)); |
1372 gimple_seq_add_stmt (&eh_seq, x); | 1472 gimple_seq_add_stmt (&eh_seq, x); |
1373 emit_resx (&eh_seq, tf->region); | 1473 emit_resx (&eh_seq, tf->region); |
1374 } | 1474 } |
1375 | 1475 |
1376 x = gimple_build_label (finally_label); | 1476 x = gimple_build_label (finally_label); |
1377 gimple_seq_add_stmt (&tf->top_p_seq, x); | 1477 gimple_seq_add_stmt (&tf->top_p_seq, x); |
1378 | 1478 |
1479 lower_eh_constructs_1 (state, &finally); | |
1379 gimple_seq_add_seq (&tf->top_p_seq, finally); | 1480 gimple_seq_add_seq (&tf->top_p_seq, finally); |
1380 | 1481 |
1381 /* Redirect each incoming goto edge. */ | 1482 /* Redirect each incoming goto edge. */ |
1382 q = tf->goto_queue; | 1483 q = tf->goto_queue; |
1383 qe = q + tf->goto_queue_active; | 1484 qe = q + tf->goto_queue_active; |
1384 j = last_case_index + tf->may_return; | 1485 j = last_case_index + tf->may_return; |
1385 /* Prepare the assignments to finally_tmp that are executed upon the | 1486 /* Prepare the assignments to finally_tmp that are executed upon the |
1386 entrance through a particular edge. */ | 1487 entrance through a particular edge. */ |
1387 for (; q < qe; ++q) | 1488 for (; q < qe; ++q) |
1388 { | 1489 { |
1389 gimple_seq mod; | 1490 gimple_seq mod = NULL; |
1390 int switch_id; | 1491 int switch_id; |
1391 unsigned int case_index; | 1492 unsigned int case_index; |
1392 | 1493 |
1393 mod = gimple_seq_alloc (); | |
1394 | |
1395 if (q->index < 0) | 1494 if (q->index < 0) |
1396 { | 1495 { |
1397 x = gimple_build_assign (finally_tmp, | 1496 x = gimple_build_assign (finally_tmp, |
1398 build_int_cst (NULL, return_index)); | 1497 build_int_cst (integer_type_node, |
1498 return_index)); | |
1399 gimple_seq_add_stmt (&mod, x); | 1499 gimple_seq_add_stmt (&mod, x); |
1400 do_return_redirection (q, finally_label, mod, &return_val); | 1500 do_return_redirection (q, finally_label, mod); |
1401 switch_id = return_index; | 1501 switch_id = return_index; |
1402 } | 1502 } |
1403 else | 1503 else |
1404 { | 1504 { |
1405 x = gimple_build_assign (finally_tmp, | 1505 x = gimple_build_assign (finally_tmp, |
1406 build_int_cst (NULL, q->index)); | 1506 build_int_cst (integer_type_node, q->index)); |
1407 gimple_seq_add_stmt (&mod, x); | 1507 gimple_seq_add_stmt (&mod, x); |
1408 do_goto_redirection (q, finally_label, mod, tf); | 1508 do_goto_redirection (q, finally_label, mod, tf); |
1409 switch_id = q->index; | 1509 switch_id = q->index; |
1410 } | 1510 } |
1411 | 1511 |
1412 case_index = j + q->index; | 1512 case_index = j + q->index; |
1413 if (VEC_length (tree, case_label_vec) <= case_index | 1513 if (case_label_vec.length () <= case_index || !case_label_vec[case_index]) |
1414 || !VEC_index (tree, case_label_vec, case_index)) | |
1415 { | 1514 { |
1416 tree case_lab; | 1515 tree case_lab; |
1417 void **slot; | 1516 tmp = build_int_cst (integer_type_node, switch_id); |
1418 case_lab = build3 (CASE_LABEL_EXPR, void_type_node, | 1517 case_lab = build_case_label (tmp, NULL, |
1419 build_int_cst (NULL, switch_id), | 1518 create_artificial_label (tf_loc)); |
1420 NULL, NULL); | |
1421 /* We store the cont_stmt in the pointer map, so that we can recover | 1519 /* We store the cont_stmt in the pointer map, so that we can recover |
1422 it in the loop below. We don't create the new label while | 1520 it in the loop below. */ |
1423 walking the goto_queue because pointers don't offer a stable | |
1424 order. */ | |
1425 if (!cont_map) | 1521 if (!cont_map) |
1426 cont_map = pointer_map_create (); | 1522 cont_map = new hash_map<tree, gimple *>; |
1427 slot = pointer_map_insert (cont_map, case_lab); | 1523 cont_map->put (case_lab, q->cont_stmt); |
1428 *slot = q->cont_stmt; | 1524 case_label_vec.quick_push (case_lab); |
1429 VEC_quick_push (tree, case_label_vec, case_lab); | |
1430 } | 1525 } |
1431 } | 1526 } |
1432 for (j = last_case_index; j < last_case_index + nlabels; j++) | 1527 for (j = last_case_index; j < last_case_index + nlabels; j++) |
1433 { | 1528 { |
1434 tree label; | 1529 gimple *cont_stmt; |
1435 gimple cont_stmt; | 1530 |
1436 void **slot; | 1531 last_case = case_label_vec[j]; |
1437 | |
1438 last_case = VEC_index (tree, case_label_vec, j); | |
1439 | 1532 |
1440 gcc_assert (last_case); | 1533 gcc_assert (last_case); |
1441 gcc_assert (cont_map); | 1534 gcc_assert (cont_map); |
1442 | 1535 |
1443 slot = pointer_map_contains (cont_map, last_case); | 1536 cont_stmt = *cont_map->get (last_case); |
1444 /* As the comment above suggests, CASE_LABEL (last_case) was just a | 1537 |
1445 placeholder, it does not store an actual label, yet. */ | 1538 x = gimple_build_label (CASE_LABEL (last_case)); |
1446 gcc_assert (slot); | |
1447 cont_stmt = *(gimple *) slot; | |
1448 | |
1449 label = create_artificial_label (tf_loc); | |
1450 CASE_LABEL (last_case) = label; | |
1451 | |
1452 x = gimple_build_label (label); | |
1453 gimple_seq_add_stmt (&switch_body, x); | 1539 gimple_seq_add_stmt (&switch_body, x); |
1454 gimple_seq_add_stmt (&switch_body, cont_stmt); | 1540 gimple_seq_add_stmt (&switch_body, cont_stmt); |
1455 maybe_record_in_goto_queue (state, cont_stmt); | 1541 maybe_record_in_goto_queue (state, cont_stmt); |
1456 } | 1542 } |
1457 if (cont_map) | 1543 if (cont_map) |
1458 pointer_map_destroy (cont_map); | 1544 delete cont_map; |
1459 | 1545 |
1460 replace_goto_queue (tf); | 1546 replace_goto_queue (tf); |
1461 | 1547 |
1462 /* Make sure that the last case is the default label, as one is required. | 1548 /* Make sure that the last case is the default label, as one is required. |
1463 Then sort the labels, which is also required in GIMPLE. */ | 1549 Then sort the labels, which is also required in GIMPLE. */ |
1464 CASE_LOW (last_case) = NULL; | 1550 CASE_LOW (last_case) = NULL; |
1551 tree tem = case_label_vec.pop (); | |
1552 gcc_assert (tem == last_case); | |
1465 sort_case_labels (case_label_vec); | 1553 sort_case_labels (case_label_vec); |
1466 | 1554 |
1467 /* Build the switch statement, setting last_case to be the default | 1555 /* Build the switch statement, setting last_case to be the default |
1468 label. */ | 1556 label. */ |
1469 switch_stmt = gimple_build_switch_vec (finally_tmp, last_case, | 1557 switch_stmt = gimple_build_switch (finally_tmp, last_case, |
1470 case_label_vec); | 1558 case_label_vec); |
1471 gimple_set_location (switch_stmt, finally_loc); | 1559 gimple_set_location (switch_stmt, finally_loc); |
1472 | 1560 |
1473 /* Need to link SWITCH_STMT after running replace_goto_queue | 1561 /* Need to link SWITCH_STMT after running replace_goto_queue |
1474 due to not wanting to process the same goto stmts twice. */ | 1562 due to not wanting to process the same goto stmts twice. */ |
1475 gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt); | 1563 gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt); |
1477 } | 1565 } |
1478 | 1566 |
1479 /* Decide whether or not we are going to duplicate the finally block. | 1567 /* Decide whether or not we are going to duplicate the finally block. |
1480 There are several considerations. | 1568 There are several considerations. |
1481 | 1569 |
1482 First, if this is Java, then the finally block contains code | |
1483 written by the user. It has line numbers associated with it, | |
1484 so duplicating the block means it's difficult to set a breakpoint. | |
1485 Since controlling code generation via -g is verboten, we simply | |
1486 never duplicate code without optimization. | |
1487 | |
1488 Second, we'd like to prevent egregious code growth. One way to | 1570 Second, we'd like to prevent egregious code growth. One way to |
1489 do this is to estimate the size of the finally block, multiply | 1571 do this is to estimate the size of the finally block, multiply |
1490 that by the number of copies we'd need to make, and compare against | 1572 that by the number of copies we'd need to make, and compare against |
1491 the estimate of the size of the switch machinery we'd have to add. */ | 1573 the estimate of the size of the switch machinery we'd have to add. */ |
1492 | 1574 |
1493 static bool | 1575 static bool |
1494 decide_copy_try_finally (int ndests, gimple_seq finally) | 1576 decide_copy_try_finally (int ndests, bool may_throw, gimple_seq finally) |
1495 { | 1577 { |
1496 int f_estimate, sw_estimate; | 1578 int f_estimate, sw_estimate; |
1579 geh_else *eh_else; | |
1580 | |
1581 /* If there's an EH_ELSE involved, the exception path is separate | |
1582 and really doesn't come into play for this computation. */ | |
1583 eh_else = get_eh_else (finally); | |
1584 if (eh_else) | |
1585 { | |
1586 ndests -= may_throw; | |
1587 finally = gimple_eh_else_n_body (eh_else); | |
1588 } | |
1497 | 1589 |
1498 if (!optimize) | 1590 if (!optimize) |
1499 return false; | 1591 { |
1592 gimple_stmt_iterator gsi; | |
1593 | |
1594 if (ndests == 1) | |
1595 return true; | |
1596 | |
1597 for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi)) | |
1598 { | |
1599 /* Duplicate __builtin_stack_restore in the hope of eliminating it | |
1600 on the EH paths and, consequently, useless cleanups. */ | |
1601 gimple *stmt = gsi_stmt (gsi); | |
1602 if (!is_gimple_debug (stmt) | |
1603 && !gimple_clobber_p (stmt) | |
1604 && !gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE)) | |
1605 return false; | |
1606 } | |
1607 return true; | |
1608 } | |
1500 | 1609 |
1501 /* Finally estimate N times, plus N gotos. */ | 1610 /* Finally estimate N times, plus N gotos. */ |
1502 f_estimate = count_insns_seq (finally, &eni_size_weights); | 1611 f_estimate = estimate_num_insns_seq (finally, &eni_size_weights); |
1503 f_estimate = (f_estimate + 1) * ndests; | 1612 f_estimate = (f_estimate + 1) * ndests; |
1504 | 1613 |
1505 /* Switch statement (cost 10), N variable assignments, N gotos. */ | 1614 /* Switch statement (cost 10), N variable assignments, N gotos. */ |
1506 sw_estimate = 10 + 2 * ndests; | 1615 sw_estimate = 10 + 2 * ndests; |
1507 | 1616 |
1535 to a sequence of labels and blocks, plus the exception region trees | 1644 to a sequence of labels and blocks, plus the exception region trees |
1536 that record all the magic. This is complicated by the need to | 1645 that record all the magic. This is complicated by the need to |
1537 arrange for the FINALLY block to be executed on all exits. */ | 1646 arrange for the FINALLY block to be executed on all exits. */ |
1538 | 1647 |
1539 static gimple_seq | 1648 static gimple_seq |
1540 lower_try_finally (struct leh_state *state, gimple tp) | 1649 lower_try_finally (struct leh_state *state, gtry *tp) |
1541 { | 1650 { |
1542 struct leh_tf_state this_tf; | 1651 struct leh_tf_state this_tf; |
1543 struct leh_state this_state; | 1652 struct leh_state this_state; |
1544 int ndests; | 1653 int ndests; |
1545 gimple_seq old_eh_seq; | 1654 gimple_seq old_eh_seq; |
1548 | 1657 |
1549 memset (&this_tf, 0, sizeof (this_tf)); | 1658 memset (&this_tf, 0, sizeof (this_tf)); |
1550 this_tf.try_finally_expr = tp; | 1659 this_tf.try_finally_expr = tp; |
1551 this_tf.top_p = tp; | 1660 this_tf.top_p = tp; |
1552 this_tf.outer = state; | 1661 this_tf.outer = state; |
1553 if (using_eh_for_cleanups_p && !cleanup_is_dead_in (state->cur_region)) | 1662 if (using_eh_for_cleanups_p () && !cleanup_is_dead_in (state->cur_region)) |
1554 { | 1663 { |
1555 this_tf.region = gen_eh_region_cleanup (state->cur_region); | 1664 this_tf.region = gen_eh_region_cleanup (state->cur_region); |
1556 this_state.cur_region = this_tf.region; | 1665 this_state.cur_region = this_tf.region; |
1557 } | 1666 } |
1558 else | 1667 else |
1565 this_state.tf = &this_tf; | 1674 this_state.tf = &this_tf; |
1566 | 1675 |
1567 old_eh_seq = eh_seq; | 1676 old_eh_seq = eh_seq; |
1568 eh_seq = NULL; | 1677 eh_seq = NULL; |
1569 | 1678 |
1570 lower_eh_constructs_1 (&this_state, gimple_try_eval(tp)); | 1679 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp)); |
1571 | 1680 |
1572 /* Determine if the try block is escaped through the bottom. */ | 1681 /* Determine if the try block is escaped through the bottom. */ |
1573 this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp)); | 1682 this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp)); |
1574 | 1683 |
1575 /* Determine if any exceptions are possible within the try block. */ | 1684 /* Determine if any exceptions are possible within the try block. */ |
1580 | 1689 |
1581 /* Determine how many edges (still) reach the finally block. Or rather, | 1690 /* Determine how many edges (still) reach the finally block. Or rather, |
1582 how many destinations are reached by the finally block. Use this to | 1691 how many destinations are reached by the finally block. Use this to |
1583 determine how we process the finally block itself. */ | 1692 determine how we process the finally block itself. */ |
1584 | 1693 |
1585 ndests = VEC_length (tree, this_tf.dest_array); | 1694 ndests = this_tf.dest_array.length (); |
1586 ndests += this_tf.may_fallthru; | 1695 ndests += this_tf.may_fallthru; |
1587 ndests += this_tf.may_return; | 1696 ndests += this_tf.may_return; |
1588 ndests += this_tf.may_throw; | 1697 ndests += this_tf.may_throw; |
1589 | 1698 |
1590 /* If the FINALLY block is not reachable, dike it out. */ | 1699 /* If the FINALLY block is not reachable, dike it out. */ |
1600 lower_try_finally_nofallthru (state, &this_tf); | 1709 lower_try_finally_nofallthru (state, &this_tf); |
1601 | 1710 |
1602 /* We can easily special-case redirection to a single destination. */ | 1711 /* We can easily special-case redirection to a single destination. */ |
1603 else if (ndests == 1) | 1712 else if (ndests == 1) |
1604 lower_try_finally_onedest (state, &this_tf); | 1713 lower_try_finally_onedest (state, &this_tf); |
1605 else if (decide_copy_try_finally (ndests, gimple_try_cleanup (tp))) | 1714 else if (decide_copy_try_finally (ndests, this_tf.may_throw, |
1715 gimple_try_cleanup (tp))) | |
1606 lower_try_finally_copy (state, &this_tf); | 1716 lower_try_finally_copy (state, &this_tf); |
1607 else | 1717 else |
1608 lower_try_finally_switch (state, &this_tf); | 1718 lower_try_finally_switch (state, &this_tf); |
1609 | 1719 |
1610 /* If someone requested we add a label at the end of the transformed | 1720 /* If someone requested we add a label at the end of the transformed |
1611 block, do so. */ | 1721 block, do so. */ |
1612 if (this_tf.fallthru_label) | 1722 if (this_tf.fallthru_label) |
1613 { | 1723 { |
1614 /* This must be reached only if ndests == 0. */ | 1724 /* This must be reached only if ndests == 0. */ |
1615 gimple x = gimple_build_label (this_tf.fallthru_label); | 1725 gimple *x = gimple_build_label (this_tf.fallthru_label); |
1616 gimple_seq_add_stmt (&this_tf.top_p_seq, x); | 1726 gimple_seq_add_stmt (&this_tf.top_p_seq, x); |
1617 } | 1727 } |
1618 | 1728 |
1619 VEC_free (tree, heap, this_tf.dest_array); | 1729 this_tf.dest_array.release (); |
1620 if (this_tf.goto_queue) | 1730 free (this_tf.goto_queue); |
1621 free (this_tf.goto_queue); | |
1622 if (this_tf.goto_queue_map) | 1731 if (this_tf.goto_queue_map) |
1623 pointer_map_destroy (this_tf.goto_queue_map); | 1732 delete this_tf.goto_queue_map; |
1624 | 1733 |
1625 /* If there was an old (aka outer) eh_seq, append the current eh_seq. | 1734 /* If there was an old (aka outer) eh_seq, append the current eh_seq. |
1626 If there was no old eh_seq, then the append is trivially already done. */ | 1735 If there was no old eh_seq, then the append is trivially already done. */ |
1627 if (old_eh_seq) | 1736 if (old_eh_seq) |
1628 { | 1737 { |
1630 eh_seq = old_eh_seq; | 1739 eh_seq = old_eh_seq; |
1631 else | 1740 else |
1632 { | 1741 { |
1633 gimple_seq new_eh_seq = eh_seq; | 1742 gimple_seq new_eh_seq = eh_seq; |
1634 eh_seq = old_eh_seq; | 1743 eh_seq = old_eh_seq; |
1635 gimple_seq_add_seq(&eh_seq, new_eh_seq); | 1744 gimple_seq_add_seq (&eh_seq, new_eh_seq); |
1636 } | 1745 } |
1637 } | 1746 } |
1638 | 1747 |
1639 return this_tf.top_p_seq; | 1748 return this_tf.top_p_seq; |
1640 } | 1749 } |
1642 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY_CATCH with a | 1751 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY_CATCH with a |
1643 list of GIMPLE_CATCH to a sequence of labels and blocks, plus the | 1752 list of GIMPLE_CATCH to a sequence of labels and blocks, plus the |
1644 exception region trees that records all the magic. */ | 1753 exception region trees that records all the magic. */ |
1645 | 1754 |
1646 static gimple_seq | 1755 static gimple_seq |
1647 lower_catch (struct leh_state *state, gimple tp) | 1756 lower_catch (struct leh_state *state, gtry *tp) |
1648 { | 1757 { |
1649 eh_region try_region = NULL; | 1758 eh_region try_region = NULL; |
1650 struct leh_state this_state = *state; | 1759 struct leh_state this_state = *state; |
1651 gimple_stmt_iterator gsi; | 1760 gimple_stmt_iterator gsi; |
1652 tree out_label; | 1761 tree out_label; |
1653 gimple_seq new_seq; | 1762 gimple_seq new_seq, cleanup; |
1654 gimple x; | 1763 gimple *x; |
1655 location_t try_catch_loc = gimple_location (tp); | 1764 location_t try_catch_loc = gimple_location (tp); |
1656 | 1765 |
1657 if (flag_exceptions) | 1766 if (flag_exceptions) |
1658 { | 1767 { |
1659 try_region = gen_eh_region_try (state->cur_region); | 1768 try_region = gen_eh_region_try (state->cur_region); |
1660 this_state.cur_region = try_region; | 1769 this_state.cur_region = try_region; |
1661 } | 1770 } |
1662 | 1771 |
1663 lower_eh_constructs_1 (&this_state, gimple_try_eval (tp)); | 1772 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp)); |
1664 | 1773 |
1665 if (!eh_region_may_contain_throw (try_region)) | 1774 if (!eh_region_may_contain_throw (try_region)) |
1666 return gimple_try_eval (tp); | 1775 return gimple_try_eval (tp); |
1667 | 1776 |
1668 new_seq = NULL; | 1777 new_seq = NULL; |
1670 emit_resx (&new_seq, try_region); | 1779 emit_resx (&new_seq, try_region); |
1671 | 1780 |
1672 this_state.cur_region = state->cur_region; | 1781 this_state.cur_region = state->cur_region; |
1673 this_state.ehp_region = try_region; | 1782 this_state.ehp_region = try_region; |
1674 | 1783 |
1784 /* Add eh_seq from lowering EH in the cleanup sequence after the cleanup | |
1785 itself, so that e.g. for coverage purposes the nested cleanups don't | |
1786 appear before the cleanup body. See PR64634 for details. */ | |
1787 gimple_seq old_eh_seq = eh_seq; | |
1788 eh_seq = NULL; | |
1789 | |
1675 out_label = NULL; | 1790 out_label = NULL; |
1676 for (gsi = gsi_start (gimple_try_cleanup (tp)); | 1791 cleanup = gimple_try_cleanup (tp); |
1792 for (gsi = gsi_start (cleanup); | |
1677 !gsi_end_p (gsi); | 1793 !gsi_end_p (gsi); |
1678 gsi_next (&gsi)) | 1794 gsi_next (&gsi)) |
1679 { | 1795 { |
1680 eh_catch c; | 1796 eh_catch c; |
1681 gimple gcatch; | 1797 gcatch *catch_stmt; |
1682 gimple_seq handler; | 1798 gimple_seq handler; |
1683 | 1799 |
1684 gcatch = gsi_stmt (gsi); | 1800 catch_stmt = as_a <gcatch *> (gsi_stmt (gsi)); |
1685 c = gen_eh_region_catch (try_region, gimple_catch_types (gcatch)); | 1801 c = gen_eh_region_catch (try_region, gimple_catch_types (catch_stmt)); |
1686 | 1802 |
1687 handler = gimple_catch_handler (gcatch); | 1803 handler = gimple_catch_handler (catch_stmt); |
1688 lower_eh_constructs_1 (&this_state, handler); | 1804 lower_eh_constructs_1 (&this_state, &handler); |
1689 | 1805 |
1690 c->label = create_artificial_label (UNKNOWN_LOCATION); | 1806 c->label = create_artificial_label (UNKNOWN_LOCATION); |
1691 x = gimple_build_label (c->label); | 1807 x = gimple_build_label (c->label); |
1692 gimple_seq_add_stmt (&new_seq, x); | 1808 gimple_seq_add_stmt (&new_seq, x); |
1693 | 1809 |
1705 break; | 1821 break; |
1706 } | 1822 } |
1707 | 1823 |
1708 gimple_try_set_cleanup (tp, new_seq); | 1824 gimple_try_set_cleanup (tp, new_seq); |
1709 | 1825 |
1710 return frob_into_branch_around (tp, try_region, out_label); | 1826 gimple_seq new_eh_seq = eh_seq; |
1827 eh_seq = old_eh_seq; | |
1828 gimple_seq ret_seq = frob_into_branch_around (tp, try_region, out_label); | |
1829 gimple_seq_add_seq (&eh_seq, new_eh_seq); | |
1830 return ret_seq; | |
1711 } | 1831 } |
1712 | 1832 |
1713 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY with a | 1833 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY with a |
1714 GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception | 1834 GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception |
1715 region trees that record all the magic. */ | 1835 region trees that record all the magic. */ |
1716 | 1836 |
1717 static gimple_seq | 1837 static gimple_seq |
1718 lower_eh_filter (struct leh_state *state, gimple tp) | 1838 lower_eh_filter (struct leh_state *state, gtry *tp) |
1719 { | 1839 { |
1720 struct leh_state this_state = *state; | 1840 struct leh_state this_state = *state; |
1721 eh_region this_region = NULL; | 1841 eh_region this_region = NULL; |
1722 gimple inner, x; | 1842 gimple *inner, *x; |
1723 gimple_seq new_seq; | 1843 gimple_seq new_seq; |
1724 | 1844 |
1725 inner = gimple_seq_first_stmt (gimple_try_cleanup (tp)); | 1845 inner = gimple_seq_first_stmt (gimple_try_cleanup (tp)); |
1726 | 1846 |
1727 if (flag_exceptions) | 1847 if (flag_exceptions) |
1729 this_region = gen_eh_region_allowed (state->cur_region, | 1849 this_region = gen_eh_region_allowed (state->cur_region, |
1730 gimple_eh_filter_types (inner)); | 1850 gimple_eh_filter_types (inner)); |
1731 this_state.cur_region = this_region; | 1851 this_state.cur_region = this_region; |
1732 } | 1852 } |
1733 | 1853 |
1734 lower_eh_constructs_1 (&this_state, gimple_try_eval (tp)); | 1854 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp)); |
1735 | 1855 |
1736 if (!eh_region_may_contain_throw (this_region)) | 1856 if (!eh_region_may_contain_throw (this_region)) |
1737 return gimple_try_eval (tp); | 1857 return gimple_try_eval (tp); |
1738 | 1858 |
1739 new_seq = NULL; | 1859 new_seq = NULL; |
1745 | 1865 |
1746 this_region->u.allowed.label = create_artificial_label (UNKNOWN_LOCATION); | 1866 this_region->u.allowed.label = create_artificial_label (UNKNOWN_LOCATION); |
1747 x = gimple_build_label (this_region->u.allowed.label); | 1867 x = gimple_build_label (this_region->u.allowed.label); |
1748 gimple_seq_add_stmt (&new_seq, x); | 1868 gimple_seq_add_stmt (&new_seq, x); |
1749 | 1869 |
1750 lower_eh_constructs_1 (&this_state, gimple_eh_filter_failure (inner)); | 1870 lower_eh_constructs_1 (&this_state, gimple_eh_filter_failure_ptr (inner)); |
1751 gimple_seq_add_seq (&new_seq, gimple_eh_filter_failure (inner)); | 1871 gimple_seq_add_seq (&new_seq, gimple_eh_filter_failure (inner)); |
1752 | 1872 |
1753 gimple_try_set_cleanup (tp, new_seq); | 1873 gimple_try_set_cleanup (tp, new_seq); |
1754 | 1874 |
1755 return frob_into_branch_around (tp, this_region, NULL); | 1875 return frob_into_branch_around (tp, this_region, NULL); |
1758 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY with | 1878 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY with |
1759 an GIMPLE_EH_MUST_NOT_THROW to a sequence of labels and blocks, | 1879 an GIMPLE_EH_MUST_NOT_THROW to a sequence of labels and blocks, |
1760 plus the exception region trees that record all the magic. */ | 1880 plus the exception region trees that record all the magic. */ |
1761 | 1881 |
1762 static gimple_seq | 1882 static gimple_seq |
1763 lower_eh_must_not_throw (struct leh_state *state, gimple tp) | 1883 lower_eh_must_not_throw (struct leh_state *state, gtry *tp) |
1764 { | 1884 { |
1765 struct leh_state this_state = *state; | 1885 struct leh_state this_state = *state; |
1766 | 1886 |
1767 if (flag_exceptions) | 1887 if (flag_exceptions) |
1768 { | 1888 { |
1769 gimple inner = gimple_seq_first_stmt (gimple_try_cleanup (tp)); | 1889 gimple *inner = gimple_seq_first_stmt (gimple_try_cleanup (tp)); |
1770 eh_region this_region; | 1890 eh_region this_region; |
1771 | 1891 |
1772 this_region = gen_eh_region_must_not_throw (state->cur_region); | 1892 this_region = gen_eh_region_must_not_throw (state->cur_region); |
1773 this_region->u.must_not_throw.failure_decl | 1893 this_region->u.must_not_throw.failure_decl |
1774 = gimple_eh_must_not_throw_fndecl (inner); | 1894 = gimple_eh_must_not_throw_fndecl ( |
1775 this_region->u.must_not_throw.failure_loc = gimple_location (tp); | 1895 as_a <geh_mnt *> (inner)); |
1896 this_region->u.must_not_throw.failure_loc | |
1897 = LOCATION_LOCUS (gimple_location (tp)); | |
1776 | 1898 |
1777 /* In order to get mangling applied to this decl, we must mark it | 1899 /* In order to get mangling applied to this decl, we must mark it |
1778 used now. Otherwise, pass_ipa_free_lang_data won't think it | 1900 used now. Otherwise, pass_ipa_free_lang_data won't think it |
1779 needs to happen. */ | 1901 needs to happen. */ |
1780 TREE_USED (this_region->u.must_not_throw.failure_decl) = 1; | 1902 TREE_USED (this_region->u.must_not_throw.failure_decl) = 1; |
1781 | 1903 |
1782 this_state.cur_region = this_region; | 1904 this_state.cur_region = this_region; |
1783 } | 1905 } |
1784 | 1906 |
1785 lower_eh_constructs_1 (&this_state, gimple_try_eval (tp)); | 1907 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp)); |
1786 | 1908 |
1787 return gimple_try_eval (tp); | 1909 return gimple_try_eval (tp); |
1788 } | 1910 } |
1789 | 1911 |
1790 /* Implement a cleanup expression. This is similar to try-finally, | 1912 /* Implement a cleanup expression. This is similar to try-finally, |
1791 except that we only execute the cleanup block for exception edges. */ | 1913 except that we only execute the cleanup block for exception edges. */ |
1792 | 1914 |
1793 static gimple_seq | 1915 static gimple_seq |
1794 lower_cleanup (struct leh_state *state, gimple tp) | 1916 lower_cleanup (struct leh_state *state, gtry *tp) |
1795 { | 1917 { |
1796 struct leh_state this_state = *state; | 1918 struct leh_state this_state = *state; |
1797 eh_region this_region = NULL; | 1919 eh_region this_region = NULL; |
1798 struct leh_tf_state fake_tf; | 1920 struct leh_tf_state fake_tf; |
1799 gimple_seq result; | 1921 gimple_seq result; |
1803 { | 1925 { |
1804 this_region = gen_eh_region_cleanup (state->cur_region); | 1926 this_region = gen_eh_region_cleanup (state->cur_region); |
1805 this_state.cur_region = this_region; | 1927 this_state.cur_region = this_region; |
1806 } | 1928 } |
1807 | 1929 |
1808 lower_eh_constructs_1 (&this_state, gimple_try_eval (tp)); | 1930 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp)); |
1809 | 1931 |
1810 if (cleanup_dead || !eh_region_may_contain_throw (this_region)) | 1932 if (cleanup_dead || !eh_region_may_contain_throw (this_region)) |
1811 return gimple_try_eval (tp); | 1933 return gimple_try_eval (tp); |
1812 | 1934 |
1813 /* Build enough of a try-finally state so that we can reuse | 1935 /* Build enough of a try-finally state so that we can reuse |
1823 | 1945 |
1824 if (fake_tf.may_throw) | 1946 if (fake_tf.may_throw) |
1825 { | 1947 { |
1826 /* In this case honor_protect_cleanup_actions had nothing to do, | 1948 /* In this case honor_protect_cleanup_actions had nothing to do, |
1827 and we should process this normally. */ | 1949 and we should process this normally. */ |
1828 lower_eh_constructs_1 (state, gimple_try_cleanup (tp)); | 1950 lower_eh_constructs_1 (state, gimple_try_cleanup_ptr (tp)); |
1829 result = frob_into_branch_around (tp, this_region, | 1951 result = frob_into_branch_around (tp, this_region, |
1830 fake_tf.fallthru_label); | 1952 fake_tf.fallthru_label); |
1831 } | 1953 } |
1832 else | 1954 else |
1833 { | 1955 { |
1835 the work. All we have left is to append the fallthru_label. */ | 1957 the work. All we have left is to append the fallthru_label. */ |
1836 | 1958 |
1837 result = gimple_try_eval (tp); | 1959 result = gimple_try_eval (tp); |
1838 if (fake_tf.fallthru_label) | 1960 if (fake_tf.fallthru_label) |
1839 { | 1961 { |
1840 gimple x = gimple_build_label (fake_tf.fallthru_label); | 1962 gimple *x = gimple_build_label (fake_tf.fallthru_label); |
1841 gimple_seq_add_stmt (&result, x); | 1963 gimple_seq_add_stmt (&result, x); |
1842 } | 1964 } |
1843 } | 1965 } |
1844 return result; | 1966 return result; |
1845 } | 1967 } |
1849 | 1971 |
1850 static void | 1972 static void |
1851 lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi) | 1973 lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi) |
1852 { | 1974 { |
1853 gimple_seq replace; | 1975 gimple_seq replace; |
1854 gimple x; | 1976 gimple *x; |
1855 gimple stmt = gsi_stmt (*gsi); | 1977 gimple *stmt = gsi_stmt (*gsi); |
1856 | 1978 |
1857 switch (gimple_code (stmt)) | 1979 switch (gimple_code (stmt)) |
1858 { | 1980 { |
1859 case GIMPLE_CALL: | 1981 case GIMPLE_CALL: |
1860 { | 1982 { |
1868 /* The front end may have generated a call to | 1990 /* The front end may have generated a call to |
1869 __builtin_eh_pointer (0) within a catch region. Replace | 1991 __builtin_eh_pointer (0) within a catch region. Replace |
1870 this zero argument with the current catch region number. */ | 1992 this zero argument with the current catch region number. */ |
1871 if (state->ehp_region) | 1993 if (state->ehp_region) |
1872 { | 1994 { |
1873 tree nr = build_int_cst (NULL, state->ehp_region->index); | 1995 tree nr = build_int_cst (integer_type_node, |
1996 state->ehp_region->index); | |
1874 gimple_call_set_arg (stmt, 0, nr); | 1997 gimple_call_set_arg (stmt, 0, nr); |
1875 } | 1998 } |
1876 else | 1999 else |
1877 { | 2000 { |
1878 /* The user has done something silly. Remove it. */ | 2001 /* The user has done something silly. Remove it. */ |
1905 | 2028 |
1906 case GIMPLE_ASSIGN: | 2029 case GIMPLE_ASSIGN: |
1907 /* If the stmt can throw use a new temporary for the assignment | 2030 /* If the stmt can throw use a new temporary for the assignment |
1908 to a LHS. This makes sure the old value of the LHS is | 2031 to a LHS. This makes sure the old value of the LHS is |
1909 available on the EH edge. Only do so for statements that | 2032 available on the EH edge. Only do so for statements that |
1910 potentially fall thru (no noreturn calls e.g.), otherwise | 2033 potentially fall through (no noreturn calls e.g.), otherwise |
1911 this new assignment might create fake fallthru regions. */ | 2034 this new assignment might create fake fallthru regions. */ |
1912 if (stmt_could_throw_p (stmt) | 2035 if (stmt_could_throw_p (stmt) |
1913 && gimple_has_lhs (stmt) | 2036 && gimple_has_lhs (stmt) |
1914 && gimple_stmt_may_fallthru (stmt) | 2037 && gimple_stmt_may_fallthru (stmt) |
1915 && !tree_could_throw_p (gimple_get_lhs (stmt)) | 2038 && !tree_could_throw_p (gimple_get_lhs (stmt)) |
1916 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt)))) | 2039 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt)))) |
1917 { | 2040 { |
1918 tree lhs = gimple_get_lhs (stmt); | 2041 tree lhs = gimple_get_lhs (stmt); |
1919 tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL); | 2042 tree tmp = create_tmp_var (TREE_TYPE (lhs)); |
1920 gimple s = gimple_build_assign (lhs, tmp); | 2043 gimple *s = gimple_build_assign (lhs, tmp); |
1921 gimple_set_location (s, gimple_location (stmt)); | 2044 gimple_set_location (s, gimple_location (stmt)); |
1922 gimple_set_block (s, gimple_block (stmt)); | 2045 gimple_set_block (s, gimple_block (stmt)); |
1923 gimple_set_lhs (stmt, tmp); | 2046 gimple_set_lhs (stmt, tmp); |
1924 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE | 2047 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE |
1925 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE) | 2048 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE) |
1939 case GIMPLE_RETURN: | 2062 case GIMPLE_RETURN: |
1940 maybe_record_in_goto_queue (state, stmt); | 2063 maybe_record_in_goto_queue (state, stmt); |
1941 break; | 2064 break; |
1942 | 2065 |
1943 case GIMPLE_SWITCH: | 2066 case GIMPLE_SWITCH: |
1944 verify_norecord_switch_expr (state, stmt); | 2067 verify_norecord_switch_expr (state, as_a <gswitch *> (stmt)); |
1945 break; | 2068 break; |
1946 | 2069 |
1947 case GIMPLE_TRY: | 2070 case GIMPLE_TRY: |
1948 if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY) | 2071 { |
1949 replace = lower_try_finally (state, stmt); | 2072 gtry *try_stmt = as_a <gtry *> (stmt); |
1950 else | 2073 if (gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY) |
1951 { | 2074 replace = lower_try_finally (state, try_stmt); |
1952 x = gimple_seq_first_stmt (gimple_try_cleanup (stmt)); | 2075 else |
1953 if (!x) | 2076 { |
1954 { | 2077 x = gimple_seq_first_stmt (gimple_try_cleanup (try_stmt)); |
1955 replace = gimple_try_eval (stmt); | 2078 if (!x) |
1956 lower_eh_constructs_1 (state, replace); | |
1957 } | |
1958 else | |
1959 switch (gimple_code (x)) | |
1960 { | 2079 { |
2080 replace = gimple_try_eval (try_stmt); | |
2081 lower_eh_constructs_1 (state, &replace); | |
2082 } | |
2083 else | |
2084 switch (gimple_code (x)) | |
2085 { | |
1961 case GIMPLE_CATCH: | 2086 case GIMPLE_CATCH: |
1962 replace = lower_catch (state, stmt); | 2087 replace = lower_catch (state, try_stmt); |
1963 break; | 2088 break; |
1964 case GIMPLE_EH_FILTER: | 2089 case GIMPLE_EH_FILTER: |
1965 replace = lower_eh_filter (state, stmt); | 2090 replace = lower_eh_filter (state, try_stmt); |
1966 break; | 2091 break; |
1967 case GIMPLE_EH_MUST_NOT_THROW: | 2092 case GIMPLE_EH_MUST_NOT_THROW: |
1968 replace = lower_eh_must_not_throw (state, stmt); | 2093 replace = lower_eh_must_not_throw (state, try_stmt); |
1969 break; | 2094 break; |
2095 case GIMPLE_EH_ELSE: | |
2096 /* This code is only valid with GIMPLE_TRY_FINALLY. */ | |
2097 gcc_unreachable (); | |
1970 default: | 2098 default: |
1971 replace = lower_cleanup (state, stmt); | 2099 replace = lower_cleanup (state, try_stmt); |
1972 break; | 2100 break; |
1973 } | 2101 } |
1974 } | 2102 } |
2103 } | |
1975 | 2104 |
1976 /* Remove the old stmt and insert the transformed sequence | 2105 /* Remove the old stmt and insert the transformed sequence |
1977 instead. */ | 2106 instead. */ |
1978 gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT); | 2107 gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT); |
1979 gsi_remove (gsi, true); | 2108 gsi_remove (gsi, true); |
1980 | 2109 |
1981 /* Return since we don't want gsi_next () */ | 2110 /* Return since we don't want gsi_next () */ |
1982 return; | 2111 return; |
1983 | 2112 |
2113 case GIMPLE_EH_ELSE: | |
2114 /* We should be eliminating this in lower_try_finally et al. */ | |
2115 gcc_unreachable (); | |
2116 | |
1984 default: | 2117 default: |
1985 /* A type, a decl, or some kind of statement that we're not | 2118 /* A type, a decl, or some kind of statement that we're not |
1986 interested in. Don't walk them. */ | 2119 interested in. Don't walk them. */ |
1987 break; | 2120 break; |
1988 } | 2121 } |
1991 } | 2124 } |
1992 | 2125 |
1993 /* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2. */ | 2126 /* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2. */ |
1994 | 2127 |
1995 static void | 2128 static void |
1996 lower_eh_constructs_1 (struct leh_state *state, gimple_seq seq) | 2129 lower_eh_constructs_1 (struct leh_state *state, gimple_seq *pseq) |
1997 { | 2130 { |
1998 gimple_stmt_iterator gsi; | 2131 gimple_stmt_iterator gsi; |
1999 for (gsi = gsi_start (seq); !gsi_end_p (gsi);) | 2132 for (gsi = gsi_start (*pseq); !gsi_end_p (gsi);) |
2000 lower_eh_constructs_2 (state, &gsi); | 2133 lower_eh_constructs_2 (state, &gsi); |
2001 } | 2134 } |
2002 | 2135 |
2003 static unsigned int | 2136 namespace { |
2004 lower_eh_constructs (void) | 2137 |
2138 const pass_data pass_data_lower_eh = | |
2139 { | |
2140 GIMPLE_PASS, /* type */ | |
2141 "eh", /* name */ | |
2142 OPTGROUP_NONE, /* optinfo_flags */ | |
2143 TV_TREE_EH, /* tv_id */ | |
2144 PROP_gimple_lcf, /* properties_required */ | |
2145 PROP_gimple_leh, /* properties_provided */ | |
2146 0, /* properties_destroyed */ | |
2147 0, /* todo_flags_start */ | |
2148 0, /* todo_flags_finish */ | |
2149 }; | |
2150 | |
2151 class pass_lower_eh : public gimple_opt_pass | |
2152 { | |
2153 public: | |
2154 pass_lower_eh (gcc::context *ctxt) | |
2155 : gimple_opt_pass (pass_data_lower_eh, ctxt) | |
2156 {} | |
2157 | |
2158 /* opt_pass methods: */ | |
2159 virtual unsigned int execute (function *); | |
2160 | |
2161 }; // class pass_lower_eh | |
2162 | |
2163 unsigned int | |
2164 pass_lower_eh::execute (function *fun) | |
2005 { | 2165 { |
2006 struct leh_state null_state; | 2166 struct leh_state null_state; |
2007 gimple_seq bodyp; | 2167 gimple_seq bodyp; |
2008 | 2168 |
2009 bodyp = gimple_body (current_function_decl); | 2169 bodyp = gimple_body (current_function_decl); |
2010 if (bodyp == NULL) | 2170 if (bodyp == NULL) |
2011 return 0; | 2171 return 0; |
2012 | 2172 |
2013 finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free); | 2173 finally_tree = new hash_table<finally_tree_hasher> (31); |
2014 eh_region_may_contain_throw_map = BITMAP_ALLOC (NULL); | 2174 eh_region_may_contain_throw_map = BITMAP_ALLOC (NULL); |
2015 memset (&null_state, 0, sizeof (null_state)); | 2175 memset (&null_state, 0, sizeof (null_state)); |
2016 | 2176 |
2017 collect_finally_tree_1 (bodyp, NULL); | 2177 collect_finally_tree_1 (bodyp, NULL); |
2018 lower_eh_constructs_1 (&null_state, bodyp); | 2178 lower_eh_constructs_1 (&null_state, &bodyp); |
2179 gimple_set_body (current_function_decl, bodyp); | |
2019 | 2180 |
2020 /* We assume there's a return statement, or something, at the end of | 2181 /* We assume there's a return statement, or something, at the end of |
2021 the function, and thus plopping the EH sequence afterward won't | 2182 the function, and thus plopping the EH sequence afterward won't |
2022 change anything. */ | 2183 change anything. */ |
2023 gcc_assert (!gimple_seq_may_fallthru (bodyp)); | 2184 gcc_assert (!gimple_seq_may_fallthru (bodyp)); |
2025 | 2186 |
2026 /* We assume that since BODYP already existed, adding EH_SEQ to it | 2187 /* We assume that since BODYP already existed, adding EH_SEQ to it |
2027 didn't change its value, and we don't have to re-set the function. */ | 2188 didn't change its value, and we don't have to re-set the function. */ |
2028 gcc_assert (bodyp == gimple_body (current_function_decl)); | 2189 gcc_assert (bodyp == gimple_body (current_function_decl)); |
2029 | 2190 |
2030 htab_delete (finally_tree); | 2191 delete finally_tree; |
2192 finally_tree = NULL; | |
2031 BITMAP_FREE (eh_region_may_contain_throw_map); | 2193 BITMAP_FREE (eh_region_may_contain_throw_map); |
2032 eh_seq = NULL; | 2194 eh_seq = NULL; |
2033 | 2195 |
2034 /* If this function needs a language specific EH personality routine | 2196 /* If this function needs a language specific EH personality routine |
2035 and the frontend didn't already set one do so now. */ | 2197 and the frontend didn't already set one do so now. */ |
2036 if (function_needs_eh_personality (cfun) == eh_personality_lang | 2198 if (function_needs_eh_personality (fun) == eh_personality_lang |
2037 && !DECL_FUNCTION_PERSONALITY (current_function_decl)) | 2199 && !DECL_FUNCTION_PERSONALITY (current_function_decl)) |
2038 DECL_FUNCTION_PERSONALITY (current_function_decl) | 2200 DECL_FUNCTION_PERSONALITY (current_function_decl) |
2039 = lang_hooks.eh_personality (); | 2201 = lang_hooks.eh_personality (); |
2040 | 2202 |
2041 return 0; | 2203 return 0; |
2042 } | 2204 } |
2043 | 2205 |
2044 struct gimple_opt_pass pass_lower_eh = | 2206 } // anon namespace |
2045 { | 2207 |
2046 { | 2208 gimple_opt_pass * |
2047 GIMPLE_PASS, | 2209 make_pass_lower_eh (gcc::context *ctxt) |
2048 "eh", /* name */ | 2210 { |
2049 NULL, /* gate */ | 2211 return new pass_lower_eh (ctxt); |
2050 lower_eh_constructs, /* execute */ | 2212 } |
2051 NULL, /* sub */ | |
2052 NULL, /* next */ | |
2053 0, /* static_pass_number */ | |
2054 TV_TREE_EH, /* tv_id */ | |
2055 PROP_gimple_lcf, /* properties_required */ | |
2056 PROP_gimple_leh, /* properties_provided */ | |
2057 0, /* properties_destroyed */ | |
2058 0, /* todo_flags_start */ | |
2059 TODO_dump_func /* todo_flags_finish */ | |
2060 } | |
2061 }; | |
2062 | 2213 |
2063 /* Create the multiple edges from an EH_DISPATCH statement to all of | 2214 /* Create the multiple edges from an EH_DISPATCH statement to all of |
2064 the possible handlers for its EH region. Return true if there's | 2215 the possible handlers for its EH region. Return true if there's |
2065 no fallthru edge; false if there is. */ | 2216 no fallthru edge; false if there is. */ |
2066 | 2217 |
2067 bool | 2218 bool |
2068 make_eh_dispatch_edges (gimple stmt) | 2219 make_eh_dispatch_edges (geh_dispatch *stmt) |
2069 { | 2220 { |
2070 eh_region r; | 2221 eh_region r; |
2071 eh_catch c; | 2222 eh_catch c; |
2072 basic_block src, dst; | 2223 basic_block src, dst; |
2073 | 2224 |
2102 | 2253 |
2103 /* Create the single EH edge from STMT to its nearest landing pad, | 2254 /* Create the single EH edge from STMT to its nearest landing pad, |
2104 if there is such a landing pad within the current function. */ | 2255 if there is such a landing pad within the current function. */ |
2105 | 2256 |
2106 void | 2257 void |
2107 make_eh_edges (gimple stmt) | 2258 make_eh_edges (gimple *stmt) |
2108 { | 2259 { |
2109 basic_block src, dst; | 2260 basic_block src, dst; |
2110 eh_landing_pad lp; | 2261 eh_landing_pad lp; |
2111 int lp_nr; | 2262 int lp_nr; |
2112 | 2263 |
2134 static void | 2285 static void |
2135 redirect_eh_edge_1 (edge edge_in, basic_block new_bb, bool change_region) | 2286 redirect_eh_edge_1 (edge edge_in, basic_block new_bb, bool change_region) |
2136 { | 2287 { |
2137 eh_landing_pad old_lp, new_lp; | 2288 eh_landing_pad old_lp, new_lp; |
2138 basic_block old_bb; | 2289 basic_block old_bb; |
2139 gimple throw_stmt; | 2290 gimple *throw_stmt; |
2140 int old_lp_nr, new_lp_nr; | 2291 int old_lp_nr, new_lp_nr; |
2141 tree old_label, new_label; | 2292 tree old_label, new_label; |
2142 edge_iterator ei; | 2293 edge_iterator ei; |
2143 edge e; | 2294 edge e; |
2144 | 2295 |
2221 /* This is a subroutine of gimple_redirect_edge_and_branch. Update the | 2372 /* This is a subroutine of gimple_redirect_edge_and_branch. Update the |
2222 labels for redirecting a non-fallthru EH_DISPATCH edge E to NEW_BB. | 2373 labels for redirecting a non-fallthru EH_DISPATCH edge E to NEW_BB. |
2223 The actual edge update will happen in the caller. */ | 2374 The actual edge update will happen in the caller. */ |
2224 | 2375 |
2225 void | 2376 void |
2226 redirect_eh_dispatch_edge (gimple stmt, edge e, basic_block new_bb) | 2377 redirect_eh_dispatch_edge (geh_dispatch *stmt, edge e, basic_block new_bb) |
2227 { | 2378 { |
2228 tree new_lab = gimple_block_label (new_bb); | 2379 tree new_lab = gimple_block_label (new_bb); |
2229 bool any_changed = false; | 2380 bool any_changed = false; |
2230 basic_block old_bb; | 2381 basic_block old_bb; |
2231 eh_region r; | 2382 eh_region r; |
2309 case UNGT_EXPR: | 2460 case UNGT_EXPR: |
2310 case UNGE_EXPR: | 2461 case UNGE_EXPR: |
2311 case UNEQ_EXPR: | 2462 case UNEQ_EXPR: |
2312 return honor_snans; | 2463 return honor_snans; |
2313 | 2464 |
2314 case CONVERT_EXPR: | |
2315 case FIX_TRUNC_EXPR: | |
2316 /* Conversion of floating point might trap. */ | |
2317 return honor_nans; | |
2318 | |
2319 case NEGATE_EXPR: | 2465 case NEGATE_EXPR: |
2320 case ABS_EXPR: | 2466 case ABS_EXPR: |
2321 case CONJ_EXPR: | 2467 case CONJ_EXPR: |
2322 /* These operations don't trap with floating point. */ | 2468 /* These operations don't trap with floating point. */ |
2323 if (honor_trapv) | 2469 if (honor_trapv) |
2363 bool honor_snans = fp_operation && flag_signaling_nans != 0; | 2509 bool honor_snans = fp_operation && flag_signaling_nans != 0; |
2364 bool handled; | 2510 bool handled; |
2365 | 2511 |
2366 if (TREE_CODE_CLASS (op) != tcc_comparison | 2512 if (TREE_CODE_CLASS (op) != tcc_comparison |
2367 && TREE_CODE_CLASS (op) != tcc_unary | 2513 && TREE_CODE_CLASS (op) != tcc_unary |
2368 && TREE_CODE_CLASS (op) != tcc_binary) | 2514 && TREE_CODE_CLASS (op) != tcc_binary |
2515 && op != FMA_EXPR) | |
2369 return false; | 2516 return false; |
2370 | 2517 |
2371 return operation_could_trap_helper_p (op, fp_operation, honor_trapv, | 2518 return operation_could_trap_helper_p (op, fp_operation, honor_trapv, |
2372 honor_nans, honor_snans, divisor, | 2519 honor_nans, honor_snans, divisor, |
2373 &handled); | 2520 &handled); |
2521 } | |
2522 | |
2523 | |
2524 /* Returns true if it is possible to prove that the index of | |
2525 an array access REF (an ARRAY_REF expression) falls into the | |
2526 array bounds. */ | |
2527 | |
2528 static bool | |
2529 in_array_bounds_p (tree ref) | |
2530 { | |
2531 tree idx = TREE_OPERAND (ref, 1); | |
2532 tree min, max; | |
2533 | |
2534 if (TREE_CODE (idx) != INTEGER_CST) | |
2535 return false; | |
2536 | |
2537 min = array_ref_low_bound (ref); | |
2538 max = array_ref_up_bound (ref); | |
2539 if (!min | |
2540 || !max | |
2541 || TREE_CODE (min) != INTEGER_CST | |
2542 || TREE_CODE (max) != INTEGER_CST) | |
2543 return false; | |
2544 | |
2545 if (tree_int_cst_lt (idx, min) | |
2546 || tree_int_cst_lt (max, idx)) | |
2547 return false; | |
2548 | |
2549 return true; | |
2550 } | |
2551 | |
2552 /* Returns true if it is possible to prove that the range of | |
2553 an array access REF (an ARRAY_RANGE_REF expression) falls | |
2554 into the array bounds. */ | |
2555 | |
2556 static bool | |
2557 range_in_array_bounds_p (tree ref) | |
2558 { | |
2559 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ref)); | |
2560 tree range_min, range_max, min, max; | |
2561 | |
2562 range_min = TYPE_MIN_VALUE (domain_type); | |
2563 range_max = TYPE_MAX_VALUE (domain_type); | |
2564 if (!range_min | |
2565 || !range_max | |
2566 || TREE_CODE (range_min) != INTEGER_CST | |
2567 || TREE_CODE (range_max) != INTEGER_CST) | |
2568 return false; | |
2569 | |
2570 min = array_ref_low_bound (ref); | |
2571 max = array_ref_up_bound (ref); | |
2572 if (!min | |
2573 || !max | |
2574 || TREE_CODE (min) != INTEGER_CST | |
2575 || TREE_CODE (max) != INTEGER_CST) | |
2576 return false; | |
2577 | |
2578 if (tree_int_cst_lt (range_min, min) | |
2579 || tree_int_cst_lt (max, range_max)) | |
2580 return false; | |
2581 | |
2582 return true; | |
2374 } | 2583 } |
2375 | 2584 |
2376 /* Return true if EXPR can trap, as in dereferencing an invalid pointer | 2585 /* Return true if EXPR can trap, as in dereferencing an invalid pointer |
2377 location or floating point arithmetic. C.f. the rtl version, may_trap_p. | 2586 location or floating point arithmetic. C.f. the rtl version, may_trap_p. |
2378 This routine expects only GIMPLE lhs or rhs input. */ | 2587 This routine expects only GIMPLE lhs or rhs input. */ |
2406 return true; | 2615 return true; |
2407 | 2616 |
2408 restart: | 2617 restart: |
2409 switch (code) | 2618 switch (code) |
2410 { | 2619 { |
2411 case TARGET_MEM_REF: | |
2412 if (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR | |
2413 && !TMR_INDEX (expr) && !TMR_INDEX2 (expr)) | |
2414 return false; | |
2415 return !TREE_THIS_NOTRAP (expr); | |
2416 | |
2417 case COMPONENT_REF: | 2620 case COMPONENT_REF: |
2418 case REALPART_EXPR: | 2621 case REALPART_EXPR: |
2419 case IMAGPART_EXPR: | 2622 case IMAGPART_EXPR: |
2420 case BIT_FIELD_REF: | 2623 case BIT_FIELD_REF: |
2421 case VIEW_CONVERT_EXPR: | 2624 case VIEW_CONVERT_EXPR: |
2438 return true; | 2641 return true; |
2439 if (TREE_THIS_NOTRAP (expr)) | 2642 if (TREE_THIS_NOTRAP (expr)) |
2440 return false; | 2643 return false; |
2441 return !in_array_bounds_p (expr); | 2644 return !in_array_bounds_p (expr); |
2442 | 2645 |
2646 case TARGET_MEM_REF: | |
2443 case MEM_REF: | 2647 case MEM_REF: |
2648 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR | |
2649 && tree_could_trap_p (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))) | |
2650 return true; | |
2651 if (TREE_THIS_NOTRAP (expr)) | |
2652 return false; | |
2653 /* We cannot prove that the access is in-bounds when we have | |
2654 variable-index TARGET_MEM_REFs. */ | |
2655 if (code == TARGET_MEM_REF | |
2656 && (TMR_INDEX (expr) || TMR_INDEX2 (expr))) | |
2657 return true; | |
2444 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR) | 2658 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR) |
2445 return false; | 2659 { |
2446 /* Fallthru. */ | 2660 tree base = TREE_OPERAND (TREE_OPERAND (expr, 0), 0); |
2661 offset_int off = mem_ref_offset (expr); | |
2662 if (wi::neg_p (off, SIGNED)) | |
2663 return true; | |
2664 if (TREE_CODE (base) == STRING_CST) | |
2665 return wi::leu_p (TREE_STRING_LENGTH (base), off); | |
2666 else if (DECL_SIZE_UNIT (base) == NULL_TREE | |
2667 || TREE_CODE (DECL_SIZE_UNIT (base)) != INTEGER_CST | |
2668 || wi::leu_p (wi::to_offset (DECL_SIZE_UNIT (base)), off)) | |
2669 return true; | |
2670 /* Now we are sure the first byte of the access is inside | |
2671 the object. */ | |
2672 return false; | |
2673 } | |
2674 return true; | |
2675 | |
2447 case INDIRECT_REF: | 2676 case INDIRECT_REF: |
2448 return !TREE_THIS_NOTRAP (expr); | 2677 return !TREE_THIS_NOTRAP (expr); |
2449 | 2678 |
2450 case ASM_EXPR: | 2679 case ASM_EXPR: |
2451 return TREE_THIS_VOLATILE (expr); | 2680 return TREE_THIS_VOLATILE (expr); |
2452 | 2681 |
2453 case CALL_EXPR: | 2682 case CALL_EXPR: |
2454 t = get_callee_fndecl (expr); | 2683 t = get_callee_fndecl (expr); |
2455 /* Assume that calls to weak functions may trap. */ | 2684 /* Assume that calls to weak functions may trap. */ |
2456 if (!t || !DECL_P (t) || DECL_WEAK (t)) | 2685 if (!t || !DECL_P (t)) |
2457 return true; | 2686 return true; |
2687 if (DECL_WEAK (t)) | |
2688 return tree_could_trap_p (t); | |
2689 return false; | |
2690 | |
2691 case FUNCTION_DECL: | |
2692 /* Assume that accesses to weak functions may trap, unless we know | |
2693 they are certainly defined in current TU or in some other | |
2694 LTO partition. */ | |
2695 if (DECL_WEAK (expr) && !DECL_COMDAT (expr) && DECL_EXTERNAL (expr)) | |
2696 { | |
2697 cgraph_node *node = cgraph_node::get (expr); | |
2698 if (node) | |
2699 node = node->function_symbol (); | |
2700 return !(node && node->in_other_partition); | |
2701 } | |
2702 return false; | |
2703 | |
2704 case VAR_DECL: | |
2705 /* Assume that accesses to weak vars may trap, unless we know | |
2706 they are certainly defined in current TU or in some other | |
2707 LTO partition. */ | |
2708 if (DECL_WEAK (expr) && !DECL_COMDAT (expr) && DECL_EXTERNAL (expr)) | |
2709 { | |
2710 varpool_node *node = varpool_node::get (expr); | |
2711 if (node) | |
2712 node = node->ultimate_alias_target (); | |
2713 return !(node && node->in_other_partition); | |
2714 } | |
2458 return false; | 2715 return false; |
2459 | 2716 |
2460 default: | 2717 default: |
2461 return false; | 2718 return false; |
2462 } | 2719 } |
2465 | 2722 |
2466 /* Helper for stmt_could_throw_p. Return true if STMT (assumed to be a | 2723 /* Helper for stmt_could_throw_p. Return true if STMT (assumed to be a |
2467 an assignment or a conditional) may throw. */ | 2724 an assignment or a conditional) may throw. */ |
2468 | 2725 |
2469 static bool | 2726 static bool |
2470 stmt_could_throw_1_p (gimple stmt) | 2727 stmt_could_throw_1_p (gassign *stmt) |
2471 { | 2728 { |
2472 enum tree_code code = gimple_expr_code (stmt); | 2729 enum tree_code code = gimple_assign_rhs_code (stmt); |
2473 bool honor_nans = false; | 2730 bool honor_nans = false; |
2474 bool honor_snans = false; | 2731 bool honor_snans = false; |
2475 bool fp_operation = false; | 2732 bool fp_operation = false; |
2476 bool honor_trapv = false; | 2733 bool honor_trapv = false; |
2477 tree t; | 2734 tree t; |
2478 size_t i; | 2735 size_t i; |
2479 bool handled, ret; | 2736 bool handled, ret; |
2480 | 2737 |
2481 if (TREE_CODE_CLASS (code) == tcc_comparison | 2738 if (TREE_CODE_CLASS (code) == tcc_comparison |
2482 || TREE_CODE_CLASS (code) == tcc_unary | 2739 || TREE_CODE_CLASS (code) == tcc_unary |
2483 || TREE_CODE_CLASS (code) == tcc_binary) | 2740 || TREE_CODE_CLASS (code) == tcc_binary |
2484 { | 2741 || code == FMA_EXPR) |
2485 t = gimple_expr_type (stmt); | 2742 { |
2743 if (TREE_CODE_CLASS (code) == tcc_comparison) | |
2744 t = TREE_TYPE (gimple_assign_rhs1 (stmt)); | |
2745 else | |
2746 t = gimple_expr_type (stmt); | |
2486 fp_operation = FLOAT_TYPE_P (t); | 2747 fp_operation = FLOAT_TYPE_P (t); |
2487 if (fp_operation) | 2748 if (fp_operation) |
2488 { | 2749 { |
2489 honor_nans = flag_trapping_math && !flag_finite_math_only; | 2750 honor_nans = flag_trapping_math && !flag_finite_math_only; |
2490 honor_snans = flag_signaling_nans != 0; | 2751 honor_snans = flag_signaling_nans != 0; |
2491 } | 2752 } |
2492 else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t)) | 2753 else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t)) |
2493 honor_trapv = true; | 2754 honor_trapv = true; |
2494 } | 2755 } |
2495 | 2756 |
2757 /* First check the LHS. */ | |
2758 if (tree_could_trap_p (gimple_assign_lhs (stmt))) | |
2759 return true; | |
2760 | |
2496 /* Check if the main expression may trap. */ | 2761 /* Check if the main expression may trap. */ |
2497 t = is_gimple_assign (stmt) ? gimple_assign_rhs2 (stmt) : NULL; | |
2498 ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv, | 2762 ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv, |
2499 honor_nans, honor_snans, t, | 2763 honor_nans, honor_snans, |
2764 gimple_assign_rhs2 (stmt), | |
2500 &handled); | 2765 &handled); |
2501 if (handled) | 2766 if (handled) |
2502 return ret; | 2767 return ret; |
2503 | 2768 |
2504 /* If the expression does not trap, see if any of the individual operands may | 2769 /* If the expression does not trap, see if any of the individual operands may |
2505 trap. */ | 2770 trap. */ |
2506 for (i = 0; i < gimple_num_ops (stmt); i++) | 2771 for (i = 1; i < gimple_num_ops (stmt); i++) |
2507 if (tree_could_trap_p (gimple_op (stmt, i))) | 2772 if (tree_could_trap_p (gimple_op (stmt, i))) |
2508 return true; | 2773 return true; |
2509 | 2774 |
2510 return false; | 2775 return false; |
2511 } | 2776 } |
2512 | 2777 |
2513 | 2778 |
2514 /* Return true if statement STMT could throw an exception. */ | 2779 /* Return true if statement STMT could throw an exception. */ |
2515 | 2780 |
2516 bool | 2781 bool |
2517 stmt_could_throw_p (gimple stmt) | 2782 stmt_could_throw_p (gimple *stmt) |
2518 { | 2783 { |
2519 if (!flag_exceptions) | 2784 if (!flag_exceptions) |
2520 return false; | 2785 return false; |
2521 | 2786 |
2522 /* The only statements that can throw an exception are assignments, | 2787 /* The only statements that can throw an exception are assignments, |
2525 { | 2790 { |
2526 case GIMPLE_RESX: | 2791 case GIMPLE_RESX: |
2527 return true; | 2792 return true; |
2528 | 2793 |
2529 case GIMPLE_CALL: | 2794 case GIMPLE_CALL: |
2530 return !gimple_call_nothrow_p (stmt); | 2795 return !gimple_call_nothrow_p (as_a <gcall *> (stmt)); |
2796 | |
2797 case GIMPLE_COND: | |
2798 { | |
2799 if (!cfun->can_throw_non_call_exceptions) | |
2800 return false; | |
2801 gcond *cond = as_a <gcond *> (stmt); | |
2802 tree lhs = gimple_cond_lhs (cond); | |
2803 return operation_could_trap_p (gimple_cond_code (cond), | |
2804 FLOAT_TYPE_P (TREE_TYPE (lhs)), | |
2805 false, NULL_TREE); | |
2806 } | |
2531 | 2807 |
2532 case GIMPLE_ASSIGN: | 2808 case GIMPLE_ASSIGN: |
2533 case GIMPLE_COND: | 2809 if (!cfun->can_throw_non_call_exceptions |
2534 if (!cfun->can_throw_non_call_exceptions) | 2810 || gimple_clobber_p (stmt)) |
2535 return false; | 2811 return false; |
2536 return stmt_could_throw_1_p (stmt); | 2812 return stmt_could_throw_1_p (as_a <gassign *> (stmt)); |
2537 | 2813 |
2538 case GIMPLE_ASM: | 2814 case GIMPLE_ASM: |
2539 if (!cfun->can_throw_non_call_exceptions) | 2815 if (!cfun->can_throw_non_call_exceptions) |
2540 return false; | 2816 return false; |
2541 return gimple_asm_volatile_p (stmt); | 2817 return gimple_asm_volatile_p (as_a <gasm *> (stmt)); |
2542 | 2818 |
2543 default: | 2819 default: |
2544 return false; | 2820 return false; |
2545 } | 2821 } |
2546 } | 2822 } |
2572 | 2848 |
2573 /* Return true if STMT can throw an exception that is not caught within | 2849 /* Return true if STMT can throw an exception that is not caught within |
2574 the current function (CFUN). */ | 2850 the current function (CFUN). */ |
2575 | 2851 |
2576 bool | 2852 bool |
2577 stmt_can_throw_external (gimple stmt) | 2853 stmt_can_throw_external (gimple *stmt) |
2578 { | 2854 { |
2579 int lp_nr; | 2855 int lp_nr; |
2580 | 2856 |
2581 if (!stmt_could_throw_p (stmt)) | 2857 if (!stmt_could_throw_p (stmt)) |
2582 return false; | 2858 return false; |
2587 | 2863 |
2588 /* Return true if STMT can throw an exception that is caught within | 2864 /* Return true if STMT can throw an exception that is caught within |
2589 the current function (CFUN). */ | 2865 the current function (CFUN). */ |
2590 | 2866 |
2591 bool | 2867 bool |
2592 stmt_can_throw_internal (gimple stmt) | 2868 stmt_can_throw_internal (gimple *stmt) |
2593 { | 2869 { |
2594 int lp_nr; | 2870 int lp_nr; |
2595 | 2871 |
2596 if (!stmt_could_throw_p (stmt)) | 2872 if (!stmt_could_throw_p (stmt)) |
2597 return false; | 2873 return false; |
2603 /* Given a statement STMT in IFUN, if STMT can no longer throw, then | 2879 /* Given a statement STMT in IFUN, if STMT can no longer throw, then |
2604 remove any entry it might have from the EH table. Return true if | 2880 remove any entry it might have from the EH table. Return true if |
2605 any change was made. */ | 2881 any change was made. */ |
2606 | 2882 |
2607 bool | 2883 bool |
2608 maybe_clean_eh_stmt_fn (struct function *ifun, gimple stmt) | 2884 maybe_clean_eh_stmt_fn (struct function *ifun, gimple *stmt) |
2609 { | 2885 { |
2610 if (stmt_could_throw_p (stmt)) | 2886 if (stmt_could_throw_p (stmt)) |
2611 return false; | 2887 return false; |
2612 return remove_stmt_from_eh_lp_fn (ifun, stmt); | 2888 return remove_stmt_from_eh_lp_fn (ifun, stmt); |
2613 } | 2889 } |
2614 | 2890 |
2615 /* Likewise, but always use the current function. */ | 2891 /* Likewise, but always use the current function. */ |
2616 | 2892 |
2617 bool | 2893 bool |
2618 maybe_clean_eh_stmt (gimple stmt) | 2894 maybe_clean_eh_stmt (gimple *stmt) |
2619 { | 2895 { |
2620 return maybe_clean_eh_stmt_fn (cfun, stmt); | 2896 return maybe_clean_eh_stmt_fn (cfun, stmt); |
2621 } | 2897 } |
2622 | 2898 |
2623 /* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced | 2899 /* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced |
2624 OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT | 2900 OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT |
2625 in the table if it should be in there. Return TRUE if a replacement was | 2901 in the table if it should be in there. Return TRUE if a replacement was |
2626 done that my require an EH edge purge. */ | 2902 done that my require an EH edge purge. */ |
2627 | 2903 |
2628 bool | 2904 bool |
2629 maybe_clean_or_replace_eh_stmt (gimple old_stmt, gimple new_stmt) | 2905 maybe_clean_or_replace_eh_stmt (gimple *old_stmt, gimple *new_stmt) |
2630 { | 2906 { |
2631 int lp_nr = lookup_stmt_eh_lp (old_stmt); | 2907 int lp_nr = lookup_stmt_eh_lp (old_stmt); |
2632 | 2908 |
2633 if (lp_nr != 0) | 2909 if (lp_nr != 0) |
2634 { | 2910 { |
2648 } | 2924 } |
2649 | 2925 |
2650 return false; | 2926 return false; |
2651 } | 2927 } |
2652 | 2928 |
2653 /* Given a statement OLD_STMT in OLD_FUN and a duplicate statment NEW_STMT | 2929 /* Given a statement OLD_STMT in OLD_FUN and a duplicate statement NEW_STMT |
2654 in NEW_FUN, copy the EH table data from OLD_STMT to NEW_STMT. The MAP | 2930 in NEW_FUN, copy the EH table data from OLD_STMT to NEW_STMT. The MAP |
2655 operand is the return value of duplicate_eh_regions. */ | 2931 operand is the return value of duplicate_eh_regions. */ |
2656 | 2932 |
2657 bool | 2933 bool |
2658 maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple new_stmt, | 2934 maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple *new_stmt, |
2659 struct function *old_fun, gimple old_stmt, | 2935 struct function *old_fun, gimple *old_stmt, |
2660 struct pointer_map_t *map, int default_lp_nr) | 2936 hash_map<void *, void *> *map, |
2937 int default_lp_nr) | |
2661 { | 2938 { |
2662 int old_lp_nr, new_lp_nr; | 2939 int old_lp_nr, new_lp_nr; |
2663 void **slot; | |
2664 | 2940 |
2665 if (!stmt_could_throw_p (new_stmt)) | 2941 if (!stmt_could_throw_p (new_stmt)) |
2666 return false; | 2942 return false; |
2667 | 2943 |
2668 old_lp_nr = lookup_stmt_eh_lp_fn (old_fun, old_stmt); | 2944 old_lp_nr = lookup_stmt_eh_lp_fn (old_fun, old_stmt); |
2674 } | 2950 } |
2675 else if (old_lp_nr > 0) | 2951 else if (old_lp_nr > 0) |
2676 { | 2952 { |
2677 eh_landing_pad old_lp, new_lp; | 2953 eh_landing_pad old_lp, new_lp; |
2678 | 2954 |
2679 old_lp = VEC_index (eh_landing_pad, old_fun->eh->lp_array, old_lp_nr); | 2955 old_lp = (*old_fun->eh->lp_array)[old_lp_nr]; |
2680 slot = pointer_map_contains (map, old_lp); | 2956 new_lp = static_cast<eh_landing_pad> (*map->get (old_lp)); |
2681 new_lp = (eh_landing_pad) *slot; | |
2682 new_lp_nr = new_lp->index; | 2957 new_lp_nr = new_lp->index; |
2683 } | 2958 } |
2684 else | 2959 else |
2685 { | 2960 { |
2686 eh_region old_r, new_r; | 2961 eh_region old_r, new_r; |
2687 | 2962 |
2688 old_r = VEC_index (eh_region, old_fun->eh->region_array, -old_lp_nr); | 2963 old_r = (*old_fun->eh->region_array)[-old_lp_nr]; |
2689 slot = pointer_map_contains (map, old_r); | 2964 new_r = static_cast<eh_region> (*map->get (old_r)); |
2690 new_r = (eh_region) *slot; | |
2691 new_lp_nr = -new_r->index; | 2965 new_lp_nr = -new_r->index; |
2692 } | 2966 } |
2693 | 2967 |
2694 add_stmt_to_eh_lp_fn (new_fun, new_stmt, new_lp_nr); | 2968 add_stmt_to_eh_lp_fn (new_fun, new_stmt, new_lp_nr); |
2695 return true; | 2969 return true; |
2697 | 2971 |
2698 /* Similar, but both OLD_STMT and NEW_STMT are within the current function, | 2972 /* Similar, but both OLD_STMT and NEW_STMT are within the current function, |
2699 and thus no remapping is required. */ | 2973 and thus no remapping is required. */ |
2700 | 2974 |
2701 bool | 2975 bool |
2702 maybe_duplicate_eh_stmt (gimple new_stmt, gimple old_stmt) | 2976 maybe_duplicate_eh_stmt (gimple *new_stmt, gimple *old_stmt) |
2703 { | 2977 { |
2704 int lp_nr; | 2978 int lp_nr; |
2705 | 2979 |
2706 if (!stmt_could_throw_p (new_stmt)) | 2980 if (!stmt_could_throw_p (new_stmt)) |
2707 return false; | 2981 return false; |
2722 | 2996 |
2723 static bool | 2997 static bool |
2724 same_handler_p (gimple_seq oneh, gimple_seq twoh) | 2998 same_handler_p (gimple_seq oneh, gimple_seq twoh) |
2725 { | 2999 { |
2726 gimple_stmt_iterator gsi; | 3000 gimple_stmt_iterator gsi; |
2727 gimple ones, twos; | 3001 gimple *ones, *twos; |
2728 unsigned int ai; | 3002 unsigned int ai; |
2729 | 3003 |
2730 gsi = gsi_start (oneh); | 3004 gsi = gsi_start (oneh); |
2731 if (!gsi_one_before_end_p (gsi)) | 3005 if (!gsi_one_before_end_p (gsi)) |
2732 return false; | 3006 return false; |
2741 || !is_gimple_call (twos) | 3015 || !is_gimple_call (twos) |
2742 || gimple_call_lhs (ones) | 3016 || gimple_call_lhs (ones) |
2743 || gimple_call_lhs (twos) | 3017 || gimple_call_lhs (twos) |
2744 || gimple_call_chain (ones) | 3018 || gimple_call_chain (ones) |
2745 || gimple_call_chain (twos) | 3019 || gimple_call_chain (twos) |
2746 || !operand_equal_p (gimple_call_fn (ones), gimple_call_fn (twos), 0) | 3020 || !gimple_call_same_target_p (ones, twos) |
2747 || gimple_call_num_args (ones) != gimple_call_num_args (twos)) | 3021 || gimple_call_num_args (ones) != gimple_call_num_args (twos)) |
2748 return false; | 3022 return false; |
2749 | 3023 |
2750 for (ai = 0; ai < gimple_call_num_args (ones); ++ai) | 3024 for (ai = 0; ai < gimple_call_num_args (ones); ++ai) |
2751 if (!operand_equal_p (gimple_call_arg (ones, ai), | 3025 if (!operand_equal_p (gimple_call_arg (ones, ai), |
2764 | 3038 |
2765 This occurs frequently in C++, where A is a local variable and B is a | 3039 This occurs frequently in C++, where A is a local variable and B is a |
2766 temporary used in the initializer for A. */ | 3040 temporary used in the initializer for A. */ |
2767 | 3041 |
2768 static void | 3042 static void |
2769 optimize_double_finally (gimple one, gimple two) | 3043 optimize_double_finally (gtry *one, gtry *two) |
2770 { | 3044 { |
2771 gimple oneh; | 3045 gimple *oneh; |
2772 gimple_stmt_iterator gsi; | 3046 gimple_stmt_iterator gsi; |
2773 | 3047 gimple_seq cleanup; |
2774 gsi = gsi_start (gimple_try_cleanup (one)); | 3048 |
3049 cleanup = gimple_try_cleanup (one); | |
3050 gsi = gsi_start (cleanup); | |
2775 if (!gsi_one_before_end_p (gsi)) | 3051 if (!gsi_one_before_end_p (gsi)) |
2776 return; | 3052 return; |
2777 | 3053 |
2778 oneh = gsi_stmt (gsi); | 3054 oneh = gsi_stmt (gsi); |
2779 if (gimple_code (oneh) != GIMPLE_TRY | 3055 if (gimple_code (oneh) != GIMPLE_TRY |
2797 | 3073 |
2798 static void | 3074 static void |
2799 refactor_eh_r (gimple_seq seq) | 3075 refactor_eh_r (gimple_seq seq) |
2800 { | 3076 { |
2801 gimple_stmt_iterator gsi; | 3077 gimple_stmt_iterator gsi; |
2802 gimple one, two; | 3078 gimple *one, *two; |
2803 | 3079 |
2804 one = NULL; | 3080 one = NULL; |
2805 two = NULL; | 3081 two = NULL; |
2806 gsi = gsi_start (seq); | 3082 gsi = gsi_start (seq); |
2807 while (1) | 3083 while (1) |
2809 one = two; | 3085 one = two; |
2810 if (gsi_end_p (gsi)) | 3086 if (gsi_end_p (gsi)) |
2811 two = NULL; | 3087 two = NULL; |
2812 else | 3088 else |
2813 two = gsi_stmt (gsi); | 3089 two = gsi_stmt (gsi); |
2814 if (one | 3090 if (one && two) |
2815 && two | 3091 if (gtry *try_one = dyn_cast <gtry *> (one)) |
2816 && gimple_code (one) == GIMPLE_TRY | 3092 if (gtry *try_two = dyn_cast <gtry *> (two)) |
2817 && gimple_code (two) == GIMPLE_TRY | 3093 if (gimple_try_kind (try_one) == GIMPLE_TRY_FINALLY |
2818 && gimple_try_kind (one) == GIMPLE_TRY_FINALLY | 3094 && gimple_try_kind (try_two) == GIMPLE_TRY_FINALLY) |
2819 && gimple_try_kind (two) == GIMPLE_TRY_FINALLY) | 3095 optimize_double_finally (try_one, try_two); |
2820 optimize_double_finally (one, two); | |
2821 if (one) | 3096 if (one) |
2822 switch (gimple_code (one)) | 3097 switch (gimple_code (one)) |
2823 { | 3098 { |
2824 case GIMPLE_TRY: | 3099 case GIMPLE_TRY: |
2825 refactor_eh_r (gimple_try_eval (one)); | 3100 refactor_eh_r (gimple_try_eval (one)); |
2826 refactor_eh_r (gimple_try_cleanup (one)); | 3101 refactor_eh_r (gimple_try_cleanup (one)); |
2827 break; | 3102 break; |
2828 case GIMPLE_CATCH: | 3103 case GIMPLE_CATCH: |
2829 refactor_eh_r (gimple_catch_handler (one)); | 3104 refactor_eh_r (gimple_catch_handler (as_a <gcatch *> (one))); |
2830 break; | 3105 break; |
2831 case GIMPLE_EH_FILTER: | 3106 case GIMPLE_EH_FILTER: |
2832 refactor_eh_r (gimple_eh_filter_failure (one)); | 3107 refactor_eh_r (gimple_eh_filter_failure (one)); |
3108 break; | |
3109 case GIMPLE_EH_ELSE: | |
3110 { | |
3111 geh_else *eh_else_stmt = as_a <geh_else *> (one); | |
3112 refactor_eh_r (gimple_eh_else_n_body (eh_else_stmt)); | |
3113 refactor_eh_r (gimple_eh_else_e_body (eh_else_stmt)); | |
3114 } | |
2833 break; | 3115 break; |
2834 default: | 3116 default: |
2835 break; | 3117 break; |
2836 } | 3118 } |
2837 if (two) | 3119 if (two) |
2839 else | 3121 else |
2840 break; | 3122 break; |
2841 } | 3123 } |
2842 } | 3124 } |
2843 | 3125 |
2844 static unsigned | 3126 namespace { |
2845 refactor_eh (void) | 3127 |
2846 { | 3128 const pass_data pass_data_refactor_eh = |
2847 refactor_eh_r (gimple_body (current_function_decl)); | 3129 { |
2848 return 0; | 3130 GIMPLE_PASS, /* type */ |
2849 } | 3131 "ehopt", /* name */ |
2850 | 3132 OPTGROUP_NONE, /* optinfo_flags */ |
2851 static bool | 3133 TV_TREE_EH, /* tv_id */ |
2852 gate_refactor_eh (void) | 3134 PROP_gimple_lcf, /* properties_required */ |
2853 { | 3135 0, /* properties_provided */ |
2854 return flag_exceptions != 0; | 3136 0, /* properties_destroyed */ |
2855 } | 3137 0, /* todo_flags_start */ |
2856 | 3138 0, /* todo_flags_finish */ |
2857 struct gimple_opt_pass pass_refactor_eh = | |
2858 { | |
2859 { | |
2860 GIMPLE_PASS, | |
2861 "ehopt", /* name */ | |
2862 gate_refactor_eh, /* gate */ | |
2863 refactor_eh, /* execute */ | |
2864 NULL, /* sub */ | |
2865 NULL, /* next */ | |
2866 0, /* static_pass_number */ | |
2867 TV_TREE_EH, /* tv_id */ | |
2868 PROP_gimple_lcf, /* properties_required */ | |
2869 0, /* properties_provided */ | |
2870 0, /* properties_destroyed */ | |
2871 0, /* todo_flags_start */ | |
2872 TODO_dump_func /* todo_flags_finish */ | |
2873 } | |
2874 }; | 3139 }; |
3140 | |
3141 class pass_refactor_eh : public gimple_opt_pass | |
3142 { | |
3143 public: | |
3144 pass_refactor_eh (gcc::context *ctxt) | |
3145 : gimple_opt_pass (pass_data_refactor_eh, ctxt) | |
3146 {} | |
3147 | |
3148 /* opt_pass methods: */ | |
3149 virtual bool gate (function *) { return flag_exceptions != 0; } | |
3150 virtual unsigned int execute (function *) | |
3151 { | |
3152 refactor_eh_r (gimple_body (current_function_decl)); | |
3153 return 0; | |
3154 } | |
3155 | |
3156 }; // class pass_refactor_eh | |
3157 | |
3158 } // anon namespace | |
3159 | |
3160 gimple_opt_pass * | |
3161 make_pass_refactor_eh (gcc::context *ctxt) | |
3162 { | |
3163 return new pass_refactor_eh (ctxt); | |
3164 } | |
2875 | 3165 |
2876 /* At the end of gimple optimization, we can lower RESX. */ | 3166 /* At the end of gimple optimization, we can lower RESX. */ |
2877 | 3167 |
2878 static bool | 3168 static bool |
2879 lower_resx (basic_block bb, gimple stmt, struct pointer_map_t *mnt_map) | 3169 lower_resx (basic_block bb, gresx *stmt, |
3170 hash_map<eh_region, tree> *mnt_map) | |
2880 { | 3171 { |
2881 int lp_nr; | 3172 int lp_nr; |
2882 eh_region src_r, dst_r; | 3173 eh_region src_r, dst_r; |
2883 gimple_stmt_iterator gsi; | 3174 gimple_stmt_iterator gsi; |
2884 gimple x; | 3175 gimple *x; |
2885 tree fn, src_nr; | 3176 tree fn, src_nr; |
2886 bool ret = false; | 3177 bool ret = false; |
2887 | 3178 |
2888 lp_nr = lookup_stmt_eh_lp (stmt); | 3179 lp_nr = lookup_stmt_eh_lp (stmt); |
2889 if (lp_nr != 0) | 3180 if (lp_nr != 0) |
2902 happen without optimization when the switch statement created by | 3193 happen without optimization when the switch statement created by |
2903 lower_try_finally_switch isn't simplified to remove the eh case. | 3194 lower_try_finally_switch isn't simplified to remove the eh case. |
2904 | 3195 |
2905 Resolve this by expanding the resx node to an abort. */ | 3196 Resolve this by expanding the resx node to an abort. */ |
2906 | 3197 |
2907 fn = implicit_built_in_decls[BUILT_IN_TRAP]; | 3198 fn = builtin_decl_implicit (BUILT_IN_TRAP); |
2908 x = gimple_build_call (fn, 0); | 3199 x = gimple_build_call (fn, 0); |
2909 gsi_insert_before (&gsi, x, GSI_SAME_STMT); | 3200 gsi_insert_before (&gsi, x, GSI_SAME_STMT); |
2910 | 3201 |
2911 while (EDGE_COUNT (bb->succs) > 0) | 3202 while (EDGE_COUNT (bb->succs) > 0) |
2912 remove_edge (EDGE_SUCC (bb, 0)); | 3203 remove_edge (EDGE_SUCC (bb, 0)); |
2919 edge e; | 3210 edge e; |
2920 | 3211 |
2921 if (lp_nr < 0) | 3212 if (lp_nr < 0) |
2922 { | 3213 { |
2923 basic_block new_bb; | 3214 basic_block new_bb; |
2924 void **slot; | |
2925 tree lab; | 3215 tree lab; |
2926 | 3216 |
2927 /* We are resuming into a MUST_NOT_CALL region. Expand a call to | 3217 /* We are resuming into a MUST_NOT_CALL region. Expand a call to |
2928 the failure decl into a new block, if needed. */ | 3218 the failure decl into a new block, if needed. */ |
2929 gcc_assert (dst_r->type == ERT_MUST_NOT_THROW); | 3219 gcc_assert (dst_r->type == ERT_MUST_NOT_THROW); |
2930 | 3220 |
2931 slot = pointer_map_contains (mnt_map, dst_r); | 3221 tree *slot = mnt_map->get (dst_r); |
2932 if (slot == NULL) | 3222 if (slot == NULL) |
2933 { | 3223 { |
2934 gimple_stmt_iterator gsi2; | 3224 gimple_stmt_iterator gsi2; |
2935 | 3225 |
2936 new_bb = create_empty_bb (bb); | 3226 new_bb = create_empty_bb (bb); |
3227 add_bb_to_loop (new_bb, bb->loop_father); | |
2937 lab = gimple_block_label (new_bb); | 3228 lab = gimple_block_label (new_bb); |
2938 gsi2 = gsi_start_bb (new_bb); | 3229 gsi2 = gsi_start_bb (new_bb); |
2939 | 3230 |
2940 fn = dst_r->u.must_not_throw.failure_decl; | 3231 fn = dst_r->u.must_not_throw.failure_decl; |
2941 x = gimple_build_call (fn, 0); | 3232 x = gimple_build_call (fn, 0); |
2942 gimple_set_location (x, dst_r->u.must_not_throw.failure_loc); | 3233 gimple_set_location (x, dst_r->u.must_not_throw.failure_loc); |
2943 gsi_insert_after (&gsi2, x, GSI_CONTINUE_LINKING); | 3234 gsi_insert_after (&gsi2, x, GSI_CONTINUE_LINKING); |
2944 | 3235 |
2945 slot = pointer_map_insert (mnt_map, dst_r); | 3236 mnt_map->put (dst_r, lab); |
2946 *slot = lab; | |
2947 } | 3237 } |
2948 else | 3238 else |
2949 { | 3239 { |
2950 lab = (tree) *slot; | 3240 lab = *slot; |
2951 new_bb = label_to_block (lab); | 3241 new_bb = label_to_block (lab); |
2952 } | 3242 } |
2953 | 3243 |
2954 gcc_assert (EDGE_COUNT (bb->succs) == 0); | 3244 gcc_assert (EDGE_COUNT (bb->succs) == 0); |
2955 e = make_edge (bb, new_bb, EDGE_FALLTHRU); | 3245 e = make_single_succ_edge (bb, new_bb, EDGE_FALLTHRU); |
2956 e->count = bb->count; | |
2957 e->probability = REG_BR_PROB_BASE; | |
2958 } | 3246 } |
2959 else | 3247 else |
2960 { | 3248 { |
2961 edge_iterator ei; | 3249 edge_iterator ei; |
2962 tree dst_nr = build_int_cst (NULL, dst_r->index); | 3250 tree dst_nr = build_int_cst (integer_type_node, dst_r->index); |
2963 | 3251 |
2964 fn = implicit_built_in_decls[BUILT_IN_EH_COPY_VALUES]; | 3252 fn = builtin_decl_implicit (BUILT_IN_EH_COPY_VALUES); |
2965 src_nr = build_int_cst (NULL, src_r->index); | 3253 src_nr = build_int_cst (integer_type_node, src_r->index); |
2966 x = gimple_build_call (fn, 2, dst_nr, src_nr); | 3254 x = gimple_build_call (fn, 2, dst_nr, src_nr); |
2967 gsi_insert_before (&gsi, x, GSI_SAME_STMT); | 3255 gsi_insert_before (&gsi, x, GSI_SAME_STMT); |
2968 | 3256 |
2969 /* Update the flags for the outgoing edge. */ | 3257 /* Update the flags for the outgoing edge. */ |
2970 e = single_succ_edge (bb); | 3258 e = single_succ_edge (bb); |
2971 gcc_assert (e->flags & EDGE_EH); | 3259 gcc_assert (e->flags & EDGE_EH); |
2972 e->flags = (e->flags & ~EDGE_EH) | EDGE_FALLTHRU; | 3260 e->flags = (e->flags & ~EDGE_EH) | EDGE_FALLTHRU; |
3261 e->probability = profile_probability::always (); | |
2973 | 3262 |
2974 /* If there are no more EH users of the landing pad, delete it. */ | 3263 /* If there are no more EH users of the landing pad, delete it. */ |
2975 FOR_EACH_EDGE (e, ei, e->dest->preds) | 3264 FOR_EACH_EDGE (e, ei, e->dest->preds) |
2976 if (e->flags & EDGE_EH) | 3265 if (e->flags & EDGE_EH) |
2977 break; | 3266 break; |
2991 /* When we don't have a destination region, this exception escapes | 3280 /* When we don't have a destination region, this exception escapes |
2992 up the call chain. We resolve this by generating a call to the | 3281 up the call chain. We resolve this by generating a call to the |
2993 _Unwind_Resume library function. */ | 3282 _Unwind_Resume library function. */ |
2994 | 3283 |
2995 /* The ARM EABI redefines _Unwind_Resume as __cxa_end_cleanup | 3284 /* The ARM EABI redefines _Unwind_Resume as __cxa_end_cleanup |
2996 with no arguments for C++ and Java. Check for that. */ | 3285 with no arguments for C++. Check for that. */ |
2997 if (src_r->use_cxa_end_cleanup) | 3286 if (src_r->use_cxa_end_cleanup) |
2998 { | 3287 { |
2999 fn = implicit_built_in_decls[BUILT_IN_CXA_END_CLEANUP]; | 3288 fn = builtin_decl_implicit (BUILT_IN_CXA_END_CLEANUP); |
3000 x = gimple_build_call (fn, 0); | 3289 x = gimple_build_call (fn, 0); |
3001 gsi_insert_before (&gsi, x, GSI_SAME_STMT); | 3290 gsi_insert_before (&gsi, x, GSI_SAME_STMT); |
3002 } | 3291 } |
3003 else | 3292 else |
3004 { | 3293 { |
3005 fn = implicit_built_in_decls[BUILT_IN_EH_POINTER]; | 3294 fn = builtin_decl_implicit (BUILT_IN_EH_POINTER); |
3006 src_nr = build_int_cst (NULL, src_r->index); | 3295 src_nr = build_int_cst (integer_type_node, src_r->index); |
3007 x = gimple_build_call (fn, 1, src_nr); | 3296 x = gimple_build_call (fn, 1, src_nr); |
3008 var = create_tmp_var (ptr_type_node, NULL); | 3297 var = create_tmp_var (ptr_type_node); |
3009 var = make_ssa_name (var, x); | 3298 var = make_ssa_name (var, x); |
3010 gimple_call_set_lhs (x, var); | 3299 gimple_call_set_lhs (x, var); |
3011 gsi_insert_before (&gsi, x, GSI_SAME_STMT); | 3300 gsi_insert_before (&gsi, x, GSI_SAME_STMT); |
3012 | 3301 |
3013 fn = implicit_built_in_decls[BUILT_IN_UNWIND_RESUME]; | 3302 /* When exception handling is delegated to a caller function, we |
3303 have to guarantee that shadow memory variables living on stack | |
3304 will be cleaner before control is given to a parent function. */ | |
3305 if (sanitize_flags_p (SANITIZE_ADDRESS)) | |
3306 { | |
3307 tree decl | |
3308 = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN); | |
3309 gimple *g = gimple_build_call (decl, 0); | |
3310 gimple_set_location (g, gimple_location (stmt)); | |
3311 gsi_insert_before (&gsi, g, GSI_SAME_STMT); | |
3312 } | |
3313 | |
3314 fn = builtin_decl_implicit (BUILT_IN_UNWIND_RESUME); | |
3014 x = gimple_build_call (fn, 1, var); | 3315 x = gimple_build_call (fn, 1, var); |
3015 gsi_insert_before (&gsi, x, GSI_SAME_STMT); | 3316 gsi_insert_before (&gsi, x, GSI_SAME_STMT); |
3016 } | 3317 } |
3017 | 3318 |
3018 gcc_assert (EDGE_COUNT (bb->succs) == 0); | 3319 gcc_assert (EDGE_COUNT (bb->succs) == 0); |
3021 gsi_remove (&gsi, true); | 3322 gsi_remove (&gsi, true); |
3022 | 3323 |
3023 return ret; | 3324 return ret; |
3024 } | 3325 } |
3025 | 3326 |
3026 static unsigned | 3327 namespace { |
3027 execute_lower_resx (void) | 3328 |
3329 const pass_data pass_data_lower_resx = | |
3330 { | |
3331 GIMPLE_PASS, /* type */ | |
3332 "resx", /* name */ | |
3333 OPTGROUP_NONE, /* optinfo_flags */ | |
3334 TV_TREE_EH, /* tv_id */ | |
3335 PROP_gimple_lcf, /* properties_required */ | |
3336 0, /* properties_provided */ | |
3337 0, /* properties_destroyed */ | |
3338 0, /* todo_flags_start */ | |
3339 0, /* todo_flags_finish */ | |
3340 }; | |
3341 | |
3342 class pass_lower_resx : public gimple_opt_pass | |
3343 { | |
3344 public: | |
3345 pass_lower_resx (gcc::context *ctxt) | |
3346 : gimple_opt_pass (pass_data_lower_resx, ctxt) | |
3347 {} | |
3348 | |
3349 /* opt_pass methods: */ | |
3350 virtual bool gate (function *) { return flag_exceptions != 0; } | |
3351 virtual unsigned int execute (function *); | |
3352 | |
3353 }; // class pass_lower_resx | |
3354 | |
3355 unsigned | |
3356 pass_lower_resx::execute (function *fun) | |
3028 { | 3357 { |
3029 basic_block bb; | 3358 basic_block bb; |
3030 struct pointer_map_t *mnt_map; | |
3031 bool dominance_invalidated = false; | 3359 bool dominance_invalidated = false; |
3032 bool any_rewritten = false; | 3360 bool any_rewritten = false; |
3033 | 3361 |
3034 mnt_map = pointer_map_create (); | 3362 hash_map<eh_region, tree> mnt_map; |
3035 | 3363 |
3036 FOR_EACH_BB (bb) | 3364 FOR_EACH_BB_FN (bb, fun) |
3037 { | 3365 { |
3038 gimple last = last_stmt (bb); | 3366 gimple *last = last_stmt (bb); |
3039 if (last && is_gimple_resx (last)) | 3367 if (last && is_gimple_resx (last)) |
3040 { | 3368 { |
3041 dominance_invalidated |= lower_resx (bb, last, mnt_map); | 3369 dominance_invalidated |= |
3370 lower_resx (bb, as_a <gresx *> (last), &mnt_map); | |
3042 any_rewritten = true; | 3371 any_rewritten = true; |
3043 } | 3372 } |
3044 } | 3373 } |
3045 | |
3046 pointer_map_destroy (mnt_map); | |
3047 | 3374 |
3048 if (dominance_invalidated) | 3375 if (dominance_invalidated) |
3049 { | 3376 { |
3050 free_dominance_info (CDI_DOMINATORS); | 3377 free_dominance_info (CDI_DOMINATORS); |
3051 free_dominance_info (CDI_POST_DOMINATORS); | 3378 free_dominance_info (CDI_POST_DOMINATORS); |
3052 } | 3379 } |
3053 | 3380 |
3054 return any_rewritten ? TODO_update_ssa_only_virtuals : 0; | 3381 return any_rewritten ? TODO_update_ssa_only_virtuals : 0; |
3055 } | 3382 } |
3056 | 3383 |
3057 static bool | 3384 } // anon namespace |
3058 gate_lower_resx (void) | 3385 |
3059 { | 3386 gimple_opt_pass * |
3060 return flag_exceptions != 0; | 3387 make_pass_lower_resx (gcc::context *ctxt) |
3061 } | 3388 { |
3062 | 3389 return new pass_lower_resx (ctxt); |
3063 struct gimple_opt_pass pass_lower_resx = | 3390 } |
3064 { | 3391 |
3065 { | 3392 /* Try to optimize var = {v} {CLOBBER} stmts followed just by |
3066 GIMPLE_PASS, | 3393 external throw. */ |
3067 "resx", /* name */ | 3394 |
3068 gate_lower_resx, /* gate */ | 3395 static void |
3069 execute_lower_resx, /* execute */ | 3396 optimize_clobbers (basic_block bb) |
3070 NULL, /* sub */ | 3397 { |
3071 NULL, /* next */ | 3398 gimple_stmt_iterator gsi = gsi_last_bb (bb); |
3072 0, /* static_pass_number */ | 3399 bool any_clobbers = false; |
3073 TV_TREE_EH, /* tv_id */ | 3400 bool seen_stack_restore = false; |
3074 PROP_gimple_lcf, /* properties_required */ | 3401 edge_iterator ei; |
3075 0, /* properties_provided */ | 3402 edge e; |
3076 0, /* properties_destroyed */ | 3403 |
3077 0, /* todo_flags_start */ | 3404 /* Only optimize anything if the bb contains at least one clobber, |
3078 TODO_dump_func | TODO_verify_flow /* todo_flags_finish */ | 3405 ends with resx (checked by caller), optionally contains some |
3079 } | 3406 debug stmts or labels, or at most one __builtin_stack_restore |
3080 }; | 3407 call, and has an incoming EH edge. */ |
3081 | 3408 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi)) |
3409 { | |
3410 gimple *stmt = gsi_stmt (gsi); | |
3411 if (is_gimple_debug (stmt)) | |
3412 continue; | |
3413 if (gimple_clobber_p (stmt)) | |
3414 { | |
3415 any_clobbers = true; | |
3416 continue; | |
3417 } | |
3418 if (!seen_stack_restore | |
3419 && gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE)) | |
3420 { | |
3421 seen_stack_restore = true; | |
3422 continue; | |
3423 } | |
3424 if (gimple_code (stmt) == GIMPLE_LABEL) | |
3425 break; | |
3426 return; | |
3427 } | |
3428 if (!any_clobbers) | |
3429 return; | |
3430 FOR_EACH_EDGE (e, ei, bb->preds) | |
3431 if (e->flags & EDGE_EH) | |
3432 break; | |
3433 if (e == NULL) | |
3434 return; | |
3435 gsi = gsi_last_bb (bb); | |
3436 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi)) | |
3437 { | |
3438 gimple *stmt = gsi_stmt (gsi); | |
3439 if (!gimple_clobber_p (stmt)) | |
3440 continue; | |
3441 unlink_stmt_vdef (stmt); | |
3442 gsi_remove (&gsi, true); | |
3443 release_defs (stmt); | |
3444 } | |
3445 } | |
3446 | |
3447 /* Try to sink var = {v} {CLOBBER} stmts followed just by | |
3448 internal throw to successor BB. */ | |
3449 | |
3450 static int | |
3451 sink_clobbers (basic_block bb) | |
3452 { | |
3453 edge e; | |
3454 edge_iterator ei; | |
3455 gimple_stmt_iterator gsi, dgsi; | |
3456 basic_block succbb; | |
3457 bool any_clobbers = false; | |
3458 unsigned todo = 0; | |
3459 | |
3460 /* Only optimize if BB has a single EH successor and | |
3461 all predecessor edges are EH too. */ | |
3462 if (!single_succ_p (bb) | |
3463 || (single_succ_edge (bb)->flags & EDGE_EH) == 0) | |
3464 return 0; | |
3465 | |
3466 FOR_EACH_EDGE (e, ei, bb->preds) | |
3467 { | |
3468 if ((e->flags & EDGE_EH) == 0) | |
3469 return 0; | |
3470 } | |
3471 | |
3472 /* And BB contains only CLOBBER stmts before the final | |
3473 RESX. */ | |
3474 gsi = gsi_last_bb (bb); | |
3475 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi)) | |
3476 { | |
3477 gimple *stmt = gsi_stmt (gsi); | |
3478 if (is_gimple_debug (stmt)) | |
3479 continue; | |
3480 if (gimple_code (stmt) == GIMPLE_LABEL) | |
3481 break; | |
3482 if (!gimple_clobber_p (stmt)) | |
3483 return 0; | |
3484 any_clobbers = true; | |
3485 } | |
3486 if (!any_clobbers) | |
3487 return 0; | |
3488 | |
3489 edge succe = single_succ_edge (bb); | |
3490 succbb = succe->dest; | |
3491 | |
3492 /* See if there is a virtual PHI node to take an updated virtual | |
3493 operand from. */ | |
3494 gphi *vphi = NULL; | |
3495 tree vuse = NULL_TREE; | |
3496 for (gphi_iterator gpi = gsi_start_phis (succbb); | |
3497 !gsi_end_p (gpi); gsi_next (&gpi)) | |
3498 { | |
3499 tree res = gimple_phi_result (gpi.phi ()); | |
3500 if (virtual_operand_p (res)) | |
3501 { | |
3502 vphi = gpi.phi (); | |
3503 vuse = res; | |
3504 break; | |
3505 } | |
3506 } | |
3507 | |
3508 dgsi = gsi_after_labels (succbb); | |
3509 gsi = gsi_last_bb (bb); | |
3510 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi)) | |
3511 { | |
3512 gimple *stmt = gsi_stmt (gsi); | |
3513 tree lhs; | |
3514 if (is_gimple_debug (stmt)) | |
3515 continue; | |
3516 if (gimple_code (stmt) == GIMPLE_LABEL) | |
3517 break; | |
3518 lhs = gimple_assign_lhs (stmt); | |
3519 /* Unfortunately we don't have dominance info updated at this | |
3520 point, so checking if | |
3521 dominated_by_p (CDI_DOMINATORS, succbb, | |
3522 gimple_bb (SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0))) | |
3523 would be too costly. Thus, avoid sinking any clobbers that | |
3524 refer to non-(D) SSA_NAMEs. */ | |
3525 if (TREE_CODE (lhs) == MEM_REF | |
3526 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME | |
3527 && !SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (lhs, 0))) | |
3528 { | |
3529 unlink_stmt_vdef (stmt); | |
3530 gsi_remove (&gsi, true); | |
3531 release_defs (stmt); | |
3532 continue; | |
3533 } | |
3534 | |
3535 /* As we do not change stmt order when sinking across a | |
3536 forwarder edge we can keep virtual operands in place. */ | |
3537 gsi_remove (&gsi, false); | |
3538 gsi_insert_before (&dgsi, stmt, GSI_NEW_STMT); | |
3539 | |
3540 /* But adjust virtual operands if we sunk across a PHI node. */ | |
3541 if (vuse) | |
3542 { | |
3543 gimple *use_stmt; | |
3544 imm_use_iterator iter; | |
3545 use_operand_p use_p; | |
3546 FOR_EACH_IMM_USE_STMT (use_stmt, iter, vuse) | |
3547 FOR_EACH_IMM_USE_ON_STMT (use_p, iter) | |
3548 SET_USE (use_p, gimple_vdef (stmt)); | |
3549 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse)) | |
3550 { | |
3551 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (stmt)) = 1; | |
3552 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 0; | |
3553 } | |
3554 /* Adjust the incoming virtual operand. */ | |
3555 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (vphi, succe), gimple_vuse (stmt)); | |
3556 SET_USE (gimple_vuse_op (stmt), vuse); | |
3557 } | |
3558 /* If there isn't a single predecessor but no virtual PHI node | |
3559 arrange for virtual operands to be renamed. */ | |
3560 else if (gimple_vuse_op (stmt) != NULL_USE_OPERAND_P | |
3561 && !single_pred_p (succbb)) | |
3562 { | |
3563 /* In this case there will be no use of the VDEF of this stmt. | |
3564 ??? Unless this is a secondary opportunity and we have not | |
3565 removed unreachable blocks yet, so we cannot assert this. | |
3566 Which also means we will end up renaming too many times. */ | |
3567 SET_USE (gimple_vuse_op (stmt), gimple_vop (cfun)); | |
3568 mark_virtual_operands_for_renaming (cfun); | |
3569 todo |= TODO_update_ssa_only_virtuals; | |
3570 } | |
3571 } | |
3572 | |
3573 return todo; | |
3574 } | |
3082 | 3575 |
3083 /* At the end of inlining, we can lower EH_DISPATCH. Return true when | 3576 /* At the end of inlining, we can lower EH_DISPATCH. Return true when |
3084 we have found some duplicate labels and removed some edges. */ | 3577 we have found some duplicate labels and removed some edges. */ |
3085 | 3578 |
3086 static bool | 3579 static bool |
3087 lower_eh_dispatch (basic_block src, gimple stmt) | 3580 lower_eh_dispatch (basic_block src, geh_dispatch *stmt) |
3088 { | 3581 { |
3089 gimple_stmt_iterator gsi; | 3582 gimple_stmt_iterator gsi; |
3090 int region_nr; | 3583 int region_nr; |
3091 eh_region r; | 3584 eh_region r; |
3092 tree filter, fn; | 3585 tree filter, fn; |
3093 gimple x; | 3586 gimple *x; |
3094 bool redirected = false; | 3587 bool redirected = false; |
3095 | 3588 |
3096 region_nr = gimple_eh_dispatch_region (stmt); | 3589 region_nr = gimple_eh_dispatch_region (stmt); |
3097 r = get_eh_region_from_number (region_nr); | 3590 r = get_eh_region_from_number (region_nr); |
3098 | 3591 |
3100 | 3593 |
3101 switch (r->type) | 3594 switch (r->type) |
3102 { | 3595 { |
3103 case ERT_TRY: | 3596 case ERT_TRY: |
3104 { | 3597 { |
3105 VEC (tree, heap) *labels = NULL; | 3598 auto_vec<tree> labels; |
3106 tree default_label = NULL; | 3599 tree default_label = NULL; |
3107 eh_catch c; | 3600 eh_catch c; |
3108 edge_iterator ei; | 3601 edge_iterator ei; |
3109 edge e; | 3602 edge e; |
3110 struct pointer_set_t *seen_values = pointer_set_create (); | 3603 hash_set<tree> seen_values; |
3111 | 3604 |
3112 /* Collect the labels for a switch. Zero the post_landing_pad | 3605 /* Collect the labels for a switch. Zero the post_landing_pad |
3113 field becase we'll no longer have anything keeping these labels | 3606 field becase we'll no longer have anything keeping these labels |
3114 in existance and the optimizer will be free to merge these | 3607 in existence and the optimizer will be free to merge these |
3115 blocks at will. */ | 3608 blocks at will. */ |
3116 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch) | 3609 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch) |
3117 { | 3610 { |
3118 tree tp_node, flt_node, lab = c->label; | 3611 tree tp_node, flt_node, lab = c->label; |
3119 bool have_label = false; | 3612 bool have_label = false; |
3132 /* Filter out duplicate labels that arise when this handler | 3625 /* Filter out duplicate labels that arise when this handler |
3133 is shadowed by an earlier one. When no labels are | 3626 is shadowed by an earlier one. When no labels are |
3134 attached to the handler anymore, we remove | 3627 attached to the handler anymore, we remove |
3135 the corresponding edge and then we delete unreachable | 3628 the corresponding edge and then we delete unreachable |
3136 blocks at the end of this pass. */ | 3629 blocks at the end of this pass. */ |
3137 if (! pointer_set_contains (seen_values, TREE_VALUE (flt_node))) | 3630 if (! seen_values.contains (TREE_VALUE (flt_node))) |
3138 { | 3631 { |
3139 tree t = build3 (CASE_LABEL_EXPR, void_type_node, | 3632 tree t = build_case_label (TREE_VALUE (flt_node), |
3140 TREE_VALUE (flt_node), NULL, lab); | 3633 NULL, lab); |
3141 VEC_safe_push (tree, heap, labels, t); | 3634 labels.safe_push (t); |
3142 pointer_set_insert (seen_values, TREE_VALUE (flt_node)); | 3635 seen_values.add (TREE_VALUE (flt_node)); |
3143 have_label = true; | 3636 have_label = true; |
3144 } | 3637 } |
3145 | 3638 |
3146 tp_node = TREE_CHAIN (tp_node); | 3639 tp_node = TREE_CHAIN (tp_node); |
3147 flt_node = TREE_CHAIN (flt_node); | 3640 flt_node = TREE_CHAIN (flt_node); |
3167 } | 3660 } |
3168 gcc_assert (default_label != NULL); | 3661 gcc_assert (default_label != NULL); |
3169 | 3662 |
3170 /* Don't generate a switch if there's only a default case. | 3663 /* Don't generate a switch if there's only a default case. |
3171 This is common in the form of try { A; } catch (...) { B; }. */ | 3664 This is common in the form of try { A; } catch (...) { B; }. */ |
3172 if (labels == NULL) | 3665 if (!labels.exists ()) |
3173 { | 3666 { |
3174 e = single_succ_edge (src); | 3667 e = single_succ_edge (src); |
3175 e->flags |= EDGE_FALLTHRU; | 3668 e->flags |= EDGE_FALLTHRU; |
3176 } | 3669 } |
3177 else | 3670 else |
3178 { | 3671 { |
3179 fn = implicit_built_in_decls[BUILT_IN_EH_FILTER]; | 3672 fn = builtin_decl_implicit (BUILT_IN_EH_FILTER); |
3180 x = gimple_build_call (fn, 1, build_int_cst (NULL, region_nr)); | 3673 x = gimple_build_call (fn, 1, build_int_cst (integer_type_node, |
3181 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL); | 3674 region_nr)); |
3675 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn))); | |
3182 filter = make_ssa_name (filter, x); | 3676 filter = make_ssa_name (filter, x); |
3183 gimple_call_set_lhs (x, filter); | 3677 gimple_call_set_lhs (x, filter); |
3184 gsi_insert_before (&gsi, x, GSI_SAME_STMT); | 3678 gsi_insert_before (&gsi, x, GSI_SAME_STMT); |
3185 | 3679 |
3186 /* Turn the default label into a default case. */ | 3680 /* Turn the default label into a default case. */ |
3187 default_label = build3 (CASE_LABEL_EXPR, void_type_node, | 3681 default_label = build_case_label (NULL, NULL, default_label); |
3188 NULL, NULL, default_label); | |
3189 sort_case_labels (labels); | 3682 sort_case_labels (labels); |
3190 | 3683 |
3191 x = gimple_build_switch_vec (filter, default_label, labels); | 3684 x = gimple_build_switch (filter, default_label, labels); |
3192 gsi_insert_before (&gsi, x, GSI_SAME_STMT); | 3685 gsi_insert_before (&gsi, x, GSI_SAME_STMT); |
3193 | |
3194 VEC_free (tree, heap, labels); | |
3195 } | 3686 } |
3196 pointer_set_destroy (seen_values); | |
3197 } | 3687 } |
3198 break; | 3688 break; |
3199 | 3689 |
3200 case ERT_ALLOWED_EXCEPTIONS: | 3690 case ERT_ALLOWED_EXCEPTIONS: |
3201 { | 3691 { |
3202 edge b_e = BRANCH_EDGE (src); | 3692 edge b_e = BRANCH_EDGE (src); |
3203 edge f_e = FALLTHRU_EDGE (src); | 3693 edge f_e = FALLTHRU_EDGE (src); |
3204 | 3694 |
3205 fn = implicit_built_in_decls[BUILT_IN_EH_FILTER]; | 3695 fn = builtin_decl_implicit (BUILT_IN_EH_FILTER); |
3206 x = gimple_build_call (fn, 1, build_int_cst (NULL, region_nr)); | 3696 x = gimple_build_call (fn, 1, build_int_cst (integer_type_node, |
3207 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL); | 3697 region_nr)); |
3698 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn))); | |
3208 filter = make_ssa_name (filter, x); | 3699 filter = make_ssa_name (filter, x); |
3209 gimple_call_set_lhs (x, filter); | 3700 gimple_call_set_lhs (x, filter); |
3210 gsi_insert_before (&gsi, x, GSI_SAME_STMT); | 3701 gsi_insert_before (&gsi, x, GSI_SAME_STMT); |
3211 | 3702 |
3212 r->u.allowed.label = NULL; | 3703 r->u.allowed.label = NULL; |
3228 /* Replace the EH_DISPATCH with the SWITCH or COND generated above. */ | 3719 /* Replace the EH_DISPATCH with the SWITCH or COND generated above. */ |
3229 gsi_remove (&gsi, true); | 3720 gsi_remove (&gsi, true); |
3230 return redirected; | 3721 return redirected; |
3231 } | 3722 } |
3232 | 3723 |
3233 static unsigned | 3724 namespace { |
3234 execute_lower_eh_dispatch (void) | 3725 |
3726 const pass_data pass_data_lower_eh_dispatch = | |
3727 { | |
3728 GIMPLE_PASS, /* type */ | |
3729 "ehdisp", /* name */ | |
3730 OPTGROUP_NONE, /* optinfo_flags */ | |
3731 TV_TREE_EH, /* tv_id */ | |
3732 PROP_gimple_lcf, /* properties_required */ | |
3733 0, /* properties_provided */ | |
3734 0, /* properties_destroyed */ | |
3735 0, /* todo_flags_start */ | |
3736 0, /* todo_flags_finish */ | |
3737 }; | |
3738 | |
3739 class pass_lower_eh_dispatch : public gimple_opt_pass | |
3740 { | |
3741 public: | |
3742 pass_lower_eh_dispatch (gcc::context *ctxt) | |
3743 : gimple_opt_pass (pass_data_lower_eh_dispatch, ctxt) | |
3744 {} | |
3745 | |
3746 /* opt_pass methods: */ | |
3747 virtual bool gate (function *fun) { return fun->eh->region_tree != NULL; } | |
3748 virtual unsigned int execute (function *); | |
3749 | |
3750 }; // class pass_lower_eh_dispatch | |
3751 | |
3752 unsigned | |
3753 pass_lower_eh_dispatch::execute (function *fun) | |
3235 { | 3754 { |
3236 basic_block bb; | 3755 basic_block bb; |
3237 bool any_rewritten = false; | 3756 int flags = 0; |
3238 bool redirected = false; | 3757 bool redirected = false; |
3239 | 3758 |
3240 assign_filter_values (); | 3759 assign_filter_values (); |
3241 | 3760 |
3242 FOR_EACH_BB (bb) | 3761 FOR_EACH_BB_FN (bb, fun) |
3243 { | 3762 { |
3244 gimple last = last_stmt (bb); | 3763 gimple *last = last_stmt (bb); |
3245 if (last && gimple_code (last) == GIMPLE_EH_DISPATCH) | 3764 if (last == NULL) |
3246 { | 3765 continue; |
3247 redirected |= lower_eh_dispatch (bb, last); | 3766 if (gimple_code (last) == GIMPLE_EH_DISPATCH) |
3248 any_rewritten = true; | 3767 { |
3768 redirected |= lower_eh_dispatch (bb, | |
3769 as_a <geh_dispatch *> (last)); | |
3770 flags |= TODO_update_ssa_only_virtuals; | |
3771 } | |
3772 else if (gimple_code (last) == GIMPLE_RESX) | |
3773 { | |
3774 if (stmt_can_throw_external (last)) | |
3775 optimize_clobbers (bb); | |
3776 else | |
3777 flags |= sink_clobbers (bb); | |
3249 } | 3778 } |
3250 } | 3779 } |
3251 | 3780 |
3252 if (redirected) | 3781 if (redirected) |
3253 delete_unreachable_blocks (); | 3782 delete_unreachable_blocks (); |
3254 return any_rewritten ? TODO_update_ssa_only_virtuals : 0; | 3783 return flags; |
3255 } | 3784 } |
3256 | 3785 |
3257 static bool | 3786 } // anon namespace |
3258 gate_lower_eh_dispatch (void) | 3787 |
3259 { | 3788 gimple_opt_pass * |
3260 return cfun->eh->region_tree != NULL; | 3789 make_pass_lower_eh_dispatch (gcc::context *ctxt) |
3261 } | 3790 { |
3262 | 3791 return new pass_lower_eh_dispatch (ctxt); |
3263 struct gimple_opt_pass pass_lower_eh_dispatch = | 3792 } |
3264 { | |
3265 { | |
3266 GIMPLE_PASS, | |
3267 "ehdisp", /* name */ | |
3268 gate_lower_eh_dispatch, /* gate */ | |
3269 execute_lower_eh_dispatch, /* execute */ | |
3270 NULL, /* sub */ | |
3271 NULL, /* next */ | |
3272 0, /* static_pass_number */ | |
3273 TV_TREE_EH, /* tv_id */ | |
3274 PROP_gimple_lcf, /* properties_required */ | |
3275 0, /* properties_provided */ | |
3276 0, /* properties_destroyed */ | |
3277 0, /* todo_flags_start */ | |
3278 TODO_dump_func | TODO_verify_flow /* todo_flags_finish */ | |
3279 } | |
3280 }; | |
3281 | 3793 |
3282 /* Walk statements, see what regions are really referenced and remove | 3794 /* Walk statements, see what regions and, optionally, landing pads |
3283 those that are unused. */ | 3795 are really referenced. |
3796 | |
3797 Returns in R_REACHABLEP an sbitmap with bits set for reachable regions, | |
3798 and in LP_REACHABLE an sbitmap with bits set for reachable landing pads. | |
3799 | |
3800 Passing NULL for LP_REACHABLE is valid, in this case only reachable | |
3801 regions are marked. | |
3802 | |
3803 The caller is responsible for freeing the returned sbitmaps. */ | |
3804 | |
3805 static void | |
3806 mark_reachable_handlers (sbitmap *r_reachablep, sbitmap *lp_reachablep) | |
3807 { | |
3808 sbitmap r_reachable, lp_reachable; | |
3809 basic_block bb; | |
3810 bool mark_landing_pads = (lp_reachablep != NULL); | |
3811 gcc_checking_assert (r_reachablep != NULL); | |
3812 | |
3813 r_reachable = sbitmap_alloc (cfun->eh->region_array->length ()); | |
3814 bitmap_clear (r_reachable); | |
3815 *r_reachablep = r_reachable; | |
3816 | |
3817 if (mark_landing_pads) | |
3818 { | |
3819 lp_reachable = sbitmap_alloc (cfun->eh->lp_array->length ()); | |
3820 bitmap_clear (lp_reachable); | |
3821 *lp_reachablep = lp_reachable; | |
3822 } | |
3823 else | |
3824 lp_reachable = NULL; | |
3825 | |
3826 FOR_EACH_BB_FN (bb, cfun) | |
3827 { | |
3828 gimple_stmt_iterator gsi; | |
3829 | |
3830 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | |
3831 { | |
3832 gimple *stmt = gsi_stmt (gsi); | |
3833 | |
3834 if (mark_landing_pads) | |
3835 { | |
3836 int lp_nr = lookup_stmt_eh_lp (stmt); | |
3837 | |
3838 /* Negative LP numbers are MUST_NOT_THROW regions which | |
3839 are not considered BB enders. */ | |
3840 if (lp_nr < 0) | |
3841 bitmap_set_bit (r_reachable, -lp_nr); | |
3842 | |
3843 /* Positive LP numbers are real landing pads, and BB enders. */ | |
3844 else if (lp_nr > 0) | |
3845 { | |
3846 gcc_assert (gsi_one_before_end_p (gsi)); | |
3847 eh_region region = get_eh_region_from_lp_number (lp_nr); | |
3848 bitmap_set_bit (r_reachable, region->index); | |
3849 bitmap_set_bit (lp_reachable, lp_nr); | |
3850 } | |
3851 } | |
3852 | |
3853 /* Avoid removing regions referenced from RESX/EH_DISPATCH. */ | |
3854 switch (gimple_code (stmt)) | |
3855 { | |
3856 case GIMPLE_RESX: | |
3857 bitmap_set_bit (r_reachable, | |
3858 gimple_resx_region (as_a <gresx *> (stmt))); | |
3859 break; | |
3860 case GIMPLE_EH_DISPATCH: | |
3861 bitmap_set_bit (r_reachable, | |
3862 gimple_eh_dispatch_region ( | |
3863 as_a <geh_dispatch *> (stmt))); | |
3864 break; | |
3865 case GIMPLE_CALL: | |
3866 if (gimple_call_builtin_p (stmt, BUILT_IN_EH_COPY_VALUES)) | |
3867 for (int i = 0; i < 2; ++i) | |
3868 { | |
3869 tree rt = gimple_call_arg (stmt, i); | |
3870 HOST_WIDE_INT ri = tree_to_shwi (rt); | |
3871 | |
3872 gcc_assert (ri == (int)ri); | |
3873 bitmap_set_bit (r_reachable, ri); | |
3874 } | |
3875 break; | |
3876 default: | |
3877 break; | |
3878 } | |
3879 } | |
3880 } | |
3881 } | |
3882 | |
3883 /* Remove unreachable handlers and unreachable landing pads. */ | |
3284 | 3884 |
3285 static void | 3885 static void |
3286 remove_unreachable_handlers (void) | 3886 remove_unreachable_handlers (void) |
3287 { | 3887 { |
3288 sbitmap r_reachable, lp_reachable; | 3888 sbitmap r_reachable, lp_reachable; |
3289 eh_region region; | 3889 eh_region region; |
3290 eh_landing_pad lp; | 3890 eh_landing_pad lp; |
3291 basic_block bb; | 3891 unsigned i; |
3292 int lp_nr, r_nr; | 3892 |
3293 | 3893 mark_reachable_handlers (&r_reachable, &lp_reachable); |
3294 r_reachable = sbitmap_alloc (VEC_length (eh_region, cfun->eh->region_array)); | |
3295 lp_reachable | |
3296 = sbitmap_alloc (VEC_length (eh_landing_pad, cfun->eh->lp_array)); | |
3297 sbitmap_zero (r_reachable); | |
3298 sbitmap_zero (lp_reachable); | |
3299 | |
3300 FOR_EACH_BB (bb) | |
3301 { | |
3302 gimple_stmt_iterator gsi = gsi_start_bb (bb); | |
3303 | |
3304 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | |
3305 { | |
3306 gimple stmt = gsi_stmt (gsi); | |
3307 lp_nr = lookup_stmt_eh_lp (stmt); | |
3308 | |
3309 /* Negative LP numbers are MUST_NOT_THROW regions which | |
3310 are not considered BB enders. */ | |
3311 if (lp_nr < 0) | |
3312 SET_BIT (r_reachable, -lp_nr); | |
3313 | |
3314 /* Positive LP numbers are real landing pads, are are BB enders. */ | |
3315 else if (lp_nr > 0) | |
3316 { | |
3317 gcc_assert (gsi_one_before_end_p (gsi)); | |
3318 region = get_eh_region_from_lp_number (lp_nr); | |
3319 SET_BIT (r_reachable, region->index); | |
3320 SET_BIT (lp_reachable, lp_nr); | |
3321 } | |
3322 } | |
3323 } | |
3324 | 3894 |
3325 if (dump_file) | 3895 if (dump_file) |
3326 { | 3896 { |
3327 fprintf (dump_file, "Before removal of unreachable regions:\n"); | 3897 fprintf (dump_file, "Before removal of unreachable regions:\n"); |
3328 dump_eh_tree (dump_file, cfun); | 3898 dump_eh_tree (dump_file, cfun); |
3329 fprintf (dump_file, "Reachable regions: "); | 3899 fprintf (dump_file, "Reachable regions: "); |
3330 dump_sbitmap_file (dump_file, r_reachable); | 3900 dump_bitmap_file (dump_file, r_reachable); |
3331 fprintf (dump_file, "Reachable landing pads: "); | 3901 fprintf (dump_file, "Reachable landing pads: "); |
3332 dump_sbitmap_file (dump_file, lp_reachable); | 3902 dump_bitmap_file (dump_file, lp_reachable); |
3333 } | 3903 } |
3334 | 3904 |
3335 for (r_nr = 1; | 3905 if (dump_file) |
3336 VEC_iterate (eh_region, cfun->eh->region_array, r_nr, region); ++r_nr) | 3906 { |
3337 if (region && !TEST_BIT (r_reachable, r_nr)) | 3907 FOR_EACH_VEC_SAFE_ELT (cfun->eh->region_array, i, region) |
3908 if (region && !bitmap_bit_p (r_reachable, region->index)) | |
3909 fprintf (dump_file, | |
3910 "Removing unreachable region %d\n", | |
3911 region->index); | |
3912 } | |
3913 | |
3914 remove_unreachable_eh_regions (r_reachable); | |
3915 | |
3916 FOR_EACH_VEC_SAFE_ELT (cfun->eh->lp_array, i, lp) | |
3917 if (lp && !bitmap_bit_p (lp_reachable, lp->index)) | |
3338 { | 3918 { |
3339 if (dump_file) | 3919 if (dump_file) |
3340 fprintf (dump_file, "Removing unreachable region %d\n", r_nr); | 3920 fprintf (dump_file, |
3341 remove_eh_handler (region); | 3921 "Removing unreachable landing pad %d\n", |
3342 } | 3922 lp->index); |
3343 | |
3344 for (lp_nr = 1; | |
3345 VEC_iterate (eh_landing_pad, cfun->eh->lp_array, lp_nr, lp); ++lp_nr) | |
3346 if (lp && !TEST_BIT (lp_reachable, lp_nr)) | |
3347 { | |
3348 if (dump_file) | |
3349 fprintf (dump_file, "Removing unreachable landing pad %d\n", lp_nr); | |
3350 remove_eh_landing_pad (lp); | 3923 remove_eh_landing_pad (lp); |
3351 } | 3924 } |
3352 | 3925 |
3353 if (dump_file) | 3926 if (dump_file) |
3354 { | 3927 { |
3358 } | 3931 } |
3359 | 3932 |
3360 sbitmap_free (r_reachable); | 3933 sbitmap_free (r_reachable); |
3361 sbitmap_free (lp_reachable); | 3934 sbitmap_free (lp_reachable); |
3362 | 3935 |
3363 #ifdef ENABLE_CHECKING | 3936 if (flag_checking) |
3364 verify_eh_tree (cfun); | 3937 verify_eh_tree (cfun); |
3365 #endif | 3938 } |
3939 | |
3940 /* Remove unreachable handlers if any landing pads have been removed after | |
3941 last ehcleanup pass (due to gimple_purge_dead_eh_edges). */ | |
3942 | |
3943 void | |
3944 maybe_remove_unreachable_handlers (void) | |
3945 { | |
3946 eh_landing_pad lp; | |
3947 unsigned i; | |
3948 | |
3949 if (cfun->eh == NULL) | |
3950 return; | |
3951 | |
3952 FOR_EACH_VEC_SAFE_ELT (cfun->eh->lp_array, i, lp) | |
3953 if (lp && lp->post_landing_pad) | |
3954 { | |
3955 if (label_to_block (lp->post_landing_pad) == NULL) | |
3956 { | |
3957 remove_unreachable_handlers (); | |
3958 return; | |
3959 } | |
3960 } | |
3366 } | 3961 } |
3367 | 3962 |
3368 /* Remove regions that do not have landing pads. This assumes | 3963 /* Remove regions that do not have landing pads. This assumes |
3369 that remove_unreachable_handlers has already been run, and | 3964 that remove_unreachable_handlers has already been run, and |
3370 that we've just manipulated the landing pads since then. */ | 3965 that we've just manipulated the landing pads since then. |
3966 | |
3967 Preserve regions with landing pads and regions that prevent | |
3968 exceptions from propagating further, even if these regions | |
3969 are not reachable. */ | |
3371 | 3970 |
3372 static void | 3971 static void |
3373 remove_unreachable_handlers_no_lp (void) | 3972 remove_unreachable_handlers_no_lp (void) |
3374 { | 3973 { |
3375 eh_region r; | 3974 eh_region region; |
3376 int i; | 3975 sbitmap r_reachable; |
3377 | 3976 unsigned i; |
3378 for (i = 1; VEC_iterate (eh_region, cfun->eh->region_array, i, r); ++i) | 3977 |
3379 if (r && r->landing_pads == NULL && r->type != ERT_MUST_NOT_THROW) | 3978 mark_reachable_handlers (&r_reachable, /*lp_reachablep=*/NULL); |
3380 { | 3979 |
3381 if (dump_file) | 3980 FOR_EACH_VEC_SAFE_ELT (cfun->eh->region_array, i, region) |
3382 fprintf (dump_file, "Removing unreachable region %d\n", i); | 3981 { |
3383 remove_eh_handler (r); | 3982 if (! region) |
3384 } | 3983 continue; |
3984 | |
3985 if (region->landing_pads != NULL | |
3986 || region->type == ERT_MUST_NOT_THROW) | |
3987 bitmap_set_bit (r_reachable, region->index); | |
3988 | |
3989 if (dump_file | |
3990 && !bitmap_bit_p (r_reachable, region->index)) | |
3991 fprintf (dump_file, | |
3992 "Removing unreachable region %d\n", | |
3993 region->index); | |
3994 } | |
3995 | |
3996 remove_unreachable_eh_regions (r_reachable); | |
3997 | |
3998 sbitmap_free (r_reachable); | |
3385 } | 3999 } |
3386 | 4000 |
3387 /* Undo critical edge splitting on an EH landing pad. Earlier, we | 4001 /* Undo critical edge splitting on an EH landing pad. Earlier, we |
3388 optimisticaly split all sorts of edges, including EH edges. The | 4002 optimisticaly split all sorts of edges, including EH edges. The |
3389 optimization passes in between may not have needed them; if not, | 4003 optimization passes in between may not have needed them; if not, |
3405 basic_block bb = label_to_block (lp->post_landing_pad); | 4019 basic_block bb = label_to_block (lp->post_landing_pad); |
3406 gimple_stmt_iterator gsi; | 4020 gimple_stmt_iterator gsi; |
3407 edge e_in, e_out; | 4021 edge e_in, e_out; |
3408 | 4022 |
3409 /* Quickly check the edge counts on BB for singularity. */ | 4023 /* Quickly check the edge counts on BB for singularity. */ |
3410 if (EDGE_COUNT (bb->preds) != 1 || EDGE_COUNT (bb->succs) != 1) | 4024 if (!single_pred_p (bb) || !single_succ_p (bb)) |
3411 return false; | 4025 return false; |
3412 e_in = EDGE_PRED (bb, 0); | 4026 e_in = single_pred_edge (bb); |
3413 e_out = EDGE_SUCC (bb, 0); | 4027 e_out = single_succ_edge (bb); |
3414 | 4028 |
3415 /* Input edge must be EH and output edge must be normal. */ | 4029 /* Input edge must be EH and output edge must be normal. */ |
3416 if ((e_in->flags & EDGE_EH) == 0 || (e_out->flags & EDGE_EH) != 0) | 4030 if ((e_in->flags & EDGE_EH) == 0 || (e_out->flags & EDGE_EH) != 0) |
3417 return false; | 4031 return false; |
3418 | 4032 |
3425 | 4039 |
3426 /* The destination block must not already have a landing pad | 4040 /* The destination block must not already have a landing pad |
3427 for a different region. */ | 4041 for a different region. */ |
3428 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi)) | 4042 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi)) |
3429 { | 4043 { |
3430 gimple stmt = gsi_stmt (gsi); | 4044 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi)); |
3431 tree lab; | 4045 tree lab; |
3432 int lp_nr; | 4046 int lp_nr; |
3433 | 4047 |
3434 if (gimple_code (stmt) != GIMPLE_LABEL) | 4048 if (!label_stmt) |
3435 break; | 4049 break; |
3436 lab = gimple_label_label (stmt); | 4050 lab = gimple_label_label (label_stmt); |
3437 lp_nr = EH_LANDING_PAD_NR (lab); | 4051 lp_nr = EH_LANDING_PAD_NR (lab); |
3438 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region) | 4052 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region) |
3439 return false; | 4053 return false; |
3440 } | 4054 } |
3441 | 4055 |
3448 /* ??? We can get degenerate phis due to cfg cleanups. I would have | 4062 /* ??? We can get degenerate phis due to cfg cleanups. I would have |
3449 thought this should have been cleaned up by a phicprop pass, but | 4063 thought this should have been cleaned up by a phicprop pass, but |
3450 that doesn't appear to handle virtuals. Propagate by hand. */ | 4064 that doesn't appear to handle virtuals. Propagate by hand. */ |
3451 if (!gimple_seq_empty_p (phi_nodes (bb))) | 4065 if (!gimple_seq_empty_p (phi_nodes (bb))) |
3452 { | 4066 { |
3453 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); ) | 4067 for (gphi_iterator gpi = gsi_start_phis (bb); !gsi_end_p (gpi); ) |
3454 { | 4068 { |
3455 gimple use_stmt, phi = gsi_stmt (gsi); | 4069 gimple *use_stmt; |
4070 gphi *phi = gpi.phi (); | |
3456 tree lhs = gimple_phi_result (phi); | 4071 tree lhs = gimple_phi_result (phi); |
3457 tree rhs = gimple_phi_arg_def (phi, 0); | 4072 tree rhs = gimple_phi_arg_def (phi, 0); |
3458 use_operand_p use_p; | 4073 use_operand_p use_p; |
3459 imm_use_iterator iter; | 4074 imm_use_iterator iter; |
3460 | 4075 |
3465 } | 4080 } |
3466 | 4081 |
3467 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs)) | 4082 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs)) |
3468 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1; | 4083 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1; |
3469 | 4084 |
3470 remove_phi_node (&gsi, true); | 4085 remove_phi_node (&gpi, true); |
3471 } | 4086 } |
3472 } | 4087 } |
3473 | 4088 |
3474 if (dump_file && (dump_flags & TDF_DETAILS)) | 4089 if (dump_file && (dump_flags & TDF_DETAILS)) |
3475 fprintf (dump_file, "Unsplit EH landing pad %d to block %i.\n", | 4090 fprintf (dump_file, "Unsplit EH landing pad %d to block %i.\n", |
3481 to the PHI nodes in E_OUT->DEST. */ | 4096 to the PHI nodes in E_OUT->DEST. */ |
3482 redirect_eh_edge_1 (e_in, e_out->dest, false); | 4097 redirect_eh_edge_1 (e_in, e_out->dest, false); |
3483 redirect_edge_pred (e_out, e_in->src); | 4098 redirect_edge_pred (e_out, e_in->src); |
3484 e_out->flags = e_in->flags; | 4099 e_out->flags = e_in->flags; |
3485 e_out->probability = e_in->probability; | 4100 e_out->probability = e_in->probability; |
3486 e_out->count = e_in->count; | |
3487 remove_edge (e_in); | 4101 remove_edge (e_in); |
3488 | 4102 |
3489 return true; | 4103 return true; |
3490 } | 4104 } |
3491 | 4105 |
3496 { | 4110 { |
3497 bool changed = false; | 4111 bool changed = false; |
3498 eh_landing_pad lp; | 4112 eh_landing_pad lp; |
3499 int i; | 4113 int i; |
3500 | 4114 |
3501 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i) | 4115 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i) |
3502 if (lp) | 4116 if (lp) |
3503 changed |= unsplit_eh (lp); | 4117 changed |= unsplit_eh (lp); |
3504 | 4118 |
3505 return changed; | 4119 return changed; |
3506 } | 4120 } |
3514 | 4128 |
3515 static bool | 4129 static bool |
3516 cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb, | 4130 cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb, |
3517 edge old_bb_out, bool change_region) | 4131 edge old_bb_out, bool change_region) |
3518 { | 4132 { |
3519 gimple_stmt_iterator ngsi, ogsi; | 4133 gphi_iterator ngsi, ogsi; |
3520 edge_iterator ei; | 4134 edge_iterator ei; |
3521 edge e; | 4135 edge e; |
3522 bitmap rename_virts; | |
3523 bitmap ophi_handled; | 4136 bitmap ophi_handled; |
4137 | |
4138 /* The destination block must not be a regular successor for any | |
4139 of the preds of the landing pad. Thus, avoid turning | |
4140 <..> | |
4141 | \ EH | |
4142 | <..> | |
4143 | / | |
4144 <..> | |
4145 into | |
4146 <..> | |
4147 | | EH | |
4148 <..> | |
4149 which CFG verification would choke on. See PR45172 and PR51089. */ | |
4150 FOR_EACH_EDGE (e, ei, old_bb->preds) | |
4151 if (find_edge (e->src, new_bb)) | |
4152 return false; | |
3524 | 4153 |
3525 FOR_EACH_EDGE (e, ei, old_bb->preds) | 4154 FOR_EACH_EDGE (e, ei, old_bb->preds) |
3526 redirect_edge_var_map_clear (e); | 4155 redirect_edge_var_map_clear (e); |
3527 | 4156 |
3528 ophi_handled = BITMAP_ALLOC (NULL); | 4157 ophi_handled = BITMAP_ALLOC (NULL); |
3529 rename_virts = BITMAP_ALLOC (NULL); | |
3530 | 4158 |
3531 /* First, iterate through the PHIs on NEW_BB and set up the edge_var_map | 4159 /* First, iterate through the PHIs on NEW_BB and set up the edge_var_map |
3532 for the edges we're going to move. */ | 4160 for the edges we're going to move. */ |
3533 for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); gsi_next (&ngsi)) | 4161 for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); gsi_next (&ngsi)) |
3534 { | 4162 { |
3535 gimple ophi, nphi = gsi_stmt (ngsi); | 4163 gphi *ophi, *nphi = ngsi.phi (); |
3536 tree nresult, nop; | 4164 tree nresult, nop; |
3537 | 4165 |
3538 nresult = gimple_phi_result (nphi); | 4166 nresult = gimple_phi_result (nphi); |
3539 nop = gimple_phi_arg_def (nphi, old_bb_out->dest_idx); | 4167 nop = gimple_phi_arg_def (nphi, old_bb_out->dest_idx); |
3540 | 4168 |
3541 /* Find the corresponding PHI in OLD_BB so we can forward-propagate | 4169 /* Find the corresponding PHI in OLD_BB so we can forward-propagate |
3542 the source ssa_name. */ | 4170 the source ssa_name. */ |
3543 ophi = NULL; | 4171 ophi = NULL; |
3544 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi)) | 4172 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi)) |
3545 { | 4173 { |
3546 ophi = gsi_stmt (ogsi); | 4174 ophi = ogsi.phi (); |
3547 if (gimple_phi_result (ophi) == nop) | 4175 if (gimple_phi_result (ophi) == nop) |
3548 break; | 4176 break; |
3549 ophi = NULL; | 4177 ophi = NULL; |
3550 } | 4178 } |
3551 | 4179 |
3577 oop = gimple_phi_arg_def (ophi, e->dest_idx); | 4205 oop = gimple_phi_arg_def (ophi, e->dest_idx); |
3578 oloc = gimple_phi_arg_location (ophi, e->dest_idx); | 4206 oloc = gimple_phi_arg_location (ophi, e->dest_idx); |
3579 redirect_edge_var_map_add (e, nresult, oop, oloc); | 4207 redirect_edge_var_map_add (e, nresult, oop, oloc); |
3580 } | 4208 } |
3581 } | 4209 } |
3582 /* If we didn't find the PHI, but it's a VOP, remember to rename | 4210 /* If we didn't find the PHI, if it's a real variable or a VOP, we know |
3583 it later, assuming all other tests succeed. */ | |
3584 else if (!is_gimple_reg (nresult)) | |
3585 bitmap_set_bit (rename_virts, SSA_NAME_VERSION (nresult)); | |
3586 /* If we didn't find the PHI, and it's a real variable, we know | |
3587 from the fact that OLD_BB is tree_empty_eh_handler_p that the | 4211 from the fact that OLD_BB is tree_empty_eh_handler_p that the |
3588 variable is unchanged from input to the block and we can simply | 4212 variable is unchanged from input to the block and we can simply |
3589 re-use the input to NEW_BB from the OLD_BB_OUT edge. */ | 4213 re-use the input to NEW_BB from the OLD_BB_OUT edge. */ |
3590 else | 4214 else |
3591 { | 4215 { |
3598 | 4222 |
3599 /* Second, verify that all PHIs from OLD_BB have been handled. If not, | 4223 /* Second, verify that all PHIs from OLD_BB have been handled. If not, |
3600 we don't know what values from the other edges into NEW_BB to use. */ | 4224 we don't know what values from the other edges into NEW_BB to use. */ |
3601 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi)) | 4225 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi)) |
3602 { | 4226 { |
3603 gimple ophi = gsi_stmt (ogsi); | 4227 gphi *ophi = ogsi.phi (); |
3604 tree oresult = gimple_phi_result (ophi); | 4228 tree oresult = gimple_phi_result (ophi); |
3605 if (!bitmap_bit_p (ophi_handled, SSA_NAME_VERSION (oresult))) | 4229 if (!bitmap_bit_p (ophi_handled, SSA_NAME_VERSION (oresult))) |
3606 goto fail; | 4230 goto fail; |
3607 } | |
3608 | |
3609 /* At this point we know that the merge will succeed. Remove the PHI | |
3610 nodes for the virtuals that we want to rename. */ | |
3611 if (!bitmap_empty_p (rename_virts)) | |
3612 { | |
3613 for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); ) | |
3614 { | |
3615 gimple nphi = gsi_stmt (ngsi); | |
3616 tree nresult = gimple_phi_result (nphi); | |
3617 if (bitmap_bit_p (rename_virts, SSA_NAME_VERSION (nresult))) | |
3618 { | |
3619 mark_virtual_phi_result_for_renaming (nphi); | |
3620 remove_phi_node (&ngsi, true); | |
3621 } | |
3622 else | |
3623 gsi_next (&ngsi); | |
3624 } | |
3625 } | 4231 } |
3626 | 4232 |
3627 /* Finally, move the edges and update the PHIs. */ | 4233 /* Finally, move the edges and update the PHIs. */ |
3628 for (ei = ei_start (old_bb->preds); (e = ei_safe_edge (ei)); ) | 4234 for (ei = ei_start (old_bb->preds); (e = ei_safe_edge (ei)); ) |
3629 if (e->flags & EDGE_EH) | 4235 if (e->flags & EDGE_EH) |
3630 { | 4236 { |
4237 /* ??? CFG manipluation routines do not try to update loop | |
4238 form on edge redirection. Do so manually here for now. */ | |
4239 /* If we redirect a loop entry or latch edge that will either create | |
4240 a multiple entry loop or rotate the loop. If the loops merge | |
4241 we may have created a loop with multiple latches. | |
4242 All of this isn't easily fixed thus cancel the affected loop | |
4243 and mark the other loop as possibly having multiple latches. */ | |
4244 if (e->dest == e->dest->loop_father->header) | |
4245 { | |
4246 mark_loop_for_removal (e->dest->loop_father); | |
4247 new_bb->loop_father->latch = NULL; | |
4248 loops_state_set (LOOPS_MAY_HAVE_MULTIPLE_LATCHES); | |
4249 } | |
3631 redirect_eh_edge_1 (e, new_bb, change_region); | 4250 redirect_eh_edge_1 (e, new_bb, change_region); |
3632 redirect_edge_succ (e, new_bb); | 4251 redirect_edge_succ (e, new_bb); |
3633 flush_pending_stmts (e); | 4252 flush_pending_stmts (e); |
3634 } | 4253 } |
3635 else | 4254 else |
3636 ei_next (&ei); | 4255 ei_next (&ei); |
3637 | 4256 |
3638 BITMAP_FREE (ophi_handled); | 4257 BITMAP_FREE (ophi_handled); |
3639 BITMAP_FREE (rename_virts); | |
3640 return true; | 4258 return true; |
3641 | 4259 |
3642 fail: | 4260 fail: |
3643 FOR_EACH_EDGE (e, ei, old_bb->preds) | 4261 FOR_EACH_EDGE (e, ei, old_bb->preds) |
3644 redirect_edge_var_map_clear (e); | 4262 redirect_edge_var_map_clear (e); |
3645 BITMAP_FREE (ophi_handled); | 4263 BITMAP_FREE (ophi_handled); |
3646 BITMAP_FREE (rename_virts); | |
3647 return false; | 4264 return false; |
3648 } | 4265 } |
3649 | 4266 |
3650 /* A subroutine of cleanup_empty_eh. Move a landing pad LP from its | 4267 /* A subroutine of cleanup_empty_eh. Move a landing pad LP from its |
3651 old region to NEW_REGION at BB. */ | 4268 old region to NEW_REGION at BB. */ |
3665 lp->next_lp = new_region->landing_pads; | 4282 lp->next_lp = new_region->landing_pads; |
3666 new_region->landing_pads = lp; | 4283 new_region->landing_pads = lp; |
3667 | 4284 |
3668 /* Delete the RESX that was matched within the empty handler block. */ | 4285 /* Delete the RESX that was matched within the empty handler block. */ |
3669 gsi = gsi_last_bb (bb); | 4286 gsi = gsi_last_bb (bb); |
3670 mark_virtual_ops_for_renaming (gsi_stmt (gsi)); | 4287 unlink_stmt_vdef (gsi_stmt (gsi)); |
3671 gsi_remove (&gsi, true); | 4288 gsi_remove (&gsi, true); |
3672 | 4289 |
3673 /* Clean up E_OUT for the fallthru. */ | 4290 /* Clean up E_OUT for the fallthru. */ |
3674 e_out->flags = (e_out->flags & ~EDGE_EH) | EDGE_FALLTHRU; | 4291 e_out->flags = (e_out->flags & ~EDGE_EH) | EDGE_FALLTHRU; |
3675 e_out->probability = REG_BR_PROB_BASE; | 4292 e_out->probability = profile_probability::always (); |
3676 } | 4293 } |
3677 | 4294 |
3678 /* A subroutine of cleanup_empty_eh. Handle more complex cases of | 4295 /* A subroutine of cleanup_empty_eh. Handle more complex cases of |
3679 unsplitting than unsplit_eh was prepared to handle, e.g. when | 4296 unsplitting than unsplit_eh was prepared to handle, e.g. when |
3680 multiple incoming edges and phis are involved. */ | 4297 multiple incoming edges and phis are involved. */ |
3682 static bool | 4299 static bool |
3683 cleanup_empty_eh_unsplit (basic_block bb, edge e_out, eh_landing_pad lp) | 4300 cleanup_empty_eh_unsplit (basic_block bb, edge e_out, eh_landing_pad lp) |
3684 { | 4301 { |
3685 gimple_stmt_iterator gsi; | 4302 gimple_stmt_iterator gsi; |
3686 tree lab; | 4303 tree lab; |
3687 edge_iterator ei; | |
3688 edge e; | |
3689 | 4304 |
3690 /* We really ought not have totally lost everything following | 4305 /* We really ought not have totally lost everything following |
3691 a landing pad label. Given that BB is empty, there had better | 4306 a landing pad label. Given that BB is empty, there had better |
3692 be a successor. */ | 4307 be a successor. */ |
3693 gcc_assert (e_out != NULL); | 4308 gcc_assert (e_out != NULL); |
3695 /* The destination block must not already have a landing pad | 4310 /* The destination block must not already have a landing pad |
3696 for a different region. */ | 4311 for a different region. */ |
3697 lab = NULL; | 4312 lab = NULL; |
3698 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi)) | 4313 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi)) |
3699 { | 4314 { |
3700 gimple stmt = gsi_stmt (gsi); | 4315 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (gsi)); |
3701 int lp_nr; | 4316 int lp_nr; |
3702 | 4317 |
3703 if (gimple_code (stmt) != GIMPLE_LABEL) | 4318 if (!stmt) |
3704 break; | 4319 break; |
3705 lab = gimple_label_label (stmt); | 4320 lab = gimple_label_label (stmt); |
3706 lp_nr = EH_LANDING_PAD_NR (lab); | 4321 lp_nr = EH_LANDING_PAD_NR (lab); |
3707 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region) | 4322 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region) |
3708 return false; | 4323 return false; |
3709 } | 4324 } |
3710 | |
3711 /* The destination block must not be a regular successor for any | |
3712 of the preds of the landing pad. Thus, avoid turning | |
3713 <..> | |
3714 | \ EH | |
3715 | <..> | |
3716 | / | |
3717 <..> | |
3718 into | |
3719 <..> | |
3720 | | EH | |
3721 <..> | |
3722 which CFG verification would choke on. See PR45172. */ | |
3723 FOR_EACH_EDGE (e, ei, bb->preds) | |
3724 if (find_edge (e->src, e_out->dest)) | |
3725 return false; | |
3726 | 4325 |
3727 /* Attempt to move the PHIs into the successor block. */ | 4326 /* Attempt to move the PHIs into the successor block. */ |
3728 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, false)) | 4327 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, false)) |
3729 { | 4328 { |
3730 if (dump_file && (dump_flags & TDF_DETAILS)) | 4329 if (dump_file && (dump_flags & TDF_DETAILS)) |
3782 static bool | 4381 static bool |
3783 cleanup_empty_eh (eh_landing_pad lp) | 4382 cleanup_empty_eh (eh_landing_pad lp) |
3784 { | 4383 { |
3785 basic_block bb = label_to_block (lp->post_landing_pad); | 4384 basic_block bb = label_to_block (lp->post_landing_pad); |
3786 gimple_stmt_iterator gsi; | 4385 gimple_stmt_iterator gsi; |
3787 gimple resx; | 4386 gimple *resx; |
3788 eh_region new_region; | 4387 eh_region new_region; |
3789 edge_iterator ei; | 4388 edge_iterator ei; |
3790 edge e, e_out; | 4389 edge e, e_out; |
3791 bool has_non_eh_pred; | 4390 bool has_non_eh_pred; |
4391 bool ret = false; | |
3792 int new_lp_nr; | 4392 int new_lp_nr; |
3793 | 4393 |
3794 /* There can be zero or one edges out of BB. This is the quickest test. */ | 4394 /* There can be zero or one edges out of BB. This is the quickest test. */ |
3795 switch (EDGE_COUNT (bb->succs)) | 4395 switch (EDGE_COUNT (bb->succs)) |
3796 { | 4396 { |
3797 case 0: | 4397 case 0: |
3798 e_out = NULL; | 4398 e_out = NULL; |
3799 break; | 4399 break; |
3800 case 1: | 4400 case 1: |
3801 e_out = EDGE_SUCC (bb, 0); | 4401 e_out = single_succ_edge (bb); |
3802 break; | 4402 break; |
3803 default: | 4403 default: |
3804 return false; | 4404 return false; |
3805 } | 4405 } |
4406 | |
4407 gsi = gsi_last_nondebug_bb (bb); | |
4408 resx = gsi_stmt (gsi); | |
4409 if (resx && is_gimple_resx (resx)) | |
4410 { | |
4411 if (stmt_can_throw_external (resx)) | |
4412 optimize_clobbers (bb); | |
4413 else if (sink_clobbers (bb)) | |
4414 ret = true; | |
4415 } | |
4416 | |
3806 gsi = gsi_after_labels (bb); | 4417 gsi = gsi_after_labels (bb); |
3807 | 4418 |
3808 /* Make sure to skip debug statements. */ | 4419 /* Make sure to skip debug statements. */ |
3809 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi))) | 4420 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi))) |
3810 gsi_next_nondebug (&gsi); | 4421 gsi_next_nondebug (&gsi); |
3811 | 4422 |
3812 /* If the block is totally empty, look for more unsplitting cases. */ | 4423 /* If the block is totally empty, look for more unsplitting cases. */ |
3813 if (gsi_end_p (gsi)) | 4424 if (gsi_end_p (gsi)) |
3814 { | 4425 { |
3815 /* For the degenerate case of an infinite loop bail out. */ | 4426 /* For the degenerate case of an infinite loop bail out. |
3816 if (infinite_empty_loop_p (e_out)) | 4427 If bb has no successors and is totally empty, which can happen e.g. |
3817 return false; | 4428 because of incorrect noreturn attribute, bail out too. */ |
3818 | 4429 if (e_out == NULL |
3819 return cleanup_empty_eh_unsplit (bb, e_out, lp); | 4430 || infinite_empty_loop_p (e_out)) |
3820 } | 4431 return ret; |
3821 | 4432 |
3822 /* The block should consist only of a single RESX statement. */ | 4433 return ret | cleanup_empty_eh_unsplit (bb, e_out, lp); |
4434 } | |
4435 | |
4436 /* The block should consist only of a single RESX statement, modulo a | |
4437 preceding call to __builtin_stack_restore if there is no outgoing | |
4438 edge, since the call can be eliminated in this case. */ | |
3823 resx = gsi_stmt (gsi); | 4439 resx = gsi_stmt (gsi); |
4440 if (!e_out && gimple_call_builtin_p (resx, BUILT_IN_STACK_RESTORE)) | |
4441 { | |
4442 gsi_next_nondebug (&gsi); | |
4443 resx = gsi_stmt (gsi); | |
4444 } | |
3824 if (!is_gimple_resx (resx)) | 4445 if (!is_gimple_resx (resx)) |
3825 return false; | 4446 return ret; |
3826 gcc_assert (gsi_one_before_end_p (gsi)); | 4447 gcc_assert (gsi_one_nondebug_before_end_p (gsi)); |
3827 | 4448 |
3828 /* Determine if there are non-EH edges, or resx edges into the handler. */ | 4449 /* Determine if there are non-EH edges, or resx edges into the handler. */ |
3829 has_non_eh_pred = false; | 4450 has_non_eh_pred = false; |
3830 FOR_EACH_EDGE (e, ei, bb->preds) | 4451 FOR_EACH_EDGE (e, ei, bb->preds) |
3831 if (!(e->flags & EDGE_EH)) | 4452 if (!(e->flags & EDGE_EH)) |
3844 { | 4465 { |
3845 gcc_assert (e_out == NULL); | 4466 gcc_assert (e_out == NULL); |
3846 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); ) | 4467 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); ) |
3847 if (e->flags & EDGE_EH) | 4468 if (e->flags & EDGE_EH) |
3848 { | 4469 { |
3849 gimple stmt = last_stmt (e->src); | 4470 gimple *stmt = last_stmt (e->src); |
3850 remove_stmt_from_eh_lp (stmt); | 4471 remove_stmt_from_eh_lp (stmt); |
3851 remove_edge (e); | 4472 remove_edge (e); |
3852 } | 4473 } |
3853 else | 4474 else |
3854 ei_next (&ei); | 4475 ei_next (&ei); |
3860 if (new_region->type == ERT_MUST_NOT_THROW) | 4481 if (new_region->type == ERT_MUST_NOT_THROW) |
3861 { | 4482 { |
3862 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); ) | 4483 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); ) |
3863 if (e->flags & EDGE_EH) | 4484 if (e->flags & EDGE_EH) |
3864 { | 4485 { |
3865 gimple stmt = last_stmt (e->src); | 4486 gimple *stmt = last_stmt (e->src); |
3866 remove_stmt_from_eh_lp (stmt); | 4487 remove_stmt_from_eh_lp (stmt); |
3867 add_stmt_to_eh_lp (stmt, new_lp_nr); | 4488 add_stmt_to_eh_lp (stmt, new_lp_nr); |
3868 remove_edge (e); | 4489 remove_edge (e); |
3869 } | 4490 } |
3870 else | 4491 else |
3896 /* ??? The CFG didn't change, but we may have rendered the | 4517 /* ??? The CFG didn't change, but we may have rendered the |
3897 old EH region unreachable. Trigger a cleanup there. */ | 4518 old EH region unreachable. Trigger a cleanup there. */ |
3898 return true; | 4519 return true; |
3899 } | 4520 } |
3900 | 4521 |
3901 return false; | 4522 return ret; |
3902 | 4523 |
3903 succeed: | 4524 succeed: |
3904 if (dump_file && (dump_flags & TDF_DETAILS)) | 4525 if (dump_file && (dump_flags & TDF_DETAILS)) |
3905 fprintf (dump_file, "Empty EH handler %i removed.\n", lp->index); | 4526 fprintf (dump_file, "Empty EH handler %i removed.\n", lp->index); |
3906 remove_eh_landing_pad (lp); | 4527 remove_eh_landing_pad (lp); |
3915 { | 4536 { |
3916 bool changed = false; | 4537 bool changed = false; |
3917 eh_landing_pad lp; | 4538 eh_landing_pad lp; |
3918 int i; | 4539 int i; |
3919 | 4540 |
3920 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i) | 4541 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i) |
3921 if (lp) | 4542 if (lp) |
3922 changed |= cleanup_empty_eh (lp); | 4543 changed |= cleanup_empty_eh (lp); |
3923 | 4544 |
3924 return changed; | 4545 return changed; |
3925 } | 4546 } |
3927 /* Perform cleanups and lowering of exception handling | 4548 /* Perform cleanups and lowering of exception handling |
3928 1) cleanups regions with handlers doing nothing are optimized out | 4549 1) cleanups regions with handlers doing nothing are optimized out |
3929 2) MUST_NOT_THROW regions that became dead because of 1) are optimized out | 4550 2) MUST_NOT_THROW regions that became dead because of 1) are optimized out |
3930 3) Info about regions that are containing instructions, and regions | 4551 3) Info about regions that are containing instructions, and regions |
3931 reachable via local EH edges is collected | 4552 reachable via local EH edges is collected |
3932 4) Eh tree is pruned for regions no longer neccesary. | 4553 4) Eh tree is pruned for regions no longer necessary. |
3933 | 4554 |
3934 TODO: Push MUST_NOT_THROW regions to the root of the EH tree. | 4555 TODO: Push MUST_NOT_THROW regions to the root of the EH tree. |
3935 Unify those that have the same failure decl and locus. | 4556 Unify those that have the same failure decl and locus. |
3936 */ | 4557 */ |
3937 | 4558 |
3941 /* Do this first: unsplit_all_eh and cleanup_all_empty_eh can die | 4562 /* Do this first: unsplit_all_eh and cleanup_all_empty_eh can die |
3942 looking up unreachable landing pads. */ | 4563 looking up unreachable landing pads. */ |
3943 remove_unreachable_handlers (); | 4564 remove_unreachable_handlers (); |
3944 | 4565 |
3945 /* Watch out for the region tree vanishing due to all unreachable. */ | 4566 /* Watch out for the region tree vanishing due to all unreachable. */ |
3946 if (cfun->eh->region_tree && optimize) | 4567 if (cfun->eh->region_tree) |
3947 { | 4568 { |
3948 bool changed = false; | 4569 bool changed = false; |
3949 | 4570 |
3950 changed |= unsplit_all_eh (); | 4571 if (optimize) |
4572 changed |= unsplit_all_eh (); | |
3951 changed |= cleanup_all_empty_eh (); | 4573 changed |= cleanup_all_empty_eh (); |
3952 | 4574 |
3953 if (changed) | 4575 if (changed) |
3954 { | 4576 { |
3955 free_dominance_info (CDI_DOMINATORS); | 4577 free_dominance_info (CDI_DOMINATORS); |
3968 } | 4590 } |
3969 | 4591 |
3970 return 0; | 4592 return 0; |
3971 } | 4593 } |
3972 | 4594 |
3973 static unsigned int | 4595 namespace { |
3974 execute_cleanup_eh (void) | 4596 |
4597 const pass_data pass_data_cleanup_eh = | |
4598 { | |
4599 GIMPLE_PASS, /* type */ | |
4600 "ehcleanup", /* name */ | |
4601 OPTGROUP_NONE, /* optinfo_flags */ | |
4602 TV_TREE_EH, /* tv_id */ | |
4603 PROP_gimple_lcf, /* properties_required */ | |
4604 0, /* properties_provided */ | |
4605 0, /* properties_destroyed */ | |
4606 0, /* todo_flags_start */ | |
4607 0, /* todo_flags_finish */ | |
4608 }; | |
4609 | |
4610 class pass_cleanup_eh : public gimple_opt_pass | |
4611 { | |
4612 public: | |
4613 pass_cleanup_eh (gcc::context *ctxt) | |
4614 : gimple_opt_pass (pass_data_cleanup_eh, ctxt) | |
4615 {} | |
4616 | |
4617 /* opt_pass methods: */ | |
4618 opt_pass * clone () { return new pass_cleanup_eh (m_ctxt); } | |
4619 virtual bool gate (function *fun) | |
4620 { | |
4621 return fun->eh != NULL && fun->eh->region_tree != NULL; | |
4622 } | |
4623 | |
4624 virtual unsigned int execute (function *); | |
4625 | |
4626 }; // class pass_cleanup_eh | |
4627 | |
4628 unsigned int | |
4629 pass_cleanup_eh::execute (function *fun) | |
3975 { | 4630 { |
3976 int ret = execute_cleanup_eh_1 (); | 4631 int ret = execute_cleanup_eh_1 (); |
3977 | 4632 |
3978 /* If the function no longer needs an EH personality routine | 4633 /* If the function no longer needs an EH personality routine |
3979 clear it. This exposes cross-language inlining opportunities | 4634 clear it. This exposes cross-language inlining opportunities |
3980 and avoids references to a never defined personality routine. */ | 4635 and avoids references to a never defined personality routine. */ |
3981 if (DECL_FUNCTION_PERSONALITY (current_function_decl) | 4636 if (DECL_FUNCTION_PERSONALITY (current_function_decl) |
3982 && function_needs_eh_personality (cfun) != eh_personality_lang) | 4637 && function_needs_eh_personality (fun) != eh_personality_lang) |
3983 DECL_FUNCTION_PERSONALITY (current_function_decl) = NULL_TREE; | 4638 DECL_FUNCTION_PERSONALITY (current_function_decl) = NULL_TREE; |
3984 | 4639 |
3985 return ret; | 4640 return ret; |
3986 } | 4641 } |
3987 | 4642 |
3988 static bool | 4643 } // anon namespace |
3989 gate_cleanup_eh (void) | 4644 |
3990 { | 4645 gimple_opt_pass * |
3991 return cfun->eh != NULL && cfun->eh->region_tree != NULL; | 4646 make_pass_cleanup_eh (gcc::context *ctxt) |
3992 } | 4647 { |
3993 | 4648 return new pass_cleanup_eh (ctxt); |
3994 struct gimple_opt_pass pass_cleanup_eh = { | 4649 } |
3995 { | |
3996 GIMPLE_PASS, | |
3997 "ehcleanup", /* name */ | |
3998 gate_cleanup_eh, /* gate */ | |
3999 execute_cleanup_eh, /* execute */ | |
4000 NULL, /* sub */ | |
4001 NULL, /* next */ | |
4002 0, /* static_pass_number */ | |
4003 TV_TREE_EH, /* tv_id */ | |
4004 PROP_gimple_lcf, /* properties_required */ | |
4005 0, /* properties_provided */ | |
4006 0, /* properties_destroyed */ | |
4007 0, /* todo_flags_start */ | |
4008 TODO_dump_func /* todo_flags_finish */ | |
4009 } | |
4010 }; | |
4011 | 4650 |
4012 /* Verify that BB containing STMT as the last statement, has precisely the | 4651 /* Verify that BB containing STMT as the last statement, has precisely the |
4013 edge that make_eh_edges would create. */ | 4652 edge that make_eh_edges would create. */ |
4014 | 4653 |
4015 DEBUG_FUNCTION bool | 4654 DEBUG_FUNCTION bool |
4016 verify_eh_edges (gimple stmt) | 4655 verify_eh_edges (gimple *stmt) |
4017 { | 4656 { |
4018 basic_block bb = gimple_bb (stmt); | 4657 basic_block bb = gimple_bb (stmt); |
4019 eh_landing_pad lp = NULL; | 4658 eh_landing_pad lp = NULL; |
4020 int lp_nr; | 4659 int lp_nr; |
4021 edge_iterator ei; | 4660 edge_iterator ei; |
4072 } | 4711 } |
4073 | 4712 |
4074 /* Similarly, but handle GIMPLE_EH_DISPATCH specifically. */ | 4713 /* Similarly, but handle GIMPLE_EH_DISPATCH specifically. */ |
4075 | 4714 |
4076 DEBUG_FUNCTION bool | 4715 DEBUG_FUNCTION bool |
4077 verify_eh_dispatch_edge (gimple stmt) | 4716 verify_eh_dispatch_edge (geh_dispatch *stmt) |
4078 { | 4717 { |
4079 eh_region r; | 4718 eh_region r; |
4080 eh_catch c; | 4719 eh_catch c; |
4081 basic_block src, dst; | 4720 basic_block src, dst; |
4082 bool want_fallthru = true; | 4721 bool want_fallthru = true; |