0
|
1 /* Nested function decomposition for GIMPLE.
|
|
2 Copyright (C) 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
|
|
3
|
|
4 This file is part of GCC.
|
|
5
|
|
6 GCC is free software; you can redistribute it and/or modify
|
|
7 it under the terms of the GNU General Public License as published by
|
|
8 the Free Software Foundation; either version 3, or (at your option)
|
|
9 any later version.
|
|
10
|
|
11 GCC is distributed in the hope that it will be useful,
|
|
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
14 GNU General Public License for more details.
|
|
15
|
|
16 You should have received a copy of the GNU General Public License
|
|
17 along with GCC; see the file COPYING3. If not see
|
|
18 <http://www.gnu.org/licenses/>. */
|
|
19
|
|
20 #include "config.h"
|
|
21 #include "system.h"
|
|
22 #include "coretypes.h"
|
|
23 #include "tm.h"
|
|
24 #include "tree.h"
|
|
25 #include "rtl.h"
|
|
26 #include "tm_p.h"
|
|
27 #include "function.h"
|
|
28 #include "tree-dump.h"
|
|
29 #include "tree-inline.h"
|
|
30 #include "gimple.h"
|
|
31 #include "tree-iterator.h"
|
|
32 #include "tree-flow.h"
|
|
33 #include "cgraph.h"
|
|
34 #include "expr.h"
|
|
35 #include "langhooks.h"
|
|
36 #include "pointer-set.h"
|
|
37 #include "ggc.h"
|
|
38
|
|
39
|
|
40 /* The object of this pass is to lower the representation of a set of nested
|
|
41 functions in order to expose all of the gory details of the various
|
|
42 nonlocal references. We want to do this sooner rather than later, in
|
|
43 order to give us more freedom in emitting all of the functions in question.
|
|
44
|
|
45 Back in olden times, when gcc was young, we developed an insanely
|
|
46 complicated scheme whereby variables which were referenced nonlocally
|
|
47 were forced to live in the stack of the declaring function, and then
|
|
48 the nested functions magically discovered where these variables were
|
|
49 placed. In order for this scheme to function properly, it required
|
|
50 that the outer function be partially expanded, then we switch to
|
|
51 compiling the inner function, and once done with those we switch back
|
|
52 to compiling the outer function. Such delicate ordering requirements
|
|
53 makes it difficult to do whole translation unit optimizations
|
|
54 involving such functions.
|
|
55
|
|
56 The implementation here is much more direct. Everything that can be
|
|
57 referenced by an inner function is a member of an explicitly created
|
|
58 structure herein called the "nonlocal frame struct". The incoming
|
|
59 static chain for a nested function is a pointer to this struct in
|
|
60 the parent. In this way, we settle on known offsets from a known
|
|
61 base, and so are decoupled from the logic that places objects in the
|
|
62 function's stack frame. More importantly, we don't have to wait for
|
|
63 that to happen -- since the compilation of the inner function is no
|
|
64 longer tied to a real stack frame, the nonlocal frame struct can be
|
|
65 allocated anywhere. Which means that the outer function is now
|
|
66 inlinable.
|
|
67
|
|
68 Theory of operation here is very simple. Iterate over all the
|
|
69 statements in all the functions (depth first) several times,
|
|
70 allocating structures and fields on demand. In general we want to
|
|
71 examine inner functions first, so that we can avoid making changes
|
|
72 to outer functions which are unnecessary.
|
|
73
|
|
74 The order of the passes matters a bit, in that later passes will be
|
|
75 skipped if it is discovered that the functions don't actually interact
|
|
76 at all. That is, they're nested in the lexical sense but could have
|
|
77 been written as independent functions without change. */
|
|
78
|
|
79
|
|
80 struct nesting_info
|
|
81 {
|
|
82 struct nesting_info *outer;
|
|
83 struct nesting_info *inner;
|
|
84 struct nesting_info *next;
|
|
85
|
|
86 struct pointer_map_t *field_map;
|
|
87 struct pointer_map_t *var_map;
|
|
88 bitmap suppress_expansion;
|
|
89
|
|
90 tree context;
|
|
91 tree new_local_var_chain;
|
|
92 tree debug_var_chain;
|
|
93 tree frame_type;
|
|
94 tree frame_decl;
|
|
95 tree chain_field;
|
|
96 tree chain_decl;
|
|
97 tree nl_goto_field;
|
|
98
|
|
99 bool any_parm_remapped;
|
|
100 bool any_tramp_created;
|
|
101 char static_chain_added;
|
|
102 };
|
|
103
|
|
104
|
|
105 /* Obstack used for the bitmaps in the struct above. */
|
|
106 static struct bitmap_obstack nesting_info_bitmap_obstack;
|
|
107
|
|
108
|
|
109 /* We're working in so many different function contexts simultaneously,
|
|
110 that create_tmp_var is dangerous. Prevent mishap. */
|
|
111 #define create_tmp_var cant_use_create_tmp_var_here_dummy
|
|
112
|
|
113 /* Like create_tmp_var, except record the variable for registration at
|
|
114 the given nesting level. */
|
|
115
|
|
116 static tree
|
|
117 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
|
|
118 {
|
|
119 tree tmp_var;
|
|
120
|
|
121 /* If the type is of variable size or a type which must be created by the
|
|
122 frontend, something is wrong. Note that we explicitly allow
|
|
123 incomplete types here, since we create them ourselves here. */
|
|
124 gcc_assert (!TREE_ADDRESSABLE (type));
|
|
125 gcc_assert (!TYPE_SIZE_UNIT (type)
|
|
126 || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
|
|
127
|
|
128 tmp_var = create_tmp_var_raw (type, prefix);
|
|
129 DECL_CONTEXT (tmp_var) = info->context;
|
|
130 TREE_CHAIN (tmp_var) = info->new_local_var_chain;
|
|
131 DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
|
|
132 if (TREE_CODE (type) == COMPLEX_TYPE
|
|
133 || TREE_CODE (type) == VECTOR_TYPE)
|
|
134 DECL_GIMPLE_REG_P (tmp_var) = 1;
|
|
135
|
|
136 info->new_local_var_chain = tmp_var;
|
|
137
|
|
138 return tmp_var;
|
|
139 }
|
|
140
|
|
141 /* Take the address of EXP to be used within function CONTEXT.
|
|
142 Mark it for addressability as necessary. */
|
|
143
|
|
144 tree
|
|
145 build_addr (tree exp, tree context)
|
|
146 {
|
|
147 tree base = exp;
|
|
148 tree save_context;
|
|
149 tree retval;
|
|
150
|
|
151 while (handled_component_p (base))
|
|
152 base = TREE_OPERAND (base, 0);
|
|
153
|
|
154 if (DECL_P (base))
|
|
155 TREE_ADDRESSABLE (base) = 1;
|
|
156
|
|
157 /* Building the ADDR_EXPR will compute a set of properties for
|
|
158 that ADDR_EXPR. Those properties are unfortunately context
|
|
159 specific, i.e., they are dependent on CURRENT_FUNCTION_DECL.
|
|
160
|
|
161 Temporarily set CURRENT_FUNCTION_DECL to the desired context,
|
|
162 build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL. That
|
|
163 way the properties are for the ADDR_EXPR are computed properly. */
|
|
164 save_context = current_function_decl;
|
|
165 current_function_decl = context;
|
|
166 retval = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
|
|
167 current_function_decl = save_context;
|
|
168 return retval;
|
|
169 }
|
|
170
|
|
171 /* Insert FIELD into TYPE, sorted by alignment requirements. */
|
|
172
|
|
173 void
|
|
174 insert_field_into_struct (tree type, tree field)
|
|
175 {
|
|
176 tree *p;
|
|
177
|
|
178 DECL_CONTEXT (field) = type;
|
|
179
|
|
180 for (p = &TYPE_FIELDS (type); *p ; p = &TREE_CHAIN (*p))
|
|
181 if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
|
|
182 break;
|
|
183
|
|
184 TREE_CHAIN (field) = *p;
|
|
185 *p = field;
|
|
186
|
|
187 /* Set correct alignment for frame struct type. */
|
|
188 if (TYPE_ALIGN (type) < DECL_ALIGN (field))
|
|
189 TYPE_ALIGN (type) = DECL_ALIGN (field);
|
|
190 }
|
|
191
|
|
192 /* Build or return the RECORD_TYPE that describes the frame state that is
|
|
193 shared between INFO->CONTEXT and its nested functions. This record will
|
|
194 not be complete until finalize_nesting_tree; up until that point we'll
|
|
195 be adding fields as necessary.
|
|
196
|
|
197 We also build the DECL that represents this frame in the function. */
|
|
198
|
|
199 static tree
|
|
200 get_frame_type (struct nesting_info *info)
|
|
201 {
|
|
202 tree type = info->frame_type;
|
|
203 if (!type)
|
|
204 {
|
|
205 char *name;
|
|
206
|
|
207 type = make_node (RECORD_TYPE);
|
|
208
|
|
209 name = concat ("FRAME.",
|
|
210 IDENTIFIER_POINTER (DECL_NAME (info->context)),
|
|
211 NULL);
|
|
212 TYPE_NAME (type) = get_identifier (name);
|
|
213 free (name);
|
|
214
|
|
215 info->frame_type = type;
|
|
216 info->frame_decl = create_tmp_var_for (info, type, "FRAME");
|
|
217
|
|
218 /* ??? Always make it addressable for now, since it is meant to
|
|
219 be pointed to by the static chain pointer. This pessimizes
|
|
220 when it turns out that no static chains are needed because
|
|
221 the nested functions referencing non-local variables are not
|
|
222 reachable, but the true pessimization is to create the non-
|
|
223 local frame structure in the first place. */
|
|
224 TREE_ADDRESSABLE (info->frame_decl) = 1;
|
|
225 }
|
|
226 return type;
|
|
227 }
|
|
228
|
|
229 /* Return true if DECL should be referenced by pointer in the non-local
|
|
230 frame structure. */
|
|
231
|
|
232 static bool
|
|
233 use_pointer_in_frame (tree decl)
|
|
234 {
|
|
235 if (TREE_CODE (decl) == PARM_DECL)
|
|
236 {
|
|
237 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
|
|
238 sized decls, and inefficient to copy large aggregates. Don't bother
|
|
239 moving anything but scalar variables. */
|
|
240 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
|
|
241 }
|
|
242 else
|
|
243 {
|
|
244 /* Variable sized types make things "interesting" in the frame. */
|
|
245 return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
|
|
246 }
|
|
247 }
|
|
248
|
|
249 /* Given DECL, a non-locally accessed variable, find or create a field
|
|
250 in the non-local frame structure for the given nesting context. */
|
|
251
|
|
252 static tree
|
|
253 lookup_field_for_decl (struct nesting_info *info, tree decl,
|
|
254 enum insert_option insert)
|
|
255 {
|
|
256 void **slot;
|
|
257
|
|
258 if (insert == NO_INSERT)
|
|
259 {
|
|
260 slot = pointer_map_contains (info->field_map, decl);
|
|
261 return slot ? (tree) *slot : NULL_TREE;
|
|
262 }
|
|
263
|
|
264 slot = pointer_map_insert (info->field_map, decl);
|
|
265 if (!*slot)
|
|
266 {
|
|
267 tree field = make_node (FIELD_DECL);
|
|
268 DECL_NAME (field) = DECL_NAME (decl);
|
|
269
|
|
270 if (use_pointer_in_frame (decl))
|
|
271 {
|
|
272 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
|
|
273 DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
|
|
274 DECL_NONADDRESSABLE_P (field) = 1;
|
|
275 }
|
|
276 else
|
|
277 {
|
|
278 TREE_TYPE (field) = TREE_TYPE (decl);
|
|
279 DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
|
|
280 DECL_ALIGN (field) = DECL_ALIGN (decl);
|
|
281 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
|
|
282 TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
|
|
283 DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
|
|
284 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
|
|
285 }
|
|
286
|
|
287 insert_field_into_struct (get_frame_type (info), field);
|
|
288 *slot = field;
|
|
289
|
|
290 if (TREE_CODE (decl) == PARM_DECL)
|
|
291 info->any_parm_remapped = true;
|
|
292 }
|
|
293
|
|
294 return (tree) *slot;
|
|
295 }
|
|
296
|
|
297 /* Build or return the variable that holds the static chain within
|
|
298 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
|
|
299
|
|
300 static tree
|
|
301 get_chain_decl (struct nesting_info *info)
|
|
302 {
|
|
303 tree decl = info->chain_decl;
|
|
304 if (!decl)
|
|
305 {
|
|
306 tree type;
|
|
307
|
|
308 type = get_frame_type (info->outer);
|
|
309 type = build_pointer_type (type);
|
|
310
|
|
311 /* Note that this variable is *not* entered into any BIND_EXPR;
|
|
312 the construction of this variable is handled specially in
|
|
313 expand_function_start and initialize_inlined_parameters.
|
|
314 Note also that it's represented as a parameter. This is more
|
|
315 close to the truth, since the initial value does come from
|
|
316 the caller. */
|
|
317 decl = build_decl (PARM_DECL, create_tmp_var_name ("CHAIN"), type);
|
|
318 DECL_ARTIFICIAL (decl) = 1;
|
|
319 DECL_IGNORED_P (decl) = 1;
|
|
320 TREE_USED (decl) = 1;
|
|
321 DECL_CONTEXT (decl) = info->context;
|
|
322 DECL_ARG_TYPE (decl) = type;
|
|
323
|
|
324 /* Tell tree-inline.c that we never write to this variable, so
|
|
325 it can copy-prop the replacement value immediately. */
|
|
326 TREE_READONLY (decl) = 1;
|
|
327
|
|
328 info->chain_decl = decl;
|
|
329 }
|
|
330 return decl;
|
|
331 }
|
|
332
|
|
333 /* Build or return the field within the non-local frame state that holds
|
|
334 the static chain for INFO->CONTEXT. This is the way to walk back up
|
|
335 multiple nesting levels. */
|
|
336
|
|
337 static tree
|
|
338 get_chain_field (struct nesting_info *info)
|
|
339 {
|
|
340 tree field = info->chain_field;
|
|
341 if (!field)
|
|
342 {
|
|
343 tree type = build_pointer_type (get_frame_type (info->outer));
|
|
344
|
|
345 field = make_node (FIELD_DECL);
|
|
346 DECL_NAME (field) = get_identifier ("__chain");
|
|
347 TREE_TYPE (field) = type;
|
|
348 DECL_ALIGN (field) = TYPE_ALIGN (type);
|
|
349 DECL_NONADDRESSABLE_P (field) = 1;
|
|
350
|
|
351 insert_field_into_struct (get_frame_type (info), field);
|
|
352
|
|
353 info->chain_field = field;
|
|
354 }
|
|
355 return field;
|
|
356 }
|
|
357
|
|
358 /* Initialize a new temporary with the GIMPLE_CALL STMT. */
|
|
359
|
|
360 static tree
|
|
361 init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
|
|
362 gimple call)
|
|
363 {
|
|
364 tree t;
|
|
365
|
|
366 t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
|
|
367 gimple_call_set_lhs (call, t);
|
|
368 if (! gsi_end_p (*gsi))
|
|
369 gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
|
|
370 gsi_insert_before (gsi, call, GSI_SAME_STMT);
|
|
371
|
|
372 return t;
|
|
373 }
|
|
374
|
|
375
|
|
376 /* Copy EXP into a temporary. Allocate the temporary in the context of
|
|
377 INFO and insert the initialization statement before GSI. */
|
|
378
|
|
379 static tree
|
|
380 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
|
|
381 {
|
|
382 tree t;
|
|
383 gimple stmt;
|
|
384
|
|
385 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
|
|
386 stmt = gimple_build_assign (t, exp);
|
|
387 if (! gsi_end_p (*gsi))
|
|
388 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
|
|
389 gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
|
|
390
|
|
391 return t;
|
|
392 }
|
|
393
|
|
394
|
|
395 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
|
|
396
|
|
397 static tree
|
|
398 gsi_gimplify_val (struct nesting_info *info, tree exp,
|
|
399 gimple_stmt_iterator *gsi)
|
|
400 {
|
|
401 if (is_gimple_val (exp))
|
|
402 return exp;
|
|
403 else
|
|
404 return init_tmp_var (info, exp, gsi);
|
|
405 }
|
|
406
|
|
407 /* Similarly, but copy from the temporary and insert the statement
|
|
408 after the iterator. */
|
|
409
|
|
410 static tree
|
|
411 save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
|
|
412 {
|
|
413 tree t;
|
|
414 gimple stmt;
|
|
415
|
|
416 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
|
|
417 stmt = gimple_build_assign (exp, t);
|
|
418 if (! gsi_end_p (*gsi))
|
|
419 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
|
|
420 gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
|
|
421
|
|
422 return t;
|
|
423 }
|
|
424
|
|
425 /* Build or return the type used to represent a nested function trampoline. */
|
|
426
|
|
427 static GTY(()) tree trampoline_type;
|
|
428
|
|
429 static tree
|
|
430 get_trampoline_type (void)
|
|
431 {
|
|
432 unsigned align, size;
|
|
433 tree t;
|
|
434
|
|
435 if (trampoline_type)
|
|
436 return trampoline_type;
|
|
437
|
|
438 align = TRAMPOLINE_ALIGNMENT;
|
|
439 size = TRAMPOLINE_SIZE;
|
|
440
|
|
441 /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
|
|
442 then allocate extra space so that we can do dynamic alignment. */
|
|
443 if (align > STACK_BOUNDARY)
|
|
444 {
|
|
445 size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
|
|
446 align = STACK_BOUNDARY;
|
|
447 }
|
|
448
|
|
449 t = build_index_type (build_int_cst (NULL_TREE, size - 1));
|
|
450 t = build_array_type (char_type_node, t);
|
|
451 t = build_decl (FIELD_DECL, get_identifier ("__data"), t);
|
|
452 DECL_ALIGN (t) = align;
|
|
453 DECL_USER_ALIGN (t) = 1;
|
|
454
|
|
455 trampoline_type = make_node (RECORD_TYPE);
|
|
456 TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
|
|
457 TYPE_FIELDS (trampoline_type) = t;
|
|
458 layout_type (trampoline_type);
|
|
459 DECL_CONTEXT (t) = trampoline_type;
|
|
460
|
|
461 return trampoline_type;
|
|
462 }
|
|
463
|
|
464 /* Given DECL, a nested function, find or create a field in the non-local
|
|
465 frame structure for a trampoline for this function. */
|
|
466
|
|
467 static tree
|
|
468 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
|
|
469 enum insert_option insert)
|
|
470 {
|
|
471 void **slot;
|
|
472
|
|
473 if (insert == NO_INSERT)
|
|
474 {
|
|
475 slot = pointer_map_contains (info->var_map, decl);
|
|
476 return slot ? (tree) *slot : NULL_TREE;
|
|
477 }
|
|
478
|
|
479 slot = pointer_map_insert (info->var_map, decl);
|
|
480 if (!*slot)
|
|
481 {
|
|
482 tree field = make_node (FIELD_DECL);
|
|
483 DECL_NAME (field) = DECL_NAME (decl);
|
|
484 TREE_TYPE (field) = get_trampoline_type ();
|
|
485 TREE_ADDRESSABLE (field) = 1;
|
|
486
|
|
487 insert_field_into_struct (get_frame_type (info), field);
|
|
488 *slot = field;
|
|
489
|
|
490 info->any_tramp_created = true;
|
|
491 }
|
|
492
|
|
493 return (tree) *slot;
|
|
494 }
|
|
495
|
|
496 /* Build or return the field within the non-local frame state that holds
|
|
497 the non-local goto "jmp_buf". The buffer itself is maintained by the
|
|
498 rtl middle-end as dynamic stack space is allocated. */
|
|
499
|
|
500 static tree
|
|
501 get_nl_goto_field (struct nesting_info *info)
|
|
502 {
|
|
503 tree field = info->nl_goto_field;
|
|
504 if (!field)
|
|
505 {
|
|
506 unsigned size;
|
|
507 tree type;
|
|
508
|
|
509 /* For __builtin_nonlocal_goto, we need N words. The first is the
|
|
510 frame pointer, the rest is for the target's stack pointer save
|
|
511 area. The number of words is controlled by STACK_SAVEAREA_MODE;
|
|
512 not the best interface, but it'll do for now. */
|
|
513 if (Pmode == ptr_mode)
|
|
514 type = ptr_type_node;
|
|
515 else
|
|
516 type = lang_hooks.types.type_for_mode (Pmode, 1);
|
|
517
|
|
518 size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
|
|
519 size = size / GET_MODE_SIZE (Pmode);
|
|
520 size = size + 1;
|
|
521
|
|
522 type = build_array_type
|
|
523 (type, build_index_type (build_int_cst (NULL_TREE, size)));
|
|
524
|
|
525 field = make_node (FIELD_DECL);
|
|
526 DECL_NAME (field) = get_identifier ("__nl_goto_buf");
|
|
527 TREE_TYPE (field) = type;
|
|
528 DECL_ALIGN (field) = TYPE_ALIGN (type);
|
|
529 TREE_ADDRESSABLE (field) = 1;
|
|
530
|
|
531 insert_field_into_struct (get_frame_type (info), field);
|
|
532
|
|
533 info->nl_goto_field = field;
|
|
534 }
|
|
535
|
|
536 return field;
|
|
537 }
|
|
538
|
|
539 /* Invoke CALLBACK on all statements of GIMPLE sequence SEQ. */
|
|
540
|
|
541 static void
|
|
542 walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
|
|
543 struct nesting_info *info, gimple_seq seq)
|
|
544 {
|
|
545 struct walk_stmt_info wi;
|
|
546
|
|
547 memset (&wi, 0, sizeof (wi));
|
|
548 wi.info = info;
|
|
549 wi.val_only = true;
|
|
550 walk_gimple_seq (seq, callback_stmt, callback_op, &wi);
|
|
551 }
|
|
552
|
|
553
|
|
554 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
|
|
555
|
|
556 static inline void
|
|
557 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
|
|
558 struct nesting_info *info)
|
|
559 {
|
|
560 walk_body (callback_stmt, callback_op, info, gimple_body (info->context));
|
|
561 }
|
|
562
|
|
563 /* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body. */
|
|
564
|
|
565 static void
|
|
566 walk_gimple_omp_for (gimple for_stmt,
|
|
567 walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
|
|
568 struct nesting_info *info)
|
|
569 {
|
|
570 struct walk_stmt_info wi;
|
|
571 gimple_seq seq;
|
|
572 tree t;
|
|
573 size_t i;
|
|
574
|
|
575 walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body (for_stmt));
|
|
576
|
|
577 seq = gimple_seq_alloc ();
|
|
578 memset (&wi, 0, sizeof (wi));
|
|
579 wi.info = info;
|
|
580 wi.gsi = gsi_last (seq);
|
|
581
|
|
582 for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
|
|
583 {
|
|
584 wi.val_only = false;
|
|
585 walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
|
|
586 &wi, NULL);
|
|
587 wi.val_only = true;
|
|
588 wi.is_lhs = false;
|
|
589 walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
|
|
590 &wi, NULL);
|
|
591
|
|
592 wi.val_only = true;
|
|
593 wi.is_lhs = false;
|
|
594 walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
|
|
595 &wi, NULL);
|
|
596
|
|
597 t = gimple_omp_for_incr (for_stmt, i);
|
|
598 gcc_assert (BINARY_CLASS_P (t));
|
|
599 wi.val_only = false;
|
|
600 walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
|
|
601 wi.val_only = true;
|
|
602 wi.is_lhs = false;
|
|
603 walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
|
|
604 }
|
|
605
|
|
606 if (gimple_seq_empty_p (seq))
|
|
607 gimple_seq_free (seq);
|
|
608 else
|
|
609 {
|
|
610 gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
|
|
611 annotate_all_with_location (seq, gimple_location (for_stmt));
|
|
612 gimple_seq_add_seq (&pre_body, seq);
|
|
613 gimple_omp_for_set_pre_body (for_stmt, pre_body);
|
|
614 }
|
|
615 }
|
|
616
|
|
617 /* Similarly for ROOT and all functions nested underneath, depth first. */
|
|
618
|
|
619 static void
|
|
620 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
|
|
621 struct nesting_info *root)
|
|
622 {
|
|
623 do
|
|
624 {
|
|
625 if (root->inner)
|
|
626 walk_all_functions (callback_stmt, callback_op, root->inner);
|
|
627 walk_function (callback_stmt, callback_op, root);
|
|
628 root = root->next;
|
|
629 }
|
|
630 while (root);
|
|
631 }
|
|
632
|
|
633
|
|
634 /* We have to check for a fairly pathological case. The operands of function
|
|
635 nested function are to be interpreted in the context of the enclosing
|
|
636 function. So if any are variably-sized, they will get remapped when the
|
|
637 enclosing function is inlined. But that remapping would also have to be
|
|
638 done in the types of the PARM_DECLs of the nested function, meaning the
|
|
639 argument types of that function will disagree with the arguments in the
|
|
640 calls to that function. So we'd either have to make a copy of the nested
|
|
641 function corresponding to each time the enclosing function was inlined or
|
|
642 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
|
|
643 function. The former is not practical. The latter would still require
|
|
644 detecting this case to know when to add the conversions. So, for now at
|
|
645 least, we don't inline such an enclosing function.
|
|
646
|
|
647 We have to do that check recursively, so here return indicating whether
|
|
648 FNDECL has such a nested function. ORIG_FN is the function we were
|
|
649 trying to inline to use for checking whether any argument is variably
|
|
650 modified by anything in it.
|
|
651
|
|
652 It would be better to do this in tree-inline.c so that we could give
|
|
653 the appropriate warning for why a function can't be inlined, but that's
|
|
654 too late since the nesting structure has already been flattened and
|
|
655 adding a flag just to record this fact seems a waste of a flag. */
|
|
656
|
|
657 static bool
|
|
658 check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
|
|
659 {
|
|
660 struct cgraph_node *cgn = cgraph_node (fndecl);
|
|
661 tree arg;
|
|
662
|
|
663 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
|
|
664 {
|
|
665 for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = TREE_CHAIN (arg))
|
|
666 if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
|
|
667 return true;
|
|
668
|
|
669 if (check_for_nested_with_variably_modified (cgn->decl, orig_fndecl))
|
|
670 return true;
|
|
671 }
|
|
672
|
|
673 return false;
|
|
674 }
|
|
675
|
|
676 /* Construct our local datastructure describing the function nesting
|
|
677 tree rooted by CGN. */
|
|
678
|
|
679 static struct nesting_info *
|
|
680 create_nesting_tree (struct cgraph_node *cgn)
|
|
681 {
|
|
682 struct nesting_info *info = XCNEW (struct nesting_info);
|
|
683 info->field_map = pointer_map_create ();
|
|
684 info->var_map = pointer_map_create ();
|
|
685 info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
|
|
686 info->context = cgn->decl;
|
|
687
|
|
688 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
|
|
689 {
|
|
690 struct nesting_info *sub = create_nesting_tree (cgn);
|
|
691 sub->outer = info;
|
|
692 sub->next = info->inner;
|
|
693 info->inner = sub;
|
|
694 }
|
|
695
|
|
696 /* See discussion at check_for_nested_with_variably_modified for a
|
|
697 discussion of why this has to be here. */
|
|
698 if (check_for_nested_with_variably_modified (info->context, info->context))
|
|
699 DECL_UNINLINABLE (info->context) = true;
|
|
700
|
|
701 return info;
|
|
702 }
|
|
703
|
|
704 /* Return an expression computing the static chain for TARGET_CONTEXT
|
|
705 from INFO->CONTEXT. Insert any necessary computations before TSI. */
|
|
706
|
|
707 static tree
|
|
708 get_static_chain (struct nesting_info *info, tree target_context,
|
|
709 gimple_stmt_iterator *gsi)
|
|
710 {
|
|
711 struct nesting_info *i;
|
|
712 tree x;
|
|
713
|
|
714 if (info->context == target_context)
|
|
715 {
|
|
716 x = build_addr (info->frame_decl, target_context);
|
|
717 }
|
|
718 else
|
|
719 {
|
|
720 x = get_chain_decl (info);
|
|
721
|
|
722 for (i = info->outer; i->context != target_context; i = i->outer)
|
|
723 {
|
|
724 tree field = get_chain_field (i);
|
|
725
|
|
726 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
|
|
727 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
|
|
728 x = init_tmp_var (info, x, gsi);
|
|
729 }
|
|
730 }
|
|
731
|
|
732 return x;
|
|
733 }
|
|
734
|
|
735
|
|
736 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
|
|
737 frame as seen from INFO->CONTEXT. Insert any necessary computations
|
|
738 before GSI. */
|
|
739
|
|
740 static tree
|
|
741 get_frame_field (struct nesting_info *info, tree target_context,
|
|
742 tree field, gimple_stmt_iterator *gsi)
|
|
743 {
|
|
744 struct nesting_info *i;
|
|
745 tree x;
|
|
746
|
|
747 if (info->context == target_context)
|
|
748 {
|
|
749 /* Make sure frame_decl gets created. */
|
|
750 (void) get_frame_type (info);
|
|
751 x = info->frame_decl;
|
|
752 }
|
|
753 else
|
|
754 {
|
|
755 x = get_chain_decl (info);
|
|
756
|
|
757 for (i = info->outer; i->context != target_context; i = i->outer)
|
|
758 {
|
|
759 tree field = get_chain_field (i);
|
|
760
|
|
761 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
|
|
762 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
|
|
763 x = init_tmp_var (info, x, gsi);
|
|
764 }
|
|
765
|
|
766 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
|
|
767 }
|
|
768
|
|
769 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
|
|
770 return x;
|
|
771 }
|
|
772
|
|
773
|
|
774 /* A subroutine of convert_nonlocal_reference_op. Create a local variable
|
|
775 in the nested function with DECL_VALUE_EXPR set to reference the true
|
|
776 variable in the parent function. This is used both for debug info
|
|
777 and in OpenMP lowering. */
|
|
778
|
|
779 static tree
|
|
780 get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
|
|
781 {
|
|
782 tree target_context;
|
|
783 struct nesting_info *i;
|
|
784 tree x, field, new_decl;
|
|
785 void **slot;
|
|
786
|
|
787 slot = pointer_map_insert (info->var_map, decl);
|
|
788
|
|
789 if (*slot)
|
|
790 return (tree) *slot;
|
|
791
|
|
792 target_context = decl_function_context (decl);
|
|
793
|
|
794 /* A copy of the code in get_frame_field, but without the temporaries. */
|
|
795 if (info->context == target_context)
|
|
796 {
|
|
797 /* Make sure frame_decl gets created. */
|
|
798 (void) get_frame_type (info);
|
|
799 x = info->frame_decl;
|
|
800 i = info;
|
|
801 }
|
|
802 else
|
|
803 {
|
|
804 x = get_chain_decl (info);
|
|
805 for (i = info->outer; i->context != target_context; i = i->outer)
|
|
806 {
|
|
807 field = get_chain_field (i);
|
|
808 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
|
|
809 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
|
|
810 }
|
|
811 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
|
|
812 }
|
|
813
|
|
814 field = lookup_field_for_decl (i, decl, INSERT);
|
|
815 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
|
|
816 if (use_pointer_in_frame (decl))
|
|
817 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
|
|
818
|
|
819 /* ??? We should be remapping types as well, surely. */
|
|
820 new_decl = build_decl (VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
|
|
821 DECL_CONTEXT (new_decl) = info->context;
|
|
822 DECL_SOURCE_LOCATION (new_decl) = DECL_SOURCE_LOCATION (decl);
|
|
823 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
|
|
824 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
|
|
825 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
|
|
826 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
|
|
827 TREE_READONLY (new_decl) = TREE_READONLY (decl);
|
|
828 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
|
|
829 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
|
|
830
|
|
831 SET_DECL_VALUE_EXPR (new_decl, x);
|
|
832 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
|
|
833
|
|
834 *slot = new_decl;
|
|
835 TREE_CHAIN (new_decl) = info->debug_var_chain;
|
|
836 info->debug_var_chain = new_decl;
|
|
837
|
|
838 return new_decl;
|
|
839 }
|
|
840
|
|
841
|
|
842 /* Callback for walk_gimple_stmt, rewrite all references to VAR
|
|
843 and PARM_DECLs that belong to outer functions.
|
|
844
|
|
845 The rewrite will involve some number of structure accesses back up
|
|
846 the static chain. E.g. for a variable FOO up one nesting level it'll
|
|
847 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
|
|
848 indirections apply to decls for which use_pointer_in_frame is true. */
|
|
849
|
|
static tree
convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp;

  /* By default we handle subtrees ourselves; individual cases re-enable
     the generic walk where that is safe.  */
  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) != info->context)
	{
	  tree x;
	  wi->changed = true;

	  x = get_nonlocal_debug_decl (info, t);
	  /* If this decl's expansion is suppressed (e.g. by an OpenMP
	     clause), the debug decl itself is the replacement; otherwise
	     build the chain of frame accesses to reach the real object.  */
	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    {
	      tree target_context = decl_function_context (t);
	      struct nesting_info *i;
	      /* Walk up to the nesting level that owns T.  */
	      for (i = info->outer; i->context != target_context; i = i->outer)
		continue;
	      x = lookup_field_for_decl (i, t, INSERT);
	      x = get_frame_field (info, target_context, x, &wi->gsi);
	      /* Decls stored by pointer in the frame need one more
		 indirection to reach the object itself.  */
	      if (use_pointer_in_frame (t))
		{
		  x = init_tmp_var (info, x, &wi->gsi);
		  x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
		}
	    }

	  /* In a value context, load (or for an lhs, store through) a
	     temporary so the reference stays a valid gimple operand.  */
	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->gsi);
	      else
		x = init_tmp_var (info, x, &wi->gsi);
	    }

	  *tp = x;
	}
      break;

    case LABEL_DECL:
      /* We're taking the address of a label from a parent function, but
	 this is not itself a non-local goto.  Mark the label such that it
	 will not be deleted, much as we would with a label address in
	 static storage.  */
      if (decl_function_context (t) != info->context)
	FORCED_LABEL (t) = 1;
      break;

    case ADDR_EXPR:
      {
	bool save_val_only = wi->val_only;

	/* The operand of an ADDR_EXPR is an lvalue, not a value.  */
	wi->val_only = false;
	wi->is_lhs = false;
	wi->changed = false;
	walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
	wi->val_only = true;

	if (wi->changed)
	  {
	    tree save_context;

	    /* If we changed anything, we might no longer be directly
	       referencing a decl.  */
	    save_context = current_function_decl;
	    current_function_decl = info->context;
	    recompute_tree_invariant_for_addr_expr (t);
	    current_function_decl = save_context;

	    /* If the callback converted the address argument in a context
	       where we only accept variables (and min_invariant, presumably),
	       then compute the address into a temporary.  */
	    if (save_val_only)
	      *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				      t, &wi->gsi);
	  }
      }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  /* Only walk the operands that describe the reference (offsets,
	     indices, sizes), not the base, which is handled below.  */
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
			 wi, NULL);
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
			 wi, NULL);
	    }
	}
      /* Now walk the final base object in lvalue context.  */
      wi->val_only = false;
      walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
|
|
997
|
|
998 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
|
|
999 struct walk_stmt_info *);
|
|
1000
|
|
1001 /* Helper for convert_nonlocal_references, rewrite all references to VAR
|
|
1002 and PARM_DECLs that belong to outer functions. */
|
|
1003
|
|
static bool
convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_chain = false, need_stmts = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  /* Work on a copy of the suppression bitmap; the caller restores the
     old one when it is done with this OpenMP region.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  /* Clauses with attached gimple sequences need a second pass
	     (below) once the suppression bitmap is final.  */
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	do_decl_clause:
	  decl = OMP_CLAUSE_DECL (clause);
	  /* Non-automatic variables are never processed.  */
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      /* Suppress frame expansion for this decl inside the region:
		 the clause refers to the variable itself, via the debug
		 decl, not to a field in the frame.  */
	      bitmap_set_bit (new_suppress, DECL_UID (decl));
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	  /* These clauses carry a scalar expression operand; rewrite any
	     nonlocal references it contains.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					 &dummy, wi);
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	  /* No operands to rewrite.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  /* Second pass: walk the gimple sequences attached to clauses, with the
     placeholder's context temporarily moved into this function so that
     references resolve as if local.  */
  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info,
			 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info,
			 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
	  break;

	default:
	  break;
	}

  /* True if any rewritten clause requires the static chain.  */
  return need_chain;
}
|
|
1108
|
|
1109
|
|
1110 /* Callback for walk_gimple_stmt. Rewrite all references to VAR and
|
|
1111 PARM_DECLs that belong to outer functions. This handles statements
|
|
1112 that are not handled via the standard recursion done in
|
|
1113 walk_gimple_stmt. STMT is the statement to examine, DATA is as in
|
|
1114 convert_nonlocal_reference_op. Set *HANDLED_OPS_P to true if all the
|
|
1115 operands of STMT have been handled by this function. */
|
|
1116
|
|
static tree
convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
				 struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      /* Don't walk non-local gotos for now.  */
      if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	  *handled_ops_p = true;
	  return NULL_TREE;
	}
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      /* If any clause needed the static chain, pass CHAIN into the
	 region as a firstprivate so the body can reach outer frames.  */
      if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
	                                wi))
	{
	  tree c, decl;
	  decl = get_chain_decl (info);
	  c = build_omp_clause (OMP_CLAUSE_FIRSTPRIVATE);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}

      /* Temporaries created while walking the region body must be
	 declared inside the region, not in the enclosing function.  */
      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)),
		      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      /* The FOR statement's index/bounds/step need explicit walking.  */
      walk_gimple_omp_for (stmt, convert_nonlocal_reference_stmt,
	  		   convert_nonlocal_reference_op, info);
      walk_body (convert_nonlocal_reference_stmt,
	  	 convert_nonlocal_reference_op, info, gimple_omp_body (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
      /* No clauses on these; just walk the body.  */
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body (stmt));
      break;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* We have handled all of STMT operands, no need to traverse the operands.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
|
|
1211
|
|
1212
|
|
1213 /* A subroutine of convert_local_reference. Create a local variable
|
|
1214 in the parent function with DECL_VALUE_EXPR set to reference the
|
|
1215 field in FRAME. This is used both for debug info and in OpenMP
|
|
1216 lowering. */
|
|
1217
|
|
1218 static tree
|
|
1219 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
|
|
1220 {
|
|
1221 tree x, new_decl;
|
|
1222 void **slot;
|
|
1223
|
|
1224 slot = pointer_map_insert (info->var_map, decl);
|
|
1225 if (*slot)
|
|
1226 return (tree) *slot;
|
|
1227
|
|
1228 /* Make sure frame_decl gets created. */
|
|
1229 (void) get_frame_type (info);
|
|
1230 x = info->frame_decl;
|
|
1231 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
|
|
1232
|
|
1233 new_decl = build_decl (VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
|
|
1234 DECL_CONTEXT (new_decl) = info->context;
|
|
1235 DECL_SOURCE_LOCATION (new_decl) = DECL_SOURCE_LOCATION (decl);
|
|
1236 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
|
|
1237 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
|
|
1238 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
|
|
1239 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
|
|
1240 TREE_READONLY (new_decl) = TREE_READONLY (decl);
|
|
1241 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
|
|
1242 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
|
|
1243
|
|
1244 SET_DECL_VALUE_EXPR (new_decl, x);
|
|
1245 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
|
|
1246 *slot = new_decl;
|
|
1247
|
|
1248 TREE_CHAIN (new_decl) = info->debug_var_chain;
|
|
1249 info->debug_var_chain = new_decl;
|
|
1250
|
|
1251 /* Do not emit debug info twice. */
|
|
1252 DECL_IGNORED_P (decl) = 1;
|
|
1253
|
|
1254 return new_decl;
|
|
1255 }
|
|
1256
|
|
1257
|
|
1258 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
|
|
1259 and PARM_DECLs that were referenced by inner nested functions.
|
|
1260 The rewrite will be a structure reference to the local frame variable. */
|
|
1261
|
|
1262 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
|
|
1263
|
|
static tree
convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp, field, x;
  bool save_val_only;

  /* By default we handle subtrees ourselves; individual cases re-enable
     the generic walk where that is safe.  */
  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) == info->context)
	{
	  /* If we copied a pointer to the frame, then the original decl
	     is used unchanged in the parent function.  */
	  if (use_pointer_in_frame (t))
	    break;

	  /* No need to transform anything if no child references the
	     variable.  */
	  field = lookup_field_for_decl (info, t, NO_INSERT);
	  if (!field)
	    break;
	  wi->changed = true;

	  /* Normally rewrite to the frame field; under a suppressing
	     OpenMP clause use the debug decl itself.  */
	  x = get_local_debug_decl (info, t, field);
	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    x = get_frame_field (info, info->context, field, &wi->gsi);

	  /* In a value context, go through a temporary so the reference
	     remains a valid gimple operand.  */
	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->gsi);
	      else
		x = init_tmp_var (info, x, &wi->gsi);
	    }

	  *tp = x;
	}
      break;

    case ADDR_EXPR:
      save_val_only = wi->val_only;
      /* The operand of an ADDR_EXPR is an lvalue, not a value.  */
      wi->val_only = false;
      wi->is_lhs = false;
      wi->changed = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;

      /* If we converted anything ... */
      if (wi->changed)
	{
	  tree save_context;

	  /* Then the frame decl is now addressable.  */
	  TREE_ADDRESSABLE (info->frame_decl) = 1;

	  save_context = current_function_decl;
	  current_function_decl = info->context;
	  recompute_tree_invariant_for_addr_expr (t);
	  current_function_decl = save_context;

	  /* If we are in a context where we only accept values, then
	     compute the address into a temporary.  */
	  if (save_val_only)
	    *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				    t, &wi->gsi);
	}
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  /* Only walk the operands that describe the reference (offsets,
	     indices, sizes), not the base, which is handled below.  */
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
			 NULL);
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
			 NULL);
	    }
	}
      /* Now walk the final base object in lvalue context.  */
      wi->val_only = false;
      walk_tree (tp, convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
|
|
1400
|
|
1401 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
|
|
1402 struct walk_stmt_info *);
|
|
1403
|
|
1404 /* Helper for convert_local_reference. Convert all the references in
|
|
1405 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference. */
|
|
1406
|
|
static bool
convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_frame = false, need_stmts = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  /* Work on a copy of the suppression bitmap; the caller restores the
     old one when it is done with this OpenMP region.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  /* Clauses with attached gimple sequences need a second pass
	     (below) once the suppression bitmap is final.  */
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	do_decl_clause:
	  decl = OMP_CLAUSE_DECL (clause);
	  /* Non-automatic variables are never processed.  */
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) == info->context
	      && !use_pointer_in_frame (decl))
	    {
	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
	      if (field)
		{
		  /* Suppress frame expansion for this decl inside the
		     region: the clause refers to the debug decl, not to
		     the field in the frame.  */
		  bitmap_set_bit (new_suppress, DECL_UID (decl));
		  OMP_CLAUSE_DECL (clause)
		    = get_local_debug_decl (info, decl, field);
		  need_frame = true;
		}
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	  /* These clauses carry a scalar expression operand; rewrite any
	     frame-resident references it contains.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0), &dummy,
				      wi);
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	  /* No operands to rewrite.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  /* Second pass: walk the gimple sequences attached to clauses, with the
     placeholder's context temporarily moved into this function so that
     references resolve as if local.  */
  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      walk_body (convert_local_reference_stmt,
			 convert_local_reference_op, info,
			 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
	      walk_body (convert_local_reference_stmt,
			 convert_local_reference_op, info,
			 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  walk_body (convert_local_reference_stmt,
		     convert_local_reference_op, info,
		     OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
	  break;

	default:
	  break;
	}

  /* True if any rewritten clause needs access to the local frame.  */
  return need_frame;
}
|
|
1517
|
|
1518
|
|
1519 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
|
|
1520 and PARM_DECLs that were referenced by inner nested functions.
|
|
1521 The rewrite will be a structure reference to the local frame variable. */
|
|
1522
|
|
static tree
convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      /* If any clause needs the frame, share the frame object with the
	 region so the body can reach the rewritten variables.  */
      if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
	  			     wi))
	{
	  tree c;
	  (void) get_frame_type (info);
	  c = build_omp_clause (OMP_CLAUSE_SHARED);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}

      /* Temporaries created while walking the region body must be
	 declared inside the region, not in the enclosing function.  */
      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
	         gimple_omp_body (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      /* The FOR statement's index/bounds/step need explicit walking.  */
      walk_gimple_omp_for (stmt, convert_local_reference_stmt,
			   convert_local_reference_op, info);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
      /* No clauses on these; just walk the body.  */
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body (stmt));
      break;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Indicate that we have handled all the operands ourselves.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
|
|
1605
|
|
1606
|
|
1607 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
|
|
1608 that reference labels from outer functions. The rewrite will be a
|
|
1609 call to __builtin_nonlocal_goto. */
|
|
1610
|
|
static tree
convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			   struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree label, new_label, target_context, x, field;
  void **slot;
  gimple call;
  gimple stmt = gsi_stmt (*gsi);

  /* Only direct gotos to a LABEL_DECL are candidates.  */
  if (gimple_code (stmt) != GIMPLE_GOTO)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  label = gimple_goto_dest (stmt);
  if (TREE_CODE (label) != LABEL_DECL)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* A goto to a label in the current function is an ordinary goto.  */
  target_context = decl_function_context (label);
  if (target_context == info->context)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Walk up to the nesting level that owns the label.  */
  for (i = info->outer; target_context != i->context; i = i->outer)
    continue;

  /* The original user label may also be use for a normal goto, therefore
     we must create a new label that will actually receive the abnormal
     control transfer.  This new label will be marked LABEL_NONLOCAL; this
     mark will trigger proper behavior in the cfg, as well as cause the
     (hairy target-specific) non-local goto receiver code to be generated
     when we expand rtl.  Enter this association into var_map so that we
     can insert the new label into the IL during a second pass.  */
  slot = pointer_map_insert (i->var_map, label);
  if (*slot == NULL)
    {
      new_label = create_artificial_label ();
      DECL_NONLOCAL (new_label) = 1;
      *slot = new_label;
    }
  else
    new_label = (tree) *slot;

  /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field).  */
  field = get_nl_goto_field (i);
  x = get_frame_field (info, target_context, field, &wi->gsi);
  x = build_addr (x, target_context);
  x = gsi_gimplify_val (info, x, &wi->gsi);
  call = gimple_build_call (implicit_built_in_decls[BUILT_IN_NONLOCAL_GOTO], 2,
			    build_addr (new_label, target_context), x);
  /* Replace the GIMPLE_GOTO with the builtin call in place.  */
  gsi_replace (&wi->gsi, call, false);

  /* We have handled all of STMT's operands, no need to keep going.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
|
|
1674
|
|
1675
|
|
1676 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
|
|
1677 are referenced via nonlocal goto from a nested function. The rewrite
|
|
1678 will involve installing a newly generated DECL_NONLOCAL label, and
|
|
1679 (potentially) a branch around the rtl gunk that is assumed to be
|
|
1680 attached to such a label. */
|
|
1681
|
|
static tree
convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			  struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree label, new_label;
  gimple_stmt_iterator tmp_gsi;
  void **slot;
  gimple stmt = gsi_stmt (*gsi);

  if (gimple_code (stmt) != GIMPLE_LABEL)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  label = gimple_label_label (stmt);

  /* Only labels recorded by convert_nl_goto_reference need a receiver.  */
  slot = pointer_map_contains (info->var_map, label);
  if (!slot)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* If there's any possibility that the previous statement falls through,
     then we must branch around the new non-local label.  */
  tmp_gsi = wi->gsi;
  gsi_prev (&tmp_gsi);
  if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
    {
      /* Fallthrough execution skips over the nonlocal receiver label
	 and lands directly on the original user label.  */
      gimple stmt = gimple_build_goto (label);
      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
    }

  /* Insert the DECL_NONLOCAL label created in the first pass just before
     the original label.  */
  new_label = (tree) *slot;
  stmt = gimple_build_label (new_label);
  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

  *handled_ops_p = true;
  return NULL_TREE;
}
|
|
1724
|
|
1725
|
|
1726 /* Called via walk_function+walk_stmt, rewrite all references to addresses
|
|
1727 of nested functions that require the use of trampolines. The rewrite
|
|
1728 will involve a reference a trampoline generated for the occasion. */
|
|
1729
|
|
static tree
convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree t = *tp, decl, target_context, x, builtin;
  gimple call;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case ADDR_EXPR:
      /* Build
	   T.1 = &CHAIN->tramp;
	   T.2 = __builtin_adjust_trampoline (T.1);
	   T.3 = (func_type)T.2;
      */

      decl = TREE_OPERAND (t, 0);
      if (TREE_CODE (decl) != FUNCTION_DECL)
	break;

      /* Only need to process nested functions.  */
      target_context = decl_function_context (decl);
      if (!target_context)
	break;

      /* If the nested function doesn't use a static chain, then
	 it doesn't need a trampoline.  */
      if (DECL_NO_STATIC_CHAIN (decl))
	break;

      /* If we don't want a trampoline, then don't build one.  */
      if (TREE_NO_TRAMPOLINE (t))
	break;

      /* Lookup the immediate parent of the callee, as that's where
	 we need to insert the trampoline.  */
      for (i = info; i->context != target_context; i = i->outer)
	continue;
      x = lookup_tramp_for_decl (i, decl, INSERT);

      /* Compute the address of the field holding the trampoline.  */
      x = get_frame_field (info, target_context, x, &wi->gsi);
      x = build_addr (x, target_context);
      x = gsi_gimplify_val (info, x, &wi->gsi);

      /* Do machine-specific ugliness.  Normally this will involve
	 computing extra alignment, but it can really be anything.  */
      builtin = implicit_built_in_decls[BUILT_IN_ADJUST_TRAMPOLINE];
      call = gimple_build_call (builtin, 1, x);
      x = init_tmp_var_with_call (info, &wi->gsi, call);

      /* Cast back to the proper function type.  */
      x = build1 (NOP_EXPR, TREE_TYPE (t), x);
      x = init_tmp_var (info, x, &wi->gsi);

      /* Replace the original &func with the adjusted trampoline address.  */
      *tp = x;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	*walk_subtrees = 1;
      break;
    }

  return NULL_TREE;
}
|
|
1798
|
|
1799
|
|
1800 /* Called via walk_function+walk_gimple_stmt, rewrite all references
|
|
1801 to addresses of nested functions that require the use of
|
|
1802 trampolines. The rewrite will involve a reference a trampoline
|
|
1803 generated for the occasion. */
|
|
1804
|
|
1805 static tree
|
|
1806 convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
|
|
1807 struct walk_stmt_info *wi)
|
|
1808 {
|
|
1809 gimple stmt = gsi_stmt (*gsi);
|
|
1810
|
|
1811 switch (gimple_code (stmt))
|
|
1812 {
|
|
1813 case GIMPLE_CALL:
|
|
1814 {
|
|
1815 /* Only walk call arguments, lest we generate trampolines for
|
|
1816 direct calls. */
|
|
1817 unsigned long i, nargs = gimple_call_num_args (stmt);
|
|
1818 for (i = 0; i < nargs; i++)
|
|
1819 walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
|
|
1820 wi, NULL);
|
|
1821
|
|
1822 *handled_ops_p = true;
|
|
1823 return NULL_TREE;
|
|
1824 }
|
|
1825
|
|
1826 default:
|
|
1827 break;
|
|
1828 }
|
|
1829
|
|
1830 *handled_ops_p = false;
|
|
1831 return NULL_TREE;
|
|
1832 }
|
|
1833
|
|
1834
|
|
1835
|
|
/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
   that reference nested functions to make sure that the static chain
   is set up properly for the call.

   GSI/HANDLED_OPS_P are the usual walk_gimple_stmt parameters; WI->info
   carries the nesting_info of the function being processed.  */

static tree
convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		     struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree decl, target_context;
  char save_static_chain_added;
  int i;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Only direct calls have a known callee decl; indirect calls
	 need no chain rewriting here.  */
      decl = gimple_call_fndecl (stmt);
      if (!decl)
	break;
      target_context = decl_function_context (decl);
      if (target_context && !DECL_NO_STATIC_CHAIN (decl))
	{
	  gimple_call_set_chain (stmt, get_static_chain (info, target_context,
							 &wi->gsi));
	  /* Record which kind of chain was materialized: bit 0 when the
	     callee's parent is this very function (its frame is the
	     chain), bit 1 when the chain came from an outer context.  */
	  info->static_chain_added |= (1 << (info->context != target_context));
	}
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      /* Accumulate chain additions made inside the OMP body separately,
	 so we know which of FRAME.* and CHAIN.* the spawned function
	 will need access to via data-sharing clauses.  */
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body (stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  /* Bit 0 corresponds to FRAME.*, bit 1 to CHAIN.*.  */
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_taskreg_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      /* The chain pointer is copied by value (firstprivate);
		 the frame object itself must be shared.  */
	      c = build_omp_clause (i ? OMP_CLAUSE_FIRSTPRIVATE
				    : OMP_CLAUSE_SHARED);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	      gimple_omp_taskreg_set_clauses (stmt, c);
	    }
	}
      /* Merge the body's additions back into the enclosing tally.  */
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      walk_body (convert_gimple_call, NULL, info,
		 gimple_omp_for_pre_body (stmt));
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
      /* These constructs carry nested statement sequences that the
	 generic operand walk would not visit; recurse manually.  */
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body (stmt));
      break;

    default:
      /* Keep looking for other operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
|
|
1918
|
|
1919
|
|
1920 /* Walk the nesting tree starting with ROOT, depth first. Convert all
|
|
1921 trampolines and call expressions. On the way back up, determine if
|
|
1922 a nested function actually uses its static chain; if not, remember that. */
|
|
1923
|
|
1924 static void
|
|
1925 convert_all_function_calls (struct nesting_info *root)
|
|
1926 {
|
|
1927 do
|
|
1928 {
|
|
1929 if (root->inner)
|
|
1930 convert_all_function_calls (root->inner);
|
|
1931
|
|
1932 walk_function (convert_tramp_reference_stmt, convert_tramp_reference_op,
|
|
1933 root);
|
|
1934 walk_function (convert_gimple_call, NULL, root);
|
|
1935
|
|
1936 /* If the function does not use a static chain, then remember that. */
|
|
1937 if (root->outer && !root->chain_decl && !root->chain_field)
|
|
1938 DECL_NO_STATIC_CHAIN (root->context) = 1;
|
|
1939 else
|
|
1940 gcc_assert (!DECL_NO_STATIC_CHAIN (root->context));
|
|
1941
|
|
1942 root = root->next;
|
|
1943 }
|
|
1944 while (root);
|
|
1945 }
|
|
1946
|
|
/* Do "everything else" to clean up or complete state collected by the
   various walking passes -- lay out the types and decls, generate code
   to initialize the frame decl, store critical expressions in the
   struct function for rtl to find.  */

static void
finalize_nesting_tree_1 (struct nesting_info *root)
{
  gimple_seq stmt_list;
  gimple stmt;
  tree context = root->context;
  struct function *sf;

  /* Accumulates the frame/chain/trampoline initialization statements
     that will be spliced in front of the function body below.  */
  stmt_list = NULL;

  /* If we created a non-local frame type or decl, we need to lay them
     out at this time.  */
  if (root->frame_type)
    {
      /* In some cases the frame type will trigger the -Wpadded warning.
	 This is not helpful; suppress it. */
      int save_warn_padded = warn_padded;
      warn_padded = 0;
      layout_type (root->frame_type);
      warn_padded = save_warn_padded;
      layout_decl (root->frame_decl, 0);
    }

  /* If any parameters were referenced non-locally, then we need to
     insert a copy.  Likewise, if any variables were referenced by
     pointer, we need to initialize the address.  */
  if (root->any_parm_remapped)
    {
      tree p;
      for (p = DECL_ARGUMENTS (context); p ; p = TREE_CHAIN (p))
	{
	  tree field, x, y;

	  /* Parameters without a frame field were never remapped.  */
	  field = lookup_field_for_decl (root, p, NO_INSERT);
	  if (!field)
	    continue;

	  /* Frame holds either the parameter's address or its value,
	     depending on how use_pointer_in_frame classified it.  */
	  if (use_pointer_in_frame (p))
	    x = build_addr (p, context);
	  else
	    x = p;

	  /* Emit FRAME.field = x.  */
	  y = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  stmt = gimple_build_assign (y, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	  /* If the assignment is from a non-register the stmt is
	     not valid gimple.  Make it so by using a temporary instead.  */
	  if (!is_gimple_reg (x)
	      && is_gimple_reg_type (TREE_TYPE (x)))
	    {
	      gimple_stmt_iterator gsi = gsi_last (stmt_list);
	      x = init_tmp_var (root, x, &gsi);
	      gimple_assign_set_rhs1 (stmt, x);
	    }
	}
    }

  /* If a chain_field was created, then it needs to be initialized
     from chain_decl.  */
  if (root->chain_field)
    {
      tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
		       root->frame_decl, root->chain_field, NULL_TREE);
      stmt = gimple_build_assign (x, get_chain_decl (root));
      gimple_seq_add_stmt (&stmt_list, stmt);
    }

  /* If trampolines were created, then we need to initialize them.  */
  if (root->any_tramp_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree arg1, arg2, arg3, x, field;

	  /* Only children whose address was actually taken got a
	     trampoline field.  */
	  field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  /* arg3: static chain to bake into the trampoline -- the
	     address of our frame, or NULL if the child never uses it.  */
	  if (DECL_NO_STATIC_CHAIN (i->context))
	    arg3 = null_pointer_node;
	  else
	    arg3 = build_addr (root->frame_decl, context);

	  /* arg2: address of the nested function itself.  */
	  arg2 = build_addr (i->context, context);

	  /* arg1: address of the trampoline storage in our frame.  */
	  x = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  arg1 = build_addr (x, context);

	  x = implicit_built_in_decls[BUILT_IN_INIT_TRAMPOLINE];
	  stmt = gimple_build_call (x, 3, arg1, arg2, arg3);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If we created initialization statements, insert them.  */
  if (stmt_list)
    {
      gimple bind;
      annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
      /* Splice the initializers ahead of the existing body of the
	 outermost bind of the function.  */
      bind = gimple_seq_first_stmt (gimple_body (context));
      gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
      gimple_bind_set_body (bind, stmt_list);
    }

  /* If a chain_decl was created, then it needs to be registered with
     struct function so that it gets initialized from the static chain
     register at the beginning of the function.  */
  sf = DECL_STRUCT_FUNCTION (root->context);
  sf->static_chain_decl = root->chain_decl;

  /* Similarly for the non-local goto save area.  */
  if (root->nl_goto_field)
    {
      sf->nonlocal_goto_save_area
	= get_frame_field (root, context, root->nl_goto_field, NULL);
      sf->has_nonlocal_label = 1;
    }

  /* Make sure all new local variables get inserted into the
     proper BIND_EXPR.  */
  if (root->new_local_var_chain)
    declare_vars (root->new_local_var_chain,
		  gimple_seq_first_stmt (gimple_body (root->context)),
		  false);
  if (root->debug_var_chain)
    declare_vars (root->debug_var_chain,
		  gimple_seq_first_stmt (gimple_body (root->context)),
		  true);

  /* Dump the translated tree function.  */
  dump_function (TDI_nested, root->context);
}
|
|
2087
|
|
2088 static void
|
|
2089 finalize_nesting_tree (struct nesting_info *root)
|
|
2090 {
|
|
2091 do
|
|
2092 {
|
|
2093 if (root->inner)
|
|
2094 finalize_nesting_tree (root->inner);
|
|
2095 finalize_nesting_tree_1 (root);
|
|
2096 root = root->next;
|
|
2097 }
|
|
2098 while (root);
|
|
2099 }
|
|
2100
|
|
2101 /* Unnest the nodes and pass them to cgraph. */
|
|
2102
|
|
2103 static void
|
|
2104 unnest_nesting_tree_1 (struct nesting_info *root)
|
|
2105 {
|
|
2106 struct cgraph_node *node = cgraph_node (root->context);
|
|
2107
|
|
2108 /* For nested functions update the cgraph to reflect unnesting.
|
|
2109 We also delay finalizing of these functions up to this point. */
|
|
2110 if (node->origin)
|
|
2111 {
|
|
2112 cgraph_unnest_node (cgraph_node (root->context));
|
|
2113 cgraph_finalize_function (root->context, true);
|
|
2114 }
|
|
2115 }
|
|
2116
|
|
2117 static void
|
|
2118 unnest_nesting_tree (struct nesting_info *root)
|
|
2119 {
|
|
2120 do
|
|
2121 {
|
|
2122 if (root->inner)
|
|
2123 unnest_nesting_tree (root->inner);
|
|
2124 unnest_nesting_tree_1 (root);
|
|
2125 root = root->next;
|
|
2126 }
|
|
2127 while (root);
|
|
2128 }
|
|
2129
|
|
2130 /* Free the data structures allocated during this pass. */
|
|
2131
|
|
2132 static void
|
|
2133 free_nesting_tree (struct nesting_info *root)
|
|
2134 {
|
|
2135 struct nesting_info *next;
|
|
2136 do
|
|
2137 {
|
|
2138 if (root->inner)
|
|
2139 free_nesting_tree (root->inner);
|
|
2140 pointer_map_destroy (root->var_map);
|
|
2141 pointer_map_destroy (root->field_map);
|
|
2142 next = root->next;
|
|
2143 free (root);
|
|
2144 root = next;
|
|
2145 }
|
|
2146 while (root);
|
|
2147 }
|
|
2148
|
|
/* Main entry point for this pass.  Process FNDECL and all of its nested
   subroutines and turn them into something less tightly bound.  */

void
lower_nested_functions (tree fndecl)
{
  struct cgraph_node *cgn;
  struct nesting_info *root;

  /* If there are no nested functions, there's nothing to do.  */
  cgn = cgraph_node (fndecl);
  if (!cgn->nested)
    return;

  bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
  root = create_nesting_tree (cgn);

  /* NOTE(review): the sequence below looks order-sensitive (nonlocal
     references before local ones, all rewrites before
     convert_all_function_calls / finalization); preserve this order.  */
  walk_all_functions (convert_nonlocal_reference_stmt,
		      convert_nonlocal_reference_op,
		      root);
  walk_all_functions (convert_local_reference_stmt,
		      convert_local_reference_op,
		      root);
  walk_all_functions (convert_nl_goto_reference, NULL, root);
  walk_all_functions (convert_nl_goto_receiver, NULL, root);
  convert_all_function_calls (root);
  finalize_nesting_tree (root);
  unnest_nesting_tree (root);
  free_nesting_tree (root);
  bitmap_obstack_release (&nesting_info_bitmap_obstack);
}
|
|
2179
|
|
2180 #include "gt-tree-nested.h"
|