111
|
1 /* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.
|
|
2
|
131
|
3 Copyright (C) 2002-2018 Free Software Foundation, Inc.
|
111
|
4 Contributed by Jason Merrill <jason@redhat.com>
|
|
5
|
|
6 This file is part of GCC.
|
|
7
|
|
8 GCC is free software; you can redistribute it and/or modify it under
|
|
9 the terms of the GNU General Public License as published by the Free
|
|
10 Software Foundation; either version 3, or (at your option) any later
|
|
11 version.
|
|
12
|
|
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
|
|
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
|
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
|
16 for more details.
|
|
17
|
|
18 You should have received a copy of the GNU General Public License
|
|
19 along with GCC; see the file COPYING3. If not see
|
|
20 <http://www.gnu.org/licenses/>. */
|
|
21
|
|
22 #include "config.h"
|
|
23 #include "system.h"
|
|
24 #include "coretypes.h"
|
|
25 #include "target.h"
|
|
26 #include "basic-block.h"
|
|
27 #include "cp-tree.h"
|
|
28 #include "gimple.h"
|
|
29 #include "predict.h"
|
|
30 #include "stor-layout.h"
|
|
31 #include "tree-iterator.h"
|
|
32 #include "gimplify.h"
|
|
33 #include "c-family/c-ubsan.h"
|
|
34 #include "stringpool.h"
|
|
35 #include "attribs.h"
|
|
36 #include "asan.h"
|
|
37
|
|
/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree);

/* Local declarations.  */

/* Discriminates the two kinds of loop-exit statements; also used to
   index bc_label below.  */
enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  Indexed by enum bc_t; the innermost
   enclosing target is the head of each chain.  */
static tree bc_label[2];
|
|
52
|
|
53 /* Begin a scope which can be exited by a break or continue statement. BC
|
|
54 indicates which.
|
|
55
|
|
56 Just creates a label with location LOCATION and pushes it into the current
|
|
57 context. */
|
|
58
|
|
59 static tree
|
|
60 begin_bc_block (enum bc_t bc, location_t location)
|
|
61 {
|
|
62 tree label = create_artificial_label (location);
|
|
63 DECL_CHAIN (label) = bc_label[bc];
|
|
64 bc_label[bc] = label;
|
|
65 if (bc == bc_break)
|
|
66 LABEL_DECL_BREAK (label) = true;
|
|
67 else
|
|
68 LABEL_DECL_CONTINUE (label) = true;
|
|
69 return label;
|
|
70 }
|
|
71
|
|
/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   BLOCK.  Otherwise, just forget the label.  */

static void
finish_bc_block (tree *block, enum bc_t bc, tree label)
{
  /* Scopes must be closed strictly innermost-first.  */
  gcc_assert (label == bc_label[bc]);

  /* TREE_USED is set by get_bc_label when a break/continue actually
     targeted this label; only then does the label need to be emitted.  */
  if (TREE_USED (label))
    append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
			      block);

  /* Pop the label off the stack and unlink it.  */
  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
}
|
|
91
|
|
92 /* Get the LABEL_EXPR to represent a break or continue statement
|
|
93 in the current block scope. BC indicates which. */
|
|
94
|
|
95 static tree
|
|
96 get_bc_label (enum bc_t bc)
|
|
97 {
|
|
98 tree label = bc_label[bc];
|
|
99
|
|
100 /* Mark the label used for finish_bc_block. */
|
|
101 TREE_USED (label) = 1;
|
|
102 return label;
|
|
103 }
|
|
104
|
|
105 /* Genericize a TRY_BLOCK. */
|
|
106
|
|
107 static void
|
|
108 genericize_try_block (tree *stmt_p)
|
|
109 {
|
|
110 tree body = TRY_STMTS (*stmt_p);
|
|
111 tree cleanup = TRY_HANDLERS (*stmt_p);
|
|
112
|
|
113 *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
|
|
114 }
|
|
115
|
|
116 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
|
|
117
|
|
118 static void
|
|
119 genericize_catch_block (tree *stmt_p)
|
|
120 {
|
|
121 tree type = HANDLER_TYPE (*stmt_p);
|
|
122 tree body = HANDLER_BODY (*stmt_p);
|
|
123
|
|
124 /* FIXME should the caught type go in TREE_TYPE? */
|
|
125 *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
|
|
126 }
|
|
127
|
|
/* A terser interface for building a representation of an exception
   specification.  Returns a TRY_CATCH_EXPR whose cleanup is an
   EH_FILTER_EXPR: BODY runs normally; if it throws a type not in
   ALLOWED, FAILURE is executed.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  /* The filter becomes the cleanup (operand 1); BODY is the protected
     region (operand 0).  */
  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}
|
|
145
|
|
/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair, where the failure path calls
   the unexpected-exception handler with the current exception.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  /* Suppress warnings on the synthesized wrapper and its filter.  */
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}
|
|
160
|
|
/* Genericize an IF_STMT by turning it into a COND_EXPR.  If the
   condition is a compile-time constant and the dead arm has no side
   effects, the statement collapses to the live arm.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  /* COND_EXPR requires both arms; supply empty statements as needed.  */
  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  /* Don't clobber a location the selected arm may already carry.  */
  if (!EXPR_HAS_LOCATION (stmt))
    protected_set_expr_location (stmt, locus);
  *stmt_p = stmt;
}
|
|
189
|
|
/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.

   The result stored in *STMT_P is a LOOP_EXPR (or, for constant
   conditions, a simplified form) bracketed by the break/continue labels
   for the loop.  */

static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
		    tree incr, bool cond_is_first, int *walk_subtrees,
		    void *data)
{
  tree blab, clab;
  tree exit = NULL;
  tree stmt_list = NULL;

  /* The break label encloses the whole loop; the continue label only
     the body, so "continue" still runs INCR and the exit test.  */
  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  protected_set_expr_location (incr, start_locus);

  /* Genericize the pieces ourselves, then tell the walker not to
     recurse into the original operands again.  */
  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (cond && TREE_CODE (cond) != INTEGER_CST)
    {
      /* If COND is constant, don't bother building an exit.  If it's false,
	 we won't build a loop.  If it's true, any exits are in the body.  */
      location_t cloc = cp_expr_loc_or_loc (cond, start_locus);
      exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
			 get_bc_label (bc_break));
      exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
			      build_empty_stmt (cloc), exit);
    }

  /* Assemble the loop body: [exit-test] body continue-label incr
     [exit-test], with the test placed per COND_IS_FIRST.  */
  if (exit && cond_is_first)
    append_to_statement_list (exit, &stmt_list);
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  append_to_statement_list (incr, &stmt_list);
  if (exit && !cond_is_first)
    append_to_statement_list (exit, &stmt_list);

  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  tree loop;
  if (cond && integer_zerop (cond))
    {
      /* A constant-false condition: while/for never iterate (keep the
	 COND_EXPR so the condition's side-position is preserved);
	 do-while runs the body exactly once.  */
      if (cond_is_first)
	loop = fold_build3_loc (start_locus, COND_EXPR,
				void_type_node, cond, stmt_list,
				build_empty_stmt (start_locus));
      else
	loop = stmt_list;
    }
  else
    {
      /* For infinite loops prefer the body's location so diagnostics
	 point somewhere useful.  */
      location_t loc = start_locus;
      if (!cond || integer_nonzerop (cond))
	loc = EXPR_LOCATION (expr_first (body));
      if (loc == UNKNOWN_LOCATION)
	loc = start_locus;
      loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
    }

  /* Wrap the loop with the break label.  */
  stmt_list = NULL;
  append_to_statement_list (loop, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}
|
|
266
|
|
/* Genericize a FOR_STMT node *STMT_P: the init statement followed by
   the genericized loop.  */

static void
genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree expr = NULL;
  tree loop;
  tree init = FOR_INIT_STMT (stmt);

  if (init)
    {
      cp_walk_tree (&init, cp_genericize_r, data, NULL);
      append_to_statement_list (init, &expr);
    }

  genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
		      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
  append_to_statement_list (loop, &expr);
  /* append_to_statement_list drops side-effect-free statements, so EXPR
     can still be NULL here; fall back to the loop itself.  */
  if (expr == NULL_TREE)
    expr = loop;
  *stmt_p = expr;
}
|
|
290
|
|
291 /* Genericize a WHILE_STMT node *STMT_P. */
|
|
292
|
|
293 static void
|
|
294 genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
|
|
295 {
|
|
296 tree stmt = *stmt_p;
|
|
297 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
|
|
298 WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
|
|
299 }
|
|
300
|
|
301 /* Genericize a DO_STMT node *STMT_P. */
|
|
302
|
|
303 static void
|
|
304 genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
|
|
305 {
|
|
306 tree stmt = *stmt_p;
|
|
307 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
|
|
308 DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
|
|
309 }
|
|
310
|
|
/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR,
   with the break label (if used) appended after the body.  */

static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  break_block = begin_bc_block (bc_break, stmt_locus);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  /* Genericize the operands ourselves and stop the outer walk.  */
  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&type, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  /* Tell later passes this label is the switch's own break target.  */
  if (TREE_USED (break_block))
    SWITCH_BREAK_LABEL_P (break_block) = 1;
  finish_bc_block (&body, bc_break, break_block);
  *stmt_p = build2_loc (stmt_locus, SWITCH_EXPR, type, cond, body);
  SWITCH_ALL_CASES_P (*stmt_p) = SWITCH_STMT_ALL_CASES_P (stmt);
  /* A switch the parser marked break-free must not have used the label.  */
  gcc_checking_assert (!SWITCH_STMT_NO_BREAK_P (stmt)
		       || !TREE_USED (break_block));
}
|
|
341
|
|
342 /* Genericize a CONTINUE_STMT node *STMT_P. */
|
|
343
|
|
344 static void
|
|
345 genericize_continue_stmt (tree *stmt_p)
|
|
346 {
|
|
347 tree stmt_list = NULL;
|
|
348 tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
|
|
349 tree label = get_bc_label (bc_continue);
|
|
350 location_t location = EXPR_LOCATION (*stmt_p);
|
|
351 tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
|
|
352 append_to_statement_list_force (pred, &stmt_list);
|
|
353 append_to_statement_list (jump, &stmt_list);
|
|
354 *stmt_p = stmt_list;
|
|
355 }
|
|
356
|
|
357 /* Genericize a BREAK_STMT node *STMT_P. */
|
|
358
|
|
359 static void
|
|
360 genericize_break_stmt (tree *stmt_p)
|
|
361 {
|
|
362 tree label = get_bc_label (bc_break);
|
|
363 location_t location = EXPR_LOCATION (*stmt_p);
|
|
364 *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
|
|
365 }
|
|
366
|
|
/* Genericize a OMP_FOR node *STMT_P.  Only the continue label is
   handled here; OMP loop constructs manage "break" differently.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  /* Taskloop clauses are walked elsewhere; skip them here.  */
  if (TREE_CODE (stmt) != OMP_TASKLOOP)
    cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}
|
|
387
|
|
/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion: gimplify_and_add below will come
     back through cp_gimplify_expr for this same node.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}
|
|
408
|
|
/* Gimplify an EXPR_STMT node: unwrap it to its expression, warning
   about unused values first if requested.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
	{
	  if (!IS_EMPTY_STMT (stmt)
	      && !VOID_TYPE_P (TREE_TYPE (stmt))
	      && !TREE_NO_WARNING (stmt))
	    warning (OPT_Wunused_value, "statement with no effect");
	}
      else
	warn_if_unused_value (stmt, input_location);
    }

  /* Replace a dropped statement with an empty list rather than NULL.  */
  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}
|
|
443
|
|
/* Gimplify initialization from an AGGR_INIT_EXPR: rewrite *EXPR_P (an
   INIT_EXPR) so the aggregate/vec initializer constructs directly into
   the target, dropping the intermediate INIT_EXPR where possible.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      /* SUB is the candidate initializer: either T itself or, for a
	 COMPOUND_EXPR, its first operand.  */
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
	 replace the slot operand with our target.

	 Should we add a target parm to gimplify_expr instead?  No, as in this
	 case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
	  || TREE_CODE (sub) == VEC_INIT_EXPR)
	{
	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
	    AGGR_INIT_EXPR_SLOT (sub) = to;
	  else
	    VEC_INIT_EXPR_SLOT (sub) = to;
	  *expr_p = from;

	  /* The initialization is now a side-effect, so the container can
	     become void.  */
	  if (from != sub)
	    TREE_TYPE (from) = void_type_node;
	}

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      /* Advance along the COMPOUND_EXPR chain; stop at its tail.  */
      if (t == sub)
	break;
      else
	t = TREE_OPERAND (t, 1);
    }

}
|
|
496
|
|
/* Gimplify a MUST_NOT_THROW_EXPR: lower it to a GIMPLE_TRY whose catch
   clause is an EH_MUST_NOT_THROW calling the terminate handler.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  /* If the wrapped expression produces a value, voidify_wrapper_expr
     introduces a temporary to carry it; TEMP is that temporary.  */
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      /* The caller still needs to gimplify TEMP, hence GS_OK.  */
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}
|
|
524
|
|
525 /* Return TRUE if an operand (OP) of a given TYPE being copied is
|
|
526 really just an empty class copy.
|
|
527
|
|
528 Check that the operand has a simple form so that TARGET_EXPRs and
|
|
529 non-empty CONSTRUCTORs get reduced properly, and we leave the
|
|
530 return slot optimization alone because it isn't a copy. */
|
|
531
|
|
532 static bool
|
|
533 simple_empty_class_p (tree type, tree op)
|
|
534 {
|
|
535 return
|
|
536 ((TREE_CODE (op) == COMPOUND_EXPR
|
|
537 && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
|
|
538 || TREE_CODE (op) == EMPTY_CLASS_EXPR
|
|
539 || is_gimple_lvalue (op)
|
|
540 || INDIRECT_REF_P (op)
|
|
541 || (TREE_CODE (op) == CONSTRUCTOR
|
|
542 && CONSTRUCTOR_NELTS (op) == 0
|
|
543 && !TREE_CLOBBER_P (op))
|
|
544 || (TREE_CODE (op) == CALL_EXPR
|
|
545 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
|
|
546 && is_really_empty_class (type);
|
|
547 }
|
|
548
|
|
549 /* Returns true if evaluating E as an lvalue has side-effects;
|
|
550 specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
|
|
551 have side-effects until there is a read or write through it. */
|
|
552
|
|
553 static bool
|
|
554 lvalue_has_side_effects (tree e)
|
|
555 {
|
|
556 if (!TREE_SIDE_EFFECTS (e))
|
|
557 return false;
|
|
558 while (handled_component_p (e))
|
|
559 {
|
|
560 if (TREE_CODE (e) == ARRAY_REF
|
|
561 && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
|
|
562 return true;
|
|
563 e = TREE_OPERAND (e, 0);
|
|
564 }
|
|
565 if (DECL_P (e))
|
|
566 /* Just naming a variable has no side-effects. */
|
|
567 return false;
|
|
568 else if (INDIRECT_REF_P (e))
|
|
569 /* Similarly, indirection has no side-effects. */
|
|
570 return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
|
|
571 else
|
|
572 /* For anything else, trust TREE_SIDE_EFFECTS. */
|
|
573 return TREE_SIDE_EFFECTS (e);
|
|
574 }
|
|
575
|
|
/* Do C++-specific gimplification.  Args are as for gimplify_expr.
   Dispatches on the tree code of *EXPR_P, lowering C++-only codes to
   GENERIC/GIMPLE and deferring everything else to c_gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = cp_expr_loc_or_loc (*expr_p, input_location);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  /* Statement codes carry their own full-expression flag; install it
     for the duration and restore the old value at the end.  */
  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
	location_t loc = input_location;
	tree init = VEC_INIT_EXPR_INIT (*expr_p);
	int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
	gcc_assert (EXPR_HAS_LOCATION (*expr_p));
	input_location = EXPR_LOCATION (*expr_p);
	*expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
				  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
				  from_array,
				  tf_warning_or_error);
	/* The freshly built initialization still needs folding and
	   genericizing before gimplification continues.  */
	hash_set<tree> pset;
	cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
	cp_genericize_tree (expr_p, false);
	ret = GS_OK;
	input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

    /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
       LHS of an assignment might also be involved in the RHS, as in bug
       25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
	return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
	/* If the back end isn't clever enough to know that the lhs and rhs
	   types are the same, add an explicit conversion.  */
	tree op0 = TREE_OPERAND (*expr_p, 0);
	tree op1 = TREE_OPERAND (*expr_p, 1);

	if (!error_operand_p (op0)
	    && !error_operand_p (op1)
	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (op0), op1);

	else if (simple_empty_class_p (TREE_TYPE (op0), op1))
	  {
	    /* Remove any copies of empty classes.  Also drop volatile
	       variables on the RHS to avoid infinite recursion from
	       gimplify_expr trying to load the value.  */
	    if (TREE_SIDE_EFFECTS (op1))
	      {
		if (TREE_THIS_VOLATILE (op1)
		    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
		  op1 = build_fold_addr_expr (op1);

		gimplify_and_add (op1, pre_p);
	      }
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
	    *expr_p = TREE_OPERAND (*expr_p, 0);
	  }
	/* P0145 says that the RHS is sequenced before the LHS.
	   gimplify_modify_expr gimplifies the RHS before the LHS, but that
	   isn't quite strong enough in two cases:

	   1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
	   mean it's evaluated after the LHS.

	   2) the value calculation of the RHS is also sequenced before the
	   LHS, so for scalar assignment we need to preevaluate if the
	   RHS could be affected by LHS side-effects even if it has no
	   side-effects of its own.  We don't need this for classes because
	   class assignment takes its RHS by reference.  */
	else if (flag_strong_eval_order > 1
		 && TREE_CODE (*expr_p) == MODIFY_EXPR
		 && lvalue_has_side_effects (op0)
		 && (TREE_CODE (op1) == CALL_EXPR
		     || (SCALAR_TYPE_P (TREE_TYPE (op1))
			 && !TREE_CONSTANT (op1))))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    /* These statement forms are all lowered during genericization and
       must not survive to gimplification.  */
    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
					    : arg;
	ret = GS_OK;
      }
      break;

    case CALL_EXPR:
      ret = GS_OK;
      if (!CALL_EXPR_FN (*expr_p))
	/* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
	{
	  /* This is a call to a (compound) assignment operator that used
	     the operator syntax; gimplify the RHS first.  */
	  gcc_assert (call_expr_nargs (*expr_p) == 2);
	  gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
	  enum gimplify_status t
	    = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
	{
	  /* Leave the last argument for gimplify_call_expr, to avoid problems
	     with __builtin_va_arg_pack().  */
	  int nargs = call_expr_nargs (*expr_p) - 1;
	  for (int i = 0; i < nargs; ++i)
	    {
	      enum gimplify_status t
		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      else if (flag_strong_eval_order
	       && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
	{
	  /* If flag_strong_eval_order, evaluate the object argument first.  */
	  tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  if (INDIRECT_TYPE_P (fntype))
	    fntype = TREE_TYPE (fntype);
	  if (TREE_CODE (fntype) == METHOD_TYPE)
	    {
	      enum gimplify_status t
		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      if (ret != GS_ERROR)
	{
	  /* By gimplification time we are no longer in a constant
	     evaluation context, so the builtin folds to false.  */
	  tree decl = cp_get_callee_fndecl_nofold (*expr_p);
	  if (decl
	      && fndecl_built_in_p (decl, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
				    BUILT_IN_FRONTEND))
	    *expr_p = boolean_false_node;
	}
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
	  && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
	      || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
	{
	  expr_p = &TREE_OPERAND (*expr_p, 0);
	  code = TREE_CODE (*expr_p);
	  /* Avoid going through the INIT_EXPR case, which can
	     degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
	  goto modify_expr_case;
	}
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}
|
|
831
|
|
832 static inline bool
|
|
833 is_invisiref_parm (const_tree t)
|
|
834 {
|
|
835 return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
|
|
836 && DECL_BY_REFERENCE (t));
|
|
837 }
|
|
838
|
|
839 /* Return true if the uid in both int tree maps are equal. */
|
|
840
|
|
841 bool
|
|
842 cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
|
|
843 {
|
|
844 return (a->uid == b->uid);
|
|
845 }
|
|
846
|
|
847 /* Hash a UID in a cxx_int_tree_map. */
|
|
848
|
|
849 unsigned int
|
|
850 cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
|
|
851 {
|
|
852 return item->uid;
|
|
853 }
|
|
854
|
|
855 /* A stable comparison routine for use with splay trees and DECLs. */
|
|
856
|
|
857 static int
|
|
858 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
|
|
859 {
|
|
860 tree a = (tree) xa;
|
|
861 tree b = (tree) xb;
|
|
862
|
|
863 return DECL_UID (a) - DECL_UID (b);
|
|
864 }
|
|
865
|
|
/* OpenMP context during genericization.  One instance per nested
   parallel/task region.  */

struct cp_genericize_omp_taskreg
{
  /* True for a parallel region, false for a task region.  */
  bool is_parallel;
  /* True if the region's default data-sharing is "shared".  */
  bool default_shared;
  /* Enclosing region, or NULL for the outermost one.  */
  struct cp_genericize_omp_taskreg *outer;
  /* Map from DECL (splay_tree_key) to its data-sharing flags.  */
  splay_tree variables;
};
|
|
875
|
|
/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  /* Look through invisible-reference and reference wrappers, and any
     array dimensions, to the underlying element type.  */
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TYPE_REF_P (type))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  /* Only class types need copy ctor/dtor instantiation tracking.  */
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  /* Predetermined-sharing variables need no tracking.  */
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}
|
|
897
|
|
/* Note DECL use in OpenMP region OMP_CTX during genericization.
   Records the implicit data-sharing of DECL in OMP_CTX (and, by
   recursion, all enclosing regions); when DECL turns out implicitly
   firstprivate in a task, instantiate its copy ctor and dtor now,
   since during gimplification it would be too late.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
					 (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
	omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
	{
	  struct cp_genericize_omp_taskreg *octx;

	  /* Scan outward: if any enclosing region up to the nearest
	     parallel already treats DECL as non-shared, this task sees
	     it firstprivate too.  */
	  for (octx = omp_ctx->outer; octx; octx = octx->outer)
	    {
	      n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
		{
		  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
		  break;
		}
	      if (octx->is_parallel)
		break;
	    }
	  /* A function-local automatic or a parameter with no enclosing
	     parallel region is also implicitly firstprivate.  */
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
	  if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	    {
	      /* DECL is implicitly determined firstprivate in
		 the current task construct.  Ensure copy ctor and
		 dtor are instantiated, because during gimplification
		 it will be already too late.  */
	      tree type = TREE_TYPE (decl);
	      if (is_invisiref_parm (decl))
		type = TREE_TYPE (type);
	      else if (TYPE_REF_P (type))
		type = TREE_TYPE (type);
	      while (TREE_CODE (type) == ARRAY_TYPE)
		type = TREE_TYPE (type);
	      get_copy_ctor (type, tf_none);
	      get_dtor (type, tf_none);
	    }
	}
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}
|
|
950
|
|
/* Genericization context, threaded through cp_genericize_r via the
   walk_tree DATA pointer.  */

struct cp_genericize_data
{
  /* Trees already processed; used to avoid walking the same tree twice.  */
  hash_set<tree> *p_set;
  /* Stack of enclosing BIND_EXPRs, innermost last; used by USING_STMT
     handling to find a BIND_EXPR with a non-NULL BLOCK.  */
  vec<tree> bind_expr_stack;
  /* Innermost enclosing OMP task/parallel region, or NULL.  */
  struct cp_genericize_omp_taskreg *omp_ctx;
  /* Innermost enclosing TRY_BLOCK or MUST_NOT_THROW_EXPR, or NULL_TREE;
     consulted when warning about THROW_EXPRs.  */
  tree try_block;
  /* True while walking trees that must not be sanitized (e.g. static
     initializers).  */
  bool no_sanitize_p;
  /* Whether invisible reference parms should be dereferenced.  */
  bool handle_invisiref_parm_p;
};
|
|
962
|
|
/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: The folding of none-omp cases is something to move into
   the middle-end.  As for now we have most foldings only on GENERIC
   in fold-const, we need to perform this before transformation to
   GIMPLE-form.

   walk_tree callback: DATA is a hash_set<tree> of already-folded trees.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt;
  enum tree_code code;

  /* Fold the statement in place before looking at its subtrees.  */
  *stmt_p = stmt = cp_fold (*stmt_p);

  if (((hash_set<tree> *) data)->add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
	 we can have exponential complexity with e.g. lots of nested
	 SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will return
	 always the same tree, which the first time cp_fold_r has been
	 called on it had the subtrees walked.  */
      *walk_subtrees = 0;
      return NULL;
    }

  code = TREE_CODE (stmt);
  if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
      || code == OMP_TASKLOOP || code == OACC_LOOP)
    {
      /* OMP loop constructs are walked manually, piece by piece, so that
	 only the interesting operands of the condition and increment
	 vectors are folded (the iteration variable itself must be left
	 alone).  */
      tree x;
      int i, n;

      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
	{
	  cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
	  cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
	}
      else if (x && TREE_CODE (x) == TREE_VEC)
	{
	  /* Collapsed loop: one comparison per dimension; only fold the
	     bound (operand 1), not the iteration variable.  */
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
		cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
	    }
	}
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      /* Look through "iv = iv + step" to the arithmetic.  */
	      if (o && TREE_CODE (o) == MODIFY_EXPR)
		o = TREE_OPERAND (o, 1);
	      if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
			|| TREE_CODE (o) == POINTER_PLUS_EXPR))
		{
		  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
		  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
		}
	    }
	}
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
    }

  return NULL;
}
|
|
1038
|
|
1039 /* Fold ALL the trees! FIXME we should be able to remove this, but
|
|
1040 apparently that still causes optimization regressions. */
|
|
1041
|
|
1042 void
|
|
1043 cp_fold_function (tree fndecl)
|
|
1044 {
|
|
1045 hash_set<tree> pset;
|
|
1046 cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
|
|
1047 }
|
|
1048
|
|
/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.

   walk_tree callback: DATA points at the cp_genericize_data walk
   context.  Dereferences invisible-reference parms, lowers C++-only
   statement codes (IF_STMT, FOR_STMT, CLEANUP_STMT, USING_STMT, ...)
   to their GENERIC equivalents, tracks OpenMP data-sharing, and hooks
   in UBSan reference/member-call instrumentation.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
	  || TREE_CODE (stmt) == PARM_DECL
	  || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Don't dereference parms in a thunk, pass the references through.  */
  if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
      || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
    {
      *walk_subtrees = 0;
      return NULL;
    }

  /* Dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      p_set->add (*stmt_p);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
      if (h)
	{
	  *stmt_p = h->to;
	  /* Propagate TREE_USED so the outer decl isn't seen as unused.  */
	  TREE_USED (h->to) |= TREE_USED (stmt);
	  *walk_subtrees = 0;
	  return NULL;
	}
    }

  /* Instrument reference-typed constants for -fsanitize=null/alignment.  */
  if (TREE_CODE (stmt) == INTEGER_CST
      && TYPE_REF_P (TREE_TYPE (stmt))
      && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
      && !wtd->no_sanitize_p)
    {
      ubsan_maybe_instrument_reference (stmt_p);
      if (*stmt_p != stmt)
	{
	  *walk_subtrees = 0;
	  return NULL_TREE;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
      if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	{
	  /* If in an OpenMP context, note var uses.  */
	  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
	      && omp_var_to_track (TREE_OPERAND (stmt, 0)))
	    omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
	  /* &invisiref-parm is just the parm itself (already a pointer).  */
	  *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
	  *walk_subtrees = 0;
	}
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	/* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
	*walk_subtrees = 0;
      break;

    case OMP_CLAUSE:
      switch (OMP_CLAUSE_CODE (stmt))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      /* ... but the lastprivate update stmt still needs lowering.  */
	      if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
		cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	case OMP_CLAUSE_PRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  else if (wtd->omp_ctx != NULL)
	    {
	      /* Private clause doesn't cause any references to the
		 var in outer contexts, avoid calling
		 omp_cxx_notice_variable for it.  */
	      struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
	      wtd->omp_ctx = NULL;
	      cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
			    data, NULL);
	      wtd->omp_ctx = old;
	      *walk_subtrees = 0;
	    }
	  break;
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  break;
	case OMP_CLAUSE_REDUCTION:
	  /* Don't dereference an invisiref in reduction clause's
	     OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
	     still needs to be genericized.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_REDUCTION_INIT (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
			      cp_genericize_r, data, NULL);
	      if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	default:
	  break;
	}
      break;

    /* Due to the way voidify_wrapper_expr is written, we don't get a chance
       to lower this construct before scanning it, so we need to lower these
       before doing anything else.  */
    case CLEANUP_STMT:
      *stmt_p = build2_loc (EXPR_LOCATION (stmt),
			    CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
						   : TRY_FINALLY_EXPR,
			    void_type_node,
			    CLEANUP_BODY (stmt),
			    CLEANUP_EXPR (stmt));
      break;

    case IF_STMT:
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);

    /* COND_EXPR might have incompatible types in branches if one or both
       arms are bitfields.  Fix it up now.  */
    case COND_EXPR:
      {
	tree type_left
	  = (TREE_OPERAND (stmt, 1)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	     : NULL_TREE);
	tree type_right
	  = (TREE_OPERAND (stmt, 2)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	     : NULL_TREE);
	if (type_left
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 1))))
	  {
	    TREE_OPERAND (stmt, 1)
	      = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_left));
	  }
	if (type_right
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 2))))
	  {
	    TREE_OPERAND (stmt, 2)
	      = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_right));
	  }
      }
      break;

    case BIND_EXPR:
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
	{
	  /* Pre-record the default data-sharing of block-scope vars in the
	     innermost OMP region.  */
	  tree decl;
	  for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& !DECL_EXTERNAL (decl)
		&& omp_var_to_track (decl))
	      {
		splay_tree_node n
		  = splay_tree_lookup (wtd->omp_ctx->variables,
				       (splay_tree_key) decl);
		if (n == NULL)
		  splay_tree_insert (wtd->omp_ctx->variables,
				     (splay_tree_key) decl,
				     TREE_STATIC (decl)
				     ? OMP_CLAUSE_DEFAULT_SHARED
				     : OMP_CLAUSE_DEFAULT_PRIVATE);
	      }
	}
      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
	{
	  /* The point here is to not sanitize static initializers.  */
	  bool no_sanitize_p = wtd->no_sanitize_p;
	  wtd->no_sanitize_p = true;
	  for (tree decl = BIND_EXPR_VARS (stmt);
	       decl;
	       decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& TREE_STATIC (decl)
		&& DECL_INITIAL (decl))
	      cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
	  wtd->no_sanitize_p = no_sanitize_p;
	}
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
		    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
      break;

    case USING_STMT:
      {
	tree block = NULL_TREE;

	/* Get the innermost inclosing GIMPLE_BIND that has a non NULL
	   BLOCK, and append an IMPORTED_DECL to its
	   BLOCK_VARS chained list.  */
	if (wtd->bind_expr_stack.exists ())
	  {
	    int i;
	    for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
	      if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
		break;
	  }
	if (block)
	  {
	    tree decl = TREE_OPERAND (stmt, 0);
	    gcc_assert (decl);

	    if (undeduced_auto_decl (decl))
	      /* Omit from the GENERIC, the back-end can't handle it.  */;
	    else
	      {
		tree using_directive = make_node (IMPORTED_DECL);
		TREE_TYPE (using_directive) = void_type_node;

		IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
		DECL_CHAIN (using_directive) = BLOCK_VARS (block);
		BLOCK_VARS (block) = using_directive;
	      }
	  }
	/* The USING_STMT won't appear in GENERIC.  */
	*stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	*walk_subtrees = 0;
      }
      break;

    case DECL_EXPR:
      if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
	{
	  /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
	  *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	  *walk_subtrees = 0;
	}
      else
	{
	  tree d = DECL_EXPR_DECL (stmt);
	  if (VAR_P (d))
	    gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
	}
      break;

    case OMP_PARALLEL:
    case OMP_TASK:
    case OMP_TASKLOOP:
      {
	/* Push a new OMP region context, seed it from the explicit
	   data-sharing clauses, then walk the body inside it.  */
	struct cp_genericize_omp_taskreg omp_ctx;
	tree c, decl;
	splay_tree_node n;

	*walk_subtrees = 0;
	cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
	omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
	omp_ctx.default_shared = omp_ctx.is_parallel;
	omp_ctx.outer = wtd->omp_ctx;
	omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
	wtd->omp_ctx = &omp_ctx;
	for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	  switch (OMP_CLAUSE_CODE (c))
	    {
	    case OMP_CLAUSE_SHARED:
	    case OMP_CLAUSE_PRIVATE:
	    case OMP_CLAUSE_FIRSTPRIVATE:
	    case OMP_CLAUSE_LASTPRIVATE:
	      decl = OMP_CLAUSE_DECL (c);
	      if (decl == error_mark_node || !omp_var_to_track (decl))
		break;
	      n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
	      if (n != NULL)
		break;
	      splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
				 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
				 ? OMP_CLAUSE_DEFAULT_SHARED
				 : OMP_CLAUSE_DEFAULT_PRIVATE);
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
		omp_cxx_notice_variable (omp_ctx.outer, decl);
	      break;
	    case OMP_CLAUSE_DEFAULT:
	      if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
		omp_ctx.default_shared = true;
	      /* FALLTHRU */
	    default:
	      break;
	    }
	if (TREE_CODE (stmt) == OMP_TASKLOOP)
	  genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
	else
	  cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
	wtd->omp_ctx = omp_ctx.outer;
	splay_tree_delete (omp_ctx.variables);
      }
      break;

    case TRY_BLOCK:
      {
	/* The handlers are walked with the enclosing try_block, not this
	   one, so a throw in a handler isn't considered caught here.  */
	*walk_subtrees = 0;
	tree try_block = wtd->try_block;
	wtd->try_block = stmt;
	cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
	wtd->try_block = try_block;
	cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
      }
      break;

    case MUST_NOT_THROW_EXPR:
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
	{
	  *walk_subtrees = 0;
	  tree try_block = wtd->try_block;
	  wtd->try_block = stmt;
	  cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
	  wtd->try_block = try_block;
	}
      break;

    case THROW_EXPR:
      {
	location_t loc = location_of (stmt);
	if (TREE_NO_WARNING (stmt))
	  /* Never mind.  */;
	else if (wtd->try_block)
	  {
	    /* A throw inside a MUST_NOT_THROW_EXPR always terminates.  */
	    if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR)
	      {
		auto_diagnostic_group d;
		if (warning_at (loc, OPT_Wterminate,
				"throw will always call terminate()")
		    && cxx_dialect >= cxx11
		    && DECL_DESTRUCTOR_P (current_function_decl))
		  inform (loc, "in C++11 destructors default to noexcept");
	      }
	  }
	else
	  {
	    /* Warn about dtor throws that become terminating in C++11.  */
	    if (warn_cxx11_compat && cxx_dialect < cxx11
		&& DECL_DESTRUCTOR_P (current_function_decl)
		&& (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
		    == NULL_TREE)
		&& (get_defaulted_eh_spec (current_function_decl)
		    == empty_except_spec))
	      warning_at (loc, OPT_Wc__11_compat,
			  "in C++11 this throw will terminate because "
			  "destructors default to noexcept");
	  }
      }
      break;

    case CONVERT_EXPR:
      gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
      break;

    case FOR_STMT:
      genericize_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case WHILE_STMT:
      genericize_while_stmt (stmt_p, walk_subtrees, data);
      break;

    case DO_STMT:
      genericize_do_stmt (stmt_p, walk_subtrees, data);
      break;

    case SWITCH_STMT:
      genericize_switch_stmt (stmt_p, walk_subtrees, data);
      break;

    case CONTINUE_STMT:
      genericize_continue_stmt (stmt_p);
      break;

    case BREAK_STMT:
      genericize_break_stmt (stmt_p);
      break;

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OACC_LOOP:
      genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case PTRMEM_CST:
      /* By the time we get here we're handing off to the back end, so we don't
	 need or want to preserve PTRMEM_CST anymore.  */
      *stmt_p = cplus_expand_constant (stmt);
      *walk_subtrees = 0;
      break;

    case MEM_REF:
      /* For MEM_REF, make sure not to sanitize the second operand even
	 if it has reference type.  It is just an offset with a type
	 holding other information.  There is no other processing we
	 need to do for INTEGER_CSTs, so just ignore the second argument
	 unconditionally.  */
      cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
      *walk_subtrees = 0;
      break;

    case NOP_EXPR:
      if (!wtd->no_sanitize_p
	  && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
	  && TYPE_REF_P (TREE_TYPE (stmt)))
	ubsan_maybe_instrument_reference (stmt_p);
      break;

    case CALL_EXPR:
      if (!wtd->no_sanitize_p
	  && sanitize_flags_p ((SANITIZE_NULL
				| SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
	{
	  tree fn = CALL_EXPR_FN (stmt);
	  if (fn != NULL_TREE
	      && !error_operand_p (fn)
	      && INDIRECT_TYPE_P (TREE_TYPE (fn))
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
	    {
	      bool is_ctor
		= TREE_CODE (fn) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
		  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
	      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
		ubsan_maybe_instrument_member_call (stmt, is_ctor);
	      if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
		cp_ubsan_maybe_instrument_member_call (stmt);
	    }
	  else if (fn == NULL_TREE
		   && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
		   && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
		   && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
	    *walk_subtrees = 0;
	}
      /* Fall through.  */
    case AGGR_INIT_EXPR:
      /* For calls to a multi-versioned function, overload resolution
	 returns the function with the highest target priority, that is,
	 the version that will checked for dispatching first.  If this
	 version is inlinable, a direct call to this version can be made
	 otherwise the call should go through the dispatcher.  */
      {
	tree fn = cp_get_callee_fndecl_nofold (stmt);
	if (fn && DECL_FUNCTION_VERSIONED (fn)
	    && (current_function_decl == NULL
		|| !targetm.target_option.can_inline_p (current_function_decl,
							fn)))
	  if (tree dis = get_function_version_dispatcher (fn))
	    {
	      mark_versions_used (dis);
	      dis = build_address (dis);
	      if (TREE_CODE (stmt) == CALL_EXPR)
		CALL_EXPR_FN (stmt) = dis;
	      else
		AGGR_INIT_EXPR_FN (stmt) = dis;
	    }
      }
      break;

    case TARGET_EXPR:
      if (TARGET_EXPR_INITIAL (stmt)
	  && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
	  && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
	TARGET_EXPR_NO_ELIDE (stmt) = 1;
      break;

    default:
      if (IS_TYPE_OR_DECL_P (stmt))
	*walk_subtrees = 0;
      break;
    }

  /* Remember we've handled this tree.  */
  p_set->add (*stmt_p);

  return NULL;
}
|
|
1574
|
|
1575 /* Lower C++ front end trees to GENERIC in T_P. */
|
|
1576
|
|
1577 static void
|
|
1578 cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
|
|
1579 {
|
|
1580 struct cp_genericize_data wtd;
|
|
1581
|
|
1582 wtd.p_set = new hash_set<tree>;
|
|
1583 wtd.bind_expr_stack.create (0);
|
|
1584 wtd.omp_ctx = NULL;
|
|
1585 wtd.try_block = NULL_TREE;
|
|
1586 wtd.no_sanitize_p = false;
|
|
1587 wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
|
|
1588 cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
|
|
1589 delete wtd.p_set;
|
|
1590 wtd.bind_expr_stack.release ();
|
|
1591 if (sanitize_flags_p (SANITIZE_VPTR))
|
|
1592 cp_ubsan_instrument_member_accesses (t_p);
|
|
1593 }
|
|
1594
|
|
/* If a function that should end with a return in non-void
   function doesn't obviously end with return, add ubsan
   instrumentation code to verify it at runtime.  If -fsanitize=return
   is not enabled, instrument __builtin_unreachable.  */

static void
cp_maybe_instrument_return (tree fndecl)
{
  /* Nothing to do for void functions, ctors/dtors, or when the target
     says missing-return isn't worth diagnosing for FNDECL.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
      /* Don't add __builtin_unreachable () if not optimizing, it will not
	 improve any optimizations in that case, just break UB code.
	 Don't add it if -fsanitize=unreachable -fno-sanitize=return either,
	 UBSan covers this with ubsan_instrument_return above where sufficient
	 information is provided, while the __builtin_unreachable () below
	 if return sanitization is disabled will just result in hard to
	 understand runtime error without location.  */
      && (!optimize
	  || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
    return;

  /* Descend through wrappers to find the last real statement of the
     function body; if that is a RETURN_EXPR, nothing needs doing.  */
  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
	{
	case BIND_EXPR:
	  t = BIND_EXPR_BODY (t);
	  continue;
	case TRY_FINALLY_EXPR:
	case CLEANUP_POINT_EXPR:
	  t = TREE_OPERAND (t, 0);
	  continue;
	case STATEMENT_LIST:
	  {
	    /* Look at the last non-debug statement of the list.  */
	    tree_stmt_iterator i = tsi_last (t);
	    while (!tsi_end_p (i))
	      {
		tree p = tsi_stmt (i);
		if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
		  break;
		tsi_prev (&i);
	      }
	    if (!tsi_end_p (i))
	      {
		t = tsi_stmt (i);
		continue;
	      }
	  }
	  break;
	case RETURN_EXPR:
	  /* Function already ends with a return.  */
	  return;
	default:
	  break;
	}
      break;
    }
  if (t == NULL_TREE)
    return;
  /* Append the instrumentation to the outermost statement list.  */
  tree *p = &DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (*p) == BIND_EXPR)
    p = &BIND_EXPR_BODY (*p);

  location_t loc = DECL_SOURCE_LOCATION (fndecl);
  if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
    t = ubsan_instrument_return (loc);
  else
    {
      /* NOTE: this inner FNDECL intentionally shadows the parameter.  */
      tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
      t = build_call_expr_loc (BUILTINS_LOCATION, fndecl, 0);
    }

  append_to_statement_list (t, p);
}
|
|
1674
|
|
/* Genericize the body of FNDECL: rewrite invisible-reference parms and
   result, lower C++ trees to GENERIC, add return instrumentation, then
   hand off to the C-family genericizer.  */

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
	/* If a function's arguments are copied to create a thunk,
	   then DECL_BY_REFERENCE will be set -- but the type of the
	   argument will be a pointer type, so we will never get
	   here.  */
	gcc_assert (!DECL_BY_REFERENCE (t));
	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
	TREE_TYPE (t) = DECL_ARG_TYPE (t);
	DECL_BY_REFERENCE (t) = 1;
	TREE_ADDRESSABLE (t) = 0;
	/* Type changed, so size/alignment must be recomputed.  */
	relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
	{
	  /* Adjust DECL_VALUE_EXPR of the original var.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (t) == DECL_NAME (var)
		  && DECL_HAS_VALUE_EXPR_P (var)
		  && DECL_VALUE_EXPR (var) == t)
		{
		  tree val = convert_from_reference (t);
		  SET_DECL_VALUE_EXPR (var, val);
		  break;
		}
	}
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Allow cp_genericize calls to be nested.  */
  tree save_bc_label[2];
  save_bc_label[bc_break] = bc_label[bc_break];
  save_bc_label[bc_continue] = bc_label[bc_continue];
  bc_label[bc_break] = NULL_TREE;
  bc_label[bc_continue] = NULL_TREE;

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);

  cp_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);

  /* Genericization must have consumed all break/continue labels.  */
  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
  bc_label[bc_break] = save_bc_label[bc_break];
  bc_label[bc_continue] = save_bc_label[bc_continue];
}
|
|
1749
|
|
/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.

   For array arguments this emits an element-wise loop:
     p1 = &arg1[0...0]; end1 = p1 + sizeof (arg1);
     lab: fn (p1[, p2], defaults...); p1 += elt; [p2 += elt;]
     if (p1 != end1) goto lab;
   Otherwise a single call on the addresses of ARG1/ARG2.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  /* Skip the parameter(s) supplied explicitly; the rest get defaults.  */
  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      /* Index down to the first scalar element of (possibly
	 multi-dimensional) arrays.  */
      do
	{
	  inner_type = TREE_TYPE (inner_type);
	  start1 = build4 (ARRAY_REF, inner_type, start1,
			   size_zero_node, NULL, NULL);
	  if (arg2)
	    start2 = build4 (ARRAY_REF, inner_type, start2,
			     size_zero_node, NULL, NULL);
	}
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
	start2 = build_fold_addr_expr_loc (input_location, start2);

      /* End pointer: one past the last element of ARG1.  */
      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  p2 = create_tmp_var (TREE_TYPE (start2));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
	  append_to_statement_list (t, &ret);
	}

      /* Loop head label.  */
      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
	argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn,
					   i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      /* Advance the pointers by one element.  */
      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
	  append_to_statement_list (t, &ret);
	}

      /* Loop back while elements remain.  */
      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn,
					   i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
|
|
1861
|
|
1862 /* Return code to initialize DECL with its default constructor, or
|
|
1863 NULL if there's nothing to do. */
|
|
1864
|
|
1865 tree
|
|
1866 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
|
|
1867 {
|
|
1868 tree info = CP_OMP_CLAUSE_INFO (clause);
|
|
1869 tree ret = NULL;
|
|
1870
|
|
1871 if (info)
|
|
1872 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
|
|
1873
|
|
1874 return ret;
|
|
1875 }
|
|
1876
|
|
1877 /* Return code to initialize DST with a copy constructor from SRC. */
|
|
1878
|
|
1879 tree
|
|
1880 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
|
|
1881 {
|
|
1882 tree info = CP_OMP_CLAUSE_INFO (clause);
|
|
1883 tree ret = NULL;
|
|
1884
|
|
1885 if (info)
|
|
1886 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
|
|
1887 if (ret == NULL)
|
|
1888 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
|
|
1889
|
|
1890 return ret;
|
|
1891 }
|
|
1892
|
|
1893 /* Similarly, except use an assignment operator instead. */
|
|
1894
|
|
1895 tree
|
|
1896 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
|
|
1897 {
|
|
1898 tree info = CP_OMP_CLAUSE_INFO (clause);
|
|
1899 tree ret = NULL;
|
|
1900
|
|
1901 if (info)
|
|
1902 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
|
|
1903 if (ret == NULL)
|
|
1904 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
|
|
1905
|
|
1906 return ret;
|
|
1907 }
|
|
1908
|
|
1909 /* Return code to destroy DECL. */
|
|
1910
|
|
1911 tree
|
|
1912 cxx_omp_clause_dtor (tree clause, tree decl)
|
|
1913 {
|
|
1914 tree info = CP_OMP_CLAUSE_INFO (clause);
|
|
1915 tree ret = NULL;
|
|
1916
|
|
1917 if (info)
|
|
1918 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
|
|
1919
|
|
1920 return ret;
|
|
1921 }
|
|
1922
|
|
1923 /* True if OpenMP should privatize what this DECL points to rather
|
|
1924 than the DECL itself. */
|
|
1925
|
|
1926 bool
|
|
1927 cxx_omp_privatize_by_reference (const_tree decl)
|
|
1928 {
|
131
|
1929 return (TYPE_REF_P (TREE_TYPE (decl))
|
111
|
1930 || is_invisiref_parm (decl));
|
|
1931 }
|
|
1932
|
|
/* Return true if DECL is const qualified var having no mutable member.
   Such variables are predetermined shared in OpenMP.  */
bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TYPE_REF_P (type))
    {
      /* Only invisible reference parms get the see-through treatment;
	 other references are not considered here.  */
      if (!is_invisiref_parm (decl))
	return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
	{
	  /* NVR doesn't preserve const qualification of the
	     variable's type.  Look up the named variable in the
	     outermost curly-brace block to recover the original,
	     possibly const-qualified, type.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (decl) == DECL_NAME (var)
		  && (TYPE_MAIN_VARIANT (type)
		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
		{
		  /* Prefer the declared type only if it is actually
		     const-qualified.  */
		  if (TYPE_READONLY (TREE_TYPE (var)))
		    type = TREE_TYPE (var);
		  break;
		}
	}
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}
|
|
1975
|
|
1976 /* True if OpenMP sharing attribute of DECL is predetermined. */
|
|
1977
|
|
1978 enum omp_clause_default_kind
|
131
|
1979 cxx_omp_predetermined_sharing_1 (tree decl)
|
111
|
1980 {
|
|
1981 /* Static data members are predetermined shared. */
|
|
1982 if (TREE_STATIC (decl))
|
|
1983 {
|
|
1984 tree ctx = CP_DECL_CONTEXT (decl);
|
|
1985 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
|
|
1986 return OMP_CLAUSE_DEFAULT_SHARED;
|
|
1987 }
|
|
1988
|
|
1989 /* Const qualified vars having no mutable member are predetermined
|
|
1990 shared. */
|
|
1991 if (cxx_omp_const_qual_no_mutable (decl))
|
|
1992 return OMP_CLAUSE_DEFAULT_SHARED;
|
|
1993
|
|
1994 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
|
|
1995 }
|
|
1996
|
131
|
1997 /* Likewise, but also include the artificial vars. We don't want to
|
|
1998 disallow the artificial vars being mentioned in explicit clauses,
|
|
1999 as we use artificial vars e.g. for loop constructs with random
|
|
2000 access iterators other than pointers, but during gimplification
|
|
2001 we want to treat them as predetermined. */
|
|
2002
|
|
2003 enum omp_clause_default_kind
|
|
2004 cxx_omp_predetermined_sharing (tree decl)
|
|
2005 {
|
|
2006 enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
|
|
2007 if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
|
|
2008 return ret;
|
|
2009
|
|
2010 /* Predetermine artificial variables holding integral values, those
|
|
2011 are usually result of gimplify_one_sizepos or SAVE_EXPR
|
|
2012 gimplification. */
|
|
2013 if (VAR_P (decl)
|
|
2014 && DECL_ARTIFICIAL (decl)
|
|
2015 && INTEGRAL_TYPE_P (TREE_TYPE (decl))
|
|
2016 && !(DECL_LANG_SPECIFIC (decl)
|
|
2017 && DECL_OMP_PRIVATIZED_MEMBER (decl)))
|
|
2018 return OMP_CLAUSE_DEFAULT_SHARED;
|
|
2019
|
|
2020 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
|
|
2021 }
|
|
2022
|
111
|
2023 /* Finalize an implicitly determined clause. */
|
|
2024
|
|
2025 void
|
|
2026 cxx_omp_finish_clause (tree c, gimple_seq *)
|
|
2027 {
|
|
2028 tree decl, inner_type;
|
|
2029 bool make_shared = false;
|
|
2030
|
|
2031 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
|
|
2032 return;
|
|
2033
|
|
2034 decl = OMP_CLAUSE_DECL (c);
|
|
2035 decl = require_complete_type (decl);
|
|
2036 inner_type = TREE_TYPE (decl);
|
|
2037 if (decl == error_mark_node)
|
|
2038 make_shared = true;
|
131
|
2039 else if (TYPE_REF_P (TREE_TYPE (decl)))
|
111
|
2040 inner_type = TREE_TYPE (inner_type);
|
|
2041
|
|
2042 /* We're interested in the base element, not arrays. */
|
|
2043 while (TREE_CODE (inner_type) == ARRAY_TYPE)
|
|
2044 inner_type = TREE_TYPE (inner_type);
|
|
2045
|
|
2046 /* Check for special function availability by building a call to one.
|
|
2047 Save the results, because later we won't be in the right context
|
|
2048 for making these queries. */
|
|
2049 if (!make_shared
|
|
2050 && CLASS_TYPE_P (inner_type)
|
|
2051 && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
|
|
2052 make_shared = true;
|
|
2053
|
|
2054 if (make_shared)
|
|
2055 {
|
|
2056 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
|
|
2057 OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
|
|
2058 OMP_CLAUSE_SHARED_READONLY (c) = 0;
|
|
2059 }
|
|
2060 }
|
|
2061
|
|
2062 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
|
|
2063 disregarded in OpenMP construct, because it is going to be
|
|
2064 remapped during OpenMP lowering. SHARED is true if DECL
|
|
2065 is going to be shared, false if it is going to be privatized. */
|
|
2066
|
|
2067 bool
|
|
2068 cxx_omp_disregard_value_expr (tree decl, bool shared)
|
|
2069 {
|
|
2070 return !shared
|
|
2071 && VAR_P (decl)
|
|
2072 && DECL_HAS_VALUE_EXPR_P (decl)
|
|
2073 && DECL_ARTIFICIAL (decl)
|
|
2074 && DECL_LANG_SPECIFIC (decl)
|
|
2075 && DECL_OMP_PRIVATIZED_MEMBER (decl);
|
|
2076 }
|
|
2077
|
|
2078 /* Fold expression X which is used as an rvalue if RVAL is true. */
|
|
2079
|
|
2080 static tree
|
|
2081 cp_fold_maybe_rvalue (tree x, bool rval)
|
|
2082 {
|
|
2083 while (true)
|
|
2084 {
|
|
2085 x = cp_fold (x);
|
|
2086 if (rval && DECL_P (x)
|
131
|
2087 && !TYPE_REF_P (TREE_TYPE (x)))
|
111
|
2088 {
|
|
2089 tree v = decl_constant_value (x);
|
|
2090 if (v != x && v != error_mark_node)
|
|
2091 {
|
|
2092 x = v;
|
|
2093 continue;
|
|
2094 }
|
|
2095 }
|
|
2096 break;
|
|
2097 }
|
|
2098 return x;
|
|
2099 }
|
|
2100
|
|
2101 /* Fold expression X which is used as an rvalue. */
|
|
2102
|
|
2103 static tree
|
|
2104 cp_fold_rvalue (tree x)
|
|
2105 {
|
|
2106 return cp_fold_maybe_rvalue (x, true);
|
|
2107 }
|
|
2108
|
|
2109 /* Perform folding on expression X. */
|
|
2110
|
|
2111 tree
|
|
2112 cp_fully_fold (tree x)
|
|
2113 {
|
|
2114 if (processing_template_decl)
|
|
2115 return x;
|
|
2116 /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
|
|
2117 have to call both. */
|
|
2118 if (cxx_dialect >= cxx11)
|
131
|
2119 {
|
|
2120 x = maybe_constant_value (x);
|
|
2121 /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
|
|
2122 a TARGET_EXPR; undo that here. */
|
|
2123 if (TREE_CODE (x) == TARGET_EXPR)
|
|
2124 x = TARGET_EXPR_INITIAL (x);
|
|
2125 else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
|
|
2126 && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
|
|
2127 && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
|
|
2128 x = TREE_OPERAND (x, 0);
|
|
2129 }
|
111
|
2130 return cp_fold_rvalue (x);
|
|
2131 }
|
|
2132
|
|
2133 /* c-common interface to cp_fold. If IN_INIT, this is in a static initializer
|
|
2134 and certain changes are made to the folding done. Or should be (FIXME). We
|
|
2135 never touch maybe_const, as it is only used for the C front-end
|
|
2136 C_MAYBE_CONST_EXPR. */
|
|
2137
|
|
2138 tree
|
131
|
2139 c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
|
111
|
2140 {
|
131
|
2141 return cp_fold_maybe_rvalue (x, !lval);
|
111
|
2142 }
|
|
2143
|
|
2144 static GTY((deletable)) hash_map<tree, tree> *fold_cache;
|
|
2145
|
|
2146 /* Dispose of the whole FOLD_CACHE. */
|
|
2147
|
|
2148 void
|
|
2149 clear_fold_cache (void)
|
|
2150 {
|
|
2151 if (fold_cache != NULL)
|
|
2152 fold_cache->empty ();
|
|
2153 }
|
|
2154
|
|
/* This function tries to fold an expression X.
   To avoid combinatorial explosion, folding results are kept in fold_cache.
   If X is invalid, we don't fold at all.
   For performance reasons we don't cache expressions representing a
   declaration or constant.
   Function returns X or its folded variant.  */

static tree
cp_fold (tree x)
{
  tree op0, op1, op2, op3;
  tree org_x = x, r = NULL_TREE;
  enum tree_code code;
  location_t loc;
  bool rval_ops = true;

  if (!x || x == error_mark_node)
    return x;

  /* An expression without a (valid) type can't be folded.  */
  if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
    return x;

  /* Don't bother to cache DECLs or constants.  */
  if (DECL_P (x) || CONSTANT_CLASS_P (x))
    return x;

  if (fold_cache == NULL)
    fold_cache = hash_map<tree, tree>::create_ggc (101);

  if (tree *cached = fold_cache->get (x))
    return *cached;

  code = TREE_CODE (x);
  switch (code)
    {
    case CLEANUP_POINT_EXPR:
      /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
	 effects.  */
      r = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (!TREE_SIDE_EFFECTS (r))
	x = r;
      break;

    case SIZEOF_EXPR:
      /* Reduce a SIZEOF_EXPR to its constant value.  */
      x = fold_sizeof_expr (x);
      break;

    case VIEW_CONVERT_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:

      if (VOID_TYPE_P (TREE_TYPE (x)))
	{
	  /* This is just to make sure we don't end up with casts to
	     void from error_mark_node.  If we just return x, then
	     cp_fold_r might fold the operand into error_mark_node and
	     leave the conversion in the IR.  STRIP_USELESS_TYPE_CONVERSION
	     during gimplification doesn't like such casts.
	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
	     folding of the operand should be in the caches and if in cp_fold_r
	     it will modify it in place.  */
	  op0 = cp_fold (TREE_OPERAND (x, 0));
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  break;
	}

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

      if (code == CONVERT_EXPR
	  && SCALAR_TYPE_P (TREE_TYPE (x))
	  && op0 != void_node)
	/* During parsing we used convert_to_*_nofold; re-convert now using the
	   folding variants, since fold() doesn't do those transformations.  */
	x = fold (convert (TREE_TYPE (x), op0));
      else if (op0 != TREE_OPERAND (x, 0))
	{
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
	}
      else
	x = fold (x);

      /* Conversion of an out-of-range value has implementation-defined
	 behavior; the language considers it different from arithmetic
	 overflow, which is undefined.  */
      if (TREE_CODE (op0) == INTEGER_CST
	  && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
	TREE_OVERFLOW (x) = false;

      break;

    case INDIRECT_REF:
      /* We don't need the decltype(auto) obfuscation anymore.  */
      if (REF_PARENTHESIZED_P (x))
	{
	  tree p = maybe_undo_parenthesized_ref (x);
	  return cp_fold (p);
	}
      goto unary;

    case ADDR_EXPR:
      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);

      /* Cope with user tricks that amount to offsetof.  */
      if (op0 != error_mark_node
	  && TREE_CODE (TREE_TYPE (op0)) != FUNCTION_TYPE
	  && TREE_CODE (TREE_TYPE (op0)) != METHOD_TYPE)
	{
	  tree val = get_base_address (op0);
	  if (val
	      && INDIRECT_REF_P (val)
	      && COMPLETE_TYPE_P (TREE_TYPE (val))
	      && TREE_CONSTANT (TREE_OPERAND (val, 0)))
	    {
	      val = TREE_OPERAND (val, 0);
	      STRIP_NOPS (val);
	      if (TREE_CODE (val) == INTEGER_CST)
		return fold_offsetof (op0, TREE_TYPE (x));
	    }
	}
      goto finish_unary;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONJ_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case ABSU_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case FIXED_CONVERT_EXPR:
    unary:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

    finish_unary:
      if (op0 != TREE_OPERAND (x, 0))
	{
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  else
	    {
	      x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
	      /* Rebuilding an INDIRECT_REF loses the flags of the
		 original; copy them over from ORG_X.  */
	      if (code == INDIRECT_REF
		  && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
		{
		  TREE_READONLY (x) = TREE_READONLY (org_x);
		  TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
		  TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
		}
	    }
	}
      else
	x = fold (x);

      gcc_assert (TREE_CODE (x) != COND_EXPR
		  || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
      break;

    case UNARY_PLUS_EXPR:
      /* Unary plus is a no-op except for the implied conversion.  */
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (op0 == error_mark_node)
	x = error_mark_node;
      else
	x = fold_convert (TREE_TYPE (x), op0);
      break;

    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case INIT_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
    case POINTER_DIFF_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case LT_EXPR: case LE_EXPR:
    case GT_EXPR: case GE_EXPR:
    case EQ_EXPR: case NE_EXPR:
    case UNORDERED_EXPR: case ORDERED_EXPR:
    case UNLT_EXPR: case UNLE_EXPR:
    case UNGT_EXPR: case UNGE_EXPR:
    case UNEQ_EXPR: case LTGT_EXPR:
    case RANGE_EXPR: case COMPLEX_EXPR:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
      op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));

      if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
	{
	  if (op0 == error_mark_node || op1 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
	}
      else
	x = fold (x);

      /* This is only needed for -Wnonnull-compare and only if
	 TREE_NO_WARNING (org_x), but to avoid that option affecting code
	 generation, we do it always.  */
      if (COMPARISON_CLASS_P (org_x))
	{
	  if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
	    ;
	  else if (COMPARISON_CLASS_P (x))
	    {
	      if (TREE_NO_WARNING (org_x) && warn_nonnull_compare)
		TREE_NO_WARNING (x) = 1;
	    }
	  /* Otherwise give up on optimizing these, let GIMPLE folders
	     optimize those later on.  */
	  else if (op0 != TREE_OPERAND (org_x, 0)
		   || op1 != TREE_OPERAND (org_x, 1))
	    {
	      x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
	      if (TREE_NO_WARNING (org_x) && warn_nonnull_compare)
		TREE_NO_WARNING (x) = 1;
	    }
	  else
	    x = org_x;
	}
      break;

    case VEC_COND_EXPR:
    case COND_EXPR:
      loc = EXPR_LOCATION (x);
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));

      if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
	{
	  warning_sentinel s (warn_int_in_bool_context);
	  if (!VOID_TYPE_P (TREE_TYPE (op1)))
	    op1 = cp_truthvalue_conversion (op1);
	  if (!VOID_TYPE_P (TREE_TYPE (op2)))
	    op2 = cp_truthvalue_conversion (op2);
	}
      else if (VOID_TYPE_P (TREE_TYPE (x)))
	{
	  if (TREE_CODE (op0) == INTEGER_CST)
	    {
	      /* If the condition is constant, fold can fold away
		 the COND_EXPR.  If some statement-level uses of COND_EXPR
		 have one of the branches NULL, avoid folding crash.  */
	      if (!op1)
		op1 = build_empty_stmt (loc);
	      if (!op2)
		op2 = build_empty_stmt (loc);
	    }
	  else
	    {
	      /* Otherwise, don't bother folding a void condition, since
		 it can't produce a constant value.  */
	      if (op0 != TREE_OPERAND (x, 0)
		  || op1 != TREE_OPERAND (x, 1)
		  || op2 != TREE_OPERAND (x, 2))
		x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
	      break;
	    }
	}

      if (op0 != TREE_OPERAND (x, 0)
	  || op1 != TREE_OPERAND (x, 1)
	  || op2 != TREE_OPERAND (x, 2))
	{
	  if (op0 == error_mark_node
	      || op1 == error_mark_node
	      || op2 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
	}
      else
	x = fold (x);

      /* A COND_EXPR might have incompatible types in branches if one or both
	 arms are bitfields.  If folding exposed such a branch, fix it up.  */
      if (TREE_CODE (x) != code
	  && x != error_mark_node
	  && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
	x = fold_convert (TREE_TYPE (org_x), x);

      break;

    case CALL_EXPR:
      {
	int i, m, sv = optimize, nw = sv, changed = 0;
	tree callee = get_callee_fndecl (x);

	/* Some built-in function calls will be evaluated at compile-time in
	   fold ().  Set optimize to 1 when folding __builtin_constant_p inside
	   a constexpr function so that fold_builtin_1 doesn't fold it to 0.  */
	if (callee && fndecl_built_in_p (callee) && !optimize
	    && DECL_IS_BUILTIN_CONSTANT_P (callee)
	    && current_function_decl
	    && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
	  nw = 1;

	/* Defer folding __builtin_is_constant_evaluated.  */
	if (callee
	    && fndecl_built_in_p (callee, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
				  BUILT_IN_FRONTEND))
	  break;

	/* Copy before mutating the argument slots below.  */
	x = copy_node (x);

	m = call_expr_nargs (x);
	for (i = 0; i < m; i++)
	  {
	    r = cp_fold (CALL_EXPR_ARG (x, i));
	    if (r != CALL_EXPR_ARG (x, i))
	      {
		if (r == error_mark_node)
		  {
		    x = error_mark_node;
		    break;
		  }
		changed = 1;
	      }
	    CALL_EXPR_ARG (x, i) = r;
	  }
	if (x == error_mark_node)
	  break;

	/* Temporarily flip the global OPTIMIZE flag around fold () so
	   builtin folding behaves as decided above.  */
	optimize = nw;
	r = fold (x);
	optimize = sv;

	if (TREE_CODE (r) != CALL_EXPR)
	  {
	    x = cp_fold (r);
	    break;
	  }

	optimize = nw;

	/* Invoke maybe_constant_value for functions declared
	   constexpr and not called with AGGR_INIT_EXPRs.
	   TODO:
	   Do constexpr expansion of expressions where the call itself is not
	   constant, but the call followed by an INDIRECT_REF is.  */
	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
	    && !flag_no_inline)
	  r = maybe_constant_value (x);
	optimize = sv;

	if (TREE_CODE (r) != CALL_EXPR)
	  {
	    if (DECL_CONSTRUCTOR_P (callee))
	      {
		loc = EXPR_LOCATION (x);
		tree s = build_fold_indirect_ref_loc (loc,
						      CALL_EXPR_ARG (x, 0));
		r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
	      }
	    x = r;
	    break;
	  }

	if (!changed)
	  x = org_x;
	break;
      }

    case CONSTRUCTOR:
      {
	unsigned i;
	constructor_elt *p;
	vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
	/* NELTS is lazily allocated only when some element folds to a
	   different tree.  */
	vec<constructor_elt, va_gc> *nelts = NULL;
	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
	  {
	    tree op = cp_fold (p->value);
	    if (op != p->value)
	      {
		if (op == error_mark_node)
		  {
		    x = error_mark_node;
		    vec_free (nelts);
		    break;
		  }
		if (nelts == NULL)
		  nelts = elts->copy ();
		(*nelts)[i].value = op;
	      }
	  }
	if (nelts)
	  {
	    x = build_constructor (TREE_TYPE (x), nelts);
	    CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
	      = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
	  }
	if (VECTOR_TYPE_P (TREE_TYPE (x)))
	  x = fold (x);
	break;
      }
    case TREE_VEC:
      {
	bool changed = false;
	vec<tree, va_gc> *vec = make_tree_vector ();
	int i, n = TREE_VEC_LENGTH (x);
	vec_safe_reserve (vec, n);

	for (i = 0; i < n; i++)
	  {
	    tree op = cp_fold (TREE_VEC_ELT (x, i));
	    vec->quick_push (op);
	    if (op != TREE_VEC_ELT (x, i))
	      changed = true;
	  }

	/* Only copy the vector when some element actually changed.  */
	if (changed)
	  {
	    r = copy_node (x);
	    for (i = 0; i < n; i++)
	      TREE_VEC_ELT (r, i) = (*vec)[i];
	    x = r;
	  }

	release_tree_vector (vec);
      }

      break;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));
      op3 = cp_fold (TREE_OPERAND (x, 3));

      if (op0 != TREE_OPERAND (x, 0)
	  || op1 != TREE_OPERAND (x, 1)
	  || op2 != TREE_OPERAND (x, 2)
	  || op3 != TREE_OPERAND (x, 3))
	{
	  if (op0 == error_mark_node
	      || op1 == error_mark_node
	      || op2 == error_mark_node
	      || op3 == error_mark_node)
	    x = error_mark_node;
	  else
	    {
	      x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
	      /* Rebuilt node loses the original's flags; restore them.  */
	      TREE_READONLY (x) = TREE_READONLY (org_x);
	      TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
	      TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
	    }
	}

      x = fold (x);
      break;

    case SAVE_EXPR:
      /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
	 folding, evaluates to an invariant.  In that case no need to wrap
	 this folded tree with a SAVE_EXPR.  */
      r = cp_fold (TREE_OPERAND (x, 0));
      if (tree_invariant_p (r))
	x = r;
      break;

    default:
      /* Unhandled codes are returned unfolded and uncached.  */
      return org_x;
    }

  fold_cache->put (org_x, x);
  /* Prevent that we try to fold an already folded result again.  */
  if (x != org_x)
    fold_cache->put (x, x);

  return x;
}
|
|
2674
|
|
2675 #include "gt-cp-cp-gimplify.h"
|