Mercurial > hg > CbC > CbC_gcc
comparison gcc/gimplify.c.orig @ 57:326d9e06c2e3
modify c-parser.c
author | ryoma <e075725@ie.u-ryukyu.ac.jp> |
---|---|
date | Mon, 15 Feb 2010 00:54:17 +0900 |
parents | |
children |
comparison
equal
deleted
inserted
replaced
54:f62c169bbc24 | 57:326d9e06c2e3 |
---|---|
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees | |
2 tree representation into the GIMPLE form. | |
3 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 | |
4 Free Software Foundation, Inc. | |
5 Major work done by Sebastian Pop <s.pop@laposte.net>, | |
6 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>. | |
7 | |
8 This file is part of GCC. | |
9 | |
10 GCC is free software; you can redistribute it and/or modify it under | |
11 the terms of the GNU General Public License as published by the Free | |
12 Software Foundation; either version 3, or (at your option) any later | |
13 version. | |
14 | |
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
18 for more details. | |
19 | |
20 You should have received a copy of the GNU General Public License | |
21 along with GCC; see the file COPYING3. If not see | |
22 <http://www.gnu.org/licenses/>. */ | |
23 | |
24 #include "config.h" | |
25 #include "system.h" | |
26 #include "coretypes.h" | |
27 #include "tm.h" | |
28 #include "tree.h" | |
29 #include "rtl.h" | |
30 #include "varray.h" | |
31 #include "gimple.h" | |
32 #include "tree-iterator.h" | |
33 #include "tree-inline.h" | |
34 #include "diagnostic.h" | |
35 #include "langhooks.h" | |
36 #include "langhooks-def.h" | |
37 #include "tree-flow.h" | |
38 #include "cgraph.h" | |
39 #include "timevar.h" | |
40 #include "except.h" | |
41 #include "hashtab.h" | |
42 #include "flags.h" | |
43 #include "real.h" | |
44 #include "function.h" | |
45 #include "output.h" | |
46 #include "expr.h" | |
47 #include "ggc.h" | |
48 #include "toplev.h" | |
49 #include "target.h" | |
50 #include "optabs.h" | |
51 #include "pointer-set.h" | |
52 #include "splay-tree.h" | |
53 #include "vec.h" | |
54 #include "gimple.h" | |
55 #ifndef noCbC | |
56 #include "cbc-tree.h" | |
57 #endif | |
58 | |
59 #include "tree-pass.h" | |
60 | |
/* Bit flags recorded for each variable in an OMP context's VARIABLES
   splay tree (see struct gimplify_omp_ctx and omp_add_variable).  */

enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_DEBUG_PRIVATE = 256,
  GOVD_PRIVATE_OUTER_REF = 512,
  /* Mask covering the data-sharing classification bits.  */
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
};
76 | |
77 | |
/* Kind of OMP region a gimplify_omp_ctx describes.  */

enum omp_region_type
{
  ORT_WORKSHARE = 0,
  ORT_TASK = 1,
  ORT_PARALLEL = 2,
  ORT_COMBINED_PARALLEL = 3
};
85 | |
/* Per-OMP-construct gimplification state; contexts nest through
   OUTER_CONTEXT (see new_omp_context/delete_omp_context).  */

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context; /* Enclosing OMP context, or NULL.  */
  splay_tree variables;			  /* DECL -> gimplify_omp_var_data
					     flags, keyed by DECL_UID.  */
  struct pointer_set_t *privatized_types; /* Types already privatized in
					     this context.  */
  location_t location;			  /* Location the context was
					     created at (input_location).  */
  enum omp_clause_default_kind default_kind; /* Default data-sharing kind.  */
  enum omp_region_type region_type;	  /* Kind of OMP region.  */
};
95 | |
/* Innermost gimplification context; a stack chained through
   prev_context (see push_gimplify_context/pop_gimplify_context).  */
static struct gimplify_ctx *gimplify_ctxp;
/* Innermost OMP context, or NULL outside any OMP construct.  */
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
98 | |
99 | |
/* Formal (expression) temporary table handling: Multiple occurrences of
   the same scalar expression are evaluated into the same temporary.  */

typedef struct gimple_temp_hash_elt
{
  tree val;   /* Key: the expression whose value is cached.  */
  tree temp;  /* Value: the temporary holding that value.  */
} elt_t;

/* Forward declarations.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
111 | |
112 /* Mark X addressable. Unlike the langhook we expect X to be in gimple | |
113 form and we don't do any syntax checking. */ | |
114 void | |
115 mark_addressable (tree x) | |
116 { | |
117 while (handled_component_p (x)) | |
118 x = TREE_OPERAND (x, 0); | |
119 if (TREE_CODE (x) != VAR_DECL | |
120 && TREE_CODE (x) != PARM_DECL | |
121 && TREE_CODE (x) != RESULT_DECL) | |
122 return ; | |
123 TREE_ADDRESSABLE (x) = 1; | |
124 } | |
125 | |
126 /* Return a hash value for a formal temporary table entry. */ | |
127 | |
128 static hashval_t | |
129 gimple_tree_hash (const void *p) | |
130 { | |
131 tree t = ((const elt_t *) p)->val; | |
132 return iterative_hash_expr (t, 0); | |
133 } | |
134 | |
135 /* Compare two formal temporary table entries. */ | |
136 | |
137 static int | |
138 gimple_tree_eq (const void *p1, const void *p2) | |
139 { | |
140 tree t1 = ((const elt_t *) p1)->val; | |
141 tree t2 = ((const elt_t *) p2)->val; | |
142 enum tree_code code = TREE_CODE (t1); | |
143 | |
144 if (TREE_CODE (t2) != code | |
145 || TREE_TYPE (t1) != TREE_TYPE (t2)) | |
146 return 0; | |
147 | |
148 if (!operand_equal_p (t1, t2, 0)) | |
149 return 0; | |
150 | |
151 /* Only allow them to compare equal if they also hash equal; otherwise | |
152 results are nondeterminate, and we fail bootstrap comparison. */ | |
153 gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2)); | |
154 | |
155 return 1; | |
156 } | |
157 | |
158 /* Link gimple statement GS to the end of the sequence *SEQ_P. If | |
159 *SEQ_P is NULL, a new sequence is allocated. This function is | |
160 similar to gimple_seq_add_stmt, but does not scan the operands. | |
161 During gimplification, we need to manipulate statement sequences | |
162 before the def/use vectors have been constructed. */ | |
163 | |
164 static void | |
165 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs) | |
166 { | |
167 gimple_stmt_iterator si; | |
168 | |
169 if (gs == NULL) | |
170 return; | |
171 | |
172 if (*seq_p == NULL) | |
173 *seq_p = gimple_seq_alloc (); | |
174 | |
175 si = gsi_last (*seq_p); | |
176 | |
177 gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT); | |
178 } | |
179 | |
180 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is | |
181 NULL, a new sequence is allocated. This function is | |
182 similar to gimple_seq_add_seq, but does not scan the operands. | |
183 During gimplification, we need to manipulate statement sequences | |
184 before the def/use vectors have been constructed. */ | |
185 | |
186 static void | |
187 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src) | |
188 { | |
189 gimple_stmt_iterator si; | |
190 | |
191 if (src == NULL) | |
192 return; | |
193 | |
194 if (*dst_p == NULL) | |
195 *dst_p = gimple_seq_alloc (); | |
196 | |
197 si = gsi_last (*dst_p); | |
198 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT); | |
199 } | |
200 | |
201 /* Set up a context for the gimplifier. */ | |
202 | |
203 void | |
204 push_gimplify_context (struct gimplify_ctx *c) | |
205 { | |
206 memset (c, '\0', sizeof (*c)); | |
207 c->prev_context = gimplify_ctxp; | |
208 gimplify_ctxp = c; | |
209 } | |
210 | |
/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  /* The bind-expr stack must already be fully unwound; anything else
     indicates unbalanced push/pop of binds within this context.  */
  gcc_assert (c && (c->bind_expr_stack == NULL
		    || VEC_empty (gimple, c->bind_expr_stack)));
  VEC_free (gimple, heap, c->bind_expr_stack);
  gimplify_ctxp = c->prev_context;

  /* Hand the accumulated temporaries to the given bind, or to the
     function's local declarations when no bind was supplied.  */
  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  /* Release the formal-temporary hash table, if one was created.  */
  if (c->temp_htab)
    htab_delete (c->temp_htab);
}
235 | |
/* Push GIMPLE_BIND onto the current context's stack of open binds,
   allocating the stack lazily on first use.  */

static void
gimple_push_bind_expr (gimple gimple_bind)
{
  if (gimplify_ctxp->bind_expr_stack == NULL)
    gimplify_ctxp->bind_expr_stack = VEC_alloc (gimple, heap, 8);
  VEC_safe_push (gimple, heap, gimplify_ctxp->bind_expr_stack, gimple_bind);
}
243 | |
/* Pop the innermost GIMPLE_BIND from the bind-expr stack.  */

static void
gimple_pop_bind_expr (void)
{
  VEC_pop (gimple, gimplify_ctxp->bind_expr_stack);
}
249 | |
/* Return the innermost GIMPLE_BIND currently being gimplified.  */

gimple
gimple_current_bind_expr (void)
{
  return VEC_last (gimple, gimplify_ctxp->bind_expr_stack);
}
255 | |
/* Return the stack of GIMPLE_BINDs created during gimplification.  */

VEC(gimple, heap) *
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}
263 | |
/* Returns true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}
272 | |
/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  /* Entering the outermost conditional: no conditional cleanups may
     be left over from a previous conditional context.  */
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}
284 | |
285 /* Note that we've left a COND_EXPR. If we're back at unconditional scope | |
286 now, add any conditional cleanups we've seen to the prequeue. */ | |
287 | |
288 static void | |
289 gimple_pop_condition (gimple_seq *pre_p) | |
290 { | |
291 int conds = --(gimplify_ctxp->conditions); | |
292 | |
293 gcc_assert (conds >= 0); | |
294 if (conds == 0) | |
295 { | |
296 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups); | |
297 gimplify_ctxp->conditional_cleanups = NULL; | |
298 } | |
299 } | |
300 | |
301 /* A stable comparison routine for use with splay trees and DECLs. */ | |
302 | |
303 static int | |
304 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb) | |
305 { | |
306 tree a = (tree) xa; | |
307 tree b = (tree) xb; | |
308 | |
309 return DECL_UID (a) - DECL_UID (b); | |
310 } | |
311 | |
312 /* Create a new omp construct that deals with variable remapping. */ | |
313 | |
314 static struct gimplify_omp_ctx * | |
315 new_omp_context (enum omp_region_type region_type) | |
316 { | |
317 struct gimplify_omp_ctx *c; | |
318 | |
319 c = XCNEW (struct gimplify_omp_ctx); | |
320 c->outer_context = gimplify_omp_ctxp; | |
321 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0); | |
322 c->privatized_types = pointer_set_create (); | |
323 c->location = input_location; | |
324 c->region_type = region_type; | |
325 if (region_type != ORT_TASK) | |
326 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED; | |
327 else | |
328 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED; | |
329 | |
330 return c; | |
331 } | |
332 | |
/* Destroy an omp construct that deals with variable remapping.
   Frees the splay tree, the privatized-type set, and C itself.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  pointer_set_destroy (c->privatized_types);
  XDELETE (c);
}
342 | |
343 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int); | |
344 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool); | |
345 | |
346 /* A subroutine of append_to_statement_list{,_force}. T is not NULL. */ | |
347 | |
348 static void | |
349 append_to_statement_list_1 (tree t, tree *list_p) | |
350 { | |
351 tree list = *list_p; | |
352 tree_stmt_iterator i; | |
353 | |
354 if (!list) | |
355 { | |
356 if (t && TREE_CODE (t) == STATEMENT_LIST) | |
357 { | |
358 *list_p = t; | |
359 return; | |
360 } | |
361 *list_p = list = alloc_stmt_list (); | |
362 } | |
363 | |
364 i = tsi_last (list); | |
365 tsi_link_after (&i, t, TSI_CONTINUE_LINKING); | |
366 } | |
367 | |
368 /* Add T to the end of the list container pointed to by LIST_P. | |
369 If T is an expression with no effects, it is ignored. */ | |
370 | |
371 void | |
372 append_to_statement_list (tree t, tree *list_p) | |
373 { | |
374 if (t && TREE_SIDE_EFFECTS (t)) | |
375 append_to_statement_list_1 (t, list_p); | |
376 } | |
377 | |
378 /* Similar, but the statement is always added, regardless of side effects. */ | |
379 | |
380 void | |
381 append_to_statement_list_force (tree t, tree *list_p) | |
382 { | |
383 if (t != NULL_TREE) | |
384 append_to_statement_list_1 (t, list_p); | |
385 } | |
386 | |
/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}
396 | |
/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  /* Remember the pre-gimplification tail so we can locate the first
     statement appended by gimplifying T.  */
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      /* The sequence was non-empty: the first new tuple is the one
	 immediately after the old tail.  */
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    /* The sequence was empty before, so everything in it is new.  */
    return gimple_seq_first_stmt (*seq_p);
}
416 | |
/* Strip off a legitimate source ending from the input string NAME of
   length LEN.  Rather than having to know the names used by all of
   our front ends, we strip off an ending of a period followed by
   up to five characters.  (Java uses ".class".)  */

static inline void
remove_suffix (char *name, int len)
{
  int pos;

  /* A candidate suffix is a '.' followed by one to five characters,
     i.e. a dot at any of the positions LEN-2 down to LEN-7, never at
     position 0.  Scan from the shortest suffix to the longest.  */
  for (pos = len - 2; pos >= len - 7 && pos > 0; pos--)
    {
      if (name[pos] == '.')
	{
	  name[pos] = '\0';
	  return;
	}
    }
}
436 | |
437 /* Create a new temporary name with PREFIX. Returns an identifier. */ | |
438 | |
439 static GTY(()) unsigned int tmp_var_id_num; | |
440 | |
441 tree | |
442 create_tmp_var_name (const char *prefix) | |
443 { | |
444 char *tmp_name; | |
445 | |
446 if (prefix) | |
447 { | |
448 char *preftmp = ASTRDUP (prefix); | |
449 | |
450 remove_suffix (preftmp, strlen (preftmp)); | |
451 prefix = preftmp; | |
452 } | |
453 | |
454 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++); | |
455 return get_identifier (tmp_name); | |
456 } | |
457 | |
458 | |
/* Create a new temporary variable declaration of type TYPE.
   Does NOT push it into the current binding.  */

tree
create_tmp_var_raw (tree type, const char *prefix)
{
  tree tmp_var;
  tree new_type;

  /* Make the type of the variable writable.  */
  new_type = build_type_variant (type, 0, 0);
  TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);
  /* NOTE(review): NEW_TYPE is constructed but the decl below is built
     with TYPE; confirm whether the unqualified variant was meant to be
     passed to build_decl.  */

  tmp_var = build_decl (input_location,
			VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
			type);

  /* The variable was declared by the compiler.  */
  DECL_ARTIFICIAL (tmp_var) = 1;
  /* And we don't want debug info for it.  */
  DECL_IGNORED_P (tmp_var) = 1;

  /* Make the variable writable.  */
  TREE_READONLY (tmp_var) = 0;

  /* Temporaries are function-local, never emitted, and always used.  */
  DECL_EXTERNAL (tmp_var) = 0;
  TREE_STATIC (tmp_var) = 0;
  TREE_USED (tmp_var) = 1;

  return tmp_var;
}
490 | |
491 /* Create a new temporary variable declaration of type TYPE. DOES push the | |
492 variable into the current binding. Further, assume that this is called | |
493 only from gimplification or optimization, at which point the creation of | |
494 certain types are bugs. */ | |
495 | |
496 tree | |
497 create_tmp_var (tree type, const char *prefix) | |
498 { | |
499 tree tmp_var; | |
500 | |
501 /* We don't allow types that are addressable (meaning we can't make copies), | |
502 or incomplete. We also used to reject every variable size objects here, | |
503 but now support those for which a constant upper bound can be obtained. | |
504 The processing for variable sizes is performed in gimple_add_tmp_var, | |
505 point at which it really matters and possibly reached via paths not going | |
506 through this function, e.g. after direct calls to create_tmp_var_raw. */ | |
507 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type)); | |
508 | |
509 tmp_var = create_tmp_var_raw (type, prefix); | |
510 gimple_add_tmp_var (tmp_var); | |
511 return tmp_var; | |
512 } | |
513 | |
/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  return create_tmp_var (TREE_TYPE (val), get_name (val));
}
522 | |
/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      void **slot;

      /* Look VAL up in the context's formal-temp table, creating the
	 table lazily on first use.  */
      elt.val = val;
      if (gimplify_ctxp->temp_htab == NULL)
	gimplify_ctxp->temp_htab
	  = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free);
      slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT);
      if (*slot == NULL)
	{
	  /* First occurrence of this expression: create a temporary
	     and record it for later reuse.  */
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = (void *) elt_p;
	}
      else
	{
	  /* Seen before: reuse the previously created temporary.  */
	  elt_p = (elt_t *) *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}
564 | |
565 | |
566 /* Return true if T is a CALL_EXPR or an expression that can be | |
567 assignmed to a temporary. Note that this predicate should only be | |
568 used during gimplification. See the rationale for this in | |
569 gimplify_modify_expr. */ | |
570 | |
571 static bool | |
572 is_gimple_reg_rhs_or_call (tree t) | |
573 { | |
574 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS | |
575 || TREE_CODE (t) == CALL_EXPR); | |
576 } | |
577 | |
578 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that | |
579 this predicate should only be used during gimplification. See the | |
580 rationale for this in gimplify_modify_expr. */ | |
581 | |
582 static bool | |
583 is_gimple_mem_rhs_or_call (tree t) | |
584 { | |
585 /* If we're dealing with a renamable type, either source or dest must be | |
586 a renamed variable. */ | |
587 if (is_gimple_reg_type (TREE_TYPE (t))) | |
588 return is_gimple_val (t); | |
589 else | |
590 return (is_gimple_val (t) || is_gimple_lvalue (t) | |
591 || TREE_CODE (t) == CALL_EXPR); | |
592 } | |
593 | |
/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  Gimplify
   VAL, emit its initialization of a temporary into *PRE_P, and return
   the temporary (or its SSA name when gimplifying into SSA).  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  t = lookup_tmp_var (val, is_formal);

  /* Mark complex/vector formal temporaries as gimple registers.  */
  if (is_formal
      && (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	  || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE))
    DECL_GIMPLE_REG_P (t) = 1;

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  /* Prefer VAL's own location for the initialization, falling back
     to the current input location.  */
  if (EXPR_HAS_LOCATION (val))
    SET_EXPR_LOCATION (mod, EXPR_LOCATION (val));
  else
    SET_EXPR_LOCATION (mod, input_location);

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  /* If we're gimplifying into ssa, gimplify_modify_expr will have
     given our temporary an SSA name.  Find and return it.  */
  if (gimplify_ctxp->into_ssa)
    {
      gimple last = gimple_seq_last_stmt (*pre_p);
      t = gimple_get_lhs (last);
    }

  return t;
}
635 | |
/* Returns a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
   change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true);
}
653 | |
/* Returns a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  Unlike get_formal_tmp_var, never reuses an
   existing expression temporary.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
{
  return internal_get_tmp_var (val, pre_p, post_p, false);
}
662 | |
/* Declares all the variables in VARS in SCOPE.  If DEBUG_INFO is
   true, generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple scope, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gcc_assert (gimple_code (scope) == GIMPLE_BIND);

      /* VARS arrives most-recent-first; reverse to declaration order.
	 After nreverse, LAST points at the final element of TEMPS.  */
      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  /* No debug info wanted: just prepend TEMPS to the bind's
	     variable chain.  */
	  TREE_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}
702 | |
703 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound | |
704 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if | |
705 no such upper bound can be obtained. */ | |
706 | |
707 static void | |
708 force_constant_size (tree var) | |
709 { | |
710 /* The only attempt we make is by querying the maximum size of objects | |
711 of the variable's type. */ | |
712 | |
713 HOST_WIDE_INT max_size; | |
714 | |
715 gcc_assert (TREE_CODE (var) == VAR_DECL); | |
716 | |
717 max_size = max_int_size_in_bytes (TREE_TYPE (var)); | |
718 | |
719 gcc_assert (max_size >= 0); | |
720 | |
721 DECL_SIZE_UNIT (var) | |
722 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size); | |
723 DECL_SIZE (var) | |
724 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT); | |
725 } | |
726 | |
/* Register the temporary TMP with the current function: chain it into
   the active gimplification context's temporaries, the function's
   local decls, or the outermost bind of the function body, depending
   on what state we're in.  */

void
gimple_add_tmp_var (tree tmp)
{
  /* TMP must be fresh: not yet chained anywhere nor seen by a bind.  */
  gcc_assert (!TREE_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      /* Gimplifying: chain onto the context's temporaries; they are
	 handed out in pop_gimplify_context.  */
      TREE_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx && ctx->region_type == ORT_WORKSHARE)
	    ctx = ctx->outer_context;
	  if (ctx)
	    omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}
768 | |
769 /* Determines whether to assign a location to the statement GS. */ | |
770 | |
771 static bool | |
772 should_carry_location_p (gimple gs) | |
773 { | |
774 /* Don't emit a line note for a label. We particularly don't want to | |
775 emit one for the break label, since it doesn't actually correspond | |
776 to the beginning of the loop/switch. */ | |
777 if (gimple_code (gs) == GIMPLE_LABEL) | |
778 return false; | |
779 | |
780 return true; | |
781 } | |
782 | |
783 | |
/* Return true if a location should not be emitted for this statement
   by annotate_one_with_location.  The flag lives in pass-local flag 1
   of the tuple.  */

static inline bool
gimple_do_not_emit_location_p (gimple g)
{
  return gimple_plf (g, GF_PLF_1);
}
792 | |
/* Mark statement G so a location will not be emitted by
   annotate_one_with_location.  */

static inline void
gimple_set_do_not_emit_location (gimple g)
{
  /* The PLF flags are initialized to 0 when a new tuple is created,
     so no need to initialize it anywhere.  */
  gimple_set_plf (g, GF_PLF_1, true);
}
803 | |
804 /* Set the location for gimple statement GS to LOCATION. */ | |
805 | |
806 static void | |
807 annotate_one_with_location (gimple gs, location_t location) | |
808 { | |
809 if (!gimple_has_location (gs) | |
810 && !gimple_do_not_emit_location_p (gs) | |
811 && should_carry_location_p (gs)) | |
812 gimple_set_location (gs, location); | |
813 } | |
814 | |
815 | |
816 /* Set LOCATION for all the statements after iterator GSI in sequence | |
817 SEQ. If GSI is pointing to the end of the sequence, start with the | |
818 first statement in SEQ. */ | |
819 | |
820 static void | |
821 annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi, | |
822 location_t location) | |
823 { | |
824 if (gsi_end_p (gsi)) | |
825 gsi = gsi_start (seq); | |
826 else | |
827 gsi_next (&gsi); | |
828 | |
829 for (; !gsi_end_p (gsi); gsi_next (&gsi)) | |
830 annotate_one_with_location (gsi_stmt (gsi), location); | |
831 } | |
832 | |
833 | |
834 /* Set the location for all the statements in a sequence STMT_P to LOCATION. */ | |
835 | |
836 void | |
837 annotate_all_with_location (gimple_seq stmt_p, location_t location) | |
838 { | |
839 gimple_stmt_iterator i; | |
840 | |
841 if (gimple_seq_empty_p (stmt_p)) | |
842 return; | |
843 | |
844 for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i)) | |
845 { | |
846 gimple gs = gsi_stmt (i); | |
847 annotate_one_with_location (gs, location); | |
848 } | |
849 } | |
850 | |
851 | |
/* Similar to copy_tree_r() but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that should only be done once.  If we
   were to unshare something like SAVE_EXPR(i++), the gimplification
   process would create wrong code.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  enum tree_code code = TREE_CODE (*tp);
  /* Don't unshare types, decls, constants and SAVE_EXPR nodes.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant
      || code == SAVE_EXPR || code == TARGET_EXPR
      /* We can't do anything sensible with a BLOCK used as an expression,
	 but we also can't just die when we see it because of non-expression
	 uses.  So just avert our eyes and cross our fingers.  Silly Java.  */
      || code == BLOCK)
    *walk_subtrees = 0;
  else
    {
      /* BIND_EXPRs must have been lowered already; copying one here
	 would duplicate its declarations.  */
      gcc_assert (code != BIND_EXPR);
      copy_tree_r (tp, walk_subtrees, data);
    }

  return NULL_TREE;
}
879 | |
/* Callback for walk_tree to unshare most of the shared trees rooted at
   *TP.  If *TP has been visited already (i.e., TREE_VISITED (*TP) == 1),
   then *TP is deep copied by calling copy_tree_r.

   This unshares the same trees as copy_tree_r with the exception of
   SAVE_EXPR nodes.  These nodes model computations that should only be
   done once.  If we were to unshare something like SAVE_EXPR(i++), the
   gimplification process would create wrong code.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
		  void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, NULL, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the tree as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}
924 | |
925 static tree | |
926 unmark_visited_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, | |
927 void *data ATTRIBUTE_UNUSED) | |
928 { | |
929 if (TREE_VISITED (*tp)) | |
930 TREE_VISITED (*tp) = 0; | |
931 else | |
932 *walk_subtrees = 0; | |
933 | |
934 return NULL_TREE; | |
935 } | |
936 | |
937 /* Unshare all the trees in BODY_P, a pointer into the body of FNDECL, and the | |
938 bodies of any nested functions if we are unsharing the entire body of | |
939 FNDECL. */ | |
940 | |
941 static void | |
942 unshare_body (tree *body_p, tree fndecl) | |
943 { | |
944 struct cgraph_node *cgn = cgraph_node (fndecl); | |
945 | |
946 walk_tree (body_p, copy_if_shared_r, NULL, NULL); | |
947 if (body_p == &DECL_SAVED_TREE (fndecl)) | |
948 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested) | |
949 unshare_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl); | |
950 } | |
951 | |
952 /* Likewise, but mark all trees as not visited. */ | |
953 | |
954 static void | |
955 unvisit_body (tree *body_p, tree fndecl) | |
956 { | |
957 struct cgraph_node *cgn = cgraph_node (fndecl); | |
958 | |
959 walk_tree (body_p, unmark_visited_r, NULL, NULL); | |
960 if (body_p == &DECL_SAVED_TREE (fndecl)) | |
961 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested) | |
962 unvisit_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl); | |
963 } | |
964 | |
965 /* Unconditionally make an unshared copy of EXPR. This is used when using | |
966 stored expressions which span multiple functions, such as BINFO_VTABLE, | |
967 as the normal unsharing process can't tell that they're shared. */ | |
968 | |
969 tree | |
970 unshare_expr (tree expr) | |
971 { | |
972 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL); | |
973 return expr; | |
974 } | |
975 | |
976 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both | |
977 contain statements and have a value. Assign its value to a temporary | |
978 and give it void_type_node. Returns the temporary, or NULL_TREE if | |
979 WRAPPER was already void. */ | |
980 | |
tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  Each wrapper traversed is
	 voidified and marked as having side effects on the way down.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		/* An empty statement list means there is no value
		   expression at all; P becomes NULL and the loop ends.  */
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    default:
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  Splice the innermost value expression in as the new
	     RHS of that assignment.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  /* No assignment was supplied: create a fresh temporary and
	     initialize it from the innermost value expression.  */
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
1056 | |
1057 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as | |
1058 a temporary through which they communicate. */ | |
1059 | |
1060 static void | |
1061 build_stack_save_restore (gimple *save, gimple *restore) | |
1062 { | |
1063 tree tmp_var; | |
1064 | |
1065 *save = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_SAVE], 0); | |
1066 tmp_var = create_tmp_var (ptr_type_node, "saved_stack"); | |
1067 gimple_call_set_lhs (*save, tmp_var); | |
1068 | |
1069 *restore = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_RESTORE], | |
1070 1, tmp_var); | |
1071 } | |
1072 | |
1073 /* Gimplify a BIND_EXPR. Just voidify and recurse. */ | |
1074 | |
static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gimple gimple_bind;
  gimple_seq body;

  /* If the BIND_EXPR has a value, capture it in a temporary; TEMP is
     returned to the caller in *EXPR_P below.  */
  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && !is_global_var (t)
	      && (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL))
	    omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}

      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.
	 We exclude complex types if not optimizing because they can be
	 subject to partial stores in GNU C by means of the __real__ and
	 __imag__ operators and we cannot promote them to total stores
	 (see gimplify_modify_expr_complex_part).  */
      if (optimize
	  && (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	      || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (t)
	  && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
	  && !needs_to_live_in_memory (t))
	DECL_GIMPLE_REG_P (t) = 1;
    }

  gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
                                   BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (gimple_bind);

  /* Clear the flag so we can detect whether gimplifying THIS body
     requests a stack save (e.g. via a VLA); the outer value is
     restored before returning.  */
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (gimple_bind, body);

  if (gimplify_ctxp->save_stack)
    {
      gimple stack_save, stack_restore, gs;
      gimple_seq cleanup, new_body;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  Note that mudflap depends on the
	 format of the emitted code: see mx_register_decls().  */
      build_stack_save_restore (&stack_save, &stack_restore);

      cleanup = new_body = NULL;
      gimplify_seq_add_stmt (&cleanup, stack_restore);
      gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
			     GIMPLE_TRY_FINALLY);

      gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (gimple_bind, new_body);
    }

  gimplify_ctxp->save_stack = old_save_stack;
  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, gimple_bind);

  /* When the wrapper had a value, hand the temporary back for further
     gimplification; otherwise we are completely done.  */
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
1167 | |
1168 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a | |
1169 GIMPLE value, it is assigned to a new temporary and the statement is | |
1170 re-written to return the temporary. | |
1171 | |
1172 PRE_P points to the sequence where side effects that must happen before | |
1173 STMT should be stored. */ | |
1174 | |
static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  gimple ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  /* A bare return, or one whose operand already is the RESULT_DECL,
     needs no temporary: emit the GIMPLE_RETURN directly.  */
  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL
      || ret_expr == error_mark_node)
    {
      gimple ret = gimple_build_return (ret_expr);
      gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  /* CbC extension: a return whose operand is a CbC goto (a CALL_EXPR
     flagged by CbC_IS_CbC_GOTO) is treated like a void return — no
     RESULT_DECL is extracted from the operand.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
#ifndef noCbC
      || ( ret_expr
           && TREE_CODE(ret_expr)==CALL_EXPR
           && CbC_IS_CbC_GOTO(ret_expr)
           //&& !CbC_IS_CODE_SEGMENT(TREE_TYPE(current_function_decl)))
           && !(current_function_decl&&CbC_IS_CODE_SEGMENT(TREE_TYPE(current_function_decl))))
           //&& !(current_function_decl&&CbC_IS_CODE_SEGMENT(current_function_decl)))
#endif
      )
    result_decl = NULL_TREE;
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (TREE_CODE (result_decl) == INDIRECT_REF)
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl
      || aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    result = result_decl;
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      /* First value-return in this function: create the shared return
	 temporary and remember it in the gimplify context.  */
      result = create_tmp_var (TREE_TYPE (result_decl), NULL);
      if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
	  || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
	DECL_GIMPLE_REG_P (result) = 1;

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      TREE_NO_WARNING (result) = 1;

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  ret = gimple_build_return (result);
  gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}
1261 | |
/* Gimplify a variable-length-array declaration DECL: gimplify its size
   expressions, rewrite all uses of DECL as a dereference of a pointer
   temporary, and emit an __builtin_alloca allocation into SEQ_P.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  Note that mudflap depends on the format
     of the emitted code: see mx_register_decls().  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  /* Emit: addr = (ptr_type) __builtin_alloca (size_in_bytes);  */
  t = built_in_decls[BUILT_IN_ALLOCA];
  t = build_call_expr (t, 1, DECL_SIZE_UNIT (decl));
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Indicate that we need to restore the stack level when the
     enclosing BIND_EXPR is exited.  */
  gimplify_ctxp->save_stack = true;
}
1296 | |
1297 | |
1298 /* Gimplifies a DECL_EXPR node *STMT_P by making any necessary allocation | |
1299 and initialization explicit. */ | |
1300 | |
static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  /* The DECL_EXPR itself is consumed here; anything it implies is
     emitted into SEQ_P.  */
  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    gimplify_type_sizes (TREE_TYPE (decl), seq_p);

  if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);

      /* Non-constant size, or a large automatic when generic stack
	 checking is on, means the variable is allocated with alloca.  */
      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && compare_tree_int (DECL_SIZE_UNIT (decl),
				   STACK_CHECK_MAX_VAR_SIZE) > 0))
	gimplify_vla_decl (decl, seq_p);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      /* Turn the initializer into an explicit INIT_EXPR
		 statement; the original DECL_INITIAL is dropped.  */
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);
    }

  return GS_ALL_DONE;
}
1354 | |
1355 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body | |
1356 and replacing the LOOP_EXPR with goto, but if the loop contains an | |
1357 EXIT_EXPR, we need to append a label for it to jump to. */ | |
1358 | |
1359 static enum gimplify_status | |
1360 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p) | |
1361 { | |
1362 tree saved_label = gimplify_ctxp->exit_label; | |
1363 tree start_label = create_artificial_label (UNKNOWN_LOCATION); | |
1364 | |
1365 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label)); | |
1366 | |
1367 gimplify_ctxp->exit_label = NULL_TREE; | |
1368 | |
1369 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p); | |
1370 | |
1371 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label)); | |
1372 | |
1373 if (gimplify_ctxp->exit_label) | |
1374 gimplify_seq_add_stmt (pre_p, gimple_build_label (gimplify_ctxp->exit_label)); | |
1375 | |
1376 gimplify_ctxp->exit_label = saved_label; | |
1377 | |
1378 *expr_p = NULL; | |
1379 return GS_ALL_DONE; | |
1380 } | |
1381 | |
1382 /* Gimplifies a statement list onto a sequence. These may be created either | |
1383 by an enlightened front-end, or by shortcut_cond_expr. */ | |
1384 | |
1385 static enum gimplify_status | |
1386 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p) | |
1387 { | |
1388 tree temp = voidify_wrapper_expr (*expr_p, NULL); | |
1389 | |
1390 tree_stmt_iterator i = tsi_start (*expr_p); | |
1391 | |
1392 while (!tsi_end_p (i)) | |
1393 { | |
1394 gimplify_stmt (tsi_stmt_ptr (i), pre_p); | |
1395 tsi_delink (&i); | |
1396 } | |
1397 | |
1398 if (temp) | |
1399 { | |
1400 *expr_p = temp; | |
1401 return GS_OK; | |
1402 } | |
1403 | |
1404 return GS_ALL_DONE; | |
1405 } | |
1406 | |
1407 /* Compare two case labels. Because the front end should already have | |
1408 made sure that case ranges do not overlap, it is enough to only compare | |
1409 the CASE_LOW values of each case label. */ | |
1410 | |
1411 static int | |
1412 compare_case_labels (const void *p1, const void *p2) | |
1413 { | |
1414 const_tree const case1 = *(const_tree const*)p1; | |
1415 const_tree const case2 = *(const_tree const*)p2; | |
1416 | |
1417 /* The 'default' case label always goes first. */ | |
1418 if (!CASE_LOW (case1)) | |
1419 return -1; | |
1420 else if (!CASE_LOW (case2)) | |
1421 return 1; | |
1422 else | |
1423 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2)); | |
1424 } | |
1425 | |
1426 | |
1427 /* Sort the case labels in LABEL_VEC in place in ascending order. */ | |
1428 | |
1429 void | |
1430 sort_case_labels (VEC(tree,heap)* label_vec) | |
1431 { | |
1432 size_t len = VEC_length (tree, label_vec); | |
1433 qsort (VEC_address (tree, label_vec), len, sizeof (tree), | |
1434 compare_case_labels); | |
1435 } | |
1436 | |
1437 | |
1438 /* Gimplify a SWITCH_EXPR, and collect a TREE_VEC of the labels it can | |
1439 branch to. */ | |
1440 | |
static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;

  /* The controlling expression must be a GIMPLE value.  */
  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
		       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      VEC (tree,heap) *labels;
      VEC (tree,heap) *saved_labels;
      tree default_case = NULL_TREE;
      size_t i, len;
      gimple gimple_switch;

      /* If someone can be bothered to fill in the labels, they can
	 be bothered to null out the body too.  */
      gcc_assert (!SWITCH_LABELS (switch_expr));

      /* save old labels, get new ones from body, then restore the old
	 labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8);

      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      /* Filter the label vector in place: drop empty ranges, and pull
	 the default case out into DEFAULT_CASE.  */
      i = 0;
      while (i < VEC_length (tree, labels))
	{
	  tree elt = VEC_index (tree, labels, i);
	  tree low = CASE_LOW (elt);
	  bool remove_element = FALSE;

	  if (low)
	    {
	      /* Discard empty ranges.  */
	      tree high = CASE_HIGH (elt);
	      if (high && tree_int_cst_lt (high, low))
		remove_element = TRUE;
	    }
	  else
	    {
	      /* The default case must be the last label in the list.  */
	      gcc_assert (!default_case);
	      default_case = elt;
	      remove_element = TRUE;
	    }

	  if (remove_element)
	    VEC_ordered_remove (tree, labels, i);
	  else
	    i++;
	}
      len = i;

      if (!VEC_empty (tree, labels))
	sort_case_labels (labels);

      if (!default_case)
	{
	  tree type = TREE_TYPE (switch_expr);

	  /* If the switch has no default label, add one, so that we jump
	     around the switch body.  If the labels already cover the whole
	     range of type, add the default label pointing to one of the
	     existing labels.  */
	  if (type == void_type_node)
	    type = TREE_TYPE (SWITCH_COND (switch_expr));
	  if (len
	      && INTEGRAL_TYPE_P (type)
	      && TYPE_MIN_VALUE (type)
	      && TYPE_MAX_VALUE (type)
	      && tree_int_cst_equal (CASE_LOW (VEC_index (tree, labels, 0)),
				     TYPE_MIN_VALUE (type)))
	    {
	      tree low, high = CASE_HIGH (VEC_index (tree, labels, len - 1));
	      if (!high)
		high = CASE_LOW (VEC_index (tree, labels, len - 1));
	      if (tree_int_cst_equal (high, TYPE_MAX_VALUE (type)))
		{
		  /* Check that consecutive labels are contiguous, i.e.
		     label[i-1]'s high bound + 1 == label[i]'s low bound,
		     using the two-word (low/high) INT_CST representation
		     with explicit carry propagation.  */
		  for (i = 1; i < len; i++)
		    {
		      high = CASE_LOW (VEC_index (tree, labels, i));
		      low = CASE_HIGH (VEC_index (tree, labels, i - 1));
		      if (!low)
			low = CASE_LOW (VEC_index (tree, labels, i - 1));
		      if ((TREE_INT_CST_LOW (low) + 1
			   != TREE_INT_CST_LOW (high))
			  || (TREE_INT_CST_HIGH (low)
			      + (TREE_INT_CST_LOW (high) == 0)
			      != TREE_INT_CST_HIGH (high)))
			break;
		    }
		  if (i == len)
		    default_case = build3 (CASE_LABEL_EXPR, void_type_node,
					   NULL_TREE, NULL_TREE,
					   CASE_LABEL (VEC_index (tree,
								  labels, 0)));
		}
	    }

	  if (!default_case)
	    {
	      gimple new_default;

	      /* Fall back: synthesize a default label placed at the end
		 of the switch body, so default just skips the body.  */
	      default_case
		= build3 (CASE_LABEL_EXPR, void_type_node,
			  NULL_TREE, NULL_TREE,
			  create_artificial_label (UNKNOWN_LOCATION));
	      new_default = gimple_build_label (CASE_LABEL (default_case));
	      gimplify_seq_add_stmt (&switch_body_seq, new_default);
	    }
	}

      gimple_switch = gimple_build_switch_vec (SWITCH_COND (switch_expr),
					       default_case, labels);
      gimplify_seq_add_stmt (pre_p, gimple_switch);
      gimplify_seq_add_seq (pre_p, switch_body_seq);
      VEC_free(tree, heap, labels);
    }
  else
    gcc_assert (SWITCH_LABELS (switch_expr));

  return GS_ALL_DONE;
}
1573 | |
1574 | |
1575 static enum gimplify_status | |
1576 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p) | |
1577 { | |
1578 struct gimplify_ctx *ctxp; | |
1579 gimple gimple_label; | |
1580 | |
1581 /* Invalid OpenMP programs can play Duff's Device type games with | |
1582 #pragma omp parallel. At least in the C front end, we don't | |
1583 detect such invalid branches until after gimplification. */ | |
1584 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context) | |
1585 if (ctxp->case_labels) | |
1586 break; | |
1587 | |
1588 gimple_label = gimple_build_label (CASE_LABEL (*expr_p)); | |
1589 VEC_safe_push (tree, heap, ctxp->case_labels, *expr_p); | |
1590 gimplify_seq_add_stmt (pre_p, gimple_label); | |
1591 | |
1592 return GS_ALL_DONE; | |
1593 } | |
1594 | |
1595 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first | |
1596 if necessary. */ | |
1597 | |
1598 tree | |
1599 build_and_jump (tree *label_p) | |
1600 { | |
1601 if (label_p == NULL) | |
1602 /* If there's nowhere to jump, just fall through. */ | |
1603 return NULL_TREE; | |
1604 | |
1605 if (*label_p == NULL_TREE) | |
1606 { | |
1607 tree label = create_artificial_label (UNKNOWN_LOCATION); | |
1608 *label_p = label; | |
1609 } | |
1610 | |
1611 return build1 (GOTO_EXPR, void_type_node, *label_p); | |
1612 } | |
1613 | |
1614 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR. | |
1615 This also involves building a label to jump to and communicating it to | |
1616 gimplify_loop_expr through gimplify_ctxp->exit_label. */ | |
1617 | |
1618 static enum gimplify_status | |
1619 gimplify_exit_expr (tree *expr_p) | |
1620 { | |
1621 tree cond = TREE_OPERAND (*expr_p, 0); | |
1622 tree expr; | |
1623 | |
1624 expr = build_and_jump (&gimplify_ctxp->exit_label); | |
1625 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE); | |
1626 *expr_p = expr; | |
1627 | |
1628 return GS_OK; | |
1629 } | |
1630 | |
1631 /* A helper function to be called via walk_tree. Mark all labels under *TP | |
1632 as being forced. To be called for DECL_INITIAL of static variables. */ | |
1633 | |
1634 tree | |
1635 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED) | |
1636 { | |
1637 if (TYPE_P (*tp)) | |
1638 *walk_subtrees = 0; | |
1639 if (TREE_CODE (*tp) == LABEL_DECL) | |
1640 FORCED_LABEL (*tp) = 1; | |
1641 | |
1642 return NULL_TREE; | |
1643 } | |
1644 | |
1645 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is | |
1646 different from its canonical type, wrap the whole thing inside a | |
1647 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical | |
1648 type. | |
1649 | |
1650 The canonical type of a COMPONENT_REF is the type of the field being | |
1651 referenced--unless the field is a bit-field which can be read directly | |
1652 in a smaller mode, in which case the canonical type is the | |
1653 sign-appropriate type corresponding to that mode. */ | |
1654 | |
static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  /* For integral fields, let get_unwidened pick the narrower mode's
     type (the bit-field case); otherwise use the field's own type.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  /* One could argue that all the stuff below is not necessary for
     the non-bitfield case and declare it a FE error if type
     adjustment would be needed.  */
  if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
      tree old_type = TREE_TYPE (expr);
#endif
      int type_quals;

      /* We need to preserve qualifiers and propagate them from
	 operand 0.  */
      type_quals = TYPE_QUALS (type)
	| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
      if (TYPE_QUALS (type) != type_quals)
	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

#ifdef ENABLE_TYPES_CHECKING
      /* It is now a FE error, if the conversion from the canonical
	 type to the original expression type is not useless.  */
      gcc_assert (useless_type_conversion_p (old_type, type));
#endif
    }
}
1695 | |
1696 /* If a NOP conversion is changing a pointer to array of foo to a pointer | |
1697 to foo, embed that change in the ADDR_EXPR by converting | |
1698 T array[U]; | |
1699 (T *)&array | |
1700 ==> | |
1701 &array[L] | |
1702 where L is the lower bound. For simplicity, only do this for constant | |
1703 lower bound. | |
1704 The constraint is that the type of &array[L] is trivially convertible | |
1705 to T *. */ | |
1706 | |
static void
canonicalize_addr_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree addr_expr = TREE_OPERAND (expr, 0);
  tree datype, ddatype, pddatype;

  /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
  if (!POINTER_TYPE_P (TREE_TYPE (expr))
      || TREE_CODE (addr_expr) != ADDR_EXPR)
    return;

  /* The addr_expr type should be a pointer to an array.  */
  datype = TREE_TYPE (TREE_TYPE (addr_expr));
  if (TREE_CODE (datype) != ARRAY_TYPE)
    return;

  /* The pointer to element type shall be trivially convertible to
     the expression pointer type.  */
  ddatype = TREE_TYPE (datype);
  pddatype = build_pointer_type (ddatype);
  if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
				  pddatype))
    return;

  /* The lower bound and element sizes must be constant.  */
  if (!TYPE_SIZE_UNIT (ddatype)
      || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
      || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
      || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
    return;

  /* All checks succeeded.  Build a new node to merge the cast:
     &array[LOWER_BOUND] in place of (T *) &array.  */
  *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
		    TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
		    NULL_TREE, NULL_TREE);
  *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);

  /* We can have stripped a required restrict qualifier above.  */
  if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
    *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
}
1749 | |
1750 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions | |
1751 underneath as appropriate. */ | |
1752 | |
static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
  tree tem;
  location_t loc = EXPR_LOCATION (*expr_p);
  gcc_assert (CONVERT_EXPR_P (*expr_p));

  /* Then strip away all but the outermost conversion.  */
  STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));

  /* And remove the outermost conversion if it's useless.  */
  if (tree_ssa_useless_type_conversion (*expr_p))
    *expr_p = TREE_OPERAND (*expr_p, 0);

  /* Attempt to avoid NOP_EXPR by producing reference to a subtype.
     For example this fold (subclass *)&A into &A->subclass avoiding
     a need for statement.  */
  if (CONVERT_EXPR_P (*expr_p)
      && POINTER_TYPE_P (TREE_TYPE (*expr_p))
      && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (*expr_p, 0)))
      && (tem = maybe_fold_offset_to_address
	  (EXPR_LOCATION (*expr_p), TREE_OPERAND (*expr_p, 0),
	   integer_zero_node, TREE_TYPE (*expr_p))) != NULL_TREE)
    *expr_p = tem;

  /* If we still have a conversion at the toplevel,
     then canonicalize some constructs.  */
  if (CONVERT_EXPR_P (*expr_p))
    {
      tree sub = TREE_OPERAND (*expr_p, 0);

      /* If a NOP conversion is changing the type of a COMPONENT_REF
	 expression, then canonicalize its type now in order to expose more
	 redundant conversions.  */
      if (TREE_CODE (sub) == COMPONENT_REF)
	canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));

      /* If a NOP conversion is changing a pointer to array of foo
	 to a pointer to foo, embed that change in the ADDR_EXPR.  */
      else if (TREE_CODE (sub) == ADDR_EXPR)
	canonicalize_addr_expr (expr_p);
    }

  /* If we have a conversion to a non-register type force the
     use of a VIEW_CONVERT_EXPR instead.  */
  if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
    *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
			       TREE_OPERAND (*expr_p, 0));

  return GS_OK;
}
1804 | |
1805 /* Nonlocal VLAs seen in the current function. */ | |
1806 static struct pointer_set_t *nonlocal_vlas; | |
1807 | |
1808 /* Gimplify a VAR_DECL or PARM_DECL. Returns GS_OK if we expanded a | |
1809 DECL_VALUE_EXPR, and it's worth re-examining things. */ | |
1810 | |
static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
  tree decl = *expr_p;

  /* ??? If this is a local variable, and it has not been seen in any
     outer BIND_EXPR, then it's probably the result of a duplicate
     declaration, for which we've already issued an error.  It would
     be really nice if the front end wouldn't leak these at all.
     Currently the only known culprit is C++ destructors, as seen
     in g++.old-deja/g++.jason/binding.C.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
      && decl_function_context (decl) == current_function_decl)
    {
      gcc_assert (errorcount || sorrycount);
      return GS_ERROR;
    }

  /* When within an OpenMP context, notice uses of variables.  */
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
    return GS_ALL_DONE;

  /* If the decl is an alias for another expression, substitute it now.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value_expr = DECL_VALUE_EXPR (decl);

      /* For referenced nonlocal VLAs add a decl for debugging purposes
	 to the current function.  */
      if (TREE_CODE (decl) == VAR_DECL
	  && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  && nonlocal_vlas != NULL
	  && TREE_CODE (value_expr) == INDIRECT_REF
	  && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
	  && decl_function_context (decl) != current_function_decl)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx && ctx->region_type == ORT_WORKSHARE)
	    ctx = ctx->outer_context;
	  /* Only add the copy once per decl: pointer_set_insert returns
	     nonzero if DECL was already in NONLOCAL_VLAS.  */
	  if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
	    {
	      tree copy = copy_node (decl), block;

	      lang_hooks.dup_lang_specific_decl (copy);
	      SET_DECL_RTL (copy, NULL_RTX);
	      TREE_USED (copy) = 1;
	      /* Chain the debug copy into the outermost block of the
		 current function.  */
	      block = DECL_INITIAL (current_function_decl);
	      TREE_CHAIN (copy) = BLOCK_VARS (block);
	      BLOCK_VARS (block) = copy;
	      SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
	      DECL_HAS_VALUE_EXPR_P (copy) = 1;
	    }
	}

      *expr_p = unshare_expr (value_expr);
      return GS_OK;
    }

  return GS_ALL_DONE;
}
1873 | |
1874 | |
1875 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR | |
1876 node *EXPR_P. | |
1877 | |
1878 compound_lval | |
1879 : min_lval '[' val ']' | |
1880 | min_lval '.' ID | |
1881 | compound_lval '[' val ']' | |
1882 | compound_lval '.' ID | |
1883 | |
1884 This is not part of the original SIMPLE definition, which separates | |
1885 array and member references, but it seems reasonable to handle them | |
1886 together. Also, this way we don't run into problems with union | |
1887 aliasing; gcc requires that for accesses through a union to alias, the | |
1888 union reference must be explicit, which was not always the case when we | |
1889 were splitting up array and member refs. | |
1890 | |
1891 PRE_P points to the sequence where side effects that must happen before | |
1892 *EXPR_P should be stored. | |
1893 | |
1894 POST_P points to the sequence where side effects that must happen after | |
1895 *EXPR_P should be stored. */ | |
1896 | |
static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			fallback_t fallback)
{
  tree *p;
  VEC(tree,heap) *stack;
  enum gimplify_status ret = GS_OK, tret;
  int i;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  stack = VEC_alloc (tree, heap, 10);

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
	*p = fold_indirect_ref_loc (loc, *p);

      if (handled_component_p (*p))
	;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
	 additional COMPONENT_REFs.  */
      else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
	       && gimplify_var_or_parm_decl (p) == GS_OK)
	goto restart;
      else
	break;

      VEC_safe_push (tree, heap, stack, *p);
    }

  /* We must have pushed at least one component ref, else we would not
     have been called on a compound lvalue.  */
  gcc_assert (VEC_length (tree, stack));

  /* Now STACK is a stack of pointers to all the refs we've walked through
     and P points to the innermost expression.

     Java requires that we elaborated nodes in source order.  That
     means we must gimplify the inner expression followed by each of
     the indices, in order.  But we can't gimplify the inner
     expression until we deal with any variable bounds, sizes, or
     positions in order to deal with PLACEHOLDER_EXPRs.

     So we do this in three steps.  First we deal with the annotations
     for any variables in the components, then we gimplify the base,
     then we gimplify any indices, from left to right.  */
  for (i = VEC_length (tree, stack) - 1; i >= 0; i--)
    {
      tree t = VEC_index (tree, stack, i);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the low bound and element type size and put them into
	     the ARRAY_REF.  If these values are set, they have already been
	     gimplified.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree low = unshare_expr (array_ref_low_bound (t));
	      if (!is_gimple_min_invariant (low))
		{
		  TREE_OPERAND (t, 2) = low;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  /* MIN accumulates the most severe status seen so far.  */
		  ret = MIN (ret, tret);
		}
	    }

	  if (!TREE_OPERAND (t, 3))
	    {
	      tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
	      tree elmt_size = unshare_expr (array_ref_element_size (t));
	      tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

	      /* Divide the element size by the alignment of the element
		 type (above).  */
	      elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);

	      if (!is_gimple_min_invariant (elmt_size))
		{
		  TREE_OPERAND (t, 3) = elmt_size;
		  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  /* Set the field offset into T and gimplify it.  */
	  if (!TREE_OPERAND (t, 2))
	    {
	      tree offset = unshare_expr (component_ref_field_offset (t));
	      tree field = TREE_OPERAND (t, 1);
	      tree factor
		= size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

	      /* Divide the offset by its alignment.  */
	      offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);

	      if (!is_gimple_min_invariant (offset))
		{
		  TREE_OPERAND (t, 2) = offset;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	}
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
			fallback | fb_lvalue);
  ret = MIN (ret, tret);

  /* And finally, the indices and operands to BIT_FIELD_REF.  During this
     loop we also remove any useless conversions.  */
  for (; VEC_length (tree, stack) > 0; )
    {
      tree t = VEC_pop (tree, stack);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the dimension.  */
	  if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				    is_gimple_val, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
      else if (TREE_CODE (t) == BIT_FIELD_REF)
	{
	  /* Both the size (operand 1) and position (operand 2) must be
	     gimple values.  */
	  tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				is_gimple_val, fb_rvalue);
	  ret = MIN (ret, tret);
	  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				is_gimple_val, fb_rvalue);
	  ret = MIN (ret, tret);
	}

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
	 TREE_SIDE_EFFECTS set which would have caused all the outer
	 expressions in *EXPR_P leading to P to also have had
	 TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    {
      canonicalize_component_ref (expr_p);
      ret = MIN (ret, GS_OK);
    }

  VEC_free (tree, heap, stack);

  return ret;
}
2066 | |
2067 /* Gimplify the self modifying expression pointed to by EXPR_P | |
2068 (++, --, +=, -=). | |
2069 | |
2070 PRE_P points to the list where side effects that must happen before | |
2071 *EXPR_P should be stored. | |
2072 | |
2073 POST_P points to the list where side effects that must happen after | |
2074 *EXPR_P should be stored. | |
2075 | |
2076 WANT_VALUE is nonzero iff we want to use the value of this expression | |
2077 in another expression. */ | |
2078 | |
static enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			bool want_value)
{
  enum tree_code code;
  tree lhs, lvalue, rhs, t1;
  gimple_seq post = NULL, *orig_post_p = post_p;
  bool postfix;
  enum tree_code arith_code;
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  code = TREE_CODE (*expr_p);

  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
	      || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);

  /* Prefix or postfix?  */
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
    /* Faster to treat as prefix if result is not used.  */
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  We redirect
     all post-queue output into a local sequence that is appended to
     ORIG_POST_P at the end.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operator, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  We also
     make sure to make lvalue a minimal lval, see
     gcc.c-torture/execute/20040313-1.c for an example where this matters.  */
  if (postfix)
    {
      if (!is_gimple_min_lval (lvalue))
	{
	  /* Take the address, gimplify it to a value, and deref again so
	     both the result read and the store refer to one location.  */
	  mark_addressable (lvalue);
	  lvalue = build_fold_addr_expr_loc (input_location, lvalue);
	  gimplify_expr (&lvalue, pre_p, post_p, is_gimple_val, fb_rvalue);
	  lvalue = build_fold_indirect_ref_loc (input_location, lvalue);
	}
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  /* For POINTERs increment, use POINTER_PLUS_EXPR.  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      rhs = fold_convert_loc (loc, sizetype, rhs);
      /* POINTER_PLUS_EXPR has no MINUS counterpart; negate the offset.  */
      if (arith_code == MINUS_EXPR)
	rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      arith_code = POINTER_PLUS_EXPR;
    }

  t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);

  if (postfix)
    {
      /* Emit the store into the post queue; the pre-incremented value
	 (LHS) is the result of the whole expression.  */
      gimplify_assign (lvalue, t1, orig_post_p);
      gimplify_seq_add_seq (orig_post_p, post);
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
  else
    {
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}
2166 | |
2167 | |
2168 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */ | |
2169 | |
2170 static void | |
2171 maybe_with_size_expr (tree *expr_p) | |
2172 { | |
2173 tree expr = *expr_p; | |
2174 tree type = TREE_TYPE (expr); | |
2175 tree size; | |
2176 | |
2177 /* If we've already wrapped this or the type is error_mark_node, we can't do | |
2178 anything. */ | |
2179 if (TREE_CODE (expr) == WITH_SIZE_EXPR | |
2180 || type == error_mark_node) | |
2181 return; | |
2182 | |
2183 /* If the size isn't known or is a constant, we have nothing to do. */ | |
2184 size = TYPE_SIZE_UNIT (type); | |
2185 if (!size || TREE_CODE (size) == INTEGER_CST) | |
2186 return; | |
2187 | |
2188 /* Otherwise, make a WITH_SIZE_EXPR. */ | |
2189 size = unshare_expr (size); | |
2190 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr); | |
2191 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size); | |
2192 } | |
2193 | |
2194 | |
2195 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P | |
2196 Store any side-effects in PRE_P. CALL_LOCATION is the location of | |
2197 the CALL_EXPR. */ | |
2198 | |
2199 static enum gimplify_status | |
2200 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location) | |
2201 { | |
2202 bool (*test) (tree); | |
2203 fallback_t fb; | |
2204 | |
2205 /* In general, we allow lvalues for function arguments to avoid | |
2206 extra overhead of copying large aggregates out of even larger | |
2207 aggregates into temporaries only to copy the temporaries to | |
2208 the argument list. Make optimizers happy by pulling out to | |
2209 temporaries those types that fit in registers. */ | |
2210 if (is_gimple_reg_type (TREE_TYPE (*arg_p))) | |
2211 test = is_gimple_val, fb = fb_rvalue; | |
2212 else | |
2213 test = is_gimple_lvalue, fb = fb_either; | |
2214 | |
2215 /* If this is a variable sized type, we must remember the size. */ | |
2216 maybe_with_size_expr (arg_p); | |
2217 | |
2218 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */ | |
2219 /* Make sure arguments have the same location as the function call | |
2220 itself. */ | |
2221 protected_set_expr_location (*arg_p, call_location); | |
2222 | |
2223 /* There is a sequence point before a function call. Side effects in | |
2224 the argument list must occur before the actual call. So, when | |
2225 gimplifying arguments, force gimplify_expr to use an internal | |
2226 post queue which is then appended to the end of PRE_P. */ | |
2227 return gimplify_expr (arg_p, pre_p, NULL, test, fb); | |
2228 } | |
2229 | |
2230 | |
2231 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P. | |
2232 WANT_VALUE is true if the result of the call is desired. */ | |
2233 | |
static enum gimplify_status
gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
  tree fndecl, parms, p;
  enum gimplify_status ret;
  int i, nargs;
  gimple call;
  bool builtin_va_start_p = FALSE;
  location_t loc = EXPR_LOCATION (*expr_p);

  gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);

  /* For reliable diagnostics during inlining, it is necessary that
     every call_expr be annotated with file and line.  */
  if (! EXPR_HAS_LOCATION (*expr_p))
    SET_EXPR_LOCATION (*expr_p, input_location);

  /* This may be a call to a builtin function.

     Builtin function calls may be transformed into different
     (and more efficient) builtin function calls under certain
     circumstances.  Unfortunately, gimplification can muck things
     up enough that the builtin expanders are not aware that certain
     transformations are still valid.

     So we attempt transformation/gimplification of the call before
     we gimplify the CALL_EXPR.  At this time we do not manage to
     transform all calls in the same manner as the expanders do, but
     we do transform most of them.  */
  fndecl = get_callee_fndecl (*expr_p);
  if (fndecl && DECL_BUILT_IN (fndecl))
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);

      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}

      /* va_start needs special handling: its second argument must stay
	 a plain PARM_DECL (see the argument loop below).  */
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_VA_START)
        {
	  builtin_va_start_p = TRUE;
	  if (call_expr_nargs (*expr_p) < 2)
	    {
	      error ("too few arguments to function %<va_start%>");
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }

	  if (fold_builtin_next_arg (*expr_p, true))
	    {
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }
	}
    }

  /* There is a sequence point before the call, so any side effects in
     the calling expression must occur before the actual call.  Force
     gimplify_expr to use an internal post queue.  */
  ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
		       is_gimple_call_addr, fb_rvalue);

  nargs = call_expr_nargs (*expr_p);

  /* Get argument types for verification.  */
  fndecl = get_callee_fndecl (*expr_p);
  parms = NULL_TREE;
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
    parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));

  if (fndecl && DECL_ARGUMENTS (fndecl))
    p = DECL_ARGUMENTS (fndecl);
  else if (parms)
    p = parms;
  else
    p = NULL_TREE;
  /* Walk the formals in parallel with the actuals; afterwards P is
     NULL iff the call supplies at least as many arguments as there
     are named parameters (I counts how many matched).  */
  for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
    ;

  /* If the last argument is __builtin_va_arg_pack () and it is not
     passed as a named argument, decrease the number of CALL_EXPR
     arguments and set instead the CALL_EXPR_VA_ARG_PACK flag.  */
  if (!p
      && i < nargs
      && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
    {
      tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
      tree last_arg_fndecl = get_callee_fndecl (last_arg);

      if (last_arg_fndecl
	  && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
	{
	  tree call = *expr_p;

	  /* Rebuild the CALL_EXPR without the trailing va_arg_pack
	     argument.  */
	  --nargs;
	  *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
					  CALL_EXPR_FN (call),
					  nargs, CALL_EXPR_ARGP (call));

	  /* Copy all CALL_EXPR flags, location and block, except
	     CALL_EXPR_VA_ARG_PACK flag.  */
	  CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
	  CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
	  CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
	    = CALL_EXPR_RETURN_SLOT_OPT (call);
	  CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
	  CALL_CANNOT_INLINE_P (*expr_p) = CALL_CANNOT_INLINE_P (call);
	  SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
	  TREE_BLOCK (*expr_p) = TREE_BLOCK (call);

	  /* Set CALL_EXPR_VA_ARG_PACK.  */
	  CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
	}
    }

  /* Finally, gimplify the function arguments.  The direction follows
     PUSH_ARGS_REVERSED so side effects happen in argument-push order.  */
  if (nargs > 0)
    {
      for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
           PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
           PUSH_ARGS_REVERSED ? i-- : i++)
        {
          enum gimplify_status t;

          /* Avoid gimplifying the second argument to va_start, which needs to
             be the plain PARM_DECL.  */
          if ((i != 1) || !builtin_va_start_p)
            {
              t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
				EXPR_LOCATION (*expr_p));

              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
    }

  /* Verify the function result.  */
  if (want_value && fndecl
      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl))))
    {
      error_at (loc, "using result of function returning %<void%>");
      ret = GS_ERROR;
    }

  /* Try this again in case gimplification exposed something.  */
  if (ret != GS_ERROR)
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);

      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }
  else
    {
      *expr_p = error_mark_node;
      return GS_ERROR;
    }

  /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
     decl.  This allows us to eliminate redundant or useless
     calls to "const" functions.  */
  if (TREE_CODE (*expr_p) == CALL_EXPR)
    {
      int flags = call_expr_flags (*expr_p);
      if (flags & (ECF_CONST | ECF_PURE)
	  /* An infinite loop is considered a side effect.  */
	  && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
	TREE_SIDE_EFFECTS (*expr_p) = 0;
    }

  /* If the value is not needed by the caller, emit a new GIMPLE_CALL
     and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
     form and delegate the creation of a GIMPLE_CALL to
     gimplify_modify_expr.  This is always possible because when
     WANT_VALUE is true, the caller wants the result of this call into
     a temporary, which means that we will emit an INIT_EXPR in
     internal_get_tmp_var which will then be handled by
     gimplify_modify_expr.  */
  if (!want_value)
    {
      /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
	 have to do is replicate it as a GIMPLE_CALL tuple.  */
      call = gimple_build_call_from_tree (*expr_p);
      gimplify_seq_add_stmt (pre_p, call);
      *expr_p = NULL_TREE;
    }

  return ret;
}
2440 | |
2441 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by | |
2442 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs. | |
2443 | |
2444 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the | |
2445 condition is true or false, respectively. If null, we should generate | |
2446 our own to skip over the evaluation of this specific expression. | |
2447 | |
2448 LOCUS is the source location of the COND_EXPR. | |
2449 | |
2450 This function is the tree equivalent of do_jump. | |
2451 | |
2452 shortcut_cond_r should only be called by shortcut_cond_expr. */ | |
2453 | |
static tree
shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
		 location_t locus)
{
  tree local_label = NULL_TREE;
  tree t, expr = NULL;

  /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
     retain the shortcut semantics.  Just insert the gotos here;
     shortcut_cond_expr will append the real blocks later.  */
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a && b) into

	 if (a); else goto no;
	 if (b) goto yes; else goto no;
	 (no:) */

      /* If the caller supplied no false label, the two generated 'if's
	 share a local label emitted at the end of this function.  */
      if (false_label_p == NULL)
	false_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the && on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a || b) into

	 if (a) goto yes;
	 if (b) goto yes; else goto no;
	 (yes:) */

      /* Mirror of the && case: share a local 'yes' label if the caller
	 supplied none.  */
      if (true_label_p == NULL)
	true_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the || on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == COND_EXPR)
    {
      location_t new_locus;

      /* As long as we're messing with gotos, turn if (a ? b : c) into
	 if (a)
	   if (b) goto yes; else goto no;
	 else
	   if (c) goto yes; else goto no; */

      /* Keep the original source location on the first 'if'.  Set the source
	 location of the ? on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
		     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
				      false_label_p, locus),
		     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
				      false_label_p, new_locus));
    }
  else
    {
      /* Base case: a simple predicate becomes a two-way branch to the
	 requested labels (build_and_jump creates labels on demand).  */
      expr = build3 (COND_EXPR, void_type_node, pred,
		     build_and_jump (true_label_p),
		     build_and_jump (false_label_p));
      SET_EXPR_LOCATION (expr, locus);
    }

  /* Emit the shared local label, if one of the branches above needed it.  */
  if (local_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, local_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
2545 | |
2546 /* Given a conditional expression EXPR with short-circuit boolean | |
2547 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the | |
2548 predicate appart into the equivalent sequence of conditionals. */ | |
2549 | |
static tree
shortcut_cond_expr (tree expr)
{
  tree pred = TREE_OPERAND (expr, 0);
  tree then_ = TREE_OPERAND (expr, 1);
  tree else_ = TREE_OPERAND (expr, 2);
  tree true_label, false_label, end_label, t;
  tree *true_label_p;
  tree *false_label_p;
  bool emit_end, emit_false, jump_over_else;
  bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
  bool else_se = else_ && TREE_SIDE_EFFECTS (else_);

  /* First do simple transformations.  */
  if (!else_se)
    {
      /* If there is no 'else', turn
	   if (a && b) then c
	 into
	   if (a) if (b) then c.  */
      while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_HAS_LOCATION (expr)
			     ? EXPR_LOCATION (expr) : input_location;
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the && on the second 'if'.  */
	  if (EXPR_HAS_LOCATION (pred))
	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
	  /* Recurse: the inner 'if (b) then c' may itself contain
	     short-circuit operators.  */
	  then_ = shortcut_cond_expr (expr);
	  then_se = then_ && TREE_SIDE_EFFECTS (then_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  if (!then_se)
    {
      /* If there is no 'then', turn
	   if (a || b); else d
	 into
	   if (a); else if (b); else d.  */
      while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_HAS_LOCATION (expr)
			     ? EXPR_LOCATION (expr) : input_location;
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the || on the second 'if'.  */
	  if (EXPR_HAS_LOCATION (pred))
	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
	  else_ = shortcut_cond_expr (expr);
	  else_se = else_ && TREE_SIDE_EFFECTS (else_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  /* If we're done, great.  */
  if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
      && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
    return expr;

  /* Otherwise we need to mess with gotos.  Change
       if (a) c; else d;
     to
       if (a); else goto no;
       c; goto end;
       no: d; end:
     and recursively gimplify the condition.  */

  true_label = false_label = end_label = NULL_TREE;

  /* If our arms just jump somewhere, hijack those labels so we don't
     generate jumps to jumps.  */

  if (then_
      && TREE_CODE (then_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
    {
      true_label = GOTO_DESTINATION (then_);
      then_ = NULL;
      then_se = false;
    }

  if (else_
      && TREE_CODE (else_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
    {
      false_label = GOTO_DESTINATION (else_);
      else_ = NULL;
      else_se = false;
    }

  /* If we aren't hijacking a label for the 'then' branch, it falls through.  */
  if (true_label)
    true_label_p = &true_label;
  else
    true_label_p = NULL;

  /* The 'else' branch also needs a label if it contains interesting code.  */
  if (false_label || else_se)
    false_label_p = &false_label;
  else
    false_label_p = NULL;

  /* If there was nothing else in our arms, just forward the label(s).  */
  if (!then_se && !else_se)
    return shortcut_cond_r (pred, true_label_p, false_label_p,
			    EXPR_HAS_LOCATION (expr)
			    ? EXPR_LOCATION (expr) : input_location);

  /* If our last subexpression already has a terminal label, reuse it.  */
  if (else_se)
    t = expr_last (else_);
  else if (then_se)
    t = expr_last (then_);
  else
    t = NULL;
  if (t && TREE_CODE (t) == LABEL_EXPR)
    end_label = LABEL_EXPR_LABEL (t);

  /* If we don't care about jumping to the 'else' branch, jump to the end
     if the condition is false.  */
  if (!false_label_p)
    false_label_p = &end_label;

  /* We only want to emit these labels if we aren't hijacking them.  */
  emit_end = (end_label == NULL_TREE);
  emit_false = (false_label == NULL_TREE);

  /* We only emit the jump over the else clause if we have to--if the
     then clause may fall through.  Otherwise we can wind up with a
     useless jump and a useless label at the end of gimplified code,
     which will cause us to think that this conditional as a whole
     falls through even if it doesn't.  If we then inline a function
     which ends with such a condition, that can cause us to issue an
     inappropriate warning about control reaching the end of a
     non-void function.  */
  jump_over_else = block_may_fallthru (then_);

  pred = shortcut_cond_r (pred, true_label_p, false_label_p,
			  EXPR_HAS_LOCATION (expr)
			  ? EXPR_LOCATION (expr) : input_location);

  /* Assemble the rewritten statement sequence:
     condition jumps, then-arm, [goto end], [false:], else-arm, [end:].  */
  expr = NULL;
  append_to_statement_list (pred, &expr);

  append_to_statement_list (then_, &expr);
  if (else_se)
    {
      if (jump_over_else)
	{
	  tree last = expr_last (expr);
	  t = build_and_jump (&end_label);
	  if (EXPR_HAS_LOCATION (last))
	    SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
	  append_to_statement_list (t, &expr);
	}
      if (emit_false)
	{
	  t = build1 (LABEL_EXPR, void_type_node, false_label);
	  append_to_statement_list (t, &expr);
	}
      append_to_statement_list (else_, &expr);
    }
  if (emit_end && end_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, end_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
2726 | |
2727 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */ | |
2728 | |
2729 tree | |
2730 gimple_boolify (tree expr) | |
2731 { | |
2732 tree type = TREE_TYPE (expr); | |
2733 location_t loc = EXPR_LOCATION (expr); | |
2734 | |
2735 if (TREE_CODE (type) == BOOLEAN_TYPE) | |
2736 return expr; | |
2737 | |
2738 switch (TREE_CODE (expr)) | |
2739 { | |
2740 case TRUTH_AND_EXPR: | |
2741 case TRUTH_OR_EXPR: | |
2742 case TRUTH_XOR_EXPR: | |
2743 case TRUTH_ANDIF_EXPR: | |
2744 case TRUTH_ORIF_EXPR: | |
2745 /* Also boolify the arguments of truth exprs. */ | |
2746 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1)); | |
2747 /* FALLTHRU */ | |
2748 | |
2749 case TRUTH_NOT_EXPR: | |
2750 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0)); | |
2751 /* FALLTHRU */ | |
2752 | |
2753 case EQ_EXPR: case NE_EXPR: | |
2754 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR: | |
2755 /* These expressions always produce boolean results. */ | |
2756 TREE_TYPE (expr) = boolean_type_node; | |
2757 return expr; | |
2758 | |
2759 default: | |
2760 /* Other expressions that get here must have boolean values, but | |
2761 might need to be converted to the appropriate mode. */ | |
2762 return fold_convert_loc (loc, boolean_type_node, expr); | |
2763 } | |
2764 } | |
2765 | |
2766 /* Given a conditional expression *EXPR_P without side effects, gimplify | |
2767 its operands. New statements are inserted to PRE_P. */ | |
2768 | |
2769 static enum gimplify_status | |
2770 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p) | |
2771 { | |
2772 tree expr = *expr_p, cond; | |
2773 enum gimplify_status ret, tret; | |
2774 enum tree_code code; | |
2775 | |
2776 cond = gimple_boolify (COND_EXPR_COND (expr)); | |
2777 | |
2778 /* We need to handle && and || specially, as their gimplification | |
2779 creates pure cond_expr, thus leading to an infinite cycle otherwise. */ | |
2780 code = TREE_CODE (cond); | |
2781 if (code == TRUTH_ANDIF_EXPR) | |
2782 TREE_SET_CODE (cond, TRUTH_AND_EXPR); | |
2783 else if (code == TRUTH_ORIF_EXPR) | |
2784 TREE_SET_CODE (cond, TRUTH_OR_EXPR); | |
2785 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue); | |
2786 COND_EXPR_COND (*expr_p) = cond; | |
2787 | |
2788 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL, | |
2789 is_gimple_val, fb_rvalue); | |
2790 ret = MIN (ret, tret); | |
2791 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL, | |
2792 is_gimple_val, fb_rvalue); | |
2793 | |
2794 return MIN (ret, tret); | |
2795 } | |
2796 | |
2797 /* Returns true if evaluating EXPR could trap. | |
2798 EXPR is GENERIC, while tree_could_trap_p can be called | |
2799 only on GIMPLE. */ | |
2800 | |
2801 static bool | |
2802 generic_expr_could_trap_p (tree expr) | |
2803 { | |
2804 unsigned i, n; | |
2805 | |
2806 if (!expr || is_gimple_val (expr)) | |
2807 return false; | |
2808 | |
2809 if (!EXPR_P (expr) || tree_could_trap_p (expr)) | |
2810 return true; | |
2811 | |
2812 n = TREE_OPERAND_LENGTH (expr); | |
2813 for (i = 0; i < n; i++) | |
2814 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i))) | |
2815 return true; | |
2816 | |
2817 return false; | |
2818 } | |
2819 | |
/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
   into

   if (p)			if (p)
     t1 = a;			  a;
   else		or		else
     t1 = b;			  b;
   t1;

   The second form is used when *EXPR_P is of type void.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.  FALLBACK describes what kind of value the
   caller can accept (rvalue/lvalue), which decides whether a plain or
   an addressable temporary is needed for a valued COND_EXPR.  */

static enum gimplify_status
gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
{
  tree expr = *expr_p;
  tree tmp, type, arm1, arm2;
  enum gimplify_status ret;
  tree label_true, label_false, label_cont;
  bool have_then_clause_p, have_else_clause_p;
  gimple gimple_cond;
  enum tree_code pred_code;
  gimple_seq seq = NULL;
  location_t loc = EXPR_LOCATION (*expr_p);

  type = TREE_TYPE (expr);

  /* If this COND_EXPR has a value, copy the values into a temporary within
     the arms.  */
  if (! VOID_TYPE_P (type))
    {
      tree result;

      /* If an rvalue is ok or we do not require an lvalue, avoid creating
	 an addressable temporary.  */
      if (((fallback & fb_rvalue)
	   || !(fallback & fb_lvalue))
	  && !TREE_ADDRESSABLE (type))
	{
	  if (gimplify_ctxp->allow_rhs_cond_expr
	      /* If either branch has side effects or could trap, it can't be
		 evaluated unconditionally.  */
	      && !TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1))
	      && !generic_expr_could_trap_p (TREE_OPERAND (*expr_p, 1))
	      && !TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 2))
	      && !generic_expr_could_trap_p (TREE_OPERAND (*expr_p, 2)))
	    return gimplify_pure_cond_expr (expr_p, pre_p);

	  result = tmp = create_tmp_var (TREE_TYPE (expr), "iftmp");
	  ret = GS_ALL_DONE;
	}
      else
	{
	  /* An lvalue is required: store the address of each arm in a
	     pointer temporary and hand back a dereference of it.
	     NB: this local 'type' intentionally shadows the outer one.  */
	  tree type = build_pointer_type (TREE_TYPE (expr));

	  if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
	    TREE_OPERAND (expr, 1) =
	      build_fold_addr_expr_loc (loc, TREE_OPERAND (expr, 1));

	  if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
	    TREE_OPERAND (expr, 2) =
	      build_fold_addr_expr_loc (loc, TREE_OPERAND (expr, 2));

	  tmp = create_tmp_var (type, "iftmp");

	  expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (expr, 0),
			 TREE_OPERAND (expr, 1), TREE_OPERAND (expr, 2));

	  result = build_fold_indirect_ref_loc (loc, tmp);
	}

      /* Build the then clause, 't1 = a;'.  But don't build an assignment
	 if this branch is void; in C++ it can be, if it's a throw.  */
      if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
	TREE_OPERAND (expr, 1)
	  = build2 (MODIFY_EXPR, TREE_TYPE (tmp), tmp, TREE_OPERAND (expr, 1));

      /* Build the else clause, 't1 = b;'.  */
      if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
	TREE_OPERAND (expr, 2)
	  = build2 (MODIFY_EXPR, TREE_TYPE (tmp), tmp, TREE_OPERAND (expr, 2));

      TREE_TYPE (expr) = void_type_node;
      recalculate_side_effects (expr);

      /* Move the COND_EXPR to the prequeue.  */
      gimplify_stmt (&expr, pre_p);

      *expr_p = result;
      return GS_ALL_DONE;
    }

  /* From here on the COND_EXPR is void; lower it to a GIMPLE_COND with
     explicit labels and gotos.  */

  /* Make sure the condition has BOOLEAN_TYPE.  */
  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

  /* Break apart && and || conditions.  */
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
      || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
    {
      expr = shortcut_cond_expr (expr);

      if (expr != *expr_p)
	{
	  *expr_p = expr;

	  /* We can't rely on gimplify_expr to re-gimplify the expanded
	     form properly, as cleanups might cause the target labels to be
	     wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
	     set up a conditional context.  */
	  gimple_push_condition ();
	  gimplify_stmt (expr_p, &seq);
	  gimple_pop_condition (pre_p);
	  gimple_seq_add_seq (pre_p, seq);

	  return GS_ALL_DONE;
	}
    }

  /* Now do the normal gimplification.  */

  /* Gimplify condition.  */
  ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
		       fb_rvalue);
  if (ret == GS_ERROR)
    return GS_ERROR;
  gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);

  gimple_push_condition ();

  /* If an arm is just 'goto label;', reuse LABEL directly as the branch
     target instead of creating an artificial label that immediately jumps
     there.  */
  have_then_clause_p = have_else_clause_p = false;
  if (TREE_OPERAND (expr, 1) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
    {
      label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
      have_then_clause_p = true;
    }
  else
    label_true = create_artificial_label (UNKNOWN_LOCATION);
  if (TREE_OPERAND (expr, 2) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
    {
      label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
      have_else_clause_p = true;
    }
  else
    label_false = create_artificial_label (UNKNOWN_LOCATION);

  gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
				 &arm2);

  gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
				   label_false);

  gimplify_seq_add_stmt (&seq, gimple_cond);
  label_cont = NULL_TREE;
  if (!have_then_clause_p)
    {
      /* For if (...) {} else { code; } put label_true after
	 the else block.  */
      if (TREE_OPERAND (expr, 1) == NULL_TREE
	  && !have_else_clause_p
	  && TREE_OPERAND (expr, 2) != NULL_TREE)
	label_cont = label_true;
      else
	{
	  gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
	  /* have_then_clause_p now records whether the gimplified THEN
	     arm actually produced any statements.  */
	  have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
	  /* For if (...) { code; } else {} or
	     if (...) { code; } else goto label; or
	     if (...) { code; return; } else { ... }
	     label_cont isn't needed.  */
	  if (!have_else_clause_p
	      && TREE_OPERAND (expr, 2) != NULL_TREE
	      && gimple_seq_may_fallthru (seq))
	    {
	      gimple g;
	      label_cont = create_artificial_label (UNKNOWN_LOCATION);

	      g = gimple_build_goto (label_cont);

	      /* GIMPLE_COND's are very low level; they have embedded
		 gotos.  This particular embedded goto should not be marked
		 with the location of the original COND_EXPR, as it would
		 correspond to the COND_EXPR's condition, not the ELSE or the
		 THEN arms.  To avoid marking it with the wrong location, flag
		 it as "no location".  */
	      gimple_set_do_not_emit_location (g);

	      gimplify_seq_add_stmt (&seq, g);
	    }
	}
    }
  if (!have_else_clause_p)
    {
      gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
      have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
    }
  if (label_cont)
    gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));

  gimple_pop_condition (pre_p);
  gimple_seq_add_seq (pre_p, seq);

  /* RET still holds the status of gimplifying the condition; it cannot
     be GS_ERROR here (we returned early above), so this first test is
     purely defensive.  */
  if (ret == GS_ERROR)
    ; /* Do nothing.  */
  else if (have_then_clause_p || have_else_clause_p)
    ret = GS_ALL_DONE;
  else
    {
      /* Both arms are empty; replace the COND_EXPR with its predicate.  */
      expr = TREE_OPERAND (expr, 0);
      gimplify_stmt (&expr, pre_p);
    }

  *expr_p = NULL;
  return ret;
}
3059 | |
3060 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression, | |
3061 to be marked addressable. | |
3062 | |
3063 We cannot rely on such an expression being directly markable if a temporary | |
3064 has been created by the gimplification. In this case, we create another | |
3065 temporary and initialize it with a copy, which will become a store after we | |
3066 mark it addressable. This can happen if the front-end passed us something | |
3067 that it could not mark addressable yet, like a Fortran pass-by-reference | |
3068 parameter (int) floatvar. */ | |
3069 | |
3070 static void | |
3071 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p) | |
3072 { | |
3073 while (handled_component_p (*expr_p)) | |
3074 expr_p = &TREE_OPERAND (*expr_p, 0); | |
3075 if (is_gimple_reg (*expr_p)) | |
3076 *expr_p = get_initialized_tmp_var (*expr_p, seq_p, NULL); | |
3077 } | |
3078 | |
3079 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with | |
3080 a call to __builtin_memcpy. */ | |
3081 | |
3082 static enum gimplify_status | |
3083 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value, | |
3084 gimple_seq *seq_p) | |
3085 { | |
3086 tree t, to, to_ptr, from, from_ptr; | |
3087 gimple gs; | |
3088 location_t loc = EXPR_LOCATION (*expr_p); | |
3089 | |
3090 to = TREE_OPERAND (*expr_p, 0); | |
3091 from = TREE_OPERAND (*expr_p, 1); | |
3092 | |
3093 /* Mark the RHS addressable. Beware that it may not be possible to do so | |
3094 directly if a temporary has been created by the gimplification. */ | |
3095 prepare_gimple_addressable (&from, seq_p); | |
3096 | |
3097 mark_addressable (from); | |
3098 from_ptr = build_fold_addr_expr_loc (loc, from); | |
3099 gimplify_arg (&from_ptr, seq_p, loc); | |
3100 | |
3101 mark_addressable (to); | |
3102 to_ptr = build_fold_addr_expr_loc (loc, to); | |
3103 gimplify_arg (&to_ptr, seq_p, loc); | |
3104 | |
3105 t = implicit_built_in_decls[BUILT_IN_MEMCPY]; | |
3106 | |
3107 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size); | |
3108 | |
3109 if (want_value) | |
3110 { | |
3111 /* tmp = memcpy() */ | |
3112 t = create_tmp_var (TREE_TYPE (to_ptr), NULL); | |
3113 gimple_call_set_lhs (gs, t); | |
3114 gimplify_seq_add_stmt (seq_p, gs); | |
3115 | |
3116 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t); | |
3117 return GS_ALL_DONE; | |
3118 } | |
3119 | |
3120 gimplify_seq_add_stmt (seq_p, gs); | |
3121 *expr_p = NULL; | |
3122 return GS_ALL_DONE; | |
3123 } | |
3124 | |
3125 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with | |
3126 a call to __builtin_memset. In this case we know that the RHS is | |
3127 a CONSTRUCTOR with an empty element list. */ | |
3128 | |
3129 static enum gimplify_status | |
3130 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value, | |
3131 gimple_seq *seq_p) | |
3132 { | |
3133 tree t, from, to, to_ptr; | |
3134 gimple gs; | |
3135 location_t loc = EXPR_LOCATION (*expr_p); | |
3136 | |
3137 /* Assert our assumptions, to abort instead of producing wrong code | |
3138 silently if they are not met. Beware that the RHS CONSTRUCTOR might | |
3139 not be immediately exposed. */ | |
3140 from = TREE_OPERAND (*expr_p, 1); | |
3141 if (TREE_CODE (from) == WITH_SIZE_EXPR) | |
3142 from = TREE_OPERAND (from, 0); | |
3143 | |
3144 gcc_assert (TREE_CODE (from) == CONSTRUCTOR | |
3145 && VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (from))); | |
3146 | |
3147 /* Now proceed. */ | |
3148 to = TREE_OPERAND (*expr_p, 0); | |
3149 | |
3150 to_ptr = build_fold_addr_expr_loc (loc, to); | |
3151 gimplify_arg (&to_ptr, seq_p, loc); | |
3152 t = implicit_built_in_decls[BUILT_IN_MEMSET]; | |
3153 | |
3154 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size); | |
3155 | |
3156 if (want_value) | |
3157 { | |
3158 /* tmp = memset() */ | |
3159 t = create_tmp_var (TREE_TYPE (to_ptr), NULL); | |
3160 gimple_call_set_lhs (gs, t); | |
3161 gimplify_seq_add_stmt (seq_p, gs); | |
3162 | |
3163 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t); | |
3164 return GS_ALL_DONE; | |
3165 } | |
3166 | |
3167 gimplify_seq_add_stmt (seq_p, gs); | |
3168 *expr_p = NULL; | |
3169 return GS_ALL_DONE; | |
3170 } | |
3171 | |
/* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
   determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
   assignment.  Returns non-null if we detect a potential overlap.  */

/* State shared between gimplify_init_ctor_preeval and its walk_tree
   callback gimplify_init_ctor_preeval_1, describing the lhs of the
   initialization being analyzed.  */
struct gimplify_init_ctor_preeval_data
{
  /* The base decl of the lhs object.  May be NULL, in which case we
     have to assume the lhs is indirect.  */
  tree lhs_base_decl;

  /* The alias set of the lhs object, used to test for possible
     conflicts with indirect references in the constructor.  */
  alias_set_type lhs_alias_set;
};
3185 | |
3186 static tree | |
3187 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata) | |
3188 { | |
3189 struct gimplify_init_ctor_preeval_data *data | |
3190 = (struct gimplify_init_ctor_preeval_data *) xdata; | |
3191 tree t = *tp; | |
3192 | |
3193 /* If we find the base object, obviously we have overlap. */ | |
3194 if (data->lhs_base_decl == t) | |
3195 return t; | |
3196 | |
3197 /* If the constructor component is indirect, determine if we have a | |
3198 potential overlap with the lhs. The only bits of information we | |
3199 have to go on at this point are addressability and alias sets. */ | |
3200 if (TREE_CODE (t) == INDIRECT_REF | |
3201 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl)) | |
3202 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t))) | |
3203 return t; | |
3204 | |
3205 /* If the constructor component is a call, determine if it can hide a | |
3206 potential overlap with the lhs through an INDIRECT_REF like above. */ | |
3207 if (TREE_CODE (t) == CALL_EXPR) | |
3208 { | |
3209 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t))); | |
3210 | |
3211 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type)) | |
3212 if (POINTER_TYPE_P (TREE_VALUE (type)) | |
3213 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl)) | |
3214 && alias_sets_conflict_p (data->lhs_alias_set, | |
3215 get_alias_set | |
3216 (TREE_TYPE (TREE_VALUE (type))))) | |
3217 return t; | |
3218 } | |
3219 | |
3220 if (IS_TYPE_OR_DECL_P (t)) | |
3221 *walk_subtrees = 0; | |
3222 return NULL; | |
3223 } | |
3224 | |
3225 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR, | |
3226 force values that overlap with the lhs (as described by *DATA) | |
3227 into temporaries. */ | |
3228 | |
3229 static void | |
3230 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p, | |
3231 struct gimplify_init_ctor_preeval_data *data) | |
3232 { | |
3233 enum gimplify_status one; | |
3234 | |
3235 /* If the value is constant, then there's nothing to pre-evaluate. */ | |
3236 if (TREE_CONSTANT (*expr_p)) | |
3237 { | |
3238 /* Ensure it does not have side effects, it might contain a reference to | |
3239 the object we're initializing. */ | |
3240 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p)); | |
3241 return; | |
3242 } | |
3243 | |
3244 /* If the type has non-trivial constructors, we can't pre-evaluate. */ | |
3245 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p))) | |
3246 return; | |
3247 | |
3248 /* Recurse for nested constructors. */ | |
3249 if (TREE_CODE (*expr_p) == CONSTRUCTOR) | |
3250 { | |
3251 unsigned HOST_WIDE_INT ix; | |
3252 constructor_elt *ce; | |
3253 VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p); | |
3254 | |
3255 for (ix = 0; VEC_iterate (constructor_elt, v, ix, ce); ix++) | |
3256 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data); | |
3257 | |
3258 return; | |
3259 } | |
3260 | |
3261 /* If this is a variable sized type, we must remember the size. */ | |
3262 maybe_with_size_expr (expr_p); | |
3263 | |
3264 /* Gimplify the constructor element to something appropriate for the rhs | |
3265 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know | |
3266 the gimplifier will consider this a store to memory. Doing this | |
3267 gimplification now means that we won't have to deal with complicated | |
3268 language-specific trees, nor trees like SAVE_EXPR that can induce | |
3269 exponential search behavior. */ | |
3270 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue); | |
3271 if (one == GS_ERROR) | |
3272 { | |
3273 *expr_p = NULL; | |
3274 return; | |
3275 } | |
3276 | |
3277 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap | |
3278 with the lhs, since "a = { .x=a }" doesn't make sense. This will | |
3279 always be true for all scalars, since is_gimple_mem_rhs insists on a | |
3280 temporary variable for them. */ | |
3281 if (DECL_P (*expr_p)) | |
3282 return; | |
3283 | |
3284 /* If this is of variable size, we have no choice but to assume it doesn't | |
3285 overlap since we can't make a temporary for it. */ | |
3286 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST) | |
3287 return; | |
3288 | |
3289 /* Otherwise, we must search for overlap ... */ | |
3290 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL)) | |
3291 return; | |
3292 | |
3293 /* ... and if found, force the value into a temporary. */ | |
3294 *expr_p = get_formal_tmp_var (*expr_p, pre_p); | |
3295 } | |
3296 | |
3297 /* A subroutine of gimplify_init_ctor_eval. Create a loop for | |
3298 a RANGE_EXPR in a CONSTRUCTOR for an array. | |
3299 | |
3300 var = lower; | |
3301 loop_entry: | |
3302 object[var] = value; | |
3303 if (var == upper) | |
3304 goto loop_exit; | |
3305 var = var + 1; | |
3306 goto loop_entry; | |
3307 loop_exit: | |
3308 | |
3309 We increment var _after_ the loop exit check because we might otherwise | |
3310 fail if upper == TYPE_MAX_VALUE (type for upper). | |
3311 | |
3312 Note that we never have to deal with SAVE_EXPRs here, because this has | |
3313 already been taken care of for us, in gimplify_init_ctor_preeval(). */ | |
3314 | |
3315 static void gimplify_init_ctor_eval (tree, VEC(constructor_elt,gc) *, | |
3316 gimple_seq *, bool); | |
3317 | |
3318 static void | |
3319 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper, | |
3320 tree value, tree array_elt_type, | |
3321 gimple_seq *pre_p, bool cleared) | |
3322 { | |
3323 tree loop_entry_label, loop_exit_label, fall_thru_label; | |
3324 tree var, var_type, cref, tmp; | |
3325 | |
3326 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION); | |
3327 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION); | |
3328 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION); | |
3329 | |
3330 /* Create and initialize the index variable. */ | |
3331 var_type = TREE_TYPE (upper); | |
3332 var = create_tmp_var (var_type, NULL); | |
3333 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower)); | |
3334 | |
3335 /* Add the loop entry label. */ | |
3336 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label)); | |
3337 | |
3338 /* Build the reference. */ | |
3339 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object), | |
3340 var, NULL_TREE, NULL_TREE); | |
3341 | |
3342 /* If we are a constructor, just call gimplify_init_ctor_eval to do | |
3343 the store. Otherwise just assign value to the reference. */ | |
3344 | |
3345 if (TREE_CODE (value) == CONSTRUCTOR) | |
3346 /* NB we might have to call ourself recursively through | |
3347 gimplify_init_ctor_eval if the value is a constructor. */ | |
3348 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value), | |
3349 pre_p, cleared); | |
3350 else | |
3351 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value)); | |
3352 | |
3353 /* We exit the loop when the index var is equal to the upper bound. */ | |
3354 gimplify_seq_add_stmt (pre_p, | |
3355 gimple_build_cond (EQ_EXPR, var, upper, | |
3356 loop_exit_label, fall_thru_label)); | |
3357 | |
3358 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label)); | |
3359 | |
3360 /* Otherwise, increment the index var... */ | |
3361 tmp = build2 (PLUS_EXPR, var_type, var, | |
3362 fold_convert (var_type, integer_one_node)); | |
3363 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp)); | |
3364 | |
3365 /* ...and jump back to the loop entry. */ | |
3366 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label)); | |
3367 | |
3368 /* Add the loop exit label. */ | |
3369 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label)); | |
3370 } | |
3371 | |
3372 /* Return true if FDECL is accessing a field that is zero sized. */ | |
3373 | |
3374 static bool | |
3375 zero_sized_field_decl (const_tree fdecl) | |
3376 { | |
3377 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl) | |
3378 && integer_zerop (DECL_SIZE (fdecl))) | |
3379 return true; | |
3380 return false; | |
3381 } | |
3382 | |
3383 /* Return true if TYPE is zero sized. */ | |
3384 | |
3385 static bool | |
3386 zero_sized_type (const_tree type) | |
3387 { | |
3388 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type) | |
3389 && integer_zerop (TYPE_SIZE (type))) | |
3390 return true; | |
3391 return false; | |
3392 } | |
3393 | |
/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed first.  The generated statements are appended to PRE_P.  */

static void
gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
			 gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  /* For an array LHS, remember the element type for building ARRAY_REFs.  */
  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
	continue;

      /* Zero stores into already-cleared storage are redundant.  */
      if (cleared && initializer_zerop (value))
	continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
	 so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
	 happen with calls to functions returning a zero-sized type, which
	 we shouldn't discard.  As a number of downstream passes don't
	 expect sets of zero-sized fields, we rely on the gimplification of
	 the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
	continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
	 whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lower = TREE_OPERAND (purpose, 0);
	  tree upper = TREE_OPERAND (purpose, 1);

	  /* If the lower bound is equal to upper, just treat it as if
	     upper was the index.  */
	  if (simple_cst_equal (lower, upper))
	    purpose = upper;
	  else
	    {
	      gimplify_init_ctor_eval_range (object, lower, upper, value,
					     array_elt_type, pre_p, cleared);
	      continue;
	    }
	}

      /* Build the reference to the element being initialized: an
	 ARRAY_REF for arrays, a COMPONENT_REF for record fields.  */
      if (array_elt_type)
	{
	  /* Do not use bitsizetype for ARRAY_REF indices.  */
	  if (TYPE_DOMAIN (TREE_TYPE (object)))
	    purpose = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
				    purpose);
	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
			 purpose, NULL_TREE, NULL_TREE);
	}
      else
	{
	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
			 unshare_expr (object), purpose, NULL_TREE);
	}

      /* Recurse for nested constructors (except vector constants, which
	 are emitted as a single initializer); otherwise emit the store.  */
      if (TREE_CODE (value) == CONSTRUCTOR
	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
				 pre_p, cleared);
      else
	{
	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
	  gimplify_and_add (init, pre_p);
	  ggc_free (init);
	}
    }
}
3481 | |
3482 | |
3483 /* Returns the appropriate RHS predicate for this LHS. */ | |
3484 | |
3485 gimple_predicate | |
3486 rhs_predicate_for (tree lhs) | |
3487 { | |
3488 if (is_gimple_reg (lhs)) | |
3489 return is_gimple_reg_rhs_or_call; | |
3490 else | |
3491 return is_gimple_mem_rhs_or_call; | |
3492 } | |
3493 | |
3494 /* Gimplify a C99 compound literal expression. This just means adding | |
3495 the DECL_EXPR before the current statement and using its anonymous | |
3496 decl instead. */ | |
3497 | |
3498 static enum gimplify_status | |
3499 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p) | |
3500 { | |
3501 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p); | |
3502 tree decl = DECL_EXPR_DECL (decl_s); | |
3503 /* Mark the decl as addressable if the compound literal | |
3504 expression is addressable now, otherwise it is marked too late | |
3505 after we gimplify the initialization expression. */ | |
3506 if (TREE_ADDRESSABLE (*expr_p)) | |
3507 TREE_ADDRESSABLE (decl) = 1; | |
3508 | |
3509 /* Preliminarily mark non-addressed complex variables as eligible | |
3510 for promotion to gimple registers. We'll transform their uses | |
3511 as we find them. */ | |
3512 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE | |
3513 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE) | |
3514 && !TREE_THIS_VOLATILE (decl) | |
3515 && !needs_to_live_in_memory (decl)) | |
3516 DECL_GIMPLE_REG_P (decl) = 1; | |
3517 | |
3518 /* This decl isn't mentioned in the enclosing block, so add it to the | |
3519 list of temps. FIXME it seems a bit of a kludge to say that | |
3520 anonymous artificial vars aren't pushed, but everything else is. */ | |
3521 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl)) | |
3522 gimple_add_tmp_var (decl); | |
3523 | |
3524 gimplify_and_add (decl_s, pre_p); | |
3525 *expr_p = decl; | |
3526 return GS_OK; | |
3527 } | |
3528 | |
3529 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR, | |
3530 return a new CONSTRUCTOR if something changed. */ | |
3531 | |
3532 static tree | |
3533 optimize_compound_literals_in_ctor (tree orig_ctor) | |
3534 { | |
3535 tree ctor = orig_ctor; | |
3536 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (ctor); | |
3537 unsigned int idx, num = VEC_length (constructor_elt, elts); | |
3538 | |
3539 for (idx = 0; idx < num; idx++) | |
3540 { | |
3541 tree value = VEC_index (constructor_elt, elts, idx)->value; | |
3542 tree newval = value; | |
3543 if (TREE_CODE (value) == CONSTRUCTOR) | |
3544 newval = optimize_compound_literals_in_ctor (value); | |
3545 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR) | |
3546 { | |
3547 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value); | |
3548 tree decl = DECL_EXPR_DECL (decl_s); | |
3549 tree init = DECL_INITIAL (decl); | |
3550 | |
3551 if (!TREE_ADDRESSABLE (value) | |
3552 && !TREE_ADDRESSABLE (decl) | |
3553 && init) | |
3554 newval = optimize_compound_literals_in_ctor (init); | |
3555 } | |
3556 if (newval == value) | |
3557 continue; | |
3558 | |
3559 if (ctor == orig_ctor) | |
3560 { | |
3561 ctor = copy_node (orig_ctor); | |
3562 CONSTRUCTOR_ELTS (ctor) = VEC_copy (constructor_elt, gc, elts); | |
3563 elts = CONSTRUCTOR_ELTS (ctor); | |
3564 } | |
3565 VEC_index (constructor_elt, elts, idx)->value = newval; | |
3566 } | |
3567 return ctor; | |
3568 } | |
3569 | |
3570 | |
3571 | |
/* A subroutine of gimplify_modify_expr.  Break out elements of a
   CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.

   Note that we still need to clear any elements that don't have explicit
   initializers, so if not all elements are initialized we keep the
   original MODIFY_EXPR, we just remove all of the constructor elements.

   If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
   GS_ERROR if we would have to create a temporary when gimplifying
   this constructor.  Otherwise, return GS_OK.

   If NOTIFY_TEMP_CREATION is false, just do the gimplification.  */

static enum gimplify_status
gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			   bool want_value, bool notify_temp_creation)
{
  tree object, ctor, type;
  enum gimplify_status ret;
  VEC(constructor_elt,gc) *elts;

  gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);

  /* Gimplify the LHS first, unless we are only probing whether a
     temporary would be needed.  */
  if (!notify_temp_creation)
    {
      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  /* Flatten non-addressed compound literals inside the constructor
     before inspecting it.  */
  object = TREE_OPERAND (*expr_p, 0);
  ctor = TREE_OPERAND (*expr_p, 1) =
    optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
  type = TREE_TYPE (ctor);
  elts = CONSTRUCTOR_ELTS (ctor);
  ret = GS_ALL_DONE;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case ARRAY_TYPE:
      {
	struct gimplify_init_ctor_preeval_data preeval_data;
	HOST_WIDE_INT num_type_elements, num_ctor_elements;
	HOST_WIDE_INT num_nonzero_elements;
	bool cleared, valid_const_initializer;

	/* Aggregate types must lower constructors to initialization of
	   individual elements.  The exception is that a CONSTRUCTOR node
	   with no elements indicates zero-initialization of the whole.  */
	if (VEC_empty (constructor_elt, elts))
	  {
	    if (notify_temp_creation)
	      return GS_OK;
	    break;
	  }

	/* Fetch information about the constructor to direct later processing.
	   We might want to make static versions of it in various cases, and
	   can only do so if it known to be a valid constant initializer.  */
	valid_const_initializer
	  = categorize_ctor_elements (ctor, &num_nonzero_elements,
				      &num_ctor_elements, &cleared);

	/* If a const aggregate variable is being initialized, then it
	   should never be a lose to promote the variable to be static.  */
	if (valid_const_initializer
	    && num_nonzero_elements > 1
	    && TREE_READONLY (object)
	    && TREE_CODE (object) == VAR_DECL
	    && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
	  {
	    /* Promotion to a static with this initializer would make the
	       probe's "no temporary" guarantee false, so report it.  */
	    if (notify_temp_creation)
	      return GS_ERROR;
	    DECL_INITIAL (object) = ctor;
	    TREE_STATIC (object) = 1;
	    if (!DECL_NAME (object))
	      DECL_NAME (object) = create_tmp_var_name ("C");
	    walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);

	    /* ??? C++ doesn't automatically append a .<number> to the
	       assembler name, and even when it does, it looks a FE private
	       data structures to figure out what that number should be,
	       which are not set for this variable.  I suppose this is
	       important for local statics for inline functions, which aren't
	       "local" in the object file sense.  So in order to get a unique
	       TU-local symbol, we must invoke the lhd version now.  */
	    lhd_set_decl_assembler_name (object);

	    *expr_p = NULL_TREE;
	    break;
	  }

	/* If there are "lots" of initialized elements, even discounting
	   those that are not address constants (and thus *must* be
	   computed at runtime), then partition the constructor into
	   constant and non-constant parts.  Block copy the constant
	   parts in, then generate code for the non-constant parts.  */
	/* TODO.  There's code in cp/typeck.c to do this.  */

	num_type_elements = count_type_elements (type, true);

	/* If count_type_elements could not determine number of type elements
	   for a constant-sized object, assume clearing is needed.
	   Don't do this for variable-sized objects, as store_constructor
	   will ignore the clearing of variable-sized objects.  */
	if (num_type_elements < 0 && int_size_in_bytes (type) >= 0)
	  cleared = true;
	/* If there are "lots" of zeros, then block clear the object first.  */
	else if (num_type_elements - num_nonzero_elements
		 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
		 && num_nonzero_elements < num_type_elements/4)
	  cleared = true;
	/* ??? This bit ought not be needed.  For any element not present
	   in the initializer, we should simply set them to zero.  Except
	   we'd need to *find* the elements that are not present, and that
	   requires trickery to avoid quadratic compile-time behavior in
	   large cases or excessive memory use in small cases.  */
	else if (num_ctor_elements < num_type_elements)
	  cleared = true;

	/* If there are "lots" of initialized elements, and all of them
	   are valid address constants, then the entire initializer can
	   be dropped to memory, and then memcpy'd out.  Don't do this
	   for sparse arrays, though, as it's more efficient to follow
	   the standard CONSTRUCTOR behavior of memset followed by
	   individual element initialization.  Also don't do this for small
	   all-zero initializers (which aren't big enough to merit
	   clearing), and don't try to make bitwise copies of
	   TREE_ADDRESSABLE types.  */
	if (valid_const_initializer
	    && !(cleared || num_nonzero_elements == 0)
	    && !TREE_ADDRESSABLE (type))
	  {
	    HOST_WIDE_INT size = int_size_in_bytes (type);
	    unsigned int align;

	    /* ??? We can still get unbounded array types, at least
	       from the C++ front end.  This seems wrong, but attempt
	       to work around it for now.  */
	    if (size < 0)
	      {
		size = int_size_in_bytes (TREE_TYPE (object));
		if (size >= 0)
		  TREE_TYPE (ctor) = type = TREE_TYPE (object);
	      }

	    /* Find the maximum alignment we can assume for the object.  */
	    /* ??? Make use of DECL_OFFSET_ALIGN.  */
	    if (DECL_P (object))
	      align = DECL_ALIGN (object);
	    else
	      align = TYPE_ALIGN (type);

	    if (size > 0
		&& num_nonzero_elements > 1
		&& !can_move_by_pieces (size, align))
	      {
		tree new_tree;

		/* A static temporary would be created below; report it
		   in probe mode.  */
		if (notify_temp_creation)
		  return GS_ERROR;

		new_tree = create_tmp_var_raw (type, "C");

		gimple_add_tmp_var (new_tree);
		TREE_STATIC (new_tree) = 1;
		TREE_READONLY (new_tree) = 1;
		DECL_INITIAL (new_tree) = ctor;
		if (align > DECL_ALIGN (new_tree))
		  {
		    DECL_ALIGN (new_tree) = align;
		    DECL_USER_ALIGN (new_tree) = 1;
		  }
		walk_tree (&DECL_INITIAL (new_tree), force_labels_r, NULL, NULL);

		TREE_OPERAND (*expr_p, 1) = new_tree;

		/* This is no longer an assignment of a CONSTRUCTOR, but
		   we still may have processing to do on the LHS.  So
		   pretend we didn't do anything here to let that happen.  */
		return GS_UNHANDLED;
	      }
	  }

	if (notify_temp_creation)
	  return GS_OK;

	/* If there are nonzero elements, pre-evaluate to capture elements
	   overlapping with the lhs into temporaries.  We must do this before
	   clearing to fetch the values before they are zeroed-out.  */
	if (num_nonzero_elements > 0)
	  {
	    preeval_data.lhs_base_decl = get_base_address (object);
	    if (!DECL_P (preeval_data.lhs_base_decl))
	      preeval_data.lhs_base_decl = NULL;
	    preeval_data.lhs_alias_set = get_alias_set (object);

	    gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
					pre_p, post_p, &preeval_data);
	  }

	if (cleared)
	  {
	    /* Zap the CONSTRUCTOR element list, which simplifies this case.
	       Note that we still have to gimplify, in order to handle the
	       case of variable sized types.  Avoid shared tree structures.  */
	    CONSTRUCTOR_ELTS (ctor) = NULL;
	    TREE_SIDE_EFFECTS (ctor) = 0;
	    object = unshare_expr (object);
	    gimplify_stmt (expr_p, pre_p);
	  }

	/* If we have not block cleared the object, or if there are nonzero
	   elements in the constructor, add assignments to the individual
	   scalar fields of the object.  */
	if (!cleared || num_nonzero_elements > 0)
	  gimplify_init_ctor_eval (object, elts, pre_p, cleared);

	*expr_p = NULL_TREE;
      }
      break;

    case COMPLEX_TYPE:
      {
	tree r, i;

	if (notify_temp_creation)
	  return GS_OK;

	/* Extract the real and imaginary parts out of the ctor.  */
	gcc_assert (VEC_length (constructor_elt, elts) == 2);
	r = VEC_index (constructor_elt, elts, 0)->value;
	i = VEC_index (constructor_elt, elts, 1)->value;
	if (r == NULL || i == NULL)
	  {
	    /* Missing parts default to zero of the element type.  */
	    tree zero = fold_convert (TREE_TYPE (type), integer_zero_node);
	    if (r == NULL)
	      r = zero;
	    if (i == NULL)
	      i = zero;
	  }

	/* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
	   represent creation of a complex value.  */
	if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
	  {
	    ctor = build_complex (type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	  }
	else
	  {
	    ctor = build2 (COMPLEX_EXPR, type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
				 pre_p,
				 post_p,
				 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
				 fb_rvalue);
	  }
      }
      break;

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT ix;
	constructor_elt *ce;

	if (notify_temp_creation)
	  return GS_OK;

	/* Go ahead and simplify constant constructors to VECTOR_CST.  */
	if (TREE_CONSTANT (ctor))
	  {
	    bool constant_p = true;
	    tree value;

	    /* Even when ctor is constant, it might contain non-*_CST
	       elements, such as addresses or trapping values like
	       1.0/0.0 - 1.0/0.0.  Such expressions don't belong
	       in VECTOR_CST nodes.  */
	    FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
	      if (!CONSTANT_CLASS_P (value))
		{
		  constant_p = false;
		  break;
		}

	    if (constant_p)
	      {
		TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
		break;
	      }

	    /* Don't reduce an initializer constant even if we can't
	       make a VECTOR_CST.  It won't do anything for us, and it'll
	       prevent us from representing it as a single constant.  */
	    if (initializer_constant_valid_p (ctor, type))
	      break;

	    TREE_CONSTANT (ctor) = 0;
	  }

	/* Vector types use CONSTRUCTOR all the way through gimple
	   compilation as a general initializer.  */
	for (ix = 0; VEC_iterate (constructor_elt, elts, ix, ce); ix++)
	  {
	    enum gimplify_status tret;
	    tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
				  fb_rvalue);
	    if (tret == GS_ERROR)
	      ret = GS_ERROR;
	  }
	if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
      }
      break;

    default:
      /* So how did we get a CONSTRUCTOR for a scalar type?  */
      gcc_unreachable ();
    }

  if (ret == GS_ERROR)
    return GS_ERROR;
  else if (want_value)
    {
      *expr_p = object;
      return GS_OK;
    }
  else
    {
      /* If we have gimplified both sides of the initializer but have
	 not emitted an assignment, do so now.  */
      if (*expr_p)
	{
	  tree lhs = TREE_OPERAND (*expr_p, 0);
	  tree rhs = TREE_OPERAND (*expr_p, 1);
	  gimple init = gimple_build_assign (lhs, rhs);
	  gimplify_seq_add_stmt (pre_p, init);
	  *expr_p = NULL;
	}

      return GS_ALL_DONE;
    }
}
3921 | |
3922 /* Given a pointer value OP0, return a simplified version of an | |
3923 indirection through OP0, or NULL_TREE if no simplification is | |
3924 possible. Note that the resulting type may be different from | |
3925 the type pointed to in the sense that it is still compatible | |
3926 from the langhooks point of view. */ | |
3927 | |
3928 tree | |
3929 gimple_fold_indirect_ref (tree t) | |
3930 { | |
3931 tree type = TREE_TYPE (TREE_TYPE (t)); | |
3932 tree sub = t; | |
3933 tree subtype; | |
3934 | |
3935 STRIP_USELESS_TYPE_CONVERSION (sub); | |
3936 subtype = TREE_TYPE (sub); | |
3937 if (!POINTER_TYPE_P (subtype)) | |
3938 return NULL_TREE; | |
3939 | |
3940 if (TREE_CODE (sub) == ADDR_EXPR) | |
3941 { | |
3942 tree op = TREE_OPERAND (sub, 0); | |
3943 tree optype = TREE_TYPE (op); | |
3944 /* *&p => p */ | |
3945 if (useless_type_conversion_p (type, optype)) | |
3946 return op; | |
3947 | |
3948 /* *(foo *)&fooarray => fooarray[0] */ | |
3949 if (TREE_CODE (optype) == ARRAY_TYPE | |
3950 && useless_type_conversion_p (type, TREE_TYPE (optype))) | |
3951 { | |
3952 tree type_domain = TYPE_DOMAIN (optype); | |
3953 tree min_val = size_zero_node; | |
3954 if (type_domain && TYPE_MIN_VALUE (type_domain)) | |
3955 min_val = TYPE_MIN_VALUE (type_domain); | |
3956 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE); | |
3957 } | |
3958 } | |
3959 | |
3960 /* *(foo *)fooarrptr => (*fooarrptr)[0] */ | |
3961 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE | |
3962 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype)))) | |
3963 { | |
3964 tree type_domain; | |
3965 tree min_val = size_zero_node; | |
3966 tree osub = sub; | |
3967 sub = gimple_fold_indirect_ref (sub); | |
3968 if (! sub) | |
3969 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub); | |
3970 type_domain = TYPE_DOMAIN (TREE_TYPE (sub)); | |
3971 if (type_domain && TYPE_MIN_VALUE (type_domain)) | |
3972 min_val = TYPE_MIN_VALUE (type_domain); | |
3973 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE); | |
3974 } | |
3975 | |
3976 return NULL_TREE; | |
3977 } | |
3978 | |
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  This may only be applied to a rhs of an expression.
   Note that the resulting type may be different from the type pointed
   to in the sense that it is still compatible from the langhooks
   point of view.  */

static tree
gimple_fold_indirect_ref_rhs (tree t)
{
  /* Currently a plain wrapper around gimple_fold_indirect_ref;
     presumably kept as a separate entry point so RHS-specific
     restrictions can be added in one place.  */
  return gimple_fold_indirect_ref (t);
}
3991 | |
/* Subroutine of gimplify_modify_expr to do simplifications of
   MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
   something changes.

   Protocol: RET starts as GS_OK; each iteration re-dispatches on the
   (possibly rewritten) RHS code.  A case that makes progress leaves
   RET as GS_OK so the loop runs again; a case that has nothing to do
   sets GS_UNHANDLED, which terminates the loop and tells the caller
   to continue with generic gimplification.  Some cases return
   directly with a final status.  */

static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
			  gimple_seq *pre_p, gimple_seq *post_p,
			  bool want_value)
{
  enum gimplify_status ret = GS_OK;

  while (ret != GS_UNHANDLED)
    switch (TREE_CODE (*from_p))
      {
      case VAR_DECL:
	/* If we're assigning from a read-only variable initialized with
	   a constructor, do the direct assignment from the constructor,
	   but only if neither source nor target are volatile since this
	   latter assignment might end up being done on a per-field basis.  */
	if (DECL_INITIAL (*from_p)
	    && TREE_READONLY (*from_p)
	    && !TREE_THIS_VOLATILE (*from_p)
	    && !TREE_THIS_VOLATILE (*to_p)
	    && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
	  {
	    tree old_from = *from_p;

	    /* Move the constructor into the RHS.  */
	    *from_p = unshare_expr (DECL_INITIAL (*from_p));

	    /* Let's see if gimplify_init_constructor will need to put
	       it in memory.  If so, revert the change.  */
	    ret = gimplify_init_constructor (expr_p, NULL, NULL, false, true);
	    if (ret == GS_ERROR)
	      {
		*from_p = old_from;
		/* Fall through.  */
	      }
	    else
	      {
		ret = GS_OK;
		break;
	      }
	  }
	ret = GS_UNHANDLED;
	break;
      case INDIRECT_REF:
	{
	  /* If we have code like

	     *(const A*)(A*)&x

	     where the type of "x" is a (possibly cv-qualified variant
	     of "A"), treat the entire expression as identical to "x".
	     This kind of code arises in C++ when an object is bound
	     to a const reference, and if "x" is a TARGET_EXPR we want
	     to take advantage of the optimization below.  */
	  tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
	  if (t)
	    {
	      *from_p = t;
	      ret = GS_OK;
	    }
	  else
	    ret = GS_UNHANDLED;
	  break;
	}

      case TARGET_EXPR:
	{
	  /* If we are initializing something from a TARGET_EXPR, strip the
	     TARGET_EXPR and initialize it directly, if possible.  This can't
	     be done if the initializer is void, since that implies that the
	     temporary is set in some non-trivial way.

	     ??? What about code that pulls out the temp and uses it
	     elsewhere? I think that such code never uses the TARGET_EXPR as
	     an initializer.  If I'm wrong, we'll die because the temp won't
	     have any RTL.  In that case, I guess we'll need to replace
	     references somehow.  */
	  tree init = TARGET_EXPR_INITIAL (*from_p);

	  if (init
	      && !VOID_TYPE_P (TREE_TYPE (init)))
	    {
	      *from_p = init;
	      ret = GS_OK;
	    }
	  else
	    ret = GS_UNHANDLED;
	}
	break;

      case COMPOUND_EXPR:
	/* Remove any COMPOUND_EXPR in the RHS so the following cases will be
	   caught.  */
	gimplify_compound_expr (from_p, pre_p, true);
	ret = GS_OK;
	break;

      case CONSTRUCTOR:
	/* If we're initializing from a CONSTRUCTOR, break this into
	   individual MODIFY_EXPRs.  */
	return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
					  false);

      case COND_EXPR:
	/* If we're assigning to a non-register type, push the assignment
	   down into the branches.  This is mandatory for ADDRESSABLE types,
	   since we cannot generate temporaries for such, but it saves a
	   copy in other cases as well.  */
	if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
	  {
	    /* This code should mirror the code in gimplify_cond_expr.  */
	    enum tree_code code = TREE_CODE (*expr_p);
	    tree cond = *from_p;
	    tree result = *to_p;

	    ret = gimplify_expr (&result, pre_p, post_p,
				 is_gimple_lvalue, fb_lvalue);
	    if (ret != GS_ERROR)
	      ret = GS_OK;

	    if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
	      TREE_OPERAND (cond, 1)
		= build2 (code, void_type_node, result,
			  TREE_OPERAND (cond, 1));
	    if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
	      TREE_OPERAND (cond, 2)
		= build2 (code, void_type_node, unshare_expr (result),
			  TREE_OPERAND (cond, 2));

	    TREE_TYPE (cond) = void_type_node;
	    recalculate_side_effects (cond);

	    if (want_value)
	      {
		gimplify_and_add (cond, pre_p);
		*expr_p = unshare_expr (result);
	      }
	    else
	      *expr_p = cond;
	    return ret;
	  }
	else
	  ret = GS_UNHANDLED;
	break;

      case CALL_EXPR:
	/* For calls that return in memory, give *to_p as the CALL_EXPR's
	   return slot so that we don't generate a temporary.  */
	if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
	    && aggregate_value_p (*from_p, *from_p))
	  {
	    bool use_target;

	    if (!(rhs_predicate_for (*to_p))(*from_p))
	      /* If we need a temporary, *to_p isn't accurate.  */
	      use_target = false;
	    else if (TREE_CODE (*to_p) == RESULT_DECL
		     && DECL_NAME (*to_p) == NULL_TREE
		     && needs_to_live_in_memory (*to_p))
	      /* It's OK to use the return slot directly unless it's an NRV. */
	      use_target = true;
	    else if (is_gimple_reg_type (TREE_TYPE (*to_p))
		     || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
	      /* Don't force regs into memory.  */
	      use_target = false;
	    else if (TREE_CODE (*expr_p) == INIT_EXPR)
	      /* It's OK to use the target directly if it's being
		 initialized.  */
	      use_target = true;
	    else if (!is_gimple_non_addressable (*to_p))
	      /* Don't use the original target if it's already addressable;
		 if its address escapes, and the called function uses the
		 NRV optimization, a conforming program could see *to_p
		 change before the called function returns; see c++/19317.
		 When optimizing, the return_slot pass marks more functions
		 as safe after we have escape info.  */
	      use_target = false;
	    else
	      use_target = true;

	    if (use_target)
	      {
		CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
		mark_addressable (*to_p);
	      }
	  }

	ret = GS_UNHANDLED;
	break;

	/* If we're initializing from a container, push the initialization
	   inside it.  */
      case CLEANUP_POINT_EXPR:
      case BIND_EXPR:
      case STATEMENT_LIST:
	{
	  tree wrap = *from_p;
	  tree t;

	  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
			       fb_lvalue);
	  if (ret != GS_ERROR)
	    ret = GS_OK;

	  t = voidify_wrapper_expr (wrap, *expr_p);
	  gcc_assert (t == *expr_p);

	  if (want_value)
	    {
	      gimplify_and_add (wrap, pre_p);
	      *expr_p = unshare_expr (*to_p);
	    }
	  else
	    *expr_p = wrap;
	  return GS_OK;
	}

      case COMPOUND_LITERAL_EXPR:
	{
	  tree complit = TREE_OPERAND (*expr_p, 1);
	  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
	  tree decl = DECL_EXPR_DECL (decl_s);
	  tree init = DECL_INITIAL (decl);

	  /* struct T x = (struct T) { 0, 1, 2 } can be optimized
	     into struct T x = { 0, 1, 2 } if the address of the
	     compound literal has never been taken.  */
	  if (!TREE_ADDRESSABLE (complit)
	      && !TREE_ADDRESSABLE (decl)
	      && init)
	    {
	      *expr_p = copy_node (*expr_p);
	      TREE_OPERAND (*expr_p, 1) = init;
	      return GS_OK;
	    }
	}
	/* FALLTHRU -- an addressable compound literal is handled like
	   any other unhandled RHS.  */

      default:
	ret = GS_UNHANDLED;
	break;
      }

  return ret;
}
4239 | |
4240 | |
4241 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is | |
4242 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with | |
4243 DECL_GIMPLE_REG_P set. | |
4244 | |
4245 IMPORTANT NOTE: This promotion is performed by introducing a load of the | |
4246 other, unmodified part of the complex object just before the total store. | |
4247 As a consequence, if the object is still uninitialized, an undefined value | |
4248 will be loaded into a register, which may result in a spurious exception | |
4249 if the register is floating-point and the value happens to be a signaling | |
4250 NaN for example. Then the fully-fledged complex operations lowering pass | |
4251 followed by a DCE pass are necessary in order to fix things up. */ | |
4252 | |
4253 static enum gimplify_status | |
4254 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p, | |
4255 bool want_value) | |
4256 { | |
4257 enum tree_code code, ocode; | |
4258 tree lhs, rhs, new_rhs, other, realpart, imagpart; | |
4259 | |
4260 lhs = TREE_OPERAND (*expr_p, 0); | |
4261 rhs = TREE_OPERAND (*expr_p, 1); | |
4262 code = TREE_CODE (lhs); | |
4263 lhs = TREE_OPERAND (lhs, 0); | |
4264 | |
4265 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR; | |
4266 other = build1 (ocode, TREE_TYPE (rhs), lhs); | |
4267 other = get_formal_tmp_var (other, pre_p); | |
4268 | |
4269 realpart = code == REALPART_EXPR ? rhs : other; | |
4270 imagpart = code == REALPART_EXPR ? other : rhs; | |
4271 | |
4272 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart)) | |
4273 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart); | |
4274 else | |
4275 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart); | |
4276 | |
4277 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs)); | |
4278 *expr_p = (want_value) ? rhs : NULL_TREE; | |
4279 | |
4280 return GS_ALL_DONE; | |
4281 } | |
4282 | |
4283 | |
4284 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P. | |
4285 | |
4286 modify_expr | |
4287 : varname '=' rhs | |
4288 | '*' ID '=' rhs | |
4289 | |
4290 PRE_P points to the list where side effects that must happen before | |
4291 *EXPR_P should be stored. | |
4292 | |
4293 POST_P points to the list where side effects that must happen after | |
4294 *EXPR_P should be stored. | |
4295 | |
4296 WANT_VALUE is nonzero iff we want to use the value of this expression | |
4297 in another expression. */ | |
4298 | |
4299 static enum gimplify_status | |
4300 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p, | |
4301 bool want_value) | |
4302 { | |
4303 tree *from_p = &TREE_OPERAND (*expr_p, 1); | |
4304 tree *to_p = &TREE_OPERAND (*expr_p, 0); | |
4305 enum gimplify_status ret = GS_UNHANDLED; | |
4306 gimple assign; | |
4307 location_t loc = EXPR_LOCATION (*expr_p); | |
4308 | |
4309 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR | |
4310 || TREE_CODE (*expr_p) == INIT_EXPR); | |
4311 | |
4312 /* Insert pointer conversions required by the middle-end that are not | |
4313 required by the frontend. This fixes middle-end type checking for | |
4314 for example gcc.dg/redecl-6.c. */ | |
4315 if (POINTER_TYPE_P (TREE_TYPE (*to_p))) | |
4316 { | |
4317 STRIP_USELESS_TYPE_CONVERSION (*from_p); | |
4318 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p))) | |
4319 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p); | |
4320 } | |
4321 | |
4322 /* See if any simplifications can be done based on what the RHS is. */ | |
4323 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p, | |
4324 want_value); | |
4325 if (ret != GS_UNHANDLED) | |
4326 return ret; | |
4327 | |
4328 /* For zero sized types only gimplify the left hand side and right hand | |
4329 side as statements and throw away the assignment. Do this after | |
4330 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable | |
4331 types properly. */ | |
4332 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value) | |
4333 { | |
4334 gimplify_stmt (from_p, pre_p); | |
4335 gimplify_stmt (to_p, pre_p); | |
4336 *expr_p = NULL_TREE; | |
4337 return GS_ALL_DONE; | |
4338 } | |
4339 | |
4340 /* If the value being copied is of variable width, compute the length | |
4341 of the copy into a WITH_SIZE_EXPR. Note that we need to do this | |
4342 before gimplifying any of the operands so that we can resolve any | |
4343 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses | |
4344 the size of the expression to be copied, not of the destination, so | |
4345 that is what we must do here. */ | |
4346 maybe_with_size_expr (from_p); | |
4347 | |
4348 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue); | |
4349 if (ret == GS_ERROR) | |
4350 return ret; | |
4351 | |
4352 /* As a special case, we have to temporarily allow for assignments | |
4353 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is | |
4354 a toplevel statement, when gimplifying the GENERIC expression | |
4355 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple | |
4356 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>. | |
4357 | |
4358 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To | |
4359 prevent gimplify_expr from trying to create a new temporary for | |
4360 foo's LHS, we tell it that it should only gimplify until it | |
4361 reaches the CALL_EXPR. On return from gimplify_expr, the newly | |
4362 created GIMPLE_CALL <foo> will be the last statement in *PRE_P | |
4363 and all we need to do here is set 'a' to be its LHS. */ | |
4364 ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p), | |
4365 fb_rvalue); | |
4366 if (ret == GS_ERROR) | |
4367 return ret; | |
4368 | |
4369 /* Now see if the above changed *from_p to something we handle specially. */ | |
4370 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p, | |
4371 want_value); | |
4372 if (ret != GS_UNHANDLED) | |
4373 return ret; | |
4374 | |
4375 /* If we've got a variable sized assignment between two lvalues (i.e. does | |
4376 not involve a call), then we can make things a bit more straightforward | |
4377 by converting the assignment to memcpy or memset. */ | |
4378 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR) | |
4379 { | |
4380 tree from = TREE_OPERAND (*from_p, 0); | |
4381 tree size = TREE_OPERAND (*from_p, 1); | |
4382 | |
4383 if (TREE_CODE (from) == CONSTRUCTOR) | |
4384 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p); | |
4385 | |
4386 if (is_gimple_addressable (from)) | |
4387 { | |
4388 *from_p = from; | |
4389 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value, | |
4390 pre_p); | |
4391 } | |
4392 } | |
4393 | |
4394 /* Transform partial stores to non-addressable complex variables into | |
4395 total stores. This allows us to use real instead of virtual operands | |
4396 for these variables, which improves optimization. */ | |
4397 if ((TREE_CODE (*to_p) == REALPART_EXPR | |
4398 || TREE_CODE (*to_p) == IMAGPART_EXPR) | |
4399 && is_gimple_reg (TREE_OPERAND (*to_p, 0))) | |
4400 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value); | |
4401 | |
4402 /* Try to alleviate the effects of the gimplification creating artificial | |
4403 temporaries (see for example is_gimple_reg_rhs) on the debug info. */ | |
4404 if (!gimplify_ctxp->into_ssa | |
4405 && DECL_P (*from_p) | |
4406 && DECL_IGNORED_P (*from_p) | |
4407 && DECL_P (*to_p) | |
4408 && !DECL_IGNORED_P (*to_p)) | |
4409 { | |
4410 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p)) | |
4411 DECL_NAME (*from_p) | |
4412 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p))); | |
4413 DECL_DEBUG_EXPR_IS_FROM (*from_p) = 1; | |
4414 SET_DECL_DEBUG_EXPR (*from_p, *to_p); | |
4415 } | |
4416 | |
4417 if (TREE_CODE (*from_p) == CALL_EXPR) | |
4418 { | |
4419 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL | |
4420 instead of a GIMPLE_ASSIGN. */ | |
4421 assign = gimple_build_call_from_tree (*from_p); | |
4422 gimple_call_set_lhs (assign, *to_p); | |
4423 } | |
4424 else | |
4425 { | |
4426 assign = gimple_build_assign (*to_p, *from_p); | |
4427 gimple_set_location (assign, EXPR_LOCATION (*expr_p)); | |
4428 } | |
4429 | |
4430 gimplify_seq_add_stmt (pre_p, assign); | |
4431 | |
4432 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p)) | |
4433 { | |
4434 /* If we've somehow already got an SSA_NAME on the LHS, then | |
4435 we've probably modified it twice. Not good. */ | |
4436 gcc_assert (TREE_CODE (*to_p) != SSA_NAME); | |
4437 *to_p = make_ssa_name (*to_p, assign); | |
4438 gimple_set_lhs (assign, *to_p); | |
4439 } | |
4440 | |
4441 if (want_value) | |
4442 { | |
4443 *expr_p = unshare_expr (*to_p); | |
4444 return GS_OK; | |
4445 } | |
4446 else | |
4447 *expr_p = NULL; | |
4448 | |
4449 return GS_ALL_DONE; | |
4450 } | |
4451 | |
4452 /* Gimplify a comparison between two variable-sized objects. Do this | |
4453 with a call to BUILT_IN_MEMCMP. */ | |
4454 | |
4455 static enum gimplify_status | |
4456 gimplify_variable_sized_compare (tree *expr_p) | |
4457 { | |
4458 tree op0 = TREE_OPERAND (*expr_p, 0); | |
4459 tree op1 = TREE_OPERAND (*expr_p, 1); | |
4460 tree t, arg, dest, src; | |
4461 location_t loc = EXPR_LOCATION (*expr_p); | |
4462 | |
4463 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0)); | |
4464 arg = unshare_expr (arg); | |
4465 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0); | |
4466 src = build_fold_addr_expr_loc (loc, op1); | |
4467 dest = build_fold_addr_expr_loc (loc, op0); | |
4468 t = implicit_built_in_decls[BUILT_IN_MEMCMP]; | |
4469 t = build_call_expr_loc (loc, t, 3, dest, src, arg); | |
4470 *expr_p | |
4471 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node); | |
4472 | |
4473 return GS_OK; | |
4474 } | |
4475 | |
4476 /* Gimplify a comparison between two aggregate objects of integral scalar | |
4477 mode as a comparison between the bitwise equivalent scalar values. */ | |
4478 | |
4479 static enum gimplify_status | |
4480 gimplify_scalar_mode_aggregate_compare (tree *expr_p) | |
4481 { | |
4482 location_t loc = EXPR_LOCATION (*expr_p); | |
4483 tree op0 = TREE_OPERAND (*expr_p, 0); | |
4484 tree op1 = TREE_OPERAND (*expr_p, 1); | |
4485 | |
4486 tree type = TREE_TYPE (op0); | |
4487 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1); | |
4488 | |
4489 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0); | |
4490 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1); | |
4491 | |
4492 *expr_p | |
4493 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1); | |
4494 | |
4495 return GS_OK; | |
4496 } | |
4497 | |
4498 /* Gimplify TRUTH_ANDIF_EXPR and TRUTH_ORIF_EXPR expressions. EXPR_P | |
4499 points to the expression to gimplify. | |
4500 | |
4501 Expressions of the form 'a && b' are gimplified to: | |
4502 | |
4503 a && b ? true : false | |
4504 | |
4505 LOCUS is the source location to be put on the generated COND_EXPR. | |
4506 gimplify_cond_expr will do the rest. */ | |
4507 | |
4508 static enum gimplify_status | |
4509 gimplify_boolean_expr (tree *expr_p, location_t locus) | |
4510 { | |
4511 /* Preserve the original type of the expression. */ | |
4512 tree type = TREE_TYPE (*expr_p); | |
4513 | |
4514 *expr_p = build3 (COND_EXPR, type, *expr_p, | |
4515 fold_convert_loc (locus, type, boolean_true_node), | |
4516 fold_convert_loc (locus, type, boolean_false_node)); | |
4517 | |
4518 SET_EXPR_LOCATION (*expr_p, locus); | |
4519 | |
4520 return GS_OK; | |
4521 } | |
4522 | |
4523 /* Gimplifies an expression sequence. This function gimplifies each | |
4524 expression and re-writes the original expression with the last | |
4525 expression of the sequence in GIMPLE form. | |
4526 | |
4527 PRE_P points to the list where the side effects for all the | |
4528 expressions in the sequence will be emitted. | |
4529 | |
4530 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */ | |
4531 | |
4532 static enum gimplify_status | |
4533 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value) | |
4534 { | |
4535 tree t = *expr_p; | |
4536 | |
4537 do | |
4538 { | |
4539 tree *sub_p = &TREE_OPERAND (t, 0); | |
4540 | |
4541 if (TREE_CODE (*sub_p) == COMPOUND_EXPR) | |
4542 gimplify_compound_expr (sub_p, pre_p, false); | |
4543 else | |
4544 gimplify_stmt (sub_p, pre_p); | |
4545 | |
4546 t = TREE_OPERAND (t, 1); | |
4547 } | |
4548 while (TREE_CODE (t) == COMPOUND_EXPR); | |
4549 | |
4550 *expr_p = t; | |
4551 if (want_value) | |
4552 return GS_OK; | |
4553 else | |
4554 { | |
4555 gimplify_stmt (expr_p, pre_p); | |
4556 return GS_ALL_DONE; | |
4557 } | |
4558 } | |
4559 | |
4560 | |
/* Gimplify a SAVE_EXPR node.  EXPR_P points to the expression to
   gimplify.  After gimplification, EXPR_P will point to a new temporary
   that holds the original value of the SAVE_EXPR node.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.  POST_P likewise receives side effects
   that must happen after *EXPR_P.  */

static enum gimplify_status
gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  enum gimplify_status ret = GS_ALL_DONE;
  tree val;

  gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
  val = TREE_OPERAND (*expr_p, 0);

  /* If the SAVE_EXPR has not been resolved, then evaluate it once.  */
  if (!SAVE_EXPR_RESOLVED_P (*expr_p))
    {
      /* The operand may be a void-valued expression such as SAVE_EXPRs
	 generated by the Java frontend for class initialization.  It is
	 being executed only for its side-effects.  */
      if (TREE_TYPE (val) == void_type_node)
	{
	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			       is_gimple_stmt, fb_none);
	  /* No value to replace the SAVE_EXPR with in the void case.  */
	  val = NULL;
	}
      else
	/* Evaluate the operand into a temporary; any later encounter
	   with this same (shared) SAVE_EXPR node will reuse it.  */
	val = get_initialized_tmp_var (val, pre_p, post_p);

      /* Record the temporary (or NULL) in the node and mark it
	 resolved so the evaluation happens exactly once.  */
      TREE_OPERAND (*expr_p, 0) = val;
      SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
    }

  *expr_p = val;

  return ret;
}
4600 | |
/* Re-write the ADDR_EXPR node pointed to by EXPR_P

      unary_expr
	      : ...
	      | '&' varname
	      ...

    PRE_P points to the list where side effects that must happen before
	    *EXPR_P should be stored.

    POST_P points to the list where side effects that must happen after
	    *EXPR_P should be stored.

    Returns GS_OK on success, or GS_ERROR when the operand cannot be
    made addressable.  */

static enum gimplify_status
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr = *expr_p;
  tree op0 = TREE_OPERAND (expr, 0);
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  switch (TREE_CODE (op0))
    {
    case INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
    do_indirect_ref:
      /* Check if we are dealing with an expression of the form '&*ptr'.
	 While the front end folds away '&*ptr' into 'ptr', these
	 expressions may be generated internally by the compiler (e.g.,
	 builtins like __builtin_va_end).  */
      /* Caution: the silent array decomposition semantics we allow for
	 ADDR_EXPR means we can't always discard the pair.  */
      /* Gimplification of the ADDR_EXPR operand may drop
	 cv-qualification conversions, so make sure we add them if
	 needed.  */
      {
	tree op00 = TREE_OPERAND (op0, 0);
	tree t_expr = TREE_TYPE (expr);
	tree t_op00 = TREE_TYPE (op00);

	/* '&*ptr' simplifies to 'ptr', possibly with a conversion to
	   restore the qualified pointer type of the ADDR_EXPR.  */
	if (!useless_type_conversion_p (t_expr, t_op00))
	  op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
	*expr_p = op00;
	ret = GS_OK;
      }
      break;

    case VIEW_CONVERT_EXPR:
      /* Take the address of our operand and then convert it to the type of
	 this ADDR_EXPR.

	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
	 all clear.  The impact of this transformation is even less clear.  */

      /* If the operand is a useless conversion, look through it.  Doing so
	 guarantees that the ADDR_EXPR and its operand will remain of the
	 same type.  */
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
	op0 = TREE_OPERAND (op0, 0);

      /* &VIEW_CONVERT_EXPR<T>(x) becomes (T *) &x.  */
      *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
				  build_fold_addr_expr_loc (loc,
							TREE_OPERAND (op0, 0)));
      ret = GS_OK;
      break;

    default:
      /* We use fb_either here because the C frontend sometimes takes
	 the address of a call that returns a struct; see
	 gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
	 the implied temporary explicit.  */

      /* Make the operand addressable.  */
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
			   is_gimple_addressable, fb_either);
      if (ret == GS_ERROR)
	break;

      /* Then mark it.  Beware that it may not be possible to do so directly
	 if a temporary has been created by the gimplification.  */
      prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);

      op0 = TREE_OPERAND (expr, 0);

      /* For various reasons, the gimplification of the expression
	 may have made a new INDIRECT_REF.  */
      if (TREE_CODE (op0) == INDIRECT_REF)
	goto do_indirect_ref;

      mark_addressable (TREE_OPERAND (expr, 0));

      /* The FEs may end up building ADDR_EXPRs early on a decl with
	 an incomplete type.  Re-build ADDR_EXPRs in canonical form
	 here.  */
      if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
	*expr_p = build_fold_addr_expr (op0);

      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
      recompute_tree_invariant_for_addr_expr (*expr_p);

      /* If we re-built the ADDR_EXPR add a conversion to the original type
	 if required.  */
      if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
	*expr_p = fold_convert (TREE_TYPE (expr), *expr_p);

      break;
    }

  return ret;
}
4711 | |
/* Gimplify the operands of an ASM_EXPR.  Input operands should be a gimple
   value; output operands should be a gimple lvalue.

   The GENERIC ASM_EXPR at *EXPR_P is converted into a GIMPLE_ASM tuple
   appended to *PRE_P.  In/out ("+") output operands are split into a
   separate output and a matching input so the optimizers see them
   independently.  Returns GS_ALL_DONE on success, GS_ERROR if any
   operand or constraint is invalid (and then no GIMPLE_ASM is
   emitted).  */

static enum gimplify_status
gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr;
  int noutputs;
  const char **oconstraints;
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;
  enum gimplify_status ret, tret;
  gimple stmt;
  VEC(tree, gc) *inputs;
  VEC(tree, gc) *outputs;
  VEC(tree, gc) *clobbers;
  VEC(tree, gc) *labels;
  tree link_next;

  expr = *expr_p;
  noutputs = list_length (ASM_OUTPUTS (expr));
  /* Output constraints are collected here so the input-constraint
     parser below can check matching-operand references.  */
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  inputs = outputs = clobbers = labels = NULL;

  ret = GS_ALL_DONE;
  link_next = NULL_TREE;
  for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
    {
      bool ok;
      size_t constraint_len;

      /* Save the chain now; the link is unlinked from the list below.  */
      link_next = TREE_CHAIN (link);

      oconstraints[i]
	= constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      constraint_len = strlen (constraint);
      if (constraint_len == 0)
	continue;

      ok = parse_output_constraint (&constraint, i, 0, 0,
				    &allows_mem, &allows_reg, &is_inout);
      if (!ok)
	{
	  ret = GS_ERROR;
	  is_inout = false;
	}

      if (!allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (link));

      tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
			    is_inout ? is_gimple_min_lval : is_gimple_lvalue,
			    fb_lvalue | fb_mayfail);
      if (tret == GS_ERROR)
	{
	  error ("invalid lvalue in asm output %d", i);
	  ret = tret;
	}

      VEC_safe_push (tree, gc, outputs, link);
      TREE_CHAIN (link) = NULL_TREE;

      if (is_inout)
	{
	  /* An input/output operand.  To give the optimizers more
	     flexibility, split it into separate input and output
	     operands.  */
	  tree input;
	  /* Holds the decimal rendering of the operand number I.
	     NOTE(review): buf[10] only fits up to 9 digits plus NUL;
	     a pathological asm with >= 10^9 outputs would overflow —
	     presumably considered unreachable in practice; confirm.  */
	  char buf[10];

	  /* Turn the in/out constraint into an output constraint.  */
	  char *p = xstrdup (constraint);
	  p[0] = '=';
	  TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);

	  /* And add a matching input constraint.  */
	  if (allows_reg)
	    {
	      sprintf (buf, "%d", i);

	      /* If there are multiple alternatives in the constraint,
		 handle each of them individually.  Those that allow register
		 will be replaced with operand number, the others will stay
		 unchanged.  */
	      if (strchr (p, ',') != NULL)
		{
		  size_t len = 0, buflen = strlen (buf);
		  char *beg, *end, *str, *dst;

		  /* First pass: compute an upper bound on the length of
		     the rewritten constraint string.  */
		  for (beg = p + 1;;)
		    {
		      end = strchr (beg, ',');
		      if (end == NULL)
			end = strchr (beg, '\0');
		      if ((size_t) (end - beg) < buflen)
			len += buflen + 1;
		      else
			len += end - beg + 1;
		      if (*end)
			beg = end + 1;
		      else
			break;
		    }

		  /* Second pass: emit each alternative, replacing those
		     that allow a register with the operand number.  */
		  str = (char *) alloca (len);
		  for (beg = p + 1, dst = str;;)
		    {
		      const char *tem;
		      bool mem_p, reg_p, inout_p;

		      end = strchr (beg, ',');
		      if (end)
			*end = '\0';
		      /* Temporarily prepend '=' so the alternative parses
			 as an output constraint.  */
		      beg[-1] = '=';
		      tem = beg - 1;
		      parse_output_constraint (&tem, i, 0, 0,
					       &mem_p, &reg_p, &inout_p);
		      if (dst != str)
			*dst++ = ',';
		      if (reg_p)
			{
			  memcpy (dst, buf, buflen);
			  dst += buflen;
			}
		      else
			{
			  if (end)
			    len = end - beg;
			  else
			    len = strlen (beg);
			  memcpy (dst, beg, len);
			  dst += len;
			}
		      if (end)
			beg = end + 1;
		      else
			break;
		    }
		  *dst = '\0';
		  input = build_string (dst - str, str);
		}
	      else
		input = build_string (strlen (buf), buf);
	    }
	  else
	    /* No register allowed: reuse the constraint minus the
	       leading '+'.  */
	    input = build_string (constraint_len - 1, constraint + 1);

	  free (p);

	  /* Chain the synthesized input (constraint, unshared value)
	     onto the asm's input list so the loop below processes it.  */
	  input = build_tree_list (build_tree_list (NULL_TREE, input),
				   unshare_expr (TREE_VALUE (link)));
	  ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
	}
    }

  /* Note that I keeps counting past the outputs, so diagnostics number
     operands across both lists.  */
  link_next = NULL_TREE;
  for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* If we can't make copies, we can only accept memory.  */
      if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
	{
	  if (allows_mem)
	    allows_reg = 0;
	  else
	    {
	      error ("impossible constraint in %<asm%>");
	      error ("non-memory input %d must stay in memory", i);
	      return GS_ERROR;
	    }
	}

      /* If the operand is a memory input, it should be an lvalue.  */
      if (!allows_reg && allows_mem)
	{
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_lvalue, fb_lvalue | fb_mayfail);
	  mark_addressable (TREE_VALUE (link));
	  if (tret == GS_ERROR)
	    {
	      if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
		input_location = EXPR_LOCATION (TREE_VALUE (link));
	      error ("memory input %d is not directly addressable", i);
	      ret = tret;
	    }
	}
      else
	{
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_asm_val, fb_rvalue);
	  if (tret == GS_ERROR)
	    ret = tret;
	}

      TREE_CHAIN (link) = NULL_TREE;
      VEC_safe_push (tree, gc, inputs, link);
    }

  /* Clobbers and goto-labels are copied over verbatim.  */
  for (link = ASM_CLOBBERS (expr); link; ++i, link = TREE_CHAIN (link))
    VEC_safe_push (tree, gc, clobbers, link);

  for (link = ASM_LABELS (expr); link; ++i, link = TREE_CHAIN (link))
    VEC_safe_push (tree, gc, labels, link);

  /* Do not add ASMs with errors to the gimple IL stream.  */
  if (ret != GS_ERROR)
    {
      stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
				   inputs, outputs, clobbers, labels);

      gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
      gimple_asm_set_input (stmt, ASM_INPUT_P (expr));

      gimplify_seq_add_stmt (pre_p, stmt);
    }

  return ret;
}
4938 | |
/* Gimplify a CLEANUP_POINT_EXPR.  Currently this works by adding
   GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
   gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
   return to this function.

   FIXME should we complexify the prequeue handling instead?  Or use flags
   for all the cleanups and let the optimizer tighten them up?  The current
   code seems pretty fragile; it will break on a cleanup within any
   non-conditional nesting.  But any such nesting would be broken, anyway;
   we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
   and continues out of it.  We can do that at the RTL level, though, so
   having an optimizer to tighten up try/finally regions would be a Good
   Thing.  */

static enum gimplify_status
gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
{
  gimple_stmt_iterator iter;
  gimple_seq body_sequence = NULL;

  /* If the expression yields a value, voidify_wrapper_expr gives us a
     temporary holding it; otherwise TEMP is NULL.  */
  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  /* We only care about the number of conditions between the innermost
     CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
     any cleanups collected outside the CLEANUP_POINT_EXPR.  */
  int old_conds = gimplify_ctxp->conditions;
  gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
  gimplify_ctxp->conditions = 0;
  gimplify_ctxp->conditional_cleanups = NULL;

  /* Gimplify the body into a private sequence so we can rewrite the
     GIMPLE_WITH_CLEANUP_EXPR markers it accumulates.  */
  gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);

  gimplify_ctxp->conditions = old_conds;
  gimplify_ctxp->conditional_cleanups = old_cleanups;

  /* Walk the sequence; each WCE marker becomes either inline code (if
     it is the last statement) or a GIMPLE_TRY wrapping everything that
     follows it.  */
  for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
    {
      gimple wce = gsi_stmt (iter);

      if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
	{
	  if (gsi_one_before_end_p (iter))
	    {
	      /* Nothing follows the cleanup, so no try/finally is
		 needed: splice the cleanup body in directly.  */
	      /* Note that gsi_insert_seq_before and gsi_remove do not
		 scan operands, unlike some other sequence mutators.  */
	      gsi_insert_seq_before_without_update (&iter,
						    gimple_wce_cleanup (wce),
						    GSI_SAME_STMT);
	      gsi_remove (&iter, true);
	      break;
	    }
	  else
	    {
	      gimple gtry;
	      gimple_seq seq;
	      enum gimple_try_flags kind;

	      if (gimple_wce_cleanup_eh_only (wce))
		kind = GIMPLE_TRY_CATCH;
	      else
		kind = GIMPLE_TRY_FINALLY;
	      /* SEQ is everything after the marker; it becomes the
		 protected body of the try.  */
	      seq = gsi_split_seq_after (iter);

	      gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
	      /* Do not use gsi_replace here, as it may scan operands.
		 We want to do a simple structural modification only.  */
	      *gsi_stmt_ptr (&iter) = gtry;
	      /* Continue scanning inside the new try body for further
		 WCE markers.  */
	      iter = gsi_start (seq);
	    }
	}
      else
	gsi_next (&iter);
    }

  gimplify_seq_add_seq (pre_p, body_sequence);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}
5025 | |
/* Insert a cleanup marker for gimplify_cleanup_point_expr.  CLEANUP
   is the cleanup action required.  EH_ONLY is true if the cleanup should
   only be executed if an exception is thrown, not on normal exit.
   VAR is the variable the cleanup protects; PRE_P receives the marker
   (or, in a conditional context, the flag-setting statement).  */

static void
gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
{
  gimple wce;
  gimple_seq cleanup_stmts = NULL;

  /* Errors can result in improperly nested cleanups.  Which results in
     confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
  if (errorcount || sorrycount)
    return;

  if (gimple_conditional_context ())
    {
      /* If we're in a conditional context, this is more complex.  We only
	 want to run the cleanup if we actually ran the initialization that
	 necessitates it, but we want to run it after the end of the
	 conditional context.  So we wrap the try/finally around the
	 condition and use a flag to determine whether or not to actually
	 run the destructor.  Thus

	   test ? f(A()) : 0

	 becomes (approximately)

	   flag = 0;
	   try {
	     if (test) { A::A(temp); flag = 1; val = f(temp); }
	     else { val = 0; }
	   } finally {
	     if (flag) A::~A(temp);
	   }
	   val
      */
      tree flag = create_tmp_var (boolean_type_node, "cleanup");
      gimple ffalse = gimple_build_assign (flag, boolean_false_node);
      gimple ftrue = gimple_build_assign (flag, boolean_true_node);

      /* Guard the cleanup with the flag and gimplify the result.  */
      cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);

      /* The flag initialization and the marker go outside the
	 condition; the flag is set where the initialization ran.  */
      gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
      gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
      gimplify_seq_add_stmt (pre_p, ftrue);

      /* Because of this manipulation, and the EH edges that jump
	 threading cannot redirect, the temporary (VAR) will appear
	 to be used uninitialized.  Don't warn.  */
      TREE_NO_WARNING (var) = 1;
    }
  else
    {
      /* Unconditional case: gimplify the cleanup and emit the marker
	 directly; gimplify_cleanup_point_expr resolves it later.  */
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);
      gimple_wce_set_cleanup_eh_only (wce, eh_only);
      gimplify_seq_add_stmt (pre_p, wce);
    }
}
5088 | |
/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR.

   The TARGET_EXPR's slot becomes an ordinary temporary, its initializer
   is gimplified into *PRE_P, and *EXPR_P is replaced by the slot.
   Returns GS_OK, or GS_ERROR if the initializer fails to gimplify.  */

static enum gimplify_status
gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree targ = *expr_p;
  tree temp = TARGET_EXPR_SLOT (targ);
  tree init = TARGET_EXPR_INITIAL (targ);
  enum gimplify_status ret;

  if (init)
    {
      /* TARGET_EXPR temps aren't part of the enclosing block, so add it
	 to the temps list.  Handle also variable length TARGET_EXPRs.  */
      if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
	    gimplify_type_sizes (TREE_TYPE (temp), pre_p);
	  gimplify_vla_decl (temp, pre_p);
	}
      else
	gimple_add_tmp_var (temp);

      /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
	 expression is supposed to initialize the slot.  */
      if (VOID_TYPE_P (TREE_TYPE (init)))
	ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
      else
	{
	  /* Otherwise build "temp = init" and gimplify that instead.  */
	  tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
	  init = init_expr;
	  ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
	  /* The INIT_EXPR was fully emitted into *PRE_P; drop it and
	     return its node to the GC allocator.  */
	  init = NULL;
	  ggc_free (init_expr);
	}
      if (ret == GS_ERROR)
	{
	  /* PR c++/28266 Make sure this is expanded only once. */
	  TARGET_EXPR_INITIAL (targ) = NULL_TREE;
	  return GS_ERROR;
	}
      if (init)
	gimplify_and_add (init, pre_p);

      /* If needed, push the cleanup for the temp.  */
      if (TARGET_EXPR_CLEANUP (targ))
	gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
			     CLEANUP_EH_ONLY (targ), pre_p);

      /* Only expand this once.  */
      TREE_OPERAND (targ, 3) = init;
      TARGET_EXPR_INITIAL (targ) = NULL_TREE;
    }
  else
    /* We should have expanded this before.  */
    gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));

  *expr_p = temp;
  return GS_OK;
}
5149 | |
5150 /* Gimplification of expression trees. */ | |
5151 | |
5152 /* Gimplify an expression which appears at statement context. The | |
5153 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is | |
5154 NULL, a new sequence is allocated. | |
5155 | |
5156 Return true if we actually added a statement to the queue. */ | |
5157 | |
5158 bool | |
5159 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p) | |
5160 { | |
5161 gimple_seq_node last; | |
5162 | |
5163 if (!*seq_p) | |
5164 *seq_p = gimple_seq_alloc (); | |
5165 | |
5166 last = gimple_seq_last (*seq_p); | |
5167 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none); | |
5168 return last != gimple_seq_last (*seq_p); | |
5169 } | |
5170 | |
5171 | |
5172 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels | |
5173 to CTX. If entries already exist, force them to be some flavor of private. | |
5174 If there is no enclosing parallel, do nothing. */ | |
5175 | |
5176 void | |
5177 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl) | |
5178 { | |
5179 splay_tree_node n; | |
5180 | |
5181 if (decl == NULL || !DECL_P (decl)) | |
5182 return; | |
5183 | |
5184 do | |
5185 { | |
5186 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl); | |
5187 if (n != NULL) | |
5188 { | |
5189 if (n->value & GOVD_SHARED) | |
5190 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN); | |
5191 else | |
5192 return; | |
5193 } | |
5194 else if (ctx->region_type != ORT_WORKSHARE) | |
5195 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE); | |
5196 | |
5197 ctx = ctx->outer_context; | |
5198 } | |
5199 while (ctx); | |
5200 } | |
5201 | |
/* Similarly for each of the type sizes of TYPE: make every DECL that
   contributes to the size of TYPE firstprivate in CTX.  */

static void
omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
{
  if (type == NULL || type == error_mark_node)
    return;
  type = TYPE_MAIN_VARIANT (type);

  /* Visit each type only once per context, using the context's
     privatized_types set; this also breaks cycles through pointers.  */
  if (pointer_set_insert (ctx->privatized_types, type))
    return;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      /* Scalar ranges may themselves be variable (e.g. Fortran/Ada
	 bounds); privatize the bound expressions.  */
      omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
      omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
      break;

    case ARRAY_TYPE:
      /* Recurse into the element type and the index domain.  */
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
      omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree field;
	for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL)
	    {
	      omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
	      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
	    }
      }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
      break;

    default:
      break;
    }

  /* Finally privatize the overall size expressions of TYPE itself and
     let the frontend handle any language-specific size fields.  */
  omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
  omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
  lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
}
5257 | |
5258 /* Add an entry for DECL in the OpenMP context CTX with FLAGS. */ | |
5259 | |
static void
omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
{
  splay_tree_node n;
  unsigned int nflags;
  tree t;

  /* Erroneous decls have already been diagnosed; ignore them here.  */
  if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
    return;

  /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
     there are constructors involved somewhere.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (decl))
      || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
    flags |= GOVD_SEEN;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL)
    {
      /* We shouldn't be re-adding the decl with the same data
	 sharing class.  */
      gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
      /* The only combination of data sharing classes we should see is
	 FIRSTPRIVATE and LASTPRIVATE.  */
      nflags = n->value | flags;
      gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
		  == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE));
      /* Merge the new flags into the existing entry and stop; the
	 variable-size handling below has already been done once.  */
      n->value = nflags;
      return;
    }

  /* When adding a variable-sized variable, we have to handle all sorts
     of additional bits of data: the pointer replacement variable, and
     the parameters of the type.  */
  if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      /* Add the pointer replacement variable as PRIVATE if the variable
	 replacement is private, else FIRSTPRIVATE since we'll need the
	 address of the original variable either for SHARED, or for the
	 copy into or out of the context.  */
      if (!(flags & GOVD_LOCAL))
	{
	  nflags = flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
	  nflags |= flags & GOVD_SEEN;
	  /* The front end gave DECL a DECL_VALUE_EXPR of the form *PTR;
	     recurse on PTR, the actual storage being remapped.  */
	  t = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  omp_add_variable (ctx, t, nflags);
	}

      /* Add all of the variable and type parameters (which should have
	 been gimplified to a formal temporary) as FIRSTPRIVATE.  */
      omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
      omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* The variable-sized variable itself is never SHARED, only some form
	 of PRIVATE.  The sharing would take place via the pointer variable
	 which we remapped above.  */
      if (flags & GOVD_SHARED)
	flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
		| (flags & (GOVD_SEEN | GOVD_EXPLICIT));

      /* We're going to make use of the TYPE_SIZE_UNIT at least in the
	 alloca statement we generate for the variable, so make sure it
	 is available.  This isn't automatically needed for the SHARED
	 case, since we won't be allocating local storage then.
	 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
	 in this case omp_notice_variable will be called later
	 on when it is gimplified.  */
      else if (! (flags & GOVD_LOCAL))
	omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
    }
  else if (lang_hooks.decls.omp_privatize_by_reference (decl))
    {
      gcc_assert ((flags & GOVD_LOCAL) == 0);
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* Similar to the direct variable sized case above, we'll need the
	 size of references being privatized.  */
      if ((flags & GOVD_SHARED) == 0)
	{
	  t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  if (TREE_CODE (t) != INTEGER_CST)
	    omp_notice_variable (ctx, t, true);
	}
    }

  /* Finally record DECL itself with the (possibly adjusted) FLAGS.  */
  splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
}
5351 | |
5352 /* Record the fact that DECL was used within the OpenMP context CTX. | |
5353 IN_CODE is true when real code uses DECL, and false when we should | |
5354 merely emit default(none) errors. Return true if DECL is going to | |
5355 be remapped and thus DECL shouldn't be gimplified into its | |
5356 DECL_VALUE_EXPR (if any). */ | |
5357 | |
static bool
omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
{
  splay_tree_node n;
  /* Real code uses mark the decl GOVD_SEEN; default(none) scans do not.  */
  unsigned flags = in_code ? GOVD_SEEN : 0;
  bool ret = false, shared;

  /* Erroneous decls have already been diagnosed; nothing to record.  */
  if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
    return false;

  /* Threadprivate variables are predetermined.  */
  if (is_global_var (decl))
    {
      if (DECL_THREAD_LOCAL_P (decl))
	return false;

      /* A global whose DECL_VALUE_EXPR is rooted in a thread-local decl
	 is effectively threadprivate as well.  */
      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree value = get_base_address (DECL_VALUE_EXPR (decl));

	  if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
	    return false;
	}
    }

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n == NULL)
    {
      /* DECL has no entry in this context yet: compute its implicit data
	 sharing class from the default clause / predetermined sharing.  */
      enum omp_clause_default_kind default_kind, kind;
      struct gimplify_omp_ctx *octx;

      /* Worksharing regions don't own data sharing; defer to the parent.  */
      if (ctx->region_type == ORT_WORKSHARE)
	goto do_outer;

      /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
	 remapped firstprivate instead of shared.  To some extent this is
	 addressed in omp_firstprivatize_type_sizes, but not effectively.  */
      default_kind = ctx->default_kind;
      kind = lang_hooks.decls.omp_predetermined_sharing (decl);
      if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
	default_kind = kind;

      switch (default_kind)
	{
	case OMP_CLAUSE_DEFAULT_NONE:
	  /* default(none): diagnose, then treat as shared to limit
	     cascading errors.  */
	  error ("%qE not specified in enclosing parallel",
		 DECL_NAME (decl));
	  error_at (ctx->location, "enclosing parallel");
	  /* FALLTHRU */
	case OMP_CLAUSE_DEFAULT_SHARED:
	  flags |= GOVD_SHARED;
	  break;
	case OMP_CLAUSE_DEFAULT_PRIVATE:
	  flags |= GOVD_PRIVATE;
	  break;
	case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
	  flags |= GOVD_FIRSTPRIVATE;
	  break;
	case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
	  /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
	  gcc_assert (ctx->region_type == ORT_TASK);
	  if (ctx->outer_context)
	    omp_notice_variable (ctx->outer_context, decl, in_code);
	  /* Walk outward: a non-shared mapping in any enclosing context up
	     to (and including) the innermost parallel makes DECL
	     firstprivate in the task.  */
	  for (octx = ctx->outer_context; octx; octx = octx->outer_context)
	    {
	      splay_tree_node n2;

	      n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
		{
		  flags |= GOVD_FIRSTPRIVATE;
		  break;
		}
	      if ((octx->region_type & ORT_PARALLEL) != 0)
		break;
	    }
	  if (flags & GOVD_FIRSTPRIVATE)
	    break;
	  /* No enclosing context at all: locals and parameters of the
	     current function default to firstprivate in the task.  */
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!is_global_var (decl)
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    {
	      flags |= GOVD_FIRSTPRIVATE;
	      break;
	    }
	  flags |= GOVD_SHARED;
	  break;
	default:
	  gcc_unreachable ();
	}

      if ((flags & GOVD_PRIVATE)
	  && lang_hooks.decls.omp_private_outer_ref (decl))
	flags |= GOVD_PRIVATE_OUTER_REF;

      omp_add_variable (ctx, decl, flags);

      shared = (flags & GOVD_SHARED) != 0;
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
      goto do_outer;
    }

  /* First real use of a variable-sized decl in this context: also mark
     its pointer replacement variable (operand of the DECL_VALUE_EXPR
     INDIRECT_REF) as seen, so it isn't elided later.  */
  if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
      && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
      && DECL_SIZE (decl)
      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      splay_tree_node n2;
      tree t = DECL_VALUE_EXPR (decl);
      gcc_assert (TREE_CODE (t) == INDIRECT_REF);
      t = TREE_OPERAND (t, 0);
      gcc_assert (DECL_P (t));
      n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
      n2->value |= GOVD_SEEN;
    }

  shared = ((flags | n->value) & GOVD_SHARED) != 0;
  ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);

  /* If nothing changed, there's nothing left to do.  */
  if ((n->value & flags) == flags)
    return ret;
  flags |= n->value;
  n->value = flags;

 do_outer:
  /* If the variable is private in the current context, then we don't
     need to propagate anything to an outer context.  */
  if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
    return ret;
  if (ctx->outer_context
      && omp_notice_variable (ctx->outer_context, decl, in_code))
    return true;
  return ret;
}
5494 | |
5495 /* Verify that DECL is private within CTX. If there's specific information | |
5496 to the contrary in the innermost scope, generate an error. */ | |
5497 | |
5498 static bool | |
5499 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl) | |
5500 { | |
5501 splay_tree_node n; | |
5502 | |
5503 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl); | |
5504 if (n != NULL) | |
5505 { | |
5506 if (n->value & GOVD_SHARED) | |
5507 { | |
5508 if (ctx == gimplify_omp_ctxp) | |
5509 { | |
5510 error ("iteration variable %qE should be private", | |
5511 DECL_NAME (decl)); | |
5512 n->value = GOVD_PRIVATE; | |
5513 return true; | |
5514 } | |
5515 else | |
5516 return false; | |
5517 } | |
5518 else if ((n->value & GOVD_EXPLICIT) != 0 | |
5519 && (ctx == gimplify_omp_ctxp | |
5520 || (ctx->region_type == ORT_COMBINED_PARALLEL | |
5521 && gimplify_omp_ctxp->outer_context == ctx))) | |
5522 { | |
5523 if ((n->value & GOVD_FIRSTPRIVATE) != 0) | |
5524 error ("iteration variable %qE should not be firstprivate", | |
5525 DECL_NAME (decl)); | |
5526 else if ((n->value & GOVD_REDUCTION) != 0) | |
5527 error ("iteration variable %qE should not be reduction", | |
5528 DECL_NAME (decl)); | |
5529 } | |
5530 return (ctx == gimplify_omp_ctxp | |
5531 || (ctx->region_type == ORT_COMBINED_PARALLEL | |
5532 && gimplify_omp_ctxp->outer_context == ctx)); | |
5533 } | |
5534 | |
5535 if (ctx->region_type != ORT_WORKSHARE) | |
5536 return false; | |
5537 else if (ctx->outer_context) | |
5538 return omp_is_private (ctx->outer_context, decl); | |
5539 return false; | |
5540 } | |
5541 | |
5542 /* Return true if DECL is private within a parallel region | |
5543 that binds to the current construct's context or in parallel | |
5544 region's REDUCTION clause. */ | |
5545 | |
5546 static bool | |
5547 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl) | |
5548 { | |
5549 splay_tree_node n; | |
5550 | |
5551 do | |
5552 { | |
5553 ctx = ctx->outer_context; | |
5554 if (ctx == NULL) | |
5555 return !(is_global_var (decl) | |
5556 /* References might be private, but might be shared too. */ | |
5557 || lang_hooks.decls.omp_privatize_by_reference (decl)); | |
5558 | |
5559 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl); | |
5560 if (n != NULL) | |
5561 return (n->value & GOVD_SHARED) == 0; | |
5562 } | |
5563 while (ctx->region_type == ORT_WORKSHARE); | |
5564 return false; | |
5565 } | |
5566 | |
5567 /* Scan the OpenMP clauses in *LIST_P, installing mappings into a new | |
5568 and previous omp contexts. */ | |
5569 | |
static void
gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
			   enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *ctx, *outer_ctx;
  struct gimplify_ctx gctx;
  tree c;

  /* new_omp_context links the new context under the current one; remember
     the old current context for clauses that must notice variables
     outside the new region.  */
  ctx = new_omp_context (region_type);
  outer_ctx = ctx->outer_context;

  while ((c = *list_p) != NULL)
    {
      bool remove = false;
      bool notice_outer = true;
      /* Clause name used in the "private in outer context" diagnostic,
	 or NULL when that check doesn't apply.  */
      const char *check_non_private = NULL;
      unsigned int flags;
      tree decl;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  flags = GOVD_PRIVATE | GOVD_EXPLICIT;
	  if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
	    {
	      flags |= GOVD_PRIVATE_OUTER_REF;
	      OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
	    }
	  else
	    notice_outer = false;
	  goto do_add;
	case OMP_CLAUSE_SHARED:
	  flags = GOVD_SHARED | GOVD_EXPLICIT;
	  goto do_add;
	case OMP_CLAUSE_FIRSTPRIVATE:
	  flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
	  check_non_private = "firstprivate";
	  goto do_add;
	case OMP_CLAUSE_LASTPRIVATE:
	  flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
	  check_non_private = "lastprivate";
	  goto do_add;
	case OMP_CLAUSE_REDUCTION:
	  flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
	  check_non_private = "reduction";
	  goto do_add;

	do_add:
	  decl = OMP_CLAUSE_DECL (c);
	  if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
	    {
	      remove = true;
	      break;
	    }
	  omp_add_variable (ctx, decl, flags);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      /* Gimplify the reduction init and merge expressions into
		 their own sequences now, inside the new context, each in
		 a fresh gimplify context.  */
	      omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
				GOVD_LOCAL | GOVD_SEEN);
	      gimplify_omp_ctxp = ctx;
	      push_gimplify_context (&gctx);

	      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = gimple_seq_alloc ();
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = gimple_seq_alloc ();

	      gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
				&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
	      push_gimplify_context (&gctx);
	      gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
				&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
	      OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
	      OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		   && OMP_CLAUSE_LASTPRIVATE_STMT (c))
	    {
	      /* Likewise gimplify the lastprivate statement now, wrapping
		 it in a BIND_EXPR first so pop_gimplify_context has a
		 GIMPLE_BIND to attach temporaries to.  */
	      gimplify_omp_ctxp = ctx;
	      push_gimplify_context (&gctx);
	      if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
		{
		  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
				      NULL, NULL);
		  TREE_SIDE_EFFECTS (bind) = 1;
		  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
		  OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
		}
	      gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
				&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
	      OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  if (notice_outer)
	    goto do_notice;
	  break;

	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
	    {
	      remove = true;
	      break;
	    }
	do_notice:
	  if (outer_ctx)
	    omp_notice_variable (outer_ctx, decl, true);
	  /* firstprivate/lastprivate/reduction on a worksharing construct
	     is invalid if the decl is private in the enclosing region.  */
	  if (check_non_private
	      && region_type == ORT_WORKSHARE
	      && omp_check_private (ctx, decl))
	    {
	      error ("%s variable %qE is private in outer context",
		     check_non_private, DECL_NAME (decl));
	      remove = true;
	    }
	  break;

	case OMP_CLAUSE_IF:
	  OMP_CLAUSE_OPERAND (c, 0)
	    = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
	  /* Fall through.  */

	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NUM_THREADS:
	  /* These carry an expression operand that must be a gimple
	     value; gimplify it into PRE_P (before the construct).  */
	  if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    remove = true;
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_COLLAPSE:
	  /* No operands to gimplify.  */
	  break;

	case OMP_CLAUSE_DEFAULT:
	  ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
	  break;

	default:
	  gcc_unreachable ();
	}

      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  /* Leave the new context current; gimplify_adjust_omp_clauses pops it.  */
  gimplify_omp_ctxp = ctx;
}
5730 | |
5731 /* For all variables that were not actually used within the context, | |
5732 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */ | |
5733 | |
5734 static int | |
5735 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data) | |
5736 { | |
5737 tree *list_p = (tree *) data; | |
5738 tree decl = (tree) n->key; | |
5739 unsigned flags = n->value; | |
5740 enum omp_clause_code code; | |
5741 tree clause; | |
5742 bool private_debug; | |
5743 | |
5744 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL)) | |
5745 return 0; | |
5746 if ((flags & GOVD_SEEN) == 0) | |
5747 return 0; | |
5748 if (flags & GOVD_DEBUG_PRIVATE) | |
5749 { | |
5750 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE); | |
5751 private_debug = true; | |
5752 } | |
5753 else | |
5754 private_debug | |
5755 = lang_hooks.decls.omp_private_debug_clause (decl, | |
5756 !!(flags & GOVD_SHARED)); | |
5757 if (private_debug) | |
5758 code = OMP_CLAUSE_PRIVATE; | |
5759 else if (flags & GOVD_SHARED) | |
5760 { | |
5761 if (is_global_var (decl)) | |
5762 { | |
5763 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context; | |
5764 while (ctx != NULL) | |
5765 { | |
5766 splay_tree_node on | |
5767 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl); | |
5768 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE | |
5769 | GOVD_PRIVATE | GOVD_REDUCTION)) != 0) | |
5770 break; | |
5771 ctx = ctx->outer_context; | |
5772 } | |
5773 if (ctx == NULL) | |
5774 return 0; | |
5775 } | |
5776 code = OMP_CLAUSE_SHARED; | |
5777 } | |
5778 else if (flags & GOVD_PRIVATE) | |
5779 code = OMP_CLAUSE_PRIVATE; | |
5780 else if (flags & GOVD_FIRSTPRIVATE) | |
5781 code = OMP_CLAUSE_FIRSTPRIVATE; | |
5782 else | |
5783 gcc_unreachable (); | |
5784 | |
5785 clause = build_omp_clause (input_location, code); | |
5786 OMP_CLAUSE_DECL (clause) = decl; | |
5787 OMP_CLAUSE_CHAIN (clause) = *list_p; | |
5788 if (private_debug) | |
5789 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1; | |
5790 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF)) | |
5791 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1; | |
5792 *list_p = clause; | |
5793 lang_hooks.decls.omp_finish_clause (clause); | |
5794 | |
5795 return 0; | |
5796 } | |
5797 | |
/* Post-process the clause list *LIST_P after the region body has been
   gimplified: drop explicit clauses for variables never seen, demote
   debug-private ones, fix up LASTPRIVATE_FIRSTPRIVATE, append implicit
   clauses, and pop the current OpenMP context.  */

static void
gimplify_adjust_omp_clauses (tree *list_p)
{
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  tree c, decl;

  while ((c = *list_p) != NULL)
    {
      splay_tree_node n;
      bool remove = false;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	  /* Drop the clause if the decl was never actually used within
	     the region (omp_add_variable guarantees an entry exists).  */
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  remove = !(n->value & GOVD_SEEN);
	  if (! remove)
	    {
	      bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
	      /* Rewrite the clause to a debug-only PRIVATE when the
		 front end says the decl is private for debug purposes.  */
	      if ((n->value & GOVD_DEBUG_PRIVATE)
		  || lang_hooks.decls.omp_private_debug_clause (decl, shared))
		{
		  gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
			      || ((n->value & GOVD_DATA_SHARE_CLASS)
				  == GOVD_PRIVATE));
		  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
		  OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
		}
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
	     accurately reflect the presence of a FIRSTPRIVATE clause.  */
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	    = (n->value & GOVD_FIRSTPRIVATE) != 0;
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_COLLAPSE:
	  /* Nothing to adjust for these.  */
	  break;

	default:
	  gcc_unreachable ();
	}

      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  /* Add in any implicit data sharing.  */
  splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);

  gimplify_omp_ctxp = ctx->outer_context;
  delete_omp_context (ctx);
}
5870 | |
5871 /* Gimplify the contents of an OMP_PARALLEL statement. This involves | |
5872 gimplification of the body, as well as scanning the body for used | |
5873 variables. We need to do this scan now, because variable-sized | |
5874 decls will be decomposed during gimplification. */ | |
5875 | |
5876 static void | |
5877 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p) | |
5878 { | |
5879 tree expr = *expr_p; | |
5880 gimple g; | |
5881 gimple_seq body = NULL; | |
5882 struct gimplify_ctx gctx; | |
5883 | |
5884 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p, | |
5885 OMP_PARALLEL_COMBINED (expr) | |
5886 ? ORT_COMBINED_PARALLEL | |
5887 : ORT_PARALLEL); | |
5888 | |
5889 push_gimplify_context (&gctx); | |
5890 | |
5891 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body); | |
5892 if (gimple_code (g) == GIMPLE_BIND) | |
5893 pop_gimplify_context (g); | |
5894 else | |
5895 pop_gimplify_context (NULL); | |
5896 | |
5897 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr)); | |
5898 | |
5899 g = gimple_build_omp_parallel (body, | |
5900 OMP_PARALLEL_CLAUSES (expr), | |
5901 NULL_TREE, NULL_TREE); | |
5902 if (OMP_PARALLEL_COMBINED (expr)) | |
5903 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED); | |
5904 gimplify_seq_add_stmt (pre_p, g); | |
5905 *expr_p = NULL_TREE; | |
5906 } | |
5907 | |
5908 /* Gimplify the contents of an OMP_TASK statement. This involves | |
5909 gimplification of the body, as well as scanning the body for used | |
5910 variables. We need to do this scan now, because variable-sized | |
5911 decls will be decomposed during gimplification. */ | |
5912 | |
5913 static void | |
5914 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p) | |
5915 { | |
5916 tree expr = *expr_p; | |
5917 gimple g; | |
5918 gimple_seq body = NULL; | |
5919 struct gimplify_ctx gctx; | |
5920 | |
5921 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p, ORT_TASK); | |
5922 | |
5923 push_gimplify_context (&gctx); | |
5924 | |
5925 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body); | |
5926 if (gimple_code (g) == GIMPLE_BIND) | |
5927 pop_gimplify_context (g); | |
5928 else | |
5929 pop_gimplify_context (NULL); | |
5930 | |
5931 gimplify_adjust_omp_clauses (&OMP_TASK_CLAUSES (expr)); | |
5932 | |
5933 g = gimple_build_omp_task (body, | |
5934 OMP_TASK_CLAUSES (expr), | |
5935 NULL_TREE, NULL_TREE, | |
5936 NULL_TREE, NULL_TREE, NULL_TREE); | |
5937 gimplify_seq_add_stmt (pre_p, g); | |
5938 *expr_p = NULL_TREE; | |
5939 } | |
5940 | |
5941 /* Gimplify the gross structure of an OMP_FOR statement. */ | |
5942 | |
static enum gimplify_status
gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt, decl, var, t;
  enum gimplify_status ret = GS_ALL_DONE;
  enum gimplify_status tret;
  gimple gfor;
  gimple_seq for_body, for_pre_body;
  int i;

  for_stmt = *expr_p;

  gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
			     ORT_WORKSHARE);

  /* Handle OMP_FOR_INIT.  */
  for_pre_body = NULL;
  gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
  OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;

  for_body = gimple_seq_alloc ();
  /* INIT, COND and INCR are parallel TREE_VECs, one slot per collapsed
     loop level.  */
  gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
	      == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
  gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
	      == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
      decl = TREE_OPERAND (t, 0);
      gcc_assert (DECL_P (decl));
      gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
		  || POINTER_TYPE_P (TREE_TYPE (decl)));

      /* Make sure the iteration variable is private.  */
      if (omp_is_private (gimplify_omp_ctxp, decl))
	omp_notice_variable (gimplify_omp_ctxp, decl, true);
      else
	omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);

      /* If DECL is not a gimple register, create a temporary variable to act
	 as an iteration counter.  This is valid, since DECL cannot be
	 modified in the body of the loop.  */
      if (!is_gimple_reg (decl))
	{
	  var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
	  TREE_OPERAND (t, 0) = var;

	  /* Copy the counter back into DECL at the top of the body.  */
	  gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));

	  omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
	}
      else
	var = decl;

      /* Gimplify the initial value into the pre-body.  */
      tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
			    is_gimple_val, fb_rvalue);
      ret = MIN (ret, tret);
      if (ret == GS_ERROR)
	return ret;

      /* Handle OMP_FOR_COND.  */
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gcc_assert (COMPARISON_CLASS_P (t));
      gcc_assert (TREE_OPERAND (t, 0) == decl);

      tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
			    is_gimple_val, fb_rvalue);
      ret = MIN (ret, tret);

      /* Handle OMP_FOR_INCR.  Canonicalize all increments into the form
	 VAR = VAR +/- STEP.  */
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      switch (TREE_CODE (t))
	{
	case PREINCREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  t = build_int_cst (TREE_TYPE (decl), 1);
	  t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
	  TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
	  break;

	case PREDECREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	  /* Decrement becomes addition of -1.  */
	  t = build_int_cst (TREE_TYPE (decl), -1);
	  t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
	  TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
	  break;

	case MODIFY_EXPR:
	  gcc_assert (TREE_OPERAND (t, 0) == decl);
	  TREE_OPERAND (t, 0) = var;

	  t = TREE_OPERAND (t, 1);
	  switch (TREE_CODE (t))
	    {
	    case PLUS_EXPR:
	      /* Commute STEP + VAR into VAR + STEP.  */
	      if (TREE_OPERAND (t, 1) == decl)
		{
		  TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
		  TREE_OPERAND (t, 0) = var;
		  break;
		}

	      /* Fallthru.  */
	    case MINUS_EXPR:
	    case POINTER_PLUS_EXPR:
	      gcc_assert (TREE_OPERAND (t, 0) == decl);
	      TREE_OPERAND (t, 0) = var;
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  /* Gimplify the step expression into the pre-body.  */
	  tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
				is_gimple_val, fb_rvalue);
	  ret = MIN (ret, tret);
	  break;

	default:
	  gcc_unreachable ();
	}

      /* If a temporary counter replaced DECL (or with collapsed loops),
	 a lastprivate DECL must still get the final value: seed the
	 clause's GIMPLE_SEQ with one extra DECL = DECL +/- STEP.  */
      if (var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
	{
	  tree c;
	  for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		&& OMP_CLAUSE_DECL (c) == decl
		&& OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
	      {
		t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
		gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
		gcc_assert (TREE_OPERAND (t, 0) == var);
		t = TREE_OPERAND (t, 1);
		gcc_assert (TREE_CODE (t) == PLUS_EXPR
			    || TREE_CODE (t) == MINUS_EXPR
			    || TREE_CODE (t) == POINTER_PLUS_EXPR);
		gcc_assert (TREE_OPERAND (t, 0) == var);
		t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
			    TREE_OPERAND (t, 1));
		gimplify_assign (decl, t,
				 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      }
	}
    }

  gimplify_and_add (OMP_FOR_BODY (for_stmt), &for_body);

  gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));

  gfor = gimple_build_omp_for (for_body, OMP_FOR_CLAUSES (for_stmt),
			       TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
			       for_pre_body);

  /* Transfer the per-level index/initial/final/incr data from the
     OMP_FOR tree vectors into the GIMPLE_OMP_FOR statement.  */
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
      gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
      gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
    }

  gimplify_seq_add_stmt (pre_p, gfor);
  return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
}
6114 | |
6115 /* Gimplify the gross structure of other OpenMP worksharing constructs. | |
6116 In particular, OMP_SECTIONS and OMP_SINGLE. */ | |
6117 | |
6118 static void | |
6119 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p) | |
6120 { | |
6121 tree expr = *expr_p; | |
6122 gimple stmt; | |
6123 gimple_seq body = NULL; | |
6124 | |
6125 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ORT_WORKSHARE); | |
6126 gimplify_and_add (OMP_BODY (expr), &body); | |
6127 gimplify_adjust_omp_clauses (&OMP_CLAUSES (expr)); | |
6128 | |
6129 if (TREE_CODE (expr) == OMP_SECTIONS) | |
6130 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr)); | |
6131 else if (TREE_CODE (expr) == OMP_SINGLE) | |
6132 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr)); | |
6133 else | |
6134 gcc_unreachable (); | |
6135 | |
6136 gimplify_seq_add_stmt (pre_p, stmt); | |
6137 } | |
6138 | |
6139 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have | |
6140 stabilized the lhs of the atomic operation as *ADDR. Return true if | |
6141 EXPR is this stabilized form. */ | |
6142 | |
6143 static bool | |
6144 goa_lhs_expr_p (tree expr, tree addr) | |
6145 { | |
6146 /* Also include casts to other type variants. The C front end is fond | |
6147 of adding these for e.g. volatile variables. This is like | |
6148 STRIP_TYPE_NOPS but includes the main variant lookup. */ | |
6149 STRIP_USELESS_TYPE_CONVERSION (expr); | |
6150 | |
6151 if (TREE_CODE (expr) == INDIRECT_REF) | |
6152 { | |
6153 expr = TREE_OPERAND (expr, 0); | |
6154 while (expr != addr | |
6155 && (CONVERT_EXPR_P (expr) | |
6156 || TREE_CODE (expr) == NON_LVALUE_EXPR) | |
6157 && TREE_CODE (expr) == TREE_CODE (addr) | |
6158 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr))) | |
6159 { | |
6160 expr = TREE_OPERAND (expr, 0); | |
6161 addr = TREE_OPERAND (addr, 0); | |
6162 } | |
6163 if (expr == addr) | |
6164 return true; | |
6165 return (TREE_CODE (addr) == ADDR_EXPR | |
6166 && TREE_CODE (expr) == ADDR_EXPR | |
6167 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0)); | |
6168 } | |
6169 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0)) | |
6170 return true; | |
6171 return false; | |
6172 } | |
6173 | |
6174 /* Walk *EXPR_P and replace | |
6175 appearances of *LHS_ADDR with LHS_VAR. If an expression does not involve | |
6176 the lhs, evaluate it into a temporary. Return 1 if the lhs appeared as | |
6177 a subexpression, 0 if it did not, or -1 if an error was encountered. */ | |
6178 | |
6179 static int | |
6180 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr, | |
6181 tree lhs_var) | |
6182 { | |
6183 tree expr = *expr_p; | |
6184 int saw_lhs; | |
6185 | |
6186 if (goa_lhs_expr_p (expr, lhs_addr)) | |
6187 { | |
6188 *expr_p = lhs_var; | |
6189 return 1; | |
6190 } | |
6191 if (is_gimple_val (expr)) | |
6192 return 0; | |
6193 | |
6194 saw_lhs = 0; | |
6195 switch (TREE_CODE_CLASS (TREE_CODE (expr))) | |
6196 { | |
6197 case tcc_binary: | |
6198 case tcc_comparison: | |
6199 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr, | |
6200 lhs_var); | |
6201 case tcc_unary: | |
6202 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr, | |
6203 lhs_var); | |
6204 break; | |
6205 case tcc_expression: | |
6206 switch (TREE_CODE (expr)) | |
6207 { | |
6208 case TRUTH_ANDIF_EXPR: | |
6209 case TRUTH_ORIF_EXPR: | |
6210 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, | |
6211 lhs_addr, lhs_var); | |
6212 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, | |
6213 lhs_addr, lhs_var); | |
6214 break; | |
6215 default: | |
6216 break; | |
6217 } | |
6218 break; | |
6219 default: | |
6220 break; | |
6221 } | |
6222 | |
6223 if (saw_lhs == 0) | |
6224 { | |
6225 enum gimplify_status gs; | |
6226 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue); | |
6227 if (gs != GS_ALL_DONE) | |
6228 saw_lhs = -1; | |
6229 } | |
6230 | |
6231 return saw_lhs; | |
6232 } | |
6233 | |
6234 | |
6235 /* Gimplify an OMP_ATOMIC statement. */ | |
6236 | |
6237 static enum gimplify_status | |
6238 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p) | |
6239 { | |
6240 tree addr = TREE_OPERAND (*expr_p, 0); | |
6241 tree rhs = TREE_OPERAND (*expr_p, 1); | |
6242 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr))); | |
6243 tree tmp_load; | |
6244 | |
6245 tmp_load = create_tmp_var (type, NULL); | |
6246 if (TREE_CODE (type) == COMPLEX_TYPE || TREE_CODE (type) == VECTOR_TYPE) | |
6247 DECL_GIMPLE_REG_P (tmp_load) = 1; | |
6248 if (goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0) | |
6249 return GS_ERROR; | |
6250 | |
6251 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue) | |
6252 != GS_ALL_DONE) | |
6253 return GS_ERROR; | |
6254 | |
6255 gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_load (tmp_load, addr)); | |
6256 if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue) | |
6257 != GS_ALL_DONE) | |
6258 return GS_ERROR; | |
6259 gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_store (rhs)); | |
6260 *expr_p = NULL; | |
6261 | |
6262 return GS_ALL_DONE; | |
6263 } | |
6264 | |
6265 | |
6266 /* Converts the GENERIC expression tree *EXPR_P to GIMPLE. If the | |
6267 expression produces a value to be used as an operand inside a GIMPLE | |
6268 statement, the value will be stored back in *EXPR_P. This value will | |
6269 be a tree of class tcc_declaration, tcc_constant, tcc_reference or | |
6270 an SSA_NAME. The corresponding sequence of GIMPLE statements is | |
6271 emitted in PRE_P and POST_P. | |
6272 | |
6273 Additionally, this process may overwrite parts of the input | |
6274 expression during gimplification. Ideally, it should be | |
6275 possible to do non-destructive gimplification. | |
6276 | |
6277 EXPR_P points to the GENERIC expression to convert to GIMPLE. If | |
6278 the expression needs to evaluate to a value to be used as | |
6279 an operand in a GIMPLE statement, this value will be stored in | |
6280 *EXPR_P on exit. This happens when the caller specifies one | |
6281 of fb_lvalue or fb_rvalue fallback flags. | |
6282 | |
6283 PRE_P will contain the sequence of GIMPLE statements corresponding | |
6284 to the evaluation of EXPR and all the side-effects that must | |
6285 be executed before the main expression. On exit, the last | |
6286 statement of PRE_P is the core statement being gimplified. For | |
6287 instance, when gimplifying 'if (++a)' the last statement in | |
6288 PRE_P will be 'if (t.1)' where t.1 is the result of | |
6289 pre-incrementing 'a'. | |
6290 | |
6291 POST_P will contain the sequence of GIMPLE statements corresponding | |
6292 to the evaluation of all the side-effects that must be executed | |
6293 after the main expression. If this is NULL, the post | |
6294 side-effects are stored at the end of PRE_P. | |
6295 | |
6296 The reason why the output is split in two is to handle post | |
6297 side-effects explicitly. In some cases, an expression may have | |
6298 inner and outer post side-effects which need to be emitted in | |
6299 an order different from the one given by the recursive | |
6300 traversal. For instance, for the expression (*p--)++ the post | |
6301 side-effects of '--' must actually occur *after* the post | |
6302 side-effects of '++'. However, gimplification will first visit | |
6303 the inner expression, so if a separate POST sequence was not | |
6304 used, the resulting sequence would be: | |
6305 | |
6306 1 t.1 = *p | |
6307 2 p = p - 1 | |
6308 3 t.2 = t.1 + 1 | |
6309 4 *p = t.2 | |
6310 | |
6311 However, the post-decrement operation in line #2 must not be | |
6312 evaluated until after the store to *p at line #4, so the | |
6313 correct sequence should be: | |
6314 | |
6315 1 t.1 = *p | |
6316 2 t.2 = t.1 + 1 | |
6317 3 *p = t.2 | |
6318 4 p = p - 1 | |
6319 | |
6320 So, by specifying a separate post queue, it is possible | |
6321 to emit the post side-effects in the correct order. | |
6322 If POST_P is NULL, an internal queue will be used. Before | |
6323 returning to the caller, the sequence POST_P is appended to | |
6324 the main output sequence PRE_P. | |
6325 | |
6326 GIMPLE_TEST_F points to a function that takes a tree T and | |
6327 returns nonzero if T is in the GIMPLE form requested by the | |
6328 caller. The GIMPLE predicates are in tree-gimple.c. | |
6329 | |
6330 FALLBACK tells the function what sort of a temporary we want if | |
6331 gimplification cannot produce an expression that complies with | |
6332 GIMPLE_TEST_F. | |
6333 | |
6334 fb_none means that no temporary should be generated | |
6335 fb_rvalue means that an rvalue is OK to generate | |
6336 fb_lvalue means that an lvalue is OK to generate | |
6337 fb_either means that either is OK, but an lvalue is preferable. | |
6338 fb_mayfail means that gimplification may fail (in which case | |
6339 GS_ERROR will be returned) | |
6340 | |
6341 The return value is either GS_ERROR or GS_ALL_DONE, since this | |
6342 function iterates until EXPR is completely gimplified or an error | |
6343 occurs. */ | |
6344 | |
6345 enum gimplify_status | |
6346 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p, | |
6347 bool (*gimple_test_f) (tree), fallback_t fallback) | |
6348 { | |
6349 tree tmp; | |
6350 gimple_seq internal_pre = NULL; | |
6351 gimple_seq internal_post = NULL; | |
6352 tree save_expr; | |
6353 bool is_statement; | |
6354 location_t saved_location; | |
6355 enum gimplify_status ret; | |
6356 gimple_stmt_iterator pre_last_gsi, post_last_gsi; | |
6357 | |
6358 save_expr = *expr_p; | |
6359 if (save_expr == NULL_TREE) | |
6360 return GS_ALL_DONE; | |
6361 | |
6362 /* If we are gimplifying a top-level statement, PRE_P must be valid. */ | |
6363 is_statement = gimple_test_f == is_gimple_stmt; | |
6364 if (is_statement) | |
6365 gcc_assert (pre_p); | |
6366 | |
6367 /* Consistency checks. */ | |
6368 if (gimple_test_f == is_gimple_reg) | |
6369 gcc_assert (fallback & (fb_rvalue | fb_lvalue)); | |
6370 else if (gimple_test_f == is_gimple_val | |
6371 || gimple_test_f == is_gimple_call_addr | |
6372 || gimple_test_f == is_gimple_condexpr | |
6373 || gimple_test_f == is_gimple_mem_rhs | |
6374 || gimple_test_f == is_gimple_mem_rhs_or_call | |
6375 || gimple_test_f == is_gimple_reg_rhs | |
6376 || gimple_test_f == is_gimple_reg_rhs_or_call | |
6377 || gimple_test_f == is_gimple_asm_val) | |
6378 gcc_assert (fallback & fb_rvalue); | |
6379 else if (gimple_test_f == is_gimple_min_lval | |
6380 || gimple_test_f == is_gimple_lvalue) | |
6381 gcc_assert (fallback & fb_lvalue); | |
6382 else if (gimple_test_f == is_gimple_addressable) | |
6383 gcc_assert (fallback & fb_either); | |
6384 else if (gimple_test_f == is_gimple_stmt) | |
6385 gcc_assert (fallback == fb_none); | |
6386 else | |
6387 { | |
6388 /* We should have recognized the GIMPLE_TEST_F predicate to | |
6389 know what kind of fallback to use in case a temporary is | |
6390 needed to hold the value or address of *EXPR_P. */ | |
6391 gcc_unreachable (); | |
6392 } | |
6393 | |
6394 /* We used to check the predicate here and return immediately if it | |
6395 succeeds. This is wrong; the design is for gimplification to be | |
6396 idempotent, and for the predicates to only test for valid forms, not | |
6397 whether they are fully simplified. */ | |
6398 if (pre_p == NULL) | |
6399 pre_p = &internal_pre; | |
6400 | |
6401 if (post_p == NULL) | |
6402 post_p = &internal_post; | |
6403 | |
6404 /* Remember the last statements added to PRE_P and POST_P. Every | |
6405 new statement added by the gimplification helpers needs to be | |
6406 annotated with location information. To centralize the | |
6407 responsibility, we remember the last statement that had been | |
6408 added to both queues before gimplifying *EXPR_P. If | |
6409 gimplification produces new statements in PRE_P and POST_P, those | |
6410 statements will be annotated with the same location information | |
6411 as *EXPR_P. */ | |
6412 pre_last_gsi = gsi_last (*pre_p); | |
6413 post_last_gsi = gsi_last (*post_p); | |
6414 | |
6415 saved_location = input_location; | |
6416 if (save_expr != error_mark_node | |
6417 && EXPR_HAS_LOCATION (*expr_p)) | |
6418 input_location = EXPR_LOCATION (*expr_p); | |
6419 | |
6420 /* Loop over the specific gimplifiers until the toplevel node | |
6421 remains the same. */ | |
6422 do | |
6423 { | |
6424 /* Strip away as many useless type conversions as possible | |
6425 at the toplevel. */ | |
6426 STRIP_USELESS_TYPE_CONVERSION (*expr_p); | |
6427 | |
6428 /* Remember the expr. */ | |
6429 save_expr = *expr_p; | |
6430 | |
6431 /* Die, die, die, my darling. */ | |
6432 if (save_expr == error_mark_node | |
6433 || (TREE_TYPE (save_expr) | |
6434 && TREE_TYPE (save_expr) == error_mark_node)) | |
6435 { | |
6436 ret = GS_ERROR; | |
6437 break; | |
6438 } | |
6439 | |
6440 /* Do any language-specific gimplification. */ | |
6441 ret = ((enum gimplify_status) | |
6442 lang_hooks.gimplify_expr (expr_p, pre_p, post_p)); | |
6443 if (ret == GS_OK) | |
6444 { | |
6445 if (*expr_p == NULL_TREE) | |
6446 break; | |
6447 if (*expr_p != save_expr) | |
6448 continue; | |
6449 } | |
6450 else if (ret != GS_UNHANDLED) | |
6451 break; | |
6452 | |
6453 ret = GS_OK; | |
6454 switch (TREE_CODE (*expr_p)) | |
6455 { | |
6456 /* First deal with the special cases. */ | |
6457 | |
6458 case POSTINCREMENT_EXPR: | |
6459 case POSTDECREMENT_EXPR: | |
6460 case PREINCREMENT_EXPR: | |
6461 case PREDECREMENT_EXPR: | |
6462 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p, | |
6463 fallback != fb_none); | |
6464 break; | |
6465 | |
6466 case ARRAY_REF: | |
6467 case ARRAY_RANGE_REF: | |
6468 case REALPART_EXPR: | |
6469 case IMAGPART_EXPR: | |
6470 case COMPONENT_REF: | |
6471 case VIEW_CONVERT_EXPR: | |
6472 ret = gimplify_compound_lval (expr_p, pre_p, post_p, | |
6473 fallback ? fallback : fb_rvalue); | |
6474 break; | |
6475 | |
6476 case COND_EXPR: | |
6477 ret = gimplify_cond_expr (expr_p, pre_p, fallback); | |
6478 | |
6479 /* C99 code may assign to an array in a structure value of a | |
6480 conditional expression, and this has undefined behavior | |
6481 only on execution, so create a temporary if an lvalue is | |
6482 required. */ | |
6483 if (fallback == fb_lvalue) | |
6484 { | |
6485 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p); | |
6486 mark_addressable (*expr_p); | |
6487 } | |
6488 break; | |
6489 | |
6490 case CALL_EXPR: | |
6491 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none); | |
6492 | |
6493 /* C99 code may assign to an array in a structure returned | |
6494 from a function, and this has undefined behavior only on | |
6495 execution, so create a temporary if an lvalue is | |
6496 required. */ | |
6497 if (fallback == fb_lvalue) | |
6498 { | |
6499 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p); | |
6500 mark_addressable (*expr_p); | |
6501 } | |
6502 break; | |
6503 | |
6504 case TREE_LIST: | |
6505 gcc_unreachable (); | |
6506 | |
6507 case COMPOUND_EXPR: | |
6508 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none); | |
6509 break; | |
6510 | |
6511 case COMPOUND_LITERAL_EXPR: | |
6512 ret = gimplify_compound_literal_expr (expr_p, pre_p); | |
6513 break; | |
6514 | |
6515 case MODIFY_EXPR: | |
6516 case INIT_EXPR: | |
6517 ret = gimplify_modify_expr (expr_p, pre_p, post_p, | |
6518 fallback != fb_none); | |
6519 break; | |
6520 | |
6521 case TRUTH_ANDIF_EXPR: | |
6522 case TRUTH_ORIF_EXPR: | |
6523 /* Pass the source location of the outer expression. */ | |
6524 ret = gimplify_boolean_expr (expr_p, saved_location); | |
6525 break; | |
6526 | |
6527 case TRUTH_NOT_EXPR: | |
6528 if (TREE_CODE (TREE_TYPE (*expr_p)) != BOOLEAN_TYPE) | |
6529 { | |
6530 tree type = TREE_TYPE (*expr_p); | |
6531 *expr_p = fold_convert (type, gimple_boolify (*expr_p)); | |
6532 ret = GS_OK; | |
6533 break; | |
6534 } | |
6535 | |
6536 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, | |
6537 is_gimple_val, fb_rvalue); | |
6538 recalculate_side_effects (*expr_p); | |
6539 break; | |
6540 | |
6541 case ADDR_EXPR: | |
6542 ret = gimplify_addr_expr (expr_p, pre_p, post_p); | |
6543 break; | |
6544 | |
6545 case VA_ARG_EXPR: | |
6546 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p); | |
6547 break; | |
6548 | |
6549 CASE_CONVERT: | |
6550 if (IS_EMPTY_STMT (*expr_p)) | |
6551 { | |
6552 ret = GS_ALL_DONE; | |
6553 break; | |
6554 } | |
6555 | |
6556 if (VOID_TYPE_P (TREE_TYPE (*expr_p)) | |
6557 || fallback == fb_none) | |
6558 { | |
6559 /* Just strip a conversion to void (or in void context) and | |
6560 try again. */ | |
6561 *expr_p = TREE_OPERAND (*expr_p, 0); | |
6562 break; | |
6563 } | |
6564 | |
6565 ret = gimplify_conversion (expr_p); | |
6566 if (ret == GS_ERROR) | |
6567 break; | |
6568 if (*expr_p != save_expr) | |
6569 break; | |
6570 /* FALLTHRU */ | |
6571 | |
6572 case FIX_TRUNC_EXPR: | |
6573 /* unary_expr: ... | '(' cast ')' val | ... */ | |
6574 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, | |
6575 is_gimple_val, fb_rvalue); | |
6576 recalculate_side_effects (*expr_p); | |
6577 break; | |
6578 | |
6579 case INDIRECT_REF: | |
6580 *expr_p = fold_indirect_ref_loc (input_location, *expr_p); | |
6581 if (*expr_p != save_expr) | |
6582 break; | |
6583 /* else fall through. */ | |
6584 case ALIGN_INDIRECT_REF: | |
6585 case MISALIGNED_INDIRECT_REF: | |
6586 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, | |
6587 is_gimple_reg, fb_rvalue); | |
6588 recalculate_side_effects (*expr_p); | |
6589 break; | |
6590 | |
6591 /* Constants need not be gimplified. */ | |
6592 case INTEGER_CST: | |
6593 case REAL_CST: | |
6594 case FIXED_CST: | |
6595 case STRING_CST: | |
6596 case COMPLEX_CST: | |
6597 case VECTOR_CST: | |
6598 ret = GS_ALL_DONE; | |
6599 break; | |
6600 | |
6601 case CONST_DECL: | |
6602 /* If we require an lvalue, such as for ADDR_EXPR, retain the | |
6603 CONST_DECL node. Otherwise the decl is replaceable by its | |
6604 value. */ | |
6605 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */ | |
6606 if (fallback & fb_lvalue) | |
6607 ret = GS_ALL_DONE; | |
6608 else | |
6609 *expr_p = DECL_INITIAL (*expr_p); | |
6610 break; | |
6611 | |
6612 case DECL_EXPR: | |
6613 ret = gimplify_decl_expr (expr_p, pre_p); | |
6614 break; | |
6615 | |
6616 case BIND_EXPR: | |
6617 ret = gimplify_bind_expr (expr_p, pre_p); | |
6618 break; | |
6619 | |
6620 case LOOP_EXPR: | |
6621 ret = gimplify_loop_expr (expr_p, pre_p); | |
6622 break; | |
6623 | |
6624 case SWITCH_EXPR: | |
6625 ret = gimplify_switch_expr (expr_p, pre_p); | |
6626 break; | |
6627 | |
6628 case EXIT_EXPR: | |
6629 ret = gimplify_exit_expr (expr_p); | |
6630 break; | |
6631 | |
6632 case GOTO_EXPR: | |
6633 /* If the target is not LABEL, then it is a computed jump | |
6634 and the target needs to be gimplified. */ | |
6635 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL) | |
6636 { | |
6637 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p, | |
6638 NULL, is_gimple_val, fb_rvalue); | |
6639 if (ret == GS_ERROR) | |
6640 break; | |
6641 } | |
6642 gimplify_seq_add_stmt (pre_p, | |
6643 gimple_build_goto (GOTO_DESTINATION (*expr_p))); | |
6644 break; | |
6645 | |
6646 case PREDICT_EXPR: | |
6647 gimplify_seq_add_stmt (pre_p, | |
6648 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p), | |
6649 PREDICT_EXPR_OUTCOME (*expr_p))); | |
6650 ret = GS_ALL_DONE; | |
6651 break; | |
6652 | |
6653 case LABEL_EXPR: | |
6654 ret = GS_ALL_DONE; | |
6655 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p)) | |
6656 == current_function_decl); | |
6657 gimplify_seq_add_stmt (pre_p, | |
6658 gimple_build_label (LABEL_EXPR_LABEL (*expr_p))); | |
6659 break; | |
6660 | |
6661 case CASE_LABEL_EXPR: | |
6662 ret = gimplify_case_label_expr (expr_p, pre_p); | |
6663 break; | |
6664 | |
6665 case RETURN_EXPR: | |
6666 ret = gimplify_return_expr (*expr_p, pre_p); | |
6667 break; | |
6668 | |
6669 case CONSTRUCTOR: | |
6670 /* Don't reduce this in place; let gimplify_init_constructor work its | |
6671 magic. But if we're just elaborating this for side effects, just | |
6672 gimplify any element that has side-effects. */ | |
6673 if (fallback == fb_none) | |
6674 { | |
6675 unsigned HOST_WIDE_INT ix; | |
6676 constructor_elt *ce; | |
6677 tree temp = NULL_TREE; | |
6678 for (ix = 0; | |
6679 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (*expr_p), | |
6680 ix, ce); | |
6681 ix++) | |
6682 if (TREE_SIDE_EFFECTS (ce->value)) | |
6683 append_to_statement_list (ce->value, &temp); | |
6684 | |
6685 *expr_p = temp; | |
6686 ret = GS_OK; | |
6687 } | |
6688 /* C99 code may assign to an array in a constructed | |
6689 structure or union, and this has undefined behavior only | |
6690 on execution, so create a temporary if an lvalue is | |
6691 required. */ | |
6692 else if (fallback == fb_lvalue) | |
6693 { | |
6694 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p); | |
6695 mark_addressable (*expr_p); | |
6696 } | |
6697 else | |
6698 ret = GS_ALL_DONE; | |
6699 break; | |
6700 | |
6701 /* The following are special cases that are not handled by the | |
6702 original GIMPLE grammar. */ | |
6703 | |
6704 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and | |
6705 eliminated. */ | |
6706 case SAVE_EXPR: | |
6707 ret = gimplify_save_expr (expr_p, pre_p, post_p); | |
6708 break; | |
6709 | |
6710 case BIT_FIELD_REF: | |
6711 { | |
6712 enum gimplify_status r0, r1, r2; | |
6713 | |
6714 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, | |
6715 post_p, is_gimple_lvalue, fb_either); | |
6716 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, | |
6717 post_p, is_gimple_val, fb_rvalue); | |
6718 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p, | |
6719 post_p, is_gimple_val, fb_rvalue); | |
6720 recalculate_side_effects (*expr_p); | |
6721 | |
6722 ret = MIN (r0, MIN (r1, r2)); | |
6723 } | |
6724 break; | |
6725 | |
6726 case TARGET_MEM_REF: | |
6727 { | |
6728 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE; | |
6729 | |
6730 if (TMR_SYMBOL (*expr_p)) | |
6731 r0 = gimplify_expr (&TMR_SYMBOL (*expr_p), pre_p, | |
6732 post_p, is_gimple_lvalue, fb_either); | |
6733 else if (TMR_BASE (*expr_p)) | |
6734 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p, | |
6735 post_p, is_gimple_val, fb_either); | |
6736 if (TMR_INDEX (*expr_p)) | |
6737 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p, | |
6738 post_p, is_gimple_val, fb_rvalue); | |
6739 /* TMR_STEP and TMR_OFFSET are always integer constants. */ | |
6740 ret = MIN (r0, r1); | |
6741 } | |
6742 break; | |
6743 | |
6744 case NON_LVALUE_EXPR: | |
6745 /* This should have been stripped above. */ | |
6746 gcc_unreachable (); | |
6747 | |
6748 case ASM_EXPR: | |
6749 ret = gimplify_asm_expr (expr_p, pre_p, post_p); | |
6750 break; | |
6751 | |
6752 case TRY_FINALLY_EXPR: | |
6753 case TRY_CATCH_EXPR: | |
6754 { | |
6755 gimple_seq eval, cleanup; | |
6756 gimple try_; | |
6757 | |
6758 eval = cleanup = NULL; | |
6759 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval); | |
6760 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup); | |
6761 /* Don't create bogus GIMPLE_TRY with empty cleanup. */ | |
6762 if (gimple_seq_empty_p (cleanup)) | |
6763 { | |
6764 gimple_seq_add_seq (pre_p, eval); | |
6765 ret = GS_ALL_DONE; | |
6766 break; | |
6767 } | |
6768 try_ = gimple_build_try (eval, cleanup, | |
6769 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR | |
6770 ? GIMPLE_TRY_FINALLY | |
6771 : GIMPLE_TRY_CATCH); | |
6772 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR) | |
6773 gimple_try_set_catch_is_cleanup (try_, | |
6774 TRY_CATCH_IS_CLEANUP (*expr_p)); | |
6775 gimplify_seq_add_stmt (pre_p, try_); | |
6776 ret = GS_ALL_DONE; | |
6777 break; | |
6778 } | |
6779 | |
6780 case CLEANUP_POINT_EXPR: | |
6781 ret = gimplify_cleanup_point_expr (expr_p, pre_p); | |
6782 break; | |
6783 | |
6784 case TARGET_EXPR: | |
6785 ret = gimplify_target_expr (expr_p, pre_p, post_p); | |
6786 break; | |
6787 | |
6788 case CATCH_EXPR: | |
6789 { | |
6790 gimple c; | |
6791 gimple_seq handler = NULL; | |
6792 gimplify_and_add (CATCH_BODY (*expr_p), &handler); | |
6793 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler); | |
6794 gimplify_seq_add_stmt (pre_p, c); | |
6795 ret = GS_ALL_DONE; | |
6796 break; | |
6797 } | |
6798 | |
6799 case EH_FILTER_EXPR: | |
6800 { | |
6801 gimple ehf; | |
6802 gimple_seq failure = NULL; | |
6803 | |
6804 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure); | |
6805 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure); | |
6806 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p)); | |
6807 gimplify_seq_add_stmt (pre_p, ehf); | |
6808 ret = GS_ALL_DONE; | |
6809 break; | |
6810 } | |
6811 | |
6812 case OBJ_TYPE_REF: | |
6813 { | |
6814 enum gimplify_status r0, r1; | |
6815 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p, | |
6816 post_p, is_gimple_val, fb_rvalue); | |
6817 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p, | |
6818 post_p, is_gimple_val, fb_rvalue); | |
6819 TREE_SIDE_EFFECTS (*expr_p) = 0; | |
6820 ret = MIN (r0, r1); | |
6821 } | |
6822 break; | |
6823 | |
6824 case LABEL_DECL: | |
6825 /* We get here when taking the address of a label. We mark | |
6826 the label as "forced"; meaning it can never be removed and | |
6827 it is a potential target for any computed goto. */ | |
6828 FORCED_LABEL (*expr_p) = 1; | |
6829 ret = GS_ALL_DONE; | |
6830 break; | |
6831 | |
6832 case STATEMENT_LIST: | |
6833 ret = gimplify_statement_list (expr_p, pre_p); | |
6834 break; | |
6835 | |
6836 case WITH_SIZE_EXPR: | |
6837 { | |
6838 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, | |
6839 post_p == &internal_post ? NULL : post_p, | |
6840 gimple_test_f, fallback); | |
6841 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p, | |
6842 is_gimple_val, fb_rvalue); | |
6843 } | |
6844 break; | |
6845 | |
6846 case VAR_DECL: | |
6847 case PARM_DECL: | |
6848 ret = gimplify_var_or_parm_decl (expr_p); | |
6849 break; | |
6850 | |
6851 case RESULT_DECL: | |
6852 /* When within an OpenMP context, notice uses of variables. */ | |
6853 if (gimplify_omp_ctxp) | |
6854 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true); | |
6855 ret = GS_ALL_DONE; | |
6856 break; | |
6857 | |
6858 case SSA_NAME: | |
6859 /* Allow callbacks into the gimplifier during optimization. */ | |
6860 ret = GS_ALL_DONE; | |
6861 break; | |
6862 | |
6863 case OMP_PARALLEL: | |
6864 gimplify_omp_parallel (expr_p, pre_p); | |
6865 ret = GS_ALL_DONE; | |
6866 break; | |
6867 | |
6868 case OMP_TASK: | |
6869 gimplify_omp_task (expr_p, pre_p); | |
6870 ret = GS_ALL_DONE; | |
6871 break; | |
6872 | |
6873 case OMP_FOR: | |
6874 ret = gimplify_omp_for (expr_p, pre_p); | |
6875 break; | |
6876 | |
6877 case OMP_SECTIONS: | |
6878 case OMP_SINGLE: | |
6879 gimplify_omp_workshare (expr_p, pre_p); | |
6880 ret = GS_ALL_DONE; | |
6881 break; | |
6882 | |
6883 case OMP_SECTION: | |
6884 case OMP_MASTER: | |
6885 case OMP_ORDERED: | |
6886 case OMP_CRITICAL: | |
6887 { | |
6888 gimple_seq body = NULL; | |
6889 gimple g; | |
6890 | |
6891 gimplify_and_add (OMP_BODY (*expr_p), &body); | |
6892 switch (TREE_CODE (*expr_p)) | |
6893 { | |
6894 case OMP_SECTION: | |
6895 g = gimple_build_omp_section (body); | |
6896 break; | |
6897 case OMP_MASTER: | |
6898 g = gimple_build_omp_master (body); | |
6899 break; | |
6900 case OMP_ORDERED: | |
6901 g = gimple_build_omp_ordered (body); | |
6902 break; | |
6903 case OMP_CRITICAL: | |
6904 g = gimple_build_omp_critical (body, | |
6905 OMP_CRITICAL_NAME (*expr_p)); | |
6906 break; | |
6907 default: | |
6908 gcc_unreachable (); | |
6909 } | |
6910 gimplify_seq_add_stmt (pre_p, g); | |
6911 ret = GS_ALL_DONE; | |
6912 break; | |
6913 } | |
6914 | |
6915 case OMP_ATOMIC: | |
6916 ret = gimplify_omp_atomic (expr_p, pre_p); | |
6917 break; | |
6918 | |
6919 case POINTER_PLUS_EXPR: | |
6920 /* Convert ((type *)A)+offset into &A->field_of_type_and_offset. | |
6921 The second is gimple immediate saving a need for extra statement. | |
6922 */ | |
6923 if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST | |
6924 && (tmp = maybe_fold_offset_to_address | |
6925 (EXPR_LOCATION (*expr_p), | |
6926 TREE_OPERAND (*expr_p, 0), TREE_OPERAND (*expr_p, 1), | |
6927 TREE_TYPE (*expr_p)))) | |
6928 { | |
6929 *expr_p = tmp; | |
6930 break; | |
6931 } | |
6932 /* Convert (void *)&a + 4 into (void *)&a[1]. */ | |
6933 if (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == NOP_EXPR | |
6934 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST | |
6935 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p, | |
6936 0),0))) | |
6937 && (tmp = maybe_fold_offset_to_address | |
6938 (EXPR_LOCATION (*expr_p), | |
6939 TREE_OPERAND (TREE_OPERAND (*expr_p, 0), 0), | |
6940 TREE_OPERAND (*expr_p, 1), | |
6941 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p, 0), | |
6942 0))))) | |
6943 { | |
6944 *expr_p = fold_convert (TREE_TYPE (*expr_p), tmp); | |
6945 break; | |
6946 } | |
6947 /* FALLTHRU */ | |
6948 | |
6949 default: | |
6950 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p))) | |
6951 { | |
6952 case tcc_comparison: | |
6953 /* Handle comparison of objects of non scalar mode aggregates | |
6954 with a call to memcmp. It would be nice to only have to do | |
6955 this for variable-sized objects, but then we'd have to allow | |
6956 the same nest of reference nodes we allow for MODIFY_EXPR and | |
6957 that's too complex. | |
6958 | |
6959 Compare scalar mode aggregates as scalar mode values. Using | |
6960 memcmp for them would be very inefficient at best, and is | |
6961 plain wrong if bitfields are involved. */ | |
6962 { | |
6963 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1)); | |
6964 | |
6965 if (!AGGREGATE_TYPE_P (type)) | |
6966 goto expr_2; | |
6967 else if (TYPE_MODE (type) != BLKmode) | |
6968 ret = gimplify_scalar_mode_aggregate_compare (expr_p); | |
6969 else | |
6970 ret = gimplify_variable_sized_compare (expr_p); | |
6971 | |
6972 break; | |
6973 } | |
6974 | |
6975 /* If *EXPR_P does not need to be special-cased, handle it | |
6976 according to its class. */ | |
6977 case tcc_unary: | |
6978 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, | |
6979 post_p, is_gimple_val, fb_rvalue); | |
6980 break; | |
6981 | |
6982 case tcc_binary: | |
6983 expr_2: | |
6984 { | |
6985 enum gimplify_status r0, r1; | |
6986 | |
6987 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, | |
6988 post_p, is_gimple_val, fb_rvalue); | |
6989 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, | |
6990 post_p, is_gimple_val, fb_rvalue); | |
6991 | |
6992 ret = MIN (r0, r1); | |
6993 break; | |
6994 } | |
6995 | |
6996 case tcc_declaration: | |
6997 case tcc_constant: | |
6998 ret = GS_ALL_DONE; | |
6999 goto dont_recalculate; | |
7000 | |
7001 default: | |
7002 gcc_assert (TREE_CODE (*expr_p) == TRUTH_AND_EXPR | |
7003 || TREE_CODE (*expr_p) == TRUTH_OR_EXPR | |
7004 || TREE_CODE (*expr_p) == TRUTH_XOR_EXPR); | |
7005 goto expr_2; | |
7006 } | |
7007 | |
7008 recalculate_side_effects (*expr_p); | |
7009 | |
7010 dont_recalculate: | |
7011 break; | |
7012 } | |
7013 | |
7014 /* If we replaced *expr_p, gimplify again. */ | |
7015 if (ret == GS_OK && (*expr_p == NULL || *expr_p == save_expr)) | |
7016 ret = GS_ALL_DONE; | |
7017 } | |
7018 while (ret == GS_OK); | |
7019 | |
7020 /* If we encountered an error_mark somewhere nested inside, either | |
7021 stub out the statement or propagate the error back out. */ | |
7022 if (ret == GS_ERROR) | |
7023 { | |
7024 if (is_statement) | |
7025 *expr_p = NULL; | |
7026 goto out; | |
7027 } | |
7028 | |
7029 /* This was only valid as a return value from the langhook, which | |
7030 we handled. Make sure it doesn't escape from any other context. */ | |
7031 gcc_assert (ret != GS_UNHANDLED); | |
7032 | |
7033 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p)) | |
7034 { | |
7035 /* We aren't looking for a value, and we don't have a valid | |
7036 statement. If it doesn't have side-effects, throw it away. */ | |
7037 if (!TREE_SIDE_EFFECTS (*expr_p)) | |
7038 *expr_p = NULL; | |
7039 else if (!TREE_THIS_VOLATILE (*expr_p)) | |
7040 { | |
7041 /* This is probably a _REF that contains something nested that | |
7042 has side effects. Recurse through the operands to find it. */ | |
7043 enum tree_code code = TREE_CODE (*expr_p); | |
7044 | |
7045 switch (code) | |
7046 { | |
7047 case COMPONENT_REF: | |
7048 case REALPART_EXPR: | |
7049 case IMAGPART_EXPR: | |
7050 case VIEW_CONVERT_EXPR: | |
7051 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, | |
7052 gimple_test_f, fallback); | |
7053 break; | |
7054 | |
7055 case ARRAY_REF: | |
7056 case ARRAY_RANGE_REF: | |
7057 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, | |
7058 gimple_test_f, fallback); | |
7059 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p, | |
7060 gimple_test_f, fallback); | |
7061 break; | |
7062 | |
7063 default: | |
7064 /* Anything else with side-effects must be converted to | |
7065 a valid statement before we get here. */ | |
7066 gcc_unreachable (); | |
7067 } | |
7068 | |
7069 *expr_p = NULL; | |
7070 } | |
7071 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p)) | |
7072 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode) | |
7073 { | |
7074 /* Historically, the compiler has treated a bare reference | |
7075 to a non-BLKmode volatile lvalue as forcing a load. */ | |
7076 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p)); | |
7077 | |
7078 /* Normally, we do not want to create a temporary for a | |
7079 TREE_ADDRESSABLE type because such a type should not be | |
7080 copied by bitwise-assignment. However, we make an | |
7081 exception here, as all we are doing here is ensuring that | |
7082 we read the bytes that make up the type. We use | |
7083 create_tmp_var_raw because create_tmp_var will abort when | |
7084 given a TREE_ADDRESSABLE type. */ | |
7085 tree tmp = create_tmp_var_raw (type, "vol"); | |
7086 gimple_add_tmp_var (tmp); | |
7087 gimplify_assign (tmp, *expr_p, pre_p); | |
7088 *expr_p = NULL; | |
7089 } | |
7090 else | |
7091 /* We can't do anything useful with a volatile reference to | |
7092 an incomplete type, so just throw it away. Likewise for | |
7093 a BLKmode type, since any implicit inner load should | |
7094 already have been turned into an explicit one by the | |
7095 gimplification process. */ | |
7096 *expr_p = NULL; | |
7097 } | |
7098 | |
7099 /* If we are gimplifying at the statement level, we're done. Tack | |
7100 everything together and return. */ | |
7101 if (fallback == fb_none || is_statement) | |
7102 { | |
7103 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear | |
7104 it out for GC to reclaim it. */ | |
7105 *expr_p = NULL_TREE; | |
7106 | |
7107 if (!gimple_seq_empty_p (internal_pre) | |
7108 || !gimple_seq_empty_p (internal_post)) | |
7109 { | |
7110 gimplify_seq_add_seq (&internal_pre, internal_post); | |
7111 gimplify_seq_add_seq (pre_p, internal_pre); | |
7112 } | |
7113 | |
7114 /* The result of gimplifying *EXPR_P is going to be the last few | |
7115 statements in *PRE_P and *POST_P. Add location information | |
7116 to all the statements that were added by the gimplification | |
7117 helpers. */ | |
7118 if (!gimple_seq_empty_p (*pre_p)) | |
7119 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location); | |
7120 | |
7121 if (!gimple_seq_empty_p (*post_p)) | |
7122 annotate_all_with_location_after (*post_p, post_last_gsi, | |
7123 input_location); | |
7124 | |
7125 goto out; | |
7126 } | |
7127 | |
7128 #ifdef ENABLE_GIMPLE_CHECKING | |
7129 if (*expr_p) | |
7130 { | |
7131 enum tree_code code = TREE_CODE (*expr_p); | |
7132 /* These expressions should already be in gimple IR form. */ | |
7133 gcc_assert (code != MODIFY_EXPR | |
7134 && code != ASM_EXPR | |
7135 && code != BIND_EXPR | |
7136 && code != CATCH_EXPR | |
7137 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr) | |
7138 && code != EH_FILTER_EXPR | |
7139 && code != GOTO_EXPR | |
7140 && code != LABEL_EXPR | |
7141 && code != LOOP_EXPR | |
7142 && code != SWITCH_EXPR | |
7143 && code != TRY_FINALLY_EXPR | |
7144 && code != OMP_CRITICAL | |
7145 && code != OMP_FOR | |
7146 && code != OMP_MASTER | |
7147 && code != OMP_ORDERED | |
7148 && code != OMP_PARALLEL | |
7149 && code != OMP_SECTIONS | |
7150 && code != OMP_SECTION | |
7151 && code != OMP_SINGLE); | |
7152 } | |
7153 #endif | |
7154 | |
7155 /* Otherwise we're gimplifying a subexpression, so the resulting | |
7156 value is interesting. If it's a valid operand that matches | |
7157 GIMPLE_TEST_F, we're done. Unless we are handling some | |
7158 post-effects internally; if that's the case, we need to copy into | |
7159 a temporary before adding the post-effects to POST_P. */ | |
7160 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p)) | |
7161 goto out; | |
7162 | |
7163 /* Otherwise, we need to create a new temporary for the gimplified | |
7164 expression. */ | |
7165 | |
7166 /* We can't return an lvalue if we have an internal postqueue. The | |
7167 object the lvalue refers to would (probably) be modified by the | |
7168 postqueue; we need to copy the value out first, which means an | |
7169 rvalue. */ | |
7170 if ((fallback & fb_lvalue) | |
7171 && gimple_seq_empty_p (internal_post) | |
7172 && is_gimple_addressable (*expr_p)) | |
7173 { | |
7174 /* An lvalue will do. Take the address of the expression, store it | |
7175 in a temporary, and replace the expression with an INDIRECT_REF of | |
7176 that temporary. */ | |
7177 tmp = build_fold_addr_expr_loc (input_location, *expr_p); | |
7178 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue); | |
7179 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (tmp)), tmp); | |
7180 } | |
7181 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p)) | |
7182 { | |
7183 /* An rvalue will do. Assign the gimplified expression into a | |
7184 new temporary TMP and replace the original expression with | |
7185 TMP. First, make sure that the expression has a type so that | |
7186 it can be assigned into a temporary. */ | |
7187 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p))); | |
7188 | |
7189 if (!gimple_seq_empty_p (internal_post) || (fallback & fb_lvalue)) | |
7190 /* The postqueue might change the value of the expression between | |
7191 the initialization and use of the temporary, so we can't use a | |
7192 formal temp. FIXME do we care? */ | |
7193 { | |
7194 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p); | |
7195 if (TREE_CODE (TREE_TYPE (*expr_p)) == COMPLEX_TYPE | |
7196 || TREE_CODE (TREE_TYPE (*expr_p)) == VECTOR_TYPE) | |
7197 DECL_GIMPLE_REG_P (*expr_p) = 1; | |
7198 } | |
7199 else | |
7200 *expr_p = get_formal_tmp_var (*expr_p, pre_p); | |
7201 } | |
7202 else | |
7203 { | |
7204 #ifdef ENABLE_GIMPLE_CHECKING | |
7205 if (!(fallback & fb_mayfail)) | |
7206 { | |
7207 fprintf (stderr, "gimplification failed:\n"); | |
7208 print_generic_expr (stderr, *expr_p, 0); | |
7209 debug_tree (*expr_p); | |
7210 internal_error ("gimplification failed"); | |
7211 } | |
7212 #endif | |
7213 gcc_assert (fallback & fb_mayfail); | |
7214 | |
7215 /* If this is an asm statement, and the user asked for the | |
7216 impossible, don't die. Fail and let gimplify_asm_expr | |
7217 issue an error. */ | |
7218 ret = GS_ERROR; | |
7219 goto out; | |
7220 } | |
7221 | |
7222 /* Make sure the temporary matches our predicate. */ | |
7223 gcc_assert ((*gimple_test_f) (*expr_p)); | |
7224 | |
7225 if (!gimple_seq_empty_p (internal_post)) | |
7226 { | |
7227 annotate_all_with_location (internal_post, input_location); | |
7228 gimplify_seq_add_seq (pre_p, internal_post); | |
7229 } | |
7230 | |
7231 out: | |
7232 input_location = saved_location; | |
7233 return ret; | |
7234 } | |
7235 | |
/* Look through TYPE for variable-sized objects and gimplify each such
   size that we find.  Add to LIST_P any statements generated.

   TYPE is any GENERIC type node (may be NULL or error_mark_node, in
   which case nothing is done).  Sizes/positions that are gimplified
   are shared back into all variants of TYPE so the whole variant
   chain stays consistent.  */

void
gimplify_type_sizes (tree type, gimple_seq *list_p)
{
  tree field, t;

  /* Nothing to do for a missing or erroneous type.  */
  if (type == NULL || type == error_mark_node)
    return;

  /* We first do the main variant, then copy into any other variants.  */
  type = TYPE_MAIN_VARIANT (type);

  /* Avoid infinite recursion.  */
  if (TYPE_SIZES_GIMPLIFIED (type))
    return;

  /* Mark before recursing so self-referential types terminate.  */
  TYPE_SIZES_GIMPLIFIED (type) = 1;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      /* Scalar types: the min/max bounds themselves may be variable
	 (e.g. the domain of a VLA).  */
      gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
      gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);

      /* Propagate the gimplified bounds to every other variant.  */
      for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
	{
	  TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
	  TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
	}
      break;

    case ARRAY_TYPE:
      /* These types may not have declarations, so handle them here.  */
      gimplify_type_sizes (TREE_TYPE (type), list_p);
      gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
      /* When not optimizing, ensure VLA bounds aren't removed.  */
      if (!optimize
	  && TYPE_DOMAIN (type)
	  && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
	{
	  /* Un-ignore the artificial bound variables so debug info for
	     the VLA bounds survives.  */
	  t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	  t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* Gimplify each field's offset and size, and recurse into the
	 field types themselves.  */
      for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
	    gimplify_type_sizes (TREE_TYPE (field), list_p);
	  }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
	/* We used to recurse on the pointed-to type here, which turned out to
	   be incorrect because its definition might refer to variables not
	   yet initialized at this point if a forward declaration is involved.

	   It was actually useful for anonymous pointed-to types to ensure
	   that the sizes evaluation dominates every possible later use of the
	   values.  Restricting to such types here would be safe since there
	   is no possible forward declaration around, but would introduce an
	   undesirable middle-end semantic to anonymity.  We then defer to
	   front-ends the responsibility of ensuring that the sizes are
	   evaluated both early and late enough, e.g. by attaching artificial
	   type declarations to the tree.  */
      break;

    default:
      break;
    }

  /* Every type has an overall size; gimplify it last.  */
  gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
  gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);

  /* Share the gimplified sizes with all variants and mark them done.  */
  for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
    {
      TYPE_SIZE (t) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
      TYPE_SIZES_GIMPLIFIED (t) = 1;
    }
}
7334 | |
/* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
   a size or position, has had all of its SAVE_EXPRs evaluated.
   We add any required statements to *STMT_P.

   On return, *EXPR_P is either unchanged or replaced by a GIMPLE
   value (possibly a fresh temporary) of the same type.  */

void
gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
{
  tree type, expr = *expr_p;

  /* We don't do anything if the value isn't there, is constant, or contains
     a PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
     a VAR_DECL.  If it's a VAR_DECL from another function, the gimplifier
     will want to replace it with a new variable, but that will cause problems
     if this type is from outside the function.  It's OK to have that here.  */
  if (expr == NULL_TREE || TREE_CONSTANT (expr)
      || TREE_CODE (expr) == VAR_DECL
      || CONTAINS_PLACEHOLDER_P (expr))
    return;

  /* Remember the original type, then gimplify an unshared copy of the
     expression down to a GIMPLE value.  */
  type = TREE_TYPE (expr);
  *expr_p = unshare_expr (expr);

  gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
  expr = *expr_p;

  /* Verify that we've an exact type match with the original expression.
     In particular, we do not wish to drop a "sizetype" in favour of a
     type of similar dimensions.  We don't want to pollute the generic
     type-stripping code with this knowledge because it doesn't matter
     for the bulk of GENERIC/GIMPLE.  It only matters that TYPE_SIZE_UNIT
     and friends retain their "sizetype-ness".  */
  if (TREE_TYPE (expr) != type
      && TREE_CODE (type) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (type))
    {
      tree tmp;
      gimple stmt;

      /* Force the result back into the original sizetype via an explicit
	 NOP_EXPR assignment into a fresh temporary.  */
      *expr_p = create_tmp_var (type, NULL);
      tmp = build1 (NOP_EXPR, type, expr);
      stmt = gimplify_assign (*expr_p, tmp, stmt_p);
      /* Carry the original expression's location onto the new assignment
	 when it has one; otherwise fall back to the current location.  */
      if (EXPR_HAS_LOCATION (expr))
	gimple_set_location (stmt, EXPR_LOCATION (expr));
      else
	gimple_set_location (stmt, input_location);
    }
}
7382 | |
7383 | |
/* Gimplify the body of statements pointed to by BODY_P and return a
   GIMPLE_BIND containing the sequence of GIMPLE statements
   corresponding to BODY_P.  FNDECL is the function decl containing
   *BODY_P.

   If DO_PARMS is true, callee-copied parameters are resolved first
   via gimplify_parameters and the resulting statements are prepended
   to the body.  On return *BODY_P is cleared to NULL_TREE.  */

gimple
gimplify_body (tree *body_p, tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, seq;
  gimple outer_bind;
  struct gimplify_ctx gctx;

  timevar_push (TV_TREE_GIMPLIFY);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  /* There must be no gimplification context active yet; this function
     owns the outermost one.  */
  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context (&gctx);

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (body_p, fndecl);
  unvisit_body (body_p, fndecl);

  /* For a nested function (one with a containing-function origin in the
     cgraph), set up the pointer set used to record non-local VLAs.  */
  if (cgraph_node (fndecl)->origin)
    nonlocal_vlas = pointer_set_create ();

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = (do_parms) ? gimplify_parameters () : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (body_p, &seq);
  outer_bind = gimple_seq_first_stmt (seq);
  if (!outer_bind)
    {
      /* An empty body still needs at least one statement.  */
      outer_bind = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_bind);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_bind) == GIMPLE_BIND
      && gimple_seq_first (seq) == gimple_seq_last (seq))
    ;
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  /* The GENERIC body has been consumed; clear it for the caller.  */
  *body_p = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      gimple_bind_set_body (outer_bind, parm_stmts);
    }

  /* Tear down the non-local VLA set if we created one above.  */
  if (nonlocal_vlas)
    {
      pointer_set_destroy (nonlocal_vlas);
      nonlocal_vlas = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

#ifdef ENABLE_TYPES_CHECKING
  if (!errorcount && !sorrycount)
    verify_types_in_gimple_seq (gimple_bind_body (outer_bind));
#endif

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}
7470 | |
/* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
   node for the function we want to gimplify.

   Returns the sequence of GIMPLE statements corresponding to the body
   of FNDECL.  The GIMPLE body is installed on FNDECL (via
   gimple_set_body) and DECL_SAVED_TREE is cleared as a side effect.  */

void
gimplify_function_tree (tree fndecl)
{
  tree oldfn, parm, ret;
  gimple_seq seq;
  gimple bind;

  /* FNDECL must not already have a GIMPLE body.  */
  gcc_assert (!gimple_body (fndecl));

  /* Temporarily make FNDECL the current function; restored at the end.  */
  oldfn = current_function_decl;
  current_function_decl = fndecl;
  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = TREE_CHAIN (parm))
    {
      /* Preliminarily mark non-addressed complex variables as eligible
         for promotion to gimple registers.  We'll transform their uses
         as we find them.  */
      if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
          && !TREE_THIS_VOLATILE (parm)
          && !needs_to_live_in_memory (parm))
        DECL_GIMPLE_REG_P (parm) = 1;
    }

  /* Likewise for a complex/vector return value.  */
  ret = DECL_RESULT (fndecl);
  if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
      && !needs_to_live_in_memory (ret))
    DECL_GIMPLE_REG_P (ret) = 1;

  bind = gimplify_body (&DECL_SAVED_TREE (fndecl), fndecl, true);

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = gimple_seq_alloc ();
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      tree x;
      gimple new_bind;
      gimple tf;
      gimple_seq cleanup = NULL, body = NULL;

      /* Exit hook goes in the finally part so it runs on every path out.  */
      x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_EXIT];
      gimplify_seq_add_stmt (&cleanup, gimple_build_call (x, 0));
      tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);

      /* Entry hook runs first, followed by the try/finally-wrapped body.  */
      x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_ENTER];
      gimplify_seq_add_stmt (&body, gimple_build_call (x, 0));
      gimplify_seq_add_stmt (&body, tf);
      new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
      /* Clear the block for BIND, since it is no longer directly inside
         the function, but within a try block.  */
      gimple_bind_set_block (bind, NULL);

      /* Replace the current function body with the body
         wrapped in the try/finally TF.  */
      seq = gimple_seq_alloc ();
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties = PROP_gimple_any;

  /* Restore the previous current function.  */
  current_function_decl = oldfn;
  pop_cfun ();
}
7557 | |
7558 | |
/* Some transformations like inlining may invalidate the GIMPLE form
   for operands.  This function traverses all the operands in STMT and
   gimplifies anything that is not a valid gimple operand.  Any new
   GIMPLE statements are inserted before *GSI_P.

   If the LHS of STMT is not a register and the RHS requires one, a
   temporary is introduced and a copy back to the original LHS is
   inserted after *GSI_P.  */

void
gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
{
  size_t i, num_ops;
  tree orig_lhs = NULL_TREE, lhs, t;
  gimple_seq pre = NULL;	/* Statements to insert before STMT.  */
  gimple post_stmt = NULL;	/* Copy-back statement, inserted after.  */
  struct gimplify_ctx gctx;

  push_gimplify_context (&gctx);
  gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      /* Both comparison operands must be GIMPLE values.  */
      gimplify_expr (gimple_cond_lhs_ptr (stmt), &pre, NULL,
		     is_gimple_val, fb_rvalue);
      gimplify_expr (gimple_cond_rhs_ptr (stmt), &pre, NULL,
		     is_gimple_val, fb_rvalue);
      break;
    case GIMPLE_SWITCH:
      gimplify_expr (gimple_switch_index_ptr (stmt), &pre, NULL,
		     is_gimple_val, fb_rvalue);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      gimplify_expr (gimple_omp_atomic_load_rhs_ptr (stmt), &pre, NULL,
		     is_gimple_val, fb_rvalue);
      break;
    case GIMPLE_ASM:
      {
	size_t i, noutputs = gimple_asm_noutputs (stmt);
	const char *constraint, **oconstraints;
	bool allows_mem, allows_reg, is_inout;

	oconstraints
	  = (const char **) alloca ((noutputs) * sizeof (const char *));
	/* Re-gimplify each output operand according to its constraint.  */
	for (i = 0; i < noutputs; i++)
	  {
	    tree op = gimple_asm_output_op (stmt, i);
	    constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
	    oconstraints[i] = constraint;
	    parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
				     &allows_reg, &is_inout);
	    gimplify_expr (&TREE_VALUE (op), &pre, NULL,
			   is_inout ? is_gimple_min_lval : is_gimple_lvalue,
			   fb_lvalue | fb_mayfail);
	  }
	/* Likewise for the inputs, consulting the output constraints
	   collected above for matching constraints.  */
	for (i = 0; i < gimple_asm_ninputs (stmt); i++)
	  {
	    tree op = gimple_asm_input_op (stmt, i);
	    constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    /* TREE_ADDRESSABLE types cannot live in registers.  */
	    if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (op))) && allows_mem)
	      allows_reg = 0;
	    if (!allows_reg && allows_mem)
	      gimplify_expr (&TREE_VALUE (op), &pre, NULL,
			     is_gimple_lvalue, fb_lvalue | fb_mayfail);
	    else
	      gimplify_expr (&TREE_VALUE (op), &pre, NULL,
			     is_gimple_asm_val, fb_rvalue);
	  }
      }
      break;
    default:
      /* NOTE: We start gimplifying operands from last to first to
	 make sure that side-effects on the RHS of calls, assignments
	 and ASMs are executed before the LHS.  The ordering is not
	 important for other statements.  */
      num_ops = gimple_num_ops (stmt);
      orig_lhs = gimple_get_lhs (stmt);
      for (i = num_ops; i > 0; i--)
	{
	  tree op = gimple_op (stmt, i - 1);
	  if (op == NULL_TREE)
	    continue;
	  /* Operand 0 of a call/assign is the LHS: an lvalue.  */
	  if (i == 1 && (is_gimple_call (stmt) || is_gimple_assign (stmt)))
	    gimplify_expr (&op, &pre, NULL, is_gimple_lvalue, fb_lvalue);
	  /* A single-RHS assignment's RHS uses the predicate implied
	     by its LHS.  */
	  else if (i == 2
		   && is_gimple_assign (stmt)
		   && num_ops == 2
		   && get_gimple_rhs_class (gimple_expr_code (stmt))
		      == GIMPLE_SINGLE_RHS)
	    gimplify_expr (&op, &pre, NULL,
			   rhs_predicate_for (gimple_assign_lhs (stmt)),
			   fb_rvalue);
	  /* Operand 1 of a call is the callee address.  */
	  else if (i == 2 && is_gimple_call (stmt))
	    {
	      if (TREE_CODE (op) == FUNCTION_DECL)
		continue;
	      gimplify_expr (&op, &pre, NULL, is_gimple_call_addr, fb_rvalue);
	    }
	  else
	    gimplify_expr (&op, &pre, NULL, is_gimple_val, fb_rvalue);
	  gimple_set_op (stmt, i - 1, op);
	}

      lhs = gimple_get_lhs (stmt);
      /* If the LHS changed it in a way that requires a simple RHS,
	 create temporary.  */
      if (lhs && !is_gimple_reg (lhs))
	{
	  bool need_temp = false;

	  if (is_gimple_assign (stmt)
	      && num_ops == 2
	      && get_gimple_rhs_class (gimple_expr_code (stmt))
		 == GIMPLE_SINGLE_RHS)
	    gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
			   rhs_predicate_for (gimple_assign_lhs (stmt)),
			   fb_rvalue);
	  else if (is_gimple_reg (lhs))
	    {
	      if (is_gimple_reg_type (TREE_TYPE (lhs)))
		{
		  if (is_gimple_call (stmt))
		    {
		      /* Calls that aren't const/pure (or that loop) may
			 have side effects: keep the store separate.  */
		      i = gimple_call_flags (stmt);
		      if ((i & ECF_LOOPING_CONST_OR_PURE)
			  || !(i & (ECF_CONST | ECF_PURE)))
			need_temp = true;
		    }
		  if (stmt_can_throw_internal (stmt))
		    need_temp = true;
		}
	    }
	  else
	    {
	      if (is_gimple_reg_type (TREE_TYPE (lhs)))
		need_temp = true;
	      else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
		{
		  if (is_gimple_call (stmt))
		    {
		      tree fndecl = gimple_call_fndecl (stmt);

		      /* No temp needed when the value is returned by
			 reference or as an aggregate in memory.  */
		      if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
			  && !(fndecl && DECL_RESULT (fndecl)
			       && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
			need_temp = true;
		    }
		  else
		    need_temp = true;
		}
	    }
	  if (need_temp)
	    {
	      /* Store into a fresh temporary and copy it back to the
		 original LHS after STMT.  */
	      tree temp = create_tmp_var (TREE_TYPE (lhs), NULL);

	      if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (lhs)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (temp) = 1;
	      if (TREE_CODE (orig_lhs) == SSA_NAME)
		orig_lhs = SSA_NAME_VAR (orig_lhs);

	      if (gimple_in_ssa_p (cfun))
		temp = make_ssa_name (temp, NULL);
	      gimple_set_lhs (stmt, temp);
	      post_stmt = gimple_build_assign (lhs, temp);
	      if (TREE_CODE (lhs) == SSA_NAME)
		SSA_NAME_DEF_STMT (lhs) = post_stmt;
	    }
	}
      break;
    }

  /* Register any temporaries the gimplifier created.  */
  if (gimple_referenced_vars (cfun))
    for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
      add_referenced_var (t);

  /* Emit the pre-queue before STMT (renaming symbols in SSA form),
     and the copy-back, if any, after it.  */
  if (!gimple_seq_empty_p (pre))
    {
      if (gimple_in_ssa_p (cfun))
	{
	  gimple_stmt_iterator i;

	  for (i = gsi_start (pre); !gsi_end_p (i); gsi_next (&i))
	    mark_symbols_for_renaming (gsi_stmt (i));
	}
      gsi_insert_seq_before (gsi_p, pre, GSI_SAME_STMT);
    }
  if (post_stmt)
    gsi_insert_after (gsi_p, post_stmt, GSI_NEW_STMT);

  pop_gimplify_context (NULL);
}
7750 | |
7751 | |
7752 /* Expands EXPR to list of gimple statements STMTS. If SIMPLE is true, | |
7753 force the result to be either ssa_name or an invariant, otherwise | |
7754 just force it to be a rhs expression. If VAR is not NULL, make the | |
7755 base variable of the final destination be VAR if suitable. */ | |
7756 | |
7757 tree | |
7758 force_gimple_operand (tree expr, gimple_seq *stmts, bool simple, tree var) | |
7759 { | |
7760 tree t; | |
7761 enum gimplify_status ret; | |
7762 gimple_predicate gimple_test_f; | |
7763 struct gimplify_ctx gctx; | |
7764 | |
7765 *stmts = NULL; | |
7766 | |
7767 if (is_gimple_val (expr)) | |
7768 return expr; | |
7769 | |
7770 gimple_test_f = simple ? is_gimple_val : is_gimple_reg_rhs; | |
7771 | |
7772 push_gimplify_context (&gctx); | |
7773 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun); | |
7774 gimplify_ctxp->allow_rhs_cond_expr = true; | |
7775 | |
7776 if (var) | |
7777 expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr); | |
7778 | |
7779 if (TREE_CODE (expr) != MODIFY_EXPR | |
7780 && TREE_TYPE (expr) == void_type_node) | |
7781 { | |
7782 gimplify_and_add (expr, stmts); | |
7783 expr = NULL_TREE; | |
7784 } | |
7785 else | |
7786 { | |
7787 ret = gimplify_expr (&expr, stmts, NULL, gimple_test_f, fb_rvalue); | |
7788 gcc_assert (ret != GS_ERROR); | |
7789 } | |
7790 | |
7791 if (gimple_referenced_vars (cfun)) | |
7792 for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t)) | |
7793 add_referenced_var (t); | |
7794 | |
7795 pop_gimplify_context (NULL); | |
7796 | |
7797 return expr; | |
7798 } | |
7799 | |
7800 /* Invokes force_gimple_operand for EXPR with parameters SIMPLE_P and VAR. If | |
7801 some statements are produced, emits them at GSI. If BEFORE is true. | |
7802 the statements are appended before GSI, otherwise they are appended after | |
7803 it. M specifies the way GSI moves after insertion (GSI_SAME_STMT or | |
7804 GSI_CONTINUE_LINKING are the usual values). */ | |
7805 | |
7806 tree | |
7807 force_gimple_operand_gsi (gimple_stmt_iterator *gsi, tree expr, | |
7808 bool simple_p, tree var, bool before, | |
7809 enum gsi_iterator_update m) | |
7810 { | |
7811 gimple_seq stmts; | |
7812 | |
7813 expr = force_gimple_operand (expr, &stmts, simple_p, var); | |
7814 | |
7815 if (!gimple_seq_empty_p (stmts)) | |
7816 { | |
7817 if (gimple_in_ssa_p (cfun)) | |
7818 { | |
7819 gimple_stmt_iterator i; | |
7820 | |
7821 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i)) | |
7822 mark_symbols_for_renaming (gsi_stmt (i)); | |
7823 } | |
7824 | |
7825 if (before) | |
7826 gsi_insert_seq_before (gsi, stmts, m); | |
7827 else | |
7828 gsi_insert_seq_after (gsi, stmts, m); | |
7829 } | |
7830 | |
7831 return expr; | |
7832 } | |
7833 | |
7834 #include "gt-gimplify.h" |