Mercurial > hg > CbC > CbC_gcc
comparison gcc/gimplify.c @ 0:a06113de4d67
first commit
author | kent <kent@cr.ie.u-ryukyu.ac.jp> |
---|---|
date | Fri, 17 Jul 2009 14:47:48 +0900 |
parents | |
children | caeb520cebed 58ad6c70ea60 |
comparison
equal
deleted
inserted
replaced
-1:000000000000 | 0:a06113de4d67 |
---|---|
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees | |
2 tree representation into the GIMPLE form. | |
3 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 | |
4 Free Software Foundation, Inc. | |
5 Major work done by Sebastian Pop <s.pop@laposte.net>, | |
6 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>. | |
7 | |
8 This file is part of GCC. | |
9 | |
10 GCC is free software; you can redistribute it and/or modify it under | |
11 the terms of the GNU General Public License as published by the Free | |
12 Software Foundation; either version 3, or (at your option) any later | |
13 version. | |
14 | |
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
18 for more details. | |
19 | |
20 You should have received a copy of the GNU General Public License | |
21 along with GCC; see the file COPYING3. If not see | |
22 <http://www.gnu.org/licenses/>. */ | |
23 | |
24 #include "config.h" | |
25 #include "system.h" | |
26 #include "coretypes.h" | |
27 #include "tm.h" | |
28 #include "tree.h" | |
29 #include "rtl.h" | |
30 #include "varray.h" | |
31 #include "gimple.h" | |
32 #include "tree-iterator.h" | |
33 #include "tree-inline.h" | |
34 #include "diagnostic.h" | |
35 #include "langhooks.h" | |
36 #include "langhooks-def.h" | |
37 #include "tree-flow.h" | |
38 #include "cgraph.h" | |
39 #include "timevar.h" | |
40 #include "except.h" | |
41 #include "hashtab.h" | |
42 #include "flags.h" | |
43 #include "real.h" | |
44 #include "function.h" | |
45 #include "output.h" | |
46 #include "expr.h" | |
47 #include "ggc.h" | |
48 #include "toplev.h" | |
49 #include "target.h" | |
50 #include "optabs.h" | |
51 #include "pointer-set.h" | |
52 #include "splay-tree.h" | |
53 #include "vec.h" | |
54 #include "gimple.h" | |
55 | |
56 | |
/* Per-variable data-sharing flags for OpenMP regions; stored as the
   value field in a gimplify_omp_ctx's VARIABLES splay tree.  Several
   bits may be OR'ed together; GOVD_DATA_SHARE_CLASS masks the bits
   that describe the sharing class proper.  */

enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_DEBUG_PRIVATE = 256,
  GOVD_PRIVATE_OUTER_REF = 512,
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
};


/* Kind of OpenMP region an omp context describes.  */

enum omp_region_type
{
  ORT_WORKSHARE = 0,
  ORT_TASK = 1,
  ORT_PARALLEL = 2,
  ORT_COMBINED_PARALLEL = 3
};

/* Gimplification state for one OpenMP region.  Contexts nest via
   OUTER_CONTEXT, mirroring the nesting of the source constructs.  */

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;		/* Keyed by DECL; see omp_add_variable.  */
  struct pointer_set_t *privatized_types;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
};

/* Innermost gimplification context, and innermost OpenMP context.  */
static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;


/* Formal (expression) temporary table handling: Multiple occurrences of
   the same scalar expression are evaluated into the same temporary.  */

typedef struct gimple_temp_hash_elt
{
  tree val;   /* Key */
  tree temp;  /* Value */
} elt_t;

/* Forward declarations.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
107 | |
108 /* Mark X addressable. Unlike the langhook we expect X to be in gimple | |
109 form and we don't do any syntax checking. */ | |
110 static void | |
111 mark_addressable (tree x) | |
112 { | |
113 while (handled_component_p (x)) | |
114 x = TREE_OPERAND (x, 0); | |
115 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL) | |
116 return ; | |
117 TREE_ADDRESSABLE (x) = 1; | |
118 } | |
119 | |
120 /* Return a hash value for a formal temporary table entry. */ | |
121 | |
122 static hashval_t | |
123 gimple_tree_hash (const void *p) | |
124 { | |
125 tree t = ((const elt_t *) p)->val; | |
126 return iterative_hash_expr (t, 0); | |
127 } | |
128 | |
/* Compare two formal temporary table entries P1 and P2.  Returns
   nonzero iff the keyed expressions are structurally equal.  */

static int
gimple_tree_eq (const void *p1, const void *p2)
{
  tree t1 = ((const elt_t *) p1)->val;
  tree t2 = ((const elt_t *) p2)->val;
  enum tree_code code = TREE_CODE (t1);

  /* Cheap rejects first: differing tree codes or types cannot match.  */
  if (TREE_CODE (t2) != code
      || TREE_TYPE (t1) != TREE_TYPE (t2))
    return 0;

  if (!operand_equal_p (t1, t2, 0))
    return 0;

  /* Only allow them to compare equal if they also hash equal; otherwise
     results are nondeterminate, and we fail bootstrap comparison.  */
  gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2));

  return 1;
}
151 | |
152 /* Link gimple statement GS to the end of the sequence *SEQ_P. If | |
153 *SEQ_P is NULL, a new sequence is allocated. This function is | |
154 similar to gimple_seq_add_stmt, but does not scan the operands. | |
155 During gimplification, we need to manipulate statement sequences | |
156 before the def/use vectors have been constructed. */ | |
157 | |
158 static void | |
159 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs) | |
160 { | |
161 gimple_stmt_iterator si; | |
162 | |
163 if (gs == NULL) | |
164 return; | |
165 | |
166 if (*seq_p == NULL) | |
167 *seq_p = gimple_seq_alloc (); | |
168 | |
169 si = gsi_last (*seq_p); | |
170 | |
171 gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT); | |
172 } | |
173 | |
174 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is | |
175 NULL, a new sequence is allocated. This function is | |
176 similar to gimple_seq_add_seq, but does not scan the operands. | |
177 During gimplification, we need to manipulate statement sequences | |
178 before the def/use vectors have been constructed. */ | |
179 | |
180 static void | |
181 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src) | |
182 { | |
183 gimple_stmt_iterator si; | |
184 | |
185 if (src == NULL) | |
186 return; | |
187 | |
188 if (*dst_p == NULL) | |
189 *dst_p = gimple_seq_alloc (); | |
190 | |
191 si = gsi_last (*dst_p); | |
192 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT); | |
193 } | |
194 | |
/* Set up a context for the gimplifier.  C is caller-provided storage;
   it is zeroed here and becomes the innermost context.  */

void
push_gimplify_context (struct gimplify_ctx *c)
{
  memset (c, '\0', sizeof (*c));
  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
}
204 | |
/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple body)
{
  struct gimplify_ctx *c = gimplify_ctxp;
  tree t;

  /* The bind-expr stack must have been fully unwound by now.  */
  gcc_assert (c && (c->bind_expr_stack == NULL
		    || VEC_empty (gimple, c->bind_expr_stack)));
  VEC_free (gimple, heap, c->bind_expr_stack);
  gimplify_ctxp = c->prev_context;

  /* Temporaries are only "formal" while their context is live.  */
  for (t = c->temps; t ; t = TREE_CHAIN (t))
    DECL_GIMPLE_FORMAL_TEMP_P (t) = 0;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  if (c->temp_htab)
    htab_delete (c->temp_htab);
}
233 | |
/* Push GIMPLE_BIND onto the stack of binds currently being gimplified,
   allocating the stack lazily.  */

static void
gimple_push_bind_expr (gimple gimple_bind)
{
  if (gimplify_ctxp->bind_expr_stack == NULL)
    gimplify_ctxp->bind_expr_stack = VEC_alloc (gimple, heap, 8);
  VEC_safe_push (gimple, heap, gimplify_ctxp->bind_expr_stack, gimple_bind);
}

/* Pop the innermost GIMPLE_BIND off the stack.  */

static void
gimple_pop_bind_expr (void)
{
  VEC_pop (gimple, gimplify_ctxp->bind_expr_stack);
}

/* Return the innermost GIMPLE_BIND currently being gimplified.  */

gimple
gimple_current_bind_expr (void)
{
  return VEC_last (gimple, gimplify_ctxp->bind_expr_stack);
}

/* Return the stack of GIMPLE_BINDs created during gimplification.  */

VEC(gimple, heap) *
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}
261 | |
/* Returns true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  /* On entering the outermost conditional, no conditional cleanups
     may be left over from a previous one.  */
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue *PRE_P.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}
298 | |
299 /* A stable comparison routine for use with splay trees and DECLs. */ | |
300 | |
301 static int | |
302 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb) | |
303 { | |
304 tree a = (tree) xa; | |
305 tree b = (tree) xb; | |
306 | |
307 return DECL_UID (a) - DECL_UID (b); | |
308 } | |
309 | |
310 /* Create a new omp construct that deals with variable remapping. */ | |
311 | |
312 static struct gimplify_omp_ctx * | |
313 new_omp_context (enum omp_region_type region_type) | |
314 { | |
315 struct gimplify_omp_ctx *c; | |
316 | |
317 c = XCNEW (struct gimplify_omp_ctx); | |
318 c->outer_context = gimplify_omp_ctxp; | |
319 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0); | |
320 c->privatized_types = pointer_set_create (); | |
321 c->location = input_location; | |
322 c->region_type = region_type; | |
323 if (region_type != ORT_TASK) | |
324 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED; | |
325 else | |
326 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED; | |
327 | |
328 return c; | |
329 } | |
330 | |
/* Destroy an omp construct that deals with variable remapping,
   releasing the splay tree, the type set and the context itself.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  pointer_set_destroy (c->privatized_types);
  XDELETE (c);
}

/* Forward declarations of OpenMP data-sharing helpers defined later
   in this file.  */
static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
343 | |
/* A subroutine of append_to_statement_list{,_force}.  T is not NULL.
   Appends T to the STATEMENT_LIST in *LIST_P, creating the list
   on demand.  */

static void
append_to_statement_list_1 (tree t, tree *list_p)
{
  tree list = *list_p;
  tree_stmt_iterator i;

  if (!list)
    {
      /* If T is itself a statement list, it can simply become the
	 container — no copy needed.  */
      if (t && TREE_CODE (t) == STATEMENT_LIST)
	{
	  *list_p = t;
	  return;
	}
      *list_p = list = alloc_stmt_list ();
    }

  i = tsi_last (list);
  tsi_link_after (&i, t, TSI_CONTINUE_LINKING);
}
365 | |
366 /* Add T to the end of the list container pointed to by LIST_P. | |
367 If T is an expression with no effects, it is ignored. */ | |
368 | |
369 void | |
370 append_to_statement_list (tree t, tree *list_p) | |
371 { | |
372 if (t && TREE_SIDE_EFFECTS (t)) | |
373 append_to_statement_list_1 (t, list_p); | |
374 } | |
375 | |
376 /* Similar, but the statement is always added, regardless of side effects. */ | |
377 | |
378 void | |
379 append_to_statement_list_force (tree t, tree *list_p) | |
380 { | |
381 if (t != NULL_TREE) | |
382 append_to_statement_list_1 (t, list_p); | |
383 } | |
384 | |
/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  /* Remember the last statement before gimplification, so that the
     first statement appended can be located afterwards.  */
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      /* The sequence was non-empty: the first new tuple follows the
	 remembered position.  */
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    /* The sequence was empty: everything in it is new.  */
    return gimple_seq_first_stmt (*seq_p);
}
414 | |
/* Strip off a legitimate source ending from the input string NAME of
   length LEN.  Rather than having to know the names used by all of
   our front ends, we strip off an ending of a period followed by
   up to five characters.  (Java uses ".class".)  */

static inline void
remove_suffix (char *name, int len)
{
  int pos;

  /* Scan backwards over the tail of the string, never touching the
     first character and never looking more than seven characters
     deep, and truncate at the first period found.  */
  for (pos = len - 2; pos > 0 && pos > len - 8; pos--)
    if (name[pos] == '.')
      {
	name[pos] = '\0';
	break;
      }
}
434 | |
/* Subroutine for find_single_pointer_decl: walk_tree callback.
   DATA points to the single pointer DECL found so far (or NULL_TREE).  */

static tree
find_single_pointer_decl_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
			    void *data)
{
  tree *pdecl = (tree *) data;

  /* We are only looking for pointers at the same level as the
     original tree; we must not look through any indirections.
     Returning anything other than NULL_TREE will cause the caller to
     not find a base.  */
  if (REFERENCE_CLASS_P (*tp))
    return *tp;

  if (DECL_P (*tp) && POINTER_TYPE_P (TREE_TYPE (*tp)))
    {
      if (*pdecl)
	{
	  /* We already found a pointer decl; return anything other
	     than NULL_TREE to unwind from walk_tree signalling that
	     we have a duplicate.  */
	  return *tp;
	}
      *pdecl = *tp;
    }

  return NULL_TREE;
}

/* Find the single DECL of pointer type in the tree T, used directly
   rather than via an indirection, and return it.  If there are zero
   or more than one such DECLs, return NULL.  */

static tree
find_single_pointer_decl (tree t)
{
  tree decl = NULL_TREE;

  if (walk_tree (&t, find_single_pointer_decl_1, &decl, NULL))
    {
      /* find_single_pointer_decl_1 returns a nonzero value, causing
	 walk_tree to return a nonzero value, to indicate that it
	 found more than one pointer DECL or that it found an
	 indirection.  */
      return NULL_TREE;
    }

  return decl;
}
485 | |
/* Counter used to make each generated temporary name unique.  */
static GTY(()) unsigned int tmp_var_id_num;

/* Create a new temporary name with PREFIX.  Returns an identifier.
   PREFIX may be NULL, in which case "T" is used.  Any source-file
   suffix on PREFIX is stripped first (see remove_suffix).  */

tree
create_tmp_var_name (const char *prefix)
{
  char *tmp_name;

  if (prefix)
    {
      /* Work on a scratch copy; PREFIX itself may not be writable.  */
      char *preftmp = ASTRDUP (prefix);

      remove_suffix (preftmp, strlen (preftmp));
      prefix = preftmp;
    }

  /* ASM_FORMAT_PRIVATE_NAME sets TMP_NAME to a fresh string.  */
  ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
  return get_identifier (tmp_name);
}
506 | |
507 | |
/* Create a new temporary variable declaration of type TYPE.
   Does NOT push it into the current binding.  PREFIX, possibly NULL,
   seeds the generated name.  */

tree
create_tmp_var_raw (tree type, const char *prefix)
{
  tree tmp_var;
  tree new_type;

  /* Make the type of the variable writable.  */
  new_type = build_type_variant (type, 0, 0);
  TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);

  /* NOTE(review): NEW_TYPE is computed above but the decl is built
     with TYPE, matching upstream GCC of this vintage — confirm whether
     the unqualified variant was meant to be used here.  */
  tmp_var = build_decl (VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
			type);

  /* The variable was declared by the compiler.  */
  DECL_ARTIFICIAL (tmp_var) = 1;
  /* And we don't want debug info for it.  */
  DECL_IGNORED_P (tmp_var) = 1;

  /* Make the variable writable.  */
  TREE_READONLY (tmp_var) = 0;

  DECL_EXTERNAL (tmp_var) = 0;
  TREE_STATIC (tmp_var) = 0;
  TREE_USED (tmp_var) = 1;

  return tmp_var;
}
538 | |
/* Create a new temporary variable declaration of type TYPE.  DOES push the
   variable into the current binding.  Further, assume that this is called
   only from gimplification or optimization, at which point the creation of
   certain types are bugs.  PREFIX, possibly NULL, seeds the name.  */

tree
create_tmp_var (tree type, const char *prefix)
{
  tree tmp_var;

  /* We don't allow types that are addressable (meaning we can't make copies),
     or incomplete.  We also used to reject every variable size objects here,
     but now support those for which a constant upper bound can be obtained.
     The processing for variable sizes is performed in gimple_add_tmp_var,
     point at which it really matters and possibly reached via paths not going
     through this function, e.g. after direct calls to create_tmp_var_raw.  */
  gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));

  tmp_var = create_tmp_var_raw (type, prefix);
  gimple_add_tmp_var (tmp_var);
  return tmp_var;
}

/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  return create_tmp_var (TREE_TYPE (val), get_name (val));
}
570 | |
/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary via the context's hash table.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      void **slot;

      /* Look VAL up in the table, creating the table lazily.  */
      elt.val = val;
      if (gimplify_ctxp->temp_htab == NULL)
        gimplify_ctxp->temp_htab
	  = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free);
      slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT);
      if (*slot == NULL)
	{
	  /* First occurrence of this expression: record a new temp.  */
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = (void *) elt_p;
	}
      else
	{
	  /* Reuse the temporary created for an equal expression.  */
	  elt_p = (elt_t *) *slot;
          ret = elt_p->temp;
	}
    }

  if (is_formal)
    DECL_GIMPLE_FORMAL_TEMP_P (ret) = 1;

  return ret;
}
615 | |
616 | |
/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_formal_tmp_or_call_rhs (tree t)
{
  return TREE_CODE (t) == CALL_EXPR || is_gimple_formal_tmp_rhs (t);
}

/* Returns true iff T is a valid RHS for an assignment to a renamed
   user -- or front-end generated artificial -- variable.  */

static bool
is_gimple_reg_or_call_rhs (tree t)
{
  /* If the RHS of the MODIFY_EXPR may throw or make a nonlocal goto
     and the LHS is a user variable, then we need to introduce a formal
     temporary.  This way the optimizers can determine that the user
     variable is only modified if evaluation of the RHS does not throw.

     Don't force a temp of a non-renamable type; the copy could be
     arbitrarily expensive.  Instead we will generate a VDEF for
     the assignment.  */

  if (is_gimple_reg_type (TREE_TYPE (t))
      && ((TREE_CODE (t) == CALL_EXPR && TREE_SIDE_EFFECTS (t))
	  || tree_could_throw_p (t)))
    return false;

  return is_gimple_formal_tmp_or_call_rhs (t);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_or_call_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_formal_tmp_or_call_rhs (t);
}
665 | |
666 | |
/* Returns a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
                      bool is_formal)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_formal_tmp_or_call_rhs,
		 fb_rvalue);

  t = lookup_tmp_var (val, is_formal);

  if (is_formal)
    {
      /* Propagate restrict-pointer information from VAL's single
	 pointer decl, if any, onto the temporary T.  */
      tree u = find_single_pointer_decl (val);

      if (u && TREE_CODE (u) == VAR_DECL && DECL_BASED_ON_RESTRICT_P (u))
	u = DECL_GET_RESTRICT_BASE (u);
      if (u && TYPE_RESTRICT (TREE_TYPE (u)))
	{
	  if (DECL_BASED_ON_RESTRICT_P (t))
	    gcc_assert (u == DECL_GET_RESTRICT_BASE (t));
	  else
	    {
	      DECL_BASED_ON_RESTRICT_P (t) = 1;
	      SET_DECL_RESTRICT_BASE (t, u);
	    }
	}
    }

  /* Complex and vector temporaries may be treated as registers.  */
  if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (t) = 1;

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  /* Carry VAL's location onto the initialization if it has one.  */
  if (EXPR_HAS_LOCATION (val))
    SET_EXPR_LOCUS (mod, EXPR_LOCUS (val));
  else
    SET_EXPR_LOCATION (mod, input_location);

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  /* If we're gimplifying into ssa, gimplify_modify_expr will have
     given our temporary an SSA name.  Find and return it.  */
  if (gimplify_ctxp->into_ssa)
    {
      gimple last = gimple_seq_last_stmt (*pre_p);
      t = gimple_get_lhs (last);
    }

  return t;
}
735 | |
/* Returns a formal temporary variable initialized with VAL.  PRE_P
   points to a sequence where side-effects needed to compute VAL should be
   stored.  The temporary may be shared with other equal expressions
   (see lookup_tmp_var).  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true);
}

/* Returns a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  Unlike get_formal_tmp_var, the temporary is
   never shared with another expression.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
{
  return internal_get_tmp_var (val, pre_p, post_p, false);
}
754 | |
/* Declares all the variables in VARS in SCOPE.  If DEBUG_INFO is
   true, generate debug info for them; otherwise don't.  SCOPE must
   be a GIMPLE_BIND.  */

void
declare_vars (tree vars, gimple scope, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gcc_assert (gimple_code (scope) == GIMPLE_BIND);

      /* Reverse the incoming chain before splicing it in.  */
      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  /* No debug info wanted (or no BLOCK): just chain the temps
	     onto the bind's variable list.  */
	  TREE_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}
794 | |
/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (TREE_CODE (var) == VAR_DECL);

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  /* A negative result means no constant upper bound exists.  */
  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}
818 | |
/* Register temporary TMP as a local: on the gimplification context's
   temp list, in the function's recorded variables, or — for nested
   functions with no context — on the body's outermost bind.  TMP must
   not already be chained or seen in a bind expression.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!TREE_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      TREE_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx && ctx->region_type == ORT_WORKSHARE)
	    ctx = ctx->outer_context;
	  if (ctx)
	    omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}
860 | |
861 /* Determines whether to assign a location to the statement GS. */ | |
862 | |
863 static bool | |
864 should_carry_location_p (gimple gs) | |
865 { | |
866 /* Don't emit a line note for a label. We particularly don't want to | |
867 emit one for the break label, since it doesn't actually correspond | |
868 to the beginning of the loop/switch. */ | |
869 if (gimple_code (gs) == GIMPLE_LABEL) | |
870 return false; | |
871 | |
872 return true; | |
873 } | |
874 | |
875 /* Same, but for a tree. */ | |
876 | |
877 static bool | |
878 tree_should_carry_location_p (const_tree stmt) | |
879 { | |
880 /* Don't emit a line note for a label. We particularly don't want to | |
881 emit one for the break label, since it doesn't actually correspond | |
882 to the beginning of the loop/switch. */ | |
883 if (TREE_CODE (stmt) == LABEL_EXPR) | |
884 return false; | |
885 | |
886 /* Do not annotate empty statements, since it confuses gcov. */ | |
887 if (!TREE_SIDE_EFFECTS (stmt)) | |
888 return false; | |
889 | |
890 return true; | |
891 } | |
892 | |
/* Return true if a location should not be emitted for this statement
   by annotate_one_with_location.  */

static inline bool
gimple_do_not_emit_location_p (gimple g)
{
  return gimple_plf (g, GF_PLF_1);
}

/* Mark statement G so a location will not be emitted by
   annotate_one_with_location.  */

static inline void
gimple_set_do_not_emit_location (gimple g)
{
  /* The PLF flags are initialized to 0 when a new tuple is created,
     so no need to initialize it anywhere.  */
  gimple_set_plf (g, GF_PLF_1, true);
}

/* Set the location for gimple statement GS to LOCATION, unless it
   already has one, was explicitly suppressed, or should not carry
   a location at all.  */

static void
annotate_one_with_location (gimple gs, location_t location)
{
  if (!gimple_has_location (gs)
      && !gimple_do_not_emit_location_p (gs)
      && should_carry_location_p (gs))
    gimple_set_location (gs, location);
}

/* Same, but for tree T.  */

static void
tree_annotate_one_with_location (tree t, location_t location)
{
  if (CAN_HAVE_LOCATION_P (t)
      && ! EXPR_HAS_LOCATION (t) && tree_should_carry_location_p (t))
    SET_EXPR_LOCATION (t, location);
}
933 | |
934 | |
/* Set LOCATION for all the statements after iterator GSI in sequence
   SEQ.  If GSI is pointing to the end of the sequence, start with the
   first statement in SEQ.  */

static void
annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
				  location_t location)
{
  if (gsi_end_p (gsi))
    gsi = gsi_start (seq);
  else
    gsi_next (&gsi);

  for (; !gsi_end_p (gsi); gsi_next (&gsi))
    annotate_one_with_location (gsi_stmt (gsi), location);
}


/* Set the location for all the statements in sequence STMT_P to LOCATION.  */

void
annotate_all_with_location (gimple_seq stmt_p, location_t location)
{
  gimple_stmt_iterator i;

  if (gimple_seq_empty_p (stmt_p))
    return;

  for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
    {
      gimple gs = gsi_stmt (i);
      annotate_one_with_location (gs, location);
    }
}

/* Same, but for statement or statement list in *STMT_P.  */

void
tree_annotate_all_with_location (tree *stmt_p, location_t location)
{
  tree_stmt_iterator i;

  if (!*stmt_p)
    return;

  for (i = tsi_start (*stmt_p); !tsi_end_p (i); tsi_next (&i))
    {
      tree t = tsi_stmt (i);

      /* Assuming we've already been gimplified, we shouldn't
	  see nested chaining constructs anymore.  */
      gcc_assert (TREE_CODE (t) != STATEMENT_LIST
		  && TREE_CODE (t) != COMPOUND_EXPR);

      tree_annotate_one_with_location (t, location);
    }
}
992 | |
993 | |
994 /* Similar to copy_tree_r() but do not copy SAVE_EXPR or TARGET_EXPR nodes. | |
995 These nodes model computations that should only be done once. If we | |
996 were to unshare something like SAVE_EXPR(i++), the gimplification | |
997 process would create wrong code. */ | |
998 | |
999 static tree | |
1000 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data) | |
1001 { | |
1002 enum tree_code code = TREE_CODE (*tp); | |
1003 /* Don't unshare types, decls, constants and SAVE_EXPR nodes. */ | |
1004 if (TREE_CODE_CLASS (code) == tcc_type | |
1005 || TREE_CODE_CLASS (code) == tcc_declaration | |
1006 || TREE_CODE_CLASS (code) == tcc_constant | |
1007 || code == SAVE_EXPR || code == TARGET_EXPR | |
1008 /* We can't do anything sensible with a BLOCK used as an expression, | |
1009 but we also can't just die when we see it because of non-expression | |
1010 uses. So just avert our eyes and cross our fingers. Silly Java. */ | |
1011 || code == BLOCK) | |
1012 *walk_subtrees = 0; | |
1013 else | |
1014 { | |
1015 gcc_assert (code != BIND_EXPR); | |
1016 copy_tree_r (tp, walk_subtrees, data); | |
1017 } | |
1018 | |
1019 return NULL_TREE; | |
1020 } | |
1021 | |
1022 /* Callback for walk_tree to unshare most of the shared trees rooted at | |
1023 *TP. If *TP has been visited already (i.e., TREE_VISITED (*TP) == 1), | |
1024 then *TP is deep copied by calling copy_tree_r. | |
1025 | |
1026 This unshares the same trees as copy_tree_r with the exception of | |
1027 SAVE_EXPR nodes. These nodes model computations that should only be | |
1028 done once. If we were to unshare something like SAVE_EXPR(i++), the | |
1029 gimplification process would create wrong code. */ | |
1030 | |
1031 static tree | |
1032 copy_if_shared_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, | |
1033 void *data ATTRIBUTE_UNUSED) | |
1034 { | |
1035 tree t = *tp; | |
1036 enum tree_code code = TREE_CODE (t); | |
1037 | |
1038 /* Skip types, decls, and constants. But we do want to look at their | |
1039 types and the bounds of types. Mark them as visited so we properly | |
1040 unmark their subtrees on the unmark pass. If we've already seen them, | |
1041 don't look down further. */ | |
1042 if (TREE_CODE_CLASS (code) == tcc_type | |
1043 || TREE_CODE_CLASS (code) == tcc_declaration | |
1044 || TREE_CODE_CLASS (code) == tcc_constant) | |
1045 { | |
1046 if (TREE_VISITED (t)) | |
1047 *walk_subtrees = 0; | |
1048 else | |
1049 TREE_VISITED (t) = 1; | |
1050 } | |
1051 | |
1052 /* If this node has been visited already, unshare it and don't look | |
1053 any deeper. */ | |
1054 else if (TREE_VISITED (t)) | |
1055 { | |
1056 walk_tree (tp, mostly_copy_tree_r, NULL, NULL); | |
1057 *walk_subtrees = 0; | |
1058 } | |
1059 | |
1060 /* Otherwise, mark the tree as visited and keep looking. */ | |
1061 else | |
1062 TREE_VISITED (t) = 1; | |
1063 | |
1064 return NULL_TREE; | |
1065 } | |
1066 | |
1067 static tree | |
1068 unmark_visited_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, | |
1069 void *data ATTRIBUTE_UNUSED) | |
1070 { | |
1071 if (TREE_VISITED (*tp)) | |
1072 TREE_VISITED (*tp) = 0; | |
1073 else | |
1074 *walk_subtrees = 0; | |
1075 | |
1076 return NULL_TREE; | |
1077 } | |
1078 | |
1079 /* Unshare all the trees in BODY_P, a pointer into the body of FNDECL, and the | |
1080 bodies of any nested functions if we are unsharing the entire body of | |
1081 FNDECL. */ | |
1082 | |
1083 static void | |
1084 unshare_body (tree *body_p, tree fndecl) | |
1085 { | |
1086 struct cgraph_node *cgn = cgraph_node (fndecl); | |
1087 | |
1088 walk_tree (body_p, copy_if_shared_r, NULL, NULL); | |
1089 if (body_p == &DECL_SAVED_TREE (fndecl)) | |
1090 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested) | |
1091 unshare_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl); | |
1092 } | |
1093 | |
1094 /* Likewise, but mark all trees as not visited. */ | |
1095 | |
1096 static void | |
1097 unvisit_body (tree *body_p, tree fndecl) | |
1098 { | |
1099 struct cgraph_node *cgn = cgraph_node (fndecl); | |
1100 | |
1101 walk_tree (body_p, unmark_visited_r, NULL, NULL); | |
1102 if (body_p == &DECL_SAVED_TREE (fndecl)) | |
1103 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested) | |
1104 unvisit_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl); | |
1105 } | |
1106 | |
1107 /* Unconditionally make an unshared copy of EXPR. This is used when using | |
1108 stored expressions which span multiple functions, such as BINFO_VTABLE, | |
1109 as the normal unsharing process can't tell that they're shared. */ | |
1110 | |
1111 tree | |
1112 unshare_expr (tree expr) | |
1113 { | |
1114 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL); | |
1115 return expr; | |
1116 } | |
1117 | |
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Returns the temporary, or NULL_TREE if
   WRAPPER was already void.

   TEMP, when non-NULL, is an INIT_EXPR/MODIFY_EXPR whose RHS slot the
   wrapper's value expression should be pushed into.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  Each wrapper passed through is
	 voidified and marked as having side effects along the way.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      /* These wrappers carry their value in operand 0.  */
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		/* The value of a statement list is its last statement;
		   an empty list yields NULL and hence no temporary.  */
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    default:
	      /* *p is the value-producing expression itself.  */
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  /* No assignment supplied: capture the value in a fresh
	     "retval" temporary.  */
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
1198 | |
1199 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as | |
1200 a temporary through which they communicate. */ | |
1201 | |
1202 static void | |
1203 build_stack_save_restore (gimple *save, gimple *restore) | |
1204 { | |
1205 tree tmp_var; | |
1206 | |
1207 *save = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_SAVE], 0); | |
1208 tmp_var = create_tmp_var (ptr_type_node, "saved_stack"); | |
1209 gimple_call_set_lhs (*save, tmp_var); | |
1210 | |
1211 *restore = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_RESTORE], | |
1212 1, tmp_var); | |
1213 } | |
1214 | |
/* Gimplify a BIND_EXPR.  Just voidify and recurse.

   *EXPR_P is replaced by the voidification temporary (if any); the
   resulting GIMPLE_BIND is appended to PRE_P.  If the body required a
   VLA allocation, the bind body is wrapped in a try/finally that saves
   and restores the stack pointer.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gimple gimple_bind;
  gimple_seq body;

  /* If the BIND_EXPR has a value, capture it in a temporary.  */
  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && !is_global_var (t)
	      && (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL))
	    omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  /* Record the presence of local explicit register variables.  */
	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}

      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (t)
	  && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
	  && !needs_to_live_in_memory (t))
	DECL_GIMPLE_REG_P (t) = 1;
    }

  gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
                                   BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (gimple_bind);

  /* Reset so we can detect a save_stack request made by this body.  */
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (gimple_bind, body);

  if (gimplify_ctxp->save_stack)
    {
      gimple stack_save, stack_restore, gs;
      gimple_seq cleanup, new_body;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  Note that mudflap depends on the
	 format of the emitted code: see mx_register_decls().  */
      build_stack_save_restore (&stack_save, &stack_restore);

      cleanup = new_body = NULL;
      gimplify_seq_add_stmt (&cleanup, stack_restore);
      gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
      			     GIMPLE_TRY_FINALLY);

      gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (gimple_bind, new_body);
    }

  /* Restore the outer context's save_stack state.  */
  gimplify_ctxp->save_stack = old_save_stack;
  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, gimple_bind);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
1304 | |
1305 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a | |
1306 GIMPLE value, it is assigned to a new temporary and the statement is | |
1307 re-written to return the temporary. | |
1308 | |
1309 PRE_P points to the sequence where side effects that must happen before | |
1310 STMT should be stored. */ | |
1311 | |
1312 static enum gimplify_status | |
1313 gimplify_return_expr (tree stmt, gimple_seq *pre_p) | |
1314 { | |
1315 gimple ret; | |
1316 tree ret_expr = TREE_OPERAND (stmt, 0); | |
1317 tree result_decl, result; | |
1318 | |
1319 if (ret_expr == error_mark_node) | |
1320 return GS_ERROR; | |
1321 | |
1322 if (!ret_expr | |
1323 || TREE_CODE (ret_expr) == RESULT_DECL | |
1324 || ret_expr == error_mark_node) | |
1325 { | |
1326 gimple ret = gimple_build_return (ret_expr); | |
1327 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt)); | |
1328 gimplify_seq_add_stmt (pre_p, ret); | |
1329 return GS_ALL_DONE; | |
1330 } | |
1331 | |
1332 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))) | |
1333 result_decl = NULL_TREE; | |
1334 else | |
1335 { | |
1336 result_decl = TREE_OPERAND (ret_expr, 0); | |
1337 | |
1338 /* See through a return by reference. */ | |
1339 if (TREE_CODE (result_decl) == INDIRECT_REF) | |
1340 result_decl = TREE_OPERAND (result_decl, 0); | |
1341 | |
1342 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR | |
1343 || TREE_CODE (ret_expr) == INIT_EXPR) | |
1344 && TREE_CODE (result_decl) == RESULT_DECL); | |
1345 } | |
1346 | |
1347 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL. | |
1348 Recall that aggregate_value_p is FALSE for any aggregate type that is | |
1349 returned in registers. If we're returning values in registers, then | |
1350 we don't want to extend the lifetime of the RESULT_DECL, particularly | |
1351 across another call. In addition, for those aggregates for which | |
1352 hard_function_value generates a PARALLEL, we'll die during normal | |
1353 expansion of structure assignments; there's special code in expand_return | |
1354 to handle this case that does not exist in expand_expr. */ | |
1355 if (!result_decl | |
1356 || aggregate_value_p (result_decl, TREE_TYPE (current_function_decl))) | |
1357 result = result_decl; | |
1358 else if (gimplify_ctxp->return_temp) | |
1359 result = gimplify_ctxp->return_temp; | |
1360 else | |
1361 { | |
1362 result = create_tmp_var (TREE_TYPE (result_decl), NULL); | |
1363 if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE | |
1364 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE) | |
1365 DECL_GIMPLE_REG_P (result) = 1; | |
1366 | |
1367 /* ??? With complex control flow (usually involving abnormal edges), | |
1368 we can wind up warning about an uninitialized value for this. Due | |
1369 to how this variable is constructed and initialized, this is never | |
1370 true. Give up and never warn. */ | |
1371 TREE_NO_WARNING (result) = 1; | |
1372 | |
1373 gimplify_ctxp->return_temp = result; | |
1374 } | |
1375 | |
1376 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use. | |
1377 Then gimplify the whole thing. */ | |
1378 if (result != result_decl) | |
1379 TREE_OPERAND (ret_expr, 0) = result; | |
1380 | |
1381 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p); | |
1382 | |
1383 ret = gimple_build_return (result); | |
1384 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt)); | |
1385 gimplify_seq_add_stmt (pre_p, ret); | |
1386 | |
1387 return GS_ALL_DONE; | |
1388 } | |
1389 | |
1390 static void | |
1391 gimplify_vla_decl (tree decl, gimple_seq *seq_p) | |
1392 { | |
1393 /* This is a variable-sized decl. Simplify its size and mark it | |
1394 for deferred expansion. Note that mudflap depends on the format | |
1395 of the emitted code: see mx_register_decls(). */ | |
1396 tree t, addr, ptr_type; | |
1397 | |
1398 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p); | |
1399 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p); | |
1400 | |
1401 /* All occurrences of this decl in final gimplified code will be | |
1402 replaced by indirection. Setting DECL_VALUE_EXPR does two | |
1403 things: First, it lets the rest of the gimplifier know what | |
1404 replacement to use. Second, it lets the debug info know | |
1405 where to find the value. */ | |
1406 ptr_type = build_pointer_type (TREE_TYPE (decl)); | |
1407 addr = create_tmp_var (ptr_type, get_name (decl)); | |
1408 DECL_IGNORED_P (addr) = 0; | |
1409 t = build_fold_indirect_ref (addr); | |
1410 SET_DECL_VALUE_EXPR (decl, t); | |
1411 DECL_HAS_VALUE_EXPR_P (decl) = 1; | |
1412 | |
1413 t = built_in_decls[BUILT_IN_ALLOCA]; | |
1414 t = build_call_expr (t, 1, DECL_SIZE_UNIT (decl)); | |
1415 t = fold_convert (ptr_type, t); | |
1416 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t); | |
1417 | |
1418 gimplify_and_add (t, seq_p); | |
1419 | |
1420 /* Indicate that we need to restore the stack level when the | |
1421 enclosing BIND_EXPR is exited. */ | |
1422 gimplify_ctxp->save_stack = true; | |
1423 } | |
1424 | |
1425 | |
/* Gimplifies a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.  The DECL_EXPR itself is consumed
   (*STMT_P is cleared); allocation and initialization statements are
   appended to SEQ_P.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  /* Gimplify the size expressions of the declared type once.  */
  if ((TREE_CODE (decl) == TYPE_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    gimplify_type_sizes (TREE_TYPE (decl), seq_p);

  if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);

      /* A non-constant size, or a large automatic under generic stack
	 checking, requires runtime (alloca) allocation.  */
      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && compare_tree_int (DECL_SIZE_UNIT (decl),
				   STACK_CHECK_MAX_VAR_SIZE) > 0))
	gimplify_vla_decl (decl, seq_p);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      /* Turn the initializer into an explicit INIT_EXPR
		 statement; the INIT_EXPR wrapper itself is garbage
		 after gimplification, hence the ggc_free.  */
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);
    }

  return GS_ALL_DONE;
}
1482 | |
1483 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body | |
1484 and replacing the LOOP_EXPR with goto, but if the loop contains an | |
1485 EXIT_EXPR, we need to append a label for it to jump to. */ | |
1486 | |
1487 static enum gimplify_status | |
1488 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p) | |
1489 { | |
1490 tree saved_label = gimplify_ctxp->exit_label; | |
1491 tree start_label = create_artificial_label (); | |
1492 | |
1493 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label)); | |
1494 | |
1495 gimplify_ctxp->exit_label = NULL_TREE; | |
1496 | |
1497 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p); | |
1498 | |
1499 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label)); | |
1500 | |
1501 if (gimplify_ctxp->exit_label) | |
1502 gimplify_seq_add_stmt (pre_p, gimple_build_label (gimplify_ctxp->exit_label)); | |
1503 | |
1504 gimplify_ctxp->exit_label = saved_label; | |
1505 | |
1506 *expr_p = NULL; | |
1507 return GS_ALL_DONE; | |
1508 } | |
1509 | |
1510 /* Gimplifies a statement list onto a sequence. These may be created either | |
1511 by an enlightened front-end, or by shortcut_cond_expr. */ | |
1512 | |
1513 static enum gimplify_status | |
1514 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p) | |
1515 { | |
1516 tree temp = voidify_wrapper_expr (*expr_p, NULL); | |
1517 | |
1518 tree_stmt_iterator i = tsi_start (*expr_p); | |
1519 | |
1520 while (!tsi_end_p (i)) | |
1521 { | |
1522 gimplify_stmt (tsi_stmt_ptr (i), pre_p); | |
1523 tsi_delink (&i); | |
1524 } | |
1525 | |
1526 if (temp) | |
1527 { | |
1528 *expr_p = temp; | |
1529 return GS_OK; | |
1530 } | |
1531 | |
1532 return GS_ALL_DONE; | |
1533 } | |
1534 | |
1535 /* Compare two case labels. Because the front end should already have | |
1536 made sure that case ranges do not overlap, it is enough to only compare | |
1537 the CASE_LOW values of each case label. */ | |
1538 | |
1539 static int | |
1540 compare_case_labels (const void *p1, const void *p2) | |
1541 { | |
1542 const_tree const case1 = *(const_tree const*)p1; | |
1543 const_tree const case2 = *(const_tree const*)p2; | |
1544 | |
1545 /* The 'default' case label always goes first. */ | |
1546 if (!CASE_LOW (case1)) | |
1547 return -1; | |
1548 else if (!CASE_LOW (case2)) | |
1549 return 1; | |
1550 else | |
1551 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2)); | |
1552 } | |
1553 | |
1554 | |
1555 /* Sort the case labels in LABEL_VEC in place in ascending order. */ | |
1556 | |
1557 void | |
1558 sort_case_labels (VEC(tree,heap)* label_vec) | |
1559 { | |
1560 size_t len = VEC_length (tree, label_vec); | |
1561 qsort (VEC_address (tree, label_vec), len, sizeof (tree), | |
1562 compare_case_labels); | |
1563 } | |
1564 | |
1565 | |
1566 /* Gimplify a SWITCH_EXPR, and collect a TREE_VEC of the labels it can | |
1567 branch to. */ | |
1568 | |
1569 static enum gimplify_status | |
1570 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p) | |
1571 { | |
1572 tree switch_expr = *expr_p; | |
1573 gimple_seq switch_body_seq = NULL; | |
1574 enum gimplify_status ret; | |
1575 | |
1576 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val, | |
1577 fb_rvalue); | |
1578 if (ret == GS_ERROR || ret == GS_UNHANDLED) | |
1579 return ret; | |
1580 | |
1581 if (SWITCH_BODY (switch_expr)) | |
1582 { | |
1583 VEC (tree,heap) *labels; | |
1584 VEC (tree,heap) *saved_labels; | |
1585 tree default_case = NULL_TREE; | |
1586 size_t i, len; | |
1587 gimple gimple_switch; | |
1588 | |
1589 /* If someone can be bothered to fill in the labels, they can | |
1590 be bothered to null out the body too. */ | |
1591 gcc_assert (!SWITCH_LABELS (switch_expr)); | |
1592 | |
1593 /* save old labels, get new ones from body, then restore the old | |
1594 labels. Save all the things from the switch body to append after. */ | |
1595 saved_labels = gimplify_ctxp->case_labels; | |
1596 gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8); | |
1597 | |
1598 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq); | |
1599 labels = gimplify_ctxp->case_labels; | |
1600 gimplify_ctxp->case_labels = saved_labels; | |
1601 | |
1602 i = 0; | |
1603 while (i < VEC_length (tree, labels)) | |
1604 { | |
1605 tree elt = VEC_index (tree, labels, i); | |
1606 tree low = CASE_LOW (elt); | |
1607 bool remove_element = FALSE; | |
1608 | |
1609 if (low) | |
1610 { | |
1611 /* Discard empty ranges. */ | |
1612 tree high = CASE_HIGH (elt); | |
1613 if (high && tree_int_cst_lt (high, low)) | |
1614 remove_element = TRUE; | |
1615 } | |
1616 else | |
1617 { | |
1618 /* The default case must be the last label in the list. */ | |
1619 gcc_assert (!default_case); | |
1620 default_case = elt; | |
1621 remove_element = TRUE; | |
1622 } | |
1623 | |
1624 if (remove_element) | |
1625 VEC_ordered_remove (tree, labels, i); | |
1626 else | |
1627 i++; | |
1628 } | |
1629 len = i; | |
1630 | |
1631 if (!default_case) | |
1632 { | |
1633 gimple new_default; | |
1634 | |
1635 /* If the switch has no default label, add one, so that we jump | |
1636 around the switch body. */ | |
1637 default_case = build3 (CASE_LABEL_EXPR, void_type_node, NULL_TREE, | |
1638 NULL_TREE, create_artificial_label ()); | |
1639 new_default = gimple_build_label (CASE_LABEL (default_case)); | |
1640 gimplify_seq_add_stmt (&switch_body_seq, new_default); | |
1641 } | |
1642 | |
1643 if (!VEC_empty (tree, labels)) | |
1644 sort_case_labels (labels); | |
1645 | |
1646 gimple_switch = gimple_build_switch_vec (SWITCH_COND (switch_expr), | |
1647 default_case, labels); | |
1648 gimplify_seq_add_stmt (pre_p, gimple_switch); | |
1649 gimplify_seq_add_seq (pre_p, switch_body_seq); | |
1650 VEC_free(tree, heap, labels); | |
1651 } | |
1652 else | |
1653 gcc_assert (SWITCH_LABELS (switch_expr)); | |
1654 | |
1655 return GS_ALL_DONE; | |
1656 } | |
1657 | |
1658 | |
1659 static enum gimplify_status | |
1660 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p) | |
1661 { | |
1662 struct gimplify_ctx *ctxp; | |
1663 gimple gimple_label; | |
1664 | |
1665 /* Invalid OpenMP programs can play Duff's Device type games with | |
1666 #pragma omp parallel. At least in the C front end, we don't | |
1667 detect such invalid branches until after gimplification. */ | |
1668 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context) | |
1669 if (ctxp->case_labels) | |
1670 break; | |
1671 | |
1672 gimple_label = gimple_build_label (CASE_LABEL (*expr_p)); | |
1673 VEC_safe_push (tree, heap, ctxp->case_labels, *expr_p); | |
1674 gimplify_seq_add_stmt (pre_p, gimple_label); | |
1675 | |
1676 return GS_ALL_DONE; | |
1677 } | |
1678 | |
1679 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first | |
1680 if necessary. */ | |
1681 | |
1682 tree | |
1683 build_and_jump (tree *label_p) | |
1684 { | |
1685 if (label_p == NULL) | |
1686 /* If there's nowhere to jump, just fall through. */ | |
1687 return NULL_TREE; | |
1688 | |
1689 if (*label_p == NULL_TREE) | |
1690 { | |
1691 tree label = create_artificial_label (); | |
1692 *label_p = label; | |
1693 } | |
1694 | |
1695 return build1 (GOTO_EXPR, void_type_node, *label_p); | |
1696 } | |
1697 | |
1698 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR. | |
1699 This also involves building a label to jump to and communicating it to | |
1700 gimplify_loop_expr through gimplify_ctxp->exit_label. */ | |
1701 | |
1702 static enum gimplify_status | |
1703 gimplify_exit_expr (tree *expr_p) | |
1704 { | |
1705 tree cond = TREE_OPERAND (*expr_p, 0); | |
1706 tree expr; | |
1707 | |
1708 expr = build_and_jump (&gimplify_ctxp->exit_label); | |
1709 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE); | |
1710 *expr_p = expr; | |
1711 | |
1712 return GS_OK; | |
1713 } | |
1714 | |
1715 /* A helper function to be called via walk_tree. Mark all labels under *TP | |
1716 as being forced. To be called for DECL_INITIAL of static variables. */ | |
1717 | |
1718 tree | |
1719 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED) | |
1720 { | |
1721 if (TYPE_P (*tp)) | |
1722 *walk_subtrees = 0; | |
1723 if (TREE_CODE (*tp) == LABEL_DECL) | |
1724 FORCED_LABEL (*tp) = 1; | |
1725 | |
1726 return NULL_TREE; | |
1727 } | |
1728 | |
/* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
   different from its canonical type, wrap the whole thing inside a
   NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
   type.

   The canonical type of a COMPONENT_REF is the type of the field being
   referenced--unless the field is a bit-field which can be read directly
   in a smaller mode, in which case the canonical type is the
   sign-appropriate type corresponding to that mode.  */

static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  /* For integral fields, get_unwidened finds the narrowest mode the
     bit-field can be read in; otherwise the field's own type is
     canonical.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  /* One could argue that all the stuff below is not necessary for
     the non-bitfield case and declare it a FE error if type
     adjustment would be needed.  */
  if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
      tree old_type = TREE_TYPE (expr);
#endif
      int type_quals;

      /* We need to preserve qualifiers and propagate them from
	 operand 0.  */
      type_quals = TYPE_QUALS (type)
	| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
      if (TYPE_QUALS (type) != type_quals)
	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

#ifdef ENABLE_TYPES_CHECKING
      /* It is now a FE error, if the conversion from the canonical
	 type to the original expression type is not useless.  */
      gcc_assert (useless_type_conversion_p (old_type, type));
#endif
    }
}
1779 | |
1780 /* If a NOP conversion is changing a pointer to array of foo to a pointer | |
1781 to foo, embed that change in the ADDR_EXPR by converting | |
1782 T array[U]; | |
1783 (T *)&array | |
1784 ==> | |
1785 &array[L] | |
1786 where L is the lower bound. For simplicity, only do this for constant | |
1787 lower bound. | |
1788 The constraint is that the type of &array[L] is trivially convertible | |
1789 to T *. */ | |
1790 | |
1791 static void | |
1792 canonicalize_addr_expr (tree *expr_p) | |
1793 { | |
1794 tree expr = *expr_p; | |
1795 tree addr_expr = TREE_OPERAND (expr, 0); | |
1796 tree datype, ddatype, pddatype; | |
1797 | |
1798 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */ | |
1799 if (!POINTER_TYPE_P (TREE_TYPE (expr)) | |
1800 || TREE_CODE (addr_expr) != ADDR_EXPR) | |
1801 return; | |
1802 | |
1803 /* The addr_expr type should be a pointer to an array. */ | |
1804 datype = TREE_TYPE (TREE_TYPE (addr_expr)); | |
1805 if (TREE_CODE (datype) != ARRAY_TYPE) | |
1806 return; | |
1807 | |
1808 /* The pointer to element type shall be trivially convertible to | |
1809 the expression pointer type. */ | |
1810 ddatype = TREE_TYPE (datype); | |
1811 pddatype = build_pointer_type (ddatype); | |
1812 if (!useless_type_conversion_p (pddatype, ddatype)) | |
1813 return; | |
1814 | |
1815 /* The lower bound and element sizes must be constant. */ | |
1816 if (!TYPE_SIZE_UNIT (ddatype) | |
1817 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST | |
1818 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype)) | |
1819 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST) | |
1820 return; | |
1821 | |
1822 /* All checks succeeded. Build a new node to merge the cast. */ | |
1823 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0), | |
1824 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)), | |
1825 NULL_TREE, NULL_TREE); | |
1826 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p); | |
1827 } | |
1828 | |
/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR.  Remove it and/or other conversions
   underneath as appropriate.  Always returns GS_OK; *EXPR_P is updated
   in place.  */

static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
  tree tem;
  gcc_assert (CONVERT_EXPR_P (*expr_p));

  /* Then strip away all but the outermost conversion.  */
  STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));

  /* And remove the outermost conversion if it's useless.  */
  if (tree_ssa_useless_type_conversion (*expr_p))
    *expr_p = TREE_OPERAND (*expr_p, 0);

  /* Attempt to avoid NOP_EXPR by producing reference to a subtype.
     For example this fold (subclass *)&A into &A->subclass avoiding
     a need for statement.  */
  if (CONVERT_EXPR_P (*expr_p)
      && POINTER_TYPE_P (TREE_TYPE (*expr_p))
      && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (*expr_p, 0)))
      && (tem = maybe_fold_offset_to_address
	  (TREE_OPERAND (*expr_p, 0),
	   integer_zero_node, TREE_TYPE (*expr_p))) != NULL_TREE)
    *expr_p = tem;

  /* If we still have a conversion at the toplevel,
     then canonicalize some constructs.  */
  if (CONVERT_EXPR_P (*expr_p))
    {
      tree sub = TREE_OPERAND (*expr_p, 0);

      /* If a NOP conversion is changing the type of a COMPONENT_REF
	 expression, then canonicalize its type now in order to expose more
	 redundant conversions.  */
      if (TREE_CODE (sub) == COMPONENT_REF)
	canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));

      /* If a NOP conversion is changing a pointer to array of foo
	 to a pointer to foo, embed that change in the ADDR_EXPR.  */
      else if (TREE_CODE (sub) == ADDR_EXPR)
	canonicalize_addr_expr (expr_p);
    }

  /* If we have a conversion to a non-register type force the
     use of a VIEW_CONVERT_EXPR instead.  */
  if (!is_gimple_reg_type (TREE_TYPE (*expr_p)))
    *expr_p = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
			   TREE_OPERAND (*expr_p, 0));

  return GS_OK;
}
1882 | |
1883 /* Gimplify a VAR_DECL or PARM_DECL. Returns GS_OK if we expanded a | |
1884 DECL_VALUE_EXPR, and it's worth re-examining things. */ | |
1885 | |
1886 static enum gimplify_status | |
1887 gimplify_var_or_parm_decl (tree *expr_p) | |
1888 { | |
1889 tree decl = *expr_p; | |
1890 | |
1891 /* ??? If this is a local variable, and it has not been seen in any | |
1892 outer BIND_EXPR, then it's probably the result of a duplicate | |
1893 declaration, for which we've already issued an error. It would | |
1894 be really nice if the front end wouldn't leak these at all. | |
1895 Currently the only known culprit is C++ destructors, as seen | |
1896 in g++.old-deja/g++.jason/binding.C. */ | |
1897 if (TREE_CODE (decl) == VAR_DECL | |
1898 && !DECL_SEEN_IN_BIND_EXPR_P (decl) | |
1899 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl) | |
1900 && decl_function_context (decl) == current_function_decl) | |
1901 { | |
1902 gcc_assert (errorcount || sorrycount); | |
1903 return GS_ERROR; | |
1904 } | |
1905 | |
1906 /* When within an OpenMP context, notice uses of variables. */ | |
1907 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true)) | |
1908 return GS_ALL_DONE; | |
1909 | |
1910 /* If the decl is an alias for another expression, substitute it now. */ | |
1911 if (DECL_HAS_VALUE_EXPR_P (decl)) | |
1912 { | |
1913 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl)); | |
1914 return GS_OK; | |
1915 } | |
1916 | |
1917 return GS_ALL_DONE; | |
1918 } | |
1919 | |
1920 | |
/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
   node *EXPR_P.

      compound_lval
	      : min_lval '[' val ']'
	      | min_lval '.' ID
	      | compound_lval '[' val ']'
	      | compound_lval '.' ID

   This is not part of the original SIMPLE definition, which separates
   array and member references, but it seems reasonable to handle them
   together.  Also, this way we don't run into problems with union
   aliasing; gcc requires that for accesses through a union to alias, the
   union reference must be explicit, which was not always the case when we
   were splitting up array and member refs.

   PRE_P points to the sequence where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the sequence where side effects that must happen after
   *EXPR_P should be stored.

   FALLBACK describes what kind of temporary may be created when a
   subexpression cannot be gimplified in place.  Returns the combined
   gimplify_status of all the sub-gimplifications (folded with MIN).  */

static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			fallback_t fallback)
{
  tree *p;
  VEC(tree,heap) *stack;
  enum gimplify_status ret = GS_OK, tret;
  int i;

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  stack = VEC_alloc (tree, heap, 10);

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
	*p = fold_indirect_ref (*p);

      if (handled_component_p (*p))
	;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
	 additional COMPONENT_REFs.  */
      else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
	       && gimplify_var_or_parm_decl (p) == GS_OK)
	goto restart;
      else
	break;

      VEC_safe_push (tree, heap, stack, *p);
    }

  /* We must have walked through at least one handled component.  */
  gcc_assert (VEC_length (tree, stack));

  /* Now STACK is a stack of pointers to all the refs we've walked through
     and P points to the innermost expression.

     Java requires that we elaborated nodes in source order.  That
     means we must gimplify the inner expression followed by each of
     the indices, in order.  But we can't gimplify the inner
     expression until we deal with any variable bounds, sizes, or
     positions in order to deal with PLACEHOLDER_EXPRs.

     So we do this in three steps.  First we deal with the annotations
     for any variables in the components, then we gimplify the base,
     then we gimplify any indices, from left to right.  */
  for (i = VEC_length (tree, stack) - 1; i >= 0; i--)
    {
      tree t = VEC_index (tree, stack, i);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the low bound and element type size and put them into
	     the ARRAY_REF.  If these values are set, they have already been
	     gimplified.  Operand 2 caches the low bound, operand 3 the
	     element size divided by the element alignment.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree low = unshare_expr (array_ref_low_bound (t));
	      if (!is_gimple_min_invariant (low))
		{
		  TREE_OPERAND (t, 2) = low;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_formal_tmp_reg,
					fb_rvalue);
		  /* NOTE: MIN over gimplify_status relies on the enum's
		     value ordering (GS_ERROR being smallest).  */
		  ret = MIN (ret, tret);
		}
	    }

	  if (!TREE_OPERAND (t, 3))
	    {
	      tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
	      tree elmt_size = unshare_expr (array_ref_element_size (t));
	      tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

	      /* Divide the element size by the alignment of the element
		 type (above).  */
	      elmt_size = size_binop (EXACT_DIV_EXPR, elmt_size, factor);

	      if (!is_gimple_min_invariant (elmt_size))
		{
		  TREE_OPERAND (t, 3) = elmt_size;
		  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
					post_p, is_gimple_formal_tmp_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  /* Set the field offset into T and gimplify it.  Operand 2
	     caches the field offset divided by its alignment.  */
	  if (!TREE_OPERAND (t, 2))
	    {
	      tree offset = unshare_expr (component_ref_field_offset (t));
	      tree field = TREE_OPERAND (t, 1);
	      tree factor
		= size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

	      /* Divide the offset by its alignment.  */
	      offset = size_binop (EXACT_DIV_EXPR, offset, factor);

	      if (!is_gimple_min_invariant (offset))
		{
		  TREE_OPERAND (t, 2) = offset;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_formal_tmp_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	}
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
			fallback | fb_lvalue);
  ret = MIN (ret, tret);

  /* And finally, the indices and operands to BIT_FIELD_REF.  During this
     loop we also remove any useless conversions.  Walking the stack pops
     from innermost to outermost, i.e. left to right in source order.  */
  for (; VEC_length (tree, stack) > 0; )
    {
      tree t = VEC_pop (tree, stack);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the dimension.
	     Temporary fix for gcc.c-torture/execute/20040313-1.c.
	     Gimplify non-constant array indices into a temporary
	     variable.
	     FIXME - The real fix is to gimplify post-modify
	     expressions into a minimal gimple lvalue.  However, that
	     exposes bugs in alias analysis.  The alias analyzer does
	     not handle &PTR->FIELD very well.  Will fix after the
	     branch is merged into mainline (dnovillo 2004-05-03).  */
	  if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				    is_gimple_formal_tmp_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
      else if (TREE_CODE (t) == BIT_FIELD_REF)
	{
	  /* Both the bit size (operand 1) and bit position (operand 2)
	     must become GIMPLE values.  */
	  tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				is_gimple_val, fb_rvalue);
	  ret = MIN (ret, tret);
	  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				is_gimple_val, fb_rvalue);
	  ret = MIN (ret, tret);
	}

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
	 TREE_SIDE_EFFECTS set which would have caused all the outer
	 expressions in *EXPR_P leading to P to also have had
	 TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    {
      canonicalize_component_ref (expr_p);
      ret = MIN (ret, GS_OK);
    }

  VEC_free (tree, heap, stack);

  return ret;
}
2119 | |
/* Gimplify the self modifying expression pointed to by EXPR_P
   (++, --, +=, -=).

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
   *EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
   in another expression.

   Returns GS_ALL_DONE for a fully-emitted postfix form, GS_OK when the
   expression was rewritten into a MODIFY_EXPR still to be gimplified,
   or GS_ERROR on failure.  */

static enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			bool want_value)
{
  enum tree_code code;
  tree lhs, lvalue, rhs, t1;
  gimple_seq post = NULL, *orig_post_p = post_p;
  bool postfix;
  enum tree_code arith_code;
  enum gimplify_status ret;

  code = TREE_CODE (*expr_p);

  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
	      || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);

  /* Prefix or postfix?  */
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
    /* Faster to treat as prefix if result is not used.  */
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  We redirect
     POST_P to a local queue and splice it back in at the end.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operator, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  */
  if (postfix)
    {
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  /* For POINTERs increment, use POINTER_PLUS_EXPR.  The offset operand
     of POINTER_PLUS_EXPR must be sizetype; decrement is expressed as
     adding a negated offset.  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      rhs = fold_convert (sizetype, rhs);
      if (arith_code == MINUS_EXPR)
	rhs = fold_build1 (NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      arith_code = POINTER_PLUS_EXPR;
    }

  t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);

  if (postfix)
    {
      /* Emit the update on the caller's post queue, then the inner
	 expression's own post side effects, and hand back the saved
	 rvalue as the result of the whole expression.  */
      gimplify_assign (lvalue, t1, orig_post_p);
      gimplify_seq_add_seq (orig_post_p, post);
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
  else
    {
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}
2209 | |
2210 | |
2211 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */ | |
2212 | |
2213 static void | |
2214 maybe_with_size_expr (tree *expr_p) | |
2215 { | |
2216 tree expr = *expr_p; | |
2217 tree type = TREE_TYPE (expr); | |
2218 tree size; | |
2219 | |
2220 /* If we've already wrapped this or the type is error_mark_node, we can't do | |
2221 anything. */ | |
2222 if (TREE_CODE (expr) == WITH_SIZE_EXPR | |
2223 || type == error_mark_node) | |
2224 return; | |
2225 | |
2226 /* If the size isn't known or is a constant, we have nothing to do. */ | |
2227 size = TYPE_SIZE_UNIT (type); | |
2228 if (!size || TREE_CODE (size) == INTEGER_CST) | |
2229 return; | |
2230 | |
2231 /* Otherwise, make a WITH_SIZE_EXPR. */ | |
2232 size = unshare_expr (size); | |
2233 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr); | |
2234 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size); | |
2235 } | |
2236 | |
2237 | |
2238 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P | |
2239 Store any side-effects in PRE_P. CALL_LOCATION is the location of | |
2240 the CALL_EXPR. */ | |
2241 | |
2242 static enum gimplify_status | |
2243 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location) | |
2244 { | |
2245 bool (*test) (tree); | |
2246 fallback_t fb; | |
2247 | |
2248 /* In general, we allow lvalues for function arguments to avoid | |
2249 extra overhead of copying large aggregates out of even larger | |
2250 aggregates into temporaries only to copy the temporaries to | |
2251 the argument list. Make optimizers happy by pulling out to | |
2252 temporaries those types that fit in registers. */ | |
2253 if (is_gimple_reg_type (TREE_TYPE (*arg_p))) | |
2254 test = is_gimple_val, fb = fb_rvalue; | |
2255 else | |
2256 test = is_gimple_lvalue, fb = fb_either; | |
2257 | |
2258 /* If this is a variable sized type, we must remember the size. */ | |
2259 maybe_with_size_expr (arg_p); | |
2260 | |
2261 /* Make sure arguments have the same location as the function call | |
2262 itself. */ | |
2263 protected_set_expr_location (*arg_p, call_location); | |
2264 | |
2265 /* There is a sequence point before a function call. Side effects in | |
2266 the argument list must occur before the actual call. So, when | |
2267 gimplifying arguments, force gimplify_expr to use an internal | |
2268 post queue which is then appended to the end of PRE_P. */ | |
2269 return gimplify_expr (arg_p, pre_p, NULL, test, fb); | |
2270 } | |
2271 | |
2272 | |
2273 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P. | |
2274 WANT_VALUE is true if the result of the call is desired. */ | |
2275 | |
2276 static enum gimplify_status | |
2277 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value) | |
2278 { | |
2279 tree fndecl, parms, p; | |
2280 enum gimplify_status ret; | |
2281 int i, nargs; | |
2282 gimple call; | |
2283 bool builtin_va_start_p = FALSE; | |
2284 | |
2285 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR); | |
2286 | |
2287 /* For reliable diagnostics during inlining, it is necessary that | |
2288 every call_expr be annotated with file and line. */ | |
2289 if (! EXPR_HAS_LOCATION (*expr_p)) | |
2290 SET_EXPR_LOCATION (*expr_p, input_location); | |
2291 | |
2292 /* This may be a call to a builtin function. | |
2293 | |
2294 Builtin function calls may be transformed into different | |
2295 (and more efficient) builtin function calls under certain | |
2296 circumstances. Unfortunately, gimplification can muck things | |
2297 up enough that the builtin expanders are not aware that certain | |
2298 transformations are still valid. | |
2299 | |
2300 So we attempt transformation/gimplification of the call before | |
2301 we gimplify the CALL_EXPR. At this time we do not manage to | |
2302 transform all calls in the same manner as the expanders do, but | |
2303 we do transform most of them. */ | |
2304 fndecl = get_callee_fndecl (*expr_p); | |
2305 if (fndecl && DECL_BUILT_IN (fndecl)) | |
2306 { | |
2307 tree new_tree = fold_call_expr (*expr_p, !want_value); | |
2308 | |
2309 if (new_tree && new_tree != *expr_p) | |
2310 { | |
2311 /* There was a transformation of this call which computes the | |
2312 same value, but in a more efficient way. Return and try | |
2313 again. */ | |
2314 *expr_p = new_tree; | |
2315 return GS_OK; | |
2316 } | |
2317 | |
2318 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL | |
2319 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_VA_START) | |
2320 { | |
2321 builtin_va_start_p = TRUE; | |
2322 if (call_expr_nargs (*expr_p) < 2) | |
2323 { | |
2324 error ("too few arguments to function %<va_start%>"); | |
2325 *expr_p = build_empty_stmt (); | |
2326 return GS_OK; | |
2327 } | |
2328 | |
2329 if (fold_builtin_next_arg (*expr_p, true)) | |
2330 { | |
2331 *expr_p = build_empty_stmt (); | |
2332 return GS_OK; | |
2333 } | |
2334 } | |
2335 } | |
2336 | |
2337 /* There is a sequence point before the call, so any side effects in | |
2338 the calling expression must occur before the actual call. Force | |
2339 gimplify_expr to use an internal post queue. */ | |
2340 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL, | |
2341 is_gimple_call_addr, fb_rvalue); | |
2342 | |
2343 nargs = call_expr_nargs (*expr_p); | |
2344 | |
2345 /* Get argument types for verification. */ | |
2346 fndecl = get_callee_fndecl (*expr_p); | |
2347 parms = NULL_TREE; | |
2348 if (fndecl) | |
2349 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl)); | |
2350 else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p)))) | |
2351 parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p)))); | |
2352 | |
2353 if (fndecl && DECL_ARGUMENTS (fndecl)) | |
2354 p = DECL_ARGUMENTS (fndecl); | |
2355 else if (parms) | |
2356 p = parms; | |
2357 else | |
2358 p = NULL_TREE; | |
2359 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p)) | |
2360 ; | |
2361 | |
2362 /* If the last argument is __builtin_va_arg_pack () and it is not | |
2363 passed as a named argument, decrease the number of CALL_EXPR | |
2364 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */ | |
2365 if (!p | |
2366 && i < nargs | |
2367 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR) | |
2368 { | |
2369 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1); | |
2370 tree last_arg_fndecl = get_callee_fndecl (last_arg); | |
2371 | |
2372 if (last_arg_fndecl | |
2373 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL | |
2374 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL | |
2375 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK) | |
2376 { | |
2377 tree call = *expr_p; | |
2378 | |
2379 --nargs; | |
2380 *expr_p = build_call_array (TREE_TYPE (call), CALL_EXPR_FN (call), | |
2381 nargs, CALL_EXPR_ARGP (call)); | |
2382 | |
2383 /* Copy all CALL_EXPR flags, location and block, except | |
2384 CALL_EXPR_VA_ARG_PACK flag. */ | |
2385 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call); | |
2386 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call); | |
2387 CALL_EXPR_RETURN_SLOT_OPT (*expr_p) | |
2388 = CALL_EXPR_RETURN_SLOT_OPT (call); | |
2389 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call); | |
2390 CALL_CANNOT_INLINE_P (*expr_p) = CALL_CANNOT_INLINE_P (call); | |
2391 SET_EXPR_LOCUS (*expr_p, EXPR_LOCUS (call)); | |
2392 TREE_BLOCK (*expr_p) = TREE_BLOCK (call); | |
2393 | |
2394 /* Set CALL_EXPR_VA_ARG_PACK. */ | |
2395 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1; | |
2396 } | |
2397 } | |
2398 | |
2399 /* Finally, gimplify the function arguments. */ | |
2400 if (nargs > 0) | |
2401 { | |
2402 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0); | |
2403 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs; | |
2404 PUSH_ARGS_REVERSED ? i-- : i++) | |
2405 { | |
2406 enum gimplify_status t; | |
2407 | |
2408 /* Avoid gimplifying the second argument to va_start, which needs to | |
2409 be the plain PARM_DECL. */ | |
2410 if ((i != 1) || !builtin_va_start_p) | |
2411 { | |
2412 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, | |
2413 EXPR_LOCATION (*expr_p)); | |
2414 | |
2415 if (t == GS_ERROR) | |
2416 ret = GS_ERROR; | |
2417 } | |
2418 } | |
2419 } | |
2420 | |
2421 /* Try this again in case gimplification exposed something. */ | |
2422 if (ret != GS_ERROR) | |
2423 { | |
2424 tree new_tree = fold_call_expr (*expr_p, !want_value); | |
2425 | |
2426 if (new_tree && new_tree != *expr_p) | |
2427 { | |
2428 /* There was a transformation of this call which computes the | |
2429 same value, but in a more efficient way. Return and try | |
2430 again. */ | |
2431 *expr_p = new_tree; | |
2432 return GS_OK; | |
2433 } | |
2434 } | |
2435 else | |
2436 { | |
2437 *expr_p = error_mark_node; | |
2438 return GS_ERROR; | |
2439 } | |
2440 | |
2441 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its | |
2442 decl. This allows us to eliminate redundant or useless | |
2443 calls to "const" functions. */ | |
2444 if (TREE_CODE (*expr_p) == CALL_EXPR) | |
2445 { | |
2446 int flags = call_expr_flags (*expr_p); | |
2447 if (flags & (ECF_CONST | ECF_PURE) | |
2448 /* An infinite loop is considered a side effect. */ | |
2449 && !(flags & (ECF_LOOPING_CONST_OR_PURE))) | |
2450 TREE_SIDE_EFFECTS (*expr_p) = 0; | |
2451 } | |
2452 | |
2453 /* If the value is not needed by the caller, emit a new GIMPLE_CALL | |
2454 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified | |
2455 form and delegate the creation of a GIMPLE_CALL to | |
2456 gimplify_modify_expr. This is always possible because when | |
2457 WANT_VALUE is true, the caller wants the result of this call into | |
2458 a temporary, which means that we will emit an INIT_EXPR in | |
2459 internal_get_tmp_var which will then be handled by | |
2460 gimplify_modify_expr. */ | |
2461 if (!want_value) | |
2462 { | |
2463 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we | |
2464 have to do is replicate it as a GIMPLE_CALL tuple. */ | |
2465 call = gimple_build_call_from_tree (*expr_p); | |
2466 gimplify_seq_add_stmt (pre_p, call); | |
2467 *expr_p = NULL_TREE; | |
2468 } | |
2469 | |
2470 return ret; | |
2471 } | |
2472 | |
/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
   rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.

   TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
   condition is true or false, respectively.  If null, we should generate
   our own to skip over the evaluation of this specific expression.

   This function is the tree equivalent of do_jump.

   shortcut_cond_r should only be called by shortcut_cond_expr.  */

static tree
shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p)
{
  /* LOCAL_LABEL is created lazily when a caller passed no label for the
     branch that must skip over the second operand.  */
  tree local_label = NULL_TREE;
  tree t, expr = NULL;

  /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
     retain the shortcut semantics.  Just insert the gotos here;
     shortcut_cond_expr will append the real blocks later.  */
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
    {
      /* Turn if (a && b) into

	 if (a); else goto no;
	 if (b) goto yes; else goto no;
	 (no:) */

      if (false_label_p == NULL)
	false_label_p = &local_label;

      /* Recurse on the left operand: true falls through to B, false
	 jumps to the shared "no" label.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p);
      append_to_statement_list (t, &expr);

      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
			   false_label_p);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
    {
      /* Turn if (a || b) into

	 if (a) goto yes;
	 if (b) goto yes; else goto no;
	 (yes:) */

      if (true_label_p == NULL)
	true_label_p = &local_label;

      /* Recurse on the left operand: true jumps to the shared "yes"
	 label, false falls through to B.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL);
      append_to_statement_list (t, &expr);

      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
			   false_label_p);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == COND_EXPR)
    {
      /* As long as we're messing with gotos, turn if (a ? b : c) into
	 if (a)
	   if (b) goto yes; else goto no;
	 else
	   if (c) goto yes; else goto no; */
      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
		     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
				      false_label_p),
		     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
				      false_label_p));
    }
  else
    {
      /* Base case: a simple predicate becomes a two-way conditional
	 jump; build_and_jump creates labels on demand.  */
      expr = build3 (COND_EXPR, void_type_node, pred,
		     build_and_jump (true_label_p),
		     build_and_jump (false_label_p));
    }

  /* Emit the lazily-created local label, if any, after the tests.  */
  if (local_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, local_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
2557 | |
/* Given a conditional expression EXPR with short-circuit boolean
   predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
   predicate apart into the equivalent sequence of conditionals.
   Returns the rewritten statement tree.  */

static tree
shortcut_cond_expr (tree expr)
{
  tree pred = TREE_OPERAND (expr, 0);
  tree then_ = TREE_OPERAND (expr, 1);
  tree else_ = TREE_OPERAND (expr, 2);
  tree true_label, false_label, end_label, t;
  tree *true_label_p;
  tree *false_label_p;
  bool emit_end, emit_false, jump_over_else;
  /* Arms without side effects are effectively empty.  */
  bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
  bool else_se = else_ && TREE_SIDE_EFFECTS (else_);

  /* First do simple transformations.  */
  if (!else_se)
    {
      /* If there is no 'else', turn (a && b) into if (a) if (b).  */
      while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
	{
	  /* Rewrite EXPR in place to test only B, wrap it in a new
	     COND_EXPR testing A, and keep peeling from the left.  */
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  then_ = shortcut_cond_expr (expr);
	  then_se = then_ && TREE_SIDE_EFFECTS (then_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
	}
    }

  if (!then_se)
    {
      /* If there is no 'then', turn
	   if (a || b); else d
	 into
	   if (a); else if (b); else d.  */
      while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
	{
	  /* Mirror image of the && case above.  */
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  else_ = shortcut_cond_expr (expr);
	  else_se = else_ && TREE_SIDE_EFFECTS (else_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
	}
    }

  /* If we're done, great.  */
  if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
      && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
    return expr;

  /* Otherwise we need to mess with gotos.  Change
       if (a) c; else d;
     to
       if (a); else goto no;
       c; goto end;
       no: d; end:
     and recursively gimplify the condition.  */

  true_label = false_label = end_label = NULL_TREE;

  /* If our arms just jump somewhere, hijack those labels so we don't
     generate jumps to jumps.  */

  if (then_
      && TREE_CODE (then_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
    {
      true_label = GOTO_DESTINATION (then_);
      then_ = NULL;
      then_se = false;
    }

  if (else_
      && TREE_CODE (else_) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
    {
      false_label = GOTO_DESTINATION (else_);
      else_ = NULL;
      else_se = false;
    }

  /* If we aren't hijacking a label for the 'then' branch, it falls through.  */
  if (true_label)
    true_label_p = &true_label;
  else
    true_label_p = NULL;

  /* The 'else' branch also needs a label if it contains interesting code.  */
  if (false_label || else_se)
    false_label_p = &false_label;
  else
    false_label_p = NULL;

  /* If there was nothing else in our arms, just forward the label(s).  */
  if (!then_se && !else_se)
    return shortcut_cond_r (pred, true_label_p, false_label_p);

  /* If our last subexpression already has a terminal label, reuse it.
     Note that EXPR is reused below as a scratch variable from here on.  */
  if (else_se)
    expr = expr_last (else_);
  else if (then_se)
    expr = expr_last (then_);
  else
    expr = NULL;
  if (expr && TREE_CODE (expr) == LABEL_EXPR)
    end_label = LABEL_EXPR_LABEL (expr);

  /* If we don't care about jumping to the 'else' branch, jump to the end
     if the condition is false.  */
  if (!false_label_p)
    false_label_p = &end_label;

  /* We only want to emit these labels if we aren't hijacking them.  */
  emit_end = (end_label == NULL_TREE);
  emit_false = (false_label == NULL_TREE);

  /* We only emit the jump over the else clause if we have to--if the
     then clause may fall through.  Otherwise we can wind up with a
     useless jump and a useless label at the end of gimplified code,
     which will cause us to think that this conditional as a whole
     falls through even if it doesn't.  If we then inline a function
     which ends with such a condition, that can cause us to issue an
     inappropriate warning about control reaching the end of a
     non-void function.  */
  jump_over_else = block_may_fallthru (then_);

  pred = shortcut_cond_r (pred, true_label_p, false_label_p);

  /* Assemble the final statement list: predicate tests, then-arm,
     optional jump over the else, false label, else-arm, end label.  */
  expr = NULL;
  append_to_statement_list (pred, &expr);

  append_to_statement_list (then_, &expr);
  if (else_se)
    {
      if (jump_over_else)
	{
	  t = build_and_jump (&end_label);
	  append_to_statement_list (t, &expr);
	}
      if (emit_false)
	{
	  t = build1 (LABEL_EXPR, void_type_node, false_label);
	  append_to_statement_list (t, &expr);
	}
      append_to_statement_list (else_, &expr);
    }
  if (emit_end && end_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, end_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
2714 | |
2715 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */ | |
2716 | |
2717 tree | |
2718 gimple_boolify (tree expr) | |
2719 { | |
2720 tree type = TREE_TYPE (expr); | |
2721 | |
2722 if (TREE_CODE (type) == BOOLEAN_TYPE) | |
2723 return expr; | |
2724 | |
2725 switch (TREE_CODE (expr)) | |
2726 { | |
2727 case TRUTH_AND_EXPR: | |
2728 case TRUTH_OR_EXPR: | |
2729 case TRUTH_XOR_EXPR: | |
2730 case TRUTH_ANDIF_EXPR: | |
2731 case TRUTH_ORIF_EXPR: | |
2732 /* Also boolify the arguments of truth exprs. */ | |
2733 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1)); | |
2734 /* FALLTHRU */ | |
2735 | |
2736 case TRUTH_NOT_EXPR: | |
2737 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0)); | |
2738 /* FALLTHRU */ | |
2739 | |
2740 case EQ_EXPR: case NE_EXPR: | |
2741 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR: | |
2742 /* These expressions always produce boolean results. */ | |
2743 TREE_TYPE (expr) = boolean_type_node; | |
2744 return expr; | |
2745 | |
2746 default: | |
2747 /* Other expressions that get here must have boolean values, but | |
2748 might need to be converted to the appropriate mode. */ | |
2749 return fold_convert (boolean_type_node, expr); | |
2750 } | |
2751 } | |
2752 | |
2753 /* Given a conditional expression *EXPR_P without side effects, gimplify | |
2754 its operands. New statements are inserted to PRE_P. */ | |
2755 | |
2756 static enum gimplify_status | |
2757 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p) | |
2758 { | |
2759 tree expr = *expr_p, cond; | |
2760 enum gimplify_status ret, tret; | |
2761 enum tree_code code; | |
2762 | |
2763 cond = gimple_boolify (COND_EXPR_COND (expr)); | |
2764 | |
2765 /* We need to handle && and || specially, as their gimplification | |
2766 creates pure cond_expr, thus leading to an infinite cycle otherwise. */ | |
2767 code = TREE_CODE (cond); | |
2768 if (code == TRUTH_ANDIF_EXPR) | |
2769 TREE_SET_CODE (cond, TRUTH_AND_EXPR); | |
2770 else if (code == TRUTH_ORIF_EXPR) | |
2771 TREE_SET_CODE (cond, TRUTH_OR_EXPR); | |
2772 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue); | |
2773 COND_EXPR_COND (*expr_p) = cond; | |
2774 | |
2775 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL, | |
2776 is_gimple_val, fb_rvalue); | |
2777 ret = MIN (ret, tret); | |
2778 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL, | |
2779 is_gimple_val, fb_rvalue); | |
2780 | |
2781 return MIN (ret, tret); | |
2782 } | |
2783 | |
2784 /* Returns true if evaluating EXPR could trap. | |
2785 EXPR is GENERIC, while tree_could_trap_p can be called | |
2786 only on GIMPLE. */ | |
2787 | |
2788 static bool | |
2789 generic_expr_could_trap_p (tree expr) | |
2790 { | |
2791 unsigned i, n; | |
2792 | |
2793 if (!expr || is_gimple_val (expr)) | |
2794 return false; | |
2795 | |
2796 if (!EXPR_P (expr) || tree_could_trap_p (expr)) | |
2797 return true; | |
2798 | |
2799 n = TREE_OPERAND_LENGTH (expr); | |
2800 for (i = 0; i < n; i++) | |
2801 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i))) | |
2802 return true; | |
2803 | |
2804 return false; | |
2805 } | |
2806 | |
/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
   into

   if (p)			if (p)
     t1 = a;			  a;
   else			or	else
     t1 = b;			  b;
   t1;

   The second form is used when *EXPR_P is of type void.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.

   FALLBACK indicates what kind of value the caller can accept
   (rvalue/lvalue), which decides whether the temporary holds the value
   itself or its address.  */

static enum gimplify_status
gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
{
  tree expr = *expr_p;
  tree tmp, type, arm1, arm2;
  enum gimplify_status ret;
  tree label_true, label_false, label_cont;
  bool have_then_clause_p, have_else_clause_p;
  gimple gimple_cond;
  enum tree_code pred_code;
  gimple_seq seq = NULL;

  type = TREE_TYPE (expr);

  /* If this COND_EXPR has a value, copy the values into a temporary within
     the arms.  */
  if (! VOID_TYPE_P (type))
    {
      tree result;

      /* If an rvalue is ok or we do not require an lvalue, avoid creating
	 an addressable temporary.  */
      if (((fallback & fb_rvalue)
	   || !(fallback & fb_lvalue))
	  && !TREE_ADDRESSABLE (type))
	{
	  if (gimplify_ctxp->allow_rhs_cond_expr
	      /* If either branch has side effects or could trap, it can't be
		 evaluated unconditionally.  */
	      && !TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1))
	      && !generic_expr_could_trap_p (TREE_OPERAND (*expr_p, 1))
	      && !TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 2))
	      && !generic_expr_could_trap_p (TREE_OPERAND (*expr_p, 2)))
	    return gimplify_pure_cond_expr (expr_p, pre_p);

	  /* The temporary holds the value directly.  */
	  result = tmp = create_tmp_var (TREE_TYPE (expr), "iftmp");
	  ret = GS_ALL_DONE;
	}
      else
	{
	  /* An lvalue (or addressable type) is required: make the
	     temporary hold the ADDRESS of each arm and dereference it
	     as the result.  */
	  tree type = build_pointer_type (TREE_TYPE (expr));

	  if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
	    TREE_OPERAND (expr, 1) =
	      build_fold_addr_expr (TREE_OPERAND (expr, 1));

	  if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
	    TREE_OPERAND (expr, 2) =
	      build_fold_addr_expr (TREE_OPERAND (expr, 2));

	  tmp = create_tmp_var (type, "iftmp");

	  expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (expr, 0),
			 TREE_OPERAND (expr, 1), TREE_OPERAND (expr, 2));

	  result = build_fold_indirect_ref (tmp);
	}

      /* Build the then clause, 't1 = a;'.  But don't build an assignment
	 if this branch is void; in C++ it can be, if it's a throw.  */
      if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
	TREE_OPERAND (expr, 1)
	  = build2 (MODIFY_EXPR, TREE_TYPE (tmp), tmp, TREE_OPERAND (expr, 1));

      /* Build the else clause, 't1 = b;'.  */
      if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
	TREE_OPERAND (expr, 2)
	  = build2 (MODIFY_EXPR, TREE_TYPE (tmp), tmp, TREE_OPERAND (expr, 2));

      TREE_TYPE (expr) = void_type_node;
      recalculate_side_effects (expr);

      /* Move the COND_EXPR to the prequeue.  */
      gimplify_stmt (&expr, pre_p);

      *expr_p = result;
      return GS_ALL_DONE;
    }

  /* Make sure the condition has BOOLEAN_TYPE.  */
  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

  /* Break apart && and || conditions.  */
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
      || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
    {
      expr = shortcut_cond_expr (expr);

      if (expr != *expr_p)
	{
	  *expr_p = expr;

	  /* We can't rely on gimplify_expr to re-gimplify the expanded
	     form properly, as cleanups might cause the target labels to be
	     wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
	     set up a conditional context.  */
	  gimple_push_condition ();
	  gimplify_stmt (expr_p, &seq);
	  gimple_pop_condition (pre_p);
	  gimple_seq_add_seq (pre_p, seq);

	  return GS_ALL_DONE;
	}
    }

  /* Now do the normal gimplification.  */

  /* Gimplify condition.  */
  ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
		       fb_rvalue);
  if (ret == GS_ERROR)
    return GS_ERROR;
  gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);

  gimple_push_condition ();

  /* If an arm is already a bare 'goto label' to a local label, reuse that
     label as the branch target instead of creating an artificial one and
     re-emitting the goto.  */
  have_then_clause_p = have_else_clause_p = false;
  if (TREE_OPERAND (expr, 1) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
    {
      label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
      have_then_clause_p = true;
    }
  else
    label_true = create_artificial_label ();
  if (TREE_OPERAND (expr, 2) != NULL
      && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
	  == current_function_decl)
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
    {
      label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
      have_else_clause_p = true;
    }
  else
    label_false = create_artificial_label ();

  gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
				 &arm2);

  gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
				   label_false);

  gimplify_seq_add_stmt (&seq, gimple_cond);
  label_cont = NULL_TREE;
  if (!have_then_clause_p)
    {
      /* For if (...) {} else { code; } put label_true after
	 the else block.  */
      if (TREE_OPERAND (expr, 1) == NULL_TREE
	  && !have_else_clause_p
	  && TREE_OPERAND (expr, 2) != NULL_TREE)
	label_cont = label_true;
      else
	{
	  gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
	  have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
	  /* For if (...) { code; } else {} or
	     if (...) { code; } else goto label; or
	     if (...) { code; return; } else { ... }
	     label_cont isn't needed.  */
	  if (!have_else_clause_p
	      && TREE_OPERAND (expr, 2) != NULL_TREE
	      && gimple_seq_may_fallthru (seq))
	    {
	      gimple g;
	      label_cont = create_artificial_label ();

	      g = gimple_build_goto (label_cont);

	      /* GIMPLE_COND's are very low level; they have embedded
		 gotos.  This particular embedded goto should not be marked
		 with the location of the original COND_EXPR, as it would
		 correspond to the COND_EXPR's condition, not the ELSE or the
		 THEN arms.  To avoid marking it with the wrong location, flag
		 it as "no location".  */
	      gimple_set_do_not_emit_location (g);

	      gimplify_seq_add_stmt (&seq, g);
	    }
	}
    }
  if (!have_else_clause_p)
    {
      gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
      have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
    }
  if (label_cont)
    gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));

  gimple_pop_condition (pre_p);
  gimple_seq_add_seq (pre_p, seq);

  if (ret == GS_ERROR)
    ; /* Do nothing.  */
  else if (have_then_clause_p || have_else_clause_p)
    ret = GS_ALL_DONE;
  else
    {
      /* Both arms are empty; replace the COND_EXPR with its predicate.  */
      expr = TREE_OPERAND (expr, 0);
      gimplify_stmt (&expr, pre_p);
    }

  *expr_p = NULL;
  return ret;
}
3045 | |
3046 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with | |
3047 a call to __builtin_memcpy. */ | |
3048 | |
3049 static enum gimplify_status | |
3050 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value, | |
3051 gimple_seq *seq_p) | |
3052 { | |
3053 tree t, to, to_ptr, from, from_ptr; | |
3054 gimple gs; | |
3055 | |
3056 to = TREE_OPERAND (*expr_p, 0); | |
3057 from = TREE_OPERAND (*expr_p, 1); | |
3058 | |
3059 from_ptr = build_fold_addr_expr (from); | |
3060 gimplify_arg (&from_ptr, seq_p, EXPR_LOCATION (*expr_p)); | |
3061 | |
3062 to_ptr = build_fold_addr_expr (to); | |
3063 gimplify_arg (&to_ptr, seq_p, EXPR_LOCATION (*expr_p)); | |
3064 | |
3065 t = implicit_built_in_decls[BUILT_IN_MEMCPY]; | |
3066 | |
3067 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size); | |
3068 | |
3069 if (want_value) | |
3070 { | |
3071 /* tmp = memcpy() */ | |
3072 t = create_tmp_var (TREE_TYPE (to_ptr), NULL); | |
3073 gimple_call_set_lhs (gs, t); | |
3074 gimplify_seq_add_stmt (seq_p, gs); | |
3075 | |
3076 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t); | |
3077 return GS_ALL_DONE; | |
3078 } | |
3079 | |
3080 gimplify_seq_add_stmt (seq_p, gs); | |
3081 *expr_p = NULL; | |
3082 return GS_ALL_DONE; | |
3083 } | |
3084 | |
3085 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with | |
3086 a call to __builtin_memset. In this case we know that the RHS is | |
3087 a CONSTRUCTOR with an empty element list. */ | |
3088 | |
3089 static enum gimplify_status | |
3090 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value, | |
3091 gimple_seq *seq_p) | |
3092 { | |
3093 tree t, from, to, to_ptr; | |
3094 gimple gs; | |
3095 | |
3096 /* Assert our assumptions, to abort instead of producing wrong code | |
3097 silently if they are not met. Beware that the RHS CONSTRUCTOR might | |
3098 not be immediately exposed. */ | |
3099 from = TREE_OPERAND (*expr_p, 1); | |
3100 if (TREE_CODE (from) == WITH_SIZE_EXPR) | |
3101 from = TREE_OPERAND (from, 0); | |
3102 | |
3103 gcc_assert (TREE_CODE (from) == CONSTRUCTOR | |
3104 && VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (from))); | |
3105 | |
3106 /* Now proceed. */ | |
3107 to = TREE_OPERAND (*expr_p, 0); | |
3108 | |
3109 to_ptr = build_fold_addr_expr (to); | |
3110 gimplify_arg (&to_ptr, seq_p, EXPR_LOCATION (*expr_p)); | |
3111 t = implicit_built_in_decls[BUILT_IN_MEMSET]; | |
3112 | |
3113 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size); | |
3114 | |
3115 if (want_value) | |
3116 { | |
3117 /* tmp = memset() */ | |
3118 t = create_tmp_var (TREE_TYPE (to_ptr), NULL); | |
3119 gimple_call_set_lhs (gs, t); | |
3120 gimplify_seq_add_stmt (seq_p, gs); | |
3121 | |
3122 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t); | |
3123 return GS_ALL_DONE; | |
3124 } | |
3125 | |
3126 gimplify_seq_add_stmt (seq_p, gs); | |
3127 *expr_p = NULL; | |
3128 return GS_ALL_DONE; | |
3129 } | |
3130 | |
3131 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree, | |
3132 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an | |
3133 assignment. Returns non-null if we detect a potential overlap. */ | |
3134 | |
3135 struct gimplify_init_ctor_preeval_data | |
3136 { | |
3137 /* The base decl of the lhs object. May be NULL, in which case we | |
3138 have to assume the lhs is indirect. */ | |
3139 tree lhs_base_decl; | |
3140 | |
3141 /* The alias set of the lhs object. */ | |
3142 alias_set_type lhs_alias_set; | |
3143 }; | |
3144 | |
3145 static tree | |
3146 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata) | |
3147 { | |
3148 struct gimplify_init_ctor_preeval_data *data | |
3149 = (struct gimplify_init_ctor_preeval_data *) xdata; | |
3150 tree t = *tp; | |
3151 | |
3152 /* If we find the base object, obviously we have overlap. */ | |
3153 if (data->lhs_base_decl == t) | |
3154 return t; | |
3155 | |
3156 /* If the constructor component is indirect, determine if we have a | |
3157 potential overlap with the lhs. The only bits of information we | |
3158 have to go on at this point are addressability and alias sets. */ | |
3159 if (TREE_CODE (t) == INDIRECT_REF | |
3160 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl)) | |
3161 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t))) | |
3162 return t; | |
3163 | |
3164 /* If the constructor component is a call, determine if it can hide a | |
3165 potential overlap with the lhs through an INDIRECT_REF like above. */ | |
3166 if (TREE_CODE (t) == CALL_EXPR) | |
3167 { | |
3168 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t))); | |
3169 | |
3170 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type)) | |
3171 if (POINTER_TYPE_P (TREE_VALUE (type)) | |
3172 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl)) | |
3173 && alias_sets_conflict_p (data->lhs_alias_set, | |
3174 get_alias_set | |
3175 (TREE_TYPE (TREE_VALUE (type))))) | |
3176 return t; | |
3177 } | |
3178 | |
3179 if (IS_TYPE_OR_DECL_P (t)) | |
3180 *walk_subtrees = 0; | |
3181 return NULL; | |
3182 } | |
3183 | |
3184 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR, | |
3185 force values that overlap with the lhs (as described by *DATA) | |
3186 into temporaries. */ | |
3187 | |
3188 static void | |
3189 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p, | |
3190 struct gimplify_init_ctor_preeval_data *data) | |
3191 { | |
3192 enum gimplify_status one; | |
3193 | |
3194 /* If the value is constant, then there's nothing to pre-evaluate. */ | |
3195 if (TREE_CONSTANT (*expr_p)) | |
3196 { | |
3197 /* Ensure it does not have side effects, it might contain a reference to | |
3198 the object we're initializing. */ | |
3199 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p)); | |
3200 return; | |
3201 } | |
3202 | |
3203 /* If the type has non-trivial constructors, we can't pre-evaluate. */ | |
3204 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p))) | |
3205 return; | |
3206 | |
3207 /* Recurse for nested constructors. */ | |
3208 if (TREE_CODE (*expr_p) == CONSTRUCTOR) | |
3209 { | |
3210 unsigned HOST_WIDE_INT ix; | |
3211 constructor_elt *ce; | |
3212 VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p); | |
3213 | |
3214 for (ix = 0; VEC_iterate (constructor_elt, v, ix, ce); ix++) | |
3215 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data); | |
3216 | |
3217 return; | |
3218 } | |
3219 | |
3220 /* If this is a variable sized type, we must remember the size. */ | |
3221 maybe_with_size_expr (expr_p); | |
3222 | |
3223 /* Gimplify the constructor element to something appropriate for the rhs | |
3224 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know | |
3225 the gimplifier will consider this a store to memory. Doing this | |
3226 gimplification now means that we won't have to deal with complicated | |
3227 language-specific trees, nor trees like SAVE_EXPR that can induce | |
3228 exponential search behavior. */ | |
3229 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue); | |
3230 if (one == GS_ERROR) | |
3231 { | |
3232 *expr_p = NULL; | |
3233 return; | |
3234 } | |
3235 | |
3236 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap | |
3237 with the lhs, since "a = { .x=a }" doesn't make sense. This will | |
3238 always be true for all scalars, since is_gimple_mem_rhs insists on a | |
3239 temporary variable for them. */ | |
3240 if (DECL_P (*expr_p)) | |
3241 return; | |
3242 | |
3243 /* If this is of variable size, we have no choice but to assume it doesn't | |
3244 overlap since we can't make a temporary for it. */ | |
3245 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST) | |
3246 return; | |
3247 | |
3248 /* Otherwise, we must search for overlap ... */ | |
3249 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL)) | |
3250 return; | |
3251 | |
3252 /* ... and if found, force the value into a temporary. */ | |
3253 *expr_p = get_formal_tmp_var (*expr_p, pre_p); | |
3254 } | |
3255 | |
3256 /* A subroutine of gimplify_init_ctor_eval. Create a loop for | |
3257 a RANGE_EXPR in a CONSTRUCTOR for an array. | |
3258 | |
3259 var = lower; | |
3260 loop_entry: | |
3261 object[var] = value; | |
3262 if (var == upper) | |
3263 goto loop_exit; | |
3264 var = var + 1; | |
3265 goto loop_entry; | |
3266 loop_exit: | |
3267 | |
3268 We increment var _after_ the loop exit check because we might otherwise | |
3269 fail if upper == TYPE_MAX_VALUE (type for upper). | |
3270 | |
3271 Note that we never have to deal with SAVE_EXPRs here, because this has | |
3272 already been taken care of for us, in gimplify_init_ctor_preeval(). */ | |
3273 | |
3274 static void gimplify_init_ctor_eval (tree, VEC(constructor_elt,gc) *, | |
3275 gimple_seq *, bool); | |
3276 | |
3277 static void | |
3278 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper, | |
3279 tree value, tree array_elt_type, | |
3280 gimple_seq *pre_p, bool cleared) | |
3281 { | |
3282 tree loop_entry_label, loop_exit_label, fall_thru_label; | |
3283 tree var, var_type, cref, tmp; | |
3284 | |
3285 loop_entry_label = create_artificial_label (); | |
3286 loop_exit_label = create_artificial_label (); | |
3287 fall_thru_label = create_artificial_label (); | |
3288 | |
3289 /* Create and initialize the index variable. */ | |
3290 var_type = TREE_TYPE (upper); | |
3291 var = create_tmp_var (var_type, NULL); | |
3292 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower)); | |
3293 | |
3294 /* Add the loop entry label. */ | |
3295 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label)); | |
3296 | |
3297 /* Build the reference. */ | |
3298 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object), | |
3299 var, NULL_TREE, NULL_TREE); | |
3300 | |
3301 /* If we are a constructor, just call gimplify_init_ctor_eval to do | |
3302 the store. Otherwise just assign value to the reference. */ | |
3303 | |
3304 if (TREE_CODE (value) == CONSTRUCTOR) | |
3305 /* NB we might have to call ourself recursively through | |
3306 gimplify_init_ctor_eval if the value is a constructor. */ | |
3307 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value), | |
3308 pre_p, cleared); | |
3309 else | |
3310 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value)); | |
3311 | |
3312 /* We exit the loop when the index var is equal to the upper bound. */ | |
3313 gimplify_seq_add_stmt (pre_p, | |
3314 gimple_build_cond (EQ_EXPR, var, upper, | |
3315 loop_exit_label, fall_thru_label)); | |
3316 | |
3317 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label)); | |
3318 | |
3319 /* Otherwise, increment the index var... */ | |
3320 tmp = build2 (PLUS_EXPR, var_type, var, | |
3321 fold_convert (var_type, integer_one_node)); | |
3322 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp)); | |
3323 | |
3324 /* ...and jump back to the loop entry. */ | |
3325 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label)); | |
3326 | |
3327 /* Add the loop exit label. */ | |
3328 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label)); | |
3329 } | |
3330 | |
3331 /* Return true if FDECL is accessing a field that is zero sized. */ | |
3332 | |
3333 static bool | |
3334 zero_sized_field_decl (const_tree fdecl) | |
3335 { | |
3336 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl) | |
3337 && integer_zerop (DECL_SIZE (fdecl))) | |
3338 return true; | |
3339 return false; | |
3340 } | |
3341 | |
3342 /* Return true if TYPE is zero sized. */ | |
3343 | |
3344 static bool | |
3345 zero_sized_type (const_tree type) | |
3346 { | |
3347 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type) | |
3348 && integer_zerop (TYPE_SIZE (type))) | |
3349 return true; | |
3350 return false; | |
3351 } | |
3352 | |
/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed first.  New statements are appended to PRE_P.  */

static void
gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
			 gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  /* For arrays, remember the element type so ARRAY_REFs can be built.  */
  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
	continue;

      /* If the whole object was already zeroed, zero initializers are
	 redundant and can be skipped.  */
      if (cleared && initializer_zerop (value))
	continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
	 so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
	 happen with calls to functions returning a zero-sized type, which
	 we shouldn't discard.  As a number of downstream passes don't
	 expect sets of zero-sized fields, we rely on the gimplification of
	 the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
	continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
	 whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lower = TREE_OPERAND (purpose, 0);
	  tree upper = TREE_OPERAND (purpose, 1);

	  /* If the lower bound is equal to upper, just treat it as if
	     upper was the index.  */
	  if (simple_cst_equal (lower, upper))
	    purpose = upper;
	  else
	    {
	      gimplify_init_ctor_eval_range (object, lower, upper, value,
					     array_elt_type, pre_p, cleared);
	      continue;
	    }
	}

      /* Build the per-element reference: an ARRAY_REF for arrays, a
	 COMPONENT_REF for record-like types.  */
      if (array_elt_type)
	{
	  /* Do not use bitsizetype for ARRAY_REF indices.  */
	  if (TYPE_DOMAIN (TREE_TYPE (object)))
	    purpose = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
				    purpose);
	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
			 purpose, NULL_TREE, NULL_TREE);
	}
      else
	{
	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
			 unshare_expr (object), purpose, NULL_TREE);
	}

      /* Nested non-vector CONSTRUCTORs are expanded recursively; anything
	 else becomes an INIT_EXPR that is gimplified (and freed) here.  */
      if (TREE_CODE (value) == CONSTRUCTOR
	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
				 pre_p, cleared);
      else
	{
	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
	  gimplify_and_add (init, pre_p);
	  ggc_free (init);
	}
    }
}
3440 | |
3441 | |
3442 /* Returns the appropriate RHS predicate for this LHS. */ | |
3443 | |
3444 gimple_predicate | |
3445 rhs_predicate_for (tree lhs) | |
3446 { | |
3447 if (is_gimple_formal_tmp_var (lhs)) | |
3448 return is_gimple_formal_tmp_or_call_rhs; | |
3449 else if (is_gimple_reg (lhs)) | |
3450 return is_gimple_reg_or_call_rhs; | |
3451 else | |
3452 return is_gimple_mem_or_call_rhs; | |
3453 } | |
3454 | |
3455 | |
3456 /* A subroutine of gimplify_modify_expr. Break out elements of a | |
3457 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs. | |
3458 | |
3459 Note that we still need to clear any elements that don't have explicit | |
3460 initializers, so if not all elements are initialized we keep the | |
3461 original MODIFY_EXPR, we just remove all of the constructor elements. | |
3462 | |
3463 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return | |
3464 GS_ERROR if we would have to create a temporary when gimplifying | |
3465 this constructor. Otherwise, return GS_OK. | |
3466 | |
3467 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */ | |
3468 | |
3469 static enum gimplify_status | |
3470 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p, | |
3471 bool want_value, bool notify_temp_creation) | |
3472 { | |
3473 tree object; | |
3474 tree ctor = TREE_OPERAND (*expr_p, 1); | |
3475 tree type = TREE_TYPE (ctor); | |
3476 enum gimplify_status ret; | |
3477 VEC(constructor_elt,gc) *elts; | |
3478 | |
3479 if (TREE_CODE (ctor) != CONSTRUCTOR) | |
3480 return GS_UNHANDLED; | |
3481 | |
3482 if (!notify_temp_creation) | |
3483 { | |
3484 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, | |
3485 is_gimple_lvalue, fb_lvalue); | |
3486 if (ret == GS_ERROR) | |
3487 return ret; | |
3488 } | |
3489 | |
3490 object = TREE_OPERAND (*expr_p, 0); | |
3491 elts = CONSTRUCTOR_ELTS (ctor); | |
3492 ret = GS_ALL_DONE; | |
3493 | |
3494 switch (TREE_CODE (type)) | |
3495 { | |
3496 case RECORD_TYPE: | |
3497 case UNION_TYPE: | |
3498 case QUAL_UNION_TYPE: | |
3499 case ARRAY_TYPE: | |
3500 { | |
3501 struct gimplify_init_ctor_preeval_data preeval_data; | |
3502 HOST_WIDE_INT num_type_elements, num_ctor_elements; | |
3503 HOST_WIDE_INT num_nonzero_elements; | |
3504 bool cleared, valid_const_initializer; | |
3505 | |
3506 /* Aggregate types must lower constructors to initialization of | |
3507 individual elements. The exception is that a CONSTRUCTOR node | |
3508 with no elements indicates zero-initialization of the whole. */ | |
3509 if (VEC_empty (constructor_elt, elts)) | |
3510 { | |
3511 if (notify_temp_creation) | |
3512 return GS_OK; | |
3513 break; | |
3514 } | |
3515 | |
3516 /* Fetch information about the constructor to direct later processing. | |
3517 We might want to make static versions of it in various cases, and | |
3518 can only do so if it known to be a valid constant initializer. */ | |
3519 valid_const_initializer | |
3520 = categorize_ctor_elements (ctor, &num_nonzero_elements, | |
3521 &num_ctor_elements, &cleared); | |
3522 | |
3523 /* If a const aggregate variable is being initialized, then it | |
3524 should never be a lose to promote the variable to be static. */ | |
3525 if (valid_const_initializer | |
3526 && num_nonzero_elements > 1 | |
3527 && TREE_READONLY (object) | |
3528 && TREE_CODE (object) == VAR_DECL | |
3529 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object))) | |
3530 { | |
3531 if (notify_temp_creation) | |
3532 return GS_ERROR; | |
3533 DECL_INITIAL (object) = ctor; | |
3534 TREE_STATIC (object) = 1; | |
3535 if (!DECL_NAME (object)) | |
3536 DECL_NAME (object) = create_tmp_var_name ("C"); | |
3537 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL); | |
3538 | |
3539 /* ??? C++ doesn't automatically append a .<number> to the | |
3540 assembler name, and even when it does, it looks a FE private | |
3541 data structures to figure out what that number should be, | |
3542 which are not set for this variable. I suppose this is | |
3543 important for local statics for inline functions, which aren't | |
3544 "local" in the object file sense. So in order to get a unique | |
3545 TU-local symbol, we must invoke the lhd version now. */ | |
3546 lhd_set_decl_assembler_name (object); | |
3547 | |
3548 *expr_p = NULL_TREE; | |
3549 break; | |
3550 } | |
3551 | |
3552 /* If there are "lots" of initialized elements, even discounting | |
3553 those that are not address constants (and thus *must* be | |
3554 computed at runtime), then partition the constructor into | |
3555 constant and non-constant parts. Block copy the constant | |
3556 parts in, then generate code for the non-constant parts. */ | |
3557 /* TODO. There's code in cp/typeck.c to do this. */ | |
3558 | |
3559 num_type_elements = count_type_elements (type, true); | |
3560 | |
3561 /* If count_type_elements could not determine number of type elements | |
3562 for a constant-sized object, assume clearing is needed. | |
3563 Don't do this for variable-sized objects, as store_constructor | |
3564 will ignore the clearing of variable-sized objects. */ | |
3565 if (num_type_elements < 0 && int_size_in_bytes (type) >= 0) | |
3566 cleared = true; | |
3567 /* If there are "lots" of zeros, then block clear the object first. */ | |
3568 else if (num_type_elements - num_nonzero_elements | |
3569 > CLEAR_RATIO (optimize_function_for_speed_p (cfun)) | |
3570 && num_nonzero_elements < num_type_elements/4) | |
3571 cleared = true; | |
3572 /* ??? This bit ought not be needed. For any element not present | |
3573 in the initializer, we should simply set them to zero. Except | |
3574 we'd need to *find* the elements that are not present, and that | |
3575 requires trickery to avoid quadratic compile-time behavior in | |
3576 large cases or excessive memory use in small cases. */ | |
3577 else if (num_ctor_elements < num_type_elements) | |
3578 cleared = true; | |
3579 | |
3580 /* If there are "lots" of initialized elements, and all of them | |
3581 are valid address constants, then the entire initializer can | |
3582 be dropped to memory, and then memcpy'd out. Don't do this | |
3583 for sparse arrays, though, as it's more efficient to follow | |
3584 the standard CONSTRUCTOR behavior of memset followed by | |
3585 individual element initialization. Also don't do this for small | |
3586 all-zero initializers (which aren't big enough to merit | |
3587 clearing), and don't try to make bitwise copies of | |
3588 TREE_ADDRESSABLE types. */ | |
3589 if (valid_const_initializer | |
3590 && !(cleared || num_nonzero_elements == 0) | |
3591 && !TREE_ADDRESSABLE (type)) | |
3592 { | |
3593 HOST_WIDE_INT size = int_size_in_bytes (type); | |
3594 unsigned int align; | |
3595 | |
3596 /* ??? We can still get unbounded array types, at least | |
3597 from the C++ front end. This seems wrong, but attempt | |
3598 to work around it for now. */ | |
3599 if (size < 0) | |
3600 { | |
3601 size = int_size_in_bytes (TREE_TYPE (object)); | |
3602 if (size >= 0) | |
3603 TREE_TYPE (ctor) = type = TREE_TYPE (object); | |
3604 } | |
3605 | |
3606 /* Find the maximum alignment we can assume for the object. */ | |
3607 /* ??? Make use of DECL_OFFSET_ALIGN. */ | |
3608 if (DECL_P (object)) | |
3609 align = DECL_ALIGN (object); | |
3610 else | |
3611 align = TYPE_ALIGN (type); | |
3612 | |
3613 if (size > 0 | |
3614 && num_nonzero_elements > 1 | |
3615 && !can_move_by_pieces (size, align)) | |
3616 { | |
3617 tree new_tree; | |
3618 | |
3619 if (notify_temp_creation) | |
3620 return GS_ERROR; | |
3621 | |
3622 new_tree = create_tmp_var_raw (type, "C"); | |
3623 | |
3624 gimple_add_tmp_var (new_tree); | |
3625 TREE_STATIC (new_tree) = 1; | |
3626 TREE_READONLY (new_tree) = 1; | |
3627 DECL_INITIAL (new_tree) = ctor; | |
3628 if (align > DECL_ALIGN (new_tree)) | |
3629 { | |
3630 DECL_ALIGN (new_tree) = align; | |
3631 DECL_USER_ALIGN (new_tree) = 1; | |
3632 } | |
3633 walk_tree (&DECL_INITIAL (new_tree), force_labels_r, NULL, NULL); | |
3634 | |
3635 TREE_OPERAND (*expr_p, 1) = new_tree; | |
3636 | |
3637 /* This is no longer an assignment of a CONSTRUCTOR, but | |
3638 we still may have processing to do on the LHS. So | |
3639 pretend we didn't do anything here to let that happen. */ | |
3640 return GS_UNHANDLED; | |
3641 } | |
3642 } | |
3643 | |
3644 if (notify_temp_creation) | |
3645 return GS_OK; | |
3646 | |
3647 /* If there are nonzero elements, pre-evaluate to capture elements | |
3648 overlapping with the lhs into temporaries. We must do this before | |
3649 clearing to fetch the values before they are zeroed-out. */ | |
3650 if (num_nonzero_elements > 0) | |
3651 { | |
3652 preeval_data.lhs_base_decl = get_base_address (object); | |
3653 if (!DECL_P (preeval_data.lhs_base_decl)) | |
3654 preeval_data.lhs_base_decl = NULL; | |
3655 preeval_data.lhs_alias_set = get_alias_set (object); | |
3656 | |
3657 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1), | |
3658 pre_p, post_p, &preeval_data); | |
3659 } | |
3660 | |
3661 if (cleared) | |
3662 { | |
3663 /* Zap the CONSTRUCTOR element list, which simplifies this case. | |
3664 Note that we still have to gimplify, in order to handle the | |
3665 case of variable sized types. Avoid shared tree structures. */ | |
3666 CONSTRUCTOR_ELTS (ctor) = NULL; | |
3667 TREE_SIDE_EFFECTS (ctor) = 0; | |
3668 object = unshare_expr (object); | |
3669 gimplify_stmt (expr_p, pre_p); | |
3670 } | |
3671 | |
3672 /* If we have not block cleared the object, or if there are nonzero | |
3673 elements in the constructor, add assignments to the individual | |
3674 scalar fields of the object. */ | |
3675 if (!cleared || num_nonzero_elements > 0) | |
3676 gimplify_init_ctor_eval (object, elts, pre_p, cleared); | |
3677 | |
3678 *expr_p = NULL_TREE; | |
3679 } | |
3680 break; | |
3681 | |
3682 case COMPLEX_TYPE: | |
3683 { | |
3684 tree r, i; | |
3685 | |
3686 if (notify_temp_creation) | |
3687 return GS_OK; | |
3688 | |
3689 /* Extract the real and imaginary parts out of the ctor. */ | |
3690 gcc_assert (VEC_length (constructor_elt, elts) == 2); | |
3691 r = VEC_index (constructor_elt, elts, 0)->value; | |
3692 i = VEC_index (constructor_elt, elts, 1)->value; | |
3693 if (r == NULL || i == NULL) | |
3694 { | |
3695 tree zero = fold_convert (TREE_TYPE (type), integer_zero_node); | |
3696 if (r == NULL) | |
3697 r = zero; | |
3698 if (i == NULL) | |
3699 i = zero; | |
3700 } | |
3701 | |
3702 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to | |
3703 represent creation of a complex value. */ | |
3704 if (TREE_CONSTANT (r) && TREE_CONSTANT (i)) | |
3705 { | |
3706 ctor = build_complex (type, r, i); | |
3707 TREE_OPERAND (*expr_p, 1) = ctor; | |
3708 } | |
3709 else | |
3710 { | |
3711 ctor = build2 (COMPLEX_EXPR, type, r, i); | |
3712 TREE_OPERAND (*expr_p, 1) = ctor; | |
3713 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1), | |
3714 pre_p, | |
3715 post_p, | |
3716 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)), | |
3717 fb_rvalue); | |
3718 } | |
3719 } | |
3720 break; | |
3721 | |
3722 case VECTOR_TYPE: | |
3723 { | |
3724 unsigned HOST_WIDE_INT ix; | |
3725 constructor_elt *ce; | |
3726 | |
3727 if (notify_temp_creation) | |
3728 return GS_OK; | |
3729 | |
3730 /* Go ahead and simplify constant constructors to VECTOR_CST. */ | |
3731 if (TREE_CONSTANT (ctor)) | |
3732 { | |
3733 bool constant_p = true; | |
3734 tree value; | |
3735 | |
3736 /* Even when ctor is constant, it might contain non-*_CST | |
3737 elements, such as addresses or trapping values like | |
3738 1.0/0.0 - 1.0/0.0. Such expressions don't belong | |
3739 in VECTOR_CST nodes. */ | |
3740 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value) | |
3741 if (!CONSTANT_CLASS_P (value)) | |
3742 { | |
3743 constant_p = false; | |
3744 break; | |
3745 } | |
3746 | |
3747 if (constant_p) | |
3748 { | |
3749 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts); | |
3750 break; | |
3751 } | |
3752 | |
3753 /* Don't reduce an initializer constant even if we can't | |
3754 make a VECTOR_CST. It won't do anything for us, and it'll | |
3755 prevent us from representing it as a single constant. */ | |
3756 if (initializer_constant_valid_p (ctor, type)) | |
3757 break; | |
3758 | |
3759 TREE_CONSTANT (ctor) = 0; | |
3760 } | |
3761 | |
3762 /* Vector types use CONSTRUCTOR all the way through gimple | |
3763 compilation as a general initializer. */ | |
3764 for (ix = 0; VEC_iterate (constructor_elt, elts, ix, ce); ix++) | |
3765 { | |
3766 enum gimplify_status tret; | |
3767 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val, | |
3768 fb_rvalue); | |
3769 if (tret == GS_ERROR) | |
3770 ret = GS_ERROR; | |
3771 } | |
3772 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0))) | |
3773 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p); | |
3774 } | |
3775 break; | |
3776 | |
3777 default: | |
3778 /* So how did we get a CONSTRUCTOR for a scalar type? */ | |
3779 gcc_unreachable (); | |
3780 } | |
3781 | |
3782 if (ret == GS_ERROR) | |
3783 return GS_ERROR; | |
3784 else if (want_value) | |
3785 { | |
3786 *expr_p = object; | |
3787 return GS_OK; | |
3788 } | |
3789 else | |
3790 { | |
3791 /* If we have gimplified both sides of the initializer but have | |
3792 not emitted an assignment, do so now. */ | |
3793 if (*expr_p) | |
3794 { | |
3795 tree lhs = TREE_OPERAND (*expr_p, 0); | |
3796 tree rhs = TREE_OPERAND (*expr_p, 1); | |
3797 gimple init = gimple_build_assign (lhs, rhs); | |
3798 gimplify_seq_add_stmt (pre_p, init); | |
3799 *expr_p = NULL; | |
3800 } | |
3801 | |
3802 return GS_ALL_DONE; | |
3803 } | |
3804 } | |
3805 | |
3806 /* Given a pointer value OP0, return a simplified version of an | |
3807 indirection through OP0, or NULL_TREE if no simplification is | |
3808 possible. Note that the resulting type may be different from | |
3809 the type pointed to in the sense that it is still compatible | |
3810 from the langhooks point of view. */ | |
3811 | |
3812 tree | |
3813 gimple_fold_indirect_ref (tree t) | |
3814 { | |
3815 tree type = TREE_TYPE (TREE_TYPE (t)); | |
3816 tree sub = t; | |
3817 tree subtype; | |
3818 | |
3819 STRIP_USELESS_TYPE_CONVERSION (sub); | |
3820 subtype = TREE_TYPE (sub); | |
3821 if (!POINTER_TYPE_P (subtype)) | |
3822 return NULL_TREE; | |
3823 | |
3824 if (TREE_CODE (sub) == ADDR_EXPR) | |
3825 { | |
3826 tree op = TREE_OPERAND (sub, 0); | |
3827 tree optype = TREE_TYPE (op); | |
3828 /* *&p => p */ | |
3829 if (useless_type_conversion_p (type, optype)) | |
3830 return op; | |
3831 | |
3832 /* *(foo *)&fooarray => fooarray[0] */ | |
3833 if (TREE_CODE (optype) == ARRAY_TYPE | |
3834 && useless_type_conversion_p (type, TREE_TYPE (optype))) | |
3835 { | |
3836 tree type_domain = TYPE_DOMAIN (optype); | |
3837 tree min_val = size_zero_node; | |
3838 if (type_domain && TYPE_MIN_VALUE (type_domain)) | |
3839 min_val = TYPE_MIN_VALUE (type_domain); | |
3840 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE); | |
3841 } | |
3842 } | |
3843 | |
3844 /* *(foo *)fooarrptr => (*fooarrptr)[0] */ | |
3845 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE | |
3846 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype)))) | |
3847 { | |
3848 tree type_domain; | |
3849 tree min_val = size_zero_node; | |
3850 tree osub = sub; | |
3851 sub = gimple_fold_indirect_ref (sub); | |
3852 if (! sub) | |
3853 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub); | |
3854 type_domain = TYPE_DOMAIN (TREE_TYPE (sub)); | |
3855 if (type_domain && TYPE_MIN_VALUE (type_domain)) | |
3856 min_val = TYPE_MIN_VALUE (type_domain); | |
3857 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE); | |
3858 } | |
3859 | |
3860 return NULL_TREE; | |
3861 } | |
3862 | |
3863 /* Given a pointer value OP0, return a simplified version of an | |
3864 indirection through OP0, or NULL_TREE if no simplification is | |
3865 possible. This may only be applied to a rhs of an expression. | |
3866 Note that the resulting type may be different from the type pointed | |
3867 to in the sense that it is still compatible from the langhooks | |
3868 point of view. */ | |
3869 | |
3870 static tree | |
3871 gimple_fold_indirect_ref_rhs (tree t) | |
3872 { | |
3873 return gimple_fold_indirect_ref (t); | |
3874 } | |
3875 | |
/* Subroutine of gimplify_modify_expr to do simplifications of
   MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
   something changes.

   *EXPR_P is the MODIFY_EXPR or INIT_EXPR being gimplified; FROM_P and
   TO_P point at its RHS and LHS operands.  PRE_P/POST_P collect side
   effects.  WANT_VALUE is true if the value of the assignment is used
   in an enclosing expression.

   Returns GS_UNHANDLED when no RHS-specific simplification applies, in
   which case the caller continues with generic gimplification; any
   other status means this function has fully (or erroneously)
   disposed of the expression.  */

static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
			  gimple_seq *pre_p, gimple_seq *post_p,
			  bool want_value)
{
  enum gimplify_status ret = GS_OK;

  /* Re-dispatch on the (possibly rewritten) RHS until a case reports
     GS_UNHANDLED; each iteration may replace *from_p with a simpler
     form and loop again.  */
  while (ret != GS_UNHANDLED)
    switch (TREE_CODE (*from_p))
      {
      case VAR_DECL:
	/* If we're assigning from a constant constructor, move the
	   constructor expression to the RHS of the MODIFY_EXPR.  */
	if (DECL_INITIAL (*from_p)
	    && TREE_READONLY (*from_p)
	    && !TREE_THIS_VOLATILE (*from_p)
	    && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
	  {
	    tree old_from = *from_p;

	    /* Move the constructor into the RHS.  */
	    *from_p = unshare_expr (DECL_INITIAL (*from_p));

	    /* Let's see if gimplify_init_constructor will need to put
	       it in memory.  If so, revert the change.  */
	    ret = gimplify_init_constructor (expr_p, NULL, NULL, false, true);
	    if (ret == GS_ERROR)
	      {
		*from_p = old_from;
		/* Fall through.  */
	      }
	    else
	      {
		ret = GS_OK;
		break;
	      }
	  }
	ret = GS_UNHANDLED;
	break;
      case INDIRECT_REF:
	{
	  /* If we have code like

	     *(const A*)(A*)&x

	     where the type of "x" is a (possibly cv-qualified variant
	     of "A"), treat the entire expression as identical to "x".
	     This kind of code arises in C++ when an object is bound
	     to a const reference, and if "x" is a TARGET_EXPR we want
	     to take advantage of the optimization below.  */
	  tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
	  if (t)
	    {
	      /* Loop again with the simplified RHS.  */
	      *from_p = t;
	      ret = GS_OK;
	    }
	  else
	    ret = GS_UNHANDLED;
	  break;
	}

      case TARGET_EXPR:
	{
	  /* If we are initializing something from a TARGET_EXPR, strip the
	     TARGET_EXPR and initialize it directly, if possible.  This can't
	     be done if the initializer is void, since that implies that the
	     temporary is set in some non-trivial way.

	     ??? What about code that pulls out the temp and uses it
	     elsewhere? I think that such code never uses the TARGET_EXPR as
	     an initializer.  If I'm wrong, we'll die because the temp won't
	     have any RTL.  In that case, I guess we'll need to replace
	     references somehow.  */
	  tree init = TARGET_EXPR_INITIAL (*from_p);

	  if (init
	      && !VOID_TYPE_P (TREE_TYPE (init)))
	    {
	      *from_p = init;
	      ret = GS_OK;
	    }
	  else
	    ret = GS_UNHANDLED;
	}
	break;

      case COMPOUND_EXPR:
	/* Remove any COMPOUND_EXPR in the RHS so the following cases will be
	   caught.  */
	gimplify_compound_expr (from_p, pre_p, true);
	ret = GS_OK;
	break;

      case CONSTRUCTOR:
	/* If we're initializing from a CONSTRUCTOR, break this into
	   individual MODIFY_EXPRs.  */
	return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
					  false);

      case COND_EXPR:
	/* If we're assigning to a non-register type, push the assignment
	   down into the branches.  This is mandatory for ADDRESSABLE types,
	   since we cannot generate temporaries for such, but it saves a
	   copy in other cases as well.  */
	if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
	  {
	    /* This code should mirror the code in gimplify_cond_expr. */
	    enum tree_code code = TREE_CODE (*expr_p);
	    tree cond = *from_p;
	    tree result = *to_p;

	    ret = gimplify_expr (&result, pre_p, post_p,
				 is_gimple_lvalue, fb_lvalue);
	    if (ret != GS_ERROR)
	      ret = GS_OK;

	    /* Rewrite each non-void arm of the COND_EXPR into an
	       assignment of that arm to RESULT.  */
	    if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
	      TREE_OPERAND (cond, 1)
		= build2 (code, void_type_node, result,
			  TREE_OPERAND (cond, 1));
	    if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
	      TREE_OPERAND (cond, 2)
		= build2 (code, void_type_node, unshare_expr (result),
			  TREE_OPERAND (cond, 2));

	    TREE_TYPE (cond) = void_type_node;
	    recalculate_side_effects (cond);

	    if (want_value)
	      {
		gimplify_and_add (cond, pre_p);
		*expr_p = unshare_expr (result);
	      }
	    else
	      *expr_p = cond;
	    return ret;
	  }
	else
	  ret = GS_UNHANDLED;
	break;

      case CALL_EXPR:
	/* For calls that return in memory, give *to_p as the CALL_EXPR's
	   return slot so that we don't generate a temporary.  */
	if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
	    && aggregate_value_p (*from_p, *from_p))
	  {
	    bool use_target;

	    if (!(rhs_predicate_for (*to_p))(*from_p))
	      /* If we need a temporary, *to_p isn't accurate.  */
	      use_target = false;
	    else if (TREE_CODE (*to_p) == RESULT_DECL
		     && DECL_NAME (*to_p) == NULL_TREE
		     && needs_to_live_in_memory (*to_p))
	      /* It's OK to use the return slot directly unless it's an NRV. */
	      use_target = true;
	    else if (is_gimple_reg_type (TREE_TYPE (*to_p))
		     || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
	      /* Don't force regs into memory.  */
	      use_target = false;
	    else if (TREE_CODE (*to_p) == VAR_DECL
		     && DECL_GIMPLE_FORMAL_TEMP_P (*to_p))
	      /* Don't use the original target if it's a formal temp; we
		 don't want to take their addresses.  */
	      use_target = false;
	    else if (TREE_CODE (*expr_p) == INIT_EXPR)
	      /* It's OK to use the target directly if it's being
		 initialized. */
	      use_target = true;
	    else if (!is_gimple_non_addressable (*to_p))
	      /* Don't use the original target if it's already addressable;
		 if its address escapes, and the called function uses the
		 NRV optimization, a conforming program could see *to_p
		 change before the called function returns; see c++/19317.
		 When optimizing, the return_slot pass marks more functions
		 as safe after we have escape info.  */
	      use_target = false;
	    else
	      use_target = true;

	    if (use_target)
	      {
		CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
		mark_addressable (*to_p);
	      }
	  }

	/* Note: the call itself is still gimplified by the caller.  */
	ret = GS_UNHANDLED;
	break;

	/* If we're initializing from a container, push the initialization
	   inside it.  */
      case CLEANUP_POINT_EXPR:
      case BIND_EXPR:
      case STATEMENT_LIST:
	{
	  tree wrap = *from_p;
	  tree t;

	  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
			       fb_lvalue);
	  if (ret != GS_ERROR)
	    ret = GS_OK;

	  /* voidify_wrapper_expr pushes the assignment down to the last
	     expression inside the wrapper and voids the wrapper itself.  */
	  t = voidify_wrapper_expr (wrap, *expr_p);
	  gcc_assert (t == *expr_p);

	  if (want_value)
	    {
	      gimplify_and_add (wrap, pre_p);
	      *expr_p = unshare_expr (*to_p);
	    }
	  else
	    *expr_p = wrap;
	  return GS_OK;
	}

      default:
	ret = GS_UNHANDLED;
	break;
      }

  return ret;
}
4105 | |
4106 | |
4107 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is | |
4108 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with | |
4109 DECL_GIMPLE_REG_P set. | |
4110 | |
4111 IMPORTANT NOTE: This promotion is performed by introducing a load of the | |
4112 other, unmodified part of the complex object just before the total store. | |
4113 As a consequence, if the object is still uninitialized, an undefined value | |
4114 will be loaded into a register, which may result in a spurious exception | |
4115 if the register is floating-point and the value happens to be a signaling | |
4116 NaN for example. Then the fully-fledged complex operations lowering pass | |
4117 followed by a DCE pass are necessary in order to fix things up. */ | |
4118 | |
4119 static enum gimplify_status | |
4120 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p, | |
4121 bool want_value) | |
4122 { | |
4123 enum tree_code code, ocode; | |
4124 tree lhs, rhs, new_rhs, other, realpart, imagpart; | |
4125 | |
4126 lhs = TREE_OPERAND (*expr_p, 0); | |
4127 rhs = TREE_OPERAND (*expr_p, 1); | |
4128 code = TREE_CODE (lhs); | |
4129 lhs = TREE_OPERAND (lhs, 0); | |
4130 | |
4131 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR; | |
4132 other = build1 (ocode, TREE_TYPE (rhs), lhs); | |
4133 other = get_formal_tmp_var (other, pre_p); | |
4134 | |
4135 realpart = code == REALPART_EXPR ? rhs : other; | |
4136 imagpart = code == REALPART_EXPR ? other : rhs; | |
4137 | |
4138 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart)) | |
4139 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart); | |
4140 else | |
4141 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart); | |
4142 | |
4143 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs)); | |
4144 *expr_p = (want_value) ? rhs : NULL_TREE; | |
4145 | |
4146 return GS_ALL_DONE; | |
4147 } | |
4148 | |
4149 | |
/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.

      modify_expr
	      : varname '=' rhs
	      | '*' ID '=' rhs

   PRE_P points to the list where side effects that must happen before
       *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
       *EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
       in another expression.

   Returns GS_ALL_DONE on success; GS_OK when WANT_VALUE and the lhs
   replaces the expression; GS_ERROR (or whatever the RHS simplifier
   returned) on failure.  */

static enum gimplify_status
gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		      bool want_value)
{
  tree *from_p = &TREE_OPERAND (*expr_p, 1);
  tree *to_p = &TREE_OPERAND (*expr_p, 0);
  enum gimplify_status ret = GS_UNHANDLED;
  gimple assign;

  gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
	      || TREE_CODE (*expr_p) == INIT_EXPR);

  /* Insert pointer conversions required by the middle-end that are not
     required by the frontend.  This fixes middle-end type checking for
     for example gcc.dg/redecl-6.c.  */
  if (POINTER_TYPE_P (TREE_TYPE (*to_p))
      && lang_hooks.types_compatible_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
    {
      STRIP_USELESS_TYPE_CONVERSION (*from_p);
      if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
	*from_p = fold_convert (TREE_TYPE (*to_p), *from_p);
    }

  /* See if any simplifications can be done based on what the RHS is.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* For zero sized types only gimplify the left hand side and right hand
     side as statements and throw away the assignment.  Do this after
     gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
     types properly.  */
  if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
    {
      gimplify_stmt (from_p, pre_p);
      gimplify_stmt (to_p, pre_p);
      *expr_p = NULL_TREE;
      return GS_ALL_DONE;
    }

  /* If the value being copied is of variable width, compute the length
     of the copy into a WITH_SIZE_EXPR.  Note that we need to do this
     before gimplifying any of the operands so that we can resolve any
     PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
     the size of the expression to be copied, not of the destination, so
     that is what we must do here.  */
  maybe_with_size_expr (from_p);

  /* Gimplify the lhs before the rhs: PLACEHOLDER_EXPRs in the rhs size
     may refer to it (see above).  */
  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* As a special case, we have to temporarily allow for assignments
     with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
     a toplevel statement, when gimplifying the GENERIC expression
     MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
     GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.

     Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
     prevent gimplify_expr from trying to create a new temporary for
     foo's LHS, we tell it that it should only gimplify until it
     reaches the CALL_EXPR.  On return from gimplify_expr, the newly
     created GIMPLE_CALL <foo> will be the last statement in *PRE_P
     and all we need to do here is set 'a' to be its LHS.  */
  ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
		       fb_rvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Now see if the above changed *from_p to something we handle specially.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* If we've got a variable sized assignment between two lvalues (i.e. does
     not involve a call), then we can make things a bit more straightforward
     by converting the assignment to memcpy or memset.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree from = TREE_OPERAND (*from_p, 0);
      tree size = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (from) == CONSTRUCTOR)
	return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);

      if (is_gimple_addressable (from))
	{
	  *from_p = from;
	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
						 pre_p);
	}
    }

  /* Transform partial stores to non-addressable complex variables into
     total stores.  This allows us to use real instead of virtual operands
     for these variables, which improves optimization.  */
  if ((TREE_CODE (*to_p) == REALPART_EXPR
       || TREE_CODE (*to_p) == IMAGPART_EXPR)
      && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
    return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);

  /* Try to alleviate the effects of the gimplification creating artificial
     temporaries (see for example is_gimple_reg_rhs) on the debug info.  */
  if (!gimplify_ctxp->into_ssa
      && DECL_P (*from_p)
      && DECL_IGNORED_P (*from_p)
      && DECL_P (*to_p)
      && !DECL_IGNORED_P (*to_p))
    {
      if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
	DECL_NAME (*from_p)
	  = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
      DECL_DEBUG_EXPR_IS_FROM (*from_p) = 1;
      SET_DECL_DEBUG_EXPR (*from_p, *to_p);
    }

  if (TREE_CODE (*from_p) == CALL_EXPR)
    {
      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
	 instead of a GIMPLE_ASSIGN.  */
      assign = gimple_build_call_from_tree (*from_p);
      gimple_call_set_lhs (assign, *to_p);
    }
  else
    assign = gimple_build_assign (*to_p, *from_p);

  gimplify_seq_add_stmt (pre_p, assign);

  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
    {
      /* If we've somehow already got an SSA_NAME on the LHS, then
	 we've probably modified it twice.  Not good.  */
      gcc_assert (TREE_CODE (*to_p) != SSA_NAME);
      *to_p = make_ssa_name (*to_p, assign);
      gimple_set_lhs (assign, *to_p);
    }

  if (want_value)
    {
      /* The value of the assignment is the (possibly renamed) lhs.  */
      *expr_p = unshare_expr (*to_p);
      return GS_OK;
    }
  else
    *expr_p = NULL;

  return GS_ALL_DONE;
}
4314 | |
4315 /* Gimplify a comparison between two variable-sized objects. Do this | |
4316 with a call to BUILT_IN_MEMCMP. */ | |
4317 | |
4318 static enum gimplify_status | |
4319 gimplify_variable_sized_compare (tree *expr_p) | |
4320 { | |
4321 tree op0 = TREE_OPERAND (*expr_p, 0); | |
4322 tree op1 = TREE_OPERAND (*expr_p, 1); | |
4323 tree t, arg, dest, src; | |
4324 | |
4325 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0)); | |
4326 arg = unshare_expr (arg); | |
4327 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0); | |
4328 src = build_fold_addr_expr (op1); | |
4329 dest = build_fold_addr_expr (op0); | |
4330 t = implicit_built_in_decls[BUILT_IN_MEMCMP]; | |
4331 t = build_call_expr (t, 3, dest, src, arg); | |
4332 *expr_p | |
4333 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node); | |
4334 | |
4335 return GS_OK; | |
4336 } | |
4337 | |
4338 /* Gimplify a comparison between two aggregate objects of integral scalar | |
4339 mode as a comparison between the bitwise equivalent scalar values. */ | |
4340 | |
4341 static enum gimplify_status | |
4342 gimplify_scalar_mode_aggregate_compare (tree *expr_p) | |
4343 { | |
4344 tree op0 = TREE_OPERAND (*expr_p, 0); | |
4345 tree op1 = TREE_OPERAND (*expr_p, 1); | |
4346 | |
4347 tree type = TREE_TYPE (op0); | |
4348 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1); | |
4349 | |
4350 op0 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op0); | |
4351 op1 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op1); | |
4352 | |
4353 *expr_p | |
4354 = fold_build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1); | |
4355 | |
4356 return GS_OK; | |
4357 } | |
4358 | |
4359 /* Gimplify TRUTH_ANDIF_EXPR and TRUTH_ORIF_EXPR expressions. EXPR_P | |
4360 points to the expression to gimplify. | |
4361 | |
4362 Expressions of the form 'a && b' are gimplified to: | |
4363 | |
4364 a && b ? true : false | |
4365 | |
4366 gimplify_cond_expr will do the rest. | |
4367 | |
4368 PRE_P points to the list where side effects that must happen before | |
4369 *EXPR_P should be stored. */ | |
4370 | |
4371 static enum gimplify_status | |
4372 gimplify_boolean_expr (tree *expr_p) | |
4373 { | |
4374 /* Preserve the original type of the expression. */ | |
4375 tree type = TREE_TYPE (*expr_p); | |
4376 | |
4377 *expr_p = build3 (COND_EXPR, type, *expr_p, | |
4378 fold_convert (type, boolean_true_node), | |
4379 fold_convert (type, boolean_false_node)); | |
4380 | |
4381 return GS_OK; | |
4382 } | |
4383 | |
4384 /* Gimplifies an expression sequence. This function gimplifies each | |
4385 expression and re-writes the original expression with the last | |
4386 expression of the sequence in GIMPLE form. | |
4387 | |
4388 PRE_P points to the list where the side effects for all the | |
4389 expressions in the sequence will be emitted. | |
4390 | |
4391 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */ | |
4392 | |
4393 static enum gimplify_status | |
4394 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value) | |
4395 { | |
4396 tree t = *expr_p; | |
4397 | |
4398 do | |
4399 { | |
4400 tree *sub_p = &TREE_OPERAND (t, 0); | |
4401 | |
4402 if (TREE_CODE (*sub_p) == COMPOUND_EXPR) | |
4403 gimplify_compound_expr (sub_p, pre_p, false); | |
4404 else | |
4405 gimplify_stmt (sub_p, pre_p); | |
4406 | |
4407 t = TREE_OPERAND (t, 1); | |
4408 } | |
4409 while (TREE_CODE (t) == COMPOUND_EXPR); | |
4410 | |
4411 *expr_p = t; | |
4412 if (want_value) | |
4413 return GS_OK; | |
4414 else | |
4415 { | |
4416 gimplify_stmt (expr_p, pre_p); | |
4417 return GS_ALL_DONE; | |
4418 } | |
4419 } | |
4420 | |
4421 | |
4422 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to | |
4423 gimplify. After gimplification, EXPR_P will point to a new temporary | |
4424 that holds the original value of the SAVE_EXPR node. | |
4425 | |
4426 PRE_P points to the list where side effects that must happen before | |
4427 *EXPR_P should be stored. */ | |
4428 | |
4429 static enum gimplify_status | |
4430 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p) | |
4431 { | |
4432 enum gimplify_status ret = GS_ALL_DONE; | |
4433 tree val; | |
4434 | |
4435 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR); | |
4436 val = TREE_OPERAND (*expr_p, 0); | |
4437 | |
4438 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */ | |
4439 if (!SAVE_EXPR_RESOLVED_P (*expr_p)) | |
4440 { | |
4441 /* The operand may be a void-valued expression such as SAVE_EXPRs | |
4442 generated by the Java frontend for class initialization. It is | |
4443 being executed only for its side-effects. */ | |
4444 if (TREE_TYPE (val) == void_type_node) | |
4445 { | |
4446 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, | |
4447 is_gimple_stmt, fb_none); | |
4448 val = NULL; | |
4449 } | |
4450 else | |
4451 val = get_initialized_tmp_var (val, pre_p, post_p); | |
4452 | |
4453 TREE_OPERAND (*expr_p, 0) = val; | |
4454 SAVE_EXPR_RESOLVED_P (*expr_p) = 1; | |
4455 } | |
4456 | |
4457 *expr_p = val; | |
4458 | |
4459 return ret; | |
4460 } | |
4461 | |
/* Re-write the ADDR_EXPR node pointed to by EXPR_P

      unary_expr
	      : ...
	      | '&' varname
	      ...

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
   *EXPR_P should be stored.

   Returns GS_OK when the node was rewritten, GS_ERROR if gimplifying
   the operand failed, or whatever gimplify_expr reported for the
   default case.  */

static enum gimplify_status
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr = *expr_p;
  tree op0 = TREE_OPERAND (expr, 0);
  enum gimplify_status ret;

  /* Dispatch on the shape of the operand of the '&'.  */
  switch (TREE_CODE (op0))
    {
    case INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
    do_indirect_ref:
      /* Check if we are dealing with an expression of the form '&*ptr'.
	 While the front end folds away '&*ptr' into 'ptr', these
	 expressions may be generated internally by the compiler (e.g.,
	 builtins like __builtin_va_end).  */
      /* Caution: the silent array decomposition semantics we allow for
	 ADDR_EXPR means we can't always discard the pair.  */
      /* Gimplification of the ADDR_EXPR operand may drop
	 cv-qualification conversions, so make sure we add them if
	 needed.  */
      {
	tree op00 = TREE_OPERAND (op0, 0);
	tree t_expr = TREE_TYPE (expr);
	tree t_op00 = TREE_TYPE (op00);

	/* '&*ptr' collapses to 'ptr', converted if its type differs
	   from the ADDR_EXPR's.  */
        if (!useless_type_conversion_p (t_expr, t_op00))
	  op00 = fold_convert (TREE_TYPE (expr), op00);
	*expr_p = op00;
	ret = GS_OK;
      }
      break;

    case VIEW_CONVERT_EXPR:
      /* Take the address of our operand and then convert it to the type of
	 this ADDR_EXPR.

	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
	 all clear.  The impact of this transformation is even less clear.  */

      /* If the operand is a useless conversion, look through it.  Doing so
	 guarantees that the ADDR_EXPR and its operand will remain of the
	 same type.  */
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
	op0 = TREE_OPERAND (op0, 0);

      /* &VIEW_CONVERT (x) becomes (T *) &x.  */
      *expr_p = fold_convert (TREE_TYPE (expr),
			      build_fold_addr_expr (TREE_OPERAND (op0, 0)));
      ret = GS_OK;
      break;

    default:
      /* We use fb_either here because the C frontend sometimes takes
	 the address of a call that returns a struct; see
	 gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
	 the implied temporary explicit.  */

      /* Mark the RHS addressable.  */
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
			   is_gimple_addressable, fb_either);
      if (ret == GS_ERROR)
	break;

      /* We cannot rely on making the RHS addressable if it is
	 a temporary created by gimplification.  In this case create a
	 new temporary that is initialized by a copy (which will
	 become a store after we mark it addressable).
	 This mostly happens if the frontend passed us something that
	 it could not mark addressable yet, like a fortran
	 pass-by-reference parameter (int) floatvar.  */
      if (is_gimple_formal_tmp_var (TREE_OPERAND (expr, 0)))
	TREE_OPERAND (expr, 0)
	  = get_initialized_tmp_var (TREE_OPERAND (expr, 0), pre_p, post_p);

      op0 = TREE_OPERAND (expr, 0);

      /* For various reasons, the gimplification of the expression
	 may have made a new INDIRECT_REF.  */
      if (TREE_CODE (op0) == INDIRECT_REF)
	goto do_indirect_ref;

      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
      recompute_tree_invariant_for_addr_expr (expr);

      mark_addressable (TREE_OPERAND (expr, 0));
      break;
    }

  return ret;
}
4565 | |
/* Gimplify the operands of an ASM_EXPR.  Input operands should be a gimple
   value; output operands should be a gimple lvalue.

   Builds a GIMPLE_ASM from the tree-level ASM_EXPR at *EXPR_P and appends
   it to PRE_P.  Side effects from gimplifying operands go to PRE_P/POST_P.
   Returns GS_ALL_DONE on success, GS_ERROR if a constraint or operand was
   invalid (errors have already been emitted).  */

static enum gimplify_status
gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr;
  int noutputs;
  const char **oconstraints;
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;
  enum gimplify_status ret, tret;
  gimple stmt;
  VEC(tree, gc) *inputs;
  VEC(tree, gc) *outputs;
  VEC(tree, gc) *clobbers;
  tree link_next;

  expr = *expr_p;
  noutputs = list_length (ASM_OUTPUTS (expr));
  /* Output constraints are saved so parse_input_constraint can resolve
     matching ("0", "1", ...) constraints on the inputs below.  */
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  inputs = outputs = clobbers = NULL;

  ret = GS_ALL_DONE;
  link_next = NULL_TREE;
  /* Process each output operand; I numbers operands across the whole asm
     (it keeps counting into the input loop below).  LINK_NEXT is captured
     up front because we sever TREE_CHAIN (link) as we go.  */
  for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
    {
      bool ok;
      size_t constraint_len;

      link_next = TREE_CHAIN (link);

      oconstraints[i]
	= constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      constraint_len = strlen (constraint);
      if (constraint_len == 0)
	continue;

      ok = parse_output_constraint (&constraint, i, 0, 0,
				    &allows_mem, &allows_reg, &is_inout);
      if (!ok)
	{
	  ret = GS_ERROR;
	  is_inout = false;
	}

      /* A memory-only operand must live in memory.  */
      if (!allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (link));

      tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
			    is_inout ? is_gimple_min_lval : is_gimple_lvalue,
			    fb_lvalue | fb_mayfail);
      if (tret == GS_ERROR)
	{
	  error ("invalid lvalue in asm output %d", i);
	  ret = tret;
	}

      VEC_safe_push (tree, gc, outputs, link);
      TREE_CHAIN (link) = NULL_TREE;

      if (is_inout)
	{
	  /* An input/output operand.  To give the optimizers more
	     flexibility, split it into separate input and output
 	     operands.  */
	  tree input;
	  char buf[10];

	  /* Turn the in/out constraint into an output constraint.  */
	  char *p = xstrdup (constraint);
	  p[0] = '=';
	  TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);

	  /* And add a matching input constraint.  */
	  if (allows_reg)
	    {
	      sprintf (buf, "%d", i);

	      /* If there are multiple alternatives in the constraint,
		 handle each of them individually.  Those that allow register
		 will be replaced with operand number, the others will stay
		 unchanged.  */
	      if (strchr (p, ',') != NULL)
		{
		  /* First pass: compute an upper bound on the rewritten
		     constraint's length.  */
		  size_t len = 0, buflen = strlen (buf);
		  char *beg, *end, *str, *dst;

		  for (beg = p + 1;;)
		    {
		      end = strchr (beg, ',');
		      if (end == NULL)
			end = strchr (beg, '\0');
		      if ((size_t) (end - beg) < buflen)
			len += buflen + 1;
		      else
			len += end - beg + 1;
		      if (*end)
			beg = end + 1;
		      else
			break;
		    }

		  /* Second pass: rewrite each alternative into STR,
		     substituting the operand number where registers are
		     allowed.  */
		  str = (char *) alloca (len);
		  for (beg = p + 1, dst = str;;)
		    {
		      const char *tem;
		      bool mem_p, reg_p, inout_p;

		      end = strchr (beg, ',');
		      if (end)
			*end = '\0';
		      /* Temporarily prefix '=' so the alternative parses
			 as an output constraint.  */
		      beg[-1] = '=';
		      tem = beg - 1;
		      parse_output_constraint (&tem, i, 0, 0,
					       &mem_p, &reg_p, &inout_p);
		      if (dst != str)
			*dst++ = ',';
		      if (reg_p)
			{
			  memcpy (dst, buf, buflen);
			  dst += buflen;
			}
		      else
			{
			  if (end)
			    len = end - beg;
			  else
			    len = strlen (beg);
			  memcpy (dst, beg, len);
			  dst += len;
			}
		      if (end)
			beg = end + 1;
		      else
			break;
		    }
		  *dst = '\0';
		  input = build_string (dst - str, str);
		}
	      else
		input = build_string (strlen (buf), buf);
	    }
	  else
	    /* Register not allowed: keep the original constraint text
	       (minus the leading '+').  */
	    input = build_string (constraint_len - 1, constraint + 1);

	  free (p);

	  /* The new input reads the same lvalue as the output writes.  */
	  input = build_tree_list (build_tree_list (NULL_TREE, input),
				   unshare_expr (TREE_VALUE (link)));
	  ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
	}
    }

  /* Process the input operands; I continues from the output count so
     diagnostics report the overall operand number.  */
  link_next = NULL_TREE;
  for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* If we can't make copies, we can only accept memory.  */
      if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
	{
	  if (allows_mem)
	    allows_reg = 0;
	  else
	    {
	      error ("impossible constraint in %<asm%>");
	      error ("non-memory input %d must stay in memory", i);
	      return GS_ERROR;
	    }
	}

      /* If the operand is a memory input, it should be an lvalue.  */
      if (!allows_reg && allows_mem)
	{
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_lvalue, fb_lvalue | fb_mayfail);
	  mark_addressable (TREE_VALUE (link));
	  if (tret == GS_ERROR)
	    {
	      if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
		input_location = EXPR_LOCATION (TREE_VALUE (link));
	      error ("memory input %d is not directly addressable", i);
	      ret = tret;
	    }
	}
      else
	{
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_asm_val, fb_rvalue);
	  if (tret == GS_ERROR)
	    ret = tret;
	}

      TREE_CHAIN (link) = NULL_TREE;
      VEC_safe_push (tree, gc, inputs, link);
    }

  /* Clobbers need no gimplification; just collect them.  */
  for (link = ASM_CLOBBERS (expr); link; ++i, link = TREE_CHAIN (link))
    VEC_safe_push (tree, gc, clobbers, link);

  stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
                               inputs, outputs, clobbers);

  gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
  gimple_asm_set_input (stmt, ASM_INPUT_P (expr));

  gimplify_seq_add_stmt (pre_p, stmt);

  return ret;
}
4784 | |
/* Gimplify a CLEANUP_POINT_EXPR.  Currently this works by adding
   GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
   gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
   return to this function.

   FIXME should we complexify the prequeue handling instead?  Or use flags
   for all the cleanups and let the optimizer tighten them up?  The current
   code seems pretty fragile; it will break on a cleanup within any
   non-conditional nesting.  But any such nesting would be broken, anyway;
   we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
   and continues out of it.  We can do that at the RTL level, though, so
   having an optimizer to tighten up try/finally regions would be a Good
   Thing.

   Returns GS_OK with *EXPR_P set to the wrapper temporary when
   voidify_wrapper_expr produced one, otherwise GS_ALL_DONE with
   *EXPR_P cleared.  */

static enum gimplify_status
gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
{
  gimple_stmt_iterator iter;
  gimple_seq body_sequence = NULL;

  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  /* We only care about the number of conditions between the innermost
     CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
     any cleanups collected outside the CLEANUP_POINT_EXPR.  */
  int old_conds = gimplify_ctxp->conditions;
  gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
  gimplify_ctxp->conditions = 0;
  gimplify_ctxp->conditional_cleanups = NULL;

  gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);

  gimplify_ctxp->conditions = old_conds;
  gimplify_ctxp->conditional_cleanups = old_cleanups;

  /* Walk the gimplified body, converting each GIMPLE_WITH_CLEANUP_EXPR
     into a GIMPLE_TRY that covers the statements following it.  */
  for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
    {
      gimple wce = gsi_stmt (iter);

      if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
	{
	  if (gsi_one_before_end_p (iter))
	    {
	      /* The WCE is last: no statements follow it to protect, so
		 just splice the cleanup in place of the WCE.  */
	      /* Note that gsi_insert_seq_before and gsi_remove do not
		 scan operands, unlike some other sequence mutators.  */
	      gsi_insert_seq_before_without_update (&iter,
						    gimple_wce_cleanup (wce),
						    GSI_SAME_STMT);
	      gsi_remove (&iter, true);
	      break;
	    }
	  else
	    {
	      gimple gtry;
	      gimple_seq seq;
	      enum gimple_try_flags kind;

	      if (gimple_wce_cleanup_eh_only (wce))
		kind = GIMPLE_TRY_CATCH;
	      else
		kind = GIMPLE_TRY_FINALLY;
	      /* Everything after the WCE becomes the protected body.  */
	      seq = gsi_split_seq_after (iter);

	      gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
	      /* Do not use gsi_replace here, as it may scan operands.
		 We want to do a simple structural modification only.  */
	      *gsi_stmt_ptr (&iter) = gtry;
	      /* Continue scanning inside the new try body for further
		 cleanups.  */
	      iter = gsi_start (seq);
	    }
	}
      else
	gsi_next (&iter);
    }

  gimplify_seq_add_seq (pre_p, body_sequence);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}
4871 | |
/* Insert a cleanup marker for gimplify_cleanup_point_expr.  CLEANUP
   is the cleanup action required.  EH_ONLY is true if the cleanup should
   only be executed if an exception is thrown, not on normal exit.

   VAR is the temporary the cleanup protects; it is only used here to
   suppress an uninitialized-use warning in the conditional case.  The
   resulting GIMPLE_WITH_CLEANUP_EXPR is appended to PRE_P (or, in a
   conditional context, to the context's conditional_cleanups).  */

static void
gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
{
  gimple wce;
  gimple_seq cleanup_stmts = NULL;

  /* Errors can result in improperly nested cleanups.  Which results in
     confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
  if (errorcount || sorrycount)
    return;

  if (gimple_conditional_context ())
    {
      /* If we're in a conditional context, this is more complex.  We only
	 want to run the cleanup if we actually ran the initialization that
	 necessitates it, but we want to run it after the end of the
	 conditional context.  So we wrap the try/finally around the
	 condition and use a flag to determine whether or not to actually
	 run the destructor.  Thus

	   test ? f(A()) : 0

	 becomes (approximately)

	   flag = 0;
	   try {
	     if (test) { A::A(temp); flag = 1; val = f(temp); }
	     else { val = 0; }
	   } finally {
	     if (flag) A::~A(temp);
	   }
	   val
      */
      tree flag = create_tmp_var (boolean_type_node, "cleanup");
      gimple ffalse = gimple_build_assign (flag, boolean_false_node);
      gimple ftrue = gimple_build_assign (flag, boolean_true_node);

      /* Guard the cleanup with the flag.  */
      cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);

      /* flag = 0 and the guarded cleanup go outside the conditional;
	 flag = 1 is emitted where the initialization actually ran.  */
      gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
      gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
      gimplify_seq_add_stmt (pre_p, ftrue);

      /* Because of this manipulation, and the EH edges that jump
	 threading cannot redirect, the temporary (VAR) will appear
	 to be used uninitialized.  Don't warn.  */
      TREE_NO_WARNING (var) = 1;
    }
  else
    {
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);
      gimple_wce_set_cleanup_eh_only (wce, eh_only);
      gimplify_seq_add_stmt (pre_p, wce);
    }
}
4934 | |
/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR.

   The TARGET_EXPR's slot becomes a real temporary, the initializer is
   gimplified into PRE_P/POST_P, an optional cleanup is pushed, and
   *EXPR_P is replaced by the slot.  The initializer is cleared so the
   TARGET_EXPR is never expanded twice.  Returns GS_OK, or GS_ERROR if
   gimplifying the initializer failed.  */

static enum gimplify_status
gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree targ = *expr_p;
  tree temp = TARGET_EXPR_SLOT (targ);
  tree init = TARGET_EXPR_INITIAL (targ);
  enum gimplify_status ret;

  if (init)
    {
      /* TARGET_EXPR temps aren't part of the enclosing block, so add it
	 to the temps list.  Handle also variable length TARGET_EXPRs.  */
      if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
	    gimplify_type_sizes (TREE_TYPE (temp), pre_p);
	  gimplify_vla_decl (temp, pre_p);
	}
      else
	gimple_add_tmp_var (temp);

      /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
	 expression is supposed to initialize the slot.  */
      if (VOID_TYPE_P (TREE_TYPE (init)))
	ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
      else
	{
	  /* Otherwise wrap it in "temp = init" and gimplify that.  The
	     scratch INIT_EXPR node is freed immediately afterwards.  */
	  tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
	  init = init_expr;
	  ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
	  init = NULL;
	  ggc_free (init_expr);
	}
      if (ret == GS_ERROR)
	{
	  /* PR c++/28266 Make sure this is expanded only once. */
	  TARGET_EXPR_INITIAL (targ) = NULL_TREE;
	  return GS_ERROR;
	}
      /* Anything left of the initializer (GS_OK from gimplify_expr)
	 still needs to be emitted.  */
      if (init)
	gimplify_and_add (init, pre_p);

      /* If needed, push the cleanup for the temp.  */
      if (TARGET_EXPR_CLEANUP (targ))
	gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
			     CLEANUP_EH_ONLY (targ), pre_p);

      /* Only expand this once.  */
      TREE_OPERAND (targ, 3) = init;
      TARGET_EXPR_INITIAL (targ) = NULL_TREE;
    }
  else
    /* We should have expanded this before.  */
    gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));

  *expr_p = temp;
  return GS_OK;
}
4995 | |
4996 /* Gimplification of expression trees. */ | |
4997 | |
4998 /* Gimplify an expression which appears at statement context. The | |
4999 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is | |
5000 NULL, a new sequence is allocated. | |
5001 | |
5002 Return true if we actually added a statement to the queue. */ | |
5003 | |
5004 bool | |
5005 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p) | |
5006 { | |
5007 gimple_seq_node last; | |
5008 | |
5009 if (!*seq_p) | |
5010 *seq_p = gimple_seq_alloc (); | |
5011 | |
5012 last = gimple_seq_last (*seq_p); | |
5013 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none); | |
5014 return last != gimple_seq_last (*seq_p); | |
5015 } | |
5016 | |
5017 | |
5018 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels | |
5019 to CTX. If entries already exist, force them to be some flavor of private. | |
5020 If there is no enclosing parallel, do nothing. */ | |
5021 | |
5022 void | |
5023 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl) | |
5024 { | |
5025 splay_tree_node n; | |
5026 | |
5027 if (decl == NULL || !DECL_P (decl)) | |
5028 return; | |
5029 | |
5030 do | |
5031 { | |
5032 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl); | |
5033 if (n != NULL) | |
5034 { | |
5035 if (n->value & GOVD_SHARED) | |
5036 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN); | |
5037 else | |
5038 return; | |
5039 } | |
5040 else if (ctx->region_type != ORT_WORKSHARE) | |
5041 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE); | |
5042 | |
5043 ctx = ctx->outer_context; | |
5044 } | |
5045 while (ctx); | |
5046 } | |
5047 | |
5048 /* Similarly for each of the type sizes of TYPE. */ | |
5049 | |
5050 static void | |
5051 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type) | |
5052 { | |
5053 if (type == NULL || type == error_mark_node) | |
5054 return; | |
5055 type = TYPE_MAIN_VARIANT (type); | |
5056 | |
5057 if (pointer_set_insert (ctx->privatized_types, type)) | |
5058 return; | |
5059 | |
5060 switch (TREE_CODE (type)) | |
5061 { | |
5062 case INTEGER_TYPE: | |
5063 case ENUMERAL_TYPE: | |
5064 case BOOLEAN_TYPE: | |
5065 case REAL_TYPE: | |
5066 case FIXED_POINT_TYPE: | |
5067 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type)); | |
5068 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type)); | |
5069 break; | |
5070 | |
5071 case ARRAY_TYPE: | |
5072 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type)); | |
5073 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type)); | |
5074 break; | |
5075 | |
5076 case RECORD_TYPE: | |
5077 case UNION_TYPE: | |
5078 case QUAL_UNION_TYPE: | |
5079 { | |
5080 tree field; | |
5081 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field)) | |
5082 if (TREE_CODE (field) == FIELD_DECL) | |
5083 { | |
5084 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field)); | |
5085 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field)); | |
5086 } | |
5087 } | |
5088 break; | |
5089 | |
5090 case POINTER_TYPE: | |
5091 case REFERENCE_TYPE: | |
5092 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type)); | |
5093 break; | |
5094 | |
5095 default: | |
5096 break; | |
5097 } | |
5098 | |
5099 omp_firstprivatize_variable (ctx, TYPE_SIZE (type)); | |
5100 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type)); | |
5101 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type); | |
5102 } | |
5103 | |
/* Add an entry for DECL in the OpenMP context CTX with FLAGS.

   FLAGS is a mask of GOVD_* bits describing the data-sharing class and
   related attributes.  Variable-sized DECLs also get their pointer
   replacement variable and type-size expressions registered.  */

static void
omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
{
  splay_tree_node n;
  unsigned int nflags;
  tree t;

  if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
    return;

  /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
     there are constructors involved somewhere.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (decl))
      || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
    flags |= GOVD_SEEN;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL)
    {
      /* We shouldn't be re-adding the decl with the same data
	 sharing class.  */
      gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
      /* The only combination of data sharing classes we should see is
	 FIRSTPRIVATE and LASTPRIVATE.  */
      nflags = n->value | flags;
      gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
		  == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE));
      n->value = nflags;
      return;
    }

  /* When adding a variable-sized variable, we have to handle all sorts
     of additional bits of data: the pointer replacement variable, and
     the parameters of the type.  */
  if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      /* Add the pointer replacement variable as PRIVATE if the variable
	 replacement is private, else FIRSTPRIVATE since we'll need the
	 address of the original variable either for SHARED, or for the
	 copy into or out of the context.  */
      if (!(flags & GOVD_LOCAL))
	{
	  nflags = flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
	  nflags |= flags & GOVD_SEEN;
	  /* The DECL_VALUE_EXPR of a VLA is *ptr; register PTR itself.  */
	  t = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  omp_add_variable (ctx, t, nflags);
	}

      /* Add all of the variable and type parameters (which should have
	 been gimplified to a formal temporary) as FIRSTPRIVATE.  */
      omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
      omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* The variable-sized variable itself is never SHARED, only some form
	 of PRIVATE.  The sharing would take place via the pointer variable
	 which we remapped above.  */
      if (flags & GOVD_SHARED)
	flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
		| (flags & (GOVD_SEEN | GOVD_EXPLICIT));

      /* We're going to make use of the TYPE_SIZE_UNIT at least in the
	 alloca statement we generate for the variable, so make sure it
	 is available.  This isn't automatically needed for the SHARED
	 case, since we won't be allocating local storage then.
	 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
	 in this case omp_notice_variable will be called later
	 on when it is gimplified.  */
      else if (! (flags & GOVD_LOCAL))
	omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
    }
  else if (lang_hooks.decls.omp_privatize_by_reference (decl))
    {
      gcc_assert ((flags & GOVD_LOCAL) == 0);
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* Similar to the direct variable sized case above, we'll need the
	 size of references being privatized.  */
      if ((flags & GOVD_SHARED) == 0)
	{
	  t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  if (TREE_CODE (t) != INTEGER_CST)
	    omp_notice_variable (ctx, t, true);
	}
    }

  splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
}
5197 | |
/* Record the fact that DECL was used within the OpenMP context CTX.
   IN_CODE is true when real code uses DECL, and false when we should
   merely emit default(none) errors.  Return true if DECL is going to
   be remapped and thus DECL shouldn't be gimplified into its
   DECL_VALUE_EXPR (if any).  */

static bool
omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
{
  splay_tree_node n;
  unsigned flags = in_code ? GOVD_SEEN : 0;
  bool ret = false, shared;

  if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
    return false;

  /* Threadprivate variables are predetermined.  */
  if (is_global_var (decl))
    {
      if (DECL_THREAD_LOCAL_P (decl))
	return false;

      /* Also check whether DECL's value expression is rooted in a
	 thread-local variable.  */
      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree value = get_base_address (DECL_VALUE_EXPR (decl));

	  if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
	    return false;
	}
    }

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n == NULL)
    {
      /* First use of DECL in this context: determine its data-sharing
	 class from the default clause and language hooks.  */
      enum omp_clause_default_kind default_kind, kind;
      struct gimplify_omp_ctx *octx;

      if (ctx->region_type == ORT_WORKSHARE)
	goto do_outer;

      /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
	 remapped firstprivate instead of shared.  To some extent this is
	 addressed in omp_firstprivatize_type_sizes, but not effectively.  */
      default_kind = ctx->default_kind;
      kind = lang_hooks.decls.omp_predetermined_sharing (decl);
      if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
	default_kind = kind;

      switch (default_kind)
	{
	case OMP_CLAUSE_DEFAULT_NONE:
	  error ("%qs not specified in enclosing parallel",
		 IDENTIFIER_POINTER (DECL_NAME (decl)));
	  error ("%Henclosing parallel", &ctx->location);
	  /* FALLTHRU */
	case OMP_CLAUSE_DEFAULT_SHARED:
	  flags |= GOVD_SHARED;
	  break;
	case OMP_CLAUSE_DEFAULT_PRIVATE:
	  flags |= GOVD_PRIVATE;
	  break;
	case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
	  flags |= GOVD_FIRSTPRIVATE;
	  break;
	case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
	  /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
	  gcc_assert (ctx->region_type == ORT_TASK);
	  if (ctx->outer_context)
	    omp_notice_variable (ctx->outer_context, decl, in_code);
	  /* Scan outward (up to the enclosing parallel) for a non-shared
	     entry, which forces FIRSTPRIVATE here.  */
	  for (octx = ctx->outer_context; octx; octx = octx->outer_context)
	    {
	      splay_tree_node n2;

	      n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
		{
		  flags |= GOVD_FIRSTPRIVATE;
		  break;
		}
	      if ((octx->region_type & ORT_PARALLEL) != 0)
		break;
	    }
	  if (flags & GOVD_FIRSTPRIVATE)
	    break;
	  /* Function-local variables with no enclosing parallel also
	     default to FIRSTPRIVATE.  */
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!is_global_var (decl)
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    {
	      flags |= GOVD_FIRSTPRIVATE;
	      break;
	    }
	  flags |= GOVD_SHARED;
	  break;
	default:
	  gcc_unreachable ();
	}

      if ((flags & GOVD_PRIVATE)
	  && lang_hooks.decls.omp_private_outer_ref (decl))
	flags |= GOVD_PRIVATE_OUTER_REF;

      omp_add_variable (ctx, decl, flags);

      shared = (flags & GOVD_SHARED) != 0;
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
      goto do_outer;
    }

  /* Newly SEEN variable-sized variable: mark its pointer replacement
     variable (operand of the *ptr DECL_VALUE_EXPR) as SEEN too.  */
  if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
      && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
      && DECL_SIZE (decl)
      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      splay_tree_node n2;
      tree t = DECL_VALUE_EXPR (decl);
      gcc_assert (TREE_CODE (t) == INDIRECT_REF);
      t = TREE_OPERAND (t, 0);
      gcc_assert (DECL_P (t));
      n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
      n2->value |= GOVD_SEEN;
    }

  shared = ((flags | n->value) & GOVD_SHARED) != 0;
  ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);

  /* If nothing changed, there's nothing left to do.  */
  if ((n->value & flags) == flags)
    return ret;
  flags |= n->value;
  n->value = flags;

 do_outer:
  /* If the variable is private in the current context, then we don't
     need to propagate anything to an outer context.  */
  if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
    return ret;
  if (ctx->outer_context
      && omp_notice_variable (ctx->outer_context, decl, in_code))
    return true;
  return ret;
}
5340 | |
5341 /* Verify that DECL is private within CTX. If there's specific information | |
5342 to the contrary in the innermost scope, generate an error. */ | |
5343 | |
5344 static bool | |
5345 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl) | |
5346 { | |
5347 splay_tree_node n; | |
5348 | |
5349 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl); | |
5350 if (n != NULL) | |
5351 { | |
5352 if (n->value & GOVD_SHARED) | |
5353 { | |
5354 if (ctx == gimplify_omp_ctxp) | |
5355 { | |
5356 error ("iteration variable %qs should be private", | |
5357 IDENTIFIER_POINTER (DECL_NAME (decl))); | |
5358 n->value = GOVD_PRIVATE; | |
5359 return true; | |
5360 } | |
5361 else | |
5362 return false; | |
5363 } | |
5364 else if ((n->value & GOVD_EXPLICIT) != 0 | |
5365 && (ctx == gimplify_omp_ctxp | |
5366 || (ctx->region_type == ORT_COMBINED_PARALLEL | |
5367 && gimplify_omp_ctxp->outer_context == ctx))) | |
5368 { | |
5369 if ((n->value & GOVD_FIRSTPRIVATE) != 0) | |
5370 error ("iteration variable %qs should not be firstprivate", | |
5371 IDENTIFIER_POINTER (DECL_NAME (decl))); | |
5372 else if ((n->value & GOVD_REDUCTION) != 0) | |
5373 error ("iteration variable %qs should not be reduction", | |
5374 IDENTIFIER_POINTER (DECL_NAME (decl))); | |
5375 } | |
5376 return (ctx == gimplify_omp_ctxp | |
5377 || (ctx->region_type == ORT_COMBINED_PARALLEL | |
5378 && gimplify_omp_ctxp->outer_context == ctx)); | |
5379 } | |
5380 | |
5381 if (ctx->region_type != ORT_WORKSHARE) | |
5382 return false; | |
5383 else if (ctx->outer_context) | |
5384 return omp_is_private (ctx->outer_context, decl); | |
5385 return false; | |
5386 } | |
5387 | |
5388 /* Return true if DECL is private within a parallel region | |
5389 that binds to the current construct's context or in parallel | |
5390 region's REDUCTION clause. */ | |
5391 | |
5392 static bool | |
5393 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl) | |
5394 { | |
5395 splay_tree_node n; | |
5396 | |
5397 do | |
5398 { | |
5399 ctx = ctx->outer_context; | |
5400 if (ctx == NULL) | |
5401 return !(is_global_var (decl) | |
5402 /* References might be private, but might be shared too. */ | |
5403 || lang_hooks.decls.omp_privatize_by_reference (decl)); | |
5404 | |
5405 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl); | |
5406 if (n != NULL) | |
5407 return (n->value & GOVD_SHARED) == 0; | |
5408 } | |
5409 while (ctx->region_type == ORT_WORKSHARE); | |
5410 return false; | |
5411 } | |
5412 | |
/* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
   and previous omp contexts.  A new gimplify_omp_ctx for REGION_TYPE is
   created here and only installed as gimplify_omp_ctxp at the very end;
   gimplify_adjust_omp_clauses is responsible for popping it again.  */

static void
gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
			   enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *ctx, *outer_ctx;
  struct gimplify_ctx gctx;
  tree c;

  ctx = new_omp_context (region_type);
  outer_ctx = ctx->outer_context;

  while ((c = *list_p) != NULL)
    {
      bool remove = false;		/* Unlink C from the clause chain?  */
      bool notice_outer = true;		/* Notify the enclosing context?  */
      const char *check_non_private = NULL; /* Clause name for diagnostics.  */
      unsigned int flags;
      tree decl;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  flags = GOVD_PRIVATE | GOVD_EXPLICIT;
	  if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
	    {
	      /* The private copy is initialized from the outer value;
		 record that so the outer context gets notified below.  */
	      flags |= GOVD_PRIVATE_OUTER_REF;
	      OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
	    }
	  else
	    notice_outer = false;
	  goto do_add;
	case OMP_CLAUSE_SHARED:
	  flags = GOVD_SHARED | GOVD_EXPLICIT;
	  goto do_add;
	case OMP_CLAUSE_FIRSTPRIVATE:
	  flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
	  check_non_private = "firstprivate";
	  goto do_add;
	case OMP_CLAUSE_LASTPRIVATE:
	  flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
	  check_non_private = "lastprivate";
	  goto do_add;
	case OMP_CLAUSE_REDUCTION:
	  flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
	  check_non_private = "reduction";
	  goto do_add;

	do_add:
	  decl = OMP_CLAUSE_DECL (c);
	  /* Drop clauses with erroneous decls or types.  */
	  if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
	    {
	      remove = true;
	      break;
	    }
	  omp_add_variable (ctx, decl, flags);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      /* Gimplify the reduction init and merge expressions now,
		 inside the new context (with the placeholder local to
		 it), using a private gimplify context each time.  */
	      omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
				GOVD_LOCAL | GOVD_SEEN);
	      gimplify_omp_ctxp = ctx;
	      push_gimplify_context (&gctx);

	      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = gimple_seq_alloc ();
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = gimple_seq_alloc ();

	      gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
				&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
	      push_gimplify_context (&gctx);
	      gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
				&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
	      /* The GENERIC forms are consumed; only the GIMPLE
		 sequences remain on the clause.  */
	      OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
	      OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		   && OMP_CLAUSE_LASTPRIVATE_STMT (c))
	    {
	      gimplify_omp_ctxp = ctx;
	      push_gimplify_context (&gctx);
	      /* Give the statement a BIND_EXPR wrapper so gimplification
		 has a scope to put temporaries in.  */
	      if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
		{
		  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
				      NULL, NULL);
		  TREE_SIDE_EFFECTS (bind) = 1;
		  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
		  OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
		}
	      gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
				&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
	      OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  if (notice_outer)
	    goto do_notice;
	  break;

	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
	    {
	      remove = true;
	      break;
	    }
	do_notice:
	  if (outer_ctx)
	    omp_notice_variable (outer_ctx, decl, true);
	  /* E.g. firstprivate/lastprivate/reduction on a worksharing
	     construct conflicts with the variable being private in the
	     enclosing parallel region.  */
	  if (check_non_private
	      && region_type == ORT_WORKSHARE
	      && omp_check_private (ctx, decl))
	    {
	      error ("%s variable %qs is private in outer context",
		     check_non_private, IDENTIFIER_POINTER (DECL_NAME (decl)));
	      remove = true;
	    }
	  break;

	case OMP_CLAUSE_IF:
	  OMP_CLAUSE_OPERAND (c, 0)
	    = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
	  /* Fall through.  */

	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NUM_THREADS:
	  /* These carry an expression operand that must become a
	     gimple value; emit its side effects into PRE_P.  */
	  if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    remove = true;
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_COLLAPSE:
	  /* No operands to gimplify, nothing to record.  */
	  break;

	case OMP_CLAUSE_DEFAULT:
	  ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
	  break;

	default:
	  gcc_unreachable ();
	}

      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  /* The new context becomes current for gimplification of the body.  */
  gimplify_omp_ctxp = ctx;
}
5576 | |
5577 /* For all variables that were not actually used within the context, | |
5578 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */ | |
5579 | |
5580 static int | |
5581 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data) | |
5582 { | |
5583 tree *list_p = (tree *) data; | |
5584 tree decl = (tree) n->key; | |
5585 unsigned flags = n->value; | |
5586 enum omp_clause_code code; | |
5587 tree clause; | |
5588 bool private_debug; | |
5589 | |
5590 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL)) | |
5591 return 0; | |
5592 if ((flags & GOVD_SEEN) == 0) | |
5593 return 0; | |
5594 if (flags & GOVD_DEBUG_PRIVATE) | |
5595 { | |
5596 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE); | |
5597 private_debug = true; | |
5598 } | |
5599 else | |
5600 private_debug | |
5601 = lang_hooks.decls.omp_private_debug_clause (decl, | |
5602 !!(flags & GOVD_SHARED)); | |
5603 if (private_debug) | |
5604 code = OMP_CLAUSE_PRIVATE; | |
5605 else if (flags & GOVD_SHARED) | |
5606 { | |
5607 if (is_global_var (decl)) | |
5608 { | |
5609 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context; | |
5610 while (ctx != NULL) | |
5611 { | |
5612 splay_tree_node on | |
5613 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl); | |
5614 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE | |
5615 | GOVD_PRIVATE | GOVD_REDUCTION)) != 0) | |
5616 break; | |
5617 ctx = ctx->outer_context; | |
5618 } | |
5619 if (ctx == NULL) | |
5620 return 0; | |
5621 } | |
5622 code = OMP_CLAUSE_SHARED; | |
5623 } | |
5624 else if (flags & GOVD_PRIVATE) | |
5625 code = OMP_CLAUSE_PRIVATE; | |
5626 else if (flags & GOVD_FIRSTPRIVATE) | |
5627 code = OMP_CLAUSE_FIRSTPRIVATE; | |
5628 else | |
5629 gcc_unreachable (); | |
5630 | |
5631 clause = build_omp_clause (code); | |
5632 OMP_CLAUSE_DECL (clause) = decl; | |
5633 OMP_CLAUSE_CHAIN (clause) = *list_p; | |
5634 if (private_debug) | |
5635 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1; | |
5636 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF)) | |
5637 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1; | |
5638 *list_p = clause; | |
5639 lang_hooks.decls.omp_finish_clause (clause); | |
5640 | |
5641 return 0; | |
5642 } | |
5643 | |
/* Post-process the clause list *LIST_P after the region body has been
   gimplified: drop explicit clauses whose variable was never actually
   used, downgrade debug-only clauses to marked PRIVATE, and append the
   implicit data-sharing clauses recorded in the current context.
   Finally pops and frees the gimplify_omp_ctx that
   gimplify_scan_omp_clauses installed.  */

static void
gimplify_adjust_omp_clauses (tree *list_p)
{
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  tree c, decl;

  while ((c = *list_p) != NULL)
    {
      splay_tree_node n;
      bool remove = false;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  /* N is dereferenced without a check: explicit clauses were
	     recorded via omp_add_variable during the scan phase, so the
	     lookup is expected to succeed.  */
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  remove = !(n->value & GOVD_SEEN);
	  if (! remove)
	    {
	      bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
	      if ((n->value & GOVD_DEBUG_PRIVATE)
		  || lang_hooks.decls.omp_private_debug_clause (decl, shared))
		{
		  gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
			      || ((n->value & GOVD_DATA_SHARE_CLASS)
				  == GOVD_PRIVATE));
		  /* Keep the clause only for debug information: turn it
		     into PRIVATE and flag it as such.  */
		  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
		  OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
		}
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
	     accurately reflect the presence of a FIRSTPRIVATE clause.  */
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	    = (n->value & GOVD_FIRSTPRIVATE) != 0;
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_COLLAPSE:
	  /* Nothing to adjust for these.  */
	  break;

	default:
	  gcc_unreachable ();
	}

      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  /* Add in any implicit data sharing.  */
  splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);

  gimplify_omp_ctxp = ctx->outer_context;
  delete_omp_context (ctx);
}
5716 | |
5717 /* Gimplify the contents of an OMP_PARALLEL statement. This involves | |
5718 gimplification of the body, as well as scanning the body for used | |
5719 variables. We need to do this scan now, because variable-sized | |
5720 decls will be decomposed during gimplification. */ | |
5721 | |
5722 static void | |
5723 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p) | |
5724 { | |
5725 tree expr = *expr_p; | |
5726 gimple g; | |
5727 gimple_seq body = NULL; | |
5728 struct gimplify_ctx gctx; | |
5729 | |
5730 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p, | |
5731 OMP_PARALLEL_COMBINED (expr) | |
5732 ? ORT_COMBINED_PARALLEL | |
5733 : ORT_PARALLEL); | |
5734 | |
5735 push_gimplify_context (&gctx); | |
5736 | |
5737 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body); | |
5738 if (gimple_code (g) == GIMPLE_BIND) | |
5739 pop_gimplify_context (g); | |
5740 else | |
5741 pop_gimplify_context (NULL); | |
5742 | |
5743 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr)); | |
5744 | |
5745 g = gimple_build_omp_parallel (body, | |
5746 OMP_PARALLEL_CLAUSES (expr), | |
5747 NULL_TREE, NULL_TREE); | |
5748 if (OMP_PARALLEL_COMBINED (expr)) | |
5749 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED); | |
5750 gimplify_seq_add_stmt (pre_p, g); | |
5751 *expr_p = NULL_TREE; | |
5752 } | |
5753 | |
5754 /* Gimplify the contents of an OMP_TASK statement. This involves | |
5755 gimplification of the body, as well as scanning the body for used | |
5756 variables. We need to do this scan now, because variable-sized | |
5757 decls will be decomposed during gimplification. */ | |
5758 | |
5759 static void | |
5760 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p) | |
5761 { | |
5762 tree expr = *expr_p; | |
5763 gimple g; | |
5764 gimple_seq body = NULL; | |
5765 struct gimplify_ctx gctx; | |
5766 | |
5767 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p, ORT_TASK); | |
5768 | |
5769 push_gimplify_context (&gctx); | |
5770 | |
5771 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body); | |
5772 if (gimple_code (g) == GIMPLE_BIND) | |
5773 pop_gimplify_context (g); | |
5774 else | |
5775 pop_gimplify_context (NULL); | |
5776 | |
5777 gimplify_adjust_omp_clauses (&OMP_TASK_CLAUSES (expr)); | |
5778 | |
5779 g = gimple_build_omp_task (body, | |
5780 OMP_TASK_CLAUSES (expr), | |
5781 NULL_TREE, NULL_TREE, | |
5782 NULL_TREE, NULL_TREE, NULL_TREE); | |
5783 gimplify_seq_add_stmt (pre_p, g); | |
5784 *expr_p = NULL_TREE; | |
5785 } | |
5786 | |
/* Gimplify the gross structure of an OMP_FOR statement: scan and adjust
   its clauses, gimplify init/cond/incr for every collapsed loop level,
   canonicalize the increment to a MODIFY_EXPR form, and build the
   GIMPLE_OMP_FOR statement into PRE_P.  Returns GS_ALL_DONE on success
   or GS_ERROR.  */

static enum gimplify_status
gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt, decl, var, t;
  enum gimplify_status ret = GS_OK;
  gimple gfor;
  gimple_seq for_body, for_pre_body;
  int i;

  for_stmt = *expr_p;

  gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
			     ORT_WORKSHARE);

  /* Handle OMP_FOR_INIT.  */
  for_pre_body = NULL;
  gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
  OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;

  /* INIT, COND and INCR are parallel TREE_VECs with one element per
     collapsed loop level.  */
  for_body = gimple_seq_alloc ();
  gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
	      == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
  gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
	      == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
      decl = TREE_OPERAND (t, 0);
      gcc_assert (DECL_P (decl));
      gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
		  || POINTER_TYPE_P (TREE_TYPE (decl)));

      /* Make sure the iteration variable is private.  */
      if (omp_is_private (gimplify_omp_ctxp, decl))
	omp_notice_variable (gimplify_omp_ctxp, decl, true);
      else
	omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);

      /* If DECL is not a gimple register, create a temporary variable to act
	 as an iteration counter.  This is valid, since DECL cannot be
	 modified in the body of the loop.  */
      if (!is_gimple_reg (decl))
	{
	  var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
	  TREE_OPERAND (t, 0) = var;

	  /* Copy the counter back into DECL at the top of the body.  */
	  gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));

	  omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
	}
      else
	var = decl;

      /* Gimplify the initial value.  */
      ret |= gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
			    is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;

      /* Handle OMP_FOR_COND.  */
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gcc_assert (COMPARISON_CLASS_P (t));
      gcc_assert (TREE_OPERAND (t, 0) == decl);

      ret |= gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
			    is_gimple_val, fb_rvalue);

      /* Handle OMP_FOR_INCR.  Canonicalize every form to
	 VAR = VAR +/- STEP (increments/decrements become PLUS_EXPR of
	 +1 or -1).  */
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      switch (TREE_CODE (t))
	{
	case PREINCREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  t = build_int_cst (TREE_TYPE (decl), 1);
	  t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
	  TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
	  break;

	case PREDECREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	  t = build_int_cst (TREE_TYPE (decl), -1);
	  t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
	  TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
	  break;

	case MODIFY_EXPR:
	  gcc_assert (TREE_OPERAND (t, 0) == decl);
	  TREE_OPERAND (t, 0) = var;

	  t = TREE_OPERAND (t, 1);
	  switch (TREE_CODE (t))
	    {
	    case PLUS_EXPR:
	      /* For VAR = STEP + VAR, swap the operands so the
		 iteration variable always sits in operand 0.  */
	      if (TREE_OPERAND (t, 1) == decl)
		{
		  TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
		  TREE_OPERAND (t, 0) = var;
		  break;
		}

	      /* Fallthru.  */
	    case MINUS_EXPR:
	    case POINTER_PLUS_EXPR:
	      gcc_assert (TREE_OPERAND (t, 0) == decl);
	      TREE_OPERAND (t, 0) = var;
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  ret |= gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
				is_gimple_val, fb_rvalue);
	  break;

	default:
	  gcc_unreachable ();
	}

      /* When a temporary counter replaced DECL (or with collapsed
	 loops), a lastprivate clause for DECL must compute the final
	 value from DECL itself, since the body only sees the copy.  */
      if (var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
	{
	  tree c;
	  for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		&& OMP_CLAUSE_DECL (c) == decl
		&& OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
	      {
		t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
		gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
		gcc_assert (TREE_OPERAND (t, 0) == var);
		t = TREE_OPERAND (t, 1);
		gcc_assert (TREE_CODE (t) == PLUS_EXPR
			    || TREE_CODE (t) == MINUS_EXPR
			    || TREE_CODE (t) == POINTER_PLUS_EXPR);
		gcc_assert (TREE_OPERAND (t, 0) == var);
		t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
			    TREE_OPERAND (t, 1));
		gimplify_assign (decl, t,
				 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      }
	}
    }

  gimplify_and_add (OMP_FOR_BODY (for_stmt), &for_body);

  gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));

  gfor = gimple_build_omp_for (for_body, OMP_FOR_CLAUSES (for_stmt),
			       TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
			       for_pre_body);

  /* Transfer the per-level index/initial/cond/final/incr operands from
     the canonicalized trees into the GIMPLE statement.  */
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
      gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
      gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
    }

  gimplify_seq_add_stmt (pre_p, gfor);
  /* RET accumulated only GS_ALL_DONE/GS_ERROR results by this point;
     collapse anything other than pure success into GS_ERROR.  */
  return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
}
5956 | |
5957 /* Gimplify the gross structure of other OpenMP worksharing constructs. | |
5958 In particular, OMP_SECTIONS and OMP_SINGLE. */ | |
5959 | |
5960 static void | |
5961 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p) | |
5962 { | |
5963 tree expr = *expr_p; | |
5964 gimple stmt; | |
5965 gimple_seq body = NULL; | |
5966 | |
5967 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ORT_WORKSHARE); | |
5968 gimplify_and_add (OMP_BODY (expr), &body); | |
5969 gimplify_adjust_omp_clauses (&OMP_CLAUSES (expr)); | |
5970 | |
5971 if (TREE_CODE (expr) == OMP_SECTIONS) | |
5972 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr)); | |
5973 else if (TREE_CODE (expr) == OMP_SINGLE) | |
5974 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr)); | |
5975 else | |
5976 gcc_unreachable (); | |
5977 | |
5978 gimplify_seq_add_stmt (pre_p, stmt); | |
5979 } | |
5980 | |
5981 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have | |
5982 stabilized the lhs of the atomic operation as *ADDR. Return true if | |
5983 EXPR is this stabilized form. */ | |
5984 | |
5985 static bool | |
5986 goa_lhs_expr_p (tree expr, tree addr) | |
5987 { | |
5988 /* Also include casts to other type variants. The C front end is fond | |
5989 of adding these for e.g. volatile variables. This is like | |
5990 STRIP_TYPE_NOPS but includes the main variant lookup. */ | |
5991 while ((CONVERT_EXPR_P (expr) | |
5992 || TREE_CODE (expr) == NON_LVALUE_EXPR) | |
5993 && TREE_OPERAND (expr, 0) != error_mark_node | |
5994 && (TYPE_MAIN_VARIANT (TREE_TYPE (expr)) | |
5995 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0))))) | |
5996 expr = TREE_OPERAND (expr, 0); | |
5997 | |
5998 if (TREE_CODE (expr) == INDIRECT_REF) | |
5999 { | |
6000 expr = TREE_OPERAND (expr, 0); | |
6001 while (expr != addr | |
6002 && (CONVERT_EXPR_P (expr) | |
6003 || TREE_CODE (expr) == NON_LVALUE_EXPR) | |
6004 && TREE_CODE (expr) == TREE_CODE (addr) | |
6005 && TYPE_MAIN_VARIANT (TREE_TYPE (expr)) | |
6006 == TYPE_MAIN_VARIANT (TREE_TYPE (addr))) | |
6007 { | |
6008 expr = TREE_OPERAND (expr, 0); | |
6009 addr = TREE_OPERAND (addr, 0); | |
6010 } | |
6011 if (expr == addr) | |
6012 return true; | |
6013 return (TREE_CODE (addr) == ADDR_EXPR | |
6014 && TREE_CODE (expr) == ADDR_EXPR | |
6015 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0)); | |
6016 } | |
6017 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0)) | |
6018 return true; | |
6019 return false; | |
6020 } | |
6021 | |
/* Walk *EXPR_P and replace
   appearances of *LHS_ADDR with LHS_VAR.  If an expression does not involve
   the lhs, evaluate it into a temporary.  Return 1 if the lhs appeared as
   a subexpression, 0 if it did not, or -1 if an error was encountered.  */

static int
goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
		    tree lhs_var)
{
  tree expr = *expr_p;
  int saw_lhs;

  /* The whole expression is the lhs itself: substitute the temporary
     that will hold the atomically loaded value.  */
  if (goa_lhs_expr_p (expr, lhs_addr))
    {
      *expr_p = lhs_var;
      return 1;
    }
  /* A plain gimple value needs no stabilization.  */
  if (is_gimple_val (expr))
    return 0;

  /* Recurse into operands.  The bitwise or accumulates 1 for "lhs was
     seen" and makes -1 (error) sticky.  */
  saw_lhs = 0;
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
    case tcc_comparison:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
				     lhs_var);
      /* FALLTHRU: operand 0 of binary nodes is handled by the unary
	 case below.  */
    case tcc_unary:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
				     lhs_var);
      break;
    case tcc_expression:
      switch (TREE_CODE (expr))
	{
	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var);
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var);
	  break;
	default:
	  break;
	}
      break;
    default:
      break;
    }

  /* The lhs occurred nowhere below: evaluate this subexpression into a
     temporary so it is computed outside the atomic region.  */
  if (saw_lhs == 0)
    {
      enum gimplify_status gs;
      gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
      if (gs != GS_ALL_DONE)
	saw_lhs = -1;
    }

  return saw_lhs;
}
6081 | |
6082 | |
/* Gimplify an OMP_ATOMIC statement: emit a GIMPLE_OMP_ATOMIC_LOAD of
   the old value into a temporary, rewrite the rhs to use it, and emit
   the matching GIMPLE_OMP_ATOMIC_STORE.  Returns GS_ALL_DONE or
   GS_ERROR.  */

static enum gimplify_status
gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
{
  tree addr = TREE_OPERAND (*expr_p, 0);	/* Address of the lhs.  */
  tree rhs = TREE_OPERAND (*expr_p, 1);		/* Value being stored.  */
  /* Type of the object ADDR points to.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
  tree tmp_load;

  /* Temporary that receives the atomically loaded old value; every
     appearance of the lhs inside RHS is replaced by it.  */
  tmp_load = create_tmp_var (type, NULL);
  if (goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
    return GS_ERROR;

  if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  /* Emit load, the gimplified rhs computation, then the store —
     statement order in PRE_P is significant.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_load (tmp_load, addr));
  if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;
  gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_store (rhs));
  *expr_p = NULL;

  return GS_ALL_DONE;
}
6110 | |
6111 | |
6112 /* Converts the GENERIC expression tree *EXPR_P to GIMPLE. If the | |
6113 expression produces a value to be used as an operand inside a GIMPLE | |
6114 statement, the value will be stored back in *EXPR_P. This value will | |
6115 be a tree of class tcc_declaration, tcc_constant, tcc_reference or | |
6116 an SSA_NAME. The corresponding sequence of GIMPLE statements is | |
6117 emitted in PRE_P and POST_P. | |
6118 | |
6119 Additionally, this process may overwrite parts of the input | |
6120 expression during gimplification. Ideally, it should be | |
6121 possible to do non-destructive gimplification. | |
6122 | |
6123 EXPR_P points to the GENERIC expression to convert to GIMPLE. If | |
6124 the expression needs to evaluate to a value to be used as | |
6125 an operand in a GIMPLE statement, this value will be stored in | |
6126 *EXPR_P on exit. This happens when the caller specifies one | |
6127 of fb_lvalue or fb_rvalue fallback flags. | |
6128 | |
6129 PRE_P will contain the sequence of GIMPLE statements corresponding | |
6130 to the evaluation of EXPR and all the side-effects that must | |
6131 be executed before the main expression. On exit, the last | |
6132 statement of PRE_P is the core statement being gimplified. For | |
6133 instance, when gimplifying 'if (++a)' the last statement in | |
6134 PRE_P will be 'if (t.1)' where t.1 is the result of | |
6135 pre-incrementing 'a'. | |
6136 | |
6137 POST_P will contain the sequence of GIMPLE statements corresponding | |
6138 to the evaluation of all the side-effects that must be executed | |
6139 after the main expression. If this is NULL, the post | |
6140 side-effects are stored at the end of PRE_P. | |
6141 | |
6142 The reason why the output is split in two is to handle post | |
6143 side-effects explicitly. In some cases, an expression may have | |
6144 inner and outer post side-effects which need to be emitted in | |
6145 an order different from the one given by the recursive | |
6146 traversal. For instance, for the expression (*p--)++ the post | |
6147 side-effects of '--' must actually occur *after* the post | |
6148 side-effects of '++'. However, gimplification will first visit | |
6149 the inner expression, so if a separate POST sequence was not | |
6150 used, the resulting sequence would be: | |
6151 | |
6152 1 t.1 = *p | |
6153 2 p = p - 1 | |
6154 3 t.2 = t.1 + 1 | |
6155 4 *p = t.2 | |
6156 | |
6157 However, the post-decrement operation in line #2 must not be | |
6158 evaluated until after the store to *p at line #4, so the | |
6159 correct sequence should be: | |
6160 | |
6161 1 t.1 = *p | |
6162 2 t.2 = t.1 + 1 | |
6163 3 *p = t.2 | |
6164 4 p = p - 1 | |
6165 | |
6166 So, by specifying a separate post queue, it is possible | |
6167 to emit the post side-effects in the correct order. | |
6168 If POST_P is NULL, an internal queue will be used. Before | |
6169 returning to the caller, the sequence POST_P is appended to | |
6170 the main output sequence PRE_P. | |
6171 | |
6172 GIMPLE_TEST_F points to a function that takes a tree T and | |
6173 returns nonzero if T is in the GIMPLE form requested by the | |
6174 caller. The GIMPLE predicates are in tree-gimple.c. | |
6175 | |
6176 FALLBACK tells the function what sort of a temporary we want if | |
6177 gimplification cannot produce an expression that complies with | |
6178 GIMPLE_TEST_F. | |
6179 | |
6180 fb_none means that no temporary should be generated | |
6181 fb_rvalue means that an rvalue is OK to generate | |
6182 fb_lvalue means that an lvalue is OK to generate | |
6183 fb_either means that either is OK, but an lvalue is preferable. | |
6184 fb_mayfail means that gimplification may fail (in which case | |
6185 GS_ERROR will be returned) | |
6186 | |
6187 The return value is either GS_ERROR or GS_ALL_DONE, since this | |
6188 function iterates until EXPR is completely gimplified or an error | |
6189 occurs. */ | |
6190 | |
/* Gimplify the expression *EXPR_P, emitting side effects into PRE_P
   and POST_P, until *EXPR_P satisfies GIMPLE_TEST_F (full contract in
   the block comment preceding this function).  */

enum gimplify_status
gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
	       bool (*gimple_test_f) (tree), fallback_t fallback)
{
  tree tmp;
  gimple_seq internal_pre = NULL;
  gimple_seq internal_post = NULL;
  tree save_expr;
  bool is_statement;
  location_t saved_location;
  enum gimplify_status ret;
  gimple_stmt_iterator pre_last_gsi, post_last_gsi;

  save_expr = *expr_p;
  if (save_expr == NULL_TREE)
    return GS_ALL_DONE;

  /* If we are gimplifying a top-level statement, PRE_P must be valid.  */
  is_statement = gimple_test_f == is_gimple_stmt;
  if (is_statement)
    gcc_assert (pre_p);

  /* Consistency checks.  Each recognized predicate implies a set of
     acceptable fallbacks; any other predicate is a caller bug.  */
  if (gimple_test_f == is_gimple_reg)
    gcc_assert (fallback & (fb_rvalue | fb_lvalue));
  else if (gimple_test_f == is_gimple_val
           || gimple_test_f == is_gimple_formal_tmp_rhs
           || gimple_test_f == is_gimple_formal_tmp_or_call_rhs
           || gimple_test_f == is_gimple_formal_tmp_reg
           || gimple_test_f == is_gimple_formal_tmp_var
           || gimple_test_f == is_gimple_call_addr
           || gimple_test_f == is_gimple_condexpr
           || gimple_test_f == is_gimple_mem_rhs
           || gimple_test_f == is_gimple_mem_or_call_rhs
           || gimple_test_f == is_gimple_reg_rhs
           || gimple_test_f == is_gimple_reg_or_call_rhs
           || gimple_test_f == is_gimple_asm_val)
    gcc_assert (fallback & fb_rvalue);
  else if (gimple_test_f == is_gimple_min_lval
	   || gimple_test_f == is_gimple_lvalue)
    gcc_assert (fallback & fb_lvalue);
  else if (gimple_test_f == is_gimple_addressable)
    gcc_assert (fallback & fb_either);
  else if (gimple_test_f == is_gimple_stmt)
    gcc_assert (fallback == fb_none);
  else
    {
      /* We should have recognized the GIMPLE_TEST_F predicate to
	 know what kind of fallback to use in case a temporary is
	 needed to hold the value or address of *EXPR_P.  */
      gcc_unreachable ();
    }

  /* We used to check the predicate here and return immediately if it
     succeeds.  This is wrong; the design is for gimplification to be
     idempotent, and for the predicates to only test for valid forms, not
     whether they are fully simplified.  */
  if (pre_p == NULL)
    pre_p = &internal_pre;

  if (post_p == NULL)
    post_p = &internal_post;

  /* Remember the last statements added to PRE_P and POST_P.  Every
     new statement added by the gimplification helpers needs to be
     annotated with location information.  To centralize the
     responsibility, we remember the last statement that had been
     added to both queues before gimplifying *EXPR_P.  If
     gimplification produces new statements in PRE_P and POST_P, those
     statements will be annotated with the same location information
     as *EXPR_P.  */
  pre_last_gsi = gsi_last (*pre_p);
  post_last_gsi = gsi_last (*post_p);

  saved_location = input_location;
  if (save_expr != error_mark_node
      && EXPR_HAS_LOCATION (*expr_p))
    input_location = EXPR_LOCATION (*expr_p);

  /* Loop over the specific gimplifiers until the toplevel node
     remains the same.  */
  do
    {
      /* Strip away as many useless type conversions as possible
	 at the toplevel.  */
      STRIP_USELESS_TYPE_CONVERSION (*expr_p);

      /* Remember the expr.  */
      save_expr = *expr_p;

      /* Die, die, die, my darling.  (Bail out immediately on an
	 erroneous expression or an expression of erroneous type.)  */
      if (save_expr == error_mark_node
	  || (TREE_TYPE (save_expr)
	      && TREE_TYPE (save_expr) == error_mark_node))
	{
	  ret = GS_ERROR;
	  break;
	}

      /* Do any language-specific gimplification.  The langhook may
	 replace *EXPR_P entirely, in which case we restart the loop.  */
      ret = lang_hooks.gimplify_expr (expr_p, pre_p, post_p);
      if (ret == GS_OK)
	{
	  if (*expr_p == NULL_TREE)
	    break;
	  if (*expr_p != save_expr)
	    continue;
	}
      else if (ret != GS_UNHANDLED)
	break;

      ret = GS_OK;
      switch (TREE_CODE (*expr_p))
	{
	  /* First deal with the special cases.  */

	case POSTINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case PREDECREMENT_EXPR:
	  ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
					fallback != fb_none);
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	case COMPONENT_REF:
	case VIEW_CONVERT_EXPR:
	  ret = gimplify_compound_lval (expr_p, pre_p, post_p,
					fallback ? fallback : fb_rvalue);
	  break;

	case COND_EXPR:
	  ret = gimplify_cond_expr (expr_p, pre_p, fallback);

	  /* C99 code may assign to an array in a structure value of a
	     conditional expression, and this has undefined behavior
	     only on execution, so create a temporary if an lvalue is
	     required.  */
	  if (fallback == fb_lvalue)
	    {
	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
	      mark_addressable (*expr_p);
	    }
	  break;

	case CALL_EXPR:
	  ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);

	  /* C99 code may assign to an array in a structure returned
	     from a function, and this has undefined behavior only on
	     execution, so create a temporary if an lvalue is
	     required.  */
	  if (fallback == fb_lvalue)
	    {
	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
	      mark_addressable (*expr_p);
	    }
	  break;

	case TREE_LIST:
	  gcc_unreachable ();

	case COMPOUND_EXPR:
	  ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
	  break;

	case MODIFY_EXPR:
	case INIT_EXPR:
	  ret = gimplify_modify_expr (expr_p, pre_p, post_p,
				      fallback != fb_none);
	  break;

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  ret = gimplify_boolean_expr (expr_p);
	  break;

	case TRUTH_NOT_EXPR:
	  /* Boolify a negation of non-boolean type and retry on the
	     converted form.  */
	  if (TREE_CODE (TREE_TYPE (*expr_p)) != BOOLEAN_TYPE)
	    {
	      tree type = TREE_TYPE (*expr_p);
	      *expr_p = fold_convert (type, gimple_boolify (*expr_p));
	      ret = GS_OK;
	      break;
	    }

	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			       is_gimple_val, fb_rvalue);
	  recalculate_side_effects (*expr_p);
	  break;

	case ADDR_EXPR:
	  ret = gimplify_addr_expr (expr_p, pre_p, post_p);
	  break;

	case VA_ARG_EXPR:
	  ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
	  break;

	CASE_CONVERT:
	  if (IS_EMPTY_STMT (*expr_p))
	    {
	      ret = GS_ALL_DONE;
	      break;
	    }

	  if (VOID_TYPE_P (TREE_TYPE (*expr_p))
	      || fallback == fb_none)
	    {
	      /* Just strip a conversion to void (or in void context) and
		 try again.  */
	      *expr_p = TREE_OPERAND (*expr_p, 0);
	      break;
	    }

	  ret = gimplify_conversion (expr_p);
	  if (ret == GS_ERROR)
	    break;
	  if (*expr_p != save_expr)
	    break;
	  /* FALLTHRU */

	case FIX_TRUNC_EXPR:
	  /* unary_expr: ... | '(' cast ')' val | ...  */
	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			       is_gimple_val, fb_rvalue);
	  recalculate_side_effects (*expr_p);
	  break;

	case INDIRECT_REF:
	  *expr_p = fold_indirect_ref (*expr_p);
	  if (*expr_p != save_expr)
	    break;
	  /* else fall through: gimplify the address operand like the
	     aligned/misaligned variants below.  */
	case ALIGN_INDIRECT_REF:
	case MISALIGNED_INDIRECT_REF:
	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			       is_gimple_reg, fb_rvalue);
	  recalculate_side_effects (*expr_p);
	  break;

	  /* Constants need not be gimplified.  */
	case INTEGER_CST:
	case REAL_CST:
	case FIXED_CST:
	case STRING_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	  ret = GS_ALL_DONE;
	  break;

	case CONST_DECL:
	  /* If we require an lvalue, such as for ADDR_EXPR, retain the
	     CONST_DECL node.  Otherwise the decl is replaceable by its
	     value.  */
	  /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either.  */
	  if (fallback & fb_lvalue)
	    ret = GS_ALL_DONE;
	  else
	    *expr_p = DECL_INITIAL (*expr_p);
	  break;

	case DECL_EXPR:
	  ret = gimplify_decl_expr (expr_p, pre_p);
	  break;

	case EXC_PTR_EXPR:
	  /* FIXME make this a decl.  */
	  ret = GS_ALL_DONE;
	  break;

	case BIND_EXPR:
	  ret = gimplify_bind_expr (expr_p, pre_p);
	  break;

	case LOOP_EXPR:
	  ret = gimplify_loop_expr (expr_p, pre_p);
	  break;

	case SWITCH_EXPR:
	  ret = gimplify_switch_expr (expr_p, pre_p);
	  break;

	case EXIT_EXPR:
	  ret = gimplify_exit_expr (expr_p);
	  break;

	case GOTO_EXPR:
	  /* If the target is not a LABEL_DECL, then it is a computed jump
	     and the target needs to be gimplified.  */
	  if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
	    {
	      ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
				   NULL, is_gimple_val, fb_rvalue);
	      if (ret == GS_ERROR)
		break;
	    }
	  gimplify_seq_add_stmt (pre_p,
			  gimple_build_goto (GOTO_DESTINATION (*expr_p)));
	  break;

	case PREDICT_EXPR:
	  gimplify_seq_add_stmt (pre_p,
			gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
					      PREDICT_EXPR_OUTCOME (*expr_p)));
	  ret = GS_ALL_DONE;
	  break;

	case LABEL_EXPR:
	  ret = GS_ALL_DONE;
	  gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
		      == current_function_decl);
	  gimplify_seq_add_stmt (pre_p,
			  gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
	  break;

	case CASE_LABEL_EXPR:
	  ret = gimplify_case_label_expr (expr_p, pre_p);
	  break;

	case RETURN_EXPR:
	  ret = gimplify_return_expr (*expr_p, pre_p);
	  break;

	case CONSTRUCTOR:
	  /* Don't reduce this in place; let gimplify_init_constructor work its
	     magic.  But if we're just elaborating this for side effects, just
	     gimplify any element that has side-effects.  */
	  if (fallback == fb_none)
	    {
	      unsigned HOST_WIDE_INT ix;
	      constructor_elt *ce;
	      tree temp = NULL_TREE;
	      for (ix = 0;
		   VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (*expr_p),
				ix, ce);
		   ix++)
		if (TREE_SIDE_EFFECTS (ce->value))
		  append_to_statement_list (ce->value, &temp);

	      *expr_p = temp;
	      ret = GS_OK;
	    }
	  /* C99 code may assign to an array in a constructed
	     structure or union, and this has undefined behavior only
	     on execution, so create a temporary if an lvalue is
	     required.  */
	  else if (fallback == fb_lvalue)
	    {
	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
	      mark_addressable (*expr_p);
	    }
	  else
	    ret = GS_ALL_DONE;
	  break;

	  /* The following are special cases that are not handled by the
	     original GIMPLE grammar.  */

	  /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
	     eliminated.  */
	case SAVE_EXPR:
	  ret = gimplify_save_expr (expr_p, pre_p, post_p);
	  break;

	case BIT_FIELD_REF:
	  {
	    enum gimplify_status r0, r1, r2;

	    r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
				post_p, is_gimple_lvalue, fb_either);
	    r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
				post_p, is_gimple_val, fb_rvalue);
	    r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
				post_p, is_gimple_val, fb_rvalue);
	    recalculate_side_effects (*expr_p);

	    /* GS_ERROR is the smallest status; propagate the worst.  */
	    ret = MIN (r0, MIN (r1, r2));
	  }
	  break;

	case NON_LVALUE_EXPR:
	  /* This should have been stripped above.  */
	  gcc_unreachable ();

	case ASM_EXPR:
	  ret = gimplify_asm_expr (expr_p, pre_p, post_p);
	  break;

	case TRY_FINALLY_EXPR:
	case TRY_CATCH_EXPR:
	  {
	    gimple_seq eval, cleanup;
	    gimple try_;

	    eval = cleanup = NULL;
	    gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
	    gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
	    /* Don't create bogus GIMPLE_TRY with empty cleanup.  */
	    if (gimple_seq_empty_p (cleanup))
	      {
		gimple_seq_add_seq (pre_p, eval);
		ret = GS_ALL_DONE;
		break;
	      }
	    try_ = gimple_build_try (eval, cleanup,
				     TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
				     ? GIMPLE_TRY_FINALLY
				     : GIMPLE_TRY_CATCH);
	    if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
	      gimple_try_set_catch_is_cleanup (try_,
					       TRY_CATCH_IS_CLEANUP (*expr_p));
	    gimplify_seq_add_stmt (pre_p, try_);
	    ret = GS_ALL_DONE;
	    break;
	  }

	case CLEANUP_POINT_EXPR:
	  ret = gimplify_cleanup_point_expr (expr_p, pre_p);
	  break;

	case TARGET_EXPR:
	  ret = gimplify_target_expr (expr_p, pre_p, post_p);
	  break;

	case CATCH_EXPR:
	  {
	    gimple c;
	    gimple_seq handler = NULL;
	    gimplify_and_add (CATCH_BODY (*expr_p), &handler);
	    c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
	    gimplify_seq_add_stmt (pre_p, c);
	    ret = GS_ALL_DONE;
	    break;
	  }

	case EH_FILTER_EXPR:
	  {
	    gimple ehf;
	    gimple_seq failure = NULL;

	    gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
	    ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
	    gimple_eh_filter_set_must_not_throw
	      (ehf, EH_FILTER_MUST_NOT_THROW (*expr_p));
	    gimplify_seq_add_stmt (pre_p, ehf);
	    ret = GS_ALL_DONE;
	    break;
	  }

	case CHANGE_DYNAMIC_TYPE_EXPR:
	  {
	    gimple cdt;

	    ret = gimplify_expr (&CHANGE_DYNAMIC_TYPE_LOCATION (*expr_p),
				 pre_p, post_p, is_gimple_reg, fb_lvalue);
	    cdt = gimple_build_cdt (CHANGE_DYNAMIC_TYPE_NEW_TYPE (*expr_p),
				    CHANGE_DYNAMIC_TYPE_LOCATION (*expr_p));
	    gimplify_seq_add_stmt (pre_p, cdt);
	    ret = GS_ALL_DONE;
	  }
	  break;

	case OBJ_TYPE_REF:
	  {
	    enum gimplify_status r0, r1;
	    r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
				post_p, is_gimple_val, fb_rvalue);
	    r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
				post_p, is_gimple_val, fb_rvalue);
	    TREE_SIDE_EFFECTS (*expr_p) = 0;
	    ret = MIN (r0, r1);
	  }
	  break;

	case LABEL_DECL:
	  /* We get here when taking the address of a label.  We mark
	     the label as "forced"; meaning it can never be removed and
	     it is a potential target for any computed goto.  */
	  FORCED_LABEL (*expr_p) = 1;
	  ret = GS_ALL_DONE;
	  break;

	case STATEMENT_LIST:
	  ret = gimplify_statement_list (expr_p, pre_p);
	  break;

	case WITH_SIZE_EXPR:
	  {
	    /* NOTE(review): the size operand must not land on an internal
	       post queue, hence the NULL when POST_P is our own.  */
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
			   post_p == &internal_post ? NULL : post_p,
			   gimple_test_f, fallback);
	    gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
			   is_gimple_val, fb_rvalue);
	  }
	  break;

	case VAR_DECL:
	case PARM_DECL:
	  ret = gimplify_var_or_parm_decl (expr_p);
	  break;

	case RESULT_DECL:
	  /* When within an OpenMP context, notice uses of variables.  */
	  if (gimplify_omp_ctxp)
	    omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
	  ret = GS_ALL_DONE;
	  break;

	case SSA_NAME:
	  /* Allow callbacks into the gimplifier during optimization.  */
	  ret = GS_ALL_DONE;
	  break;

	case OMP_PARALLEL:
	  gimplify_omp_parallel (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OMP_TASK:
	  gimplify_omp_task (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OMP_FOR:
	  ret = gimplify_omp_for (expr_p, pre_p);
	  break;

	case OMP_SECTIONS:
	case OMP_SINGLE:
	  gimplify_omp_workshare (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OMP_SECTION:
	case OMP_MASTER:
	case OMP_ORDERED:
	case OMP_CRITICAL:
	  {
	    gimple_seq body = NULL;
	    gimple g;

	    gimplify_and_add (OMP_BODY (*expr_p), &body);
	    switch (TREE_CODE (*expr_p))
	      {
	      case OMP_SECTION:
		g = gimple_build_omp_section (body);
		break;
	      case OMP_MASTER:
		g = gimple_build_omp_master (body);
		break;
	      case OMP_ORDERED:
		g = gimple_build_omp_ordered (body);
		break;
	      case OMP_CRITICAL:
		g = gimple_build_omp_critical (body,
					       OMP_CRITICAL_NAME (*expr_p));
		break;
	      default:
		gcc_unreachable ();
	      }
	    gimplify_seq_add_stmt (pre_p, g);
	    ret = GS_ALL_DONE;
	    break;
	  }

	case OMP_ATOMIC:
	  ret = gimplify_omp_atomic (expr_p, pre_p);
	  break;

	case POINTER_PLUS_EXPR:
	  /* Convert ((type *)A)+offset into &A->field_of_type_and_offset.
	     The resulting ADDR_EXPR is a gimple immediate, which saves
	     the need for an extra statement.  */
	  if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
	      && (tmp = maybe_fold_offset_to_address
			 (TREE_OPERAND (*expr_p, 0), TREE_OPERAND (*expr_p, 1),
			  TREE_TYPE (*expr_p))))
	    {
	      *expr_p = tmp;
	      break;
	    }
	  /* Convert (void *)&a + 4 into (void *)&a[1].  */
	  if (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == NOP_EXPR
	      && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
	      && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p,
									0),0)))
	      && (tmp = maybe_fold_offset_to_address
			 (TREE_OPERAND (TREE_OPERAND (*expr_p, 0), 0),
			  TREE_OPERAND (*expr_p, 1),
			  TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p, 0),
						   0)))))
	     {
	       *expr_p = fold_convert (TREE_TYPE (*expr_p), tmp);
	       break;
	     }
	  /* FALLTHRU */

	default:
	  switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
	    {
	    case tcc_comparison:
	      /* Handle comparison of objects of non scalar mode aggregates
	         with a call to memcmp.  It would be nice to only have to do
	         this for variable-sized objects, but then we'd have to allow
	         the same nest of reference nodes we allow for MODIFY_EXPR and
	         that's too complex.

		 Compare scalar mode aggregates as scalar mode values.  Using
		 memcmp for them would be very inefficient at best, and is
		 plain wrong if bitfields are involved.  */
		{
		  tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));

		  if (!AGGREGATE_TYPE_P (type))
		    goto expr_2;
		  else if (TYPE_MODE (type) != BLKmode)
		    ret = gimplify_scalar_mode_aggregate_compare (expr_p);
		  else
		    ret = gimplify_variable_sized_compare (expr_p);

		  break;
		}

	    /* If *EXPR_P does not need to be special-cased, handle it
	       according to its class.  */
	    case tcc_unary:
	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
				   post_p, is_gimple_val, fb_rvalue);
	      break;

	    case tcc_binary:
	    expr_2:
	      {
		enum gimplify_status r0, r1;

		r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
		                    post_p, is_gimple_val, fb_rvalue);
		r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
				    post_p, is_gimple_val, fb_rvalue);

		ret = MIN (r0, r1);
		break;
	      }

	    case tcc_declaration:
	    case tcc_constant:
	      ret = GS_ALL_DONE;
	      goto dont_recalculate;

	    default:
	      gcc_assert (TREE_CODE (*expr_p) == TRUTH_AND_EXPR
			  || TREE_CODE (*expr_p) == TRUTH_OR_EXPR
			  || TREE_CODE (*expr_p) == TRUTH_XOR_EXPR);
	      goto expr_2;
	    }

	  recalculate_side_effects (*expr_p);

	dont_recalculate:
	  break;
	}

      /* If we replaced *expr_p, gimplify again.  */
      if (ret == GS_OK && (*expr_p == NULL || *expr_p == save_expr))
	ret = GS_ALL_DONE;
    }
  while (ret == GS_OK);

  /* If we encountered an error_mark somewhere nested inside, either
     stub out the statement or propagate the error back out.  */
  if (ret == GS_ERROR)
    {
      if (is_statement)
	*expr_p = NULL;
      goto out;
    }

  /* This was only valid as a return value from the langhook, which
     we handled.  Make sure it doesn't escape from any other context.  */
  gcc_assert (ret != GS_UNHANDLED);

  if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
    {
      /* We aren't looking for a value, and we don't have a valid
	 statement.  If it doesn't have side-effects, throw it away.  */
      if (!TREE_SIDE_EFFECTS (*expr_p))
	*expr_p = NULL;
      else if (!TREE_THIS_VOLATILE (*expr_p))
	{
	  /* This is probably a _REF that contains something nested that
	     has side effects.  Recurse through the operands to find it.  */
	  enum tree_code code = TREE_CODE (*expr_p);

	  switch (code)
	    {
	    case COMPONENT_REF:
	    case REALPART_EXPR:
	    case IMAGPART_EXPR:
	    case VIEW_CONVERT_EXPR:
	      gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			     gimple_test_f, fallback);
	      break;

	    case ARRAY_REF:
	    case ARRAY_RANGE_REF:
	      gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			     gimple_test_f, fallback);
	      gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
			     gimple_test_f, fallback);
	      break;

	    default:
	       /* Anything else with side-effects must be converted to
		  a valid statement before we get here.  */
	      gcc_unreachable ();
	    }

	  *expr_p = NULL;
	}
      else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
	       && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
	{
	  /* Historically, the compiler has treated a bare reference
	     to a non-BLKmode volatile lvalue as forcing a load.  */
	  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));

	  /* Normally, we do not want to create a temporary for a
	     TREE_ADDRESSABLE type because such a type should not be
	     copied by bitwise-assignment.  However, we make an
	     exception here, as all we are doing here is ensuring that
	     we read the bytes that make up the type.  We use
	     create_tmp_var_raw because create_tmp_var will abort when
	     given a TREE_ADDRESSABLE type.  */
	  tree tmp = create_tmp_var_raw (type, "vol");
	  gimple_add_tmp_var (tmp);
	  gimplify_assign (tmp, *expr_p, pre_p);
	  *expr_p = NULL;
	}
      else
	/* We can't do anything useful with a volatile reference to
	   an incomplete type, so just throw it away.  Likewise for
	   a BLKmode type, since any implicit inner load should
	   already have been turned into an explicit one by the
	   gimplification process.  */
	*expr_p = NULL;
    }

  /* If we are gimplifying at the statement level, we're done.  Tack
     everything together and return.  */
  if (fallback == fb_none || is_statement)
    {
      /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
         it out for GC to reclaim it.  */
      *expr_p = NULL_TREE;

      if (!gimple_seq_empty_p (internal_pre)
	  || !gimple_seq_empty_p (internal_post))
	{
	  gimplify_seq_add_seq (&internal_pre, internal_post);
	  gimplify_seq_add_seq (pre_p, internal_pre);
	}

      /* The result of gimplifying *EXPR_P is going to be the last few
	 statements in *PRE_P and *POST_P.  Add location information
	 to all the statements that were added by the gimplification
	 helpers.  */
      if (!gimple_seq_empty_p (*pre_p))
	annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);

      if (!gimple_seq_empty_p (*post_p))
	annotate_all_with_location_after (*post_p, post_last_gsi,
					  input_location);

      goto out;
    }

#ifdef ENABLE_GIMPLE_CHECKING
  if (*expr_p)
    {
      enum tree_code code = TREE_CODE (*expr_p);
      /* These expressions should already be in gimple IR form.  */
      gcc_assert (code != MODIFY_EXPR
		  && code != ASM_EXPR
		  && code != BIND_EXPR
		  && code != CATCH_EXPR
		  && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
		  && code != EH_FILTER_EXPR
		  && code != GOTO_EXPR
		  && code != LABEL_EXPR
		  && code != LOOP_EXPR
		  && code != RESX_EXPR
		  && code != SWITCH_EXPR
		  && code != TRY_FINALLY_EXPR
		  && code != OMP_CRITICAL
		  && code != OMP_FOR
		  && code != OMP_MASTER
		  && code != OMP_ORDERED
		  && code != OMP_PARALLEL
		  && code != OMP_SECTIONS
		  && code != OMP_SECTION
		  && code != OMP_SINGLE);
    }
#endif

  /* Otherwise we're gimplifying a subexpression, so the resulting
     value is interesting.  If it's a valid operand that matches
     GIMPLE_TEST_F, we're done. Unless we are handling some
     post-effects internally; if that's the case, we need to copy into
     a temporary before adding the post-effects to POST_P.  */
  if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
    goto out;

  /* Otherwise, we need to create a new temporary for the gimplified
     expression.  */

  /* We can't return an lvalue if we have an internal postqueue.  The
     object the lvalue refers to would (probably) be modified by the
     postqueue; we need to copy the value out first, which means an
     rvalue.  */
  if ((fallback & fb_lvalue)
      && gimple_seq_empty_p (internal_post)
      && is_gimple_addressable (*expr_p))
    {
      /* An lvalue will do.  Take the address of the expression, store it
	 in a temporary, and replace the expression with an INDIRECT_REF of
	 that temporary.  */
      tmp = build_fold_addr_expr (*expr_p);
      gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
      *expr_p = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (tmp)), tmp);
    }
  else if ((fallback & fb_rvalue) && is_gimple_formal_tmp_or_call_rhs (*expr_p))
    {
      /* An rvalue will do.  Assign the gimplified expression into a
	 new temporary TMP and replace the original expression with
	 TMP.  First, make sure that the expression has a type so that
	 it can be assigned into a temporary.  */
      gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));

      if (!gimple_seq_empty_p (internal_post) || (fallback & fb_lvalue))
	/* The postqueue might change the value of the expression between
	   the initialization and use of the temporary, so we can't use a
	   formal temp.  FIXME do we care?  */
	*expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
      else
	*expr_p = get_formal_tmp_var (*expr_p, pre_p);

      if (TREE_CODE (*expr_p) != SSA_NAME)
	DECL_GIMPLE_FORMAL_TEMP_P (*expr_p) = 1;
    }
  else
    {
#ifdef ENABLE_GIMPLE_CHECKING
      if (!(fallback & fb_mayfail))
	{
	  fprintf (stderr, "gimplification failed:\n");
	  print_generic_expr (stderr, *expr_p, 0);
	  debug_tree (*expr_p);
	  internal_error ("gimplification failed");
	}
#endif
      gcc_assert (fallback & fb_mayfail);

      /* If this is an asm statement, and the user asked for the
	 impossible, don't die.  Fail and let gimplify_asm_expr
	 issue an error.  */
      ret = GS_ERROR;
      goto out;
    }

  /* Make sure the temporary matches our predicate.  */
  gcc_assert ((*gimple_test_f) (*expr_p));

  if (!gimple_seq_empty_p (internal_post))
    {
      annotate_all_with_location (internal_post, input_location);
      gimplify_seq_add_seq (pre_p, internal_post);
    }

 out:
  input_location = saved_location;
  return ret;
}
7077 | |
/* Look through TYPE for variable-sized objects and gimplify each such
   size that we find.  Add to LIST_P any statements generated.  */

void
gimplify_type_sizes (tree type, gimple_seq *list_p)
{
  tree field, t;

  if (type == NULL || type == error_mark_node)
    return;

  /* We first do the main variant, then copy into any other variants.  */
  type = TYPE_MAIN_VARIANT (type);

  /* Avoid infinite recursion.  The flag is tested before the recursive
     calls below, so self-referential and mutually-referential types
     terminate.  */
  if (TYPE_SIZES_GIMPLIFIED (type))
    return;

  /* Mark the type gimplified BEFORE recursing into component types,
     for the same reason.  */
  TYPE_SIZES_GIMPLIFIED (type) = 1;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      /* Scalar types: the bounds themselves may be variable (e.g. a
	 VLA domain type), so gimplify them and mirror the results into
	 every variant so all variants share the same bound trees.  */
      gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
      gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);

      for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
	{
	  TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
	  TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
	}
      break;

    case ARRAY_TYPE:
      /* These types may not have declarations, so handle them here.  */
      gimplify_type_sizes (TREE_TYPE (type), list_p);
      gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
      /* When not optimizing, ensure VLA bounds aren't removed.  The
	 artificial bound variables would otherwise be invisible to the
	 debugger.  */
      if (!optimize
	  && TYPE_DOMAIN (type)
	  && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
	{
	  t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	  t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* Aggregates: gimplify each field's offset and size, then recurse
	 into the field types themselves.  Non-FIELD_DECL entries on the
	 TYPE_FIELDS chain (e.g. TYPE_DECLs) are skipped.  */
      for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
	    gimplify_type_sizes (TREE_TYPE (field), list_p);
	  }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
	/* We used to recurse on the pointed-to type here, which turned out to
	   be incorrect because its definition might refer to variables not
	   yet initialized at this point if a forward declaration is involved.

	   It was actually useful for anonymous pointed-to types to ensure
	   that the sizes evaluation dominates every possible later use of the
	   values.  Restricting to such types here would be safe since there
	   is no possible forward declaration around, but would introduce an
	   undesirable middle-end semantic to anonymity.  We then defer to
	   front-ends the responsibility of ensuring that the sizes are
	   evaluated both early and late enough, e.g. by attaching artificial
	   type declarations to the tree.  */
      break;

    default:
      break;
    }

  /* Every type has an overall size; gimplify it regardless of code.  */
  gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
  gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);

  /* Share the gimplified sizes with all variants and mark them done so
     a later call on a variant is a no-op.  */
  for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
    {
      TYPE_SIZE (t) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
      TYPE_SIZES_GIMPLIFIED (t) = 1;
    }
}
7176 | |
7177 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P, | |
7178 a size or position, has had all of its SAVE_EXPRs evaluated. | |
7179 We add any required statements to *STMT_P. */ | |
7180 | |
7181 void | |
7182 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p) | |
7183 { | |
7184 tree type, expr = *expr_p; | |
7185 | |
7186 /* We don't do anything if the value isn't there, is constant, or contains | |
7187 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already | |
7188 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier | |
7189 will want to replace it with a new variable, but that will cause problems | |
7190 if this type is from outside the function. It's OK to have that here. */ | |
7191 if (expr == NULL_TREE || TREE_CONSTANT (expr) | |
7192 || TREE_CODE (expr) == VAR_DECL | |
7193 || CONTAINS_PLACEHOLDER_P (expr)) | |
7194 return; | |
7195 | |
7196 type = TREE_TYPE (expr); | |
7197 *expr_p = unshare_expr (expr); | |
7198 | |
7199 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue); | |
7200 expr = *expr_p; | |
7201 | |
7202 /* Verify that we've an exact type match with the original expression. | |
7203 In particular, we do not wish to drop a "sizetype" in favour of a | |
7204 type of similar dimensions. We don't want to pollute the generic | |
7205 type-stripping code with this knowledge because it doesn't matter | |
7206 for the bulk of GENERIC/GIMPLE. It only matters that TYPE_SIZE_UNIT | |
7207 and friends retain their "sizetype-ness". */ | |
7208 if (TREE_TYPE (expr) != type | |
7209 && TREE_CODE (type) == INTEGER_TYPE | |
7210 && TYPE_IS_SIZETYPE (type)) | |
7211 { | |
7212 tree tmp; | |
7213 gimple stmt; | |
7214 | |
7215 *expr_p = create_tmp_var (type, NULL); | |
7216 tmp = build1 (NOP_EXPR, type, expr); | |
7217 stmt = gimplify_assign (*expr_p, tmp, stmt_p); | |
7218 if (EXPR_HAS_LOCATION (expr)) | |
7219 gimple_set_location (stmt, *EXPR_LOCUS (expr)); | |
7220 else | |
7221 gimple_set_location (stmt, input_location); | |
7222 } | |
7223 } | |
7224 | |
7225 | |
/* Gimplify the body of statements pointed to by BODY_P and return a
   GIMPLE_BIND containing the sequence of GIMPLE statements
   corresponding to BODY_P.  FNDECL is the function decl containing
   *BODY_P.  DO_PARMS says whether callee-copied parameters should be
   resolved and their setup statements prepended to the body.  */

gimple
gimplify_body (tree *body_p, tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, seq;
  gimple outer_bind;
  struct gimplify_ctx gctx;

  timevar_push (TV_TREE_GIMPLIFY);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  /* A gimplify context must not already be active; we create the
     outermost one here and verify it is fully torn down on exit.  */
  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context (&gctx);

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (body_p, fndecl);
  unvisit_body (body_p, fndecl);

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = (do_parms) ? gimplify_parameters () : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (body_p, &seq);
  outer_bind = gimple_seq_first_stmt (seq);
  if (!outer_bind)
    {
      /* An empty body still needs a statement to anchor the bind.  */
      outer_bind = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_bind);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_bind) == GIMPLE_BIND
      && gimple_seq_first (seq) == gimple_seq_last (seq))
    ;
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  /* The GENERIC body has been consumed; clear it for the caller.  */
  *body_p = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      gimple_bind_set_body (outer_bind, parm_stmts);
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

#ifdef ENABLE_TYPES_CHECKING
  if (!errorcount && !sorrycount)
    verify_types_in_gimple_seq (gimple_bind_body (outer_bind));
#endif

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}
7303 | |
/* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
   node for the function we want to gimplify.

   Returns the sequence of GIMPLE statements corresponding to the body
   of FNDECL.  */

void
gimplify_function_tree (tree fndecl)
{
  tree oldfn, parm, ret;
  gimple_seq seq;
  gimple bind;

  /* Make FNDECL the current function for the duration of the pass;
     restored (with pop_cfun) before returning.  */
  oldfn = current_function_decl;
  current_function_decl = fndecl;
  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = TREE_CHAIN (parm))
    {
      /* Preliminarily mark non-addressed complex variables as eligible
         for promotion to gimple registers.  We'll transform their uses
         as we find them.  */
      if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
          && !TREE_THIS_VOLATILE (parm)
          && !needs_to_live_in_memory (parm))
        DECL_GIMPLE_REG_P (parm) = 1;
    }

  /* The result decl gets the same register-promotion treatment as the
     parameters above.  */
  ret = DECL_RESULT (fndecl);
  if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
      && !needs_to_live_in_memory (ret))
    DECL_GIMPLE_REG_P (ret) = 1;

  bind = gimplify_body (&DECL_SAVED_TREE (fndecl), fndecl, true);

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = gimple_seq_alloc ();
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      tree x;
      gimple new_bind;
      gimple tf;
      gimple_seq cleanup = NULL, body = NULL;

      /* Exit hook goes in the FINALLY part so it runs on every path
	 out of the function.  */
      x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_EXIT];
      gimplify_seq_add_stmt (&cleanup, gimple_build_call (x, 0));
      tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);

      /* Entry hook runs first, before the wrapped body.  */
      x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_ENTER];
      gimplify_seq_add_stmt (&body, gimple_build_call (x, 0));
      gimplify_seq_add_stmt (&body, tf);
      new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
      /* Clear the block for BIND, since it is no longer directly inside
         the function, but within a try block.  */
      gimple_bind_set_block (bind, NULL);

      /* Replace the current function body with the body
         wrapped in the try/finally TF.  */
      seq = gimple_seq_alloc ();
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  DECL_SAVED_TREE (fndecl) = NULL_TREE;

  current_function_decl = oldfn;
  pop_cfun ();
}
7387 | |
7388 | |
/* Some transformations like inlining may invalidate the GIMPLE form
   for operands.  This function traverses all the operands in STMT and
   gimplifies anything that is not a valid gimple operand.  Any new
   GIMPLE statements are inserted before *GSI_P.  */

void
gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
{
  size_t i, num_ops;
  tree orig_lhs = NULL_TREE, lhs, t;
  gimple_seq pre = NULL;       /* Statements to insert before STMT.  */
  gimple post_stmt = NULL;     /* Copy-back assignment to insert after.  */
  struct gimplify_ctx gctx;

  push_gimplify_context (&gctx);
  gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);

  /* Statement kinds with operand positions that need specific gimple
     predicates are handled specially; everything else falls into the
     generic operand walk in the default case.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      gimplify_expr (gimple_cond_lhs_ptr (stmt), &pre, NULL,
		     is_gimple_val, fb_rvalue);
      gimplify_expr (gimple_cond_rhs_ptr (stmt), &pre, NULL,
		     is_gimple_val, fb_rvalue);
      break;
    case GIMPLE_SWITCH:
      gimplify_expr (gimple_switch_index_ptr (stmt), &pre, NULL,
		     is_gimple_val, fb_rvalue);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      gimplify_expr (gimple_omp_atomic_load_rhs_ptr (stmt), &pre, NULL,
		     is_gimple_val, fb_rvalue);
      break;
    case GIMPLE_ASM:
      {
	size_t i, noutputs = gimple_asm_noutputs (stmt);
	const char *constraint, **oconstraints;
	bool allows_mem, allows_reg, is_inout;

	oconstraints
	  = (const char **) alloca ((noutputs) * sizeof (const char *));
	/* Outputs are lvalues; in-out operands get the stricter
	   is_gimple_min_lval predicate.  */
	for (i = 0; i < noutputs; i++)
	  {
	    tree op = gimple_asm_output_op (stmt, i);
	    constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
	    oconstraints[i] = constraint;
	    parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
				     &allows_reg, &is_inout);
	    gimplify_expr (&TREE_VALUE (op), &pre, NULL,
			   is_inout ? is_gimple_min_lval : is_gimple_lvalue,
			   fb_lvalue | fb_mayfail);
	  }
	/* Inputs: memory-only operands are gimplified as lvalues,
	   everything else as asm rvalues.  */
	for (i = 0; i < gimple_asm_ninputs (stmt); i++)
	  {
	    tree op = gimple_asm_input_op (stmt, i);
	    constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (op))) && allows_mem)
	      allows_reg = 0;
	    if (!allows_reg && allows_mem)
	      gimplify_expr (&TREE_VALUE (op), &pre, NULL,
			     is_gimple_lvalue, fb_lvalue | fb_mayfail);
	    else
	      gimplify_expr (&TREE_VALUE (op), &pre, NULL,
			     is_gimple_asm_val, fb_rvalue);
	  }
      }
      break;
    default:
      /* NOTE: We start gimplifying operands from last to first to
	 make sure that side-effects on the RHS of calls, assignments
	 and ASMs are executed before the LHS.  The ordering is not
	 important for other statements.  */
      num_ops = gimple_num_ops (stmt);
      orig_lhs = gimple_get_lhs (stmt);
      for (i = num_ops; i > 0; i--)
	{
	  tree op = gimple_op (stmt, i - 1);
	  if (op == NULL_TREE)
	    continue;
	  /* Operand 0 of a call or assignment is its LHS.  */
	  if (i == 1 && (is_gimple_call (stmt) || is_gimple_assign (stmt)))
	    gimplify_expr (&op, &pre, NULL, is_gimple_lvalue, fb_lvalue);
	  /* Single-RHS assignment: the RHS predicate depends on the LHS.  */
	  else if (i == 2
		   && is_gimple_assign (stmt)
		   && num_ops == 2
		   && get_gimple_rhs_class (gimple_expr_code (stmt))
		      == GIMPLE_SINGLE_RHS)
	    gimplify_expr (&op, &pre, NULL,
			   rhs_predicate_for (gimple_assign_lhs (stmt)),
			   fb_rvalue);
	  /* Operand 1 of a call is the callee address.  */
	  else if (i == 2 && is_gimple_call (stmt))
	    {
	      if (TREE_CODE (op) == FUNCTION_DECL)
		continue;
	      gimplify_expr (&op, &pre, NULL, is_gimple_call_addr, fb_rvalue);
	    }
	  else
	    gimplify_expr (&op, &pre, NULL, is_gimple_val, fb_rvalue);
	  gimple_set_op (stmt, i - 1, op);
	}

      lhs = gimple_get_lhs (stmt);
      /* If the LHS changed it in a way that requires a simple RHS,
	 create temporary.  */
      if (lhs && !is_gimple_formal_tmp_var (lhs))
	{
	  bool need_temp = false;

	  if (is_gimple_assign (stmt)
	      && num_ops == 2
	      && get_gimple_rhs_class (gimple_expr_code (stmt))
		 == GIMPLE_SINGLE_RHS)
	    gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
			   rhs_predicate_for (gimple_assign_lhs (stmt)),
			   fb_rvalue);
	  else if (is_gimple_reg (lhs))
	    {
	      if (is_gimple_reg_type (TREE_TYPE (lhs)))
		{
		  /* A call that isn't const/pure (or is looping
		     const-or-pure) may have side effects, so its result
		     must go through a temporary.  */
		  if (is_gimple_call (stmt))
		    {
		      i = gimple_call_flags (stmt);
		      if ((i & ECF_LOOPING_CONST_OR_PURE)
			  || !(i & (ECF_CONST | ECF_PURE)))
			need_temp = true;
		    }
		  if (stmt_can_throw_internal (stmt))
		    need_temp = true;
		}
	    }
	  else
	    {
	      if (is_gimple_reg_type (TREE_TYPE (lhs)))
		need_temp = true;
	      else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
		{
		  if (is_gimple_call (stmt))
		    {
		      tree fndecl = gimple_call_fndecl (stmt);

		      /* A value returned in registers (and not by
			 invisible reference) needs a temporary.  */
		      if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
			  && !(fndecl && DECL_RESULT (fndecl)
			       && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
			need_temp = true;
		    }
		  else
		    need_temp = true;
		}
	    }
	  if (need_temp)
	    {
	      tree temp = create_tmp_var (TREE_TYPE (lhs), NULL);

	      DECL_GIMPLE_FORMAL_TEMP_P (temp) = 1;
	      if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (lhs)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (temp) = 1;
	      /* Propagate any restrict base from the original LHS onto
		 the new temporary so alias information is preserved.  */
	      if (TREE_CODE (orig_lhs) == SSA_NAME)
		orig_lhs = SSA_NAME_VAR (orig_lhs);
	      if (TREE_CODE (orig_lhs) == VAR_DECL
		  && DECL_BASED_ON_RESTRICT_P (orig_lhs))
		{
		  DECL_BASED_ON_RESTRICT_P (temp) = 1;
		  SET_DECL_RESTRICT_BASE (temp,
					  DECL_GET_RESTRICT_BASE (orig_lhs));
		}

	      /* STMT now defines TEMP; the old LHS is assigned from it
		 in a follow-up statement inserted after STMT.  */
	      if (gimple_in_ssa_p (cfun))
		temp = make_ssa_name (temp, NULL);
	      gimple_set_lhs (stmt, temp);
	      post_stmt = gimple_build_assign (lhs, temp);
	      if (TREE_CODE (lhs) == SSA_NAME)
		SSA_NAME_DEF_STMT (lhs) = post_stmt;
	    }
	}
      break;
    }

  /* Register any temporaries created during gimplification.  */
  if (gimple_referenced_vars (cfun))
    for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
      add_referenced_var (t);

  /* Emit the collected statements: PRE before STMT (marking symbols
     for renaming when in SSA form), the copy-back after it.  */
  if (!gimple_seq_empty_p (pre))
    {
      if (gimple_in_ssa_p (cfun))
	{
	  gimple_stmt_iterator i;

	  for (i = gsi_start (pre); !gsi_end_p (i); gsi_next (&i))
	    mark_symbols_for_renaming (gsi_stmt (i));
	}
      gsi_insert_seq_before (gsi_p, pre, GSI_SAME_STMT);
    }
  if (post_stmt)
    gsi_insert_after (gsi_p, post_stmt, GSI_NEW_STMT);

  pop_gimplify_context (NULL);
}
7588 | |
7589 | |
7590 /* Expands EXPR to list of gimple statements STMTS. If SIMPLE is true, | |
7591 force the result to be either ssa_name or an invariant, otherwise | |
7592 just force it to be a rhs expression. If VAR is not NULL, make the | |
7593 base variable of the final destination be VAR if suitable. */ | |
7594 | |
7595 tree | |
7596 force_gimple_operand (tree expr, gimple_seq *stmts, bool simple, tree var) | |
7597 { | |
7598 tree t; | |
7599 enum gimplify_status ret; | |
7600 gimple_predicate gimple_test_f; | |
7601 struct gimplify_ctx gctx; | |
7602 | |
7603 *stmts = NULL; | |
7604 | |
7605 if (is_gimple_val (expr)) | |
7606 return expr; | |
7607 | |
7608 gimple_test_f = simple ? is_gimple_val : is_gimple_reg_rhs; | |
7609 | |
7610 push_gimplify_context (&gctx); | |
7611 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun); | |
7612 gimplify_ctxp->allow_rhs_cond_expr = true; | |
7613 | |
7614 if (var) | |
7615 expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr); | |
7616 | |
7617 if (TREE_CODE (expr) != MODIFY_EXPR | |
7618 && TREE_TYPE (expr) == void_type_node) | |
7619 { | |
7620 gimplify_and_add (expr, stmts); | |
7621 expr = NULL_TREE; | |
7622 } | |
7623 else | |
7624 { | |
7625 ret = gimplify_expr (&expr, stmts, NULL, gimple_test_f, fb_rvalue); | |
7626 gcc_assert (ret != GS_ERROR); | |
7627 } | |
7628 | |
7629 if (gimple_referenced_vars (cfun)) | |
7630 for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t)) | |
7631 add_referenced_var (t); | |
7632 | |
7633 pop_gimplify_context (NULL); | |
7634 | |
7635 return expr; | |
7636 } | |
7637 | |
7638 /* Invokes force_gimple_operand for EXPR with parameters SIMPLE_P and VAR. If | |
7639 some statements are produced, emits them at GSI. If BEFORE is true. | |
7640 the statements are appended before GSI, otherwise they are appended after | |
7641 it. M specifies the way GSI moves after insertion (GSI_SAME_STMT or | |
7642 GSI_CONTINUE_LINKING are the usual values). */ | |
7643 | |
7644 tree | |
7645 force_gimple_operand_gsi (gimple_stmt_iterator *gsi, tree expr, | |
7646 bool simple_p, tree var, bool before, | |
7647 enum gsi_iterator_update m) | |
7648 { | |
7649 gimple_seq stmts; | |
7650 | |
7651 expr = force_gimple_operand (expr, &stmts, simple_p, var); | |
7652 | |
7653 if (!gimple_seq_empty_p (stmts)) | |
7654 { | |
7655 if (gimple_in_ssa_p (cfun)) | |
7656 { | |
7657 gimple_stmt_iterator i; | |
7658 | |
7659 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i)) | |
7660 mark_symbols_for_renaming (gsi_stmt (i)); | |
7661 } | |
7662 | |
7663 if (before) | |
7664 gsi_insert_seq_before (gsi, stmts, m); | |
7665 else | |
7666 gsi_insert_seq_after (gsi, stmts, m); | |
7667 } | |
7668 | |
7669 return expr; | |
7670 } | |
7671 | |
7672 #include "gt-gimplify.h" |