Mercurial > hg > CbC > CbC_gcc
comparison gcc/gimple.c @ 55:77e2b8dfacca gcc-4.4.5
update it from 4.4.3 to 4.5.0
author | ryoma <e075725@ie.u-ryukyu.ac.jp> |
---|---|
date | Fri, 12 Feb 2010 23:39:51 +0900 |
parents | a06113de4d67 |
children | 326d9e06c2e3 b7f97abdc517 |
comparison
equal
deleted
inserted
replaced
52:c156f1bd5cd9 | 55:77e2b8dfacca |
---|---|
1 /* Gimple IR support functions. | 1 /* Gimple IR support functions. |
2 | 2 |
3 Copyright 2007, 2008 Free Software Foundation, Inc. | 3 Copyright 2007, 2008, 2009 Free Software Foundation, Inc. |
4 Contributed by Aldy Hernandez <aldyh@redhat.com> | 4 Contributed by Aldy Hernandez <aldyh@redhat.com> |
5 | 5 |
6 This file is part of GCC. | 6 This file is part of GCC. |
7 | 7 |
8 GCC is free software; you can redistribute it and/or modify it under | 8 GCC is free software; you can redistribute it and/or modify it under |
21 | 21 |
22 #include "config.h" | 22 #include "config.h" |
23 #include "system.h" | 23 #include "system.h" |
24 #include "coretypes.h" | 24 #include "coretypes.h" |
25 #include "tm.h" | 25 #include "tm.h" |
26 #include "target.h" | |
26 #include "tree.h" | 27 #include "tree.h" |
27 #include "ggc.h" | 28 #include "ggc.h" |
28 #include "errors.h" | |
29 #include "hard-reg-set.h" | 29 #include "hard-reg-set.h" |
30 #include "basic-block.h" | 30 #include "basic-block.h" |
31 #include "gimple.h" | 31 #include "gimple.h" |
32 #include "toplev.h" | |
32 #include "diagnostic.h" | 33 #include "diagnostic.h" |
33 #include "tree-flow.h" | 34 #include "tree-flow.h" |
34 #include "value-prof.h" | 35 #include "value-prof.h" |
35 #include "flags.h" | 36 #include "flags.h" |
36 | 37 #include "alias.h" |
37 #define DEFGSCODE(SYM, NAME, STRUCT) NAME, | 38 #include "demangle.h" |
39 | |
40 /* Global type table. FIXME lto, it should be possible to re-use some | |
41 of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup, | |
42 etc), but those assume that types were built with the various | |
43 build_*_type routines which is not the case with the streamer. */ | |
44 static htab_t gimple_types; | |
45 static struct pointer_map_t *type_hash_cache; | |
46 | |
47 /* Global type comparison cache. */ | |
48 static htab_t gtc_visited; | |
49 static struct obstack gtc_ob; | |
50 | |
51 /* All the tuples have their operand vector (if present) at the very bottom | |
52 of the structure. Therefore, the offset required to find the | |
53 operands vector the size of the structure minus the size of the 1 | |
54 element tree array at the end (see gimple_ops). */ | |
55 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \ | |
56 (HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0), | |
57 EXPORTED_CONST size_t gimple_ops_offset_[] = { | |
58 #include "gsstruct.def" | |
59 }; | |
60 #undef DEFGSSTRUCT | |
61 | |
62 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof(struct STRUCT), | |
63 static const size_t gsstruct_code_size[] = { | |
64 #include "gsstruct.def" | |
65 }; | |
66 #undef DEFGSSTRUCT | |
67 | |
68 #define DEFGSCODE(SYM, NAME, GSSCODE) NAME, | |
38 const char *const gimple_code_name[] = { | 69 const char *const gimple_code_name[] = { |
39 #include "gimple.def" | 70 #include "gimple.def" |
40 }; | 71 }; |
41 #undef DEFGSCODE | 72 #undef DEFGSCODE |
42 | 73 |
43 /* All the tuples have their operand vector at the very bottom | 74 #define DEFGSCODE(SYM, NAME, GSSCODE) GSSCODE, |
44 of the structure. Therefore, the offset required to find the | 75 EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = { |
45 operands vector the size of the structure minus the size of the 1 | |
46 element tree array at the end (see gimple_ops). */ | |
47 #define DEFGSCODE(SYM, NAME, STRUCT) (sizeof (STRUCT) - sizeof (tree)), | |
48 const size_t gimple_ops_offset_[] = { | |
49 #include "gimple.def" | 76 #include "gimple.def" |
50 }; | 77 }; |
51 #undef DEFGSCODE | 78 #undef DEFGSCODE |
52 | 79 |
53 #ifdef GATHER_STATISTICS | 80 #ifdef GATHER_STATISTICS |
86 gimple_set_code (gimple g, enum gimple_code code) | 113 gimple_set_code (gimple g, enum gimple_code code) |
87 { | 114 { |
88 g->gsbase.code = code; | 115 g->gsbase.code = code; |
89 } | 116 } |
90 | 117 |
91 | |
92 /* Return the GSS_* identifier for the given GIMPLE statement CODE. */ | |
93 | |
94 static enum gimple_statement_structure_enum | |
95 gss_for_code (enum gimple_code code) | |
96 { | |
97 switch (code) | |
98 { | |
99 case GIMPLE_ASSIGN: | |
100 case GIMPLE_CALL: | |
101 case GIMPLE_RETURN: return GSS_WITH_MEM_OPS; | |
102 case GIMPLE_COND: | |
103 case GIMPLE_GOTO: | |
104 case GIMPLE_LABEL: | |
105 case GIMPLE_CHANGE_DYNAMIC_TYPE: | |
106 case GIMPLE_SWITCH: return GSS_WITH_OPS; | |
107 case GIMPLE_ASM: return GSS_ASM; | |
108 case GIMPLE_BIND: return GSS_BIND; | |
109 case GIMPLE_CATCH: return GSS_CATCH; | |
110 case GIMPLE_EH_FILTER: return GSS_EH_FILTER; | |
111 case GIMPLE_NOP: return GSS_BASE; | |
112 case GIMPLE_PHI: return GSS_PHI; | |
113 case GIMPLE_RESX: return GSS_RESX; | |
114 case GIMPLE_TRY: return GSS_TRY; | |
115 case GIMPLE_WITH_CLEANUP_EXPR: return GSS_WCE; | |
116 case GIMPLE_OMP_CRITICAL: return GSS_OMP_CRITICAL; | |
117 case GIMPLE_OMP_FOR: return GSS_OMP_FOR; | |
118 case GIMPLE_OMP_MASTER: | |
119 case GIMPLE_OMP_ORDERED: | |
120 case GIMPLE_OMP_SECTION: return GSS_OMP; | |
121 case GIMPLE_OMP_RETURN: | |
122 case GIMPLE_OMP_SECTIONS_SWITCH: return GSS_BASE; | |
123 case GIMPLE_OMP_CONTINUE: return GSS_OMP_CONTINUE; | |
124 case GIMPLE_OMP_PARALLEL: return GSS_OMP_PARALLEL; | |
125 case GIMPLE_OMP_TASK: return GSS_OMP_TASK; | |
126 case GIMPLE_OMP_SECTIONS: return GSS_OMP_SECTIONS; | |
127 case GIMPLE_OMP_SINGLE: return GSS_OMP_SINGLE; | |
128 case GIMPLE_OMP_ATOMIC_LOAD: return GSS_OMP_ATOMIC_LOAD; | |
129 case GIMPLE_OMP_ATOMIC_STORE: return GSS_OMP_ATOMIC_STORE; | |
130 case GIMPLE_PREDICT: return GSS_BASE; | |
131 default: gcc_unreachable (); | |
132 } | |
133 } | |
134 | |
135 | |
136 /* Return the number of bytes needed to hold a GIMPLE statement with | 118 /* Return the number of bytes needed to hold a GIMPLE statement with |
137 code CODE. */ | 119 code CODE. */ |
138 | 120 |
139 static size_t | 121 static inline size_t |
140 gimple_size (enum gimple_code code) | 122 gimple_size (enum gimple_code code) |
141 { | 123 { |
142 enum gimple_statement_structure_enum gss = gss_for_code (code); | 124 return gsstruct_code_size[gss_for_code (code)]; |
143 | 125 } |
144 if (gss == GSS_WITH_OPS) | |
145 return sizeof (struct gimple_statement_with_ops); | |
146 else if (gss == GSS_WITH_MEM_OPS) | |
147 return sizeof (struct gimple_statement_with_memory_ops); | |
148 | |
149 switch (code) | |
150 { | |
151 case GIMPLE_ASM: | |
152 return sizeof (struct gimple_statement_asm); | |
153 case GIMPLE_NOP: | |
154 return sizeof (struct gimple_statement_base); | |
155 case GIMPLE_BIND: | |
156 return sizeof (struct gimple_statement_bind); | |
157 case GIMPLE_CATCH: | |
158 return sizeof (struct gimple_statement_catch); | |
159 case GIMPLE_EH_FILTER: | |
160 return sizeof (struct gimple_statement_eh_filter); | |
161 case GIMPLE_TRY: | |
162 return sizeof (struct gimple_statement_try); | |
163 case GIMPLE_RESX: | |
164 return sizeof (struct gimple_statement_resx); | |
165 case GIMPLE_OMP_CRITICAL: | |
166 return sizeof (struct gimple_statement_omp_critical); | |
167 case GIMPLE_OMP_FOR: | |
168 return sizeof (struct gimple_statement_omp_for); | |
169 case GIMPLE_OMP_PARALLEL: | |
170 return sizeof (struct gimple_statement_omp_parallel); | |
171 case GIMPLE_OMP_TASK: | |
172 return sizeof (struct gimple_statement_omp_task); | |
173 case GIMPLE_OMP_SECTION: | |
174 case GIMPLE_OMP_MASTER: | |
175 case GIMPLE_OMP_ORDERED: | |
176 return sizeof (struct gimple_statement_omp); | |
177 case GIMPLE_OMP_RETURN: | |
178 return sizeof (struct gimple_statement_base); | |
179 case GIMPLE_OMP_CONTINUE: | |
180 return sizeof (struct gimple_statement_omp_continue); | |
181 case GIMPLE_OMP_SECTIONS: | |
182 return sizeof (struct gimple_statement_omp_sections); | |
183 case GIMPLE_OMP_SECTIONS_SWITCH: | |
184 return sizeof (struct gimple_statement_base); | |
185 case GIMPLE_OMP_SINGLE: | |
186 return sizeof (struct gimple_statement_omp_single); | |
187 case GIMPLE_OMP_ATOMIC_LOAD: | |
188 return sizeof (struct gimple_statement_omp_atomic_load); | |
189 case GIMPLE_OMP_ATOMIC_STORE: | |
190 return sizeof (struct gimple_statement_omp_atomic_store); | |
191 case GIMPLE_WITH_CLEANUP_EXPR: | |
192 return sizeof (struct gimple_statement_wce); | |
193 case GIMPLE_CHANGE_DYNAMIC_TYPE: | |
194 return sizeof (struct gimple_statement_with_ops); | |
195 case GIMPLE_PREDICT: | |
196 return sizeof (struct gimple_statement_base); | |
197 default: | |
198 break; | |
199 } | |
200 | |
201 gcc_unreachable (); | |
202 } | |
203 | |
204 | 126 |
205 /* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS | 127 /* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS |
206 operands. */ | 128 operands. */ |
207 | 129 |
208 #define gimple_alloc(c, n) gimple_alloc_stat (c, n MEM_STAT_INFO) | 130 gimple |
209 static gimple | |
210 gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL) | 131 gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL) |
211 { | 132 { |
212 size_t size; | 133 size_t size; |
213 gimple stmt; | 134 gimple stmt; |
214 | 135 |
248 | 169 |
249 | 170 |
250 | 171 |
251 /* Build a tuple with operands. CODE is the statement to build (which | 172 /* Build a tuple with operands. CODE is the statement to build (which |
252 must be one of the GIMPLE_WITH_OPS tuples). SUBCODE is the sub-code | 173 must be one of the GIMPLE_WITH_OPS tuples). SUBCODE is the sub-code |
253 for the new tuple. NUM_OPS is the number of operands to allocate. */ | 174 for the new tuple. NUM_OPS is the number of operands to allocate. */ |
254 | 175 |
255 #define gimple_build_with_ops(c, s, n) \ | 176 #define gimple_build_with_ops(c, s, n) \ |
256 gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO) | 177 gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO) |
257 | 178 |
258 static gimple | 179 static gimple |
259 gimple_build_with_ops_stat (enum gimple_code code, enum tree_code subcode, | 180 gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode, |
260 unsigned num_ops MEM_STAT_DECL) | 181 unsigned num_ops MEM_STAT_DECL) |
261 { | 182 { |
262 gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT); | 183 gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT); |
263 gimple_set_subcode (s, subcode); | 184 gimple_set_subcode (s, subcode); |
264 | 185 |
269 /* Build a GIMPLE_RETURN statement returning RETVAL. */ | 190 /* Build a GIMPLE_RETURN statement returning RETVAL. */ |
270 | 191 |
271 gimple | 192 gimple |
272 gimple_build_return (tree retval) | 193 gimple_build_return (tree retval) |
273 { | 194 { |
274 gimple s = gimple_build_with_ops (GIMPLE_RETURN, 0, 1); | 195 gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1); |
275 if (retval) | 196 if (retval) |
276 gimple_return_set_retval (s, retval); | 197 gimple_return_set_retval (s, retval); |
277 return s; | 198 return s; |
278 } | 199 } |
279 | 200 |
282 GIMPLE_CALL statement to function FN with NARGS arguments. */ | 203 GIMPLE_CALL statement to function FN with NARGS arguments. */ |
283 | 204 |
284 static inline gimple | 205 static inline gimple |
285 gimple_build_call_1 (tree fn, unsigned nargs) | 206 gimple_build_call_1 (tree fn, unsigned nargs) |
286 { | 207 { |
287 gimple s = gimple_build_with_ops (GIMPLE_CALL, 0, nargs + 3); | 208 gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3); |
288 if (TREE_CODE (fn) == FUNCTION_DECL) | 209 if (TREE_CODE (fn) == FUNCTION_DECL) |
289 fn = build_fold_addr_expr (fn); | 210 fn = build_fold_addr_expr (fn); |
290 gimple_set_op (s, 1, fn); | 211 gimple_set_op (s, 1, fn); |
291 return s; | 212 return s; |
292 } | 213 } |
358 gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t)); | 279 gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t)); |
359 gimple_call_set_cannot_inline (call, CALL_CANNOT_INLINE_P (t)); | 280 gimple_call_set_cannot_inline (call, CALL_CANNOT_INLINE_P (t)); |
360 gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t)); | 281 gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t)); |
361 gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t)); | 282 gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t)); |
362 gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t)); | 283 gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t)); |
284 gimple_set_no_warning (call, TREE_NO_WARNING (t)); | |
363 | 285 |
364 return call; | 286 return call; |
365 } | 287 } |
366 | 288 |
367 | 289 |
426 gimple p; | 348 gimple p; |
427 | 349 |
428 /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the | 350 /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the |
429 code). */ | 351 code). */ |
430 num_ops = get_gimple_rhs_num_ops (subcode) + 1; | 352 num_ops = get_gimple_rhs_num_ops (subcode) + 1; |
431 | 353 |
432 p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, subcode, num_ops | 354 p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops |
433 PASS_MEM_STAT); | 355 PASS_MEM_STAT); |
434 gimple_assign_set_lhs (p, lhs); | 356 gimple_assign_set_lhs (p, lhs); |
435 gimple_assign_set_rhs1 (p, op1); | 357 gimple_assign_set_rhs1 (p, op1); |
436 if (op2) | 358 if (op2) |
437 { | 359 { |
449 ungimplified trees in DST or SRC, in which case they will be | 371 ungimplified trees in DST or SRC, in which case they will be |
450 converted to a gimple operand if necessary. | 372 converted to a gimple operand if necessary. |
451 | 373 |
452 This function returns the newly created GIMPLE_ASSIGN tuple. */ | 374 This function returns the newly created GIMPLE_ASSIGN tuple. */ |
453 | 375 |
454 inline gimple | 376 gimple |
455 gimplify_assign (tree dst, tree src, gimple_seq *seq_p) | 377 gimplify_assign (tree dst, tree src, gimple_seq *seq_p) |
456 { | 378 { |
457 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src); | 379 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src); |
458 gimplify_and_add (t, seq_p); | 380 gimplify_and_add (t, seq_p); |
459 ggc_free (t); | 381 ggc_free (t); |
460 return gimple_seq_last_stmt (*seq_p); | 382 return gimple_seq_last_stmt (*seq_p); |
461 } | 383 } |
487 | 409 |
488 void | 410 void |
489 gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p, | 411 gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p, |
490 tree *lhs_p, tree *rhs_p) | 412 tree *lhs_p, tree *rhs_p) |
491 { | 413 { |
414 location_t loc = EXPR_LOCATION (cond); | |
492 gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison | 415 gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison |
493 || TREE_CODE (cond) == TRUTH_NOT_EXPR | 416 || TREE_CODE (cond) == TRUTH_NOT_EXPR |
494 || is_gimple_min_invariant (cond) | 417 || is_gimple_min_invariant (cond) |
495 || SSA_VAR_P (cond)); | 418 || SSA_VAR_P (cond)); |
496 | 419 |
499 /* Canonicalize conditionals of the form 'if (!VAL)'. */ | 422 /* Canonicalize conditionals of the form 'if (!VAL)'. */ |
500 if (*code_p == TRUTH_NOT_EXPR) | 423 if (*code_p == TRUTH_NOT_EXPR) |
501 { | 424 { |
502 *code_p = EQ_EXPR; | 425 *code_p = EQ_EXPR; |
503 gcc_assert (*lhs_p && *rhs_p == NULL_TREE); | 426 gcc_assert (*lhs_p && *rhs_p == NULL_TREE); |
504 *rhs_p = fold_convert (TREE_TYPE (*lhs_p), integer_zero_node); | 427 *rhs_p = fold_convert_loc (loc, TREE_TYPE (*lhs_p), integer_zero_node); |
505 } | 428 } |
506 /* Canonicalize conditionals of the form 'if (VAL)' */ | 429 /* Canonicalize conditionals of the form 'if (VAL)' */ |
507 else if (TREE_CODE_CLASS (*code_p) != tcc_comparison) | 430 else if (TREE_CODE_CLASS (*code_p) != tcc_comparison) |
508 { | 431 { |
509 *code_p = NE_EXPR; | 432 *code_p = NE_EXPR; |
510 gcc_assert (*lhs_p && *rhs_p == NULL_TREE); | 433 gcc_assert (*lhs_p && *rhs_p == NULL_TREE); |
511 *rhs_p = fold_convert (TREE_TYPE (*lhs_p), integer_zero_node); | 434 *rhs_p = fold_convert_loc (loc, TREE_TYPE (*lhs_p), integer_zero_node); |
512 } | 435 } |
513 } | 436 } |
514 | 437 |
515 | 438 |
516 /* Build a GIMPLE_COND statement from the conditional expression tree | 439 /* Build a GIMPLE_COND statement from the conditional expression tree |
542 /* Build a GIMPLE_LABEL statement for LABEL. */ | 465 /* Build a GIMPLE_LABEL statement for LABEL. */ |
543 | 466 |
544 gimple | 467 gimple |
545 gimple_build_label (tree label) | 468 gimple_build_label (tree label) |
546 { | 469 { |
547 gimple p = gimple_build_with_ops (GIMPLE_LABEL, 0, 1); | 470 gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1); |
548 gimple_label_set_label (p, label); | 471 gimple_label_set_label (p, label); |
549 return p; | 472 return p; |
550 } | 473 } |
551 | 474 |
552 /* Build a GIMPLE_GOTO statement to label DEST. */ | 475 /* Build a GIMPLE_GOTO statement to label DEST. */ |
553 | 476 |
554 gimple | 477 gimple |
555 gimple_build_goto (tree dest) | 478 gimple_build_goto (tree dest) |
556 { | 479 { |
557 gimple p = gimple_build_with_ops (GIMPLE_GOTO, 0, 1); | 480 gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1); |
558 gimple_goto_set_dest (p, dest); | 481 gimple_goto_set_dest (p, dest); |
559 return p; | 482 return p; |
560 } | 483 } |
561 | 484 |
562 | 485 |
563 /* Build a GIMPLE_NOP statement. */ | 486 /* Build a GIMPLE_NOP statement. */ |
564 | 487 |
565 gimple | 488 gimple |
566 gimple_build_nop (void) | 489 gimple_build_nop (void) |
567 { | 490 { |
568 return gimple_alloc (GIMPLE_NOP, 0); | 491 return gimple_alloc (GIMPLE_NOP, 0); |
569 } | 492 } |
570 | 493 |
592 NOUTPUT is the number of register outputs. | 515 NOUTPUT is the number of register outputs. |
593 NCLOBBERS is the number of clobbered registers. | 516 NCLOBBERS is the number of clobbered registers. |
594 */ | 517 */ |
595 | 518 |
596 static inline gimple | 519 static inline gimple |
597 gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs, | 520 gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs, |
598 unsigned nclobbers) | 521 unsigned nclobbers, unsigned nlabels) |
599 { | 522 { |
600 gimple p; | 523 gimple p; |
601 int size = strlen (string); | 524 int size = strlen (string); |
602 | 525 |
603 p = gimple_build_with_ops (GIMPLE_ASM, 0, ninputs + noutputs + nclobbers); | 526 /* ASMs with labels cannot have outputs. This should have been |
527 enforced by the front end. */ | |
528 gcc_assert (nlabels == 0 || noutputs == 0); | |
529 | |
530 p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK, | |
531 ninputs + noutputs + nclobbers + nlabels); | |
604 | 532 |
605 p->gimple_asm.ni = ninputs; | 533 p->gimple_asm.ni = ninputs; |
606 p->gimple_asm.no = noutputs; | 534 p->gimple_asm.no = noutputs; |
607 p->gimple_asm.nc = nclobbers; | 535 p->gimple_asm.nc = nclobbers; |
536 p->gimple_asm.nl = nlabels; | |
608 p->gimple_asm.string = ggc_alloc_string (string, size); | 537 p->gimple_asm.string = ggc_alloc_string (string, size); |
609 | 538 |
610 #ifdef GATHER_STATISTICS | 539 #ifdef GATHER_STATISTICS |
611 gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size; | 540 gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size; |
612 #endif | 541 #endif |
613 | 542 |
614 return p; | 543 return p; |
615 } | 544 } |
616 | 545 |
617 /* Build a GIMPLE_ASM statement. | 546 /* Build a GIMPLE_ASM statement. |
618 | 547 |
620 NINPUT is the number of register inputs. | 549 NINPUT is the number of register inputs. |
621 NOUTPUT is the number of register outputs. | 550 NOUTPUT is the number of register outputs. |
622 NCLOBBERS is the number of clobbered registers. | 551 NCLOBBERS is the number of clobbered registers. |
623 INPUTS is a vector of the input register parameters. | 552 INPUTS is a vector of the input register parameters. |
624 OUTPUTS is a vector of the output register parameters. | 553 OUTPUTS is a vector of the output register parameters. |
625 CLOBBERS is a vector of the clobbered register parameters. */ | 554 CLOBBERS is a vector of the clobbered register parameters. |
555 LABELS is a vector of destination labels. */ | |
626 | 556 |
627 gimple | 557 gimple |
628 gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs, | 558 gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs, |
629 VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers) | 559 VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers, |
560 VEC(tree,gc)* labels) | |
630 { | 561 { |
631 gimple p; | 562 gimple p; |
632 unsigned i; | 563 unsigned i; |
633 | 564 |
634 p = gimple_build_asm_1 (string, | 565 p = gimple_build_asm_1 (string, |
635 VEC_length (tree, inputs), | 566 VEC_length (tree, inputs), |
636 VEC_length (tree, outputs), | 567 VEC_length (tree, outputs), |
637 VEC_length (tree, clobbers)); | 568 VEC_length (tree, clobbers), |
638 | 569 VEC_length (tree, labels)); |
570 | |
639 for (i = 0; i < VEC_length (tree, inputs); i++) | 571 for (i = 0; i < VEC_length (tree, inputs); i++) |
640 gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i)); | 572 gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i)); |
641 | 573 |
642 for (i = 0; i < VEC_length (tree, outputs); i++) | 574 for (i = 0; i < VEC_length (tree, outputs); i++) |
643 gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i)); | 575 gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i)); |
644 | 576 |
645 for (i = 0; i < VEC_length (tree, clobbers); i++) | 577 for (i = 0; i < VEC_length (tree, clobbers); i++) |
646 gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i)); | 578 gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i)); |
647 | 579 |
648 return p; | 580 for (i = 0; i < VEC_length (tree, labels); i++) |
649 } | 581 gimple_asm_set_label_op (p, i, VEC_index (tree, labels, i)); |
650 | 582 |
651 /* Build a GIMPLE_ASM statement. | |
652 | |
653 STRING is the assembly code. | |
654 NINPUT is the number of register inputs. | |
655 NOUTPUT is the number of register outputs. | |
656 NCLOBBERS is the number of clobbered registers. | |
657 ... are trees for each input, output and clobbered register. */ | |
658 | |
659 gimple | |
660 gimple_build_asm (const char *string, unsigned ninputs, unsigned noutputs, | |
661 unsigned nclobbers, ...) | |
662 { | |
663 gimple p; | |
664 unsigned i; | |
665 va_list ap; | |
666 | |
667 p = gimple_build_asm_1 (string, ninputs, noutputs, nclobbers); | |
668 | |
669 va_start (ap, nclobbers); | |
670 | |
671 for (i = 0; i < ninputs; i++) | |
672 gimple_asm_set_input_op (p, i, va_arg (ap, tree)); | |
673 | |
674 for (i = 0; i < noutputs; i++) | |
675 gimple_asm_set_output_op (p, i, va_arg (ap, tree)); | |
676 | |
677 for (i = 0; i < nclobbers; i++) | |
678 gimple_asm_set_clobber_op (p, i, va_arg (ap, tree)); | |
679 | |
680 va_end (ap); | |
681 | |
682 return p; | 583 return p; |
683 } | 584 } |
684 | 585 |
685 /* Build a GIMPLE_CATCH statement. | 586 /* Build a GIMPLE_CATCH statement. |
686 | 587 |
708 { | 609 { |
709 gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0); | 610 gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0); |
710 gimple_eh_filter_set_types (p, types); | 611 gimple_eh_filter_set_types (p, types); |
711 if (failure) | 612 if (failure) |
712 gimple_eh_filter_set_failure (p, failure); | 613 gimple_eh_filter_set_failure (p, failure); |
614 | |
615 return p; | |
616 } | |
617 | |
618 /* Build a GIMPLE_EH_MUST_NOT_THROW statement. */ | |
619 | |
620 gimple | |
621 gimple_build_eh_must_not_throw (tree decl) | |
622 { | |
623 gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 1); | |
624 | |
625 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL); | |
626 gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN); | |
627 gimple_eh_must_not_throw_set_fndecl (p, decl); | |
713 | 628 |
714 return p; | 629 return p; |
715 } | 630 } |
716 | 631 |
717 /* Build a GIMPLE_TRY statement. | 632 /* Build a GIMPLE_TRY statement. |
751 | 666 |
752 return p; | 667 return p; |
753 } | 668 } |
754 | 669 |
755 | 670 |
756 /* Build a GIMPLE_RESX statement. | 671 /* Build a GIMPLE_RESX statement. */ |
757 | |
758 REGION is the region number from which this resx causes control flow to | |
759 leave. */ | |
760 | 672 |
761 gimple | 673 gimple |
762 gimple_build_resx (int region) | 674 gimple_build_resx (int region) |
763 { | 675 { |
764 gimple p = gimple_alloc (GIMPLE_RESX, 0); | 676 gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0); |
765 gimple_resx_set_region (p, region); | 677 p->gimple_eh_ctrl.region = region; |
766 return p; | 678 return p; |
767 } | 679 } |
768 | 680 |
769 | 681 |
770 /* The helper for constructing a gimple switch statement. | 682 /* The helper for constructing a gimple switch statement. |
771 INDEX is the switch's index. | 683 INDEX is the switch's index. |
772 NLABELS is the number of labels in the switch excluding the default. | 684 NLABELS is the number of labels in the switch excluding the default. |
773 DEFAULT_LABEL is the default label for the switch statement. */ | 685 DEFAULT_LABEL is the default label for the switch statement. */ |
774 | 686 |
775 static inline gimple | 687 gimple |
776 gimple_build_switch_1 (unsigned nlabels, tree index, tree default_label) | 688 gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label) |
777 { | 689 { |
778 /* nlabels + 1 default label + 1 index. */ | 690 /* nlabels + 1 default label + 1 index. */ |
779 gimple p = gimple_build_with_ops (GIMPLE_SWITCH, 0, nlabels + 1 + 1); | 691 gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK, |
692 1 + (default_label != NULL) + nlabels); | |
780 gimple_switch_set_index (p, index); | 693 gimple_switch_set_index (p, index); |
781 gimple_switch_set_default_label (p, default_label); | 694 if (default_label) |
695 gimple_switch_set_default_label (p, default_label); | |
782 return p; | 696 return p; |
783 } | 697 } |
784 | 698 |
785 | 699 |
786 /* Build a GIMPLE_SWITCH statement. | 700 /* Build a GIMPLE_SWITCH statement. |
787 | 701 |
788 INDEX is the switch's index. | 702 INDEX is the switch's index. |
789 NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL. | 703 NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL. |
790 ... are the labels excluding the default. */ | 704 ... are the labels excluding the default. */ |
791 | 705 |
792 gimple | 706 gimple |
793 gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...) | 707 gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...) |
794 { | 708 { |
795 va_list al; | 709 va_list al; |
796 unsigned i; | 710 unsigned i, offset; |
797 gimple p; | 711 gimple p = gimple_build_switch_nlabels (nlabels, index, default_label); |
798 | |
799 p = gimple_build_switch_1 (nlabels, index, default_label); | |
800 | 712 |
801 /* Store the rest of the labels. */ | 713 /* Store the rest of the labels. */ |
802 va_start (al, default_label); | 714 va_start (al, default_label); |
803 for (i = 1; i <= nlabels; i++) | 715 offset = (default_label != NULL); |
804 gimple_switch_set_label (p, i, va_arg (al, tree)); | 716 for (i = 0; i < nlabels; i++) |
717 gimple_switch_set_label (p, i + offset, va_arg (al, tree)); | |
805 va_end (al); | 718 va_end (al); |
806 | 719 |
807 return p; | 720 return p; |
808 } | 721 } |
809 | 722 |
815 ARGS is a vector of labels excluding the default. */ | 728 ARGS is a vector of labels excluding the default. */ |
816 | 729 |
817 gimple | 730 gimple |
818 gimple_build_switch_vec (tree index, tree default_label, VEC(tree, heap) *args) | 731 gimple_build_switch_vec (tree index, tree default_label, VEC(tree, heap) *args) |
819 { | 732 { |
820 unsigned i; | 733 unsigned i, offset, nlabels = VEC_length (tree, args); |
821 unsigned nlabels = VEC_length (tree, args); | 734 gimple p = gimple_build_switch_nlabels (nlabels, index, default_label); |
822 gimple p = gimple_build_switch_1 (nlabels, index, default_label); | 735 |
823 | 736 /* Copy the labels from the vector to the switch statement. */ |
824 /* Put labels in labels[1 - (nlabels + 1)]. | 737 offset = (default_label != NULL); |
825 Default label is in labels[0]. */ | 738 for (i = 0; i < nlabels; i++) |
826 for (i = 1; i <= nlabels; i++) | 739 gimple_switch_set_label (p, i + offset, VEC_index (tree, args, i)); |
827 gimple_switch_set_label (p, i, VEC_index (tree, args, i - 1)); | 740 |
741 return p; | |
742 } | |
743 | |
744 /* Build a GIMPLE_EH_DISPATCH statement. */ | |
745 | |
746 gimple | |
747 gimple_build_eh_dispatch (int region) | |
748 { | |
749 gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0); | |
750 p->gimple_eh_ctrl.region = region; | |
751 return p; | |
752 } | |
753 | |
754 /* Build a new GIMPLE_DEBUG_BIND statement. | |
755 | |
756 VAR is bound to VALUE; block and location are taken from STMT. */ | |
757 | |
758 gimple | |
759 gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL) | |
760 { | |
761 gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG, | |
762 (unsigned)GIMPLE_DEBUG_BIND, 2 | |
763 PASS_MEM_STAT); | |
764 | |
765 gimple_debug_bind_set_var (p, var); | |
766 gimple_debug_bind_set_value (p, value); | |
767 if (stmt) | |
768 { | |
769 gimple_set_block (p, gimple_block (stmt)); | |
770 gimple_set_location (p, gimple_location (stmt)); | |
771 } | |
828 | 772 |
829 return p; | 773 return p; |
830 } | 774 } |
831 | 775 |
832 | 776 |
833 /* Build a GIMPLE_OMP_CRITICAL statement. | 777 /* Build a GIMPLE_OMP_CRITICAL statement. |
834 | 778 |
835 BODY is the sequence of statements for which only one thread can execute. | 779 BODY is the sequence of statements for which only one thread can execute. |
836 NAME is optional identifier for this critical block. */ | 780 NAME is optional identifier for this critical block. */ |
837 | 781 |
838 gimple | 782 gimple |
839 gimple_build_omp_critical (gimple_seq body, tree name) | 783 gimple_build_omp_critical (gimple_seq body, tree name) |
840 { | 784 { |
841 gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0); | 785 gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0); |
842 gimple_omp_critical_set_name (p, name); | 786 gimple_omp_critical_set_name (p, name); |
843 if (body) | 787 if (body) |
847 } | 791 } |
848 | 792 |
849 /* Build a GIMPLE_OMP_FOR statement. | 793 /* Build a GIMPLE_OMP_FOR statement. |
850 | 794 |
851 BODY is sequence of statements inside the for loop. | 795 BODY is sequence of statements inside the for loop. |
852 CLAUSES, are any of the OMP loop construct's clauses: private, firstprivate, | 796 CLAUSES, are any of the OMP loop construct's clauses: private, firstprivate, |
853 lastprivate, reductions, ordered, schedule, and nowait. | 797 lastprivate, reductions, ordered, schedule, and nowait. |
854 COLLAPSE is the collapse count. | 798 COLLAPSE is the collapse count. |
855 PRE_BODY is the sequence of statements that are loop invariant. */ | 799 PRE_BODY is the sequence of statements that are loop invariant. */ |
856 | 800 |
857 gimple | 801 gimple |
876 BODY is sequence of statements which are executed in parallel. | 820 BODY is sequence of statements which are executed in parallel. |
877 CLAUSES, are the OMP parallel construct's clauses. | 821 CLAUSES, are the OMP parallel construct's clauses. |
878 CHILD_FN is the function created for the parallel threads to execute. | 822 CHILD_FN is the function created for the parallel threads to execute. |
879 DATA_ARG are the shared data argument(s). */ | 823 DATA_ARG are the shared data argument(s). */ |
880 | 824 |
881 gimple | 825 gimple |
882 gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn, | 826 gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn, |
883 tree data_arg) | 827 tree data_arg) |
884 { | 828 { |
885 gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0); | 829 gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0); |
886 if (body) | 830 if (body) |
887 gimple_omp_set_body (p, body); | 831 gimple_omp_set_body (p, body); |
900 CHILD_FN is the function created for the parallel threads to execute. | 844 CHILD_FN is the function created for the parallel threads to execute. |
901 DATA_ARG are the shared data argument(s). | 845 DATA_ARG are the shared data argument(s). |
902 COPY_FN is the optional function for firstprivate initialization. | 846 COPY_FN is the optional function for firstprivate initialization. |
903 ARG_SIZE and ARG_ALIGN are size and alignment of the data block. */ | 847 ARG_SIZE and ARG_ALIGN are size and alignment of the data block. */ |
904 | 848 |
905 gimple | 849 gimple |
906 gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn, | 850 gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn, |
907 tree data_arg, tree copy_fn, tree arg_size, | 851 tree data_arg, tree copy_fn, tree arg_size, |
908 tree arg_align) | 852 tree arg_align) |
909 { | 853 { |
910 gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0); | 854 gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0); |
938 | 882 |
939 /* Build a GIMPLE_OMP_MASTER statement. | 883 /* Build a GIMPLE_OMP_MASTER statement. |
940 | 884 |
941 BODY is the sequence of statements to be executed by just the master. */ | 885 BODY is the sequence of statements to be executed by just the master. */ |
942 | 886 |
943 gimple | 887 gimple |
944 gimple_build_omp_master (gimple_seq body) | 888 gimple_build_omp_master (gimple_seq body) |
945 { | 889 { |
946 gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0); | 890 gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0); |
947 if (body) | 891 if (body) |
948 gimple_omp_set_body (p, body); | 892 gimple_omp_set_body (p, body); |
954 /* Build a GIMPLE_OMP_CONTINUE statement. | 898 /* Build a GIMPLE_OMP_CONTINUE statement. |
955 | 899 |
956 CONTROL_DEF is the definition of the control variable. | 900 CONTROL_DEF is the definition of the control variable. |
957 CONTROL_USE is the use of the control variable. */ | 901 CONTROL_USE is the use of the control variable. */ |
958 | 902 |
959 gimple | 903 gimple |
960 gimple_build_omp_continue (tree control_def, tree control_use) | 904 gimple_build_omp_continue (tree control_def, tree control_use) |
961 { | 905 { |
962 gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0); | 906 gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0); |
963 gimple_omp_continue_set_control_def (p, control_def); | 907 gimple_omp_continue_set_control_def (p, control_def); |
964 gimple_omp_continue_set_control_use (p, control_use); | 908 gimple_omp_continue_set_control_use (p, control_use); |
968 /* Build a GIMPLE_OMP_ORDERED statement. | 912 /* Build a GIMPLE_OMP_ORDERED statement. |
969 | 913 |
970 BODY is the sequence of statements inside a loop that will executed in | 914 BODY is the sequence of statements inside a loop that will executed in |
971 sequence. */ | 915 sequence. */ |
972 | 916 |
973 gimple | 917 gimple |
974 gimple_build_omp_ordered (gimple_seq body) | 918 gimple_build_omp_ordered (gimple_seq body) |
975 { | 919 { |
976 gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0); | 920 gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0); |
977 if (body) | 921 if (body) |
978 gimple_omp_set_body (p, body); | 922 gimple_omp_set_body (p, body); |
982 | 926 |
983 | 927 |
984 /* Build a GIMPLE_OMP_RETURN statement. | 928 /* Build a GIMPLE_OMP_RETURN statement. |
985 WAIT_P is true if this is a non-waiting return. */ | 929 WAIT_P is true if this is a non-waiting return. */ |
986 | 930 |
987 gimple | 931 gimple |
988 gimple_build_omp_return (bool wait_p) | 932 gimple_build_omp_return (bool wait_p) |
989 { | 933 { |
990 gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0); | 934 gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0); |
991 if (wait_p) | 935 if (wait_p) |
992 gimple_omp_return_set_nowait (p); | 936 gimple_omp_return_set_nowait (p); |
999 | 943 |
1000 BODY is a sequence of section statements. | 944 BODY is a sequence of section statements. |
1001 CLAUSES are any of the OMP sections contsruct's clauses: private, | 945 CLAUSES are any of the OMP sections contsruct's clauses: private, |
1002 firstprivate, lastprivate, reduction, and nowait. */ | 946 firstprivate, lastprivate, reduction, and nowait. */ |
1003 | 947 |
1004 gimple | 948 gimple |
1005 gimple_build_omp_sections (gimple_seq body, tree clauses) | 949 gimple_build_omp_sections (gimple_seq body, tree clauses) |
1006 { | 950 { |
1007 gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0); | 951 gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0); |
1008 if (body) | 952 if (body) |
1009 gimple_omp_set_body (p, body); | 953 gimple_omp_set_body (p, body); |
1026 | 970 |
1027 BODY is the sequence of statements that will be executed once. | 971 BODY is the sequence of statements that will be executed once. |
1028 CLAUSES are any of the OMP single construct's clauses: private, firstprivate, | 972 CLAUSES are any of the OMP single construct's clauses: private, firstprivate, |
1029 copyprivate, nowait. */ | 973 copyprivate, nowait. */ |
1030 | 974 |
1031 gimple | 975 gimple |
1032 gimple_build_omp_single (gimple_seq body, tree clauses) | 976 gimple_build_omp_single (gimple_seq body, tree clauses) |
1033 { | 977 { |
1034 gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0); | 978 gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0); |
1035 if (body) | 979 if (body) |
1036 gimple_omp_set_body (p, body); | 980 gimple_omp_set_body (p, body); |
1037 gimple_omp_single_set_clauses (p, clauses); | 981 gimple_omp_single_set_clauses (p, clauses); |
1038 | |
1039 return p; | |
1040 } | |
1041 | |
1042 | |
1043 /* Build a GIMPLE_CHANGE_DYNAMIC_TYPE statement. TYPE is the new type | |
1044 for the location PTR. */ | |
1045 | |
1046 gimple | |
1047 gimple_build_cdt (tree type, tree ptr) | |
1048 { | |
1049 gimple p = gimple_build_with_ops (GIMPLE_CHANGE_DYNAMIC_TYPE, 0, 2); | |
1050 gimple_cdt_set_new_type (p, type); | |
1051 gimple_cdt_set_location (p, ptr); | |
1052 | 982 |
1053 return p; | 983 return p; |
1054 } | 984 } |
1055 | 985 |
1056 | 986 |
1087 /* Ensure all the predictors fit into the lower bits of the subcode. */ | 1017 /* Ensure all the predictors fit into the lower bits of the subcode. */ |
1088 gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN); | 1018 gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN); |
1089 gimple_predict_set_predictor (p, predictor); | 1019 gimple_predict_set_predictor (p, predictor); |
1090 gimple_predict_set_outcome (p, outcome); | 1020 gimple_predict_set_outcome (p, outcome); |
1091 return p; | 1021 return p; |
1092 } | |
1093 | |
1094 /* Return which gimple structure is used by T. The enums here are defined | |
1095 in gsstruct.def. */ | |
1096 | |
1097 enum gimple_statement_structure_enum | |
1098 gimple_statement_structure (gimple gs) | |
1099 { | |
1100 return gss_for_code (gimple_code (gs)); | |
1101 } | 1022 } |
1102 | 1023 |
1103 #if defined ENABLE_GIMPLE_CHECKING | 1024 #if defined ENABLE_GIMPLE_CHECKING |
1104 /* Complain of a gimple type mismatch and die. */ | 1025 /* Complain of a gimple type mismatch and die. */ |
1105 | 1026 |
1158 gcc_assert (gimple_seq_last (seq) == NULL); | 1079 gcc_assert (gimple_seq_last (seq) == NULL); |
1159 | 1080 |
1160 /* If this triggers, it's a sign that the same list is being freed | 1081 /* If this triggers, it's a sign that the same list is being freed |
1161 twice. */ | 1082 twice. */ |
1162 gcc_assert (seq != gimple_seq_cache || gimple_seq_cache == NULL); | 1083 gcc_assert (seq != gimple_seq_cache || gimple_seq_cache == NULL); |
1163 | 1084 |
1164 /* Add SEQ to the pool of free sequences. */ | 1085 /* Add SEQ to the pool of free sequences. */ |
1165 seq->next_free = gimple_seq_cache; | 1086 seq->next_free = gimple_seq_cache; |
1166 gimple_seq_cache = seq; | 1087 gimple_seq_cache = seq; |
1167 } | 1088 } |
1168 | 1089 |
1224 bool | 1145 bool |
1225 empty_body_p (gimple_seq body) | 1146 empty_body_p (gimple_seq body) |
1226 { | 1147 { |
1227 gimple_stmt_iterator i; | 1148 gimple_stmt_iterator i; |
1228 | 1149 |
1229 | |
1230 if (gimple_seq_empty_p (body)) | 1150 if (gimple_seq_empty_p (body)) |
1231 return true; | 1151 return true; |
1232 for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i)) | 1152 for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i)) |
1233 if (!empty_stmt_p (gsi_stmt (i))) | 1153 if (!empty_stmt_p (gsi_stmt (i)) |
1154 && !is_gimple_debug (gsi_stmt (i))) | |
1234 return false; | 1155 return false; |
1235 | 1156 |
1236 return true; | 1157 return true; |
1237 } | 1158 } |
1238 | 1159 |
1256 } | 1177 } |
1257 | 1178 |
1258 | 1179 |
1259 /* Walk all the statements in the sequence SEQ calling walk_gimple_stmt | 1180 /* Walk all the statements in the sequence SEQ calling walk_gimple_stmt |
1260 on each one. WI is as in walk_gimple_stmt. | 1181 on each one. WI is as in walk_gimple_stmt. |
1261 | 1182 |
1262 If walk_gimple_stmt returns non-NULL, the walk is stopped, the | 1183 If walk_gimple_stmt returns non-NULL, the walk is stopped, the |
1263 value is stored in WI->CALLBACK_RESULT and the statement that | 1184 value is stored in WI->CALLBACK_RESULT and the statement that |
1264 produced the value is returned. | 1185 produced the value is returned. |
1265 | 1186 |
1266 Otherwise, all the statements are walked and NULL returned. */ | 1187 Otherwise, all the statements are walked and NULL returned. */ |
1295 | 1216 |
1296 static tree | 1217 static tree |
1297 walk_gimple_asm (gimple stmt, walk_tree_fn callback_op, | 1218 walk_gimple_asm (gimple stmt, walk_tree_fn callback_op, |
1298 struct walk_stmt_info *wi) | 1219 struct walk_stmt_info *wi) |
1299 { | 1220 { |
1300 tree ret; | 1221 tree ret, op; |
1301 unsigned noutputs; | 1222 unsigned noutputs; |
1302 const char **oconstraints; | 1223 const char **oconstraints; |
1303 unsigned i; | 1224 unsigned i, n; |
1304 const char *constraint; | 1225 const char *constraint; |
1305 bool allows_mem, allows_reg, is_inout; | 1226 bool allows_mem, allows_reg, is_inout; |
1306 | 1227 |
1307 noutputs = gimple_asm_noutputs (stmt); | 1228 noutputs = gimple_asm_noutputs (stmt); |
1308 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *)); | 1229 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *)); |
1310 if (wi) | 1231 if (wi) |
1311 wi->is_lhs = true; | 1232 wi->is_lhs = true; |
1312 | 1233 |
1313 for (i = 0; i < noutputs; i++) | 1234 for (i = 0; i < noutputs; i++) |
1314 { | 1235 { |
1315 tree op = gimple_asm_output_op (stmt, i); | 1236 op = gimple_asm_output_op (stmt, i); |
1316 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op))); | 1237 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op))); |
1317 oconstraints[i] = constraint; | 1238 oconstraints[i] = constraint; |
1318 parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg, | 1239 parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg, |
1319 &is_inout); | 1240 &is_inout); |
1320 if (wi) | 1241 if (wi) |
1322 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL); | 1243 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL); |
1323 if (ret) | 1244 if (ret) |
1324 return ret; | 1245 return ret; |
1325 } | 1246 } |
1326 | 1247 |
1327 for (i = 0; i < gimple_asm_ninputs (stmt); i++) | 1248 n = gimple_asm_ninputs (stmt); |
1328 { | 1249 for (i = 0; i < n; i++) |
1329 tree op = gimple_asm_input_op (stmt, i); | 1250 { |
1251 op = gimple_asm_input_op (stmt, i); | |
1330 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op))); | 1252 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op))); |
1331 parse_input_constraint (&constraint, 0, 0, noutputs, 0, | 1253 parse_input_constraint (&constraint, 0, 0, noutputs, 0, |
1332 oconstraints, &allows_mem, &allows_reg); | 1254 oconstraints, &allows_mem, &allows_reg); |
1333 if (wi) | 1255 if (wi) |
1334 wi->val_only = (allows_reg || !allows_mem); | 1256 { |
1335 | 1257 wi->val_only = (allows_reg || !allows_mem); |
1336 /* Although input "m" is not really a LHS, we need a lvalue. */ | 1258 /* Although input "m" is not really a LHS, we need a lvalue. */ |
1337 if (wi) | 1259 wi->is_lhs = !wi->val_only; |
1338 wi->is_lhs = !wi->val_only; | 1260 } |
1339 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL); | 1261 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL); |
1340 if (ret) | 1262 if (ret) |
1341 return ret; | 1263 return ret; |
1342 } | 1264 } |
1343 | 1265 |
1344 if (wi) | 1266 if (wi) |
1345 { | 1267 { |
1346 wi->is_lhs = false; | 1268 wi->is_lhs = false; |
1347 wi->val_only = true; | 1269 wi->val_only = true; |
1270 } | |
1271 | |
1272 n = gimple_asm_nlabels (stmt); | |
1273 for (i = 0; i < n; i++) | |
1274 { | |
1275 op = gimple_asm_label_op (stmt, i); | |
1276 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL); | |
1277 if (ret) | |
1278 return ret; | |
1348 } | 1279 } |
1349 | 1280 |
1350 return NULL_TREE; | 1281 return NULL_TREE; |
1351 } | 1282 } |
1352 | 1283 |
1378 { | 1309 { |
1379 case GIMPLE_ASSIGN: | 1310 case GIMPLE_ASSIGN: |
1380 /* Walk the RHS operands. A formal temporary LHS may use a | 1311 /* Walk the RHS operands. A formal temporary LHS may use a |
1381 COMPONENT_REF RHS. */ | 1312 COMPONENT_REF RHS. */ |
1382 if (wi) | 1313 if (wi) |
1383 wi->val_only = !is_gimple_formal_tmp_var (gimple_assign_lhs (stmt)); | 1314 wi->val_only = !is_gimple_reg (gimple_assign_lhs (stmt)) |
1315 || !gimple_assign_single_p (stmt); | |
1384 | 1316 |
1385 for (i = 1; i < gimple_num_ops (stmt); i++) | 1317 for (i = 1; i < gimple_num_ops (stmt); i++) |
1386 { | 1318 { |
1387 ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, | 1319 ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, |
1388 pset); | 1320 pset); |
1451 break; | 1383 break; |
1452 | 1384 |
1453 case GIMPLE_EH_FILTER: | 1385 case GIMPLE_EH_FILTER: |
1454 ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi, | 1386 ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi, |
1455 pset); | 1387 pset); |
1456 if (ret) | |
1457 return ret; | |
1458 break; | |
1459 | |
1460 case GIMPLE_CHANGE_DYNAMIC_TYPE: | |
1461 ret = walk_tree (gimple_cdt_location_ptr (stmt), callback_op, wi, pset); | |
1462 if (ret) | |
1463 return ret; | |
1464 | |
1465 ret = walk_tree (gimple_cdt_new_type_ptr (stmt), callback_op, wi, pset); | |
1466 if (ret) | 1388 if (ret) |
1467 return ret; | 1389 return ret; |
1468 break; | 1390 break; |
1469 | 1391 |
1470 case GIMPLE_ASM: | 1392 case GIMPLE_ASM: |
1865 condition and to proceed in the same manner. In each case, the | 1787 condition and to proceed in the same manner. In each case, the |
1866 assigned value is represented by the single RHS operand of the | 1788 assigned value is represented by the single RHS operand of the |
1867 assignment. I suspect there may be cases where gimple_assign_copy_p, | 1789 assignment. I suspect there may be cases where gimple_assign_copy_p, |
1868 gimple_assign_single_p, or equivalent logic is used where a similar | 1790 gimple_assign_single_p, or equivalent logic is used where a similar |
1869 treatment of unary NOPs is appropriate. */ | 1791 treatment of unary NOPs is appropriate. */ |
1870 | 1792 |
1871 bool | 1793 bool |
1872 gimple_assign_unary_nop_p (gimple gs) | 1794 gimple_assign_unary_nop_p (gimple gs) |
1873 { | 1795 { |
1874 return (gimple_code (gs) == GIMPLE_ASSIGN | 1796 return (gimple_code (gs) == GIMPLE_ASSIGN |
1875 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs)) | 1797 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs)) |
1899 { | 1821 { |
1900 unsigned old_len = VEC_length (basic_block, label_to_block_map); | 1822 unsigned old_len = VEC_length (basic_block, label_to_block_map); |
1901 LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++; | 1823 LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++; |
1902 if (old_len <= (unsigned) uid) | 1824 if (old_len <= (unsigned) uid) |
1903 { | 1825 { |
1904 unsigned new_len = 3 * uid / 2; | 1826 unsigned new_len = 3 * uid / 2 + 1; |
1905 | 1827 |
1906 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map, | 1828 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map, |
1907 new_len); | 1829 new_len); |
1908 } | 1830 } |
1909 } | 1831 } |
1918 not modified. */ | 1840 not modified. */ |
1919 | 1841 |
1920 tree | 1842 tree |
1921 gimple_fold (const_gimple stmt) | 1843 gimple_fold (const_gimple stmt) |
1922 { | 1844 { |
1845 location_t loc = gimple_location (stmt); | |
1923 switch (gimple_code (stmt)) | 1846 switch (gimple_code (stmt)) |
1924 { | 1847 { |
1925 case GIMPLE_COND: | 1848 case GIMPLE_COND: |
1926 return fold_binary (gimple_cond_code (stmt), | 1849 return fold_binary_loc (loc, gimple_cond_code (stmt), |
1927 boolean_type_node, | 1850 boolean_type_node, |
1928 gimple_cond_lhs (stmt), | 1851 gimple_cond_lhs (stmt), |
1929 gimple_cond_rhs (stmt)); | 1852 gimple_cond_rhs (stmt)); |
1930 | 1853 |
1931 case GIMPLE_ASSIGN: | 1854 case GIMPLE_ASSIGN: |
1932 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))) | 1855 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))) |
1933 { | 1856 { |
1934 case GIMPLE_UNARY_RHS: | 1857 case GIMPLE_UNARY_RHS: |
1935 return fold_unary (gimple_assign_rhs_code (stmt), | 1858 return fold_unary_loc (loc, gimple_assign_rhs_code (stmt), |
1936 TREE_TYPE (gimple_assign_lhs (stmt)), | 1859 TREE_TYPE (gimple_assign_lhs (stmt)), |
1937 gimple_assign_rhs1 (stmt)); | 1860 gimple_assign_rhs1 (stmt)); |
1938 case GIMPLE_BINARY_RHS: | 1861 case GIMPLE_BINARY_RHS: |
1939 return fold_binary (gimple_assign_rhs_code (stmt), | 1862 return fold_binary_loc (loc, gimple_assign_rhs_code (stmt), |
1940 TREE_TYPE (gimple_assign_lhs (stmt)), | 1863 TREE_TYPE (gimple_assign_lhs (stmt)), |
1941 gimple_assign_rhs1 (stmt), | 1864 gimple_assign_rhs1 (stmt), |
1942 gimple_assign_rhs2 (stmt)); | 1865 gimple_assign_rhs2 (stmt)); |
1943 case GIMPLE_SINGLE_RHS: | 1866 case GIMPLE_SINGLE_RHS: |
1944 return fold (gimple_assign_rhs1 (stmt)); | 1867 return fold (gimple_assign_rhs1 (stmt)); |
2056 gimple_call_set_lhs (stmt, lhs); | 1979 gimple_call_set_lhs (stmt, lhs); |
2057 else | 1980 else |
2058 gcc_unreachable(); | 1981 gcc_unreachable(); |
2059 } | 1982 } |
2060 | 1983 |
1984 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a | |
1985 GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an | |
1986 expression with a different value. | |
1987 | |
1988 This will update any annotations (say debug bind stmts) referring | |
1989 to the original LHS, so that they use the RHS instead. This is | |
1990 done even if NLHS and LHS are the same, for it is understood that | |
1991 the RHS will be modified afterwards, and NLHS will not be assigned | |
1992 an equivalent value. | |
1993 | |
1994 Adjusting any non-annotation uses of the LHS, if needed, is a | |
1995 responsibility of the caller. | |
1996 | |
1997 The effect of this call should be pretty much the same as that of | |
1998 inserting a copy of STMT before STMT, and then removing the | |
1999 original stmt, at which time gsi_remove() would have update | |
2000 annotations, but using this function saves all the inserting, | |
2001 copying and removing. */ | |
2002 | |
2003 void | |
2004 gimple_replace_lhs (gimple stmt, tree nlhs) | |
2005 { | |
2006 if (MAY_HAVE_DEBUG_STMTS) | |
2007 { | |
2008 tree lhs = gimple_get_lhs (stmt); | |
2009 | |
2010 gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt); | |
2011 | |
2012 insert_debug_temp_for_var_def (NULL, lhs); | |
2013 } | |
2014 | |
2015 gimple_set_lhs (stmt, nlhs); | |
2016 } | |
2061 | 2017 |
2062 /* Return a deep copy of statement STMT. All the operands from STMT | 2018 /* Return a deep copy of statement STMT. All the operands from STMT |
2063 are reallocated and copied using unshare_expr. The DEF, USE, VDEF | 2019 are reallocated and copied using unshare_expr. The DEF, USE, VDEF |
2064 and VUSE operand arrays are set to empty in the new copy. */ | 2020 and VUSE operand arrays are set to empty in the new copy. */ |
2065 | 2021 |
2192 if (num_ops > 0) | 2148 if (num_ops > 0) |
2193 { | 2149 { |
2194 for (i = 0; i < num_ops; i++) | 2150 for (i = 0; i < num_ops; i++) |
2195 gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i))); | 2151 gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i))); |
2196 | 2152 |
2197 /* Clear out SSA operand vectors on COPY. Note that we cannot | 2153 /* Clear out SSA operand vectors on COPY. */ |
2198 call the API functions for setting addresses_taken, stores | |
2199 and loads. These functions free the previous values, and we | |
2200 cannot do that on COPY as it will affect the original | |
2201 statement. */ | |
2202 if (gimple_has_ops (stmt)) | 2154 if (gimple_has_ops (stmt)) |
2203 { | 2155 { |
2204 gimple_set_def_ops (copy, NULL); | 2156 gimple_set_def_ops (copy, NULL); |
2205 gimple_set_use_ops (copy, NULL); | 2157 gimple_set_use_ops (copy, NULL); |
2206 copy->gsops.opbase.addresses_taken = NULL; | |
2207 } | 2158 } |
2208 | 2159 |
2209 if (gimple_has_mem_ops (stmt)) | 2160 if (gimple_has_mem_ops (stmt)) |
2210 { | 2161 { |
2211 gimple_set_vdef_ops (copy, NULL); | 2162 gimple_set_vdef (copy, gimple_vdef (stmt)); |
2212 gimple_set_vuse_ops (copy, NULL); | 2163 gimple_set_vuse (copy, gimple_vuse (stmt)); |
2213 copy->gsmem.membase.stores = NULL; | |
2214 copy->gsmem.membase.loads = NULL; | |
2215 } | 2164 } |
2216 | 2165 |
2217 update_stmt (copy); | 2166 /* SSA operands need to be updated. */ |
2167 gimple_set_modified (copy, true); | |
2218 } | 2168 } |
2219 | 2169 |
2220 return copy; | 2170 return copy; |
2221 } | 2171 } |
2222 | 2172 |
2248 | 2198 |
2249 bool | 2199 bool |
2250 gimple_has_side_effects (const_gimple s) | 2200 gimple_has_side_effects (const_gimple s) |
2251 { | 2201 { |
2252 unsigned i; | 2202 unsigned i; |
2203 | |
2204 if (is_gimple_debug (s)) | |
2205 return false; | |
2253 | 2206 |
2254 /* We don't have to scan the arguments to check for | 2207 /* We don't have to scan the arguments to check for |
2255 volatile arguments, though, at present, we still | 2208 volatile arguments, though, at present, we still |
2256 do a scan to check for TREE_SIDE_EFFECTS. */ | 2209 do a scan to check for TREE_SIDE_EFFECTS. */ |
2257 if (gimple_has_volatile_ops (s)) | 2210 if (gimple_has_volatile_ops (s)) |
2342 { | 2295 { |
2343 gcc_assert (gimple_has_volatile_ops (s)); | 2296 gcc_assert (gimple_has_volatile_ops (s)); |
2344 return true; | 2297 return true; |
2345 } | 2298 } |
2346 } | 2299 } |
2300 else if (is_gimple_debug (s)) | |
2301 return false; | |
2347 else | 2302 else |
2348 { | 2303 { |
2349 /* For statements without an LHS, examine all arguments. */ | 2304 /* For statements without an LHS, examine all arguments. */ |
2350 for (i = 0; i < gimple_num_ops (s); i++) | 2305 for (i = 0; i < gimple_num_ops (s); i++) |
2351 if (TREE_SIDE_EFFECTS (gimple_op (s, i)) | 2306 if (TREE_SIDE_EFFECTS (gimple_op (s, i)) |
2450 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes); | 2405 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes); |
2451 fprintf (stderr, "---------------------------------------\n"); | 2406 fprintf (stderr, "---------------------------------------\n"); |
2452 #else | 2407 #else |
2453 fprintf (stderr, "No gimple statistics\n"); | 2408 fprintf (stderr, "No gimple statistics\n"); |
2454 #endif | 2409 #endif |
2455 } | |
2456 | |
2457 | |
2458 /* Deep copy SYMS into the set of symbols stored by STMT. If SYMS is | |
2459 NULL or empty, the storage used is freed up. */ | |
2460 | |
2461 void | |
2462 gimple_set_stored_syms (gimple stmt, bitmap syms, bitmap_obstack *obs) | |
2463 { | |
2464 gcc_assert (gimple_has_mem_ops (stmt)); | |
2465 | |
2466 if (syms == NULL || bitmap_empty_p (syms)) | |
2467 BITMAP_FREE (stmt->gsmem.membase.stores); | |
2468 else | |
2469 { | |
2470 if (stmt->gsmem.membase.stores == NULL) | |
2471 stmt->gsmem.membase.stores = BITMAP_ALLOC (obs); | |
2472 | |
2473 bitmap_copy (stmt->gsmem.membase.stores, syms); | |
2474 } | |
2475 } | |
2476 | |
2477 | |
2478 /* Deep copy SYMS into the set of symbols loaded by STMT. If SYMS is | |
2479 NULL or empty, the storage used is freed up. */ | |
2480 | |
2481 void | |
2482 gimple_set_loaded_syms (gimple stmt, bitmap syms, bitmap_obstack *obs) | |
2483 { | |
2484 gcc_assert (gimple_has_mem_ops (stmt)); | |
2485 | |
2486 if (syms == NULL || bitmap_empty_p (syms)) | |
2487 BITMAP_FREE (stmt->gsmem.membase.loads); | |
2488 else | |
2489 { | |
2490 if (stmt->gsmem.membase.loads == NULL) | |
2491 stmt->gsmem.membase.loads = BITMAP_ALLOC (obs); | |
2492 | |
2493 bitmap_copy (stmt->gsmem.membase.loads, syms); | |
2494 } | |
2495 } | 2410 } |
2496 | 2411 |
2497 | 2412 |
2498 /* Return the number of operands needed on the RHS of a GIMPLE | 2413 /* Return the number of operands needed on the RHS of a GIMPLE |
2499 assignment for an expression with tree code CODE. */ | 2414 assignment for an expression with tree code CODE. */ |
2527 || (SYM) == CONSTRUCTOR \ | 2442 || (SYM) == CONSTRUCTOR \ |
2528 || (SYM) == OBJ_TYPE_REF \ | 2443 || (SYM) == OBJ_TYPE_REF \ |
2529 || (SYM) == ASSERT_EXPR \ | 2444 || (SYM) == ASSERT_EXPR \ |
2530 || (SYM) == ADDR_EXPR \ | 2445 || (SYM) == ADDR_EXPR \ |
2531 || (SYM) == WITH_SIZE_EXPR \ | 2446 || (SYM) == WITH_SIZE_EXPR \ |
2532 || (SYM) == EXC_PTR_EXPR \ | |
2533 || (SYM) == SSA_NAME \ | 2447 || (SYM) == SSA_NAME \ |
2534 || (SYM) == FILTER_EXPR \ | |
2535 || (SYM) == POLYNOMIAL_CHREC \ | 2448 || (SYM) == POLYNOMIAL_CHREC \ |
2536 || (SYM) == DOT_PROD_EXPR \ | 2449 || (SYM) == DOT_PROD_EXPR \ |
2537 || (SYM) == VEC_COND_EXPR \ | 2450 || (SYM) == VEC_COND_EXPR \ |
2538 || (SYM) == REALIGN_LOAD_EXPR) ? GIMPLE_SINGLE_RHS \ | 2451 || (SYM) == REALIGN_LOAD_EXPR) ? GIMPLE_SINGLE_RHS \ |
2539 : GIMPLE_INVALID_RHS), | 2452 : GIMPLE_INVALID_RHS), |
2557 is_gimple_operand (const_tree op) | 2470 is_gimple_operand (const_tree op) |
2558 { | 2471 { |
2559 return op && get_gimple_rhs_class (TREE_CODE (op)) == GIMPLE_SINGLE_RHS; | 2472 return op && get_gimple_rhs_class (TREE_CODE (op)) == GIMPLE_SINGLE_RHS; |
2560 } | 2473 } |
2561 | 2474 |
2562 | |
2563 /* Return true if T is a GIMPLE RHS for an assignment to a temporary. */ | |
2564 | |
2565 bool | |
2566 is_gimple_formal_tmp_rhs (tree t) | |
2567 { | |
2568 if (is_gimple_lvalue (t) || is_gimple_val (t)) | |
2569 return true; | |
2570 | |
2571 return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS; | |
2572 } | |
2573 | |
2574 /* Returns true iff T is a valid RHS for an assignment to a renamed | 2475 /* Returns true iff T is a valid RHS for an assignment to a renamed |
2575 user -- or front-end generated artificial -- variable. */ | 2476 user -- or front-end generated artificial -- variable. */ |
2576 | 2477 |
2577 bool | 2478 bool |
2578 is_gimple_reg_rhs (tree t) | 2479 is_gimple_reg_rhs (tree t) |
2579 { | 2480 { |
2580 /* If the RHS of the MODIFY_EXPR may throw or make a nonlocal goto | 2481 return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS; |
2581 and the LHS is a user variable, then we need to introduce a formal | |
2582 temporary. This way the optimizers can determine that the user | |
2583 variable is only modified if evaluation of the RHS does not throw. | |
2584 | |
2585 Don't force a temp of a non-renamable type; the copy could be | |
2586 arbitrarily expensive. Instead we will generate a VDEF for | |
2587 the assignment. */ | |
2588 | |
2589 if (is_gimple_reg_type (TREE_TYPE (t)) && tree_could_throw_p (t)) | |
2590 return false; | |
2591 | |
2592 return is_gimple_formal_tmp_rhs (t); | |
2593 } | 2482 } |
2594 | 2483 |
2595 /* Returns true iff T is a valid RHS for an assignment to an un-renamed | 2484 /* Returns true iff T is a valid RHS for an assignment to an un-renamed |
2596 LHS, or for a call argument. */ | 2485 LHS, or for a call argument. */ |
2597 | 2486 |
2601 /* If we're dealing with a renamable type, either source or dest must be | 2490 /* If we're dealing with a renamable type, either source or dest must be |
2602 a renamed variable. */ | 2491 a renamed variable. */ |
2603 if (is_gimple_reg_type (TREE_TYPE (t))) | 2492 if (is_gimple_reg_type (TREE_TYPE (t))) |
2604 return is_gimple_val (t); | 2493 return is_gimple_val (t); |
2605 else | 2494 else |
2606 return is_gimple_formal_tmp_rhs (t); | 2495 return is_gimple_val (t) || is_gimple_lvalue (t); |
2607 } | 2496 } |
2608 | 2497 |
2609 /* Return true if T is a valid LHS for a GIMPLE assignment expression. */ | 2498 /* Return true if T is a valid LHS for a GIMPLE assignment expression. */ |
2610 | 2499 |
2611 bool | 2500 bool |
2814 case CASE_LABEL_EXPR: | 2703 case CASE_LABEL_EXPR: |
2815 case TRY_CATCH_EXPR: | 2704 case TRY_CATCH_EXPR: |
2816 case TRY_FINALLY_EXPR: | 2705 case TRY_FINALLY_EXPR: |
2817 case EH_FILTER_EXPR: | 2706 case EH_FILTER_EXPR: |
2818 case CATCH_EXPR: | 2707 case CATCH_EXPR: |
2819 case CHANGE_DYNAMIC_TYPE_EXPR: | |
2820 case ASM_EXPR: | 2708 case ASM_EXPR: |
2821 case RESX_EXPR: | |
2822 case STATEMENT_LIST: | 2709 case STATEMENT_LIST: |
2823 case OMP_PARALLEL: | 2710 case OMP_PARALLEL: |
2824 case OMP_FOR: | 2711 case OMP_FOR: |
2825 case OMP_SECTIONS: | 2712 case OMP_SECTIONS: |
2826 case OMP_SECTION: | 2713 case OMP_SECTION: |
2870 /* Return true if TYPE is a suitable type for a scalar register variable. */ | 2757 /* Return true if TYPE is a suitable type for a scalar register variable. */ |
2871 | 2758 |
2872 bool | 2759 bool |
2873 is_gimple_reg_type (tree type) | 2760 is_gimple_reg_type (tree type) |
2874 { | 2761 { |
2875 /* In addition to aggregate types, we also exclude complex types if not | 2762 return !AGGREGATE_TYPE_P (type); |
2876 optimizing because they can be subject to partial stores in GNU C by | |
2877 means of the __real__ and __imag__ operators and we cannot promote | |
2878 them to total stores (see gimplify_modify_expr_complex_part). */ | |
2879 return !(AGGREGATE_TYPE_P (type) | |
2880 || (TREE_CODE (type) == COMPLEX_TYPE && !optimize)); | |
2881 | |
2882 } | 2763 } |
2883 | 2764 |
2884 /* Return true if T is a non-aggregate register variable. */ | 2765 /* Return true if T is a non-aggregate register variable. */ |
2885 | 2766 |
2886 bool | 2767 bool |
2887 is_gimple_reg (tree t) | 2768 is_gimple_reg (tree t) |
2888 { | 2769 { |
2889 if (TREE_CODE (t) == SSA_NAME) | 2770 if (TREE_CODE (t) == SSA_NAME) |
2890 t = SSA_NAME_VAR (t); | 2771 t = SSA_NAME_VAR (t); |
2891 | |
2892 if (MTAG_P (t)) | |
2893 return false; | |
2894 | 2772 |
2895 if (!is_gimple_variable (t)) | 2773 if (!is_gimple_variable (t)) |
2896 return false; | 2774 return false; |
2897 | 2775 |
2898 if (!is_gimple_reg_type (TREE_TYPE (t))) | 2776 if (!is_gimple_reg_type (TREE_TYPE (t))) |
2929 | 2807 |
2930 return true; | 2808 return true; |
2931 } | 2809 } |
2932 | 2810 |
2933 | 2811 |
2934 /* Returns true if T is a GIMPLE formal temporary variable. */ | |
2935 | |
2936 bool | |
2937 is_gimple_formal_tmp_var (tree t) | |
2938 { | |
2939 if (TREE_CODE (t) == SSA_NAME) | |
2940 return true; | |
2941 | |
2942 return TREE_CODE (t) == VAR_DECL && DECL_GIMPLE_FORMAL_TEMP_P (t); | |
2943 } | |
2944 | |
2945 /* Returns true if T is a GIMPLE formal temporary register variable. */ | |
2946 | |
2947 bool | |
2948 is_gimple_formal_tmp_reg (tree t) | |
2949 { | |
2950 /* The intent of this is to get hold of a value that won't change. | |
2951 An SSA_NAME qualifies no matter if its of a user variable or not. */ | |
2952 if (TREE_CODE (t) == SSA_NAME) | |
2953 return true; | |
2954 | |
2955 /* We don't know the lifetime characteristics of user variables. */ | |
2956 if (!is_gimple_formal_tmp_var (t)) | |
2957 return false; | |
2958 | |
2959 /* Finally, it must be capable of being placed in a register. */ | |
2960 return is_gimple_reg (t); | |
2961 } | |
2962 | |
2963 /* Return true if T is a GIMPLE variable whose address is not needed. */ | 2812 /* Return true if T is a GIMPLE variable whose address is not needed. */ |
2964 | 2813 |
2965 bool | 2814 bool |
2966 is_gimple_non_addressable (tree t) | 2815 is_gimple_non_addressable (tree t) |
2967 { | 2816 { |
2980 if (is_gimple_variable (t) | 2829 if (is_gimple_variable (t) |
2981 && is_gimple_reg_type (TREE_TYPE (t)) | 2830 && is_gimple_reg_type (TREE_TYPE (t)) |
2982 && !is_gimple_reg (t)) | 2831 && !is_gimple_reg (t)) |
2983 return false; | 2832 return false; |
2984 | 2833 |
2985 /* FIXME make these decls. That can happen only when we expose the | |
2986 entire landing-pad construct at the tree level. */ | |
2987 if (TREE_CODE (t) == EXC_PTR_EXPR || TREE_CODE (t) == FILTER_EXPR) | |
2988 return true; | |
2989 | |
2990 return (is_gimple_variable (t) || is_gimple_min_invariant (t)); | 2834 return (is_gimple_variable (t) || is_gimple_min_invariant (t)); |
2991 } | 2835 } |
2992 | 2836 |
2993 /* Similarly, but accept hard registers as inputs to asm statements. */ | 2837 /* Similarly, but accept hard registers as inputs to asm statements. */ |
2994 | 2838 |
/* Return true if T is a GIMPLE minimal lvalue, i.e. an identifier or
   an INDIRECT_REF after looking through invariant references.  */

bool
is_gimple_min_lval (tree t)
{
  /* Strip invariant references; a NULL result means T cannot be a
     minimal lvalue at all.  */
  if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
    return false;
  return (is_gimple_id (t) || TREE_CODE (t) == INDIRECT_REF);
}
3011 | 2857 |
3012 /* Return true if T is a typecast operation. */ | 2858 /* Return true if T is a typecast operation. */ |
3013 | 2859 |
3054 tree | 2900 tree |
3055 get_base_address (tree t) | 2901 get_base_address (tree t) |
3056 { | 2902 { |
3057 while (handled_component_p (t)) | 2903 while (handled_component_p (t)) |
3058 t = TREE_OPERAND (t, 0); | 2904 t = TREE_OPERAND (t, 0); |
3059 | 2905 |
3060 if (SSA_VAR_P (t) | 2906 if (SSA_VAR_P (t) |
3061 || TREE_CODE (t) == STRING_CST | 2907 || TREE_CODE (t) == STRING_CST |
3062 || TREE_CODE (t) == CONSTRUCTOR | 2908 || TREE_CODE (t) == CONSTRUCTOR |
3063 || INDIRECT_REF_P (t)) | 2909 || INDIRECT_REF_P (t)) |
3064 return t; | 2910 return t; |
3122 we failed to create one. */ | 2968 we failed to create one. */ |
3123 | 2969 |
3124 tree | 2970 tree |
3125 canonicalize_cond_expr_cond (tree t) | 2971 canonicalize_cond_expr_cond (tree t) |
3126 { | 2972 { |
2973 /* Strip conversions around boolean operations. */ | |
2974 if (CONVERT_EXPR_P (t) | |
2975 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 0)))) | |
2976 t = TREE_OPERAND (t, 0); | |
2977 | |
3127 /* For (bool)x use x != 0. */ | 2978 /* For (bool)x use x != 0. */ |
3128 if (TREE_CODE (t) == NOP_EXPR | 2979 if (CONVERT_EXPR_P (t) |
3129 && TREE_TYPE (t) == boolean_type_node) | 2980 && TREE_CODE (TREE_TYPE (t)) == BOOLEAN_TYPE) |
3130 { | 2981 { |
3131 tree top0 = TREE_OPERAND (t, 0); | 2982 tree top0 = TREE_OPERAND (t, 0); |
3132 t = build2 (NE_EXPR, TREE_TYPE (t), | 2983 t = build2 (NE_EXPR, TREE_TYPE (t), |
3133 top0, build_int_cst (TREE_TYPE (top0), 0)); | 2984 top0, build_int_cst (TREE_TYPE (top0), 0)); |
3134 } | 2985 } |
3174 | 3025 |
3175 new_stmt = gimple_build_call_vec (fn, vargs); | 3026 new_stmt = gimple_build_call_vec (fn, vargs); |
3176 VEC_free (tree, heap, vargs); | 3027 VEC_free (tree, heap, vargs); |
3177 if (gimple_call_lhs (stmt)) | 3028 if (gimple_call_lhs (stmt)) |
3178 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt)); | 3029 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt)); |
3030 | |
3031 gimple_set_vuse (new_stmt, gimple_vuse (stmt)); | |
3032 gimple_set_vdef (new_stmt, gimple_vdef (stmt)); | |
3179 | 3033 |
3180 gimple_set_block (new_stmt, gimple_block (stmt)); | 3034 gimple_set_block (new_stmt, gimple_block (stmt)); |
3181 if (gimple_has_location (stmt)) | 3035 if (gimple_has_location (stmt)) |
3182 gimple_set_location (new_stmt, gimple_location (stmt)); | 3036 gimple_set_location (new_stmt, gimple_location (stmt)); |
3183 | 3037 |
3186 gimple_call_set_tail (new_stmt, gimple_call_tail_p (stmt)); | 3040 gimple_call_set_tail (new_stmt, gimple_call_tail_p (stmt)); |
3187 gimple_call_set_cannot_inline (new_stmt, gimple_call_cannot_inline_p (stmt)); | 3041 gimple_call_set_cannot_inline (new_stmt, gimple_call_cannot_inline_p (stmt)); |
3188 gimple_call_set_return_slot_opt (new_stmt, gimple_call_return_slot_opt_p (stmt)); | 3042 gimple_call_set_return_slot_opt (new_stmt, gimple_call_return_slot_opt_p (stmt)); |
3189 gimple_call_set_from_thunk (new_stmt, gimple_call_from_thunk_p (stmt)); | 3043 gimple_call_set_from_thunk (new_stmt, gimple_call_from_thunk_p (stmt)); |
3190 gimple_call_set_va_arg_pack (new_stmt, gimple_call_va_arg_pack_p (stmt)); | 3044 gimple_call_set_va_arg_pack (new_stmt, gimple_call_va_arg_pack_p (stmt)); |
3045 | |
3046 gimple_set_modified (new_stmt, true); | |
3047 | |
3191 return new_stmt; | 3048 return new_stmt; |
3192 } | 3049 } |
3193 | 3050 |
3051 | |
static hashval_t gimple_type_hash (const void *);

/* Structure used to maintain a cache of some type pairs compared by
   gimple_types_compatible_p when comparing aggregate types.  There are
   four possible values for SAME_P:

	-2: The pair (T1, T2) has just been inserted in the table.
	-1: The pair (T1, T2) is currently being compared.
	 0: T1 and T2 are different types.
	 1: T1 and T2 are the same type.

   This table is only used when comparing aggregate types to avoid
   infinite recursion due to self-referential types.  */
struct type_pair_d
{
  unsigned int uid1;	/* TYPE_UID of the first type of the pair.  */
  unsigned int uid2;	/* TYPE_UID of the second type of the pair.  */
  int same_p;		/* Comparison state, see table above.  */
};
typedef struct type_pair_d *type_pair_t;
3072 | |
3073 /* Return a hash value for the type pair pointed-to by P. */ | |
3074 | |
3075 static hashval_t | |
3076 type_pair_hash (const void *p) | |
3077 { | |
3078 const struct type_pair_d *pair = (const struct type_pair_d *) p; | |
3079 hashval_t val1 = pair->uid1; | |
3080 hashval_t val2 = pair->uid2; | |
3081 return (iterative_hash_hashval_t (val2, val1) | |
3082 ^ iterative_hash_hashval_t (val1, val2)); | |
3083 } | |
3084 | |
3085 /* Compare two type pairs pointed-to by P1 and P2. */ | |
3086 | |
3087 static int | |
3088 type_pair_eq (const void *p1, const void *p2) | |
3089 { | |
3090 const struct type_pair_d *pair1 = (const struct type_pair_d *) p1; | |
3091 const struct type_pair_d *pair2 = (const struct type_pair_d *) p2; | |
3092 return ((pair1->uid1 == pair2->uid1 && pair1->uid2 == pair2->uid2) | |
3093 || (pair1->uid1 == pair2->uid2 && pair1->uid2 == pair2->uid1)); | |
3094 } | |
3095 | |
/* Lookup the pair of types T1 and T2 in *VISITED_P.  Insert a new
   entry if none existed.  The returned entry's SAME_P field is -2
   when freshly inserted (see struct type_pair_d).  */

static type_pair_t
lookup_type_pair (tree t1, tree t2, htab_t *visited_p, struct obstack *ob_p)
{
  struct type_pair_d pair;
  type_pair_t p;
  void **slot;

  /* Create the hash table and its backing obstack lazily on first use.  */
  if (*visited_p == NULL)
    {
      *visited_p = htab_create (251, type_pair_hash, type_pair_eq, NULL);
      gcc_obstack_init (ob_p);
    }

  /* Probe with a stack-allocated key; only allocate a permanent entry
     on the obstack when the pair is not yet in the table.  */
  pair.uid1 = TYPE_UID (t1);
  pair.uid2 = TYPE_UID (t2);
  slot = htab_find_slot (*visited_p, &pair, INSERT);

  if (*slot)
    p = *((type_pair_t *) slot);
  else
    {
      p = XOBNEW (ob_p, struct type_pair_d);
      p->uid1 = TYPE_UID (t1);
      p->uid2 = TYPE_UID (t2);
      p->same_p = -2;
      *slot = (void *) p;
    }

  return p;
}
3129 | |
3130 | |
3131 /* Return true if T1 and T2 have the same name. If FOR_COMPLETION_P is | |
3132 true then if any type has no name return false, otherwise return | |
3133 true if both types have no names. */ | |
3134 | |
3135 static bool | |
3136 compare_type_names_p (tree t1, tree t2, bool for_completion_p) | |
3137 { | |
3138 tree name1 = TYPE_NAME (t1); | |
3139 tree name2 = TYPE_NAME (t2); | |
3140 | |
3141 /* Consider anonymous types all unique for completion. */ | |
3142 if (for_completion_p | |
3143 && (!name1 || !name2)) | |
3144 return false; | |
3145 | |
3146 if (name1 && TREE_CODE (name1) == TYPE_DECL) | |
3147 { | |
3148 name1 = DECL_NAME (name1); | |
3149 if (for_completion_p | |
3150 && !name1) | |
3151 return false; | |
3152 } | |
3153 gcc_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE); | |
3154 | |
3155 if (name2 && TREE_CODE (name2) == TYPE_DECL) | |
3156 { | |
3157 name2 = DECL_NAME (name2); | |
3158 if (for_completion_p | |
3159 && !name2) | |
3160 return false; | |
3161 } | |
3162 gcc_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE); | |
3163 | |
3164 /* Identifiers can be compared with pointer equality rather | |
3165 than a string comparison. */ | |
3166 if (name1 == name2) | |
3167 return true; | |
3168 | |
3169 return false; | |
3170 } | |
3171 | |
/* Return true if the field decls F1 and F2 are at the same offset.  */

bool
compare_field_offset (tree f1, tree f2)
{
  /* When both fields use the same offset alignment, their byte/bit
     offset split is directly comparable.  */
  if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
    return (operand_equal_p (DECL_FIELD_OFFSET (f1),
			     DECL_FIELD_OFFSET (f2), 0)
	    && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
				   DECL_FIELD_BIT_OFFSET (f2)));

  /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
     should be, so handle differing ones specially by decomposing
     the offset into a byte and bit offset manually.  */
  if (host_integerp (DECL_FIELD_OFFSET (f1), 0)
      && host_integerp (DECL_FIELD_OFFSET (f2), 0))
    {
      unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
      unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
      /* Fold whole bytes of the bit offset into the byte offset so
	 the comparison is independent of DECL_OFFSET_ALIGN.  */
      bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
      byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
		      + bit_offset1 / BITS_PER_UNIT);
      bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
      byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
		      + bit_offset2 / BITS_PER_UNIT);
      if (byte_offset1 != byte_offset2)
	return false;
      return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
    }

  /* Offsets are not host-representable constants; conservatively
     treat the fields as differing.  */
  return false;
}
3204 | |
3205 /* Return 1 iff T1 and T2 are structurally identical. | |
3206 Otherwise, return 0. */ | |
3207 | |
3208 static int | |
3209 gimple_types_compatible_p (tree t1, tree t2) | |
3210 { | |
3211 type_pair_t p = NULL; | |
3212 | |
3213 /* Check first for the obvious case of pointer identity. */ | |
3214 if (t1 == t2) | |
3215 return 1; | |
3216 | |
3217 /* Check that we have two types to compare. */ | |
3218 if (t1 == NULL_TREE || t2 == NULL_TREE) | |
3219 return 0; | |
3220 | |
3221 /* Can't be the same type if the types don't have the same code. */ | |
3222 if (TREE_CODE (t1) != TREE_CODE (t2)) | |
3223 return 0; | |
3224 | |
3225 /* Can't be the same type if they have different CV qualifiers. */ | |
3226 if (TYPE_QUALS (t1) != TYPE_QUALS (t2)) | |
3227 return 0; | |
3228 | |
3229 /* Void types are always the same. */ | |
3230 if (TREE_CODE (t1) == VOID_TYPE) | |
3231 return 1; | |
3232 | |
3233 /* For numerical types do some simple checks before doing three | |
3234 hashtable queries. */ | |
3235 if (INTEGRAL_TYPE_P (t1) | |
3236 || SCALAR_FLOAT_TYPE_P (t1) | |
3237 || FIXED_POINT_TYPE_P (t1) | |
3238 || TREE_CODE (t1) == VECTOR_TYPE | |
3239 || TREE_CODE (t1) == COMPLEX_TYPE | |
3240 || TREE_CODE (t1) == OFFSET_TYPE) | |
3241 { | |
3242 /* Can't be the same type if they have different alignment, | |
3243 sign, precision or mode. */ | |
3244 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2) | |
3245 || TYPE_PRECISION (t1) != TYPE_PRECISION (t2) | |
3246 || TYPE_MODE (t1) != TYPE_MODE (t2) | |
3247 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)) | |
3248 return 0; | |
3249 | |
3250 if (TREE_CODE (t1) == INTEGER_TYPE | |
3251 && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2) | |
3252 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))) | |
3253 return 0; | |
3254 | |
3255 /* That's all we need to check for float and fixed-point types. */ | |
3256 if (SCALAR_FLOAT_TYPE_P (t1) | |
3257 || FIXED_POINT_TYPE_P (t1)) | |
3258 return 1; | |
3259 | |
3260 /* Perform cheap tail-recursion for vector and complex types. */ | |
3261 if (TREE_CODE (t1) == VECTOR_TYPE | |
3262 || TREE_CODE (t1) == COMPLEX_TYPE) | |
3263 return gimple_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)); | |
3264 | |
3265 /* For integral types fall thru to more complex checks. */ | |
3266 } | |
3267 | |
3268 /* If the hash values of t1 and t2 are different the types can't | |
3269 possibly be the same. This helps keeping the type-pair hashtable | |
3270 small, only tracking comparisons for hash collisions. */ | |
3271 if (gimple_type_hash (t1) != gimple_type_hash (t2)) | |
3272 return 0; | |
3273 | |
3274 /* If we've visited this type pair before (in the case of aggregates | |
3275 with self-referential types), and we made a decision, return it. */ | |
3276 p = lookup_type_pair (t1, t2, >c_visited, >c_ob); | |
3277 if (p->same_p == 0 || p->same_p == 1) | |
3278 { | |
3279 /* We have already decided whether T1 and T2 are the | |
3280 same, return the cached result. */ | |
3281 return p->same_p == 1; | |
3282 } | |
3283 else if (p->same_p == -1) | |
3284 { | |
3285 /* We are currently comparing this pair of types, assume | |
3286 that they are the same and let the caller decide. */ | |
3287 return 1; | |
3288 } | |
3289 | |
3290 gcc_assert (p->same_p == -2); | |
3291 | |
3292 /* Mark the (T1, T2) comparison in progress. */ | |
3293 p->same_p = -1; | |
3294 | |
3295 /* If their attributes are not the same they can't be the same type. */ | |
3296 if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2))) | |
3297 goto different_types; | |
3298 | |
3299 /* Do type-specific comparisons. */ | |
3300 switch (TREE_CODE (t1)) | |
3301 { | |
3302 case ARRAY_TYPE: | |
3303 /* Array types are the same if the element types are the same and | |
3304 the number of elements are the same. */ | |
3305 if (!gimple_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)) | |
3306 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2) | |
3307 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2)) | |
3308 goto different_types; | |
3309 else | |
3310 { | |
3311 tree i1 = TYPE_DOMAIN (t1); | |
3312 tree i2 = TYPE_DOMAIN (t2); | |
3313 | |
3314 /* For an incomplete external array, the type domain can be | |
3315 NULL_TREE. Check this condition also. */ | |
3316 if (i1 == NULL_TREE && i2 == NULL_TREE) | |
3317 goto same_types; | |
3318 else if (i1 == NULL_TREE || i2 == NULL_TREE) | |
3319 goto different_types; | |
3320 /* If for a complete array type the possibly gimplified sizes | |
3321 are different the types are different. */ | |
3322 else if (((TYPE_SIZE (i1) != NULL) ^ (TYPE_SIZE (i2) != NULL)) | |
3323 || (TYPE_SIZE (i1) | |
3324 && TYPE_SIZE (i2) | |
3325 && !operand_equal_p (TYPE_SIZE (i1), TYPE_SIZE (i2), 0))) | |
3326 goto different_types; | |
3327 else | |
3328 { | |
3329 tree min1 = TYPE_MIN_VALUE (i1); | |
3330 tree min2 = TYPE_MIN_VALUE (i2); | |
3331 tree max1 = TYPE_MAX_VALUE (i1); | |
3332 tree max2 = TYPE_MAX_VALUE (i2); | |
3333 | |
3334 /* The minimum/maximum values have to be the same. */ | |
3335 if ((min1 == min2 | |
3336 || (min1 && min2 && operand_equal_p (min1, min2, 0))) | |
3337 && (max1 == max2 | |
3338 || (max1 && max2 && operand_equal_p (max1, max2, 0)))) | |
3339 goto same_types; | |
3340 else | |
3341 goto different_types; | |
3342 } | |
3343 } | |
3344 | |
3345 case METHOD_TYPE: | |
3346 /* Method types should belong to the same class. */ | |
3347 if (!gimple_types_compatible_p (TYPE_METHOD_BASETYPE (t1), | |
3348 TYPE_METHOD_BASETYPE (t2))) | |
3349 goto different_types; | |
3350 | |
3351 /* Fallthru */ | |
3352 | |
3353 case FUNCTION_TYPE: | |
3354 /* Function types are the same if the return type and arguments types | |
3355 are the same. */ | |
3356 if (!gimple_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))) | |
3357 goto different_types; | |
3358 else | |
3359 { | |
3360 if (!targetm.comp_type_attributes (t1, t2)) | |
3361 goto different_types; | |
3362 | |
3363 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2)) | |
3364 goto same_types; | |
3365 else | |
3366 { | |
3367 tree parms1, parms2; | |
3368 | |
3369 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2); | |
3370 parms1 && parms2; | |
3371 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2)) | |
3372 { | |
3373 if (!gimple_types_compatible_p (TREE_VALUE (parms1), | |
3374 TREE_VALUE (parms2))) | |
3375 goto different_types; | |
3376 } | |
3377 | |
3378 if (parms1 || parms2) | |
3379 goto different_types; | |
3380 | |
3381 goto same_types; | |
3382 } | |
3383 } | |
3384 | |
3385 case OFFSET_TYPE: | |
3386 { | |
3387 if (!gimple_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)) | |
3388 || !gimple_types_compatible_p (TYPE_OFFSET_BASETYPE (t1), | |
3389 TYPE_OFFSET_BASETYPE (t2))) | |
3390 goto different_types; | |
3391 | |
3392 goto same_types; | |
3393 } | |
3394 | |
3395 case POINTER_TYPE: | |
3396 case REFERENCE_TYPE: | |
3397 { | |
3398 /* If the two pointers have different ref-all attributes, | |
3399 they can't be the same type. */ | |
3400 if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2)) | |
3401 goto different_types; | |
3402 | |
3403 /* If one pointer points to an incomplete type variant of | |
3404 the other pointed-to type they are the same. */ | |
3405 if (TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2)) | |
3406 && RECORD_OR_UNION_TYPE_P (TREE_TYPE (t1)) | |
3407 && (!COMPLETE_TYPE_P (TREE_TYPE (t1)) | |
3408 || !COMPLETE_TYPE_P (TREE_TYPE (t2))) | |
3409 && compare_type_names_p (TYPE_MAIN_VARIANT (TREE_TYPE (t1)), | |
3410 TYPE_MAIN_VARIANT (TREE_TYPE (t2)), true)) | |
3411 { | |
3412 /* Replace the pointed-to incomplete type with the | |
3413 complete one. */ | |
3414 if (COMPLETE_TYPE_P (TREE_TYPE (t2))) | |
3415 TREE_TYPE (t1) = TREE_TYPE (t2); | |
3416 else | |
3417 TREE_TYPE (t2) = TREE_TYPE (t1); | |
3418 goto same_types; | |
3419 } | |
3420 | |
3421 /* Otherwise, pointer and reference types are the same if the | |
3422 pointed-to types are the same. */ | |
3423 if (gimple_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))) | |
3424 goto same_types; | |
3425 | |
3426 goto different_types; | |
3427 } | |
3428 | |
3429 case INTEGER_TYPE: | |
3430 case BOOLEAN_TYPE: | |
3431 { | |
3432 tree min1 = TYPE_MIN_VALUE (t1); | |
3433 tree max1 = TYPE_MAX_VALUE (t1); | |
3434 tree min2 = TYPE_MIN_VALUE (t2); | |
3435 tree max2 = TYPE_MAX_VALUE (t2); | |
3436 bool min_equal_p = false; | |
3437 bool max_equal_p = false; | |
3438 | |
3439 /* If either type has a minimum value, the other type must | |
3440 have the same. */ | |
3441 if (min1 == NULL_TREE && min2 == NULL_TREE) | |
3442 min_equal_p = true; | |
3443 else if (min1 && min2 && operand_equal_p (min1, min2, 0)) | |
3444 min_equal_p = true; | |
3445 | |
3446 /* Likewise, if either type has a maximum value, the other | |
3447 type must have the same. */ | |
3448 if (max1 == NULL_TREE && max2 == NULL_TREE) | |
3449 max_equal_p = true; | |
3450 else if (max1 && max2 && operand_equal_p (max1, max2, 0)) | |
3451 max_equal_p = true; | |
3452 | |
3453 if (!min_equal_p || !max_equal_p) | |
3454 goto different_types; | |
3455 | |
3456 goto same_types; | |
3457 } | |
3458 | |
3459 case ENUMERAL_TYPE: | |
3460 { | |
3461 /* FIXME lto, we cannot check bounds on enumeral types because | |
3462 different front ends will produce different values. | |
3463 In C, enumeral types are integers, while in C++ each element | |
3464 will have its own symbolic value. We should decide how enums | |
3465 are to be represented in GIMPLE and have each front end lower | |
3466 to that. */ | |
3467 tree v1, v2; | |
3468 | |
3469 /* For enumeral types, all the values must be the same. */ | |
3470 if (TYPE_VALUES (t1) == TYPE_VALUES (t2)) | |
3471 goto same_types; | |
3472 | |
3473 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2); | |
3474 v1 && v2; | |
3475 v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2)) | |
3476 { | |
3477 tree c1 = TREE_VALUE (v1); | |
3478 tree c2 = TREE_VALUE (v2); | |
3479 | |
3480 if (TREE_CODE (c1) == CONST_DECL) | |
3481 c1 = DECL_INITIAL (c1); | |
3482 | |
3483 if (TREE_CODE (c2) == CONST_DECL) | |
3484 c2 = DECL_INITIAL (c2); | |
3485 | |
3486 if (tree_int_cst_equal (c1, c2) != 1) | |
3487 goto different_types; | |
3488 } | |
3489 | |
3490 /* If one enumeration has more values than the other, they | |
3491 are not the same. */ | |
3492 if (v1 || v2) | |
3493 goto different_types; | |
3494 | |
3495 goto same_types; | |
3496 } | |
3497 | |
3498 case RECORD_TYPE: | |
3499 case UNION_TYPE: | |
3500 case QUAL_UNION_TYPE: | |
3501 { | |
3502 tree f1, f2; | |
3503 | |
3504 /* If one type requires structural equality checks and the | |
3505 other doesn't, do not merge the types. */ | |
3506 if (TYPE_STRUCTURAL_EQUALITY_P (t1) | |
3507 != TYPE_STRUCTURAL_EQUALITY_P (t2)) | |
3508 goto different_types; | |
3509 | |
3510 /* The struct tags shall compare equal. */ | |
3511 if (!compare_type_names_p (TYPE_MAIN_VARIANT (t1), | |
3512 TYPE_MAIN_VARIANT (t2), false)) | |
3513 goto different_types; | |
3514 | |
3515 /* For aggregate types, all the fields must be the same. */ | |
3516 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2); | |
3517 f1 && f2; | |
3518 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2)) | |
3519 { | |
3520 /* The fields must have the same name, offset and type. */ | |
3521 if (DECL_NAME (f1) != DECL_NAME (f2) | |
3522 || DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2) | |
3523 || !compare_field_offset (f1, f2) | |
3524 || !gimple_types_compatible_p (TREE_TYPE (f1), | |
3525 TREE_TYPE (f2))) | |
3526 goto different_types; | |
3527 } | |
3528 | |
3529 /* If one aggregate has more fields than the other, they | |
3530 are not the same. */ | |
3531 if (f1 || f2) | |
3532 goto different_types; | |
3533 | |
3534 goto same_types; | |
3535 } | |
3536 | |
3537 default: | |
3538 gcc_unreachable (); | |
3539 } | |
3540 | |
3541 /* Common exit path for types that are not compatible. */ | |
3542 different_types: | |
3543 p->same_p = 0; | |
3544 return 0; | |
3545 | |
3546 /* Common exit path for types that are compatible. */ | |
3547 same_types: | |
3548 p->same_p = 1; | |
3549 return 1; | |
3550 } | |
3551 | |
3552 | |
3553 | |
3554 | |
/* Per pointer state for the SCC finding.  The on_sccstack flag
   is not strictly required, it is true when there is no hash value
   recorded for the type and false otherwise.  But querying that
   is slower.  */

struct sccs
{
  unsigned int dfsnum;	/* DFS pre-order number assigned to this node.  */
  unsigned int low;	/* Smallest dfsnum reachable from this node
			   (Tarjan's "lowlink").  */
  bool on_sccstack;	/* True while the node is on the SCC stack.  */
  hashval_t hash;	/* Hash value recorded for the type.  */
};

/* Next DFS number to hand out; reset to 1 for each walk started by
   gimple_type_hash.  */
static unsigned int next_dfs_num;
3569 | |
3570 static hashval_t | |
3571 iterative_hash_gimple_type (tree, hashval_t, VEC(tree, heap) **, | |
3572 struct pointer_map_t *, struct obstack *); | |
3573 | |
/* DFS visit the edge from the callers type with state *STATE to T.
   Update the callers type hash V with the hash for T if it is not part
   of the SCC containing the callers type and return it.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */

static hashval_t
visit (tree t, struct sccs *state, hashval_t v,
       VEC (tree, heap) **sccstack,
       struct pointer_map_t *sccstate,
       struct obstack *sccstate_obstack)
{
  struct sccs *cstate = NULL;
  void **slot;

  /* If there is a hash value recorded for this type then it can't
     possibly be part of our parent SCC.  Simply mix in its hash.  */
  if ((slot = pointer_map_contains (type_hash_cache, t)))
    return iterative_hash_hashval_t ((hashval_t) (size_t) *slot, v);

  if ((slot = pointer_map_contains (sccstate, t)) != NULL)
    cstate = (struct sccs *)*slot;
  if (!cstate)
    {
      hashval_t tem;
      /* Not yet visited.  DFS recurse.  */
      tem = iterative_hash_gimple_type (t, v,
					sccstack, sccstate, sccstate_obstack);
      /* NOTE(review): cstate is necessarily NULL in this branch, so
	 this re-lookup always executes; the recursion above is what
	 inserted T's state into SCCSTATE.  */
      if (!cstate)
	cstate = (struct sccs *)* pointer_map_contains (sccstate, t);
      state->low = MIN (state->low, cstate->low);
      /* If the type is no longer on the SCC stack and thus is not part
	 of the parents SCC mix in its hash value.  Otherwise we will
	 ignore the type for hashing purposes and return the unaltered
	 hash value.  */
      if (!cstate->on_sccstack)
	return tem;
    }
  /* Back-edge to an ancestor still on the stack: record it in LOW so
     the ancestor knows it heads an SCC containing the caller.  */
  if (cstate->dfsnum < state->dfsnum
      && cstate->on_sccstack)
    state->low = MIN (cstate->dfsnum, state->low);

  /* We are part of our parents SCC, skip this type during hashing
     and return the unaltered hash value.  */
  return v;
}
3619 | |
3620 /* Hash NAME with the previous hash value V and return it. */ | |
3621 | |
3622 static hashval_t | |
3623 iterative_hash_name (tree name, hashval_t v) | |
3624 { | |
3625 if (!name) | |
3626 return v; | |
3627 if (TREE_CODE (name) == TYPE_DECL) | |
3628 name = DECL_NAME (name); | |
3629 if (!name) | |
3630 return v; | |
3631 gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE); | |
3632 return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v); | |
3633 } | |
3634 | |
/* Returning a hash value for gimple type TYPE combined with VAL.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.

   To hash a type we end up hashing in types that are reachable.
   Through pointers we can end up with cycles which messes up the
   required property that we need to compute the same hash value
   for structurally equivalent types.  To avoid this we have to
   hash all types in a cycle (the SCC) in a commutative way.  The
   easiest way is to not mix in the hashes of the SCC members at
   all.  To make this work we have to delay setting the hash
   values of the SCC until it is complete.  */

static hashval_t
iterative_hash_gimple_type (tree type, hashval_t val,
			    VEC(tree, heap) **sccstack,
			    struct pointer_map_t *sccstate,
			    struct obstack *sccstate_obstack)
{
  hashval_t v;
  void **slot;
  struct sccs *state;

#ifdef ENABLE_CHECKING
  /* Not visited during this DFS walk nor during previous walks.  */
  gcc_assert (!pointer_map_contains (type_hash_cache, type)
	      && !pointer_map_contains (sccstate, type));
#endif
  /* Register TYPE with the SCC machinery (Tarjan-style DFS state).  */
  state = XOBNEW (sccstate_obstack, struct sccs);
  *pointer_map_insert (sccstate, type) = state;

  VEC_safe_push (tree, heap, *sccstack, type);
  state->dfsnum = next_dfs_num++;
  state->low = state->dfsnum;
  state->on_sccstack = true;

  /* Combine a few common features of types so that types are grouped into
     smaller sets; when searching for existing matching types to merge,
     only existing types having the same features as the new type will be
     checked.  */
  v = iterative_hash_hashval_t (TREE_CODE (type), 0);
  v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
  v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);

  /* Do not hash the types size as this will cause differences in
     hash values for the complete vs. the incomplete type variant.  */

  /* Incorporate common features of numerical types.  */
  if (INTEGRAL_TYPE_P (type)
      || SCALAR_FLOAT_TYPE_P (type)
      || FIXED_POINT_TYPE_P (type))
    {
      v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
      v = iterative_hash_hashval_t (TYPE_MODE (type), v);
      v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
    }

  /* For pointer and reference types, fold in information about the type
     pointed to but do not recurse into possibly incomplete types to
     avoid hash differences for complete vs. incomplete types.  */
  if (POINTER_TYPE_P (type))
    {
      if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (type)))
	{
	  v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
	  v = iterative_hash_name
		(TYPE_NAME (TYPE_MAIN_VARIANT (TREE_TYPE (type))), v);
	}
      else
	v = visit (TREE_TYPE (type), state, v,
		   sccstack, sccstate, sccstate_obstack);
    }

  /* For integer types hash the types min/max values and the string flag.  */
  if (TREE_CODE (type) == INTEGER_TYPE)
    {
      v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
      v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
    }

  /* For array types hash their domain and the string flag.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      && TYPE_DOMAIN (type))
    {
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
      v = visit (TYPE_DOMAIN (type), state, v,
		 sccstack, sccstate, sccstate_obstack);
    }

  /* Recurse for aggregates with a single element type.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      || TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    v = visit (TREE_TYPE (type), state, v,
	       sccstack, sccstate, sccstate_obstack);

  /* Incorporate function return and argument types.  */
  if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
    {
      unsigned na;
      tree p;

      /* For method types also incorporate their parent class.  */
      if (TREE_CODE (type) == METHOD_TYPE)
	v = visit (TYPE_METHOD_BASETYPE (type), state, v,
		   sccstack, sccstate, sccstate_obstack);

      v = visit (TREE_TYPE (type), state, v,
		 sccstack, sccstate, sccstate_obstack);

      for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
	{
	  v = visit (TREE_VALUE (p), state, v,
		     sccstack, sccstate, sccstate_obstack);
	  na++;
	}

      v = iterative_hash_hashval_t (na, v);
    }

  if (TREE_CODE (type) == RECORD_TYPE
      || TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      unsigned nf;
      tree f;

      v = iterative_hash_name (TYPE_NAME (TYPE_MAIN_VARIANT (type)), v);

      for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
	{
	  v = iterative_hash_name (DECL_NAME (f), v);
	  v = visit (TREE_TYPE (f), state, v,
		     sccstack, sccstate, sccstate_obstack);
	  nf++;
	}

      v = iterative_hash_hashval_t (nf, v);
    }

  /* Record hash for us.  */
  state->hash = v;

  /* See if we found an SCC.  */
  if (state->low == state->dfsnum)
    {
      tree x;

      /* Pop off the SCC and set its hash values.  TYPE is the SCC root,
	 so everything above it on the stack belongs to the same SCC.  */
      do
	{
	  struct sccs *cstate;
	  x = VEC_pop (tree, *sccstack);
	  gcc_assert (!pointer_map_contains (type_hash_cache, x));
	  cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
	  cstate->on_sccstack = false;
	  slot = pointer_map_insert (type_hash_cache, x);
	  *slot = (void *) (size_t) cstate->hash;
	}
      while (x != type);
    }

  return iterative_hash_hashval_t (v, val);
}
3799 | |
3800 | |
/* Returns a hash value for P (assumed to be a type).  The hash value
   is computed using some distinguishing features of the type.  Note
   that we cannot use pointer hashing here as we may be dealing with
   two distinct instances of the same type.

   This function should produce the same hash value for two compatible
   types according to gimple_types_compatible_p.  */

static hashval_t
gimple_type_hash (const void *p)
{
  const_tree t = (const_tree) p;
  VEC(tree, heap) *sccstack = NULL;
  struct pointer_map_t *sccstate;
  struct obstack sccstate_obstack;
  hashval_t val;
  void **slot;

  /* Create the persistent per-type hash cache lazily.  */
  if (type_hash_cache == NULL)
    type_hash_cache = pointer_map_create ();

  /* Cache hit: the type was already hashed by a previous walk.  */
  if ((slot = pointer_map_contains (type_hash_cache, p)) != NULL)
    return iterative_hash_hashval_t ((hashval_t) (size_t) *slot, 0);

  /* Perform a DFS walk and pre-hash all reachable types.  */
  next_dfs_num = 1;
  sccstate = pointer_map_create ();
  gcc_obstack_init (&sccstate_obstack);
  val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
				    &sccstack, sccstate, &sccstate_obstack);
  VEC_free (tree, heap, sccstack);
  pointer_map_destroy (sccstate);
  obstack_free (&sccstate_obstack, NULL);

  return val;
}
3837 | |
3838 | |
3839 /* Returns nonzero if P1 and P2 are equal. */ | |
3840 | |
3841 static int | |
3842 gimple_type_eq (const void *p1, const void *p2) | |
3843 { | |
3844 const_tree t1 = (const_tree) p1; | |
3845 const_tree t2 = (const_tree) p2; | |
3846 return gimple_types_compatible_p (CONST_CAST_TREE (t1), CONST_CAST_TREE (t2)); | |
3847 } | |
3848 | |
3849 | |
/* Register type T in the global type table gimple_types.
   If another type T', compatible with T, already existed in
   gimple_types then return T', otherwise return T.  This is used by
   LTO to merge identical types read from different TUs.  */

tree
gimple_register_type (tree t)
{
  void **slot;

  gcc_assert (TYPE_P (t));

  /* Always register the main variant first.  This is important so we
     pick up the non-typedef variants as canonical, otherwise we'll end
     up taking typedef ids for structure tags during comparison.  */
  if (TYPE_MAIN_VARIANT (t) != t)
    gimple_register_type (TYPE_MAIN_VARIANT (t));

  /* Lazily create the table; gimple_type_hash/gimple_type_eq compare
     by structure, not by pointer identity.  */
  if (gimple_types == NULL)
    gimple_types = htab_create (16381, gimple_type_hash, gimple_type_eq, 0);

  slot = htab_find_slot (gimple_types, t, INSERT);
  if (*slot
      && *(tree *)slot != t)
    {
      /* A compatible type was registered before; merge T into it.  */
      tree new_type = (tree) *((tree *) slot);

      /* Do not merge types with different addressability.  */
      gcc_assert (TREE_ADDRESSABLE (t) == TREE_ADDRESSABLE (new_type));

      /* If t is not its main variant then make t unreachable from its
	 main variant list.  Otherwise we'd queue up a lot of duplicates
	 there.  */
      if (t != TYPE_MAIN_VARIANT (t))
	{
	  /* Find T's predecessor on the singly-linked variant list and
	     splice T out.  */
	  tree tem = TYPE_MAIN_VARIANT (t);
	  while (tem && TYPE_NEXT_VARIANT (tem) != t)
	    tem = TYPE_NEXT_VARIANT (tem);
	  if (tem)
	    TYPE_NEXT_VARIANT (tem) = TYPE_NEXT_VARIANT (t);
	  TYPE_NEXT_VARIANT (t) = NULL_TREE;
	}

      /* If we are a pointer then remove us from the pointer-to or
	 reference-to chain.  Otherwise we'd queue up a lot of duplicates
	 there.  */
      if (TREE_CODE (t) == POINTER_TYPE)
	{
	  if (TYPE_POINTER_TO (TREE_TYPE (t)) == t)
	    TYPE_POINTER_TO (TREE_TYPE (t)) = TYPE_NEXT_PTR_TO (t);
	  else
	    {
	      /* T is in the middle of the pointer-to chain; find its
		 predecessor and splice T out.  */
	      tree tem = TYPE_POINTER_TO (TREE_TYPE (t));
	      while (tem && TYPE_NEXT_PTR_TO (tem) != t)
		tem = TYPE_NEXT_PTR_TO (tem);
	      if (tem)
		TYPE_NEXT_PTR_TO (tem) = TYPE_NEXT_PTR_TO (t);
	    }
	  TYPE_NEXT_PTR_TO (t) = NULL_TREE;
	}
      else if (TREE_CODE (t) == REFERENCE_TYPE)
	{
	  if (TYPE_REFERENCE_TO (TREE_TYPE (t)) == t)
	    TYPE_REFERENCE_TO (TREE_TYPE (t)) = TYPE_NEXT_REF_TO (t);
	  else
	    {
	      /* Likewise for the reference-to chain.  */
	      tree tem = TYPE_REFERENCE_TO (TREE_TYPE (t));
	      while (tem && TYPE_NEXT_REF_TO (tem) != t)
		tem = TYPE_NEXT_REF_TO (tem);
	      if (tem)
		TYPE_NEXT_REF_TO (tem) = TYPE_NEXT_REF_TO (t);
	    }
	  TYPE_NEXT_REF_TO (t) = NULL_TREE;
	}

      t = new_type;
    }
  else
    /* T is the first representative of its class; record it.  */
    *slot = (void *) t;

  return t;
}
3932 | |
3933 | |
/* Show statistics on references to the global type table gimple_types.  */

void
print_gimple_types_stats (void)
{
  /* The LTO type-merging table (see gimple_register_type).  */
  if (gimple_types)
    fprintf (stderr, "GIMPLE type table: size %ld, %ld elements, "
	     "%ld searches, %ld collisions (ratio: %f)\n",
	     (long) htab_size (gimple_types),
	     (long) htab_elements (gimple_types),
	     (long) gimple_types->searches,
	     (long) gimple_types->collisions,
	     htab_collisions (gimple_types));
  else
    fprintf (stderr, "GIMPLE type table is empty\n");
  /* The memoization table used by the type compatibility check.  */
  if (gtc_visited)
    fprintf (stderr, "GIMPLE type comparison table: size %ld, %ld "
	     "elements, %ld searches, %ld collisions (ratio: %f)\n",
	     (long) htab_size (gtc_visited),
	     (long) htab_elements (gtc_visited),
	     (long) gtc_visited->searches,
	     (long) gtc_visited->collisions,
	     htab_collisions (gtc_visited));
  else
    fprintf (stderr, "GIMPLE type comparison table is empty\n");
}
3960 | |
/* Free the gimple type hashtables used for LTO type merging.  */

void
free_gimple_type_tables (void)
{
  /* Last chance to print stats for the tables.  */
  if (flag_lto_report)
    print_gimple_types_stats ();

  if (gimple_types)
    {
      htab_delete (gimple_types);
      gimple_types = NULL;
    }
  if (type_hash_cache)
    {
      pointer_map_destroy (type_hash_cache);
      type_hash_cache = NULL;
    }
  if (gtc_visited)
    {
      /* The comparison table's entries live on the gtc_ob obstack, so
	 release the obstack together with the table.  */
      htab_delete (gtc_visited);
      obstack_free (&gtc_ob, NULL);
      gtc_visited = NULL;
    }
}
3987 | |
3988 | |
/* Return a type the same as TYPE except unsigned or
   signed according to UNSIGNEDP.  */

static tree
gimple_signed_or_unsigned_type (bool unsignedp, tree type)
{
  tree type1;

  /* Compare against the main variant so that qualified and typedef
     variants of the standard nodes are recognized too.  */
  type1 = TYPE_MAIN_VARIANT (type);
  if (type1 == signed_char_type_node
      || type1 == char_type_node
      || type1 == unsigned_char_type_node)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (type1 == integer_type_node || type1 == unsigned_type_node)
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (type1 == long_long_integer_type_node
      || type1 == long_long_unsigned_type_node)
    return unsignedp
           ? long_long_unsigned_type_node
	   : long_long_integer_type_node;
#if HOST_BITS_PER_WIDE_INT >= 64
  if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  /* Mode-named integer nodes (QI/HI/SI/DI).  */
  if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

  /* Helper macros covering the fixed-point types and their saturating
     variants; each expands to the analogous if/return ladder above.  */
#define GIMPLE_FIXED_TYPES(NAME)	    \
  if (type1 == short_ ## NAME ## _type_node \
      || type1 == unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_short_ ## NAME ## _type_node \
		     : short_ ## NAME ## _type_node; \
  if (type1 == NAME ## _type_node \
      || type1 == unsigned_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_ ## NAME ## _type_node \
		     : NAME ## _type_node; \
  if (type1 == long_ ## NAME ## _type_node \
      || type1 == unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_ ## NAME ## _type_node \
		     : long_ ## NAME ## _type_node; \
  if (type1 == long_long_ ## NAME ## _type_node \
      || type1 == unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
		     : long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES(NAME) \
  if (type1 == NAME ## _type_node \
      || type1 == u ## NAME ## _type_node) \
    return unsignedp ? u ## NAME ## _type_node \
		     : NAME ## _type_node;

#define GIMPLE_FIXED_TYPES_SAT(NAME) \
  if (type1 == sat_ ## short_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
		     : sat_ ## short_ ## NAME ## _type_node; \
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
		     : sat_ ## long_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
		     : sat_ ## long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES_SAT(NAME)	\
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## u ## NAME ## _type_node) \
    return unsignedp ? sat_ ## u ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node;

  GIMPLE_FIXED_TYPES (fract);
  GIMPLE_FIXED_TYPES_SAT (fract);
  GIMPLE_FIXED_TYPES (accum);
  GIMPLE_FIXED_TYPES_SAT (accum);

  GIMPLE_FIXED_MODE_TYPES (qq);
  GIMPLE_FIXED_MODE_TYPES (hq);
  GIMPLE_FIXED_MODE_TYPES (sq);
  GIMPLE_FIXED_MODE_TYPES (dq);
  GIMPLE_FIXED_MODE_TYPES (tq);
  GIMPLE_FIXED_MODE_TYPES_SAT (qq);
  GIMPLE_FIXED_MODE_TYPES_SAT (hq);
  GIMPLE_FIXED_MODE_TYPES_SAT (sq);
  GIMPLE_FIXED_MODE_TYPES_SAT (dq);
  GIMPLE_FIXED_MODE_TYPES_SAT (tq);
  GIMPLE_FIXED_MODE_TYPES (ha);
  GIMPLE_FIXED_MODE_TYPES (sa);
  GIMPLE_FIXED_MODE_TYPES (da);
  GIMPLE_FIXED_MODE_TYPES (ta);
  GIMPLE_FIXED_MODE_TYPES_SAT (ha);
  GIMPLE_FIXED_MODE_TYPES_SAT (sa);
  GIMPLE_FIXED_MODE_TYPES_SAT (da);
  GIMPLE_FIXED_MODE_TYPES_SAT (ta);

  /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
     the precision; they have precision set to match their range, but
     may use a wider mode to match an ABI.  If we change modes, we may
     wind up with bad conversions.  For INTEGER_TYPEs in C, must check
     the precision as well, so as to yield correct results for
     bit-field types.  C++ does not have these separate bit-field
     types, and producing a signed or unsigned variant of an
     ENUMERAL_TYPE may cause other problems as well.  */
  if (!INTEGRAL_TYPE_P (type)
      || TYPE_UNSIGNED (type) == unsignedp)
    return type;

  /* Second pass: match on mode and precision rather than node
     identity, so enums and bit-field types find a suitable node.  */
#define TYPE_OK(node)							    \
  (TYPE_MODE (type) == TYPE_MODE (node)					    \
   && TYPE_PRECISION (type) == TYPE_PRECISION (node))
  if (TYPE_OK (signed_char_type_node))
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (TYPE_OK (integer_type_node))
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (TYPE_OK (short_integer_type_node))
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (TYPE_OK (long_integer_type_node))
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (TYPE_OK (long_long_integer_type_node))
    return (unsignedp
	    ? long_long_unsigned_type_node
	    : long_long_integer_type_node);

#if HOST_BITS_PER_WIDE_INT >= 64
  if (TYPE_OK (intTI_type_node))
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (TYPE_OK (intDI_type_node))
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (TYPE_OK (intSI_type_node))
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (TYPE_OK (intHI_type_node))
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (TYPE_OK (intQI_type_node))
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

#undef GIMPLE_FIXED_TYPES
#undef GIMPLE_FIXED_MODE_TYPES
#undef GIMPLE_FIXED_TYPES_SAT
#undef GIMPLE_FIXED_MODE_TYPES_SAT
#undef TYPE_OK

  /* No standard node matched; build (or reuse) a nonstandard integer
     type of the same precision with the requested signedness.  */
  return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
}
4147 | |
4148 | |
4149 /* Return an unsigned type the same as TYPE in other respects. */ | |
4150 | |
4151 tree | |
4152 gimple_unsigned_type (tree type) | |
4153 { | |
4154 return gimple_signed_or_unsigned_type (true, type); | |
4155 } | |
4156 | |
4157 | |
4158 /* Return a signed type the same as TYPE in other respects. */ | |
4159 | |
4160 tree | |
4161 gimple_signed_type (tree type) | |
4162 { | |
4163 return gimple_signed_or_unsigned_type (false, type); | |
4164 } | |
4165 | |
4166 | |
4167 /* Return the typed-based alias set for T, which may be an expression | |
4168 or a type. Return -1 if we don't do anything special. */ | |
4169 | |
4170 alias_set_type | |
4171 gimple_get_alias_set (tree t) | |
4172 { | |
4173 tree u; | |
4174 | |
4175 /* Permit type-punning when accessing a union, provided the access | |
4176 is directly through the union. For example, this code does not | |
4177 permit taking the address of a union member and then storing | |
4178 through it. Even the type-punning allowed here is a GCC | |
4179 extension, albeit a common and useful one; the C standard says | |
4180 that such accesses have implementation-defined behavior. */ | |
4181 for (u = t; | |
4182 TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF; | |
4183 u = TREE_OPERAND (u, 0)) | |
4184 if (TREE_CODE (u) == COMPONENT_REF | |
4185 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE) | |
4186 return 0; | |
4187 | |
4188 /* That's all the expressions we handle specially. */ | |
4189 if (!TYPE_P (t)) | |
4190 return -1; | |
4191 | |
4192 /* For convenience, follow the C standard when dealing with | |
4193 character types. Any object may be accessed via an lvalue that | |
4194 has character type. */ | |
4195 if (t == char_type_node | |
4196 || t == signed_char_type_node | |
4197 || t == unsigned_char_type_node) | |
4198 return 0; | |
4199 | |
4200 /* Allow aliasing between signed and unsigned variants of the same | |
4201 type. We treat the signed variant as canonical. */ | |
4202 if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t)) | |
4203 { | |
4204 tree t1 = gimple_signed_type (t); | |
4205 | |
4206 /* t1 == t can happen for boolean nodes which are always unsigned. */ | |
4207 if (t1 != t) | |
4208 return get_alias_set (t1); | |
4209 } | |
4210 else if (POINTER_TYPE_P (t)) | |
4211 { | |
4212 /* From the common C and C++ langhook implementation: | |
4213 | |
4214 Unfortunately, there is no canonical form of a pointer type. | |
4215 In particular, if we have `typedef int I', then `int *', and | |
4216 `I *' are different types. So, we have to pick a canonical | |
4217 representative. We do this below. | |
4218 | |
4219 Technically, this approach is actually more conservative that | |
4220 it needs to be. In particular, `const int *' and `int *' | |
4221 should be in different alias sets, according to the C and C++ | |
4222 standard, since their types are not the same, and so, | |
4223 technically, an `int **' and `const int **' cannot point at | |
4224 the same thing. | |
4225 | |
4226 But, the standard is wrong. In particular, this code is | |
4227 legal C++: | |
4228 | |
4229 int *ip; | |
4230 int **ipp = &ip; | |
4231 const int* const* cipp = ipp; | |
4232 And, it doesn't make sense for that to be legal unless you | |
4233 can dereference IPP and CIPP. So, we ignore cv-qualifiers on | |
4234 the pointed-to types. This issue has been reported to the | |
4235 C++ committee. */ | |
4236 | |
4237 /* In addition to the above canonicalization issue with LTO | |
4238 we should also canonicalize `T (*)[]' to `T *' avoiding | |
4239 alias issues with pointer-to element types and pointer-to | |
4240 array types. | |
4241 | |
4242 Likewise we need to deal with the situation of incomplete | |
4243 pointed-to types and make `*(struct X **)&a' and | |
4244 `*(struct X {} **)&a' alias. Otherwise we will have to | |
4245 guarantee that all pointer-to incomplete type variants | |
4246 will be replaced by pointer-to complete type variants if | |
4247 they are available. | |
4248 | |
4249 With LTO the convenient situation of using `void *' to | |
4250 access and store any pointer type will also become | |
4251 more apparent (and `void *' is just another pointer-to | |
4252 incomplete type). Assigning alias-set zero to `void *' | |
4253 and all pointer-to incomplete types is a not appealing | |
4254 solution. Assigning an effective alias-set zero only | |
4255 affecting pointers might be - by recording proper subset | |
4256 relationships of all pointer alias-sets. | |
4257 | |
4258 Pointer-to function types are another grey area which | |
4259 needs caution. Globbing them all into one alias-set | |
4260 or the above effective zero set would work. */ | |
4261 | |
4262 /* For now just assign the same alias-set to all pointers. | |
4263 That's simple and avoids all the above problems. */ | |
4264 if (t != ptr_type_node) | |
4265 return get_alias_set (ptr_type_node); | |
4266 } | |
4267 | |
4268 return -1; | |
4269 } | |
4270 | |
4271 | |
/* Data structure used to count the number of dereferences to PTR
   inside an expression.  */
struct count_ptr_d
{
  /* The pointer SSA name whose dereferences are being counted.  */
  tree ptr;
  /* Number of stores through PTR found so far.  */
  unsigned num_stores;
  /* Number of loads through PTR found so far.  */
  unsigned num_loads;
};
4280 | |
4281 /* Helper for count_uses_and_derefs. Called by walk_tree to look for | |
4282 (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA. */ | |
4283 | |
4284 static tree | |
4285 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data) | |
4286 { | |
4287 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data; | |
4288 struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info; | |
4289 | |
4290 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld, | |
4291 pointer 'ptr' is *not* dereferenced, it is simply used to compute | |
4292 the address of 'fld' as 'ptr + offsetof(fld)'. */ | |
4293 if (TREE_CODE (*tp) == ADDR_EXPR) | |
4294 { | |
4295 *walk_subtrees = 0; | |
4296 return NULL_TREE; | |
4297 } | |
4298 | |
4299 if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr) | |
4300 { | |
4301 if (wi_p->is_lhs) | |
4302 count_p->num_stores++; | |
4303 else | |
4304 count_p->num_loads++; | |
4305 } | |
4306 | |
4307 return NULL_TREE; | |
4308 } | |
4309 | |
4310 /* Count the number of direct and indirect uses for pointer PTR in | |
4311 statement STMT. The number of direct uses is stored in | |
4312 *NUM_USES_P. Indirect references are counted separately depending | |
4313 on whether they are store or load operations. The counts are | |
4314 stored in *NUM_STORES_P and *NUM_LOADS_P. */ | |
4315 | |
4316 void | |
4317 count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p, | |
4318 unsigned *num_loads_p, unsigned *num_stores_p) | |
4319 { | |
4320 ssa_op_iter i; | |
4321 tree use; | |
4322 | |
4323 *num_uses_p = 0; | |
4324 *num_loads_p = 0; | |
4325 *num_stores_p = 0; | |
4326 | |
4327 /* Find out the total number of uses of PTR in STMT. */ | |
4328 FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE) | |
4329 if (use == ptr) | |
4330 (*num_uses_p)++; | |
4331 | |
4332 /* Now count the number of indirect references to PTR. This is | |
4333 truly awful, but we don't have much choice. There are no parent | |
4334 pointers inside INDIRECT_REFs, so an expression like | |
4335 '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to | |
4336 find all the indirect and direct uses of x_1 inside. The only | |
4337 shortcut we can take is the fact that GIMPLE only allows | |
4338 INDIRECT_REFs inside the expressions below. */ | |
4339 if (is_gimple_assign (stmt) | |
4340 || gimple_code (stmt) == GIMPLE_RETURN | |
4341 || gimple_code (stmt) == GIMPLE_ASM | |
4342 || is_gimple_call (stmt)) | |
4343 { | |
4344 struct walk_stmt_info wi; | |
4345 struct count_ptr_d count; | |
4346 | |
4347 count.ptr = ptr; | |
4348 count.num_stores = 0; | |
4349 count.num_loads = 0; | |
4350 | |
4351 memset (&wi, 0, sizeof (wi)); | |
4352 wi.info = &count; | |
4353 walk_gimple_op (stmt, count_ptr_derefs, &wi); | |
4354 | |
4355 *num_stores_p = count.num_stores; | |
4356 *num_loads_p = count.num_loads; | |
4357 } | |
4358 | |
4359 gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p); | |
4360 } | |
4361 | |
4362 /* From a tree operand OP return the base of a load or store operation | |
4363 or NULL_TREE if OP is not a load or a store. */ | |
4364 | |
4365 static tree | |
4366 get_base_loadstore (tree op) | |
4367 { | |
4368 while (handled_component_p (op)) | |
4369 op = TREE_OPERAND (op, 0); | |
4370 if (DECL_P (op) | |
4371 || INDIRECT_REF_P (op) | |
4372 || TREE_CODE (op) == TARGET_MEM_REF) | |
4373 return op; | |
4374 return NULL_TREE; | |
4375 } | |
4376 | |
/* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
   VISIT_ADDR if non-NULL on loads, store and address-taken operands
   passing the STMT, the base of the operand and DATA to it.  The base
   will be either a decl, an indirect reference (including TARGET_MEM_REF)
   or the argument of an address expression.
   Returns the results of these callbacks or'ed.  */

bool
walk_stmt_load_store_addr_ops (gimple stmt, void *data,
			       bool (*visit_load)(gimple, tree, void *),
			       bool (*visit_store)(gimple, tree, void *),
			       bool (*visit_addr)(gimple, tree, void *))
{
  bool ret = false;
  unsigned i;
  /* Case 1: a single-rhs assignment -- the only place where loads and
     stores proper can occur.  */
  if (gimple_assign_single_p (stmt))
    {
      tree lhs, rhs;
      if (visit_store)
	{
	  lhs = get_base_loadstore (gimple_assign_lhs (stmt));
	  if (lhs)
	    ret |= visit_store (stmt, lhs, data);
	}
      /* Strip component refs off the rhs to expose an ADDR_EXPR or
	 memory reference at the base.  */
      rhs = gimple_assign_rhs1 (stmt);
      while (handled_component_p (rhs))
	rhs = TREE_OPERAND (rhs, 0);
      if (visit_addr)
	{
	  /* Addresses can be taken directly (&x), inside a
	     TARGET_MEM_REF base, or inside an OBJ_TYPE_REF object.  */
	  if (TREE_CODE (rhs) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
	  else if (TREE_CODE (rhs) == TARGET_MEM_REF
		   && TMR_BASE (rhs) != NULL_TREE
		   && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
	  else if (TREE_CODE (rhs) == OBJ_TYPE_REF
		   && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
						   0), data);
	  /* The lhs can also contain an address inside a
	     TARGET_MEM_REF base.  */
	  lhs = gimple_assign_lhs (stmt);
	  if (TREE_CODE (lhs) == TARGET_MEM_REF
	      && TMR_BASE (lhs) != NULL_TREE
	      && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
	}
      if (visit_load)
	{
	  rhs = get_base_loadstore (rhs);
	  if (rhs)
	    ret |= visit_load (stmt, rhs, data);
	}
    }
  /* Case 2: multi-operand assignments and conditions can only take
     addresses among their operands, never loads or stores.  */
  else if (visit_addr
	   && (is_gimple_assign (stmt)
	       || gimple_code (stmt) == GIMPLE_COND))
    {
      for (i = 0; i < gimple_num_ops (stmt); ++i)
	if (gimple_op (stmt, i)
	    && TREE_CODE (gimple_op (stmt, i)) == ADDR_EXPR)
	  ret |= visit_addr (stmt, TREE_OPERAND (gimple_op (stmt, i), 0), data);
    }
  /* Case 3: calls -- stores through the lhs, loads/addresses in the
     arguments, the static chain and the return slot.  */
  else if (is_gimple_call (stmt))
    {
      if (visit_store)
	{
	  tree lhs = gimple_call_lhs (stmt);
	  if (lhs)
	    {
	      lhs = get_base_loadstore (lhs);
	      if (lhs)
		ret |= visit_store (stmt, lhs, data);
	    }
	}
      if (visit_load || visit_addr)
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree rhs = gimple_call_arg (stmt, i);
	    if (visit_addr
		&& TREE_CODE (rhs) == ADDR_EXPR)
	      ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
	    else if (visit_load)
	      {
		rhs = get_base_loadstore (rhs);
		if (rhs)
		  ret |= visit_load (stmt, rhs, data);
	      }
	  }
      if (visit_addr
	  && gimple_call_chain (stmt)
	  && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
	ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
			   data);
      /* With return-slot optimization the callee stores directly into
	 the lhs object, which implicitly takes its address.  */
      if (visit_addr
	  && gimple_call_return_slot_opt_p (stmt)
	  && gimple_call_lhs (stmt) != NULL_TREE
	  && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
	ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
    }
  /* Case 4: asms -- outputs store, inputs load; memory constraints
     ("m" without "r") additionally imply an address being taken.  */
  else if (gimple_code (stmt) == GIMPLE_ASM)
    {
      unsigned noutputs;
      const char *constraint;
      const char **oconstraints;
      bool allows_mem, allows_reg, is_inout;
      noutputs = gimple_asm_noutputs (stmt);
      oconstraints = XALLOCAVEC (const char *, noutputs);
      if (visit_store || visit_addr)
	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = get_base_loadstore (TREE_VALUE (link));
	    if (op && visit_store)
	      ret |= visit_store (stmt, op, data);
	    if (visit_addr)
	      {
		constraint = TREE_STRING_POINTER
		    (TREE_VALUE (TREE_PURPOSE (link)));
		oconstraints[i] = constraint;
		parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
					 &allows_reg, &is_inout);
		if (op && !allows_reg && allows_mem)
		  ret |= visit_addr (stmt, op, data);
	      }
	  }
      if (visit_load || visit_addr)
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    if (visit_addr
		&& TREE_CODE (op) == ADDR_EXPR)
	      ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	    else if (visit_load || visit_addr)
	      {
		op = get_base_loadstore (op);
		if (op)
		  {
		    if (visit_load)
		      ret |= visit_load (stmt, op, data);
		    if (visit_addr)
		      {
			constraint = TREE_STRING_POINTER
			    (TREE_VALUE (TREE_PURPOSE (link)));
			parse_input_constraint (&constraint, 0, 0, noutputs,
						0, oconstraints,
						&allows_mem, &allows_reg);
			if (!allows_reg && allows_mem)
			  ret |= visit_addr (stmt, op, data);
		      }
		  }
	      }
	  }
    }
  /* Case 5: returns -- the return value may take an address or load.  */
  else if (gimple_code (stmt) == GIMPLE_RETURN)
    {
      tree op = gimple_return_retval (stmt);
      if (op)
	{
	  if (visit_addr
	      && TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	  else if (visit_load)
	    {
	      op = get_base_loadstore (op);
	      if (op)
		ret |= visit_load (stmt, op, data);
	    }
	}
    }
  /* Case 6: PHIs -- arguments can only be values or addresses.  */
  else if (visit_addr
	   && gimple_code (stmt) == GIMPLE_PHI)
    {
      for (i = 0; i < gimple_phi_num_args (stmt); ++i)
	{
	  tree op = PHI_ARG_DEF (stmt, i);
	  if (TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	}
    }

  return ret;
}
4559 | |
4560 /* Like walk_stmt_load_store_addr_ops but with NULL visit_addr. IPA-CP | |
4561 should make a faster clone for this case. */ | |
4562 | |
4563 bool | |
4564 walk_stmt_load_store_ops (gimple stmt, void *data, | |
4565 bool (*visit_load)(gimple, tree, void *), | |
4566 bool (*visit_store)(gimple, tree, void *)) | |
4567 { | |
4568 return walk_stmt_load_store_addr_ops (stmt, data, | |
4569 visit_load, visit_store, NULL); | |
4570 } | |
4571 | |
4572 /* Helper for gimple_ior_addresses_taken_1. */ | |
4573 | |
4574 static bool | |
4575 gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED, | |
4576 tree addr, void *data) | |
4577 { | |
4578 bitmap addresses_taken = (bitmap)data; | |
4579 while (handled_component_p (addr)) | |
4580 addr = TREE_OPERAND (addr, 0); | |
4581 if (DECL_P (addr)) | |
4582 { | |
4583 bitmap_set_bit (addresses_taken, DECL_UID (addr)); | |
4584 return true; | |
4585 } | |
4586 return false; | |
4587 } | |
4588 | |
4589 /* Set the bit for the uid of all decls that have their address taken | |
4590 in STMT in the ADDRESSES_TAKEN bitmap. Returns true if there | |
4591 were any in this stmt. */ | |
4592 | |
4593 bool | |
4594 gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt) | |
4595 { | |
4596 return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL, | |
4597 gimple_ior_addresses_taken_1); | |
4598 } | |
4599 | |
4600 | |
4601 /* Return a printable name for symbol DECL. */ | |
4602 | |
4603 const char * | |
4604 gimple_decl_printable_name (tree decl, int verbosity) | |
4605 { | |
4606 gcc_assert (decl && DECL_NAME (decl)); | |
4607 | |
4608 if (DECL_ASSEMBLER_NAME_SET_P (decl)) | |
4609 { | |
4610 const char *str, *mangled_str; | |
4611 int dmgl_opts = DMGL_NO_OPTS; | |
4612 | |
4613 if (verbosity >= 2) | |
4614 { | |
4615 dmgl_opts = DMGL_VERBOSE | |
4616 | DMGL_ANSI | |
4617 | DMGL_GNU_V3 | |
4618 | DMGL_RET_POSTFIX; | |
4619 if (TREE_CODE (decl) == FUNCTION_DECL) | |
4620 dmgl_opts |= DMGL_PARAMS; | |
4621 } | |
4622 | |
4623 mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)); | |
4624 str = cplus_demangle_v3 (mangled_str, dmgl_opts); | |
4625 return (str) ? str : mangled_str; | |
4626 } | |
4627 | |
4628 return IDENTIFIER_POINTER (DECL_NAME (decl)); | |
4629 } | |
4630 | |
4631 | |
/* Fold a OBJ_TYPE_REF expression to the address of a function.
   KNOWN_TYPE carries the true type of OBJ_TYPE_REF_OBJECT(REF).  Adapted
   from cp_fold_obj_type_ref, but it tolerates types with no binfo
   data.  */

tree
gimple_fold_obj_type_ref (tree ref, tree known_type)
{
  HOST_WIDE_INT index;
  HOST_WIDE_INT i;
  tree v;
  tree fndecl;

  /* Without binfo data we cannot look up the virtual method.  */
  if (TYPE_BINFO (known_type) == NULL_TREE)
    return NULL_TREE;

  /* Step through the virtuals list to the slot selected by the
     OBJ_TYPE_REF token.  On targets using function descriptors each
     method occupies TARGET_VTABLE_USES_DESCRIPTORS consecutive slots,
     so advance by that stride.  Assumes the token indexes a valid
     slot, i.e. V does not run out before I reaches INDEX.  */
  v = BINFO_VIRTUALS (TYPE_BINFO (known_type));
  index = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
  i = 0;
  while (i != index)
    {
      i += (TARGET_VTABLE_USES_DESCRIPTORS
	    ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
      v = TREE_CHAIN (v);
    }

  fndecl = TREE_VALUE (v);

#ifdef ENABLE_CHECKING
  /* The slot we landed on must agree with the decl's own vtable
     index.  */
  gcc_assert (tree_int_cst_equal (OBJ_TYPE_REF_TOKEN (ref),
				  DECL_VINDEX (fndecl)));
#endif

  /* Mark FNDECL as reachable through a vtable so the cgraph keeps it.  */
  cgraph_node (fndecl)->local.vtable_method = true;

  return build_fold_addr_expr (fndecl);
}
4669 | |
3194 #include "gt-gimple.h" | 4670 #include "gt-gimple.h" |