comparison gcc/gimple.c @ 57:326d9e06c2e3

modify c-parser.c
author ryoma <e075725@ie.u-ryukyu.ac.jp>
date Mon, 15 Feb 2010 00:54:17 +0900
parents 959d4c8c8abc 77e2b8dfacca
children 5b5b9ea5b220
comparison
equal deleted inserted replaced
54:f62c169bbc24 57:326d9e06c2e3
1 /* Gimple IR support functions. 1 /* Gimple IR support functions.
2 2
3 Copyright 2007, 2008 Free Software Foundation, Inc. 3 Copyright 2007, 2008, 2009 Free Software Foundation, Inc.
4 Contributed by Aldy Hernandez <aldyh@redhat.com> 4 Contributed by Aldy Hernandez <aldyh@redhat.com>
5 5
6 This file is part of GCC. 6 This file is part of GCC.
7 7
8 GCC is free software; you can redistribute it and/or modify it under 8 GCC is free software; you can redistribute it and/or modify it under
21 21
22 #include "config.h" 22 #include "config.h"
23 #include "system.h" 23 #include "system.h"
24 #include "coretypes.h" 24 #include "coretypes.h"
25 #include "tm.h" 25 #include "tm.h"
26 #include "target.h"
26 #include "tree.h" 27 #include "tree.h"
27 #include "ggc.h" 28 #include "ggc.h"
28 #include "errors.h"
29 #include "hard-reg-set.h" 29 #include "hard-reg-set.h"
30 #include "basic-block.h" 30 #include "basic-block.h"
31 #include "gimple.h" 31 #include "gimple.h"
32 #include "toplev.h"
32 #include "diagnostic.h" 33 #include "diagnostic.h"
33 #include "tree-flow.h" 34 #include "tree-flow.h"
34 #include "value-prof.h" 35 #include "value-prof.h"
35 #include "flags.h" 36 #include "flags.h"
36 #ifndef noCbC 37 #ifndef noCbC
37 #include "cbc-tree.h" 38 #include "cbc-tree.h"
38 #endif 39 #endif
39 40 #include "alias.h"
40 #define DEFGSCODE(SYM, NAME, STRUCT) NAME, 41 #include "demangle.h"
42
43 #define DEFGSCODE(SYM, NAME, STRUCT) NAME,
41 const char *const gimple_code_name[] = { 44 const char *const gimple_code_name[] = {
42 #include "gimple.def" 45 #include "gimple.def"
43 }; 46 };
44 #undef DEFGSCODE 47 #undef DEFGSCODE
45 48
46 /* All the tuples have their operand vector at the very bottom 49 /* Global type table. FIXME lto, it should be possible to re-use some
50 of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
51 etc), but those assume that types were built with the various
52 build_*_type routines which is not the case with the streamer. */
53 static htab_t gimple_types;
54 static struct pointer_map_t *type_hash_cache;
55
56 /* Global type comparison cache. */
57 static htab_t gtc_visited;
58 static struct obstack gtc_ob;
59
60 /* All the tuples have their operand vector (if present) at the very bottom
47 of the structure. Therefore, the offset required to find the 61 of the structure. Therefore, the offset required to find the
48 operands vector the size of the structure minus the size of the 1 62 operands vector the size of the structure minus the size of the 1
49 element tree array at the end (see gimple_ops). */ 63 element tree array at the end (see gimple_ops). */
50 #define DEFGSCODE(SYM, NAME, STRUCT) (sizeof (STRUCT) - sizeof (tree)), 64 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
51 const size_t gimple_ops_offset_[] = { 65 (HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
66 EXPORTED_CONST size_t gimple_ops_offset_[] = {
67 #include "gsstruct.def"
68 };
69 #undef DEFGSSTRUCT
70
71 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof(struct STRUCT),
72 static const size_t gsstruct_code_size[] = {
73 #include "gsstruct.def"
74 };
75 #undef DEFGSSTRUCT
76
77 #define DEFGSCODE(SYM, NAME, GSSCODE) NAME,
78 const char *const gimple_code_name[] = {
79 #include "gimple.def"
80 };
81 #undef DEFGSCODE
82
83 #define DEFGSCODE(SYM, NAME, GSSCODE) GSSCODE,
84 EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
52 #include "gimple.def" 85 #include "gimple.def"
53 }; 86 };
54 #undef DEFGSCODE 87 #undef DEFGSCODE
55 88
56 #ifdef GATHER_STATISTICS 89 #ifdef GATHER_STATISTICS
89 gimple_set_code (gimple g, enum gimple_code code) 122 gimple_set_code (gimple g, enum gimple_code code)
90 { 123 {
91 g->gsbase.code = code; 124 g->gsbase.code = code;
92 } 125 }
93 126
94
95 /* Return the GSS_* identifier for the given GIMPLE statement CODE. */
96
97 static enum gimple_statement_structure_enum
98 gss_for_code (enum gimple_code code)
99 {
100 switch (code)
101 {
102 case GIMPLE_ASSIGN:
103 case GIMPLE_CALL:
104 case GIMPLE_RETURN: return GSS_WITH_MEM_OPS;
105 case GIMPLE_COND:
106 case GIMPLE_GOTO:
107 case GIMPLE_LABEL:
108 case GIMPLE_CHANGE_DYNAMIC_TYPE:
109 case GIMPLE_SWITCH: return GSS_WITH_OPS;
110 case GIMPLE_ASM: return GSS_ASM;
111 case GIMPLE_BIND: return GSS_BIND;
112 case GIMPLE_CATCH: return GSS_CATCH;
113 case GIMPLE_EH_FILTER: return GSS_EH_FILTER;
114 case GIMPLE_NOP: return GSS_BASE;
115 case GIMPLE_PHI: return GSS_PHI;
116 case GIMPLE_RESX: return GSS_RESX;
117 case GIMPLE_TRY: return GSS_TRY;
118 case GIMPLE_WITH_CLEANUP_EXPR: return GSS_WCE;
119 case GIMPLE_OMP_CRITICAL: return GSS_OMP_CRITICAL;
120 case GIMPLE_OMP_FOR: return GSS_OMP_FOR;
121 case GIMPLE_OMP_MASTER:
122 case GIMPLE_OMP_ORDERED:
123 case GIMPLE_OMP_SECTION: return GSS_OMP;
124 case GIMPLE_OMP_RETURN:
125 case GIMPLE_OMP_SECTIONS_SWITCH: return GSS_BASE;
126 case GIMPLE_OMP_CONTINUE: return GSS_OMP_CONTINUE;
127 case GIMPLE_OMP_PARALLEL: return GSS_OMP_PARALLEL;
128 case GIMPLE_OMP_TASK: return GSS_OMP_TASK;
129 case GIMPLE_OMP_SECTIONS: return GSS_OMP_SECTIONS;
130 case GIMPLE_OMP_SINGLE: return GSS_OMP_SINGLE;
131 case GIMPLE_OMP_ATOMIC_LOAD: return GSS_OMP_ATOMIC_LOAD;
132 case GIMPLE_OMP_ATOMIC_STORE: return GSS_OMP_ATOMIC_STORE;
133 case GIMPLE_PREDICT: return GSS_BASE;
134 default: gcc_unreachable ();
135 }
136 }
137
138
139 /* Return the number of bytes needed to hold a GIMPLE statement with 127 /* Return the number of bytes needed to hold a GIMPLE statement with
140 code CODE. */ 128 code CODE. */
141 129
142 static size_t 130 static inline size_t
143 gimple_size (enum gimple_code code) 131 gimple_size (enum gimple_code code)
144 { 132 {
145 enum gimple_statement_structure_enum gss = gss_for_code (code); 133 return gsstruct_code_size[gss_for_code (code)];
146 134 }
147 if (gss == GSS_WITH_OPS)
148 return sizeof (struct gimple_statement_with_ops);
149 else if (gss == GSS_WITH_MEM_OPS)
150 return sizeof (struct gimple_statement_with_memory_ops);
151
152 switch (code)
153 {
154 case GIMPLE_ASM:
155 return sizeof (struct gimple_statement_asm);
156 case GIMPLE_NOP:
157 return sizeof (struct gimple_statement_base);
158 case GIMPLE_BIND:
159 return sizeof (struct gimple_statement_bind);
160 case GIMPLE_CATCH:
161 return sizeof (struct gimple_statement_catch);
162 case GIMPLE_EH_FILTER:
163 return sizeof (struct gimple_statement_eh_filter);
164 case GIMPLE_TRY:
165 return sizeof (struct gimple_statement_try);
166 case GIMPLE_RESX:
167 return sizeof (struct gimple_statement_resx);
168 case GIMPLE_OMP_CRITICAL:
169 return sizeof (struct gimple_statement_omp_critical);
170 case GIMPLE_OMP_FOR:
171 return sizeof (struct gimple_statement_omp_for);
172 case GIMPLE_OMP_PARALLEL:
173 return sizeof (struct gimple_statement_omp_parallel);
174 case GIMPLE_OMP_TASK:
175 return sizeof (struct gimple_statement_omp_task);
176 case GIMPLE_OMP_SECTION:
177 case GIMPLE_OMP_MASTER:
178 case GIMPLE_OMP_ORDERED:
179 return sizeof (struct gimple_statement_omp);
180 case GIMPLE_OMP_RETURN:
181 return sizeof (struct gimple_statement_base);
182 case GIMPLE_OMP_CONTINUE:
183 return sizeof (struct gimple_statement_omp_continue);
184 case GIMPLE_OMP_SECTIONS:
185 return sizeof (struct gimple_statement_omp_sections);
186 case GIMPLE_OMP_SECTIONS_SWITCH:
187 return sizeof (struct gimple_statement_base);
188 case GIMPLE_OMP_SINGLE:
189 return sizeof (struct gimple_statement_omp_single);
190 case GIMPLE_OMP_ATOMIC_LOAD:
191 return sizeof (struct gimple_statement_omp_atomic_load);
192 case GIMPLE_OMP_ATOMIC_STORE:
193 return sizeof (struct gimple_statement_omp_atomic_store);
194 case GIMPLE_WITH_CLEANUP_EXPR:
195 return sizeof (struct gimple_statement_wce);
196 case GIMPLE_CHANGE_DYNAMIC_TYPE:
197 return sizeof (struct gimple_statement_with_ops);
198 case GIMPLE_PREDICT:
199 return sizeof (struct gimple_statement_base);
200 default:
201 break;
202 }
203
204 gcc_unreachable ();
205 }
206
207 135
208 /* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS 136 /* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
209 operands. */ 137 operands. */
210 138
211 #define gimple_alloc(c, n) gimple_alloc_stat (c, n MEM_STAT_INFO) 139 gimple
212 static gimple
213 gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL) 140 gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
214 { 141 {
215 size_t size; 142 size_t size;
216 gimple stmt; 143 gimple stmt;
217 144
251 178
252 179
253 180
254 /* Build a tuple with operands. CODE is the statement to build (which 181 /* Build a tuple with operands. CODE is the statement to build (which
255 must be one of the GIMPLE_WITH_OPS tuples). SUBCODE is the sub-code 182 must be one of the GIMPLE_WITH_OPS tuples). SUBCODE is the sub-code
256 for the new tuple. NUM_OPS is the number of operands to allocate. */ 183 for the new tuple. NUM_OPS is the number of operands to allocate. */
257 184
258 #define gimple_build_with_ops(c, s, n) \ 185 #define gimple_build_with_ops(c, s, n) \
259 gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO) 186 gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)
260 187
261 static gimple 188 static gimple
262 gimple_build_with_ops_stat (enum gimple_code code, enum tree_code subcode, 189 gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
263 unsigned num_ops MEM_STAT_DECL) 190 unsigned num_ops MEM_STAT_DECL)
264 { 191 {
265 gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT); 192 gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
266 gimple_set_subcode (s, subcode); 193 gimple_set_subcode (s, subcode);
267 194
268 return s; 195 return s;
272 /* Build a GIMPLE_RETURN statement returning RETVAL. */ 199 /* Build a GIMPLE_RETURN statement returning RETVAL. */
273 200
274 gimple 201 gimple
275 gimple_build_return (tree retval) 202 gimple_build_return (tree retval)
276 { 203 {
277 gimple s = gimple_build_with_ops (GIMPLE_RETURN, 0, 1); 204 gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1);
278 if (retval) 205 if (retval)
279 gimple_return_set_retval (s, retval); 206 gimple_return_set_retval (s, retval);
280 return s; 207 return s;
281 } 208 }
282 209
285 GIMPLE_CALL statement to function FN with NARGS arguments. */ 212 GIMPLE_CALL statement to function FN with NARGS arguments. */
286 213
287 static inline gimple 214 static inline gimple
288 gimple_build_call_1 (tree fn, unsigned nargs) 215 gimple_build_call_1 (tree fn, unsigned nargs)
289 { 216 {
290 gimple s = gimple_build_with_ops (GIMPLE_CALL, 0, nargs + 3); 217 gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
291 if (TREE_CODE (fn) == FUNCTION_DECL) 218 if (TREE_CODE (fn) == FUNCTION_DECL)
292 fn = build_fold_addr_expr (fn); 219 fn = build_fold_addr_expr (fn);
293 gimple_set_op (s, 1, fn); 220 gimple_set_op (s, 1, fn);
294 return s; 221 return s;
295 } 222 }
364 gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t)); 291 gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
365 gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t)); 292 gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
366 #ifndef noCbC 293 #ifndef noCbC
367 gimple_call_set_cbc_goto (call, CALL_EXPR_CbC_GOTO (t)); 294 gimple_call_set_cbc_goto (call, CALL_EXPR_CbC_GOTO (t));
368 #endif 295 #endif
296 gimple_set_no_warning (call, TREE_NO_WARNING (t));
369 297
370 return call; 298 return call;
371 } 299 }
372 300
373 301
374 /* Extract the operands and code for expression EXPR into *SUBCODE_P, 302 /* Extract the operands and code for expression EXPR into *SUBCODE_P,
375 *OP1_P and *OP2_P respectively. */ 303 *OP1_P and *OP2_P respectively. */
376 304
377 void 305 void
378 extract_ops_from_tree (tree expr, enum tree_code *subcode_p, tree *op1_p, 306 extract_ops_from_tree (tree expr, enum tree_code *subcode_p, tree *op1_p,
379 tree *op2_p) 307 tree *op2_p)
380 { 308 {
381 enum gimple_rhs_class grhs_class; 309 enum gimple_rhs_class grhs_class;
382 310
383 *subcode_p = TREE_CODE (expr); 311 *subcode_p = TREE_CODE (expr);
384 grhs_class = get_gimple_rhs_class (*subcode_p); 312 grhs_class = get_gimple_rhs_class (*subcode_p);
414 enum tree_code subcode; 342 enum tree_code subcode;
415 tree op1, op2; 343 tree op1, op2;
416 344
417 extract_ops_from_tree (rhs, &subcode, &op1, &op2); 345 extract_ops_from_tree (rhs, &subcode, &op1, &op2);
418 return gimple_build_assign_with_ops_stat (subcode, lhs, op1, op2 346 return gimple_build_assign_with_ops_stat (subcode, lhs, op1, op2
419 PASS_MEM_STAT); 347 PASS_MEM_STAT);
420 } 348 }
421 349
422 350
423 /* Build a GIMPLE_ASSIGN statement with sub-code SUBCODE and operands 351 /* Build a GIMPLE_ASSIGN statement with sub-code SUBCODE and operands
424 OP1 and OP2. If OP2 is NULL then SUBCODE must be of class 352 OP1 and OP2. If OP2 is NULL then SUBCODE must be of class
432 gimple p; 360 gimple p;
433 361
434 /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the 362 /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
435 code). */ 363 code). */
436 num_ops = get_gimple_rhs_num_ops (subcode) + 1; 364 num_ops = get_gimple_rhs_num_ops (subcode) + 1;
437 365
438 p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, subcode, num_ops 366 p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
439 PASS_MEM_STAT); 367 PASS_MEM_STAT);
440 gimple_assign_set_lhs (p, lhs); 368 gimple_assign_set_lhs (p, lhs);
441 gimple_assign_set_rhs1 (p, op1); 369 gimple_assign_set_rhs1 (p, op1);
442 if (op2) 370 if (op2)
443 { 371 {
444 gcc_assert (num_ops > 2); 372 gcc_assert (num_ops > 2);
455 ungimplified trees in DST or SRC, in which case they will be 383 ungimplified trees in DST or SRC, in which case they will be
456 converted to a gimple operand if necessary. 384 converted to a gimple operand if necessary.
457 385
458 This function returns the newly created GIMPLE_ASSIGN tuple. */ 386 This function returns the newly created GIMPLE_ASSIGN tuple. */
459 387
460 inline gimple 388 gimple
461 gimplify_assign (tree dst, tree src, gimple_seq *seq_p) 389 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
462 { 390 {
463 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src); 391 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
464 gimplify_and_add (t, seq_p); 392 gimplify_and_add (t, seq_p);
465 ggc_free (t); 393 ggc_free (t);
466 return gimple_seq_last_stmt (*seq_p); 394 return gimple_seq_last_stmt (*seq_p);
467 } 395 }
473 T_LABEL is the label to jump to if the condition is true. 401 T_LABEL is the label to jump to if the condition is true.
474 F_LABEL is the label to jump to otherwise. */ 402 F_LABEL is the label to jump to otherwise. */
475 403
476 gimple 404 gimple
477 gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs, 405 gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
478 tree t_label, tree f_label) 406 tree t_label, tree f_label)
479 { 407 {
480 gimple p; 408 gimple p;
481 409
482 gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison); 410 gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
483 p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4); 411 p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
493 421
494 void 422 void
495 gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p, 423 gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
496 tree *lhs_p, tree *rhs_p) 424 tree *lhs_p, tree *rhs_p)
497 { 425 {
426 location_t loc = EXPR_LOCATION (cond);
498 gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison 427 gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
499 || TREE_CODE (cond) == TRUTH_NOT_EXPR 428 || TREE_CODE (cond) == TRUTH_NOT_EXPR
500 || is_gimple_min_invariant (cond) 429 || is_gimple_min_invariant (cond)
501 || SSA_VAR_P (cond)); 430 || SSA_VAR_P (cond));
502 431
503 extract_ops_from_tree (cond, code_p, lhs_p, rhs_p); 432 extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);
504 433
505 /* Canonicalize conditionals of the form 'if (!VAL)'. */ 434 /* Canonicalize conditionals of the form 'if (!VAL)'. */
506 if (*code_p == TRUTH_NOT_EXPR) 435 if (*code_p == TRUTH_NOT_EXPR)
507 { 436 {
508 *code_p = EQ_EXPR; 437 *code_p = EQ_EXPR;
509 gcc_assert (*lhs_p && *rhs_p == NULL_TREE); 438 gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
510 *rhs_p = fold_convert (TREE_TYPE (*lhs_p), integer_zero_node); 439 *rhs_p = fold_convert_loc (loc, TREE_TYPE (*lhs_p), integer_zero_node);
511 } 440 }
512 /* Canonicalize conditionals of the form 'if (VAL)' */ 441 /* Canonicalize conditionals of the form 'if (VAL)' */
513 else if (TREE_CODE_CLASS (*code_p) != tcc_comparison) 442 else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
514 { 443 {
515 *code_p = NE_EXPR; 444 *code_p = NE_EXPR;
516 gcc_assert (*lhs_p && *rhs_p == NULL_TREE); 445 gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
517 *rhs_p = fold_convert (TREE_TYPE (*lhs_p), integer_zero_node); 446 *rhs_p = fold_convert_loc (loc, TREE_TYPE (*lhs_p), integer_zero_node);
518 } 447 }
519 } 448 }
520 449
521 450
522 /* Build a GIMPLE_COND statement from the conditional expression tree 451 /* Build a GIMPLE_COND statement from the conditional expression tree
548 /* Build a GIMPLE_LABEL statement for LABEL. */ 477 /* Build a GIMPLE_LABEL statement for LABEL. */
549 478
550 gimple 479 gimple
551 gimple_build_label (tree label) 480 gimple_build_label (tree label)
552 { 481 {
553 gimple p = gimple_build_with_ops (GIMPLE_LABEL, 0, 1); 482 gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
554 gimple_label_set_label (p, label); 483 gimple_label_set_label (p, label);
555 return p; 484 return p;
556 } 485 }
557 486
558 /* Build a GIMPLE_GOTO statement to label DEST. */ 487 /* Build a GIMPLE_GOTO statement to label DEST. */
559 488
560 gimple 489 gimple
561 gimple_build_goto (tree dest) 490 gimple_build_goto (tree dest)
562 { 491 {
563 gimple p = gimple_build_with_ops (GIMPLE_GOTO, 0, 1); 492 gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
564 gimple_goto_set_dest (p, dest); 493 gimple_goto_set_dest (p, dest);
565 return p; 494 return p;
566 } 495 }
567 496
568 497
569 /* Build a GIMPLE_NOP statement. */ 498 /* Build a GIMPLE_NOP statement. */
570 499
571 gimple 500 gimple
572 gimple_build_nop (void) 501 gimple_build_nop (void)
573 { 502 {
574 return gimple_alloc (GIMPLE_NOP, 0); 503 return gimple_alloc (GIMPLE_NOP, 0);
575 } 504 }
576 505
598 NOUTPUT is the number of register outputs. 527 NOUTPUT is the number of register outputs.
599 NCLOBBERS is the number of clobbered registers. 528 NCLOBBERS is the number of clobbered registers.
600 */ 529 */
601 530
602 static inline gimple 531 static inline gimple
603 gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs, 532 gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
604 unsigned nclobbers) 533 unsigned nclobbers, unsigned nlabels)
605 { 534 {
606 gimple p; 535 gimple p;
607 int size = strlen (string); 536 int size = strlen (string);
608 537
609 p = gimple_build_with_ops (GIMPLE_ASM, 0, ninputs + noutputs + nclobbers); 538 /* ASMs with labels cannot have outputs. This should have been
539 enforced by the front end. */
540 gcc_assert (nlabels == 0 || noutputs == 0);
541
542 p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
543 ninputs + noutputs + nclobbers + nlabels);
610 544
611 p->gimple_asm.ni = ninputs; 545 p->gimple_asm.ni = ninputs;
612 p->gimple_asm.no = noutputs; 546 p->gimple_asm.no = noutputs;
613 p->gimple_asm.nc = nclobbers; 547 p->gimple_asm.nc = nclobbers;
548 p->gimple_asm.nl = nlabels;
614 p->gimple_asm.string = ggc_alloc_string (string, size); 549 p->gimple_asm.string = ggc_alloc_string (string, size);
615 550
616 #ifdef GATHER_STATISTICS 551 #ifdef GATHER_STATISTICS
617 gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size; 552 gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
618 #endif 553 #endif
619 554
620 return p; 555 return p;
621 } 556 }
622 557
623 /* Build a GIMPLE_ASM statement. 558 /* Build a GIMPLE_ASM statement.
624 559
626 NINPUT is the number of register inputs. 561 NINPUT is the number of register inputs.
627 NOUTPUT is the number of register outputs. 562 NOUTPUT is the number of register outputs.
628 NCLOBBERS is the number of clobbered registers. 563 NCLOBBERS is the number of clobbered registers.
629 INPUTS is a vector of the input register parameters. 564 INPUTS is a vector of the input register parameters.
630 OUTPUTS is a vector of the output register parameters. 565 OUTPUTS is a vector of the output register parameters.
631 CLOBBERS is a vector of the clobbered register parameters. */ 566 CLOBBERS is a vector of the clobbered register parameters.
567 LABELS is a vector of destination labels. */
632 568
633 gimple 569 gimple
634 gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs, 570 gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs,
635 VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers) 571 VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers,
572 VEC(tree,gc)* labels)
636 { 573 {
637 gimple p; 574 gimple p;
638 unsigned i; 575 unsigned i;
639 576
640 p = gimple_build_asm_1 (string, 577 p = gimple_build_asm_1 (string,
641 VEC_length (tree, inputs), 578 VEC_length (tree, inputs),
642 VEC_length (tree, outputs), 579 VEC_length (tree, outputs),
643 VEC_length (tree, clobbers)); 580 VEC_length (tree, clobbers),
644 581 VEC_length (tree, labels));
582
645 for (i = 0; i < VEC_length (tree, inputs); i++) 583 for (i = 0; i < VEC_length (tree, inputs); i++)
646 gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i)); 584 gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i));
647 585
648 for (i = 0; i < VEC_length (tree, outputs); i++) 586 for (i = 0; i < VEC_length (tree, outputs); i++)
649 gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i)); 587 gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i));
650 588
651 for (i = 0; i < VEC_length (tree, clobbers); i++) 589 for (i = 0; i < VEC_length (tree, clobbers); i++)
652 gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i)); 590 gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i));
653 591
654 return p; 592 for (i = 0; i < VEC_length (tree, labels); i++)
655 } 593 gimple_asm_set_label_op (p, i, VEC_index (tree, labels, i));
656 594
657 /* Build a GIMPLE_ASM statement.
658
659 STRING is the assembly code.
660 NINPUT is the number of register inputs.
661 NOUTPUT is the number of register outputs.
662 NCLOBBERS is the number of clobbered registers.
663 ... are trees for each input, output and clobbered register. */
664
665 gimple
666 gimple_build_asm (const char *string, unsigned ninputs, unsigned noutputs,
667 unsigned nclobbers, ...)
668 {
669 gimple p;
670 unsigned i;
671 va_list ap;
672
673 p = gimple_build_asm_1 (string, ninputs, noutputs, nclobbers);
674
675 va_start (ap, nclobbers);
676
677 for (i = 0; i < ninputs; i++)
678 gimple_asm_set_input_op (p, i, va_arg (ap, tree));
679
680 for (i = 0; i < noutputs; i++)
681 gimple_asm_set_output_op (p, i, va_arg (ap, tree));
682
683 for (i = 0; i < nclobbers; i++)
684 gimple_asm_set_clobber_op (p, i, va_arg (ap, tree));
685
686 va_end (ap);
687
688 return p; 595 return p;
689 } 596 }
690 597
691 /* Build a GIMPLE_CATCH statement. 598 /* Build a GIMPLE_CATCH statement.
692 599
718 gimple_eh_filter_set_failure (p, failure); 625 gimple_eh_filter_set_failure (p, failure);
719 626
720 return p; 627 return p;
721 } 628 }
722 629
630 /* Build a GIMPLE_EH_MUST_NOT_THROW statement. */
631
632 gimple
633 gimple_build_eh_must_not_throw (tree decl)
634 {
635 gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 1);
636
637 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
638 gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
639 gimple_eh_must_not_throw_set_fndecl (p, decl);
640
641 return p;
642 }
643
723 /* Build a GIMPLE_TRY statement. 644 /* Build a GIMPLE_TRY statement.
724 645
725 EVAL is the expression to evaluate. 646 EVAL is the expression to evaluate.
726 CLEANUP is the cleanup expression. 647 CLEANUP is the cleanup expression.
727 KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on 648 KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
728 whether this is a try/catch or a try/finally respectively. */ 649 whether this is a try/catch or a try/finally respectively. */
729 650
730 gimple 651 gimple
731 gimple_build_try (gimple_seq eval, gimple_seq cleanup, 652 gimple_build_try (gimple_seq eval, gimple_seq cleanup,
732 enum gimple_try_flags kind) 653 enum gimple_try_flags kind)
733 { 654 {
734 gimple p; 655 gimple p;
735 656
736 gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY); 657 gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
737 p = gimple_alloc (GIMPLE_TRY, 0); 658 p = gimple_alloc (GIMPLE_TRY, 0);
757 678
758 return p; 679 return p;
759 } 680 }
760 681
761 682
762 /* Build a GIMPLE_RESX statement. 683 /* Build a GIMPLE_RESX statement. */
763
764 REGION is the region number from which this resx causes control flow to
765 leave. */
766 684
767 gimple 685 gimple
768 gimple_build_resx (int region) 686 gimple_build_resx (int region)
769 { 687 {
770 gimple p = gimple_alloc (GIMPLE_RESX, 0); 688 gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0);
771 gimple_resx_set_region (p, region); 689 p->gimple_eh_ctrl.region = region;
772 return p; 690 return p;
773 } 691 }
774 692
775 693
776 /* The helper for constructing a gimple switch statement. 694 /* The helper for constructing a gimple switch statement.
777 INDEX is the switch's index. 695 INDEX is the switch's index.
778 NLABELS is the number of labels in the switch excluding the default. 696 NLABELS is the number of labels in the switch excluding the default.
779 DEFAULT_LABEL is the default label for the switch statement. */ 697 DEFAULT_LABEL is the default label for the switch statement. */
780 698
781 static inline gimple 699 gimple
782 gimple_build_switch_1 (unsigned nlabels, tree index, tree default_label) 700 gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
783 { 701 {
784 /* nlabels + 1 default label + 1 index. */ 702 /* nlabels + 1 default label + 1 index. */
785 gimple p = gimple_build_with_ops (GIMPLE_SWITCH, 0, nlabels + 1 + 1); 703 gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
704 1 + (default_label != NULL) + nlabels);
786 gimple_switch_set_index (p, index); 705 gimple_switch_set_index (p, index);
787 gimple_switch_set_default_label (p, default_label); 706 if (default_label)
707 gimple_switch_set_default_label (p, default_label);
788 return p; 708 return p;
789 } 709 }
790 710
791 711
792 /* Build a GIMPLE_SWITCH statement. 712 /* Build a GIMPLE_SWITCH statement.
793 713
794 INDEX is the switch's index. 714 INDEX is the switch's index.
795 NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL. 715 NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL.
796 ... are the labels excluding the default. */ 716 ... are the labels excluding the default. */
797 717
798 gimple 718 gimple
799 gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...) 719 gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...)
800 { 720 {
801 va_list al; 721 va_list al;
802 unsigned i; 722 unsigned i, offset;
803 gimple p; 723 gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);
804
805 p = gimple_build_switch_1 (nlabels, index, default_label);
806 724
807 /* Store the rest of the labels. */ 725 /* Store the rest of the labels. */
808 va_start (al, default_label); 726 va_start (al, default_label);
809 for (i = 1; i <= nlabels; i++) 727 offset = (default_label != NULL);
810 gimple_switch_set_label (p, i, va_arg (al, tree)); 728 for (i = 0; i < nlabels; i++)
729 gimple_switch_set_label (p, i + offset, va_arg (al, tree));
811 va_end (al); 730 va_end (al);
812 731
813 return p; 732 return p;
814 } 733 }
815 734
821 ARGS is a vector of labels excluding the default. */ 740 ARGS is a vector of labels excluding the default. */
822 741
823 gimple 742 gimple
824 gimple_build_switch_vec (tree index, tree default_label, VEC(tree, heap) *args) 743 gimple_build_switch_vec (tree index, tree default_label, VEC(tree, heap) *args)
825 { 744 {
826 unsigned i; 745 unsigned i, offset, nlabels = VEC_length (tree, args);
827 unsigned nlabels = VEC_length (tree, args); 746 gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);
828 gimple p = gimple_build_switch_1 (nlabels, index, default_label); 747
829 748 /* Copy the labels from the vector to the switch statement. */
830 /* Put labels in labels[1 - (nlabels + 1)]. 749 offset = (default_label != NULL);
831 Default label is in labels[0]. */ 750 for (i = 0; i < nlabels; i++)
832 for (i = 1; i <= nlabels; i++) 751 gimple_switch_set_label (p, i + offset, VEC_index (tree, args, i));
833 gimple_switch_set_label (p, i, VEC_index (tree, args, i - 1)); 752
753 return p;
754 }
755
756 /* Build a GIMPLE_EH_DISPATCH statement. */
757
758 gimple
759 gimple_build_eh_dispatch (int region)
760 {
761 gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0);
762 p->gimple_eh_ctrl.region = region;
763 return p;
764 }
765
766 /* Build a new GIMPLE_DEBUG_BIND statement.
767
768 VAR is bound to VALUE; block and location are taken from STMT. */
769
770 gimple
771 gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
772 {
773 gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
774 (unsigned)GIMPLE_DEBUG_BIND, 2
775 PASS_MEM_STAT);
776
777 gimple_debug_bind_set_var (p, var);
778 gimple_debug_bind_set_value (p, value);
779 if (stmt)
780 {
781 gimple_set_block (p, gimple_block (stmt));
782 gimple_set_location (p, gimple_location (stmt));
783 }
834 784
835 return p; 785 return p;
836 } 786 }
837 787
838 788
839 /* Build a GIMPLE_OMP_CRITICAL statement. 789 /* Build a GIMPLE_OMP_CRITICAL statement.
840 790
841 BODY is the sequence of statements for which only one thread can execute. 791 BODY is the sequence of statements for which only one thread can execute.
842 NAME is optional identifier for this critical block. */ 792 NAME is optional identifier for this critical block. */
843 793
844 gimple 794 gimple
845 gimple_build_omp_critical (gimple_seq body, tree name) 795 gimple_build_omp_critical (gimple_seq body, tree name)
846 { 796 {
847 gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0); 797 gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
848 gimple_omp_critical_set_name (p, name); 798 gimple_omp_critical_set_name (p, name);
849 if (body) 799 if (body)
853 } 803 }
854 804
855 /* Build a GIMPLE_OMP_FOR statement. 805 /* Build a GIMPLE_OMP_FOR statement.
856 806
857 BODY is sequence of statements inside the for loop. 807 BODY is sequence of statements inside the for loop.
858 CLAUSES, are any of the OMP loop construct's clauses: private, firstprivate, 808 CLAUSES, are any of the OMP loop construct's clauses: private, firstprivate,
859 lastprivate, reductions, ordered, schedule, and nowait. 809 lastprivate, reductions, ordered, schedule, and nowait.
860 COLLAPSE is the collapse count. 810 COLLAPSE is the collapse count.
861 PRE_BODY is the sequence of statements that are loop invariant. */ 811 PRE_BODY is the sequence of statements that are loop invariant. */
862 812
863 gimple 813 gimple
864 gimple_build_omp_for (gimple_seq body, tree clauses, size_t collapse, 814 gimple_build_omp_for (gimple_seq body, tree clauses, size_t collapse,
865 gimple_seq pre_body) 815 gimple_seq pre_body)
866 { 816 {
867 gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0); 817 gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
868 if (body) 818 if (body)
869 gimple_omp_set_body (p, body); 819 gimple_omp_set_body (p, body);
870 gimple_omp_for_set_clauses (p, clauses); 820 gimple_omp_for_set_clauses (p, clauses);
882 BODY is sequence of statements which are executed in parallel. 832 BODY is sequence of statements which are executed in parallel.
883 CLAUSES, are the OMP parallel construct's clauses. 833 CLAUSES, are the OMP parallel construct's clauses.
884 CHILD_FN is the function created for the parallel threads to execute. 834 CHILD_FN is the function created for the parallel threads to execute.
885 DATA_ARG are the shared data argument(s). */ 835 DATA_ARG are the shared data argument(s). */
886 836
887 gimple 837 gimple
888 gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn, 838 gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
889 tree data_arg) 839 tree data_arg)
890 { 840 {
891 gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0); 841 gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
892 if (body) 842 if (body)
893 gimple_omp_set_body (p, body); 843 gimple_omp_set_body (p, body);
894 gimple_omp_parallel_set_clauses (p, clauses); 844 gimple_omp_parallel_set_clauses (p, clauses);
906 CHILD_FN is the function created for the parallel threads to execute. 856 CHILD_FN is the function created for the parallel threads to execute.
907 DATA_ARG are the shared data argument(s). 857 DATA_ARG are the shared data argument(s).
908 COPY_FN is the optional function for firstprivate initialization. 858 COPY_FN is the optional function for firstprivate initialization.
909 ARG_SIZE and ARG_ALIGN are size and alignment of the data block. */ 859 ARG_SIZE and ARG_ALIGN are size and alignment of the data block. */
910 860
911 gimple 861 gimple
912 gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn, 862 gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
913 tree data_arg, tree copy_fn, tree arg_size, 863 tree data_arg, tree copy_fn, tree arg_size,
914 tree arg_align) 864 tree arg_align)
915 { 865 {
916 gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0); 866 gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
917 if (body) 867 if (body)
918 gimple_omp_set_body (p, body); 868 gimple_omp_set_body (p, body);
919 gimple_omp_task_set_clauses (p, clauses); 869 gimple_omp_task_set_clauses (p, clauses);
944 894
945 /* Build a GIMPLE_OMP_MASTER statement. 895 /* Build a GIMPLE_OMP_MASTER statement.
946 896
947 BODY is the sequence of statements to be executed by just the master. */ 897 BODY is the sequence of statements to be executed by just the master. */
948 898
949 gimple 899 gimple
950 gimple_build_omp_master (gimple_seq body) 900 gimple_build_omp_master (gimple_seq body)
951 { 901 {
952 gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0); 902 gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
953 if (body) 903 if (body)
954 gimple_omp_set_body (p, body); 904 gimple_omp_set_body (p, body);
960 /* Build a GIMPLE_OMP_CONTINUE statement. 910 /* Build a GIMPLE_OMP_CONTINUE statement.
961 911
962 CONTROL_DEF is the definition of the control variable. 912 CONTROL_DEF is the definition of the control variable.
963 CONTROL_USE is the use of the control variable. */ 913 CONTROL_USE is the use of the control variable. */
964 914
965 gimple 915 gimple
966 gimple_build_omp_continue (tree control_def, tree control_use) 916 gimple_build_omp_continue (tree control_def, tree control_use)
967 { 917 {
968 gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0); 918 gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
969 gimple_omp_continue_set_control_def (p, control_def); 919 gimple_omp_continue_set_control_def (p, control_def);
970 gimple_omp_continue_set_control_use (p, control_use); 920 gimple_omp_continue_set_control_use (p, control_use);
974 /* Build a GIMPLE_OMP_ORDERED statement. 924 /* Build a GIMPLE_OMP_ORDERED statement.
975 925
976 BODY is the sequence of statements inside a loop that will executed in 926 BODY is the sequence of statements inside a loop that will executed in
977 sequence. */ 927 sequence. */
978 928
979 gimple 929 gimple
980 gimple_build_omp_ordered (gimple_seq body) 930 gimple_build_omp_ordered (gimple_seq body)
981 { 931 {
982 gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0); 932 gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
983 if (body) 933 if (body)
984 gimple_omp_set_body (p, body); 934 gimple_omp_set_body (p, body);
988 938
989 939
990 /* Build a GIMPLE_OMP_RETURN statement. 940 /* Build a GIMPLE_OMP_RETURN statement.
991 WAIT_P is true if this is a non-waiting return. */ 941 WAIT_P is true if this is a non-waiting return. */
992 942
993 gimple 943 gimple
994 gimple_build_omp_return (bool wait_p) 944 gimple_build_omp_return (bool wait_p)
995 { 945 {
996 gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0); 946 gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
997 if (wait_p) 947 if (wait_p)
998 gimple_omp_return_set_nowait (p); 948 gimple_omp_return_set_nowait (p);
1005 955
1006 BODY is a sequence of section statements. 956 BODY is a sequence of section statements.
1007 CLAUSES are any of the OMP sections contsruct's clauses: private, 957 CLAUSES are any of the OMP sections contsruct's clauses: private,
1008 firstprivate, lastprivate, reduction, and nowait. */ 958 firstprivate, lastprivate, reduction, and nowait. */
1009 959
1010 gimple 960 gimple
1011 gimple_build_omp_sections (gimple_seq body, tree clauses) 961 gimple_build_omp_sections (gimple_seq body, tree clauses)
1012 { 962 {
1013 gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0); 963 gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
1014 if (body) 964 if (body)
1015 gimple_omp_set_body (p, body); 965 gimple_omp_set_body (p, body);
1032 982
1033 BODY is the sequence of statements that will be executed once. 983 BODY is the sequence of statements that will be executed once.
1034 CLAUSES are any of the OMP single construct's clauses: private, firstprivate, 984 CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
1035 copyprivate, nowait. */ 985 copyprivate, nowait. */
1036 986
1037 gimple 987 gimple
1038 gimple_build_omp_single (gimple_seq body, tree clauses) 988 gimple_build_omp_single (gimple_seq body, tree clauses)
1039 { 989 {
1040 gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0); 990 gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
1041 if (body) 991 if (body)
1042 gimple_omp_set_body (p, body); 992 gimple_omp_set_body (p, body);
1043 gimple_omp_single_set_clauses (p, clauses); 993 gimple_omp_single_set_clauses (p, clauses);
1044
1045 return p;
1046 }
1047
1048
1049 /* Build a GIMPLE_CHANGE_DYNAMIC_TYPE statement. TYPE is the new type
1050 for the location PTR. */
1051
1052 gimple
1053 gimple_build_cdt (tree type, tree ptr)
1054 {
1055 gimple p = gimple_build_with_ops (GIMPLE_CHANGE_DYNAMIC_TYPE, 0, 2);
1056 gimple_cdt_set_new_type (p, type);
1057 gimple_cdt_set_location (p, ptr);
1058 994
1059 return p; 995 return p;
1060 } 996 }
1061 997
1062 998
1095 gimple_predict_set_predictor (p, predictor); 1031 gimple_predict_set_predictor (p, predictor);
1096 gimple_predict_set_outcome (p, outcome); 1032 gimple_predict_set_outcome (p, outcome);
1097 return p; 1033 return p;
1098 } 1034 }
1099 1035
1100 /* Return which gimple structure is used by T. The enums here are defined
1101 in gsstruct.def. */
1102
1103 enum gimple_statement_structure_enum
1104 gimple_statement_structure (gimple gs)
1105 {
1106 return gss_for_code (gimple_code (gs));
1107 }
1108
1109 #if defined ENABLE_GIMPLE_CHECKING 1036 #if defined ENABLE_GIMPLE_CHECKING
1110 /* Complain of a gimple type mismatch and die. */ 1037 /* Complain of a gimple type mismatch and die. */
1111 1038
1112 void 1039 void
1113 gimple_check_failed (const_gimple gs, const char *file, int line, 1040 gimple_check_failed (const_gimple gs, const char *file, int line,
1114 const char *function, enum gimple_code code, 1041 const char *function, enum gimple_code code,
1115 enum tree_code subcode) 1042 enum tree_code subcode)
1116 { 1043 {
1117 internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d", 1044 internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
1118 gimple_code_name[code], 1045 gimple_code_name[code],
1119 tree_code_name[subcode], 1046 tree_code_name[subcode],
1120 gimple_code_name[gimple_code (gs)], 1047 gimple_code_name[gimple_code (gs)],
1121 gs->gsbase.subcode > 0 1048 gs->gsbase.subcode > 0
1122 ? tree_code_name[gs->gsbase.subcode] 1049 ? tree_code_name[gs->gsbase.subcode]
1123 : "", 1050 : "",
1124 function, trim_filename (file), line); 1051 function, trim_filename (file), line);
1125 } 1052 }
1126 #endif /* ENABLE_GIMPLE_CHECKING */ 1053 #endif /* ENABLE_GIMPLE_CHECKING */
1127 1054
1128 1055
1129 /* Allocate a new GIMPLE sequence in GC memory and return it. If 1056 /* Allocate a new GIMPLE sequence in GC memory and return it. If
1164 gcc_assert (gimple_seq_last (seq) == NULL); 1091 gcc_assert (gimple_seq_last (seq) == NULL);
1165 1092
1166 /* If this triggers, it's a sign that the same list is being freed 1093 /* If this triggers, it's a sign that the same list is being freed
1167 twice. */ 1094 twice. */
1168 gcc_assert (seq != gimple_seq_cache || gimple_seq_cache == NULL); 1095 gcc_assert (seq != gimple_seq_cache || gimple_seq_cache == NULL);
1169 1096
1170 /* Add SEQ to the pool of free sequences. */ 1097 /* Add SEQ to the pool of free sequences. */
1171 seq->next_free = gimple_seq_cache; 1098 seq->next_free = gimple_seq_cache;
1172 gimple_seq_cache = seq; 1099 gimple_seq_cache = seq;
1173 } 1100 }
1174 1101
1230 bool 1157 bool
1231 empty_body_p (gimple_seq body) 1158 empty_body_p (gimple_seq body)
1232 { 1159 {
1233 gimple_stmt_iterator i; 1160 gimple_stmt_iterator i;
1234 1161
1235
1236 if (gimple_seq_empty_p (body)) 1162 if (gimple_seq_empty_p (body))
1237 return true; 1163 return true;
1238 for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i)) 1164 for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
1239 if (!empty_stmt_p (gsi_stmt (i))) 1165 if (!empty_stmt_p (gsi_stmt (i))
1166 && !is_gimple_debug (gsi_stmt (i)))
1240 return false; 1167 return false;
1241 1168
1242 return true; 1169 return true;
1243 } 1170 }
1244 1171
1262 } 1189 }
1263 1190
1264 1191
1265 /* Walk all the statements in the sequence SEQ calling walk_gimple_stmt 1192 /* Walk all the statements in the sequence SEQ calling walk_gimple_stmt
1266 on each one. WI is as in walk_gimple_stmt. 1193 on each one. WI is as in walk_gimple_stmt.
1267 1194
1268 If walk_gimple_stmt returns non-NULL, the walk is stopped, the 1195 If walk_gimple_stmt returns non-NULL, the walk is stopped, the
1269 value is stored in WI->CALLBACK_RESULT and the statement that 1196 value is stored in WI->CALLBACK_RESULT and the statement that
1270 produced the value is returned. 1197 produced the value is returned.
1271 1198
1272 Otherwise, all the statements are walked and NULL returned. */ 1199 Otherwise, all the statements are walked and NULL returned. */
1273 1200
1274 gimple 1201 gimple
1275 walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt, 1202 walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
1276 walk_tree_fn callback_op, struct walk_stmt_info *wi) 1203 walk_tree_fn callback_op, struct walk_stmt_info *wi)
1277 { 1204 {
1278 gimple_stmt_iterator gsi; 1205 gimple_stmt_iterator gsi;
1279 1206
1280 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi)) 1207 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
1281 { 1208 {
1282 tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi); 1209 tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
1283 if (ret) 1210 if (ret)
1284 { 1211 {
1285 /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist 1212 /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
1286 to hold it. */ 1213 to hold it. */
1287 gcc_assert (wi); 1214 gcc_assert (wi);
1288 wi->callback_result = ret; 1215 wi->callback_result = ret;
1289 return gsi_stmt (gsi); 1216 return gsi_stmt (gsi);
1290 } 1217 }
1291 } 1218 }
1292 1219
1293 if (wi) 1220 if (wi)
1294 wi->callback_result = NULL_TREE; 1221 wi->callback_result = NULL_TREE;
1295 1222
1299 1226
1300 /* Helper function for walk_gimple_stmt. Walk operands of a GIMPLE_ASM. */ 1227 /* Helper function for walk_gimple_stmt. Walk operands of a GIMPLE_ASM. */
1301 1228
1302 static tree 1229 static tree
1303 walk_gimple_asm (gimple stmt, walk_tree_fn callback_op, 1230 walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
1304 struct walk_stmt_info *wi) 1231 struct walk_stmt_info *wi)
1305 { 1232 {
1306 tree ret; 1233 tree ret, op;
1307 unsigned noutputs; 1234 unsigned noutputs;
1308 const char **oconstraints; 1235 const char **oconstraints;
1309 unsigned i; 1236 unsigned i, n;
1310 const char *constraint; 1237 const char *constraint;
1311 bool allows_mem, allows_reg, is_inout; 1238 bool allows_mem, allows_reg, is_inout;
1312 1239
1313 noutputs = gimple_asm_noutputs (stmt); 1240 noutputs = gimple_asm_noutputs (stmt);
1314 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *)); 1241 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
1316 if (wi) 1243 if (wi)
1317 wi->is_lhs = true; 1244 wi->is_lhs = true;
1318 1245
1319 for (i = 0; i < noutputs; i++) 1246 for (i = 0; i < noutputs; i++)
1320 { 1247 {
1321 tree op = gimple_asm_output_op (stmt, i); 1248 op = gimple_asm_output_op (stmt, i);
1322 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op))); 1249 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
1323 oconstraints[i] = constraint; 1250 oconstraints[i] = constraint;
1324 parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg, 1251 parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
1325 &is_inout); 1252 &is_inout);
1326 if (wi) 1253 if (wi)
1327 wi->val_only = (allows_reg || !allows_mem); 1254 wi->val_only = (allows_reg || !allows_mem);
1328 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL); 1255 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
1329 if (ret) 1256 if (ret)
1330 return ret; 1257 return ret;
1331 } 1258 }
1332 1259
1333 for (i = 0; i < gimple_asm_ninputs (stmt); i++) 1260 n = gimple_asm_ninputs (stmt);
1334 { 1261 for (i = 0; i < n; i++)
1335 tree op = gimple_asm_input_op (stmt, i); 1262 {
1263 op = gimple_asm_input_op (stmt, i);
1336 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op))); 1264 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
1337 parse_input_constraint (&constraint, 0, 0, noutputs, 0, 1265 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1338 oconstraints, &allows_mem, &allows_reg); 1266 oconstraints, &allows_mem, &allows_reg);
1339 if (wi) 1267 if (wi)
1340 wi->val_only = (allows_reg || !allows_mem); 1268 {
1341 1269 wi->val_only = (allows_reg || !allows_mem);
1342 /* Although input "m" is not really a LHS, we need a lvalue. */ 1270 /* Although input "m" is not really a LHS, we need a lvalue. */
1343 if (wi) 1271 wi->is_lhs = !wi->val_only;
1344 wi->is_lhs = !wi->val_only; 1272 }
1345 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL); 1273 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
1346 if (ret) 1274 if (ret)
1347 return ret; 1275 return ret;
1348 } 1276 }
1349 1277
1350 if (wi) 1278 if (wi)
1351 { 1279 {
1352 wi->is_lhs = false; 1280 wi->is_lhs = false;
1353 wi->val_only = true; 1281 wi->val_only = true;
1282 }
1283
1284 n = gimple_asm_nlabels (stmt);
1285 for (i = 0; i < n; i++)
1286 {
1287 op = gimple_asm_label_op (stmt, i);
1288 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
1289 if (ret)
1290 return ret;
1354 } 1291 }
1355 1292
1356 return NULL_TREE; 1293 return NULL_TREE;
1357 } 1294 }
1358 1295
1362 1299
1363 CALLBACK_OP is called on each operand of STMT via walk_tree. 1300 CALLBACK_OP is called on each operand of STMT via walk_tree.
1364 Additional parameters to walk_tree must be stored in WI. For each operand 1301 Additional parameters to walk_tree must be stored in WI. For each operand
1365 OP, walk_tree is called as: 1302 OP, walk_tree is called as:
1366 1303
1367 walk_tree (&OP, CALLBACK_OP, WI, WI->PSET) 1304 walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)
1368 1305
1369 If CALLBACK_OP returns non-NULL for an operand, the remaining 1306 If CALLBACK_OP returns non-NULL for an operand, the remaining
1370 operands are not scanned. 1307 operands are not scanned.
1371 1308
1372 The return value is that returned by the last call to walk_tree, or 1309 The return value is that returned by the last call to walk_tree, or
1373 NULL_TREE if no CALLBACK_OP is specified. */ 1310 NULL_TREE if no CALLBACK_OP is specified. */
1374 1311
1375 inline tree 1312 inline tree
1376 walk_gimple_op (gimple stmt, walk_tree_fn callback_op, 1313 walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
1377 struct walk_stmt_info *wi) 1314 struct walk_stmt_info *wi)
1378 { 1315 {
1379 struct pointer_set_t *pset = (wi) ? wi->pset : NULL; 1316 struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
1380 unsigned i; 1317 unsigned i;
1381 tree ret = NULL_TREE; 1318 tree ret = NULL_TREE;
1382 1319
1383 switch (gimple_code (stmt)) 1320 switch (gimple_code (stmt))
1384 { 1321 {
1385 case GIMPLE_ASSIGN: 1322 case GIMPLE_ASSIGN:
1386 /* Walk the RHS operands. A formal temporary LHS may use a 1323 /* Walk the RHS operands. A formal temporary LHS may use a
1387 COMPONENT_REF RHS. */ 1324 COMPONENT_REF RHS. */
1388 if (wi) 1325 if (wi)
1389 wi->val_only = !is_gimple_formal_tmp_var (gimple_assign_lhs (stmt)); 1326 wi->val_only = !is_gimple_reg (gimple_assign_lhs (stmt))
1327 || !gimple_assign_single_p (stmt);
1390 1328
1391 for (i = 1; i < gimple_num_ops (stmt); i++) 1329 for (i = 1; i < gimple_num_ops (stmt); i++)
1392 { 1330 {
1393 ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, 1331 ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
1394 pset); 1332 pset);
1395 if (ret) 1333 if (ret)
1396 return ret; 1334 return ret;
1397 } 1335 }
1398 1336
1399 /* Walk the LHS. If the RHS is appropriate for a memory, we 1337 /* Walk the LHS. If the RHS is appropriate for a memory, we
1400 may use a COMPONENT_REF on the LHS. */ 1338 may use a COMPONENT_REF on the LHS. */
1401 if (wi) 1339 if (wi)
1402 { 1340 {
1403 /* If the RHS has more than 1 operand, it is not appropriate 1341 /* If the RHS has more than 1 operand, it is not appropriate
1404 for the memory. */ 1342 for the memory. */
1405 wi->val_only = !is_gimple_mem_rhs (gimple_assign_rhs1 (stmt)) 1343 wi->val_only = !is_gimple_mem_rhs (gimple_assign_rhs1 (stmt))
1406 || !gimple_assign_single_p (stmt); 1344 || !gimple_assign_single_p (stmt);
1407 wi->is_lhs = true; 1345 wi->is_lhs = true;
1408 } 1346 }
1409 1347
1410 ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset); 1348 ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
1411 if (ret) 1349 if (ret)
1412 return ret; 1350 return ret;
1413 1351
1414 if (wi) 1352 if (wi)
1415 { 1353 {
1416 wi->val_only = true; 1354 wi->val_only = true;
1417 wi->is_lhs = false; 1355 wi->is_lhs = false;
1418 } 1356 }
1419 break; 1357 break;
1420 1358
1421 case GIMPLE_CALL: 1359 case GIMPLE_CALL:
1422 if (wi) 1360 if (wi)
1423 wi->is_lhs = false; 1361 wi->is_lhs = false;
1424 1362
1425 ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset); 1363 ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
1426 if (ret) 1364 if (ret)
1427 return ret; 1365 return ret;
1428 1366
1429 ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset); 1367 ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
1430 if (ret) 1368 if (ret)
1431 return ret; 1369 return ret;
1432 1370
1433 for (i = 0; i < gimple_call_num_args (stmt); i++) 1371 for (i = 0; i < gimple_call_num_args (stmt); i++)
1434 { 1372 {
1435 ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi, 1373 ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
1436 pset); 1374 pset);
1437 if (ret) 1375 if (ret)
1438 return ret; 1376 return ret;
1439 } 1377 }
1440 1378
1441 if (wi) 1379 if (wi)
1442 wi->is_lhs = true; 1380 wi->is_lhs = true;
1443 1381
1444 ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset); 1382 ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
1445 if (ret) 1383 if (ret)
1446 return ret; 1384 return ret;
1447 1385
1448 if (wi) 1386 if (wi)
1449 wi->is_lhs = false; 1387 wi->is_lhs = false;
1450 break; 1388 break;
1451 1389
1452 case GIMPLE_CATCH: 1390 case GIMPLE_CATCH:
1453 ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi, 1391 ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
1454 pset); 1392 pset);
1455 if (ret) 1393 if (ret)
1456 return ret; 1394 return ret;
1457 break; 1395 break;
1458 1396
1459 case GIMPLE_EH_FILTER: 1397 case GIMPLE_EH_FILTER:
1460 ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi, 1398 ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
1461 pset); 1399 pset);
1462 if (ret) 1400 if (ret)
1463 return ret; 1401 return ret;
1464 break;
1465
1466 case GIMPLE_CHANGE_DYNAMIC_TYPE:
1467 ret = walk_tree (gimple_cdt_location_ptr (stmt), callback_op, wi, pset);
1468 if (ret)
1469 return ret;
1470
1471 ret = walk_tree (gimple_cdt_new_type_ptr (stmt), callback_op, wi, pset);
1472 if (ret)
1473 return ret;
1474 break; 1402 break;
1475 1403
1476 case GIMPLE_ASM: 1404 case GIMPLE_ASM:
1477 ret = walk_gimple_asm (stmt, callback_op, wi); 1405 ret = walk_gimple_asm (stmt, callback_op, wi);
1478 if (ret) 1406 if (ret)
1479 return ret; 1407 return ret;
1480 break; 1408 break;
1481 1409
1482 case GIMPLE_OMP_CONTINUE: 1410 case GIMPLE_OMP_CONTINUE:
1483 ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt), 1411 ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
1484 callback_op, wi, pset); 1412 callback_op, wi, pset);
1485 if (ret) 1413 if (ret)
1486 return ret; 1414 return ret;
1487 1415
1488 ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt), 1416 ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
1489 callback_op, wi, pset); 1417 callback_op, wi, pset);
1490 if (ret) 1418 if (ret)
1491 return ret; 1419 return ret;
1492 break; 1420 break;
1493 1421
1494 case GIMPLE_OMP_CRITICAL: 1422 case GIMPLE_OMP_CRITICAL:
1495 ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi, 1423 ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
1496 pset); 1424 pset);
1497 if (ret) 1425 if (ret)
1498 return ret; 1426 return ret;
1499 break; 1427 break;
1500 1428
1501 case GIMPLE_OMP_FOR: 1429 case GIMPLE_OMP_FOR:
1502 ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi, 1430 ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
1503 pset); 1431 pset);
1504 if (ret) 1432 if (ret)
1505 return ret; 1433 return ret;
1506 for (i = 0; i < gimple_omp_for_collapse (stmt); i++) 1434 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1507 { 1435 {
1508 ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op, 1436 ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
1509 wi, pset); 1437 wi, pset);
1510 if (ret)
1511 return ret;
1512 ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
1513 wi, pset);
1514 if (ret)
1515 return ret;
1516 ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
1517 wi, pset);
1518 if (ret)
1519 return ret;
1520 ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
1521 wi, pset);
1522 }
1523 if (ret) 1438 if (ret)
1524 return ret; 1439 return ret;
1440 ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
1441 wi, pset);
1442 if (ret)
1443 return ret;
1444 ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
1445 wi, pset);
1446 if (ret)
1447 return ret;
1448 ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
1449 wi, pset);
1450 }
1451 if (ret)
1452 return ret;
1525 break; 1453 break;
1526 1454
1527 case GIMPLE_OMP_PARALLEL: 1455 case GIMPLE_OMP_PARALLEL:
1528 ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op, 1456 ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
1529 wi, pset); 1457 wi, pset);
1530 if (ret) 1458 if (ret)
1531 return ret; 1459 return ret;
1532 ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op, 1460 ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
1533 wi, pset); 1461 wi, pset);
1534 if (ret) 1462 if (ret)
1535 return ret; 1463 return ret;
1536 ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op, 1464 ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
1537 wi, pset); 1465 wi, pset);
1538 if (ret) 1466 if (ret)
1539 return ret; 1467 return ret;
1540 break; 1468 break;
1541 1469
1542 case GIMPLE_OMP_TASK: 1470 case GIMPLE_OMP_TASK:
1543 ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op, 1471 ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
1544 wi, pset); 1472 wi, pset);
1545 if (ret) 1473 if (ret)
1546 return ret; 1474 return ret;
1547 ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op, 1475 ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
1548 wi, pset); 1476 wi, pset);
1549 if (ret) 1477 if (ret)
1550 return ret; 1478 return ret;
1551 ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op, 1479 ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
1552 wi, pset); 1480 wi, pset);
1553 if (ret) 1481 if (ret)
1554 return ret; 1482 return ret;
1555 ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op, 1483 ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
1556 wi, pset); 1484 wi, pset);
1557 if (ret) 1485 if (ret)
1558 return ret; 1486 return ret;
1559 ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op, 1487 ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
1560 wi, pset); 1488 wi, pset);
1561 if (ret) 1489 if (ret)
1562 return ret; 1490 return ret;
1563 ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op, 1491 ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
1564 wi, pset); 1492 wi, pset);
1565 if (ret) 1493 if (ret)
1566 return ret; 1494 return ret;
1567 break; 1495 break;
1568 1496
1569 case GIMPLE_OMP_SECTIONS: 1497 case GIMPLE_OMP_SECTIONS:
1570 ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op, 1498 ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
1571 wi, pset); 1499 wi, pset);
1572 if (ret) 1500 if (ret)
1573 return ret; 1501 return ret;
1574 1502
1575 ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op, 1503 ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
1576 wi, pset); 1504 wi, pset);
1577 if (ret) 1505 if (ret)
1578 return ret; 1506 return ret;
1579 1507
1580 break; 1508 break;
1581 1509
1582 case GIMPLE_OMP_SINGLE: 1510 case GIMPLE_OMP_SINGLE:
1583 ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi, 1511 ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
1584 pset); 1512 pset);
1585 if (ret) 1513 if (ret)
1586 return ret; 1514 return ret;
1587 break; 1515 break;
1588 1516
1589 case GIMPLE_OMP_ATOMIC_LOAD: 1517 case GIMPLE_OMP_ATOMIC_LOAD:
1590 ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi, 1518 ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
1591 pset); 1519 pset);
1592 if (ret) 1520 if (ret)
1593 return ret; 1521 return ret;
1594 1522
1595 ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi, 1523 ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
1596 pset); 1524 pset);
1597 if (ret) 1525 if (ret)
1598 return ret; 1526 return ret;
1599 break; 1527 break;
1600 1528
1601 case GIMPLE_OMP_ATOMIC_STORE: 1529 case GIMPLE_OMP_ATOMIC_STORE:
1602 ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op, 1530 ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
1603 wi, pset); 1531 wi, pset);
1604 if (ret) 1532 if (ret)
1605 return ret; 1533 return ret;
1606 break; 1534 break;
1607 1535
1608 /* Tuples that do not have operands. */ 1536 /* Tuples that do not have operands. */
1609 case GIMPLE_NOP: 1537 case GIMPLE_NOP:
1610 case GIMPLE_RESX: 1538 case GIMPLE_RESX:
1612 case GIMPLE_PREDICT: 1540 case GIMPLE_PREDICT:
1613 break; 1541 break;
1614 1542
1615 default: 1543 default:
1616 { 1544 {
1617 enum gimple_statement_structure_enum gss; 1545 enum gimple_statement_structure_enum gss;
1618 gss = gimple_statement_structure (stmt); 1546 gss = gimple_statement_structure (stmt);
1619 if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS) 1547 if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
1620 for (i = 0; i < gimple_num_ops (stmt); i++) 1548 for (i = 0; i < gimple_num_ops (stmt); i++)
1621 { 1549 {
1622 ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset); 1550 ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
1623 if (ret) 1551 if (ret)
1624 return ret; 1552 return ret;
1625 } 1553 }
1626 } 1554 }
1627 break; 1555 break;
1628 } 1556 }
1629 1557
1630 return NULL_TREE; 1558 return NULL_TREE;
1646 1574
1647 In any other case, NULL_TREE is returned. */ 1575 In any other case, NULL_TREE is returned. */
1648 1576
1649 tree 1577 tree
1650 walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt, 1578 walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
1651 walk_tree_fn callback_op, struct walk_stmt_info *wi) 1579 walk_tree_fn callback_op, struct walk_stmt_info *wi)
1652 { 1580 {
1653 gimple ret; 1581 gimple ret;
1654 tree tree_ret; 1582 tree tree_ret;
1655 gimple stmt = gsi_stmt (*gsi); 1583 gimple stmt = gsi_stmt (*gsi);
1656 1584
1667 if (callback_stmt) 1595 if (callback_stmt)
1668 { 1596 {
1669 bool handled_ops = false; 1597 bool handled_ops = false;
1670 tree_ret = callback_stmt (gsi, &handled_ops, wi); 1598 tree_ret = callback_stmt (gsi, &handled_ops, wi);
1671 if (handled_ops) 1599 if (handled_ops)
1672 return tree_ret; 1600 return tree_ret;
1673 1601
1674 /* If CALLBACK_STMT did not handle operands, it should not have 1602 /* If CALLBACK_STMT did not handle operands, it should not have
1675 a value to return. */ 1603 a value to return. */
1676 gcc_assert (tree_ret == NULL); 1604 gcc_assert (tree_ret == NULL);
1677 1605
1678 /* Re-read stmt in case the callback changed it. */ 1606 /* Re-read stmt in case the callback changed it. */
1679 stmt = gsi_stmt (*gsi); 1607 stmt = gsi_stmt (*gsi);
1680 } 1608 }
1682 /* If CALLBACK_OP is defined, invoke it on every operand of STMT. */ 1610 /* If CALLBACK_OP is defined, invoke it on every operand of STMT. */
1683 if (callback_op) 1611 if (callback_op)
1684 { 1612 {
1685 tree_ret = walk_gimple_op (stmt, callback_op, wi); 1613 tree_ret = walk_gimple_op (stmt, callback_op, wi);
1686 if (tree_ret) 1614 if (tree_ret)
1687 return tree_ret; 1615 return tree_ret;
1688 } 1616 }
1689 1617
1690 /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them. */ 1618 /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them. */
1691 switch (gimple_code (stmt)) 1619 switch (gimple_code (stmt))
1692 { 1620 {
1693 case GIMPLE_BIND: 1621 case GIMPLE_BIND:
1694 ret = walk_gimple_seq (gimple_bind_body (stmt), callback_stmt, 1622 ret = walk_gimple_seq (gimple_bind_body (stmt), callback_stmt,
1695 callback_op, wi); 1623 callback_op, wi);
1696 if (ret) 1624 if (ret)
1697 return wi->callback_result; 1625 return wi->callback_result;
1698 break; 1626 break;
1699 1627
1700 case GIMPLE_CATCH: 1628 case GIMPLE_CATCH:
1701 ret = walk_gimple_seq (gimple_catch_handler (stmt), callback_stmt, 1629 ret = walk_gimple_seq (gimple_catch_handler (stmt), callback_stmt,
1702 callback_op, wi); 1630 callback_op, wi);
1703 if (ret) 1631 if (ret)
1704 return wi->callback_result; 1632 return wi->callback_result;
1705 break; 1633 break;
1706 1634
1707 case GIMPLE_EH_FILTER: 1635 case GIMPLE_EH_FILTER:
1708 ret = walk_gimple_seq (gimple_eh_filter_failure (stmt), callback_stmt, 1636 ret = walk_gimple_seq (gimple_eh_filter_failure (stmt), callback_stmt,
1709 callback_op, wi); 1637 callback_op, wi);
1710 if (ret) 1638 if (ret)
1711 return wi->callback_result; 1639 return wi->callback_result;
1712 break; 1640 break;
1713 1641
1714 case GIMPLE_TRY: 1642 case GIMPLE_TRY:
1715 ret = walk_gimple_seq (gimple_try_eval (stmt), callback_stmt, callback_op, 1643 ret = walk_gimple_seq (gimple_try_eval (stmt), callback_stmt, callback_op,
1716 wi); 1644 wi);
1717 if (ret) 1645 if (ret)
1718 return wi->callback_result; 1646 return wi->callback_result;
1719 1647
1720 ret = walk_gimple_seq (gimple_try_cleanup (stmt), callback_stmt, 1648 ret = walk_gimple_seq (gimple_try_cleanup (stmt), callback_stmt,
1721 callback_op, wi); 1649 callback_op, wi);
1722 if (ret) 1650 if (ret)
1723 return wi->callback_result; 1651 return wi->callback_result;
1724 break; 1652 break;
1725 1653
1726 case GIMPLE_OMP_FOR: 1654 case GIMPLE_OMP_FOR:
1727 ret = walk_gimple_seq (gimple_omp_for_pre_body (stmt), callback_stmt, 1655 ret = walk_gimple_seq (gimple_omp_for_pre_body (stmt), callback_stmt,
1728 callback_op, wi); 1656 callback_op, wi);
1729 if (ret) 1657 if (ret)
1730 return wi->callback_result; 1658 return wi->callback_result;
1731 1659
1732 /* FALL THROUGH. */ 1660 /* FALL THROUGH. */
1733 case GIMPLE_OMP_CRITICAL: 1661 case GIMPLE_OMP_CRITICAL:
1734 case GIMPLE_OMP_MASTER: 1662 case GIMPLE_OMP_MASTER:
1735 case GIMPLE_OMP_ORDERED: 1663 case GIMPLE_OMP_ORDERED:
1737 case GIMPLE_OMP_PARALLEL: 1665 case GIMPLE_OMP_PARALLEL:
1738 case GIMPLE_OMP_TASK: 1666 case GIMPLE_OMP_TASK:
1739 case GIMPLE_OMP_SECTIONS: 1667 case GIMPLE_OMP_SECTIONS:
1740 case GIMPLE_OMP_SINGLE: 1668 case GIMPLE_OMP_SINGLE:
1741 ret = walk_gimple_seq (gimple_omp_body (stmt), callback_stmt, callback_op, 1669 ret = walk_gimple_seq (gimple_omp_body (stmt), callback_stmt, callback_op,
1742 wi); 1670 wi);
1743 if (ret) 1671 if (ret)
1744 return wi->callback_result; 1672 return wi->callback_result;
1745 break; 1673 break;
1746 1674
1747 case GIMPLE_WITH_CLEANUP_EXPR: 1675 case GIMPLE_WITH_CLEANUP_EXPR:
1748 ret = walk_gimple_seq (gimple_wce_cleanup (stmt), callback_stmt, 1676 ret = walk_gimple_seq (gimple_wce_cleanup (stmt), callback_stmt,
1749 callback_op, wi); 1677 callback_op, wi);
1750 if (ret) 1678 if (ret)
1751 return wi->callback_result; 1679 return wi->callback_result;
1752 break; 1680 break;
1753 1681
1754 default: 1682 default:
1755 gcc_assert (!gimple_has_substatements (stmt)); 1683 gcc_assert (!gimple_has_substatements (stmt));
1756 break; 1684 break;
1767 { 1695 {
1768 struct function *fn = DECL_STRUCT_FUNCTION (fndecl); 1696 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1769 if (fn == NULL) 1697 if (fn == NULL)
1770 { 1698 {
1771 /* If FNDECL still does not have a function structure associated 1699 /* If FNDECL still does not have a function structure associated
1772 with it, then it does not make sense for it to receive a 1700 with it, then it does not make sense for it to receive a
1773 GIMPLE body. */ 1701 GIMPLE body. */
1774 gcc_assert (seq == NULL); 1702 gcc_assert (seq == NULL);
1775 } 1703 }
1776 else 1704 else
1777 fn->gimple_body = seq; 1705 fn->gimple_body = seq;
1778 } 1706 }
1810 flags = flags_from_decl_or_type (decl); 1738 flags = flags_from_decl_or_type (decl);
1811 else 1739 else
1812 { 1740 {
1813 t = TREE_TYPE (gimple_call_fn (stmt)); 1741 t = TREE_TYPE (gimple_call_fn (stmt));
1814 if (t && TREE_CODE (t) == POINTER_TYPE) 1742 if (t && TREE_CODE (t) == POINTER_TYPE)
1815 flags = flags_from_decl_or_type (TREE_TYPE (t)); 1743 flags = flags_from_decl_or_type (TREE_TYPE (t));
1816 else 1744 else
1817 flags = 0; 1745 flags = 0;
1818 } 1746 }
1819 1747
1820 return flags; 1748 return flags;
1821 } 1749 }
1822 1750
1826 bool 1754 bool
1827 gimple_assign_copy_p (gimple gs) 1755 gimple_assign_copy_p (gimple gs)
1828 { 1756 {
1829 return gimple_code (gs) == GIMPLE_ASSIGN 1757 return gimple_code (gs) == GIMPLE_ASSIGN
1830 && get_gimple_rhs_class (gimple_assign_rhs_code (gs)) 1758 && get_gimple_rhs_class (gimple_assign_rhs_code (gs))
1831 == GIMPLE_SINGLE_RHS 1759 == GIMPLE_SINGLE_RHS
1832 && is_gimple_val (gimple_op (gs, 1)); 1760 && is_gimple_val (gimple_op (gs, 1));
1833 } 1761 }
1834 1762
1835 1763
1836 /* Return true if GS is a SSA_NAME copy assignment. */ 1764 /* Return true if GS is a SSA_NAME copy assignment. */
1837 1765
1838 bool 1766 bool
1839 gimple_assign_ssa_name_copy_p (gimple gs) 1767 gimple_assign_ssa_name_copy_p (gimple gs)
1840 { 1768 {
1841 return (gimple_code (gs) == GIMPLE_ASSIGN 1769 return (gimple_code (gs) == GIMPLE_ASSIGN
1842 && (get_gimple_rhs_class (gimple_assign_rhs_code (gs)) 1770 && (get_gimple_rhs_class (gimple_assign_rhs_code (gs))
1843 == GIMPLE_SINGLE_RHS) 1771 == GIMPLE_SINGLE_RHS)
1844 && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME 1772 && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
1845 && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME); 1773 && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
1846 } 1774 }
1847 1775
1848 1776
1849 /* Return true if GS is an assignment with a singleton RHS, i.e., 1777 /* Return true if GS is an assignment with a singleton RHS, i.e.,
1850 there is no operator associated with the assignment itself. 1778 there is no operator associated with the assignment itself.
1855 bool 1783 bool
1856 gimple_assign_single_p (gimple gs) 1784 gimple_assign_single_p (gimple gs)
1857 { 1785 {
1858 return (gimple_code (gs) == GIMPLE_ASSIGN 1786 return (gimple_code (gs) == GIMPLE_ASSIGN
1859 && get_gimple_rhs_class (gimple_assign_rhs_code (gs)) 1787 && get_gimple_rhs_class (gimple_assign_rhs_code (gs))
1860 == GIMPLE_SINGLE_RHS); 1788 == GIMPLE_SINGLE_RHS);
1861 } 1789 }
1862 1790
1863 /* Return true if GS is an assignment with a unary RHS, but the 1791 /* Return true if GS is an assignment with a unary RHS, but the
1864 operator has no effect on the assigned value. The logic is adapted 1792 operator has no effect on the assigned value. The logic is adapted
1865 from STRIP_NOPS. This predicate is intended to be used in tuplifying 1793 from STRIP_NOPS. This predicate is intended to be used in tuplifying
1871 condition and to proceed in the same manner. In each case, the 1799 condition and to proceed in the same manner. In each case, the
1872 assigned value is represented by the single RHS operand of the 1800 assigned value is represented by the single RHS operand of the
1873 assignment. I suspect there may be cases where gimple_assign_copy_p, 1801 assignment. I suspect there may be cases where gimple_assign_copy_p,
1874 gimple_assign_single_p, or equivalent logic is used where a similar 1802 gimple_assign_single_p, or equivalent logic is used where a similar
1875 treatment of unary NOPs is appropriate. */ 1803 treatment of unary NOPs is appropriate. */
1876 1804
1877 bool 1805 bool
1878 gimple_assign_unary_nop_p (gimple gs) 1806 gimple_assign_unary_nop_p (gimple gs)
1879 { 1807 {
1880 return (gimple_code (gs) == GIMPLE_ASSIGN 1808 return (gimple_code (gs) == GIMPLE_ASSIGN
1881 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs)) 1809 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
1900 int uid; 1828 int uid;
1901 1829
1902 t = gimple_label_label (stmt); 1830 t = gimple_label_label (stmt);
1903 uid = LABEL_DECL_UID (t); 1831 uid = LABEL_DECL_UID (t);
1904 if (uid == -1) 1832 if (uid == -1)
1905 { 1833 {
1906 unsigned old_len = VEC_length (basic_block, label_to_block_map); 1834 unsigned old_len = VEC_length (basic_block, label_to_block_map);
1907 LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++; 1835 LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
1908 if (old_len <= (unsigned) uid) 1836 if (old_len <= (unsigned) uid)
1909 { 1837 {
1910 unsigned new_len = 3 * uid / 2; 1838 unsigned new_len = 3 * uid / 2 + 1;
1911 1839
1912 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map, 1840 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
1913 new_len); 1841 new_len);
1914 } 1842 }
1915 } 1843 }
1916 1844
1917 VEC_replace (basic_block, label_to_block_map, uid, bb); 1845 VEC_replace (basic_block, label_to_block_map, uid, bb);
1918 } 1846 }
1919 } 1847 }
1920 1848
1924 not modified. */ 1852 not modified. */
1925 1853
1926 tree 1854 tree
1927 gimple_fold (const_gimple stmt) 1855 gimple_fold (const_gimple stmt)
1928 { 1856 {
1857 location_t loc = gimple_location (stmt);
1929 switch (gimple_code (stmt)) 1858 switch (gimple_code (stmt))
1930 { 1859 {
1931 case GIMPLE_COND: 1860 case GIMPLE_COND:
1932 return fold_binary (gimple_cond_code (stmt), 1861 return fold_binary_loc (loc, gimple_cond_code (stmt),
1933 boolean_type_node, 1862 boolean_type_node,
1934 gimple_cond_lhs (stmt), 1863 gimple_cond_lhs (stmt),
1935 gimple_cond_rhs (stmt)); 1864 gimple_cond_rhs (stmt));
1936 1865
1937 case GIMPLE_ASSIGN: 1866 case GIMPLE_ASSIGN:
1938 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))) 1867 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
1939 { 1868 {
1940 case GIMPLE_UNARY_RHS: 1869 case GIMPLE_UNARY_RHS:
1941 return fold_unary (gimple_assign_rhs_code (stmt), 1870 return fold_unary_loc (loc, gimple_assign_rhs_code (stmt),
1942 TREE_TYPE (gimple_assign_lhs (stmt)), 1871 TREE_TYPE (gimple_assign_lhs (stmt)),
1943 gimple_assign_rhs1 (stmt)); 1872 gimple_assign_rhs1 (stmt));
1944 case GIMPLE_BINARY_RHS: 1873 case GIMPLE_BINARY_RHS:
1945 return fold_binary (gimple_assign_rhs_code (stmt), 1874 return fold_binary_loc (loc, gimple_assign_rhs_code (stmt),
1946 TREE_TYPE (gimple_assign_lhs (stmt)), 1875 TREE_TYPE (gimple_assign_lhs (stmt)),
1947 gimple_assign_rhs1 (stmt), 1876 gimple_assign_rhs1 (stmt),
1948 gimple_assign_rhs2 (stmt)); 1877 gimple_assign_rhs2 (stmt));
1949 case GIMPLE_SINGLE_RHS: 1878 case GIMPLE_SINGLE_RHS:
1950 return fold (gimple_assign_rhs1 (stmt)); 1879 return fold (gimple_assign_rhs1 (stmt));
1951 default:; 1880 default:;
1952 } 1881 }
1953 break; 1882 break;
1954 1883
1955 case GIMPLE_SWITCH: 1884 case GIMPLE_SWITCH:
1956 return gimple_switch_index (stmt); 1885 return gimple_switch_index (stmt);
1957 1886
2000 NOTE: The statement pointed-to by GSI may be reallocated if it 1929 NOTE: The statement pointed-to by GSI may be reallocated if it
2001 did not have enough operand slots. */ 1930 did not have enough operand slots. */
2002 1931
2003 void 1932 void
2004 gimple_assign_set_rhs_with_ops (gimple_stmt_iterator *gsi, enum tree_code code, 1933 gimple_assign_set_rhs_with_ops (gimple_stmt_iterator *gsi, enum tree_code code,
2005 tree op1, tree op2) 1934 tree op1, tree op2)
2006 { 1935 {
2007 unsigned new_rhs_ops = get_gimple_rhs_num_ops (code); 1936 unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
2008 gimple stmt = gsi_stmt (*gsi); 1937 gimple stmt = gsi_stmt (*gsi);
2009 1938
2010 /* If the new CODE needs more operands, allocate a new statement. */ 1939 /* If the new CODE needs more operands, allocate a new statement. */
2015 memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt))); 1944 memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
2016 gsi_replace (gsi, new_stmt, true); 1945 gsi_replace (gsi, new_stmt, true);
2017 stmt = new_stmt; 1946 stmt = new_stmt;
2018 1947
2019 /* The LHS needs to be reset as this also changes the SSA name 1948 /* The LHS needs to be reset as this also changes the SSA name
2020 on the LHS. */ 1949 on the LHS. */
2021 gimple_assign_set_lhs (stmt, lhs); 1950 gimple_assign_set_lhs (stmt, lhs);
2022 } 1951 }
2023 1952
2024 gimple_set_num_ops (stmt, new_rhs_ops + 1); 1953 gimple_set_num_ops (stmt, new_rhs_ops + 1);
2025 gimple_set_subcode (stmt, code); 1954 gimple_set_subcode (stmt, code);
2062 gimple_call_set_lhs (stmt, lhs); 1991 gimple_call_set_lhs (stmt, lhs);
2063 else 1992 else
2064 gcc_unreachable(); 1993 gcc_unreachable();
2065 } 1994 }
2066 1995
1996 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
1997 GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
1998 expression with a different value.
1999
2000 This will update any annotations (say debug bind stmts) referring
2001 to the original LHS, so that they use the RHS instead. This is
2002 done even if NLHS and LHS are the same, for it is understood that
2003 the RHS will be modified afterwards, and NLHS will not be assigned
2004 an equivalent value.
2005
2006 Adjusting any non-annotation uses of the LHS, if needed, is a
2007 responsibility of the caller.
2008
2009 The effect of this call should be pretty much the same as that of
2010 inserting a copy of STMT before STMT, and then removing the
2011 original stmt, at which time gsi_remove() would have update
2012 annotations, but using this function saves all the inserting,
2013 copying and removing. */
2014
2015 void
2016 gimple_replace_lhs (gimple stmt, tree nlhs)
2017 {
2018 if (MAY_HAVE_DEBUG_STMTS)
2019 {
2020 tree lhs = gimple_get_lhs (stmt);
2021
2022 gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
2023
2024 insert_debug_temp_for_var_def (NULL, lhs);
2025 }
2026
2027 gimple_set_lhs (stmt, nlhs);
2028 }
2067 2029
2068 /* Return a deep copy of statement STMT. All the operands from STMT 2030 /* Return a deep copy of statement STMT. All the operands from STMT
2069 are reallocated and copied using unshare_expr. The DEF, USE, VDEF 2031 are reallocated and copied using unshare_expr. The DEF, USE, VDEF
2070 and VUSE operand arrays are set to empty in the new copy. */ 2032 and VUSE operand arrays are set to empty in the new copy. */
2071 2033
2085 { 2047 {
2086 gimple_seq new_seq; 2048 gimple_seq new_seq;
2087 tree t; 2049 tree t;
2088 2050
2089 switch (gimple_code (stmt)) 2051 switch (gimple_code (stmt))
2090 { 2052 {
2091 case GIMPLE_BIND: 2053 case GIMPLE_BIND:
2092 new_seq = gimple_seq_copy (gimple_bind_body (stmt)); 2054 new_seq = gimple_seq_copy (gimple_bind_body (stmt));
2093 gimple_bind_set_body (copy, new_seq); 2055 gimple_bind_set_body (copy, new_seq);
2094 gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt))); 2056 gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
2095 gimple_bind_set_block (copy, gimple_bind_block (stmt)); 2057 gimple_bind_set_block (copy, gimple_bind_block (stmt));
2096 break; 2058 break;
2097 2059
2098 case GIMPLE_CATCH: 2060 case GIMPLE_CATCH:
2099 new_seq = gimple_seq_copy (gimple_catch_handler (stmt)); 2061 new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
2100 gimple_catch_set_handler (copy, new_seq); 2062 gimple_catch_set_handler (copy, new_seq);
2101 t = unshare_expr (gimple_catch_types (stmt)); 2063 t = unshare_expr (gimple_catch_types (stmt));
2102 gimple_catch_set_types (copy, t); 2064 gimple_catch_set_types (copy, t);
2103 break; 2065 break;
2104 2066
2105 case GIMPLE_EH_FILTER: 2067 case GIMPLE_EH_FILTER:
2106 new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt)); 2068 new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
2107 gimple_eh_filter_set_failure (copy, new_seq); 2069 gimple_eh_filter_set_failure (copy, new_seq);
2108 t = unshare_expr (gimple_eh_filter_types (stmt)); 2070 t = unshare_expr (gimple_eh_filter_types (stmt));
2109 gimple_eh_filter_set_types (copy, t); 2071 gimple_eh_filter_set_types (copy, t);
2110 break; 2072 break;
2111 2073
2112 case GIMPLE_TRY: 2074 case GIMPLE_TRY:
2113 new_seq = gimple_seq_copy (gimple_try_eval (stmt)); 2075 new_seq = gimple_seq_copy (gimple_try_eval (stmt));
2114 gimple_try_set_eval (copy, new_seq); 2076 gimple_try_set_eval (copy, new_seq);
2115 new_seq = gimple_seq_copy (gimple_try_cleanup (stmt)); 2077 new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
2116 gimple_try_set_cleanup (copy, new_seq); 2078 gimple_try_set_cleanup (copy, new_seq);
2117 break; 2079 break;
2118 2080
2119 case GIMPLE_OMP_FOR: 2081 case GIMPLE_OMP_FOR:
2120 new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt)); 2082 new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
2121 gimple_omp_for_set_pre_body (copy, new_seq); 2083 gimple_omp_for_set_pre_body (copy, new_seq);
2122 t = unshare_expr (gimple_omp_for_clauses (stmt)); 2084 t = unshare_expr (gimple_omp_for_clauses (stmt));
2123 gimple_omp_for_set_clauses (copy, t); 2085 gimple_omp_for_set_clauses (copy, t);
2124 copy->gimple_omp_for.iter 2086 copy->gimple_omp_for.iter
2125 = GGC_NEWVEC (struct gimple_omp_for_iter, 2087 = GGC_NEWVEC (struct gimple_omp_for_iter,
2126 gimple_omp_for_collapse (stmt)); 2088 gimple_omp_for_collapse (stmt));
2127 for (i = 0; i < gimple_omp_for_collapse (stmt); i++) 2089 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2128 { 2090 {
2129 gimple_omp_for_set_cond (copy, i, 2091 gimple_omp_for_set_cond (copy, i,
2130 gimple_omp_for_cond (stmt, i)); 2092 gimple_omp_for_cond (stmt, i));
2131 gimple_omp_for_set_index (copy, i, 2093 gimple_omp_for_set_index (copy, i,
2132 gimple_omp_for_index (stmt, i)); 2094 gimple_omp_for_index (stmt, i));
2133 t = unshare_expr (gimple_omp_for_initial (stmt, i)); 2095 t = unshare_expr (gimple_omp_for_initial (stmt, i));
2134 gimple_omp_for_set_initial (copy, i, t); 2096 gimple_omp_for_set_initial (copy, i, t);
2135 t = unshare_expr (gimple_omp_for_final (stmt, i)); 2097 t = unshare_expr (gimple_omp_for_final (stmt, i));
2136 gimple_omp_for_set_final (copy, i, t); 2098 gimple_omp_for_set_final (copy, i, t);
2137 t = unshare_expr (gimple_omp_for_incr (stmt, i)); 2099 t = unshare_expr (gimple_omp_for_incr (stmt, i));
2138 gimple_omp_for_set_incr (copy, i, t); 2100 gimple_omp_for_set_incr (copy, i, t);
2139 } 2101 }
2140 goto copy_omp_body; 2102 goto copy_omp_body;
2141 2103
2142 case GIMPLE_OMP_PARALLEL: 2104 case GIMPLE_OMP_PARALLEL:
2143 t = unshare_expr (gimple_omp_parallel_clauses (stmt)); 2105 t = unshare_expr (gimple_omp_parallel_clauses (stmt));
2144 gimple_omp_parallel_set_clauses (copy, t); 2106 gimple_omp_parallel_set_clauses (copy, t);
2145 t = unshare_expr (gimple_omp_parallel_child_fn (stmt)); 2107 t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
2146 gimple_omp_parallel_set_child_fn (copy, t); 2108 gimple_omp_parallel_set_child_fn (copy, t);
2147 t = unshare_expr (gimple_omp_parallel_data_arg (stmt)); 2109 t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
2148 gimple_omp_parallel_set_data_arg (copy, t); 2110 gimple_omp_parallel_set_data_arg (copy, t);
2149 goto copy_omp_body; 2111 goto copy_omp_body;
2150 2112
2151 case GIMPLE_OMP_TASK: 2113 case GIMPLE_OMP_TASK:
2152 t = unshare_expr (gimple_omp_task_clauses (stmt)); 2114 t = unshare_expr (gimple_omp_task_clauses (stmt));
2153 gimple_omp_task_set_clauses (copy, t); 2115 gimple_omp_task_set_clauses (copy, t);
2154 t = unshare_expr (gimple_omp_task_child_fn (stmt)); 2116 t = unshare_expr (gimple_omp_task_child_fn (stmt));
2155 gimple_omp_task_set_child_fn (copy, t); 2117 gimple_omp_task_set_child_fn (copy, t);
2156 t = unshare_expr (gimple_omp_task_data_arg (stmt)); 2118 t = unshare_expr (gimple_omp_task_data_arg (stmt));
2157 gimple_omp_task_set_data_arg (copy, t); 2119 gimple_omp_task_set_data_arg (copy, t);
2158 t = unshare_expr (gimple_omp_task_copy_fn (stmt)); 2120 t = unshare_expr (gimple_omp_task_copy_fn (stmt));
2159 gimple_omp_task_set_copy_fn (copy, t); 2121 gimple_omp_task_set_copy_fn (copy, t);
2160 t = unshare_expr (gimple_omp_task_arg_size (stmt)); 2122 t = unshare_expr (gimple_omp_task_arg_size (stmt));
2161 gimple_omp_task_set_arg_size (copy, t); 2123 gimple_omp_task_set_arg_size (copy, t);
2162 t = unshare_expr (gimple_omp_task_arg_align (stmt)); 2124 t = unshare_expr (gimple_omp_task_arg_align (stmt));
2163 gimple_omp_task_set_arg_align (copy, t); 2125 gimple_omp_task_set_arg_align (copy, t);
2164 goto copy_omp_body; 2126 goto copy_omp_body;
2165 2127
2166 case GIMPLE_OMP_CRITICAL: 2128 case GIMPLE_OMP_CRITICAL:
2167 t = unshare_expr (gimple_omp_critical_name (stmt)); 2129 t = unshare_expr (gimple_omp_critical_name (stmt));
2168 gimple_omp_critical_set_name (copy, t); 2130 gimple_omp_critical_set_name (copy, t);
2169 goto copy_omp_body; 2131 goto copy_omp_body;
2170 2132
2171 case GIMPLE_OMP_SECTIONS: 2133 case GIMPLE_OMP_SECTIONS:
2172 t = unshare_expr (gimple_omp_sections_clauses (stmt)); 2134 t = unshare_expr (gimple_omp_sections_clauses (stmt));
2173 gimple_omp_sections_set_clauses (copy, t); 2135 gimple_omp_sections_set_clauses (copy, t);
2174 t = unshare_expr (gimple_omp_sections_control (stmt)); 2136 t = unshare_expr (gimple_omp_sections_control (stmt));
2175 gimple_omp_sections_set_control (copy, t); 2137 gimple_omp_sections_set_control (copy, t);
2176 /* FALLTHRU */ 2138 /* FALLTHRU */
2177 2139
2178 case GIMPLE_OMP_SINGLE: 2140 case GIMPLE_OMP_SINGLE:
2179 case GIMPLE_OMP_SECTION: 2141 case GIMPLE_OMP_SECTION:
2180 case GIMPLE_OMP_MASTER: 2142 case GIMPLE_OMP_MASTER:
2181 case GIMPLE_OMP_ORDERED: 2143 case GIMPLE_OMP_ORDERED:
2182 copy_omp_body: 2144 copy_omp_body:
2183 new_seq = gimple_seq_copy (gimple_omp_body (stmt)); 2145 new_seq = gimple_seq_copy (gimple_omp_body (stmt));
2184 gimple_omp_set_body (copy, new_seq); 2146 gimple_omp_set_body (copy, new_seq);
2185 break; 2147 break;
2186 2148
2187 case GIMPLE_WITH_CLEANUP_EXPR: 2149 case GIMPLE_WITH_CLEANUP_EXPR:
2188 new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt)); 2150 new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
2189 gimple_wce_set_cleanup (copy, new_seq); 2151 gimple_wce_set_cleanup (copy, new_seq);
2190 break; 2152 break;
2191 2153
2192 default: 2154 default:
2193 gcc_unreachable (); 2155 gcc_unreachable ();
2194 } 2156 }
2195 } 2157 }
2196 2158
2197 /* Make copy of operands. */ 2159 /* Make copy of operands. */
2198 if (num_ops > 0) 2160 if (num_ops > 0)
2199 { 2161 {
2200 for (i = 0; i < num_ops; i++) 2162 for (i = 0; i < num_ops; i++)
2201 gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i))); 2163 gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));
2202 2164
2203 /* Clear out SSA operand vectors on COPY. Note that we cannot 2165 /* Clear out SSA operand vectors on COPY. */
2204 call the API functions for setting addresses_taken, stores
2205 and loads. These functions free the previous values, and we
2206 cannot do that on COPY as it will affect the original
2207 statement. */
2208 if (gimple_has_ops (stmt)) 2166 if (gimple_has_ops (stmt))
2209 { 2167 {
2210 gimple_set_def_ops (copy, NULL); 2168 gimple_set_def_ops (copy, NULL);
2211 gimple_set_use_ops (copy, NULL); 2169 gimple_set_use_ops (copy, NULL);
2212 copy->gsops.opbase.addresses_taken = NULL; 2170 }
2213 }
2214 2171
2215 if (gimple_has_mem_ops (stmt)) 2172 if (gimple_has_mem_ops (stmt))
2216 { 2173 {
2217 gimple_set_vdef_ops (copy, NULL); 2174 gimple_set_vdef (copy, gimple_vdef (stmt));
2218 gimple_set_vuse_ops (copy, NULL); 2175 gimple_set_vuse (copy, gimple_vuse (stmt));
2219 copy->gsmem.membase.stores = NULL; 2176 }
2220 copy->gsmem.membase.loads = NULL; 2177
2221 } 2178 /* SSA operands need to be updated. */
2222 2179 gimple_set_modified (copy, true);
2223 update_stmt (copy);
2224 } 2180 }
2225 2181
2226 return copy; 2182 return copy;
2227 } 2183 }
2228 2184
2236 if (gimple_has_ops (s)) 2192 if (gimple_has_ops (s))
2237 { 2193 {
2238 s->gsbase.modified = (unsigned) modifiedp; 2194 s->gsbase.modified = (unsigned) modifiedp;
2239 2195
2240 if (modifiedp 2196 if (modifiedp
2241 && cfun->gimple_df 2197 && cfun->gimple_df
2242 && is_gimple_call (s) 2198 && is_gimple_call (s)
2243 && gimple_call_noreturn_p (s)) 2199 && gimple_call_noreturn_p (s))
2244 VEC_safe_push (gimple, gc, MODIFIED_NORETURN_CALLS (cfun), s); 2200 VEC_safe_push (gimple, gc, MODIFIED_NORETURN_CALLS (cfun), s);
2245 } 2201 }
2246 } 2202 }
2247 2203
2248 2204
2249 /* Return true if statement S has side-effects. We consider a 2205 /* Return true if statement S has side-effects. We consider a
2254 2210
2255 bool 2211 bool
2256 gimple_has_side_effects (const_gimple s) 2212 gimple_has_side_effects (const_gimple s)
2257 { 2213 {
2258 unsigned i; 2214 unsigned i;
2215
2216 if (is_gimple_debug (s))
2217 return false;
2259 2218
2260 /* We don't have to scan the arguments to check for 2219 /* We don't have to scan the arguments to check for
2261 volatile arguments, though, at present, we still 2220 volatile arguments, though, at present, we still
2262 do a scan to check for TREE_SIDE_EFFECTS. */ 2221 do a scan to check for TREE_SIDE_EFFECTS. */
2263 if (gimple_has_volatile_ops (s)) 2222 if (gimple_has_volatile_ops (s))
2268 unsigned nargs = gimple_call_num_args (s); 2227 unsigned nargs = gimple_call_num_args (s);
2269 2228
2270 if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE))) 2229 if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
2271 return true; 2230 return true;
2272 else if (gimple_call_flags (s) & ECF_LOOPING_CONST_OR_PURE) 2231 else if (gimple_call_flags (s) & ECF_LOOPING_CONST_OR_PURE)
2273 /* An infinite loop is considered a side effect. */ 2232 /* An infinite loop is considered a side effect. */
2274 return true; 2233 return true;
2275 2234
2276 if (gimple_call_lhs (s) 2235 if (gimple_call_lhs (s)
2277 && TREE_SIDE_EFFECTS (gimple_call_lhs (s))) 2236 && TREE_SIDE_EFFECTS (gimple_call_lhs (s)))
2278 { 2237 {
2279 gcc_assert (gimple_has_volatile_ops (s)); 2238 gcc_assert (gimple_has_volatile_ops (s));
2280 return true; 2239 return true;
2281 } 2240 }
2282 2241
2283 if (TREE_SIDE_EFFECTS (gimple_call_fn (s))) 2242 if (TREE_SIDE_EFFECTS (gimple_call_fn (s)))
2284 return true; 2243 return true;
2285 2244
2286 for (i = 0; i < nargs; i++) 2245 for (i = 0; i < nargs; i++)
2287 if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i))) 2246 if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i)))
2288 { 2247 {
2289 gcc_assert (gimple_has_volatile_ops (s)); 2248 gcc_assert (gimple_has_volatile_ops (s));
2290 return true; 2249 return true;
2291 } 2250 }
2292 2251
2293 return false; 2252 return false;
2294 } 2253 }
2295 else 2254 else
2296 { 2255 {
2297 for (i = 0; i < gimple_num_ops (s); i++) 2256 for (i = 0; i < gimple_num_ops (s); i++)
2298 if (TREE_SIDE_EFFECTS (gimple_op (s, i))) 2257 if (TREE_SIDE_EFFECTS (gimple_op (s, i)))
2299 { 2258 {
2300 gcc_assert (gimple_has_volatile_ops (s)); 2259 gcc_assert (gimple_has_volatile_ops (s));
2301 return true; 2260 return true;
2302 } 2261 }
2303 } 2262 }
2304 2263
2305 return false; 2264 return false;
2306 } 2265 }
2307 2266
2325 2284
2326 /* We cannot use gimple_has_volatile_ops here, 2285 /* We cannot use gimple_has_volatile_ops here,
2327 because we must ignore a volatile LHS. */ 2286 because we must ignore a volatile LHS. */
2328 if (TREE_SIDE_EFFECTS (gimple_call_fn (s)) 2287 if (TREE_SIDE_EFFECTS (gimple_call_fn (s))
2329 || TREE_THIS_VOLATILE (gimple_call_fn (s))) 2288 || TREE_THIS_VOLATILE (gimple_call_fn (s)))
2330 { 2289 {
2331 gcc_assert (gimple_has_volatile_ops (s)); 2290 gcc_assert (gimple_has_volatile_ops (s));
2332 return true; 2291 return true;
2333 } 2292 }
2334 2293
2335 for (i = 0; i < nargs; i++) 2294 for (i = 0; i < nargs; i++)
2336 if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i)) 2295 if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i))
2337 || TREE_THIS_VOLATILE (gimple_call_arg (s, i))) 2296 || TREE_THIS_VOLATILE (gimple_call_arg (s, i)))
2338 return true; 2297 return true;
2341 } 2300 }
2342 else if (is_gimple_assign (s)) 2301 else if (is_gimple_assign (s))
2343 { 2302 {
2344 /* Skip the first operand, the LHS. */ 2303 /* Skip the first operand, the LHS. */
2345 for (i = 1; i < gimple_num_ops (s); i++) 2304 for (i = 1; i < gimple_num_ops (s); i++)
2346 if (TREE_SIDE_EFFECTS (gimple_op (s, i)) 2305 if (TREE_SIDE_EFFECTS (gimple_op (s, i))
2347 || TREE_THIS_VOLATILE (gimple_op (s, i))) 2306 || TREE_THIS_VOLATILE (gimple_op (s, i)))
2348 { 2307 {
2349 gcc_assert (gimple_has_volatile_ops (s)); 2308 gcc_assert (gimple_has_volatile_ops (s));
2350 return true; 2309 return true;
2351 } 2310 }
2352 } 2311 }
2312 else if (is_gimple_debug (s))
2313 return false;
2353 else 2314 else
2354 { 2315 {
2355 /* For statements without an LHS, examine all arguments. */ 2316 /* For statements without an LHS, examine all arguments. */
2356 for (i = 0; i < gimple_num_ops (s); i++) 2317 for (i = 0; i < gimple_num_ops (s); i++)
2357 if (TREE_SIDE_EFFECTS (gimple_op (s, i)) 2318 if (TREE_SIDE_EFFECTS (gimple_op (s, i))
2358 || TREE_THIS_VOLATILE (gimple_op (s, i))) 2319 || TREE_THIS_VOLATILE (gimple_op (s, i)))
2359 { 2320 {
2360 gcc_assert (gimple_has_volatile_ops (s)); 2321 gcc_assert (gimple_has_volatile_ops (s));
2361 return true; 2322 return true;
2362 } 2323 }
2363 } 2324 }
2364 2325
2365 return false; 2326 return false;
2366 } 2327 }
2367 2328
2391 2352
2392 case GIMPLE_CALL: 2353 case GIMPLE_CALL:
2393 t = gimple_call_fndecl (s); 2354 t = gimple_call_fndecl (s);
2394 /* Assume that calls to weak functions may trap. */ 2355 /* Assume that calls to weak functions may trap. */
2395 if (!t || !DECL_P (t) || DECL_WEAK (t)) 2356 if (!t || !DECL_P (t) || DECL_WEAK (t))
2396 return true; 2357 return true;
2397 return false; 2358 return false;
2398 2359
2399 case GIMPLE_ASSIGN: 2360 case GIMPLE_ASSIGN:
2400 t = gimple_expr_type (s); 2361 t = gimple_expr_type (s);
2401 op = gimple_assign_rhs_code (s); 2362 op = gimple_assign_rhs_code (s);
2402 if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS) 2363 if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
2403 div = gimple_assign_rhs2 (s); 2364 div = gimple_assign_rhs2 (s);
2404 return (operation_could_trap_p (op, FLOAT_TYPE_P (t), 2365 return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
2405 (INTEGRAL_TYPE_P (t) 2366 (INTEGRAL_TYPE_P (t)
2406 && TYPE_OVERFLOW_TRAPS (t)), 2367 && TYPE_OVERFLOW_TRAPS (t)),
2407 div)); 2368 div));
2408 2369
2409 default: 2370 default:
2410 break; 2371 break;
2411 } 2372 }
2412 2373
2446 fprintf (stderr, "Kind Stmts Bytes\n"); 2407 fprintf (stderr, "Kind Stmts Bytes\n");
2447 fprintf (stderr, "---------------------------------------\n"); 2408 fprintf (stderr, "---------------------------------------\n");
2448 for (i = 0; i < (int) gimple_alloc_kind_all; ++i) 2409 for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
2449 { 2410 {
2450 fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i], 2411 fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
2451 gimple_alloc_counts[i], gimple_alloc_sizes[i]); 2412 gimple_alloc_counts[i], gimple_alloc_sizes[i]);
2452 total_tuples += gimple_alloc_counts[i]; 2413 total_tuples += gimple_alloc_counts[i];
2453 total_bytes += gimple_alloc_sizes[i]; 2414 total_bytes += gimple_alloc_sizes[i];
2454 } 2415 }
2455 fprintf (stderr, "---------------------------------------\n"); 2416 fprintf (stderr, "---------------------------------------\n");
2456 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes); 2417 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
2459 fprintf (stderr, "No gimple statistics\n"); 2420 fprintf (stderr, "No gimple statistics\n");
2460 #endif 2421 #endif
2461 } 2422 }
2462 2423
2463 2424
2464 /* Deep copy SYMS into the set of symbols stored by STMT. If SYMS is
2465 NULL or empty, the storage used is freed up. */
2466
2467 void
2468 gimple_set_stored_syms (gimple stmt, bitmap syms, bitmap_obstack *obs)
2469 {
2470 gcc_assert (gimple_has_mem_ops (stmt));
2471
2472 if (syms == NULL || bitmap_empty_p (syms))
2473 BITMAP_FREE (stmt->gsmem.membase.stores);
2474 else
2475 {
2476 if (stmt->gsmem.membase.stores == NULL)
2477 stmt->gsmem.membase.stores = BITMAP_ALLOC (obs);
2478
2479 bitmap_copy (stmt->gsmem.membase.stores, syms);
2480 }
2481 }
2482
2483
2484 /* Deep copy SYMS into the set of symbols loaded by STMT. If SYMS is
2485 NULL or empty, the storage used is freed up. */
2486
2487 void
2488 gimple_set_loaded_syms (gimple stmt, bitmap syms, bitmap_obstack *obs)
2489 {
2490 gcc_assert (gimple_has_mem_ops (stmt));
2491
2492 if (syms == NULL || bitmap_empty_p (syms))
2493 BITMAP_FREE (stmt->gsmem.membase.loads);
2494 else
2495 {
2496 if (stmt->gsmem.membase.loads == NULL)
2497 stmt->gsmem.membase.loads = BITMAP_ALLOC (obs);
2498
2499 bitmap_copy (stmt->gsmem.membase.loads, syms);
2500 }
2501 }
2502
2503
2504 /* Return the number of operands needed on the RHS of a GIMPLE 2425 /* Return the number of operands needed on the RHS of a GIMPLE
2505 assignment for an expression with tree code CODE. */ 2426 assignment for an expression with tree code CODE. */
2506 2427
2507 unsigned 2428 unsigned
2508 get_gimple_rhs_num_ops (enum tree_code code) 2429 get_gimple_rhs_num_ops (enum tree_code code)
2515 return 2; 2436 return 2;
2516 else 2437 else
2517 gcc_unreachable (); 2438 gcc_unreachable ();
2518 } 2439 }
2519 2440
2520 #define DEFTREECODE(SYM, STRING, TYPE, NARGS) \ 2441 #define DEFTREECODE(SYM, STRING, TYPE, NARGS) \
2521 (unsigned char) \ 2442 (unsigned char) \
2522 ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS \ 2443 ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS \
2523 : ((TYPE) == tcc_binary \ 2444 : ((TYPE) == tcc_binary \
2524 || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS \ 2445 || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS \
2525 : ((TYPE) == tcc_constant \ 2446 : ((TYPE) == tcc_constant \
2526 || (TYPE) == tcc_declaration \ 2447 || (TYPE) == tcc_declaration \
2527 || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS \ 2448 || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS \
2528 : ((SYM) == TRUTH_AND_EXPR \ 2449 : ((SYM) == TRUTH_AND_EXPR \
2529 || (SYM) == TRUTH_OR_EXPR \ 2450 || (SYM) == TRUTH_OR_EXPR \
2530 || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \ 2451 || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \
2531 : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \ 2452 : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \
2532 : ((SYM) == COND_EXPR \ 2453 : ((SYM) == COND_EXPR \
2533 || (SYM) == CONSTRUCTOR \ 2454 || (SYM) == CONSTRUCTOR \
2534 || (SYM) == OBJ_TYPE_REF \ 2455 || (SYM) == OBJ_TYPE_REF \
2535 || (SYM) == ASSERT_EXPR \ 2456 || (SYM) == ASSERT_EXPR \
2536 || (SYM) == ADDR_EXPR \ 2457 || (SYM) == ADDR_EXPR \
2537 || (SYM) == WITH_SIZE_EXPR \ 2458 || (SYM) == WITH_SIZE_EXPR \
2538 || (SYM) == EXC_PTR_EXPR \ 2459 || (SYM) == SSA_NAME \
2539 || (SYM) == SSA_NAME \ 2460 || (SYM) == POLYNOMIAL_CHREC \
2540 || (SYM) == FILTER_EXPR \ 2461 || (SYM) == DOT_PROD_EXPR \
2541 || (SYM) == POLYNOMIAL_CHREC \ 2462 || (SYM) == VEC_COND_EXPR \
2542 || (SYM) == DOT_PROD_EXPR \ 2463 || (SYM) == REALIGN_LOAD_EXPR) ? GIMPLE_SINGLE_RHS \
2543 || (SYM) == VEC_COND_EXPR \
2544 || (SYM) == REALIGN_LOAD_EXPR) ? GIMPLE_SINGLE_RHS \
2545 : GIMPLE_INVALID_RHS), 2464 : GIMPLE_INVALID_RHS),
2546 #define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS, 2465 #define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,
2547 2466
2548 const unsigned char gimple_rhs_class_table[] = { 2467 const unsigned char gimple_rhs_class_table[] = {
2549 #include "all-tree.def" 2468 #include "all-tree.def"
2563 is_gimple_operand (const_tree op) 2482 is_gimple_operand (const_tree op)
2564 { 2483 {
2565 return op && get_gimple_rhs_class (TREE_CODE (op)) == GIMPLE_SINGLE_RHS; 2484 return op && get_gimple_rhs_class (TREE_CODE (op)) == GIMPLE_SINGLE_RHS;
2566 } 2485 }
2567 2486
2568
2569 /* Return true if T is a GIMPLE RHS for an assignment to a temporary. */
2570
2571 bool
2572 is_gimple_formal_tmp_rhs (tree t)
2573 {
2574 if (is_gimple_lvalue (t) || is_gimple_val (t))
2575 return true;
2576
2577 return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
2578 }
2579
2580 /* Returns true iff T is a valid RHS for an assignment to a renamed 2487 /* Returns true iff T is a valid RHS for an assignment to a renamed
2581 user -- or front-end generated artificial -- variable. */ 2488 user -- or front-end generated artificial -- variable. */
2582 2489
2583 bool 2490 bool
2584 is_gimple_reg_rhs (tree t) 2491 is_gimple_reg_rhs (tree t)
2585 { 2492 {
2586 /* If the RHS of the MODIFY_EXPR may throw or make a nonlocal goto 2493 return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
2587 and the LHS is a user variable, then we need to introduce a formal
2588 temporary. This way the optimizers can determine that the user
2589 variable is only modified if evaluation of the RHS does not throw.
2590
2591 Don't force a temp of a non-renamable type; the copy could be
2592 arbitrarily expensive. Instead we will generate a VDEF for
2593 the assignment. */
2594
2595 if (is_gimple_reg_type (TREE_TYPE (t)) && tree_could_throw_p (t))
2596 return false;
2597
2598 return is_gimple_formal_tmp_rhs (t);
2599 } 2494 }
2600 2495
2601 /* Returns true iff T is a valid RHS for an assignment to an un-renamed 2496 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
2602 LHS, or for a call argument. */ 2497 LHS, or for a call argument. */
2603 2498
2607 /* If we're dealing with a renamable type, either source or dest must be 2502 /* If we're dealing with a renamable type, either source or dest must be
2608 a renamed variable. */ 2503 a renamed variable. */
2609 if (is_gimple_reg_type (TREE_TYPE (t))) 2504 if (is_gimple_reg_type (TREE_TYPE (t)))
2610 return is_gimple_val (t); 2505 return is_gimple_val (t);
2611 else 2506 else
2612 return is_gimple_formal_tmp_rhs (t); 2507 return is_gimple_val (t) || is_gimple_lvalue (t);
2613 } 2508 }
2614 2509
2615 /* Return true if T is a valid LHS for a GIMPLE assignment expression. */ 2510 /* Return true if T is a valid LHS for a GIMPLE assignment expression. */
2616 2511
2617 bool 2512 bool
2618 is_gimple_lvalue (tree t) 2513 is_gimple_lvalue (tree t)
2619 { 2514 {
2620 return (is_gimple_addressable (t) 2515 return (is_gimple_addressable (t)
2621 || TREE_CODE (t) == WITH_SIZE_EXPR 2516 || TREE_CODE (t) == WITH_SIZE_EXPR
2622 /* These are complex lvalues, but don't have addresses, so they 2517 /* These are complex lvalues, but don't have addresses, so they
2623 go here. */ 2518 go here. */
2624 || TREE_CODE (t) == BIT_FIELD_REF); 2519 || TREE_CODE (t) == BIT_FIELD_REF);
2625 } 2520 }
2626 2521
2627 /* Return true if T is a GIMPLE condition. */ 2522 /* Return true if T is a GIMPLE condition. */
2628 2523
2629 bool 2524 bool
2630 is_gimple_condexpr (tree t) 2525 is_gimple_condexpr (tree t)
2631 { 2526 {
2632 return (is_gimple_val (t) || (COMPARISON_CLASS_P (t) 2527 return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
2633 && !tree_could_trap_p (t) 2528 && !tree_could_trap_p (t)
2634 && is_gimple_val (TREE_OPERAND (t, 0)) 2529 && is_gimple_val (TREE_OPERAND (t, 0))
2635 && is_gimple_val (TREE_OPERAND (t, 1)))); 2530 && is_gimple_val (TREE_OPERAND (t, 1))));
2636 } 2531 }
2637 2532
2638 /* Return true if T is something whose address can be taken. */ 2533 /* Return true if T is something whose address can be taken. */
2639 2534
2640 bool 2535 bool
2659 return true; 2554 return true;
2660 2555
2661 /* Vector constant constructors are gimple invariant. */ 2556 /* Vector constant constructors are gimple invariant. */
2662 case CONSTRUCTOR: 2557 case CONSTRUCTOR:
2663 if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE) 2558 if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2664 return TREE_CONSTANT (t); 2559 return TREE_CONSTANT (t);
2665 else 2560 else
2666 return false; 2561 return false;
2667 2562
2668 default: 2563 default:
2669 return false; 2564 return false;
2670 } 2565 }
2671 } 2566 }
2682 2577
2683 op = TREE_OPERAND (t, 0); 2578 op = TREE_OPERAND (t, 0);
2684 while (handled_component_p (op)) 2579 while (handled_component_p (op))
2685 { 2580 {
2686 if ((TREE_CODE (op) == ARRAY_REF 2581 if ((TREE_CODE (op) == ARRAY_REF
2687 || TREE_CODE (op) == ARRAY_RANGE_REF) 2582 || TREE_CODE (op) == ARRAY_RANGE_REF)
2688 && !is_gimple_val (TREE_OPERAND (op, 1))) 2583 && !is_gimple_val (TREE_OPERAND (op, 1)))
2689 return false; 2584 return false;
2690 2585
2691 op = TREE_OPERAND (op, 0); 2586 op = TREE_OPERAND (op, 0);
2692 } 2587 }
2693 2588
2694 if (CONSTANT_CLASS_P (op) || INDIRECT_REF_P (op)) 2589 if (CONSTANT_CLASS_P (op) || INDIRECT_REF_P (op))
2716 strip_invariant_refs (const_tree op) 2611 strip_invariant_refs (const_tree op)
2717 { 2612 {
2718 while (handled_component_p (op)) 2613 while (handled_component_p (op))
2719 { 2614 {
2720 switch (TREE_CODE (op)) 2615 switch (TREE_CODE (op))
2721 { 2616 {
2722 case ARRAY_REF: 2617 case ARRAY_REF:
2723 case ARRAY_RANGE_REF: 2618 case ARRAY_RANGE_REF:
2724 if (!is_gimple_constant (TREE_OPERAND (op, 1)) 2619 if (!is_gimple_constant (TREE_OPERAND (op, 1))
2725 || TREE_OPERAND (op, 2) != NULL_TREE 2620 || TREE_OPERAND (op, 2) != NULL_TREE
2726 || TREE_OPERAND (op, 3) != NULL_TREE) 2621 || TREE_OPERAND (op, 3) != NULL_TREE)
2727 return NULL; 2622 return NULL;
2728 break; 2623 break;
2729 2624
2730 case COMPONENT_REF: 2625 case COMPONENT_REF:
2731 if (TREE_OPERAND (op, 2) != NULL_TREE) 2626 if (TREE_OPERAND (op, 2) != NULL_TREE)
2732 return NULL; 2627 return NULL;
2733 break; 2628 break;
2734 2629
2735 default:; 2630 default:;
2736 } 2631 }
2737 op = TREE_OPERAND (op, 0); 2632 op = TREE_OPERAND (op, 0);
2738 } 2633 }
2739 2634
2740 return op; 2635 return op;
2741 } 2636 }
2820 case CASE_LABEL_EXPR: 2715 case CASE_LABEL_EXPR:
2821 case TRY_CATCH_EXPR: 2716 case TRY_CATCH_EXPR:
2822 case TRY_FINALLY_EXPR: 2717 case TRY_FINALLY_EXPR:
2823 case EH_FILTER_EXPR: 2718 case EH_FILTER_EXPR:
2824 case CATCH_EXPR: 2719 case CATCH_EXPR:
2825 case CHANGE_DYNAMIC_TYPE_EXPR:
2826 case ASM_EXPR: 2720 case ASM_EXPR:
2827 case RESX_EXPR:
2828 case STATEMENT_LIST: 2721 case STATEMENT_LIST:
2829 case OMP_PARALLEL: 2722 case OMP_PARALLEL:
2830 case OMP_FOR: 2723 case OMP_FOR:
2831 case OMP_SECTIONS: 2724 case OMP_SECTIONS:
2832 case OMP_SECTION: 2725 case OMP_SECTION:
2853 2746
2854 bool 2747 bool
2855 is_gimple_variable (tree t) 2748 is_gimple_variable (tree t)
2856 { 2749 {
2857 return (TREE_CODE (t) == VAR_DECL 2750 return (TREE_CODE (t) == VAR_DECL
2858 || TREE_CODE (t) == PARM_DECL 2751 || TREE_CODE (t) == PARM_DECL
2859 || TREE_CODE (t) == RESULT_DECL 2752 || TREE_CODE (t) == RESULT_DECL
2860 || TREE_CODE (t) == SSA_NAME); 2753 || TREE_CODE (t) == SSA_NAME);
2861 } 2754 }
2862 2755
2863 /* Return true if T is a GIMPLE identifier (something with an address). */ 2756 /* Return true if T is a GIMPLE identifier (something with an address). */
2864 2757
2865 bool 2758 bool
2866 is_gimple_id (tree t) 2759 is_gimple_id (tree t)
2867 { 2760 {
2868 return (is_gimple_variable (t) 2761 return (is_gimple_variable (t)
2869 || TREE_CODE (t) == FUNCTION_DECL 2762 || TREE_CODE (t) == FUNCTION_DECL
2870 || TREE_CODE (t) == LABEL_DECL 2763 || TREE_CODE (t) == LABEL_DECL
2871 || TREE_CODE (t) == CONST_DECL 2764 || TREE_CODE (t) == CONST_DECL
2872 /* Allow string constants, since they are addressable. */ 2765 /* Allow string constants, since they are addressable. */
2873 || TREE_CODE (t) == STRING_CST); 2766 || TREE_CODE (t) == STRING_CST);
2874 } 2767 }
2875 2768
2876 /* Return true if TYPE is a suitable type for a scalar register variable. */ 2769 /* Return true if TYPE is a suitable type for a scalar register variable. */
2877 2770
2878 bool 2771 bool
2879 is_gimple_reg_type (tree type) 2772 is_gimple_reg_type (tree type)
2880 { 2773 {
2881 /* In addition to aggregate types, we also exclude complex types if not 2774 return !AGGREGATE_TYPE_P (type);
2882 optimizing because they can be subject to partial stores in GNU C by
2883 means of the __real__ and __imag__ operators and we cannot promote
2884 them to total stores (see gimplify_modify_expr_complex_part). */
2885 return !(AGGREGATE_TYPE_P (type)
2886 || (TREE_CODE (type) == COMPLEX_TYPE && !optimize));
2887
2888 } 2775 }
2889 2776
2890 /* Return true if T is a non-aggregate register variable. */ 2777 /* Return true if T is a non-aggregate register variable. */
2891 2778
2892 bool 2779 bool
2893 is_gimple_reg (tree t) 2780 is_gimple_reg (tree t)
2894 { 2781 {
2895 if (TREE_CODE (t) == SSA_NAME) 2782 if (TREE_CODE (t) == SSA_NAME)
2896 t = SSA_NAME_VAR (t); 2783 t = SSA_NAME_VAR (t);
2897
2898 if (MTAG_P (t))
2899 return false;
2900 2784
2901 if (!is_gimple_variable (t)) 2785 if (!is_gimple_variable (t))
2902 return false; 2786 return false;
2903 2787
2904 if (!is_gimple_reg_type (TREE_TYPE (t))) 2788 if (!is_gimple_reg_type (TREE_TYPE (t)))
2935 2819
2936 return true; 2820 return true;
2937 } 2821 }
2938 2822
2939 2823
2940 /* Returns true if T is a GIMPLE formal temporary variable. */
2941
2942 bool
2943 is_gimple_formal_tmp_var (tree t)
2944 {
2945 if (TREE_CODE (t) == SSA_NAME)
2946 return true;
2947
2948 return TREE_CODE (t) == VAR_DECL && DECL_GIMPLE_FORMAL_TEMP_P (t);
2949 }
2950
2951 /* Returns true if T is a GIMPLE formal temporary register variable. */
2952
2953 bool
2954 is_gimple_formal_tmp_reg (tree t)
2955 {
2956 /* The intent of this is to get hold of a value that won't change.
2957 An SSA_NAME qualifies no matter if its of a user variable or not. */
2958 if (TREE_CODE (t) == SSA_NAME)
2959 return true;
2960
2961 /* We don't know the lifetime characteristics of user variables. */
2962 if (!is_gimple_formal_tmp_var (t))
2963 return false;
2964
2965 /* Finally, it must be capable of being placed in a register. */
2966 return is_gimple_reg (t);
2967 }
2968
2969 /* Return true if T is a GIMPLE variable whose address is not needed. */ 2824 /* Return true if T is a GIMPLE variable whose address is not needed. */
2970 2825
2971 bool 2826 bool
2972 is_gimple_non_addressable (tree t) 2827 is_gimple_non_addressable (tree t)
2973 { 2828 {
2986 if (is_gimple_variable (t) 2841 if (is_gimple_variable (t)
2987 && is_gimple_reg_type (TREE_TYPE (t)) 2842 && is_gimple_reg_type (TREE_TYPE (t))
2988 && !is_gimple_reg (t)) 2843 && !is_gimple_reg (t))
2989 return false; 2844 return false;
2990 2845
2991 /* FIXME make these decls. That can happen only when we expose the
2992 entire landing-pad construct at the tree level. */
2993 if (TREE_CODE (t) == EXC_PTR_EXPR || TREE_CODE (t) == FILTER_EXPR)
2994 return true;
2995
2996 return (is_gimple_variable (t) || is_gimple_min_invariant (t)); 2846 return (is_gimple_variable (t) || is_gimple_min_invariant (t));
2997 } 2847 }
2998 2848
2999 /* Similarly, but accept hard registers as inputs to asm statements. */ 2849 /* Similarly, but accept hard registers as inputs to asm statements. */
3000 2850
3010 /* Return true if T is a GIMPLE minimal lvalue. */ 2860 /* Return true if T is a GIMPLE minimal lvalue. */
3011 2861
3012 bool 2862 bool
3013 is_gimple_min_lval (tree t) 2863 is_gimple_min_lval (tree t)
3014 { 2864 {
2865 if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
2866 return false;
3015 return (is_gimple_id (t) || TREE_CODE (t) == INDIRECT_REF); 2867 return (is_gimple_id (t) || TREE_CODE (t) == INDIRECT_REF);
3016 } 2868 }
3017 2869
3018 /* Return true if T is a typecast operation. */ 2870 /* Return true if T is a typecast operation. */
3019 2871
3060 tree 2912 tree
3061 get_base_address (tree t) 2913 get_base_address (tree t)
3062 { 2914 {
3063 while (handled_component_p (t)) 2915 while (handled_component_p (t))
3064 t = TREE_OPERAND (t, 0); 2916 t = TREE_OPERAND (t, 0);
3065 2917
3066 if (SSA_VAR_P (t) 2918 if (SSA_VAR_P (t)
3067 || TREE_CODE (t) == STRING_CST 2919 || TREE_CODE (t) == STRING_CST
3068 || TREE_CODE (t) == CONSTRUCTOR 2920 || TREE_CODE (t) == CONSTRUCTOR
3069 || INDIRECT_REF_P (t)) 2921 || INDIRECT_REF_P (t))
3070 return t; 2922 return t;
3081 2933
3082 switch (TREE_CODE_CLASS (code)) 2934 switch (TREE_CODE_CLASS (code))
3083 { 2935 {
3084 case tcc_expression: 2936 case tcc_expression:
3085 switch (code) 2937 switch (code)
3086 { 2938 {
3087 case INIT_EXPR: 2939 case INIT_EXPR:
3088 case MODIFY_EXPR: 2940 case MODIFY_EXPR:
3089 case VA_ARG_EXPR: 2941 case VA_ARG_EXPR:
3090 case PREDECREMENT_EXPR: 2942 case PREDECREMENT_EXPR:
3091 case PREINCREMENT_EXPR: 2943 case PREINCREMENT_EXPR:
3092 case POSTDECREMENT_EXPR: 2944 case POSTDECREMENT_EXPR:
3093 case POSTINCREMENT_EXPR: 2945 case POSTINCREMENT_EXPR:
3094 /* All of these have side-effects, no matter what their 2946 /* All of these have side-effects, no matter what their
3095 operands are. */ 2947 operands are. */
3096 return; 2948 return;
3097 2949
3098 default: 2950 default:
3099 break; 2951 break;
3100 } 2952 }
3101 /* Fall through. */ 2953 /* Fall through. */
3102 2954
3103 case tcc_comparison: /* a comparison expression */ 2955 case tcc_comparison: /* a comparison expression */
3104 case tcc_unary: /* a unary arithmetic expression */ 2956 case tcc_unary: /* a unary arithmetic expression */
3105 case tcc_binary: /* a binary arithmetic expression */ 2957 case tcc_binary: /* a binary arithmetic expression */
3106 case tcc_reference: /* a reference */ 2958 case tcc_reference: /* a reference */
3107 case tcc_vl_exp: /* a function call */ 2959 case tcc_vl_exp: /* a function call */
3108 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t); 2960 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
3109 for (i = 0; i < len; ++i) 2961 for (i = 0; i < len; ++i)
3110 { 2962 {
3111 tree op = TREE_OPERAND (t, i); 2963 tree op = TREE_OPERAND (t, i);
3112 if (op && TREE_SIDE_EFFECTS (op)) 2964 if (op && TREE_SIDE_EFFECTS (op))
3113 TREE_SIDE_EFFECTS (t) = 1; 2965 TREE_SIDE_EFFECTS (t) = 1;
3114 } 2966 }
3115 break; 2967 break;
3116 2968
3117 case tcc_constant: 2969 case tcc_constant:
3118 /* No side-effects. */ 2970 /* No side-effects. */
3119 return; 2971 return;
3128 we failed to create one. */ 2980 we failed to create one. */
3129 2981
3130 tree 2982 tree
3131 canonicalize_cond_expr_cond (tree t) 2983 canonicalize_cond_expr_cond (tree t)
3132 { 2984 {
2985 /* Strip conversions around boolean operations. */
2986 if (CONVERT_EXPR_P (t)
2987 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 0))))
2988 t = TREE_OPERAND (t, 0);
2989
3133 /* For (bool)x use x != 0. */ 2990 /* For (bool)x use x != 0. */
3134 if (TREE_CODE (t) == NOP_EXPR 2991 if (CONVERT_EXPR_P (t)
3135 && TREE_TYPE (t) == boolean_type_node) 2992 && TREE_CODE (TREE_TYPE (t)) == BOOLEAN_TYPE)
3136 { 2993 {
3137 tree top0 = TREE_OPERAND (t, 0); 2994 tree top0 = TREE_OPERAND (t, 0);
3138 t = build2 (NE_EXPR, TREE_TYPE (t), 2995 t = build2 (NE_EXPR, TREE_TYPE (t),
3139 top0, build_int_cst (TREE_TYPE (top0), 0)); 2996 top0, build_int_cst (TREE_TYPE (top0), 0));
3140 } 2997 }
3141 /* For !x use x == 0. */ 2998 /* For !x use x == 0. */
3142 else if (TREE_CODE (t) == TRUTH_NOT_EXPR) 2999 else if (TREE_CODE (t) == TRUTH_NOT_EXPR)
3143 { 3000 {
3144 tree top0 = TREE_OPERAND (t, 0); 3001 tree top0 = TREE_OPERAND (t, 0);
3145 t = build2 (EQ_EXPR, TREE_TYPE (t), 3002 t = build2 (EQ_EXPR, TREE_TYPE (t),
3146 top0, build_int_cst (TREE_TYPE (top0), 0)); 3003 top0, build_int_cst (TREE_TYPE (top0), 0));
3147 } 3004 }
3148 /* For cmp ? 1 : 0 use cmp. */ 3005 /* For cmp ? 1 : 0 use cmp. */
3149 else if (TREE_CODE (t) == COND_EXPR 3006 else if (TREE_CODE (t) == COND_EXPR
3150 && COMPARISON_CLASS_P (TREE_OPERAND (t, 0)) 3007 && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
3151 && integer_onep (TREE_OPERAND (t, 1)) 3008 && integer_onep (TREE_OPERAND (t, 1))
3152 && integer_zerop (TREE_OPERAND (t, 2))) 3009 && integer_zerop (TREE_OPERAND (t, 2)))
3153 { 3010 {
3154 tree top0 = TREE_OPERAND (t, 0); 3011 tree top0 = TREE_OPERAND (t, 0);
3155 t = build2 (TREE_CODE (top0), TREE_TYPE (t), 3012 t = build2 (TREE_CODE (top0), TREE_TYPE (t),
3156 TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1)); 3013 TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
3157 } 3014 }
3158 3015
3159 if (is_gimple_condexpr (t)) 3016 if (is_gimple_condexpr (t))
3160 return t; 3017 return t;
3161 3018
3180 3037
3181 new_stmt = gimple_build_call_vec (fn, vargs); 3038 new_stmt = gimple_build_call_vec (fn, vargs);
3182 VEC_free (tree, heap, vargs); 3039 VEC_free (tree, heap, vargs);
3183 if (gimple_call_lhs (stmt)) 3040 if (gimple_call_lhs (stmt))
3184 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt)); 3041 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3042
3043 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3044 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3185 3045
3186 gimple_set_block (new_stmt, gimple_block (stmt)); 3046 gimple_set_block (new_stmt, gimple_block (stmt));
3187 if (gimple_has_location (stmt)) 3047 if (gimple_has_location (stmt))
3188 gimple_set_location (new_stmt, gimple_location (stmt)); 3048 gimple_set_location (new_stmt, gimple_location (stmt));
3189 3049
3195 gimple_call_set_from_thunk (new_stmt, gimple_call_from_thunk_p (stmt)); 3055 gimple_call_set_from_thunk (new_stmt, gimple_call_from_thunk_p (stmt));
3196 gimple_call_set_va_arg_pack (new_stmt, gimple_call_va_arg_pack_p (stmt)); 3056 gimple_call_set_va_arg_pack (new_stmt, gimple_call_va_arg_pack_p (stmt));
3197 #ifndef noCbC 3057 #ifndef noCbC
3198 gimple_call_set_cbc_goto (new_stmt, gimple_call_cbc_goto_p (stmt)); 3058 gimple_call_set_cbc_goto (new_stmt, gimple_call_cbc_goto_p (stmt));
3199 #endif 3059 #endif
3060
3061 gimple_set_modified (new_stmt, true);
3200 return new_stmt; 3062 return new_stmt;
3201 } 3063 }
3202 3064
3065
3066 static hashval_t gimple_type_hash (const void *);
3067
3068 /* Structure used to maintain a cache of some type pairs compared by
3069 gimple_types_compatible_p when comparing aggregate types. There are
3070 four possible values for SAME_P:
3071
3072 -2: The pair (T1, T2) has just been inserted in the table.
3073 -1: The pair (T1, T2) is currently being compared.
3074 0: T1 and T2 are different types.
3075 1: T1 and T2 are the same type.
3076
3077 This table is only used when comparing aggregate types to avoid
3078 infinite recursion due to self-referential types. */
3079 struct type_pair_d
3080 {
3081 unsigned int uid1;
3082 unsigned int uid2;
3083 int same_p;
3084 };
3085 typedef struct type_pair_d *type_pair_t;
3086
3087 /* Return a hash value for the type pair pointed-to by P. */
3088
3089 static hashval_t
3090 type_pair_hash (const void *p)
3091 {
3092 const struct type_pair_d *pair = (const struct type_pair_d *) p;
3093 hashval_t val1 = pair->uid1;
3094 hashval_t val2 = pair->uid2;
3095 return (iterative_hash_hashval_t (val2, val1)
3096 ^ iterative_hash_hashval_t (val1, val2));
3097 }
3098
3099 /* Compare two type pairs pointed-to by P1 and P2. */
3100
3101 static int
3102 type_pair_eq (const void *p1, const void *p2)
3103 {
3104 const struct type_pair_d *pair1 = (const struct type_pair_d *) p1;
3105 const struct type_pair_d *pair2 = (const struct type_pair_d *) p2;
3106 return ((pair1->uid1 == pair2->uid1 && pair1->uid2 == pair2->uid2)
3107 || (pair1->uid1 == pair2->uid2 && pair1->uid2 == pair2->uid1));
3108 }
3109
3110 /* Lookup the pair of types T1 and T2 in *VISITED_P. Insert a new
3111 entry if none existed. */
3112
3113 static type_pair_t
3114 lookup_type_pair (tree t1, tree t2, htab_t *visited_p, struct obstack *ob_p)
3115 {
3116 struct type_pair_d pair;
3117 type_pair_t p;
3118 void **slot;
3119
3120 if (*visited_p == NULL)
3121 {
3122 *visited_p = htab_create (251, type_pair_hash, type_pair_eq, NULL);
3123 gcc_obstack_init (ob_p);
3124 }
3125
3126 pair.uid1 = TYPE_UID (t1);
3127 pair.uid2 = TYPE_UID (t2);
3128 slot = htab_find_slot (*visited_p, &pair, INSERT);
3129
3130 if (*slot)
3131 p = *((type_pair_t *) slot);
3132 else
3133 {
3134 p = XOBNEW (ob_p, struct type_pair_d);
3135 p->uid1 = TYPE_UID (t1);
3136 p->uid2 = TYPE_UID (t2);
3137 p->same_p = -2;
3138 *slot = (void *) p;
3139 }
3140
3141 return p;
3142 }
3143
3144
3145 /* Return true if T1 and T2 have the same name. If FOR_COMPLETION_P is
3146 true then if any type has no name return false, otherwise return
3147 true if both types have no names. */
3148
3149 static bool
3150 compare_type_names_p (tree t1, tree t2, bool for_completion_p)
3151 {
3152 tree name1 = TYPE_NAME (t1);
3153 tree name2 = TYPE_NAME (t2);
3154
3155 /* Consider anonymous types all unique for completion. */
3156 if (for_completion_p
3157 && (!name1 || !name2))
3158 return false;
3159
3160 if (name1 && TREE_CODE (name1) == TYPE_DECL)
3161 {
3162 name1 = DECL_NAME (name1);
3163 if (for_completion_p
3164 && !name1)
3165 return false;
3166 }
3167 gcc_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE);
3168
3169 if (name2 && TREE_CODE (name2) == TYPE_DECL)
3170 {
3171 name2 = DECL_NAME (name2);
3172 if (for_completion_p
3173 && !name2)
3174 return false;
3175 }
3176 gcc_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE);
3177
3178 /* Identifiers can be compared with pointer equality rather
3179 than a string comparison. */
3180 if (name1 == name2)
3181 return true;
3182
3183 return false;
3184 }
3185
3186 /* Return true if the field decls F1 and F2 are at the same offset. */
3187
3188 bool
3189 compare_field_offset (tree f1, tree f2)
3190 {
3191 if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
3192 return (operand_equal_p (DECL_FIELD_OFFSET (f1),
3193 DECL_FIELD_OFFSET (f2), 0)
3194 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
3195 DECL_FIELD_BIT_OFFSET (f2)));
3196
3197 /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
3198 should be, so handle differing ones specially by decomposing
3199 the offset into a byte and bit offset manually. */
3200 if (host_integerp (DECL_FIELD_OFFSET (f1), 0)
3201 && host_integerp (DECL_FIELD_OFFSET (f2), 0))
3202 {
3203 unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
3204 unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
3205 bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
3206 byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
3207 + bit_offset1 / BITS_PER_UNIT);
3208 bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
3209 byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
3210 + bit_offset2 / BITS_PER_UNIT);
3211 if (byte_offset1 != byte_offset2)
3212 return false;
3213 return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
3214 }
3215
3216 return false;
3217 }
3218
3219 /* Return 1 iff T1 and T2 are structurally identical.
3220 Otherwise, return 0. */
3221
3222 static int
3223 gimple_types_compatible_p (tree t1, tree t2)
3224 {
3225 type_pair_t p = NULL;
3226
3227 /* Check first for the obvious case of pointer identity. */
3228 if (t1 == t2)
3229 return 1;
3230
3231 /* Check that we have two types to compare. */
3232 if (t1 == NULL_TREE || t2 == NULL_TREE)
3233 return 0;
3234
3235 /* Can't be the same type if the types don't have the same code. */
3236 if (TREE_CODE (t1) != TREE_CODE (t2))
3237 return 0;
3238
3239 /* Can't be the same type if they have different CV qualifiers. */
3240 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
3241 return 0;
3242
3243 /* Void types are always the same. */
3244 if (TREE_CODE (t1) == VOID_TYPE)
3245 return 1;
3246
3247 /* For numerical types do some simple checks before doing three
3248 hashtable queries. */
3249 if (INTEGRAL_TYPE_P (t1)
3250 || SCALAR_FLOAT_TYPE_P (t1)
3251 || FIXED_POINT_TYPE_P (t1)
3252 || TREE_CODE (t1) == VECTOR_TYPE
3253 || TREE_CODE (t1) == COMPLEX_TYPE
3254 || TREE_CODE (t1) == OFFSET_TYPE)
3255 {
3256 /* Can't be the same type if they have different alignment,
3257 sign, precision or mode. */
3258 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3259 || TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
3260 || TYPE_MODE (t1) != TYPE_MODE (t2)
3261 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
3262 return 0;
3263
3264 if (TREE_CODE (t1) == INTEGER_TYPE
3265 && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
3266 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
3267 return 0;
3268
3269 /* That's all we need to check for float and fixed-point types. */
3270 if (SCALAR_FLOAT_TYPE_P (t1)
3271 || FIXED_POINT_TYPE_P (t1))
3272 return 1;
3273
3274 /* Perform cheap tail-recursion for vector and complex types. */
3275 if (TREE_CODE (t1) == VECTOR_TYPE
3276 || TREE_CODE (t1) == COMPLEX_TYPE)
3277 return gimple_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2));
3278
3279 /* For integral types fall thru to more complex checks. */
3280 }
3281
3282 /* If the hash values of t1 and t2 are different the types can't
3283 possibly be the same. This helps keeping the type-pair hashtable
3284 small, only tracking comparisons for hash collisions. */
3285 if (gimple_type_hash (t1) != gimple_type_hash (t2))
3286 return 0;
3287
3288 /* If we've visited this type pair before (in the case of aggregates
3289 with self-referential types), and we made a decision, return it. */
3290 p = lookup_type_pair (t1, t2, &gtc_visited, &gtc_ob);
3291 if (p->same_p == 0 || p->same_p == 1)
3292 {
3293 /* We have already decided whether T1 and T2 are the
3294 same, return the cached result. */
3295 return p->same_p == 1;
3296 }
3297 else if (p->same_p == -1)
3298 {
3299 /* We are currently comparing this pair of types, assume
3300 that they are the same and let the caller decide. */
3301 return 1;
3302 }
3303
3304 gcc_assert (p->same_p == -2);
3305
3306 /* Mark the (T1, T2) comparison in progress. */
3307 p->same_p = -1;
3308
3309 /* If their attributes are not the same they can't be the same type. */
3310 if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
3311 goto different_types;
3312
3313 /* Do type-specific comparisons. */
3314 switch (TREE_CODE (t1))
3315 {
3316 case ARRAY_TYPE:
3317 /* Array types are the same if the element types are the same and
3318 the number of elements are the same. */
3319 if (!gimple_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))
3320 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
3321 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
3322 goto different_types;
3323 else
3324 {
3325 tree i1 = TYPE_DOMAIN (t1);
3326 tree i2 = TYPE_DOMAIN (t2);
3327
3328 /* For an incomplete external array, the type domain can be
3329 NULL_TREE. Check this condition also. */
3330 if (i1 == NULL_TREE && i2 == NULL_TREE)
3331 goto same_types;
3332 else if (i1 == NULL_TREE || i2 == NULL_TREE)
3333 goto different_types;
3334 /* If for a complete array type the possibly gimplified sizes
3335 are different the types are different. */
3336 else if (((TYPE_SIZE (i1) != NULL) ^ (TYPE_SIZE (i2) != NULL))
3337 || (TYPE_SIZE (i1)
3338 && TYPE_SIZE (i2)
3339 && !operand_equal_p (TYPE_SIZE (i1), TYPE_SIZE (i2), 0)))
3340 goto different_types;
3341 else
3342 {
3343 tree min1 = TYPE_MIN_VALUE (i1);
3344 tree min2 = TYPE_MIN_VALUE (i2);
3345 tree max1 = TYPE_MAX_VALUE (i1);
3346 tree max2 = TYPE_MAX_VALUE (i2);
3347
3348 /* The minimum/maximum values have to be the same. */
3349 if ((min1 == min2
3350 || (min1 && min2 && operand_equal_p (min1, min2, 0)))
3351 && (max1 == max2
3352 || (max1 && max2 && operand_equal_p (max1, max2, 0))))
3353 goto same_types;
3354 else
3355 goto different_types;
3356 }
3357 }
3358
3359 case METHOD_TYPE:
3360 /* Method types should belong to the same class. */
3361 if (!gimple_types_compatible_p (TYPE_METHOD_BASETYPE (t1),
3362 TYPE_METHOD_BASETYPE (t2)))
3363 goto different_types;
3364
3365 /* Fallthru */
3366
3367 case FUNCTION_TYPE:
3368 /* Function types are the same if the return type and arguments types
3369 are the same. */
3370 if (!gimple_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))
3371 goto different_types;
3372 else
3373 {
3374 if (!targetm.comp_type_attributes (t1, t2))
3375 goto different_types;
3376
3377 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
3378 goto same_types;
3379 else
3380 {
3381 tree parms1, parms2;
3382
3383 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
3384 parms1 && parms2;
3385 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
3386 {
3387 if (!gimple_types_compatible_p (TREE_VALUE (parms1),
3388 TREE_VALUE (parms2)))
3389 goto different_types;
3390 }
3391
3392 if (parms1 || parms2)
3393 goto different_types;
3394
3395 goto same_types;
3396 }
3397 }
3398
3399 case OFFSET_TYPE:
3400 {
3401 if (!gimple_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))
3402 || !gimple_types_compatible_p (TYPE_OFFSET_BASETYPE (t1),
3403 TYPE_OFFSET_BASETYPE (t2)))
3404 goto different_types;
3405
3406 goto same_types;
3407 }
3408
3409 case POINTER_TYPE:
3410 case REFERENCE_TYPE:
3411 {
3412 /* If the two pointers have different ref-all attributes,
3413 they can't be the same type. */
3414 if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
3415 goto different_types;
3416
3417 /* If one pointer points to an incomplete type variant of
3418 the other pointed-to type they are the same. */
3419 if (TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
3420 && RECORD_OR_UNION_TYPE_P (TREE_TYPE (t1))
3421 && (!COMPLETE_TYPE_P (TREE_TYPE (t1))
3422 || !COMPLETE_TYPE_P (TREE_TYPE (t2)))
3423 && compare_type_names_p (TYPE_MAIN_VARIANT (TREE_TYPE (t1)),
3424 TYPE_MAIN_VARIANT (TREE_TYPE (t2)), true))
3425 {
3426 /* Replace the pointed-to incomplete type with the
3427 complete one. */
3428 if (COMPLETE_TYPE_P (TREE_TYPE (t2)))
3429 TREE_TYPE (t1) = TREE_TYPE (t2);
3430 else
3431 TREE_TYPE (t2) = TREE_TYPE (t1);
3432 goto same_types;
3433 }
3434
3435 /* Otherwise, pointer and reference types are the same if the
3436 pointed-to types are the same. */
3437 if (gimple_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))
3438 goto same_types;
3439
3440 goto different_types;
3441 }
3442
3443 case INTEGER_TYPE:
3444 case BOOLEAN_TYPE:
3445 {
3446 tree min1 = TYPE_MIN_VALUE (t1);
3447 tree max1 = TYPE_MAX_VALUE (t1);
3448 tree min2 = TYPE_MIN_VALUE (t2);
3449 tree max2 = TYPE_MAX_VALUE (t2);
3450 bool min_equal_p = false;
3451 bool max_equal_p = false;
3452
3453 /* If either type has a minimum value, the other type must
3454 have the same. */
3455 if (min1 == NULL_TREE && min2 == NULL_TREE)
3456 min_equal_p = true;
3457 else if (min1 && min2 && operand_equal_p (min1, min2, 0))
3458 min_equal_p = true;
3459
3460 /* Likewise, if either type has a maximum value, the other
3461 type must have the same. */
3462 if (max1 == NULL_TREE && max2 == NULL_TREE)
3463 max_equal_p = true;
3464 else if (max1 && max2 && operand_equal_p (max1, max2, 0))
3465 max_equal_p = true;
3466
3467 if (!min_equal_p || !max_equal_p)
3468 goto different_types;
3469
3470 goto same_types;
3471 }
3472
3473 case ENUMERAL_TYPE:
3474 {
3475 /* FIXME lto, we cannot check bounds on enumeral types because
3476 different front ends will produce different values.
3477 In C, enumeral types are integers, while in C++ each element
3478 will have its own symbolic value. We should decide how enums
3479 are to be represented in GIMPLE and have each front end lower
3480 to that. */
3481 tree v1, v2;
3482
3483 /* For enumeral types, all the values must be the same. */
3484 if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
3485 goto same_types;
3486
3487 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
3488 v1 && v2;
3489 v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
3490 {
3491 tree c1 = TREE_VALUE (v1);
3492 tree c2 = TREE_VALUE (v2);
3493
3494 if (TREE_CODE (c1) == CONST_DECL)
3495 c1 = DECL_INITIAL (c1);
3496
3497 if (TREE_CODE (c2) == CONST_DECL)
3498 c2 = DECL_INITIAL (c2);
3499
3500 if (tree_int_cst_equal (c1, c2) != 1)
3501 goto different_types;
3502 }
3503
3504 /* If one enumeration has more values than the other, they
3505 are not the same. */
3506 if (v1 || v2)
3507 goto different_types;
3508
3509 goto same_types;
3510 }
3511
3512 case RECORD_TYPE:
3513 case UNION_TYPE:
3514 case QUAL_UNION_TYPE:
3515 {
3516 tree f1, f2;
3517
3518 /* If one type requires structural equality checks and the
3519 other doesn't, do not merge the types. */
3520 if (TYPE_STRUCTURAL_EQUALITY_P (t1)
3521 != TYPE_STRUCTURAL_EQUALITY_P (t2))
3522 goto different_types;
3523
3524 /* The struct tags shall compare equal. */
3525 if (!compare_type_names_p (TYPE_MAIN_VARIANT (t1),
3526 TYPE_MAIN_VARIANT (t2), false))
3527 goto different_types;
3528
3529 /* For aggregate types, all the fields must be the same. */
3530 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
3531 f1 && f2;
3532 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
3533 {
3534 /* The fields must have the same name, offset and type. */
3535 if (DECL_NAME (f1) != DECL_NAME (f2)
3536 || DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
3537 || !compare_field_offset (f1, f2)
3538 || !gimple_types_compatible_p (TREE_TYPE (f1),
3539 TREE_TYPE (f2)))
3540 goto different_types;
3541 }
3542
3543 /* If one aggregate has more fields than the other, they
3544 are not the same. */
3545 if (f1 || f2)
3546 goto different_types;
3547
3548 goto same_types;
3549 }
3550
3551 default:
3552 gcc_unreachable ();
3553 }
3554
3555 /* Common exit path for types that are not compatible. */
3556 different_types:
3557 p->same_p = 0;
3558 return 0;
3559
3560 /* Common exit path for types that are compatible. */
3561 same_types:
3562 p->same_p = 1;
3563 return 1;
3564 }
3565
3566
3567
3568
/* Per pointer state for the SCC finding.  The on_sccstack flag
   is not strictly required, it is true when there is no hash value
   recorded for the type and false otherwise.  But querying that
   is slower.  */

struct sccs
{
  /* DFS pre-order number assigned when the node is first visited.  */
  unsigned int dfsnum;
  /* Smallest dfsnum reachable from this node (Tarjan's low-link).  */
  unsigned int low;
  /* True while the node is still on the SCC stack.  */
  bool on_sccstack;
  /* Hash value for the node; only meaningful once its SCC is complete.  */
  hashval_t hash;
};
3581
3582 static unsigned int next_dfs_num;
3583
3584 static hashval_t
3585 iterative_hash_gimple_type (tree, hashval_t, VEC(tree, heap) **,
3586 struct pointer_map_t *, struct obstack *);
3587
/* DFS visit the edge from the callers type with state *STATE to T.
   Update the callers type hash V with the hash for T if it is not part
   of the SCC containing the callers type and return it.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */

static hashval_t
visit (tree t, struct sccs *state, hashval_t v,
       VEC (tree, heap) **sccstack,
       struct pointer_map_t *sccstate,
       struct obstack *sccstate_obstack)
{
  struct sccs *cstate = NULL;
  void **slot;

  /* If there is a hash value recorded for this type then it can't
     possibly be part of our parent SCC.  Simply mix in its hash.  */
  if ((slot = pointer_map_contains (type_hash_cache, t)))
    return iterative_hash_hashval_t ((hashval_t) (size_t) *slot, v);

  /* See whether T was already visited during the current DFS walk.  */
  if ((slot = pointer_map_contains (sccstate, t)) != NULL)
    cstate = (struct sccs *)*slot;
  if (!cstate)
    {
      hashval_t tem;
      /* Not yet visited.  DFS recurse.  */
      tem = iterative_hash_gimple_type (t, v,
					sccstack, sccstate, sccstate_obstack);
      /* The recursion above created T's state in SCCSTATE; fetch it.
	 (CSTATE is necessarily still NULL at this point, so the lookup
	 always executes.)  */
      if (!cstate)
	cstate = (struct sccs *)* pointer_map_contains (sccstate, t);
      state->low = MIN (state->low, cstate->low);
      /* If the type is no longer on the SCC stack and thus is not part
	 of the parents SCC mix in its hash value.  Otherwise we will
	 ignore the type for hashing purposes and return the unaltered
	 hash value.  */
      if (!cstate->on_sccstack)
	return tem;
    }
  /* Back-edge to an ancestor still on the stack: update our low-link.  */
  if (cstate->dfsnum < state->dfsnum
      && cstate->on_sccstack)
    state->low = MIN (cstate->dfsnum, state->low);

  /* We are part of our parents SCC, skip this type during hashing
     and return the unaltered hash value.  */
  return v;
}
3633
3634 /* Hash NAME with the previous hash value V and return it. */
3635
3636 static hashval_t
3637 iterative_hash_name (tree name, hashval_t v)
3638 {
3639 if (!name)
3640 return v;
3641 if (TREE_CODE (name) == TYPE_DECL)
3642 name = DECL_NAME (name);
3643 if (!name)
3644 return v;
3645 gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE);
3646 return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v);
3647 }
3648
/* Returning a hash value for gimple type TYPE combined with VAL.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.

   To hash a type we end up hashing in types that are reachable.
   Through pointers we can end up with cycles which messes up the
   required property that we need to compute the same hash value
   for structurally equivalent types.  To avoid this we have to
   hash all types in a cycle (the SCC) in a commutative way.  The
   easiest way is to not mix in the hashes of the SCC members at
   all.  To make this work we have to delay setting the hash
   values of the SCC until it is complete.  */

static hashval_t
iterative_hash_gimple_type (tree type, hashval_t val,
			    VEC(tree, heap) **sccstack,
			    struct pointer_map_t *sccstate,
			    struct obstack *sccstate_obstack)
{
  hashval_t v;
  void **slot;
  struct sccs *state;

#ifdef ENABLE_CHECKING
  /* Not visited during this DFS walk nor during previous walks.  */
  gcc_assert (!pointer_map_contains (type_hash_cache, type)
	      && !pointer_map_contains (sccstate, type));
#endif
  /* Push TYPE onto the SCC stack and initialize its Tarjan state.  */
  state = XOBNEW (sccstate_obstack, struct sccs);
  *pointer_map_insert (sccstate, type) = state;

  VEC_safe_push (tree, heap, *sccstack, type);
  state->dfsnum = next_dfs_num++;
  state->low = state->dfsnum;
  state->on_sccstack = true;

  /* Combine a few common features of types so that types are grouped into
     smaller sets; when searching for existing matching types to merge,
     only existing types having the same features as the new type will be
     checked.  */
  v = iterative_hash_hashval_t (TREE_CODE (type), 0);
  v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
  v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);

  /* Do not hash the types size as this will cause differences in
     hash values for the complete vs. the incomplete type variant.  */

  /* Incorporate common features of numerical types.  */
  if (INTEGRAL_TYPE_P (type)
      || SCALAR_FLOAT_TYPE_P (type)
      || FIXED_POINT_TYPE_P (type))
    {
      v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
      v = iterative_hash_hashval_t (TYPE_MODE (type), v);
      v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
    }

  /* For pointer and reference types, fold in information about the type
     pointed to but do not recurse into possibly incomplete types to
     avoid hash differences for complete vs. incomplete types.  */
  if (POINTER_TYPE_P (type))
    {
      if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (type)))
	{
	  /* Hash only the tree code and tag name of the pointed-to
	     aggregate so that pointers to the complete and incomplete
	     variants hash the same.  */
	  v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
	  v = iterative_hash_name
	      (TYPE_NAME (TYPE_MAIN_VARIANT (TREE_TYPE (type))), v);
	}
      else
	v = visit (TREE_TYPE (type), state, v,
		   sccstack, sccstate, sccstate_obstack);
    }

  /* For integer types hash the types min/max values and the string flag.  */
  if (TREE_CODE (type) == INTEGER_TYPE)
    {
      v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
      v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
    }

  /* For array types hash their domain and the string flag.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      && TYPE_DOMAIN (type))
    {
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
      v = visit (TYPE_DOMAIN (type), state, v,
		 sccstack, sccstate, sccstate_obstack);
    }

  /* Recurse for aggregates with a single element type.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      || TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    v = visit (TREE_TYPE (type), state, v,
	       sccstack, sccstate, sccstate_obstack);

  /* Incorporate function return and argument types.  */
  if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
    {
      unsigned na;
      tree p;

      /* For method types also incorporate their parent class.  */
      if (TREE_CODE (type) == METHOD_TYPE)
	v = visit (TYPE_METHOD_BASETYPE (type), state, v,
		   sccstack, sccstate, sccstate_obstack);

      v = visit (TREE_TYPE (type), state, v,
		 sccstack, sccstate, sccstate_obstack);

      /* Hash each argument type and finally the argument count.  */
      for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
	{
	  v = visit (TREE_VALUE (p), state, v,
		     sccstack, sccstate, sccstate_obstack);
	  na++;
	}

      v = iterative_hash_hashval_t (na, v);
    }

  if (TREE_CODE (type) == RECORD_TYPE
      || TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      unsigned nf;
      tree f;

      /* Hash the tag name, then each field's name and type, then the
	 field count.  */
      v = iterative_hash_name (TYPE_NAME (TYPE_MAIN_VARIANT (type)), v);

      for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
	{
	  v = iterative_hash_name (DECL_NAME (f), v);
	  v = visit (TREE_TYPE (f), state, v,
		     sccstack, sccstate, sccstate_obstack);
	  nf++;
	}

      v = iterative_hash_hashval_t (nf, v);
    }

  /* Record hash for us.  */
  state->hash = v;

  /* See if we found an SCC.  */
  if (state->low == state->dfsnum)
    {
      tree x;

      /* Pop off the SCC and set its hash values.  */
      do
	{
	  struct sccs *cstate;
	  x = VEC_pop (tree, *sccstack);
	  gcc_assert (!pointer_map_contains (type_hash_cache, x));
	  cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
	  cstate->on_sccstack = false;
	  /* Only now is the hash value final; record it in the global
	     cache so future walks stop at this type.  */
	  slot = pointer_map_insert (type_hash_cache, x);
	  *slot = (void *) (size_t) cstate->hash;
	}
      while (x != type);
    }

  return iterative_hash_hashval_t (v, val);
}
3813
3814
3815 /* Returns a hash value for P (assumed to be a type). The hash value
3816 is computed using some distinguishing features of the type. Note
3817 that we cannot use pointer hashing here as we may be dealing with
3818 two distinct instances of the same type.
3819
3820 This function should produce the same hash value for two compatible
3821 types according to gimple_types_compatible_p. */
3822
3823 static hashval_t
3824 gimple_type_hash (const void *p)
3825 {
3826 const_tree t = (const_tree) p;
3827 VEC(tree, heap) *sccstack = NULL;
3828 struct pointer_map_t *sccstate;
3829 struct obstack sccstate_obstack;
3830 hashval_t val;
3831 void **slot;
3832
3833 if (type_hash_cache == NULL)
3834 type_hash_cache = pointer_map_create ();
3835
3836 if ((slot = pointer_map_contains (type_hash_cache, p)) != NULL)
3837 return iterative_hash_hashval_t ((hashval_t) (size_t) *slot, 0);
3838
3839 /* Perform a DFS walk and pre-hash all reachable types. */
3840 next_dfs_num = 1;
3841 sccstate = pointer_map_create ();
3842 gcc_obstack_init (&sccstate_obstack);
3843 val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
3844 &sccstack, sccstate, &sccstate_obstack);
3845 VEC_free (tree, heap, sccstack);
3846 pointer_map_destroy (sccstate);
3847 obstack_free (&sccstate_obstack, NULL);
3848
3849 return val;
3850 }
3851
3852
3853 /* Returns nonzero if P1 and P2 are equal. */
3854
3855 static int
3856 gimple_type_eq (const void *p1, const void *p2)
3857 {
3858 const_tree t1 = (const_tree) p1;
3859 const_tree t2 = (const_tree) p2;
3860 return gimple_types_compatible_p (CONST_CAST_TREE (t1), CONST_CAST_TREE (t2));
3861 }
3862
3863
/* Register type T in the global type table gimple_types.
   If another type T', compatible with T, already existed in
   gimple_types then return T', otherwise return T.  This is used by
   LTO to merge identical types read from different TUs.  */

tree
gimple_register_type (tree t)
{
  void **slot;

  gcc_assert (TYPE_P (t));

  /* Always register the main variant first.  This is important so we
     pick up the non-typedef variants as canonical, otherwise we'll end
     up taking typedef ids for structure tags during comparison.  */
  if (TYPE_MAIN_VARIANT (t) != t)
    gimple_register_type (TYPE_MAIN_VARIANT (t));

  /* The table is created lazily on the first registration.  */
  if (gimple_types == NULL)
    gimple_types = htab_create (16381, gimple_type_hash, gimple_type_eq, 0);

  slot = htab_find_slot (gimple_types, t, INSERT);
  if (*slot
      && *(tree *)slot != t)
    {
      /* A compatible type was registered earlier; T will be discarded
	 in favor of it after unlinking T from shared tree chains.  */
      tree new_type = (tree) *((tree *) slot);

      /* Do not merge types with different addressability.  */
      gcc_assert (TREE_ADDRESSABLE (t) == TREE_ADDRESSABLE (new_type));

      /* If t is not its main variant then make t unreachable from its
	 main variant list.  Otherwise we'd queue up a lot of duplicates
	 there.  */
      if (t != TYPE_MAIN_VARIANT (t))
	{
	  tree tem = TYPE_MAIN_VARIANT (t);
	  /* Walk the singly-linked variant chain to the predecessor
	     of T and splice T out.  */
	  while (tem && TYPE_NEXT_VARIANT (tem) != t)
	    tem = TYPE_NEXT_VARIANT (tem);
	  if (tem)
	    TYPE_NEXT_VARIANT (tem) = TYPE_NEXT_VARIANT (t);
	  TYPE_NEXT_VARIANT (t) = NULL_TREE;
	}

      /* If we are a pointer then remove us from the pointer-to or
	 reference-to chain.  Otherwise we'd queue up a lot of duplicates
	 there.  */
      if (TREE_CODE (t) == POINTER_TYPE)
	{
	  if (TYPE_POINTER_TO (TREE_TYPE (t)) == t)
	    TYPE_POINTER_TO (TREE_TYPE (t)) = TYPE_NEXT_PTR_TO (t);
	  else
	    {
	      /* T is not the chain head; find its predecessor and
		 splice it out.  */
	      tree tem = TYPE_POINTER_TO (TREE_TYPE (t));
	      while (tem && TYPE_NEXT_PTR_TO (tem) != t)
		tem = TYPE_NEXT_PTR_TO (tem);
	      if (tem)
		TYPE_NEXT_PTR_TO (tem) = TYPE_NEXT_PTR_TO (t);
	    }
	  TYPE_NEXT_PTR_TO (t) = NULL_TREE;
	}
      else if (TREE_CODE (t) == REFERENCE_TYPE)
	{
	  if (TYPE_REFERENCE_TO (TREE_TYPE (t)) == t)
	    TYPE_REFERENCE_TO (TREE_TYPE (t)) = TYPE_NEXT_REF_TO (t);
	  else
	    {
	      /* Same splice operation for the reference-to chain.  */
	      tree tem = TYPE_REFERENCE_TO (TREE_TYPE (t));
	      while (tem && TYPE_NEXT_REF_TO (tem) != t)
		tem = TYPE_NEXT_REF_TO (tem);
	      if (tem)
		TYPE_NEXT_REF_TO (tem) = TYPE_NEXT_REF_TO (t);
	    }
	  TYPE_NEXT_REF_TO (t) = NULL_TREE;
	}

      t = new_type;
    }
  else
    *slot = (void *) t;

  return t;
}
3946
3947
/* Show statistics on references to the global type table gimple_types.  */

void
print_gimple_types_stats (void)
{
  /* Both tables are created lazily; report them as empty when they
     were never instantiated (e.g. in a non-LTO compilation).  */
  if (gimple_types)
    fprintf (stderr, "GIMPLE type table: size %ld, %ld elements, "
	     "%ld searches, %ld collisions (ratio: %f)\n",
	     (long) htab_size (gimple_types),
	     (long) htab_elements (gimple_types),
	     (long) gimple_types->searches,
	     (long) gimple_types->collisions,
	     htab_collisions (gimple_types));
  else
    fprintf (stderr, "GIMPLE type table is empty\n");
  if (gtc_visited)
    fprintf (stderr, "GIMPLE type comparison table: size %ld, %ld "
	     "elements, %ld searches, %ld collisions (ratio: %f)\n",
	     (long) htab_size (gtc_visited),
	     (long) htab_elements (gtc_visited),
	     (long) gtc_visited->searches,
	     (long) gtc_visited->collisions,
	     htab_collisions (gtc_visited));
  else
    fprintf (stderr, "GIMPLE type comparison table is empty\n");
}
3974
/* Free the gimple type hashtables used for LTO type merging.  */

void
free_gimple_type_tables (void)
{
  /* Last chance to print stats for the tables.  */
  if (flag_lto_report)
    print_gimple_types_stats ();

  /* Each table/cache is reset to NULL so a later lookup re-creates it
     lazily instead of touching freed memory.  */
  if (gimple_types)
    {
      htab_delete (gimple_types);
      gimple_types = NULL;
    }
  if (type_hash_cache)
    {
      pointer_map_destroy (type_hash_cache);
      type_hash_cache = NULL;
    }
  if (gtc_visited)
    {
      /* The comparison table's entries live on gtc_ob; free both.  */
      htab_delete (gtc_visited);
      obstack_free (&gtc_ob, NULL);
      gtc_visited = NULL;
    }
}
4001
4002
/* Return a type the same as TYPE except unsigned or
   signed according to UNSIGNEDP.

   First try to map TYPE onto one of the standard C or machine-mode
   type nodes; if none matches, fall back to building a nonstandard
   integer type of the same precision.  */

static tree
gimple_signed_or_unsigned_type (bool unsignedp, tree type)
{
  tree type1;

  /* Compare against the main variant so qualified/typedef'ed variants
     map to the same standard node.  */
  type1 = TYPE_MAIN_VARIANT (type);
  if (type1 == signed_char_type_node
      || type1 == char_type_node
      || type1 == unsigned_char_type_node)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (type1 == integer_type_node || type1 == unsigned_type_node)
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (type1 == long_long_integer_type_node
      || type1 == long_long_unsigned_type_node)
    return unsignedp
           ? long_long_unsigned_type_node
	   : long_long_integer_type_node;
#if HOST_BITS_PER_WIDE_INT >= 64
  if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

  /* Helper macros mapping fixed-point type nodes (fract/accum, all
     widths, plus machine-mode and saturating variants) to their
     signed/unsigned counterparts.  */
#define GIMPLE_FIXED_TYPES(NAME)	    \
  if (type1 == short_ ## NAME ## _type_node \
      || type1 == unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_short_ ## NAME ## _type_node \
		     : short_ ## NAME ## _type_node; \
  if (type1 == NAME ## _type_node \
      || type1 == unsigned_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_ ## NAME ## _type_node \
		     : NAME ## _type_node; \
  if (type1 == long_ ## NAME ## _type_node \
      || type1 == unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_ ## NAME ## _type_node \
		     : long_ ## NAME ## _type_node; \
  if (type1 == long_long_ ## NAME ## _type_node \
      || type1 == unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
		     : long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES(NAME) \
  if (type1 == NAME ## _type_node \
      || type1 == u ## NAME ## _type_node) \
    return unsignedp ? u ## NAME ## _type_node \
		     : NAME ## _type_node;

#define GIMPLE_FIXED_TYPES_SAT(NAME) \
  if (type1 == sat_ ## short_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
		     : sat_ ## short_ ## NAME ## _type_node; \
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
		     : sat_ ## long_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
		     : sat_ ## long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES_SAT(NAME)	\
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## u ## NAME ## _type_node) \
    return unsignedp ? sat_ ## u ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node;

  GIMPLE_FIXED_TYPES (fract);
  GIMPLE_FIXED_TYPES_SAT (fract);
  GIMPLE_FIXED_TYPES (accum);
  GIMPLE_FIXED_TYPES_SAT (accum);

  GIMPLE_FIXED_MODE_TYPES (qq);
  GIMPLE_FIXED_MODE_TYPES (hq);
  GIMPLE_FIXED_MODE_TYPES (sq);
  GIMPLE_FIXED_MODE_TYPES (dq);
  GIMPLE_FIXED_MODE_TYPES (tq);
  GIMPLE_FIXED_MODE_TYPES_SAT (qq);
  GIMPLE_FIXED_MODE_TYPES_SAT (hq);
  GIMPLE_FIXED_MODE_TYPES_SAT (sq);
  GIMPLE_FIXED_MODE_TYPES_SAT (dq);
  GIMPLE_FIXED_MODE_TYPES_SAT (tq);
  GIMPLE_FIXED_MODE_TYPES (ha);
  GIMPLE_FIXED_MODE_TYPES (sa);
  GIMPLE_FIXED_MODE_TYPES (da);
  GIMPLE_FIXED_MODE_TYPES (ta);
  GIMPLE_FIXED_MODE_TYPES_SAT (ha);
  GIMPLE_FIXED_MODE_TYPES_SAT (sa);
  GIMPLE_FIXED_MODE_TYPES_SAT (da);
  GIMPLE_FIXED_MODE_TYPES_SAT (ta);

  /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
     the precision; they have precision set to match their range, but
     may use a wider mode to match an ABI.  If we change modes, we may
     wind up with bad conversions.  For INTEGER_TYPEs in C, must check
     the precision as well, so as to yield correct results for
     bit-field types.  C++ does not have these separate bit-field
     types, and producing a signed or unsigned variant of an
     ENUMERAL_TYPE may cause other problems as well.  */
  if (!INTEGRAL_TYPE_P (type)
      || TYPE_UNSIGNED (type) == unsignedp)
    return type;

  /* Second pass: match by mode and precision rather than node
     identity, for types that are not the standard nodes themselves.  */
#define TYPE_OK(node)						    \
  (TYPE_MODE (type) == TYPE_MODE (node)				    \
   && TYPE_PRECISION (type) == TYPE_PRECISION (node))
  if (TYPE_OK (signed_char_type_node))
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (TYPE_OK (integer_type_node))
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (TYPE_OK (short_integer_type_node))
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (TYPE_OK (long_integer_type_node))
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (TYPE_OK (long_long_integer_type_node))
    return (unsignedp
	    ? long_long_unsigned_type_node
	    : long_long_integer_type_node);

#if HOST_BITS_PER_WIDE_INT >= 64
  if (TYPE_OK (intTI_type_node))
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (TYPE_OK (intDI_type_node))
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (TYPE_OK (intSI_type_node))
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (TYPE_OK (intHI_type_node))
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (TYPE_OK (intQI_type_node))
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

#undef GIMPLE_FIXED_TYPES
#undef GIMPLE_FIXED_MODE_TYPES
#undef GIMPLE_FIXED_TYPES_SAT
#undef GIMPLE_FIXED_MODE_TYPES_SAT
#undef TYPE_OK

  /* No standard node matched; synthesize a type of the right
     precision and signedness.  */
  return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
}
4161
4162
4163 /* Return an unsigned type the same as TYPE in other respects. */
4164
4165 tree
4166 gimple_unsigned_type (tree type)
4167 {
4168 return gimple_signed_or_unsigned_type (true, type);
4169 }
4170
4171
4172 /* Return a signed type the same as TYPE in other respects. */
4173
4174 tree
4175 gimple_signed_type (tree type)
4176 {
4177 return gimple_signed_or_unsigned_type (false, type);
4178 }
4179
4180
/* Return the typed-based alias set for T, which may be an expression
   or a type.  Return -1 if we don't do anything special.  */

alias_set_type
gimple_get_alias_set (tree t)
{
  tree u;

  /* Permit type-punning when accessing a union, provided the access
     is directly through the union.  For example, this code does not
     permit taking the address of a union member and then storing
     through it.  Even the type-punning allowed here is a GCC
     extension, albeit a common and useful one; the C standard says
     that such accesses have implementation-defined behavior.  */
  for (u = t;
       TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
       u = TREE_OPERAND (u, 0))
    if (TREE_CODE (u) == COMPONENT_REF
	&& TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
      return 0;

  /* That's all the expressions we handle specially.  */
  if (!TYPE_P (t))
    return -1;

  /* For convenience, follow the C standard when dealing with
     character types.  Any object may be accessed via an lvalue that
     has character type.  Alias set 0 conflicts with everything.  */
  if (t == char_type_node
      || t == signed_char_type_node
      || t == unsigned_char_type_node)
    return 0;

  /* Allow aliasing between signed and unsigned variants of the same
     type.  We treat the signed variant as canonical.  */
  if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
    {
      tree t1 = gimple_signed_type (t);

      /* t1 == t can happen for boolean nodes which are always unsigned.  */
      if (t1 != t)
	return get_alias_set (t1);
    }
  else if (POINTER_TYPE_P (t))
    {
      /* From the common C and C++ langhook implementation:

	 Unfortunately, there is no canonical form of a pointer type.
	 In particular, if we have `typedef int I', then `int *', and
	 `I *' are different types.  So, we have to pick a canonical
	 representative.  We do this below.

	 Technically, this approach is actually more conservative that
	 it needs to be.  In particular, `const int *' and `int *'
	 should be in different alias sets, according to the C and C++
	 standard, since their types are not the same, and so,
	 technically, an `int **' and `const int **' cannot point at
	 the same thing.

	 But, the standard is wrong.  In particular, this code is
	 legal C++:

	 int *ip;
	 int **ipp = &ip;
	 const int* const* cipp = ipp;
	 And, it doesn't make sense for that to be legal unless you
	 can dereference IPP and CIPP.  So, we ignore cv-qualifiers on
	 the pointed-to types.  This issue has been reported to the
	 C++ committee.  */

      /* In addition to the above canonicalization issue with LTO
	 we should also canonicalize `T (*)[]' to `T *' avoiding
	 alias issues with pointer-to element types and pointer-to
	 array types.

	 Likewise we need to deal with the situation of incomplete
	 pointed-to types and make `*(struct X **)&a' and
	 `*(struct X {} **)&a' alias.  Otherwise we will have to
	 guarantee that all pointer-to incomplete type variants
	 will be replaced by pointer-to complete type variants if
	 they are available.

	 With LTO the convenient situation of using `void *' to
	 access and store any pointer type will also become
	 more apparent (and `void *' is just another pointer-to
	 incomplete type).  Assigning alias-set zero to `void *'
	 and all pointer-to incomplete types is a not appealing
	 solution.  Assigning an effective alias-set zero only
	 affecting pointers might be - by recording proper subset
	 relationships of all pointer alias-sets.

	 Pointer-to function types are another grey area which
	 needs caution.  Globbing them all into one alias-set
	 or the above effective zero set would work.  */

      /* For now just assign the same alias-set to all pointers.
	 That's simple and avoids all the above problems.  */
      if (t != ptr_type_node)
	return get_alias_set (ptr_type_node);
    }

  /* Fall back to the language-independent default handling.  */
  return -1;
}
4284
4285
/* Data structure used to count the number of dereferences to PTR
   inside an expression.  */
struct count_ptr_d
{
  /* The pointer SSA name / decl whose dereferences are being counted.  */
  tree ptr;
  /* Number of stores through *PTR found so far.  */
  unsigned num_stores;
  /* Number of loads through *PTR found so far.  */
  unsigned num_loads;
};
4294
4295 /* Helper for count_uses_and_derefs. Called by walk_tree to look for
4296 (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA. */
4297
4298 static tree
4299 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
4300 {
4301 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
4302 struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;
4303
4304 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
4305 pointer 'ptr' is *not* dereferenced, it is simply used to compute
4306 the address of 'fld' as 'ptr + offsetof(fld)'. */
4307 if (TREE_CODE (*tp) == ADDR_EXPR)
4308 {
4309 *walk_subtrees = 0;
4310 return NULL_TREE;
4311 }
4312
4313 if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
4314 {
4315 if (wi_p->is_lhs)
4316 count_p->num_stores++;
4317 else
4318 count_p->num_loads++;
4319 }
4320
4321 return NULL_TREE;
4322 }
4323
4324 /* Count the number of direct and indirect uses for pointer PTR in
4325 statement STMT. The number of direct uses is stored in
4326 *NUM_USES_P. Indirect references are counted separately depending
4327 on whether they are store or load operations. The counts are
4328 stored in *NUM_STORES_P and *NUM_LOADS_P. */
4329
4330 void
4331 count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
4332 unsigned *num_loads_p, unsigned *num_stores_p)
4333 {
4334 ssa_op_iter i;
4335 tree use;
4336
4337 *num_uses_p = 0;
4338 *num_loads_p = 0;
4339 *num_stores_p = 0;
4340
4341 /* Find out the total number of uses of PTR in STMT. */
4342 FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
4343 if (use == ptr)
4344 (*num_uses_p)++;
4345
4346 /* Now count the number of indirect references to PTR. This is
4347 truly awful, but we don't have much choice. There are no parent
4348 pointers inside INDIRECT_REFs, so an expression like
4349 '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
4350 find all the indirect and direct uses of x_1 inside. The only
4351 shortcut we can take is the fact that GIMPLE only allows
4352 INDIRECT_REFs inside the expressions below. */
4353 if (is_gimple_assign (stmt)
4354 || gimple_code (stmt) == GIMPLE_RETURN
4355 || gimple_code (stmt) == GIMPLE_ASM
4356 || is_gimple_call (stmt))
4357 {
4358 struct walk_stmt_info wi;
4359 struct count_ptr_d count;
4360
4361 count.ptr = ptr;
4362 count.num_stores = 0;
4363 count.num_loads = 0;
4364
4365 memset (&wi, 0, sizeof (wi));
4366 wi.info = &count;
4367 walk_gimple_op (stmt, count_ptr_derefs, &wi);
4368
4369 *num_stores_p = count.num_stores;
4370 *num_loads_p = count.num_loads;
4371 }
4372
4373 gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
4374 }
4375
4376 /* From a tree operand OP return the base of a load or store operation
4377 or NULL_TREE if OP is not a load or a store. */
4378
4379 static tree
4380 get_base_loadstore (tree op)
4381 {
4382 while (handled_component_p (op))
4383 op = TREE_OPERAND (op, 0);
4384 if (DECL_P (op)
4385 || INDIRECT_REF_P (op)
4386 || TREE_CODE (op) == TARGET_MEM_REF)
4387 return op;
4388 return NULL_TREE;
4389 }
4390
/* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
   VISIT_ADDR if non-NULL on loads, store and address-taken operands
   passing the STMT, the base of the operand and DATA to it.  The base
   will be either a decl, an indirect reference (including TARGET_MEM_REF)
   or the argument of an address expression.
   Returns the results of these callbacks or'ed.  */

bool
walk_stmt_load_store_addr_ops (gimple stmt, void *data,
			       bool (*visit_load)(gimple, tree, void *),
			       bool (*visit_store)(gimple, tree, void *),
			       bool (*visit_addr)(gimple, tree, void *))
{
  bool ret = false;
  unsigned i;
  if (gimple_assign_single_p (stmt))
    {
      /* Single assignment: the LHS is a potential store, the RHS a
	 potential load and/or address-taken operand.  */
      tree lhs, rhs;
      if (visit_store)
	{
	  lhs = get_base_loadstore (gimple_assign_lhs (stmt));
	  if (lhs)
	    ret |= visit_store (stmt, lhs, data);
	}
      rhs = gimple_assign_rhs1 (stmt);
      /* Strip the access path so the checks below see the base.  */
      while (handled_component_p (rhs))
	rhs = TREE_OPERAND (rhs, 0);
      if (visit_addr)
	{
	  /* An address can be taken explicitly (ADDR_EXPR), through a
	     TARGET_MEM_REF base, or inside an OBJ_TYPE_REF object.  */
	  if (TREE_CODE (rhs) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
	  else if (TREE_CODE (rhs) == TARGET_MEM_REF
		   && TMR_BASE (rhs) != NULL_TREE
		   && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
	  else if (TREE_CODE (rhs) == OBJ_TYPE_REF
		   && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
						   0), data);
	  /* The LHS can also take an address through a TARGET_MEM_REF
	     base.  */
	  lhs = gimple_assign_lhs (stmt);
	  if (TREE_CODE (lhs) == TARGET_MEM_REF
	      && TMR_BASE (lhs) != NULL_TREE
	      && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
	}
      if (visit_load)
	{
	  rhs = get_base_loadstore (rhs);
	  if (rhs)
	    ret |= visit_load (stmt, rhs, data);
	}
    }
  else if (visit_addr
	   && (is_gimple_assign (stmt)
	       || gimple_code (stmt) == GIMPLE_COND))
    {
      /* Non-single assignments and conditions only carry register
	 operands; just scan for ADDR_EXPR operands.  */
      for (i = 0; i < gimple_num_ops (stmt); ++i)
	if (gimple_op (stmt, i)
	    && TREE_CODE (gimple_op (stmt, i)) == ADDR_EXPR)
	  ret |= visit_addr (stmt, TREE_OPERAND (gimple_op (stmt, i), 0), data);
    }
  else if (is_gimple_call (stmt))
    {
      /* The call LHS, if any, is a potential store.  */
      if (visit_store)
	{
	  tree lhs = gimple_call_lhs (stmt);
	  if (lhs)
	    {
	      lhs = get_base_loadstore (lhs);
	      if (lhs)
		ret |= visit_store (stmt, lhs, data);
	    }
	}
      /* Each argument is either a taken address or a potential load.  */
      if (visit_load || visit_addr)
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree rhs = gimple_call_arg (stmt, i);
	    if (visit_addr
		&& TREE_CODE (rhs) == ADDR_EXPR)
	      ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
	    else if (visit_load)
	      {
		rhs = get_base_loadstore (rhs);
		if (rhs)
		  ret |= visit_load (stmt, rhs, data);
	      }
	  }
      /* The static chain can take an address as well.  */
      if (visit_addr
	  && gimple_call_chain (stmt)
	  && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
	ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
			   data);
      /* With the return-slot optimization the address of the LHS is
	 handed to the callee, so it counts as address-taken.  */
      if (visit_addr
	  && gimple_call_return_slot_opt_p (stmt)
	  && gimple_call_lhs (stmt) != NULL_TREE
	  && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
	ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
    }
  else if (gimple_code (stmt) == GIMPLE_ASM)
    {
      unsigned noutputs;
      const char *constraint;
      const char **oconstraints;
      bool allows_mem, allows_reg, is_inout;
      noutputs = gimple_asm_noutputs (stmt);
      oconstraints = XALLOCAVEC (const char *, noutputs);
      /* asm outputs are stores; a memory-only constraint additionally
	 implies the operand's address is taken.  */
      if (visit_store || visit_addr)
	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = get_base_loadstore (TREE_VALUE (link));
	    if (op && visit_store)
	      ret |= visit_store (stmt, op, data);
	    if (visit_addr)
	      {
		constraint = TREE_STRING_POINTER
		    (TREE_VALUE (TREE_PURPOSE (link)));
		/* Remember output constraints: parse_input_constraint
		   needs them to resolve matching ("0", "1", ...) input
		   constraints below.  */
		oconstraints[i] = constraint;
		parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
					 &allows_reg, &is_inout);
		if (op && !allows_reg && allows_mem)
		  ret |= visit_addr (stmt, op, data);
	      }
	  }
      /* asm inputs are taken addresses or loads, analogous to the
	 outputs above.  */
      if (visit_load || visit_addr)
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    if (visit_addr
		&& TREE_CODE (op) == ADDR_EXPR)
	      ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	    else if (visit_load || visit_addr)
	      {
		op = get_base_loadstore (op);
		if (op)
		  {
		    if (visit_load)
		      ret |= visit_load (stmt, op, data);
		    if (visit_addr)
		      {
			constraint = TREE_STRING_POINTER
			    (TREE_VALUE (TREE_PURPOSE (link)));
			parse_input_constraint (&constraint, 0, 0, noutputs,
						0, oconstraints,
						&allows_mem, &allows_reg);
			if (!allows_reg && allows_mem)
			  ret |= visit_addr (stmt, op, data);
		      }
		  }
	      }
	  }
    }
  else if (gimple_code (stmt) == GIMPLE_RETURN)
    {
      /* A returned value is either a taken address or a load.  */
      tree op = gimple_return_retval (stmt);
      if (op)
	{
	  if (visit_addr
	      && TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	  else if (visit_load)
	    {
	      op = get_base_loadstore (op);
	      if (op)
		ret |= visit_load (stmt, op, data);
	    }
	}
    }
  else if (visit_addr
	   && gimple_code (stmt) == GIMPLE_PHI)
    {
      /* PHI arguments can carry taken addresses.  */
      for (i = 0; i < gimple_phi_num_args (stmt); ++i)
	{
	  tree op = PHI_ARG_DEF (stmt, i);
	  if (TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	}
    }

  return ret;
}
4573
4574 /* Like walk_stmt_load_store_addr_ops but with NULL visit_addr. IPA-CP
4575 should make a faster clone for this case. */
4576
4577 bool
4578 walk_stmt_load_store_ops (gimple stmt, void *data,
4579 bool (*visit_load)(gimple, tree, void *),
4580 bool (*visit_store)(gimple, tree, void *))
4581 {
4582 return walk_stmt_load_store_addr_ops (stmt, data,
4583 visit_load, visit_store, NULL);
4584 }
4585
4586 /* Helper for gimple_ior_addresses_taken_1. */
4587
4588 static bool
4589 gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
4590 tree addr, void *data)
4591 {
4592 bitmap addresses_taken = (bitmap)data;
4593 while (handled_component_p (addr))
4594 addr = TREE_OPERAND (addr, 0);
4595 if (DECL_P (addr))
4596 {
4597 bitmap_set_bit (addresses_taken, DECL_UID (addr));
4598 return true;
4599 }
4600 return false;
4601 }
4602
4603 /* Set the bit for the uid of all decls that have their address taken
4604 in STMT in the ADDRESSES_TAKEN bitmap. Returns true if there
4605 were any in this stmt. */
4606
4607 bool
4608 gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
4609 {
4610 return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
4611 gimple_ior_addresses_taken_1);
4612 }
4613
4614
4615 /* Return a printable name for symbol DECL. */
4616
4617 const char *
4618 gimple_decl_printable_name (tree decl, int verbosity)
4619 {
4620 gcc_assert (decl && DECL_NAME (decl));
4621
4622 if (DECL_ASSEMBLER_NAME_SET_P (decl))
4623 {
4624 const char *str, *mangled_str;
4625 int dmgl_opts = DMGL_NO_OPTS;
4626
4627 if (verbosity >= 2)
4628 {
4629 dmgl_opts = DMGL_VERBOSE
4630 | DMGL_ANSI
4631 | DMGL_GNU_V3
4632 | DMGL_RET_POSTFIX;
4633 if (TREE_CODE (decl) == FUNCTION_DECL)
4634 dmgl_opts |= DMGL_PARAMS;
4635 }
4636
4637 mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
4638 str = cplus_demangle_v3 (mangled_str, dmgl_opts);
4639 return (str) ? str : mangled_str;
4640 }
4641
4642 return IDENTIFIER_POINTER (DECL_NAME (decl));
4643 }
4644
4645
4646 /* Fold a OBJ_TYPE_REF expression to the address of a function.
4647 KNOWN_TYPE carries the true type of OBJ_TYPE_REF_OBJECT(REF). Adapted
4648 from cp_fold_obj_type_ref, but it tolerates types with no binfo
4649 data. */
4650
4651 tree
4652 gimple_fold_obj_type_ref (tree ref, tree known_type)
4653 {
4654 HOST_WIDE_INT index;
4655 HOST_WIDE_INT i;
4656 tree v;
4657 tree fndecl;
4658
4659 if (TYPE_BINFO (known_type) == NULL_TREE)
4660 return NULL_TREE;
4661
4662 v = BINFO_VIRTUALS (TYPE_BINFO (known_type));
4663 index = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
4664 i = 0;
4665 while (i != index)
4666 {
4667 i += (TARGET_VTABLE_USES_DESCRIPTORS
4668 ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
4669 v = TREE_CHAIN (v);
4670 }
4671
4672 fndecl = TREE_VALUE (v);
4673
4674 #ifdef ENABLE_CHECKING
4675 gcc_assert (tree_int_cst_equal (OBJ_TYPE_REF_TOKEN (ref),
4676 DECL_VINDEX (fndecl)));
4677 #endif
4678
4679 cgraph_node (fndecl)->local.vtable_method = true;
4680
4681 return build_fold_addr_expr (fndecl);
4682 }
4683
3203 #include "gt-gimple.h" 4684 #include "gt-gimple.h"