CbC_gcc: gcc/gimple.c.orig, comparison @ 57:326d9e06c2e3 (against 54:f62c169bbc24)

author:       ryoma <e075725@ie.u-ryukyu.ac.jp>
date:         Mon, 15 Feb 2010 00:54:17 +0900
description:  modify c-parser.c
/* Gimple IR support functions.

   Copyright 2007, 2008, 2009 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "target.h"
#include "tree.h"
#include "ggc.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "gimple.h"
#include "toplev.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "value-prof.h"
#include "flags.h"
#ifndef noCbC
#include "cbc-tree.h"
#endif
#include "alias.h"
#include "demangle.h"

/* Global type table.  FIXME lto, it should be possible to re-use some
   of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
   etc), but those assume that types were built with the various
   build_*_type routines which is not the case with the streamer.  */
static htab_t gimple_types;
static struct pointer_map_t *type_hash_cache;

/* Global type comparison cache.  */
static htab_t gtc_visited;
static struct obstack gtc_ob;

/* All the tuples have their operand vector (if present) at the very bottom
   of the structure.  Therefore, the offset required to find the operands
   vector is the size of the structure minus the size of the one-element
   tree array at the end (see gimple_ops).  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
  (HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
EXPORTED_CONST size_t gimple_ops_offset_[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof(struct STRUCT),
static const size_t gsstruct_code_size[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

#define DEFGSCODE(SYM, NAME, GSSCODE) NAME,
const char *const gimple_code_name[] = {
#include "gimple.def"
};
#undef DEFGSCODE

#define DEFGSCODE(SYM, NAME, GSSCODE) GSSCODE,
EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
#include "gimple.def"
};
#undef DEFGSCODE

#ifdef GATHER_STATISTICS
/* Gimple stats.  */

int gimple_alloc_counts[(int) gimple_alloc_kind_all];
int gimple_alloc_sizes[(int) gimple_alloc_kind_all];

/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
static const char * const gimple_alloc_kind_names[] = {
    "assignments",
    "phi nodes",
    "conditionals",
    "sequences",
    "everything else"
};

#endif /* GATHER_STATISTICS */

/* A cache of gimple_seq objects.  Sequences are created and destroyed
   fairly often during gimplification.  */
static GTY ((deletable)) struct gimple_seq_d *gimple_seq_cache;

/* Private API manipulation functions shared only with some
   other files.  */
extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);

/* Gimple tuple constructors.
   Note: Any constructor taking a ``gimple_seq'' as a parameter can be
   passed NULL to start with an empty sequence.  */

/* Set the code for statement G to CODE.  */

static inline void
gimple_set_code (gimple g, enum gimple_code code)
{
  g->gsbase.code = code;
}

/* Return the number of bytes needed to hold a GIMPLE statement with
   code CODE.  */

static inline size_t
gimple_size (enum gimple_code code)
{
  return gsstruct_code_size[gss_for_code (code)];
}

/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
   operands.  */

gimple
gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
{
  size_t size;
  gimple stmt;

  size = gimple_size (code);
  if (num_ops > 0)
    size += sizeof (tree) * (num_ops - 1);

#ifdef GATHER_STATISTICS
  {
    enum gimple_alloc_kind kind = gimple_alloc_kind (code);
    gimple_alloc_counts[(int) kind]++;
    gimple_alloc_sizes[(int) kind] += size;
  }
#endif

  stmt = (gimple) ggc_alloc_cleared_stat (size PASS_MEM_STAT);
  gimple_set_code (stmt, code);
  gimple_set_num_ops (stmt, num_ops);

  /* Do not call gimple_set_modified here as it has other side
     effects and this tuple is still not completely built.  */
  stmt->gsbase.modified = 1;

  return stmt;
}

/* Set SUBCODE to be the code of the expression computed by statement G.  */

static inline void
gimple_set_subcode (gimple g, unsigned subcode)
{
  /* We only have 16 bits for the RHS code.  Assert that we are not
     overflowing it.  */
  gcc_assert (subcode < (1 << 16));
  g->gsbase.subcode = subcode;
}



/* Build a tuple with operands.  CODE is the statement to build (which
   must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the sub-code
   for the new tuple.  NUM_OPS is the number of operands to allocate.  */

#define gimple_build_with_ops(c, s, n) \
  gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)

static gimple
gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
                            unsigned num_ops MEM_STAT_DECL)
{
  gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
  gimple_set_subcode (s, subcode);

  return s;
}


/* Build a GIMPLE_RETURN statement returning RETVAL.  */

gimple
gimple_build_return (tree retval)
{
  gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1);
  if (retval)
    gimple_return_set_retval (s, retval);
  return s;
}

/* Helper for gimple_build_call, gimple_build_call_vec and
   gimple_build_call_from_tree.  Build the basic components of a
   GIMPLE_CALL statement to function FN with NARGS arguments.  */

static inline gimple
gimple_build_call_1 (tree fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  if (TREE_CODE (fn) == FUNCTION_DECL)
    fn = build_fold_addr_expr (fn);
  gimple_set_op (s, 1, fn);
  return s;
}


/* Build a GIMPLE_CALL statement to function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_vec (tree fn, VEC(tree, heap) *args)
{
  unsigned i;
  unsigned nargs = VEC_length (tree, args);
  gimple call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, VEC_index (tree, args, i));

  return call;
}


/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  The ... are the arguments.  */

gimple
gimple_build_call (tree fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}
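
/* Illustrative usage sketch (not part of the original file): building
   `tmp = foo (a, b)' with the varargs builder above.  FNDECL, A, B and
   TMP are assumed pre-built trees (a FUNCTION_DECL and three gimple
   values); the names are hypothetical.  Guarded out of the build.  */
#if 0
  gimple call = gimple_build_call (fndecl, 2, a, b);
  gimple_call_set_lhs (call, tmp);
  gimple_seq_add_stmt (&seq, call);
#endif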


/* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
   assumed to be in GIMPLE form already.  Minimal checking is done of
   this fact.  */

gimple
gimple_build_call_from_tree (tree t)
{
  unsigned i, nargs;
  gimple call;
  tree fndecl = get_callee_fndecl (t);

  gcc_assert (TREE_CODE (t) == CALL_EXPR);

  nargs = call_expr_nargs (t);
  call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));

  gimple_set_block (call, TREE_BLOCK (t));

  /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
  gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
  gimple_call_set_cannot_inline (call, CALL_CANNOT_INLINE_P (t));
  gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
  gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
  gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
#ifndef noCbC
  gimple_call_set_cbc_goto (call, CALL_EXPR_CbC_GOTO (t));
#endif
  gimple_set_no_warning (call, TREE_NO_WARNING (t));

  return call;
}


/* Extract the operands and code for expression EXPR into *SUBCODE_P,
   *OP1_P and *OP2_P respectively.  */

void
extract_ops_from_tree (tree expr, enum tree_code *subcode_p, tree *op1_p,
                       tree *op2_p)
{
  enum gimple_rhs_class grhs_class;

  *subcode_p = TREE_CODE (expr);
  grhs_class = get_gimple_rhs_class (*subcode_p);

  if (grhs_class == GIMPLE_BINARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
    }
  else if (grhs_class == GIMPLE_UNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      *op1_p = expr;
      *op2_p = NULL_TREE;
    }
  else
    gcc_unreachable ();
}

/* Build a GIMPLE_ASSIGN statement.

   LHS is the left-hand side of the assignment.
   RHS is the right-hand side; it may be a unary or binary expression.  */

gimple
gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
{
  enum tree_code subcode;
  tree op1, op2;

  extract_ops_from_tree (rhs, &subcode, &op1, &op2);
  return gimple_build_assign_with_ops_stat (subcode, lhs, op1, op2
                                            PASS_MEM_STAT);
}


/* Build a GIMPLE_ASSIGN statement with sub-code SUBCODE and operands
   OP1 and OP2.  If OP2 is NULL then SUBCODE must be of class
   GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS.  */

gimple
gimple_build_assign_with_ops_stat (enum tree_code subcode, tree lhs, tree op1,
                                   tree op2 MEM_STAT_DECL)
{
  unsigned num_ops;
  gimple p;

  /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
     code).  */
  num_ops = get_gimple_rhs_num_ops (subcode) + 1;

  p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
                                  PASS_MEM_STAT);
  gimple_assign_set_lhs (p, lhs);
  gimple_assign_set_rhs1 (p, op1);
  if (op2)
    {
      gcc_assert (num_ops > 2);
      gimple_assign_set_rhs2 (p, op2);
    }

  return p;
}
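
/* Illustrative usage sketch (not part of the original file): the two
   assignment builders above, used through the gimple_build_assign and
   gimple_build_assign_with_ops convenience macros (wrappers around the
   _stat entry points, in the same style as gimple_build_with_ops above).
   X, A, B and Y are hypothetical pre-built trees.  */
#if 0
  /* x = a + b;  -- binary RHS, sub-code PLUS_EXPR.  */
  gimple sum = gimple_build_assign_with_ops (PLUS_EXPR, x, a, b);
  /* y = x;  -- single RHS, sub-code taken from the RHS tree.  */
  gimple copy = gimple_build_assign (y, x);
#endif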


/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

gimple
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);
  return gimple_seq_last_stmt (*seq_p);
}


/* Build a GIMPLE_COND statement.

   PRED is the condition used to compare LHS and the RHS.
   T_LABEL is the label to jump to if the condition is true.
   F_LABEL is the label to jump to otherwise.  */

gimple
gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
                   tree t_label, tree f_label)
{
  gimple p;

  gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
  p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
  gimple_cond_set_lhs (p, lhs);
  gimple_cond_set_rhs (p, rhs);
  gimple_cond_set_true_label (p, t_label);
  gimple_cond_set_false_label (p, f_label);
  return p;
}
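
/* Illustrative usage sketch (not part of the original file): lowering
   `if (a < b) goto t; else goto f;'.  A and B are gimple values,
   T_LAB and F_LAB are LABEL_DECLs; all names are hypothetical.  */
#if 0
  gimple cond = gimple_build_cond (LT_EXPR, a, b, t_lab, f_lab);
  gimple_seq_add_stmt (&seq, cond);
  gimple_seq_add_stmt (&seq, gimple_build_label (t_lab));
#endif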


/* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND.  */

void
gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
                               tree *lhs_p, tree *rhs_p)
{
  location_t loc = EXPR_LOCATION (cond);
  gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
              || TREE_CODE (cond) == TRUTH_NOT_EXPR
              || is_gimple_min_invariant (cond)
              || SSA_VAR_P (cond));

  extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);

  /* Canonicalize conditionals of the form 'if (!VAL)'.  */
  if (*code_p == TRUTH_NOT_EXPR)
    {
      *code_p = EQ_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = fold_convert_loc (loc, TREE_TYPE (*lhs_p), integer_zero_node);
    }
  /* Canonicalize conditionals of the form 'if (VAL)'.  */
  else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
    {
      *code_p = NE_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = fold_convert_loc (loc, TREE_TYPE (*lhs_p), integer_zero_node);
    }
}


/* Build a GIMPLE_COND statement from the conditional expression tree
   COND.  T_LABEL and F_LABEL are as in gimple_build_cond.  */

gimple
gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  return gimple_build_cond (code, lhs, rhs, t_label, f_label);
}

/* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
   boolean expression tree COND.  */

void
gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  gimple_cond_set_condition (stmt, code, lhs, rhs);
}

/* Build a GIMPLE_LABEL statement for LABEL.  */

gimple
gimple_build_label (tree label)
{
  gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
  gimple_label_set_label (p, label);
  return p;
}

/* Build a GIMPLE_GOTO statement to label DEST.  */

gimple
gimple_build_goto (tree dest)
{
  gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
  gimple_goto_set_dest (p, dest);
  return p;
}


/* Build a GIMPLE_NOP statement.  */

gimple
gimple_build_nop (void)
{
  return gimple_alloc (GIMPLE_NOP, 0);
}


/* Build a GIMPLE_BIND statement.
   VARS are the variables in BODY.
   BLOCK is the containing block.  */

gimple
gimple_build_bind (tree vars, gimple_seq body, tree block)
{
  gimple p = gimple_alloc (GIMPLE_BIND, 0);
  gimple_bind_set_vars (p, vars);
  if (body)
    gimple_bind_set_body (p, body);
  if (block)
    gimple_bind_set_block (p, block);
  return p;
}
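
/* Illustrative usage sketch (not part of the original file): wrapping a
   statement sequence in a lexical scope.  LOCALS (a DECL chain),
   BODY_SEQ and SCOPE_BLOCK are hypothetical pre-built objects.  */
#if 0
  gimple bind = gimple_build_bind (locals, body_seq, scope_block);
  gimple_seq_add_stmt (&outer_seq, bind);
#endif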

/* Helper function to set the simple fields of an asm stmt.

   STRING is a pointer to a string that is the asm block's assembly code.
   NINPUTS is the number of register inputs.
   NOUTPUTS is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.
   NLABELS is the number of destination labels.  */

static inline gimple
gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
                    unsigned nclobbers, unsigned nlabels)
{
  gimple p;
  int size = strlen (string);

  /* ASMs with labels cannot have outputs.  This should have been
     enforced by the front end.  */
  gcc_assert (nlabels == 0 || noutputs == 0);

  p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
                             ninputs + noutputs + nclobbers + nlabels);

  p->gimple_asm.ni = ninputs;
  p->gimple_asm.no = noutputs;
  p->gimple_asm.nc = nclobbers;
  p->gimple_asm.nl = nlabels;
  p->gimple_asm.string = ggc_alloc_string (string, size);

#ifdef GATHER_STATISTICS
  gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
#endif

  return p;
}

/* Build a GIMPLE_ASM statement.

   STRING is the assembly code.
   INPUTS is a vector of the input register parameters.
   OUTPUTS is a vector of the output register parameters.
   CLOBBERS is a vector of the clobbered register parameters.
   LABELS is a vector of destination labels.  */

gimple
gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs,
                      VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers,
                      VEC(tree,gc)* labels)
{
  gimple p;
  unsigned i;

  p = gimple_build_asm_1 (string,
                          VEC_length (tree, inputs),
                          VEC_length (tree, outputs),
                          VEC_length (tree, clobbers),
                          VEC_length (tree, labels));

  for (i = 0; i < VEC_length (tree, inputs); i++)
    gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i));

  for (i = 0; i < VEC_length (tree, outputs); i++)
    gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i));

  for (i = 0; i < VEC_length (tree, clobbers); i++)
    gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i));

  for (i = 0; i < VEC_length (tree, labels); i++)
    gimple_asm_set_label_op (p, i, VEC_index (tree, labels, i));

  return p;
}

/* Build a GIMPLE_CATCH statement.

   TYPES are the catch types.
   HANDLER is the exception handler.  */

gimple
gimple_build_catch (tree types, gimple_seq handler)
{
  gimple p = gimple_alloc (GIMPLE_CATCH, 0);
  gimple_catch_set_types (p, types);
  if (handler)
    gimple_catch_set_handler (p, handler);

  return p;
}

/* Build a GIMPLE_EH_FILTER statement.

   TYPES are the filter's types.
   FAILURE is the filter's failure action.  */

gimple
gimple_build_eh_filter (tree types, gimple_seq failure)
{
  gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
  gimple_eh_filter_set_types (p, types);
  if (failure)
    gimple_eh_filter_set_failure (p, failure);

  return p;
}

/* Build a GIMPLE_EH_MUST_NOT_THROW statement.  */

gimple
gimple_build_eh_must_not_throw (tree decl)
{
  gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 1);

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
  gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
  gimple_eh_must_not_throw_set_fndecl (p, decl);

  return p;
}

/* Build a GIMPLE_TRY statement.

   EVAL is the expression to evaluate.
   CLEANUP is the cleanup expression.
   KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
   whether this is a try/catch or a try/finally respectively.  */

gimple
gimple_build_try (gimple_seq eval, gimple_seq cleanup,
                  enum gimple_try_flags kind)
{
  gimple p;

  gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
  p = gimple_alloc (GIMPLE_TRY, 0);
  gimple_set_subcode (p, kind);
  if (eval)
    gimple_try_set_eval (p, eval);
  if (cleanup)
    gimple_try_set_cleanup (p, cleanup);

  return p;
}

/* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.

   CLEANUP is the cleanup expression.  */

gimple
gimple_build_wce (gimple_seq cleanup)
{
  gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
  if (cleanup)
    gimple_wce_set_cleanup (p, cleanup);

  return p;
}


/* Build a GIMPLE_RESX statement.  */

gimple
gimple_build_resx (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}


/* The helper for constructing a gimple switch statement.
   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the default.
   DEFAULT_LABEL is the default label for the switch statement.  */

gimple
gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
{
  /* nlabels + 1 default label + 1 index.  */
  gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
                                    1 + (default_label != NULL) + nlabels);
  gimple_switch_set_index (p, index);
  if (default_label)
    gimple_switch_set_default_label (p, default_label);
  return p;
}


/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL.
   ... are the labels excluding the default.  */

gimple
gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...)
{
  va_list al;
  unsigned i, offset;
  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Store the rest of the labels.  */
  va_start (al, default_label);
  offset = (default_label != NULL);
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + offset, va_arg (al, tree));
  va_end (al);

  return p;
}
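
/* Illustrative usage sketch (not part of the original file): building
   `switch (idx) { case 0: ... case 1: ... default: ... }'.  IDX is a
   gimple value; CASE0, CASE1 and DEFAULT_CASE are assumed to be
   CASE_LABEL_EXPR trees.  All names are hypothetical.  */
#if 0
  gimple sw = gimple_build_switch (2, idx, default_case, case0, case1);
#endif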


/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   DEFAULT_LABEL is the default label.
   ARGS is a vector of labels excluding the default.  */

gimple
gimple_build_switch_vec (tree index, tree default_label, VEC(tree, heap) *args)
{
  unsigned i, offset, nlabels = VEC_length (tree, args);
  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Copy the labels from the vector to the switch statement.  */
  offset = (default_label != NULL);
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + offset, VEC_index (tree, args, i));

  return p;
}

/* Build a GIMPLE_EH_DISPATCH statement.  */

gimple
gimple_build_eh_dispatch (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}

/* Build a new GIMPLE_DEBUG_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
                                         (unsigned)GIMPLE_DEBUG_BIND, 2
                                         PASS_MEM_STAT);

  gimple_debug_bind_set_var (p, var);
  gimple_debug_bind_set_value (p, value);
  if (stmt)
    {
      gimple_set_block (p, gimple_block (stmt));
      gimple_set_location (p, gimple_location (stmt));
    }

  return p;
}

/* Build a GIMPLE_OMP_CRITICAL statement.

   BODY is the sequence of statements that only one thread may execute
   at a time.
   NAME is an optional identifier for this critical block.  */

gimple
gimple_build_omp_critical (gimple_seq body, tree name)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
  gimple_omp_critical_set_name (p, name);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}

/* Build a GIMPLE_OMP_FOR statement.

   BODY is the sequence of statements inside the for loop.
   CLAUSES are any of the OMP loop construct's clauses: private, firstprivate,
   lastprivate, reductions, ordered, schedule, and nowait.
   COLLAPSE is the collapse count.
   PRE_BODY is the sequence of statements that are loop invariant.  */

gimple
gimple_build_omp_for (gimple_seq body, tree clauses, size_t collapse,
                      gimple_seq pre_body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_for_set_clauses (p, clauses);
  p->gimple_omp_for.collapse = collapse;
  p->gimple_omp_for.iter = GGC_CNEWVEC (struct gimple_omp_for_iter, collapse);
  if (pre_body)
    gimple_omp_for_set_pre_body (p, pre_body);

  return p;
}


/* Build a GIMPLE_OMP_PARALLEL statement.

   BODY is the sequence of statements to be executed in parallel.
   CLAUSES are the OMP parallel construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG is the shared data argument(s).  */

gimple
gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
                           tree data_arg)
{
  gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_parallel_set_clauses (p, clauses);
  gimple_omp_parallel_set_child_fn (p, child_fn);
  gimple_omp_parallel_set_data_arg (p, data_arg);

  return p;
}


/* Build a GIMPLE_OMP_TASK statement.

   BODY is the sequence of statements executed by the explicit task.
   CLAUSES are the OMP task construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG is the shared data argument(s).
   COPY_FN is the optional function for firstprivate initialization.
   ARG_SIZE and ARG_ALIGN are the size and alignment of the data block.  */

gimple
gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
                       tree data_arg, tree copy_fn, tree arg_size,
                       tree arg_align)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_task_set_clauses (p, clauses);
  gimple_omp_task_set_child_fn (p, child_fn);
  gimple_omp_task_set_data_arg (p, data_arg);
  gimple_omp_task_set_copy_fn (p, copy_fn);
  gimple_omp_task_set_arg_size (p, arg_size);
  gimple_omp_task_set_arg_align (p, arg_align);

  return p;
}


/* Build a GIMPLE_OMP_SECTION statement for a sections statement.

   BODY is the sequence of statements in the section.  */

gimple
gimple_build_omp_section (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_MASTER statement.

   BODY is the sequence of statements to be executed by just the master.  */

gimple
gimple_build_omp_master (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_CONTINUE statement.

   CONTROL_DEF is the definition of the control variable.
   CONTROL_USE is the use of the control variable.  */

gimple
gimple_build_omp_continue (tree control_def, tree control_use)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
  gimple_omp_continue_set_control_def (p, control_def);
  gimple_omp_continue_set_control_use (p, control_use);
  return p;
}

/* Build a GIMPLE_OMP_ORDERED statement.

   BODY is the sequence of statements inside the loop that will be
   executed in sequence.  */

gimple
gimple_build_omp_ordered (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_RETURN statement.
   WAIT_P is true if this is a non-waiting return.  */

gimple
gimple_build_omp_return (bool wait_p)
{
  gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
  if (wait_p)
    gimple_omp_return_set_nowait (p);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS statement.

   BODY is a sequence of section statements.
   CLAUSES are any of the OMP sections construct's clauses: private,
   firstprivate, lastprivate, reduction, and nowait.  */

gimple
gimple_build_omp_sections (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_sections_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS_SWITCH.  */

gimple
gimple_build_omp_sections_switch (void)
{
  return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
}


/* Build a GIMPLE_OMP_SINGLE statement.

   BODY is the sequence of statements that will be executed once.
   CLAUSES are any of the OMP single construct's clauses: private,
   firstprivate, copyprivate, nowait.  */

gimple
gimple_build_omp_single (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_single_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_ATOMIC_LOAD statement.  */

gimple
gimple_build_omp_atomic_load (tree lhs, tree rhs)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
  gimple_omp_atomic_load_set_lhs (p, lhs);
  gimple_omp_atomic_load_set_rhs (p, rhs);
  return p;
}

/* Build a GIMPLE_OMP_ATOMIC_STORE statement.

   VAL is the value we are storing.  */

gimple
gimple_build_omp_atomic_store (tree val)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
  gimple_omp_atomic_store_set_val (p, val);
  return p;
}

/* Build a GIMPLE_PREDICT statement.  PREDICT is one of the predictors from
   predict.def, OUTCOME is NOT_TAKEN or TAKEN.  */

gimple
gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
{
  gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
  /* Ensure all the predictors fit into the lower bits of the subcode.  */
  gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
  gimple_predict_set_predictor (p, predictor);
  gimple_predict_set_outcome (p, outcome);
  return p;
}

#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  */

void
gimple_check_failed (const_gimple gs, const char *file, int line,
                     const char *function, enum gimple_code code,
                     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
                  gimple_code_name[code],
                  tree_code_name[subcode],
                  gimple_code_name[gimple_code (gs)],
                  gs->gsbase.subcode > 0
                  ? tree_code_name[gs->gsbase.subcode]
                  : "",
                  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */


/* Allocate a new GIMPLE sequence in GC memory and return it.  If
   there are free sequences in GIMPLE_SEQ_CACHE return one of those
   instead.  */

gimple_seq
gimple_seq_alloc (void)
{
  gimple_seq seq = gimple_seq_cache;
  if (seq)
    {
      gimple_seq_cache = gimple_seq_cache->next_free;
      gcc_assert (gimple_seq_cache != seq);
      memset (seq, 0, sizeof (*seq));
    }
  else
    {
      seq = (gimple_seq) ggc_alloc_cleared (sizeof (*seq));
#ifdef GATHER_STATISTICS
      gimple_alloc_counts[(int) gimple_alloc_kind_seq]++;
      gimple_alloc_sizes[(int) gimple_alloc_kind_seq] += sizeof (*seq);
#endif
    }

  return seq;
}

/* Return SEQ to the free pool of GIMPLE sequences.  */

void
gimple_seq_free (gimple_seq seq)
{
  if (seq == NULL)
    return;

  gcc_assert (gimple_seq_first (seq) == NULL);
  gcc_assert (gimple_seq_last (seq) == NULL);

  /* If this triggers, it's a sign that the same list is being freed
     twice.  */
  gcc_assert (seq != gimple_seq_cache || gimple_seq_cache == NULL);

  /* Add SEQ to the pool of free sequences.  */
  seq->next_free = gimple_seq_cache;
  gimple_seq_cache = seq;
}


/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
   *SEQ_P is NULL, a new sequence is allocated.  */

void
gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
  gimple_stmt_iterator si;

  if (gs == NULL)
    return;

  if (*seq_p == NULL)
    *seq_p = gimple_seq_alloc ();

  si = gsi_last (*seq_p);
  gsi_insert_after (&si, gs, GSI_NEW_STMT);
}
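
/* Illustrative usage sketch (not part of the original file): sequences
   grow lazily through a NULL-initialized pointer, so callers never call
   gimple_seq_alloc directly.  LAB is a hypothetical LABEL_DECL.  */
#if 0
  gimple_seq seq = NULL;
  gimple_seq_add_stmt (&seq, gimple_build_label (lab)); /* Allocates SEQ.  */
  gimple_seq_add_stmt (&seq, gimple_build_nop ());
#endif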


/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  */

void
gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  if (*dst_p == NULL)
    *dst_p = gimple_seq_alloc ();

  si = gsi_last (*dst_p);
  gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
}


/* Helper function of empty_body_p.  Return true if STMT is an empty
   statement.  */

static bool
empty_stmt_p (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_NOP)
    return true;
  if (gimple_code (stmt) == GIMPLE_BIND)
    return empty_body_p (gimple_bind_body (stmt));
  return false;
}


/* Return true if BODY contains nothing but empty statements.  */

bool
empty_body_p (gimple_seq body)
{
  gimple_stmt_iterator i;

  if (gimple_seq_empty_p (body))
    return true;
  for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
    if (!empty_stmt_p (gsi_stmt (i))
        && !is_gimple_debug (gsi_stmt (i)))
      return false;

  return true;
}


/* Perform a deep copy of sequence SRC and return the result.  */

gimple_seq
gimple_seq_copy (gimple_seq src)
{
  gimple_stmt_iterator gsi;
  gimple_seq new_seq = gimple_seq_alloc ();
  gimple stmt;

  for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gimple_copy (gsi_stmt (gsi));
      gimple_seq_add_stmt (&new_seq, stmt);
    }

  return new_seq;
}


/* Walk all the statements in the sequence SEQ calling walk_gimple_stmt
   on each one.  WI is as in walk_gimple_stmt.

   If walk_gimple_stmt returns non-NULL, the walk is stopped, the
   value is stored in WI->CALLBACK_RESULT and the statement that
   produced the value is returned.

   Otherwise, all the statements are walked and NULL returned.  */

gimple
walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
                 walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
      if (ret)
        {
          /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
             to hold it.  */
          gcc_assert (wi);
          wi->callback_result = ret;
          return gsi_stmt (gsi);
        }
    }

  if (wi)
    wi->callback_result = NULL_TREE;

  return NULL;
}
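
/* Illustrative usage sketch (not part of the original file): counting
   GIMPLE_CALLs in a sequence with a statement callback.  The callback
   name and the use of WI->INFO as a scratch counter are hypothetical.  */
#if 0
static tree
count_calls_cb (gimple_stmt_iterator *gsi, bool *handled_ops,
                struct walk_stmt_info *wi)
{
  if (is_gimple_call (gsi_stmt (*gsi)))
    ++*(unsigned *) wi->info;
  *handled_ops = false; /* Keep scanning operands and sub-statements.  */
  return NULL_TREE;     /* A non-NULL return would stop the walk.  */
}

  /* ... at a use site ...  */
  unsigned ncalls = 0;
  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  wi.info = &ncalls;
  walk_gimple_seq (seq, count_calls_cb, NULL, &wi);
#endif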


/* Helper function for walk_gimple_stmt.  Walk operands of a GIMPLE_ASM.  */

static tree
walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
                 struct walk_stmt_info *wi)
{
  tree ret, op;
  unsigned noutputs;
  const char **oconstraints;
  unsigned i, n;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  if (wi)
    wi->is_lhs = true;

  for (i = 0; i < noutputs; i++)
    {
      op = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
                               &is_inout);
      if (wi)
        wi->val_only = (allows_reg || !allows_mem);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  n = gimple_asm_ninputs (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                              oconstraints, &allows_mem, &allows_reg);
      if (wi)
        {
          wi->val_only = (allows_reg || !allows_mem);
          /* Although input "m" is not really an LHS, we need an lvalue.  */
          wi->is_lhs = !wi->val_only;
        }
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  if (wi)
    {
      wi->is_lhs = false;
      wi->val_only = true;
    }

  n = gimple_asm_nlabels (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_label_op (stmt, i);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  return NULL_TREE;
}


/* Helper function of WALK_GIMPLE_STMT.  Walk every tree operand in
   STMT.  CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.

   CALLBACK_OP is called on each operand of STMT via walk_tree.
   Additional parameters to walk_tree must be stored in WI.  For each operand
   OP, walk_tree is called as:

        walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)

   If CALLBACK_OP returns non-NULL for an operand, the remaining
   operands are not scanned.

   The return value is that returned by the last call to walk_tree, or
   NULL_TREE if no CALLBACK_OP is specified.  */

inline tree
walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
                struct walk_stmt_info *wi)
{
  struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
  unsigned i;
  tree ret = NULL_TREE;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* Walk the RHS operands.  A formal temporary LHS may use a
         COMPONENT_REF RHS.  */
      if (wi)
        wi->val_only = !is_gimple_reg (gimple_assign_lhs (stmt))
                       || !gimple_assign_single_p (stmt);

      for (i = 1; i < gimple_num_ops (stmt); i++)
        {
          ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
                           pset);
          if (ret)
            return ret;
        }

      /* Walk the LHS.  If the RHS is appropriate for a memory, we
         may use a COMPONENT_REF on the LHS.  */
      if (wi)
        {
          /* If the RHS has more than 1 operand, it is not appropriate
             for the memory.  */
          wi->val_only = !is_gimple_mem_rhs (gimple_assign_rhs1 (stmt))
                         || !gimple_assign_single_p (stmt);
          wi->is_lhs = true;
        }

      ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
      if (ret)
        return ret;

      if (wi)
        {
          wi->val_only = true;
          wi->is_lhs = false;
        }
      break;

    case GIMPLE_CALL:
      if (wi)
        wi->is_lhs = false;

      ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      for (i = 0; i < gimple_call_num_args (stmt); i++)
        {
          ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
                           pset);
          if (ret)
            return ret;
        }

      if (wi)
        wi->is_lhs = true;

      ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      if (wi)
        wi->is_lhs = false;
      break;

    case GIMPLE_CATCH:
      ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_ASM:
      ret = walk_gimple_asm (stmt, callback_op, wi);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_CONTINUE:
      ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
                       callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
                       callback_op, wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_CRITICAL:
      ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
        {
          ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
                           wi, pset);
        }
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_PARALLEL:
      ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_TASK:
      ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_SECTIONS:
      ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;

      break;

    case GIMPLE_OMP_SINGLE:
      ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_ATOMIC_LOAD:
      ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_ATOMIC_STORE:
      ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

      /* Tuples that do not have operands.  */
    case GIMPLE_NOP:
    case GIMPLE_RESX:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_PREDICT:
      break;

    default:
      {
        enum gimple_statement_structure_enum gss;
        gss = gimple_statement_structure (stmt);
        if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
          for (i = 0; i < gimple_num_ops (stmt); i++)
            {
              ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
              if (ret)
                return ret;
            }
      }
      break;
    }

  return NULL_TREE;
}


/* Walk the current statement in GSI (optionally using traversal state
   stored in WI).  If WI is NULL, no state is kept during traversal.
   The callback CALLBACK_STMT is called.  If CALLBACK_STMT indicates
   that it has handled all the operands of the statement, its return
   value is returned.  Otherwise, the return value from CALLBACK_STMT
   is discarded and its operands are scanned.

   If CALLBACK_STMT is NULL or it didn't handle the operands,
   CALLBACK_OP is called on each operand of the statement via
   walk_gimple_op.  If walk_gimple_op returns non-NULL for any
   operand, the remaining operands are not scanned.  In this case, the
   return value from CALLBACK_OP is returned.

   In any other case, NULL_TREE is returned.  */

tree
walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
                  walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple ret;
  tree tree_ret;
  gimple stmt = gsi_stmt (*gsi);

  if (wi)
    wi->gsi = *gsi;

  if (wi && wi->want_locations && gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  ret = NULL;

  /* Invoke the statement callback.  Return if the callback handled
     all of STMT operands by itself.  */
  if (callback_stmt)
    {
      bool handled_ops = false;
      tree_ret = callback_stmt (gsi, &handled_ops, wi);
      if (handled_ops)
        return tree_ret;

      /* If CALLBACK_STMT did not handle operands, it should not have
         a value to return.  */
      gcc_assert (tree_ret == NULL);

      /* Re-read stmt in case the callback changed it.  */
      stmt = gsi_stmt (*gsi);
    }

  /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
  if (callback_op)
    {
      tree_ret = walk_gimple_op (stmt, callback_op, wi);
      if (tree_ret)
        return tree_ret;
    }

  /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      ret = walk_gimple_seq (gimple_bind_body (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_CATCH:
      ret = walk_gimple_seq (gimple_catch_handler (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_gimple_seq (gimple_eh_filter_failure (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_TRY:
      ret = walk_gimple_seq (gimple_try_eval (stmt), callback_stmt, callback_op,
                             wi);
      if (ret)
        return wi->callback_result;

      ret = walk_gimple_seq (gimple_try_cleanup (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_gimple_seq (gimple_omp_for_pre_body (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;

      /* FALL THROUGH.  */
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      ret = walk_gimple_seq (gimple_omp_body (stmt), callback_stmt, callback_op,
                             wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_WITH_CLEANUP_EXPR:
      ret = walk_gimple_seq (gimple_wce_cleanup (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    default:
      gcc_assert (!gimple_has_substatements (stmt));
      break;
    }

  return NULL;
}


/* Set sequence SEQ to be the GIMPLE body for function FN.  */

void
gimple_set_body (tree fndecl, gimple_seq seq)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  if (fn == NULL)
    {
      /* If FNDECL still does not have a function structure associated
         with it, then it does not make sense for it to receive a
         GIMPLE body.  */
      gcc_assert (seq == NULL);
    }
  else
    fn->gimple_body = seq;
}


/* Return the body of GIMPLE statements for function FN.  */

gimple_seq
gimple_body (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return fn ? fn->gimple_body : NULL;
}

/* Return true when FNDECL has a GIMPLE body, either in unlowered
   or CFG form.  */
1720 bool | |
1721 gimple_has_body_p (tree fndecl) | |
1722 { | |
1723 struct function *fn = DECL_STRUCT_FUNCTION (fndecl); | |
1724 return (gimple_body (fndecl) || (fn && fn->cfg)); | |
1725 } | |
1726 | |
1727 /* Detect flags from a GIMPLE_CALL. This is just like | |
1728 call_expr_flags, but for gimple tuples. */ | |
1729 | |
1730 int | |
1731 gimple_call_flags (const_gimple stmt) | |
1732 { | |
1733 int flags; | |
1734 tree decl = gimple_call_fndecl (stmt); | |
1735 tree t; | |
1736 | |
1737 if (decl) | |
1738 flags = flags_from_decl_or_type (decl); | |
1739 else | |
1740 { | |
1741 t = TREE_TYPE (gimple_call_fn (stmt)); | |
1742 if (t && TREE_CODE (t) == POINTER_TYPE) | |
1743 flags = flags_from_decl_or_type (TREE_TYPE (t)); | |
1744 else | |
1745 flags = 0; | |
1746 } | |
1747 | |
1748 return flags; | |
1749 } | |
1750 | |
1751 | |
1752 /* Return true if GS is a copy assignment. */ | |
1753 | |
1754 bool | |
1755 gimple_assign_copy_p (gimple gs) | |
1756 { | |
1757 return gimple_code (gs) == GIMPLE_ASSIGN | |
1758 && get_gimple_rhs_class (gimple_assign_rhs_code (gs)) | |
1759 == GIMPLE_SINGLE_RHS | |
1760 && is_gimple_val (gimple_op (gs, 1)); | |
1761 } | |
1762 | |
1763 | |
1764 /* Return true if GS is a SSA_NAME copy assignment. */ | |
1765 | |
1766 bool | |
1767 gimple_assign_ssa_name_copy_p (gimple gs) | |
1768 { | |
1769 return (gimple_code (gs) == GIMPLE_ASSIGN | |
1770 && (get_gimple_rhs_class (gimple_assign_rhs_code (gs)) | |
1771 == GIMPLE_SINGLE_RHS) | |
1772 && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME | |
1773 && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME); | |
1774 } | |
1775 | |
1776 | |
1777 /* Return true if GS is an assignment with a singleton RHS, i.e., | |
1778 there is no operator associated with the assignment itself. | |
1779 Unlike gimple_assign_copy_p, this predicate returns true for | |
1780 any RHS operand, including those that perform an operation | |
1781 and do not have the semantics of a copy, such as COND_EXPR. */ | |
1782 | |
1783 bool | |
1784 gimple_assign_single_p (gimple gs) | |
1785 { | |
1786 return (gimple_code (gs) == GIMPLE_ASSIGN | |
1787 && get_gimple_rhs_class (gimple_assign_rhs_code (gs)) | |
1788 == GIMPLE_SINGLE_RHS); | |
1789 } | |
1790 | |
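/* Illustrative examples for the three predicates above, assuming
   SSA names a_1, b_2 and pointer p_3:

     a_1 = b_2;      copy_p, ssa_name_copy_p and single_p all hold
     a_1 = b_2 + 1;  none hold (the RHS is GIMPLE_BINARY_RHS)
     a_1 = *p_3;     only single_p holds; *p_3 is not a gimple value  */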
1791 /* Return true if GS is an assignment with a unary RHS, but the | |
1792 operator has no effect on the assigned value. The logic is adapted | |
1793 from STRIP_NOPS. This predicate is intended to be used in tuplifying | |
1794 instances in which STRIP_NOPS was previously applied to the RHS of | |
1795 an assignment. | |
1796 | |
1797 NOTE: In the use cases that led to the creation of this function | |
1798 and of gimple_assign_single_p, it is typical to test for either | |
1799 condition and to proceed in the same manner. In each case, the | |
1800 assigned value is represented by the single RHS operand of the | |
1801 assignment. I suspect there may be cases where gimple_assign_copy_p, | |
1802 gimple_assign_single_p, or equivalent logic is used where a similar | |
1803 treatment of unary NOPs is appropriate. */ | |
1804 | |
1805 bool | |
1806 gimple_assign_unary_nop_p (gimple gs) | |
1807 { | |
1808 return (gimple_code (gs) == GIMPLE_ASSIGN | |
1809 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs)) | |
1810 || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR) | |
1811 && gimple_assign_rhs1 (gs) != error_mark_node | |
1812 && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs))) | |
1813 == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs))))); | |
1814 } | |
1815 | |
1816 /* Set BB to be the basic block holding G. */ | |
1817 | |
1818 void | |
1819 gimple_set_bb (gimple stmt, basic_block bb) | |
1820 { | |
1821 stmt->gsbase.bb = bb; | |
1822 | |
1823 /* If the statement is a label, add the label to the block-to-labels | |
1824 map so that we can speed up edge creation for GIMPLE_GOTOs. */ | |
1825 if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL) | |
1826 { | |
1827 tree t; | |
1828 int uid; | |
1829 | |
1830 t = gimple_label_label (stmt); | |
1831 uid = LABEL_DECL_UID (t); | |
1832 if (uid == -1) | |
1833 { | |
1834 unsigned old_len = VEC_length (basic_block, label_to_block_map); | |
1835 LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++; | |
1836 if (old_len <= (unsigned) uid) | |
1837 { | |
1838 unsigned new_len = 3 * uid / 2 + 1; | |
1839 | |
1840 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map, | |
1841 new_len); | |
1842 } | |
1843 } | |
1844 | |
1845 VEC_replace (basic_block, label_to_block_map, uid, bb); | |
1846 } | |
1847 } | |
1848 | |
1849 | |
1850 /* Fold the expression computed by STMT. If the expression can be | |
1851 folded, return the folded result, otherwise return NULL. STMT is | |
1852 not modified. */ | |
1853 | |
1854 tree | |
1855 gimple_fold (const_gimple stmt) | |
1856 { | |
1857 location_t loc = gimple_location (stmt); | |
1858 switch (gimple_code (stmt)) | |
1859 { | |
1860 case GIMPLE_COND: | |
1861 return fold_binary_loc (loc, gimple_cond_code (stmt), | |
1862 boolean_type_node, | |
1863 gimple_cond_lhs (stmt), | |
1864 gimple_cond_rhs (stmt)); | |
1865 | |
1866 case GIMPLE_ASSIGN: | |
1867 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))) | |
1868 { | |
1869 case GIMPLE_UNARY_RHS: | |
1870 return fold_unary_loc (loc, gimple_assign_rhs_code (stmt), | |
1871 TREE_TYPE (gimple_assign_lhs (stmt)), | |
1872 gimple_assign_rhs1 (stmt)); | |
1873 case GIMPLE_BINARY_RHS: | |
1874 return fold_binary_loc (loc, gimple_assign_rhs_code (stmt), | |
1875 TREE_TYPE (gimple_assign_lhs (stmt)), | |
1876 gimple_assign_rhs1 (stmt), | |
1877 gimple_assign_rhs2 (stmt)); | |
1878 case GIMPLE_SINGLE_RHS: | |
1879 return fold (gimple_assign_rhs1 (stmt)); | |
1880 default:; | |
1881 } | |
1882 break; | |
1883 | |
1884 case GIMPLE_SWITCH: | |
1885 return gimple_switch_index (stmt); | |
1886 | |
1887 case GIMPLE_CALL: | |
1888 return NULL_TREE; | |
1889 | |
1890 default: | |
1891 break; | |
1892 } | |
1893 | |
1894 gcc_unreachable (); | |
1895 } | |
1896 | |
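/* Illustrative sketch (hypothetical helper): a typical use of
   gimple_fold.  The statement itself is left untouched, so a caller
   that wants the simplification has to install the result, here via
   gimple_assign_set_rhs_from_tree below.  This sketch assumes the
   folded tree is a valid GIMPLE operand; real callers must
   re-gimplify when it is not.  */

static bool
try_fold_assign_example (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree folded;

  /* gimple_fold asserts on statement codes it does not handle, so
     restrict the sketch to assignments.  */
  if (!is_gimple_assign (stmt))
    return false;

  folded = gimple_fold (stmt);
  if (folded == NULL_TREE)
    return false;

  gimple_assign_set_rhs_from_tree (gsi, folded);
  return true;
}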
1897 | |
1898 /* Modify the RHS of the assignment pointed-to by GSI using the | |
1899 operands in the expression tree EXPR. | |
1900 | |
1901 NOTE: The statement pointed-to by GSI may be reallocated if it | |
1902 did not have enough operand slots. | |
1903 | |
1904 This function is useful to convert an existing tree expression into | |
1905 the flat representation used for the RHS of a GIMPLE assignment. | |
1906 It will reallocate memory as needed to expand or shrink the number | |
1907 of operand slots needed to represent EXPR. | |
1908 | |
1909 NOTE: If you find yourself building a tree and then calling this | |
1910 function, you are most certainly doing it the slow way. It is much | |
1911 better to build a new assignment or to use the function | |
1912 gimple_assign_set_rhs_with_ops, which does not require an | |
1913 expression tree to be built. */ | |
1914 | |
1915 void | |
1916 gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr) | |
1917 { | |
1918 enum tree_code subcode; | |
1919 tree op1, op2; | |
1920 | |
1921 extract_ops_from_tree (expr, &subcode, &op1, &op2); | |
1922 gimple_assign_set_rhs_with_ops (gsi, subcode, op1, op2); | |
1923 } | |
1924 | |
1925 | |
1926 /* Set the RHS of assignment statement pointed-to by GSI to CODE with | |
1927 operands OP1 and OP2. | |
1928 | |
1929 NOTE: The statement pointed-to by GSI may be reallocated if it | |
1930 did not have enough operand slots. */ | |
1931 | |
1932 void | |
1933 gimple_assign_set_rhs_with_ops (gimple_stmt_iterator *gsi, enum tree_code code, | |
1934 tree op1, tree op2) | |
1935 { | |
1936 unsigned new_rhs_ops = get_gimple_rhs_num_ops (code); | |
1937 gimple stmt = gsi_stmt (*gsi); | |
1938 | |
1939 /* If the new CODE needs more operands, allocate a new statement. */ | |
1940 if (gimple_num_ops (stmt) < new_rhs_ops + 1) | |
1941 { | |
1942 tree lhs = gimple_assign_lhs (stmt); | |
1943 gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1); | |
1944 memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt))); | |
1945 gsi_replace (gsi, new_stmt, true); | |
1946 stmt = new_stmt; | |
1947 | |
1948 /* The LHS needs to be reset as this also changes the SSA name | |
1949 on the LHS. */ | |
1950 gimple_assign_set_lhs (stmt, lhs); | |
1951 } | |
1952 | |
1953 gimple_set_num_ops (stmt, new_rhs_ops + 1); | |
1954 gimple_set_subcode (stmt, code); | |
1955 gimple_assign_set_rhs1 (stmt, op1); | |
1956 if (new_rhs_ops > 1) | |
1957 gimple_assign_set_rhs2 (stmt, op2); | |
1958 } | |
1959 | |
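/* Illustrative sketch (hypothetical helper): following the note
   before gimple_assign_set_rhs_from_tree, rewrite the RHS of the
   assignment at *GSI as OP1 + OP2 directly, instead of building a
   PLUS_EXPR tree only to have it taken apart again.  */

static void
set_rhs_to_sum_example (gimple_stmt_iterator *gsi, tree op1, tree op2)
{
  gimple_assign_set_rhs_with_ops (gsi, PLUS_EXPR, op1, op2);
}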
1960 | |
1961 /* Return the LHS of a statement that performs an assignment, | |
1962 either a GIMPLE_ASSIGN or a GIMPLE_CALL. Returns NULL_TREE | |
1963 for a call to a function that returns no value, or for a | |
1964 statement other than an assignment or a call. */ | |
1965 | |
1966 tree | |
1967 gimple_get_lhs (const_gimple stmt) | |
1968 { | |
1969 enum gimple_code code = gimple_code (stmt); | |
1970 | |
1971 if (code == GIMPLE_ASSIGN) | |
1972 return gimple_assign_lhs (stmt); | |
1973 else if (code == GIMPLE_CALL) | |
1974 return gimple_call_lhs (stmt); | |
1975 else | |
1976 return NULL_TREE; | |
1977 } | |
1978 | |
1979 | |
1980 /* Set the LHS of a statement that performs an assignment, | |
1981 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */ | |
1982 | |
1983 void | |
1984 gimple_set_lhs (gimple stmt, tree lhs) | |
1985 { | |
1986 enum gimple_code code = gimple_code (stmt); | |
1987 | |
1988 if (code == GIMPLE_ASSIGN) | |
1989 gimple_assign_set_lhs (stmt, lhs); | |
1990 else if (code == GIMPLE_CALL) | |
1991 gimple_call_set_lhs (stmt, lhs); | |
1992 else | |
1993 gcc_unreachable(); | |
1994 } | |
1995 | |
1996 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a | |
1997 GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an | |
1998 expression with a different value. | |
1999 | |
2000 This will update any annotations (say debug bind stmts) referring | |
2001 to the original LHS, so that they use the RHS instead. This is | |
2002 done even if NLHS and LHS are the same, for it is understood that | |
2003 the RHS will be modified afterwards, and NLHS will not be assigned | |
2004 an equivalent value. | |
2005 | |
2006 Adjusting any non-annotation uses of the LHS, if needed, is a | |
2007 responsibility of the caller. | |
2008 | |
2009 The effect of this call should be pretty much the same as that of | |
2010 inserting a copy of STMT before STMT, and then removing the | |
2011 original stmt, at which time gsi_remove() would have updated the | |
2012 annotations, but using this function saves all the inserting, | |
2013 copying and removing. */ | |
2014 | |
2015 void | |
2016 gimple_replace_lhs (gimple stmt, tree nlhs) | |
2017 { | |
2018 if (MAY_HAVE_DEBUG_STMTS) | |
2019 { | |
2020 tree lhs = gimple_get_lhs (stmt); | |
2021 | |
2022 gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt); | |
2023 | |
2024 insert_debug_temp_for_var_def (NULL, lhs); | |
2025 } | |
2026 | |
2027 gimple_set_lhs (stmt, nlhs); | |
2028 } | |
2029 | |
2030 /* Return a deep copy of statement STMT. All the operands from STMT | |
2031 are reallocated and copied using unshare_expr. The DEF, USE, VDEF | |
2032 and VUSE operand arrays are set to empty in the new copy. */ | |
2033 | |
2034 gimple | |
2035 gimple_copy (gimple stmt) | |
2036 { | |
2037 enum gimple_code code = gimple_code (stmt); | |
2038 unsigned num_ops = gimple_num_ops (stmt); | |
2039 gimple copy = gimple_alloc (code, num_ops); | |
2040 unsigned i; | |
2041 | |
2042 /* Shallow copy all the fields from STMT. */ | |
2043 memcpy (copy, stmt, gimple_size (code)); | |
2044 | |
2045 /* If STMT has sub-statements, deep-copy them as well. */ | |
2046 if (gimple_has_substatements (stmt)) | |
2047 { | |
2048 gimple_seq new_seq; | |
2049 tree t; | |
2050 | |
2051 switch (gimple_code (stmt)) | |
2052 { | |
2053 case GIMPLE_BIND: | |
2054 new_seq = gimple_seq_copy (gimple_bind_body (stmt)); | |
2055 gimple_bind_set_body (copy, new_seq); | |
2056 gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt))); | |
2057 gimple_bind_set_block (copy, gimple_bind_block (stmt)); | |
2058 break; | |
2059 | |
2060 case GIMPLE_CATCH: | |
2061 new_seq = gimple_seq_copy (gimple_catch_handler (stmt)); | |
2062 gimple_catch_set_handler (copy, new_seq); | |
2063 t = unshare_expr (gimple_catch_types (stmt)); | |
2064 gimple_catch_set_types (copy, t); | |
2065 break; | |
2066 | |
2067 case GIMPLE_EH_FILTER: | |
2068 new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt)); | |
2069 gimple_eh_filter_set_failure (copy, new_seq); | |
2070 t = unshare_expr (gimple_eh_filter_types (stmt)); | |
2071 gimple_eh_filter_set_types (copy, t); | |
2072 break; | |
2073 | |
2074 case GIMPLE_TRY: | |
2075 new_seq = gimple_seq_copy (gimple_try_eval (stmt)); | |
2076 gimple_try_set_eval (copy, new_seq); | |
2077 new_seq = gimple_seq_copy (gimple_try_cleanup (stmt)); | |
2078 gimple_try_set_cleanup (copy, new_seq); | |
2079 break; | |
2080 | |
2081 case GIMPLE_OMP_FOR: | |
2082 new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt)); | |
2083 gimple_omp_for_set_pre_body (copy, new_seq); | |
2084 t = unshare_expr (gimple_omp_for_clauses (stmt)); | |
2085 gimple_omp_for_set_clauses (copy, t); | |
2086 copy->gimple_omp_for.iter | |
2087 = GGC_NEWVEC (struct gimple_omp_for_iter, | |
2088 gimple_omp_for_collapse (stmt)); | |
2089 for (i = 0; i < gimple_omp_for_collapse (stmt); i++) | |
2090 { | |
2091 gimple_omp_for_set_cond (copy, i, | |
2092 gimple_omp_for_cond (stmt, i)); | |
2093 gimple_omp_for_set_index (copy, i, | |
2094 gimple_omp_for_index (stmt, i)); | |
2095 t = unshare_expr (gimple_omp_for_initial (stmt, i)); | |
2096 gimple_omp_for_set_initial (copy, i, t); | |
2097 t = unshare_expr (gimple_omp_for_final (stmt, i)); | |
2098 gimple_omp_for_set_final (copy, i, t); | |
2099 t = unshare_expr (gimple_omp_for_incr (stmt, i)); | |
2100 gimple_omp_for_set_incr (copy, i, t); | |
2101 } | |
2102 goto copy_omp_body; | |
2103 | |
2104 case GIMPLE_OMP_PARALLEL: | |
2105 t = unshare_expr (gimple_omp_parallel_clauses (stmt)); | |
2106 gimple_omp_parallel_set_clauses (copy, t); | |
2107 t = unshare_expr (gimple_omp_parallel_child_fn (stmt)); | |
2108 gimple_omp_parallel_set_child_fn (copy, t); | |
2109 t = unshare_expr (gimple_omp_parallel_data_arg (stmt)); | |
2110 gimple_omp_parallel_set_data_arg (copy, t); | |
2111 goto copy_omp_body; | |
2112 | |
2113 case GIMPLE_OMP_TASK: | |
2114 t = unshare_expr (gimple_omp_task_clauses (stmt)); | |
2115 gimple_omp_task_set_clauses (copy, t); | |
2116 t = unshare_expr (gimple_omp_task_child_fn (stmt)); | |
2117 gimple_omp_task_set_child_fn (copy, t); | |
2118 t = unshare_expr (gimple_omp_task_data_arg (stmt)); | |
2119 gimple_omp_task_set_data_arg (copy, t); | |
2120 t = unshare_expr (gimple_omp_task_copy_fn (stmt)); | |
2121 gimple_omp_task_set_copy_fn (copy, t); | |
2122 t = unshare_expr (gimple_omp_task_arg_size (stmt)); | |
2123 gimple_omp_task_set_arg_size (copy, t); | |
2124 t = unshare_expr (gimple_omp_task_arg_align (stmt)); | |
2125 gimple_omp_task_set_arg_align (copy, t); | |
2126 goto copy_omp_body; | |
2127 | |
2128 case GIMPLE_OMP_CRITICAL: | |
2129 t = unshare_expr (gimple_omp_critical_name (stmt)); | |
2130 gimple_omp_critical_set_name (copy, t); | |
2131 goto copy_omp_body; | |
2132 | |
2133 case GIMPLE_OMP_SECTIONS: | |
2134 t = unshare_expr (gimple_omp_sections_clauses (stmt)); | |
2135 gimple_omp_sections_set_clauses (copy, t); | |
2136 t = unshare_expr (gimple_omp_sections_control (stmt)); | |
2137 gimple_omp_sections_set_control (copy, t); | |
2138 /* FALLTHRU */ | |
2139 | |
2140 case GIMPLE_OMP_SINGLE: | |
2141 case GIMPLE_OMP_SECTION: | |
2142 case GIMPLE_OMP_MASTER: | |
2143 case GIMPLE_OMP_ORDERED: | |
2144 copy_omp_body: | |
2145 new_seq = gimple_seq_copy (gimple_omp_body (stmt)); | |
2146 gimple_omp_set_body (copy, new_seq); | |
2147 break; | |
2148 | |
2149 case GIMPLE_WITH_CLEANUP_EXPR: | |
2150 new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt)); | |
2151 gimple_wce_set_cleanup (copy, new_seq); | |
2152 break; | |
2153 | |
2154 default: | |
2155 gcc_unreachable (); | |
2156 } | |
2157 } | |
2158 | |
2159 /* Make copy of operands. */ | |
2160 if (num_ops > 0) | |
2161 { | |
2162 for (i = 0; i < num_ops; i++) | |
2163 gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i))); | |
2164 | |
2165 /* Clear out SSA operand vectors on COPY. */ | |
2166 if (gimple_has_ops (stmt)) | |
2167 { | |
2168 gimple_set_def_ops (copy, NULL); | |
2169 gimple_set_use_ops (copy, NULL); | |
2170 } | |
2171 | |
2172 if (gimple_has_mem_ops (stmt)) | |
2173 { | |
2174 gimple_set_vdef (copy, gimple_vdef (stmt)); | |
2175 gimple_set_vuse (copy, gimple_vuse (stmt)); | |
2176 } | |
2177 | |
2178 /* SSA operands need to be updated. */ | |
2179 gimple_set_modified (copy, true); | |
2180 } | |
2181 | |
2182 return copy; | |
2183 } | |
2184 | |
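/* Illustrative sketch (hypothetical helper): gimple_copy pairs
   naturally with the iterator API.  Since the copy is marked modified
   and its SSA operand vectors are cleared, the operand scanner will
   rebuild them on the next update.  */

static void
duplicate_before_example (gimple_stmt_iterator *gsi, gimple stmt)
{
  gsi_insert_before (gsi, gimple_copy (stmt), GSI_SAME_STMT);
}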
2185 | |
2186 /* Set the MODIFIED flag to MODIFIEDP, iff the gimple statement G has | |
2187 a MODIFIED field. */ | |
2188 | |
2189 void | |
2190 gimple_set_modified (gimple s, bool modifiedp) | |
2191 { | |
2192 if (gimple_has_ops (s)) | |
2193 { | |
2194 s->gsbase.modified = (unsigned) modifiedp; | |
2195 | |
2196 if (modifiedp | |
2197 && cfun->gimple_df | |
2198 && is_gimple_call (s) | |
2199 && gimple_call_noreturn_p (s)) | |
2200 VEC_safe_push (gimple, gc, MODIFIED_NORETURN_CALLS (cfun), s); | |
2201 } | |
2202 } | |
2203 | |
2204 | |
2205 /* Return true if statement S has side-effects. We consider a | |
2206 statement to have side effects if: | |
2207 | |
2208 - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST. | |
2209 - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */ | |
2210 | |
2211 bool | |
2212 gimple_has_side_effects (const_gimple s) | |
2213 { | |
2214 unsigned i; | |
2215 | |
2216 if (is_gimple_debug (s)) | |
2217 return false; | |
2218 | |
2219 /* We don't have to scan the arguments to check for | |
2220 volatile arguments; at present, though, we still | |
2221 scan them to check for TREE_SIDE_EFFECTS. */ | |
2222 if (gimple_has_volatile_ops (s)) | |
2223 return true; | |
2224 | |
2225 if (is_gimple_call (s)) | |
2226 { | |
2227 unsigned nargs = gimple_call_num_args (s); | |
2228 | |
2229 if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE))) | |
2230 return true; | |
2231 else if (gimple_call_flags (s) & ECF_LOOPING_CONST_OR_PURE) | |
2232 /* An infinite loop is considered a side effect. */ | |
2233 return true; | |
2234 | |
2235 if (gimple_call_lhs (s) | |
2236 && TREE_SIDE_EFFECTS (gimple_call_lhs (s))) | |
2237 { | |
2238 gcc_assert (gimple_has_volatile_ops (s)); | |
2239 return true; | |
2240 } | |
2241 | |
2242 if (TREE_SIDE_EFFECTS (gimple_call_fn (s))) | |
2243 return true; | |
2244 | |
2245 for (i = 0; i < nargs; i++) | |
2246 if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i))) | |
2247 { | |
2248 gcc_assert (gimple_has_volatile_ops (s)); | |
2249 return true; | |
2250 } | |
2251 | |
2252 return false; | |
2253 } | |
2254 else | |
2255 { | |
2256 for (i = 0; i < gimple_num_ops (s); i++) | |
2257 if (TREE_SIDE_EFFECTS (gimple_op (s, i))) | |
2258 { | |
2259 gcc_assert (gimple_has_volatile_ops (s)); | |
2260 return true; | |
2261 } | |
2262 } | |
2263 | |
2264 return false; | |
2265 } | |
2266 | |
2267 /* Return true if the RHS of statement S has side effects. | |
2268 We may use it to determine if it is admissible to replace | |
2269 an assignment or call with a copy of a previously-computed | |
2270 value. In such cases, side-effects due to the LHS are | |
2271 preserved. */ | |
2272 | |
2273 bool | |
2274 gimple_rhs_has_side_effects (const_gimple s) | |
2275 { | |
2276 unsigned i; | |
2277 | |
2278 if (is_gimple_call (s)) | |
2279 { | |
2280 unsigned nargs = gimple_call_num_args (s); | |
2281 | |
2282 if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE))) | |
2283 return true; | |
2284 | |
2285 /* We cannot use gimple_has_volatile_ops here, | |
2286 because we must ignore a volatile LHS. */ | |
2287 if (TREE_SIDE_EFFECTS (gimple_call_fn (s)) | |
2288 || TREE_THIS_VOLATILE (gimple_call_fn (s))) | |
2289 { | |
2290 gcc_assert (gimple_has_volatile_ops (s)); | |
2291 return true; | |
2292 } | |
2293 | |
2294 for (i = 0; i < nargs; i++) | |
2295 if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i)) | |
2296 || TREE_THIS_VOLATILE (gimple_call_arg (s, i))) | |
2297 return true; | |
2298 | |
2299 return false; | |
2300 } | |
2301 else if (is_gimple_assign (s)) | |
2302 { | |
2303 /* Skip the first operand, the LHS. */ | |
2304 for (i = 1; i < gimple_num_ops (s); i++) | |
2305 if (TREE_SIDE_EFFECTS (gimple_op (s, i)) | |
2306 || TREE_THIS_VOLATILE (gimple_op (s, i))) | |
2307 { | |
2308 gcc_assert (gimple_has_volatile_ops (s)); | |
2309 return true; | |
2310 } | |
2311 } | |
2312 else if (is_gimple_debug (s)) | |
2313 return false; | |
2314 else | |
2315 { | |
2316 /* For statements without an LHS, examine all arguments. */ | |
2317 for (i = 0; i < gimple_num_ops (s); i++) | |
2318 if (TREE_SIDE_EFFECTS (gimple_op (s, i)) | |
2319 || TREE_THIS_VOLATILE (gimple_op (s, i))) | |
2320 { | |
2321 gcc_assert (gimple_has_volatile_ops (s)); | |
2322 return true; | |
2323 } | |
2324 } | |
2325 | |
2326 return false; | |
2327 } | |
2328 | |
2329 | |
2330 /* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p. | |
2331 Return true if S can trap. If INCLUDE_LHS is true and S is a | |
2332 GIMPLE_ASSIGN, the LHS of the assignment is also checked. | |
2333 Otherwise, only the RHS of the assignment is checked. */ | |
2334 | |
2335 static bool | |
2336 gimple_could_trap_p_1 (gimple s, bool include_lhs) | |
2337 { | |
2338 unsigned i, start; | |
2339 tree t, div = NULL_TREE; | |
2340 enum tree_code op; | |
2341 | |
2342 start = (is_gimple_assign (s) && !include_lhs) ? 1 : 0; | |
2343 | |
2344 for (i = start; i < gimple_num_ops (s); i++) | |
2345 if (tree_could_trap_p (gimple_op (s, i))) | |
2346 return true; | |
2347 | |
2348 switch (gimple_code (s)) | |
2349 { | |
2350 case GIMPLE_ASM: | |
2351 return gimple_asm_volatile_p (s); | |
2352 | |
2353 case GIMPLE_CALL: | |
2354 t = gimple_call_fndecl (s); | |
2355 /* Assume that calls to weak functions may trap. */ | |
2356 if (!t || !DECL_P (t) || DECL_WEAK (t)) | |
2357 return true; | |
2358 return false; | |
2359 | |
2360 case GIMPLE_ASSIGN: | |
2361 t = gimple_expr_type (s); | |
2362 op = gimple_assign_rhs_code (s); | |
2363 if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS) | |
2364 div = gimple_assign_rhs2 (s); | |
2365 return (operation_could_trap_p (op, FLOAT_TYPE_P (t), | |
2366 (INTEGRAL_TYPE_P (t) | |
2367 && TYPE_OVERFLOW_TRAPS (t)), | |
2368 div)); | |
2369 | |
2370 default: | |
2371 break; | |
2372 } | |
2373 | |
2374 return false; | |
2375 | |
2376 } | |
2377 | |
2378 | |
2379 /* Return true if statement S can trap. */ | |
2380 | |
2381 bool | |
2382 gimple_could_trap_p (gimple s) | |
2383 { | |
2384 return gimple_could_trap_p_1 (s, true); | |
2385 } | |
2386 | |
2387 | |
2388 /* Return true if RHS of a GIMPLE_ASSIGN S can trap. */ | |
2389 | |
2390 bool | |
2391 gimple_assign_rhs_could_trap_p (gimple s) | |
2392 { | |
2393 gcc_assert (is_gimple_assign (s)); | |
2394 return gimple_could_trap_p_1 (s, false); | |
2395 } | |
2396 | |
2397 | |
2398 /* Print debugging information for gimple stmts generated. */ | |
2399 | |
2400 void | |
2401 dump_gimple_statistics (void) | |
2402 { | |
2403 #ifdef GATHER_STATISTICS | |
2404 int i, total_tuples = 0, total_bytes = 0; | |
2405 | |
2406 fprintf (stderr, "\nGIMPLE statements\n"); | |
2407 fprintf (stderr, "Kind Stmts Bytes\n"); | |
2408 fprintf (stderr, "---------------------------------------\n"); | |
2409 for (i = 0; i < (int) gimple_alloc_kind_all; ++i) | |
2410 { | |
2411 fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i], | |
2412 gimple_alloc_counts[i], gimple_alloc_sizes[i]); | |
2413 total_tuples += gimple_alloc_counts[i]; | |
2414 total_bytes += gimple_alloc_sizes[i]; | |
2415 } | |
2416 fprintf (stderr, "---------------------------------------\n"); | |
2417 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes); | |
2418 fprintf (stderr, "---------------------------------------\n"); | |
2419 #else | |
2420 fprintf (stderr, "No gimple statistics\n"); | |
2421 #endif | |
2422 } | |
2423 | |
2424 | |
2425 /* Return the number of operands needed on the RHS of a GIMPLE | |
2426 assignment for an expression with tree code CODE. */ | |
2427 | |
2428 unsigned | |
2429 get_gimple_rhs_num_ops (enum tree_code code) | |
2430 { | |
2431 enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code); | |
2432 | |
2433 if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS) | |
2434 return 1; | |
2435 else if (rhs_class == GIMPLE_BINARY_RHS) | |
2436 return 2; | |
2437 else | |
2438 gcc_unreachable (); | |
2439 } | |
2440 | |
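/* Illustrative examples of the counts returned above, based on the
   rhs-class table defined below:

     get_gimple_rhs_num_ops (PLUS_EXPR)   == 2  (GIMPLE_BINARY_RHS)
     get_gimple_rhs_num_ops (NEGATE_EXPR) == 1  (GIMPLE_UNARY_RHS)
     get_gimple_rhs_num_ops (SSA_NAME)    == 1  (GIMPLE_SINGLE_RHS)  */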
2441 #define DEFTREECODE(SYM, STRING, TYPE, NARGS) \ | |
2442 (unsigned char) \ | |
2443 ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS \ | |
2444 : ((TYPE) == tcc_binary \ | |
2445 || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS \ | |
2446 : ((TYPE) == tcc_constant \ | |
2447 || (TYPE) == tcc_declaration \ | |
2448 || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS \ | |
2449 : ((SYM) == TRUTH_AND_EXPR \ | |
2450 || (SYM) == TRUTH_OR_EXPR \ | |
2451 || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \ | |
2452 : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \ | |
2453 : ((SYM) == COND_EXPR \ | |
2454 || (SYM) == CONSTRUCTOR \ | |
2455 || (SYM) == OBJ_TYPE_REF \ | |
2456 || (SYM) == ASSERT_EXPR \ | |
2457 || (SYM) == ADDR_EXPR \ | |
2458 || (SYM) == WITH_SIZE_EXPR \ | |
2459 || (SYM) == SSA_NAME \ | |
2460 || (SYM) == POLYNOMIAL_CHREC \ | |
2461 || (SYM) == DOT_PROD_EXPR \ | |
2462 || (SYM) == VEC_COND_EXPR \ | |
2463 || (SYM) == REALIGN_LOAD_EXPR) ? GIMPLE_SINGLE_RHS \ | |
2464 : GIMPLE_INVALID_RHS), | |
2465 #define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS, | |
2466 | |
2467 const unsigned char gimple_rhs_class_table[] = { | |
2468 #include "all-tree.def" | |
2469 }; | |
2470 | |
2471 #undef DEFTREECODE | |
2472 #undef END_OF_BASE_TREE_CODES | |
2473 | |
2474 /* For the definitive definition of GIMPLE, see doc/tree-ssa.texi. */ | |
2475 | |
2476 /* Validation of GIMPLE expressions. */ | |
2477 | |
2478 /* Return true if OP is an acceptable tree node to be used as a GIMPLE | |
2479 operand. */ | |
2480 | |
2481 bool | |
2482 is_gimple_operand (const_tree op) | |
2483 { | |
2484 return op && get_gimple_rhs_class (TREE_CODE (op)) == GIMPLE_SINGLE_RHS; | |
2485 } | |
2486 | |
2487 /* Returns true iff T is a valid RHS for an assignment to a renamed | |
2488 user -- or front-end generated artificial -- variable. */ | |
2489 | |
2490 bool | |
2491 is_gimple_reg_rhs (tree t) | |
2492 { | |
2493 return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS; | |
2494 } | |
2495 | |
2496 /* Returns true iff T is a valid RHS for an assignment to an un-renamed | |
2497 LHS, or for a call argument. */ | |
2498 | |
2499 bool | |
2500 is_gimple_mem_rhs (tree t) | |
2501 { | |
2502 /* If we're dealing with a renamable type, either source or dest must be | |
2503 a renamed variable. */ | |
2504 if (is_gimple_reg_type (TREE_TYPE (t))) | |
2505 return is_gimple_val (t); | |
2506 else | |
2507 return is_gimple_val (t) || is_gimple_lvalue (t); | |
2508 } | |
2509 | |
2510 /* Return true if T is a valid LHS for a GIMPLE assignment expression. */ | |
2511 | |
2512 bool | |
2513 is_gimple_lvalue (tree t) | |
2514 { | |
2515 return (is_gimple_addressable (t) | |
2516 || TREE_CODE (t) == WITH_SIZE_EXPR | |
2517 /* These are complex lvalues, but don't have addresses, so they | |
2518 go here. */ | |
2519 || TREE_CODE (t) == BIT_FIELD_REF); | |
2520 } | |
2521 | |
2522 /* Return true if T is a GIMPLE condition. */ | |
2523 | |
2524 bool | |
2525 is_gimple_condexpr (tree t) | |
2526 { | |
2527 return (is_gimple_val (t) || (COMPARISON_CLASS_P (t) | |
2528 && !tree_could_trap_p (t) | |
2529 && is_gimple_val (TREE_OPERAND (t, 0)) | |
2530 && is_gimple_val (TREE_OPERAND (t, 1)))); | |
2531 } | |
2532 | |
2533 /* Return true if T is something whose address can be taken. */ | |
2534 | |
2535 bool | |
2536 is_gimple_addressable (tree t) | |
2537 { | |
2538 return (is_gimple_id (t) || handled_component_p (t) || INDIRECT_REF_P (t)); | |
2539 } | |
2540 | |
2541 /* Return true if T is a valid gimple constant. */ | |
2542 | |
2543 bool | |
2544 is_gimple_constant (const_tree t) | |
2545 { | |
2546 switch (TREE_CODE (t)) | |
2547 { | |
2548 case INTEGER_CST: | |
2549 case REAL_CST: | |
2550 case FIXED_CST: | |
2551 case STRING_CST: | |
2552 case COMPLEX_CST: | |
2553 case VECTOR_CST: | |
2554 return true; | |
2555 | |
2556 /* Vector constant constructors are gimple invariant. */ | |
2557 case CONSTRUCTOR: | |
2558 if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE) | |
2559 return TREE_CONSTANT (t); | |
2560 else | |
2561 return false; | |
2562 | |
2563 default: | |
2564 return false; | |
2565 } | |
2566 } | |
2567 | |
2568 /* Return true if T is a gimple address. */ | |
2569 | |
2570 bool | |
2571 is_gimple_address (const_tree t) | |
2572 { | |
2573 tree op; | |
2574 | |
2575 if (TREE_CODE (t) != ADDR_EXPR) | |
2576 return false; | |
2577 | |
2578 op = TREE_OPERAND (t, 0); | |
2579 while (handled_component_p (op)) | |
2580 { | |
2581 if ((TREE_CODE (op) == ARRAY_REF | |
2582 || TREE_CODE (op) == ARRAY_RANGE_REF) | |
2583 && !is_gimple_val (TREE_OPERAND (op, 1))) | |
2584 return false; | |
2585 | |
2586 op = TREE_OPERAND (op, 0); | |
2587 } | |
2588 | |
2589 if (CONSTANT_CLASS_P (op) || INDIRECT_REF_P (op)) | |
2590 return true; | |
2591 | |
2592 switch (TREE_CODE (op)) | |
2593 { | |
2594 case PARM_DECL: | |
2595 case RESULT_DECL: | |
2596 case LABEL_DECL: | |
2597 case FUNCTION_DECL: | |
2598 case VAR_DECL: | |
2599 case CONST_DECL: | |
2600 return true; | |
2601 | |
2602 default: | |
2603 return false; | |
2604 } | |
2605 } | |
2606 | |
2607 /* Strip out all handled components that produce invariant | |
2608 offsets. */ | |
2609 | |
2610 static const_tree | |
2611 strip_invariant_refs (const_tree op) | |
2612 { | |
2613 while (handled_component_p (op)) | |
2614 { | |
2615 switch (TREE_CODE (op)) | |
2616 { | |
2617 case ARRAY_REF: | |
2618 case ARRAY_RANGE_REF: | |
2619 if (!is_gimple_constant (TREE_OPERAND (op, 1)) | |
2620 || TREE_OPERAND (op, 2) != NULL_TREE | |
2621 || TREE_OPERAND (op, 3) != NULL_TREE) | |
2622 return NULL; | |
2623 break; | |
2624 | |
2625 case COMPONENT_REF: | |
2626 if (TREE_OPERAND (op, 2) != NULL_TREE) | |
2627 return NULL; | |
2628 break; | |
2629 | |
2630 default:; | |
2631 } | |
2632 op = TREE_OPERAND (op, 0); | |
2633 } | |
2634 | |
2635 return op; | |
2636 } | |
2637 | |
2638 /* Return true if T is a gimple invariant address. */ | |
2639 | |
2640 bool | |
2641 is_gimple_invariant_address (const_tree t) | |
2642 { | |
2643 const_tree op; | |
2644 | |
2645 if (TREE_CODE (t) != ADDR_EXPR) | |
2646 return false; | |
2647 | |
2648 op = strip_invariant_refs (TREE_OPERAND (t, 0)); | |
2649 | |
2650 return op && (CONSTANT_CLASS_P (op) || decl_address_invariant_p (op)); | |
2651 } | |
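/* Illustrative examples, assuming a file-scope array ARR: the address
   &arr[3] is an invariant address, while &arr[i] with a variable
   index is not, since strip_invariant_refs rejects an ARRAY_REF whose
   index is not a gimple constant.  */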
2652 | |
2653 /* Return true if T is a gimple invariant address at IPA level | |
2654 (so addresses of variables on stack are not allowed). */ | |
2655 | |
2656 bool | |
2657 is_gimple_ip_invariant_address (const_tree t) | |
2658 { | |
2659 const_tree op; | |
2660 | |
2661 if (TREE_CODE (t) != ADDR_EXPR) | |
2662 return false; | |
2663 | |
2664 op = strip_invariant_refs (TREE_OPERAND (t, 0)); | |
2665 | |
2666 return op && (CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op)); | |
2667 } | |
2668 | |
2669 /* Return true if T is a GIMPLE minimal invariant. It's a restricted | |
2670 form of function invariant. */ | |
2671 | |
2672 bool | |
2673 is_gimple_min_invariant (const_tree t) | |
2674 { | |
2675 if (TREE_CODE (t) == ADDR_EXPR) | |
2676 return is_gimple_invariant_address (t); | |
2677 | |
2678 return is_gimple_constant (t); | |
2679 } | |
2680 | |
2681 /* Return true if T is a GIMPLE interprocedural invariant. It's a restricted | |
2682 form of gimple minimal invariant. */ | |
2683 | |
2684 bool | |
2685 is_gimple_ip_invariant (const_tree t) | |
2686 { | |
2687 if (TREE_CODE (t) == ADDR_EXPR) | |
2688 return is_gimple_ip_invariant_address (t); | |
2689 | |
2690 return is_gimple_constant (t); | |
2691 } | |
2692 | |
2693 /* Return true if T looks like a valid GIMPLE statement. */ | |
2694 | |
2695 bool | |
2696 is_gimple_stmt (tree t) | |
2697 { | |
2698 const enum tree_code code = TREE_CODE (t); | |
2699 | |
2700 switch (code) | |
2701 { | |
2702 case NOP_EXPR: | |
2703 /* The only valid NOP_EXPR is the empty statement. */ | |
2704 return IS_EMPTY_STMT (t); | |
2705 | |
2706 case BIND_EXPR: | |
2707 case COND_EXPR: | |
2708 /* These are only valid if they're void. */ | |
2709 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t)); | |
2710 | |
2711 case SWITCH_EXPR: | |
2712 case GOTO_EXPR: | |
2713 case RETURN_EXPR: | |
2714 case LABEL_EXPR: | |
2715 case CASE_LABEL_EXPR: | |
2716 case TRY_CATCH_EXPR: | |
2717 case TRY_FINALLY_EXPR: | |
2718 case EH_FILTER_EXPR: | |
2719 case CATCH_EXPR: | |
2720 case ASM_EXPR: | |
2721 case STATEMENT_LIST: | |
2722 case OMP_PARALLEL: | |
2723 case OMP_FOR: | |
2724 case OMP_SECTIONS: | |
2725 case OMP_SECTION: | |
2726 case OMP_SINGLE: | |
2727 case OMP_MASTER: | |
2728 case OMP_ORDERED: | |
2729 case OMP_CRITICAL: | |
2730 case OMP_TASK: | |
2731 /* These are always void. */ | |
2732 return true; | |
2733 | |
2734 case CALL_EXPR: | |
2735 case MODIFY_EXPR: | |
2736 case PREDICT_EXPR: | |
2737 /* These are valid regardless of their type. */ | |
2738 return true; | |
2739 | |
2740 default: | |
2741 return false; | |
2742 } | |
2743 } | |
2744 | |
2745 /* Return true if T is a variable. */ | |
2746 | |
2747 bool | |
2748 is_gimple_variable (tree t) | |
2749 { | |
2750 return (TREE_CODE (t) == VAR_DECL | |
2751 || TREE_CODE (t) == PARM_DECL | |
2752 || TREE_CODE (t) == RESULT_DECL | |
2753 || TREE_CODE (t) == SSA_NAME); | |
2754 } | |
2755 | |
2756 /* Return true if T is a GIMPLE identifier (something with an address). */ | |
2757 | |
2758 bool | |
2759 is_gimple_id (tree t) | |
2760 { | |
2761 return (is_gimple_variable (t) | |
2762 || TREE_CODE (t) == FUNCTION_DECL | |
2763 || TREE_CODE (t) == LABEL_DECL | |
2764 || TREE_CODE (t) == CONST_DECL | |
2765 /* Allow string constants, since they are addressable. */ | |
2766 || TREE_CODE (t) == STRING_CST); | |
2767 } | |
2768 | |
2769 /* Return true if TYPE is a suitable type for a scalar register variable. */ | |
2770 | |
2771 bool | |
2772 is_gimple_reg_type (tree type) | |
2773 { | |
2774 return !AGGREGATE_TYPE_P (type); | |
2775 } | |
2776 | |
2777 /* Return true if T is a non-aggregate register variable. */ | |
2778 | |
2779 bool | |
2780 is_gimple_reg (tree t) | |
2781 { | |
2782 if (TREE_CODE (t) == SSA_NAME) | |
2783 t = SSA_NAME_VAR (t); | |
2784 | |
2785 if (!is_gimple_variable (t)) | |
2786 return false; | |
2787 | |
2788 if (!is_gimple_reg_type (TREE_TYPE (t))) | |
2789 return false; | |
2790 | |
2791 /* A volatile decl is not acceptable because we can't reuse it as | |
2792 needed. We need to copy it into a temp first. */ | |
2793 if (TREE_THIS_VOLATILE (t)) | |
2794 return false; | |
2795 | |
2796 /* We define "registers" as things that can be renamed as needed, | |
2797 which with our infrastructure does not apply to memory. */ | |
2798 if (needs_to_live_in_memory (t)) | |
2799 return false; | |
2800 | |
2801 /* Hard register variables are an interesting case. For those that | |
2802 are call-clobbered, we don't know where all the calls are, since | |
2803 we don't (want to) take into account which operations will turn | |
2804 into libcalls at the rtl level. For those that are call-saved, | |
2805 we don't currently model the fact that calls may in fact change | |
2806 global hard registers, nor do we examine ASM_CLOBBERS at the tree | |
2807 level, and so miss the variable changes such clobbers might imply. All around, | |
2808 it seems safest to not do too much optimization with these at the | |
2809 tree level at all. We'll have to rely on the rtl optimizers to | |
2810 clean this up, as there we've got all the appropriate bits exposed. */ | |
2811 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)) | |
2812 return false; | |
2813 | |
2814 /* Complex and vector values must have been put into SSA-like form. | |
2815 That is, no assignments to the individual components. */ | |
2816 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE | |
2817 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE) | |
2818 return DECL_GIMPLE_REG_P (t); | |
2819 | |
2820 return true; | |
2821 } | |
2822 | |
2823 | |
2824 /* Return true if T is a GIMPLE variable whose address is not needed. */ | |
2825 | |
2826 bool | |
2827 is_gimple_non_addressable (tree t) | |
2828 { | |
2829 if (TREE_CODE (t) == SSA_NAME) | |
2830 t = SSA_NAME_VAR (t); | |
2831 | |
2832 return (is_gimple_variable (t) && ! needs_to_live_in_memory (t)); | |
2833 } | |
2834 | |
2835 /* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant. */ | |
2836 | |
2837 bool | |
2838 is_gimple_val (tree t) | |
2839 { | |
2840 /* Make loads from volatiles and memory vars explicit. */ | |
2841 if (is_gimple_variable (t) | |
2842 && is_gimple_reg_type (TREE_TYPE (t)) | |
2843 && !is_gimple_reg (t)) | |
2844 return false; | |
2845 | |
2846 return (is_gimple_variable (t) || is_gimple_min_invariant (t)); | |
2847 } | |
2848 | |
2849 /* Similarly, but accept hard registers as inputs to asm statements. */ | |
2850 | |
2851 bool | |
2852 is_gimple_asm_val (tree t) | |
2853 { | |
2854 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)) | |
2855 return true; | |
2856 | |
2857 return is_gimple_val (t); | |
2858 } | |
2859 | |
2860 /* Return true if T is a GIMPLE minimal lvalue. */ | |
2861 | |
2862 bool | |
2863 is_gimple_min_lval (tree t) | |
2864 { | |
2865 if (!(t = CONST_CAST_TREE (strip_invariant_refs (t)))) | |
2866 return false; | |
2867 return (is_gimple_id (t) || TREE_CODE (t) == INDIRECT_REF); | |
2868 } | |
2869 | |
2870 /* Return true if T is a typecast operation. */ | |
2871 | |
2872 bool | |
2873 is_gimple_cast (tree t) | |
2874 { | |
2875 return (CONVERT_EXPR_P (t) | |
2876 || TREE_CODE (t) == FIX_TRUNC_EXPR); | |
2877 } | |
2878 | |
2879 /* Return true if T is a valid function operand of a CALL_EXPR. */ | |
2880 | |
2881 bool | |
2882 is_gimple_call_addr (tree t) | |
2883 { | |
2884 return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t)); | |
2885 } | |
2886 | |
2887 /* If T makes a function call, return the corresponding CALL_EXPR operand. | |
2888 Otherwise, return NULL_TREE. */ | |
2889 | |
2890 tree | |
2891 get_call_expr_in (tree t) | |
2892 { | |
2893 if (TREE_CODE (t) == MODIFY_EXPR) | |
2894 t = TREE_OPERAND (t, 1); | |
2895 if (TREE_CODE (t) == WITH_SIZE_EXPR) | |
2896 t = TREE_OPERAND (t, 0); | |
2897 if (TREE_CODE (t) == CALL_EXPR) | |
2898 return t; | |
2899 return NULL_TREE; | |
2900 } | |
2901 | |
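/* Illustrative examples: for "a = foo (b)" (a MODIFY_EXPR) this
   returns the CALL_EXPR "foo (b)", looking through a WITH_SIZE_EXPR
   wrapper if one is present; for "a = b + c" it returns NULL_TREE.  */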
2902 | |
2903 /* Given a memory reference expression T, return its base address. | |
2904 The base address of a memory reference expression is the main | |
2905 object being referenced. For instance, the base address for | |
2906 'array[i].fld[j]' is 'array'. You can think of this as stripping | |
2907 away the offset part from a memory address. | |
2908 | |
2909 This function calls handled_component_p to strip away all the inner | |
2910 parts of the memory reference until it reaches the base object. */ | |
2911 | |
2912 tree | |
2913 get_base_address (tree t) | |
2914 { | |
2915 while (handled_component_p (t)) | |
2916 t = TREE_OPERAND (t, 0); | |
2917 | |
2918 if (SSA_VAR_P (t) | |
2919 || TREE_CODE (t) == STRING_CST | |
2920 || TREE_CODE (t) == CONSTRUCTOR | |
2921 || INDIRECT_REF_P (t)) | |
2922 return t; | |
2923 else | |
2924 return NULL_TREE; | |
2925 } | |
2926 | |
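/* Illustrative sketch (hypothetical helper): test whether REF is
   based on a declaration rather than reached through a pointer
   dereference.  */

static bool
base_is_decl_example (tree ref)
{
  tree base = get_base_address (ref);
  return base != NULL_TREE && DECL_P (base);
}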
2927 void | |
2928 recalculate_side_effects (tree t) | |
2929 { | |
2930 enum tree_code code = TREE_CODE (t); | |
2931 int len = TREE_OPERAND_LENGTH (t); | |
2932 int i; | |
2933 | |
2934 switch (TREE_CODE_CLASS (code)) | |
2935 { | |
2936 case tcc_expression: | |
2937 switch (code) | |
2938 { | |
2939 case INIT_EXPR: | |
2940 case MODIFY_EXPR: | |
2941 case VA_ARG_EXPR: | |
2942 case PREDECREMENT_EXPR: | |
2943 case PREINCREMENT_EXPR: | |
2944 case POSTDECREMENT_EXPR: | |
2945 case POSTINCREMENT_EXPR: | |
2946 /* All of these have side-effects, no matter what their | |
2947 operands are. */ | |
2948 return; | |
2949 | |
2950 default: | |
2951 break; | |
2952 } | |
2953 /* Fall through. */ | |
2954 | |
2955 case tcc_comparison: /* a comparison expression */ | |
2956 case tcc_unary: /* a unary arithmetic expression */ | |
2957 case tcc_binary: /* a binary arithmetic expression */ | |
2958 case tcc_reference: /* a reference */ | |
2959 case tcc_vl_exp: /* a function call */ | |
2960 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t); | |
2961 for (i = 0; i < len; ++i) | |
2962 { | |
2963 tree op = TREE_OPERAND (t, i); | |
2964 if (op && TREE_SIDE_EFFECTS (op)) | |
2965 TREE_SIDE_EFFECTS (t) = 1; | |
2966 } | |
2967 break; | |
2968 | |
2969 case tcc_constant: | |
2970 /* No side-effects. */ | |
2971 return; | |
2972 | |
2973 default: | |
2974 gcc_unreachable (); | |
2975 } | |
2976 } | |
2977 | |
2978 /* Canonicalize a tree T for use as the condition of a COND_EXPR. | |
2979 Return a canonicalized tree valid for a COND_EXPR, or NULL_TREE | |
2980 if we failed to create one. */ | |
2981 | |
2982 tree | |
2983 canonicalize_cond_expr_cond (tree t) | |
2984 { | |
2985 /* Strip conversions around boolean operations. */ | |
2986 if (CONVERT_EXPR_P (t) | |
2987 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 0)))) | |
2988 t = TREE_OPERAND (t, 0); | |
2989 | |
2990 /* For (bool)x use x != 0. */ | |
2991 if (CONVERT_EXPR_P (t) | |
2992 && TREE_CODE (TREE_TYPE (t)) == BOOLEAN_TYPE) | |
2993 { | |
2994 tree top0 = TREE_OPERAND (t, 0); | |
2995 t = build2 (NE_EXPR, TREE_TYPE (t), | |
2996 top0, build_int_cst (TREE_TYPE (top0), 0)); | |
2997 } | |
2998 /* For !x use x == 0. */ | |
2999 else if (TREE_CODE (t) == TRUTH_NOT_EXPR) | |
3000 { | |
3001 tree top0 = TREE_OPERAND (t, 0); | |
3002 t = build2 (EQ_EXPR, TREE_TYPE (t), | |
3003 top0, build_int_cst (TREE_TYPE (top0), 0)); | |
3004 } | |
3005 /* For cmp ? 1 : 0 use cmp. */ | |
3006 else if (TREE_CODE (t) == COND_EXPR | |
3007 && COMPARISON_CLASS_P (TREE_OPERAND (t, 0)) | |
3008 && integer_onep (TREE_OPERAND (t, 1)) | |
3009 && integer_zerop (TREE_OPERAND (t, 2))) | |
3010 { | |
3011 tree top0 = TREE_OPERAND (t, 0); | |
3012 t = build2 (TREE_CODE (top0), TREE_TYPE (t), | |
3013 TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1)); | |
3014 } | |
3015 | |
3016 if (is_gimple_condexpr (t)) | |
3017 return t; | |
3018 | |
3019 return NULL_TREE; | |
3020 } | |
3021 | |
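/* Illustrative examples of the canonicalizations above:

     (bool) x        becomes  x != 0
     !x              becomes  x == 0
     a < b ? 1 : 0   becomes  a < b

   Anything that still fails is_gimple_condexpr yields NULL_TREE.  */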
3022 /* Build a GIMPLE_CALL identical to STMT but skipping the arguments in | |
3023 the positions marked by the set ARGS_TO_SKIP. */ | |
3024 | |
3025 gimple | |
3026 gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip) | |
3027 { | |
3028 int i; | |
3029 tree fn = gimple_call_fn (stmt); | |
3030 int nargs = gimple_call_num_args (stmt); | |
3031 VEC(tree, heap) *vargs = VEC_alloc (tree, heap, nargs); | |
3032 gimple new_stmt; | |
3033 | |
3034 for (i = 0; i < nargs; i++) | |
3035 if (!bitmap_bit_p (args_to_skip, i)) | |
3036 VEC_quick_push (tree, vargs, gimple_call_arg (stmt, i)); | |
3037 | |
3038 new_stmt = gimple_build_call_vec (fn, vargs); | |
3039 VEC_free (tree, heap, vargs); | |
3040 if (gimple_call_lhs (stmt)) | |
3041 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt)); | |
3042 | |
3043 gimple_set_vuse (new_stmt, gimple_vuse (stmt)); | |
3044 gimple_set_vdef (new_stmt, gimple_vdef (stmt)); | |
3045 | |
3046 gimple_set_block (new_stmt, gimple_block (stmt)); | |
3047 if (gimple_has_location (stmt)) | |
3048 gimple_set_location (new_stmt, gimple_location (stmt)); | |
3049 | |
3050 /* Carry all the flags to the new GIMPLE_CALL. */ | |
3051 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt)); | |
3052 gimple_call_set_tail (new_stmt, gimple_call_tail_p (stmt)); | |
3053 gimple_call_set_cannot_inline (new_stmt, gimple_call_cannot_inline_p (stmt)); | |
3054 gimple_call_set_return_slot_opt (new_stmt, gimple_call_return_slot_opt_p (stmt)); | |
3055 gimple_call_set_from_thunk (new_stmt, gimple_call_from_thunk_p (stmt)); | |
3056 gimple_call_set_va_arg_pack (new_stmt, gimple_call_va_arg_pack_p (stmt)); | |
3057 #ifndef noCbC | |
3058 gimple_call_set_cbc_goto (new_stmt, gimple_call_cbc_goto_p (stmt)); | |
3059 #endif | |
3060 | |
3061 gimple_set_modified (new_stmt, true); | |
3062 return new_stmt; | |
3063 } | |
3064 | |
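/* Illustrative sketch (hypothetical helper): drop the first argument
   of call statement STMT.  The caller remains responsible for
   replacing the old statement with the returned one.  */

static gimple
drop_first_arg_example (gimple stmt)
{
  bitmap skip = BITMAP_ALLOC (NULL);
  gimple new_call;

  bitmap_set_bit (skip, 0);
  new_call = gimple_call_copy_skip_args (stmt, skip);
  BITMAP_FREE (skip);
  return new_call;
}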
3065 | |
3066 static hashval_t gimple_type_hash (const void *); | |
3067 | |
3068 /* Structure used to maintain a cache of some type pairs compared by | |
3069 gimple_types_compatible_p when comparing aggregate types. There are | |
3070 four possible values for SAME_P: | |
3071 | |
3072 -2: The pair (T1, T2) has just been inserted in the table. | |
3073 -1: The pair (T1, T2) is currently being compared. | |
3074 0: T1 and T2 are different types. | |
3075 1: T1 and T2 are the same type. | |
3076 | |
3077 This table is only used when comparing aggregate types to avoid | |
3078 infinite recursion due to self-referential types. */ | |
3079 struct type_pair_d | |
3080 { | |
3081 unsigned int uid1; | |
3082 unsigned int uid2; | |
3083 int same_p; | |
3084 }; | |
3085 typedef struct type_pair_d *type_pair_t; | |
3086 | |
3087 /* Return a hash value for the type pair pointed-to by P. */ | |
3088 | |
3089 static hashval_t | |
3090 type_pair_hash (const void *p) | |
3091 { | |
3092 const struct type_pair_d *pair = (const struct type_pair_d *) p; | |
3093 hashval_t val1 = pair->uid1; | |
3094 hashval_t val2 = pair->uid2; | |
3095 return (iterative_hash_hashval_t (val2, val1) | |
3096 ^ iterative_hash_hashval_t (val1, val2)); | |
3097 } | |
3098 | |
3099 /* Compare two type pairs pointed-to by P1 and P2. */ | |
3100 | |
3101 static int | |
3102 type_pair_eq (const void *p1, const void *p2) | |
3103 { | |
3104 const struct type_pair_d *pair1 = (const struct type_pair_d *) p1; | |
3105 const struct type_pair_d *pair2 = (const struct type_pair_d *) p2; | |
3106 return ((pair1->uid1 == pair2->uid1 && pair1->uid2 == pair2->uid2) | |
3107 || (pair1->uid1 == pair2->uid2 && pair1->uid2 == pair2->uid1)); | |
3108 } | |
3109 | |
3110 /* Look up the pair of types T1 and T2 in *VISITED_P. Insert a new | |
3111 entry if none existed. */ | |
3112 | |
3113 static type_pair_t | |
3114 lookup_type_pair (tree t1, tree t2, htab_t *visited_p, struct obstack *ob_p) | |
3115 { | |
3116 struct type_pair_d pair; | |
3117 type_pair_t p; | |
3118 void **slot; | |
3119 | |
3120 if (*visited_p == NULL) | |
3121 { | |
3122 *visited_p = htab_create (251, type_pair_hash, type_pair_eq, NULL); | |
3123 gcc_obstack_init (ob_p); | |
3124 } | |
3125 | |
3126 pair.uid1 = TYPE_UID (t1); | |
3127 pair.uid2 = TYPE_UID (t2); | |
3128 slot = htab_find_slot (*visited_p, &pair, INSERT); | |
3129 | |
3130 if (*slot) | |
3131 p = *((type_pair_t *) slot); | |
3132 else | |
3133 { | |
3134 p = XOBNEW (ob_p, struct type_pair_d); | |
3135 p->uid1 = TYPE_UID (t1); | |
3136 p->uid2 = TYPE_UID (t2); | |
3137 p->same_p = -2; | |
3138 *slot = (void *) p; | |
3139 } | |
3140 | |
3141 return p; | |
3142 } | |
3143 | |
3144 | |
3145 /* Return true if T1 and T2 have the same name. If FOR_COMPLETION_P is | |
3146 true, treat anonymous types as unique: return false whenever either | |
3147 type lacks a name. Otherwise, two nameless types compare equal. */ | |
3148 | |
3149 static bool | |
3150 compare_type_names_p (tree t1, tree t2, bool for_completion_p) | |
3151 { | |
3152 tree name1 = TYPE_NAME (t1); | |
3153 tree name2 = TYPE_NAME (t2); | |
3154 | |
3155 /* Consider anonymous types all unique for completion. */ | |
3156 if (for_completion_p | |
3157 && (!name1 || !name2)) | |
3158 return false; | |
3159 | |
3160 if (name1 && TREE_CODE (name1) == TYPE_DECL) | |
3161 { | |
3162 name1 = DECL_NAME (name1); | |
3163 if (for_completion_p | |
3164 && !name1) | |
3165 return false; | |
3166 } | |
3167 gcc_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE); | |
3168 | |
3169 if (name2 && TREE_CODE (name2) == TYPE_DECL) | |
3170 { | |
3171 name2 = DECL_NAME (name2); | |
3172 if (for_completion_p | |
3173 && !name2) | |
3174 return false; | |
3175 } | |
3176 gcc_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE); | |
3177 | |
3178 /* Identifiers can be compared with pointer equality rather | |
3179 than a string comparison. */ | |
3180 if (name1 == name2) | |
3181 return true; | |
3182 | |
3183 return false; | |
3184 } | |
3185 | |
3186 /* Return true if the field decls F1 and F2 are at the same offset. */ | |
3187 | |
3188 bool | |
3189 compare_field_offset (tree f1, tree f2) | |
3190 { | |
3191 if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2)) | |
3192 return (operand_equal_p (DECL_FIELD_OFFSET (f1), | |
3193 DECL_FIELD_OFFSET (f2), 0) | |
3194 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1), | |
3195 DECL_FIELD_BIT_OFFSET (f2))); | |
3196 | |
3197 /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN | |
3198 should be, so handle differing ones specially by decomposing | |
3199 the offset into a byte and bit offset manually. */ | |
3200 if (host_integerp (DECL_FIELD_OFFSET (f1), 0) | |
3201 && host_integerp (DECL_FIELD_OFFSET (f2), 0)) | |
3202 { | |
3203 unsigned HOST_WIDE_INT byte_offset1, byte_offset2; | |
3204 unsigned HOST_WIDE_INT bit_offset1, bit_offset2; | |
3205 bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1)); | |
3206 byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1)) | |
3207 + bit_offset1 / BITS_PER_UNIT); | |
3208 bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2)); | |
3209 byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2)) | |
3210 + bit_offset2 / BITS_PER_UNIT); | |
3211 if (byte_offset1 != byte_offset2) | |
3212 return false; | |
3213 return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT; | |
3214 } | |
3215 | |
3216 return false; | |
3217 } | |
3218 | |
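/* Illustrative example of the manual decomposition above: with
   BITS_PER_UNIT == 8, a field with DECL_FIELD_OFFSET 1 and
   DECL_FIELD_BIT_OFFSET 20 sits at byte offset 1 + 20 / 8 == 3 with
   20 % 8 == 4 bits left over, and so compares equal to any field that
   decomposes to the same byte and residual bit offsets.  */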
3219 /* Return 1 iff T1 and T2 are structurally identical. | |
3220 Otherwise, return 0. */ | |
3221 | |
3222 static int | |
3223 gimple_types_compatible_p (tree t1, tree t2) | |
3224 { | |
3225 type_pair_t p = NULL; | |
3226 | |
3227 /* Check first for the obvious case of pointer identity. */ | |
3228 if (t1 == t2) | |
3229 return 1; | |
3230 | |
3231 /* Check that we have two types to compare. */ | |
3232 if (t1 == NULL_TREE || t2 == NULL_TREE) | |
3233 return 0; | |
3234 | |
3235 /* Can't be the same type if the types don't have the same code. */ | |
3236 if (TREE_CODE (t1) != TREE_CODE (t2)) | |
3237 return 0; | |
3238 | |
3239 /* Can't be the same type if they have different CV qualifiers. */ | |
3240 if (TYPE_QUALS (t1) != TYPE_QUALS (t2)) | |
3241 return 0; | |
3242 | |
3243 /* Void types are always the same. */ | |
3244 if (TREE_CODE (t1) == VOID_TYPE) | |
3245 return 1; | |
3246 | |
3247 /* For numerical types do some simple checks before doing three | |
3248 hashtable queries. */ | |
3249 if (INTEGRAL_TYPE_P (t1) | |
3250 || SCALAR_FLOAT_TYPE_P (t1) | |
3251 || FIXED_POINT_TYPE_P (t1) | |
3252 || TREE_CODE (t1) == VECTOR_TYPE | |
3253 || TREE_CODE (t1) == COMPLEX_TYPE | |
3254 || TREE_CODE (t1) == OFFSET_TYPE) | |
3255 { | |
3256 /* Can't be the same type if they have different alignment, | |
3257 sign, precision or mode. */ | |
3258 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2) | |
3259 || TYPE_PRECISION (t1) != TYPE_PRECISION (t2) | |
3260 || TYPE_MODE (t1) != TYPE_MODE (t2) | |
3261 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)) | |
3262 return 0; | |
3263 | |
3264 if (TREE_CODE (t1) == INTEGER_TYPE | |
3265 && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2) | |
3266 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))) | |
3267 return 0; | |
3268 | |
3269 /* That's all we need to check for float and fixed-point types. */ | |
3270 if (SCALAR_FLOAT_TYPE_P (t1) | |
3271 || FIXED_POINT_TYPE_P (t1)) | |
3272 return 1; | |
3273 | |
3274 /* Perform cheap tail-recursion for vector and complex types. */ | |
3275 if (TREE_CODE (t1) == VECTOR_TYPE | |
3276 || TREE_CODE (t1) == COMPLEX_TYPE) | |
3277 return gimple_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)); | |
3278 | |
3279 /* For integral types fall thru to more complex checks. */ | |
3280 } | |
3281 | |
3282 /* If the hash values of t1 and t2 are different the types can't | |
3283 possibly be the same. This helps keep the type-pair hashtable | |
3284 small, only tracking comparisons for hash collisions. */ | |
3285 if (gimple_type_hash (t1) != gimple_type_hash (t2)) | |
3286 return 0; | |
3287 | |
3288 /* If we've visited this type pair before (in the case of aggregates | |
3289 with self-referential types), and we made a decision, return it. */ | |
3290 p = lookup_type_pair (t1, t2, &gtc_visited, &gtc_ob); | |
3291 if (p->same_p == 0 || p->same_p == 1) | |
3292 { | |
3293 /* We have already decided whether T1 and T2 are the | |
3294 same, return the cached result. */ | |
3295 return p->same_p == 1; | |
3296 } | |
3297 else if (p->same_p == -1) | |
3298 { | |
3299 /* We are currently comparing this pair of types, assume | |
3300 that they are the same and let the caller decide. */ | |
3301 return 1; | |
3302 } | |
3303 | |
3304 gcc_assert (p->same_p == -2); | |
3305 | |
3306 /* Mark the (T1, T2) comparison in progress. */ | |
3307 p->same_p = -1; | |
3308 | |
3309 /* If their attributes are not the same they can't be the same type. */ | |
3310 if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2))) | |
3311 goto different_types; | |
3312 | |
3313 /* Do type-specific comparisons. */ | |
3314 switch (TREE_CODE (t1)) | |
3315 { | |
3316 case ARRAY_TYPE: | |
3317 /* Array types are the same if the element types are the same and | |
3318 the number of elements is the same. */ | |
3319 if (!gimple_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)) | |
3320 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2) | |
3321 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2)) | |
3322 goto different_types; | |
3323 else | |
3324 { | |
3325 tree i1 = TYPE_DOMAIN (t1); | |
3326 tree i2 = TYPE_DOMAIN (t2); | |
3327 | |
3328 /* For an incomplete external array, the type domain can be | |
3329 NULL_TREE. Check this condition also. */ | |
3330 if (i1 == NULL_TREE && i2 == NULL_TREE) | |
3331 goto same_types; | |
3332 else if (i1 == NULL_TREE || i2 == NULL_TREE) | |
3333 goto different_types; | |
3334 /* If for a complete array type the possibly gimplified sizes | |
3335 are different the types are different. */ | |
3336 else if (((TYPE_SIZE (i1) != NULL) ^ (TYPE_SIZE (i2) != NULL)) | |
3337 || (TYPE_SIZE (i1) | |
3338 && TYPE_SIZE (i2) | |
3339 && !operand_equal_p (TYPE_SIZE (i1), TYPE_SIZE (i2), 0))) | |
3340 goto different_types; | |
3341 else | |
3342 { | |
3343 tree min1 = TYPE_MIN_VALUE (i1); | |
3344 tree min2 = TYPE_MIN_VALUE (i2); | |
3345 tree max1 = TYPE_MAX_VALUE (i1); | |
3346 tree max2 = TYPE_MAX_VALUE (i2); | |
3347 | |
3348 /* The minimum/maximum values have to be the same. */ | |
3349 if ((min1 == min2 | |
3350 || (min1 && min2 && operand_equal_p (min1, min2, 0))) | |
3351 && (max1 == max2 | |
3352 || (max1 && max2 && operand_equal_p (max1, max2, 0)))) | |
3353 goto same_types; | |
3354 else | |
3355 goto different_types; | |
3356 } | |
3357 } | |
3358 | |
3359 case METHOD_TYPE: | |
3360 /* Method types should belong to the same class. */ | |
3361 if (!gimple_types_compatible_p (TYPE_METHOD_BASETYPE (t1), | |
3362 TYPE_METHOD_BASETYPE (t2))) | |
3363 goto different_types; | |
3364 | |
3365 /* Fallthru */ | |
3366 | |
3367 case FUNCTION_TYPE: | |
3368 /* Function types are the same if the return type and argument types | |
3369 are the same. */ | |
3370 if (!gimple_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))) | |
3371 goto different_types; | |
3372 else | |
3373 { | |
3374 if (!targetm.comp_type_attributes (t1, t2)) | |
3375 goto different_types; | |
3376 | |
3377 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2)) | |
3378 goto same_types; | |
3379 else | |
3380 { | |
3381 tree parms1, parms2; | |
3382 | |
3383 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2); | |
3384 parms1 && parms2; | |
3385 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2)) | |
3386 { | |
3387 if (!gimple_types_compatible_p (TREE_VALUE (parms1), | |
3388 TREE_VALUE (parms2))) | |
3389 goto different_types; | |
3390 } | |
3391 | |
3392 if (parms1 || parms2) | |
3393 goto different_types; | |
3394 | |
3395 goto same_types; | |
3396 } | |
3397 } | |
3398 | |
3399 case OFFSET_TYPE: | |
3400 { | |
3401 if (!gimple_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)) | |
3402 || !gimple_types_compatible_p (TYPE_OFFSET_BASETYPE (t1), | |
3403 TYPE_OFFSET_BASETYPE (t2))) | |
3404 goto different_types; | |
3405 | |
3406 goto same_types; | |
3407 } | |
3408 | |
3409 case POINTER_TYPE: | |
3410 case REFERENCE_TYPE: | |
3411 { | |
3412 /* If the two pointers have different ref-all attributes, | |
3413 they can't be the same type. */ | |
3414 if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2)) | |
3415 goto different_types; | |
3416 | |
3417 /* If one pointer points to an incomplete type variant of | |
3418 the other pointed-to type they are the same. */ | |
3419 if (TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2)) | |
3420 && RECORD_OR_UNION_TYPE_P (TREE_TYPE (t1)) | |
3421 && (!COMPLETE_TYPE_P (TREE_TYPE (t1)) | |
3422 || !COMPLETE_TYPE_P (TREE_TYPE (t2))) | |
3423 && compare_type_names_p (TYPE_MAIN_VARIANT (TREE_TYPE (t1)), | |
3424 TYPE_MAIN_VARIANT (TREE_TYPE (t2)), true)) | |
3425 { | |
3426 /* Replace the pointed-to incomplete type with the | |
3427 complete one. */ | |
3428 if (COMPLETE_TYPE_P (TREE_TYPE (t2))) | |
3429 TREE_TYPE (t1) = TREE_TYPE (t2); | |
3430 else | |
3431 TREE_TYPE (t2) = TREE_TYPE (t1); | |
3432 goto same_types; | |
3433 } | |
3434 | |
3435 /* Otherwise, pointer and reference types are the same if the | |
3436 pointed-to types are the same. */ | |
3437 if (gimple_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))) | |
3438 goto same_types; | |
3439 | |
3440 goto different_types; | |
3441 } | |
3442 | |
3443 case INTEGER_TYPE: | |
3444 case BOOLEAN_TYPE: | |
3445 { | |
3446 tree min1 = TYPE_MIN_VALUE (t1); | |
3447 tree max1 = TYPE_MAX_VALUE (t1); | |
3448 tree min2 = TYPE_MIN_VALUE (t2); | |
3449 tree max2 = TYPE_MAX_VALUE (t2); | |
3450 bool min_equal_p = false; | |
3451 bool max_equal_p = false; | |
3452 | |
3453 /* If either type has a minimum value, the other type must | |
3454 have the same. */ | |
3455 if (min1 == NULL_TREE && min2 == NULL_TREE) | |
3456 min_equal_p = true; | |
3457 else if (min1 && min2 && operand_equal_p (min1, min2, 0)) | |
3458 min_equal_p = true; | |
3459 | |
3460 /* Likewise, if either type has a maximum value, the other | |
3461 type must have the same. */ | |
3462 if (max1 == NULL_TREE && max2 == NULL_TREE) | |
3463 max_equal_p = true; | |
3464 else if (max1 && max2 && operand_equal_p (max1, max2, 0)) | |
3465 max_equal_p = true; | |
3466 | |
3467 if (!min_equal_p || !max_equal_p) | |
3468 goto different_types; | |
3469 | |
3470 goto same_types; | |
3471 } | |
3472 | |
3473 case ENUMERAL_TYPE: | |
3474 { | |
3475 /* FIXME lto, we cannot check bounds on enumeral types because | |
3476 different front ends will produce different values. | |
3477 In C, enumeral types are integers, while in C++ each element | |
3478 will have its own symbolic value. We should decide how enums | |
3479 are to be represented in GIMPLE and have each front end lower | |
3480 to that. */ | |
3481 tree v1, v2; | |
3482 | |
3483 /* For enumeral types, all the values must be the same. */ | |
3484 if (TYPE_VALUES (t1) == TYPE_VALUES (t2)) | |
3485 goto same_types; | |
3486 | |
3487 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2); | |
3488 v1 && v2; | |
3489 v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2)) | |
3490 { | |
3491 tree c1 = TREE_VALUE (v1); | |
3492 tree c2 = TREE_VALUE (v2); | |
3493 | |
3494 if (TREE_CODE (c1) == CONST_DECL) | |
3495 c1 = DECL_INITIAL (c1); | |
3496 | |
3497 if (TREE_CODE (c2) == CONST_DECL) | |
3498 c2 = DECL_INITIAL (c2); | |
3499 | |
3500 if (tree_int_cst_equal (c1, c2) != 1) | |
3501 goto different_types; | |
3502 } | |
3503 | |
3504 /* If one enumeration has more values than the other, they | |
3505 are not the same. */ | |
3506 if (v1 || v2) | |
3507 goto different_types; | |
3508 | |
3509 goto same_types; | |
3510 } | |
3511 | |
3512 case RECORD_TYPE: | |
3513 case UNION_TYPE: | |
3514 case QUAL_UNION_TYPE: | |
3515 { | |
3516 tree f1, f2; | |
3517 | |
3518 /* If one type requires structural equality checks and the | |
3519 other doesn't, do not merge the types. */ | |
3520 if (TYPE_STRUCTURAL_EQUALITY_P (t1) | |
3521 != TYPE_STRUCTURAL_EQUALITY_P (t2)) | |
3522 goto different_types; | |
3523 | |
3524 /* The struct tags shall compare equal. */ | |
3525 if (!compare_type_names_p (TYPE_MAIN_VARIANT (t1), | |
3526 TYPE_MAIN_VARIANT (t2), false)) | |
3527 goto different_types; | |
3528 | |
3529 /* For aggregate types, all the fields must be the same. */ | |
3530 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2); | |
3531 f1 && f2; | |
3532 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2)) | |
3533 { | |
3534 /* The fields must have the same name, offset and type. */ | |
3535 if (DECL_NAME (f1) != DECL_NAME (f2) | |
3536 || DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2) | |
3537 || !compare_field_offset (f1, f2) | |
3538 || !gimple_types_compatible_p (TREE_TYPE (f1), | |
3539 TREE_TYPE (f2))) | |
3540 goto different_types; | |
3541 } | |
3542 | |
3543 /* If one aggregate has more fields than the other, they | |
3544 are not the same. */ | |
3545 if (f1 || f2) | |
3546 goto different_types; | |
3547 | |
3548 goto same_types; | |
3549 } | |
3550 | |
3551 default: | |
3552 gcc_unreachable (); | |
3553 } | |
3554 | |
3555 /* Common exit path for types that are not compatible. */ | |
3556 different_types: | |
3557 p->same_p = 0; | |
3558 return 0; | |
3559 | |
3560 /* Common exit path for types that are compatible. */ | |
3561 same_types: | |
3562 p->same_p = 1; | |
3563 return 1; | |
3564 } | |
3565 | |
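/* Hypothetical usage sketch, not part of GCC: how a caller might use
   the structural predicate above.  T1 and T2 are assumed to be two
   TYPE_P trees, e.g. the same struct type as read from two different
   translation units.  Results are memoized via the same_p flag set at
   the exit labels above, so repeated queries stay cheap.  */

static bool
example_types_interchangeable_p (tree t1, tree t2)
{
  /* Identical nodes are trivially compatible; otherwise fall back to
     the structural walk.  */
  return t1 == t2 || gimple_types_compatible_p (t1, t2) != 0;
}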
3566 | |
3567 | |
3568 | |
3569 /* Per pointer state for the SCC finding. The on_sccstack flag | |
3570 is not strictly required; it is true when there is no hash value | |
3571 recorded for the type and false otherwise. But querying that | |
3572 is slower. */ | |
3573 | |
3574 struct sccs | |
3575 { | |
3576 unsigned int dfsnum; | |
3577 unsigned int low; | |
3578 bool on_sccstack; | |
3579 hashval_t hash; | |
3580 }; | |
3581 | |
3582 static unsigned int next_dfs_num; | |
3583 | |
3584 static hashval_t | |
3585 iterative_hash_gimple_type (tree, hashval_t, VEC(tree, heap) **, | |
3586 struct pointer_map_t *, struct obstack *); | |
3587 | |
3588 /* DFS visit the edge from the caller's type with state *STATE to T. | |
3589 Update the caller's type hash V with the hash for T if it is not part | |
3590 of the SCC containing the caller's type and return it. | |
3591 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */ | |
3592 | |
3593 static hashval_t | |
3594 visit (tree t, struct sccs *state, hashval_t v, | |
3595 VEC (tree, heap) **sccstack, | |
3596 struct pointer_map_t *sccstate, | |
3597 struct obstack *sccstate_obstack) | |
3598 { | |
3599 struct sccs *cstate = NULL; | |
3600 void **slot; | |
3601 | |
3602 /* If there is a hash value recorded for this type then it can't | |
3603 possibly be part of our parent SCC. Simply mix in its hash. */ | |
3604 if ((slot = pointer_map_contains (type_hash_cache, t))) | |
3605 return iterative_hash_hashval_t ((hashval_t) (size_t) *slot, v); | |
3606 | |
3607 if ((slot = pointer_map_contains (sccstate, t)) != NULL) | |
3608 cstate = (struct sccs *)*slot; | |
3609 if (!cstate) | |
3610 { | |
3611 hashval_t tem; | |
3612 /* Not yet visited. DFS recurse. */ | |
3613 tem = iterative_hash_gimple_type (t, v, | |
3614 sccstack, sccstate, sccstate_obstack); | |
3615 if (!cstate) | |
3616 cstate = (struct sccs *)* pointer_map_contains (sccstate, t); | |
3617 state->low = MIN (state->low, cstate->low); | |
3618 /* If the type is no longer on the SCC stack and thus is not part | |
3619 of the parent's SCC, mix in its hash value. Otherwise we will | |
3620 ignore the type for hashing purposes and return the unaltered | |
3621 hash value. */ | |
3622 if (!cstate->on_sccstack) | |
3623 return tem; | |
3624 } | |
3625 if (cstate->dfsnum < state->dfsnum | |
3626 && cstate->on_sccstack) | |
3627 state->low = MIN (cstate->dfsnum, state->low); | |
3628 | |
3629 /* We are part of our parent's SCC; skip this type during hashing | |
3630 and return the unaltered hash value. */ | |
3631 return v; | |
3632 } | |
3633 | |
3634 /* Hash NAME with the previous hash value V and return it. */ | |
3635 | |
3636 static hashval_t | |
3637 iterative_hash_name (tree name, hashval_t v) | |
3638 { | |
3639 if (!name) | |
3640 return v; | |
3641 if (TREE_CODE (name) == TYPE_DECL) | |
3642 name = DECL_NAME (name); | |
3643 if (!name) | |
3644 return v; | |
3645 gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE); | |
3646 return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v); | |
3647 } | |
3648 | |
3649 /* Return a hash value for gimple type TYPE combined with VAL. | |
3650 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. | |
3651 | |
3652 To hash a type we end up hashing in types that are reachable. | |
3653 Through pointers we can end up with cycles, which mess up the | |
3654 required property that we need to compute the same hash value | |
3655 for structurally equivalent types. To avoid this we have to | |
3656 hash all types in a cycle (the SCC) in a commutative way. The | |
3657 easiest way is to not mix in the hashes of the SCC members at | |
3658 all. To make this work we have to delay setting the hash | |
3659 values of the SCC until it is complete. */ | |
3660 | |
3661 static hashval_t | |
3662 iterative_hash_gimple_type (tree type, hashval_t val, | |
3663 VEC(tree, heap) **sccstack, | |
3664 struct pointer_map_t *sccstate, | |
3665 struct obstack *sccstate_obstack) | |
3666 { | |
3667 hashval_t v; | |
3668 void **slot; | |
3669 struct sccs *state; | |
3670 | |
3671 #ifdef ENABLE_CHECKING | |
3672 /* Not visited during this DFS walk nor during previous walks. */ | |
3673 gcc_assert (!pointer_map_contains (type_hash_cache, type) | |
3674 && !pointer_map_contains (sccstate, type)); | |
3675 #endif | |
3676 state = XOBNEW (sccstate_obstack, struct sccs); | |
3677 *pointer_map_insert (sccstate, type) = state; | |
3678 | |
3679 VEC_safe_push (tree, heap, *sccstack, type); | |
3680 state->dfsnum = next_dfs_num++; | |
3681 state->low = state->dfsnum; | |
3682 state->on_sccstack = true; | |
3683 | |
3684 /* Combine a few common features of types so that types are grouped into | |
3685 smaller sets; when searching for existing matching types to merge, | |
3686 only existing types having the same features as the new type will be | |
3687 checked. */ | |
3688 v = iterative_hash_hashval_t (TREE_CODE (type), 0); | |
3689 v = iterative_hash_hashval_t (TYPE_QUALS (type), v); | |
3690 v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v); | |
3691 | |
3692 /* Do not hash the type's size as this will cause differences in | |
3693 hash values for the complete vs. the incomplete type variant. */ | |
3694 | |
3695 /* Incorporate common features of numerical types. */ | |
3696 if (INTEGRAL_TYPE_P (type) | |
3697 || SCALAR_FLOAT_TYPE_P (type) | |
3698 || FIXED_POINT_TYPE_P (type)) | |
3699 { | |
3700 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v); | |
3701 v = iterative_hash_hashval_t (TYPE_MODE (type), v); | |
3702 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v); | |
3703 } | |
3704 | |
3705 /* For pointer and reference types, fold in information about the type | |
3706 pointed to but do not recurse into possibly incomplete types to | |
3707 avoid hash differences for complete vs. incomplete types. */ | |
3708 if (POINTER_TYPE_P (type)) | |
3709 { | |
3710 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (type))) | |
3711 { | |
3712 v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v); | |
3713 v = iterative_hash_name | |
3714 (TYPE_NAME (TYPE_MAIN_VARIANT (TREE_TYPE (type))), v); | |
3715 } | |
3716 else | |
3717 v = visit (TREE_TYPE (type), state, v, | |
3718 sccstack, sccstate, sccstate_obstack); | |
3719 } | |
3720 | |
3721 /* For integer types hash the type's min/max values and the string flag. */ | |
3722 if (TREE_CODE (type) == INTEGER_TYPE) | |
3723 { | |
3724 v = iterative_hash_expr (TYPE_MIN_VALUE (type), v); | |
3725 v = iterative_hash_expr (TYPE_MAX_VALUE (type), v); | |
3726 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v); | |
3727 } | |
3728 | |
3729 /* For array types hash their domain and the string flag. */ | |
3730 if (TREE_CODE (type) == ARRAY_TYPE | |
3731 && TYPE_DOMAIN (type)) | |
3732 { | |
3733 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v); | |
3734 v = visit (TYPE_DOMAIN (type), state, v, | |
3735 sccstack, sccstate, sccstate_obstack); | |
3736 } | |
3737 | |
3738 /* Recurse for aggregates with a single element type. */ | |
3739 if (TREE_CODE (type) == ARRAY_TYPE | |
3740 || TREE_CODE (type) == COMPLEX_TYPE | |
3741 || TREE_CODE (type) == VECTOR_TYPE) | |
3742 v = visit (TREE_TYPE (type), state, v, | |
3743 sccstack, sccstate, sccstate_obstack); | |
3744 | |
3745 /* Incorporate function return and argument types. */ | |
3746 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE) | |
3747 { | |
3748 unsigned na; | |
3749 tree p; | |
3750 | |
3751 /* For method types also incorporate their parent class. */ | |
3752 if (TREE_CODE (type) == METHOD_TYPE) | |
3753 v = visit (TYPE_METHOD_BASETYPE (type), state, v, | |
3754 sccstack, sccstate, sccstate_obstack); | |
3755 | |
3756 v = visit (TREE_TYPE (type), state, v, | |
3757 sccstack, sccstate, sccstate_obstack); | |
3758 | |
3759 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p)) | |
3760 { | |
3761 v = visit (TREE_VALUE (p), state, v, | |
3762 sccstack, sccstate, sccstate_obstack); | |
3763 na++; | |
3764 } | |
3765 | |
3766 v = iterative_hash_hashval_t (na, v); | |
3767 } | |
3768 | |
3769 if (TREE_CODE (type) == RECORD_TYPE | |
3770 || TREE_CODE (type) == UNION_TYPE | |
3771 || TREE_CODE (type) == QUAL_UNION_TYPE) | |
3772 { | |
3773 unsigned nf; | |
3774 tree f; | |
3775 | |
3776 v = iterative_hash_name (TYPE_NAME (TYPE_MAIN_VARIANT (type)), v); | |
3777 | |
3778 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f)) | |
3779 { | |
3780 v = iterative_hash_name (DECL_NAME (f), v); | |
3781 v = visit (TREE_TYPE (f), state, v, | |
3782 sccstack, sccstate, sccstate_obstack); | |
3783 nf++; | |
3784 } | |
3785 | |
3786 v = iterative_hash_hashval_t (nf, v); | |
3787 } | |
3788 | |
3789 /* Record hash for us. */ | |
3790 state->hash = v; | |
3791 | |
3792 /* See if we found an SCC. */ | |
3793 if (state->low == state->dfsnum) | |
3794 { | |
3795 tree x; | |
3796 | |
3797 /* Pop off the SCC and set its hash values. */ | |
3798 do | |
3799 { | |
3800 struct sccs *cstate; | |
3801 x = VEC_pop (tree, *sccstack); | |
3802 gcc_assert (!pointer_map_contains (type_hash_cache, x)); | |
3803 cstate = (struct sccs *)*pointer_map_contains (sccstate, x); | |
3804 cstate->on_sccstack = false; | |
3805 slot = pointer_map_insert (type_hash_cache, x); | |
3806 *slot = (void *) (size_t) cstate->hash; | |
3807 } | |
3808 while (x != type); | |
3809 } | |
3810 | |
3811 return iterative_hash_hashval_t (v, val); | |
3812 } | |
3813 | |
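/* Hypothetical illustration, not GCC code: two struct types that
   reach each other through pointers and therefore form a single SCC
   in the DFS above.  Mixing member hashes in visit order would give
   the two types different hash values depending on which one the
   walk enters first; iterative_hash_gimple_type instead delays
   recording the hashes until the whole SCC is popped off SCCSTACK.  */

struct example_node { struct example_edge *out; };
struct example_edge { struct example_node *dst; };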
3814 | |
3815 /* Returns a hash value for P (assumed to be a type). The hash value | |
3816 is computed using some distinguishing features of the type. Note | |
3817 that we cannot use pointer hashing here as we may be dealing with | |
3818 two distinct instances of the same type. | |
3819 | |
3820 This function should produce the same hash value for two compatible | |
3821 types according to gimple_types_compatible_p. */ | |
3822 | |
3823 static hashval_t | |
3824 gimple_type_hash (const void *p) | |
3825 { | |
3826 const_tree t = (const_tree) p; | |
3827 VEC(tree, heap) *sccstack = NULL; | |
3828 struct pointer_map_t *sccstate; | |
3829 struct obstack sccstate_obstack; | |
3830 hashval_t val; | |
3831 void **slot; | |
3832 | |
3833 if (type_hash_cache == NULL) | |
3834 type_hash_cache = pointer_map_create (); | |
3835 | |
3836 if ((slot = pointer_map_contains (type_hash_cache, p)) != NULL) | |
3837 return iterative_hash_hashval_t ((hashval_t) (size_t) *slot, 0); | |
3838 | |
3839 /* Perform a DFS walk and pre-hash all reachable types. */ | |
3840 next_dfs_num = 1; | |
3841 sccstate = pointer_map_create (); | |
3842 gcc_obstack_init (&sccstate_obstack); | |
3843 val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0, | |
3844 &sccstack, sccstate, &sccstate_obstack); | |
3845 VEC_free (tree, heap, sccstack); | |
3846 pointer_map_destroy (sccstate); | |
3847 obstack_free (&sccstate_obstack, NULL); | |
3848 | |
3849 return val; | |
3850 } | |
3851 | |
3852 | |
3853 /* Returns nonzero if P1 and P2 are equal. */ | |
3854 | |
3855 static int | |
3856 gimple_type_eq (const void *p1, const void *p2) | |
3857 { | |
3858 const_tree t1 = (const_tree) p1; | |
3859 const_tree t2 = (const_tree) p2; | |
3860 return gimple_types_compatible_p (CONST_CAST_TREE (t1), CONST_CAST_TREE (t2)); | |
3861 } | |
3862 | |
3863 | |
3864 /* Register type T in the global type table gimple_types. | |
3865 If another type T', compatible with T, already existed in | |
3866 gimple_types then return T', otherwise return T. This is used by | |
3867 LTO to merge identical types read from different TUs. */ | |
3868 | |
3869 tree | |
3870 gimple_register_type (tree t) | |
3871 { | |
3872 void **slot; | |
3873 | |
3874 gcc_assert (TYPE_P (t)); | |
3875 | |
3876 /* Always register the main variant first. This is important so we | |
3877 pick up the non-typedef variants as canonical; otherwise we'll end | |
3878 up taking typedef ids for structure tags during comparison. */ | |
3879 if (TYPE_MAIN_VARIANT (t) != t) | |
3880 gimple_register_type (TYPE_MAIN_VARIANT (t)); | |
3881 | |
3882 if (gimple_types == NULL) | |
3883 gimple_types = htab_create (16381, gimple_type_hash, gimple_type_eq, 0); | |
3884 | |
3885 slot = htab_find_slot (gimple_types, t, INSERT); | |
3886 if (*slot | |
3887 && *(tree *)slot != t) | |
3888 { | |
3889 tree new_type = (tree) *((tree *) slot); | |
3890 | |
3891 /* Do not merge types with different addressability. */ | |
3892 gcc_assert (TREE_ADDRESSABLE (t) == TREE_ADDRESSABLE (new_type)); | |
3893 | |
3894 /* If t is not its main variant then make t unreachable from its | |
3895 main variant list. Otherwise we'd queue up a lot of duplicates | |
3896 there. */ | |
3897 if (t != TYPE_MAIN_VARIANT (t)) | |
3898 { | |
3899 tree tem = TYPE_MAIN_VARIANT (t); | |
3900 while (tem && TYPE_NEXT_VARIANT (tem) != t) | |
3901 tem = TYPE_NEXT_VARIANT (tem); | |
3902 if (tem) | |
3903 TYPE_NEXT_VARIANT (tem) = TYPE_NEXT_VARIANT (t); | |
3904 TYPE_NEXT_VARIANT (t) = NULL_TREE; | |
3905 } | |
3906 | |
3907 /* If we are a pointer then remove us from the pointer-to or | |
3908 reference-to chain. Otherwise we'd queue up a lot of duplicates | |
3909 there. */ | |
3910 if (TREE_CODE (t) == POINTER_TYPE) | |
3911 { | |
3912 if (TYPE_POINTER_TO (TREE_TYPE (t)) == t) | |
3913 TYPE_POINTER_TO (TREE_TYPE (t)) = TYPE_NEXT_PTR_TO (t); | |
3914 else | |
3915 { | |
3916 tree tem = TYPE_POINTER_TO (TREE_TYPE (t)); | |
3917 while (tem && TYPE_NEXT_PTR_TO (tem) != t) | |
3918 tem = TYPE_NEXT_PTR_TO (tem); | |
3919 if (tem) | |
3920 TYPE_NEXT_PTR_TO (tem) = TYPE_NEXT_PTR_TO (t); | |
3921 } | |
3922 TYPE_NEXT_PTR_TO (t) = NULL_TREE; | |
3923 } | |
3924 else if (TREE_CODE (t) == REFERENCE_TYPE) | |
3925 { | |
3926 if (TYPE_REFERENCE_TO (TREE_TYPE (t)) == t) | |
3927 TYPE_REFERENCE_TO (TREE_TYPE (t)) = TYPE_NEXT_REF_TO (t); | |
3928 else | |
3929 { | |
3930 tree tem = TYPE_REFERENCE_TO (TREE_TYPE (t)); | |
3931 while (tem && TYPE_NEXT_REF_TO (tem) != t) | |
3932 tem = TYPE_NEXT_REF_TO (tem); | |
3933 if (tem) | |
3934 TYPE_NEXT_REF_TO (tem) = TYPE_NEXT_REF_TO (t); | |
3935 } | |
3936 TYPE_NEXT_REF_TO (t) = NULL_TREE; | |
3937 } | |
3938 | |
3939 t = new_type; | |
3940 } | |
3941 else | |
3942 *slot = (void *) t; | |
3943 | |
3944 return t; | |
3945 } | |
3946 | |
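/* Hypothetical usage sketch, not part of GCC: what an LTO reader
   conceptually does with the registration function above.  Every
   streamed-in type is funneled through gimple_register_type; the
   first structurally unique instance wins, later duplicates are
   replaced by it, and pointer equality of type nodes again implies
   compatibility after merging.  */

static tree
example_canonicalize_streamed_type (tree streamed_type)
{
  /* Returns STREAMED_TYPE itself if no compatible type was registered
     before, or the previously registered equivalent otherwise.  */
  return gimple_register_type (streamed_type);
}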
3947 | |
3948 /* Show statistics on references to the global type table gimple_types. */ | |
3949 | |
3950 void | |
3951 print_gimple_types_stats (void) | |
3952 { | |
3953 if (gimple_types) | |
3954 fprintf (stderr, "GIMPLE type table: size %ld, %ld elements, " | |
3955 "%ld searches, %ld collisions (ratio: %f)\n", | |
3956 (long) htab_size (gimple_types), | |
3957 (long) htab_elements (gimple_types), | |
3958 (long) gimple_types->searches, | |
3959 (long) gimple_types->collisions, | |
3960 htab_collisions (gimple_types)); | |
3961 else | |
3962 fprintf (stderr, "GIMPLE type table is empty\n"); | |
3963 if (gtc_visited) | |
3964 fprintf (stderr, "GIMPLE type comparison table: size %ld, %ld " | |
3965 "elements, %ld searches, %ld collisions (ratio: %f)\n", | |
3966 (long) htab_size (gtc_visited), | |
3967 (long) htab_elements (gtc_visited), | |
3968 (long) gtc_visited->searches, | |
3969 (long) gtc_visited->collisions, | |
3970 htab_collisions (gtc_visited)); | |
3971 else | |
3972 fprintf (stderr, "GIMPLE type comparison table is empty\n"); | |
3973 } | |
3974 | |
3975 /* Free the gimple type hashtables used for LTO type merging. */ | |
3976 | |
3977 void | |
3978 free_gimple_type_tables (void) | |
3979 { | |
3980 /* Last chance to print stats for the tables. */ | |
3981 if (flag_lto_report) | |
3982 print_gimple_types_stats (); | |
3983 | |
3984 if (gimple_types) | |
3985 { | |
3986 htab_delete (gimple_types); | |
3987 gimple_types = NULL; | |
3988 } | |
3989 if (type_hash_cache) | |
3990 { | |
3991 pointer_map_destroy (type_hash_cache); | |
3992 type_hash_cache = NULL; | |
3993 } | |
3994 if (gtc_visited) | |
3995 { | |
3996 htab_delete (gtc_visited); | |
3997 obstack_free (&gtc_ob, NULL); | |
3998 gtc_visited = NULL; | |
3999 } | |
4000 } | |
4001 | |
4002 | |
4003 /* Return a type the same as TYPE except unsigned or | |
4004 signed according to UNSIGNEDP. */ | |
4005 | |
4006 static tree | |
4007 gimple_signed_or_unsigned_type (bool unsignedp, tree type) | |
4008 { | |
4009 tree type1; | |
4010 | |
4011 type1 = TYPE_MAIN_VARIANT (type); | |
4012 if (type1 == signed_char_type_node | |
4013 || type1 == char_type_node | |
4014 || type1 == unsigned_char_type_node) | |
4015 return unsignedp ? unsigned_char_type_node : signed_char_type_node; | |
4016 if (type1 == integer_type_node || type1 == unsigned_type_node) | |
4017 return unsignedp ? unsigned_type_node : integer_type_node; | |
4018 if (type1 == short_integer_type_node || type1 == short_unsigned_type_node) | |
4019 return unsignedp ? short_unsigned_type_node : short_integer_type_node; | |
4020 if (type1 == long_integer_type_node || type1 == long_unsigned_type_node) | |
4021 return unsignedp ? long_unsigned_type_node : long_integer_type_node; | |
4022 if (type1 == long_long_integer_type_node | |
4023 || type1 == long_long_unsigned_type_node) | |
4024 return unsignedp | |
4025 ? long_long_unsigned_type_node | |
4026 : long_long_integer_type_node; | |
4027 #if HOST_BITS_PER_WIDE_INT >= 64 | |
4028 if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node) | |
4029 return unsignedp ? unsigned_intTI_type_node : intTI_type_node; | |
4030 #endif | |
4031 if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node) | |
4032 return unsignedp ? unsigned_intDI_type_node : intDI_type_node; | |
4033 if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node) | |
4034 return unsignedp ? unsigned_intSI_type_node : intSI_type_node; | |
4035 if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node) | |
4036 return unsignedp ? unsigned_intHI_type_node : intHI_type_node; | |
4037 if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node) | |
4038 return unsignedp ? unsigned_intQI_type_node : intQI_type_node; | |
4039 | |
4040 #define GIMPLE_FIXED_TYPES(NAME) \ | |
4041 if (type1 == short_ ## NAME ## _type_node \ | |
4042 || type1 == unsigned_short_ ## NAME ## _type_node) \ | |
4043 return unsignedp ? unsigned_short_ ## NAME ## _type_node \ | |
4044 : short_ ## NAME ## _type_node; \ | |
4045 if (type1 == NAME ## _type_node \ | |
4046 || type1 == unsigned_ ## NAME ## _type_node) \ | |
4047 return unsignedp ? unsigned_ ## NAME ## _type_node \ | |
4048 : NAME ## _type_node; \ | |
4049 if (type1 == long_ ## NAME ## _type_node \ | |
4050 || type1 == unsigned_long_ ## NAME ## _type_node) \ | |
4051 return unsignedp ? unsigned_long_ ## NAME ## _type_node \ | |
4052 : long_ ## NAME ## _type_node; \ | |
4053 if (type1 == long_long_ ## NAME ## _type_node \ | |
4054 || type1 == unsigned_long_long_ ## NAME ## _type_node) \ | |
4055 return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \ | |
4056 : long_long_ ## NAME ## _type_node; | |
4057 | |
4058 #define GIMPLE_FIXED_MODE_TYPES(NAME) \ | |
4059 if (type1 == NAME ## _type_node \ | |
4060 || type1 == u ## NAME ## _type_node) \ | |
4061 return unsignedp ? u ## NAME ## _type_node \ | |
4062 : NAME ## _type_node; | |
4063 | |
4064 #define GIMPLE_FIXED_TYPES_SAT(NAME) \ | |
4065 if (type1 == sat_ ## short_ ## NAME ## _type_node \ | |
4066 || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \ | |
4067 return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \ | |
4068 : sat_ ## short_ ## NAME ## _type_node; \ | |
4069 if (type1 == sat_ ## NAME ## _type_node \ | |
4070 || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \ | |
4071 return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \ | |
4072 : sat_ ## NAME ## _type_node; \ | |
4073 if (type1 == sat_ ## long_ ## NAME ## _type_node \ | |
4074 || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \ | |
4075 return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \ | |
4076 : sat_ ## long_ ## NAME ## _type_node; \ | |
4077 if (type1 == sat_ ## long_long_ ## NAME ## _type_node \ | |
4078 || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \ | |
4079 return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \ | |
4080 : sat_ ## long_long_ ## NAME ## _type_node; | |
4081 | |
4082 #define GIMPLE_FIXED_MODE_TYPES_SAT(NAME) \ | |
4083 if (type1 == sat_ ## NAME ## _type_node \ | |
4084 || type1 == sat_ ## u ## NAME ## _type_node) \ | |
4085 return unsignedp ? sat_ ## u ## NAME ## _type_node \ | |
4086 : sat_ ## NAME ## _type_node; | |
4087 | |
4088 GIMPLE_FIXED_TYPES (fract); | |
4089 GIMPLE_FIXED_TYPES_SAT (fract); | |
4090 GIMPLE_FIXED_TYPES (accum); | |
4091 GIMPLE_FIXED_TYPES_SAT (accum); | |
4092 | |
4093 GIMPLE_FIXED_MODE_TYPES (qq); | |
4094 GIMPLE_FIXED_MODE_TYPES (hq); | |
4095 GIMPLE_FIXED_MODE_TYPES (sq); | |
4096 GIMPLE_FIXED_MODE_TYPES (dq); | |
4097 GIMPLE_FIXED_MODE_TYPES (tq); | |
4098 GIMPLE_FIXED_MODE_TYPES_SAT (qq); | |
4099 GIMPLE_FIXED_MODE_TYPES_SAT (hq); | |
4100 GIMPLE_FIXED_MODE_TYPES_SAT (sq); | |
4101 GIMPLE_FIXED_MODE_TYPES_SAT (dq); | |
4102 GIMPLE_FIXED_MODE_TYPES_SAT (tq); | |
4103 GIMPLE_FIXED_MODE_TYPES (ha); | |
4104 GIMPLE_FIXED_MODE_TYPES (sa); | |
4105 GIMPLE_FIXED_MODE_TYPES (da); | |
4106 GIMPLE_FIXED_MODE_TYPES (ta); | |
4107 GIMPLE_FIXED_MODE_TYPES_SAT (ha); | |
4108 GIMPLE_FIXED_MODE_TYPES_SAT (sa); | |
4109 GIMPLE_FIXED_MODE_TYPES_SAT (da); | |
4110 GIMPLE_FIXED_MODE_TYPES_SAT (ta); | |
4111 | |
4112 /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not | |
4113 the precision; they have precision set to match their range, but | |
4114 may use a wider mode to match an ABI. If we change modes, we may | |
4115 wind up with bad conversions. For INTEGER_TYPEs in C, must check | |
4116 the precision as well, so as to yield correct results for | |
4117 bit-field types. C++ does not have these separate bit-field | |
4118 types, and producing a signed or unsigned variant of an | |
4119 ENUMERAL_TYPE may cause other problems as well. */ | |
4120 if (!INTEGRAL_TYPE_P (type) | |
4121 || TYPE_UNSIGNED (type) == unsignedp) | |
4122 return type; | |
4123 | |
4124 #define TYPE_OK(node) \ | |
4125 (TYPE_MODE (type) == TYPE_MODE (node) \ | |
4126 && TYPE_PRECISION (type) == TYPE_PRECISION (node)) | |
4127 if (TYPE_OK (signed_char_type_node)) | |
4128 return unsignedp ? unsigned_char_type_node : signed_char_type_node; | |
4129 if (TYPE_OK (integer_type_node)) | |
4130 return unsignedp ? unsigned_type_node : integer_type_node; | |
4131 if (TYPE_OK (short_integer_type_node)) | |
4132 return unsignedp ? short_unsigned_type_node : short_integer_type_node; | |
4133 if (TYPE_OK (long_integer_type_node)) | |
4134 return unsignedp ? long_unsigned_type_node : long_integer_type_node; | |
4135 if (TYPE_OK (long_long_integer_type_node)) | |
4136 return (unsignedp | |
4137 ? long_long_unsigned_type_node | |
4138 : long_long_integer_type_node); | |
4139 | |
4140 #if HOST_BITS_PER_WIDE_INT >= 64 | |
4141 if (TYPE_OK (intTI_type_node)) | |
4142 return unsignedp ? unsigned_intTI_type_node : intTI_type_node; | |
4143 #endif | |
4144 if (TYPE_OK (intDI_type_node)) | |
4145 return unsignedp ? unsigned_intDI_type_node : intDI_type_node; | |
4146 if (TYPE_OK (intSI_type_node)) | |
4147 return unsignedp ? unsigned_intSI_type_node : intSI_type_node; | |
4148 if (TYPE_OK (intHI_type_node)) | |
4149 return unsignedp ? unsigned_intHI_type_node : intHI_type_node; | |
4150 if (TYPE_OK (intQI_type_node)) | |
4151 return unsignedp ? unsigned_intQI_type_node : intQI_type_node; | |
4152 | |
4153 #undef GIMPLE_FIXED_TYPES | |
4154 #undef GIMPLE_FIXED_MODE_TYPES | |
4155 #undef GIMPLE_FIXED_TYPES_SAT | |
4156 #undef GIMPLE_FIXED_MODE_TYPES_SAT | |
4157 #undef TYPE_OK | |
4158 | |
4159 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp); | |
4160 } | |
4161 | |
4162 | |
4163 /* Return an unsigned type the same as TYPE in other respects. */ | |
4164 | |
4165 tree | |
4166 gimple_unsigned_type (tree type) | |
4167 { | |
4168 return gimple_signed_or_unsigned_type (true, type); | |
4169 } | |
4170 | |
4171 | |
4172 /* Return a signed type the same as TYPE in other respects. */ | |
4173 | |
4174 tree | |
4175 gimple_signed_type (tree type) | |
4176 { | |
4177 return gimple_signed_or_unsigned_type (false, type); | |
4178 } | |
4179 | |
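/* Hypothetical usage sketch, not part of GCC: the two wrappers above
   are typically used to flip the signedness of an integral type while
   preserving its mode and precision, e.g. when rewriting arithmetic
   into its unsigned form to sidestep undefined signed overflow.  */

static tree
example_flip_signedness (tree type)
{
  return TYPE_UNSIGNED (type)
	 ? gimple_signed_type (type)
	 : gimple_unsigned_type (type);
}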
4180 | |
4181 /* Return the type-based alias set for T, which may be an expression | |
4182 or a type. Return -1 if we don't do anything special. */ | |
4183 | |
4184 alias_set_type | |
4185 gimple_get_alias_set (tree t) | |
4186 { | |
4187 tree u; | |
4188 | |
4189 /* Permit type-punning when accessing a union, provided the access | |
4190 is directly through the union. For example, this code does not | |
4191 permit taking the address of a union member and then storing | |
4192 through it. Even the type-punning allowed here is a GCC | |
4193 extension, albeit a common and useful one; the C standard says | |
4194 that such accesses have implementation-defined behavior. */ | |
4195 for (u = t; | |
4196 TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF; | |
4197 u = TREE_OPERAND (u, 0)) | |
4198 if (TREE_CODE (u) == COMPONENT_REF | |
4199 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE) | |
4200 return 0; | |
4201 | |
4202 /* That's all the expressions we handle specially. */ | |
4203 if (!TYPE_P (t)) | |
4204 return -1; | |
4205 | |
4206 /* For convenience, follow the C standard when dealing with | |
4207 character types. Any object may be accessed via an lvalue that | |
4208 has character type. */ | |
4209 if (t == char_type_node | |
4210 || t == signed_char_type_node | |
4211 || t == unsigned_char_type_node) | |
4212 return 0; | |
4213 | |
4214 /* Allow aliasing between signed and unsigned variants of the same | |
4215 type. We treat the signed variant as canonical. */ | |
4216 if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t)) | |
4217 { | |
4218 tree t1 = gimple_signed_type (t); | |
4219 | |
4220 /* t1 == t can happen for boolean nodes which are always unsigned. */ | |
4221 if (t1 != t) | |
4222 return get_alias_set (t1); | |
4223 } | |
4224 else if (POINTER_TYPE_P (t)) | |
4225 { | |
4226 /* From the common C and C++ langhook implementation: | |
4227 | |
4228 Unfortunately, there is no canonical form of a pointer type. | |
4229 In particular, if we have `typedef int I', then `int *', and | |
4230 `I *' are different types. So, we have to pick a canonical | |
4231 representative. We do this below. | |
4232 | |
4233 Technically, this approach is actually more conservative than | |
4234 it needs to be. In particular, `const int *' and `int *' | |
4235 should be in different alias sets, according to the C and C++ | |
4236 standard, since their types are not the same, and so, | |
4237 technically, an `int **' and `const int **' cannot point at | |
4238 the same thing. | |
4239 | |
4240 But, the standard is wrong. In particular, this code is | |
4241 legal C++: | |
4242 | |
4243 int *ip; | |
4244 int **ipp = &ip; | |
4245 const int* const* cipp = ipp; | |
4246 And, it doesn't make sense for that to be legal unless you | |
4247 can dereference IPP and CIPP. So, we ignore cv-qualifiers on | |
4248 the pointed-to types. This issue has been reported to the | |
4249 C++ committee. */ | |
4250 | |
4251 /* In addition to the above canonicalization issue with LTO | |
4252 we should also canonicalize `T (*)[]' to `T *' avoiding | |
4253 alias issues with pointer-to element types and pointer-to | |
4254 array types. | |
4255 | |
4256 Likewise we need to deal with the situation of incomplete | |
4257 pointed-to types and make `*(struct X **)&a' and | |
4258 `*(struct X {} **)&a' alias. Otherwise we will have to | |
4259 guarantee that all pointer-to incomplete type variants | |
4260 will be replaced by pointer-to complete type variants if | |
4261 they are available. | |
4262 | |
4263 With LTO the convenient situation of using `void *' to | |
4264 access and store any pointer type will also become | |
4265 more apparent (and `void *' is just another pointer-to | |
4266 incomplete type). Assigning alias-set zero to `void *' | |
4267 and all pointer-to incomplete types is a not appealing | |
4268 solution. Assigning an effective alias-set zero only | |
4269 affecting pointers might be - by recording proper subset | |
4270 relationships of all pointer alias-sets. | |
4271 | |
4272 Pointer-to function types are another grey area which | |
4273 needs caution. Globbing them all into one alias-set | |
4274 or the above effective zero set would work. */ | |
4275 | |
4276 /* For now just assign the same alias-set to all pointers. | |
4277 That's simple and avoids all the above problems. */ | |
4278 if (t != ptr_type_node) | |
4279 return get_alias_set (ptr_type_node); | |
4280 } | |
4281 | |
4282 return -1; | |
4283 } | |
4284 | |
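/* Hypothetical illustration, not GCC code, of the union rule
   implemented above: accesses made directly through a union get
   alias set 0, so the store and the load below may not be reordered
   even though int and float do not normally alias.  */

union example_pun { int as_int; float as_float; };

static float
example_type_pun (int bits)
{
  union example_pun u;
  u.as_int = bits;    /* direct union access: alias set 0 */
  return u.as_float;  /* likewise, so it observes the store above */
}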
4285 | |
4286 /* Data structure used to count the number of dereferences to PTR | |
4287 inside an expression. */ | |
4288 struct count_ptr_d | |
4289 { | |
4290 tree ptr; | |
4291 unsigned num_stores; | |
4292 unsigned num_loads; | |
4293 }; | |
4294 | |
4295 /* Helper for count_uses_and_derefs. Called by walk_tree to look for | |
4296 (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA. */ | |
4297 | |
4298 static tree | |
4299 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data) | |
4300 { | |
4301 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data; | |
4302 struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info; | |
4303 | |
4304 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld, | |
4305 pointer 'ptr' is *not* dereferenced; it is simply used to compute | |
4306 the address of 'fld' as 'ptr + offsetof(fld)'. */ | |
4307 if (TREE_CODE (*tp) == ADDR_EXPR) | |
4308 { | |
4309 *walk_subtrees = 0; | |
4310 return NULL_TREE; | |
4311 } | |
4312 | |
4313 if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr) | |
4314 { | |
4315 if (wi_p->is_lhs) | |
4316 count_p->num_stores++; | |
4317 else | |
4318 count_p->num_loads++; | |
4319 } | |
4320 | |
4321 return NULL_TREE; | |
4322 } | |
4323 | |
4324 /* Count the number of direct and indirect uses for pointer PTR in | |
4325 statement STMT. The number of direct uses is stored in | |
4326 *NUM_USES_P. Indirect references are counted separately depending | |
4327 on whether they are store or load operations. The counts are | |
4328 stored in *NUM_STORES_P and *NUM_LOADS_P. */ | |
4329 | |
4330 void | |
4331 count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p, | |
4332 unsigned *num_loads_p, unsigned *num_stores_p) | |
4333 { | |
4334 ssa_op_iter i; | |
4335 tree use; | |
4336 | |
4337 *num_uses_p = 0; | |
4338 *num_loads_p = 0; | |
4339 *num_stores_p = 0; | |
4340 | |
4341 /* Find out the total number of uses of PTR in STMT. */ | |
4342 FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE) | |
4343 if (use == ptr) | |
4344 (*num_uses_p)++; | |
4345 | |
4346 /* Now count the number of indirect references to PTR. This is | |
4347 truly awful, but we don't have much choice. There are no parent | |
4348 pointers inside INDIRECT_REFs, so an expression like | |
4349 '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to | |
4350 find all the indirect and direct uses of x_1 inside. The only | |
4351 shortcut we can take is the fact that GIMPLE only allows | |
4352 INDIRECT_REFs inside the expressions below. */ | |
4353 if (is_gimple_assign (stmt) | |
4354 || gimple_code (stmt) == GIMPLE_RETURN | |
4355 || gimple_code (stmt) == GIMPLE_ASM | |
4356 || is_gimple_call (stmt)) | |
4357 { | |
4358 struct walk_stmt_info wi; | |
4359 struct count_ptr_d count; | |
4360 | |
4361 count.ptr = ptr; | |
4362 count.num_stores = 0; | |
4363 count.num_loads = 0; | |
4364 | |
4365 memset (&wi, 0, sizeof (wi)); | |
4366 wi.info = &count; | |
4367 walk_gimple_op (stmt, count_ptr_derefs, &wi); | |
4368 | |
4369 *num_stores_p = count.num_stores; | |
4370 *num_loads_p = count.num_loads; | |
4371 } | |
4372 | |
4373 gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p); | |
4374 } | |
4375 | |
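/* Worked example (informal, not GCC code): for the statement
   *x_1 = foo (x_1, *x_1) the counting above yields *NUM_USES_P == 3
   (each appearance of x_1), *NUM_LOADS_P == 1 (the dereference on the
   right-hand side) and *NUM_STORES_P == 1 (the dereference on the
   left-hand side), consistent with the final assertion that uses are
   at least loads plus stores.  */
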
4376 /* From a tree operand OP return the base of a load or store operation | |
4377 or NULL_TREE if OP is not a load or a store. */ | |
4378 | |
4379 static tree | |
4380 get_base_loadstore (tree op) | |
4381 { | |
4382 while (handled_component_p (op)) | |
4383 op = TREE_OPERAND (op, 0); | |
4384 if (DECL_P (op) | |
4385 || INDIRECT_REF_P (op) | |
4386 || TREE_CODE (op) == TARGET_MEM_REF) | |
4387 return op; | |
4388 return NULL_TREE; | |
4389 } | |
4390 | |
4391 /* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and | |
4392 VISIT_ADDR if non-NULL on loads, stores and address-taken operands | |
4393 passing the STMT, the base of the operand and DATA to it. The base | |
4394 will be either a decl, an indirect reference (including TARGET_MEM_REF) | |
4395 or the argument of an address expression. | |
4396 Returns the results of these callbacks or'ed. */ | |
4397 | |
4398 bool | |
4399 walk_stmt_load_store_addr_ops (gimple stmt, void *data, | |
4400 bool (*visit_load)(gimple, tree, void *), | |
4401 bool (*visit_store)(gimple, tree, void *), | |
4402 bool (*visit_addr)(gimple, tree, void *)) | |
4403 { | |
4404 bool ret = false; | |
4405 unsigned i; | |
4406 if (gimple_assign_single_p (stmt)) | |
4407 { | |
4408 tree lhs, rhs; | |
4409 if (visit_store) | |
4410 { | |
4411 lhs = get_base_loadstore (gimple_assign_lhs (stmt)); | |
4412 if (lhs) | |
4413 ret |= visit_store (stmt, lhs, data); | |
4414 } | |
4415 rhs = gimple_assign_rhs1 (stmt); | |
4416 while (handled_component_p (rhs)) | |
4417 rhs = TREE_OPERAND (rhs, 0); | |
4418 if (visit_addr) | |
4419 { | |
4420 if (TREE_CODE (rhs) == ADDR_EXPR) | |
4421 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data); | |
4422 else if (TREE_CODE (rhs) == TARGET_MEM_REF | |
4423 && TMR_BASE (rhs) != NULL_TREE | |
4424 && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR) | |
4425 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data); | |
4426 else if (TREE_CODE (rhs) == OBJ_TYPE_REF | |
4427 && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR) | |
4428 ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs), | |
4429 0), data); | |
4430 lhs = gimple_assign_lhs (stmt); | |
4431 if (TREE_CODE (lhs) == TARGET_MEM_REF | |
4432 && TMR_BASE (lhs) != NULL_TREE | |
4433 && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR) | |
4434 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data); | |
4435 } | |
4436 if (visit_load) | |
4437 { | |
4438 rhs = get_base_loadstore (rhs); | |
4439 if (rhs) | |
4440 ret |= visit_load (stmt, rhs, data); | |
4441 } | |
4442 } | |
4443 else if (visit_addr | |
4444 && (is_gimple_assign (stmt) | |
4445 || gimple_code (stmt) == GIMPLE_COND)) | |
4446 { | |
4447 for (i = 0; i < gimple_num_ops (stmt); ++i) | |
4448 if (gimple_op (stmt, i) | |
4449 && TREE_CODE (gimple_op (stmt, i)) == ADDR_EXPR) | |
4450 ret |= visit_addr (stmt, TREE_OPERAND (gimple_op (stmt, i), 0), data); | |
4451 } | |
4452 else if (is_gimple_call (stmt)) | |
4453 { | |
4454 if (visit_store) | |
4455 { | |
4456 tree lhs = gimple_call_lhs (stmt); | |
4457 if (lhs) | |
4458 { | |
4459 lhs = get_base_loadstore (lhs); | |
4460 if (lhs) | |
4461 ret |= visit_store (stmt, lhs, data); | |
4462 } | |
4463 } | |
4464 if (visit_load || visit_addr) | |
4465 for (i = 0; i < gimple_call_num_args (stmt); ++i) | |
4466 { | |
4467 tree rhs = gimple_call_arg (stmt, i); | |
4468 if (visit_addr | |
4469 && TREE_CODE (rhs) == ADDR_EXPR) | |
4470 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data); | |
4471 else if (visit_load) | |
4472 { | |
4473 rhs = get_base_loadstore (rhs); | |
4474 if (rhs) | |
4475 ret |= visit_load (stmt, rhs, data); | |
4476 } | |
4477 } | |
4478 if (visit_addr | |
4479 && gimple_call_chain (stmt) | |
4480 && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR) | |
4481 ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0), | |
4482 data); | |
4483 if (visit_addr | |
4484 && gimple_call_return_slot_opt_p (stmt) | |
4485 && gimple_call_lhs (stmt) != NULL_TREE | |
4486 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt)))) | |
4487 ret |= visit_addr (stmt, gimple_call_lhs (stmt), data); | |
4488 } | |
4489 else if (gimple_code (stmt) == GIMPLE_ASM) | |
4490 { | |
4491 unsigned noutputs; | |
4492 const char *constraint; | |
4493 const char **oconstraints; | |
4494 bool allows_mem, allows_reg, is_inout; | |
4495 noutputs = gimple_asm_noutputs (stmt); | |
4496 oconstraints = XALLOCAVEC (const char *, noutputs); | |
4497 if (visit_store || visit_addr) | |
4498 for (i = 0; i < gimple_asm_noutputs (stmt); ++i) | |
4499 { | |
4500 tree link = gimple_asm_output_op (stmt, i); | |
4501 tree op = get_base_loadstore (TREE_VALUE (link)); | |
4502 if (op && visit_store) | |
4503 ret |= visit_store (stmt, op, data); | |
4504 if (visit_addr) | |
4505 { | |
4506 constraint = TREE_STRING_POINTER | |
4507 (TREE_VALUE (TREE_PURPOSE (link))); | |
4508 oconstraints[i] = constraint; | |
4509 parse_output_constraint (&constraint, i, 0, 0, &allows_mem, | |
4510 &allows_reg, &is_inout); | |
4511 if (op && !allows_reg && allows_mem) | |
4512 ret |= visit_addr (stmt, op, data); | |
4513 } | |
4514 } | |
4515 if (visit_load || visit_addr) | |
4516 for (i = 0; i < gimple_asm_ninputs (stmt); ++i) | |
4517 { | |
4518 tree link = gimple_asm_input_op (stmt, i); | |
4519 tree op = TREE_VALUE (link); | |
4520 if (visit_addr | |
4521 && TREE_CODE (op) == ADDR_EXPR) | |
4522 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data); | |
4523 else if (visit_load || visit_addr) | |
4524 { | |
4525 op = get_base_loadstore (op); | |
4526 if (op) | |
4527 { | |
4528 if (visit_load) | |
4529 ret |= visit_load (stmt, op, data); | |
4530 if (visit_addr) | |
4531 { | |
4532 constraint = TREE_STRING_POINTER | |
4533 (TREE_VALUE (TREE_PURPOSE (link))); | |
4534 parse_input_constraint (&constraint, 0, 0, noutputs, | |
4535 0, oconstraints, | |
4536 &allows_mem, &allows_reg); | |
4537 if (!allows_reg && allows_mem) | |
4538 ret |= visit_addr (stmt, op, data); | |
4539 } | |
4540 } | |
4541 } | |
4542 } | |
4543 } | |
4544 else if (gimple_code (stmt) == GIMPLE_RETURN) | |
4545 { | |
4546 tree op = gimple_return_retval (stmt); | |
4547 if (op) | |
4548 { | |
4549 if (visit_addr | |
4550 && TREE_CODE (op) == ADDR_EXPR) | |
4551 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data); | |
4552 else if (visit_load) | |
4553 { | |
4554 op = get_base_loadstore (op); | |
4555 if (op) | |
4556 ret |= visit_load (stmt, op, data); | |
4557 } | |
4558 } | |
4559 } | |
4560 else if (visit_addr | |
4561 && gimple_code (stmt) == GIMPLE_PHI) | |
4562 { | |
4563 for (i = 0; i < gimple_phi_num_args (stmt); ++i) | |
4564 { | |
4565 tree op = PHI_ARG_DEF (stmt, i); | |
4566 if (TREE_CODE (op) == ADDR_EXPR) | |
4567 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data); | |
4568 } | |
4569 } | |
4570 | |
4571 return ret; | |
4572 } | |
4573 | |
4574 /* Like walk_stmt_load_store_addr_ops but with NULL visit_addr. IPA-CP | |
4575 should make a faster clone for this case. */ | |
4576 | |
4577 bool | |
4578 walk_stmt_load_store_ops (gimple stmt, void *data, | |
4579 bool (*visit_load)(gimple, tree, void *), | |
4580 bool (*visit_store)(gimple, tree, void *)) | |
4581 { | |
4582 return walk_stmt_load_store_addr_ops (stmt, data, | |
4583 visit_load, visit_store, NULL); | |
4584 } | |
4585 | |
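/* Hypothetical usage sketch, not part of GCC: counting the memory
   loads of a statement with the walker above.  The callback matches
   the visit_load signature; DATA carries a caller-owned counter.  */

static bool
example_count_load (gimple stmt ATTRIBUTE_UNUSED, tree base ATTRIBUTE_UNUSED,
		    void *data)
{
  ++*(unsigned *) data;
  return false;  /* false: nothing noteworthy, keep the result clean */
}

static unsigned
example_num_loads (gimple stmt)
{
  unsigned n = 0;
  walk_stmt_load_store_ops (stmt, &n, example_count_load, NULL);
  return n;
}
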
4586 /* Helper for gimple_ior_addresses_taken_1. */ | |
4587 | |
4588 static bool | |
4589 gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED, | |
4590 tree addr, void *data) | |
4591 { | |
4592 bitmap addresses_taken = (bitmap)data; | |
4593 while (handled_component_p (addr)) | |
4594 addr = TREE_OPERAND (addr, 0); | |
4595 if (DECL_P (addr)) | |
4596 { | |
4597 bitmap_set_bit (addresses_taken, DECL_UID (addr)); | |
4598 return true; | |
4599 } | |
4600 return false; | |
4601 } | |
4602 | |
4603 /* Set the bit for the uid of all decls that have their address taken | |
4604 in STMT in the ADDRESSES_TAKEN bitmap. Returns true if there | |
4605 were any in this stmt. */ | |
4606 | |
4607 bool | |
4608 gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt) | |
4609 { | |
4610 return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL, | |
4611 gimple_ior_addresses_taken_1); | |
4612 } | |
4613 | |
4614 | |
4615 /* Return a printable name for symbol DECL. */ | |
4616 | |
4617 const char * | |
4618 gimple_decl_printable_name (tree decl, int verbosity) | |
4619 { | |
4620 gcc_assert (decl && DECL_NAME (decl)); | |
4621 | |
4622 if (DECL_ASSEMBLER_NAME_SET_P (decl)) | |
4623 { | |
4624 const char *str, *mangled_str; | |
4625 int dmgl_opts = DMGL_NO_OPTS; | |
4626 | |
4627 if (verbosity >= 2) | |
4628 { | |
4629 dmgl_opts = DMGL_VERBOSE | |
4630 | DMGL_ANSI | |
4631 | DMGL_GNU_V3 | |
4632 | DMGL_RET_POSTFIX; | |
4633 if (TREE_CODE (decl) == FUNCTION_DECL) | |
4634 dmgl_opts |= DMGL_PARAMS; | |
4635 } | |
4636 | |
4637 mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)); | |
4638 str = cplus_demangle_v3 (mangled_str, dmgl_opts); | |
4639 return (str) ? str : mangled_str; | |
4640 } | |
4641 | |
4642 return IDENTIFIER_POINTER (DECL_NAME (decl)); | |
4643 } | |
4644 | |
4645 | |
4646 /* Fold an OBJ_TYPE_REF expression to the address of a function. | |
4647 KNOWN_TYPE carries the true type of OBJ_TYPE_REF_OBJECT(REF). Adapted | |
4648 from cp_fold_obj_type_ref, but it tolerates types with no binfo | |
4649 data. */ | |
4650 | |
4651 tree | |
4652 gimple_fold_obj_type_ref (tree ref, tree known_type) | |
4653 { | |
4654 HOST_WIDE_INT index; | |
4655 HOST_WIDE_INT i; | |
4656 tree v; | |
4657 tree fndecl; | |
4658 | |
4659 if (TYPE_BINFO (known_type) == NULL_TREE) | |
4660 return NULL_TREE; | |
4661 | |
4662 v = BINFO_VIRTUALS (TYPE_BINFO (known_type)); | |
4663 index = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1); | |
4664 i = 0; | |
4665 while (i != index) | |
4666 { | |
4667 i += (TARGET_VTABLE_USES_DESCRIPTORS | |
4668 ? TARGET_VTABLE_USES_DESCRIPTORS : 1); | |
4669 v = TREE_CHAIN (v); | |
4670 } | |
4671 | |
4672 fndecl = TREE_VALUE (v); | |
4673 | |
4674 #ifdef ENABLE_CHECKING | |
4675 gcc_assert (tree_int_cst_equal (OBJ_TYPE_REF_TOKEN (ref), | |
4676 DECL_VINDEX (fndecl))); | |
4677 #endif | |
4678 | |
4679 cgraph_node (fndecl)->local.vtable_method = true; | |
4680 | |
4681 return build_fold_addr_expr (fndecl); | |
4682 } | |
4683 | |
4684 #include "gt-gimple.h" |