/* Support for fully folding sub-trees of an expression for C compiler.
   Copyright (C) 1992-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20 #include "config.h"
|
|
21 #include "system.h"
|
|
22 #include "coretypes.h"
|
|
23 #include "target.h"
|
|
24 #include "function.h"
|
|
25 #include "bitmap.h"
|
|
26 #include "c-tree.h"
|
|
27 #include "intl.h"
|
|
28 #include "gimplify.h"
|
|
29
|
|
30 static tree c_fully_fold_internal (tree expr, bool, bool *, bool *, bool);
|
|
31
|
|
32 /* If DISABLE is true, stop issuing warnings. This is used when
|
|
33 parsing code that we know will not be executed. This function may
|
|
34 be called multiple times, and works as a stack. */
|
|
35
|
|
36 static void
|
|
37 c_disable_warnings (bool disable)
|
|
38 {
|
|
39 if (disable)
|
|
40 {
|
|
41 ++c_inhibit_evaluation_warnings;
|
|
42 fold_defer_overflow_warnings ();
|
|
43 }
|
|
44 }
|
|
45
|
|
46 /* If ENABLE is true, reenable issuing warnings. */
|
|
47
|
|
48 static void
|
|
49 c_enable_warnings (bool enable)
|
|
50 {
|
|
51 if (enable)
|
|
52 {
|
|
53 --c_inhibit_evaluation_warnings;
|
|
54 fold_undefer_and_ignore_overflow_warnings ();
|
|
55 }
|
|
56 }
|
|
57
|
|
58 /* Fully fold EXPR, an expression that was not folded (beyond integer
|
|
59 constant expressions and null pointer constants) when being built
|
|
60 up. If IN_INIT, this is in a static initializer and certain
|
|
61 changes are made to the folding done. Clear *MAYBE_CONST if
|
|
62 MAYBE_CONST is not NULL and EXPR is definitely not a constant
|
|
63 expression because it contains an evaluated operator (in C99) or an
|
|
64 operator outside of sizeof returning an integer constant (in C90)
|
|
65 not permitted in constant expressions, or because it contains an
|
|
66 evaluated arithmetic overflow. (*MAYBE_CONST should typically be
|
|
67 set to true by callers before calling this function.) Return the
|
|
68 folded expression. Function arguments have already been folded
|
|
69 before calling this function, as have the contents of SAVE_EXPR,
|
|
70 TARGET_EXPR, BIND_EXPR, VA_ARG_EXPR, OBJ_TYPE_REF and
|
|
71 C_MAYBE_CONST_EXPR. */
|
|
72
|
|
73 tree
|
|
74 c_fully_fold (tree expr, bool in_init, bool *maybe_const)
|
|
75 {
|
|
76 tree ret;
|
|
77 tree eptype = NULL_TREE;
|
|
78 bool dummy = true;
|
|
79 bool maybe_const_itself = true;
|
|
80 location_t loc = EXPR_LOCATION (expr);
|
|
81
|
|
82 if (!maybe_const)
|
|
83 maybe_const = &dummy;
|
|
84 if (TREE_CODE (expr) == EXCESS_PRECISION_EXPR)
|
|
85 {
|
|
86 eptype = TREE_TYPE (expr);
|
|
87 expr = TREE_OPERAND (expr, 0);
|
|
88 }
|
|
89 ret = c_fully_fold_internal (expr, in_init, maybe_const,
|
|
90 &maybe_const_itself, false);
|
|
91 if (eptype)
|
|
92 ret = fold_convert_loc (loc, eptype, ret);
|
|
93 *maybe_const &= maybe_const_itself;
|
|
94 return ret;
|
|
95 }
|
|
96
|
|
/* Internal helper for c_fully_fold.  EXPR and IN_INIT are as for
   c_fully_fold.  *MAYBE_CONST_OPERANDS is cleared because of operands
   not permitted, while *MAYBE_CONST_ITSELF is cleared because of
   arithmetic overflow (for C90, *MAYBE_CONST_OPERANDS is carried from
   both evaluated and unevaluated subexpressions while
   *MAYBE_CONST_ITSELF is carried from only evaluated
   subexpressions).  FOR_INT_CONST indicates if EXPR is an expression
   with integer constant operands, and if any of the operands doesn't
   get folded to an integer constant, don't fold the expression itself.  */

static tree
c_fully_fold_internal (tree expr, bool in_init, bool *maybe_const_operands,
		       bool *maybe_const_itself, bool for_int_const)
{
  tree ret = expr;
  enum tree_code code = TREE_CODE (expr);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  location_t loc = EXPR_LOCATION (expr);
  tree op0, op1, op2, op3;
  tree orig_op0, orig_op1, orig_op2;
  /* Per-operand constancy tracking for codes that do not evaluate all
     their operands (TRUTH_ANDIF_EXPR, TRUTH_ORIF_EXPR, COND_EXPR).  */
  bool op0_const = true, op1_const = true, op2_const = true;
  bool op0_const_self = true, op1_const_self = true, op2_const_self = true;
  /* Remember whether warnings were suppressed on EXPR so the flag can
     be restored on the folded replacement at "out".  */
  bool nowarning = TREE_NO_WARNING (expr);
  bool unused_p;
  source_range old_range;

  /* Constants, declarations, statements, errors, and anything else not
     counted as an expression cannot usefully be folded further at this
     point.  */
  if (!IS_EXPR_CODE_CLASS (kind)
      || kind == tcc_statement)
    return expr;

  /* Save the source range so it can be reapplied if folding builds a
     replacement tree (see the "out" label).  */
  if (IS_EXPR_CODE_CLASS (kind))
    old_range = EXPR_LOCATION_RANGE (expr);

  /* Operands of variable-length expressions (function calls) have
     already been folded, as have __builtin_* function calls, and such
     expressions cannot occur in constant expressions.  */
  if (kind == tcc_vl_exp)
    {
      *maybe_const_operands = false;
      ret = fold (expr);
      goto out;
    }

  if (code == C_MAYBE_CONST_EXPR)
    {
      tree pre = C_MAYBE_CONST_EXPR_PRE (expr);
      tree inner = C_MAYBE_CONST_EXPR_EXPR (expr);
      if (C_MAYBE_CONST_EXPR_NON_CONST (expr))
	*maybe_const_operands = false;
      if (C_MAYBE_CONST_EXPR_INT_OPERANDS (expr))
	{
	  *maybe_const_itself = false;
	  /* Fold the inner expression with FOR_INT_CONST true: if its
	     operands do not fold to integer constants, it is left
	     unfolded.  */
	  inner = c_fully_fold_internal (inner, in_init, maybe_const_operands,
					 maybe_const_itself, true);
	}
      if (pre && !in_init)
	ret = build2 (COMPOUND_EXPR, TREE_TYPE (expr), pre, inner);
      else
	ret = inner;
      goto out;
    }

  /* Assignment, increment, decrement, function call and comma
     operators, and statement expressions, cannot occur in constant
     expressions if evaluated / outside of sizeof.  (Function calls
     were handled above, though VA_ARG_EXPR is treated like a function
     call here, and statement expressions are handled through
     C_MAYBE_CONST_EXPR to avoid folding inside them.)  */
  switch (code)
    {
    case MODIFY_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case COMPOUND_EXPR:
      *maybe_const_operands = false;
      break;

    case VA_ARG_EXPR:
    case TARGET_EXPR:
    case BIND_EXPR:
    case OBJ_TYPE_REF:
      /* These have already had their contents folded; just fold the
	 node itself and stop.  */
      *maybe_const_operands = false;
      ret = fold (expr);
      goto out;

    default:
      break;
    }

  /* Fold individual tree codes as appropriate.  */
  switch (code)
    {
    case COMPOUND_LITERAL_EXPR:
      /* Any non-constancy will have been marked in a containing
	 C_MAYBE_CONST_EXPR; there is no more folding to do here.  */
      goto out;

    case COMPONENT_REF:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      op1 = TREE_OPERAND (expr, 1);
      op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op0);
      if (op0 != orig_op0)
	ret = build3 (COMPONENT_REF, TREE_TYPE (expr), op0, op1, op2);
      if (ret != expr)
	{
	  /* Preserve qualifier-derived flags on the rebuilt node.  */
	  TREE_READONLY (ret) = TREE_READONLY (expr);
	  TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
	}
      goto out;

    case ARRAY_REF:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op2 = TREE_OPERAND (expr, 2);
      op3 = TREE_OPERAND (expr, 3);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op0);
      op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op1);
      op1 = decl_constant_value_for_optimization (op1);
      if (op0 != orig_op0 || op1 != orig_op1)
	ret = build4 (ARRAY_REF, TREE_TYPE (expr), op0, op1, op2, op3);
      if (ret != expr)
	{
	  TREE_READONLY (ret) = TREE_READONLY (expr);
	  TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (expr);
	  TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
	}
      ret = fold (ret);
      goto out;

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case POINTER_PLUS_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case COMPLEX_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      /* Binary operations evaluating both arguments (increment and
	 decrement are binary internally in GCC).  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op0);
      /* Do not substitute a variable's known constant value on the
	 left-hand side of an assignment or inc/dec target.  */
      if (code != MODIFY_EXPR
	  && code != PREDECREMENT_EXPR
	  && code != PREINCREMENT_EXPR
	  && code != POSTDECREMENT_EXPR
	  && code != POSTINCREMENT_EXPR)
	op0 = decl_constant_value_for_optimization (op0);
      /* The RHS of a MODIFY_EXPR was fully folded when building that
	 expression for the sake of conversion warnings.  */
      if (code != MODIFY_EXPR)
	op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
				     maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op1);
      op1 = decl_constant_value_for_optimization (op1);

      if (for_int_const && (TREE_CODE (op0) != INTEGER_CST
			    || TREE_CODE (op1) != INTEGER_CST))
	goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || in_init)
	ret = in_init
	  ? fold_build2_initializer_loc (loc, code, TREE_TYPE (expr), op0, op1)
	  : fold_build2_loc (loc, code, TREE_TYPE (expr), op0, op1);
      else
	ret = fold (expr);
      /* Warn only about overflow newly introduced by this fold, not
	 overflow already present in an operand.  */
      if (TREE_OVERFLOW_P (ret)
	  && !TREE_OVERFLOW_P (op0)
	  && !TREE_OVERFLOW_P (op1))
	overflow_warning (EXPR_LOC_OR_LOC (expr, input_location), ret, expr);
      /* Shift warnings are issued here only when the operand became a
	 constant through folding; constant operands were diagnosed
	 when the expression was built.  */
      if (code == LSHIFT_EXPR
	  && TREE_CODE (orig_op0) != INTEGER_CST
	  && TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
	  && TREE_CODE (op0) == INTEGER_CST
	  && c_inhibit_evaluation_warnings == 0
	  && tree_int_cst_sgn (op0) < 0)
	warning_at (loc, OPT_Wshift_negative_value,
		    "left shift of negative value");
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
	  && TREE_CODE (orig_op1) != INTEGER_CST
	  && TREE_CODE (op1) == INTEGER_CST
	  && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE
	  && c_inhibit_evaluation_warnings == 0)
	{
	  if (tree_int_cst_sgn (op1) < 0)
	    warning_at (loc, OPT_Wshift_count_negative,
			(code == LSHIFT_EXPR
			 ? G_("left shift count is negative")
			 : G_("right shift count is negative")));
	  else if ((TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
		    || TREE_CODE (TREE_TYPE (orig_op0)) == FIXED_POINT_TYPE)
		   && compare_tree_int (op1,
					TYPE_PRECISION (TREE_TYPE (orig_op0)))
		   >= 0)
	    warning_at (loc, OPT_Wshift_count_overflow,
			(code == LSHIFT_EXPR
			 ? G_("left shift count >= width of type")
			 : G_("right shift count >= width of type")));
	  else if (TREE_CODE (TREE_TYPE (orig_op0)) == VECTOR_TYPE
		   && compare_tree_int (op1,
					TYPE_PRECISION (TREE_TYPE (TREE_TYPE (orig_op0))))
		   >= 0)
	    warning_at (loc, OPT_Wshift_count_overflow,
			code == LSHIFT_EXPR
			? G_("left shift count >= width of vector element")
			: G_("right shift count >= width of vector element"));
	}
      if (code == LSHIFT_EXPR
	  /* If either OP0 has been folded to INTEGER_CST...  */
	  && ((TREE_CODE (orig_op0) != INTEGER_CST
	       && TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
	       && TREE_CODE (op0) == INTEGER_CST)
	      /* ...or if OP1 has been folded to INTEGER_CST...  */
	      || (TREE_CODE (orig_op1) != INTEGER_CST
		  && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE
		  && TREE_CODE (op1) == INTEGER_CST))
	  && c_inhibit_evaluation_warnings == 0)
	/* ...then maybe we can detect an overflow.  */
	maybe_warn_shift_overflow (loc, op0, op1);
      if ((code == TRUNC_DIV_EXPR
	   || code == CEIL_DIV_EXPR
	   || code == FLOOR_DIV_EXPR
	   || code == EXACT_DIV_EXPR
	   || code == TRUNC_MOD_EXPR)
	  && TREE_CODE (orig_op1) != INTEGER_CST
	  && TREE_CODE (op1) == INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
	      || TREE_CODE (TREE_TYPE (orig_op0)) == FIXED_POINT_TYPE)
	  && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE)
	warn_for_div_by_zero (loc, op1);
      goto out;

    case INDIRECT_REF:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    CASE_CONVERT:
    case ADDR_SPACE_CONVERT_EXPR:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case ADDR_EXPR:
    case CONJ_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* Unary operations.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op0);
      /* Taking the address of, or a part of, a variable must not
	 substitute its constant value.  */
      if (code != ADDR_EXPR && code != REALPART_EXPR && code != IMAGPART_EXPR)
	op0 = decl_constant_value_for_optimization (op0);

      if (for_int_const && TREE_CODE (op0) != INTEGER_CST)
	goto out;

      /* ??? Cope with user tricks that amount to offsetof.  The middle-end is
	 not prepared to deal with them if they occur in initializers.  */
      if (op0 != orig_op0
	  && code == ADDR_EXPR
	  && (op1 = get_base_address (op0)) != NULL_TREE
	  && INDIRECT_REF_P (op1)
	  && TREE_CONSTANT (TREE_OPERAND (op1, 0)))
	ret = fold_convert_loc (loc, TREE_TYPE (expr), fold_offsetof_1 (op0));
      else if (op0 != orig_op0 || in_init)
	ret = in_init
	  ? fold_build1_initializer_loc (loc, code, TREE_TYPE (expr), op0)
	  : fold_build1_loc (loc, code, TREE_TYPE (expr), op0);
      else
	ret = fold (expr);
      if (code == INDIRECT_REF
	  && ret != expr
	  && INDIRECT_REF_P (ret))
	{
	  TREE_READONLY (ret) = TREE_READONLY (expr);
	  TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (expr);
	  TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
	}
      switch (code)
	{
	case FIX_TRUNC_EXPR:
	case FLOAT_EXPR:
	CASE_CONVERT:
	  /* Don't warn about explicit conversions.  We will already
	     have warned about suspect implicit conversions.  */
	  break;

	default:
	  if (TREE_OVERFLOW_P (ret) && !TREE_OVERFLOW_P (op0))
	    overflow_warning (EXPR_LOCATION (expr), ret, op0);
	  break;
	}
      goto out;

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Binary operations not necessarily evaluating both
	 arguments.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op0 = c_fully_fold_internal (op0, in_init, &op0_const, &op0_const_self,
				   for_int_const);
      STRIP_TYPE_NOPS (op0);

      /* The second operand is unevaluated when the first operand
	 short-circuits (false for &&, true for ||); suppress warnings
	 from it in that case.  */
      unused_p = (op0 == (code == TRUTH_ANDIF_EXPR
			  ? truthvalue_false_node
			  : truthvalue_true_node));
      c_disable_warnings (unused_p);
      op1 = c_fully_fold_internal (op1, in_init, &op1_const, &op1_const_self,
				   for_int_const);
      STRIP_TYPE_NOPS (op1);
      c_enable_warnings (unused_p);

      if (for_int_const
	  && (TREE_CODE (op0) != INTEGER_CST
	      /* Require OP1 be an INTEGER_CST only if it's evaluated.  */
	      || (!unused_p && TREE_CODE (op1) != INTEGER_CST)))
	goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || in_init)
	ret = in_init
	  ? fold_build2_initializer_loc (loc, code, TREE_TYPE (expr), op0, op1)
	  : fold_build2_loc (loc, code, TREE_TYPE (expr), op0, op1);
      else
	ret = fold (expr);
      /* The first operand always contributes to constancy; the second
	 contributes only when it is (or, in C99, may be) evaluated.  */
      *maybe_const_operands &= op0_const;
      *maybe_const_itself &= op0_const_self;
      if (!(flag_isoc99
	    && op0_const
	    && op0_const_self
	    && (code == TRUTH_ANDIF_EXPR
		? op0 == truthvalue_false_node
		: op0 == truthvalue_true_node)))
	*maybe_const_operands &= op1_const;
      if (!(op0_const
	    && op0_const_self
	    && (code == TRUTH_ANDIF_EXPR
		? op0 == truthvalue_false_node
		: op0 == truthvalue_true_node)))
	*maybe_const_itself &= op1_const_self;
      goto out;

    case COND_EXPR:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      orig_op2 = op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, &op0_const, &op0_const_self,
				   for_int_const);

      STRIP_TYPE_NOPS (op0);
      /* Suppress warnings from whichever arm a constant condition
	 makes unreachable.  */
      c_disable_warnings (op0 == truthvalue_false_node);
      op1 = c_fully_fold_internal (op1, in_init, &op1_const, &op1_const_self,
				   for_int_const);
      STRIP_TYPE_NOPS (op1);
      c_enable_warnings (op0 == truthvalue_false_node);

      c_disable_warnings (op0 == truthvalue_true_node);
      op2 = c_fully_fold_internal (op2, in_init, &op2_const, &op2_const_self,
				   for_int_const);
      STRIP_TYPE_NOPS (op2);
      c_enable_warnings (op0 == truthvalue_true_node);

      if (for_int_const
	  && (TREE_CODE (op0) != INTEGER_CST
	      /* Only the evaluated operand must be an INTEGER_CST.  */
	      || (op0 == truthvalue_true_node
		  ? TREE_CODE (op1) != INTEGER_CST
		  : TREE_CODE (op2) != INTEGER_CST)))
	goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || op2 != orig_op2)
	ret = fold_build3_loc (loc, code, TREE_TYPE (expr), op0, op1, op2);
      else
	ret = fold (expr);
      /* The condition always contributes; each arm contributes only
	 when it is (or, in C99, may be) evaluated.  */
      *maybe_const_operands &= op0_const;
      *maybe_const_itself &= op0_const_self;
      if (!(flag_isoc99
	    && op0_const
	    && op0_const_self
	    && op0 == truthvalue_false_node))
	*maybe_const_operands &= op1_const;
      if (!(op0_const
	    && op0_const_self
	    && op0 == truthvalue_false_node))
	*maybe_const_itself &= op1_const_self;
      if (!(flag_isoc99
	    && op0_const
	    && op0_const_self
	    && op0 == truthvalue_true_node))
	*maybe_const_operands &= op2_const;
      if (!(op0_const
	    && op0_const_self
	    && op0 == truthvalue_true_node))
	*maybe_const_itself &= op2_const_self;
      goto out;

    case VEC_COND_EXPR:
      /* Unlike COND_EXPR, all three operands are evaluated.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      orig_op2 = op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op0);
      op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op1);
      op2 = c_fully_fold_internal (op2, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const);
      STRIP_TYPE_NOPS (op2);

      if (op0 != orig_op0 || op1 != orig_op1 || op2 != orig_op2)
	ret = fold_build3_loc (loc, code, TREE_TYPE (expr), op0, op1, op2);
      else
	ret = fold (expr);
      goto out;

    case EXCESS_PRECISION_EXPR:
      /* Each case where an operand with excess precision may be
	 encountered must remove the EXCESS_PRECISION_EXPR around
	 inner operands and possibly put one around the whole
	 expression or possibly convert to the semantic type (which
	 c_fully_fold does); we cannot tell at this stage which is
	 appropriate in any particular case.  */
      gcc_unreachable ();

    case SAVE_EXPR:
      /* Make sure to fold the contents of a SAVE_EXPR exactly once.  */
      op0 = TREE_OPERAND (expr, 0);
      if (!SAVE_EXPR_FOLDED_P (expr))
	{
	  op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				       maybe_const_itself, for_int_const);
	  TREE_OPERAND (expr, 0) = op0;
	  SAVE_EXPR_FOLDED_P (expr) = true;
	}
      /* Return the SAVE_EXPR operand if it is invariant.  */
      if (tree_invariant_p (op0))
	ret = op0;
      goto out;

    default:
      /* Various codes may appear through folding built-in functions
	 and their arguments.  */
      goto out;
    }

 out:
  /* Some folding may introduce NON_LVALUE_EXPRs; all lvalue checks
     have been done by this point, so remove them again.  */
  nowarning |= TREE_NO_WARNING (ret);
  STRIP_TYPE_NOPS (ret);
  if (nowarning && !TREE_NO_WARNING (ret))
    {
      /* Wrap in a NOP_EXPR if needed so the no-warning flag has
	 somewhere to live.  */
      if (!CAN_HAVE_LOCATION_P (ret))
	ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      TREE_NO_WARNING (ret) = 1;
    }
  if (ret != expr)
    {
      /* Carry EXPR's location and source range over to the folded
	 replacement.  */
      protected_set_expr_location (ret, loc);
      if (IS_EXPR_CODE_CLASS (kind))
	set_source_range (ret, old_range.m_start, old_range.m_finish);
    }
  return ret;
}
608 /* If not optimizing, EXP is not a VAR_DECL, or EXP has array type,
|
|
609 return EXP. Otherwise, return either EXP or its known constant
|
|
610 value (if it has one), but return EXP if EXP has mode BLKmode. ???
|
|
611 Is the BLKmode test appropriate? */
|
|
612
|
|
613 tree
|
|
614 decl_constant_value_for_optimization (tree exp)
|
|
615 {
|
|
616 tree ret;
|
|
617
|
|
618 if (!optimize
|
|
619 || !VAR_P (exp)
|
|
620 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
|
|
621 || DECL_MODE (exp) == BLKmode)
|
|
622 return exp;
|
|
623
|
|
624 ret = decl_constant_value (exp);
|
|
625 /* Avoid unwanted tree sharing between the initializer and current
|
|
626 function's body where the tree can be modified e.g. by the
|
|
627 gimplifier. */
|
|
628 if (ret != exp && TREE_STATIC (exp))
|
|
629 ret = unshare_expr (ret);
|
|
630 return ret;
|
|
631 }
|