111
|
1 /* Preamble and helpers for the autogenerated gimple-match.c file.
|
131
|
2 Copyright (C) 2014-2018 Free Software Foundation, Inc.
|
111
|
3
|
|
4 This file is part of GCC.
|
|
5
|
|
6 GCC is free software; you can redistribute it and/or modify it under
|
|
7 the terms of the GNU General Public License as published by the Free
|
|
8 Software Foundation; either version 3, or (at your option) any later
|
|
9 version.
|
|
10
|
|
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
|
|
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
|
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
|
14 for more details.
|
|
15
|
|
16 You should have received a copy of the GNU General Public License
|
|
17 along with GCC; see the file COPYING3. If not see
|
|
18 <http://www.gnu.org/licenses/>. */
|
|
19
|
|
20 #include "config.h"
|
|
21 #include "system.h"
|
|
22 #include "coretypes.h"
|
|
23 #include "backend.h"
|
|
24 #include "target.h"
|
|
25 #include "rtl.h"
|
|
26 #include "tree.h"
|
|
27 #include "gimple.h"
|
|
28 #include "ssa.h"
|
|
29 #include "cgraph.h"
|
|
30 #include "fold-const.h"
|
|
31 #include "fold-const-call.h"
|
|
32 #include "stor-layout.h"
|
|
33 #include "gimple-fold.h"
|
|
34 #include "calls.h"
|
|
35 #include "tree-dfa.h"
|
|
36 #include "builtins.h"
|
|
37 #include "gimple-match.h"
|
|
38 #include "tree-pass.h"
|
|
39 #include "internal-fn.h"
|
|
40 #include "case-cfn-macros.h"
|
|
41 #include "gimplify.h"
|
|
42 #include "optabs-tree.h"
|
131
|
43 #include "tree-eh.h"
|
111
|
44
|
|
45
|
|
46 /* Forward declarations of the private auto-generated matchers.
|
|
47 They expect valueized operands in canonical order and do not
|
|
48 perform simplification of all-constant operands. */
|
131
|
49 static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
|
111
|
50 code_helper, tree, tree);
|
131
|
51 static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
|
111
|
52 code_helper, tree, tree, tree);
|
131
|
53 static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
|
111
|
54 code_helper, tree, tree, tree, tree);
|
131
|
55 static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
|
|
56 code_helper, tree, tree, tree, tree, tree);
|
|
57 static bool gimple_simplify (gimple_match_op *, gimple_seq *, tree (*)(tree),
|
|
58 code_helper, tree, tree, tree, tree, tree, tree);
|
111
|
59
|
131
|
60 const unsigned int gimple_match_op::MAX_NUM_OPS;
|
111
|
61
|
|
62 /* Return whether T is a constant that we'll dispatch to fold to
|
|
63 evaluate fully constant expressions. */
|
|
64
|
|
65 static inline bool
|
|
66 constant_for_folding (tree t)
|
|
67 {
|
|
68 return (CONSTANT_CLASS_P (t)
|
|
69 /* The following is only interesting to string builtins. */
|
|
70 || (TREE_CODE (t) == ADDR_EXPR
|
|
71 && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST));
|
|
72 }
|
|
73
|
131
|
/* Try to convert conditional operation ORIG_OP into an IFN_COND_*
   operation.  Return true on success, storing the new operation in NEW_OP.  */

static bool
convert_conditional_op (gimple_match_op *orig_op,
			gimple_match_op *new_op)
{
  /* Map the unconditional operation (either a tree code or an internal
     function) to its conditional internal-function counterpart.  */
  internal_fn ifn;
  if (orig_op->code.is_tree_code ())
    ifn = get_conditional_internal_fn ((tree_code) orig_op->code);
  else
    {
      combined_fn cfn = orig_op->code;
      if (!internal_fn_p (cfn))
	return false;
      ifn = get_conditional_internal_fn (as_internal_fn (cfn));
    }
  if (ifn == IFN_LAST)
    /* No conditional counterpart exists.  */
    return false;
  /* The IFN_COND_* call takes the condition first, then the original
     operands, then the "else" value last -- hence num_ops + 2.  */
  unsigned int num_ops = orig_op->num_ops;
  new_op->set_op (as_combined_fn (ifn), orig_op->type, num_ops + 2);
  new_op->ops[0] = orig_op->cond.cond;
  for (unsigned int i = 0; i < num_ops; ++i)
    new_op->ops[i + 1] = orig_op->ops[i];
  /* If no explicit "else" value was recorded, ask the target for its
     preferred one.  */
  tree else_value = orig_op->cond.else_value;
  if (!else_value)
    else_value = targetm.preferred_else_value (ifn, orig_op->type,
					       num_ops, orig_op->ops);
  new_op->ops[num_ops + 1] = else_value;
  return true;
}
|
|
105
|
|
/* RES_OP is the result of a simplification.  If it is conditional,
   try to replace it with the equivalent UNCOND form, such as an
   IFN_COND_* call or a VEC_COND_EXPR.  Also try to resimplify the
   result of the replacement if appropriate, adding any new statements to
   SEQ and using VALUEIZE as the valueization function.  Return true if
   this resimplification occurred and resulted in at least one change.  */

static bool
maybe_resimplify_conditional_op (gimple_seq *seq, gimple_match_op *res_op,
				 tree (*valueize) (tree))
{
  /* Nothing to do for unconditional operations.  */
  if (!res_op->cond.cond)
    return false;

  if (!res_op->cond.else_value
      && res_op->code.is_tree_code ())
    {
      /* The "else" value doesn't matter.  If the "then" value is a
	 gimple value, just use it unconditionally.  This isn't a
	 simplification in itself, since there was no operation to
	 build in the first place.  */
      if (gimple_simplified_result_is_gimple_val (res_op))
	{
	  res_op->cond.cond = NULL_TREE;
	  return false;
	}

      /* Likewise if the operation would not trap.  */
      bool honor_trapv = (INTEGRAL_TYPE_P (res_op->type)
			  && TYPE_OVERFLOW_TRAPS (res_op->type));
      if (!operation_could_trap_p ((tree_code) res_op->code,
				   FLOAT_TYPE_P (res_op->type),
				   honor_trapv, res_op->op_or_null (1)))
	{
	  /* Safe to execute unconditionally; drop the condition.  */
	  res_op->cond.cond = NULL_TREE;
	  return false;
	}
    }

  /* If the "then" value is a gimple value and the "else" value matters,
     create a VEC_COND_EXPR between them, then see if it can be further
     simplified.  */
  gimple_match_op new_op;
  if (res_op->cond.else_value
      && VECTOR_TYPE_P (res_op->type)
      && gimple_simplified_result_is_gimple_val (res_op))
    {
      new_op.set_op (VEC_COND_EXPR, res_op->type,
		     res_op->cond.cond, res_op->ops[0],
		     res_op->cond.else_value);
      *res_op = new_op;
      return gimple_resimplify3 (seq, res_op, valueize);
    }

  /* Otherwise try rewriting the operation as an IFN_COND_* call.
     Again, this isn't a simplification in itself, since it's what
     RES_OP already described.  */
  if (convert_conditional_op (res_op, &new_op))
    *res_op = new_op;

  return false;
}
|
|
168
|
|
/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

bool
gimple_resimplify1 (gimple_seq *seq, gimple_match_op *res_op,
		    tree (*valueize)(tree))
{
  /* First try fully constant folding of the single operand.  */
  if (constant_for_folding (res_op->ops[0]))
    {
      tree tem = NULL_TREE;
      if (res_op->code.is_tree_code ())
	tem = const_unop (res_op->code, res_op->type, res_op->ops[0]);
      else
	tem = fold_const_call (combined_fn (res_op->code), res_op->type,
			       res_op->ops[0]);
      if (tem != NULL_TREE
	  && CONSTANT_CLASS_P (tem))
	{
	  if (TREE_OVERFLOW_P (tem))
	    tem = drop_tree_overflow (tem);
	  res_op->set_value (tem);
	  /* Result is constant; the conditional wrapper (if any) can be
	     resolved now.  The overall result is a change regardless.  */
	  maybe_resimplify_conditional_op (seq, res_op, valueize);
	  return true;
	}
    }

  /* Limit recursion, there are cases like PR80887 and others, for
     example when value-numbering presents us with unfolded expressions
     that we are really not prepared to handle without eventual
     oscillation like ((_50 + 0) + 8) where _50 gets mapped to _50
     itself as available expression.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
	fprintf (dump_file, "Aborting expression simplification due to "
		 "deep recursion\n");
      return false;
    }

  ++depth;
  /* Match against a scratch copy so RES_OP is untouched on failure.  */
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
		       res_op->code, res_op->type, res_op->ops[0]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  return false;
}
|
|
228
|
|
/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

bool
gimple_resimplify2 (gimple_seq *seq, gimple_match_op *res_op,
		    tree (*valueize)(tree))
{
  /* First try fully constant folding of both operands.  */
  if (constant_for_folding (res_op->ops[0])
      && constant_for_folding (res_op->ops[1]))
    {
      tree tem = NULL_TREE;
      if (res_op->code.is_tree_code ())
	tem = const_binop (res_op->code, res_op->type,
			   res_op->ops[0], res_op->ops[1]);
      else
	tem = fold_const_call (combined_fn (res_op->code), res_op->type,
			       res_op->ops[0], res_op->ops[1]);
      if (tem != NULL_TREE
	  && CONSTANT_CLASS_P (tem))
	{
	  if (TREE_OVERFLOW_P (tem))
	    tem = drop_tree_overflow (tem);
	  res_op->set_value (tem);
	  maybe_resimplify_conditional_op (seq, res_op, valueize);
	  return true;
	}
    }

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  if (res_op->code.is_tree_code ()
      && (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
	  || commutative_tree_code (res_op->code))
      && tree_swap_operands_p (res_op->ops[0], res_op->ops[1]))
    {
      std::swap (res_op->ops[0], res_op->ops[1]);
      /* Swapping comparison operands requires mirroring the code too.  */
      if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison)
	res_op->code = swap_tree_comparison (res_op->code);
      canonicalized = true;
    }

  /* Limit recursion, see gimple_resimplify1.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
	fprintf (dump_file, "Aborting expression simplification due to "
		 "deep recursion\n");
      return false;
    }

  ++depth;
  /* Match against a scratch copy so RES_OP is untouched on failure.  */
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
		       res_op->code, res_op->type,
		       res_op->ops[0], res_op->ops[1]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  /* Even if no match fired, a canonicalizing swap counts as a change.  */
  return canonicalized;
}
|
|
300
|
|
/* Helper that matches and simplifies the toplevel result from
   a gimple_simplify run (where we don't want to build
   a stmt in case it's used in in-place folding).  Replaces
   RES_OP with a simplified and/or canonicalized result and
   returns whether any change was made.  */

bool
gimple_resimplify3 (gimple_seq *seq, gimple_match_op *res_op,
		    tree (*valueize)(tree))
{
  /* First try fully constant folding of all three operands.  */
  if (constant_for_folding (res_op->ops[0])
      && constant_for_folding (res_op->ops[1])
      && constant_for_folding (res_op->ops[2]))
    {
      tree tem = NULL_TREE;
      if (res_op->code.is_tree_code ())
	tem = fold_ternary/*_to_constant*/ (res_op->code, res_op->type,
					    res_op->ops[0], res_op->ops[1],
					    res_op->ops[2]);
      else
	tem = fold_const_call (combined_fn (res_op->code), res_op->type,
			       res_op->ops[0], res_op->ops[1], res_op->ops[2]);
      if (tem != NULL_TREE
	  && CONSTANT_CLASS_P (tem))
	{
	  if (TREE_OVERFLOW_P (tem))
	    tem = drop_tree_overflow (tem);
	  res_op->set_value (tem);
	  maybe_resimplify_conditional_op (seq, res_op, valueize);
	  return true;
	}
    }

  /* Canonicalize operand order.  */
  bool canonicalized = false;
  if (res_op->code.is_tree_code ()
      && commutative_ternary_tree_code (res_op->code)
      && tree_swap_operands_p (res_op->ops[0], res_op->ops[1]))
    {
      std::swap (res_op->ops[0], res_op->ops[1]);
      canonicalized = true;
    }

  /* Limit recursion, see gimple_resimplify1.  */
  static unsigned depth;
  if (depth > 10)
    {
      if (dump_file && (dump_flags & TDF_FOLDING))
	fprintf (dump_file, "Aborting expression simplification due to "
		 "deep recursion\n");
      return false;
    }

  ++depth;
  /* Match against a scratch copy so RES_OP is untouched on failure.  */
  gimple_match_op res_op2 (*res_op);
  if (gimple_simplify (&res_op2, seq, valueize,
		       res_op->code, res_op->type,
		       res_op->ops[0], res_op->ops[1], res_op->ops[2]))
    {
      --depth;
      *res_op = res_op2;
      return true;
    }
  --depth;

  if (maybe_resimplify_conditional_op (seq, res_op, valueize))
    return true;

  /* Even if no match fired, a canonicalizing swap counts as a change.  */
  return canonicalized;
}
|
|
371
|
131
|
372 /* Helper that matches and simplifies the toplevel result from
|
|
373 a gimple_simplify run (where we don't want to build
|
|
374 a stmt in case it's used in in-place folding). Replaces
|
|
375 RES_OP with a simplified and/or canonicalized result and
|
|
376 returns whether any change was made. */
|
111
|
377
|
131
|
378 bool
|
|
379 gimple_resimplify4 (gimple_seq *seq, gimple_match_op *res_op,
|
|
380 tree (*valueize)(tree))
|
|
381 {
|
|
382 /* No constant folding is defined for four-operand functions. */
|
|
383
|
|
384 /* Limit recursion, see gimple_resimplify1. */
|
|
385 static unsigned depth;
|
|
386 if (depth > 10)
|
|
387 {
|
|
388 if (dump_file && (dump_flags & TDF_FOLDING))
|
|
389 fprintf (dump_file, "Aborting expression simplification due to "
|
|
390 "deep recursion\n");
|
|
391 return false;
|
|
392 }
|
|
393
|
|
394 ++depth;
|
|
395 gimple_match_op res_op2 (*res_op);
|
|
396 if (gimple_simplify (&res_op2, seq, valueize,
|
|
397 res_op->code, res_op->type,
|
|
398 res_op->ops[0], res_op->ops[1], res_op->ops[2],
|
|
399 res_op->ops[3]))
|
|
400 {
|
|
401 --depth;
|
|
402 *res_op = res_op2;
|
|
403 return true;
|
|
404 }
|
|
405 --depth;
|
|
406
|
|
407 if (maybe_resimplify_conditional_op (seq, res_op, valueize))
|
|
408 return true;
|
|
409
|
|
410 return false;
|
|
411 }
|
|
412
|
|
413 /* Helper that matches and simplifies the toplevel result from
|
|
414 a gimple_simplify run (where we don't want to build
|
|
415 a stmt in case it's used in in-place folding). Replaces
|
|
416 RES_OP with a simplified and/or canonicalized result and
|
|
417 returns whether any change was made. */
|
|
418
|
|
419 bool
|
|
420 gimple_resimplify5 (gimple_seq *seq, gimple_match_op *res_op,
|
|
421 tree (*valueize)(tree))
|
|
422 {
|
|
423 /* No constant folding is defined for five-operand functions. */
|
|
424
|
|
425 gimple_match_op res_op2 (*res_op);
|
|
426 if (gimple_simplify (&res_op2, seq, valueize,
|
|
427 res_op->code, res_op->type,
|
|
428 res_op->ops[0], res_op->ops[1], res_op->ops[2],
|
|
429 res_op->ops[3], res_op->ops[4]))
|
|
430 {
|
|
431 *res_op = res_op2;
|
|
432 return true;
|
|
433 }
|
|
434
|
|
435 if (maybe_resimplify_conditional_op (seq, res_op, valueize))
|
|
436 return true;
|
|
437
|
|
438 return false;
|
|
439 }
|
|
440
|
|
441 /* If in GIMPLE the operation described by RES_OP should be single-rhs,
|
|
442 build a GENERIC tree for that expression and update RES_OP accordingly. */
|
111
|
443
|
|
444 void
|
131
|
445 maybe_build_generic_op (gimple_match_op *res_op)
|
111
|
446 {
|
131
|
447 tree_code code = (tree_code) res_op->code;
|
|
448 tree val;
|
111
|
449 switch (code)
|
|
450 {
|
|
451 case REALPART_EXPR:
|
|
452 case IMAGPART_EXPR:
|
|
453 case VIEW_CONVERT_EXPR:
|
131
|
454 val = build1 (code, res_op->type, res_op->ops[0]);
|
|
455 res_op->set_value (val);
|
111
|
456 break;
|
|
457 case BIT_FIELD_REF:
|
131
|
458 val = build3 (code, res_op->type, res_op->ops[0], res_op->ops[1],
|
|
459 res_op->ops[2]);
|
|
460 REF_REVERSE_STORAGE_ORDER (val) = res_op->reverse;
|
|
461 res_op->set_value (val);
|
111
|
462 break;
|
|
463 default:;
|
|
464 }
|
|
465 }
|
|
466
|
131
|
467 tree (*mprts_hook) (gimple_match_op *);
|
111
|
468
|
131
|
469 /* Try to build RES_OP, which is known to be a call to FN. Return null
|
|
470 if the target doesn't support the function. */
|
111
|
471
|
|
472 static gcall *
|
131
|
473 build_call_internal (internal_fn fn, gimple_match_op *res_op)
|
111
|
474 {
|
|
475 if (direct_internal_fn_p (fn))
|
|
476 {
|
131
|
477 tree_pair types = direct_internal_fn_types (fn, res_op->type,
|
|
478 res_op->ops);
|
111
|
479 if (!direct_internal_fn_supported_p (fn, types, OPTIMIZE_FOR_BOTH))
|
|
480 return NULL;
|
|
481 }
|
131
|
482 return gimple_build_call_internal (fn, res_op->num_ops,
|
|
483 res_op->op_or_null (0),
|
|
484 res_op->op_or_null (1),
|
|
485 res_op->op_or_null (2),
|
|
486 res_op->op_or_null (3),
|
|
487 res_op->op_or_null (4));
|
111
|
488 }
|
|
489
|
131
|
490 /* Push the exploded expression described by RES_OP as a statement to
|
|
491 SEQ if necessary and return a gimple value denoting the value of the
|
|
492 expression. If RES is not NULL then the result will be always RES
|
|
493 and even gimple values are pushed to SEQ. */
|
111
|
494
|
|
495 tree
|
131
|
496 maybe_push_res_to_seq (gimple_match_op *res_op, gimple_seq *seq, tree res)
|
111
|
497 {
|
131
|
498 tree *ops = res_op->ops;
|
|
499 unsigned num_ops = res_op->num_ops;
|
|
500
|
|
501 /* The caller should have converted conditional operations into an UNCOND
|
|
502 form and resimplified as appropriate. The conditional form only
|
|
503 survives this far if that conversion failed. */
|
|
504 if (res_op->cond.cond)
|
|
505 return NULL_TREE;
|
|
506
|
|
507 if (res_op->code.is_tree_code ())
|
111
|
508 {
|
|
509 if (!res
|
131
|
510 && gimple_simplified_result_is_gimple_val (res_op))
|
111
|
511 return ops[0];
|
|
512 if (mprts_hook)
|
|
513 {
|
131
|
514 tree tem = mprts_hook (res_op);
|
111
|
515 if (tem)
|
|
516 return tem;
|
|
517 }
|
131
|
518 }
|
|
519
|
|
520 if (!seq)
|
|
521 return NULL_TREE;
|
|
522
|
|
523 /* Play safe and do not allow abnormals to be mentioned in
|
|
524 newly created statements. */
|
|
525 for (unsigned int i = 0; i < num_ops; ++i)
|
|
526 if (TREE_CODE (ops[i]) == SSA_NAME
|
|
527 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i]))
|
|
528 return NULL_TREE;
|
|
529
|
|
530 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
|
|
531 for (unsigned int i = 0; i < 2; ++i)
|
|
532 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
|
|
533 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i)))
|
111
|
534 return NULL_TREE;
|
131
|
535
|
|
536 if (res_op->code.is_tree_code ())
|
|
537 {
|
111
|
538 if (!res)
|
|
539 {
|
|
540 if (gimple_in_ssa_p (cfun))
|
131
|
541 res = make_ssa_name (res_op->type);
|
111
|
542 else
|
131
|
543 res = create_tmp_reg (res_op->type);
|
111
|
544 }
|
131
|
545 maybe_build_generic_op (res_op);
|
|
546 gimple *new_stmt = gimple_build_assign (res, res_op->code,
|
|
547 res_op->op_or_null (0),
|
|
548 res_op->op_or_null (1),
|
|
549 res_op->op_or_null (2));
|
111
|
550 gimple_seq_add_stmt_without_update (seq, new_stmt);
|
|
551 return res;
|
|
552 }
|
|
553 else
|
|
554 {
|
131
|
555 gcc_assert (num_ops != 0);
|
|
556 combined_fn fn = res_op->code;
|
111
|
557 gcall *new_stmt = NULL;
|
|
558 if (internal_fn_p (fn))
|
|
559 {
|
|
560 /* Generate the given function if we can. */
|
|
561 internal_fn ifn = as_internal_fn (fn);
|
131
|
562 new_stmt = build_call_internal (ifn, res_op);
|
111
|
563 if (!new_stmt)
|
|
564 return NULL_TREE;
|
|
565 }
|
|
566 else
|
|
567 {
|
|
568 /* Find the function we want to call. */
|
|
569 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
|
|
570 if (!decl)
|
|
571 return NULL;
|
|
572
|
|
573 /* We can't and should not emit calls to non-const functions. */
|
|
574 if (!(flags_from_decl_or_type (decl) & ECF_CONST))
|
|
575 return NULL;
|
|
576
|
131
|
577 new_stmt = gimple_build_call (decl, num_ops,
|
|
578 res_op->op_or_null (0),
|
|
579 res_op->op_or_null (1),
|
|
580 res_op->op_or_null (2),
|
|
581 res_op->op_or_null (3),
|
|
582 res_op->op_or_null (4));
|
111
|
583 }
|
|
584 if (!res)
|
|
585 {
|
|
586 if (gimple_in_ssa_p (cfun))
|
131
|
587 res = make_ssa_name (res_op->type);
|
111
|
588 else
|
131
|
589 res = create_tmp_reg (res_op->type);
|
111
|
590 }
|
|
591 gimple_call_set_lhs (new_stmt, res);
|
|
592 gimple_seq_add_stmt_without_update (seq, new_stmt);
|
|
593 return res;
|
|
594 }
|
|
595 }
|
|
596
|
|
597
|
|
598 /* Public API overloads follow for operation being tree_code or
|
|
599 built_in_function and for one to three operands or arguments.
|
|
600 They return NULL_TREE if nothing could be simplified or
|
|
601 the resulting simplified value with parts pushed to SEQ.
|
|
602 If SEQ is NULL then if the simplification needs to create
|
|
603 new stmts it will fail. If VALUEIZE is non-NULL then all
|
|
604 SSA names will be valueized using that hook prior to
|
|
605 applying simplifications. */
|
|
606
|
|
607 /* Unary ops. */
|
|
608
|
|
609 tree
|
|
610 gimple_simplify (enum tree_code code, tree type,
|
|
611 tree op0,
|
|
612 gimple_seq *seq, tree (*valueize)(tree))
|
|
613 {
|
|
614 if (constant_for_folding (op0))
|
|
615 {
|
|
616 tree res = const_unop (code, type, op0);
|
|
617 if (res != NULL_TREE
|
|
618 && CONSTANT_CLASS_P (res))
|
|
619 return res;
|
|
620 }
|
|
621
|
131
|
622 gimple_match_op res_op;
|
|
623 if (!gimple_simplify (&res_op, seq, valueize, code, type, op0))
|
111
|
624 return NULL_TREE;
|
131
|
625 return maybe_push_res_to_seq (&res_op, seq);
|
111
|
626 }
|
|
627
|
|
628 /* Binary ops. */
|
|
629
|
|
630 tree
|
|
631 gimple_simplify (enum tree_code code, tree type,
|
|
632 tree op0, tree op1,
|
|
633 gimple_seq *seq, tree (*valueize)(tree))
|
|
634 {
|
|
635 if (constant_for_folding (op0) && constant_for_folding (op1))
|
|
636 {
|
|
637 tree res = const_binop (code, type, op0, op1);
|
|
638 if (res != NULL_TREE
|
|
639 && CONSTANT_CLASS_P (res))
|
|
640 return res;
|
|
641 }
|
|
642
|
|
643 /* Canonicalize operand order both for matching and fallback stmt
|
|
644 generation. */
|
|
645 if ((commutative_tree_code (code)
|
|
646 || TREE_CODE_CLASS (code) == tcc_comparison)
|
|
647 && tree_swap_operands_p (op0, op1))
|
|
648 {
|
|
649 std::swap (op0, op1);
|
|
650 if (TREE_CODE_CLASS (code) == tcc_comparison)
|
|
651 code = swap_tree_comparison (code);
|
|
652 }
|
|
653
|
131
|
654 gimple_match_op res_op;
|
|
655 if (!gimple_simplify (&res_op, seq, valueize, code, type, op0, op1))
|
111
|
656 return NULL_TREE;
|
131
|
657 return maybe_push_res_to_seq (&res_op, seq);
|
111
|
658 }
|
|
659
|
|
660 /* Ternary ops. */
|
|
661
|
|
662 tree
|
|
663 gimple_simplify (enum tree_code code, tree type,
|
|
664 tree op0, tree op1, tree op2,
|
|
665 gimple_seq *seq, tree (*valueize)(tree))
|
|
666 {
|
|
667 if (constant_for_folding (op0) && constant_for_folding (op1)
|
|
668 && constant_for_folding (op2))
|
|
669 {
|
|
670 tree res = fold_ternary/*_to_constant */ (code, type, op0, op1, op2);
|
|
671 if (res != NULL_TREE
|
|
672 && CONSTANT_CLASS_P (res))
|
|
673 return res;
|
|
674 }
|
|
675
|
|
676 /* Canonicalize operand order both for matching and fallback stmt
|
|
677 generation. */
|
|
678 if (commutative_ternary_tree_code (code)
|
|
679 && tree_swap_operands_p (op0, op1))
|
|
680 std::swap (op0, op1);
|
|
681
|
131
|
682 gimple_match_op res_op;
|
|
683 if (!gimple_simplify (&res_op, seq, valueize, code, type, op0, op1, op2))
|
111
|
684 return NULL_TREE;
|
131
|
685 return maybe_push_res_to_seq (&res_op, seq);
|
111
|
686 }
|
|
687
|
131
|
688 /* Builtin or internal function with one argument. */
|
111
|
689
|
|
690 tree
|
131
|
691 gimple_simplify (combined_fn fn, tree type,
|
111
|
692 tree arg0,
|
|
693 gimple_seq *seq, tree (*valueize)(tree))
|
|
694 {
|
|
695 if (constant_for_folding (arg0))
|
|
696 {
|
131
|
697 tree res = fold_const_call (fn, type, arg0);
|
111
|
698 if (res && CONSTANT_CLASS_P (res))
|
|
699 return res;
|
|
700 }
|
|
701
|
131
|
702 gimple_match_op res_op;
|
|
703 if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0))
|
111
|
704 return NULL_TREE;
|
131
|
705 return maybe_push_res_to_seq (&res_op, seq);
|
111
|
706 }
|
|
707
|
131
|
708 /* Builtin or internal function with two arguments. */
|
111
|
709
|
|
710 tree
|
131
|
711 gimple_simplify (combined_fn fn, tree type,
|
111
|
712 tree arg0, tree arg1,
|
|
713 gimple_seq *seq, tree (*valueize)(tree))
|
|
714 {
|
|
715 if (constant_for_folding (arg0)
|
|
716 && constant_for_folding (arg1))
|
|
717 {
|
131
|
718 tree res = fold_const_call (fn, type, arg0, arg1);
|
111
|
719 if (res && CONSTANT_CLASS_P (res))
|
|
720 return res;
|
|
721 }
|
|
722
|
131
|
723 gimple_match_op res_op;
|
|
724 if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0, arg1))
|
111
|
725 return NULL_TREE;
|
131
|
726 return maybe_push_res_to_seq (&res_op, seq);
|
111
|
727 }
|
|
728
|
131
|
729 /* Builtin or internal function with three arguments. */
|
111
|
730
|
|
731 tree
|
131
|
732 gimple_simplify (combined_fn fn, tree type,
|
111
|
733 tree arg0, tree arg1, tree arg2,
|
|
734 gimple_seq *seq, tree (*valueize)(tree))
|
|
735 {
|
|
736 if (constant_for_folding (arg0)
|
|
737 && constant_for_folding (arg1)
|
|
738 && constant_for_folding (arg2))
|
|
739 {
|
131
|
740 tree res = fold_const_call (fn, type, arg0, arg1, arg2);
|
111
|
741 if (res && CONSTANT_CLASS_P (res))
|
|
742 return res;
|
|
743 }
|
|
744
|
131
|
745 gimple_match_op res_op;
|
|
746 if (!gimple_simplify (&res_op, seq, valueize, fn, type, arg0, arg1, arg2))
|
111
|
747 return NULL_TREE;
|
131
|
748 return maybe_push_res_to_seq (&res_op, seq);
|
111
|
749 }
|
|
750
|
|
751 /* Helper for gimple_simplify valueizing OP using VALUEIZE and setting
|
|
752 VALUEIZED to true if valueization changed OP. */
|
|
753
|
|
754 static inline tree
|
|
755 do_valueize (tree op, tree (*valueize)(tree), bool &valueized)
|
|
756 {
|
|
757 if (valueize && TREE_CODE (op) == SSA_NAME)
|
|
758 {
|
|
759 tree tem = valueize (op);
|
|
760 if (tem && tem != op)
|
|
761 {
|
|
762 op = tem;
|
|
763 valueized = true;
|
|
764 }
|
|
765 }
|
|
766 return op;
|
|
767 }
|
|
768
|
131
|
/* If RES_OP is a call to a conditional internal function, try simplifying
   the associated unconditional operation and using the result to build
   a new conditional operation.  For example, if RES_OP is:

     IFN_COND_ADD (COND, A, B, ELSE)

   try simplifying (plus A B) and using the result to build a replacement
   for the whole IFN_COND_ADD.

   Return true if this approach led to a simplification, otherwise leave
   RES_OP unchanged (and so suitable for other simplifications).  When
   returning true, add any new statements to SEQ and use VALUEIZE as the
   valueization function.

   RES_OP is known to be a call to IFN.  */

static bool
try_conditional_simplification (internal_fn ifn, gimple_match_op *res_op,
				gimple_seq *seq, tree (*valueize) (tree))
{
  /* Find the unconditional counterpart: either a tree code or, failing
     that, an unconditional internal function.  */
  code_helper op;
  tree_code code = conditional_internal_fn_code (ifn);
  if (code != ERROR_MARK)
    op = code;
  else
    {
      ifn = get_unconditional_internal_fn (ifn);
      if (ifn == IFN_LAST)
	return false;
      op = as_combined_fn (ifn);
    }

  /* Strip the condition (ops[0]) and else value (last op), leaving the
     num_ops - 2 data operands for the unconditional operation.  */
  unsigned int num_ops = res_op->num_ops;
  gimple_match_op cond_op (gimple_match_cond (res_op->ops[0],
					      res_op->ops[num_ops - 1]),
			   op, res_op->type, num_ops - 2);
  for (unsigned int i = 1; i < num_ops - 1; ++i)
    cond_op.ops[i - 1] = res_op->ops[i];
  switch (num_ops - 2)
    {
    case 2:
      if (!gimple_resimplify2 (seq, &cond_op, valueize))
	return false;
      break;
    case 3:
      if (!gimple_resimplify3 (seq, &cond_op, valueize))
	return false;
      break;
    default:
      gcc_unreachable ();
    }
  /* A simplification fired; adopt it and fold the condition back in.  */
  *res_op = cond_op;
  maybe_resimplify_conditional_op (seq, res_op, valueize);
  return true;
}
|
|
824
|
111
|
825 /* The main STMT based simplification entry. It is used by the fold_stmt
|
|
826 and the fold_stmt_to_constant APIs. */
|
|
827
|
|
828 bool
|
131
|
829 gimple_simplify (gimple *stmt, gimple_match_op *res_op, gimple_seq *seq,
|
111
|
830 tree (*valueize)(tree), tree (*top_valueize)(tree))
|
|
831 {
|
|
832 switch (gimple_code (stmt))
|
|
833 {
|
|
834 case GIMPLE_ASSIGN:
|
|
835 {
|
|
836 enum tree_code code = gimple_assign_rhs_code (stmt);
|
|
837 tree type = TREE_TYPE (gimple_assign_lhs (stmt));
|
|
838 switch (gimple_assign_rhs_class (stmt))
|
|
839 {
|
|
840 case GIMPLE_SINGLE_RHS:
|
|
841 if (code == REALPART_EXPR
|
|
842 || code == IMAGPART_EXPR
|
|
843 || code == VIEW_CONVERT_EXPR)
|
|
844 {
|
|
845 tree op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
|
|
846 bool valueized = false;
|
|
847 op0 = do_valueize (op0, top_valueize, valueized);
|
131
|
848 res_op->set_op (code, type, op0);
|
|
849 return (gimple_resimplify1 (seq, res_op, valueize)
|
111
|
850 || valueized);
|
|
851 }
|
|
852 else if (code == BIT_FIELD_REF)
|
|
853 {
|
|
854 tree rhs1 = gimple_assign_rhs1 (stmt);
|
|
855 tree op0 = TREE_OPERAND (rhs1, 0);
|
|
856 bool valueized = false;
|
|
857 op0 = do_valueize (op0, top_valueize, valueized);
|
131
|
858 res_op->set_op (code, type, op0,
|
|
859 TREE_OPERAND (rhs1, 1),
|
|
860 TREE_OPERAND (rhs1, 2),
|
|
861 REF_REVERSE_STORAGE_ORDER (rhs1));
|
|
862 if (res_op->reverse)
|
|
863 return valueized;
|
|
864 return (gimple_resimplify3 (seq, res_op, valueize)
|
111
|
865 || valueized);
|
|
866 }
|
|
867 else if (code == SSA_NAME
|
|
868 && top_valueize)
|
|
869 {
|
|
870 tree op0 = gimple_assign_rhs1 (stmt);
|
|
871 tree valueized = top_valueize (op0);
|
|
872 if (!valueized || op0 == valueized)
|
|
873 return false;
|
131
|
874 res_op->set_op (TREE_CODE (op0), type, valueized);
|
111
|
875 return true;
|
|
876 }
|
|
877 break;
|
|
878 case GIMPLE_UNARY_RHS:
|
|
879 {
|
|
880 tree rhs1 = gimple_assign_rhs1 (stmt);
|
|
881 bool valueized = false;
|
|
882 rhs1 = do_valueize (rhs1, top_valueize, valueized);
|
131
|
883 res_op->set_op (code, type, rhs1);
|
|
884 return (gimple_resimplify1 (seq, res_op, valueize)
|
111
|
885 || valueized);
|
|
886 }
|
|
887 case GIMPLE_BINARY_RHS:
|
|
888 {
|
|
889 tree rhs1 = gimple_assign_rhs1 (stmt);
|
|
890 tree rhs2 = gimple_assign_rhs2 (stmt);
|
|
891 bool valueized = false;
|
|
892 rhs1 = do_valueize (rhs1, top_valueize, valueized);
|
|
893 rhs2 = do_valueize (rhs2, top_valueize, valueized);
|
131
|
894 res_op->set_op (code, type, rhs1, rhs2);
|
|
895 return (gimple_resimplify2 (seq, res_op, valueize)
|
111
|
896 || valueized);
|
|
897 }
|
|
898 case GIMPLE_TERNARY_RHS:
|
|
899 {
|
|
900 bool valueized = false;
|
|
901 tree rhs1 = gimple_assign_rhs1 (stmt);
|
|
902 /* If this is a [VEC_]COND_EXPR first try to simplify an
|
|
903 embedded GENERIC condition. */
|
|
904 if (code == COND_EXPR
|
|
905 || code == VEC_COND_EXPR)
|
|
906 {
|
|
907 if (COMPARISON_CLASS_P (rhs1))
|
|
908 {
|
|
909 tree lhs = TREE_OPERAND (rhs1, 0);
|
|
910 tree rhs = TREE_OPERAND (rhs1, 1);
|
|
911 lhs = do_valueize (lhs, top_valueize, valueized);
|
|
912 rhs = do_valueize (rhs, top_valueize, valueized);
|
131
|
913 gimple_match_op res_op2 (res_op->cond, TREE_CODE (rhs1),
|
|
914 TREE_TYPE (rhs1), lhs, rhs);
|
|
915 if ((gimple_resimplify2 (seq, &res_op2, valueize)
|
111
|
916 || valueized)
|
131
|
917 && res_op2.code.is_tree_code ())
|
111
|
918 {
|
|
919 valueized = true;
|
131
|
920 if (TREE_CODE_CLASS ((enum tree_code) res_op2.code)
|
111
|
921 == tcc_comparison)
|
131
|
922 rhs1 = build2 (res_op2.code, TREE_TYPE (rhs1),
|
|
923 res_op2.ops[0], res_op2.ops[1]);
|
|
924 else if (res_op2.code == SSA_NAME
|
|
925 || res_op2.code == INTEGER_CST
|
|
926 || res_op2.code == VECTOR_CST)
|
|
927 rhs1 = res_op2.ops[0];
|
111
|
928 else
|
|
929 valueized = false;
|
|
930 }
|
|
931 }
|
|
932 }
|
|
933 tree rhs2 = gimple_assign_rhs2 (stmt);
|
|
934 tree rhs3 = gimple_assign_rhs3 (stmt);
|
|
935 rhs1 = do_valueize (rhs1, top_valueize, valueized);
|
|
936 rhs2 = do_valueize (rhs2, top_valueize, valueized);
|
|
937 rhs3 = do_valueize (rhs3, top_valueize, valueized);
|
131
|
938 res_op->set_op (code, type, rhs1, rhs2, rhs3);
|
|
939 return (gimple_resimplify3 (seq, res_op, valueize)
|
111
|
940 || valueized);
|
|
941 }
|
|
942 default:
|
|
943 gcc_unreachable ();
|
|
944 }
|
|
945 break;
|
|
946 }
|
|
947
|
|
948 case GIMPLE_CALL:
|
|
949 /* ??? This way we can't simplify calls with side-effects. */
|
|
950 if (gimple_call_lhs (stmt) != NULL_TREE
|
|
951 && gimple_call_num_args (stmt) >= 1
|
131
|
952 && gimple_call_num_args (stmt) <= 5)
|
111
|
953 {
|
|
954 bool valueized = false;
|
131
|
955 combined_fn cfn;
|
111
|
956 if (gimple_call_internal_p (stmt))
|
131
|
957 cfn = as_combined_fn (gimple_call_internal_fn (stmt));
|
111
|
958 else
|
|
959 {
|
|
960 tree fn = gimple_call_fn (stmt);
|
|
961 if (!fn)
|
|
962 return false;
|
|
963
|
|
964 fn = do_valueize (fn, top_valueize, valueized);
|
|
965 if (TREE_CODE (fn) != ADDR_EXPR
|
|
966 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
|
|
967 return false;
|
|
968
|
|
969 tree decl = TREE_OPERAND (fn, 0);
|
|
970 if (DECL_BUILT_IN_CLASS (decl) != BUILT_IN_NORMAL
|
|
971 || !gimple_builtin_call_types_compatible_p (stmt, decl))
|
|
972 return false;
|
|
973
|
131
|
974 cfn = as_combined_fn (DECL_FUNCTION_CODE (decl));
|
111
|
975 }
|
|
976
|
131
|
977 unsigned int num_args = gimple_call_num_args (stmt);
|
|
978 res_op->set_op (cfn, TREE_TYPE (gimple_call_lhs (stmt)), num_args);
|
|
979 for (unsigned i = 0; i < num_args; ++i)
|
111
|
980 {
|
|
981 tree arg = gimple_call_arg (stmt, i);
|
131
|
982 res_op->ops[i] = do_valueize (arg, top_valueize, valueized);
|
111
|
983 }
|
131
|
984 if (internal_fn_p (cfn)
|
|
985 && try_conditional_simplification (as_internal_fn (cfn),
|
|
986 res_op, seq, valueize))
|
|
987 return true;
|
|
988 switch (num_args)
|
111
|
989 {
|
|
990 case 1:
|
131
|
991 return (gimple_resimplify1 (seq, res_op, valueize)
|
111
|
992 || valueized);
|
|
993 case 2:
|
131
|
994 return (gimple_resimplify2 (seq, res_op, valueize)
|
111
|
995 || valueized);
|
|
996 case 3:
|
131
|
997 return (gimple_resimplify3 (seq, res_op, valueize)
|
|
998 || valueized);
|
|
999 case 4:
|
|
1000 return (gimple_resimplify4 (seq, res_op, valueize)
|
|
1001 || valueized);
|
|
1002 case 5:
|
|
1003 return (gimple_resimplify5 (seq, res_op, valueize)
|
111
|
1004 || valueized);
|
|
1005 default:
|
|
1006 gcc_unreachable ();
|
|
1007 }
|
|
1008 }
|
|
1009 break;
|
|
1010
|
|
1011 case GIMPLE_COND:
|
|
1012 {
|
|
1013 tree lhs = gimple_cond_lhs (stmt);
|
|
1014 tree rhs = gimple_cond_rhs (stmt);
|
|
1015 bool valueized = false;
|
|
1016 lhs = do_valueize (lhs, top_valueize, valueized);
|
|
1017 rhs = do_valueize (rhs, top_valueize, valueized);
|
131
|
1018 res_op->set_op (gimple_cond_code (stmt), boolean_type_node, lhs, rhs);
|
|
1019 return (gimple_resimplify2 (seq, res_op, valueize)
|
111
|
1020 || valueized);
|
|
1021 }
|
|
1022
|
|
1023 default:
|
|
1024 break;
|
|
1025 }
|
|
1026
|
|
1027 return false;
|
|
1028 }
|
|
1029
|
|
1030
|
|
1031 /* Helper for the autogenerated code, valueize OP. */
|
|
1032
|
|
1033 inline tree
|
|
1034 do_valueize (tree (*valueize)(tree), tree op)
|
|
1035 {
|
|
1036 if (valueize && TREE_CODE (op) == SSA_NAME)
|
|
1037 {
|
|
1038 tree tem = valueize (op);
|
|
1039 if (tem)
|
|
1040 return tem;
|
|
1041 }
|
|
1042 return op;
|
|
1043 }
|
|
1044
|
|
1045 /* Helper for the autogenerated code, get at the definition of NAME when
|
|
1046 VALUEIZE allows that. */
|
|
1047
|
|
1048 inline gimple *
|
|
1049 get_def (tree (*valueize)(tree), tree name)
|
|
1050 {
|
|
1051 if (valueize && ! valueize (name))
|
|
1052 return NULL;
|
|
1053 return SSA_NAME_DEF_STMT (name);
|
|
1054 }
|
|
1055
|
|
1056 /* Routine to determine if the types T1 and T2 are effectively
|
|
1057 the same for GIMPLE. If T1 or T2 is not a type, the test
|
|
1058 applies to their TREE_TYPE. */
|
|
1059
|
|
1060 static inline bool
|
|
1061 types_match (tree t1, tree t2)
|
|
1062 {
|
|
1063 if (!TYPE_P (t1))
|
|
1064 t1 = TREE_TYPE (t1);
|
|
1065 if (!TYPE_P (t2))
|
|
1066 t2 = TREE_TYPE (t2);
|
|
1067
|
|
1068 return types_compatible_p (t1, t2);
|
|
1069 }
|
|
1070
|
|
1071 /* Return if T has a single use. For GIMPLE, we also allow any
|
|
1072 non-SSA_NAME (ie constants) and zero uses to cope with uses
|
|
1073 that aren't linked up yet. */
|
|
1074
|
|
1075 static inline bool
|
|
1076 single_use (tree t)
|
|
1077 {
|
|
1078 return TREE_CODE (t) != SSA_NAME || has_zero_uses (t) || has_single_use (t);
|
|
1079 }
|
|
1080
|
|
1081 /* Return true if math operations should be canonicalized,
|
|
1082 e.g. sqrt(sqrt(x)) -> pow(x, 0.25). */
|
|
1083
|
|
1084 static inline bool
|
|
1085 canonicalize_math_p ()
|
|
1086 {
|
|
1087 return !cfun || (cfun->curr_properties & PROP_gimple_opt_math) == 0;
|
|
1088 }
|
131
|
1089
|
|
1090 /* Return true if math operations that are beneficial only after
|
|
1091 vectorization should be canonicalized. */
|
|
1092
|
|
1093 static inline bool
|
|
1094 canonicalize_math_after_vectorization_p ()
|
|
1095 {
|
|
1096 return !cfun || (cfun->curr_properties & PROP_gimple_lvec) != 0;
|
|
1097 }
|
|
1098
|
|
/* Return true if pow(cst, x) should be optimized into exp(log(cst) * x).
   As a workaround for SPEC CPU2017 628.pop2_s, don't do it if arg0
   is an exact integer, arg1 = phi_res +/- cst1 and phi_res = PHI <cst2, ...>
   where cst2 +/- cst1 is an exact integer, because then pow (arg0, arg1)
   will likely be exact, while exp (log (arg0) * arg1) might be not.
   Also don't do it if arg1 is phi_res above and cst2 is an exact integer.  */

static bool
optimize_pow_to_exp (tree arg0, tree arg1)
{
  gcc_assert (TREE_CODE (arg0) == REAL_CST);
  /* A non-integral base cannot be computed exactly anyway, so the
     transform loses nothing: allow it.  */
  if (!real_isinteger (TREE_REAL_CST_PTR (arg0), TYPE_MODE (TREE_TYPE (arg0))))
    return true;

  /* The workaround only matters for an SSA exponent we can trace.  */
  if (TREE_CODE (arg1) != SSA_NAME)
    return true;

  gimple *def = SSA_NAME_DEF_STMT (arg1);
  gphi *phi = dyn_cast <gphi *> (def);
  tree cst1 = NULL_TREE;
  enum tree_code code = ERROR_MARK;
  if (!phi)
    {
      /* ARG1 is not directly a PHI result; accept only the shape
	 arg1 = phi_res PLUS/MINUS REAL_CST and dig down to the PHI,
	 remembering the adjustment constant in CST1.  */
      if (!is_gimple_assign (def))
	return true;
      code = gimple_assign_rhs_code (def);
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  break;
	default:
	  return true;
	}
      if (TREE_CODE (gimple_assign_rhs1 (def)) != SSA_NAME
	  || TREE_CODE (gimple_assign_rhs2 (def)) != REAL_CST)
	return true;

      cst1 = gimple_assign_rhs2 (def);

      phi = dyn_cast <gphi *> (SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def)));
      if (!phi)
	return true;
    }

  /* Look for a unique real constant among the PHI arguments; if two
     distinct constants appear the heuristic does not apply and the
     transform is allowed.  */
  tree cst2 = NULL_TREE;
  int n = gimple_phi_num_args (phi);
  for (int i = 0; i < n; i++)
    {
      tree arg = PHI_ARG_DEF (phi, i);
      if (TREE_CODE (arg) != REAL_CST)
	continue;
      else if (cst2 == NULL_TREE)
	cst2 = arg;
      else if (!operand_equal_p (cst2, arg, 0))
	return true;
    }

  /* Apply the +/- CST1 adjustment (if any) to the PHI constant so we
     test the value the exponent would actually take.  */
  if (cst1 && cst2)
    cst2 = const_binop (code, TREE_TYPE (cst2), cst2, cst1);
  /* const_binop may fail or not fold to a constant; only an exact
     integral result blocks the transform.  */
  if (cst2
      && TREE_CODE (cst2) == REAL_CST
      && real_isinteger (TREE_REAL_CST_PTR (cst2),
			 TYPE_MODE (TREE_TYPE (cst2))))
    return false;
  return true;
}
|