comparison gcc/tree-ssa-ccp.c @ 55:77e2b8dfacca gcc-4.4.5

Update it from GCC 4.4.3 to 4.5.0
author ryoma <e075725@ie.u-ryukyu.ac.jp>
date Fri, 12 Feb 2010 23:39:51 +0900
parents 3bfb6c00c1e0
children b7f97abdc517
left column: 52:c156f1bd5cd9   right column: 55:77e2b8dfacca
3 Free Software Foundation, Inc. 3 Free Software Foundation, Inc.
4 Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org> 4 Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
5 Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com> 5 Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
6 6
7 This file is part of GCC. 7 This file is part of GCC.
8 8
9 GCC is free software; you can redistribute it and/or modify it 9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published by the 10 under the terms of the GNU General Public License as published by the
11 Free Software Foundation; either version 3, or (at your option) any 11 Free Software Foundation; either version 3, or (at your option) any
12 later version. 12 later version.
13 13
14 GCC is distributed in the hope that it will be useful, but WITHOUT 14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details. 17 for more details.
18 18
19 You should have received a copy of the GNU General Public License 19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see 20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */ 21 <http://www.gnu.org/licenses/>. */
22 22
23 /* Conditional constant propagation (CCP) is based on the SSA 23 /* Conditional constant propagation (CCP) is based on the SSA
61 61
62 If the statement is a conditional with a constant predicate, we 62 If the statement is a conditional with a constant predicate, we
63 mark the outgoing edges as executable or not executable 63 mark the outgoing edges as executable or not executable
64 depending on the predicate's value. This is then used when 64 depending on the predicate's value. This is then used when
65 visiting PHI nodes to know when a PHI argument can be ignored. 65 visiting PHI nodes to know when a PHI argument can be ignored.
66 66
67 67
68 2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the 68 2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
69 same constant C, then the LHS of the PHI is set to C. This 69 same constant C, then the LHS of the PHI is set to C. This
70 evaluation is known as the "meet operation". Since one of the 70 evaluation is known as the "meet operation". Since one of the
71 goals of this evaluation is to optimistically return constant 71 goals of this evaluation is to optimistically return constant
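Illustration of the two steps above on a tiny function (editorial example, not code from this changeset):

    int f (void)
    {
      int x;
      if (1)       /* Constant predicate: only the true edge is marked
                      executable (step 1).  */
        x = 4;
      else
        x = 5;     /* Arrives on a non-executable edge, so it is ignored;
                      the meet over the PHI for x yields the constant 4
                      (step 2).  */
      return x;    /* CCP folds this to: return 4;  */
    }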
206 #include "tree-ssa-propagate.h" 206 #include "tree-ssa-propagate.h"
207 #include "value-prof.h" 207 #include "value-prof.h"
208 #include "langhooks.h" 208 #include "langhooks.h"
209 #include "target.h" 209 #include "target.h"
210 #include "toplev.h" 210 #include "toplev.h"
211 #include "dbgcnt.h"
211 212
212 213
213 /* Possible lattice values. */ 214 /* Possible lattice values. */
214 typedef enum 215 typedef enum
215 { 216 {
226 memory reference used to store (i.e., the LHS of the assignment 227 memory reference used to store (i.e., the LHS of the assignment
227 doing the store). */ 228 doing the store). */
228 static prop_value_t *const_val; 229 static prop_value_t *const_val;
229 230
230 static void canonicalize_float_value (prop_value_t *); 231 static void canonicalize_float_value (prop_value_t *);
232 static bool ccp_fold_stmt (gimple_stmt_iterator *);
231 233
232 /* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */ 234 /* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */
233 235
234 static void 236 static void
235 dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val) 237 dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
273 275
274 tree 276 tree
275 get_symbol_constant_value (tree sym) 277 get_symbol_constant_value (tree sym)
276 { 278 {
277 if (TREE_STATIC (sym) 279 if (TREE_STATIC (sym)
278 && TREE_READONLY (sym) 280 && (TREE_READONLY (sym)
279 && !MTAG_P (sym)) 281 || TREE_CODE (sym) == CONST_DECL))
280 { 282 {
281 tree val = DECL_INITIAL (sym); 283 tree val = DECL_INITIAL (sym);
282 if (val) 284 if (val)
283 { 285 {
284 STRIP_USELESS_TYPE_CONVERSION (val); 286 STRIP_USELESS_TYPE_CONVERSION (val);
285 if (is_gimple_min_invariant (val)) 287 if (is_gimple_min_invariant (val))
286 return val; 288 {
289 if (TREE_CODE (val) == ADDR_EXPR)
290 {
291 tree base = get_base_address (TREE_OPERAND (val, 0));
292 if (base && TREE_CODE (base) == VAR_DECL)
293 {
294 TREE_ADDRESSABLE (base) = 1;
295 if (gimple_referenced_vars (cfun))
296 add_referenced_var (base);
297 }
298 }
299 return val;
300 }
287 } 301 }
288 /* Variables declared 'const' without an initializer 302 /* Variables declared 'const' without an initializer
289 have zero as the initializer if they may not be 303 have zero as the initializer if they may not be
290 overridden at link or run time. */ 304 overridden at link or run time. */
291 if (!val 305 if (!val
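A small example of the zero-initializer rule just above (illustrative; the variable has internal linkage, so it cannot be overridden at link or run time):

    static const int n;           /* const, no initializer: value is 0.  */
    int g (void) { return n; }    /* get_symbol_constant_value returns 0,
                                     so CCP folds this to return 0.  */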
320 static prop_value_t 334 static prop_value_t
321 get_default_value (tree var) 335 get_default_value (tree var)
322 { 336 {
323 tree sym = SSA_NAME_VAR (var); 337 tree sym = SSA_NAME_VAR (var);
324 prop_value_t val = { UNINITIALIZED, NULL_TREE }; 338 prop_value_t val = { UNINITIALIZED, NULL_TREE };
325 tree cst_val; 339 gimple stmt;
326 340
327 if (!is_gimple_reg (var)) 341 stmt = SSA_NAME_DEF_STMT (var);
328 { 342
329 /* Short circuit for regular CCP. We are not interested in any 343 if (gimple_nop_p (stmt))
330 non-register when DO_STORE_CCP is false. */ 344 {
331 val.lattice_val = VARYING; 345 /* Variables defined by an empty statement are those used
332 } 346 before being initialized. If VAR is a local variable, we
333 else if ((cst_val = get_symbol_constant_value (sym)) != NULL_TREE) 347 can assume initially that it is UNDEFINED, otherwise we must
334 { 348 consider it VARYING. */
335 /* Globals and static variables declared 'const' take their 349 if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
336 initial value. */ 350 val.lattice_val = UNDEFINED;
337 val.lattice_val = CONSTANT; 351 else
338 val.value = cst_val; 352 val.lattice_val = VARYING;
339 } 353 }
340 else 354 else if (is_gimple_assign (stmt)
341 { 355 /* Value-returning GIMPLE_CALL statements assign to
342 gimple stmt = SSA_NAME_DEF_STMT (var); 356 a variable, and are treated similarly to GIMPLE_ASSIGN. */
343 357 || (is_gimple_call (stmt)
344 if (gimple_nop_p (stmt)) 358 && gimple_call_lhs (stmt) != NULL_TREE)
359 || gimple_code (stmt) == GIMPLE_PHI)
360 {
361 tree cst;
362 if (gimple_assign_single_p (stmt)
363 && DECL_P (gimple_assign_rhs1 (stmt))
364 && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
345 { 365 {
346 /* Variables defined by an empty statement are those used 366 val.lattice_val = CONSTANT;
347 before being initialized. If VAR is a local variable, we 367 val.value = cst;
348 can assume initially that it is UNDEFINED, otherwise we must
349 consider it VARYING. */
350 if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
351 val.lattice_val = UNDEFINED;
352 else
353 val.lattice_val = VARYING;
354 }
355 else if (is_gimple_assign (stmt)
356 /* Value-returning GIMPLE_CALL statements assign to
357 a variable, and are treated similarly to GIMPLE_ASSIGN. */
358 || (is_gimple_call (stmt)
359 && gimple_call_lhs (stmt) != NULL_TREE)
360 || gimple_code (stmt) == GIMPLE_PHI)
361 {
362 /* Any other variable defined by an assignment or a PHI node
363 is considered UNDEFINED. */
364 val.lattice_val = UNDEFINED;
365 } 368 }
366 else 369 else
367 { 370 /* Any other variable defined by an assignment or a PHI node
368 /* Otherwise, VAR will never take on a constant value. */ 371 is considered UNDEFINED. */
369 val.lattice_val = VARYING; 372 val.lattice_val = UNDEFINED;
370 } 373 }
374 else
375 {
376 /* Otherwise, VAR will never take on a constant value. */
377 val.lattice_val = VARYING;
371 } 378 }
372 379
373 return val; 380 return val;
374 } 381 }
375 382
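How get_default_value's rules play out on a small function (hypothetical SSA names):

    int h (int p)   /* p_1(D): defined by a nop for a PARM_DECL,
                       so its default value is VARYING.           */
    {
      int i;        /* i_2(D): a local register used before being
                       initialized, so its default is UNDEFINED.  */
      return i + p;
    }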
503 likely_value (gimple stmt) 510 likely_value (gimple stmt)
504 { 511 {
505 bool has_constant_operand, has_undefined_operand, all_undefined_operands; 512 bool has_constant_operand, has_undefined_operand, all_undefined_operands;
506 tree use; 513 tree use;
507 ssa_op_iter iter; 514 ssa_op_iter iter;
515 unsigned i;
508 516
509 enum gimple_code code = gimple_code (stmt); 517 enum gimple_code code = gimple_code (stmt);
510 518
511 /* This function appears to be called only for assignments, calls, 519 /* This function appears to be called only for assignments, calls,
512 conditionals, and switches, due to the logic in visit_stmt. */ 520 conditionals, and switches, due to the logic in visit_stmt. */
518 /* If the statement has volatile operands, it won't fold to a 526 /* If the statement has volatile operands, it won't fold to a
519 constant value. */ 527 constant value. */
520 if (gimple_has_volatile_ops (stmt)) 528 if (gimple_has_volatile_ops (stmt))
521 return VARYING; 529 return VARYING;
522 530
523 /* If we are not doing store-ccp, statements with loads
524 and/or stores will never fold into a constant. */
525 if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
526 return VARYING;
527
528 /* Note that only a GIMPLE_SINGLE_RHS assignment can satisfy
529 is_gimple_min_invariant, so we do not consider calls or
530 other forms of assignment. */
531 if (gimple_assign_single_p (stmt)
532 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
533 return CONSTANT;
534
535 if (code == GIMPLE_COND
536 && is_gimple_min_invariant (gimple_cond_lhs (stmt))
537 && is_gimple_min_invariant (gimple_cond_rhs (stmt)))
538 return CONSTANT;
539
540 if (code == GIMPLE_SWITCH
541 && is_gimple_min_invariant (gimple_switch_index (stmt)))
542 return CONSTANT;
543
544 /* Arrive here for more complex cases. */ 531 /* Arrive here for more complex cases. */
545
546 has_constant_operand = false; 532 has_constant_operand = false;
547 has_undefined_operand = false; 533 has_undefined_operand = false;
548 all_undefined_operands = true; 534 all_undefined_operands = true;
549 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE | SSA_OP_VUSE) 535 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
550 { 536 {
551 prop_value_t *val = get_value (use); 537 prop_value_t *val = get_value (use);
552 538
553 if (val->lattice_val == UNDEFINED) 539 if (val->lattice_val == UNDEFINED)
554 has_undefined_operand = true; 540 has_undefined_operand = true;
555 else 541 else
556 all_undefined_operands = false; 542 all_undefined_operands = false;
557 543
558 if (val->lattice_val == CONSTANT) 544 if (val->lattice_val == CONSTANT)
545 has_constant_operand = true;
546 }
547
548 /* There may be constants in regular rhs operands. For calls we
549 have to ignore lhs, fndecl and static chain, otherwise only
550 the lhs. */
551 for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
552 i < gimple_num_ops (stmt); ++i)
553 {
554 tree op = gimple_op (stmt, i);
555 if (!op || TREE_CODE (op) == SSA_NAME)
556 continue;
557 if (is_gimple_min_invariant (op))
559 has_constant_operand = true; 558 has_constant_operand = true;
560 } 559 }
561 560
562 /* If the operation combines operands like COMPLEX_EXPR make sure to 561 /* If the operation combines operands like COMPLEX_EXPR make sure to
563 not mark the result UNDEFINED if only one part of the result is 562 not mark the result UNDEFINED if only one part of the result is
587 /* If there was an UNDEFINED operand but the result may be not UNDEFINED 586 /* If there was an UNDEFINED operand but the result may be not UNDEFINED
588 fall back to VARYING even if there were CONSTANT operands. */ 587 fall back to VARYING even if there were CONSTANT operands. */
589 if (has_undefined_operand) 588 if (has_undefined_operand)
590 return VARYING; 589 return VARYING;
591 590
591 /* We do not consider virtual operands here -- load from read-only
592 memory may have only VARYING virtual operands, but still be
593 constant. */
592 if (has_constant_operand 594 if (has_constant_operand
593 /* We do not consider virtual operands here -- load from read-only 595 || gimple_references_memory_p (stmt))
594 memory may have only VARYING virtual operands, but still be
595 constant. */
596 || ZERO_SSA_OPERANDS (stmt, SSA_OP_USE))
597 return CONSTANT; 596 return CONSTANT;
598 597
599 return VARYING; 598 return VARYING;
600 } 599 }
601 600
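Intuition for the UNDEFINED handling above, sketched in GIMPLE-like pseudocode (hypothetical SSA names):

    /* x_1 is UNDEFINED, the imaginary part is the constant 2.0:
         z_3 = COMPLEX_EXPR <x_1, 2.0>;
       Marking z_3 UNDEFINED would pretend the constant part is
       undefined as well, so likely_value returns VARYING for such
       combining operations instead.  */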
605 surely_varying_stmt_p (gimple stmt) 604 surely_varying_stmt_p (gimple stmt)
606 { 605 {
607 /* If the statement has operands that we cannot handle, it cannot be 606 /* If the statement has operands that we cannot handle, it cannot be
608 constant. */ 607 constant. */
609 if (gimple_has_volatile_ops (stmt)) 608 if (gimple_has_volatile_ops (stmt))
610 return true;
611
612 if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
613 return true; 609 return true;
614 610
615 /* If it is a call and does not return a value or is not a 611 /* If it is a call and does not return a value or is not a
616 builtin and not an indirect call, it is varying. */ 612 builtin and not an indirect call, it is varying. */
617 if (is_gimple_call (stmt)) 613 if (is_gimple_call (stmt))
620 if (!gimple_call_lhs (stmt) 616 if (!gimple_call_lhs (stmt)
621 || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE 617 || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
622 && !DECL_BUILT_IN (fndecl))) 618 && !DECL_BUILT_IN (fndecl)))
623 return true; 619 return true;
624 } 620 }
621
622 /* Any other store operation is not interesting. */
623 else if (gimple_vdef (stmt))
624 return true;
625 625
626 /* Anything other than assignments and conditional jumps are not 626 /* Anything other than assignments and conditional jumps are not
627 interesting for CCP. */ 627 interesting for CCP. */
628 if (gimple_code (stmt) != GIMPLE_ASSIGN 628 if (gimple_code (stmt) != GIMPLE_ASSIGN
629 && gimple_code (stmt) != GIMPLE_COND 629 && gimple_code (stmt) != GIMPLE_COND
649 gimple_stmt_iterator i; 649 gimple_stmt_iterator i;
650 650
651 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i)) 651 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
652 { 652 {
653 gimple stmt = gsi_stmt (i); 653 gimple stmt = gsi_stmt (i);
654 bool is_varying = surely_varying_stmt_p (stmt); 654 bool is_varying;
655
656 /* A control statement must be simulated at least once;
657 otherwise the edges it controls would never get
658 added. */
659 if (stmt_ends_bb_p (stmt))
660 is_varying = false;
661 else
662 is_varying = surely_varying_stmt_p (stmt);
655 663
656 if (is_varying) 664 if (is_varying)
657 { 665 {
658 tree def; 666 tree def;
659 ssa_op_iter iter; 667 ssa_op_iter iter;
660 668
661 /* If the statement will not produce a constant, mark 669 /* If the statement will not produce a constant, mark
662 all its outputs VARYING. */ 670 all its outputs VARYING. */
663 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS) 671 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
664 { 672 set_value_varying (def);
665 if (is_varying)
666 set_value_varying (def);
667 }
668 } 673 }
669 prop_set_simulate_again (stmt, !is_varying); 674 prop_set_simulate_again (stmt, !is_varying);
670 } 675 }
671 } 676 }
672 677
687 prop_set_simulate_again (phi, true); 692 prop_set_simulate_again (phi, true);
688 } 693 }
689 } 694 }
690 } 695 }
691 696
697 /* Debug count support. Reset the lattice values of SSA names
698 to VARYING once the total number of SSA names analyzed
699 exceeds the specified debug count. */
700
701 static void
702 do_dbg_cnt (void)
703 {
704 unsigned i;
705 for (i = 0; i < num_ssa_names; i++)
706 {
707 if (!dbg_cnt (ccp))
708 {
709 const_val[i].lattice_val = VARYING;
710 const_val[i].value = NULL_TREE;
711 }
712 }
713 }
714
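Since dbg_cnt (ccp) is queried once per SSA name, a miscompilation can be bisected from the command line; assuming the standard debug-counter machinery, for example:

    gcc -O2 -fdbg-cnt=ccp:10 test.c
    # keeps the lattice results of the first 10 SSA names;
    # every later name is reset to VARYING by do_dbg_cnt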
692 715
693 /* Do final substitution of propagated values, cleanup the flowgraph and 716 /* Do final substitution of propagated values, cleanup the flowgraph and
694 free allocated storage. 717 free allocated storage.
695 718
696 Return TRUE when something was optimized. */ 719 Return TRUE when something was optimized. */
697 720
698 static bool 721 static bool
699 ccp_finalize (void) 722 ccp_finalize (void)
700 { 723 {
724 bool something_changed;
725
726 do_dbg_cnt ();
701 /* Perform substitutions based on the known constant values. */ 727 /* Perform substitutions based on the known constant values. */
702 bool something_changed = substitute_and_fold (const_val, false); 728 something_changed = substitute_and_fold (const_val, ccp_fold_stmt);
703 729
704 free (const_val); 730 free (const_val);
705 const_val = NULL; 731 const_val = NULL;
706 return something_changed; 732 return something_changed;
707 } 733 }
854 } 880 }
855 else 881 else
856 return SSA_PROP_NOT_INTERESTING; 882 return SSA_PROP_NOT_INTERESTING;
857 } 883 }
858 884
859 /* Return true if we may propagate the address expression ADDR into the 885 /* Return true if we may propagate the address expression ADDR into the
860 dereference DEREF and cancel them. */ 886 dereference DEREF and cancel them. */
861 887
862 bool 888 bool
863 may_propagate_address_into_dereference (tree addr, tree deref) 889 may_propagate_address_into_dereference (tree addr, tree deref)
864 { 890 {
896 otherwise return the original RHS or NULL_TREE. */ 922 otherwise return the original RHS or NULL_TREE. */
897 923
898 static tree 924 static tree
899 ccp_fold (gimple stmt) 925 ccp_fold (gimple stmt)
900 { 926 {
927 location_t loc = gimple_location (stmt);
901 switch (gimple_code (stmt)) 928 switch (gimple_code (stmt))
902 { 929 {
903 case GIMPLE_ASSIGN: 930 case GIMPLE_ASSIGN:
904 { 931 {
905 enum tree_code subcode = gimple_assign_rhs_code (stmt); 932 enum tree_code subcode = gimple_assign_rhs_code (stmt);
944 *base = save; 971 *base = save;
945 return ret; 972 return ret;
946 } 973 }
947 } 974 }
948 } 975 }
976 else if (TREE_CODE (rhs) == CONSTRUCTOR
977 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
978 && (CONSTRUCTOR_NELTS (rhs)
979 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
980 {
981 unsigned i;
982 tree val, list;
983
984 list = NULL_TREE;
985 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
986 {
987 if (TREE_CODE (val) == SSA_NAME
988 && get_value (val)->lattice_val == CONSTANT)
989 val = get_value (val)->value;
990 if (TREE_CODE (val) == INTEGER_CST
991 || TREE_CODE (val) == REAL_CST
992 || TREE_CODE (val) == FIXED_CST)
993 list = tree_cons (NULL_TREE, val, list);
994 else
995 return NULL_TREE;
996 }
997
998 return build_vector (TREE_TYPE (rhs), nreverse (list));
999 }
949 1000
950 if (kind == tcc_reference) 1001 if (kind == tcc_reference)
951 { 1002 {
952 if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR 1003 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
1004 || TREE_CODE (rhs) == REALPART_EXPR
1005 || TREE_CODE (rhs) == IMAGPART_EXPR)
953 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME) 1006 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
954 { 1007 {
955 prop_value_t *val = get_value (TREE_OPERAND (rhs, 0)); 1008 prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
956 if (val->lattice_val == CONSTANT) 1009 if (val->lattice_val == CONSTANT)
957 return fold_unary (VIEW_CONVERT_EXPR, 1010 return fold_unary_loc (EXPR_LOCATION (rhs),
1011 TREE_CODE (rhs),
958 TREE_TYPE (rhs), val->value); 1012 TREE_TYPE (rhs), val->value);
1013 }
1014 else if (TREE_CODE (rhs) == INDIRECT_REF
1015 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
1016 {
1017 prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
1018 if (val->lattice_val == CONSTANT
1019 && TREE_CODE (val->value) == ADDR_EXPR
1020 && useless_type_conversion_p (TREE_TYPE (rhs),
1021 TREE_TYPE (TREE_TYPE (val->value))))
1022 rhs = TREE_OPERAND (val->value, 0);
959 } 1023 }
960 return fold_const_aggregate_ref (rhs); 1024 return fold_const_aggregate_ref (rhs);
961 } 1025 }
962 else if (kind == tcc_declaration) 1026 else if (kind == tcc_declaration)
963 return get_symbol_constant_value (rhs); 1027 return get_symbol_constant_value (rhs);
964 return rhs; 1028 return rhs;
965 } 1029 }
966 1030
967 case GIMPLE_UNARY_RHS: 1031 case GIMPLE_UNARY_RHS:
968 { 1032 {
969 /* Handle unary operators that can appear in GIMPLE form. 1033 /* Handle unary operators that can appear in GIMPLE form.
970 Note that we know the single operand must be a constant, 1034 Note that we know the single operand must be a constant,
971 so this should almost always return a simplified RHS. */ 1035 so this should almost always return a simplified RHS. */
998 tree tem; 1062 tree tem;
999 /* Still try to generate a constant of correct type. */ 1063 /* Still try to generate a constant of correct type. */
1000 if (!useless_type_conversion_p (TREE_TYPE (lhs), 1064 if (!useless_type_conversion_p (TREE_TYPE (lhs),
1001 TREE_TYPE (op0)) 1065 TREE_TYPE (op0))
1002 && ((tem = maybe_fold_offset_to_address 1066 && ((tem = maybe_fold_offset_to_address
1003 (op0, integer_zero_node, TREE_TYPE (lhs))) 1067 (loc,
1068 op0, integer_zero_node, TREE_TYPE (lhs)))
1004 != NULL_TREE)) 1069 != NULL_TREE))
1005 return tem; 1070 return tem;
1006 return op0; 1071 return op0;
1007 } 1072 }
1008 1073
1009 return fold_unary_ignore_overflow (subcode, 1074 return
1010 gimple_expr_type (stmt), op0); 1075 fold_unary_ignore_overflow_loc (loc, subcode,
1076 gimple_expr_type (stmt), op0);
1011 } 1077 }
1012 1078
1013 case GIMPLE_BINARY_RHS: 1079 case GIMPLE_BINARY_RHS:
1014 { 1080 {
1015 /* Handle binary operators that can appear in GIMPLE form. */ 1081 /* Handle binary operators that can appear in GIMPLE form. */
1034 /* Fold &foo + CST into an invariant reference if possible. */ 1100 /* Fold &foo + CST into an invariant reference if possible. */
1035 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR 1101 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
1036 && TREE_CODE (op0) == ADDR_EXPR 1102 && TREE_CODE (op0) == ADDR_EXPR
1037 && TREE_CODE (op1) == INTEGER_CST) 1103 && TREE_CODE (op1) == INTEGER_CST)
1038 { 1104 {
1039 tree lhs = gimple_assign_lhs (stmt); 1105 tree tem = maybe_fold_offset_to_address
1040 tree tem = maybe_fold_offset_to_address (op0, op1, 1106 (loc, op0, op1, TREE_TYPE (op0));
1041 TREE_TYPE (lhs));
1042 if (tem != NULL_TREE) 1107 if (tem != NULL_TREE)
1043 return tem; 1108 return tem;
1044 } 1109 }
1045 1110
1046 return fold_binary (subcode, gimple_expr_type (stmt), op0, op1); 1111 return fold_binary_loc (loc, subcode,
1112 gimple_expr_type (stmt), op0, op1);
1047 } 1113 }
1048 1114
1049 default: 1115 default:
1050 gcc_unreachable (); 1116 gcc_unreachable ();
1051 } 1117 }
1078 val = get_value (args[i]); 1144 val = get_value (args[i]);
1079 if (val->lattice_val == CONSTANT) 1145 if (val->lattice_val == CONSTANT)
1080 args[i] = val->value; 1146 args[i] = val->value;
1081 } 1147 }
1082 } 1148 }
1083 call = build_call_array (gimple_call_return_type (stmt), 1149 call = build_call_array_loc (loc,
1084 fn, gimple_call_num_args (stmt), args); 1150 gimple_call_return_type (stmt),
1085 retval = fold_call_expr (call, false); 1151 fn, gimple_call_num_args (stmt), args);
1152 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
1086 if (retval) 1153 if (retval)
1087 /* fold_call_expr wraps the result inside a NOP_EXPR. */ 1154 /* fold_call_expr wraps the result inside a NOP_EXPR. */
1088 STRIP_NOPS (retval); 1155 STRIP_NOPS (retval);
1089 return retval; 1156 return retval;
1090 } 1157 }
1111 prop_value_t *val = get_value (op1); 1178 prop_value_t *val = get_value (op1);
1112 if (val->lattice_val == CONSTANT) 1179 if (val->lattice_val == CONSTANT)
1113 op1 = val->value; 1180 op1 = val->value;
1114 } 1181 }
1115 1182
1116 return fold_binary (code, boolean_type_node, op0, op1); 1183 return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
1117 } 1184 }
1118 1185
1119 case GIMPLE_SWITCH: 1186 case GIMPLE_SWITCH:
1120 { 1187 {
1121 tree rhs = gimple_switch_index (stmt); 1188 tree rhs = gimple_switch_index (stmt);
1145 { 1212 {
1146 prop_value_t *value; 1213 prop_value_t *value;
1147 tree base, ctor, idx, field; 1214 tree base, ctor, idx, field;
1148 unsigned HOST_WIDE_INT cnt; 1215 unsigned HOST_WIDE_INT cnt;
1149 tree cfield, cval; 1216 tree cfield, cval;
1217
1218 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
1219 return get_symbol_constant_value (t);
1150 1220
1151 switch (TREE_CODE (t)) 1221 switch (TREE_CODE (t))
1152 { 1222 {
1153 case ARRAY_REF: 1223 case ARRAY_REF:
1154 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its 1224 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
1226 /* Whoo-hoo! I'll fold ya baby. Yeah! */ 1296 /* Whoo-hoo! I'll fold ya baby. Yeah! */
1227 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval) 1297 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
1228 if (tree_int_cst_equal (cfield, idx)) 1298 if (tree_int_cst_equal (cfield, idx))
1229 { 1299 {
1230 STRIP_USELESS_TYPE_CONVERSION (cval); 1300 STRIP_USELESS_TYPE_CONVERSION (cval);
1301 if (TREE_CODE (cval) == ADDR_EXPR)
1302 {
1303 tree base = get_base_address (TREE_OPERAND (cval, 0));
1304 if (base && TREE_CODE (base) == VAR_DECL)
1305 add_referenced_var (base);
1306 }
1231 return cval; 1307 return cval;
1232 } 1308 }
1233 break; 1309 break;
1234 1310
1235 case COMPONENT_REF: 1311 case COMPONENT_REF:
1269 if (cfield == field 1345 if (cfield == field
1270 /* FIXME: Handle bit-fields. */ 1346 /* FIXME: Handle bit-fields. */
1271 && ! DECL_BIT_FIELD (cfield)) 1347 && ! DECL_BIT_FIELD (cfield))
1272 { 1348 {
1273 STRIP_USELESS_TYPE_CONVERSION (cval); 1349 STRIP_USELESS_TYPE_CONVERSION (cval);
1350 if (TREE_CODE (cval) == ADDR_EXPR)
1351 {
1352 tree base = get_base_address (TREE_OPERAND (cval, 0));
1353 if (base && TREE_CODE (base) == VAR_DECL)
1354 add_referenced_var (base);
1355 }
1274 return cval; 1356 return cval;
1275 } 1357 }
1276 break; 1358 break;
1277 1359
1278 case REALPART_EXPR: 1360 case REALPART_EXPR:
1279 case IMAGPART_EXPR: 1361 case IMAGPART_EXPR:
1280 { 1362 {
1281 tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0)); 1363 tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
1282 if (c && TREE_CODE (c) == COMPLEX_CST) 1364 if (c && TREE_CODE (c) == COMPLEX_CST)
1283 return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c); 1365 return fold_build1_loc (EXPR_LOCATION (t),
1366 TREE_CODE (t), TREE_TYPE (t), c);
1284 break; 1367 break;
1285 } 1368 }
1286 1369
1287 case INDIRECT_REF: 1370 case INDIRECT_REF:
1288 { 1371 {
1330 { 1413 {
1331 enum gimple_code code = gimple_code (stmt); 1414 enum gimple_code code = gimple_code (stmt);
1332 if (code == GIMPLE_ASSIGN) 1415 if (code == GIMPLE_ASSIGN)
1333 { 1416 {
1334 enum tree_code subcode = gimple_assign_rhs_code (stmt); 1417 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1335 1418
1336 /* Other cases cannot satisfy is_gimple_min_invariant 1419 /* Other cases cannot satisfy is_gimple_min_invariant
1337 without folding. */ 1420 without folding. */
1338 if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS) 1421 if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
1339 simplified = gimple_assign_rhs1 (stmt); 1422 simplified = gimple_assign_rhs1 (stmt);
1340 } 1423 }
1341 else if (code == GIMPLE_SWITCH) 1424 else if (code == GIMPLE_SWITCH)
1342 simplified = gimple_switch_index (stmt); 1425 simplified = gimple_switch_index (stmt);
1343 else 1426 else
1344 /* These cannot satisfy is_gimple_min_invariant without folding. */ 1427 /* These cannot satisfy is_gimple_min_invariant without folding. */
1345 gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND); 1428 gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
1346 } 1429 }
1347 1430
1348 is_constant = simplified && is_gimple_min_invariant (simplified); 1431 is_constant = simplified && is_gimple_min_invariant (simplified);
1349 1432
1350 fold_undefer_overflow_warnings (is_constant, stmt, 0); 1433 fold_undefer_overflow_warnings (is_constant, stmt, 0);
1388 } 1471 }
1389 1472
1390 return val; 1473 return val;
1391 } 1474 }
1392 1475
1476 /* Fold the stmt at *GSI with CCP-specific information that propagation
1477 and regular folding do not catch. */
1478
1479 static bool
1480 ccp_fold_stmt (gimple_stmt_iterator *gsi)
1481 {
1482 gimple stmt = gsi_stmt (*gsi);
1483 prop_value_t val;
1484
1485 if (gimple_code (stmt) != GIMPLE_COND)
1486 return false;
1487
1488 /* Statement evaluation will handle type mismatches in constants
1489 more gracefully than the final propagation. This allows us to
1490 fold more conditionals here. */
1491 val = evaluate_stmt (stmt);
1492 if (val.lattice_val != CONSTANT
1493 || TREE_CODE (val.value) != INTEGER_CST)
1494 return false;
1495
1496 if (integer_zerop (val.value))
1497 gimple_cond_make_false (stmt);
1498 else
1499 gimple_cond_make_true (stmt);
1500
1501 return true;
1502 }
1503
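A before/after sketch of the conditional folding above (hypothetical GIMPLE):

    /* The lattice knows x_1 is the constant 0:
         if (x_1 == 0) goto <bb 3>; else goto <bb 4>;
       evaluate_stmt yields the INTEGER_CST 1 for the predicate,
       gimple_cond_make_true rewrites the condition to a constant-true
       form, and CFG cleanup later removes the dead edge to <bb 4>.  */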
1393 /* Visit the assignment statement STMT. Set the value of its LHS to the 1504 /* Visit the assignment statement STMT. Set the value of its LHS to the
1394 value computed by the RHS and store LHS in *OUTPUT_P. If STMT 1505 value computed by the RHS and store LHS in *OUTPUT_P. If STMT
1395 creates virtual definitions, set the value of each new name to that 1506 creates virtual definitions, set the value of each new name to that
1396 of the RHS (if we can derive a constant out of the RHS). 1507 of the RHS (if we can derive a constant out of the RHS).
1397 Value-returning call statements also perform an assignment, and 1508 Value-returning call statements also perform an assignment, and
1474 1585
1475 /* Evaluate statement STMT. If the statement produces an output value and 1586 /* Evaluate statement STMT. If the statement produces an output value and
1476 its evaluation changes the lattice value of its output, return 1587 its evaluation changes the lattice value of its output, return
1477 SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the 1588 SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
1478 output value. 1589 output value.
1479 1590
1480 If STMT is a conditional branch and we can determine its truth 1591 If STMT is a conditional branch and we can determine its truth
1481 value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying 1592 value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying
1482 value, return SSA_PROP_VARYING. */ 1593 value, return SSA_PROP_VARYING. */
1483 1594
1484 static enum ssa_prop_result 1595 static enum ssa_prop_result
1556 { 1667 {
1557 return flag_tree_ccp != 0; 1668 return flag_tree_ccp != 0;
1558 } 1669 }
1559 1670
1560 1671
1561 struct gimple_opt_pass pass_ccp = 1672 struct gimple_opt_pass pass_ccp =
1562 { 1673 {
1563 { 1674 {
1564 GIMPLE_PASS, 1675 GIMPLE_PASS,
1565 "ccp", /* name */ 1676 "ccp", /* name */
1566 gate_ccp, /* gate */ 1677 gate_ccp, /* gate */
1577 | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */ 1688 | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
1578 } 1689 }
1579 }; 1690 };
1580 1691
1581 1692
1582 /* A subroutine of fold_stmt_r. Attempts to fold *(A+O) to A[X]. 1693 /* A subroutine of fold_stmt. Attempts to fold *(A+O) to A[X].
1583 BASE is an array type. OFFSET is a byte displacement. ORIG_TYPE 1694 BASE is an array type. OFFSET is a byte displacement. ORIG_TYPE
1584 is the desired result type. */ 1695 is the desired result type.
1696
1697 LOC is the location of the original expression. */
1585 1698
1586 static tree 1699 static tree
1587 maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type, 1700 maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset,
1701 tree orig_type,
1588 bool allow_negative_idx) 1702 bool allow_negative_idx)
1589 { 1703 {
1590 tree min_idx, idx, idx_type, elt_offset = integer_zero_node; 1704 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
1591 tree array_type, elt_type, elt_size; 1705 tree array_type, elt_type, elt_size;
1592 tree domain_type; 1706 tree domain_type;
1691 give false warnings. The same is true for 1805 give false warnings. The same is true for
1692 struct A { long x; char d[0]; } *a; 1806 struct A { long x; char d[0]; } *a;
1693 (char *)a - 4; 1807 (char *)a - 4;
1694 which should not be folded to &a->d[-8]. */ 1808 which should not be folded to &a->d[-8]. */
1695 if (domain_type 1809 if (domain_type
1696 && TYPE_MAX_VALUE (domain_type) 1810 && TYPE_MAX_VALUE (domain_type)
1697 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST) 1811 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
1698 { 1812 {
1699 tree up_bound = TYPE_MAX_VALUE (domain_type); 1813 tree up_bound = TYPE_MAX_VALUE (domain_type);
1700 1814
1701 if (tree_int_cst_lt (up_bound, idx) 1815 if (tree_int_cst_lt (up_bound, idx)
1715 } 1829 }
1716 else if (!allow_negative_idx 1830 else if (!allow_negative_idx
1717 && compare_tree_int (idx, 0) < 0) 1831 && compare_tree_int (idx, 0) < 0)
1718 return NULL_TREE; 1832 return NULL_TREE;
1719 1833
1720 return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE); 1834 {
1835 tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
1836 SET_EXPR_LOCATION (t, loc);
1837 return t;
1838 }
1721 } 1839 }
1722 1840
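An illustrative instance of the *(A+O) to A[X] folding above (hypothetical declarations):

    int a[10];
    /* BASE == a, OFFSET == 8, ORIG_TYPE == int:
       the element size is 4 bytes, so idx = 8 / 4 = 2 and
       *(int *)((char *)&a + 8) folds to the ARRAY_REF a[2].  */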
1723 1841
1724 /* Attempt to fold *(S+O) to S.X. 1842 /* Attempt to fold *(S+O) to S.X.
1725 BASE is a record type. OFFSET is a byte displacement. ORIG_TYPE 1843 BASE is a record type. OFFSET is a byte displacement. ORIG_TYPE
1726 is the desired result type. */ 1844 is the desired result type.
1845
1846 LOC is the location of the original expression. */
1727 1847
1728 static tree 1848 static tree
1729 maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset, 1849 maybe_fold_offset_to_component_ref (location_t loc, tree record_type,
1730 tree orig_type) 1850 tree base, tree offset, tree orig_type)
1731 { 1851 {
1732 tree f, t, field_type, tail_array_field, field_offset; 1852 tree f, t, field_type, tail_array_field, field_offset;
1733 tree ret; 1853 tree ret;
1734 tree new_base; 1854 tree new_base;
1735 1855
1780 && useless_type_conversion_p (orig_type, field_type)) 1900 && useless_type_conversion_p (orig_type, field_type))
1781 { 1901 {
1782 t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE); 1902 t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
1783 return t; 1903 return t;
1784 } 1904 }
1785 1905
1786 /* Don't care about offsets into the middle of scalars. */ 1906 /* Don't care about offsets into the middle of scalars. */
1787 if (!AGGREGATE_TYPE_P (field_type)) 1907 if (!AGGREGATE_TYPE_P (field_type))
1788 continue; 1908 continue;
1789 1909
1790 /* Check for array at the end of the struct. This is often 1910 /* Check for array at the end of the struct. This is often
1802 continue; 1922 continue;
1803 1923
1804 /* If we matched, then set offset to the displacement into 1924 /* If we matched, then set offset to the displacement into
1805 this field. */ 1925 this field. */
1806 new_base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE); 1926 new_base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
1927 SET_EXPR_LOCATION (new_base, loc);
1807 1928
1808 /* Recurse to possibly find the match. */ 1929 /* Recurse to possibly find the match. */
1809 ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type, 1930 ret = maybe_fold_offset_to_array_ref (loc, new_base, t, orig_type,
1810 f == TYPE_FIELDS (record_type)); 1931 f == TYPE_FIELDS (record_type));
1811 if (ret) 1932 if (ret)
1812 return ret; 1933 return ret;
1813 ret = maybe_fold_offset_to_component_ref (field_type, new_base, t, 1934 ret = maybe_fold_offset_to_component_ref (loc, field_type, new_base, t,
1814 orig_type); 1935 orig_type);
1815 if (ret) 1936 if (ret)
1816 return ret; 1937 return ret;
1817 } 1938 }
1818 1939
1821 1942
1822 f = tail_array_field; 1943 f = tail_array_field;
1823 field_type = TREE_TYPE (f); 1944 field_type = TREE_TYPE (f);
1824 offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1); 1945 offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);
1825 1946
1826 /* If we get here, we've got an aggregate field, and a possibly 1947 /* If we get here, we've got an aggregate field, and a possibly
1827 nonzero offset into them. Recurse and hope for a valid match. */ 1948 nonzero offset into them. Recurse and hope for a valid match. */
1828 base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE); 1949 base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
1829 1950 SET_EXPR_LOCATION (base, loc);
1830 t = maybe_fold_offset_to_array_ref (base, offset, orig_type, 1951
1952 t = maybe_fold_offset_to_array_ref (loc, base, offset, orig_type,
1831 f == TYPE_FIELDS (record_type)); 1953 f == TYPE_FIELDS (record_type));
1832 if (t) 1954 if (t)
1833 return t; 1955 return t;
1834 return maybe_fold_offset_to_component_ref (field_type, base, offset, 1956 return maybe_fold_offset_to_component_ref (loc, field_type, base, offset,
1835 orig_type); 1957 orig_type);
1836 } 1958 }
1837 1959
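The corresponding *(S+O) to S.X case, again on hypothetical declarations:

    struct S { int x; int y; } s;
    /* RECORD_TYPE == struct S, OFFSET == 4, ORIG_TYPE == int:
       byte_position (y) == 4 matches, so *(int *)((char *)&s + 4)
       folds to the COMPONENT_REF s.y.  */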
1838 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type 1960 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
1839 or BASE[index] or by combination of those. 1961 or BASE[index] or by combination of those.
1962
1963 LOC is the location of original expression.
1840 1964
1841 Before attempting the conversion strip off existing ADDR_EXPRs and 1965 Before attempting the conversion strip off existing ADDR_EXPRs and
1842 handled component refs. */ 1966 handled component refs. */
1843 1967
1844 tree 1968 tree
1845 maybe_fold_offset_to_reference (tree base, tree offset, tree orig_type) 1969 maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
1970 tree orig_type)
1846 { 1971 {
1847 tree ret; 1972 tree ret;
1848 tree type; 1973 tree type;
1849 1974
1850 STRIP_NOPS (base); 1975 STRIP_NOPS (base);
1870 { 1995 {
1871 base = newbase; 1996 base = newbase;
1872 if (sub_offset) 1997 if (sub_offset)
1873 offset = int_const_binop (PLUS_EXPR, offset, 1998 offset = int_const_binop (PLUS_EXPR, offset,
1874 build_int_cst (TREE_TYPE (offset), 1999 build_int_cst (TREE_TYPE (offset),
1875 sub_offset / BITS_PER_UNIT), 1); 2000 sub_offset / BITS_PER_UNIT), 1);
1876 } 2001 }
1877 } 2002 }
1878 if (useless_type_conversion_p (orig_type, TREE_TYPE (base)) 2003 if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
1879 && integer_zerop (offset)) 2004 && integer_zerop (offset))
1880 return base; 2005 return base;
1881 type = TREE_TYPE (base); 2006 type = TREE_TYPE (base);
1882 2007
1883 ret = maybe_fold_offset_to_component_ref (type, base, offset, orig_type); 2008 ret = maybe_fold_offset_to_component_ref (loc, type, base, offset, orig_type);
1884 if (!ret) 2009 if (!ret)
1885 ret = maybe_fold_offset_to_array_ref (base, offset, orig_type, true); 2010 ret = maybe_fold_offset_to_array_ref (loc, base, offset, orig_type, true);
1886 2011
1887 return ret; 2012 return ret;
1888 } 2013 }
1889 2014
1890 /* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type 2015 /* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
1891 or &BASE[index] or by combination of those. 2016 or &BASE[index] or by combination of those.
1892 2017
2018 LOC is the location of the original expression.
2019
1893 Before attempting the conversion strip off existing component refs. */ 2020 Before attempting the conversion strip off existing component refs. */
1894 2021
1895 tree 2022 tree
1896 maybe_fold_offset_to_address (tree addr, tree offset, tree orig_type) 2023 maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
2024 tree orig_type)
1897 { 2025 {
1898 tree t; 2026 tree t;
1899 2027
1900 gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr)) 2028 gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
1901 && POINTER_TYPE_P (orig_type)); 2029 && POINTER_TYPE_P (orig_type));
1902 2030
1903 t = maybe_fold_offset_to_reference (addr, offset, TREE_TYPE (orig_type)); 2031 t = maybe_fold_offset_to_reference (loc, addr, offset,
2032 TREE_TYPE (orig_type));
1904 if (t != NULL_TREE) 2033 if (t != NULL_TREE)
1905 { 2034 {
1906 tree orig = addr; 2035 tree orig = addr;
1907 tree ptr_type; 2036 tree ptr_type;
1908 2037
1935 return NULL_TREE; 2064 return NULL_TREE;
1936 2065
1937 ptr_type = build_pointer_type (TREE_TYPE (t)); 2066 ptr_type = build_pointer_type (TREE_TYPE (t));
1938 if (!useless_type_conversion_p (orig_type, ptr_type)) 2067 if (!useless_type_conversion_p (orig_type, ptr_type))
1939 return NULL_TREE; 2068 return NULL_TREE;
1940 return build_fold_addr_expr_with_type (t, ptr_type); 2069 return build_fold_addr_expr_with_type_loc (loc, t, ptr_type);
1941 } 2070 }
1942 2071
1943 return NULL_TREE; 2072 return NULL_TREE;
1944 } 2073 }
1945 2074
1946 /* A subroutine of fold_stmt_r. Attempt to simplify *(BASE+OFFSET). 2075 /* A subroutine of fold_stmt. Attempt to simplify *(BASE+OFFSET).
1947 Return the simplified expression, or NULL if nothing could be done. */ 2076 Return the simplified expression, or NULL if nothing could be done. */
1948 2077
1949 static tree 2078 static tree
1950 maybe_fold_stmt_indirect (tree expr, tree base, tree offset) 2079 maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
1951 { 2080 {
1952 tree t; 2081 tree t;
1953 bool volatile_p = TREE_THIS_VOLATILE (expr); 2082 bool volatile_p = TREE_THIS_VOLATILE (expr);
2083 location_t loc = EXPR_LOCATION (expr);
1954 2084
1955 /* We may well have constructed a double-nested PLUS_EXPR via multiple 2085 /* We may well have constructed a double-nested PLUS_EXPR via multiple
1956 substitutions. Fold that down to one. Remove NON_LVALUE_EXPRs that 2086 substitutions. Fold that down to one. Remove NON_LVALUE_EXPRs that
1957 are sometimes added. */ 2087 are sometimes added. */
1958 base = fold (base); 2088 base = fold (base);
1989 if (TREE_CODE (base) == CONST_DECL 2119 if (TREE_CODE (base) == CONST_DECL
1990 && is_gimple_min_invariant (DECL_INITIAL (base))) 2120 && is_gimple_min_invariant (DECL_INITIAL (base)))
1991 return DECL_INITIAL (base); 2121 return DECL_INITIAL (base);
1992 2122
1993 /* Try folding *(&B+O) to B.X. */ 2123 /* Try folding *(&B+O) to B.X. */
1994 t = maybe_fold_offset_to_reference (base_addr, offset, 2124 t = maybe_fold_offset_to_reference (loc, base_addr, offset,
1995 TREE_TYPE (expr)); 2125 TREE_TYPE (expr));
1996 if (t) 2126 if (t)
1997 { 2127 {
1998 /* Preserve volatileness of the original expression. 2128 /* Preserve volatileness of the original expression.
1999 We can end up with a plain decl here which is shared 2129 We can end up with a plain decl here which is shared
2003 return t; 2133 return t;
2004 } 2134 }
2005 } 2135 }
2006 else 2136 else
2007 { 2137 {
2008 /* We can get here for out-of-range string constant accesses, 2138 /* We can get here for out-of-range string constant accesses,
2009 such as "_"[3]. Bail out of the entire substitution search 2139 such as "_"[3]. Bail out of the entire substitution search
2010 and arrange for the entire statement to be replaced by a 2140 and arrange for the entire statement to be replaced by a
2011 call to __builtin_trap. In all likelihood this will all be 2141 call to __builtin_trap. In all likelihood this will all be
2012 constant-folded away, but in the meantime we can't leave with 2142 constant-folded away, but in the meantime we can't leave with
2013 something that get_expr_operands can't understand. */ 2143 something that get_expr_operands can't understand. */
2016 STRIP_NOPS (t); 2146 STRIP_NOPS (t);
2017 if (TREE_CODE (t) == ADDR_EXPR 2147 if (TREE_CODE (t) == ADDR_EXPR
2018 && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST) 2148 && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
2019 { 2149 {
2020 /* FIXME: Except that this causes problems elsewhere with dead 2150 /* FIXME: Except that this causes problems elsewhere with dead
2021 code not being deleted, and we die in the rtl expanders 2151 code not being deleted, and we die in the rtl expanders
2022 because we failed to remove some ssa_name. In the meantime, 2152 because we failed to remove some ssa_name. In the meantime,
2023 just return zero. */ 2153 just return zero. */
2024 /* FIXME2: This condition should be signaled by 2154 /* FIXME2: This condition should be signaled by
2025 fold_read_from_constant_string directly, rather than 2155 fold_read_from_constant_string directly, rather than
2026 re-checking for it here. */ 2156 re-checking for it here. */
2027 return integer_zero_node; 2157 return integer_zero_node;
2028 } 2158 }
2029 2159
2030 /* Try folding *(B+O) to B->X. Still an improvement. */ 2160 /* Try folding *(B+O) to B->X. Still an improvement. */
2031 if (POINTER_TYPE_P (TREE_TYPE (base))) 2161 if (POINTER_TYPE_P (TREE_TYPE (base)))
2032 { 2162 {
2033 t = maybe_fold_offset_to_reference (base, offset, 2163 t = maybe_fold_offset_to_reference (loc, base, offset,
2034 TREE_TYPE (expr)); 2164 TREE_TYPE (expr));
2035 if (t) 2165 if (t)
2036 return t; 2166 return t;
2037 } 2167 }
2038 } 2168 }
2053 type of the POINTER_PLUS_EXPR. We'd like to turn this into 2183 type of the POINTER_PLUS_EXPR. We'd like to turn this into
2054 &array[x] 2184 &array[x]
2055 which may be able to propagate further. */ 2185 which may be able to propagate further. */
2056 2186
2057 tree 2187 tree
2058 maybe_fold_stmt_addition (tree res_type, tree op0, tree op1) 2188 maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
2059 { 2189 {
2060 tree ptd_type; 2190 tree ptd_type;
2061 tree t; 2191 tree t;
2062 2192
2063 /* It had better be a constant. */
2064 if (TREE_CODE (op1) != INTEGER_CST)
2065 return NULL_TREE;
2066 /* The first operand should be an ADDR_EXPR. */ 2193 /* The first operand should be an ADDR_EXPR. */
2067 if (TREE_CODE (op0) != ADDR_EXPR) 2194 if (TREE_CODE (op0) != ADDR_EXPR)
2068 return NULL_TREE; 2195 return NULL_TREE;
2069 op0 = TREE_OPERAND (op0, 0); 2196 op0 = TREE_OPERAND (op0, 0);
2197
2198 /* It had better be a constant. */
2199 if (TREE_CODE (op1) != INTEGER_CST)
2200 {
2201 /* Or op0 should now be A[0] and the non-constant offset defined
2202 via a multiplication by the array element size. */
2203 if (TREE_CODE (op0) == ARRAY_REF
2204 && integer_zerop (TREE_OPERAND (op0, 1))
2205 && TREE_CODE (op1) == SSA_NAME
2206 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (op0)), 1))
2207 {
2208 gimple offset_def = SSA_NAME_DEF_STMT (op1);
2209 if (!is_gimple_assign (offset_def))
2210 return NULL_TREE;
2211
2212 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
2213 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
2214 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def),
2215 TYPE_SIZE_UNIT (TREE_TYPE (op0))))
2216 return build_fold_addr_expr
2217 (build4 (ARRAY_REF, TREE_TYPE (op0),
2218 TREE_OPERAND (op0, 0),
2219 gimple_assign_rhs1 (offset_def),
2220 TREE_OPERAND (op0, 2),
2221 TREE_OPERAND (op0, 3)));
2222 else if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (op0)))
2223 && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
2224 return build_fold_addr_expr
2225 (build4 (ARRAY_REF, TREE_TYPE (op0),
2226 TREE_OPERAND (op0, 0),
2227 op1,
2228 TREE_OPERAND (op0, 2),
2229 TREE_OPERAND (op0, 3)));
2230 }
2231 return NULL_TREE;
2232 }
2070 2233
2071 /* If the first operand is an ARRAY_REF, expand it so that we can fold 2234 /* If the first operand is an ARRAY_REF, expand it so that we can fold
2072 the offset into it. */ 2235 the offset into it. */
2073 while (TREE_CODE (op0) == ARRAY_REF) 2236 while (TREE_CODE (op0) == ARRAY_REF)
2074 { 2237 {
2117 if (VOID_TYPE_P (ptd_type) 2280 if (VOID_TYPE_P (ptd_type)
2118 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE) 2281 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
2119 ptd_type = TREE_TYPE (TREE_TYPE (op0)); 2282 ptd_type = TREE_TYPE (TREE_TYPE (op0));
2120 2283
2121 /* At which point we can try some of the same things as for indirects. */ 2284 /* At which point we can try some of the same things as for indirects. */
2122 t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type, true); 2285 t = maybe_fold_offset_to_array_ref (loc, op0, op1, ptd_type, true);
2123 if (!t) 2286 if (!t)
2124 t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1, 2287 t = maybe_fold_offset_to_component_ref (loc, TREE_TYPE (op0), op0, op1,
2125 ptd_type); 2288 ptd_type);
2126 if (t) 2289 if (t)
2127 t = build1 (ADDR_EXPR, res_type, t); 2290 {
2291 t = build1 (ADDR_EXPR, res_type, t);
2292 SET_EXPR_LOCATION (t, loc);
2293 }
2128 2294
2129 return t; 2295 return t;
2130 } 2296 }
2131 2297
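The new non-constant branch above matches an index that was rebuilt from a scaled offset; a sketch of the pattern (hypothetical GIMPLE):

    /* int a[16];
         off_2 = i_1 * 4;        <- MULT_EXPR by the element size
         p_3 = &a[0] + off_2;    <- POINTER_PLUS_EXPR, op0 is a[0]
       is folded back to the address of a variable element:
         p_3 = &a[i_1];  */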
2132 /* For passing state through walk_tree into fold_stmt_r and its 2298 /* Subroutine of fold_stmt. We perform several simplifications of the
2133 children. */ 2299 memory reference tree EXPR and make sure to re-gimplify them properly
2134 2300 after propagation of constant addresses. IS_LHS is true if the
2135 struct fold_stmt_r_data 2301 reference is supposed to be an lvalue. */
2136 {
2137 gimple stmt;
2138 bool *changed_p;
2139 bool *inside_addr_expr_p;
2140 };
2141
2142 /* Subroutine of fold_stmt called via walk_tree. We perform several
2143 simplifications of EXPR_P, mostly having to do with pointer arithmetic. */
2144 2302
2145 static tree 2303 static tree
2146 fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data) 2304 maybe_fold_reference (tree expr, bool is_lhs)
2147 { 2305 {
2148 struct walk_stmt_info *wi = (struct walk_stmt_info *) data; 2306 tree *t = &expr;
2149 struct fold_stmt_r_data *fold_stmt_r_data; 2307
2150 bool *inside_addr_expr_p; 2308 if (TREE_CODE (expr) == ARRAY_REF
2151 bool *changed_p; 2309 && !is_lhs)
2152 tree expr = *expr_p, t; 2310 {
2153 bool volatile_p = TREE_THIS_VOLATILE (expr); 2311 tree tem = fold_read_from_constant_string (expr);
2154 2312 if (tem)
2155 fold_stmt_r_data = (struct fold_stmt_r_data *) wi->info; 2313 return tem;
2156 inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p; 2314 }
2157 changed_p = fold_stmt_r_data->changed_p; 2315
2158 2316 /* ??? We might want to open-code the relevant remaining cases
2159 /* ??? It'd be nice if walk_tree had a pre-order option. */ 2317 to avoid using the generic fold. */
2160 switch (TREE_CODE (expr)) 2318 if (handled_component_p (*t)
2161 { 2319 && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0)))
2162 case INDIRECT_REF: 2320 {
2163 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL); 2321 tree tem = fold (*t);
2164 if (t) 2322 if (tem != *t)
2165 return t; 2323 return tem;
2166 *walk_subtrees = 0; 2324 }
2167 2325
2168 t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0), 2326 while (handled_component_p (*t))
2169 integer_zero_node); 2327 t = &TREE_OPERAND (*t, 0);
2328
2329 if (TREE_CODE (*t) == INDIRECT_REF)
2330 {
2331 tree tem = maybe_fold_stmt_indirect (*t, TREE_OPERAND (*t, 0),
2332 integer_zero_node);
2170 /* Avoid folding *"abc" = 5 into 'a' = 5. */ 2333 /* Avoid folding *"abc" = 5 into 'a' = 5. */
2171 if (wi->is_lhs && t && TREE_CODE (t) == INTEGER_CST) 2334 if (is_lhs && tem && CONSTANT_CLASS_P (tem))
2172 t = NULL_TREE; 2335 tem = NULL_TREE;
2173 if (!t 2336 if (!tem
2174 && TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR) 2337 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR)
2175 /* If we had a good reason for propagating the address here, 2338 /* If we had a good reason for propagating the address here,
2176 make sure we end up with valid gimple. See PR34989. */ 2339 make sure we end up with valid gimple. See PR34989. */
2177 t = TREE_OPERAND (TREE_OPERAND (expr, 0), 0); 2340 tem = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
2178 break; 2341
2179 2342 if (tem)
2180 case NOP_EXPR: 2343 {
2181 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL); 2344 *t = tem;
2182 if (t) 2345 tem = maybe_fold_reference (expr, is_lhs);
2183 return t; 2346 if (tem)
2184 *walk_subtrees = 0; 2347 return tem;
2185 2348 return expr;
2186 if (POINTER_TYPE_P (TREE_TYPE (expr)) 2349 }
2187 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (expr))) 2350 }
2188 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0))) 2351 else if (!is_lhs
2189 && (t = maybe_fold_offset_to_address (TREE_OPERAND (expr, 0), 2352 && DECL_P (*t))
2190 integer_zero_node, 2353 {
2191 TREE_TYPE (TREE_TYPE (expr))))) 2354 tree tem = get_symbol_constant_value (*t);
2192 return t; 2355 if (tem)
2193 break; 2356 {
2194 2357 *t = tem;
2195 /* ??? Could handle more ARRAY_REFs here, as a variant of INDIRECT_REF. 2358 tem = maybe_fold_reference (expr, is_lhs);
2196 We'd only want to bother decomposing an existing ARRAY_REF if 2359 if (tem)
2197 the base array is found to have another offset contained within. 2360 return tem;
2198 Otherwise we'd be wasting time. */ 2361 return expr;
2199 case ARRAY_REF: 2362 }
2200 /* If we are not processing expressions found within an
2201 ADDR_EXPR, then we can fold constant array references.
2202 Don't fold on LHS either, to avoid folding "abc"[0] = 5
2203 into 'a' = 5. */
2204 if (!*inside_addr_expr_p && !wi->is_lhs)
2205 t = fold_read_from_constant_string (expr);
2206 else
2207 t = NULL;
2208 break;
2209
2210 case ADDR_EXPR:
2211 *inside_addr_expr_p = true;
2212 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2213 *inside_addr_expr_p = false;
2214 if (t)
2215 return t;
2216 *walk_subtrees = 0;
2217
2218 /* Make sure the value is properly considered constant, and so gets
2219 propagated as expected. */
2220 if (*changed_p)
2221 recompute_tree_invariant_for_addr_expr (expr);
2222 return NULL_TREE;
2223
2224 case COMPONENT_REF:
2225 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2226 if (t)
2227 return t;
2228 *walk_subtrees = 0;
2229
2230 /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
2231 We've already checked that the records are compatible, so we should
2232 come up with a set of compatible fields. */
2233 {
2234 tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
2235 tree expr_field = TREE_OPERAND (expr, 1);
2236
2237 if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
2238 {
2239 expr_field = find_compatible_field (expr_record, expr_field);
2240 TREE_OPERAND (expr, 1) = expr_field;
2241 }
2242 }
2243 break;
2244
2245 case TARGET_MEM_REF:
2246 t = maybe_fold_tmr (expr);
2247 break;
2248
2249 case POINTER_PLUS_EXPR:
2250 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2251 if (t)
2252 return t;
2253 t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
2254 if (t)
2255 return t;
2256 *walk_subtrees = 0;
2257
2258 t = maybe_fold_stmt_addition (TREE_TYPE (expr),
2259 TREE_OPERAND (expr, 0),
2260 TREE_OPERAND (expr, 1));
2261 break;
2262
2263 case COND_EXPR:
2264 if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0)))
2265 {
2266 tree op0 = TREE_OPERAND (expr, 0);
2267 tree tem;
2268 bool set;
2269
2270 fold_defer_overflow_warnings ();
2271 tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
2272 TREE_OPERAND (op0, 0),
2273 TREE_OPERAND (op0, 1));
2274 /* This is actually a conditional expression, not a GIMPLE
2275 conditional statement, however, the valid_gimple_rhs_p
2276 test still applies. */
2277 set = tem && is_gimple_condexpr (tem) && valid_gimple_rhs_p (tem);
2278 fold_undefer_overflow_warnings (set, fold_stmt_r_data->stmt, 0);
2279 if (set)
2280 {
2281 COND_EXPR_COND (expr) = tem;
2282 t = expr;
2283 break;
2284 }
2285 }
2286 return NULL_TREE;
2287
2288 default:
2289 return NULL_TREE;
2290 }
2291
2292 if (t)
2293 {
2294 /* Preserve volatileness of the original expression.
2295 We can end up with a plain decl here which is shared
2296 and we shouldn't mess with its flags. */
2297 if (!SSA_VAR_P (t))
2298 TREE_THIS_VOLATILE (t) = volatile_p;
2299 *expr_p = t;
2300 *changed_p = true;
2301 } 2363 }
2302 2364
2303 return NULL_TREE; 2365 return NULL_TREE;
2304 } 2366 }
2367
2305 2368
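Two loads the rewritten maybe_fold_reference can simplify (illustrative; both apply only when the reference is not an lvalue):

    static const int k = 42;
    /* rhs  k        : folded to 42 via get_symbol_constant_value.    */
    /* rhs  "abc"[1] : folded to 'b' via
                       fold_read_from_constant_string.                */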
2306 /* Return the string length, maximum string length or maximum value of 2369 /* Return the string length, maximum string length or maximum value of
2307 ARG in LENGTH. 2370 ARG in LENGTH.
2308 If ARG is an SSA name variable, follow its use-def chains. If LENGTH 2371 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
2309 is not NULL and, for TYPE == 0, its value is not equal to the length 2372 is not NULL and, for TYPE == 0, its value is not equal to the length
2315 static bool 2378 static bool
2316 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type) 2379 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
2317 { 2380 {
2318 tree var, val; 2381 tree var, val;
2319 gimple def_stmt; 2382 gimple def_stmt;
2320 2383
2321 if (TREE_CODE (arg) != SSA_NAME) 2384 if (TREE_CODE (arg) != SSA_NAME)
2322 { 2385 {
2323 if (TREE_CODE (arg) == COND_EXPR) 2386 if (TREE_CODE (arg) == COND_EXPR)
2324 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type) 2387 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
2325 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type); 2388 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
2410 2473
2411 if (!get_maxval_strlen (arg, length, visited, type)) 2474 if (!get_maxval_strlen (arg, length, visited, type))
2412 return false; 2475 return false;
2413 } 2476 }
2414 } 2477 }
2415 return true; 2478 return true;
2416 2479
2417 default: 2480 default:
2418 return false; 2481 return false;
2419 } 2482 }
2420 } 2483 }
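
A minimal sketch (not from this changeset) of the kind of code get_maxval_strlen reasons about: the argument is a PHI of two string literals, so following its use-def chain yields a known length for the strlen call.

    #include <string.h>

    size_t
    f (int flag)
    {
      const char *s = flag ? "abcd" : "wxyz";  /* PHI of two literals */
      return strlen (s);  /* both arms have length 4, so this may fold to 4 */
    }
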
2434 tree callee, a; 2497 tree callee, a;
2435 int arg_idx, type; 2498 int arg_idx, type;
2436 bitmap visited; 2499 bitmap visited;
2437 bool ignore; 2500 bool ignore;
2438 int nargs; 2501 int nargs;
2502 location_t loc = gimple_location (stmt);
2439 2503
2440 gcc_assert (is_gimple_call (stmt)); 2504 gcc_assert (is_gimple_call (stmt));
2441 2505
2442 ignore = (gimple_call_lhs (stmt) == NULL); 2506 ignore = (gimple_call_lhs (stmt) == NULL);
2443 2507
2530 } 2594 }
2531 break; 2595 break;
2532 2596
2533 case BUILT_IN_STRCPY: 2597 case BUILT_IN_STRCPY:
2534 if (val[1] && is_gimple_val (val[1]) && nargs == 2) 2598 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
2535 result = fold_builtin_strcpy (callee, 2599 result = fold_builtin_strcpy (loc, callee,
2536 gimple_call_arg (stmt, 0), 2600 gimple_call_arg (stmt, 0),
2537 gimple_call_arg (stmt, 1), 2601 gimple_call_arg (stmt, 1),
2538 val[1]); 2602 val[1]);
2539 break; 2603 break;
2540 2604
2541 case BUILT_IN_STRNCPY: 2605 case BUILT_IN_STRNCPY:
2542 if (val[1] && is_gimple_val (val[1]) && nargs == 3) 2606 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
2543 result = fold_builtin_strncpy (callee, 2607 result = fold_builtin_strncpy (loc, callee,
2544 gimple_call_arg (stmt, 0), 2608 gimple_call_arg (stmt, 0),
2545 gimple_call_arg (stmt, 1), 2609 gimple_call_arg (stmt, 1),
2546 gimple_call_arg (stmt, 2), 2610 gimple_call_arg (stmt, 2),
2547 val[1]); 2611 val[1]);
2548 break; 2612 break;
2549 2613
2550 case BUILT_IN_FPUTS: 2614 case BUILT_IN_FPUTS:
2551 if (nargs == 2) 2615 if (nargs == 2)
2552 result = fold_builtin_fputs (gimple_call_arg (stmt, 0), 2616 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
2553 gimple_call_arg (stmt, 1), 2617 gimple_call_arg (stmt, 1),
2554 ignore, false, val[0]); 2618 ignore, false, val[0]);
2555 break; 2619 break;
2556 2620
2557 case BUILT_IN_FPUTS_UNLOCKED: 2621 case BUILT_IN_FPUTS_UNLOCKED:
2558 if (nargs == 2) 2622 if (nargs == 2)
2559 result = fold_builtin_fputs (gimple_call_arg (stmt, 0), 2623 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
2560 gimple_call_arg (stmt, 1), 2624 gimple_call_arg (stmt, 1),
2561 ignore, true, val[0]); 2625 ignore, true, val[0]);
2562 break; 2626 break;
2563 2627
2564 case BUILT_IN_MEMCPY_CHK: 2628 case BUILT_IN_MEMCPY_CHK:
2565 case BUILT_IN_MEMPCPY_CHK: 2629 case BUILT_IN_MEMPCPY_CHK:
2566 case BUILT_IN_MEMMOVE_CHK: 2630 case BUILT_IN_MEMMOVE_CHK:
2567 case BUILT_IN_MEMSET_CHK: 2631 case BUILT_IN_MEMSET_CHK:
2568 if (val[2] && is_gimple_val (val[2]) && nargs == 4) 2632 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
2569 result = fold_builtin_memory_chk (callee, 2633 result = fold_builtin_memory_chk (loc, callee,
2570 gimple_call_arg (stmt, 0), 2634 gimple_call_arg (stmt, 0),
2571 gimple_call_arg (stmt, 1), 2635 gimple_call_arg (stmt, 1),
2572 gimple_call_arg (stmt, 2), 2636 gimple_call_arg (stmt, 2),
2573 gimple_call_arg (stmt, 3), 2637 gimple_call_arg (stmt, 3),
2574 val[2], ignore, 2638 val[2], ignore,
2576 break; 2640 break;
2577 2641
2578 case BUILT_IN_STRCPY_CHK: 2642 case BUILT_IN_STRCPY_CHK:
2579 case BUILT_IN_STPCPY_CHK: 2643 case BUILT_IN_STPCPY_CHK:
2580 if (val[1] && is_gimple_val (val[1]) && nargs == 3) 2644 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
2581 result = fold_builtin_stxcpy_chk (callee, 2645 result = fold_builtin_stxcpy_chk (loc, callee,
2582 gimple_call_arg (stmt, 0), 2646 gimple_call_arg (stmt, 0),
2583 gimple_call_arg (stmt, 1), 2647 gimple_call_arg (stmt, 1),
2584 gimple_call_arg (stmt, 2), 2648 gimple_call_arg (stmt, 2),
2585 val[1], ignore, 2649 val[1], ignore,
2586 DECL_FUNCTION_CODE (callee)); 2650 DECL_FUNCTION_CODE (callee));
2587 break; 2651 break;
2588 2652
2589 case BUILT_IN_STRNCPY_CHK: 2653 case BUILT_IN_STRNCPY_CHK:
2590 if (val[2] && is_gimple_val (val[2]) && nargs == 4) 2654 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
2591 result = fold_builtin_strncpy_chk (gimple_call_arg (stmt, 0), 2655 result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
2592 gimple_call_arg (stmt, 1), 2656 gimple_call_arg (stmt, 1),
2593 gimple_call_arg (stmt, 2), 2657 gimple_call_arg (stmt, 2),
2594 gimple_call_arg (stmt, 3), 2658 gimple_call_arg (stmt, 3),
2595 val[2]); 2659 val[2]);
2596 break; 2660 break;
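
To make the cases above concrete, a hedged sketch of the source-level patterns they target; the exact folds depend on the target, the flags, and whether the string lengths are known.

    #include <stdio.h>
    #include <string.h>

    void
    g (FILE *fp)
    {
      char buf[16];
      strcpy (buf, "hi");  /* known length: may fold to memcpy (buf, "hi", 3) */
      fputs ("x", fp);     /* single character: may fold to fputc ('x', fp) */
      fputs ("xy", fp);    /* known length: may fold to fwrite ("xy", 1, 2, fp) */
    }
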
2619 static tree 2683 static tree
2620 fold_gimple_assign (gimple_stmt_iterator *si) 2684 fold_gimple_assign (gimple_stmt_iterator *si)
2621 { 2685 {
2622 gimple stmt = gsi_stmt (*si); 2686 gimple stmt = gsi_stmt (*si);
2623 enum tree_code subcode = gimple_assign_rhs_code (stmt); 2687 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2624 2688 location_t loc = gimple_location (stmt);
2625 tree result = NULL; 2689
2690 tree result = NULL_TREE;
2626 2691
2627 switch (get_gimple_rhs_class (subcode)) 2692 switch (get_gimple_rhs_class (subcode))
2628 { 2693 {
2629 case GIMPLE_SINGLE_RHS: 2694 case GIMPLE_SINGLE_RHS:
2630 { 2695 {
2631 tree rhs = gimple_assign_rhs1 (stmt); 2696 tree rhs = gimple_assign_rhs1 (stmt);
2632 2697
2633 /* Try to fold a conditional expression. */ 2698 /* Try to fold a conditional expression. */
2634 if (TREE_CODE (rhs) == COND_EXPR) 2699 if (TREE_CODE (rhs) == COND_EXPR)
2635 { 2700 {
2636 tree temp = fold (COND_EXPR_COND (rhs)); 2701 tree op0 = COND_EXPR_COND (rhs);
2637 if (temp != COND_EXPR_COND (rhs)) 2702 tree tem;
2638 result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp, 2703 bool set = false;
2639 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs)); 2704 location_t cond_loc = EXPR_LOCATION (rhs);
2705
2706 if (COMPARISON_CLASS_P (op0))
2707 {
2708 fold_defer_overflow_warnings ();
2709 tem = fold_binary_loc (cond_loc,
2710 TREE_CODE (op0), TREE_TYPE (op0),
2711 TREE_OPERAND (op0, 0),
2712 TREE_OPERAND (op0, 1));
2713 /* This is actually a conditional expression, not a GIMPLE
2714 conditional statement; however, the valid_gimple_rhs_p
2715 test still applies. */
2716 set = (tem && is_gimple_condexpr (tem)
2717 && valid_gimple_rhs_p (tem));
2718 fold_undefer_overflow_warnings (set, stmt, 0);
2719 }
2720 else if (is_gimple_min_invariant (op0))
2721 {
2722 tem = op0;
2723 set = true;
2724 }
2725 else
2726 return NULL_TREE;
2727
2728 if (set)
2729 result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
2730 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
2640 } 2731 }
2732
2733 else if (TREE_CODE (rhs) == TARGET_MEM_REF)
2734 return maybe_fold_tmr (rhs);
2735
2736 else if (REFERENCE_CLASS_P (rhs))
2737 return maybe_fold_reference (rhs, false);
2738
2739 else if (TREE_CODE (rhs) == ADDR_EXPR)
2740 {
2741 tree tem = maybe_fold_reference (TREE_OPERAND (rhs, 0), true);
2742 if (tem)
2743 result = fold_convert (TREE_TYPE (rhs),
2744 build_fold_addr_expr_loc (loc, tem));
2745 }
2746
2747 else if (TREE_CODE (rhs) == CONSTRUCTOR
2748 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2749 && (CONSTRUCTOR_NELTS (rhs)
2750 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2751 {
2752 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
2753 unsigned i;
2754 tree val;
2755
2756 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2757 if (TREE_CODE (val) != INTEGER_CST
2758 && TREE_CODE (val) != REAL_CST
2759 && TREE_CODE (val) != FIXED_CST)
2760 return NULL_TREE;
2761
2762 return build_vector_from_ctor (TREE_TYPE (rhs),
2763 CONSTRUCTOR_ELTS (rhs));
2764 }
2765
2766 else if (DECL_P (rhs))
2767 return get_symbol_constant_value (rhs);
2641 2768
2642 /* If we couldn't fold the RHS, hand over to the generic 2769 /* If we couldn't fold the RHS, hand over to the generic
2643 fold routines. */ 2770 fold routines. */
2644 if (result == NULL_TREE) 2771 if (result == NULL_TREE)
2645 result = fold (rhs); 2772 result = fold (rhs);
2646 2773
2647 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR 2774 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
2648 that may have been added by fold, and "useless" type 2775 that may have been added by fold, and "useless" type
2649 conversions that might now be apparent due to propagation. */ 2776 conversions that might now be apparent due to propagation. */
2650 STRIP_USELESS_TYPE_CONVERSION (result); 2777 STRIP_USELESS_TYPE_CONVERSION (result);
2651 2778
2652 if (result != rhs && valid_gimple_rhs_p (result)) 2779 if (result != rhs && valid_gimple_rhs_p (result))
2653 return result; 2780 return result;
2654 else 2781
2655 /* It is possible that fold_stmt_r simplified the RHS. 2782 return NULL_TREE;
2656 Make sure that the subcode of this statement still
2657 reflects the principal operator of the rhs operand. */
2658 return rhs;
2659 } 2783 }
2660 break; 2784 break;
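
A sketch of inputs the GIMPLE_SINGLE_RHS paths above are meant to catch (function and variable names are hypothetical; assumes GCC's vector extension).

    typedef int v4si __attribute__ ((vector_size (16)));
    static const int answer = 42;

    void
    single_rhs (v4si *out, int *x)
    {
      v4si v = { 1, 2, 3, 4 };  /* constant CONSTRUCTOR folds to a VECTOR_CST */
      *out = v;
      *x = answer;  /* DECL_P rhs: get_symbol_constant_value reads the initializer */
    }
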
2661 2785
2662 case GIMPLE_UNARY_RHS: 2786 case GIMPLE_UNARY_RHS:
2663 { 2787 {
2664 tree rhs = gimple_assign_rhs1 (stmt); 2788 tree rhs = gimple_assign_rhs1 (stmt);
2665 2789
2666 result = fold_unary (subcode, gimple_expr_type (stmt), rhs); 2790 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
2667 if (result) 2791 if (result)
2668 { 2792 {
2669 /* If the operation was a conversion do _not_ mark a 2793 /* If the operation was a conversion do _not_ mark a
2670 resulting constant with TREE_OVERFLOW if the original 2794 resulting constant with TREE_OVERFLOW if the original
2671 constant was not. These conversions have implementation 2795 constant was not. These conversions have implementation
2683 else if (CONVERT_EXPR_CODE_P (subcode) 2807 else if (CONVERT_EXPR_CODE_P (subcode)
2684 && POINTER_TYPE_P (gimple_expr_type (stmt)) 2808 && POINTER_TYPE_P (gimple_expr_type (stmt))
2685 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt)))) 2809 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
2686 { 2810 {
2687 tree type = gimple_expr_type (stmt); 2811 tree type = gimple_expr_type (stmt);
2688 tree t = maybe_fold_offset_to_address (gimple_assign_rhs1 (stmt), 2812 tree t = maybe_fold_offset_to_address (loc,
2813 gimple_assign_rhs1 (stmt),
2689 integer_zero_node, type); 2814 integer_zero_node, type);
2690 if (t) 2815 if (t)
2691 return t; 2816 return t;
2692 } 2817 }
2693 } 2818 }
2703 type = build_pointer_type (TREE_TYPE (TREE_TYPE (type))); 2828 type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
2704 if (!useless_type_conversion_p 2829 if (!useless_type_conversion_p
2705 (TREE_TYPE (gimple_assign_lhs (stmt)), type)) 2830 (TREE_TYPE (gimple_assign_lhs (stmt)), type))
2706 type = TREE_TYPE (gimple_assign_rhs1 (stmt)); 2831 type = TREE_TYPE (gimple_assign_rhs1 (stmt));
2707 } 2832 }
2708 result = maybe_fold_stmt_addition (type, 2833 result = maybe_fold_stmt_addition (gimple_location (stmt),
2834 type,
2709 gimple_assign_rhs1 (stmt), 2835 gimple_assign_rhs1 (stmt),
2710 gimple_assign_rhs2 (stmt)); 2836 gimple_assign_rhs2 (stmt));
2711 } 2837 }
2712 2838
2713 if (!result) 2839 if (!result)
2714 result = fold_binary (subcode, 2840 result = fold_binary_loc (loc, subcode,
2715 TREE_TYPE (gimple_assign_lhs (stmt)), 2841 TREE_TYPE (gimple_assign_lhs (stmt)),
2716 gimple_assign_rhs1 (stmt), 2842 gimple_assign_rhs1 (stmt),
2717 gimple_assign_rhs2 (stmt)); 2843 gimple_assign_rhs2 (stmt));
2718 2844
2719 if (result) 2845 if (result)
2748 assumed that the operands have been previously folded. */ 2874 assumed that the operands have been previously folded. */
2749 2875
2750 static bool 2876 static bool
2751 fold_gimple_cond (gimple stmt) 2877 fold_gimple_cond (gimple stmt)
2752 { 2878 {
2753 tree result = fold_binary (gimple_cond_code (stmt), 2879 tree result = fold_binary_loc (gimple_location (stmt),
2880 gimple_cond_code (stmt),
2754 boolean_type_node, 2881 boolean_type_node,
2755 gimple_cond_lhs (stmt), 2882 gimple_cond_lhs (stmt),
2756 gimple_cond_rhs (stmt)); 2883 gimple_cond_rhs (stmt));
2757 2884
2758 if (result) 2885 if (result)
2766 } 2893 }
2767 2894
2768 return false; 2895 return false;
2769 } 2896 }
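
fold_gimple_cond hands the comparison straight to fold_binary_loc; a minimal sketch of a condition it can settle once CCP has substituted a constant (the elided tail of the function rewrites the GIMPLE_COND when a result comes back).

    int
    pick (void)
    {
      int a = 3;   /* propagated as a constant */
      if (a > 2)   /* folds to an always-true condition */
        return 1;
      return 0;    /* dead; removed by later CFG cleanup */
    }
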
2770 2897
2898 static void gimplify_and_update_call_from_tree (gimple_stmt_iterator *, tree);
2771 2899
2772 /* Attempt to fold a call statement referenced by the statement iterator GSI. 2900 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2773 The statement may be replaced by another statement, e.g., if the call 2901 The statement may be replaced by another statement, e.g., if the call
2774 simplifies to a constant value. Return true if any changes were made. 2902 simplifies to a constant value. Return true if any changes were made.
2775 It is assumed that the operands have been previously folded. */ 2903 It is assumed that the operands have been previously folded. */
2786 if (callee && DECL_BUILT_IN (callee)) 2914 if (callee && DECL_BUILT_IN (callee))
2787 { 2915 {
2788 tree result = ccp_fold_builtin (stmt); 2916 tree result = ccp_fold_builtin (stmt);
2789 2917
2790 if (result) 2918 if (result)
2791 return update_call_from_tree (gsi, result); 2919 {
2920 if (!update_call_from_tree (gsi, result))
2921 gimplify_and_update_call_from_tree (gsi, result);
2922 return true;
2923 }
2792 } 2924 }
2793 else 2925 else
2794 { 2926 {
2795 /* Check for resolvable OBJ_TYPE_REF. The only sorts we can resolve 2927 /* Check for resolvable OBJ_TYPE_REF. The only sorts we can resolve
2796 here are when we've propagated the address of a decl into the 2928 here are when we've propagated the address of a decl into the
2824 } 2956 }
2825 2957
2826 return false; 2958 return false;
2827 } 2959 }
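
A hedged example of the whole-statement replacement fold_gimple_call can perform through ccp_fold_builtin: the call vanishes and the LHS receives a constant via update_call_from_tree, or, as of this revision, via gimplify_and_update_call_from_tree when the result first needs gimplification.

    #include <string.h>

    size_t
    len (void)
    {
      return strlen ("abc");  /* ccp_fold_builtin yields 3; the GIMPLE_CALL
                                 becomes a plain assignment of that constant */
    }
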
2828 2960
2829 /* Fold the statement pointed to by GSI. In some cases, this function may 2961 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
2830 replace the whole statement with a new one. Returns true iff folding 2962 distinguishes both cases. */
2831 makes any changes. */ 2963
2832 2964 static bool
2833 bool 2965 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
2834 fold_stmt (gimple_stmt_iterator *gsi) 2966 {
2835 {
2836 tree res;
2837 struct fold_stmt_r_data fold_stmt_r_data;
2838 struct walk_stmt_info wi;
2839
2840 bool changed = false; 2967 bool changed = false;
2841 bool inside_addr_expr = false;
2842
2843 gimple stmt = gsi_stmt (*gsi); 2968 gimple stmt = gsi_stmt (*gsi);
2844 2969 unsigned i;
2845 fold_stmt_r_data.stmt = stmt;
2846 fold_stmt_r_data.changed_p = &changed;
2847 fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;
2848
2849 memset (&wi, 0, sizeof (wi));
2850 wi.info = &fold_stmt_r_data;
2851
2852 /* Fold the individual operands.
2853 For example, fold instances of *&VAR into VAR, etc. */
2854 res = walk_gimple_op (stmt, fold_stmt_r, &wi);
2855 gcc_assert (!res);
2856 2970
2857 /* Fold the main computation performed by the statement. */ 2971 /* Fold the main computation performed by the statement. */
2858 switch (gimple_code (stmt)) 2972 switch (gimple_code (stmt))
2859 { 2973 {
2860 case GIMPLE_ASSIGN: 2974 case GIMPLE_ASSIGN:
2861 { 2975 {
2976 unsigned old_num_ops = gimple_num_ops (stmt);
2862 tree new_rhs = fold_gimple_assign (gsi); 2977 tree new_rhs = fold_gimple_assign (gsi);
2863 if (new_rhs != NULL_TREE) 2978 if (new_rhs != NULL_TREE
2979 && (!inplace
2980 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
2864 { 2981 {
2865 gimple_assign_set_rhs_from_tree (gsi, new_rhs); 2982 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
2866 changed = true; 2983 changed = true;
2867 } 2984 }
2868 stmt = gsi_stmt (*gsi);
2869 break; 2985 break;
2870 } 2986 }
2987
2871 case GIMPLE_COND: 2988 case GIMPLE_COND:
2872 changed |= fold_gimple_cond (stmt); 2989 changed |= fold_gimple_cond (stmt);
2873 break; 2990 break;
2991
2874 case GIMPLE_CALL: 2992 case GIMPLE_CALL:
2993 /* Fold *& in call arguments. */
2994 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2995 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
2996 {
2997 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
2998 if (tmp)
2999 {
3000 gimple_call_set_arg (stmt, i, tmp);
3001 changed = true;
3002 }
3003 }
2875 /* The entire statement may be replaced in this case. */ 3004 /* The entire statement may be replaced in this case. */
2876 changed |= fold_gimple_call (gsi); 3005 if (!inplace)
3006 changed |= fold_gimple_call (gsi);
2877 break; 3007 break;
2878 3008
2879 default: 3009 case GIMPLE_ASM:
2880 return changed; 3010 /* Fold *& in asm operands. */
3011 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
3012 {
3013 tree link = gimple_asm_output_op (stmt, i);
3014 tree op = TREE_VALUE (link);
3015 if (REFERENCE_CLASS_P (op)
3016 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
3017 {
3018 TREE_VALUE (link) = op;
3019 changed = true;
3020 }
3021 }
3022 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
3023 {
3024 tree link = gimple_asm_input_op (stmt, i);
3025 tree op = TREE_VALUE (link);
3026 if (REFERENCE_CLASS_P (op)
3027 && (op = maybe_fold_reference (op, false)) != NULL_TREE)
3028 {
3029 TREE_VALUE (link) = op;
3030 changed = true;
3031 }
3032 }
2881 break; 3033 break;
3034
3035 default:;
3036 }
3037
3038 stmt = gsi_stmt (*gsi);
3039
3040 /* Fold *& on the lhs. */
3041 if (gimple_has_lhs (stmt))
3042 {
3043 tree lhs = gimple_get_lhs (stmt);
3044 if (lhs && REFERENCE_CLASS_P (lhs))
3045 {
3046 tree new_lhs = maybe_fold_reference (lhs, true);
3047 if (new_lhs)
3048 {
3049 gimple_set_lhs (stmt, new_lhs);
3050 changed = true;
3051 }
3052 }
2882 } 3053 }
2883 3054
2884 return changed; 3055 return changed;
3056 }
3057
3058 /* Fold the statement pointed to by GSI. In some cases, this function may
3059 replace the whole statement with a new one. Returns true iff folding
3060 makes any changes.
3061 The statement pointed to by GSI should be in valid gimple form but may
3062 be in an unfolded state resulting from, for example, constant
3063 propagation, which can produce *&x = 0. */
3064
3065 bool
3066 fold_stmt (gimple_stmt_iterator *gsi)
3067 {
3068 return fold_stmt_1 (gsi, false);
2885 } 3069 }
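
A sketch of the unfolded form the comment mentions: propagating &x into *p leaves *&x = 0, which fold_stmt cleans up to x = 0.

    void
    store (void)
    {
      static int x;
      int *p = &x;
      *p = 0;  /* after copy propagation: *&x = 0; folded back to x = 0 */
    }
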
2886 3070
2887 /* Perform the minimal folding on statement STMT. Only operations like 3071 /* Perform the minimal folding on statement STMT. Only operations like
2888 *&x created by constant propagation are handled. The statement cannot 3072 *&x created by constant propagation are handled. The statement cannot
2889 be replaced with a new one. Return true if the statement was 3073 be replaced with a new one. Return true if the statement was
2890 changed, false otherwise. */ 3074 changed, false otherwise.
3075 The statement STMT should be in valid gimple form but may
3076 be in an unfolded state resulting from, for example, constant
3077 propagation, which can produce *&x = 0. */
2891 3078
2892 bool 3079 bool
2893 fold_stmt_inplace (gimple stmt) 3080 fold_stmt_inplace (gimple stmt)
2894 { 3081 {
2895 tree res; 3082 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2896 struct fold_stmt_r_data fold_stmt_r_data; 3083 bool changed = fold_stmt_1 (&gsi, true);
2897 struct walk_stmt_info wi; 3084 gcc_assert (gsi_stmt (gsi) == stmt);
2898 gimple_stmt_iterator si;
2899
2900 bool changed = false;
2901 bool inside_addr_expr = false;
2902
2903 fold_stmt_r_data.stmt = stmt;
2904 fold_stmt_r_data.changed_p = &changed;
2905 fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;
2906
2907 memset (&wi, 0, sizeof (wi));
2908 wi.info = &fold_stmt_r_data;
2909
2910 /* Fold the individual operands.
2911 For example, fold instances of *&VAR into VAR, etc.
2912
2913 It appears that, at one time, maybe_fold_stmt_indirect
2914 would cause the walk to return non-null in order to
2915 signal that the entire statement should be replaced with
2916 a call to _builtin_trap. This functionality is currently
2917 disabled, as noted in a FIXME, and cannot be supported here. */
2918 res = walk_gimple_op (stmt, fold_stmt_r, &wi);
2919 gcc_assert (!res);
2920
2921 /* Fold the main computation performed by the statement. */
2922 switch (gimple_code (stmt))
2923 {
2924 case GIMPLE_ASSIGN:
2925 {
2926 unsigned old_num_ops;
2927 tree new_rhs;
2928 old_num_ops = gimple_num_ops (stmt);
2929 si = gsi_for_stmt (stmt);
2930 new_rhs = fold_gimple_assign (&si);
2931 if (new_rhs != NULL_TREE
2932 && get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops)
2933 {
2934 gimple_assign_set_rhs_from_tree (&si, new_rhs);
2935 changed = true;
2936 }
2937 gcc_assert (gsi_stmt (si) == stmt);
2938 break;
2939 }
2940 case GIMPLE_COND:
2941 changed |= fold_gimple_cond (stmt);
2942 break;
2943
2944 default:
2945 break;
2946 }
2947
2948 return changed; 3085 return changed;
2949 } 3086 }
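
A hypothetical caller sketch: with INPLACE set, a fold is applied only when the new RHS fits in the statement's existing operand slots, so the statement may shrink but is never replaced and the caller's pointer stays valid.

    if (fold_stmt_inplace (stmt))
      update_stmt (stmt);
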
2950 3087
2951 /* Try to optimize out __builtin_stack_restore. Optimize it out 3088 /* Try to optimize out __builtin_stack_restore. Optimize it out
2952 if there is another __builtin_stack_restore in the same basic 3089 if there is another __builtin_stack_restore in the same basic
2955 ASM_EXPRs after this __builtin_stack_restore. */ 3092 ASM_EXPRs after this __builtin_stack_restore. */
2956 3093
2957 static tree 3094 static tree
2958 optimize_stack_restore (gimple_stmt_iterator i) 3095 optimize_stack_restore (gimple_stmt_iterator i)
2959 { 3096 {
2960 tree callee, rhs; 3097 tree callee;
2961 gimple stmt, stack_save; 3098 gimple stmt;
2962 gimple_stmt_iterator stack_save_gsi;
2963 3099
2964 basic_block bb = gsi_bb (i); 3100 basic_block bb = gsi_bb (i);
2965 gimple call = gsi_stmt (i); 3101 gimple call = gsi_stmt (i);
2966 3102
2967 if (gimple_code (call) != GIMPLE_CALL 3103 if (gimple_code (call) != GIMPLE_CALL
2981 callee = gimple_call_fndecl (stmt); 3117 callee = gimple_call_fndecl (stmt);
2982 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL) 3118 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
2983 return NULL_TREE; 3119 return NULL_TREE;
2984 3120
2985 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE) 3121 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
2986 break; 3122 goto second_stack_restore;
2987 } 3123 }
2988 3124
2989 if (gsi_end_p (i) 3125 if (!gsi_end_p (i))
2990 && (! single_succ_p (bb)
2991 || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
2992 return NULL_TREE; 3126 return NULL_TREE;
2993 3127
2994 stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0)); 3128 /* Allow one successor of the exit block, or zero successors. */
2995 if (gimple_code (stack_save) != GIMPLE_CALL 3129 switch (EDGE_COUNT (bb->succs))
2996 || gimple_call_lhs (stack_save) != gimple_call_arg (call, 0) 3130 {
2997 || stmt_could_throw_p (stack_save) 3131 case 0:
2998 || !has_single_use (gimple_call_arg (call, 0))) 3132 break;
2999 return NULL_TREE; 3133 case 1:
3000 3134 if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR)
3001 callee = gimple_call_fndecl (stack_save); 3135 return NULL_TREE;
3002 if (!callee 3136 break;
3003 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL 3137 default:
3004 || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
3005 || gimple_call_num_args (stack_save) != 0)
3006 return NULL_TREE;
3007
3008 stack_save_gsi = gsi_for_stmt (stack_save);
3009 push_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
3010 rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
3011 if (!update_call_from_tree (&stack_save_gsi, rhs))
3012 {
3013 discard_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
3014 return NULL_TREE; 3138 return NULL_TREE;
3015 } 3139 }
3016 pop_stmt_changes (gsi_stmt_ptr (&stack_save_gsi)); 3140 second_stack_restore:
3141
3142 /* If there's exactly one use, then zap the call to __builtin_stack_save.
3143 If there are multiple uses, then the last one should remove the call.
3144 In any case, whether the call to __builtin_stack_save can be removed
3145 or not is irrelevant to removing the call to __builtin_stack_restore. */
3146 if (has_single_use (gimple_call_arg (call, 0)))
3147 {
3148 gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
3149 if (is_gimple_call (stack_save))
3150 {
3151 callee = gimple_call_fndecl (stack_save);
3152 if (callee
3153 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
3154 && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
3155 {
3156 gimple_stmt_iterator stack_save_gsi;
3157 tree rhs;
3158
3159 stack_save_gsi = gsi_for_stmt (stack_save);
3160 rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
3161 update_call_from_tree (&stack_save_gsi, rhs);
3162 }
3163 }
3164 }
3017 3165
3018 /* No effect, so the statement will be deleted. */ 3166 /* No effect, so the statement will be deleted. */
3019 return integer_zero_node; 3167 return integer_zero_node;
3020 } 3168 }
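
A sketch of the pattern this targets (the helper use() is hypothetical): a variable-length array emits a __builtin_stack_save/__builtin_stack_restore pair, and a restore that falls through to function exit, or is followed by another restore, has no effect and can be deleted; a single-use stack_save is zapped along with it.

    void use (char *);

    void
    h (int len)
    {
      {
        char vla[len];  /* block entry: tmp = __builtin_stack_save () */
        use (vla);
      }                 /* block exit: __builtin_stack_restore (tmp) */
    }                   /* falls through to function exit: restore removable */
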
3021 3169
3027 static tree 3175 static tree
3028 optimize_stdarg_builtin (gimple call) 3176 optimize_stdarg_builtin (gimple call)
3029 { 3177 {
3030 tree callee, lhs, rhs, cfun_va_list; 3178 tree callee, lhs, rhs, cfun_va_list;
3031 bool va_list_simple_ptr; 3179 bool va_list_simple_ptr;
3180 location_t loc = gimple_location (call);
3032 3181
3033 if (gimple_code (call) != GIMPLE_CALL) 3182 if (gimple_code (call) != GIMPLE_CALL)
3034 return NULL_TREE; 3183 return NULL_TREE;
3035 3184
3036 callee = gimple_call_fndecl (call); 3185 callee = gimple_call_fndecl (call);
3054 lhs = gimple_call_arg (call, 0); 3203 lhs = gimple_call_arg (call, 0);
3055 if (!POINTER_TYPE_P (TREE_TYPE (lhs)) 3204 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
3056 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs))) 3205 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
3057 != TYPE_MAIN_VARIANT (cfun_va_list)) 3206 != TYPE_MAIN_VARIANT (cfun_va_list))
3058 return NULL_TREE; 3207 return NULL_TREE;
3059 3208
3060 lhs = build_fold_indirect_ref (lhs); 3209 lhs = build_fold_indirect_ref_loc (loc, lhs);
3061 rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG], 3210 rhs = build_call_expr_loc (loc, built_in_decls[BUILT_IN_NEXT_ARG],
3062 1, integer_zero_node); 3211 1, integer_zero_node);
3063 rhs = fold_convert (TREE_TYPE (lhs), rhs); 3212 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
3064 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs); 3213 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
3065 3214
3066 case BUILT_IN_VA_COPY: 3215 case BUILT_IN_VA_COPY:
3067 if (!va_list_simple_ptr) 3216 if (!va_list_simple_ptr)
3068 return NULL_TREE; 3217 return NULL_TREE;
3074 if (!POINTER_TYPE_P (TREE_TYPE (lhs)) 3223 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
3075 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs))) 3224 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
3076 != TYPE_MAIN_VARIANT (cfun_va_list)) 3225 != TYPE_MAIN_VARIANT (cfun_va_list))
3077 return NULL_TREE; 3226 return NULL_TREE;
3078 3227
3079 lhs = build_fold_indirect_ref (lhs); 3228 lhs = build_fold_indirect_ref_loc (loc, lhs);
3080 rhs = gimple_call_arg (call, 1); 3229 rhs = gimple_call_arg (call, 1);
3081 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs)) 3230 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
3082 != TYPE_MAIN_VARIANT (cfun_va_list)) 3231 != TYPE_MAIN_VARIANT (cfun_va_list))
3083 return NULL_TREE; 3232 return NULL_TREE;
3084 3233
3085 rhs = fold_convert (TREE_TYPE (lhs), rhs); 3234 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
3086 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs); 3235 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
3087 3236
3088 case BUILT_IN_VA_END: 3237 case BUILT_IN_VA_END:
3089 /* No effect, so the statement will be deleted. */ 3238 /* No effect, so the statement will be deleted. */
3090 return integer_zero_node; 3239 return integer_zero_node;
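
A hedged sketch for targets where va_list is a plain pointer (the va_list_simple_ptr test above); on other targets these calls are left alone.

    #include <stdarg.h>

    int
    first_vararg (int n, ...)
    {
      va_list ap;
      int v;
      va_start (ap, n);  /* may fold to ap = __builtin_next_arg (0) */
      v = va_arg (ap, int);
      va_end (ap);       /* returns integer_zero_node above: statement deleted */
      return v;
    }
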
3120 3269
3121 push_gimplify_context (&gctx); 3270 push_gimplify_context (&gctx);
3122 3271
3123 if (lhs == NULL_TREE) 3272 if (lhs == NULL_TREE)
3124 gimplify_and_add (expr, &stmts); 3273 gimplify_and_add (expr, &stmts);
3125 else 3274 else
3126 tmp = get_initialized_tmp_var (expr, &stmts, NULL); 3275 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
3127 3276
3128 pop_gimplify_context (NULL); 3277 pop_gimplify_context (NULL);
3129 3278
3130 if (gimple_has_location (stmt)) 3279 if (gimple_has_location (stmt))
3139 mark_symbols_for_renaming (new_stmt); 3288 mark_symbols_for_renaming (new_stmt);
3140 gsi_next (si_p); 3289 gsi_next (si_p);
3141 } 3290 }
3142 3291
3143 if (lhs == NULL_TREE) 3292 if (lhs == NULL_TREE)
3144 new_stmt = gimple_build_nop (); 3293 {
3294 new_stmt = gimple_build_nop ();
3295 unlink_stmt_vdef (stmt);
3296 release_defs (stmt);
3297 }
3145 else 3298 else
3146 { 3299 {
3147 new_stmt = gimple_build_assign (lhs, tmp); 3300 new_stmt = gimple_build_assign (lhs, tmp);
3148 copy_virtual_operands (new_stmt, stmt); 3301 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3302 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3149 move_ssa_defining_stmt_for_defs (new_stmt, stmt); 3303 move_ssa_defining_stmt_for_defs (new_stmt, stmt);
3150 } 3304 }
3151 3305
3152 gimple_set_location (new_stmt, gimple_location (stmt)); 3306 gimple_set_location (new_stmt, gimple_location (stmt));
3153 gsi_replace (si_p, new_stmt, false); 3307 gsi_replace (si_p, new_stmt, false);
3160 execute_fold_all_builtins (void) 3314 execute_fold_all_builtins (void)
3161 { 3315 {
3162 bool cfg_changed = false; 3316 bool cfg_changed = false;
3163 basic_block bb; 3317 basic_block bb;
3164 unsigned int todoflags = 0; 3318 unsigned int todoflags = 0;
3165 3319
3166 FOR_EACH_BB (bb) 3320 FOR_EACH_BB (bb)
3167 { 3321 {
3168 gimple_stmt_iterator i; 3322 gimple_stmt_iterator i;
3169 for (i = gsi_start_bb (bb); !gsi_end_p (i); ) 3323 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
3170 { 3324 {
3228 fprintf (dump_file, "Simplified\n "); 3382 fprintf (dump_file, "Simplified\n ");
3229 print_gimple_stmt (dump_file, stmt, 0, dump_flags); 3383 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
3230 } 3384 }
3231 3385
3232 old_stmt = stmt; 3386 old_stmt = stmt;
3233 push_stmt_changes (gsi_stmt_ptr (&i));
3234
3235 if (!update_call_from_tree (&i, result)) 3387 if (!update_call_from_tree (&i, result))
3236 { 3388 {
3237 gimplify_and_update_call_from_tree (&i, result); 3389 gimplify_and_update_call_from_tree (&i, result);
3238 todoflags |= TODO_rebuild_alias; 3390 todoflags |= TODO_update_address_taken;
3239 } 3391 }
3240 3392
3241 stmt = gsi_stmt (i); 3393 stmt = gsi_stmt (i);
3242 pop_stmt_changes (gsi_stmt_ptr (&i)); 3394 update_stmt (stmt);
3243 3395
3244 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt) 3396 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
3245 && gimple_purge_dead_eh_edges (bb)) 3397 && gimple_purge_dead_eh_edges (bb))
3246 cfg_changed = true; 3398 cfg_changed = true;
3247 3399
3264 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL 3416 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
3265 || DECL_FUNCTION_CODE (callee) == fcode) 3417 || DECL_FUNCTION_CODE (callee) == fcode)
3266 gsi_next (&i); 3418 gsi_next (&i);
3267 } 3419 }
3268 } 3420 }
3269 3421
3270 /* Delete unreachable blocks. */ 3422 /* Delete unreachable blocks. */
3271 if (cfg_changed) 3423 if (cfg_changed)
3272 todoflags |= TODO_cleanup_cfg; 3424 todoflags |= TODO_cleanup_cfg;
3273 3425
3274 return todoflags; 3426 return todoflags;
3275 } 3427 }
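
A hedged example of a fold this late pass is responsible for: a __builtin_constant_p that survives to this point is resolved (pessimistically, to 0 when its argument never became a constant), letting the guarded code be cleaned up; slow_path() is a hypothetical helper.

    int slow_path (int);

    int
    guarded (int x)
    {
      if (__builtin_constant_p (x))  /* resolved here if still unfolded */
        return x << 1;
      return slow_path (x);
    }
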
3276 3428
3277 3429
3278 struct gimple_opt_pass pass_fold_builtins = 3430 struct gimple_opt_pass pass_fold_builtins =
3279 { 3431 {
3280 { 3432 {
3281 GIMPLE_PASS, 3433 GIMPLE_PASS,
3282 "fab", /* name */ 3434 "fab", /* name */
3283 NULL, /* gate */ 3435 NULL, /* gate */
3284 execute_fold_all_builtins, /* execute */ 3436 execute_fold_all_builtins, /* execute */
3285 NULL, /* sub */ 3437 NULL, /* sub */
3286 NULL, /* next */ 3438 NULL, /* next */
3287 0, /* static_pass_number */ 3439 0, /* static_pass_number */
3288 0, /* tv_id */ 3440 TV_NONE, /* tv_id */
3289 PROP_cfg | PROP_ssa, /* properties_required */ 3441 PROP_cfg | PROP_ssa, /* properties_required */
3290 0, /* properties_provided */ 3442 0, /* properties_provided */
3291 0, /* properties_destroyed */ 3443 0, /* properties_destroyed */
3292 0, /* todo_flags_start */ 3444 0, /* todo_flags_start */
3293 TODO_dump_func 3445 TODO_dump_func