comparison gcc/gimplify.c @ 69:1b10fe6932e1

merge 69
author Nobuyasu Oshiro <dimolto@cr.ie.u-ryukyu.ac.jp>
date Sun, 21 Aug 2011 07:53:12 +0900
parents 326d9e06c2e3 f6334be47118
children b81903832de2
comparison of 66:b362627d71ba (left column) with 69:1b10fe6932e1 (right column)
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees 1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form. 2 tree representation into the GIMPLE form.
3 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 3 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
4 Free Software Foundation, Inc. 4 Free Software Foundation, Inc.
5 Major work done by Sebastian Pop <s.pop@laposte.net>, 5 Major work done by Sebastian Pop <s.pop@laposte.net>,
6 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>. 6 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 7
8 This file is part of GCC. 8 This file is part of GCC.
24 #include "config.h" 24 #include "config.h"
25 #include "system.h" 25 #include "system.h"
26 #include "coretypes.h" 26 #include "coretypes.h"
27 #include "tm.h" 27 #include "tm.h"
28 #include "tree.h" 28 #include "tree.h"
29 #include "rtl.h"
30 #include "varray.h"
31 #include "gimple.h" 29 #include "gimple.h"
32 #include "tree-iterator.h" 30 #include "tree-iterator.h"
33 #include "tree-inline.h" 31 #include "tree-inline.h"
34 #include "diagnostic.h" 32 #include "tree-pretty-print.h"
35 #include "langhooks.h" 33 #include "langhooks.h"
36 #include "langhooks-def.h"
37 #include "tree-flow.h" 34 #include "tree-flow.h"
38 #include "cgraph.h" 35 #include "cgraph.h"
39 #include "timevar.h" 36 #include "timevar.h"
40 #include "except.h"
41 #include "hashtab.h" 37 #include "hashtab.h"
42 #include "flags.h" 38 #include "flags.h"
43 #include "real.h"
44 #include "function.h" 39 #include "function.h"
45 #include "output.h" 40 #include "output.h"
46 #include "expr.h"
47 #include "ggc.h" 41 #include "ggc.h"
48 #include "toplev.h" 42 #include "diagnostic-core.h"
49 #include "target.h" 43 #include "target.h"
50 #include "optabs.h"
51 #include "pointer-set.h" 44 #include "pointer-set.h"
52 #include "splay-tree.h" 45 #include "splay-tree.h"
53 #include "vec.h" 46 #include "vec.h"
54 #include "gimple.h" 47 #include "gimple.h"
55 #ifndef noCbC 48 #ifndef noCbC
56 #include "cbc-tree.h" 49 #include "cbc-tree.h"
57 #endif 50 #endif
58 51
59 #include "tree-pass.h" 52 #include "tree-pass.h"
53
54 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name. */
55 #include "expr.h" /* FIXME: for can_move_by_pieces
56 and STACK_CHECK_MAX_VAR_SIZE. */
60 57
61 enum gimplify_omp_var_data 58 enum gimplify_omp_var_data
62 { 59 {
63 GOVD_SEEN = 1, 60 GOVD_SEEN = 1,
64 GOVD_EXPLICIT = 2, 61 GOVD_EXPLICIT = 2,
76 73
77 74
78 enum omp_region_type 75 enum omp_region_type
79 { 76 {
80 ORT_WORKSHARE = 0, 77 ORT_WORKSHARE = 0,
81 ORT_TASK = 1,
82 ORT_PARALLEL = 2, 78 ORT_PARALLEL = 2,
83 ORT_COMBINED_PARALLEL = 3 79 ORT_COMBINED_PARALLEL = 3,
80 ORT_TASK = 4,
81 ORT_UNTIED_TASK = 5
84 }; 82 };
85 83
86 struct gimplify_omp_ctx 84 struct gimplify_omp_ctx
87 { 85 {
88 struct gimplify_omp_ctx *outer_context; 86 struct gimplify_omp_ctx *outer_context;
114 void 112 void
115 mark_addressable (tree x) 113 mark_addressable (tree x)
116 { 114 {
117 while (handled_component_p (x)) 115 while (handled_component_p (x))
118 x = TREE_OPERAND (x, 0); 116 x = TREE_OPERAND (x, 0);
117 if (TREE_CODE (x) == MEM_REF
118 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
119 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
119 if (TREE_CODE (x) != VAR_DECL 120 if (TREE_CODE (x) != VAR_DECL
120 && TREE_CODE (x) != PARM_DECL 121 && TREE_CODE (x) != PARM_DECL
121 && TREE_CODE (x) != RESULT_DECL) 122 && TREE_CODE (x) != RESULT_DECL)
122 return ; 123 return;
123 TREE_ADDRESSABLE (x) = 1; 124 TREE_ADDRESSABLE (x) = 1;
124 } 125 }
125 126
126 /* Return a hash value for a formal temporary table entry. */ 127 /* Return a hash value for a formal temporary table entry. */
127 128
146 return 0; 147 return 0;
147 148
148 if (!operand_equal_p (t1, t2, 0)) 149 if (!operand_equal_p (t1, t2, 0))
149 return 0; 150 return 0;
150 151
152 #ifdef ENABLE_CHECKING
151 /* Only allow them to compare equal if they also hash equal; otherwise 153 /* Only allow them to compare equal if they also hash equal; otherwise
152 results are nondeterminate, and we fail bootstrap comparison. */ 154 results are nondeterminate, and we fail bootstrap comparison. */
153 gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2)); 155 gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2));
156 #endif
154 157
155 return 1; 158 return 1;
156 } 159 }
157 160
158 /* Link gimple statement GS to the end of the sequence *SEQ_P. If 161 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
159 *SEQ_P is NULL, a new sequence is allocated. This function is 162 *SEQ_P is NULL, a new sequence is allocated. This function is
160 similar to gimple_seq_add_stmt, but does not scan the operands. 163 similar to gimple_seq_add_stmt, but does not scan the operands.
161 During gimplification, we need to manipulate statement sequences 164 During gimplification, we need to manipulate statement sequences
162 before the def/use vectors have been constructed. */ 165 before the def/use vectors have been constructed. */
163 166
164 static void 167 void
165 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs) 168 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
166 { 169 {
167 gimple_stmt_iterator si; 170 gimple_stmt_iterator si;
168 171
169 if (gs == NULL) 172 if (gs == NULL)
320 c->outer_context = gimplify_omp_ctxp; 323 c->outer_context = gimplify_omp_ctxp;
321 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0); 324 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
322 c->privatized_types = pointer_set_create (); 325 c->privatized_types = pointer_set_create ();
323 c->location = input_location; 326 c->location = input_location;
324 c->region_type = region_type; 327 c->region_type = region_type;
325 if (region_type != ORT_TASK) 328 if ((region_type & ORT_TASK) == 0)
326 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED; 329 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
327 else 330 else
328 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED; 331 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
329 332
330 return c; 333 return c;
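In the enum change above, ORT_TASK becomes a bit flag (4) and ORT_UNTIED_TASK is ORT_TASK plus an extra bit (5), which is why new_omp_context now tests (region_type & ORT_TASK) == 0 instead of comparing for equality. A minimal standalone sketch of the same idiom, with illustrative names that are not GCC's:

#include <stdio.h>

enum region_type                /* mirrors the new layout: task kinds share bit 2 */
{
  RT_WORKSHARE         = 0,
  RT_PARALLEL          = 2,
  RT_COMBINED_PARALLEL = 3,
  RT_TASK              = 4,
  RT_UNTIED_TASK       = 5     /* RT_TASK | 1 */
};

static const char *
default_kind (enum region_type rt)
{
  /* Equality against RT_TASK would miss RT_UNTIED_TASK; the mask catches both.  */
  return (rt & RT_TASK) == 0 ? "shared" : "unspecified";
}

int
main (void)
{
  printf ("%s %s\n", default_kind (RT_PARALLEL), default_kind (RT_UNTIED_TASK));
  return 0;
}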
340 XDELETE (c); 343 XDELETE (c);
341 } 344 }
342 345
343 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int); 346 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
344 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool); 347 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
345
346 /* A subroutine of append_to_statement_list{,_force}. T is not NULL. */
347
348 static void
349 append_to_statement_list_1 (tree t, tree *list_p)
350 {
351 tree list = *list_p;
352 tree_stmt_iterator i;
353
354 if (!list)
355 {
356 if (t && TREE_CODE (t) == STATEMENT_LIST)
357 {
358 *list_p = t;
359 return;
360 }
361 *list_p = list = alloc_stmt_list ();
362 }
363
364 i = tsi_last (list);
365 tsi_link_after (&i, t, TSI_CONTINUE_LINKING);
366 }
367
368 /* Add T to the end of the list container pointed to by LIST_P.
369 If T is an expression with no effects, it is ignored. */
370
371 void
372 append_to_statement_list (tree t, tree *list_p)
373 {
374 if (t && TREE_SIDE_EFFECTS (t))
375 append_to_statement_list_1 (t, list_p);
376 }
377
378 /* Similar, but the statement is always added, regardless of side effects. */
379
380 void
381 append_to_statement_list_force (tree t, tree *list_p)
382 {
383 if (t != NULL_TREE)
384 append_to_statement_list_1 (t, list_p);
385 }
386 348
387 /* Both gimplify the statement T and append it to *SEQ_P. This function 349 /* Both gimplify the statement T and append it to *SEQ_P. This function
388 behaves exactly as gimplify_stmt, but you don't have to pass T as a 350 behaves exactly as gimplify_stmt, but you don't have to pass T as a
389 reference. */ 351 reference. */
390 352
507 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type)); 469 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
508 470
509 tmp_var = create_tmp_var_raw (type, prefix); 471 tmp_var = create_tmp_var_raw (type, prefix);
510 gimple_add_tmp_var (tmp_var); 472 gimple_add_tmp_var (tmp_var);
511 return tmp_var; 473 return tmp_var;
474 }
475
476 /* Create a new temporary variable declaration of type TYPE by calling
477 create_tmp_var and if TYPE is a vector or a complex number, mark the new
478 temporary as gimple register. */
479
480 tree
481 create_tmp_reg (tree type, const char *prefix)
482 {
483 tree tmp;
484
485 tmp = create_tmp_var (type, prefix);
486 if (TREE_CODE (type) == COMPLEX_TYPE
487 || TREE_CODE (type) == VECTOR_TYPE)
488 DECL_GIMPLE_REG_P (tmp) = 1;
489
490 return tmp;
512 } 491 }
513 492
514 /* Create a temporary with a name derived from VAL. Subroutine of 493 /* Create a temporary with a name derived from VAL. Subroutine of
515 lookup_tmp_var; nobody else should call this function. */ 494 lookup_tmp_var; nobody else should call this function. */
516 495
562 return ret; 541 return ret;
563 } 542 }
564 543
565 544
566 /* Return true if T is a CALL_EXPR or an expression that can be 545 /* Return true if T is a CALL_EXPR or an expression that can be
567 assignmed to a temporary. Note that this predicate should only be 546 assigned to a temporary. Note that this predicate should only be
568 used during gimplification. See the rationale for this in 547 used during gimplification. See the rationale for this in
569 gimplify_modify_expr. */ 548 gimplify_modify_expr. */
570 549
571 static bool 550 static bool
572 is_gimple_reg_rhs_or_call (tree t) 551 is_gimple_reg_rhs_or_call (tree t)
611 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)) 590 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE))
612 DECL_GIMPLE_REG_P (t) = 1; 591 DECL_GIMPLE_REG_P (t) = 1;
613 592
614 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val)); 593 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
615 594
616 if (EXPR_HAS_LOCATION (val)) 595 SET_EXPR_LOCATION (mod, EXPR_LOC_OR_HERE (val));
617 SET_EXPR_LOCATION (mod, EXPR_LOCATION (val));
618 else
619 SET_EXPR_LOCATION (mod, input_location);
620 596
621 /* gimplify_modify_expr might want to reduce this further. */ 597 /* gimplify_modify_expr might want to reduce this further. */
622 gimplify_and_add (mod, pre_p); 598 gimplify_and_add (mod, pre_p);
623 ggc_free (mod); 599 ggc_free (mod);
624 600
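Several hunks in this change (here and again in shortcut_cond_expr further down) replace the EXPR_HAS_LOCATION ternary with EXPR_LOC_OR_HERE. Judging only from the code being replaced, the macro is equivalent to the sketch below; the exact definition in tree.h is not quoted here and this is an assumption:

/* Assumed shape of EXPR_LOC_OR_HERE, inferred from the ternaries it
   replaces in this diff: prefer the expression's own location, otherwise
   fall back to the current input_location.  */
#define EXPR_LOC_OR_HERE(NODE) \
  (EXPR_HAS_LOCATION (NODE) ? EXPR_LOCATION (NODE) : input_location)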
676 temps = nreverse (last); 652 temps = nreverse (last);
677 653
678 block = gimple_bind_block (scope); 654 block = gimple_bind_block (scope);
679 gcc_assert (!block || TREE_CODE (block) == BLOCK); 655 gcc_assert (!block || TREE_CODE (block) == BLOCK);
680 if (!block || !debug_info) 656 if (!block || !debug_info)
681 { 657 {
682 TREE_CHAIN (last) = gimple_bind_vars (scope); 658 DECL_CHAIN (last) = gimple_bind_vars (scope);
683 gimple_bind_set_vars (scope, temps); 659 gimple_bind_set_vars (scope, temps);
684 } 660 }
685 else 661 else
686 { 662 {
687 /* We need to attach the nodes both to the BIND_EXPR and to its 663 /* We need to attach the nodes both to the BIND_EXPR and to its
688 associated BLOCK for debugging purposes. The key point here 664 associated BLOCK for debugging purposes. The key point here
689 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR 665 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
725 } 701 }
726 702
727 void 703 void
728 gimple_add_tmp_var (tree tmp) 704 gimple_add_tmp_var (tree tmp)
729 { 705 {
730 gcc_assert (!TREE_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp)); 706 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
731 707
732 /* Later processing assumes that the object size is constant, which might 708 /* Later processing assumes that the object size is constant, which might
733 not be true at this point. Force the use of a constant upper bound in 709 not be true at this point. Force the use of a constant upper bound in
734 this case. */ 710 this case. */
735 if (!host_integerp (DECL_SIZE_UNIT (tmp), 1)) 711 if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
738 DECL_CONTEXT (tmp) = current_function_decl; 714 DECL_CONTEXT (tmp) = current_function_decl;
739 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1; 715 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
740 716
741 if (gimplify_ctxp) 717 if (gimplify_ctxp)
742 { 718 {
743 TREE_CHAIN (tmp) = gimplify_ctxp->temps; 719 DECL_CHAIN (tmp) = gimplify_ctxp->temps;
744 gimplify_ctxp->temps = tmp; 720 gimplify_ctxp->temps = tmp;
745 721
746 /* Mark temporaries local within the nearest enclosing parallel. */ 722 /* Mark temporaries local within the nearest enclosing parallel. */
747 if (gimplify_omp_ctxp) 723 if (gimplify_omp_ctxp)
748 { 724 {
845 { 821 {
846 gimple gs = gsi_stmt (i); 822 gimple gs = gsi_stmt (i);
847 annotate_one_with_location (gs, location); 823 annotate_one_with_location (gs, location);
848 } 824 }
849 } 825 }
850 826
851 827 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
852 /* Similar to copy_tree_r() but do not copy SAVE_EXPR or TARGET_EXPR nodes. 828 nodes that are referenced more than once in GENERIC functions. This is
829 necessary because gimplification (translation into GIMPLE) is performed
830 by modifying tree nodes in-place, so gimplication of a shared node in a
831 first context could generate an invalid GIMPLE form in a second context.
832
833 This is achieved with a simple mark/copy/unmark algorithm that walks the
834 GENERIC representation top-down, marks nodes with TREE_VISITED the first
835 time it encounters them, duplicates them if they already have TREE_VISITED
836 set, and finally removes the TREE_VISITED marks it has set.
837
838 The algorithm works only at the function level, i.e. it generates a GENERIC
839 representation of a function with no nodes shared within the function when
840 passed a GENERIC function (except for nodes that are allowed to be shared).
841
842 At the global level, it is also necessary to unshare tree nodes that are
843 referenced in more than one function, for the same aforementioned reason.
844 This requires some cooperation from the front-end. There are 2 strategies:
845
846 1. Manual unsharing. The front-end needs to call unshare_expr on every
847 expression that might end up being shared across functions.
848
849 2. Deep unsharing. This is an extension of regular unsharing. Instead
850 of calling unshare_expr on expressions that might be shared across
851 functions, the front-end pre-marks them with TREE_VISITED. This will
852 ensure that they are unshared on the first reference within functions
853 when the regular unsharing algorithm runs. The counterpart is that
854 this algorithm must look deeper than for manual unsharing, which is
855 specified by LANG_HOOKS_DEEP_UNSHARING.
856
857 If there are only few specific cases of node sharing across functions, it is
858 probably easier for a front-end to unshare the expressions manually. On the
859 contrary, if the expressions generated at the global level are as widespread
860 as expressions generated within functions, deep unsharing is very likely the
861 way to go. */
862
863 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
853 These nodes model computations that should only be done once. If we 864 These nodes model computations that should only be done once. If we
854 were to unshare something like SAVE_EXPR(i++), the gimplification 865 were to unshare something like SAVE_EXPR(i++), the gimplification
855 process would create wrong code. */ 866 process would create wrong code. */
856 867
857 static tree 868 static tree
858 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data) 869 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
859 { 870 {
860 enum tree_code code = TREE_CODE (*tp); 871 tree t = *tp;
861 /* Don't unshare types, decls, constants and SAVE_EXPR nodes. */ 872 enum tree_code code = TREE_CODE (t);
862 if (TREE_CODE_CLASS (code) == tcc_type 873
863 || TREE_CODE_CLASS (code) == tcc_declaration 874 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
864 || TREE_CODE_CLASS (code) == tcc_constant 875 copy their subtrees if we can make sure to do it only once. */
865 || code == SAVE_EXPR || code == TARGET_EXPR 876 if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
866 /* We can't do anything sensible with a BLOCK used as an expression, 877 {
867 but we also can't just die when we see it because of non-expression 878 if (data && !pointer_set_insert ((struct pointer_set_t *)data, t))
868 uses. So just avert our eyes and cross our fingers. Silly Java. */ 879 ;
869 || code == BLOCK) 880 else
881 *walk_subtrees = 0;
882 }
883
884 /* Stop at types, decls, constants like copy_tree_r. */
885 else if (TREE_CODE_CLASS (code) == tcc_type
886 || TREE_CODE_CLASS (code) == tcc_declaration
887 || TREE_CODE_CLASS (code) == tcc_constant
888 /* We can't do anything sensible with a BLOCK used as an
889 expression, but we also can't just die when we see it
890 because of non-expression uses. So we avert our eyes
891 and cross our fingers. Silly Java. */
892 || code == BLOCK)
870 *walk_subtrees = 0; 893 *walk_subtrees = 0;
894
895 /* Cope with the statement expression extension. */
896 else if (code == STATEMENT_LIST)
897 ;
898
899 /* Leave the bulk of the work to copy_tree_r itself. */
871 else 900 else
872 { 901 copy_tree_r (tp, walk_subtrees, NULL);
873 gcc_assert (code != BIND_EXPR);
874 copy_tree_r (tp, walk_subtrees, data);
875 }
876 902
877 return NULL_TREE; 903 return NULL_TREE;
878 } 904 }
879 905
880 /* Callback for walk_tree to unshare most of the shared trees rooted at 906 /* Callback for walk_tree to unshare most of the shared trees rooted at
881 *TP. If *TP has been visited already (i.e., TREE_VISITED (*TP) == 1), 907 *TP. If *TP has been visited already (i.e., TREE_VISITED (*TP) == 1),
882 then *TP is deep copied by calling copy_tree_r. 908 then *TP is deep copied by calling mostly_copy_tree_r. */
883
884 This unshares the same trees as copy_tree_r with the exception of
885 SAVE_EXPR nodes. These nodes model computations that should only be
886 done once. If we were to unshare something like SAVE_EXPR(i++), the
887 gimplification process would create wrong code. */
888 909
889 static tree 910 static tree
890 copy_if_shared_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, 911 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
891 void *data ATTRIBUTE_UNUSED)
892 { 912 {
893 tree t = *tp; 913 tree t = *tp;
894 enum tree_code code = TREE_CODE (t); 914 enum tree_code code = TREE_CODE (t);
895 915
896 /* Skip types, decls, and constants. But we do want to look at their 916 /* Skip types, decls, and constants. But we do want to look at their
909 929
910 /* If this node has been visited already, unshare it and don't look 930 /* If this node has been visited already, unshare it and don't look
911 any deeper. */ 931 any deeper. */
912 else if (TREE_VISITED (t)) 932 else if (TREE_VISITED (t))
913 { 933 {
914 walk_tree (tp, mostly_copy_tree_r, NULL, NULL); 934 walk_tree (tp, mostly_copy_tree_r, data, NULL);
915 *walk_subtrees = 0; 935 *walk_subtrees = 0;
916 } 936 }
917 937
918 /* Otherwise, mark the tree as visited and keep looking. */ 938 /* Otherwise, mark the node as visited and keep looking. */
919 else 939 else
920 TREE_VISITED (t) = 1; 940 TREE_VISITED (t) = 1;
921 941
922 return NULL_TREE; 942 return NULL_TREE;
923 } 943 }
924 944
925 static tree 945 /* Unshare most of the shared trees rooted at *TP. */
926 unmark_visited_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, 946
927 void *data ATTRIBUTE_UNUSED) 947 static inline void
928 { 948 copy_if_shared (tree *tp)
929 if (TREE_VISITED (*tp)) 949 {
930 TREE_VISITED (*tp) = 0; 950 /* If the language requires deep unsharing, we need a pointer set to make
931 else 951 sure we don't repeatedly unshare subtrees of unshareable nodes. */
932 *walk_subtrees = 0; 952 struct pointer_set_t *visited
933 953 = lang_hooks.deep_unsharing ? pointer_set_create () : NULL;
934 return NULL_TREE; 954 walk_tree (tp, copy_if_shared_r, visited, NULL);
955 if (visited)
956 pointer_set_destroy (visited);
935 } 957 }
936 958
937 /* Unshare all the trees in BODY_P, a pointer into the body of FNDECL, and the 959 /* Unshare all the trees in BODY_P, a pointer into the body of FNDECL, and the
938 bodies of any nested functions if we are unsharing the entire body of 960 bodies of any nested functions if we are unsharing the entire body of
939 FNDECL. */ 961 FNDECL. */
941 static void 963 static void
942 unshare_body (tree *body_p, tree fndecl) 964 unshare_body (tree *body_p, tree fndecl)
943 { 965 {
944 struct cgraph_node *cgn = cgraph_node (fndecl); 966 struct cgraph_node *cgn = cgraph_node (fndecl);
945 967
946 walk_tree (body_p, copy_if_shared_r, NULL, NULL); 968 copy_if_shared (body_p);
969
947 if (body_p == &DECL_SAVED_TREE (fndecl)) 970 if (body_p == &DECL_SAVED_TREE (fndecl))
948 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested) 971 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
949 unshare_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl); 972 unshare_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
950 } 973 }
951 974
975 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
976 Subtrees are walked until the first unvisited node is encountered. */
977
978 static tree
979 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
980 {
981 tree t = *tp;
982
983 /* If this node has been visited, unmark it and keep looking. */
984 if (TREE_VISITED (t))
985 TREE_VISITED (t) = 0;
986
987 /* Otherwise, don't look any deeper. */
988 else
989 *walk_subtrees = 0;
990
991 return NULL_TREE;
992 }
993
994 /* Unmark the visited trees rooted at *TP. */
995
996 static inline void
997 unmark_visited (tree *tp)
998 {
999 walk_tree (tp, unmark_visited_r, NULL, NULL);
1000 }
1001
952 /* Likewise, but mark all trees as not visited. */ 1002 /* Likewise, but mark all trees as not visited. */
953 1003
954 static void 1004 static void
955 unvisit_body (tree *body_p, tree fndecl) 1005 unvisit_body (tree *body_p, tree fndecl)
956 { 1006 {
957 struct cgraph_node *cgn = cgraph_node (fndecl); 1007 struct cgraph_node *cgn = cgraph_node (fndecl);
958 1008
959 walk_tree (body_p, unmark_visited_r, NULL, NULL); 1009 unmark_visited (body_p);
1010
960 if (body_p == &DECL_SAVED_TREE (fndecl)) 1011 if (body_p == &DECL_SAVED_TREE (fndecl))
961 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested) 1012 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
962 unvisit_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl); 1013 unvisit_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
963 } 1014 }
964 1015
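The unsharing helpers above (mostly_copy_tree_r, copy_if_shared_r, unmark_visited_r and their wrappers) implement the mark/copy/unmark scheme described in the long comment earlier in this hunk. A self-contained toy version of that scheme on a binary tree, with hypothetical names and none of GCC's real data structures, looks like this:

#include <stdlib.h>

/* Toy node: 'visited' plays the role of TREE_VISITED.  */
struct node
{
  int value;
  int visited;
  struct node *kid[2];
};

/* Deep-copy a subtree; copies start out unmarked.  */
static struct node *
copy_deep (const struct node *n)
{
  struct node *c;
  if (!n)
    return NULL;
  c = (struct node *) malloc (sizeof *c);
  c->value = n->value;
  c->visited = 0;
  c->kid[0] = copy_deep (n->kid[0]);
  c->kid[1] = copy_deep (n->kid[1]);
  return c;
}

/* Mark/copy pass: the first reference marks a node, any later reference
   to an already-marked node is replaced by a private copy.  */
static void
copy_if_shared (struct node **slot)
{
  struct node *n = *slot;
  if (!n)
    return;
  if (n->visited)
    {
      *slot = copy_deep (n);
      return;
    }
  n->visited = 1;
  copy_if_shared (&n->kid[0]);
  copy_if_shared (&n->kid[1]);
}

/* Unmark pass: clear the marks, stopping at the first unvisited node.  */
static void
unmark_visited (struct node *n)
{
  if (!n || !n->visited)
    return;
  n->visited = 0;
  unmark_visited (n->kid[0]);
  unmark_visited (n->kid[1]);
}

int
main (void)
{
  struct node leaf = { 42, 0, { NULL, NULL } };
  struct node root = { 1, 0, { &leaf, &leaf } };  /* leaf referenced twice */
  struct node *tree = &root;

  copy_if_shared (&tree);   /* second reference to leaf gets its own copy */
  unmark_visited (tree);
  return tree->kid[0] == tree->kid[1];  /* 0: no sharing left */
}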
1082 gimple_seq body; 1133 gimple_seq body;
1083 1134
1084 tree temp = voidify_wrapper_expr (bind_expr, NULL); 1135 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1085 1136
1086 /* Mark variables seen in this bind expr. */ 1137 /* Mark variables seen in this bind expr. */
1087 for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t)) 1138 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1088 { 1139 {
1089 if (TREE_CODE (t) == VAR_DECL) 1140 if (TREE_CODE (t) == VAR_DECL)
1090 { 1141 {
1091 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; 1142 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1092 1143
1102 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun) 1153 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1103 cfun->has_local_explicit_reg_vars = true; 1154 cfun->has_local_explicit_reg_vars = true;
1104 } 1155 }
1105 1156
1106 /* Preliminarily mark non-addressed complex variables as eligible 1157 /* Preliminarily mark non-addressed complex variables as eligible
1107 for promotion to gimple registers. We'll transform their uses 1158 for promotion to gimple registers. We'll transform their uses
1108 as we find them. 1159 as we find them. */
1109 We exclude complex types if not optimizing because they can be 1160 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1110 subject to partial stores in GNU C by means of the __real__ and 1161 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1111 __imag__ operators and we cannot promote them to total stores 1162 && !TREE_THIS_VOLATILE (t)
1112 (see gimplify_modify_expr_complex_part). */ 1163 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1113 if (optimize 1164 && !needs_to_live_in_memory (t))
1114 && (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE 1165 DECL_GIMPLE_REG_P (t) = 1;
1115 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1116 && !TREE_THIS_VOLATILE (t)
1117 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1118 && !needs_to_live_in_memory (t))
1119 DECL_GIMPLE_REG_P (t) = 1;
1120 } 1166 }
1121 1167
1122 gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL, 1168 gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1123 BIND_EXPR_BLOCK (bind_expr)); 1169 BIND_EXPR_BLOCK (bind_expr));
1124 gimple_push_bind_expr (gimple_bind); 1170 gimple_push_bind_expr (gimple_bind);
1222 we don't want to extend the lifetime of the RESULT_DECL, particularly 1268 we don't want to extend the lifetime of the RESULT_DECL, particularly
1223 across another call. In addition, for those aggregates for which 1269 across another call. In addition, for those aggregates for which
1224 hard_function_value generates a PARALLEL, we'll die during normal 1270 hard_function_value generates a PARALLEL, we'll die during normal
1225 expansion of structure assignments; there's special code in expand_return 1271 expansion of structure assignments; there's special code in expand_return
1226 to handle this case that does not exist in expand_expr. */ 1272 to handle this case that does not exist in expand_expr. */
1227 if (!result_decl 1273 if (!result_decl)
1228 || aggregate_value_p (result_decl, TREE_TYPE (current_function_decl))) 1274 result = NULL_TREE;
1229 result = result_decl; 1275 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1276 {
1277 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1278 {
1279 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1280 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1281 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1282 should be effectively allocated by the caller, i.e. all calls to
1283 this function must be subject to the Return Slot Optimization. */
1284 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1285 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1286 }
1287 result = result_decl;
1288 }
1230 else if (gimplify_ctxp->return_temp) 1289 else if (gimplify_ctxp->return_temp)
1231 result = gimplify_ctxp->return_temp; 1290 result = gimplify_ctxp->return_temp;
1232 else 1291 else
1233 { 1292 {
1234 result = create_tmp_var (TREE_TYPE (result_decl), NULL); 1293 result = create_tmp_reg (TREE_TYPE (result_decl), NULL);
1235 if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
1236 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
1237 DECL_GIMPLE_REG_P (result) = 1;
1238 1294
1239 /* ??? With complex control flow (usually involving abnormal edges), 1295 /* ??? With complex control flow (usually involving abnormal edges),
1240 we can wind up warning about an uninitialized value for this. Due 1296 we can wind up warning about an uninitialized value for this. Due
1241 to how this variable is constructed and initialized, this is never 1297 to how this variable is constructed and initialized, this is never
1242 true. Give up and never warn. */ 1298 true. Give up and never warn. */
1282 SET_DECL_VALUE_EXPR (decl, t); 1338 SET_DECL_VALUE_EXPR (decl, t);
1283 DECL_HAS_VALUE_EXPR_P (decl) = 1; 1339 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1284 1340
1285 t = built_in_decls[BUILT_IN_ALLOCA]; 1341 t = built_in_decls[BUILT_IN_ALLOCA];
1286 t = build_call_expr (t, 1, DECL_SIZE_UNIT (decl)); 1342 t = build_call_expr (t, 1, DECL_SIZE_UNIT (decl));
1343 /* The call has been built for a variable-sized object. */
1344 ALLOCA_FOR_VAR_P (t) = 1;
1287 t = fold_convert (ptr_type, t); 1345 t = fold_convert (ptr_type, t);
1288 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t); 1346 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1289 1347
1290 gimplify_and_add (t, seq_p); 1348 gimplify_and_add (t, seq_p);
1291 1349
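gimplify_vla_decl above now flags the __builtin_alloca call it builds with ALLOCA_FOR_VAR_P, so later passes can distinguish compiler-generated allocations for variable-sized objects from explicit alloca calls in user code. A small example of the kind of declaration this path handles:

#include <string.h>

/* Illustrative only: 'buf' has variable size, so its DECL_SIZE is not an
   INTEGER_CST and gimplify_vla_decl rewrites it to use storage returned by
   __builtin_alloca -- a call now flagged with ALLOCA_FOR_VAR_P.  */
int
use_vla (int n)
{
  char buf[n];
  memset (buf, 1, n);
  return buf[n - 1];
}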
1323 && flag_stack_check == GENERIC_STACK_CHECK 1381 && flag_stack_check == GENERIC_STACK_CHECK
1324 && compare_tree_int (DECL_SIZE_UNIT (decl), 1382 && compare_tree_int (DECL_SIZE_UNIT (decl),
1325 STACK_CHECK_MAX_VAR_SIZE) > 0)) 1383 STACK_CHECK_MAX_VAR_SIZE) > 0))
1326 gimplify_vla_decl (decl, seq_p); 1384 gimplify_vla_decl (decl, seq_p);
1327 1385
1386 /* Some front ends do not explicitly declare all anonymous
1387 artificial variables. We compensate here by declaring the
1388 variables, though it would be better if the front ends would
1389 explicitly declare them. */
1390 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1391 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1392 gimple_add_tmp_var (decl);
1393
1328 if (init && init != error_mark_node) 1394 if (init && init != error_mark_node)
1329 { 1395 {
1330 if (!TREE_STATIC (decl)) 1396 if (!TREE_STATIC (decl))
1331 { 1397 {
1332 DECL_INITIAL (decl) = NULL_TREE; 1398 DECL_INITIAL (decl) = NULL_TREE;
1333 init = build2 (INIT_EXPR, void_type_node, decl, init); 1399 init = build2 (INIT_EXPR, void_type_node, decl, init);
1334 gimplify_and_add (init, seq_p); 1400 gimplify_and_add (init, seq_p);
1335 ggc_free (init); 1401 ggc_free (init);
1336 } 1402 }
1337 else 1403 else
1338 /* We must still examine initializers for static variables 1404 /* We must still examine initializers for static variables
1339 as they may contain a label address. */ 1405 as they may contain a label address. */
1340 walk_tree (&init, force_labels_r, NULL, NULL); 1406 walk_tree (&init, force_labels_r, NULL, NULL);
1341 } 1407 }
1342
1343 /* Some front ends do not explicitly declare all anonymous
1344 artificial variables. We compensate here by declaring the
1345 variables, though it would be better if the front ends would
1346 explicitly declare them. */
1347 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1348 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1349 gimple_add_tmp_var (decl);
1350 } 1408 }
1351 1409
1352 return GS_ALL_DONE; 1410 return GS_ALL_DONE;
1353 } 1411 }
1354 1412
1427 /* Sort the case labels in LABEL_VEC in place in ascending order. */ 1485 /* Sort the case labels in LABEL_VEC in place in ascending order. */
1428 1486
1429 void 1487 void
1430 sort_case_labels (VEC(tree,heap)* label_vec) 1488 sort_case_labels (VEC(tree,heap)* label_vec)
1431 { 1489 {
1432 size_t len = VEC_length (tree, label_vec); 1490 VEC_qsort (tree, label_vec, compare_case_labels);
1433 qsort (VEC_address (tree, label_vec), len, sizeof (tree),
1434 compare_case_labels);
1435 } 1491 }
1436 1492
1437 1493
1438 /* Gimplify a SWITCH_EXPR, and collect a TREE_VEC of the labels it can 1494 /* Gimplify a SWITCH_EXPR, and collect a TREE_VEC of the labels it can
1439 branch to. */ 1495 branch to. */
1822 if (TREE_CODE (decl) == VAR_DECL 1878 if (TREE_CODE (decl) == VAR_DECL
1823 && !DECL_SEEN_IN_BIND_EXPR_P (decl) 1879 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1824 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl) 1880 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1825 && decl_function_context (decl) == current_function_decl) 1881 && decl_function_context (decl) == current_function_decl)
1826 { 1882 {
1827 gcc_assert (errorcount || sorrycount); 1883 gcc_assert (seen_error ());
1828 return GS_ERROR; 1884 return GS_ERROR;
1829 } 1885 }
1830 1886
1831 /* When within an OpenMP context, notice uses of variables. */ 1887 /* When within an OpenMP context, notice uses of variables. */
1832 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true)) 1888 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1838 tree value_expr = DECL_VALUE_EXPR (decl); 1894 tree value_expr = DECL_VALUE_EXPR (decl);
1839 1895
1840 /* For referenced nonlocal VLAs add a decl for debugging purposes 1896 /* For referenced nonlocal VLAs add a decl for debugging purposes
1841 to the current function. */ 1897 to the current function. */
1842 if (TREE_CODE (decl) == VAR_DECL 1898 if (TREE_CODE (decl) == VAR_DECL
1843 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST 1899 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1844 && nonlocal_vlas != NULL 1900 && nonlocal_vlas != NULL
1845 && TREE_CODE (value_expr) == INDIRECT_REF 1901 && TREE_CODE (value_expr) == INDIRECT_REF
1846 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL 1902 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
1847 && decl_function_context (decl) != current_function_decl) 1903 && decl_function_context (decl) != current_function_decl)
1848 { 1904 {
1849 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; 1905 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1850 while (ctx && ctx->region_type == ORT_WORKSHARE) 1906 while (ctx && ctx->region_type == ORT_WORKSHARE)
1851 ctx = ctx->outer_context; 1907 ctx = ctx->outer_context;
1852 if (!ctx && !pointer_set_insert (nonlocal_vlas, decl)) 1908 if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
1853 { 1909 {
1854 tree copy = copy_node (decl), block; 1910 tree copy = copy_node (decl), block;
1855 1911
1856 lang_hooks.dup_lang_specific_decl (copy); 1912 lang_hooks.dup_lang_specific_decl (copy);
1857 SET_DECL_RTL (copy, NULL_RTX); 1913 SET_DECL_RTL (copy, 0);
1858 TREE_USED (copy) = 1; 1914 TREE_USED (copy) = 1;
1859 block = DECL_INITIAL (current_function_decl); 1915 block = DECL_INITIAL (current_function_decl);
1860 TREE_CHAIN (copy) = BLOCK_VARS (block); 1916 DECL_CHAIN (copy) = BLOCK_VARS (block);
1861 BLOCK_VARS (block) = copy; 1917 BLOCK_VARS (block) = copy;
1862 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr)); 1918 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
1863 DECL_HAS_VALUE_EXPR_P (copy) = 1; 1919 DECL_HAS_VALUE_EXPR_P (copy) = 1;
1864 } 1920 }
1865 } 1921 }
1866 1922
1867 *expr_p = unshare_expr (value_expr); 1923 *expr_p = unshare_expr (value_expr);
1868 return GS_OK; 1924 return GS_OK;
1869 } 1925 }
1870 1926
1898 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p, 1954 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
1899 fallback_t fallback) 1955 fallback_t fallback)
1900 { 1956 {
1901 tree *p; 1957 tree *p;
1902 VEC(tree,heap) *stack; 1958 VEC(tree,heap) *stack;
1903 enum gimplify_status ret = GS_OK, tret; 1959 enum gimplify_status ret = GS_ALL_DONE, tret;
1904 int i; 1960 int i;
1905 location_t loc = EXPR_LOCATION (*expr_p); 1961 location_t loc = EXPR_LOCATION (*expr_p);
1962 tree expr = *expr_p;
1906 1963
1907 /* Create a stack of the subexpressions so later we can walk them in 1964 /* Create a stack of the subexpressions so later we can walk them in
1908 order from inner to outer. */ 1965 order from inner to outer. */
1909 stack = VEC_alloc (tree, heap, 10); 1966 stack = VEC_alloc (tree, heap, 10);
1910 1967
2054 2111
2055 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */ 2112 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2056 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF) 2113 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2057 { 2114 {
2058 canonicalize_component_ref (expr_p); 2115 canonicalize_component_ref (expr_p);
2059 ret = MIN (ret, GS_OK);
2060 } 2116 }
2061 2117
2062 VEC_free (tree, heap, stack); 2118 VEC_free (tree, heap, stack);
2119
2120 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2063 2121
2064 return ret; 2122 return ret;
2065 } 2123 }
2066 2124
2067 /* Gimplify the self modifying expression pointed to by EXPR_P 2125 /* Gimplify the self modifying expression pointed to by EXPR_P
2427 internal_get_tmp_var which will then be handled by 2485 internal_get_tmp_var which will then be handled by
2428 gimplify_modify_expr. */ 2486 gimplify_modify_expr. */
2429 if (!want_value) 2487 if (!want_value)
2430 { 2488 {
2431 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we 2489 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2432 have to do is replicate it as a GIMPLE_CALL tuple. */ 2490 have to do is replicate it as a GIMPLE_CALL tuple. */
2491 gimple_stmt_iterator gsi;
2433 call = gimple_build_call_from_tree (*expr_p); 2492 call = gimple_build_call_from_tree (*expr_p);
2434 gimplify_seq_add_stmt (pre_p, call); 2493 gimplify_seq_add_stmt (pre_p, call);
2494 gsi = gsi_last (*pre_p);
2495 fold_stmt (&gsi);
2435 *expr_p = NULL_TREE; 2496 *expr_p = NULL_TREE;
2436 } 2497 }
2437 2498
2438 return ret; 2499 return ret;
2439 } 2500 }
2566 /* If there is no 'else', turn 2627 /* If there is no 'else', turn
2567 if (a && b) then c 2628 if (a && b) then c
2568 into 2629 into
2569 if (a) if (b) then c. */ 2630 if (a) if (b) then c. */
2570 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR) 2631 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2571 { 2632 {
2572 /* Keep the original source location on the first 'if'. */ 2633 /* Keep the original source location on the first 'if'. */
2573 location_t locus = EXPR_HAS_LOCATION (expr) 2634 location_t locus = EXPR_LOC_OR_HERE (expr);
2574 ? EXPR_LOCATION (expr) : input_location; 2635 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2575 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1); 2636 /* Set the source location of the && on the second 'if'. */
2576 /* Set the source location of the && on the second 'if'. */ 2637 if (EXPR_HAS_LOCATION (pred))
2577 if (EXPR_HAS_LOCATION (pred)) 2638 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2578 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred)); 2639 then_ = shortcut_cond_expr (expr);
2579 then_ = shortcut_cond_expr (expr); 2640 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2580 then_se = then_ && TREE_SIDE_EFFECTS (then_); 2641 pred = TREE_OPERAND (pred, 0);
2581 pred = TREE_OPERAND (pred, 0); 2642 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2582 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE); 2643 SET_EXPR_LOCATION (expr, locus);
2583 SET_EXPR_LOCATION (expr, locus); 2644 }
2584 }
2585 } 2645 }
2586 2646
2587 if (!then_se) 2647 if (!then_se)
2588 { 2648 {
2589 /* If there is no 'then', turn 2649 /* If there is no 'then', turn
2590 if (a || b); else d 2650 if (a || b); else d
2591 into 2651 into
2592 if (a); else if (b); else d. */ 2652 if (a); else if (b); else d. */
2593 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR) 2653 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2594 { 2654 {
2595 /* Keep the original source location on the first 'if'. */ 2655 /* Keep the original source location on the first 'if'. */
2596 location_t locus = EXPR_HAS_LOCATION (expr) 2656 location_t locus = EXPR_LOC_OR_HERE (expr);
2597 ? EXPR_LOCATION (expr) : input_location; 2657 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2598 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1); 2658 /* Set the source location of the || on the second 'if'. */
2599 /* Set the source location of the || on the second 'if'. */ 2659 if (EXPR_HAS_LOCATION (pred))
2600 if (EXPR_HAS_LOCATION (pred)) 2660 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2601 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred)); 2661 else_ = shortcut_cond_expr (expr);
2602 else_ = shortcut_cond_expr (expr); 2662 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2603 else_se = else_ && TREE_SIDE_EFFECTS (else_); 2663 pred = TREE_OPERAND (pred, 0);
2604 pred = TREE_OPERAND (pred, 0); 2664 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2605 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_); 2665 SET_EXPR_LOCATION (expr, locus);
2606 SET_EXPR_LOCATION (expr, locus); 2666 }
2607 }
2608 } 2667 }
2609 2668
2610 /* If we're done, great. */ 2669 /* If we're done, great. */
2611 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR 2670 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2612 && TREE_CODE (pred) != TRUTH_ORIF_EXPR) 2671 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2656 false_label_p = NULL; 2715 false_label_p = NULL;
2657 2716
2658 /* If there was nothing else in our arms, just forward the label(s). */ 2717 /* If there was nothing else in our arms, just forward the label(s). */
2659 if (!then_se && !else_se) 2718 if (!then_se && !else_se)
2660 return shortcut_cond_r (pred, true_label_p, false_label_p, 2719 return shortcut_cond_r (pred, true_label_p, false_label_p,
2661 EXPR_HAS_LOCATION (expr) 2720 EXPR_LOC_OR_HERE (expr));
2662 ? EXPR_LOCATION (expr) : input_location);
2663 2721
2664 /* If our last subexpression already has a terminal label, reuse it. */ 2722 /* If our last subexpression already has a terminal label, reuse it. */
2665 if (else_se) 2723 if (else_se)
2666 t = expr_last (else_); 2724 t = expr_last (else_);
2667 else if (then_se) 2725 else if (then_se)
2689 inappropriate warning about control reaching the end of a 2747 inappropriate warning about control reaching the end of a
2690 non-void function. */ 2748 non-void function. */
2691 jump_over_else = block_may_fallthru (then_); 2749 jump_over_else = block_may_fallthru (then_);
2692 2750
2693 pred = shortcut_cond_r (pred, true_label_p, false_label_p, 2751 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
2694 EXPR_HAS_LOCATION (expr) 2752 EXPR_LOC_OR_HERE (expr));
2695 ? EXPR_LOCATION (expr) : input_location);
2696 2753
2697 expr = NULL; 2754 expr = NULL;
2698 append_to_statement_list (pred, &expr); 2755 append_to_statement_list (pred, &expr);
2699 2756
2700 append_to_statement_list (then_, &expr); 2757 append_to_statement_list (then_, &expr);
2729 tree 2786 tree
2730 gimple_boolify (tree expr) 2787 gimple_boolify (tree expr)
2731 { 2788 {
2732 tree type = TREE_TYPE (expr); 2789 tree type = TREE_TYPE (expr);
2733 location_t loc = EXPR_LOCATION (expr); 2790 location_t loc = EXPR_LOCATION (expr);
2791
2792 if (TREE_CODE (expr) == NE_EXPR
2793 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
2794 && integer_zerop (TREE_OPERAND (expr, 1)))
2795 {
2796 tree call = TREE_OPERAND (expr, 0);
2797 tree fn = get_callee_fndecl (call);
2798
2799 /* For __builtin_expect ((long) (x), y) recurse into x as well
2800 if x is truth_value_p. */
2801 if (fn
2802 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
2803 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
2804 && call_expr_nargs (call) == 2)
2805 {
2806 tree arg = CALL_EXPR_ARG (call, 0);
2807 if (arg)
2808 {
2809 if (TREE_CODE (arg) == NOP_EXPR
2810 && TREE_TYPE (arg) == TREE_TYPE (call))
2811 arg = TREE_OPERAND (arg, 0);
2812 if (truth_value_p (TREE_CODE (arg)))
2813 {
2814 arg = gimple_boolify (arg);
2815 CALL_EXPR_ARG (call, 0)
2816 = fold_convert_loc (loc, TREE_TYPE (call), arg);
2817 }
2818 }
2819 }
2820 }
2734 2821
2735 if (TREE_CODE (type) == BOOLEAN_TYPE) 2822 if (TREE_CODE (type) == BOOLEAN_TYPE)
2736 return expr; 2823 return expr;
2737 2824
2738 switch (TREE_CODE (expr)) 2825 switch (TREE_CODE (expr))
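The new block in gimple_boolify above looks through __builtin_expect: when the front end has wrapped a truth-valued expression as __builtin_expect ((long) cond, 1) compared against zero, the first argument is boolified as well. A user-level example of the shape it recognizes (illustrative, not taken from the testsuite):

/* 'a && b' is truth-valued, so gimple_boolify now recurses into it through
   the __builtin_expect wrapper (the front end emits roughly
   __builtin_expect ((long) (a && b), 1) != 0 for this condition).  */
int
likely_both (int a, int b)
{
  if (__builtin_expect (a && b, 1))
    return 1;
  return 0;
}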
2833 2920
2834 static enum gimplify_status 2921 static enum gimplify_status
2835 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback) 2922 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
2836 { 2923 {
2837 tree expr = *expr_p; 2924 tree expr = *expr_p;
2838 tree tmp, type, arm1, arm2; 2925 tree type = TREE_TYPE (expr);
2926 location_t loc = EXPR_LOCATION (expr);
2927 tree tmp, arm1, arm2;
2839 enum gimplify_status ret; 2928 enum gimplify_status ret;
2840 tree label_true, label_false, label_cont; 2929 tree label_true, label_false, label_cont;
2841 bool have_then_clause_p, have_else_clause_p; 2930 bool have_then_clause_p, have_else_clause_p;
2842 gimple gimple_cond; 2931 gimple gimple_cond;
2843 enum tree_code pred_code; 2932 enum tree_code pred_code;
2844 gimple_seq seq = NULL; 2933 gimple_seq seq = NULL;
2845 location_t loc = EXPR_LOCATION (*expr_p);
2846
2847 type = TREE_TYPE (expr);
2848 2934
2849 /* If this COND_EXPR has a value, copy the values into a temporary within 2935 /* If this COND_EXPR has a value, copy the values into a temporary within
2850 the arms. */ 2936 the arms. */
2851 if (! VOID_TYPE_P (type)) 2937 if (!VOID_TYPE_P (type))
2852 { 2938 {
2939 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
2853 tree result; 2940 tree result;
2854 2941
2855 /* If an rvalue is ok or we do not require an lvalue, avoid creating 2942 /* If either an rvalue is ok or we do not require an lvalue, create the
2856 an addressable temporary. */ 2943 temporary. But we cannot do that if the type is addressable. */
2857 if (((fallback & fb_rvalue) 2944 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
2858 || !(fallback & fb_lvalue)) 2945 && !TREE_ADDRESSABLE (type))
2859 && !TREE_ADDRESSABLE (type)) 2946 {
2860 { 2947 if (gimplify_ctxp->allow_rhs_cond_expr
2861 if (gimplify_ctxp->allow_rhs_cond_expr 2948 /* If either branch has side effects or could trap, it can't be
2862 /* If either branch has side effects or could trap, it can't be 2949 evaluated unconditionally. */
2863 evaluated unconditionally. */ 2950 && !TREE_SIDE_EFFECTS (then_)
2864 && !TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1)) 2951 && !generic_expr_could_trap_p (then_)
2865 && !generic_expr_could_trap_p (TREE_OPERAND (*expr_p, 1)) 2952 && !TREE_SIDE_EFFECTS (else_)
2866 && !TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 2)) 2953 && !generic_expr_could_trap_p (else_))
2867 && !generic_expr_could_trap_p (TREE_OPERAND (*expr_p, 2))) 2954 return gimplify_pure_cond_expr (expr_p, pre_p);
2868 return gimplify_pure_cond_expr (expr_p, pre_p); 2955
2869 2956 tmp = create_tmp_var (type, "iftmp");
2870 result = tmp = create_tmp_var (TREE_TYPE (expr), "iftmp"); 2957 result = tmp;
2871 ret = GS_ALL_DONE; 2958 }
2872 } 2959
2960 /* Otherwise, only create and copy references to the values. */
2873 else 2961 else
2874 { 2962 {
2875 tree type = build_pointer_type (TREE_TYPE (expr)); 2963 type = build_pointer_type (type);
2876 2964
2877 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node) 2965 if (!VOID_TYPE_P (TREE_TYPE (then_)))
2878 TREE_OPERAND (expr, 1) = 2966 then_ = build_fold_addr_expr_loc (loc, then_);
2879 build_fold_addr_expr_loc (loc, TREE_OPERAND (expr, 1)); 2967
2880 2968 if (!VOID_TYPE_P (TREE_TYPE (else_)))
2881 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node) 2969 else_ = build_fold_addr_expr_loc (loc, else_);
2882 TREE_OPERAND (expr, 2) = 2970
2883 build_fold_addr_expr_loc (loc, TREE_OPERAND (expr, 2)); 2971 expr
2884 2972 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
2885 tmp = create_tmp_var (type, "iftmp"); 2973
2886 2974 tmp = create_tmp_var (type, "iftmp");
2887 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (expr, 0), 2975 result = build_simple_mem_ref_loc (loc, tmp);
2888 TREE_OPERAND (expr, 1), TREE_OPERAND (expr, 2)); 2976 }
2889 2977
2890 result = build_fold_indirect_ref_loc (loc, tmp); 2978 /* Build the new then clause, `tmp = then_;'. But don't build the
2891 } 2979 assignment if the value is void; in C++ it can be if it's a throw. */
2892 2980 if (!VOID_TYPE_P (TREE_TYPE (then_)))
2893 /* Build the then clause, 't1 = a;'. But don't build an assignment 2981 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
2894 if this branch is void; in C++ it can be, if it's a throw. */ 2982
2895 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node) 2983 /* Similarly, build the new else clause, `tmp = else_;'. */
2896 TREE_OPERAND (expr, 1) 2984 if (!VOID_TYPE_P (TREE_TYPE (else_)))
2897 = build2 (MODIFY_EXPR, TREE_TYPE (tmp), tmp, TREE_OPERAND (expr, 1)); 2985 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
2898
2899 /* Build the else clause, 't1 = b;'. */
2900 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2901 TREE_OPERAND (expr, 2)
2902 = build2 (MODIFY_EXPR, TREE_TYPE (tmp), tmp, TREE_OPERAND (expr, 2));
2903 2986
2904 TREE_TYPE (expr) = void_type_node; 2987 TREE_TYPE (expr) = void_type_node;
2905 recalculate_side_effects (expr); 2988 recalculate_side_effects (expr);
2906 2989
2907 /* Move the COND_EXPR to the prequeue. */ 2990 /* Move the COND_EXPR to the prequeue. */
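The reworked gimplify_cond_expr above still lowers a valued ?: into a branch that assigns a temporary (the "iftmp" created with create_tmp_var), falling back to a pointer temporary when the type is addressable or an lvalue is required. Roughly, at the source level, and with the temporary name purely illustrative:

/* Source form:  return c ? a + 1 : b * 2;
   The COND_EXPR is voidified, moved to the prequeue as a branch, and the
   value flows through a temporary (spelled 'iftmp' here only for
   illustration).  */
int
pick (int c, int a, int b)
{
  int iftmp;
  if (c)
    iftmp = a + 1;
  else
    iftmp = b * 2;
  return iftmp;
}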
3111 /* tmp = memcpy() */ 3194 /* tmp = memcpy() */
3112 t = create_tmp_var (TREE_TYPE (to_ptr), NULL); 3195 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3113 gimple_call_set_lhs (gs, t); 3196 gimple_call_set_lhs (gs, t);
3114 gimplify_seq_add_stmt (seq_p, gs); 3197 gimplify_seq_add_stmt (seq_p, gs);
3115 3198
3116 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t); 3199 *expr_p = build_simple_mem_ref (t);
3117 return GS_ALL_DONE; 3200 return GS_ALL_DONE;
3118 } 3201 }
3119 3202
3120 gimplify_seq_add_stmt (seq_p, gs); 3203 gimplify_seq_add_stmt (seq_p, gs);
3121 *expr_p = NULL; 3204 *expr_p = NULL;
3195 return t; 3278 return t;
3196 3279
3197 /* If the constructor component is indirect, determine if we have a 3280 /* If the constructor component is indirect, determine if we have a
3198 potential overlap with the lhs. The only bits of information we 3281 potential overlap with the lhs. The only bits of information we
3199 have to go on at this point are addressability and alias sets. */ 3282 have to go on at this point are addressability and alias sets. */
3200 if (TREE_CODE (t) == INDIRECT_REF 3283 if ((INDIRECT_REF_P (t)
3284 || TREE_CODE (t) == MEM_REF)
3201 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl)) 3285 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3202 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t))) 3286 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3203 return t; 3287 return t;
3204 3288
3205 /* If the constructor component is a call, determine if it can hide a 3289 /* If the constructor component is a call, determine if it can hide a
3206 potential overlap with the lhs through an INDIRECT_REF like above. */ 3290 potential overlap with the lhs through an INDIRECT_REF like above.
3291 ??? Ugh - this is completely broken. In fact this whole analysis
3292 doesn't look conservative. */
3207 if (TREE_CODE (t) == CALL_EXPR) 3293 if (TREE_CODE (t) == CALL_EXPR)
3208 { 3294 {
3209 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t))); 3295 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3210 3296
3211 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type)) 3297 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3250 { 3336 {
3251 unsigned HOST_WIDE_INT ix; 3337 unsigned HOST_WIDE_INT ix;
3252 constructor_elt *ce; 3338 constructor_elt *ce;
3253 VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p); 3339 VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p);
3254 3340
3255 for (ix = 0; VEC_iterate (constructor_elt, v, ix, ce); ix++) 3341 FOR_EACH_VEC_ELT (constructor_elt, v, ix, ce)
3256 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data); 3342 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
3257 3343
3258 return; 3344 return;
3259 } 3345 }
3260 3346
3261 /* If this is a variable sized type, we must remember the size. */ 3347 /* If this is a variable sized type, we must remember the size. */
3612 case RECORD_TYPE: 3698 case RECORD_TYPE:
3613 case UNION_TYPE: 3699 case UNION_TYPE:
3614 case QUAL_UNION_TYPE: 3700 case QUAL_UNION_TYPE:
3615 case ARRAY_TYPE: 3701 case ARRAY_TYPE:
3616 { 3702 {
3617 struct gimplify_init_ctor_preeval_data preeval_data; 3703 struct gimplify_init_ctor_preeval_data preeval_data;
3618 HOST_WIDE_INT num_type_elements, num_ctor_elements; 3704 HOST_WIDE_INT num_type_elements, num_ctor_elements;
3619 HOST_WIDE_INT num_nonzero_elements; 3705 HOST_WIDE_INT num_nonzero_elements;
3620 bool cleared, valid_const_initializer; 3706 bool cleared, valid_const_initializer;
3621 3707
3622 /* Aggregate types must lower constructors to initialization of 3708 /* Aggregate types must lower constructors to initialization of
3623 individual elements. The exception is that a CONSTRUCTOR node 3709 individual elements. The exception is that a CONSTRUCTOR node
3624 with no elements indicates zero-initialization of the whole. */ 3710 with no elements indicates zero-initialization of the whole. */
3625 if (VEC_empty (constructor_elt, elts)) 3711 if (VEC_empty (constructor_elt, elts))
3626 { 3712 {
3627 if (notify_temp_creation) 3713 if (notify_temp_creation)
3628 return GS_OK; 3714 return GS_OK;
3629 break; 3715 break;
3630 } 3716 }
3631 3717
3632 /* Fetch information about the constructor to direct later processing. 3718 /* Fetch information about the constructor to direct later processing.
3633 We might want to make static versions of it in various cases, and 3719 We might want to make static versions of it in various cases, and
3634 can only do so if it known to be a valid constant initializer. */ 3720 can only do so if it known to be a valid constant initializer. */
3635 valid_const_initializer 3721 valid_const_initializer
3636 = categorize_ctor_elements (ctor, &num_nonzero_elements, 3722 = categorize_ctor_elements (ctor, &num_nonzero_elements,
3637 &num_ctor_elements, &cleared); 3723 &num_ctor_elements, &cleared);
3638 3724
3639 /* If a const aggregate variable is being initialized, then it 3725 /* If a const aggregate variable is being initialized, then it
3640 should never be a lose to promote the variable to be static. */ 3726 should never be a lose to promote the variable to be static. */
3641 if (valid_const_initializer 3727 if (valid_const_initializer
3642 && num_nonzero_elements > 1 3728 && num_nonzero_elements > 1
3643 && TREE_READONLY (object) 3729 && TREE_READONLY (object)
3644 && TREE_CODE (object) == VAR_DECL 3730 && TREE_CODE (object) == VAR_DECL
3645 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object))) 3731 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
3646 { 3732 {
3647 if (notify_temp_creation) 3733 if (notify_temp_creation)
3648 return GS_ERROR; 3734 return GS_ERROR;
3649 DECL_INITIAL (object) = ctor; 3735 DECL_INITIAL (object) = ctor;
3650 TREE_STATIC (object) = 1; 3736 TREE_STATIC (object) = 1;
3651 if (!DECL_NAME (object)) 3737 if (!DECL_NAME (object))
3652 DECL_NAME (object) = create_tmp_var_name ("C"); 3738 DECL_NAME (object) = create_tmp_var_name ("C");
3653 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL); 3739 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
3654 3740
3655 /* ??? C++ doesn't automatically append a .<number> to the 3741 /* ??? C++ doesn't automatically append a .<number> to the
3656 assembler name, and even when it does, it looks a FE private 3742 assembler name, and even when it does, it looks a FE private
3657 data structures to figure out what that number should be, 3743 data structures to figure out what that number should be,
3658 which are not set for this variable. I suppose this is 3744 which are not set for this variable. I suppose this is
3659 important for local statics for inline functions, which aren't 3745 important for local statics for inline functions, which aren't
3660 "local" in the object file sense. So in order to get a unique 3746 "local" in the object file sense. So in order to get a unique
3661 TU-local symbol, we must invoke the lhd version now. */ 3747 TU-local symbol, we must invoke the lhd version now. */
3662 lhd_set_decl_assembler_name (object); 3748 lhd_set_decl_assembler_name (object);
3663 3749
3664 *expr_p = NULL_TREE; 3750 *expr_p = NULL_TREE;
3665 break; 3751 break;
3666 } 3752 }
3667 3753
3668 /* If there are "lots" of initialized elements, even discounting 3754 /* If there are "lots" of initialized elements, even discounting
3669 those that are not address constants (and thus *must* be 3755 those that are not address constants (and thus *must* be
3670 computed at runtime), then partition the constructor into 3756 computed at runtime), then partition the constructor into
3671 constant and non-constant parts. Block copy the constant 3757 constant and non-constant parts. Block copy the constant
3672 parts in, then generate code for the non-constant parts. */ 3758 parts in, then generate code for the non-constant parts. */
3673 /* TODO. There's code in cp/typeck.c to do this. */ 3759 /* TODO. There's code in cp/typeck.c to do this. */
3674 3760
3675 num_type_elements = count_type_elements (type, true); 3761 num_type_elements = count_type_elements (type, true);
3676 3762
3677 /* If count_type_elements could not determine number of type elements 3763 /* If count_type_elements could not determine number of type elements
3678 for a constant-sized object, assume clearing is needed. 3764 for a constant-sized object, assume clearing is needed.
3679 Don't do this for variable-sized objects, as store_constructor 3765 Don't do this for variable-sized objects, as store_constructor
3680 will ignore the clearing of variable-sized objects. */ 3766 will ignore the clearing of variable-sized objects. */
3681 if (num_type_elements < 0 && int_size_in_bytes (type) >= 0) 3767 if (num_type_elements < 0 && int_size_in_bytes (type) >= 0)
3682 cleared = true; 3768 cleared = true;
3683 /* If there are "lots" of zeros, then block clear the object first. */ 3769 /* If there are "lots" of zeros, then block clear the object first. */
3684 else if (num_type_elements - num_nonzero_elements 3770 else if (num_type_elements - num_nonzero_elements
3685 > CLEAR_RATIO (optimize_function_for_speed_p (cfun)) 3771 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
3686 && num_nonzero_elements < num_type_elements/4) 3772 && num_nonzero_elements < num_type_elements/4)
3687 cleared = true; 3773 cleared = true;
3688 /* ??? This bit ought not be needed. For any element not present 3774 /* ??? This bit ought not be needed. For any element not present
3689 in the initializer, we should simply set it to zero. Except 3775 in the initializer, we should simply set it to zero. Except
3690 we'd need to *find* the elements that are not present, and that 3776 we'd need to *find* the elements that are not present, and that
3691 requires trickery to avoid quadratic compile-time behavior in 3777 requires trickery to avoid quadratic compile-time behavior in
3692 large cases or excessive memory use in small cases. */ 3778 large cases or excessive memory use in small cases. */
3693 else if (num_ctor_elements < num_type_elements) 3779 else if (num_ctor_elements < num_type_elements)
3694 cleared = true; 3780 cleared = true;
3695 3781
3696 /* If there are "lots" of initialized elements, and all of them 3782 /* If there are "lots" of initialized elements, and all of them
3697 are valid address constants, then the entire initializer can 3783 are valid address constants, then the entire initializer can
3698 be dropped to memory, and then memcpy'd out. Don't do this 3784 be dropped to memory, and then memcpy'd out. Don't do this
3699 for sparse arrays, though, as it's more efficient to follow 3785 for sparse arrays, though, as it's more efficient to follow
3700 the standard CONSTRUCTOR behavior of memset followed by 3786 the standard CONSTRUCTOR behavior of memset followed by
3701 individual element initialization. Also don't do this for small 3787 individual element initialization. Also don't do this for small
3702 all-zero initializers (which aren't big enough to merit 3788 all-zero initializers (which aren't big enough to merit
3703 clearing), and don't try to make bitwise copies of 3789 clearing), and don't try to make bitwise copies of
3704 TREE_ADDRESSABLE types. */ 3790 TREE_ADDRESSABLE types. */
3705 if (valid_const_initializer 3791 if (valid_const_initializer
3706 && !(cleared || num_nonzero_elements == 0) 3792 && !(cleared || num_nonzero_elements == 0)
3707 && !TREE_ADDRESSABLE (type)) 3793 && !TREE_ADDRESSABLE (type))
3708 { 3794 {
3709 HOST_WIDE_INT size = int_size_in_bytes (type); 3795 HOST_WIDE_INT size = int_size_in_bytes (type);
3710 unsigned int align; 3796 unsigned int align;
3711 3797
3712 /* ??? We can still get unbounded array types, at least 3798 /* ??? We can still get unbounded array types, at least
3713 from the C++ front end. This seems wrong, but attempt 3799 from the C++ front end. This seems wrong, but attempt
3714 to work around it for now. */ 3800 to work around it for now. */
3715 if (size < 0) 3801 if (size < 0)
3716 { 3802 {
3717 size = int_size_in_bytes (TREE_TYPE (object)); 3803 size = int_size_in_bytes (TREE_TYPE (object));
3718 if (size >= 0) 3804 if (size >= 0)
3719 TREE_TYPE (ctor) = type = TREE_TYPE (object); 3805 TREE_TYPE (ctor) = type = TREE_TYPE (object);
3720 } 3806 }
3721 3807
3722 /* Find the maximum alignment we can assume for the object. */ 3808 /* Find the maximum alignment we can assume for the object. */
3723 /* ??? Make use of DECL_OFFSET_ALIGN. */ 3809 /* ??? Make use of DECL_OFFSET_ALIGN. */
3724 if (DECL_P (object)) 3810 if (DECL_P (object))
3725 align = DECL_ALIGN (object); 3811 align = DECL_ALIGN (object);
3726 else 3812 else
3727 align = TYPE_ALIGN (type); 3813 align = TYPE_ALIGN (type);
3728 3814
3729 if (size > 0 3815 if (size > 0
3730 && num_nonzero_elements > 1 3816 && num_nonzero_elements > 1
3731 && !can_move_by_pieces (size, align)) 3817 && !can_move_by_pieces (size, align))
3732 { 3818 {
3733 tree new_tree; 3819 if (notify_temp_creation)
3734 3820 return GS_ERROR;
3735 if (notify_temp_creation) 3821
3736 return GS_ERROR; 3822 walk_tree (&ctor, force_labels_r, NULL, NULL);
3737 3823 ctor = tree_output_constant_def (ctor);
3738 new_tree = create_tmp_var_raw (type, "C"); 3824 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
3739 3825 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
3740 gimple_add_tmp_var (new_tree); 3826 TREE_OPERAND (*expr_p, 1) = ctor;
3741 TREE_STATIC (new_tree) = 1; 3827
3742 TREE_READONLY (new_tree) = 1; 3828 /* This is no longer an assignment of a CONSTRUCTOR, but
3743 DECL_INITIAL (new_tree) = ctor; 3829 we still may have processing to do on the LHS. So
3744 if (align > DECL_ALIGN (new_tree)) 3830 pretend we didn't do anything here to let that happen. */
3745 { 3831 return GS_UNHANDLED;
3746 DECL_ALIGN (new_tree) = align; 3832 }
3747 DECL_USER_ALIGN (new_tree) = 1; 3833 }
3748 } 3834
3749 walk_tree (&DECL_INITIAL (new_tree), force_labels_r, NULL, NULL); 3835 /* If the target is volatile, we have non-zero elements and more than
3750 3836 one field to assign, initialize the target from a temporary. */
3751 TREE_OPERAND (*expr_p, 1) = new_tree; 3837 if (TREE_THIS_VOLATILE (object)
3752 3838 && !TREE_ADDRESSABLE (type)
3753 /* This is no longer an assignment of a CONSTRUCTOR, but 3839 && num_nonzero_elements > 0
3754 we still may have processing to do on the LHS. So 3840 && VEC_length (constructor_elt, elts) > 1)
3755 pretend we didn't do anything here to let that happen. */ 3841 {
3756 return GS_UNHANDLED; 3842 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type), NULL);
3757 } 3843 TREE_OPERAND (*expr_p, 0) = temp;
3758 } 3844 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
3759 3845 *expr_p,
3760 if (notify_temp_creation) 3846 build2 (MODIFY_EXPR, void_type_node,
3761 return GS_OK; 3847 object, temp));
3762 3848 return GS_OK;
3763 /* If there are nonzero elements, pre-evaluate to capture elements 3849 }
3764 overlapping with the lhs into temporaries. We must do this before 3850
3765 clearing to fetch the values before they are zeroed-out. */ 3851 if (notify_temp_creation)
3766 if (num_nonzero_elements > 0) 3852 return GS_OK;
3767 { 3853
3768 preeval_data.lhs_base_decl = get_base_address (object); 3854 /* If there are nonzero elements and if needed, pre-evaluate to capture
3769 if (!DECL_P (preeval_data.lhs_base_decl)) 3855 elements overlapping with the lhs into temporaries. We must do this
3770 preeval_data.lhs_base_decl = NULL; 3856 before clearing to fetch the values before they are zeroed-out. */
3771 preeval_data.lhs_alias_set = get_alias_set (object); 3857 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
3772 3858 {
3773 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1), 3859 preeval_data.lhs_base_decl = get_base_address (object);
3774 pre_p, post_p, &preeval_data); 3860 if (!DECL_P (preeval_data.lhs_base_decl))
3775 } 3861 preeval_data.lhs_base_decl = NULL;
3776 3862 preeval_data.lhs_alias_set = get_alias_set (object);
3777 if (cleared) 3863
3778 { 3864 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
3779 /* Zap the CONSTRUCTOR element list, which simplifies this case. 3865 pre_p, post_p, &preeval_data);
3780 Note that we still have to gimplify, in order to handle the 3866 }
3781 case of variable sized types. Avoid shared tree structures. */ 3867
3782 CONSTRUCTOR_ELTS (ctor) = NULL; 3868 if (cleared)
3783 TREE_SIDE_EFFECTS (ctor) = 0; 3869 {
3784 object = unshare_expr (object); 3870 /* Zap the CONSTRUCTOR element list, which simplifies this case.
3785 gimplify_stmt (expr_p, pre_p); 3871 Note that we still have to gimplify, in order to handle the
3786 } 3872 case of variable sized types. Avoid shared tree structures. */
3787 3873 CONSTRUCTOR_ELTS (ctor) = NULL;
3788 /* If we have not block cleared the object, or if there are nonzero 3874 TREE_SIDE_EFFECTS (ctor) = 0;
3789 elements in the constructor, add assignments to the individual 3875 object = unshare_expr (object);
3790 scalar fields of the object. */ 3876 gimplify_stmt (expr_p, pre_p);
3791 if (!cleared || num_nonzero_elements > 0) 3877 }
3792 gimplify_init_ctor_eval (object, elts, pre_p, cleared); 3878
3793 3879 /* If we have not block cleared the object, or if there are nonzero
3794 *expr_p = NULL_TREE; 3880 elements in the constructor, add assignments to the individual
3881 scalar fields of the object. */
3882 if (!cleared || num_nonzero_elements > 0)
3883 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
3884
3885 *expr_p = NULL_TREE;
3795 } 3886 }
3796 break; 3887 break;
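For reference, a hypothetical C sketch (not part of the patch) of inputs that exercise the aggregate branch just shown: a read-only local aggregate with a fully constant initializer is a candidate for promotion to a TU-local static (or, when too large to move by pieces, for being dropped to memory via tree_output_constant_def), while the new volatile-target check initializes the object from a temporary.

/* Illustrative sketch only: inputs for the RECORD_TYPE/ARRAY_TYPE branch
   of gimplify_init_constructor.  */
struct rgb { unsigned char r, g, b; };

int
lookup (int i)
{
  /* Fully constant, read-only initializer: a candidate for promotion
     to a TU-local static constant instead of per-element stores.  */
  const struct rgb palette[4] = {
    { 255, 0, 0 }, { 0, 255, 0 }, { 0, 0, 255 }, { 255, 255, 255 }
  };
  return palette[i & 3].g;
}

volatile struct rgb led;

void
set_led (void)
{
  /* Volatile target with several non-zero fields: under the new check
     the constructor is built in a temporary which is then assigned to
     the volatile object.  */
  led = (struct rgb) { 10, 20, 30 };
}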
3797 3888
3798 case COMPLEX_TYPE: 3889 case COMPLEX_TYPE:
3799 { 3890 {
3800 tree r, i; 3891 tree r, i;
3801 3892
3802 if (notify_temp_creation) 3893 if (notify_temp_creation)
3803 return GS_OK; 3894 return GS_OK;
3804 3895
3805 /* Extract the real and imaginary parts out of the ctor. */ 3896 /* Extract the real and imaginary parts out of the ctor. */
3806 gcc_assert (VEC_length (constructor_elt, elts) == 2); 3897 gcc_assert (VEC_length (constructor_elt, elts) == 2);
3807 r = VEC_index (constructor_elt, elts, 0)->value; 3898 r = VEC_index (constructor_elt, elts, 0)->value;
3808 i = VEC_index (constructor_elt, elts, 1)->value; 3899 i = VEC_index (constructor_elt, elts, 1)->value;
3809 if (r == NULL || i == NULL) 3900 if (r == NULL || i == NULL)
3810 { 3901 {
3811 tree zero = fold_convert (TREE_TYPE (type), integer_zero_node); 3902 tree zero = build_zero_cst (TREE_TYPE (type));
3812 if (r == NULL) 3903 if (r == NULL)
3813 r = zero; 3904 r = zero;
3814 if (i == NULL) 3905 if (i == NULL)
3815 i = zero; 3906 i = zero;
3816 } 3907 }
3817 3908
3818 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to 3909 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
3819 represent creation of a complex value. */ 3910 represent creation of a complex value. */
3820 if (TREE_CONSTANT (r) && TREE_CONSTANT (i)) 3911 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
3821 { 3912 {
3822 ctor = build_complex (type, r, i); 3913 ctor = build_complex (type, r, i);
3823 TREE_OPERAND (*expr_p, 1) = ctor; 3914 TREE_OPERAND (*expr_p, 1) = ctor;
3824 } 3915 }
3825 else 3916 else
3826 { 3917 {
3827 ctor = build2 (COMPLEX_EXPR, type, r, i); 3918 ctor = build2 (COMPLEX_EXPR, type, r, i);
3828 TREE_OPERAND (*expr_p, 1) = ctor; 3919 TREE_OPERAND (*expr_p, 1) = ctor;
3829 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1), 3920 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
3830 pre_p, 3921 pre_p,
3831 post_p, 3922 post_p,
3832 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)), 3923 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
3833 fb_rvalue); 3924 fb_rvalue);
3834 } 3925 }
3835 } 3926 }
3836 break; 3927 break;
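A hedged C99 sketch of initializers that reach the COMPLEX_TYPE branch above: when both parts are constant the CONSTRUCTOR folds to a COMPLEX_CST, and with a variable part it becomes a COMPLEX_EXPR that is gimplified as an rvalue.

#include <complex.h>

/* Illustrative sketch only: complex values built from two parts.  */
double complex
make_pair (double im)
{
  double complex k = 1.0 + 2.0 * I;   /* constant parts -> COMPLEX_CST  */
  double complex v = 3.0 + im * I;    /* variable part  -> COMPLEX_EXPR */
  return k + v;
}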
3837 3928
3838 case VECTOR_TYPE: 3929 case VECTOR_TYPE:
3839 { 3930 {
3840 unsigned HOST_WIDE_INT ix; 3931 unsigned HOST_WIDE_INT ix;
3841 constructor_elt *ce; 3932 constructor_elt *ce;
3842 3933
3843 if (notify_temp_creation) 3934 if (notify_temp_creation)
3844 return GS_OK; 3935 return GS_OK;
3845 3936
3846 /* Go ahead and simplify constant constructors to VECTOR_CST. */ 3937 /* Go ahead and simplify constant constructors to VECTOR_CST. */
3847 if (TREE_CONSTANT (ctor)) 3938 if (TREE_CONSTANT (ctor))
3848 { 3939 {
3849 bool constant_p = true; 3940 bool constant_p = true;
3850 tree value; 3941 tree value;
3851 3942
3852 /* Even when ctor is constant, it might contain non-*_CST 3943 /* Even when ctor is constant, it might contain non-*_CST
3853 elements, such as addresses or trapping values like 3944 elements, such as addresses or trapping values like
3854 1.0/0.0 - 1.0/0.0. Such expressions don't belong 3945 1.0/0.0 - 1.0/0.0. Such expressions don't belong
3855 in VECTOR_CST nodes. */ 3946 in VECTOR_CST nodes. */
3856 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value) 3947 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
3857 if (!CONSTANT_CLASS_P (value)) 3948 if (!CONSTANT_CLASS_P (value))
3858 { 3949 {
3859 constant_p = false; 3950 constant_p = false;
3860 break; 3951 break;
3861 } 3952 }
3862 3953
3863 if (constant_p) 3954 if (constant_p)
3864 { 3955 {
3865 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts); 3956 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
3866 break; 3957 break;
3867 } 3958 }
3868 3959
3869 /* Don't reduce an initializer constant even if we can't 3960 /* Don't reduce an initializer constant even if we can't
3870 make a VECTOR_CST. It won't do anything for us, and it'll 3961 make a VECTOR_CST. It won't do anything for us, and it'll
3871 prevent us from representing it as a single constant. */ 3962 prevent us from representing it as a single constant. */
3872 if (initializer_constant_valid_p (ctor, type)) 3963 if (initializer_constant_valid_p (ctor, type))
3873 break; 3964 break;
3874 3965
3875 TREE_CONSTANT (ctor) = 0; 3966 TREE_CONSTANT (ctor) = 0;
3876 } 3967 }
3877 3968
3878 /* Vector types use CONSTRUCTOR all the way through gimple 3969 /* Vector types use CONSTRUCTOR all the way through gimple
3879 compilation as a general initializer. */ 3970 compilation as a general initializer. */
3880 for (ix = 0; VEC_iterate (constructor_elt, elts, ix, ce); ix++) 3971 FOR_EACH_VEC_ELT (constructor_elt, elts, ix, ce)
3881 { 3972 {
3882 enum gimplify_status tret; 3973 enum gimplify_status tret;
3883 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val, 3974 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
3884 fb_rvalue); 3975 fb_rvalue);
3885 if (tret == GS_ERROR) 3976 if (tret == GS_ERROR)
3886 ret = GS_ERROR; 3977 ret = GS_ERROR;
3887 } 3978 }
3888 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0))) 3979 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
3889 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p); 3980 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
3890 } 3981 }
3891 break; 3982 break;
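As an illustration of the VECTOR_TYPE branch, using the GNU vector extension (the sketch below is hypothetical, not taken from the patch):

/* Illustrative sketch only: vector CONSTRUCTORs as handled above.  */
typedef int v4si __attribute__ ((vector_size (16)));

v4si
combine (int a)
{
  v4si all_const = { 1, 2, 3, 4 };   /* simplified to a VECTOR_CST      */
  v4si mixed     = { a, 0, a, 0 };   /* stays a CONSTRUCTOR; each value
                                        is gimplified to a GIMPLE value */
  return all_const + mixed;
}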
3892 3983
3893 default: 3984 default:
3894 /* So how did we get a CONSTRUCTOR for a scalar type? */ 3985 /* So how did we get a CONSTRUCTOR for a scalar type? */
3926 from the langhooks point of view. */ 4017 from the langhooks point of view. */
3927 4018
3928 tree 4019 tree
3929 gimple_fold_indirect_ref (tree t) 4020 gimple_fold_indirect_ref (tree t)
3930 { 4021 {
3931 tree type = TREE_TYPE (TREE_TYPE (t)); 4022 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
3932 tree sub = t; 4023 tree sub = t;
3933 tree subtype; 4024 tree subtype;
3934 4025
3935 STRIP_USELESS_TYPE_CONVERSION (sub); 4026 STRIP_NOPS (sub);
3936 subtype = TREE_TYPE (sub); 4027 subtype = TREE_TYPE (sub);
3937 if (!POINTER_TYPE_P (subtype)) 4028 if (!POINTER_TYPE_P (subtype))
3938 return NULL_TREE; 4029 return NULL_TREE;
3939 4030
3940 if (TREE_CODE (sub) == ADDR_EXPR) 4031 if (TREE_CODE (sub) == ADDR_EXPR)
3945 if (useless_type_conversion_p (type, optype)) 4036 if (useless_type_conversion_p (type, optype))
3946 return op; 4037 return op;
3947 4038
3948 /* *(foo *)&fooarray => fooarray[0] */ 4039 /* *(foo *)&fooarray => fooarray[0] */
3949 if (TREE_CODE (optype) == ARRAY_TYPE 4040 if (TREE_CODE (optype) == ARRAY_TYPE
3950 && useless_type_conversion_p (type, TREE_TYPE (optype))) 4041 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
4042 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3951 { 4043 {
3952 tree type_domain = TYPE_DOMAIN (optype); 4044 tree type_domain = TYPE_DOMAIN (optype);
3953 tree min_val = size_zero_node; 4045 tree min_val = size_zero_node;
3954 if (type_domain && TYPE_MIN_VALUE (type_domain)) 4046 if (type_domain && TYPE_MIN_VALUE (type_domain))
3955 min_val = TYPE_MIN_VALUE (type_domain); 4047 min_val = TYPE_MIN_VALUE (type_domain);
3956 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE); 4048 if (TREE_CODE (min_val) == INTEGER_CST)
4049 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
3957 } 4050 }
4051 /* *(foo *)&complexfoo => __real__ complexfoo */
4052 else if (TREE_CODE (optype) == COMPLEX_TYPE
4053 && useless_type_conversion_p (type, TREE_TYPE (optype)))
4054 return fold_build1 (REALPART_EXPR, type, op);
4055 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
4056 else if (TREE_CODE (optype) == VECTOR_TYPE
4057 && useless_type_conversion_p (type, TREE_TYPE (optype)))
4058 {
4059 tree part_width = TYPE_SIZE (type);
4060 tree index = bitsize_int (0);
4061 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
4062 }
4063 }
4064
4065 /* *(p + CST) -> ... */
4066 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
4067 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
4068 {
4069 tree addr = TREE_OPERAND (sub, 0);
4070 tree off = TREE_OPERAND (sub, 1);
4071 tree addrtype;
4072
4073 STRIP_NOPS (addr);
4074 addrtype = TREE_TYPE (addr);
4075
4076 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
4077 if (TREE_CODE (addr) == ADDR_EXPR
4078 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
4079 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
4080 {
4081 HOST_WIDE_INT offset = tree_low_cst (off, 0);
4082 tree part_width = TYPE_SIZE (type);
4083 unsigned HOST_WIDE_INT part_widthi
4084 = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
4085 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
4086 tree index = bitsize_int (indexi);
4087 if (offset / part_widthi
4088 <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
4089 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
4090 part_width, index);
4091 }
4092
4093 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
4094 if (TREE_CODE (addr) == ADDR_EXPR
4095 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
4096 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
4097 {
4098 tree size = TYPE_SIZE_UNIT (type);
4099 if (tree_int_cst_equal (size, off))
4100 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
4101 }
4102
4103 /* *(p + CST) -> MEM_REF <p, CST>. */
4104 if (TREE_CODE (addr) != ADDR_EXPR
4105 || DECL_P (TREE_OPERAND (addr, 0)))
4106 return fold_build2 (MEM_REF, type,
4107 addr,
4108 build_int_cst_wide (ptype,
4109 TREE_INT_CST_LOW (off),
4110 TREE_INT_CST_HIGH (off)));
3958 } 4111 }
3959 4112
3960 /* *(foo *)fooarrptr => (*fooarrptr)[0] */ 4113 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
3961 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE 4114 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
4115 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
3962 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype)))) 4116 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
3963 { 4117 {
3964 tree type_domain; 4118 tree type_domain;
3965 tree min_val = size_zero_node; 4119 tree min_val = size_zero_node;
3966 tree osub = sub; 4120 tree osub = sub;
3968 if (! sub) 4122 if (! sub)
3969 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub); 4123 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
3970 type_domain = TYPE_DOMAIN (TREE_TYPE (sub)); 4124 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
3971 if (type_domain && TYPE_MIN_VALUE (type_domain)) 4125 if (type_domain && TYPE_MIN_VALUE (type_domain))
3972 min_val = TYPE_MIN_VALUE (type_domain); 4126 min_val = TYPE_MIN_VALUE (type_domain);
3973 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE); 4127 if (TREE_CODE (min_val) == INTEGER_CST)
4128 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
3974 } 4129 }
3975 4130
3976 return NULL_TREE; 4131 return NULL_TREE;
3977 } 4132 }
3978 4133
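The folding rules above correspond roughly to the source-level rewrites sketched below; the snippet is illustrative only, showing expressions whose gimplified form the folder can simplify (array element, real part of a complex, and a constant pointer offset that now becomes a MEM_REF).

/* Illustrative sketch only: shapes folded by gimple_fold_indirect_ref.  */
double fooarray[8];
_Complex double complexfoo;

double
folds (double *p)
{
  double a = *(double *) &fooarray;     /* -> fooarray[0]             */
  double r = *(double *) &complexfoo;   /* -> __real__ complexfoo     */
  double o = *(p + 3);                  /* *(p + CST) -> MEM_REF form */
  return a + r + o;
}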
3996 static enum gimplify_status 4151 static enum gimplify_status
3997 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p, 4152 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
3998 gimple_seq *pre_p, gimple_seq *post_p, 4153 gimple_seq *pre_p, gimple_seq *post_p,
3999 bool want_value) 4154 bool want_value)
4000 { 4155 {
4001 enum gimplify_status ret = GS_OK; 4156 enum gimplify_status ret = GS_UNHANDLED;
4002 4157 bool changed;
4003 while (ret != GS_UNHANDLED) 4158
4004 switch (TREE_CODE (*from_p)) 4159 do
4005 { 4160 {
4006 case VAR_DECL: 4161 changed = false;
4007 /* If we're assigning from a read-only variable initialized with 4162 switch (TREE_CODE (*from_p))
4008 a constructor, do the direct assignment from the constructor, 4163 {
4009 but only if neither source nor target are volatile since this 4164 case VAR_DECL:
4010 latter assignment might end up being done on a per-field basis. */ 4165 /* If we're assigning from a read-only variable initialized with
4011 if (DECL_INITIAL (*from_p) 4166 a constructor, do the direct assignment from the constructor,
4012 && TREE_READONLY (*from_p) 4167 but only if neither source nor target are volatile since this
4013 && !TREE_THIS_VOLATILE (*from_p) 4168 latter assignment might end up being done on a per-field basis. */
4014 && !TREE_THIS_VOLATILE (*to_p) 4169 if (DECL_INITIAL (*from_p)
4015 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR) 4170 && TREE_READONLY (*from_p)
4016 { 4171 && !TREE_THIS_VOLATILE (*from_p)
4017 tree old_from = *from_p; 4172 && !TREE_THIS_VOLATILE (*to_p)
4018 4173 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
4019 /* Move the constructor into the RHS. */ 4174 {
4020 *from_p = unshare_expr (DECL_INITIAL (*from_p)); 4175 tree old_from = *from_p;
4021 4176 enum gimplify_status subret;
4022 /* Let's see if gimplify_init_constructor will need to put 4177
4023 it in memory. If so, revert the change. */ 4178 /* Move the constructor into the RHS. */
4024 ret = gimplify_init_constructor (expr_p, NULL, NULL, false, true); 4179 *from_p = unshare_expr (DECL_INITIAL (*from_p));
4025 if (ret == GS_ERROR) 4180
4026 { 4181 /* Let's see if gimplify_init_constructor will need to put
4027 *from_p = old_from; 4182 it in memory. */
4028 /* Fall through. */ 4183 subret = gimplify_init_constructor (expr_p, NULL, NULL,
4029 } 4184 false, true);
4030 else 4185 if (subret == GS_ERROR)
4031 { 4186 {
4032 ret = GS_OK; 4187 /* If so, revert the change. */
4033 break; 4188 *from_p = old_from;
4034 } 4189 }
4035 } 4190 else
4036 ret = GS_UNHANDLED; 4191 {
4037 break; 4192 ret = GS_OK;
4038 case INDIRECT_REF: 4193 changed = true;
4039 { 4194 }
4040 /* If we have code like 4195 }
4041 4196 break;
4042 *(const A*)(A*)&x 4197 case INDIRECT_REF:
4043 4198 {
4044 where the type of "x" is a (possibly cv-qualified variant 4199 /* If we have code like
4045 of "A"), treat the entire expression as identical to "x". 4200
4046 This kind of code arises in C++ when an object is bound 4201 *(const A*)(A*)&x
4047 to a const reference, and if "x" is a TARGET_EXPR we want 4202
4048 to take advantage of the optimization below. */ 4203 where the type of "x" is a (possibly cv-qualified variant
4049 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0)); 4204 of "A"), treat the entire expression as identical to "x".
4050 if (t) 4205 This kind of code arises in C++ when an object is bound
4051 { 4206 to a const reference, and if "x" is a TARGET_EXPR we want
4052 *from_p = t; 4207 to take advantage of the optimization below. */
4053 ret = GS_OK; 4208 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
4054 } 4209 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
4055 else 4210 if (t)
4056 ret = GS_UNHANDLED; 4211 {
4057 break; 4212 if (TREE_THIS_VOLATILE (t) != volatile_p)
4058 } 4213 {
4059 4214 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
4060 case TARGET_EXPR: 4215 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
4061 { 4216 build_fold_addr_expr (t));
4062 /* If we are initializing something from a TARGET_EXPR, strip the 4217 if (REFERENCE_CLASS_P (t))
4063 TARGET_EXPR and initialize it directly, if possible. This can't 4218 TREE_THIS_VOLATILE (t) = volatile_p;
4064 be done if the initializer is void, since that implies that the 4219 }
4065 temporary is set in some non-trivial way. 4220 *from_p = t;
4066 4221 ret = GS_OK;
4067 ??? What about code that pulls out the temp and uses it 4222 changed = true;
4068 elsewhere? I think that such code never uses the TARGET_EXPR as 4223 }
4069 an initializer. If I'm wrong, we'll die because the temp won't 4224 break;
4070 have any RTL. In that case, I guess we'll need to replace 4225 }
4071 references somehow. */ 4226
4072 tree init = TARGET_EXPR_INITIAL (*from_p); 4227 case TARGET_EXPR:
4073 4228 {
4074 if (init 4229 /* If we are initializing something from a TARGET_EXPR, strip the
4075 && !VOID_TYPE_P (TREE_TYPE (init))) 4230 TARGET_EXPR and initialize it directly, if possible. This can't
4076 { 4231 be done if the initializer is void, since that implies that the
4077 *from_p = init; 4232 temporary is set in some non-trivial way.
4078 ret = GS_OK; 4233
4079 } 4234 ??? What about code that pulls out the temp and uses it
4080 else 4235 elsewhere? I think that such code never uses the TARGET_EXPR as
4081 ret = GS_UNHANDLED; 4236 an initializer. If I'm wrong, we'll die because the temp won't
4082 } 4237 have any RTL. In that case, I guess we'll need to replace
4083 break; 4238 references somehow. */
4084 4239 tree init = TARGET_EXPR_INITIAL (*from_p);
4085 case COMPOUND_EXPR: 4240
4086 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be 4241 if (init
4087 caught. */ 4242 && !VOID_TYPE_P (TREE_TYPE (init)))
4088 gimplify_compound_expr (from_p, pre_p, true); 4243 {
4089 ret = GS_OK; 4244 *from_p = init;
4090 break; 4245 ret = GS_OK;
4091 4246 changed = true;
4092 case CONSTRUCTOR: 4247 }
4093 /* If we're initializing from a CONSTRUCTOR, break this into 4248 }
4094 individual MODIFY_EXPRs. */ 4249 break;
4095 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value, 4250
4096 false); 4251 case COMPOUND_EXPR:
4097 4252 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
4098 case COND_EXPR: 4253 caught. */
4099 /* If we're assigning to a non-register type, push the assignment 4254 gimplify_compound_expr (from_p, pre_p, true);
4100 down into the branches. This is mandatory for ADDRESSABLE types, 4255 ret = GS_OK;
4101 since we cannot generate temporaries for such, but it saves a 4256 changed = true;
4102 copy in other cases as well. */ 4257 break;
4103 if (!is_gimple_reg_type (TREE_TYPE (*from_p))) 4258
4104 { 4259 case CONSTRUCTOR:
4105 /* This code should mirror the code in gimplify_cond_expr. */ 4260 /* If we already made some changes, let the front end have a
4106 enum tree_code code = TREE_CODE (*expr_p); 4261 crack at this before we break it down. */
4107 tree cond = *from_p; 4262 if (ret != GS_UNHANDLED)
4108 tree result = *to_p; 4263 break;
4109 4264 /* If we're initializing from a CONSTRUCTOR, break this into
4110 ret = gimplify_expr (&result, pre_p, post_p, 4265 individual MODIFY_EXPRs. */
4111 is_gimple_lvalue, fb_lvalue); 4266 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
4112 if (ret != GS_ERROR) 4267 false);
4113 ret = GS_OK; 4268
4114 4269 case COND_EXPR:
4115 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node) 4270 /* If we're assigning to a non-register type, push the assignment
4116 TREE_OPERAND (cond, 1) 4271 down into the branches. This is mandatory for ADDRESSABLE types,
4117 = build2 (code, void_type_node, result, 4272 since we cannot generate temporaries for such, but it saves a
4118 TREE_OPERAND (cond, 1)); 4273 copy in other cases as well. */
4119 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node) 4274 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
4120 TREE_OPERAND (cond, 2) 4275 {
4121 = build2 (code, void_type_node, unshare_expr (result), 4276 /* This code should mirror the code in gimplify_cond_expr. */
4122 TREE_OPERAND (cond, 2)); 4277 enum tree_code code = TREE_CODE (*expr_p);
4123 4278 tree cond = *from_p;
4124 TREE_TYPE (cond) = void_type_node; 4279 tree result = *to_p;
4125 recalculate_side_effects (cond); 4280
4126 4281 ret = gimplify_expr (&result, pre_p, post_p,
4127 if (want_value) 4282 is_gimple_lvalue, fb_lvalue);
4128 { 4283 if (ret != GS_ERROR)
4129 gimplify_and_add (cond, pre_p); 4284 ret = GS_OK;
4130 *expr_p = unshare_expr (result); 4285
4131 } 4286 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
4132 else 4287 TREE_OPERAND (cond, 1)
4133 *expr_p = cond; 4288 = build2 (code, void_type_node, result,
4134 return ret; 4289 TREE_OPERAND (cond, 1));
4135 } 4290 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4136 else 4291 TREE_OPERAND (cond, 2)
4137 ret = GS_UNHANDLED; 4292 = build2 (code, void_type_node, unshare_expr (result),
4138 break; 4293 TREE_OPERAND (cond, 2));
4139 4294
4140 case CALL_EXPR: 4295 TREE_TYPE (cond) = void_type_node;
4141 /* For calls that return in memory, give *to_p as the CALL_EXPR's 4296 recalculate_side_effects (cond);
4142 return slot so that we don't generate a temporary. */ 4297
4143 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p) 4298 if (want_value)
4144 && aggregate_value_p (*from_p, *from_p)) 4299 {
4145 { 4300 gimplify_and_add (cond, pre_p);
4146 bool use_target; 4301 *expr_p = unshare_expr (result);
4147 4302 }
4148 if (!(rhs_predicate_for (*to_p))(*from_p)) 4303 else
4149 /* If we need a temporary, *to_p isn't accurate. */ 4304 *expr_p = cond;
4150 use_target = false; 4305 return ret;
4151 else if (TREE_CODE (*to_p) == RESULT_DECL 4306 }
4152 && DECL_NAME (*to_p) == NULL_TREE 4307 break;
4153 && needs_to_live_in_memory (*to_p)) 4308
4154 /* It's OK to use the return slot directly unless it's an NRV. */ 4309 case CALL_EXPR:
4155 use_target = true; 4310 /* For calls that return in memory, give *to_p as the CALL_EXPR's
4156 else if (is_gimple_reg_type (TREE_TYPE (*to_p)) 4311 return slot so that we don't generate a temporary. */
4157 || (DECL_P (*to_p) && DECL_REGISTER (*to_p))) 4312 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4158 /* Don't force regs into memory. */ 4313 && aggregate_value_p (*from_p, *from_p))
4159 use_target = false; 4314 {
4160 else if (TREE_CODE (*expr_p) == INIT_EXPR) 4315 bool use_target;
4161 /* It's OK to use the target directly if it's being 4316
4162 initialized. */ 4317 if (!(rhs_predicate_for (*to_p))(*from_p))
4163 use_target = true; 4318 /* If we need a temporary, *to_p isn't accurate. */
4164 else if (!is_gimple_non_addressable (*to_p)) 4319 use_target = false;
4165 /* Don't use the original target if it's already addressable; 4320 else if (TREE_CODE (*to_p) == RESULT_DECL
4166 if its address escapes, and the called function uses the 4321 && DECL_NAME (*to_p) == NULL_TREE
4167 NRV optimization, a conforming program could see *to_p 4322 && needs_to_live_in_memory (*to_p))
4168 change before the called function returns; see c++/19317. 4323 /* It's OK to use the return slot directly unless it's an NRV. */
4169 When optimizing, the return_slot pass marks more functions 4324 use_target = true;
4170 as safe after we have escape info. */ 4325 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4171 use_target = false; 4326 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4172 else 4327 /* Don't force regs into memory. */
4173 use_target = true; 4328 use_target = false;
4174 4329 else if (TREE_CODE (*expr_p) == INIT_EXPR)
4175 if (use_target) 4330 /* It's OK to use the target directly if it's being
4176 { 4331 initialized. */
4177 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1; 4332 use_target = true;
4178 mark_addressable (*to_p); 4333 else if (!is_gimple_non_addressable (*to_p))
4179 } 4334 /* Don't use the original target if it's already addressable;
4180 } 4335 if its address escapes, and the called function uses the
4181 4336 NRV optimization, a conforming program could see *to_p
4182 ret = GS_UNHANDLED; 4337 change before the called function returns; see c++/19317.
4183 break; 4338 When optimizing, the return_slot pass marks more functions
4184 4339 as safe after we have escape info. */
4185 /* If we're initializing from a container, push the initialization 4340 use_target = false;
4186 inside it. */ 4341 else
4187 case CLEANUP_POINT_EXPR: 4342 use_target = true;
4188 case BIND_EXPR: 4343
4189 case STATEMENT_LIST: 4344 if (use_target)
4190 { 4345 {
4191 tree wrap = *from_p; 4346 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4192 tree t; 4347 mark_addressable (*to_p);
4193 4348 }
4194 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval, 4349 }
4195 fb_lvalue); 4350 break;
4196 if (ret != GS_ERROR) 4351
4197 ret = GS_OK; 4352 case WITH_SIZE_EXPR:
4198 4353 /* Likewise for calls that return an aggregate of non-constant size,
4199 t = voidify_wrapper_expr (wrap, *expr_p); 4354 since we would not be able to generate a temporary at all. */
4200 gcc_assert (t == *expr_p); 4355 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
4201 4356 {
4202 if (want_value) 4357 *from_p = TREE_OPERAND (*from_p, 0);
4203 { 4358 /* We don't change ret in this case because the
4204 gimplify_and_add (wrap, pre_p); 4359 WITH_SIZE_EXPR might have been added in
4205 *expr_p = unshare_expr (*to_p); 4360 gimplify_modify_expr, so returning GS_OK would lead to an
4206 } 4361 infinite loop. */
4207 else 4362 changed = true;
4208 *expr_p = wrap; 4363 }
4209 return GS_OK; 4364 break;
4210 } 4365
4211 4366 /* If we're initializing from a container, push the initialization
4212 case COMPOUND_LITERAL_EXPR: 4367 inside it. */
4213 { 4368 case CLEANUP_POINT_EXPR:
4214 tree complit = TREE_OPERAND (*expr_p, 1); 4369 case BIND_EXPR:
4215 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit); 4370 case STATEMENT_LIST:
4216 tree decl = DECL_EXPR_DECL (decl_s); 4371 {
4217 tree init = DECL_INITIAL (decl); 4372 tree wrap = *from_p;
4218 4373 tree t;
4219 /* struct T x = (struct T) { 0, 1, 2 } can be optimized 4374
4220 into struct T x = { 0, 1, 2 } if the address of the 4375 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4221 compound literal has never been taken. */ 4376 fb_lvalue);
4222 if (!TREE_ADDRESSABLE (complit) 4377 if (ret != GS_ERROR)
4223 && !TREE_ADDRESSABLE (decl) 4378 ret = GS_OK;
4224 && init) 4379
4225 { 4380 t = voidify_wrapper_expr (wrap, *expr_p);
4226 *expr_p = copy_node (*expr_p); 4381 gcc_assert (t == *expr_p);
4227 TREE_OPERAND (*expr_p, 1) = init; 4382
4228 return GS_OK; 4383 if (want_value)
4229 } 4384 {
4230 } 4385 gimplify_and_add (wrap, pre_p);
4231 4386 *expr_p = unshare_expr (*to_p);
4232 default: 4387 }
4233 ret = GS_UNHANDLED; 4388 else
4234 break; 4389 *expr_p = wrap;
4235 } 4390 return GS_OK;
4391 }
4392
4393 case COMPOUND_LITERAL_EXPR:
4394 {
4395 tree complit = TREE_OPERAND (*expr_p, 1);
4396 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
4397 tree decl = DECL_EXPR_DECL (decl_s);
4398 tree init = DECL_INITIAL (decl);
4399
4400 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4401 into struct T x = { 0, 1, 2 } if the address of the
4402 compound literal has never been taken. */
4403 if (!TREE_ADDRESSABLE (complit)
4404 && !TREE_ADDRESSABLE (decl)
4405 && init)
4406 {
4407 *expr_p = copy_node (*expr_p);
4408 TREE_OPERAND (*expr_p, 1) = init;
4409 return GS_OK;
4410 }
4411 }
4412
4413 default:
4414 break;
4415 }
4416 }
4417 while (changed);
4236 4418
4237 return ret; 4419 return ret;
4238 } 4420 }
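Two source patterns, sketched below purely for illustration, map onto cases in this function: assigning from a read-only variable whose DECL_INITIAL is a CONSTRUCTOR (the VAR_DECL case) and a compound literal whose address is never taken (the COMPOUND_LITERAL_EXPR case, rewritten into a plain initializer).

/* Illustrative sketch only: RHS shapes seen by gimplify_modify_expr_rhs.  */
struct T { int a, b, c; };

struct T
cases (void)
{
  /* VAR_DECL case: 'init' is read-only and has a CONSTRUCTOR as its
     DECL_INITIAL, so the copy below may be done directly from the
     constructor.  */
  static const struct T init = { 1, 2, 3 };
  struct T x = init;

  /* COMPOUND_LITERAL_EXPR case: the literal's address is never taken,
     so this becomes  struct T y = { 4, 5, 6 };  */
  struct T y = (struct T) { 4, 5, 6 };

  x.a += y.a;
  return x;
}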
4239 4421
4240 4422
4400 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value); 4582 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4401 4583
4402 /* Try to alleviate the effects of the gimplification creating artificial 4584 /* Try to alleviate the effects of the gimplification creating artificial
4403 temporaries (see for example is_gimple_reg_rhs) on the debug info. */ 4585 temporaries (see for example is_gimple_reg_rhs) on the debug info. */
4404 if (!gimplify_ctxp->into_ssa 4586 if (!gimplify_ctxp->into_ssa
4405 && DECL_P (*from_p) 4587 && TREE_CODE (*from_p) == VAR_DECL
4406 && DECL_IGNORED_P (*from_p) 4588 && DECL_IGNORED_P (*from_p)
4407 && DECL_P (*to_p) 4589 && DECL_P (*to_p)
4408 && !DECL_IGNORED_P (*to_p)) 4590 && !DECL_IGNORED_P (*to_p))
4409 { 4591 {
4410 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p)) 4592 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4412 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p))); 4594 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
4413 DECL_DEBUG_EXPR_IS_FROM (*from_p) = 1; 4595 DECL_DEBUG_EXPR_IS_FROM (*from_p) = 1;
4414 SET_DECL_DEBUG_EXPR (*from_p, *to_p); 4596 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
4415 } 4597 }
4416 4598
4599 if (want_value && TREE_THIS_VOLATILE (*to_p))
4600 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
4601
4417 if (TREE_CODE (*from_p) == CALL_EXPR) 4602 if (TREE_CODE (*from_p) == CALL_EXPR)
4418 { 4603 {
4419 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL 4604 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4420 instead of a GIMPLE_ASSIGN. */ 4605 instead of a GIMPLE_ASSIGN. */
4421 assign = gimple_build_call_from_tree (*from_p); 4606 assign = gimple_build_call_from_tree (*from_p);
4422 gimple_call_set_lhs (assign, *to_p); 4607 if (!gimple_call_noreturn_p (assign))
4608 gimple_call_set_lhs (assign, *to_p);
4423 } 4609 }
4424 else 4610 else
4425 { 4611 {
4426 assign = gimple_build_assign (*to_p, *from_p); 4612 assign = gimple_build_assign (*to_p, *from_p);
4427 gimple_set_location (assign, EXPR_LOCATION (*expr_p)); 4613 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4438 gimple_set_lhs (assign, *to_p); 4624 gimple_set_lhs (assign, *to_p);
4439 } 4625 }
4440 4626
4441 if (want_value) 4627 if (want_value)
4442 { 4628 {
4443 *expr_p = unshare_expr (*to_p); 4629 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
4444 return GS_OK; 4630 return GS_OK;
4445 } 4631 }
4446 else 4632 else
4447 *expr_p = NULL; 4633 *expr_p = NULL;
4448 4634
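The new want_value handling for volatile LHSs can be pictured with the following sketch (illustrative C, assuming nothing beyond the hunk above): when the value of an assignment to a volatile object is itself used, that value now comes from a temporary holding the RHS rather than from a second read of the volatile location.

/* Illustrative sketch only: a volatile store whose value is reused.  */
volatile int hw_reg;

int
store_and_use (int x)
{
  int copy = (hw_reg = x);   /* value taken from a temporary holding x,
                                not from re-reading hw_reg */
  return copy;
}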
4453 with a call to BUILT_IN_MEMCMP. */ 4639 with a call to BUILT_IN_MEMCMP. */
4454 4640
4455 static enum gimplify_status 4641 static enum gimplify_status
4456 gimplify_variable_sized_compare (tree *expr_p) 4642 gimplify_variable_sized_compare (tree *expr_p)
4457 { 4643 {
4644 location_t loc = EXPR_LOCATION (*expr_p);
4458 tree op0 = TREE_OPERAND (*expr_p, 0); 4645 tree op0 = TREE_OPERAND (*expr_p, 0);
4459 tree op1 = TREE_OPERAND (*expr_p, 1); 4646 tree op1 = TREE_OPERAND (*expr_p, 1);
4460 tree t, arg, dest, src; 4647 tree t, arg, dest, src, expr;
4461 location_t loc = EXPR_LOCATION (*expr_p);
4462 4648
4463 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0)); 4649 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4464 arg = unshare_expr (arg); 4650 arg = unshare_expr (arg);
4465 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0); 4651 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4466 src = build_fold_addr_expr_loc (loc, op1); 4652 src = build_fold_addr_expr_loc (loc, op1);
4467 dest = build_fold_addr_expr_loc (loc, op0); 4653 dest = build_fold_addr_expr_loc (loc, op0);
4468 t = implicit_built_in_decls[BUILT_IN_MEMCMP]; 4654 t = implicit_built_in_decls[BUILT_IN_MEMCMP];
4469 t = build_call_expr_loc (loc, t, 3, dest, src, arg); 4655 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
4470 *expr_p 4656
4657 expr
4471 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node); 4658 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4659 SET_EXPR_LOCATION (expr, loc);
4660 *expr_p = expr;
4472 4661
4473 return GS_OK; 4662 return GS_OK;
4474 } 4663 }
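A rough C-level picture of the lowering performed by gimplify_variable_sized_compare; the function below is a hand-written equivalent, not code emitted by the compiler.

#include <string.h>

/* Illustrative sketch only: an equality test between two aggregates of
   (possibly runtime-determined) size N is rewritten into a call to
   BUILT_IN_MEMCMP compared against zero, roughly:  */
int
aggregates_equal (const void *a, const void *b, size_t n)
{
  return memcmp (a, b, n) == 0;
}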
4475 4664
4476 /* Gimplify a comparison between two aggregate objects of integral scalar 4665 /* Gimplify a comparison between two aggregate objects of integral scalar
4620 location_t loc = EXPR_LOCATION (*expr_p); 4809 location_t loc = EXPR_LOCATION (*expr_p);
4621 4810
4622 switch (TREE_CODE (op0)) 4811 switch (TREE_CODE (op0))
4623 { 4812 {
4624 case INDIRECT_REF: 4813 case INDIRECT_REF:
4625 case MISALIGNED_INDIRECT_REF:
4626 do_indirect_ref: 4814 do_indirect_ref:
4627 /* Check if we are dealing with an expression of the form '&*ptr'. 4815 /* Check if we are dealing with an expression of the form '&*ptr'.
4628 While the front end folds away '&*ptr' into 'ptr', these 4816 While the front end folds away '&*ptr' into 'ptr', these
4629 expressions may be generated internally by the compiler (e.g., 4817 expressions may be generated internally by the compiler (e.g.,
4630 builtins like __builtin_va_end). */ 4818 builtins like __builtin_va_end). */
4889 } 5077 }
4890 } 5078 }
4891 5079
4892 /* If the operand is a memory input, it should be an lvalue. */ 5080 /* If the operand is a memory input, it should be an lvalue. */
4893 if (!allows_reg && allows_mem) 5081 if (!allows_reg && allows_mem)
4894 { 5082 {
4895 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p, 5083 tree inputv = TREE_VALUE (link);
4896 is_gimple_lvalue, fb_lvalue | fb_mayfail); 5084 STRIP_NOPS (inputv);
4897 mark_addressable (TREE_VALUE (link)); 5085 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
4898 if (tret == GS_ERROR) 5086 || TREE_CODE (inputv) == PREINCREMENT_EXPR
4899 { 5087 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
4900 if (EXPR_HAS_LOCATION (TREE_VALUE (link))) 5088 || TREE_CODE (inputv) == POSTINCREMENT_EXPR)
4901 input_location = EXPR_LOCATION (TREE_VALUE (link)); 5089 TREE_VALUE (link) = error_mark_node;
4902 error ("memory input %d is not directly addressable", i); 5090 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4903 ret = tret; 5091 is_gimple_lvalue, fb_lvalue | fb_mayfail);
4904 } 5092 mark_addressable (TREE_VALUE (link));
4905 } 5093 if (tret == GS_ERROR)
5094 {
5095 if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
5096 input_location = EXPR_LOCATION (TREE_VALUE (link));
5097 error ("memory input %d is not directly addressable", i);
5098 ret = tret;
5099 }
5100 }
4906 else 5101 else
4907 { 5102 {
4908 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p, 5103 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4909 is_gimple_asm_val, fb_rvalue); 5104 is_gimple_asm_val, fb_rvalue);
4910 if (tret == GS_ERROR) 5105 if (tret == GS_ERROR)
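A sketch of what the added STRIP_NOPS/increment check guards against (illustrative C, not from the patch): a memory ("m") asm input has to be an addressable lvalue, and an increment expression is now replaced with error_mark_node so that the error path shown above handles it.

/* Illustrative sketch only: memory operands to asm must be lvalues.  */
void
asm_inputs (int *p)
{
  __asm__ volatile ("" : : "m" (*p));    /* OK: *p is addressable */
#if 0
  __asm__ volatile ("" : : "m" (p++));   /* rejected: an increment is not
                                            a directly addressable memory
                                            input */
#endif
}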
4979 { 5174 {
4980 if (gsi_one_before_end_p (iter)) 5175 if (gsi_one_before_end_p (iter))
4981 { 5176 {
4982 /* Note that gsi_insert_seq_before and gsi_remove do not 5177 /* Note that gsi_insert_seq_before and gsi_remove do not
4983 scan operands, unlike some other sequence mutators. */ 5178 scan operands, unlike some other sequence mutators. */
4984 gsi_insert_seq_before_without_update (&iter, 5179 if (!gimple_wce_cleanup_eh_only (wce))
4985 gimple_wce_cleanup (wce), 5180 gsi_insert_seq_before_without_update (&iter,
4986 GSI_SAME_STMT); 5181 gimple_wce_cleanup (wce),
4987 gsi_remove (&iter, true); 5182 GSI_SAME_STMT);
4988 break; 5183 gsi_remove (&iter, true);
4989 } 5184 break;
4990 else 5185 }
4991 { 5186 else
4992 gimple gtry; 5187 {
4993 gimple_seq seq; 5188 gimple gtry;
4994 enum gimple_try_flags kind; 5189 gimple_seq seq;
4995 5190 enum gimple_try_flags kind;
4996 if (gimple_wce_cleanup_eh_only (wce)) 5191
4997 kind = GIMPLE_TRY_CATCH; 5192 if (gimple_wce_cleanup_eh_only (wce))
4998 else 5193 kind = GIMPLE_TRY_CATCH;
4999 kind = GIMPLE_TRY_FINALLY; 5194 else
5000 seq = gsi_split_seq_after (iter); 5195 kind = GIMPLE_TRY_FINALLY;
5001 5196 seq = gsi_split_seq_after (iter);
5002 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind); 5197
5198 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
5003 /* Do not use gsi_replace here, as it may scan operands. 5199 /* Do not use gsi_replace here, as it may scan operands.
5004 We want to do a simple structural modification only. */ 5200 We want to do a simple structural modification only. */
5005 *gsi_stmt_ptr (&iter) = gtry; 5201 *gsi_stmt_ptr (&iter) = gtry;
5006 iter = gsi_start (seq); 5202 iter = gsi_start (seq);
5007 } 5203 }
5033 gimple wce; 5229 gimple wce;
5034 gimple_seq cleanup_stmts = NULL; 5230 gimple_seq cleanup_stmts = NULL;
5035 5231
5036 /* Errors can result in improperly nested cleanups, which results in 5232 /* Errors can result in improperly nested cleanups, which results in
5037 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */ 5233 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
5038 if (errorcount || sorrycount) 5234 if (seen_error ())
5039 return; 5235 return;
5040 5236
5041 if (gimple_conditional_context ()) 5237 if (gimple_conditional_context ())
5042 { 5238 {
5043 /* If we're in a conditional context, this is more complex. We only 5239 /* If we're in a conditional context, this is more complex. We only
5229 5425
5230 case RECORD_TYPE: 5426 case RECORD_TYPE:
5231 case UNION_TYPE: 5427 case UNION_TYPE:
5232 case QUAL_UNION_TYPE: 5428 case QUAL_UNION_TYPE:
5233 { 5429 {
5234 tree field; 5430 tree field;
5235 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field)) 5431 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5236 if (TREE_CODE (field) == FIELD_DECL) 5432 if (TREE_CODE (field) == FIELD_DECL)
5237 { 5433 {
5238 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field)); 5434 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5239 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field)); 5435 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5240 } 5436 }
5241 } 5437 }
5242 break; 5438 break;
5243 5439
5244 case POINTER_TYPE: 5440 case POINTER_TYPE:
5245 case REFERENCE_TYPE: 5441 case REFERENCE_TYPE:
5320 if (flags & GOVD_SHARED) 5516 if (flags & GOVD_SHARED)
5321 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE 5517 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5322 | (flags & (GOVD_SEEN | GOVD_EXPLICIT)); 5518 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5323 5519
5324 /* We're going to make use of the TYPE_SIZE_UNIT at least in the 5520 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
5325 alloca statement we generate for the variable, so make sure it 5521 alloca statement we generate for the variable, so make sure it
5326 is available. This isn't automatically needed for the SHARED 5522 is available. This isn't automatically needed for the SHARED
5327 case, since we won't be allocating local storage then. 5523 case, since we won't be allocating local storage then.
5328 For local variables TYPE_SIZE_UNIT might not be gimplified yet, 5524 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
5329 in this case omp_notice_variable will be called later 5525 in this case omp_notice_variable will be called later
5330 on when it is gimplified. */ 5526 on when it is gimplified. */
5331 else if (! (flags & GOVD_LOCAL)) 5527 else if (! (flags & GOVD_LOCAL)
5332 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true); 5528 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
5529 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5333 } 5530 }
5334 else if (lang_hooks.decls.omp_privatize_by_reference (decl)) 5531 else if (lang_hooks.decls.omp_privatize_by_reference (decl))
5335 { 5532 {
5336 gcc_assert ((flags & GOVD_LOCAL) == 0); 5533 gcc_assert ((flags & GOVD_LOCAL) == 0);
5337 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl)); 5534 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5345 omp_notice_variable (ctx, t, true); 5542 omp_notice_variable (ctx, t, true);
5346 } 5543 }
5347 } 5544 }
5348 5545
5349 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags); 5546 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5547 }
5548
5549 /* Notice a threadprivate variable DECL used in OpenMP context CTX.
5550 This just prints out diagnostics about threadprivate variable uses
5551 in untied tasks. If DECL2 is non-NULL, prevent this warning
5552 on that variable. */
5553
5554 static bool
5555 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
5556 tree decl2)
5557 {
5558 splay_tree_node n;
5559
5560 if (ctx->region_type != ORT_UNTIED_TASK)
5561 return false;
5562 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5563 if (n == NULL)
5564 {
5565 error ("threadprivate variable %qE used in untied task", DECL_NAME (decl));
5566 error_at (ctx->location, "enclosing task");
5567 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
5568 }
5569 if (decl2)
5570 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
5571 return false;
5350 } 5572 }
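The new diagnostic can be triggered by source along these lines (a sketch only, compiled with -fopenmp; the names are made up):

/* Illustrative sketch only: a threadprivate variable referenced inside
   an untied task draws the error issued by
   omp_notice_threadprivate_variable.  */
int counter;
#pragma omp threadprivate (counter)

void
work (void)
{
#pragma omp task untied
  {
    counter++;   /* error: threadprivate variable 'counter' used in
                    untied task */
  }
}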
5351 5573
5352 /* Record the fact that DECL was used within the OpenMP context CTX. 5574 /* Record the fact that DECL was used within the OpenMP context CTX.
5353 IN_CODE is true when real code uses DECL, and false when we should 5575 IN_CODE is true when real code uses DECL, and false when we should
5354 merely emit default(none) errors. Return true if DECL is going to 5576 merely emit default(none) errors. Return true if DECL is going to
5367 5589
5368 /* Threadprivate variables are predetermined. */ 5590 /* Threadprivate variables are predetermined. */
5369 if (is_global_var (decl)) 5591 if (is_global_var (decl))
5370 { 5592 {
5371 if (DECL_THREAD_LOCAL_P (decl)) 5593 if (DECL_THREAD_LOCAL_P (decl))
5372 return false; 5594 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
5373 5595
5374 if (DECL_HAS_VALUE_EXPR_P (decl)) 5596 if (DECL_HAS_VALUE_EXPR_P (decl))
5375 { 5597 {
5376 tree value = get_base_address (DECL_VALUE_EXPR (decl)); 5598 tree value = get_base_address (DECL_VALUE_EXPR (decl));
5377 5599
5378 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value)) 5600 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
5379 return false; 5601 return omp_notice_threadprivate_variable (ctx, decl, value);
5380 } 5602 }
5381 } 5603 }
5382 5604
5383 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl); 5605 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5384 if (n == NULL) 5606 if (n == NULL)
5385 { 5607 {
5396 kind = lang_hooks.decls.omp_predetermined_sharing (decl); 5618 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5397 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED) 5619 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5398 default_kind = kind; 5620 default_kind = kind;
5399 5621
5400 switch (default_kind) 5622 switch (default_kind)
5401 { 5623 {
5402 case OMP_CLAUSE_DEFAULT_NONE: 5624 case OMP_CLAUSE_DEFAULT_NONE:
5403 error ("%qE not specified in enclosing parallel", 5625 error ("%qE not specified in enclosing parallel",
5404 DECL_NAME (decl)); 5626 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5405 error_at (ctx->location, "enclosing parallel"); 5627 if ((ctx->region_type & ORT_TASK) != 0)
5406 /* FALLTHRU */ 5628 error_at (ctx->location, "enclosing task");
5407 case OMP_CLAUSE_DEFAULT_SHARED: 5629 else
5408 flags |= GOVD_SHARED; 5630 error_at (ctx->location, "enclosing parallel");
5409 break; 5631 /* FALLTHRU */
5410 case OMP_CLAUSE_DEFAULT_PRIVATE: 5632 case OMP_CLAUSE_DEFAULT_SHARED:
5411 flags |= GOVD_PRIVATE; 5633 flags |= GOVD_SHARED;
5412 break; 5634 break;
5413 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE: 5635 case OMP_CLAUSE_DEFAULT_PRIVATE:
5414 flags |= GOVD_FIRSTPRIVATE; 5636 flags |= GOVD_PRIVATE;
5415 break; 5637 break;
5416 case OMP_CLAUSE_DEFAULT_UNSPECIFIED: 5638 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
5417 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */ 5639 flags |= GOVD_FIRSTPRIVATE;
5418 gcc_assert (ctx->region_type == ORT_TASK); 5640 break;
5419 if (ctx->outer_context) 5641 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
5420 omp_notice_variable (ctx->outer_context, decl, in_code); 5642 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
5421 for (octx = ctx->outer_context; octx; octx = octx->outer_context) 5643 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
5422 { 5644 if (ctx->outer_context)
5423 splay_tree_node n2; 5645 omp_notice_variable (ctx->outer_context, decl, in_code);
5424 5646 for (octx = ctx->outer_context; octx; octx = octx->outer_context)
5425 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl); 5647 {
5426 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED) 5648 splay_tree_node n2;
5427 { 5649
5428 flags |= GOVD_FIRSTPRIVATE; 5650 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
5429 break; 5651 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
5430 } 5652 {
5431 if ((octx->region_type & ORT_PARALLEL) != 0) 5653 flags |= GOVD_FIRSTPRIVATE;
5432 break; 5654 break;
5433 } 5655 }
5434 if (flags & GOVD_FIRSTPRIVATE) 5656 if ((octx->region_type & ORT_PARALLEL) != 0)
5435 break; 5657 break;
5436 if (octx == NULL 5658 }
5437 && (TREE_CODE (decl) == PARM_DECL 5659 if (flags & GOVD_FIRSTPRIVATE)
5438 || (!is_global_var (decl) 5660 break;
5439 && DECL_CONTEXT (decl) == current_function_decl))) 5661 if (octx == NULL
5440 { 5662 && (TREE_CODE (decl) == PARM_DECL
5441 flags |= GOVD_FIRSTPRIVATE; 5663 || (!is_global_var (decl)
5442 break; 5664 && DECL_CONTEXT (decl) == current_function_decl)))
5443 } 5665 {
5444 flags |= GOVD_SHARED; 5666 flags |= GOVD_FIRSTPRIVATE;
5445 break; 5667 break;
5446 default: 5668 }
5447 gcc_unreachable (); 5669 flags |= GOVD_SHARED;
5448 } 5670 break;
5671 default:
5672 gcc_unreachable ();
5673 }
5449 5674
5450 if ((flags & GOVD_PRIVATE) 5675 if ((flags & GOVD_PRIVATE)
5451 && lang_hooks.decls.omp_private_outer_ref (decl)) 5676 && lang_hooks.decls.omp_private_outer_ref (decl))
5452 flags |= GOVD_PRIVATE_OUTER_REF; 5677 flags |= GOVD_PRIVATE_OUTER_REF;
5453 5678
5916 tree expr = *expr_p; 6141 tree expr = *expr_p;
5917 gimple g; 6142 gimple g;
5918 gimple_seq body = NULL; 6143 gimple_seq body = NULL;
5919 struct gimplify_ctx gctx; 6144 struct gimplify_ctx gctx;
5920 6145
5921 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p, ORT_TASK); 6146 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
6147 find_omp_clause (OMP_TASK_CLAUSES (expr),
6148 OMP_CLAUSE_UNTIED)
6149 ? ORT_UNTIED_TASK : ORT_TASK);
5922 6150
5923 push_gimplify_context (&gctx); 6151 push_gimplify_context (&gctx);
5924 6152
5925 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body); 6153 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
5926 if (gimple_code (g) == GIMPLE_BIND) 6154 if (gimple_code (g) == GIMPLE_BIND)
6202 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr, 6430 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
6203 lhs_var); 6431 lhs_var);
6204 break; 6432 break;
6205 case tcc_expression: 6433 case tcc_expression:
6206 switch (TREE_CODE (expr)) 6434 switch (TREE_CODE (expr))
6207 { 6435 {
6208 case TRUTH_ANDIF_EXPR: 6436 case TRUTH_ANDIF_EXPR:
6209 case TRUTH_ORIF_EXPR: 6437 case TRUTH_ORIF_EXPR:
6210 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, 6438 case TRUTH_AND_EXPR:
6211 lhs_addr, lhs_var); 6439 case TRUTH_OR_EXPR:
6212 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, 6440 case TRUTH_XOR_EXPR:
6213 lhs_addr, lhs_var); 6441 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
6442 lhs_addr, lhs_var);
6443 case TRUTH_NOT_EXPR:
6444 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
6445 lhs_addr, lhs_var);
6446 break;
6447 default:
6448 break;
6449 }
6214 break; 6450 break;
6215 default: 6451 default:
6216 break; 6452 break;
6217 } 6453 }
6218 break; 6454 break;
6240 tree addr = TREE_OPERAND (*expr_p, 0); 6476 tree addr = TREE_OPERAND (*expr_p, 0);
6241 tree rhs = TREE_OPERAND (*expr_p, 1); 6477 tree rhs = TREE_OPERAND (*expr_p, 1);
6242 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr))); 6478 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
6243 tree tmp_load; 6479 tree tmp_load;
6244 6480
6245 tmp_load = create_tmp_var (type, NULL); 6481 tmp_load = create_tmp_reg (type, NULL);
6246 if (TREE_CODE (type) == COMPLEX_TYPE || TREE_CODE (type) == VECTOR_TYPE)
6247 DECL_GIMPLE_REG_P (tmp_load) = 1;
6248 if (goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0) 6482 if (goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
6249 return GS_ERROR; 6483 return GS_ERROR;
6250 6484
6251 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue) 6485 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
6252 != GS_ALL_DONE) 6486 != GS_ALL_DONE)
6323 returning to the caller, the sequence POST_P is appended to 6557 returning to the caller, the sequence POST_P is appended to
6324 the main output sequence PRE_P. 6558 the main output sequence PRE_P.
6325 6559
6326 GIMPLE_TEST_F points to a function that takes a tree T and 6560 GIMPLE_TEST_F points to a function that takes a tree T and
6327 returns nonzero if T is in the GIMPLE form requested by the 6561 returns nonzero if T is in the GIMPLE form requested by the
6328 caller. The GIMPLE predicates are in tree-gimple.c. 6562 caller. The GIMPLE predicates are in gimple.c.
6329 6563
6330 FALLBACK tells the function what sort of a temporary we want if 6564 FALLBACK tells the function what sort of a temporary we want if
6331 gimplification cannot produce an expression that complies with 6565 gimplification cannot produce an expression that complies with
6332 GIMPLE_TEST_F. 6566 GIMPLE_TEST_F.
6333 6567
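/* Illustrative C source, not part of gimplify.c, showing what the PRE_P and
   POST_P queues described above carry: for the return expression below the
   load of the element is emitted on PRE_P, *EXPR_P is reduced to a simple
   value, and the deferred side effect of the post-increment is queued on
   POST_P, which is appended to PRE_P before gimplify_expr returns. */
int next_element (int *p, int *i)
{
  return p[(*i)++];
}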
6372 || gimple_test_f == is_gimple_condexpr 6606 || gimple_test_f == is_gimple_condexpr
6373 || gimple_test_f == is_gimple_mem_rhs 6607 || gimple_test_f == is_gimple_mem_rhs
6374 || gimple_test_f == is_gimple_mem_rhs_or_call 6608 || gimple_test_f == is_gimple_mem_rhs_or_call
6375 || gimple_test_f == is_gimple_reg_rhs 6609 || gimple_test_f == is_gimple_reg_rhs
6376 || gimple_test_f == is_gimple_reg_rhs_or_call 6610 || gimple_test_f == is_gimple_reg_rhs_or_call
6377 || gimple_test_f == is_gimple_asm_val) 6611 || gimple_test_f == is_gimple_asm_val
6612 || gimple_test_f == is_gimple_mem_ref_addr)
6378 gcc_assert (fallback & fb_rvalue); 6613 gcc_assert (fallback & fb_rvalue);
6379 else if (gimple_test_f == is_gimple_min_lval 6614 else if (gimple_test_f == is_gimple_min_lval
6380 || gimple_test_f == is_gimple_lvalue) 6615 || gimple_test_f == is_gimple_lvalue)
6381 gcc_assert (fallback & fb_lvalue); 6616 gcc_assert (fallback & fb_lvalue);
6382 else if (gimple_test_f == is_gimple_addressable) 6617 else if (gimple_test_f == is_gimple_addressable)
6448 continue; 6683 continue;
6449 } 6684 }
6450 else if (ret != GS_UNHANDLED) 6685 else if (ret != GS_UNHANDLED)
6451 break; 6686 break;
6452 6687
6453 ret = GS_OK; 6688 /* Make sure that all the cases set 'ret' appropriately. */
6689 ret = GS_UNHANDLED;
6454 switch (TREE_CODE (*expr_p)) 6690 switch (TREE_CODE (*expr_p))
6455 { 6691 {
6456 /* First deal with the special cases. */ 6692 /* First deal with the special cases. */
6457 6693
6458 case POSTINCREMENT_EXPR: 6694 case POSTINCREMENT_EXPR:
6459 case POSTDECREMENT_EXPR: 6695 case POSTDECREMENT_EXPR:
6460 case PREINCREMENT_EXPR: 6696 case PREINCREMENT_EXPR:
6461 case PREDECREMENT_EXPR: 6697 case PREDECREMENT_EXPR:
6462 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p, 6698 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
6463 fallback != fb_none); 6699 fallback != fb_none);
6464 break; 6700 break;
6465 6701
6466 case ARRAY_REF: 6702 case ARRAY_REF:
6467 case ARRAY_RANGE_REF: 6703 case ARRAY_RANGE_REF:
6468 case REALPART_EXPR: 6704 case REALPART_EXPR:
6469 case IMAGPART_EXPR: 6705 case IMAGPART_EXPR:
6470 case COMPONENT_REF: 6706 case COMPONENT_REF:
6471 case VIEW_CONVERT_EXPR: 6707 case VIEW_CONVERT_EXPR:
6472 ret = gimplify_compound_lval (expr_p, pre_p, post_p, 6708 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
6473 fallback ? fallback : fb_rvalue); 6709 fallback ? fallback : fb_rvalue);
6474 break; 6710 break;
6475 6711
6476 case COND_EXPR: 6712 case COND_EXPR:
6477 ret = gimplify_cond_expr (expr_p, pre_p, fallback); 6713 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
6478 6714
6479 /* C99 code may assign to an array in a structure value of a 6715 /* C99 code may assign to an array in a structure value of a
6480 conditional expression, and this has undefined behavior 6716 conditional expression, and this has undefined behavior
6481 only on execution, so create a temporary if an lvalue is 6717 only on execution, so create a temporary if an lvalue is
6482 required. */ 6718 required. */
6483 if (fallback == fb_lvalue) 6719 if (fallback == fb_lvalue)
6484 { 6720 {
6485 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p); 6721 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6486 mark_addressable (*expr_p); 6722 mark_addressable (*expr_p);
6487 } 6723 ret = GS_OK;
6488 break; 6724 }
6489 6725 break;
6490 case CALL_EXPR: 6726
6491 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none); 6727 case CALL_EXPR:
6492 6728 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
6493 /* C99 code may assign to an array in a structure returned 6729
6494 from a function, and this has undefined behavior only on 6730 /* C99 code may assign to an array in a structure returned
6495 execution, so create a temporary if an lvalue is 6731 from a function, and this has undefined behavior only on
6496 required. */ 6732 execution, so create a temporary if an lvalue is
6497 if (fallback == fb_lvalue) 6733 required. */
6498 { 6734 if (fallback == fb_lvalue)
6499 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p); 6735 {
6500 mark_addressable (*expr_p); 6736 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6501 } 6737 mark_addressable (*expr_p);
6502 break; 6738 ret = GS_OK;
6503 6739 }
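/* Illustrative C99 source, not part of gimplify.c, for the two comments
   above: assigning into an array member of a structure rvalue -- whether it
   comes from a conditional expression or from a function return value -- is
   accepted, has undefined behavior only on execution, and makes the
   gimplifier materialize a temporary when an lvalue is required. */
struct vec { int a[4]; };
extern struct vec make_vec (void);

void poke (int c, struct vec x, struct vec y)
{
  (c ? x : y).a[0] = 1;   /* COND_EXPR case */
  make_vec ().a[1] = 2;   /* CALL_EXPR case */
}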
6504 case TREE_LIST: 6740 break;
6505 gcc_unreachable (); 6741
6506 6742 case TREE_LIST:
6507 case COMPOUND_EXPR: 6743 gcc_unreachable ();
6508 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none); 6744
6509 break; 6745 case COMPOUND_EXPR:
6510 6746 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
6511 case COMPOUND_LITERAL_EXPR: 6747 break;
6512 ret = gimplify_compound_literal_expr (expr_p, pre_p); 6748
6513 break; 6749 case COMPOUND_LITERAL_EXPR:
6514 6750 ret = gimplify_compound_literal_expr (expr_p, pre_p);
6515 case MODIFY_EXPR: 6751 break;
6516 case INIT_EXPR: 6752
6517 ret = gimplify_modify_expr (expr_p, pre_p, post_p, 6753 case MODIFY_EXPR:
6518 fallback != fb_none); 6754 case INIT_EXPR:
6519 break; 6755 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
6520 6756 fallback != fb_none);
6521 case TRUTH_ANDIF_EXPR: 6757 break;
6522 case TRUTH_ORIF_EXPR: 6758
6523 /* Pass the source location of the outer expression. */ 6759 case TRUTH_ANDIF_EXPR:
6524 ret = gimplify_boolean_expr (expr_p, saved_location); 6760 case TRUTH_ORIF_EXPR:
6525 break; 6761 /* Pass the source location of the outer expression. */
6526 6762 ret = gimplify_boolean_expr (expr_p, saved_location);
6527 case TRUTH_NOT_EXPR: 6763 break;
6528 if (TREE_CODE (TREE_TYPE (*expr_p)) != BOOLEAN_TYPE) 6764
6529 { 6765 case TRUTH_NOT_EXPR:
6530 tree type = TREE_TYPE (*expr_p); 6766 if (TREE_CODE (TREE_TYPE (*expr_p)) != BOOLEAN_TYPE)
6531 *expr_p = fold_convert (type, gimple_boolify (*expr_p)); 6767 {
6532 ret = GS_OK; 6768 tree type = TREE_TYPE (*expr_p);
6533 break; 6769 *expr_p = fold_convert (type, gimple_boolify (*expr_p));
6534 } 6770 ret = GS_OK;
6535 6771 break;
6536 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, 6772 }
6537 is_gimple_val, fb_rvalue); 6773
6538 recalculate_side_effects (*expr_p); 6774 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6539 break; 6775 is_gimple_val, fb_rvalue);
6540 6776 recalculate_side_effects (*expr_p);
6541 case ADDR_EXPR: 6777 break;
6542 ret = gimplify_addr_expr (expr_p, pre_p, post_p); 6778
6543 break; 6779 case ADDR_EXPR:
6544 6780 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
6545 case VA_ARG_EXPR: 6781 break;
6546 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p); 6782
6547 break; 6783 case VA_ARG_EXPR:
6548 6784 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
6549 CASE_CONVERT: 6785 break;
6550 if (IS_EMPTY_STMT (*expr_p)) 6786
6551 { 6787 CASE_CONVERT:
6552 ret = GS_ALL_DONE; 6788 if (IS_EMPTY_STMT (*expr_p))
6553 break; 6789 {
6554 } 6790 ret = GS_ALL_DONE;
6555 6791 break;
6556 if (VOID_TYPE_P (TREE_TYPE (*expr_p)) 6792 }
6557 || fallback == fb_none) 6793
6558 { 6794 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
6559 /* Just strip a conversion to void (or in void context) and 6795 || fallback == fb_none)
6560 try again. */ 6796 {
6561 *expr_p = TREE_OPERAND (*expr_p, 0); 6797 /* Just strip a conversion to void (or in void context) and
6562 break; 6798 try again. */
6563 } 6799 *expr_p = TREE_OPERAND (*expr_p, 0);
6564 6800 ret = GS_OK;
6565 ret = gimplify_conversion (expr_p); 6801 break;
6566 if (ret == GS_ERROR) 6802 }
6567 break; 6803
6568 if (*expr_p != save_expr) 6804 ret = gimplify_conversion (expr_p);
6569 break; 6805 if (ret == GS_ERROR)
6570 /* FALLTHRU */ 6806 break;
6571 6807 if (*expr_p != save_expr)
6572 case FIX_TRUNC_EXPR: 6808 break;
6573 /* unary_expr: ... | '(' cast ')' val | ... */ 6809 /* FALLTHRU */
6574 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, 6810
6575 is_gimple_val, fb_rvalue); 6811 case FIX_TRUNC_EXPR:
6576 recalculate_side_effects (*expr_p); 6812 /* unary_expr: ... | '(' cast ')' val | ... */
6577 break; 6813 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6578 6814 is_gimple_val, fb_rvalue);
6579 case INDIRECT_REF: 6815 recalculate_side_effects (*expr_p);
6580 *expr_p = fold_indirect_ref_loc (input_location, *expr_p); 6816 break;
6581 if (*expr_p != save_expr) 6817
6582 break; 6818 case INDIRECT_REF:
6583 /* else fall through. */ 6819 {
6584 case ALIGN_INDIRECT_REF: 6820 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
6585 case MISALIGNED_INDIRECT_REF: 6821 bool notrap = TREE_THIS_NOTRAP (*expr_p);
6586 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, 6822 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
6587 is_gimple_reg, fb_rvalue); 6823
6588 recalculate_side_effects (*expr_p); 6824 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
6589 break; 6825 if (*expr_p != save_expr)
6590 6826 {
6591 /* Constants need not be gimplified. */ 6827 ret = GS_OK;
6592 case INTEGER_CST: 6828 break;
6593 case REAL_CST: 6829 }
6594 case FIXED_CST: 6830
6595 case STRING_CST: 6831 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6596 case COMPLEX_CST: 6832 is_gimple_reg, fb_rvalue);
6597 case VECTOR_CST: 6833 if (ret == GS_ERROR)
6598 ret = GS_ALL_DONE; 6834 break;
6599 break; 6835
6600 6836 recalculate_side_effects (*expr_p);
6601 case CONST_DECL: 6837 *expr_p = fold_build2_loc (input_location, MEM_REF,
6602 /* If we require an lvalue, such as for ADDR_EXPR, retain the 6838 TREE_TYPE (*expr_p),
6603 CONST_DECL node. Otherwise the decl is replaceable by its 6839 TREE_OPERAND (*expr_p, 0),
6604 value. */ 6840 build_int_cst (saved_ptr_type, 0));
6605 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */ 6841 TREE_THIS_VOLATILE (*expr_p) = volatilep;
6606 if (fallback & fb_lvalue) 6842 TREE_THIS_NOTRAP (*expr_p) = notrap;
6607 ret = GS_ALL_DONE; 6843 ret = GS_OK;
6608 else 6844 break;
6609 *expr_p = DECL_INITIAL (*expr_p); 6845 }
6610 break; 6846
6611 6847 /* We arrive here through the various re-gimplification paths. */
6612 case DECL_EXPR: 6848 case MEM_REF:
6613 ret = gimplify_decl_expr (expr_p, pre_p); 6849 /* First try re-folding the whole thing. */
6614 break; 6850 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
6615 6851 TREE_OPERAND (*expr_p, 0),
6616 case BIND_EXPR: 6852 TREE_OPERAND (*expr_p, 1));
6617 ret = gimplify_bind_expr (expr_p, pre_p); 6853 if (tmp)
6618 break; 6854 {
6619 6855 *expr_p = tmp;
6620 case LOOP_EXPR: 6856 recalculate_side_effects (*expr_p);
6621 ret = gimplify_loop_expr (expr_p, pre_p); 6857 ret = GS_OK;
6622 break; 6858 break;
6623 6859 }
6624 case SWITCH_EXPR: 6860 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6625 ret = gimplify_switch_expr (expr_p, pre_p); 6861 is_gimple_mem_ref_addr, fb_rvalue);
6626 break; 6862 if (ret == GS_ERROR)
6627 6863 break;
6628 case EXIT_EXPR: 6864 recalculate_side_effects (*expr_p);
6629 ret = gimplify_exit_expr (expr_p); 6865 ret = GS_ALL_DONE;
6630 break; 6866 break;
6631 6867
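/* Illustrative source, not part of gimplify.c, for the INDIRECT_REF handling
   above: a dereference that cannot be folded away is now rebuilt as a
   MEM_REF of the gimplified address (conceptually MEM_REF <p, 0>), with the
   volatile and no-trap flags carried over. */
int load_value (int *p)
{
  return *p;
}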
6632 case GOTO_EXPR: 6868 /* Constants need not be gimplified. */
6633 /* If the target is not LABEL, then it is a computed jump 6869 case INTEGER_CST:
6634 and the target needs to be gimplified. */ 6870 case REAL_CST:
6635 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL) 6871 case FIXED_CST:
6636 { 6872 case STRING_CST:
6637 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p, 6873 case COMPLEX_CST:
6638 NULL, is_gimple_val, fb_rvalue); 6874 case VECTOR_CST:
6639 if (ret == GS_ERROR) 6875 ret = GS_ALL_DONE;
6640 break; 6876 break;
6641 } 6877
6642 gimplify_seq_add_stmt (pre_p, 6878 case CONST_DECL:
6643 gimple_build_goto (GOTO_DESTINATION (*expr_p))); 6879 /* If we require an lvalue, such as for ADDR_EXPR, retain the
6644 break; 6880 CONST_DECL node. Otherwise the decl is replaceable by its
6645 6881 value. */
6646 case PREDICT_EXPR: 6882 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
6647 gimplify_seq_add_stmt (pre_p, 6883 if (fallback & fb_lvalue)
6648 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p), 6884 ret = GS_ALL_DONE;
6649 PREDICT_EXPR_OUTCOME (*expr_p))); 6885 else
6650 ret = GS_ALL_DONE; 6886 {
6651 break; 6887 *expr_p = DECL_INITIAL (*expr_p);
6652 6888 ret = GS_OK;
6653 case LABEL_EXPR: 6889 }
6654 ret = GS_ALL_DONE; 6890 break;
6655 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p)) 6891
6656 == current_function_decl); 6892 case DECL_EXPR:
6657 gimplify_seq_add_stmt (pre_p, 6893 ret = gimplify_decl_expr (expr_p, pre_p);
6658 gimple_build_label (LABEL_EXPR_LABEL (*expr_p))); 6894 break;
6659 break; 6895
6660 6896 case BIND_EXPR:
6661 case CASE_LABEL_EXPR: 6897 ret = gimplify_bind_expr (expr_p, pre_p);
6662 ret = gimplify_case_label_expr (expr_p, pre_p); 6898 break;
6663 break; 6899
6664 6900 case LOOP_EXPR:
6665 case RETURN_EXPR: 6901 ret = gimplify_loop_expr (expr_p, pre_p);
6666 ret = gimplify_return_expr (*expr_p, pre_p); 6902 break;
6667 break; 6903
6668 6904 case SWITCH_EXPR:
6669 case CONSTRUCTOR: 6905 ret = gimplify_switch_expr (expr_p, pre_p);
6670 /* Don't reduce this in place; let gimplify_init_constructor work its 6906 break;
6671 magic. But if we're just elaborating this for side effects, just 6907
6672 gimplify any element that has side-effects. */ 6908 case EXIT_EXPR:
6673 if (fallback == fb_none) 6909 ret = gimplify_exit_expr (expr_p);
6674 { 6910 break;
6675 unsigned HOST_WIDE_INT ix; 6911
6676 constructor_elt *ce; 6912 case GOTO_EXPR:
6677 tree temp = NULL_TREE; 6913 /* If the target is not LABEL, then it is a computed jump
6678 for (ix = 0; 6914 and the target needs to be gimplified. */
6679 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (*expr_p), 6915 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
6680 ix, ce); 6916 {
6681 ix++) 6917 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
6682 if (TREE_SIDE_EFFECTS (ce->value)) 6918 NULL, is_gimple_val, fb_rvalue);
6683 append_to_statement_list (ce->value, &temp); 6919 if (ret == GS_ERROR)
6684 6920 break;
6685 *expr_p = temp; 6921 }
6686 ret = GS_OK; 6922 gimplify_seq_add_stmt (pre_p,
6687 } 6923 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
6688 /* C99 code may assign to an array in a constructed 6924 ret = GS_ALL_DONE;
6689 structure or union, and this has undefined behavior only 6925 break;
6690 on execution, so create a temporary if an lvalue is 6926
6691 required. */ 6927 case PREDICT_EXPR:
6692 else if (fallback == fb_lvalue) 6928 gimplify_seq_add_stmt (pre_p,
6693 { 6929 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
6694 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p); 6930 PREDICT_EXPR_OUTCOME (*expr_p)));
6695 mark_addressable (*expr_p); 6931 ret = GS_ALL_DONE;
6696 } 6932 break;
6697 else 6933
6698 ret = GS_ALL_DONE; 6934 case LABEL_EXPR:
6699 break; 6935 ret = GS_ALL_DONE;
6700 6936 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
6701 /* The following are special cases that are not handled by the 6937 == current_function_decl);
6702 original GIMPLE grammar. */ 6938 gimplify_seq_add_stmt (pre_p,
6703 6939 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
6704 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and 6940 break;
6705 eliminated. */ 6941
6706 case SAVE_EXPR: 6942 case CASE_LABEL_EXPR:
6707 ret = gimplify_save_expr (expr_p, pre_p, post_p); 6943 ret = gimplify_case_label_expr (expr_p, pre_p);
6708 break; 6944 break;
6709 6945
6710 case BIT_FIELD_REF: 6946 case RETURN_EXPR:
6711 { 6947 ret = gimplify_return_expr (*expr_p, pre_p);
6712 enum gimplify_status r0, r1, r2; 6948 break;
6713 6949
6714 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, 6950 case CONSTRUCTOR:
6715 post_p, is_gimple_lvalue, fb_either); 6951 /* Don't reduce this in place; let gimplify_init_constructor work its
6716 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, 6952 magic. But if we're just elaborating this for side effects, just
6717 post_p, is_gimple_val, fb_rvalue); 6953 gimplify any element that has side-effects. */
6718 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p, 6954 if (fallback == fb_none)
6719 post_p, is_gimple_val, fb_rvalue); 6955 {
6720 recalculate_side_effects (*expr_p); 6956 unsigned HOST_WIDE_INT ix;
6721 6957 tree val;
6722 ret = MIN (r0, MIN (r1, r2)); 6958 tree temp = NULL_TREE;
6723 } 6959 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
6724 break; 6960 if (TREE_SIDE_EFFECTS (val))
6725 6961 append_to_statement_list (val, &temp);
6726 case TARGET_MEM_REF: 6962
6727 { 6963 *expr_p = temp;
6728 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE; 6964 ret = temp ? GS_OK : GS_ALL_DONE;
6729 6965 }
6730 if (TMR_SYMBOL (*expr_p)) 6966 /* C99 code may assign to an array in a constructed
6731 r0 = gimplify_expr (&TMR_SYMBOL (*expr_p), pre_p, 6967 structure or union, and this has undefined behavior only
6732 post_p, is_gimple_lvalue, fb_either); 6968 on execution, so create a temporary if an lvalue is
6733 else if (TMR_BASE (*expr_p)) 6969 required. */
6734 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p, 6970 else if (fallback == fb_lvalue)
6735 post_p, is_gimple_val, fb_either); 6971 {
6736 if (TMR_INDEX (*expr_p)) 6972 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6737 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p, 6973 mark_addressable (*expr_p);
6738 post_p, is_gimple_val, fb_rvalue); 6974 ret = GS_OK;
6739 /* TMR_STEP and TMR_OFFSET are always integer constants. */ 6975 }
6740 ret = MIN (r0, r1); 6976 else
6741 } 6977 ret = GS_ALL_DONE;
6742 break; 6978 break;
6743 6979
6744 case NON_LVALUE_EXPR: 6980 /* The following are special cases that are not handled by the
6745 /* This should have been stripped above. */ 6981 original GIMPLE grammar. */
6746 gcc_unreachable (); 6982
6747 6983 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
6748 case ASM_EXPR: 6984 eliminated. */
6749 ret = gimplify_asm_expr (expr_p, pre_p, post_p); 6985 case SAVE_EXPR:
6750 break; 6986 ret = gimplify_save_expr (expr_p, pre_p, post_p);
6751 6987 break;
6752 case TRY_FINALLY_EXPR: 6988
6753 case TRY_CATCH_EXPR: 6989 case BIT_FIELD_REF:
6754 { 6990 {
6755 gimple_seq eval, cleanup; 6991 enum gimplify_status r0, r1, r2;
6756 gimple try_; 6992
6757 6993 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6758 eval = cleanup = NULL; 6994 post_p, is_gimple_lvalue, fb_either);
6759 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval); 6995 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
6760 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup); 6996 post_p, is_gimple_val, fb_rvalue);
6761 /* Don't create bogus GIMPLE_TRY with empty cleanup. */ 6997 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
6762 if (gimple_seq_empty_p (cleanup)) 6998 post_p, is_gimple_val, fb_rvalue);
6763 { 6999 recalculate_side_effects (*expr_p);
6764 gimple_seq_add_seq (pre_p, eval); 7000
6765 ret = GS_ALL_DONE; 7001 ret = MIN (r0, MIN (r1, r2));
6766 break; 7002 }
6767 } 7003 break;
6768 try_ = gimple_build_try (eval, cleanup, 7004
6769 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR 7005 case TARGET_MEM_REF:
6770 ? GIMPLE_TRY_FINALLY 7006 {
6771 : GIMPLE_TRY_CATCH); 7007 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
6772 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR) 7008
6773 gimple_try_set_catch_is_cleanup (try_, 7009 if (TMR_BASE (*expr_p))
6774 TRY_CATCH_IS_CLEANUP (*expr_p)); 7010 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
6775 gimplify_seq_add_stmt (pre_p, try_); 7011 post_p, is_gimple_mem_ref_addr, fb_either);
6776 ret = GS_ALL_DONE; 7012 if (TMR_INDEX (*expr_p))
6777 break; 7013 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
6778 } 7014 post_p, is_gimple_val, fb_rvalue);
6779 7015 if (TMR_INDEX2 (*expr_p))
6780 case CLEANUP_POINT_EXPR: 7016 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
6781 ret = gimplify_cleanup_point_expr (expr_p, pre_p); 7017 post_p, is_gimple_val, fb_rvalue);
6782 break; 7018 /* TMR_STEP and TMR_OFFSET are always integer constants. */
6783 7019 ret = MIN (r0, r1);
6784 case TARGET_EXPR: 7020 }
6785 ret = gimplify_target_expr (expr_p, pre_p, post_p); 7021 break;
6786 break; 7022
6787 7023 case NON_LVALUE_EXPR:
6788 case CATCH_EXPR: 7024 /* This should have been stripped above. */
6789 { 7025 gcc_unreachable ();
6790 gimple c; 7026
6791 gimple_seq handler = NULL; 7027 case ASM_EXPR:
6792 gimplify_and_add (CATCH_BODY (*expr_p), &handler); 7028 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
6793 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler); 7029 break;
6794 gimplify_seq_add_stmt (pre_p, c); 7030
6795 ret = GS_ALL_DONE; 7031 case TRY_FINALLY_EXPR:
6796 break; 7032 case TRY_CATCH_EXPR:
6797 } 7033 {
6798 7034 gimple_seq eval, cleanup;
6799 case EH_FILTER_EXPR: 7035 gimple try_;
6800 { 7036
6801 gimple ehf; 7037 eval = cleanup = NULL;
6802 gimple_seq failure = NULL; 7038 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
6803 7039 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
6804 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure); 7040 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
6805 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure); 7041 if (gimple_seq_empty_p (cleanup))
6806 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p)); 7042 {
6807 gimplify_seq_add_stmt (pre_p, ehf); 7043 gimple_seq_add_seq (pre_p, eval);
6808 ret = GS_ALL_DONE; 7044 ret = GS_ALL_DONE;
6809 break; 7045 break;
6810 } 7046 }
6811 7047 try_ = gimple_build_try (eval, cleanup,
6812 case OBJ_TYPE_REF: 7048 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
6813 { 7049 ? GIMPLE_TRY_FINALLY
6814 enum gimplify_status r0, r1; 7050 : GIMPLE_TRY_CATCH);
6815 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p, 7051 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
6816 post_p, is_gimple_val, fb_rvalue); 7052 gimple_try_set_catch_is_cleanup (try_,
6817 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p, 7053 TRY_CATCH_IS_CLEANUP (*expr_p));
6818 post_p, is_gimple_val, fb_rvalue); 7054 gimplify_seq_add_stmt (pre_p, try_);
6819 TREE_SIDE_EFFECTS (*expr_p) = 0; 7055 ret = GS_ALL_DONE;
6820 ret = MIN (r0, r1); 7056 break;
6821 } 7057 }
6822 break; 7058
6823 7059 case CLEANUP_POINT_EXPR:
6824 case LABEL_DECL: 7060 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
6825 /* We get here when taking the address of a label. We mark 7061 break;
6826 the label as "forced"; meaning it can never be removed and 7062
6827 it is a potential target for any computed goto. */ 7063 case TARGET_EXPR:
6828 FORCED_LABEL (*expr_p) = 1; 7064 ret = gimplify_target_expr (expr_p, pre_p, post_p);
6829 ret = GS_ALL_DONE; 7065 break;
6830 break; 7066
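/* Illustrative GNU C source, not part of gimplify.c, for the GOTO_EXPR and
   LABEL_DECL cases: taking the address of a label marks it FORCED_LABEL, and
   a goto whose destination is not a LABEL_DECL is a computed jump whose
   target expression must itself be gimplified. */
void dispatch (int i)
{
  static void *table[] = { &&handle0, &&handle1 };  /* label addresses */
  goto *table[i & 1];                               /* computed goto */
 handle0:
  return;
 handle1:
  return;
}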
6831 7067 case CATCH_EXPR:
6832 case STATEMENT_LIST: 7068 {
6833 ret = gimplify_statement_list (expr_p, pre_p); 7069 gimple c;
6834 break; 7070 gimple_seq handler = NULL;
6835 7071 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
6836 case WITH_SIZE_EXPR: 7072 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
6837 { 7073 gimplify_seq_add_stmt (pre_p, c);
6838 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, 7074 ret = GS_ALL_DONE;
6839 post_p == &internal_post ? NULL : post_p, 7075 break;
6840 gimple_test_f, fallback); 7076 }
6841 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p, 7077
6842 is_gimple_val, fb_rvalue); 7078 case EH_FILTER_EXPR:
6843 } 7079 {
6844 break; 7080 gimple ehf;
6845 7081 gimple_seq failure = NULL;
6846 case VAR_DECL: 7082
6847 case PARM_DECL: 7083 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
6848 ret = gimplify_var_or_parm_decl (expr_p); 7084 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
6849 break; 7085 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
6850 7086 gimplify_seq_add_stmt (pre_p, ehf);
6851 case RESULT_DECL: 7087 ret = GS_ALL_DONE;
6852 /* When within an OpenMP context, notice uses of variables. */ 7088 break;
6853 if (gimplify_omp_ctxp) 7089 }
6854 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true); 7090
6855 ret = GS_ALL_DONE; 7091 case OBJ_TYPE_REF:
6856 break; 7092 {
6857 7093 enum gimplify_status r0, r1;
6858 case SSA_NAME: 7094 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
6859 /* Allow callbacks into the gimplifier during optimization. */ 7095 post_p, is_gimple_val, fb_rvalue);
6860 ret = GS_ALL_DONE; 7096 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
6861 break; 7097 post_p, is_gimple_val, fb_rvalue);
6862 7098 TREE_SIDE_EFFECTS (*expr_p) = 0;
6863 case OMP_PARALLEL: 7099 ret = MIN (r0, r1);
6864 gimplify_omp_parallel (expr_p, pre_p); 7100 }
6865 ret = GS_ALL_DONE; 7101 break;
6866 break; 7102
6867 7103 case LABEL_DECL:
6868 case OMP_TASK: 7104 /* We get here when taking the address of a label. We mark
6869 gimplify_omp_task (expr_p, pre_p); 7105 the label as "forced"; meaning it can never be removed and
6870 ret = GS_ALL_DONE; 7106 it is a potential target for any computed goto. */
6871 break; 7107 FORCED_LABEL (*expr_p) = 1;
6872 7108 ret = GS_ALL_DONE;
6873 case OMP_FOR: 7109 break;
6874 ret = gimplify_omp_for (expr_p, pre_p); 7110
6875 break; 7111 case STATEMENT_LIST:
6876 7112 ret = gimplify_statement_list (expr_p, pre_p);
6877 case OMP_SECTIONS: 7113 break;
6878 case OMP_SINGLE: 7114
6879 gimplify_omp_workshare (expr_p, pre_p); 7115 case WITH_SIZE_EXPR:
6880 ret = GS_ALL_DONE; 7116 {
6881 break; 7117 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6882 7118 post_p == &internal_post ? NULL : post_p,
6883 case OMP_SECTION: 7119 gimple_test_f, fallback);
6884 case OMP_MASTER: 7120 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
6885 case OMP_ORDERED: 7121 is_gimple_val, fb_rvalue);
6886 case OMP_CRITICAL: 7122 ret = GS_ALL_DONE;
6887 { 7123 }
6888 gimple_seq body = NULL; 7124 break;
6889 gimple g; 7125
6890 7126 case VAR_DECL:
6891 gimplify_and_add (OMP_BODY (*expr_p), &body); 7127 case PARM_DECL:
6892 switch (TREE_CODE (*expr_p)) 7128 ret = gimplify_var_or_parm_decl (expr_p);
6893 { 7129 break;
6894 case OMP_SECTION: 7130
6895 g = gimple_build_omp_section (body); 7131 case RESULT_DECL:
6896 break; 7132 /* When within an OpenMP context, notice uses of variables. */
6897 case OMP_MASTER: 7133 if (gimplify_omp_ctxp)
6898 g = gimple_build_omp_master (body); 7134 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
6899 break; 7135 ret = GS_ALL_DONE;
6900 case OMP_ORDERED: 7136 break;
6901 g = gimple_build_omp_ordered (body); 7137
6902 break; 7138 case SSA_NAME:
6903 case OMP_CRITICAL: 7139 /* Allow callbacks into the gimplifier during optimization. */
6904 g = gimple_build_omp_critical (body, 7140 ret = GS_ALL_DONE;
6905 OMP_CRITICAL_NAME (*expr_p)); 7141 break;
6906 break; 7142
6907 default: 7143 case OMP_PARALLEL:
6908 gcc_unreachable (); 7144 gimplify_omp_parallel (expr_p, pre_p);
6909 } 7145 ret = GS_ALL_DONE;
6910 gimplify_seq_add_stmt (pre_p, g); 7146 break;
6911 ret = GS_ALL_DONE; 7147
6912 break; 7148 case OMP_TASK:
6913 } 7149 gimplify_omp_task (expr_p, pre_p);
6914 7150 ret = GS_ALL_DONE;
6915 case OMP_ATOMIC: 7151 break;
6916 ret = gimplify_omp_atomic (expr_p, pre_p); 7152
6917 break; 7153 case OMP_FOR:
6918 7154 ret = gimplify_omp_for (expr_p, pre_p);
6919 case POINTER_PLUS_EXPR: 7155 break;
7156
7157 case OMP_SECTIONS:
7158 case OMP_SINGLE:
7159 gimplify_omp_workshare (expr_p, pre_p);
7160 ret = GS_ALL_DONE;
7161 break;
7162
7163 case OMP_SECTION:
7164 case OMP_MASTER:
7165 case OMP_ORDERED:
7166 case OMP_CRITICAL:
7167 {
7168 gimple_seq body = NULL;
7169 gimple g;
7170
7171 gimplify_and_add (OMP_BODY (*expr_p), &body);
7172 switch (TREE_CODE (*expr_p))
7173 {
7174 case OMP_SECTION:
7175 g = gimple_build_omp_section (body);
7176 break;
7177 case OMP_MASTER:
7178 g = gimple_build_omp_master (body);
7179 break;
7180 case OMP_ORDERED:
7181 g = gimple_build_omp_ordered (body);
7182 break;
7183 case OMP_CRITICAL:
7184 g = gimple_build_omp_critical (body,
7185 OMP_CRITICAL_NAME (*expr_p));
7186 break;
7187 default:
7188 gcc_unreachable ();
7189 }
7190 gimplify_seq_add_stmt (pre_p, g);
7191 ret = GS_ALL_DONE;
7192 break;
7193 }
7194
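/* Illustrative OpenMP source, not part of gimplify.c, whose bodies reach the
   OMP_MASTER and OMP_CRITICAL arms above and are wrapped in the
   corresponding GIMPLE_OMP_* statements.  Assumes -fopenmp. */
void omp_regions (int *x)
{
  #pragma omp parallel shared(x)
  {
    #pragma omp master
    x[0] = 0;

    #pragma omp critical (bump)
    x[1] += 1;
  }
}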
7195 case OMP_ATOMIC:
7196 ret = gimplify_omp_atomic (expr_p, pre_p);
7197 break;
7198
7199 case TRUTH_AND_EXPR:
7200 case TRUTH_OR_EXPR:
7201 case TRUTH_XOR_EXPR:
7202 /* Classified as tcc_expression. */
7203 goto expr_2;
7204
7205 case FMA_EXPR:
7206 /* Classified as tcc_expression. */
7207 goto expr_3;
7208
7209 case POINTER_PLUS_EXPR:
6920 /* Convert ((type *)A)+offset into &A->field_of_type_and_offset. 7210 /* Convert ((type *)A)+offset into &A->field_of_type_and_offset.
6921 The second operand is a gimple immediate, saving the need for an extra statement. 7211 The second operand is a gimple immediate, saving the need for an extra statement.
6922 */ 7212 */
6923 if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST 7213 if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
6924 && (tmp = maybe_fold_offset_to_address 7214 && (tmp = maybe_fold_offset_to_address
6925 (EXPR_LOCATION (*expr_p), 7215 (EXPR_LOCATION (*expr_p),
6926 TREE_OPERAND (*expr_p, 0), TREE_OPERAND (*expr_p, 1), 7216 TREE_OPERAND (*expr_p, 0), TREE_OPERAND (*expr_p, 1),
6927 TREE_TYPE (*expr_p)))) 7217 TREE_TYPE (*expr_p))))
6928 { 7218 {
6929 *expr_p = tmp; 7219 *expr_p = tmp;
6930 break; 7220 ret = GS_OK;
6931 } 7221 break;
6932 /* Convert (void *)&a + 4 into (void *)&a[1]. */ 7222 }
6933 if (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == NOP_EXPR 7223 /* Convert (void *)&a + 4 into (void *)&a[1]. */
6934 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST 7224 if (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == NOP_EXPR
6935 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p, 7225 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
6936 0),0))) 7226 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p,
6937 && (tmp = maybe_fold_offset_to_address 7227 0),0)))
6938 (EXPR_LOCATION (*expr_p), 7228 && (tmp = maybe_fold_offset_to_address
6939 TREE_OPERAND (TREE_OPERAND (*expr_p, 0), 0), 7229 (EXPR_LOCATION (*expr_p),
6940 TREE_OPERAND (*expr_p, 1), 7230 TREE_OPERAND (TREE_OPERAND (*expr_p, 0), 0),
6941 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p, 0), 7231 TREE_OPERAND (*expr_p, 1),
6942 0))))) 7232 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p, 0),
6943 { 7233 0)))))
7234 {
6944 *expr_p = fold_convert (TREE_TYPE (*expr_p), tmp); 7235 *expr_p = fold_convert (TREE_TYPE (*expr_p), tmp);
6945 break; 7236 ret = GS_OK;
6946 } 7237 break;
7238 }
6947 /* FALLTHRU */ 7239 /* FALLTHRU */
6948 7240
6949 default: 7241 default:
6950 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p))) 7242 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
6951 { 7243 {
6952 case tcc_comparison: 7244 case tcc_comparison:
6953 /* Handle comparison of objects of non scalar mode aggregates 7245 /* Handle comparison of objects of non scalar mode aggregates
6954 with a call to memcmp. It would be nice to only have to do 7246 with a call to memcmp. It would be nice to only have to do
6955 this for variable-sized objects, but then we'd have to allow 7247 this for variable-sized objects, but then we'd have to allow
6956 the same nest of reference nodes we allow for MODIFY_EXPR and 7248 the same nest of reference nodes we allow for MODIFY_EXPR and
6957 that's too complex. 7249 that's too complex.
6958 7250
6959 Compare scalar mode aggregates as scalar mode values. Using 7251 Compare scalar mode aggregates as scalar mode values. Using
6960 memcmp for them would be very inefficient at best, and is 7252 memcmp for them would be very inefficient at best, and is
6961 plain wrong if bitfields are involved. */ 7253 plain wrong if bitfields are involved. */
6962 { 7254 {
6963 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1)); 7255 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
6964 7256
6965 if (!AGGREGATE_TYPE_P (type)) 7257 if (!AGGREGATE_TYPE_P (type))
6966 goto expr_2; 7258 goto expr_2;
6967 else if (TYPE_MODE (type) != BLKmode) 7259 else if (TYPE_MODE (type) != BLKmode)
6968 ret = gimplify_scalar_mode_aggregate_compare (expr_p); 7260 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
6969 else 7261 else
6970 ret = gimplify_variable_sized_compare (expr_p); 7262 ret = gimplify_variable_sized_compare (expr_p);
6971 7263
6972 break; 7264 break;
6973 } 7265 }
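/* Hand-written sketch, not the gimplifier's output, of the two lowerings
   chosen above: scalar-mode aggregates are compared as scalar values, while
   BLKmode aggregates are compared with a call to memcmp, roughly equivalent
   to the helper below. */
#include <string.h>

struct blob { char bytes[64]; };          /* a BLKmode-sized aggregate */

static int blob_eq (const struct blob *a, const struct blob *b)
{
  return memcmp (a, b, sizeof *a) == 0;   /* what an EQ_EXPR turns into */
}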
6974 7266
6975 /* If *EXPR_P does not need to be special-cased, handle it 7267 /* If *EXPR_P does not need to be special-cased, handle it
6976 according to its class. */ 7268 according to its class. */
6977 case tcc_unary: 7269 case tcc_unary:
6978 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, 7270 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6979 post_p, is_gimple_val, fb_rvalue); 7271 post_p, is_gimple_val, fb_rvalue);
6980 break; 7272 break;
6981 7273
6982 case tcc_binary: 7274 case tcc_binary:
6983 expr_2: 7275 expr_2:
6984 { 7276 {
6985 enum gimplify_status r0, r1; 7277 enum gimplify_status r0, r1;
6986 7278
6987 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, 7279 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6988 post_p, is_gimple_val, fb_rvalue); 7280 post_p, is_gimple_val, fb_rvalue);
6989 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, 7281 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
6990 post_p, is_gimple_val, fb_rvalue); 7282 post_p, is_gimple_val, fb_rvalue);
6991 7283
6992 ret = MIN (r0, r1); 7284 ret = MIN (r0, r1);
6993 break; 7285 break;
6994 } 7286 }
6995 7287
6996 case tcc_declaration: 7288 expr_3:
6997 case tcc_constant: 7289 {
6998 ret = GS_ALL_DONE; 7290 enum gimplify_status r0, r1, r2;
6999 goto dont_recalculate; 7291
7000 7292 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7001 default: 7293 post_p, is_gimple_val, fb_rvalue);
7002 gcc_assert (TREE_CODE (*expr_p) == TRUTH_AND_EXPR 7294 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7003 || TREE_CODE (*expr_p) == TRUTH_OR_EXPR 7295 post_p, is_gimple_val, fb_rvalue);
7004 || TREE_CODE (*expr_p) == TRUTH_XOR_EXPR); 7296 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
7005 goto expr_2; 7297 post_p, is_gimple_val, fb_rvalue);
7006 } 7298
7007 7299 ret = MIN (MIN (r0, r1), r2);
7008 recalculate_side_effects (*expr_p); 7300 break;
7009 7301 }
7010 dont_recalculate: 7302
7011 break; 7303 case tcc_declaration:
7012 } 7304 case tcc_constant:
7013 7305 ret = GS_ALL_DONE;
7014 /* If we replaced *expr_p, gimplify again. */ 7306 goto dont_recalculate;
7015 if (ret == GS_OK && (*expr_p == NULL || *expr_p == save_expr)) 7307
7016 ret = GS_ALL_DONE; 7308 default:
7309 gcc_unreachable ();
7310 }
7311
7312 recalculate_side_effects (*expr_p);
7313
7314 dont_recalculate:
7315 break;
7316 }
7317
7318 gcc_assert (*expr_p || ret != GS_OK);
7017 } 7319 }
7018 while (ret == GS_OK); 7320 while (ret == GS_OK);
7019 7321
7020 /* If we encountered an error_mark somewhere nested inside, either 7322 /* If we encountered an error_mark somewhere nested inside, either
7021 stub out the statement or propagate the error back out. */ 7323 stub out the statement or propagate the error back out. */
7174 /* An lvalue will do. Take the address of the expression, store it 7476 /* An lvalue will do. Take the address of the expression, store it
7175 in a temporary, and replace the expression with an INDIRECT_REF of 7477 in a temporary, and replace the expression with an INDIRECT_REF of
7176 that temporary. */ 7478 that temporary. */
7177 tmp = build_fold_addr_expr_loc (input_location, *expr_p); 7479 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
7178 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue); 7480 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
7179 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (tmp)), tmp); 7481 *expr_p = build_simple_mem_ref (tmp);
7180 } 7482 }
7181 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p)) 7483 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
7182 { 7484 {
7183 /* An rvalue will do. Assign the gimplified expression into a 7485 /* An rvalue will do. Assign the gimplified expression into a
7184 new temporary TMP and replace the original expression with 7486 new temporary TMP and replace the original expression with
7272 7574
7273 case ARRAY_TYPE: 7575 case ARRAY_TYPE:
7274 /* These types may not have declarations, so handle them here. */ 7576 /* These types may not have declarations, so handle them here. */
7275 gimplify_type_sizes (TREE_TYPE (type), list_p); 7577 gimplify_type_sizes (TREE_TYPE (type), list_p);
7276 gimplify_type_sizes (TYPE_DOMAIN (type), list_p); 7578 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
7277 /* When not optimizing, ensure VLA bounds aren't removed. */ 7579 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
7278 if (!optimize 7580 with assigned stack slots, for -O1+ -g they should be tracked
7279 && TYPE_DOMAIN (type) 7581 by VTA. */
7280 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type))) 7582 if (!(TYPE_NAME (type)
7281 { 7583 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
7282 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type)); 7584 && DECL_IGNORED_P (TYPE_NAME (type)))
7283 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t)) 7585 && TYPE_DOMAIN (type)
7284 DECL_IGNORED_P (t) = 0; 7586 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
7285 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type)); 7587 {
7286 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t)) 7588 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
7287 DECL_IGNORED_P (t) = 0; 7589 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
7288 } 7590 DECL_IGNORED_P (t) = 0;
7591 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7592 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
7593 DECL_IGNORED_P (t) = 0;
7594 }
7289 break; 7595 break;
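/* Illustrative C99 source, not part of gimplify.c, for the comment above:
   the domain of a variable-length array carries artificial VAR_DECL bounds,
   kept visible here so that -O0 gives them stack slots and -O1+ -g can track
   them for debugging. */
void fill (int n)
{
  int vla[n];            /* TYPE_DOMAIN has artificial min/max decls */
  for (int i = 0; i < n; i++)
    vla[i] = i;
}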
7290 7596
7291 case RECORD_TYPE: 7597 case RECORD_TYPE:
7292 case UNION_TYPE: 7598 case UNION_TYPE:
7293 case QUAL_UNION_TYPE: 7599 case QUAL_UNION_TYPE:
7294 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field)) 7600 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
7295 if (TREE_CODE (field) == FIELD_DECL) 7601 if (TREE_CODE (field) == FIELD_DECL)
7296 { 7602 {
7297 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p); 7603 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
7298 gimplify_one_sizepos (&DECL_SIZE (field), list_p); 7604 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
7299 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p); 7605 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
7300 gimplify_type_sizes (TREE_TYPE (field), list_p); 7606 gimplify_type_sizes (TREE_TYPE (field), list_p);
7301 } 7607 }
7302 break; 7608 break;
7303 7609
7304 case POINTER_TYPE: 7610 case POINTER_TYPE:
7305 case REFERENCE_TYPE: 7611 case REFERENCE_TYPE:
7306 /* We used to recurse on the pointed-to type here, which turned out to 7612 /* We used to recurse on the pointed-to type here, which turned out to
7371 gimple stmt; 7677 gimple stmt;
7372 7678
7373 *expr_p = create_tmp_var (type, NULL); 7679 *expr_p = create_tmp_var (type, NULL);
7374 tmp = build1 (NOP_EXPR, type, expr); 7680 tmp = build1 (NOP_EXPR, type, expr);
7375 stmt = gimplify_assign (*expr_p, tmp, stmt_p); 7681 stmt = gimplify_assign (*expr_p, tmp, stmt_p);
7376 if (EXPR_HAS_LOCATION (expr)) 7682 gimple_set_location (stmt, EXPR_LOC_OR_HERE (expr));
7377 gimple_set_location (stmt, EXPR_LOCATION (expr));
7378 else
7379 gimple_set_location (stmt, input_location);
7380 } 7683 }
7381 } 7684 }
7382 7685
7383 7686
7384 /* Gimplify the body of statements pointed to by BODY_P and return a 7687 /* Gimplify the body of statements pointed to by BODY_P and return a
7439 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL); 7742 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
7440 7743
7441 *body_p = NULL_TREE; 7744 *body_p = NULL_TREE;
7442 7745
7443 /* If we had callee-copies statements, insert them at the beginning 7746 /* If we had callee-copies statements, insert them at the beginning
7444 of the function. */ 7747 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
7445 if (!gimple_seq_empty_p (parm_stmts)) 7748 if (!gimple_seq_empty_p (parm_stmts))
7446 { 7749 {
7750 tree parm;
7751
7447 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind)); 7752 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
7448 gimple_bind_set_body (outer_bind, parm_stmts); 7753 gimple_bind_set_body (outer_bind, parm_stmts);
7754
7755 for (parm = DECL_ARGUMENTS (current_function_decl);
7756 parm; parm = DECL_CHAIN (parm))
7757 if (DECL_HAS_VALUE_EXPR_P (parm))
7758 {
7759 DECL_HAS_VALUE_EXPR_P (parm) = 0;
7760 DECL_IGNORED_P (parm) = 0;
7761 }
7449 } 7762 }
7450 7763
7451 if (nonlocal_vlas) 7764 if (nonlocal_vlas)
7452 { 7765 {
7453 pointer_set_destroy (nonlocal_vlas); 7766 pointer_set_destroy (nonlocal_vlas);
7456 7769
7457 pop_gimplify_context (outer_bind); 7770 pop_gimplify_context (outer_bind);
7458 gcc_assert (gimplify_ctxp == NULL); 7771 gcc_assert (gimplify_ctxp == NULL);
7459 7772
7460 #ifdef ENABLE_TYPES_CHECKING 7773 #ifdef ENABLE_TYPES_CHECKING
7461 if (!errorcount && !sorrycount) 7774 if (!seen_error ())
7462 verify_types_in_gimple_seq (gimple_bind_body (outer_bind)); 7775 verify_types_in_gimple_seq (gimple_bind_body (outer_bind));
7463 #endif 7776 #endif
7464 7777
7465 timevar_pop (TV_TREE_GIMPLIFY); 7778 timevar_pop (TV_TREE_GIMPLIFY);
7466 input_location = saved_location; 7779 input_location = saved_location;
7467 7780
7468 return outer_bind; 7781 return outer_bind;
7782 }
7783
7784 typedef char *char_p; /* For DEF_VEC_P. */
7785 DEF_VEC_P(char_p);
7786 DEF_VEC_ALLOC_P(char_p,heap);
7787
7788 /* Return whether we should exclude FNDECL from instrumentation. */
7789
7790 static bool
7791 flag_instrument_functions_exclude_p (tree fndecl)
7792 {
7793 VEC(char_p,heap) *vec;
7794
7795 vec = (VEC(char_p,heap) *) flag_instrument_functions_exclude_functions;
7796 if (VEC_length (char_p, vec) > 0)
7797 {
7798 const char *name;
7799 int i;
7800 char *s;
7801
7802 name = lang_hooks.decl_printable_name (fndecl, 0);
7803 FOR_EACH_VEC_ELT (char_p, vec, i, s)
7804 if (strstr (name, s) != NULL)
7805 return true;
7806 }
7807
7808 vec = (VEC(char_p,heap) *) flag_instrument_functions_exclude_files;
7809 if (VEC_length (char_p, vec) > 0)
7810 {
7811 const char *name;
7812 int i;
7813 char *s;
7814
7815 name = DECL_SOURCE_FILE (fndecl);
7816 FOR_EACH_VEC_ELT (char_p, vec, i, s)
7817 if (strstr (name, s) != NULL)
7818 return true;
7819 }
7820
7821 return false;
7469 } 7822 }
7470 7823
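/* User-level view, not part of gimplify.c, of the knobs the predicate above
   implements: both exclusion lists are substring-matched (strstr) against
   the printable function name and the source file name, e.g.
       gcc -finstrument-functions \
           -finstrument-functions-exclude-function-list=fast_path \
           -finstrument-functions-exclude-file-list=third_party/ ...
   A single function can also opt out with the attribute below. */
extern void fast_path (void) __attribute__ ((no_instrument_function));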
7471 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL 7824 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
7472 node for the function we want to gimplify. 7825 node for the function we want to gimplify.
7473 7826
7488 if (DECL_STRUCT_FUNCTION (fndecl)) 7841 if (DECL_STRUCT_FUNCTION (fndecl))
7489 push_cfun (DECL_STRUCT_FUNCTION (fndecl)); 7842 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
7490 else 7843 else
7491 push_struct_function (fndecl); 7844 push_struct_function (fndecl);
7492 7845
7493 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = TREE_CHAIN (parm)) 7846 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
7494 { 7847 {
7495 /* Preliminarily mark non-addressed complex variables as eligible 7848 /* Preliminarily mark non-addressed complex variables as eligible
7496 for promotion to gimple registers. We'll transform their uses 7849 for promotion to gimple registers. We'll transform their uses
7497 as we find them. */ 7850 as we find them. */
7498 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE 7851 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
7526 { 7879 {
7527 tree x; 7880 tree x;
7528 gimple new_bind; 7881 gimple new_bind;
7529 gimple tf; 7882 gimple tf;
7530 gimple_seq cleanup = NULL, body = NULL; 7883 gimple_seq cleanup = NULL, body = NULL;
7531 7884 tree tmp_var;
7885 gimple call;
7886
7887 x = implicit_built_in_decls[BUILT_IN_RETURN_ADDRESS];
7888 call = gimple_build_call (x, 1, integer_zero_node);
7889 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
7890 gimple_call_set_lhs (call, tmp_var);
7891 gimplify_seq_add_stmt (&cleanup, call);
7532 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_EXIT]; 7892 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_EXIT];
7533 gimplify_seq_add_stmt (&cleanup, gimple_build_call (x, 0)); 7893 call = gimple_build_call (x, 2,
7894 build_fold_addr_expr (current_function_decl),
7895 tmp_var);
7896 gimplify_seq_add_stmt (&cleanup, call);
7534 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY); 7897 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
7535 7898
7899 x = implicit_built_in_decls[BUILT_IN_RETURN_ADDRESS];
7900 call = gimple_build_call (x, 1, integer_zero_node);
7901 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
7902 gimple_call_set_lhs (call, tmp_var);
7903 gimplify_seq_add_stmt (&body, call);
7536 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_ENTER]; 7904 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_ENTER];
7537 gimplify_seq_add_stmt (&body, gimple_build_call (x, 0)); 7905 call = gimple_build_call (x, 2,
7906 build_fold_addr_expr (current_function_decl),
7907 tmp_var);
7908 gimplify_seq_add_stmt (&body, call);
7538 gimplify_seq_add_stmt (&body, tf); 7909 gimplify_seq_add_stmt (&body, tf);
7539 new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind)); 7910 new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
7540 /* Clear the block for BIND, since it is no longer directly inside 7911 /* Clear the block for BIND, since it is no longer directly inside
7541 the function, but within a try block. */ 7912 the function, but within a try block. */
7542 gimple_bind_set_block (bind, NULL); 7913 gimple_bind_set_block (bind, NULL);
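/* Sketch, not part of gimplify.c, of the user-level hooks the instrumented
   entry and cleanup sequences above now call with two arguments: the address
   of the current function and the call site taken from
   __builtin_return_address (0).  These are the -finstrument-functions
   callbacks, declared non-instrumented so they do not recurse. */
void __cyg_profile_func_enter (void *this_fn, void *call_site)
     __attribute__ ((no_instrument_function));
void __cyg_profile_func_exit (void *this_fn, void *call_site)
     __attribute__ ((no_instrument_function));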
7660 8031
7661 lhs = gimple_get_lhs (stmt); 8032 lhs = gimple_get_lhs (stmt);
7662 /* If the LHS changed it in a way that requires a simple RHS, 8033 /* If the LHS changed it in a way that requires a simple RHS,
7663 create temporary. */ 8034 create temporary. */
7664 if (lhs && !is_gimple_reg (lhs)) 8035 if (lhs && !is_gimple_reg (lhs))
7665 { 8036 {
7666 bool need_temp = false; 8037 bool need_temp = false;
7667 8038
7668 if (is_gimple_assign (stmt) 8039
7669 && num_ops == 2 8040 if (is_gimple_assign (stmt)
7670 && get_gimple_rhs_class (gimple_expr_code (stmt)) 8041 && num_ops == 2
7671 == GIMPLE_SINGLE_RHS) 8042 && get_gimple_rhs_class (gimple_expr_code (stmt))
7672 gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL, 8043 == GIMPLE_SINGLE_RHS)
7673 rhs_predicate_for (gimple_assign_lhs (stmt)), 8044 gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
7674 fb_rvalue); 8045 rhs_predicate_for (gimple_assign_lhs (stmt)),
7675 else if (is_gimple_reg (lhs)) 8046 fb_rvalue);
7676 { 8047 else if (is_gimple_reg (lhs))
7677 if (is_gimple_reg_type (TREE_TYPE (lhs))) 8048 {
7678 { 8049 if (is_gimple_reg_type (TREE_TYPE (lhs)))
7679 if (is_gimple_call (stmt)) 8050 {
7680 { 8051 if (is_gimple_call (stmt))
7681 i = gimple_call_flags (stmt); 8052 {
7682 if ((i & ECF_LOOPING_CONST_OR_PURE) 8053 i = gimple_call_flags (stmt);
7683 || !(i & (ECF_CONST | ECF_PURE))) 8054 if ((i & ECF_LOOPING_CONST_OR_PURE)
7684 need_temp = true; 8055 || !(i & (ECF_CONST | ECF_PURE)))
7685 } 8056 need_temp = true;
7686 if (stmt_can_throw_internal (stmt)) 8057 }
7687 need_temp = true; 8058 if (stmt_can_throw_internal (stmt))
7688 } 8059 need_temp = true;
7689 } 8060 }
7690 else 8061 }
7691 { 8062 else
7692 if (is_gimple_reg_type (TREE_TYPE (lhs))) 8063 {
7693 need_temp = true; 8064 if (is_gimple_reg_type (TREE_TYPE (lhs)))
7694 else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode) 8065 need_temp = true;
7695 { 8066 else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
7696 if (is_gimple_call (stmt)) 8067 {
7697 { 8068 if (is_gimple_call (stmt))
7698 tree fndecl = gimple_call_fndecl (stmt); 8069 {
7699 8070 tree fndecl = gimple_call_fndecl (stmt);
7700 if (!aggregate_value_p (TREE_TYPE (lhs), fndecl) 8071
7701 && !(fndecl && DECL_RESULT (fndecl) 8072 if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
7702 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))) 8073 && !(fndecl && DECL_RESULT (fndecl)
7703 need_temp = true; 8074 && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
7704 } 8075 need_temp = true;
7705 else 8076 }
7706 need_temp = true; 8077 else
7707 } 8078 need_temp = true;
7708 } 8079 }
7709 if (need_temp) 8080 }
7710 { 8081 if (need_temp)
7711 tree temp = create_tmp_var (TREE_TYPE (lhs), NULL); 8082 {
7712 8083 tree temp = create_tmp_reg (TREE_TYPE (lhs), NULL);
7713 if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE 8084
7714 || TREE_CODE (TREE_TYPE (lhs)) == VECTOR_TYPE) 8085 if (TREE_CODE (orig_lhs) == SSA_NAME)
7715 DECL_GIMPLE_REG_P (temp) = 1; 8086 orig_lhs = SSA_NAME_VAR (orig_lhs);
7716 if (TREE_CODE (orig_lhs) == SSA_NAME) 8087
7717 orig_lhs = SSA_NAME_VAR (orig_lhs); 8088 if (gimple_in_ssa_p (cfun))
7718 8089 temp = make_ssa_name (temp, NULL);
7719 if (gimple_in_ssa_p (cfun)) 8090 gimple_set_lhs (stmt, temp);
7720 temp = make_ssa_name (temp, NULL); 8091 post_stmt = gimple_build_assign (lhs, temp);
7721 gimple_set_lhs (stmt, temp); 8092 if (TREE_CODE (lhs) == SSA_NAME)
7722 post_stmt = gimple_build_assign (lhs, temp); 8093 SSA_NAME_DEF_STMT (lhs) = post_stmt;
7723 if (TREE_CODE (lhs) == SSA_NAME) 8094 }
7724 SSA_NAME_DEF_STMT (lhs) = post_stmt; 8095 }
7725 }
7726 }
7727 break; 8096 break;
7728 } 8097 }
7729 8098
7730 if (gimple_referenced_vars (cfun)) 8099 if (gimple_referenced_vars (cfun))
7731 for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t)) 8100 for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
7746 gsi_insert_after (gsi_p, post_stmt, GSI_NEW_STMT); 8115 gsi_insert_after (gsi_p, post_stmt, GSI_NEW_STMT);
7747 8116
7748 pop_gimplify_context (NULL); 8117 pop_gimplify_context (NULL);
7749 } 8118 }
7750 8119
8120
8121 /* Expands EXPR to list of gimple statements STMTS. GIMPLE_TEST_F specifies
8122 the predicate that will hold for the result. If VAR is not NULL, make the
8123 base variable of the final destination be VAR if suitable. */
8124
8125 tree
8126 force_gimple_operand_1 (tree expr, gimple_seq *stmts,
8127 gimple_predicate gimple_test_f, tree var)
8128 {
8129 tree t;
8130 enum gimplify_status ret;
8131 struct gimplify_ctx gctx;
8132
8133 *stmts = NULL;
8134
8135 /* gimple_test_f might be more strict than is_gimple_val, make
8136 sure we pass both. Just checking gimple_test_f doesn't work
8137 because most gimple predicates do not work recursively. */
8138 if (is_gimple_val (expr)
8139 && (*gimple_test_f) (expr))
8140 return expr;
8141
8142 push_gimplify_context (&gctx);
8143 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
8144 gimplify_ctxp->allow_rhs_cond_expr = true;
8145
8146 if (var)
8147 expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);
8148
8149 if (TREE_CODE (expr) != MODIFY_EXPR
8150 && TREE_TYPE (expr) == void_type_node)
8151 {
8152 gimplify_and_add (expr, stmts);
8153 expr = NULL_TREE;
8154 }
8155 else
8156 {
8157 ret = gimplify_expr (&expr, stmts, NULL, gimple_test_f, fb_rvalue);
8158 gcc_assert (ret != GS_ERROR);
8159 }
8160
8161 if (gimple_referenced_vars (cfun))
8162 for (t = gimplify_ctxp->temps; t ; t = DECL_CHAIN (t))
8163 add_referenced_var (t);
8164
8165 pop_gimplify_context (NULL);
8166
8167 return expr;
8168 }
7751 8169
7752 /* Expands EXPR to list of gimple statements STMTS. If SIMPLE is true, 8170 /* Expands EXPR to list of gimple statements STMTS. If SIMPLE is true,
7753 force the result to be either ssa_name or an invariant, otherwise 8171 force the result to be either ssa_name or an invariant, otherwise
7754 just force it to be a rhs expression. If VAR is not NULL, make the 8172 just force it to be a rhs expression. If VAR is not NULL, make the
7755 base variable of the final destination be VAR if suitable. */ 8173 base variable of the final destination be VAR if suitable. */
7756 8174
7757 tree 8175 tree
7758 force_gimple_operand (tree expr, gimple_seq *stmts, bool simple, tree var) 8176 force_gimple_operand (tree expr, gimple_seq *stmts, bool simple, tree var)
7759 { 8177 {
7760 tree t; 8178 return force_gimple_operand_1 (expr, stmts,
7761 enum gimplify_status ret; 8179 simple ? is_gimple_val : is_gimple_reg_rhs,
7762 gimple_predicate gimple_test_f; 8180 var);
7763 struct gimplify_ctx gctx; 8181 }
7764 8182
7765 *stmts = NULL; 8183 /* Invokes force_gimple_operand_1 for EXPR with parameters GIMPLE_TEST_F
7766 8184 and VAR. If some statements are produced, emits them at GSI.
7767 if (is_gimple_val (expr)) 8185 If BEFORE is true. the statements are appended before GSI, otherwise
7768 return expr; 8186 they are appended after it. M specifies the way GSI moves after
7769 8187 insertion (GSI_SAME_STMT or GSI_CONTINUE_LINKING are the usual values). */
7770 gimple_test_f = simple ? is_gimple_val : is_gimple_reg_rhs; 8188
7771 8189 tree
7772 push_gimplify_context (&gctx); 8190 force_gimple_operand_gsi_1 (gimple_stmt_iterator *gsi, tree expr,
7773 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun); 8191 gimple_predicate gimple_test_f,
7774 gimplify_ctxp->allow_rhs_cond_expr = true; 8192 tree var, bool before,
7775 8193 enum gsi_iterator_update m)
7776 if (var) 8194 {
7777 expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr); 8195 gimple_seq stmts;
7778 8196
7779 if (TREE_CODE (expr) != MODIFY_EXPR 8197 expr = force_gimple_operand_1 (expr, &stmts, gimple_test_f, var);
7780 && TREE_TYPE (expr) == void_type_node) 8198
7781 { 8199 if (!gimple_seq_empty_p (stmts))
7782 gimplify_and_add (expr, stmts); 8200 {
7783 expr = NULL_TREE; 8201 if (gimple_in_ssa_p (cfun))
7784 } 8202 {
7785 else 8203 gimple_stmt_iterator i;
7786 { 8204
7787 ret = gimplify_expr (&expr, stmts, NULL, gimple_test_f, fb_rvalue); 8205 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
7788 gcc_assert (ret != GS_ERROR); 8206 mark_symbols_for_renaming (gsi_stmt (i));
7789 } 8207 }
7790 8208
7791 if (gimple_referenced_vars (cfun)) 8209 if (before)
7792 for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t)) 8210 gsi_insert_seq_before (gsi, stmts, m);
7793 add_referenced_var (t); 8211 else
7794 8212 gsi_insert_seq_after (gsi, stmts, m);
7795 pop_gimplify_context (NULL); 8213 }
7796 8214
7797 return expr; 8215 return expr;
7798 } 8216 }
7799 8217
7800 /* Invokes force_gimple_operand for EXPR with parameters SIMPLE_P and VAR. If 8218 /* Invokes force_gimple_operand_1 for EXPR with parameter VAR.
7801 some statements are produced, emits them at GSI. If BEFORE is true, 8219 If SIMPLE is true, force the result to be either ssa_name or an invariant,
7802 the statements are appended before GSI, otherwise they are appended after 8220 otherwise just force it to be a rhs expression. If some statements are
7803 it. M specifies the way GSI moves after insertion (GSI_SAME_STMT or 8221 produced, emits them at GSI. If BEFORE is true, the statements are
7804 GSI_CONTINUE_LINKING are the usual values). */ 8222 appended before GSI, otherwise they are appended after it. M specifies
8223 the way GSI moves after insertion (GSI_SAME_STMT or GSI_CONTINUE_LINKING
8224 are the usual values). */
7805 8225
7806 tree 8226 tree
7807 force_gimple_operand_gsi (gimple_stmt_iterator *gsi, tree expr, 8227 force_gimple_operand_gsi (gimple_stmt_iterator *gsi, tree expr,
7808 bool simple_p, tree var, bool before, 8228 bool simple_p, tree var, bool before,
7809 enum gsi_iterator_update m) 8229 enum gsi_iterator_update m)
7810 { 8230 {
7811 gimple_seq stmts; 8231 return force_gimple_operand_gsi_1 (gsi, expr,
7812 8232 simple_p
7813 expr = force_gimple_operand (expr, &stmts, simple_p, var); 8233 ? is_gimple_val : is_gimple_reg_rhs,
7814 8234 var, before, m);
7815 if (!gimple_seq_empty_p (stmts)) 8235 }
7816 { 8236
7817 if (gimple_in_ssa_p (cfun))
7818 {
7819 gimple_stmt_iterator i;
7820
7821 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
7822 mark_symbols_for_renaming (gsi_stmt (i));
7823 }
7824
7825 if (before)
7826 gsi_insert_seq_before (gsi, stmts, m);
7827 else
7828 gsi_insert_seq_after (gsi, stmts, m);
7829 }
7830
7831 return expr;
7832 }
7833 8237
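/* Typical use of the wrappers above from a GIMPLE pass (a sketch: GSI and
   EXPR stand for an existing gimple_stmt_iterator and a tree expression):
   force EXPR into an is_gimple_val operand, inserting any statements it
   needs before the iterator. */
tree op = force_gimple_operand_gsi (&gsi, expr, true /* simple_p */,
                                    NULL_TREE, true /* before */,
                                    GSI_SAME_STMT);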
7834 #include "gt-gimplify.h" 8238 #include "gt-gimplify.h"