Mercurial > hg > CbC > CbC_gcc
comparison gcc/gimplify.c @ 63:b7f97abdc517 gcc-4.6-20100522
update gcc from gcc-4.5.0 to gcc-4.6
author | ryoma <e075725@ie.u-ryukyu.ac.jp> |
---|---|
date | Mon, 24 May 2010 12:47:05 +0900 |
parents | 77e2b8dfacca |
children | f6334be47118 |
comparison
equal
deleted
inserted
replaced
56:3c8a44c06a95 | 63:b7f97abdc517 |
---|---|
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees | 1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees |
2 tree representation into the GIMPLE form. | 2 tree representation into the GIMPLE form. |
3 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 | 3 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 |
4 Free Software Foundation, Inc. | 4 Free Software Foundation, Inc. |
5 Major work done by Sebastian Pop <s.pop@laposte.net>, | 5 Major work done by Sebastian Pop <s.pop@laposte.net>, |
6 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>. | 6 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>. |
7 | 7 |
8 This file is part of GCC. | 8 This file is part of GCC. |
25 #include "system.h" | 25 #include "system.h" |
26 #include "coretypes.h" | 26 #include "coretypes.h" |
27 #include "tm.h" | 27 #include "tm.h" |
28 #include "tree.h" | 28 #include "tree.h" |
29 #include "rtl.h" | 29 #include "rtl.h" |
30 #include "varray.h" | |
31 #include "gimple.h" | 30 #include "gimple.h" |
32 #include "tree-iterator.h" | 31 #include "tree-iterator.h" |
33 #include "tree-inline.h" | 32 #include "tree-inline.h" |
34 #include "diagnostic.h" | 33 #include "diagnostic.h" |
34 #include "tree-pretty-print.h" | |
35 #include "langhooks.h" | 35 #include "langhooks.h" |
36 #include "langhooks-def.h" | 36 #include "langhooks-def.h" |
37 #include "tree-flow.h" | 37 #include "tree-flow.h" |
38 #include "cgraph.h" | 38 #include "cgraph.h" |
39 #include "timevar.h" | 39 #include "timevar.h" |
40 #include "except.h" | 40 #include "except.h" |
41 #include "hashtab.h" | 41 #include "hashtab.h" |
42 #include "flags.h" | 42 #include "flags.h" |
43 #include "real.h" | |
44 #include "function.h" | 43 #include "function.h" |
45 #include "output.h" | 44 #include "output.h" |
46 #include "expr.h" | 45 #include "expr.h" |
47 #include "ggc.h" | 46 #include "ggc.h" |
48 #include "toplev.h" | 47 #include "toplev.h" |
73 | 72 |
74 | 73 |
75 enum omp_region_type | 74 enum omp_region_type |
76 { | 75 { |
77 ORT_WORKSHARE = 0, | 76 ORT_WORKSHARE = 0, |
78 ORT_TASK = 1, | |
79 ORT_PARALLEL = 2, | 77 ORT_PARALLEL = 2, |
80 ORT_COMBINED_PARALLEL = 3 | 78 ORT_COMBINED_PARALLEL = 3, |
79 ORT_TASK = 4, | |
80 ORT_UNTIED_TASK = 5 | |
81 }; | 81 }; |
82 | 82 |
83 struct gimplify_omp_ctx | 83 struct gimplify_omp_ctx |
84 { | 84 { |
85 struct gimplify_omp_ctx *outer_context; | 85 struct gimplify_omp_ctx *outer_context; |
156 *SEQ_P is NULL, a new sequence is allocated. This function is | 156 *SEQ_P is NULL, a new sequence is allocated. This function is |
157 similar to gimple_seq_add_stmt, but does not scan the operands. | 157 similar to gimple_seq_add_stmt, but does not scan the operands. |
158 During gimplification, we need to manipulate statement sequences | 158 During gimplification, we need to manipulate statement sequences |
159 before the def/use vectors have been constructed. */ | 159 before the def/use vectors have been constructed. */ |
160 | 160 |
161 static void | 161 void |
162 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs) | 162 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs) |
163 { | 163 { |
164 gimple_stmt_iterator si; | 164 gimple_stmt_iterator si; |
165 | 165 |
166 if (gs == NULL) | 166 if (gs == NULL) |
317 c->outer_context = gimplify_omp_ctxp; | 317 c->outer_context = gimplify_omp_ctxp; |
318 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0); | 318 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0); |
319 c->privatized_types = pointer_set_create (); | 319 c->privatized_types = pointer_set_create (); |
320 c->location = input_location; | 320 c->location = input_location; |
321 c->region_type = region_type; | 321 c->region_type = region_type; |
322 if (region_type != ORT_TASK) | 322 if ((region_type & ORT_TASK) == 0) |
323 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED; | 323 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED; |
324 else | 324 else |
325 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED; | 325 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED; |
326 | 326 |
327 return c; | 327 return c; |
337 XDELETE (c); | 337 XDELETE (c); |
338 } | 338 } |
339 | 339 |
340 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int); | 340 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int); |
341 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool); | 341 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool); |
342 | |
343 /* A subroutine of append_to_statement_list{,_force}. T is not NULL. */ | |
344 | |
345 static void | |
346 append_to_statement_list_1 (tree t, tree *list_p) | |
347 { | |
348 tree list = *list_p; | |
349 tree_stmt_iterator i; | |
350 | |
351 if (!list) | |
352 { | |
353 if (t && TREE_CODE (t) == STATEMENT_LIST) | |
354 { | |
355 *list_p = t; | |
356 return; | |
357 } | |
358 *list_p = list = alloc_stmt_list (); | |
359 } | |
360 | |
361 i = tsi_last (list); | |
362 tsi_link_after (&i, t, TSI_CONTINUE_LINKING); | |
363 } | |
364 | |
365 /* Add T to the end of the list container pointed to by LIST_P. | |
366 If T is an expression with no effects, it is ignored. */ | |
367 | |
368 void | |
369 append_to_statement_list (tree t, tree *list_p) | |
370 { | |
371 if (t && TREE_SIDE_EFFECTS (t)) | |
372 append_to_statement_list_1 (t, list_p); | |
373 } | |
374 | |
375 /* Similar, but the statement is always added, regardless of side effects. */ | |
376 | |
377 void | |
378 append_to_statement_list_force (tree t, tree *list_p) | |
379 { | |
380 if (t != NULL_TREE) | |
381 append_to_statement_list_1 (t, list_p); | |
382 } | |
383 | 342 |
384 /* Both gimplify the statement T and append it to *SEQ_P. This function | 343 /* Both gimplify the statement T and append it to *SEQ_P. This function |
385 behaves exactly as gimplify_stmt, but you don't have to pass T as a | 344 behaves exactly as gimplify_stmt, but you don't have to pass T as a |
386 reference. */ | 345 reference. */ |
387 | 346 |
504 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type)); | 463 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type)); |
505 | 464 |
506 tmp_var = create_tmp_var_raw (type, prefix); | 465 tmp_var = create_tmp_var_raw (type, prefix); |
507 gimple_add_tmp_var (tmp_var); | 466 gimple_add_tmp_var (tmp_var); |
508 return tmp_var; | 467 return tmp_var; |
468 } | |
469 | |
470 /* Create a new temporary variable declaration of type TYPE by calling | |
471 create_tmp_var and if TYPE is a vector or a complex number, mark the new | |
472 temporary as gimple register. */ | |
473 | |
474 tree | |
475 create_tmp_reg (tree type, const char *prefix) | |
476 { | |
477 tree tmp; | |
478 | |
479 tmp = create_tmp_var (type, prefix); | |
480 if (TREE_CODE (type) == COMPLEX_TYPE | |
481 || TREE_CODE (type) == VECTOR_TYPE) | |
482 DECL_GIMPLE_REG_P (tmp) = 1; | |
483 | |
484 return tmp; | |
509 } | 485 } |
510 | 486 |
511 /* Create a temporary with a name derived from VAL. Subroutine of | 487 /* Create a temporary with a name derived from VAL. Subroutine of |
512 lookup_tmp_var; nobody else should call this function. */ | 488 lookup_tmp_var; nobody else should call this function. */ |
513 | 489 |
842 { | 818 { |
843 gimple gs = gsi_stmt (i); | 819 gimple gs = gsi_stmt (i); |
844 annotate_one_with_location (gs, location); | 820 annotate_one_with_location (gs, location); |
845 } | 821 } |
846 } | 822 } |
847 | 823 |
848 | 824 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree |
849 /* Similar to copy_tree_r() but do not copy SAVE_EXPR or TARGET_EXPR nodes. | 825 nodes that are referenced more than once in GENERIC functions. This is |
826 necessary because gimplification (translation into GIMPLE) is performed | |
827 by modifying tree nodes in-place, so gimplification of a shared node in a |
828 first context could generate an invalid GIMPLE form in a second context. | |
829 | |
830 This is achieved with a simple mark/copy/unmark algorithm that walks the | |
831 GENERIC representation top-down, marks nodes with TREE_VISITED the first | |
832 time it encounters them, duplicates them if they already have TREE_VISITED | |
833 set, and finally removes the TREE_VISITED marks it has set. | |
834 | |
835 The algorithm works only at the function level, i.e. it generates a GENERIC | |
836 representation of a function with no nodes shared within the function when | |
837 passed a GENERIC function (except for nodes that are allowed to be shared). | |
838 | |
839 At the global level, it is also necessary to unshare tree nodes that are | |
840 referenced in more than one function, for the same aforementioned reason. | |
841 This requires some cooperation from the front-end. There are 2 strategies: | |
842 | |
843 1. Manual unsharing. The front-end needs to call unshare_expr on every | |
844 expression that might end up being shared across functions. | |
845 | |
846 2. Deep unsharing. This is an extension of regular unsharing. Instead | |
847 of calling unshare_expr on expressions that might be shared across | |
848 functions, the front-end pre-marks them with TREE_VISITED. This will | |
849 ensure that they are unshared on the first reference within functions | |
850 when the regular unsharing algorithm runs. The counterpart is that | |
851 this algorithm must look deeper than for manual unsharing, which is | |
852 specified by LANG_HOOKS_DEEP_UNSHARING. | |
853 | |
854 If there are only few specific cases of node sharing across functions, it is | |
855 probably easier for a front-end to unshare the expressions manually. On the | |
856 contrary, if the expressions generated at the global level are as widespread | |
857 as expressions generated within functions, deep unsharing is very likely the | |
858 way to go. */ | |
859 | |
860 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes. | |
850 These nodes model computations that should only be done once. If we | 861 These nodes model computations that should only be done once. If we |
851 were to unshare something like SAVE_EXPR(i++), the gimplification | 862 were to unshare something like SAVE_EXPR(i++), the gimplification |
852 process would create wrong code. */ | 863 process would create wrong code. */ |
853 | 864 |
854 static tree | 865 static tree |
855 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data) | 866 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data) |
856 { | 867 { |
857 enum tree_code code = TREE_CODE (*tp); | 868 tree t = *tp; |
858 /* Don't unshare types, decls, constants and SAVE_EXPR nodes. */ | 869 enum tree_code code = TREE_CODE (t); |
859 if (TREE_CODE_CLASS (code) == tcc_type | 870 |
860 || TREE_CODE_CLASS (code) == tcc_declaration | 871 /* Do not copy SAVE_EXPR or TARGET_EXPR nodes themselves, but copy |
861 || TREE_CODE_CLASS (code) == tcc_constant | 872 their subtrees if we can make sure to do it only once. */ |
862 || code == SAVE_EXPR || code == TARGET_EXPR | 873 if (code == SAVE_EXPR || code == TARGET_EXPR) |
863 /* We can't do anything sensible with a BLOCK used as an expression, | 874 { |
864 but we also can't just die when we see it because of non-expression | 875 if (data && !pointer_set_insert ((struct pointer_set_t *)data, t)) |
865 uses. So just avert our eyes and cross our fingers. Silly Java. */ | 876 ; |
866 || code == BLOCK) | 877 else |
878 *walk_subtrees = 0; | |
879 } | |
880 | |
881 /* Stop at types, decls, constants like copy_tree_r. */ | |
882 else if (TREE_CODE_CLASS (code) == tcc_type | |
883 || TREE_CODE_CLASS (code) == tcc_declaration | |
884 || TREE_CODE_CLASS (code) == tcc_constant | |
885 /* We can't do anything sensible with a BLOCK used as an | |
886 expression, but we also can't just die when we see it | |
887 because of non-expression uses. So we avert our eyes | |
888 and cross our fingers. Silly Java. */ | |
889 || code == BLOCK) | |
867 *walk_subtrees = 0; | 890 *walk_subtrees = 0; |
891 | |
892 /* Cope with the statement expression extension. */ | |
893 else if (code == STATEMENT_LIST) | |
894 ; | |
895 | |
896 /* Leave the bulk of the work to copy_tree_r itself. */ | |
868 else | 897 else |
869 { | 898 { |
870 gcc_assert (code != BIND_EXPR); | 899 gcc_assert (code != BIND_EXPR); |
871 copy_tree_r (tp, walk_subtrees, data); | 900 copy_tree_r (tp, walk_subtrees, NULL); |
872 } | 901 } |
873 | 902 |
874 return NULL_TREE; | 903 return NULL_TREE; |
875 } | 904 } |
876 | 905 |
877 /* Callback for walk_tree to unshare most of the shared trees rooted at | 906 /* Callback for walk_tree to unshare most of the shared trees rooted at |
878 *TP. If *TP has been visited already (i.e., TREE_VISITED (*TP) == 1), | 907 *TP. If *TP has been visited already (i.e., TREE_VISITED (*TP) == 1), |
879 then *TP is deep copied by calling copy_tree_r. | 908 then *TP is deep copied by calling mostly_copy_tree_r. */ |
880 | |
881 This unshares the same trees as copy_tree_r with the exception of | |
882 SAVE_EXPR nodes. These nodes model computations that should only be | |
883 done once. If we were to unshare something like SAVE_EXPR(i++), the | |
884 gimplification process would create wrong code. */ | |
885 | 909 |
886 static tree | 910 static tree |
887 copy_if_shared_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, | 911 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data) |
888 void *data ATTRIBUTE_UNUSED) | |
889 { | 912 { |
890 tree t = *tp; | 913 tree t = *tp; |
891 enum tree_code code = TREE_CODE (t); | 914 enum tree_code code = TREE_CODE (t); |
892 | 915 |
893 /* Skip types, decls, and constants. But we do want to look at their | 916 /* Skip types, decls, and constants. But we do want to look at their |
906 | 929 |
907 /* If this node has been visited already, unshare it and don't look | 930 /* If this node has been visited already, unshare it and don't look |
908 any deeper. */ | 931 any deeper. */ |
909 else if (TREE_VISITED (t)) | 932 else if (TREE_VISITED (t)) |
910 { | 933 { |
911 walk_tree (tp, mostly_copy_tree_r, NULL, NULL); | 934 walk_tree (tp, mostly_copy_tree_r, data, NULL); |
912 *walk_subtrees = 0; | 935 *walk_subtrees = 0; |
913 } | 936 } |
914 | 937 |
915 /* Otherwise, mark the tree as visited and keep looking. */ | 938 /* Otherwise, mark the node as visited and keep looking. */ |
916 else | 939 else |
917 TREE_VISITED (t) = 1; | 940 TREE_VISITED (t) = 1; |
918 | 941 |
919 return NULL_TREE; | 942 return NULL_TREE; |
920 } | 943 } |
921 | 944 |
922 static tree | 945 /* Unshare most of the shared trees rooted at *TP. */ |
923 unmark_visited_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, | 946 |
924 void *data ATTRIBUTE_UNUSED) | 947 static inline void |
925 { | 948 copy_if_shared (tree *tp) |
926 if (TREE_VISITED (*tp)) | 949 { |
927 TREE_VISITED (*tp) = 0; | 950 /* If the language requires deep unsharing, we need a pointer set to make |
928 else | 951 sure we don't repeatedly unshare subtrees of unshareable nodes. */ |
929 *walk_subtrees = 0; | 952 struct pointer_set_t *visited |
930 | 953 = lang_hooks.deep_unsharing ? pointer_set_create () : NULL; |
931 return NULL_TREE; | 954 walk_tree (tp, copy_if_shared_r, visited, NULL); |
955 if (visited) | |
956 pointer_set_destroy (visited); | |
932 } | 957 } |
933 | 958 |
934 /* Unshare all the trees in BODY_P, a pointer into the body of FNDECL, and the | 959 /* Unshare all the trees in BODY_P, a pointer into the body of FNDECL, and the |
935 bodies of any nested functions if we are unsharing the entire body of | 960 bodies of any nested functions if we are unsharing the entire body of |
936 FNDECL. */ | 961 FNDECL. */ |
938 static void | 963 static void |
939 unshare_body (tree *body_p, tree fndecl) | 964 unshare_body (tree *body_p, tree fndecl) |
940 { | 965 { |
941 struct cgraph_node *cgn = cgraph_node (fndecl); | 966 struct cgraph_node *cgn = cgraph_node (fndecl); |
942 | 967 |
943 walk_tree (body_p, copy_if_shared_r, NULL, NULL); | 968 copy_if_shared (body_p); |
969 | |
944 if (body_p == &DECL_SAVED_TREE (fndecl)) | 970 if (body_p == &DECL_SAVED_TREE (fndecl)) |
945 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested) | 971 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested) |
946 unshare_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl); | 972 unshare_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl); |
947 } | 973 } |
948 | 974 |
975 /* Callback for walk_tree to unmark the visited trees rooted at *TP. | |
976 Subtrees are walked until the first unvisited node is encountered. */ | |
977 | |
978 static tree | |
979 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED) | |
980 { | |
981 tree t = *tp; | |
982 | |
983 /* If this node has been visited, unmark it and keep looking. */ | |
984 if (TREE_VISITED (t)) | |
985 TREE_VISITED (t) = 0; | |
986 | |
987 /* Otherwise, don't look any deeper. */ | |
988 else | |
989 *walk_subtrees = 0; | |
990 | |
991 return NULL_TREE; | |
992 } | |
993 | |
994 /* Unmark the visited trees rooted at *TP. */ | |
995 | |
996 static inline void | |
997 unmark_visited (tree *tp) | |
998 { | |
999 walk_tree (tp, unmark_visited_r, NULL, NULL); | |
1000 } | |
1001 | |
949 /* Likewise, but mark all trees as not visited. */ | 1002 /* Likewise, but mark all trees as not visited. */ |
950 | 1003 |
951 static void | 1004 static void |
952 unvisit_body (tree *body_p, tree fndecl) | 1005 unvisit_body (tree *body_p, tree fndecl) |
953 { | 1006 { |
954 struct cgraph_node *cgn = cgraph_node (fndecl); | 1007 struct cgraph_node *cgn = cgraph_node (fndecl); |
955 | 1008 |
956 walk_tree (body_p, unmark_visited_r, NULL, NULL); | 1009 unmark_visited (body_p); |
1010 | |
957 if (body_p == &DECL_SAVED_TREE (fndecl)) | 1011 if (body_p == &DECL_SAVED_TREE (fndecl)) |
958 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested) | 1012 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested) |
959 unvisit_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl); | 1013 unvisit_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl); |
960 } | 1014 } |
961 | 1015 |
1210 we don't want to extend the lifetime of the RESULT_DECL, particularly | 1264 we don't want to extend the lifetime of the RESULT_DECL, particularly |
1211 across another call. In addition, for those aggregates for which | 1265 across another call. In addition, for those aggregates for which |
1212 hard_function_value generates a PARALLEL, we'll die during normal | 1266 hard_function_value generates a PARALLEL, we'll die during normal |
1213 expansion of structure assignments; there's special code in expand_return | 1267 expansion of structure assignments; there's special code in expand_return |
1214 to handle this case that does not exist in expand_expr. */ | 1268 to handle this case that does not exist in expand_expr. */ |
1215 if (!result_decl | 1269 if (!result_decl) |
1216 || aggregate_value_p (result_decl, TREE_TYPE (current_function_decl))) | 1270 result = NULL_TREE; |
1217 result = result_decl; | 1271 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl))) |
1272 { | |
1273 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST) | |
1274 { | |
1275 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl))) | |
1276 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p); | |
1277 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL | |
1278 should be effectively allocated by the caller, i.e. all calls to | |
1279 this function must be subject to the Return Slot Optimization. */ | |
1280 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p); | |
1281 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p); | |
1282 } | |
1283 result = result_decl; | |
1284 } | |
1218 else if (gimplify_ctxp->return_temp) | 1285 else if (gimplify_ctxp->return_temp) |
1219 result = gimplify_ctxp->return_temp; | 1286 result = gimplify_ctxp->return_temp; |
1220 else | 1287 else |
1221 { | 1288 { |
1222 result = create_tmp_var (TREE_TYPE (result_decl), NULL); | 1289 result = create_tmp_reg (TREE_TYPE (result_decl), NULL); |
1223 if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE | |
1224 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE) | |
1225 DECL_GIMPLE_REG_P (result) = 1; | |
1226 | 1290 |
1227 /* ??? With complex control flow (usually involving abnormal edges), | 1291 /* ??? With complex control flow (usually involving abnormal edges), |
1228 we can wind up warning about an uninitialized value for this. Due | 1292 we can wind up warning about an uninitialized value for this. Due |
1229 to how this variable is constructed and initialized, this is never | 1293 to how this variable is constructed and initialized, this is never |
1230 true. Give up and never warn. */ | 1294 true. Give up and never warn. */ |
1886 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p, | 1950 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p, |
1887 fallback_t fallback) | 1951 fallback_t fallback) |
1888 { | 1952 { |
1889 tree *p; | 1953 tree *p; |
1890 VEC(tree,heap) *stack; | 1954 VEC(tree,heap) *stack; |
1891 enum gimplify_status ret = GS_OK, tret; | 1955 enum gimplify_status ret = GS_ALL_DONE, tret; |
1892 int i; | 1956 int i; |
1893 location_t loc = EXPR_LOCATION (*expr_p); | 1957 location_t loc = EXPR_LOCATION (*expr_p); |
1958 tree expr = *expr_p; | |
1894 | 1959 |
1895 /* Create a stack of the subexpressions so later we can walk them in | 1960 /* Create a stack of the subexpressions so later we can walk them in |
1896 order from inner to outer. */ | 1961 order from inner to outer. */ |
1897 stack = VEC_alloc (tree, heap, 10); | 1962 stack = VEC_alloc (tree, heap, 10); |
1898 | 1963 |
2042 | 2107 |
2043 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */ | 2108 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */ |
2044 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF) | 2109 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF) |
2045 { | 2110 { |
2046 canonicalize_component_ref (expr_p); | 2111 canonicalize_component_ref (expr_p); |
2047 ret = MIN (ret, GS_OK); | |
2048 } | 2112 } |
2049 | 2113 |
2050 VEC_free (tree, heap, stack); | 2114 VEC_free (tree, heap, stack); |
2115 | |
2116 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE); | |
2051 | 2117 |
2052 return ret; | 2118 return ret; |
2053 } | 2119 } |
2054 | 2120 |
2055 /* Gimplify the self modifying expression pointed to by EXPR_P | 2121 /* Gimplify the self modifying expression pointed to by EXPR_P |
2718 gimple_boolify (tree expr) | 2784 gimple_boolify (tree expr) |
2719 { | 2785 { |
2720 tree type = TREE_TYPE (expr); | 2786 tree type = TREE_TYPE (expr); |
2721 location_t loc = EXPR_LOCATION (expr); | 2787 location_t loc = EXPR_LOCATION (expr); |
2722 | 2788 |
2789 if (TREE_CODE (expr) == NE_EXPR | |
2790 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR | |
2791 && integer_zerop (TREE_OPERAND (expr, 1))) | |
2792 { | |
2793 tree call = TREE_OPERAND (expr, 0); | |
2794 tree fn = get_callee_fndecl (call); | |
2795 | |
2796 /* For __builtin_expect ((long) (x), y) recurse into x as well | |
2797 if x is truth_value_p. */ | |
2798 if (fn | |
2799 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL | |
2800 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT | |
2801 && call_expr_nargs (call) == 2) | |
2802 { | |
2803 tree arg = CALL_EXPR_ARG (call, 0); | |
2804 if (arg) | |
2805 { | |
2806 if (TREE_CODE (arg) == NOP_EXPR | |
2807 && TREE_TYPE (arg) == TREE_TYPE (call)) | |
2808 arg = TREE_OPERAND (arg, 0); | |
2809 if (truth_value_p (TREE_CODE (arg))) | |
2810 { | |
2811 arg = gimple_boolify (arg); | |
2812 CALL_EXPR_ARG (call, 0) | |
2813 = fold_convert_loc (loc, TREE_TYPE (call), arg); | |
2814 } | |
2815 } | |
2816 } | |
2817 } | |
2818 | |
2723 if (TREE_CODE (type) == BOOLEAN_TYPE) | 2819 if (TREE_CODE (type) == BOOLEAN_TYPE) |
2724 return expr; | 2820 return expr; |
2725 | 2821 |
2726 switch (TREE_CODE (expr)) | 2822 switch (TREE_CODE (expr)) |
2727 { | 2823 { |
2821 | 2917 |
2822 static enum gimplify_status | 2918 static enum gimplify_status |
2823 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback) | 2919 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback) |
2824 { | 2920 { |
2825 tree expr = *expr_p; | 2921 tree expr = *expr_p; |
2826 tree tmp, type, arm1, arm2; | 2922 tree type = TREE_TYPE (expr); |
2923 location_t loc = EXPR_LOCATION (expr); | |
2924 tree tmp, arm1, arm2; | |
2827 enum gimplify_status ret; | 2925 enum gimplify_status ret; |
2828 tree label_true, label_false, label_cont; | 2926 tree label_true, label_false, label_cont; |
2829 bool have_then_clause_p, have_else_clause_p; | 2927 bool have_then_clause_p, have_else_clause_p; |
2830 gimple gimple_cond; | 2928 gimple gimple_cond; |
2831 enum tree_code pred_code; | 2929 enum tree_code pred_code; |
2832 gimple_seq seq = NULL; | 2930 gimple_seq seq = NULL; |
2833 location_t loc = EXPR_LOCATION (*expr_p); | |
2834 | |
2835 type = TREE_TYPE (expr); | |
2836 | 2931 |
2837 /* If this COND_EXPR has a value, copy the values into a temporary within | 2932 /* If this COND_EXPR has a value, copy the values into a temporary within |
2838 the arms. */ | 2933 the arms. */ |
2839 if (! VOID_TYPE_P (type)) | 2934 if (!VOID_TYPE_P (type)) |
2840 { | 2935 { |
2936 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2); | |
2841 tree result; | 2937 tree result; |
2842 | 2938 |
2843 /* If an rvalue is ok or we do not require an lvalue, avoid creating | 2939 /* If either an rvalue is ok or we do not require an lvalue, create the |
2844 an addressable temporary. */ | 2940 temporary. But we cannot do that if the type is addressable. */ |
2845 if (((fallback & fb_rvalue) | 2941 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue)) |
2846 || !(fallback & fb_lvalue)) | |
2847 && !TREE_ADDRESSABLE (type)) | 2942 && !TREE_ADDRESSABLE (type)) |
2848 { | 2943 { |
2849 if (gimplify_ctxp->allow_rhs_cond_expr | 2944 if (gimplify_ctxp->allow_rhs_cond_expr |
2850 /* If either branch has side effects or could trap, it can't be | 2945 /* If either branch has side effects or could trap, it can't be |
2851 evaluated unconditionally. */ | 2946 evaluated unconditionally. */ |
2852 && !TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1)) | 2947 && !TREE_SIDE_EFFECTS (then_) |
2853 && !generic_expr_could_trap_p (TREE_OPERAND (*expr_p, 1)) | 2948 && !generic_expr_could_trap_p (then_) |
2854 && !TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 2)) | 2949 && !TREE_SIDE_EFFECTS (else_) |
2855 && !generic_expr_could_trap_p (TREE_OPERAND (*expr_p, 2))) | 2950 && !generic_expr_could_trap_p (else_)) |
2856 return gimplify_pure_cond_expr (expr_p, pre_p); | 2951 return gimplify_pure_cond_expr (expr_p, pre_p); |
2857 | 2952 |
2858 result = tmp = create_tmp_var (TREE_TYPE (expr), "iftmp"); | 2953 tmp = create_tmp_var (type, "iftmp"); |
2859 ret = GS_ALL_DONE; | 2954 result = tmp; |
2860 } | 2955 } |
2956 | |
2957 /* Otherwise, only create and copy references to the values. */ | |
2861 else | 2958 else |
2862 { | 2959 { |
2863 tree type = build_pointer_type (TREE_TYPE (expr)); | 2960 type = build_pointer_type (type); |
2864 | 2961 |
2865 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node) | 2962 if (!VOID_TYPE_P (TREE_TYPE (then_))) |
2866 TREE_OPERAND (expr, 1) = | 2963 then_ = build_fold_addr_expr_loc (loc, then_); |
2867 build_fold_addr_expr_loc (loc, TREE_OPERAND (expr, 1)); | 2964 |
2868 | 2965 if (!VOID_TYPE_P (TREE_TYPE (else_))) |
2869 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node) | 2966 else_ = build_fold_addr_expr_loc (loc, else_); |
2870 TREE_OPERAND (expr, 2) = | 2967 |
2871 build_fold_addr_expr_loc (loc, TREE_OPERAND (expr, 2)); | 2968 expr |
2969 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_); | |
2872 | 2970 |
2873 tmp = create_tmp_var (type, "iftmp"); | 2971 tmp = create_tmp_var (type, "iftmp"); |
2874 | |
2875 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (expr, 0), | |
2876 TREE_OPERAND (expr, 1), TREE_OPERAND (expr, 2)); | |
2877 | |
2878 result = build_fold_indirect_ref_loc (loc, tmp); | 2972 result = build_fold_indirect_ref_loc (loc, tmp); |
2879 } | 2973 } |
2880 | 2974 |
2881 /* Build the then clause, 't1 = a;'. But don't build an assignment | 2975 /* Build the new then clause, `tmp = then_;'. But don't build the |
2882 if this branch is void; in C++ it can be, if it's a throw. */ | 2976 assignment if the value is void; in C++ it can be if it's a throw. */ |
2883 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node) | 2977 if (!VOID_TYPE_P (TREE_TYPE (then_))) |
2884 TREE_OPERAND (expr, 1) | 2978 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_); |
2885 = build2 (MODIFY_EXPR, TREE_TYPE (tmp), tmp, TREE_OPERAND (expr, 1)); | 2979 |
2886 | 2980 /* Similarly, build the new else clause, `tmp = else_;'. */ |
2887 /* Build the else clause, 't1 = b;'. */ | 2981 if (!VOID_TYPE_P (TREE_TYPE (else_))) |
2888 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node) | 2982 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_); |
2889 TREE_OPERAND (expr, 2) | |
2890 = build2 (MODIFY_EXPR, TREE_TYPE (tmp), tmp, TREE_OPERAND (expr, 2)); | |
2891 | 2983 |
2892 TREE_TYPE (expr) = void_type_node; | 2984 TREE_TYPE (expr) = void_type_node; |
2893 recalculate_side_effects (expr); | 2985 recalculate_side_effects (expr); |
2894 | 2986 |
2895 /* Move the COND_EXPR to the prequeue. */ | 2987 /* Move the COND_EXPR to the prequeue. */ |
3716 | 3808 |
3717 if (size > 0 | 3809 if (size > 0 |
3718 && num_nonzero_elements > 1 | 3810 && num_nonzero_elements > 1 |
3719 && !can_move_by_pieces (size, align)) | 3811 && !can_move_by_pieces (size, align)) |
3720 { | 3812 { |
3721 tree new_tree; | |
3722 | |
3723 if (notify_temp_creation) | 3813 if (notify_temp_creation) |
3724 return GS_ERROR; | 3814 return GS_ERROR; |
3725 | 3815 |
3726 new_tree = create_tmp_var_raw (type, "C"); | 3816 walk_tree (&ctor, force_labels_r, NULL, NULL); |
3727 | 3817 ctor = tree_output_constant_def (ctor); |
3728 gimple_add_tmp_var (new_tree); | 3818 if (!useless_type_conversion_p (type, TREE_TYPE (ctor))) |
3729 TREE_STATIC (new_tree) = 1; | 3819 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor); |
3730 TREE_READONLY (new_tree) = 1; | 3820 TREE_OPERAND (*expr_p, 1) = ctor; |
3731 DECL_INITIAL (new_tree) = ctor; | |
3732 if (align > DECL_ALIGN (new_tree)) | |
3733 { | |
3734 DECL_ALIGN (new_tree) = align; | |
3735 DECL_USER_ALIGN (new_tree) = 1; | |
3736 } | |
3737 walk_tree (&DECL_INITIAL (new_tree), force_labels_r, NULL, NULL); | |
3738 | |
3739 TREE_OPERAND (*expr_p, 1) = new_tree; | |
3740 | 3821 |
3741 /* This is no longer an assignment of a CONSTRUCTOR, but | 3822 /* This is no longer an assignment of a CONSTRUCTOR, but |
3742 we still may have processing to do on the LHS. So | 3823 we still may have processing to do on the LHS. So |
3743 pretend we didn't do anything here to let that happen. */ | 3824 pretend we didn't do anything here to let that happen. */ |
3744 return GS_UNHANDLED; | 3825 return GS_UNHANDLED; |
3745 } | 3826 } |
3746 } | 3827 } |
3747 | 3828 |
3829 /* If the target is volatile and we have non-zero elements | |
3830 initialize the target from a temporary. */ | |
3831 if (TREE_THIS_VOLATILE (object) | |
3832 && !TREE_ADDRESSABLE (type) | |
3833 && num_nonzero_elements > 0) | |
3834 { | |
3835 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type), NULL); | |
3836 TREE_OPERAND (*expr_p, 0) = temp; | |
3837 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p), | |
3838 *expr_p, | |
3839 build2 (MODIFY_EXPR, void_type_node, | |
3840 object, temp)); | |
3841 return GS_OK; | |
3842 } | |
3843 | |
3748 if (notify_temp_creation) | 3844 if (notify_temp_creation) |
3749 return GS_OK; | 3845 return GS_OK; |
3750 | 3846 |
3751 /* If there are nonzero elements, pre-evaluate to capture elements | 3847 /* If there are nonzero elements and if needed, pre-evaluate to capture |
3752 overlapping with the lhs into temporaries. We must do this before | 3848 elements overlapping with the lhs into temporaries. We must do this |
3753 clearing to fetch the values before they are zeroed-out. */ | 3849 before clearing to fetch the values before they are zeroed-out. */ |
3754 if (num_nonzero_elements > 0) | 3850 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR) |
3755 { | 3851 { |
3756 preeval_data.lhs_base_decl = get_base_address (object); | 3852 preeval_data.lhs_base_decl = get_base_address (object); |
3757 if (!DECL_P (preeval_data.lhs_base_decl)) | 3853 if (!DECL_P (preeval_data.lhs_base_decl)) |
3758 preeval_data.lhs_base_decl = NULL; | 3854 preeval_data.lhs_base_decl = NULL; |
3759 preeval_data.lhs_alias_set = get_alias_set (object); | 3855 preeval_data.lhs_alias_set = get_alias_set (object); |
3918 { | 4014 { |
3919 tree type = TREE_TYPE (TREE_TYPE (t)); | 4015 tree type = TREE_TYPE (TREE_TYPE (t)); |
3920 tree sub = t; | 4016 tree sub = t; |
3921 tree subtype; | 4017 tree subtype; |
3922 | 4018 |
3923 STRIP_USELESS_TYPE_CONVERSION (sub); | 4019 STRIP_NOPS (sub); |
3924 subtype = TREE_TYPE (sub); | 4020 subtype = TREE_TYPE (sub); |
3925 if (!POINTER_TYPE_P (subtype)) | 4021 if (!POINTER_TYPE_P (subtype)) |
3926 return NULL_TREE; | 4022 return NULL_TREE; |
3927 | 4023 |
3928 if (TREE_CODE (sub) == ADDR_EXPR) | 4024 if (TREE_CODE (sub) == ADDR_EXPR) |
3933 if (useless_type_conversion_p (type, optype)) | 4029 if (useless_type_conversion_p (type, optype)) |
3934 return op; | 4030 return op; |
3935 | 4031 |
3936 /* *(foo *)&fooarray => fooarray[0] */ | 4032 /* *(foo *)&fooarray => fooarray[0] */ |
3937 if (TREE_CODE (optype) == ARRAY_TYPE | 4033 if (TREE_CODE (optype) == ARRAY_TYPE |
4034 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST | |
3938 && useless_type_conversion_p (type, TREE_TYPE (optype))) | 4035 && useless_type_conversion_p (type, TREE_TYPE (optype))) |
3939 { | 4036 { |
3940 tree type_domain = TYPE_DOMAIN (optype); | 4037 tree type_domain = TYPE_DOMAIN (optype); |
3941 tree min_val = size_zero_node; | 4038 tree min_val = size_zero_node; |
3942 if (type_domain && TYPE_MIN_VALUE (type_domain)) | 4039 if (type_domain && TYPE_MIN_VALUE (type_domain)) |
3943 min_val = TYPE_MIN_VALUE (type_domain); | 4040 min_val = TYPE_MIN_VALUE (type_domain); |
3944 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE); | 4041 if (TREE_CODE (min_val) == INTEGER_CST) |
4042 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE); | |
3945 } | 4043 } |
4044 /* *(foo *)&complexfoo => __real__ complexfoo */ | |
4045 else if (TREE_CODE (optype) == COMPLEX_TYPE | |
4046 && useless_type_conversion_p (type, TREE_TYPE (optype))) | |
4047 return fold_build1 (REALPART_EXPR, type, op); | |
4048 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */ | |
4049 else if (TREE_CODE (optype) == VECTOR_TYPE | |
4050 && useless_type_conversion_p (type, TREE_TYPE (optype))) | |
4051 { | |
4052 tree part_width = TYPE_SIZE (type); | |
4053 tree index = bitsize_int (0); | |
4054 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index); | |
4055 } | |
4056 } | |
4057 | |
4058 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */ | |
4059 if (TREE_CODE (sub) == POINTER_PLUS_EXPR | |
4060 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST) | |
4061 { | |
4062 tree op00 = TREE_OPERAND (sub, 0); | |
4063 tree op01 = TREE_OPERAND (sub, 1); | |
4064 tree op00type; | |
4065 | |
4066 STRIP_NOPS (op00); | |
4067 op00type = TREE_TYPE (op00); | |
4068 if (TREE_CODE (op00) == ADDR_EXPR | |
4069 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE | |
4070 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (op00type)))) | |
4071 { | |
4072 HOST_WIDE_INT offset = tree_low_cst (op01, 0); | |
4073 tree part_width = TYPE_SIZE (type); | |
4074 unsigned HOST_WIDE_INT part_widthi | |
4075 = tree_low_cst (part_width, 0) / BITS_PER_UNIT; | |
4076 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT; | |
4077 tree index = bitsize_int (indexi); | |
4078 if (offset / part_widthi | |
4079 <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type))) | |
4080 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0), | |
4081 part_width, index); | |
4082 } | |
4083 } | |
4084 | |
4085 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */ | |
4086 if (TREE_CODE (sub) == POINTER_PLUS_EXPR | |
4087 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST) | |
4088 { | |
4089 tree op00 = TREE_OPERAND (sub, 0); | |
4090 tree op01 = TREE_OPERAND (sub, 1); | |
4091 tree op00type; | |
4092 | |
4093 STRIP_NOPS (op00); | |
4094 op00type = TREE_TYPE (op00); | |
4095 if (TREE_CODE (op00) == ADDR_EXPR | |
4096 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE | |
4097 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (op00type)))) | |
4098 { | |
4099 tree size = TYPE_SIZE_UNIT (type); | |
4100 if (tree_int_cst_equal (size, op01)) | |
4101 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0)); | |
4102 } | |
3946 } | 4103 } |
3947 | 4104 |
3948 /* *(foo *)fooarrptr => (*fooarrptr)[0] */ | 4105 /* *(foo *)fooarrptr => (*fooarrptr)[0] */ |
3949 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE | 4106 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE |
4107 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST | |
3950 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype)))) | 4108 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype)))) |
3951 { | 4109 { |
3952 tree type_domain; | 4110 tree type_domain; |
3953 tree min_val = size_zero_node; | 4111 tree min_val = size_zero_node; |
3954 tree osub = sub; | 4112 tree osub = sub; |
3956 if (! sub) | 4114 if (! sub) |
3957 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub); | 4115 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub); |
3958 type_domain = TYPE_DOMAIN (TREE_TYPE (sub)); | 4116 type_domain = TYPE_DOMAIN (TREE_TYPE (sub)); |
3959 if (type_domain && TYPE_MIN_VALUE (type_domain)) | 4117 if (type_domain && TYPE_MIN_VALUE (type_domain)) |
3960 min_val = TYPE_MIN_VALUE (type_domain); | 4118 min_val = TYPE_MIN_VALUE (type_domain); |
3961 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE); | 4119 if (TREE_CODE (min_val) == INTEGER_CST) |
4120 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE); | |
3962 } | 4121 } |
3963 | 4122 |
3964 return NULL_TREE; | 4123 return NULL_TREE; |
3965 } | 4124 } |
3966 | 4125 |
3984 static enum gimplify_status | 4143 static enum gimplify_status |
3985 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p, | 4144 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p, |
3986 gimple_seq *pre_p, gimple_seq *post_p, | 4145 gimple_seq *pre_p, gimple_seq *post_p, |
3987 bool want_value) | 4146 bool want_value) |
3988 { | 4147 { |
3989 enum gimplify_status ret = GS_OK; | 4148 enum gimplify_status ret = GS_UNHANDLED; |
3990 | 4149 bool changed; |
3991 while (ret != GS_UNHANDLED) | 4150 |
3992 switch (TREE_CODE (*from_p)) | 4151 do |
3993 { | 4152 { |
3994 case VAR_DECL: | 4153 changed = false; |
3995 /* If we're assigning from a read-only variable initialized with | 4154 switch (TREE_CODE (*from_p)) |
3996 a constructor, do the direct assignment from the constructor, | 4155 { |
3997 but only if neither source nor target are volatile since this | 4156 case VAR_DECL: |
3998 latter assignment might end up being done on a per-field basis. */ | 4157 /* If we're assigning from a read-only variable initialized with |
3999 if (DECL_INITIAL (*from_p) | 4158 a constructor, do the direct assignment from the constructor, |
4000 && TREE_READONLY (*from_p) | 4159 but only if neither source nor target are volatile since this |
4001 && !TREE_THIS_VOLATILE (*from_p) | 4160 latter assignment might end up being done on a per-field basis. */ |
4002 && !TREE_THIS_VOLATILE (*to_p) | 4161 if (DECL_INITIAL (*from_p) |
4003 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR) | 4162 && TREE_READONLY (*from_p) |
4163 && !TREE_THIS_VOLATILE (*from_p) | |
4164 && !TREE_THIS_VOLATILE (*to_p) | |
4165 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR) | |
4166 { | |
4167 tree old_from = *from_p; | |
4168 enum gimplify_status subret; | |
4169 | |
4170 /* Move the constructor into the RHS. */ | |
4171 *from_p = unshare_expr (DECL_INITIAL (*from_p)); | |
4172 | |
4173 /* Let's see if gimplify_init_constructor will need to put | |
4174 it in memory. */ | |
4175 subret = gimplify_init_constructor (expr_p, NULL, NULL, | |
4176 false, true); | |
4177 if (subret == GS_ERROR) | |
4178 { | |
4179 /* If so, revert the change. */ | |
4180 *from_p = old_from; | |
4181 } | |
4182 else | |
4183 { | |
4184 ret = GS_OK; | |
4185 changed = true; | |
4186 } | |
4187 } | |
4188 break; | |
4189 case INDIRECT_REF: | |
4004 { | 4190 { |
4005 tree old_from = *from_p; | 4191 /* If we have code like |
4006 | 4192 |
4007 /* Move the constructor into the RHS. */ | 4193 *(const A*)(A*)&x |
4008 *from_p = unshare_expr (DECL_INITIAL (*from_p)); | |
4009 | |
4010 /* Let's see if gimplify_init_constructor will need to put | |
4011 it in memory. If so, revert the change. */ | |
4012 ret = gimplify_init_constructor (expr_p, NULL, NULL, false, true); | |
4013 if (ret == GS_ERROR) | |
4014 { | |
4015 *from_p = old_from; | |
4016 /* Fall through. */ | |
4017 } | |
4018 else | |
4019 { | |
4020 ret = GS_OK; | |
4021 break; | |
4022 } | |
4023 } | |
4024 ret = GS_UNHANDLED; | |
4025 break; | |
4026 case INDIRECT_REF: | |
4027 { | |
4028 /* If we have code like | |
4029 | |
4030 *(const A*)(A*)&x | |
4031 | 4194 |
4032 where the type of "x" is a (possibly cv-qualified variant | 4195 where the type of "x" is a (possibly cv-qualified variant |
4033 of "A"), treat the entire expression as identical to "x". | 4196 of "A"), treat the entire expression as identical to "x". |
4034 This kind of code arises in C++ when an object is bound | 4197 This kind of code arises in C++ when an object is bound |
4035 to a const reference, and if "x" is a TARGET_EXPR we want | 4198 to a const reference, and if "x" is a TARGET_EXPR we want |
4036 to take advantage of the optimization below. */ | 4199 to take advantage of the optimization below. */ |
4037 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0)); | 4200 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0)); |
4038 if (t) | 4201 if (t) |
4202 { | |
4203 *from_p = t; | |
4204 ret = GS_OK; | |
4205 changed = true; | |
4206 } | |
4207 break; | |
4208 } | |
4209 | |
4210 case TARGET_EXPR: | |
4211 { | |
4212 /* If we are initializing something from a TARGET_EXPR, strip the | |
4213 TARGET_EXPR and initialize it directly, if possible. This can't | |
4214 be done if the initializer is void, since that implies that the | |
4215 temporary is set in some non-trivial way. | |
4216 | |
4217 ??? What about code that pulls out the temp and uses it | |
4218 elsewhere? I think that such code never uses the TARGET_EXPR as | |
4219 an initializer. If I'm wrong, we'll die because the temp won't | |
4220 have any RTL. In that case, I guess we'll need to replace | |
4221 references somehow. */ | |
4222 tree init = TARGET_EXPR_INITIAL (*from_p); | |
4223 | |
4224 if (init | |
4225 && !VOID_TYPE_P (TREE_TYPE (init))) | |
4226 { | |
4227 *from_p = init; | |
4228 ret = GS_OK; | |
4229 changed = true; | |
4230 } | |
4231 } | |
4232 break; | |
4233 | |
4234 case COMPOUND_EXPR: | |
4235 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be | |
4236 caught. */ | |
4237 gimplify_compound_expr (from_p, pre_p, true); | |
4238 ret = GS_OK; | |
4239 changed = true; | |
4240 break; | |
4241 | |
4242 case CONSTRUCTOR: | |
4243 /* If we're initializing from a CONSTRUCTOR, break this into | |
4244 individual MODIFY_EXPRs. */ | |
4245 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value, | |
4246 false); | |
4247 | |
4248 case COND_EXPR: | |
4249 /* If we're assigning to a non-register type, push the assignment | |
4250 down into the branches. This is mandatory for ADDRESSABLE types, | |
4251 since we cannot generate temporaries for such, but it saves a | |
4252 copy in other cases as well. */ | |
4253 if (!is_gimple_reg_type (TREE_TYPE (*from_p))) | |
4039 { | 4254 { |
4040 *from_p = t; | 4255 /* This code should mirror the code in gimplify_cond_expr. */ |
4041 ret = GS_OK; | 4256 enum tree_code code = TREE_CODE (*expr_p); |
4257 tree cond = *from_p; | |
4258 tree result = *to_p; | |
4259 | |
4260 ret = gimplify_expr (&result, pre_p, post_p, | |
4261 is_gimple_lvalue, fb_lvalue); | |
4262 if (ret != GS_ERROR) | |
4263 ret = GS_OK; | |
4264 | |
4265 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node) | |
4266 TREE_OPERAND (cond, 1) | |
4267 = build2 (code, void_type_node, result, | |
4268 TREE_OPERAND (cond, 1)); | |
4269 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node) | |
4270 TREE_OPERAND (cond, 2) | |
4271 = build2 (code, void_type_node, unshare_expr (result), | |
4272 TREE_OPERAND (cond, 2)); | |
4273 | |
4274 TREE_TYPE (cond) = void_type_node; | |
4275 recalculate_side_effects (cond); | |
4276 | |
4277 if (want_value) | |
4278 { | |
4279 gimplify_and_add (cond, pre_p); | |
4280 *expr_p = unshare_expr (result); | |
4281 } | |
4282 else | |
4283 *expr_p = cond; | |
4284 return ret; | |
4042 } | 4285 } |
4043 else | |
4044 ret = GS_UNHANDLED; | |
4045 break; | 4286 break; |
4046 } | 4287 |
4047 | 4288 case CALL_EXPR: |
4048 case TARGET_EXPR: | 4289 /* For calls that return in memory, give *to_p as the CALL_EXPR's |
4049 { | 4290 return slot so that we don't generate a temporary. */ |
4050 /* If we are initializing something from a TARGET_EXPR, strip the | 4291 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p) |
4051 TARGET_EXPR and initialize it directly, if possible. This can't | 4292 && aggregate_value_p (*from_p, *from_p)) |
4052 be done if the initializer is void, since that implies that the | |
4053 temporary is set in some non-trivial way. | |
4054 | |
4055 ??? What about code that pulls out the temp and uses it | |
4056 elsewhere? I think that such code never uses the TARGET_EXPR as | |
4057 an initializer. If I'm wrong, we'll die because the temp won't | |
4058 have any RTL. In that case, I guess we'll need to replace | |
4059 references somehow. */ | |
4060 tree init = TARGET_EXPR_INITIAL (*from_p); | |
4061 | |
4062 if (init | |
4063 && !VOID_TYPE_P (TREE_TYPE (init))) | |
4064 { | 4293 { |
4065 *from_p = init; | 4294 bool use_target; |
4066 ret = GS_OK; | 4295 |
4296 if (!(rhs_predicate_for (*to_p))(*from_p)) | |
4297 /* If we need a temporary, *to_p isn't accurate. */ | |
4298 use_target = false; | |
4299 else if (TREE_CODE (*to_p) == RESULT_DECL | |
4300 && DECL_NAME (*to_p) == NULL_TREE | |
4301 && needs_to_live_in_memory (*to_p)) | |
4302 /* It's OK to use the return slot directly unless it's an NRV. */ | |
4303 use_target = true; | |
4304 else if (is_gimple_reg_type (TREE_TYPE (*to_p)) | |
4305 || (DECL_P (*to_p) && DECL_REGISTER (*to_p))) | |
4306 /* Don't force regs into memory. */ | |
4307 use_target = false; | |
4308 else if (TREE_CODE (*expr_p) == INIT_EXPR) | |
4309 /* It's OK to use the target directly if it's being | |
4310 initialized. */ | |
4311 use_target = true; | |
4312 else if (!is_gimple_non_addressable (*to_p)) | |
4313 /* Don't use the original target if it's already addressable; | |
4314 if its address escapes, and the called function uses the | |
4315 NRV optimization, a conforming program could see *to_p | |
4316 change before the called function returns; see c++/19317. | |
4317 When optimizing, the return_slot pass marks more functions | |
4318 as safe after we have escape info. */ | |
4319 use_target = false; | |
4320 else | |
4321 use_target = true; | |
4322 | |
4323 if (use_target) | |
4324 { | |
4325 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1; | |
4326 mark_addressable (*to_p); | |
4327 } | |
4067 } | 4328 } |
4068 else | 4329 break; |
4069 ret = GS_UNHANDLED; | 4330 |
4070 } | 4331 case WITH_SIZE_EXPR: |
4071 break; | 4332 /* Likewise for calls that return an aggregate of non-constant size, |
4072 | 4333 since we would not be able to generate a temporary at all. */ |
4073 case COMPOUND_EXPR: | 4334 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR) |
4074 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be | 4335 { |
4075 caught. */ | 4336 *from_p = TREE_OPERAND (*from_p, 0); |
4076 gimplify_compound_expr (from_p, pre_p, true); | 4337 /* We don't change ret in this case because the |
4077 ret = GS_OK; | 4338 WITH_SIZE_EXPR might have been added in |
4078 break; | 4339 gimplify_modify_expr, so returning GS_OK would lead to an |
4079 | 4340 infinite loop. */ |
4080 case CONSTRUCTOR: | 4341 changed = true; |
4081 /* If we're initializing from a CONSTRUCTOR, break this into | 4342 } |
4082 individual MODIFY_EXPRs. */ | 4343 break; |
4083 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value, | 4344 |
4084 false); | 4345 /* If we're initializing from a container, push the initialization |
4085 | 4346 inside it. */ |
4086 case COND_EXPR: | 4347 case CLEANUP_POINT_EXPR: |
4087 /* If we're assigning to a non-register type, push the assignment | 4348 case BIND_EXPR: |
4088 down into the branches. This is mandatory for ADDRESSABLE types, | 4349 case STATEMENT_LIST: |
4089 since we cannot generate temporaries for such, but it saves a | |
4090 copy in other cases as well. */ | |
4091 if (!is_gimple_reg_type (TREE_TYPE (*from_p))) | |
4092 { | 4350 { |
4093 /* This code should mirror the code in gimplify_cond_expr. */ | 4351 tree wrap = *from_p; |
4094 enum tree_code code = TREE_CODE (*expr_p); | 4352 tree t; |
4095 tree cond = *from_p; | 4353 |
4096 tree result = *to_p; | 4354 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval, |
4097 | 4355 fb_lvalue); |
4098 ret = gimplify_expr (&result, pre_p, post_p, | |
4099 is_gimple_lvalue, fb_lvalue); | |
4100 if (ret != GS_ERROR) | 4356 if (ret != GS_ERROR) |
4101 ret = GS_OK; | 4357 ret = GS_OK; |
4102 | 4358 |
4103 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node) | 4359 t = voidify_wrapper_expr (wrap, *expr_p); |
4104 TREE_OPERAND (cond, 1) | 4360 gcc_assert (t == *expr_p); |
4105 = build2 (code, void_type_node, result, | |
4106 TREE_OPERAND (cond, 1)); | |
4107 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node) | |
4108 TREE_OPERAND (cond, 2) | |
4109 = build2 (code, void_type_node, unshare_expr (result), | |
4110 TREE_OPERAND (cond, 2)); | |
4111 | |
4112 TREE_TYPE (cond) = void_type_node; | |
4113 recalculate_side_effects (cond); | |
4114 | 4361 |
4115 if (want_value) | 4362 if (want_value) |
4116 { | 4363 { |
4117 gimplify_and_add (cond, pre_p); | 4364 gimplify_and_add (wrap, pre_p); |
4118 *expr_p = unshare_expr (result); | 4365 *expr_p = unshare_expr (*to_p); |
4119 } | 4366 } |
4120 else | 4367 else |
4121 *expr_p = cond; | 4368 *expr_p = wrap; |
4122 return ret; | 4369 return GS_OK; |
4123 } | 4370 } |
4124 else | 4371 |
4125 ret = GS_UNHANDLED; | 4372 case COMPOUND_LITERAL_EXPR: |
4126 break; | |
4127 | |
4128 case CALL_EXPR: | |
4129 /* For calls that return in memory, give *to_p as the CALL_EXPR's | |
4130 return slot so that we don't generate a temporary. */ | |
4131 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p) | |
4132 && aggregate_value_p (*from_p, *from_p)) | |
4133 { | 4373 { |
4134 bool use_target; | 4374 tree complit = TREE_OPERAND (*expr_p, 1); |
4135 | 4375 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit); |
4136 if (!(rhs_predicate_for (*to_p))(*from_p)) | 4376 tree decl = DECL_EXPR_DECL (decl_s); |
4137 /* If we need a temporary, *to_p isn't accurate. */ | 4377 tree init = DECL_INITIAL (decl); |
4138 use_target = false; | 4378 |
4139 else if (TREE_CODE (*to_p) == RESULT_DECL | 4379 /* struct T x = (struct T) { 0, 1, 2 } can be optimized |
4140 && DECL_NAME (*to_p) == NULL_TREE | 4380 into struct T x = { 0, 1, 2 } if the address of the |
4141 && needs_to_live_in_memory (*to_p)) | 4381 compound literal has never been taken. */ |
4142 /* It's OK to use the return slot directly unless it's an NRV. */ | 4382 if (!TREE_ADDRESSABLE (complit) |
4143 use_target = true; | 4383 && !TREE_ADDRESSABLE (decl) |
4144 else if (is_gimple_reg_type (TREE_TYPE (*to_p)) | 4384 && init) |
4145 || (DECL_P (*to_p) && DECL_REGISTER (*to_p))) | |
4146 /* Don't force regs into memory. */ | |
4147 use_target = false; | |
4148 else if (TREE_CODE (*expr_p) == INIT_EXPR) | |
4149 /* It's OK to use the target directly if it's being | |
4150 initialized. */ | |
4151 use_target = true; | |
4152 else if (!is_gimple_non_addressable (*to_p)) | |
4153 /* Don't use the original target if it's already addressable; | |
4154 if its address escapes, and the called function uses the | |
4155 NRV optimization, a conforming program could see *to_p | |
4156 change before the called function returns; see c++/19317. | |
4157 When optimizing, the return_slot pass marks more functions | |
4158 as safe after we have escape info. */ | |
4159 use_target = false; | |
4160 else | |
4161 use_target = true; | |
4162 | |
4163 if (use_target) | |
4164 { | 4385 { |
4165 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1; | 4386 *expr_p = copy_node (*expr_p); |
4166 mark_addressable (*to_p); | 4387 TREE_OPERAND (*expr_p, 1) = init; |
4388 return GS_OK; | |
4167 } | 4389 } |
4168 } | 4390 } |
4169 | 4391 |
4170 ret = GS_UNHANDLED; | 4392 default: |
4171 break; | 4393 break; |
4172 | 4394 } |
4173 /* If we're initializing from a container, push the initialization | 4395 } |
4174 inside it. */ | 4396 while (changed); |
4175 case CLEANUP_POINT_EXPR: | |
4176 case BIND_EXPR: | |
4177 case STATEMENT_LIST: | |
4178 { | |
4179 tree wrap = *from_p; | |
4180 tree t; | |
4181 | |
4182 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval, | |
4183 fb_lvalue); | |
4184 if (ret != GS_ERROR) | |
4185 ret = GS_OK; | |
4186 | |
4187 t = voidify_wrapper_expr (wrap, *expr_p); | |
4188 gcc_assert (t == *expr_p); | |
4189 | |
4190 if (want_value) | |
4191 { | |
4192 gimplify_and_add (wrap, pre_p); | |
4193 *expr_p = unshare_expr (*to_p); | |
4194 } | |
4195 else | |
4196 *expr_p = wrap; | |
4197 return GS_OK; | |
4198 } | |
4199 | |
4200 case COMPOUND_LITERAL_EXPR: | |
4201 { | |
4202 tree complit = TREE_OPERAND (*expr_p, 1); | |
4203 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit); | |
4204 tree decl = DECL_EXPR_DECL (decl_s); | |
4205 tree init = DECL_INITIAL (decl); | |
4206 | |
4207 /* struct T x = (struct T) { 0, 1, 2 } can be optimized | |
4208 into struct T x = { 0, 1, 2 } if the address of the | |
4209 compound literal has never been taken. */ | |
4210 if (!TREE_ADDRESSABLE (complit) | |
4211 && !TREE_ADDRESSABLE (decl) | |
4212 && init) | |
4213 { | |
4214 *expr_p = copy_node (*expr_p); | |
4215 TREE_OPERAND (*expr_p, 1) = init; | |
4216 return GS_OK; | |
4217 } | |
4218 } | |
4219 | |
4220 default: | |
4221 ret = GS_UNHANDLED; | |
4222 break; | |
4223 } | |
4224 | 4397 |
4225 return ret; | 4398 return ret; |
4226 } | 4399 } |
4227 | 4400 |
4228 | 4401 |
4405 if (TREE_CODE (*from_p) == CALL_EXPR) | 4578 if (TREE_CODE (*from_p) == CALL_EXPR) |
4406 { | 4579 { |
4407 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL | 4580 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL |
4408 instead of a GIMPLE_ASSIGN. */ | 4581 instead of a GIMPLE_ASSIGN. */ |
4409 assign = gimple_build_call_from_tree (*from_p); | 4582 assign = gimple_build_call_from_tree (*from_p); |
4410 gimple_call_set_lhs (assign, *to_p); | 4583 if (!gimple_call_noreturn_p (assign)) |
4584 gimple_call_set_lhs (assign, *to_p); | |
4411 } | 4585 } |
4412 else | 4586 else |
4413 { | 4587 { |
4414 assign = gimple_build_assign (*to_p, *from_p); | 4588 assign = gimple_build_assign (*to_p, *from_p); |
4415 gimple_set_location (assign, EXPR_LOCATION (*expr_p)); | 4589 gimple_set_location (assign, EXPR_LOCATION (*expr_p)); |
5335 } | 5509 } |
5336 | 5510 |
5337 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags); | 5511 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags); |
5338 } | 5512 } |
5339 | 5513 |
5514 /* Notice a threadprivate variable DECL used in OpenMP context CTX. | |
5515 This just prints out diagnostics about threadprivate variable uses | |
5516 in untied tasks. If DECL2 is non-NULL, prevent this warning | |
5517 on that variable. */ | |
5518 | |
5519 static bool | |
5520 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl, | |
5521 tree decl2) | |
5522 { | |
5523 splay_tree_node n; | |
5524 | |
5525 if (ctx->region_type != ORT_UNTIED_TASK) | |
5526 return false; | |
5527 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl); | |
5528 if (n == NULL) | |
5529 { | |
5530 error ("threadprivate variable %qE used in untied task", DECL_NAME (decl)); | |
5531 error_at (ctx->location, "enclosing task"); | |
5532 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0); | |
5533 } | |
5534 if (decl2) | |
5535 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0); | |
5536 return false; | |
5537 } | |
5538 | |
5340 /* Record the fact that DECL was used within the OpenMP context CTX. | 5539 /* Record the fact that DECL was used within the OpenMP context CTX. |
5341 IN_CODE is true when real code uses DECL, and false when we should | 5540 IN_CODE is true when real code uses DECL, and false when we should |
5342 merely emit default(none) errors. Return true if DECL is going to | 5541 merely emit default(none) errors. Return true if DECL is going to |
5343 be remapped and thus DECL shouldn't be gimplified into its | 5542 be remapped and thus DECL shouldn't be gimplified into its |
5344 DECL_VALUE_EXPR (if any). */ | 5543 DECL_VALUE_EXPR (if any). */ |
5355 | 5554 |
5356 /* Threadprivate variables are predetermined. */ | 5555 /* Threadprivate variables are predetermined. */ |
5357 if (is_global_var (decl)) | 5556 if (is_global_var (decl)) |
5358 { | 5557 { |
5359 if (DECL_THREAD_LOCAL_P (decl)) | 5558 if (DECL_THREAD_LOCAL_P (decl)) |
5360 return false; | 5559 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE); |
5361 | 5560 |
5362 if (DECL_HAS_VALUE_EXPR_P (decl)) | 5561 if (DECL_HAS_VALUE_EXPR_P (decl)) |
5363 { | 5562 { |
5364 tree value = get_base_address (DECL_VALUE_EXPR (decl)); | 5563 tree value = get_base_address (DECL_VALUE_EXPR (decl)); |
5365 | 5564 |
5366 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value)) | 5565 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value)) |
5367 return false; | 5566 return omp_notice_threadprivate_variable (ctx, decl, value); |
5368 } | 5567 } |
5369 } | 5568 } |
5370 | 5569 |
5371 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl); | 5570 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl); |
5372 if (n == NULL) | 5571 if (n == NULL) |
5388 switch (default_kind) | 5587 switch (default_kind) |
5389 { | 5588 { |
5390 case OMP_CLAUSE_DEFAULT_NONE: | 5589 case OMP_CLAUSE_DEFAULT_NONE: |
5391 error ("%qE not specified in enclosing parallel", | 5590 error ("%qE not specified in enclosing parallel", |
5392 DECL_NAME (decl)); | 5591 DECL_NAME (decl)); |
5393 error_at (ctx->location, "enclosing parallel"); | 5592 if ((ctx->region_type & ORT_TASK) != 0) |
5593 error_at (ctx->location, "enclosing task"); | |
5594 else | |
5595 error_at (ctx->location, "enclosing parallel"); | |
5394 /* FALLTHRU */ | 5596 /* FALLTHRU */ |
5395 case OMP_CLAUSE_DEFAULT_SHARED: | 5597 case OMP_CLAUSE_DEFAULT_SHARED: |
5396 flags |= GOVD_SHARED; | 5598 flags |= GOVD_SHARED; |
5397 break; | 5599 break; |
5398 case OMP_CLAUSE_DEFAULT_PRIVATE: | 5600 case OMP_CLAUSE_DEFAULT_PRIVATE: |
5401 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE: | 5603 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE: |
5402 flags |= GOVD_FIRSTPRIVATE; | 5604 flags |= GOVD_FIRSTPRIVATE; |
5403 break; | 5605 break; |
5404 case OMP_CLAUSE_DEFAULT_UNSPECIFIED: | 5606 case OMP_CLAUSE_DEFAULT_UNSPECIFIED: |
5405 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */ | 5607 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */ |
5406 gcc_assert (ctx->region_type == ORT_TASK); | 5608 gcc_assert ((ctx->region_type & ORT_TASK) != 0); |
5407 if (ctx->outer_context) | 5609 if (ctx->outer_context) |
5408 omp_notice_variable (ctx->outer_context, decl, in_code); | 5610 omp_notice_variable (ctx->outer_context, decl, in_code); |
5409 for (octx = ctx->outer_context; octx; octx = octx->outer_context) | 5611 for (octx = ctx->outer_context; octx; octx = octx->outer_context) |
5410 { | 5612 { |
5411 splay_tree_node n2; | 5613 splay_tree_node n2; |
5904 tree expr = *expr_p; | 6106 tree expr = *expr_p; |
5905 gimple g; | 6107 gimple g; |
5906 gimple_seq body = NULL; | 6108 gimple_seq body = NULL; |
5907 struct gimplify_ctx gctx; | 6109 struct gimplify_ctx gctx; |
5908 | 6110 |
5909 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p, ORT_TASK); | 6111 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p, |
6112 find_omp_clause (OMP_TASK_CLAUSES (expr), | |
6113 OMP_CLAUSE_UNTIED) | |
6114 ? ORT_UNTIED_TASK : ORT_TASK); | |
5910 | 6115 |
5911 push_gimplify_context (&gctx); | 6116 push_gimplify_context (&gctx); |
5912 | 6117 |
5913 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body); | 6118 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body); |
5914 if (gimple_code (g) == GIMPLE_BIND) | 6119 if (gimple_code (g) == GIMPLE_BIND) |
6228 tree addr = TREE_OPERAND (*expr_p, 0); | 6433 tree addr = TREE_OPERAND (*expr_p, 0); |
6229 tree rhs = TREE_OPERAND (*expr_p, 1); | 6434 tree rhs = TREE_OPERAND (*expr_p, 1); |
6230 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr))); | 6435 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr))); |
6231 tree tmp_load; | 6436 tree tmp_load; |
6232 | 6437 |
6233 tmp_load = create_tmp_var (type, NULL); | 6438 tmp_load = create_tmp_reg (type, NULL); |
6234 if (TREE_CODE (type) == COMPLEX_TYPE || TREE_CODE (type) == VECTOR_TYPE) | |
6235 DECL_GIMPLE_REG_P (tmp_load) = 1; | |
6236 if (goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0) | 6439 if (goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0) |
6237 return GS_ERROR; | 6440 return GS_ERROR; |
6238 | 6441 |
6239 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue) | 6442 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue) |
6240 != GS_ALL_DONE) | 6443 != GS_ALL_DONE) |
6436 continue; | 6639 continue; |
6437 } | 6640 } |
6438 else if (ret != GS_UNHANDLED) | 6641 else if (ret != GS_UNHANDLED) |
6439 break; | 6642 break; |
6440 | 6643 |
6441 ret = GS_OK; | 6644 /* Make sure that all the cases set 'ret' appropriately. */ |
6645 ret = GS_UNHANDLED; | |
6442 switch (TREE_CODE (*expr_p)) | 6646 switch (TREE_CODE (*expr_p)) |
6443 { | 6647 { |
6444 /* First deal with the special cases. */ | 6648 /* First deal with the special cases. */ |
6445 | 6649 |
6446 case POSTINCREMENT_EXPR: | 6650 case POSTINCREMENT_EXPR: |
6470 required. */ | 6674 required. */ |
6471 if (fallback == fb_lvalue) | 6675 if (fallback == fb_lvalue) |
6472 { | 6676 { |
6473 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p); | 6677 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p); |
6474 mark_addressable (*expr_p); | 6678 mark_addressable (*expr_p); |
6679 ret = GS_OK; | |
6475 } | 6680 } |
6476 break; | 6681 break; |
6477 | 6682 |
6478 case CALL_EXPR: | 6683 case CALL_EXPR: |
6479 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none); | 6684 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none); |
6484 required. */ | 6689 required. */ |
6485 if (fallback == fb_lvalue) | 6690 if (fallback == fb_lvalue) |
6486 { | 6691 { |
6487 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p); | 6692 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p); |
6488 mark_addressable (*expr_p); | 6693 mark_addressable (*expr_p); |
6694 ret = GS_OK; | |
6489 } | 6695 } |
6490 break; | 6696 break; |
6491 | 6697 |
6492 case TREE_LIST: | 6698 case TREE_LIST: |
6493 gcc_unreachable (); | 6699 gcc_unreachable (); |
6545 || fallback == fb_none) | 6751 || fallback == fb_none) |
6546 { | 6752 { |
6547 /* Just strip a conversion to void (or in void context) and | 6753 /* Just strip a conversion to void (or in void context) and |
6548 try again. */ | 6754 try again. */ |
6549 *expr_p = TREE_OPERAND (*expr_p, 0); | 6755 *expr_p = TREE_OPERAND (*expr_p, 0); |
6756 ret = GS_OK; | |
6550 break; | 6757 break; |
6551 } | 6758 } |
6552 | 6759 |
6553 ret = gimplify_conversion (expr_p); | 6760 ret = gimplify_conversion (expr_p); |
6554 if (ret == GS_ERROR) | 6761 if (ret == GS_ERROR) |
6565 break; | 6772 break; |
6566 | 6773 |
6567 case INDIRECT_REF: | 6774 case INDIRECT_REF: |
6568 *expr_p = fold_indirect_ref_loc (input_location, *expr_p); | 6775 *expr_p = fold_indirect_ref_loc (input_location, *expr_p); |
6569 if (*expr_p != save_expr) | 6776 if (*expr_p != save_expr) |
6570 break; | 6777 { |
6778 ret = GS_OK; | |
6779 break; | |
6780 } | |
6571 /* else fall through. */ | 6781 /* else fall through. */ |
6572 case ALIGN_INDIRECT_REF: | 6782 case ALIGN_INDIRECT_REF: |
6573 case MISALIGNED_INDIRECT_REF: | 6783 case MISALIGNED_INDIRECT_REF: |
6574 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, | 6784 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, |
6575 is_gimple_reg, fb_rvalue); | 6785 is_gimple_reg, fb_rvalue); |
6592 value. */ | 6802 value. */ |
6593 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */ | 6803 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */ |
6594 if (fallback & fb_lvalue) | 6804 if (fallback & fb_lvalue) |
6595 ret = GS_ALL_DONE; | 6805 ret = GS_ALL_DONE; |
6596 else | 6806 else |
6597 *expr_p = DECL_INITIAL (*expr_p); | 6807 { |
6808 *expr_p = DECL_INITIAL (*expr_p); | |
6809 ret = GS_OK; | |
6810 } | |
6598 break; | 6811 break; |
6599 | 6812 |
6600 case DECL_EXPR: | 6813 case DECL_EXPR: |
6601 ret = gimplify_decl_expr (expr_p, pre_p); | 6814 ret = gimplify_decl_expr (expr_p, pre_p); |
6602 break; | 6815 break; |
6627 if (ret == GS_ERROR) | 6840 if (ret == GS_ERROR) |
6628 break; | 6841 break; |
6629 } | 6842 } |
6630 gimplify_seq_add_stmt (pre_p, | 6843 gimplify_seq_add_stmt (pre_p, |
6631 gimple_build_goto (GOTO_DESTINATION (*expr_p))); | 6844 gimple_build_goto (GOTO_DESTINATION (*expr_p))); |
6845 ret = GS_ALL_DONE; | |
6632 break; | 6846 break; |
6633 | 6847 |
6634 case PREDICT_EXPR: | 6848 case PREDICT_EXPR: |
6635 gimplify_seq_add_stmt (pre_p, | 6849 gimplify_seq_add_stmt (pre_p, |
6636 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p), | 6850 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p), |
6669 ix++) | 6883 ix++) |
6670 if (TREE_SIDE_EFFECTS (ce->value)) | 6884 if (TREE_SIDE_EFFECTS (ce->value)) |
6671 append_to_statement_list (ce->value, &temp); | 6885 append_to_statement_list (ce->value, &temp); |
6672 | 6886 |
6673 *expr_p = temp; | 6887 *expr_p = temp; |
6674 ret = GS_OK; | 6888 ret = temp ? GS_OK : GS_ALL_DONE; |
6675 } | 6889 } |
6676 /* C99 code may assign to an array in a constructed | 6890 /* C99 code may assign to an array in a constructed |
6677 structure or union, and this has undefined behavior only | 6891 structure or union, and this has undefined behavior only |
6678 on execution, so create a temporary if an lvalue is | 6892 on execution, so create a temporary if an lvalue is |
6679 required. */ | 6893 required. */ |
6680 else if (fallback == fb_lvalue) | 6894 else if (fallback == fb_lvalue) |
6681 { | 6895 { |
6682 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p); | 6896 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p); |
6683 mark_addressable (*expr_p); | 6897 mark_addressable (*expr_p); |
6898 ret = GS_OK; | |
6684 } | 6899 } |
6685 else | 6900 else |
6686 ret = GS_ALL_DONE; | 6901 ret = GS_ALL_DONE; |
6687 break; | 6902 break; |
6688 | 6903 |
6826 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, | 7041 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, |
6827 post_p == &internal_post ? NULL : post_p, | 7042 post_p == &internal_post ? NULL : post_p, |
6828 gimple_test_f, fallback); | 7043 gimple_test_f, fallback); |
6829 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p, | 7044 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p, |
6830 is_gimple_val, fb_rvalue); | 7045 is_gimple_val, fb_rvalue); |
7046 ret = GS_ALL_DONE; | |
6831 } | 7047 } |
6832 break; | 7048 break; |
6833 | 7049 |
6834 case VAR_DECL: | 7050 case VAR_DECL: |
6835 case PARM_DECL: | 7051 case PARM_DECL: |
6913 (EXPR_LOCATION (*expr_p), | 7129 (EXPR_LOCATION (*expr_p), |
6914 TREE_OPERAND (*expr_p, 0), TREE_OPERAND (*expr_p, 1), | 7130 TREE_OPERAND (*expr_p, 0), TREE_OPERAND (*expr_p, 1), |
6915 TREE_TYPE (*expr_p)))) | 7131 TREE_TYPE (*expr_p)))) |
6916 { | 7132 { |
6917 *expr_p = tmp; | 7133 *expr_p = tmp; |
7134 ret = GS_OK; | |
6918 break; | 7135 break; |
6919 } | 7136 } |
6920 /* Convert (void *)&a + 4 into (void *)&a[1]. */ | 7137 /* Convert (void *)&a + 4 into (void *)&a[1]. */ |
6921 if (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == NOP_EXPR | 7138 if (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == NOP_EXPR |
6922 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST | 7139 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST |
6928 TREE_OPERAND (*expr_p, 1), | 7145 TREE_OPERAND (*expr_p, 1), |
6929 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p, 0), | 7146 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p, 0), |
6930 0))))) | 7147 0))))) |
6931 { | 7148 { |
6932 *expr_p = fold_convert (TREE_TYPE (*expr_p), tmp); | 7149 *expr_p = fold_convert (TREE_TYPE (*expr_p), tmp); |
7150 ret = GS_OK; | |
6933 break; | 7151 break; |
6934 } | 7152 } |
6935 /* FALLTHRU */ | 7153 /* FALLTHRU */ |
6936 | 7154 |
6937 default: | 7155 default: |
6997 | 7215 |
6998 dont_recalculate: | 7216 dont_recalculate: |
6999 break; | 7217 break; |
7000 } | 7218 } |
7001 | 7219 |
7002 /* If we replaced *expr_p, gimplify again. */ | 7220 gcc_assert (*expr_p || ret != GS_OK); |
7003 if (ret == GS_OK && (*expr_p == NULL || *expr_p == save_expr)) | |
7004 ret = GS_ALL_DONE; | |
7005 } | 7221 } |
7006 while (ret == GS_OK); | 7222 while (ret == GS_OK); |
7007 | 7223 |
7008 /* If we encountered an error_mark somewhere nested inside, either | 7224 /* If we encountered an error_mark somewhere nested inside, either |
7009 stub out the statement or propagate the error back out. */ | 7225 stub out the statement or propagate the error back out. */ |
7260 | 7476 |
7261 case ARRAY_TYPE: | 7477 case ARRAY_TYPE: |
7262 /* These types may not have declarations, so handle them here. */ | 7478 /* These types may not have declarations, so handle them here. */ |
7263 gimplify_type_sizes (TREE_TYPE (type), list_p); | 7479 gimplify_type_sizes (TREE_TYPE (type), list_p); |
7264 gimplify_type_sizes (TYPE_DOMAIN (type), list_p); | 7480 gimplify_type_sizes (TYPE_DOMAIN (type), list_p); |
7265 /* When not optimizing, ensure VLA bounds aren't removed. */ | 7481 /* Ensure VLA bounds aren't removed, for -O0 they should be variables |
7266 if (!optimize | 7482 with assigned stack slots, for -O1+ -g they should be tracked |
7267 && TYPE_DOMAIN (type) | 7483 by VTA. */ |
7484 if (TYPE_DOMAIN (type) | |
7268 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type))) | 7485 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type))) |
7269 { | 7486 { |
7270 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type)); | 7487 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type)); |
7271 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t)) | 7488 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t)) |
7272 DECL_IGNORED_P (t) = 0; | 7489 DECL_IGNORED_P (t) = 0; |
7427 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL); | 7644 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL); |
7428 | 7645 |
7429 *body_p = NULL_TREE; | 7646 *body_p = NULL_TREE; |
7430 | 7647 |
7431 /* If we had callee-copies statements, insert them at the beginning | 7648 /* If we had callee-copies statements, insert them at the beginning |
7432 of the function. */ | 7649 of the function and clear DECL_VALUE_EXPR_P on the parameters. */ |
7433 if (!gimple_seq_empty_p (parm_stmts)) | 7650 if (!gimple_seq_empty_p (parm_stmts)) |
7434 { | 7651 { |
7652 tree parm; | |
7653 | |
7435 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind)); | 7654 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind)); |
7436 gimple_bind_set_body (outer_bind, parm_stmts); | 7655 gimple_bind_set_body (outer_bind, parm_stmts); |
7656 | |
7657 for (parm = DECL_ARGUMENTS (current_function_decl); | |
7658 parm; parm = TREE_CHAIN (parm)) | |
7659 if (DECL_HAS_VALUE_EXPR_P (parm)) | |
7660 { | |
7661 DECL_HAS_VALUE_EXPR_P (parm) = 0; | |
7662 DECL_IGNORED_P (parm) = 0; | |
7663 } | |
7437 } | 7664 } |
7438 | 7665 |
7439 if (nonlocal_vlas) | 7666 if (nonlocal_vlas) |
7440 { | 7667 { |
7441 pointer_set_destroy (nonlocal_vlas); | 7668 pointer_set_destroy (nonlocal_vlas); |
7694 need_temp = true; | 7921 need_temp = true; |
7695 } | 7922 } |
7696 } | 7923 } |
7697 if (need_temp) | 7924 if (need_temp) |
7698 { | 7925 { |
7699 tree temp = create_tmp_var (TREE_TYPE (lhs), NULL); | 7926 tree temp = create_tmp_reg (TREE_TYPE (lhs), NULL); |
7700 | 7927 |
7701 if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE | |
7702 || TREE_CODE (TREE_TYPE (lhs)) == VECTOR_TYPE) | |
7703 DECL_GIMPLE_REG_P (temp) = 1; | |
7704 if (TREE_CODE (orig_lhs) == SSA_NAME) | 7928 if (TREE_CODE (orig_lhs) == SSA_NAME) |
7705 orig_lhs = SSA_NAME_VAR (orig_lhs); | 7929 orig_lhs = SSA_NAME_VAR (orig_lhs); |
7706 | 7930 |
7707 if (gimple_in_ssa_p (cfun)) | 7931 if (gimple_in_ssa_p (cfun)) |
7708 temp = make_ssa_name (temp, NULL); | 7932 temp = make_ssa_name (temp, NULL); |