Mercurial > hg > CbC > CbC_gcc
comparison gcc/cp/lambda.c @ 131:84e7813d76e9
gcc-8.2
author | mir3636 |
---|---|
date | Thu, 25 Oct 2018 07:37:49 +0900 |
parents | 04ced10e8804 |
children | 1830386684a0 |
comparison
equal
deleted
inserted
replaced
111:04ced10e8804 | 131:84e7813d76e9 |
---|---|
1 /* Perform the semantic phase of lambda parsing, i.e., the process of | 1 /* Perform the semantic phase of lambda parsing, i.e., the process of |
2 building tree structure, checking semantic consistency, and | 2 building tree structure, checking semantic consistency, and |
3 building RTL. These routines are used both during actual parsing | 3 building RTL. These routines are used both during actual parsing |
4 and during the instantiation of template functions. | 4 and during the instantiation of template functions. |
5 | 5 |
6 Copyright (C) 1998-2017 Free Software Foundation, Inc. | 6 Copyright (C) 1998-2018 Free Software Foundation, Inc. |
7 | 7 |
8 This file is part of GCC. | 8 This file is part of GCC. |
9 | 9 |
10 GCC is free software; you can redistribute it and/or modify it | 10 GCC is free software; you can redistribute it and/or modify it |
11 under the terms of the GNU General Public License as published by | 11 under the terms of the GNU General Public License as published by |
28 #include "stringpool.h" | 28 #include "stringpool.h" |
29 #include "cgraph.h" | 29 #include "cgraph.h" |
30 #include "tree-iterator.h" | 30 #include "tree-iterator.h" |
31 #include "toplev.h" | 31 #include "toplev.h" |
32 #include "gimplify.h" | 32 #include "gimplify.h" |
33 #include "cp-cilkplus.h" | |
34 | 33 |
35 /* Constructor for a lambda expression. */ | 34 /* Constructor for a lambda expression. */ |
36 | 35 |
37 tree | 36 tree |
38 build_lambda_expr (void) | 37 build_lambda_expr (void) |
89 do some magic to make it work here. */ | 88 do some magic to make it work here. */ |
90 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE) | 89 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE) |
91 val = build_array_copy (val); | 90 val = build_array_copy (val); |
92 else if (DECL_NORMAL_CAPTURE_P (field) | 91 else if (DECL_NORMAL_CAPTURE_P (field) |
93 && !DECL_VLA_CAPTURE_P (field) | 92 && !DECL_VLA_CAPTURE_P (field) |
94 && TREE_CODE (TREE_TYPE (field)) != REFERENCE_TYPE) | 93 && !TYPE_REF_P (TREE_TYPE (field))) |
95 { | 94 { |
96 /* "the entities that are captured by copy are used to | 95 /* "the entities that are captured by copy are used to |
97 direct-initialize each corresponding non-static data | 96 direct-initialize each corresponding non-static data |
98 member of the resulting closure object." | 97 member of the resulting closure object." |
99 | 98 |
199 gcc_assert (LAMBDA_TYPE_P (type)); | 198 gcc_assert (LAMBDA_TYPE_P (type)); |
200 /* Don't let debug_tree cause instantiation. */ | 199 /* Don't let debug_tree cause instantiation. */ |
201 if (CLASSTYPE_TEMPLATE_INSTANTIATION (type) | 200 if (CLASSTYPE_TEMPLATE_INSTANTIATION (type) |
202 && !COMPLETE_OR_OPEN_TYPE_P (type)) | 201 && !COMPLETE_OR_OPEN_TYPE_P (type)) |
203 return NULL_TREE; | 202 return NULL_TREE; |
204 lambda = lookup_member (type, cp_operator_id (CALL_EXPR), | 203 lambda = lookup_member (type, call_op_identifier, |
205 /*protect=*/0, /*want_type=*/false, | 204 /*protect=*/0, /*want_type=*/false, |
206 tf_warning_or_error); | 205 tf_warning_or_error); |
207 if (lambda) | 206 if (lambda) |
208 lambda = STRIP_TEMPLATE (get_first_fn (lambda)); | 207 lambda = STRIP_TEMPLATE (get_first_fn (lambda)); |
209 return lambda; | 208 return lambda; |
243 } | 242 } |
244 else | 243 else |
245 { | 244 { |
246 type = non_reference (unlowered_expr_type (expr)); | 245 type = non_reference (unlowered_expr_type (expr)); |
247 | 246 |
248 if (!is_this && by_reference_p) | 247 if (!is_this |
248 && (by_reference_p || TREE_CODE (type) == FUNCTION_TYPE)) | |
249 type = build_reference_type (type); | 249 type = build_reference_type (type); |
250 } | 250 } |
251 | 251 |
252 return type; | 252 return type; |
253 } | 253 } |
259 is_capture_proxy (tree decl) | 259 is_capture_proxy (tree decl) |
260 { | 260 { |
261 return (VAR_P (decl) | 261 return (VAR_P (decl) |
262 && DECL_HAS_VALUE_EXPR_P (decl) | 262 && DECL_HAS_VALUE_EXPR_P (decl) |
263 && !DECL_ANON_UNION_VAR_P (decl) | 263 && !DECL_ANON_UNION_VAR_P (decl) |
264 && !DECL_DECOMPOSITION_P (decl) | |
264 && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl))); | 265 && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl))); |
265 } | 266 } |
266 | 267 |
267 /* Returns true iff DECL is a capture proxy for a normal capture | 268 /* Returns true iff DECL is a capture proxy for a normal capture |
268 (i.e. without explicit initializer). */ | 269 (i.e. without explicit initializer). */ |
286 if (TREE_CODE (val) == ADDR_EXPR) | 287 if (TREE_CODE (val) == ADDR_EXPR) |
287 val = TREE_OPERAND (val, 0); | 288 val = TREE_OPERAND (val, 0); |
288 gcc_assert (TREE_CODE (val) == COMPONENT_REF); | 289 gcc_assert (TREE_CODE (val) == COMPONENT_REF); |
289 val = TREE_OPERAND (val, 1); | 290 val = TREE_OPERAND (val, 1); |
290 return DECL_NORMAL_CAPTURE_P (val); | 291 return DECL_NORMAL_CAPTURE_P (val); |
292 } | |
293 | |
294 /* Returns true iff DECL is a capture proxy for a normal capture | |
295 of a constant variable. */ | |
296 | |
297 bool | |
298 is_constant_capture_proxy (tree decl) | |
299 { | |
300 if (is_normal_capture_proxy (decl)) | |
301 return decl_constant_var_p (DECL_CAPTURED_VARIABLE (decl)); | |
302 return false; | |
291 } | 303 } |
292 | 304 |
293 /* VAR is a capture proxy created by build_capture_proxy; add it to the | 305 /* VAR is a capture proxy created by build_capture_proxy; add it to the |
294 current function, which is the operator() for the appropriate lambda. */ | 306 current function, which is the operator() for the appropriate lambda. */ |
295 | 307 |
374 | 386 |
375 /* MEMBER is a capture field in a lambda closure class. Now that we're | 387 /* MEMBER is a capture field in a lambda closure class. Now that we're |
376 inside the operator(), build a placeholder var for future lookups and | 388 inside the operator(), build a placeholder var for future lookups and |
377 debugging. */ | 389 debugging. */ |
378 | 390 |
379 tree | 391 static tree |
380 build_capture_proxy (tree member, tree init) | 392 build_capture_proxy (tree member, tree init) |
381 { | 393 { |
382 tree var, object, fn, closure, name, lam, type; | 394 tree var, object, fn, closure, name, lam, type; |
383 | 395 |
384 if (PACK_EXPANSION_P (member)) | 396 if (PACK_EXPANSION_P (member)) |
397 /* Remove the __ inserted by add_capture. */ | 409 /* Remove the __ inserted by add_capture. */ |
398 name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2); | 410 name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2); |
399 | 411 |
400 type = lambda_proxy_type (object); | 412 type = lambda_proxy_type (object); |
401 | 413 |
402 if (name == this_identifier && !POINTER_TYPE_P (type)) | 414 if (name == this_identifier && !INDIRECT_TYPE_P (type)) |
403 { | 415 { |
404 type = build_pointer_type (type); | 416 type = build_pointer_type (type); |
405 type = cp_build_qualified_type (type, TYPE_QUAL_CONST); | 417 type = cp_build_qualified_type (type, TYPE_QUAL_CONST); |
406 object = build_fold_addr_expr_with_type (object, type); | 418 object = build_fold_addr_expr_with_type (object, type); |
407 } | 419 } |
418 type = build_reference_type (type); | 430 type = build_reference_type (type); |
419 REFERENCE_VLA_OK (type) = true; | 431 REFERENCE_VLA_OK (type) = true; |
420 object = convert (type, ptr); | 432 object = convert (type, ptr); |
421 } | 433 } |
422 | 434 |
435 complete_type (type); | |
436 | |
423 var = build_decl (input_location, VAR_DECL, name, type); | 437 var = build_decl (input_location, VAR_DECL, name, type); |
424 SET_DECL_VALUE_EXPR (var, object); | 438 SET_DECL_VALUE_EXPR (var, object); |
425 DECL_HAS_VALUE_EXPR_P (var) = 1; | 439 DECL_HAS_VALUE_EXPR_P (var) = 1; |
426 DECL_ARTIFICIAL (var) = 1; | 440 DECL_ARTIFICIAL (var) = 1; |
427 TREE_USED (var) = 1; | 441 TREE_USED (var) = 1; |
437 } | 451 } |
438 else | 452 else |
439 { | 453 { |
440 if (PACK_EXPANSION_P (init)) | 454 if (PACK_EXPANSION_P (init)) |
441 init = PACK_EXPANSION_PATTERN (init); | 455 init = PACK_EXPANSION_PATTERN (init); |
442 if (TREE_CODE (init) == INDIRECT_REF) | |
443 init = TREE_OPERAND (init, 0); | |
444 STRIP_NOPS (init); | |
445 } | 456 } |
457 | |
458 if (INDIRECT_REF_P (init)) | |
459 init = TREE_OPERAND (init, 0); | |
460 STRIP_NOPS (init); | |
461 | |
446 gcc_assert (VAR_P (init) || TREE_CODE (init) == PARM_DECL); | 462 gcc_assert (VAR_P (init) || TREE_CODE (init) == PARM_DECL); |
447 while (is_normal_capture_proxy (init)) | 463 while (is_normal_capture_proxy (init)) |
448 init = DECL_CAPTURED_VARIABLE (init); | 464 init = DECL_CAPTURED_VARIABLE (init); |
449 retrofit_lang_decl (var); | 465 retrofit_lang_decl (var); |
450 DECL_CAPTURED_VARIABLE (var) = init; | 466 DECL_CAPTURED_VARIABLE (var) = init; |
536 type = vla_capture_type (type); | 552 type = vla_capture_type (type); |
537 } | 553 } |
538 else if (!dependent_type_p (type) | 554 else if (!dependent_type_p (type) |
539 && variably_modified_type_p (type, NULL_TREE)) | 555 && variably_modified_type_p (type, NULL_TREE)) |
540 { | 556 { |
541 error ("capture of variable-size type %qT that is not an N3639 array " | 557 sorry ("capture of variably-modified type %qT that is not an N3639 array " |
542 "of runtime bound", type); | 558 "of runtime bound", type); |
543 if (TREE_CODE (type) == ARRAY_TYPE | 559 if (TREE_CODE (type) == ARRAY_TYPE |
544 && variably_modified_type_p (TREE_TYPE (type), NULL_TREE)) | 560 && variably_modified_type_p (TREE_TYPE (type), NULL_TREE)) |
545 inform (input_location, "because the array element type %qT has " | 561 inform (input_location, "because the array element type %qT has " |
546 "variable size", TREE_TYPE (type)); | 562 "variable size", TREE_TYPE (type)); |
547 type = error_mark_node; | 563 return error_mark_node; |
548 } | 564 } |
549 else | 565 else |
550 { | 566 { |
551 type = lambda_capture_field_type (initializer, explicit_init_p, | 567 type = lambda_capture_field_type (initializer, explicit_init_p, |
552 by_reference_p); | 568 by_reference_p); |
553 if (type == error_mark_node) | 569 if (type == error_mark_node) |
554 return error_mark_node; | 570 return error_mark_node; |
555 | 571 |
556 if (id == this_identifier && !by_reference_p) | 572 if (id == this_identifier && !by_reference_p) |
557 { | 573 { |
558 gcc_assert (POINTER_TYPE_P (type)); | 574 gcc_assert (INDIRECT_TYPE_P (type)); |
559 type = TREE_TYPE (type); | 575 type = TREE_TYPE (type); |
560 initializer = cp_build_indirect_ref (initializer, RO_NULL, | 576 initializer = cp_build_fold_indirect_ref (initializer); |
561 tf_warning_or_error); | |
562 } | 577 } |
563 | 578 |
564 if (dependent_type_p (type)) | 579 if (dependent_type_p (type)) |
565 ; | 580 ; |
566 else if (id != this_identifier && by_reference_p) | 581 else if (id != this_identifier && by_reference_p) |
646 | 661 |
647 if (LAMBDA_EXPR_CLOSURE (lambda)) | 662 if (LAMBDA_EXPR_CLOSURE (lambda)) |
648 return build_capture_proxy (member, initializer); | 663 return build_capture_proxy (member, initializer); |
649 /* For explicit captures we haven't started the function yet, so we wait | 664 /* For explicit captures we haven't started the function yet, so we wait |
650 and build the proxy from cp_parser_lambda_body. */ | 665 and build the proxy from cp_parser_lambda_body. */ |
666 LAMBDA_CAPTURE_EXPLICIT_P (LAMBDA_EXPR_CAPTURE_LIST (lambda)) = true; | |
651 return NULL_TREE; | 667 return NULL_TREE; |
652 } | 668 } |
653 | 669 |
654 /* Register all the capture members on the list CAPTURES, which is the | 670 /* Register all the capture members on the list CAPTURES, which is the |
655 LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer. */ | 671 LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer. */ |
677 | 693 |
678 tree | 694 tree |
679 add_default_capture (tree lambda_stack, tree id, tree initializer) | 695 add_default_capture (tree lambda_stack, tree id, tree initializer) |
680 { | 696 { |
681 bool this_capture_p = (id == this_identifier); | 697 bool this_capture_p = (id == this_identifier); |
682 | |
683 tree var = NULL_TREE; | 698 tree var = NULL_TREE; |
684 | |
685 tree saved_class_type = current_class_type; | 699 tree saved_class_type = current_class_type; |
686 | 700 |
687 tree node; | 701 for (tree node = lambda_stack; |
688 | |
689 for (node = lambda_stack; | |
690 node; | 702 node; |
691 node = TREE_CHAIN (node)) | 703 node = TREE_CHAIN (node)) |
692 { | 704 { |
693 tree lambda = TREE_VALUE (node); | 705 tree lambda = TREE_VALUE (node); |
694 | 706 |
702 (this_capture_p | 714 (this_capture_p |
703 || (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) | 715 || (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) |
704 == CPLD_REFERENCE)), | 716 == CPLD_REFERENCE)), |
705 /*explicit_init_p=*/false); | 717 /*explicit_init_p=*/false); |
706 initializer = convert_from_reference (var); | 718 initializer = convert_from_reference (var); |
719 | |
720 /* Warn about deprecated implicit capture of this via [=]. */ | |
721 if (cxx_dialect >= cxx2a | |
722 && this_capture_p | |
723 && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) == CPLD_COPY | |
724 && !in_system_header_at (LAMBDA_EXPR_LOCATION (lambda))) | |
725 { | |
726 if (warning_at (LAMBDA_EXPR_LOCATION (lambda), OPT_Wdeprecated, | |
727 "implicit capture of %qE via %<[=]%> is deprecated " | |
728 "in C++20", this_identifier)) | |
729 inform (LAMBDA_EXPR_LOCATION (lambda), "add explicit %<this%> or " | |
730 "%<*this%> capture"); | |
731 } | |
707 } | 732 } |
708 | 733 |
709 current_class_type = saved_class_type; | 734 current_class_type = saved_class_type; |
710 | 735 |
711 return var; | 736 return var; |
725 /* In unevaluated context this isn't an odr-use, so don't capture. */ | 750 /* In unevaluated context this isn't an odr-use, so don't capture. */ |
726 if (cp_unevaluated_operand) | 751 if (cp_unevaluated_operand) |
727 add_capture_p = false; | 752 add_capture_p = false; |
728 | 753 |
729 /* Try to default capture 'this' if we can. */ | 754 /* Try to default capture 'this' if we can. */ |
730 if (!this_capture | 755 if (!this_capture) |
731 && (!add_capture_p | |
732 || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) != CPLD_NONE)) | |
733 { | 756 { |
734 tree lambda_stack = NULL_TREE; | 757 tree lambda_stack = NULL_TREE; |
735 tree init = NULL_TREE; | 758 tree init = NULL_TREE; |
736 | 759 |
737 /* If we are in a lambda function, we can move out until we hit: | 760 /* If we are in a lambda function, we can move out until we hit: |
738 1. a non-lambda function or NSDMI, | 761 1. a non-lambda function or NSDMI, |
739 2. a lambda function capturing 'this', or | 762 2. a lambda function capturing 'this', or |
740 3. a non-default capturing lambda function. */ | 763 3. a non-default capturing lambda function. */ |
741 for (tree tlambda = lambda; ;) | 764 for (tree tlambda = lambda; ;) |
742 { | 765 { |
743 lambda_stack = tree_cons (NULL_TREE, | 766 if (add_capture_p |
744 tlambda, | 767 && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE) |
745 lambda_stack); | 768 /* tlambda won't let us capture 'this'. */ |
746 | 769 break; |
747 if (LAMBDA_EXPR_EXTRA_SCOPE (tlambda) | 770 |
748 && TREE_CODE (LAMBDA_EXPR_EXTRA_SCOPE (tlambda)) == FIELD_DECL) | 771 if (add_capture_p) |
772 lambda_stack = tree_cons (NULL_TREE, | |
773 tlambda, | |
774 lambda_stack); | |
775 | |
776 tree closure = LAMBDA_EXPR_CLOSURE (tlambda); | |
777 tree containing_function | |
778 = decl_function_context (TYPE_NAME (closure)); | |
779 | |
780 tree ex = LAMBDA_EXPR_EXTRA_SCOPE (tlambda); | |
781 if (ex && TREE_CODE (ex) == FIELD_DECL) | |
749 { | 782 { |
750 /* In an NSDMI, we don't have a function to look up the decl in, | 783 /* Lambda in an NSDMI. We don't have a function to look up |
751 but the fake 'this' pointer that we're using for parsing is | 784 'this' in, but we can find (or rebuild) the fake one from |
752 in scope_chain. */ | 785 inject_this_parameter. */ |
753 init = scope_chain->x_current_class_ptr; | 786 if (!containing_function && !COMPLETE_TYPE_P (closure)) |
787 /* If we're parsing a lambda in a non-local class, | |
788 we can find the fake 'this' in scope_chain. */ | |
789 init = scope_chain->x_current_class_ptr; | |
790 else | |
791 /* Otherwise it's either gone or buried in | |
792 function_context_stack, so make another. */ | |
793 init = build_this_parm (NULL_TREE, DECL_CONTEXT (ex), | |
794 TYPE_UNQUALIFIED); | |
754 gcc_checking_assert | 795 gcc_checking_assert |
755 (init && (TREE_TYPE (TREE_TYPE (init)) | 796 (init && (TREE_TYPE (TREE_TYPE (init)) |
756 == current_nonlambda_class_type ())); | 797 == current_nonlambda_class_type ())); |
757 break; | 798 break; |
758 } | 799 } |
759 | |
760 tree closure_decl = TYPE_NAME (LAMBDA_EXPR_CLOSURE (tlambda)); | |
761 tree containing_function = decl_function_context (closure_decl); | |
762 | 800 |
763 if (containing_function == NULL_TREE) | 801 if (containing_function == NULL_TREE) |
764 /* We ran out of scopes; there's no 'this' to capture. */ | 802 /* We ran out of scopes; there's no 'this' to capture. */ |
765 break; | 803 break; |
766 | 804 |
780 { | 818 { |
781 /* An outer lambda has already captured 'this'. */ | 819 /* An outer lambda has already captured 'this'. */ |
782 init = LAMBDA_EXPR_THIS_CAPTURE (tlambda); | 820 init = LAMBDA_EXPR_THIS_CAPTURE (tlambda); |
783 break; | 821 break; |
784 } | 822 } |
785 | |
786 if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE) | |
787 /* An outer lambda won't let us capture 'this'. */ | |
788 break; | |
789 } | 823 } |
790 | 824 |
791 if (init) | 825 if (init) |
792 { | 826 { |
793 if (add_capture_p) | 827 if (add_capture_p) |
827 } | 861 } |
828 | 862 |
829 return result; | 863 return result; |
830 } | 864 } |
831 | 865 |
866 /* Return the innermost LAMBDA_EXPR we're currently in, if any. */ | |
867 | |
868 tree | |
869 current_lambda_expr (void) | |
870 { | |
871 tree type = current_class_type; | |
872 while (type && !LAMBDA_TYPE_P (type)) | |
873 type = decl_type_context (TYPE_NAME (type)); | |
874 if (type) | |
875 return CLASSTYPE_LAMBDA_EXPR (type); | |
876 else | |
877 return NULL_TREE; | |
878 } | |
879 | |
832 /* Return the current LAMBDA_EXPR, if this is a resolvable dummy | 880 /* Return the current LAMBDA_EXPR, if this is a resolvable dummy |
833 object. NULL otherwise.. */ | 881 object. NULL otherwise.. */ |
834 | 882 |
835 static tree | 883 static tree |
836 resolvable_dummy_lambda (tree object) | 884 resolvable_dummy_lambda (tree object) |
843 | 891 |
844 if (type != current_class_type | 892 if (type != current_class_type |
845 && current_class_type | 893 && current_class_type |
846 && LAMBDA_TYPE_P (current_class_type) | 894 && LAMBDA_TYPE_P (current_class_type) |
847 && lambda_function (current_class_type) | 895 && lambda_function (current_class_type) |
848 && DERIVED_FROM_P (type, current_nonlambda_class_type ())) | 896 && DERIVED_FROM_P (type, nonlambda_method_basetype())) |
849 return CLASSTYPE_LAMBDA_EXPR (current_class_type); | 897 return CLASSTYPE_LAMBDA_EXPR (current_class_type); |
850 | 898 |
851 return NULL_TREE; | 899 return NULL_TREE; |
852 } | 900 } |
853 | 901 |
860 maybe_resolve_dummy (tree object, bool add_capture_p) | 908 maybe_resolve_dummy (tree object, bool add_capture_p) |
861 { | 909 { |
862 if (tree lam = resolvable_dummy_lambda (object)) | 910 if (tree lam = resolvable_dummy_lambda (object)) |
863 if (tree cap = lambda_expr_this_capture (lam, add_capture_p)) | 911 if (tree cap = lambda_expr_this_capture (lam, add_capture_p)) |
864 if (cap != error_mark_node) | 912 if (cap != error_mark_node) |
865 object = build_x_indirect_ref (EXPR_LOCATION (object), cap, | 913 object = build_fold_indirect_ref (cap); |
866 RO_NULL, tf_warning_or_error); | |
867 | 914 |
868 return object; | 915 return object; |
869 } | 916 } |
870 | 917 |
871 /* When parsing a generic lambda containing an argument-dependent | 918 /* When parsing a generic lambda containing an argument-dependent |
909 while (fn && LAMBDA_FUNCTION_P (fn)) | 956 while (fn && LAMBDA_FUNCTION_P (fn)) |
910 fn = decl_function_context (fn); | 957 fn = decl_function_context (fn); |
911 return fn; | 958 return fn; |
912 } | 959 } |
913 | 960 |
914 /* Returns the method basetype of the innermost non-lambda function, or | 961 /* Returns the method basetype of the innermost non-lambda function, including |
915 NULL_TREE if none. */ | 962 a hypothetical constructor if inside an NSDMI, or NULL_TREE if none. */ |
916 | 963 |
917 tree | 964 tree |
918 nonlambda_method_basetype (void) | 965 nonlambda_method_basetype (void) |
919 { | 966 { |
920 tree fn, type; | |
921 if (!current_class_ref) | 967 if (!current_class_ref) |
922 return NULL_TREE; | 968 return NULL_TREE; |
923 | 969 |
924 type = current_class_type; | 970 tree type = current_class_type; |
925 if (!LAMBDA_TYPE_P (type)) | 971 if (!type || !LAMBDA_TYPE_P (type)) |
926 return type; | 972 return type; |
927 | 973 |
928 /* Find the nearest enclosing non-lambda function. */ | 974 while (true) |
929 fn = TYPE_NAME (type); | 975 { |
930 do | 976 tree lam = CLASSTYPE_LAMBDA_EXPR (type); |
931 fn = decl_function_context (fn); | 977 tree ex = LAMBDA_EXPR_EXTRA_SCOPE (lam); |
932 while (fn && LAMBDA_FUNCTION_P (fn)); | 978 if (ex && TREE_CODE (ex) == FIELD_DECL) |
933 | 979 /* Lambda in an NSDMI. */ |
934 if (!fn || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn)) | 980 return DECL_CONTEXT (ex); |
935 return NULL_TREE; | 981 |
936 | 982 tree fn = TYPE_CONTEXT (type); |
937 return TYPE_METHOD_BASETYPE (TREE_TYPE (fn)); | 983 if (!fn || TREE_CODE (fn) != FUNCTION_DECL |
984 || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn)) | |
985 /* No enclosing non-lambda method. */ | |
986 return NULL_TREE; | |
987 if (!LAMBDA_FUNCTION_P (fn)) | |
988 /* Found an enclosing non-lambda method. */ | |
989 return TYPE_METHOD_BASETYPE (TREE_TYPE (fn)); | |
990 type = DECL_CONTEXT (fn); | |
991 } | |
938 } | 992 } |
939 | 993 |
940 /* Like current_scope, but looking through lambdas. */ | 994 /* Like current_scope, but looking through lambdas. */ |
941 | 995 |
942 tree | 996 tree |
985 return (LAMBDA_FUNCTION_P (callop) | 1039 return (LAMBDA_FUNCTION_P (callop) |
986 && DECL_TEMPLATE_INFO (callop) | 1040 && DECL_TEMPLATE_INFO (callop) |
987 && PRIMARY_TEMPLATE_P (DECL_TI_TEMPLATE (callop))); | 1041 && PRIMARY_TEMPLATE_P (DECL_TI_TEMPLATE (callop))); |
988 } | 1042 } |
989 | 1043 |
990 /* Returns true iff we need to consider default capture for an enclosing | |
991 generic lambda. */ | |
992 | |
993 bool | |
994 need_generic_capture (void) | |
995 { | |
996 if (!processing_template_decl) | |
997 return false; | |
998 | |
999 tree outer_closure = NULL_TREE; | |
1000 for (tree t = current_class_type; t; | |
1001 t = decl_type_context (TYPE_MAIN_DECL (t))) | |
1002 { | |
1003 tree lam = CLASSTYPE_LAMBDA_EXPR (t); | |
1004 if (!lam || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) == CPLD_NONE) | |
1005 /* No default capture. */ | |
1006 break; | |
1007 outer_closure = t; | |
1008 } | |
1009 | |
1010 if (!outer_closure) | |
1011 /* No lambda. */ | |
1012 return false; | |
1013 else if (dependent_type_p (outer_closure)) | |
1014 /* The enclosing context isn't instantiated. */ | |
1015 return false; | |
1016 else | |
1017 return true; | |
1018 } | |
1019 | |
1020 /* A lambda-expression...is said to implicitly capture the entity...if the | |
1021 compound-statement...names the entity in a potentially-evaluated | |
1022 expression where the enclosing full-expression depends on a generic lambda | |
1023 parameter declared within the reaching scope of the lambda-expression. */ | |
1024 | |
1025 static tree | |
1026 dependent_capture_r (tree *tp, int *walk_subtrees, void *data) | |
1027 { | |
1028 hash_set<tree> *pset = (hash_set<tree> *)data; | |
1029 | |
1030 if (TYPE_P (*tp)) | |
1031 *walk_subtrees = 0; | |
1032 | |
1033 if (outer_automatic_var_p (*tp)) | |
1034 { | |
1035 tree t = process_outer_var_ref (*tp, tf_warning_or_error, /*force*/true); | |
1036 if (t != *tp | |
1037 && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE | |
1038 && TREE_CODE (TREE_TYPE (*tp)) != REFERENCE_TYPE) | |
1039 t = convert_from_reference (t); | |
1040 *tp = t; | |
1041 } | |
1042 | |
1043 if (pset->add (*tp)) | |
1044 *walk_subtrees = 0; | |
1045 | |
1046 switch (TREE_CODE (*tp)) | |
1047 { | |
1048 /* Don't walk into unevaluated context or another lambda. */ | |
1049 case SIZEOF_EXPR: | |
1050 case ALIGNOF_EXPR: | |
1051 case TYPEID_EXPR: | |
1052 case NOEXCEPT_EXPR: | |
1053 case LAMBDA_EXPR: | |
1054 *walk_subtrees = 0; | |
1055 break; | |
1056 | |
1057 /* Don't walk into statements whose subexpressions we already | |
1058 handled. */ | |
1059 case TRY_BLOCK: | |
1060 case EH_SPEC_BLOCK: | |
1061 case HANDLER: | |
1062 case IF_STMT: | |
1063 case FOR_STMT: | |
1064 case RANGE_FOR_STMT: | |
1065 case WHILE_STMT: | |
1066 case DO_STMT: | |
1067 case SWITCH_STMT: | |
1068 case STATEMENT_LIST: | |
1069 case RETURN_EXPR: | |
1070 *walk_subtrees = 0; | |
1071 break; | |
1072 | |
1073 case DECL_EXPR: | |
1074 { | |
1075 tree decl = DECL_EXPR_DECL (*tp); | |
1076 if (VAR_P (decl)) | |
1077 { | |
1078 /* walk_tree_1 won't step in here. */ | |
1079 cp_walk_tree (&DECL_INITIAL (decl), | |
1080 dependent_capture_r, &pset, NULL); | |
1081 *walk_subtrees = 0; | |
1082 } | |
1083 } | |
1084 break; | |
1085 | |
1086 default: | |
1087 break; | |
1088 } | |
1089 | |
1090 return NULL_TREE; | |
1091 } | |
1092 | |
1093 tree | |
1094 do_dependent_capture (tree expr, bool force) | |
1095 { | |
1096 if (!need_generic_capture () | |
1097 || (!force && !instantiation_dependent_expression_p (expr))) | |
1098 return expr; | |
1099 | |
1100 hash_set<tree> pset; | |
1101 cp_walk_tree (&expr, dependent_capture_r, &pset, NULL); | |
1102 return expr; | |
1103 } | |
1104 | |
1105 /* If the closure TYPE has a static op(), also add a conversion to function | 1044 /* If the closure TYPE has a static op(), also add a conversion to function |
1106 pointer. */ | 1045 pointer. */ |
1107 | 1046 |
1108 void | 1047 void |
1109 maybe_add_lambda_conv_op (tree type) | 1048 maybe_add_lambda_conv_op (tree type) |
1152 /* Prepare the dependent member call for the static member function | 1091 /* Prepare the dependent member call for the static member function |
1153 '_FUN' and, potentially, prepare another call to be used in a decltype | 1092 '_FUN' and, potentially, prepare another call to be used in a decltype |
1154 return expression for a deduced return call op to allow for simple | 1093 return expression for a deduced return call op to allow for simple |
1155 implementation of the conversion operator. */ | 1094 implementation of the conversion operator. */ |
1156 | 1095 |
1157 tree instance = cp_build_indirect_ref (thisarg, RO_NULL, | 1096 tree instance = cp_build_fold_indirect_ref (thisarg); |
1158 tf_warning_or_error); | |
1159 tree objfn = build_min (COMPONENT_REF, NULL_TREE, | 1097 tree objfn = build_min (COMPONENT_REF, NULL_TREE, |
1160 instance, DECL_NAME (callop), NULL_TREE); | 1098 instance, DECL_NAME (callop), NULL_TREE); |
1161 int nargs = list_length (DECL_ARGUMENTS (callop)) - 1; | 1099 int nargs = list_length (DECL_ARGUMENTS (callop)) - 1; |
1162 | 1100 |
1163 call = prepare_op_call (objfn, nargs); | 1101 call = prepare_op_call (objfn, nargs); |
1218 | 1156 |
1219 src = TREE_CHAIN (src); | 1157 src = TREE_CHAIN (src); |
1220 } | 1158 } |
1221 } | 1159 } |
1222 | 1160 |
1223 | |
1224 if (generic_lambda_p) | 1161 if (generic_lambda_p) |
1225 { | 1162 { |
1226 if (decltype_call) | 1163 if (decltype_call) |
1227 { | 1164 { |
1228 fn_result = finish_decltype_type | 1165 fn_result = finish_decltype_type |
1253 tree rettype = build_pointer_type (stattype); | 1190 tree rettype = build_pointer_type (stattype); |
1254 tree name = make_conv_op_name (rettype); | 1191 tree name = make_conv_op_name (rettype); |
1255 tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST); | 1192 tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST); |
1256 tree fntype = build_method_type_directly (thistype, rettype, void_list_node); | 1193 tree fntype = build_method_type_directly (thistype, rettype, void_list_node); |
1257 tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype); | 1194 tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype); |
1195 SET_DECL_LANGUAGE (convfn, lang_cplusplus); | |
1258 tree fn = convfn; | 1196 tree fn = convfn; |
1259 DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop); | 1197 DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop); |
1260 SET_DECL_ALIGN (fn, MINIMUM_METHOD_BOUNDARY); | 1198 SET_DECL_ALIGN (fn, MINIMUM_METHOD_BOUNDARY); |
1261 SET_OVERLOADED_OPERATOR_CODE (fn, TYPE_EXPR); | |
1262 grokclassfn (type, fn, NO_SPECIAL); | 1199 grokclassfn (type, fn, NO_SPECIAL); |
1263 set_linkage_according_to_type (type, fn); | 1200 set_linkage_according_to_type (type, fn); |
1264 rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof); | 1201 rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof); |
1265 DECL_IN_AGGR_P (fn) = 1; | 1202 DECL_IN_AGGR_P (fn) = 1; |
1266 DECL_ARTIFICIAL (fn) = 1; | 1203 DECL_ARTIFICIAL (fn) = 1; |
1284 return; | 1221 return; |
1285 } | 1222 } |
1286 | 1223 |
1287 /* Now build up the thunk to be returned. */ | 1224 /* Now build up the thunk to be returned. */ |
1288 | 1225 |
1289 name = get_identifier ("_FUN"); | 1226 tree statfn = build_lang_decl (FUNCTION_DECL, fun_identifier, stattype); |
1290 tree statfn = build_lang_decl (FUNCTION_DECL, name, stattype); | 1227 SET_DECL_LANGUAGE (statfn, lang_cplusplus); |
1291 fn = statfn; | 1228 fn = statfn; |
1292 DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop); | 1229 DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop); |
1293 grokclassfn (type, fn, NO_SPECIAL); | 1230 grokclassfn (type, fn, NO_SPECIAL); |
1294 set_linkage_according_to_type (type, fn); | 1231 set_linkage_according_to_type (type, fn); |
1295 rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof); | 1232 rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof); |
1310 | 1247 |
1311 if (generic_lambda_p) | 1248 if (generic_lambda_p) |
1312 fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop)); | 1249 fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop)); |
1313 | 1250 |
1314 if (flag_sanitize & SANITIZE_NULL) | 1251 if (flag_sanitize & SANITIZE_NULL) |
1315 { | 1252 /* Don't UBsan this function; we're deliberately calling op() with a null |
1316 /* Don't UBsan this function; we're deliberately calling op() with a null | 1253 object argument. */ |
1317 object argument. */ | 1254 add_no_sanitize_value (fn, SANITIZE_UNDEFINED); |
1318 add_no_sanitize_value (fn, SANITIZE_UNDEFINED); | |
1319 } | |
1320 | 1255 |
1321 add_method (type, fn, false); | 1256 add_method (type, fn, false); |
1322 | 1257 |
1323 if (nested) | 1258 if (nested) |
1324 push_function_context (); | 1259 push_function_context (); |
1451 { | 1386 { |
1452 LAMBDA_EXPR_EXTRA_SCOPE (lambda) = lambda_scope; | 1387 LAMBDA_EXPR_EXTRA_SCOPE (lambda) = lambda_scope; |
1453 LAMBDA_EXPR_DISCRIMINATOR (lambda) = lambda_count++; | 1388 LAMBDA_EXPR_DISCRIMINATOR (lambda) = lambda_count++; |
1454 } | 1389 } |
1455 | 1390 |
1391 /* This lambda is an instantiation of a lambda in a template default argument | |
1392 that got no LAMBDA_EXPR_EXTRA_SCOPE, so this shouldn't either. But we do | |
1393 need to use and increment the global count to avoid collisions. */ | |
1394 | |
1395 void | |
1396 record_null_lambda_scope (tree lambda) | |
1397 { | |
1398 if (vec_safe_is_empty (lambda_scope_stack)) | |
1399 record_lambda_scope (lambda); | |
1400 else | |
1401 { | |
1402 tree_int *p = lambda_scope_stack->begin(); | |
1403 LAMBDA_EXPR_EXTRA_SCOPE (lambda) = p->t; | |
1404 LAMBDA_EXPR_DISCRIMINATOR (lambda) = p->i++; | |
1405 } | |
1406 gcc_assert (LAMBDA_EXPR_EXTRA_SCOPE (lambda) == NULL_TREE); | |
1407 } | |
1408 | |
1456 void | 1409 void |
1457 finish_lambda_scope (void) | 1410 finish_lambda_scope (void) |
1458 { | 1411 { |
1459 tree_int *p = &lambda_scope_stack->last (); | 1412 tree_int *p = &lambda_scope_stack->last (); |
1460 if (lambda_scope != p->t) | 1413 if (lambda_scope != p->t) |
1482 build_capture_proxy (TREE_PURPOSE (cap), TREE_VALUE (cap)); | 1435 build_capture_proxy (TREE_PURPOSE (cap), TREE_VALUE (cap)); |
1483 | 1436 |
1484 return body; | 1437 return body; |
1485 } | 1438 } |
1486 | 1439 |
1440 /* Subroutine of prune_lambda_captures: CAP is a node in | |
1441 LAMBDA_EXPR_CAPTURE_LIST. Return the variable it captures for which we | |
1442 might optimize away the capture, or NULL_TREE if there is no such | |
1443 variable. */ | |
1444 | |
1445 static tree | |
1446 var_to_maybe_prune (tree cap) | |
1447 { | |
1448 if (LAMBDA_CAPTURE_EXPLICIT_P (cap)) | |
1449 /* Don't prune explicit captures. */ | |
1450 return NULL_TREE; | |
1451 | |
1452 tree mem = TREE_PURPOSE (cap); | |
1453 if (!DECL_P (mem) || !DECL_NORMAL_CAPTURE_P (mem)) | |
1454 /* Packs and init-captures aren't captures of constant vars. */ | |
1455 return NULL_TREE; | |
1456 | |
1457 tree init = TREE_VALUE (cap); | |
1458 if (is_normal_capture_proxy (init)) | |
1459 init = DECL_CAPTURED_VARIABLE (init); | |
1460 if (decl_constant_var_p (init)) | |
1461 return init; | |
1462 | |
1463 return NULL_TREE; | |
1464 } | |
1465 | |
1466 /* walk_tree helper for prune_lambda_captures: Remember which capture proxies | |
1467 for constant variables are actually used in the lambda body. | |
1468 | |
1469 There will always be a DECL_EXPR for the capture proxy; remember it when we | |
1470 see it, but replace it with any other use. */ | |
1471 | |
1472 static tree | |
1473 mark_const_cap_r (tree *t, int *walk_subtrees, void *data) | |
1474 { | |
1475 hash_map<tree,tree*> &const_vars = *(hash_map<tree,tree*>*)data; | |
1476 | |
1477 tree var = NULL_TREE; | |
1478 if (TREE_CODE (*t) == DECL_EXPR) | |
1479 { | |
1480 tree decl = DECL_EXPR_DECL (*t); | |
1481 if (is_constant_capture_proxy (decl)) | |
1482 var = DECL_CAPTURED_VARIABLE (decl); | |
1483 *walk_subtrees = 0; | |
1484 } | |
1485 else if (is_constant_capture_proxy (*t)) | |
1486 var = DECL_CAPTURED_VARIABLE (*t); | |
1487 | |
1488 if (var) | |
1489 { | |
1490 tree *&slot = const_vars.get_or_insert (var); | |
1491 if (!slot || VAR_P (*t)) | |
1492 slot = t; | |
1493 } | |
1494 | |
1495 return NULL_TREE; | |
1496 } | |
1497 | |
/* We're at the end of processing a lambda; go back and remove any captures of
   constant variables for which we've folded away all uses.  BODY is the
   function body of the lambda's call operator.  */

static void
prune_lambda_captures (tree body)
{
  tree lam = current_lambda_expr ();
  if (!LAMBDA_EXPR_CAPTURE_OPTIMIZED (lam))
    /* No uses were optimized away.  */
    return;
  if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) == CPLD_NONE)
    /* No default captures, and we don't prune explicit captures.  */
    return;

  /* Maps each captured constant variable to the sole remaining use of its
     proxy, or to a non-DECL_EXPR use if one survives (see mark_const_cap_r).  */
  hash_map<tree,tree*> const_vars;

  cp_walk_tree_without_duplicates (&body, mark_const_cap_r, &const_vars);

  /* Walk the capture list with pointer-to-pointer cursors so that a node can
     be spliced out of both the capture list and the closure's field list
     in place.  The two lists are assumed to be in the same relative order,
     so FIELDP never needs to rewind.  */
  tree *fieldp = &TYPE_FIELDS (LAMBDA_EXPR_CLOSURE (lam));
  for (tree *capp = &LAMBDA_EXPR_CAPTURE_LIST (lam); *capp; )
    {
      tree cap = *capp;
      if (tree var = var_to_maybe_prune (cap))
	{
	  tree **use = const_vars.get (var);
	  if (use && TREE_CODE (**use) == DECL_EXPR)
	    {
	      /* All uses of this capture were folded away, leaving only the
		 proxy declaration.  */

	      /* Splice the capture out of LAMBDA_EXPR_CAPTURE_LIST.  */
	      *capp = TREE_CHAIN (cap);

	      /* And out of TYPE_FIELDS.  */
	      tree field = TREE_PURPOSE (cap);
	      while (*fieldp != field)
		fieldp = &DECL_CHAIN (*fieldp);
	      *fieldp = DECL_CHAIN (*fieldp);

	      /* And remove the capture proxy declaration.  */
	      **use = void_node;
	      /* *CAPP already points at the next node; don't advance.  */
	      continue;
	    }
	}

      capp = &TREE_CHAIN (cap);
    }
}
1546 | |
1487 void | 1547 void |
1488 finish_lambda_function (tree body) | 1548 finish_lambda_function (tree body) |
1489 { | 1549 { |
1490 finish_function_body (body); | 1550 finish_function_body (body); |
1551 | |
1552 prune_lambda_captures (body); | |
1491 | 1553 |
1492 /* Finish the function and generate code for it if necessary. */ | 1554 /* Finish the function and generate code for it if necessary. */ |
1493 tree fn = finish_function (/*inline_p=*/true); | 1555 tree fn = finish_function (/*inline_p=*/true); |
1494 | 1556 |
1495 /* Only expand if the call op is not a template. */ | 1557 /* Only expand if the call op is not a template. */ |