111
|
1 /* Perform the semantic phase of lambda parsing, i.e., the process of
|
|
2 building tree structure, checking semantic consistency, and
|
|
3 building RTL. These routines are used both during actual parsing
|
|
4 and during the instantiation of template functions.
|
|
5
|
131
|
6 Copyright (C) 1998-2018 Free Software Foundation, Inc.
|
111
|
7
|
|
8 This file is part of GCC.
|
|
9
|
|
10 GCC is free software; you can redistribute it and/or modify it
|
|
11 under the terms of the GNU General Public License as published by
|
|
12 the Free Software Foundation; either version 3, or (at your option)
|
|
13 any later version.
|
|
14
|
|
15 GCC is distributed in the hope that it will be useful, but
|
|
16 WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
|
18 General Public License for more details.
|
|
19
|
|
20 You should have received a copy of the GNU General Public License
|
|
21 along with GCC; see the file COPYING3. If not see
|
|
22 <http://www.gnu.org/licenses/>. */
|
|
23
|
|
24 #include "config.h"
|
|
25 #include "system.h"
|
|
26 #include "coretypes.h"
|
|
27 #include "cp-tree.h"
|
|
28 #include "stringpool.h"
|
|
29 #include "cgraph.h"
|
|
30 #include "tree-iterator.h"
|
|
31 #include "toplev.h"
|
|
32 #include "gimplify.h"
|
|
33
|
|
34 /* Constructor for a lambda expression. */
|
|
35
|
|
36 tree
|
|
37 build_lambda_expr (void)
|
|
38 {
|
|
39 tree lambda = make_node (LAMBDA_EXPR);
|
|
40 LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
|
|
41 LAMBDA_EXPR_CAPTURE_LIST (lambda) = NULL_TREE;
|
|
42 LAMBDA_EXPR_THIS_CAPTURE (lambda) = NULL_TREE;
|
|
43 LAMBDA_EXPR_PENDING_PROXIES (lambda) = NULL;
|
|
44 LAMBDA_EXPR_MUTABLE_P (lambda) = false;
|
|
45 return lambda;
|
|
46 }
|
|
47
|
|
/* Create the closure object for a LAMBDA_EXPR.  Builds an aggregate
   initializer from the capture list and wraps it in a compound literal of
   the closure type.  Returns LAMBDA_EXPR itself unchanged while
   processing a template (or if it is error_mark_node), and
   error_mark_node if any capture field is erroneous.  */

tree
build_lambda_object (tree lambda_expr)
{
  /* Build aggregate constructor call.
     - cp_parser_braced_list
     - cp_parser_functional_cast */
  vec<constructor_elt, va_gc> *elts = NULL;
  tree node, expr, type;
  location_t saved_loc;

  if (processing_template_decl || lambda_expr == error_mark_node)
    return lambda_expr;

  /* Make sure any error messages refer to the lambda-introducer.  */
  saved_loc = input_location;
  input_location = LAMBDA_EXPR_LOCATION (lambda_expr);

  /* Walk the capture list: TREE_PURPOSE is the closure FIELD_DECL,
     TREE_VALUE the initializer expression recorded by add_capture.  */
  for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
       node;
       node = TREE_CHAIN (node))
    {
      tree field = TREE_PURPOSE (node);
      tree val = TREE_VALUE (node);

      if (field == error_mark_node)
        {
          expr = error_mark_node;
          goto out;
        }

      if (TREE_CODE (val) == TREE_LIST)
        val = build_x_compound_expr_from_list (val, ELK_INIT,
                                               tf_warning_or_error);

      if (DECL_P (val))
        mark_used (val);

      /* Mere mortals can't copy arrays with aggregate initialization, so
         do some magic to make it work here.  */
      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
        val = build_array_copy (val);
      else if (DECL_NORMAL_CAPTURE_P (field)
               && !DECL_VLA_CAPTURE_P (field)
               && !TYPE_REF_P (TREE_TYPE (field)))
        {
          /* "the entities that are captured by copy are used to
             direct-initialize each corresponding non-static data
             member of the resulting closure object."

             There's normally no way to express direct-initialization
             from an element of a CONSTRUCTOR, so we build up a special
             TARGET_EXPR to bypass the usual copy-initialization.  */
          val = force_rvalue (val, tf_warning_or_error);
          if (TREE_CODE (val) == TARGET_EXPR)
            TARGET_EXPR_DIRECT_INIT_P (val) = true;
        }

      CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
    }

  expr = build_constructor (init_list_type_node, elts);
  CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;

  /* N2927: "[The closure] class type is not an aggregate."
     But we briefly treat it as an aggregate to make this simpler.  */
  type = LAMBDA_EXPR_CLOSURE (lambda_expr);
  CLASSTYPE_NON_AGGREGATE (type) = 0;
  expr = finish_compound_literal (type, expr, tf_warning_or_error);
  CLASSTYPE_NON_AGGREGATE (type) = 1;

 out:
  /* Restore the location saved above on every exit path.  */
  input_location = saved_loc;
  return expr;
}
|
|
124
|
|
125 /* Return an initialized RECORD_TYPE for LAMBDA.
|
|
126 LAMBDA must have its explicit captures already. */
|
|
127
|
|
128 tree
|
|
129 begin_lambda_type (tree lambda)
|
|
130 {
|
|
131 tree type;
|
|
132
|
|
133 {
|
|
134 /* Unique name. This is just like an unnamed class, but we cannot use
|
|
135 make_anon_name because of certain checks against TYPE_UNNAMED_P. */
|
|
136 tree name;
|
|
137 name = make_lambda_name ();
|
|
138
|
|
139 /* Create the new RECORD_TYPE for this lambda. */
|
|
140 type = xref_tag (/*tag_code=*/record_type,
|
|
141 name,
|
|
142 /*scope=*/ts_lambda,
|
|
143 /*template_header_p=*/false);
|
|
144 if (type == error_mark_node)
|
|
145 return error_mark_node;
|
|
146 }
|
|
147
|
|
148 /* Designate it as a struct so that we can use aggregate initialization. */
|
|
149 CLASSTYPE_DECLARED_CLASS (type) = false;
|
|
150
|
|
151 /* Cross-reference the expression and the type. */
|
|
152 LAMBDA_EXPR_CLOSURE (lambda) = type;
|
|
153 CLASSTYPE_LAMBDA_EXPR (type) = lambda;
|
|
154
|
|
155 /* In C++17, assume the closure is literal; we'll clear the flag later if
|
|
156 necessary. */
|
|
157 if (cxx_dialect >= cxx17)
|
|
158 CLASSTYPE_LITERAL_P (type) = true;
|
|
159
|
|
160 /* Clear base types. */
|
|
161 xref_basetypes (type, /*bases=*/NULL_TREE);
|
|
162
|
|
163 /* Start the class. */
|
|
164 type = begin_class_definition (type);
|
|
165
|
|
166 return type;
|
|
167 }
|
|
168
|
|
169 /* Returns the type to use for the return type of the operator() of a
|
|
170 closure class. */
|
|
171
|
|
172 tree
|
|
173 lambda_return_type (tree expr)
|
|
174 {
|
|
175 if (expr == NULL_TREE)
|
|
176 return void_type_node;
|
|
177 if (type_unknown_p (expr)
|
|
178 || BRACE_ENCLOSED_INITIALIZER_P (expr))
|
|
179 {
|
|
180 cxx_incomplete_type_error (expr, TREE_TYPE (expr));
|
|
181 return error_mark_node;
|
|
182 }
|
|
183 gcc_checking_assert (!type_dependent_expression_p (expr));
|
|
184 return cv_unqualified (type_decays_to (unlowered_expr_type (expr)));
|
|
185 }
|
|
186
|
|
187 /* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
|
|
188 closure type. */
|
|
189
|
|
190 tree
|
|
191 lambda_function (tree lambda)
|
|
192 {
|
|
193 tree type;
|
|
194 if (TREE_CODE (lambda) == LAMBDA_EXPR)
|
|
195 type = LAMBDA_EXPR_CLOSURE (lambda);
|
|
196 else
|
|
197 type = lambda;
|
|
198 gcc_assert (LAMBDA_TYPE_P (type));
|
|
199 /* Don't let debug_tree cause instantiation. */
|
|
200 if (CLASSTYPE_TEMPLATE_INSTANTIATION (type)
|
|
201 && !COMPLETE_OR_OPEN_TYPE_P (type))
|
|
202 return NULL_TREE;
|
131
|
203 lambda = lookup_member (type, call_op_identifier,
|
111
|
204 /*protect=*/0, /*want_type=*/false,
|
|
205 tf_warning_or_error);
|
|
206 if (lambda)
|
|
207 lambda = STRIP_TEMPLATE (get_first_fn (lambda));
|
|
208 return lambda;
|
|
209 }
|
|
210
|
|
/* Returns the type to use for the FIELD_DECL corresponding to the
   capture of EXPR.  EXPLICIT_INIT_P indicates whether this is a
   C++14 init capture, and BY_REFERENCE_P indicates whether we're
   capturing by reference.  For type-dependent expressions a
   DECLTYPE_TYPE placeholder is returned to be resolved at
   instantiation time.  */

tree
lambda_capture_field_type (tree expr, bool explicit_init_p,
                           bool by_reference_p)
{
  tree type;
  bool is_this = is_this_parameter (tree_strip_nop_conversions (expr));

  if (!is_this && type_dependent_expression_p (expr))
    {
      /* Defer the type computation: record EXPR and the capture flavor
         in a DECLTYPE_TYPE so instantiation can redo this choice.  */
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = expr;
      DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
      DECLTYPE_FOR_INIT_CAPTURE (type) = explicit_init_p;
      DECLTYPE_FOR_REF_CAPTURE (type) = by_reference_p;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  else if (!is_this && explicit_init_p)
    {
      /* Init capture: deduce the field type as if declaring
         'auto x = expr' (or 'auto &x = expr').  */
      tree auto_node = make_auto ();

      type = auto_node;
      if (by_reference_p)
        /* Add the reference now, so deduction doesn't lose
           outermost CV qualifiers of EXPR.  */
        type = build_reference_type (type);
      type = do_auto_deduction (type, expr, auto_node);
    }
  else
    {
      /* Plain capture: the non-reference type of EXPR, with a reference
         added for by-reference captures and function captures.  */
      type = non_reference (unlowered_expr_type (expr));

      if (!is_this
          && (by_reference_p || TREE_CODE (type) == FUNCTION_TYPE))
        type = build_reference_type (type);
    }

  return type;
}
|
|
254
|
|
255 /* Returns true iff DECL is a lambda capture proxy variable created by
|
|
256 build_capture_proxy. */
|
|
257
|
|
258 bool
|
|
259 is_capture_proxy (tree decl)
|
|
260 {
|
|
261 return (VAR_P (decl)
|
|
262 && DECL_HAS_VALUE_EXPR_P (decl)
|
|
263 && !DECL_ANON_UNION_VAR_P (decl)
|
131
|
264 && !DECL_DECOMPOSITION_P (decl)
|
111
|
265 && LAMBDA_FUNCTION_P (DECL_CONTEXT (decl)));
|
|
266 }
|
|
267
|
|
/* Returns true iff DECL is a capture proxy for a normal capture
   (i.e. without explicit initializer).  Inspects DECL_VALUE_EXPR,
   which for a proxy refers to the closure's capture field.  */

bool
is_normal_capture_proxy (tree decl)
{
  if (!is_capture_proxy (decl))
    /* It's not a capture proxy.  */
    return false;

  if (variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
    /* VLA capture.  */
    return true;

  /* It is a capture proxy, is it a normal capture?  */
  tree val = DECL_VALUE_EXPR (decl);
  if (val == error_mark_node)
    return true;

  /* 'this' proxies wrap the member access in an ADDR_EXPR; strip it to
     reach the COMPONENT_REF.  */
  if (TREE_CODE (val) == ADDR_EXPR)
    val = TREE_OPERAND (val, 0);
  gcc_assert (TREE_CODE (val) == COMPONENT_REF);
  /* Operand 1 of a COMPONENT_REF is the FIELD_DECL.  */
  val = TREE_OPERAND (val, 1);
  return DECL_NORMAL_CAPTURE_P (val);
}
|
|
293
|
131
|
294 /* Returns true iff DECL is a capture proxy for a normal capture
|
|
295 of a constant variable. */
|
|
296
|
|
297 bool
|
|
298 is_constant_capture_proxy (tree decl)
|
|
299 {
|
|
300 if (is_normal_capture_proxy (decl))
|
|
301 return decl_constant_var_p (DECL_CAPTURED_VARIABLE (decl));
|
|
302 return false;
|
|
303 }
|
|
304
|
111
|
/* VAR is a capture proxy created by build_capture_proxy; add it to the
   current function, which is the operator() for the appropriate lambda.
   The proxy is registered as the local specialization of the captured
   variable, declared in the outermost body block, and given a DECL_EXPR
   there.  */

void
insert_capture_proxy (tree var)
{
  if (is_normal_capture_proxy (var))
    {
      tree cap = DECL_CAPTURED_VARIABLE (var);
      if (CHECKING_P)
        {
          /* Sanity-check: the captured variable is not itself a proxy,
             and any existing specialization belongs to another
             function.  */
          gcc_assert (!is_normal_capture_proxy (cap));
          tree old = retrieve_local_specialization (cap);
          if (old)
            gcc_assert (DECL_CONTEXT (old) != DECL_CONTEXT (var));
        }
      register_local_specialization (var, cap);
    }

  /* Put the capture proxy in the extra body block so that it won't clash
     with a later local variable.  */
  pushdecl_outermost_localscope (var);

  /* And put a DECL_EXPR in the STATEMENT_LIST for the same block.  */
  var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
  /* Index 1 is the statement list of the extra body block — TODO confirm
     this matches the layout pushed by the lambda body parser.  */
  tree stmt_list = (*stmt_list_stack)[1];
  gcc_assert (stmt_list);
  append_to_statement_list_force (var, &stmt_list);
}
|
|
334
|
|
/* We've just finished processing a lambda; if the containing scope is also
   a lambda, insert any capture proxies that were created while processing
   the nested lambda.  The pending vector is released afterwards.  */

void
insert_pending_capture_proxies (void)
{
  tree lam;
  vec<tree, va_gc> *proxies;
  unsigned i;

  if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
    return;

  lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
  proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
  /* Re-evaluate the length each iteration in case insertion grows the
     vector — NOTE(review): presumably defensive; confirm whether
     insert_capture_proxy can append here.  */
  for (i = 0; i < vec_safe_length (proxies); ++i)
    {
      tree var = (*proxies)[i];
      insert_capture_proxy (var);
    }
  release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
  LAMBDA_EXPR_PENDING_PROXIES (lam) = NULL;
}
|
|
359
|
|
/* Given REF, a COMPONENT_REF designating a field in the lambda closure,
   return the type we want the proxy to have: the type of the field itself,
   with added const-qualification if the lambda isn't mutable and the
   capture is by value.  Returns error_mark_node for an erroneous REF;
   returns a DECLTYPE_TYPE placeholder when the field type is still a
   wildcard (dependent), and a pack expansion for pack captures.  */

tree
lambda_proxy_type (tree ref)
{
  tree type;
  if (ref == error_mark_node)
    return error_mark_node;
  if (REFERENCE_REF_P (ref))
    ref = TREE_OPERAND (ref, 0);
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
  type = TREE_TYPE (ref);
  if (!type || WILDCARD_TYPE_P (non_reference (type)))
    {
      /* Dependent field type: defer via DECLTYPE_TYPE so instantiation
         can recompute the proxy type.  */
      type = cxx_make_type (DECLTYPE_TYPE);
      DECLTYPE_TYPE_EXPR (type) = ref;
      DECLTYPE_FOR_LAMBDA_PROXY (type) = true;
      SET_TYPE_STRUCTURAL_EQUALITY (type);
    }
  /* Operand 1 of the COMPONENT_REF is the capture FIELD_DECL.  */
  if (DECL_PACK_P (TREE_OPERAND (ref, 1)))
    type = make_pack_expansion (type);
  return type;
}
|
|
386
|
|
/* MEMBER is a capture field in a lambda closure class.  Now that we're
   inside the operator(), build a placeholder var for future lookups and
   debugging.  INIT is the initializer recorded for the capture; for
   normal captures it is traced back to the originally captured
   variable, which is stored in DECL_CAPTURED_VARIABLE.  Returns the
   new proxy VAR_DECL.  */

static tree
build_capture_proxy (tree member, tree init)
{
  tree var, object, fn, closure, name, lam, type;

  if (PACK_EXPANSION_P (member))
    member = PACK_EXPANSION_PATTERN (member);

  closure = DECL_CONTEXT (member);
  fn = lambda_function (closure);
  lam = CLASSTYPE_LAMBDA_EXPR (closure);

  /* The proxy variable forwards to the capture field.  */
  object = build_fold_indirect_ref (DECL_ARGUMENTS (fn));
  object = finish_non_static_data_member (member, object, NULL_TREE);
  if (REFERENCE_REF_P (object))
    object = TREE_OPERAND (object, 0);

  /* Remove the __ inserted by add_capture.  */
  name = get_identifier (IDENTIFIER_POINTER (DECL_NAME (member)) + 2);

  type = lambda_proxy_type (object);

  if (name == this_identifier && !INDIRECT_TYPE_P (type))
    {
      /* *this capture: the proxy for 'this' is a const pointer to the
         captured object.  */
      type = build_pointer_type (type);
      type = cp_build_qualified_type (type, TYPE_QUAL_CONST);
      object = build_fold_addr_expr_with_type (object, type);
    }

  if (DECL_VLA_CAPTURE_P (member))
    {
      /* Rebuild the VLA type from the pointer and maxindex.  */
      tree field = next_initializable_field (TYPE_FIELDS (type));
      tree ptr = build_simple_component_ref (object, field);
      field = next_initializable_field (DECL_CHAIN (field));
      tree max = build_simple_component_ref (object, field);
      type = build_cplus_array_type (TREE_TYPE (TREE_TYPE (ptr)),
                                     build_index_type (max));
      type = build_reference_type (type);
      REFERENCE_VLA_OK (type) = true;
      object = convert (type, ptr);
    }

  complete_type (type);

  var = build_decl (input_location, VAR_DECL, name, type);
  SET_DECL_VALUE_EXPR (var, object);
  DECL_HAS_VALUE_EXPR_P (var) = 1;
  DECL_ARTIFICIAL (var) = 1;
  TREE_USED (var) = 1;
  DECL_CONTEXT (var) = fn;

  if (DECL_NORMAL_CAPTURE_P (member))
    {
      /* Strip INIT down to the underlying VAR_DECL/PARM_DECL so we can
         record the originally captured entity.  */
      if (DECL_VLA_CAPTURE_P (member))
        {
          init = CONSTRUCTOR_ELT (init, 0)->value;
          init = TREE_OPERAND (init, 0); // Strip ADDR_EXPR.
          init = TREE_OPERAND (init, 0); // Strip ARRAY_REF.
        }
      else
        {
          if (PACK_EXPANSION_P (init))
            init = PACK_EXPANSION_PATTERN (init);
        }

      if (INDIRECT_REF_P (init))
        init = TREE_OPERAND (init, 0);
      STRIP_NOPS (init);

      gcc_assert (VAR_P (init) || TREE_CODE (init) == PARM_DECL);
      /* Chase through nested lambdas' proxies to the real variable.  */
      while (is_normal_capture_proxy (init))
        init = DECL_CAPTURED_VARIABLE (init);
      retrofit_lang_decl (var);
      DECL_CAPTURED_VARIABLE (var) = init;
    }

  if (name == this_identifier)
    {
      /* From now on lookups of 'this' in the body find the proxy.  */
      gcc_assert (LAMBDA_EXPR_THIS_CAPTURE (lam) == member);
      LAMBDA_EXPR_THIS_CAPTURE (lam) = var;
    }

  if (fn == current_function_decl)
    insert_capture_proxy (var);
  else
    /* The operator() isn't open yet; defer to
       insert_pending_capture_proxies.  */
    vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);

  return var;
}
|
|
482
|
|
/* Cached identifiers for the two fields of the record built by
   vla_capture_type; GC-rooted and allocated lazily on first use.  */
static GTY(()) tree ptr_id;
static GTY(()) tree max_id;
|
|
485
|
|
/* Return a struct containing a pointer and a length for lambda capture of
   an array of runtime length.  The struct has two fields: "ptr", a
   pointer to ARRAY_TYPE's element type, and "max", of sizetype.  */

static tree
vla_capture_type (tree array_type)
{
  tree type = xref_tag (record_type, make_anon_name (), ts_current, false);
  xref_basetypes (type, NULL_TREE);
  type = begin_class_definition (type);
  /* Lazily create the field identifiers shared by all VLA captures.  */
  if (!ptr_id)
    {
      ptr_id = get_identifier ("ptr");
      max_id = get_identifier ("max");
    }
  tree ptrtype = build_pointer_type (TREE_TYPE (array_type));
  tree field = build_decl (input_location, FIELD_DECL, ptr_id, ptrtype);
  finish_member_declaration (field);
  field = build_decl (input_location, FIELD_DECL, max_id, sizetype);
  finish_member_declaration (field);
  return finish_struct (type, NULL_TREE);
}
|
|
507
|
|
/* From an ID and INITIALIZER, create a capture (by reference if
   BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
   and return it.  If ID is `this', BY_REFERENCE_P says whether
   `*this' is captured by reference.  Returns the capture proxy once the
   closure has been started, NULL_TREE for explicit captures added
   during the introducer (and for duplicates), or error_mark_node on
   error.  */

tree
add_capture (tree lambda, tree id, tree orig_init, bool by_reference_p,
             bool explicit_init_p)
{
  char *buf;
  tree type, member, name;
  bool vla = false;
  bool variadic = false;
  tree initializer = orig_init;

  if (PACK_EXPANSION_P (initializer))
    {
      initializer = PACK_EXPANSION_PATTERN (initializer);
      variadic = true;
    }

  if (TREE_CODE (initializer) == TREE_LIST
      /* A pack expansion might end up with multiple elements.  */
      && !PACK_EXPANSION_P (TREE_VALUE (initializer)))
    initializer = build_x_compound_expr_from_list (initializer, ELK_INIT,
                                                   tf_warning_or_error);
  type = TREE_TYPE (initializer);
  if (type == error_mark_node)
    return error_mark_node;

  if (array_of_runtime_bound_p (type))
    {
      vla = true;
      if (!by_reference_p)
        error ("array of runtime bound cannot be captured by copy, "
               "only by reference");

      /* For a VLA, we capture the address of the first element and the
         maximum index, and then reconstruct the VLA for the proxy.  */
      tree elt = cp_build_array_ref (input_location, initializer,
                                     integer_zero_node, tf_warning_or_error);
      initializer = build_constructor_va (init_list_type_node, 2,
                                          NULL_TREE, build_address (elt),
                                          NULL_TREE, array_type_nelts (type));
      type = vla_capture_type (type);
    }
  else if (!dependent_type_p (type)
           && variably_modified_type_p (type, NULL_TREE))
    {
      /* Other variably-modified types (e.g. pointer to VLA) are not
         supported.  */
      sorry ("capture of variably-modified type %qT that is not an N3639 array "
             "of runtime bound", type);
      if (TREE_CODE (type) == ARRAY_TYPE
          && variably_modified_type_p (TREE_TYPE (type), NULL_TREE))
        inform (input_location, "because the array element type %qT has "
                "variable size", TREE_TYPE (type));
      return error_mark_node;
    }
  else
    {
      type = lambda_capture_field_type (initializer, explicit_init_p,
                                        by_reference_p);
      if (type == error_mark_node)
        return error_mark_node;

      if (id == this_identifier && !by_reference_p)
        {
          /* Capturing '*this' by copy: the field holds the object, not
             the pointer.  */
          gcc_assert (INDIRECT_TYPE_P (type));
          type = TREE_TYPE (type);
          initializer = cp_build_fold_indirect_ref (initializer);
        }

      if (dependent_type_p (type))
        ;
      else if (id != this_identifier && by_reference_p)
        {
          if (!lvalue_p (initializer))
            {
              error ("cannot capture %qE by reference", initializer);
              return error_mark_node;
            }
        }
      else
        {
          /* Capture by copy requires a complete type.  */
          type = complete_type (type);
          if (!COMPLETE_TYPE_P (type))
            {
              error ("capture by copy of incomplete type %qT", type);
              cxx_incomplete_type_inform (type);
              return error_mark_node;
            }
        }
    }

  /* Add __ to the beginning of the field name so that user code
     won't find the field with name lookup.  We can't just leave the name
     unset because template instantiation uses the name to find
     instantiated fields.  */
  buf = (char *) alloca (IDENTIFIER_LENGTH (id) + 3);
  buf[1] = buf[0] = '_';
  memcpy (buf + 2, IDENTIFIER_POINTER (id),
          IDENTIFIER_LENGTH (id) + 1);
  name = get_identifier (buf);

  /* If TREE_TYPE isn't set, we're still in the introducer, so check
     for duplicates.  */
  if (!LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (IDENTIFIER_MARKED (name))
        {
          pedwarn (input_location, 0,
                   "already captured %qD in lambda expression", id);
          return NULL_TREE;
        }
      /* Cleared again by register_capture_members.  */
      IDENTIFIER_MARKED (name) = true;
    }

  if (variadic)
    type = make_pack_expansion (type);

  /* Make member variable.  */
  member = build_decl (input_location, FIELD_DECL, name, type);
  DECL_VLA_CAPTURE_P (member) = vla;

  if (!explicit_init_p)
    /* Normal captures are invisible to name lookup but uses are replaced
       with references to the capture field; we implement this by only
       really making them invisible in unevaluated context; see
       qualify_lookup.  For now, let's make explicitly initialized captures
       always visible.  */
    DECL_NORMAL_CAPTURE_P (member) = true;

  if (id == this_identifier)
    LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;

  /* Add it to the appropriate closure class if we've started it.  */
  if (current_class_type
      && current_class_type == LAMBDA_EXPR_CLOSURE (lambda))
    {
      if (COMPLETE_TYPE_P (current_class_type))
        internal_error ("trying to capture %qD in instantiation of "
                        "generic lambda", id);
      finish_member_declaration (member);
    }

  tree listmem = member;
  if (variadic)
    {
      listmem = make_pack_expansion (member);
      initializer = orig_init;
    }
  LAMBDA_EXPR_CAPTURE_LIST (lambda)
    = tree_cons (listmem, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));

  if (LAMBDA_EXPR_CLOSURE (lambda))
    return build_capture_proxy (member, initializer);
  /* For explicit captures we haven't started the function yet, so we wait
     and build the proxy from cp_parser_lambda_body.  */
  LAMBDA_CAPTURE_EXPLICIT_P (LAMBDA_EXPR_CAPTURE_LIST (lambda)) = true;
  return NULL_TREE;
}
|
|
669
|
|
670 /* Register all the capture members on the list CAPTURES, which is the
|
|
671 LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer. */
|
|
672
|
|
673 void
|
|
674 register_capture_members (tree captures)
|
|
675 {
|
|
676 if (captures == NULL_TREE)
|
|
677 return;
|
|
678
|
|
679 register_capture_members (TREE_CHAIN (captures));
|
|
680
|
|
681 tree field = TREE_PURPOSE (captures);
|
|
682 if (PACK_EXPANSION_P (field))
|
|
683 field = PACK_EXPANSION_PATTERN (field);
|
|
684
|
|
685 /* We set this in add_capture to avoid duplicates. */
|
|
686 IDENTIFIER_MARKED (DECL_NAME (field)) = false;
|
|
687 finish_member_declaration (field);
|
|
688 }
|
|
689
|
|
/* Similar to add_capture, except this works on a stack of nested lambdas.
   BY_REFERENCE_P in this case is derived from the default capture mode.
   Returns the capture for the lambda at the bottom of the stack.
   Temporarily retargets current_class_type at each closure so
   add_capture installs the field in the right class.  */

tree
add_default_capture (tree lambda_stack, tree id, tree initializer)
{
  bool this_capture_p = (id == this_identifier);
  tree var = NULL_TREE;
  tree saved_class_type = current_class_type;

  /* Walk from the outermost lambda inward, capturing into each closure
     in turn; each iteration's capture becomes the next initializer.  */
  for (tree node = lambda_stack;
       node;
       node = TREE_CHAIN (node))
    {
      tree lambda = TREE_VALUE (node);

      current_class_type = LAMBDA_EXPR_CLOSURE (lambda);
      if (DECL_PACK_P (initializer))
        initializer = make_pack_expansion (initializer);
      var = add_capture (lambda,
                         id,
                         initializer,
                         /*by_reference_p=*/
                         (this_capture_p
                          || (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
                              == CPLD_REFERENCE)),
                         /*explicit_init_p=*/false);
      initializer = convert_from_reference (var);

      /* Warn about deprecated implicit capture of this via [=].  */
      if (cxx_dialect >= cxx2a
          && this_capture_p
          && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) == CPLD_COPY
          && !in_system_header_at (LAMBDA_EXPR_LOCATION (lambda)))
        {
          if (warning_at (LAMBDA_EXPR_LOCATION (lambda), OPT_Wdeprecated,
                          "implicit capture of %qE via %<[=]%> is deprecated "
                          "in C++20", this_identifier))
            inform (LAMBDA_EXPR_LOCATION (lambda), "add explicit %<this%> or "
                    "%<*this%> capture");
        }
    }

  current_class_type = saved_class_type;

  return var;
}
|
|
738
|
|
/* Return the capture pertaining to a use of 'this' in LAMBDA, in the
   form of an INDIRECT_REF, possibly adding it through default
   capturing, if ADD_CAPTURE_P is true.  Returns NULL_TREE when 'this'
   is unavailable and we aren't required to capture, and
   error_mark_node (after a diagnostic) when a required capture is
   impossible.  */

tree
lambda_expr_this_capture (tree lambda, bool add_capture_p)
{
  tree result;

  tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);

  /* In unevaluated context this isn't an odr-use, so don't capture.  */
  if (cp_unevaluated_operand)
    add_capture_p = false;

  /* Try to default capture 'this' if we can.  */
  if (!this_capture)
    {
      /* LAMBDA_STACK accumulates the enclosing lambdas that must each
         capture 'this'; INIT is the expression for 'this' found in the
         first enclosing non-lambda scope.  */
      tree lambda_stack = NULL_TREE;
      tree init = NULL_TREE;

      /* If we are in a lambda function, we can move out until we hit:
         1. a non-lambda function or NSDMI,
         2. a lambda function capturing 'this', or
         3. a non-default capturing lambda function.  */
      for (tree tlambda = lambda; ;)
        {
          if (add_capture_p
              && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
            /* tlambda won't let us capture 'this'.  */
            break;

          if (add_capture_p)
            lambda_stack = tree_cons (NULL_TREE,
                                      tlambda,
                                      lambda_stack);

          tree closure = LAMBDA_EXPR_CLOSURE (tlambda);
          tree containing_function
            = decl_function_context (TYPE_NAME (closure));

          tree ex = LAMBDA_EXPR_EXTRA_SCOPE (tlambda);
          if (ex && TREE_CODE (ex) == FIELD_DECL)
            {
              /* Lambda in an NSDMI.  We don't have a function to look up
                 'this' in, but we can find (or rebuild) the fake one from
                 inject_this_parameter.  */
              if (!containing_function && !COMPLETE_TYPE_P (closure))
                /* If we're parsing a lambda in a non-local class,
                   we can find the fake 'this' in scope_chain.  */
                init = scope_chain->x_current_class_ptr;
              else
                /* Otherwise it's either gone or buried in
                   function_context_stack, so make another.  */
                init = build_this_parm (NULL_TREE, DECL_CONTEXT (ex),
                                        TYPE_UNQUALIFIED);
              gcc_checking_assert
                (init && (TREE_TYPE (TREE_TYPE (init))
                          == current_nonlambda_class_type ()));
              break;
            }

          if (containing_function == NULL_TREE)
            /* We ran out of scopes; there's no 'this' to capture.  */
            break;

          if (!LAMBDA_FUNCTION_P (containing_function))
            {
              /* We found a non-lambda function.  */
              if (DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function))
                /* First parameter is 'this'.  */
                init = DECL_ARGUMENTS (containing_function);
              break;
            }

          tlambda
            = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));

          if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
            {
              /* An outer lambda has already captured 'this'.  */
              init = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
              break;
            }
        }

      if (init)
        {
          if (add_capture_p)
            this_capture = add_default_capture (lambda_stack,
                                                /*id=*/this_identifier,
                                                init);
          else
            this_capture = init;
        }
    }

  if (cp_unevaluated_operand)
    result = this_capture;
  else if (!this_capture)
    {
      if (add_capture_p)
        {
          error ("%<this%> was not captured for this lambda function");
          result = error_mark_node;
        }
      else
        result = NULL_TREE;
    }
  else
    {
      /* To make sure that current_class_ref is for the lambda.  */
      gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
                  == LAMBDA_EXPR_CLOSURE (lambda));

      result = this_capture;

      /* If 'this' is captured, each use of 'this' is transformed into an
         access to the corresponding unnamed data member of the closure
         type cast (_expr.cast_ 5.4) to the type of 'this'. [ The cast
         ensures that the transformed expression is an rvalue. ] */
      result = rvalue (result);
    }

  return result;
}
|
|
865
|
131
|
866 /* Return the innermost LAMBDA_EXPR we're currently in, if any. */
|
|
867
|
|
868 tree
|
|
869 current_lambda_expr (void)
|
|
870 {
|
|
871 tree type = current_class_type;
|
|
872 while (type && !LAMBDA_TYPE_P (type))
|
|
873 type = decl_type_context (TYPE_NAME (type));
|
|
874 if (type)
|
|
875 return CLASSTYPE_LAMBDA_EXPR (type);
|
|
876 else
|
|
877 return NULL_TREE;
|
|
878 }
|
|
879
|
111
|
880 /* Return the current LAMBDA_EXPR, if this is a resolvable dummy
|
|
881 object. NULL otherwise.. */
|
|
882
|
|
883 static tree
|
|
884 resolvable_dummy_lambda (tree object)
|
|
885 {
|
|
886 if (!is_dummy_object (object))
|
|
887 return NULL_TREE;
|
|
888
|
|
889 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (object));
|
|
890 gcc_assert (!TYPE_PTR_P (type));
|
|
891
|
|
892 if (type != current_class_type
|
|
893 && current_class_type
|
|
894 && LAMBDA_TYPE_P (current_class_type)
|
|
895 && lambda_function (current_class_type)
|
131
|
896 && DERIVED_FROM_P (type, nonlambda_method_basetype()))
|
111
|
897 return CLASSTYPE_LAMBDA_EXPR (current_class_type);
|
|
898
|
|
899 return NULL_TREE;
|
|
900 }
|
|
901
|
|
902 /* We don't want to capture 'this' until we know we need it, i.e. after
|
|
903 overload resolution has chosen a non-static member function. At that
|
|
904 point we call this function to turn a dummy object into a use of the
|
|
905 'this' capture. */
|
|
906
|
|
907 tree
|
|
908 maybe_resolve_dummy (tree object, bool add_capture_p)
|
|
909 {
|
|
910 if (tree lam = resolvable_dummy_lambda (object))
|
|
911 if (tree cap = lambda_expr_this_capture (lam, add_capture_p))
|
|
912 if (cap != error_mark_node)
|
131
|
913 object = build_fold_indirect_ref (cap);
|
111
|
914
|
|
915 return object;
|
|
916 }
|
|
917
|
|
918 /* When parsing a generic lambda containing an argument-dependent
|
|
919 member function call we defer overload resolution to instantiation
|
|
920 time. But we have to know now whether to capture this or not.
|
|
921 Do that if FNS contains any non-static fns.
|
|
922 The std doesn't anticipate this case, but I expect this to be the
|
|
923 outcome of discussion. */
|
|
924
|
|
925 void
|
|
926 maybe_generic_this_capture (tree object, tree fns)
|
|
927 {
|
|
928 if (tree lam = resolvable_dummy_lambda (object))
|
|
929 if (!LAMBDA_EXPR_THIS_CAPTURE (lam))
|
|
930 {
|
|
931 /* We've not yet captured, so look at the function set of
|
|
932 interest. */
|
|
933 if (BASELINK_P (fns))
|
|
934 fns = BASELINK_FUNCTIONS (fns);
|
|
935 bool id_expr = TREE_CODE (fns) == TEMPLATE_ID_EXPR;
|
|
936 if (id_expr)
|
|
937 fns = TREE_OPERAND (fns, 0);
|
|
938
|
|
939 for (lkp_iterator iter (fns); iter; ++iter)
|
|
940 if ((!id_expr || TREE_CODE (*iter) == TEMPLATE_DECL)
|
|
941 && DECL_NONSTATIC_MEMBER_FUNCTION_P (*iter))
|
|
942 {
|
|
943 /* Found a non-static member. Capture this. */
|
|
944 lambda_expr_this_capture (lam, true);
|
|
945 break;
|
|
946 }
|
|
947 }
|
|
948 }
|
|
949
|
|
950 /* Returns the innermost non-lambda function. */
|
|
951
|
|
952 tree
|
|
953 current_nonlambda_function (void)
|
|
954 {
|
|
955 tree fn = current_function_decl;
|
|
956 while (fn && LAMBDA_FUNCTION_P (fn))
|
|
957 fn = decl_function_context (fn);
|
|
958 return fn;
|
|
959 }
|
|
960
|
131
|
961 /* Returns the method basetype of the innermost non-lambda function, including
|
|
962 a hypothetical constructor if inside an NSDMI, or NULL_TREE if none. */
|
111
|
963
|
|
964 tree
|
|
965 nonlambda_method_basetype (void)
|
|
966 {
|
|
967 if (!current_class_ref)
|
|
968 return NULL_TREE;
|
|
969
|
131
|
970 tree type = current_class_type;
|
|
971 if (!type || !LAMBDA_TYPE_P (type))
|
111
|
972 return type;
|
|
973
|
131
|
974 while (true)
|
|
975 {
|
|
976 tree lam = CLASSTYPE_LAMBDA_EXPR (type);
|
|
977 tree ex = LAMBDA_EXPR_EXTRA_SCOPE (lam);
|
|
978 if (ex && TREE_CODE (ex) == FIELD_DECL)
|
|
979 /* Lambda in an NSDMI. */
|
|
980 return DECL_CONTEXT (ex);
|
111
|
981
|
131
|
982 tree fn = TYPE_CONTEXT (type);
|
|
983 if (!fn || TREE_CODE (fn) != FUNCTION_DECL
|
|
984 || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn))
|
|
985 /* No enclosing non-lambda method. */
|
|
986 return NULL_TREE;
|
|
987 if (!LAMBDA_FUNCTION_P (fn))
|
|
988 /* Found an enclosing non-lambda method. */
|
|
989 return TYPE_METHOD_BASETYPE (TREE_TYPE (fn));
|
|
990 type = DECL_CONTEXT (fn);
|
|
991 }
|
111
|
992 }
|
|
993
|
|
994 /* Like current_scope, but looking through lambdas. */
|
|
995
|
|
996 tree
|
|
997 current_nonlambda_scope (void)
|
|
998 {
|
|
999 tree scope = current_scope ();
|
|
1000 for (;;)
|
|
1001 {
|
|
1002 if (TREE_CODE (scope) == FUNCTION_DECL
|
|
1003 && LAMBDA_FUNCTION_P (scope))
|
|
1004 {
|
|
1005 scope = CP_TYPE_CONTEXT (DECL_CONTEXT (scope));
|
|
1006 continue;
|
|
1007 }
|
|
1008 else if (LAMBDA_TYPE_P (scope))
|
|
1009 {
|
|
1010 scope = CP_TYPE_CONTEXT (scope);
|
|
1011 continue;
|
|
1012 }
|
|
1013 break;
|
|
1014 }
|
|
1015 return scope;
|
|
1016 }
|
|
1017
|
|
1018 /* Helper function for maybe_add_lambda_conv_op; build a CALL_EXPR with
|
|
1019 indicated FN and NARGS, but do not initialize the return type or any of the
|
|
1020 argument slots. */
|
|
1021
|
|
1022 static tree
|
|
1023 prepare_op_call (tree fn, int nargs)
|
|
1024 {
|
|
1025 tree t;
|
|
1026
|
|
1027 t = build_vl_exp (CALL_EXPR, nargs + 3);
|
|
1028 CALL_EXPR_FN (t) = fn;
|
|
1029 CALL_EXPR_STATIC_CHAIN (t) = NULL;
|
|
1030
|
|
1031 return t;
|
|
1032 }
|
|
1033
|
|
1034 /* Return true iff CALLOP is the op() for a generic lambda. */
|
|
1035
|
|
1036 bool
|
|
1037 generic_lambda_fn_p (tree callop)
|
|
1038 {
|
|
1039 return (LAMBDA_FUNCTION_P (callop)
|
|
1040 && DECL_TEMPLATE_INFO (callop)
|
|
1041 && PRIMARY_TEMPLATE_P (DECL_TI_TEMPLATE (callop)));
|
|
1042 }
|
|
1043
|
|
/* If the closure TYPE has a static op(), also add a conversion to function
   pointer.  */

void
maybe_add_lambda_conv_op (tree type)
{
  /* NESTED: we're currently inside a function, so function contexts must
     be pushed/popped around the bodies built below.  NESTED_DEF: the
     lambda itself was defined inside a function, which lets us mark the
     new members' interfaces as known.  */
  bool nested = (cfun != NULL);
  bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
  tree callop = lambda_function (type);
  tree lam = CLASSTYPE_LAMBDA_EXPR (type);

  /* Only capture-less lambdas get the conversion to function pointer.  */
  if (LAMBDA_EXPR_CAPTURE_LIST (lam) != NULL_TREE
      || LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) != CPLD_NONE)
    return;

  if (processing_template_decl)
    return;

  bool const generic_lambda_p = generic_lambda_fn_p (callop);

  if (!generic_lambda_p && DECL_INITIAL (callop) == NULL_TREE)
    {
      /* If the op() wasn't instantiated due to errors, give up.  */
      gcc_assert (errorcount || sorrycount);
      return;
    }

  /* Non-template conversion operators are defined directly with build_call_a
     and using DIRECT_ARGVEC for arguments (including 'this').  Templates are
     deferred and the CALL is built in-place.  In the case of a deduced return
     call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
     the return type is also built in-place.  The arguments of DECLTYPE_CALL in
     the return expression may differ in flags from those in the body CALL.  In
     particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
     the body CALL, but not in DECLTYPE_CALL.  */

  vec<tree, va_gc> *direct_argvec = 0;
  tree decltype_call = 0, call = 0;
  tree optype = TREE_TYPE (callop);
  tree fn_result = TREE_TYPE (optype);

  /* The 'this' argument for the call to op() is a null closure pointer;
     with no captures there is nothing to dereference through it (see the
     no-sanitize note below).  */
  tree thisarg = build_nop (TREE_TYPE (DECL_ARGUMENTS (callop)),
			    null_pointer_node);
  if (generic_lambda_p)
    {
      ++processing_template_decl;

      /* Prepare the dependent member call for the static member function
	 '_FUN' and, potentially, prepare another call to be used in a decltype
	 return expression for a deduced return call op to allow for simple
	 implementation of the conversion operator.  */

      tree instance = cp_build_fold_indirect_ref (thisarg);
      tree objfn = build_min (COMPONENT_REF, NULL_TREE,
			      instance, DECL_NAME (callop), NULL_TREE);
      int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;

      call = prepare_op_call (objfn, nargs);
      if (type_uses_auto (fn_result))
	decltype_call = prepare_op_call (objfn, nargs);
    }
  else
    {
      direct_argvec = make_tree_vector ();
      direct_argvec->quick_push (thisarg);
    }

  /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
     declare the static member function "_FUN" below.  For each arg append to
     DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
     call args (for the template case).  If a parameter pack is found, expand
     it, flagging it as PACK_EXPANSION_LOCAL_P for the body call.  */

  tree fn_args = NULL_TREE;
  {
    int ix = 0;
    /* Skip op()'s 'this' parm: _FUN mirrors only the real arguments.  */
    tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
    tree tgt = NULL;

    while (src)
      {
	tree new_node = copy_node (src);

	/* Append NEW_NODE to the FN_ARGS chain, keeping TGT at the tail.  */
	if (!fn_args)
	  fn_args = tgt = new_node;
	else
	  {
	    TREE_CHAIN (tgt) = new_node;
	    tgt = new_node;
	  }

	mark_exp_read (tgt);

	if (generic_lambda_p)
	  {
	    /* Avoid capturing variables in this context.  */
	    ++cp_unevaluated_operand;
	    tree a = forward_parm (tgt);
	    --cp_unevaluated_operand;

	    CALL_EXPR_ARG (call, ix) = a;
	    if (decltype_call)
	      CALL_EXPR_ARG (decltype_call, ix) = unshare_expr (a);

	    if (PACK_EXPANSION_P (a))
	      /* Set this after unsharing so it's not in decltype_call.  */
	      PACK_EXPANSION_LOCAL_P (a) = true;

	    ++ix;
	  }
	else
	  vec_safe_push (direct_argvec, tgt);

	src = TREE_CHAIN (src);
      }
  }

  if (generic_lambda_p)
    {
      if (decltype_call)
	{
	  /* Substitute the decltype of the deferred call for the deduced
	     return type.  */
	  fn_result = finish_decltype_type
	    (decltype_call, /*id_expression_or_member_access_p=*/false,
	     tf_warning_or_error);
	}
    }
  else
    call = build_call_a (callop,
			 direct_argvec->length (),
			 direct_argvec->address ());

  CALL_FROM_THUNK_P (call) = 1;
  SET_EXPR_LOCATION (call, UNKNOWN_LOCATION);

  /* The type of "_FUN": op()'s signature as a plain (non-member) function,
     carrying over op()'s attributes and, if applicable, its noexcept.  */
  tree stattype = build_function_type (fn_result, FUNCTION_ARG_CHAIN (callop));
  stattype = (cp_build_type_attribute_variant
	      (stattype, TYPE_ATTRIBUTES (optype)));
  if (flag_noexcept_type
      && TYPE_NOTHROW_P (TREE_TYPE (callop)))
    stattype = build_exception_variant (stattype, noexcept_true_spec);

  if (generic_lambda_p)
    --processing_template_decl;

  /* First build up the conversion op.  */

  tree rettype = build_pointer_type (stattype);
  tree name = make_conv_op_name (rettype);
  tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
  tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
  tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
  SET_DECL_LANGUAGE (convfn, lang_cplusplus);
  tree fn = convfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  SET_DECL_ALIGN (fn, MINIMUM_METHOD_BOUNDARY);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_ARGUMENTS (fn) = build_this_parm (fn, fntype, TYPE_QUAL_CONST);

  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, false);

  /* Generic thunk code fails for varargs; we'll complain in mark_used if
     the conversion op is used.  */
  if (varargs_function_p (callop))
    {
      DECL_DELETED_FN (fn) = 1;
      return;
    }

  /* Now build up the thunk to be returned.  */

  tree statfn = build_lang_decl (FUNCTION_DECL, fun_identifier, stattype);
  SET_DECL_LANGUAGE (statfn, lang_cplusplus);
  fn = statfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, namespace_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_STATIC_FUNCTION_P (fn) = 1;
  DECL_ARGUMENTS (fn) = fn_args;
  for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
    {
      /* Avoid duplicate -Wshadow warnings.  */
      DECL_NAME (arg) = NULL_TREE;
      DECL_CONTEXT (arg) = fn;
    }
  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  if (flag_sanitize & SANITIZE_NULL)
    /* Don't UBsan this function; we're deliberately calling op() with a null
       object argument.  */
    add_no_sanitize_value (fn, SANITIZE_UNDEFINED);

  add_method (type, fn, false);

  if (nested)
    push_function_context ();
  else
    /* Still increment function_depth so that we don't GC in the
       middle of an expression.  */
    ++function_depth;

  /* Generate the body of the thunk.  */

  start_preparsed_function (statfn, NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);
  if (DECL_ONE_ONLY (statfn))
    {
      /* Put the thunk in the same comdat group as the call op.  */
      cgraph_node::get_create (statfn)->add_to_same_comdat_group
	(cgraph_node::get_create (callop));
    }
  tree body = begin_function_body ();
  tree compound_stmt = begin_compound_stmt (0);
  if (!generic_lambda_p)
    {
      set_flags_from_callee (call);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
	call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
    }
  call = convert_from_reference (call);
  finish_return_stmt (call);

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline_p=*/true);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  /* Generate the body of the conversion op.  */

  start_preparsed_function (convfn, NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);
  body = begin_function_body ();
  compound_stmt = begin_compound_stmt (0);

  /* decl_needed_p needs to see that it's used.  */
  TREE_USED (statfn) = 1;
  finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline_p=*/true);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  if (nested)
    pop_function_context ();
  else
    --function_depth;
}
|
|
1316
|
|
1317 /* True if FN is the static function "_FUN" that gets returned from the lambda
|
|
1318 conversion operator. */
|
|
1319
|
|
1320 bool
|
|
1321 lambda_static_thunk_p (tree fn)
|
|
1322 {
|
|
1323 return (fn && TREE_CODE (fn) == FUNCTION_DECL
|
|
1324 && DECL_ARTIFICIAL (fn)
|
|
1325 && DECL_STATIC_FUNCTION_P (fn)
|
|
1326 && LAMBDA_TYPE_P (CP_DECL_CONTEXT (fn)));
|
|
1327 }
|
|
1328
|
|
1329 /* Returns true iff VAL is a lambda-related declaration which should
|
|
1330 be ignored by unqualified lookup. */
|
|
1331
|
|
1332 bool
|
|
1333 is_lambda_ignored_entity (tree val)
|
|
1334 {
|
|
1335 /* Look past normal capture proxies. */
|
|
1336 if (is_normal_capture_proxy (val))
|
|
1337 return true;
|
|
1338
|
|
1339 /* Always ignore lambda fields, their names are only for debugging. */
|
|
1340 if (TREE_CODE (val) == FIELD_DECL
|
|
1341 && CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (val)))
|
|
1342 return true;
|
|
1343
|
|
1344 /* None of the lookups that use qualify_lookup want the op() from the
|
|
1345 lambda; they want the one from the enclosing class. */
|
|
1346 if (TREE_CODE (val) == FUNCTION_DECL && LAMBDA_FUNCTION_P (val))
|
|
1347 return true;
|
|
1348
|
|
1349 return false;
|
|
1350 }
|
|
1351
|
|
/* Lambdas that appear in variable initializer or default argument scope
   get that in their mangling, so we need to record it.  We might as well
   use the count for function and namespace scopes as well.  */
/* The declaration whose scope the next lambda will be recorded against.  */
static GTY(()) tree lambda_scope;
/* Discriminator counter for lambdas within LAMBDA_SCOPE.  */
static GTY(()) int lambda_count;
/* A saved (scope, count) pair for nested lambda scopes.  */
struct GTY(()) tree_int
{
  tree t;
  int i;
};
/* Stack of saved (scope, count) pairs, pushed by start_lambda_scope and
   popped by finish_lambda_scope.  */
static GTY(()) vec<tree_int, va_gc> *lambda_scope_stack;
|
|
1363
|
|
1364 void
|
|
1365 start_lambda_scope (tree decl)
|
|
1366 {
|
|
1367 tree_int ti;
|
|
1368 gcc_assert (decl);
|
|
1369 /* Once we're inside a function, we ignore variable scope and just push
|
|
1370 the function again so that popping works properly. */
|
|
1371 if (current_function_decl && TREE_CODE (decl) == VAR_DECL)
|
|
1372 decl = current_function_decl;
|
|
1373 ti.t = lambda_scope;
|
|
1374 ti.i = lambda_count;
|
|
1375 vec_safe_push (lambda_scope_stack, ti);
|
|
1376 if (lambda_scope != decl)
|
|
1377 {
|
|
1378 /* Don't reset the count if we're still in the same function. */
|
|
1379 lambda_scope = decl;
|
|
1380 lambda_count = 0;
|
|
1381 }
|
|
1382 }
|
|
1383
|
|
/* Record the current lambda scope and discriminator on LAMBDA, then bump
   the discriminator so the next lambda in the same scope mangles
   differently.  */

void
record_lambda_scope (tree lambda)
{
  LAMBDA_EXPR_EXTRA_SCOPE (lambda) = lambda_scope;
  LAMBDA_EXPR_DISCRIMINATOR (lambda) = lambda_count++;
}
|
|
1390
|
131
|
1391 /* This lambda is an instantiation of a lambda in a template default argument
|
|
1392 that got no LAMBDA_EXPR_EXTRA_SCOPE, so this shouldn't either. But we do
|
|
1393 need to use and increment the global count to avoid collisions. */
|
|
1394
|
|
1395 void
|
|
1396 record_null_lambda_scope (tree lambda)
|
|
1397 {
|
|
1398 if (vec_safe_is_empty (lambda_scope_stack))
|
|
1399 record_lambda_scope (lambda);
|
|
1400 else
|
|
1401 {
|
|
1402 tree_int *p = lambda_scope_stack->begin();
|
|
1403 LAMBDA_EXPR_EXTRA_SCOPE (lambda) = p->t;
|
|
1404 LAMBDA_EXPR_DISCRIMINATOR (lambda) = p->i++;
|
|
1405 }
|
|
1406 gcc_assert (LAMBDA_EXPR_EXTRA_SCOPE (lambda) == NULL_TREE);
|
|
1407 }
|
|
1408
|
111
|
1409 void
|
|
1410 finish_lambda_scope (void)
|
|
1411 {
|
|
1412 tree_int *p = &lambda_scope_stack->last ();
|
|
1413 if (lambda_scope != p->t)
|
|
1414 {
|
|
1415 lambda_scope = p->t;
|
|
1416 lambda_count = p->i;
|
|
1417 }
|
|
1418 lambda_scope_stack->pop ();
|
|
1419 }
|
|
1420
|
|
1421 tree
|
|
1422 start_lambda_function (tree fco, tree lambda_expr)
|
|
1423 {
|
|
1424 /* Let the front end know that we are going to be defining this
|
|
1425 function. */
|
|
1426 start_preparsed_function (fco,
|
|
1427 NULL_TREE,
|
|
1428 SF_PRE_PARSED | SF_INCLASS_INLINE);
|
|
1429
|
|
1430 tree body = begin_function_body ();
|
|
1431
|
|
1432 /* Push the proxies for any explicit captures. */
|
|
1433 for (tree cap = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr); cap;
|
|
1434 cap = TREE_CHAIN (cap))
|
|
1435 build_capture_proxy (TREE_PURPOSE (cap), TREE_VALUE (cap));
|
|
1436
|
|
1437 return body;
|
|
1438 }
|
|
1439
|
131
|
1440 /* Subroutine of prune_lambda_captures: CAP is a node in
|
|
1441 LAMBDA_EXPR_CAPTURE_LIST. Return the variable it captures for which we
|
|
1442 might optimize away the capture, or NULL_TREE if there is no such
|
|
1443 variable. */
|
|
1444
|
|
1445 static tree
|
|
1446 var_to_maybe_prune (tree cap)
|
|
1447 {
|
|
1448 if (LAMBDA_CAPTURE_EXPLICIT_P (cap))
|
|
1449 /* Don't prune explicit captures. */
|
|
1450 return NULL_TREE;
|
|
1451
|
|
1452 tree mem = TREE_PURPOSE (cap);
|
|
1453 if (!DECL_P (mem) || !DECL_NORMAL_CAPTURE_P (mem))
|
|
1454 /* Packs and init-captures aren't captures of constant vars. */
|
|
1455 return NULL_TREE;
|
|
1456
|
|
1457 tree init = TREE_VALUE (cap);
|
|
1458 if (is_normal_capture_proxy (init))
|
|
1459 init = DECL_CAPTURED_VARIABLE (init);
|
|
1460 if (decl_constant_var_p (init))
|
|
1461 return init;
|
|
1462
|
|
1463 return NULL_TREE;
|
|
1464 }
|
|
1465
|
|
/* walk_tree helper for prune_lambda_captures: Remember which capture proxies
   for constant variables are actually used in the lambda body.

   There will always be a DECL_EXPR for the capture proxy; remember it when we
   see it, but replace it with any other use.  */

static tree
mark_const_cap_r (tree *t, int *walk_subtrees, void *data)
{
  /* DATA maps each captured constant variable to the location of its
     most relevant use found so far.  */
  hash_map<tree,tree*> &const_vars = *(hash_map<tree,tree*>*)data;

  tree var = NULL_TREE;
  if (TREE_CODE (*t) == DECL_EXPR)
    {
      tree decl = DECL_EXPR_DECL (*t);
      if (is_constant_capture_proxy (decl))
	var = DECL_CAPTURED_VARIABLE (decl);
      /* No need to walk into the declaration itself.  */
      *walk_subtrees = 0;
    }
  else if (is_constant_capture_proxy (*t))
    var = DECL_CAPTURED_VARIABLE (*t);

  if (var)
    {
      /* Record this use; a direct proxy reference (VAR_P) overwrites a
	 previously recorded DECL_EXPR, so a real remaining use wins.  */
      tree *&slot = const_vars.get_or_insert (var);
      if (!slot || VAR_P (*t))
	slot = t;
    }

  return NULL_TREE;
}
|
|
1497
|
|
/* We're at the end of processing a lambda; go back and remove any captures of
   constant variables for which we've folded away all uses.  */

static void
prune_lambda_captures (tree body)
{
  tree lam = current_lambda_expr ();
  if (!LAMBDA_EXPR_CAPTURE_OPTIMIZED (lam))
    /* No uses were optimized away.  */
    return;
  if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) == CPLD_NONE)
    /* No default captures, and we don't prune explicit captures.  */
    return;

  /* Map from captured variable to its surviving use (or the proxy's
     DECL_EXPR if no other use remains); filled in by mark_const_cap_r.  */
  hash_map<tree,tree*> const_vars;

  cp_walk_tree_without_duplicates (&body, mark_const_cap_r, &const_vars);

  /* Walk the capture list and the closure's field list in parallel,
     splicing out every capture whose only remaining use is the proxy's
     own declaration.  */
  tree *fieldp = &TYPE_FIELDS (LAMBDA_EXPR_CLOSURE (lam));
  for (tree *capp = &LAMBDA_EXPR_CAPTURE_LIST (lam); *capp; )
    {
      tree cap = *capp;
      if (tree var = var_to_maybe_prune (cap))
	{
	  tree **use = const_vars.get (var);
	  if (use && TREE_CODE (**use) == DECL_EXPR)
	    {
	      /* All uses of this capture were folded away, leaving only the
		 proxy declaration.  */

	      /* Splice the capture out of LAMBDA_EXPR_CAPTURE_LIST.  */
	      *capp = TREE_CHAIN (cap);

	      /* And out of TYPE_FIELDS.  */
	      tree field = TREE_PURPOSE (cap);
	      while (*fieldp != field)
		fieldp = &DECL_CHAIN (*fieldp);
	      *fieldp = DECL_CHAIN (*fieldp);

	      /* And remove the capture proxy declaration.  */
	      **use = void_node;
	      continue;
	    }
	}

      capp = &TREE_CHAIN (cap);
    }
}
|
|
1546
|
111
|
1547 void
|
|
1548 finish_lambda_function (tree body)
|
|
1549 {
|
|
1550 finish_function_body (body);
|
|
1551
|
131
|
1552 prune_lambda_captures (body);
|
|
1553
|
111
|
1554 /* Finish the function and generate code for it if necessary. */
|
|
1555 tree fn = finish_function (/*inline_p=*/true);
|
|
1556
|
|
1557 /* Only expand if the call op is not a template. */
|
|
1558 if (!DECL_TEMPLATE_INFO (fn))
|
|
1559 expand_or_defer_fn (fn);
|
|
1560 }
|
|
1561
|
|
1562 #include "gt-cp-lambda.h"
|